@durable-streams/client 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +799 -0
- package/dist/index.cjs +1172 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +627 -0
- package/dist/index.d.ts +1072 -0
- package/dist/index.js +1830 -0
- package/dist/index.js.map +1 -0
- package/package.json +46 -0
- package/src/asyncIterableReadableStream.ts +220 -0
- package/src/constants.ts +105 -0
- package/src/error.ts +189 -0
- package/src/fetch.ts +267 -0
- package/src/index.ts +103 -0
- package/src/response.ts +1053 -0
- package/src/sse.ts +130 -0
- package/src/stream-api.ts +284 -0
- package/src/stream.ts +867 -0
- package/src/types.ts +737 -0
- package/src/utils.ts +104 -0
package/README.md
ADDED
@@ -0,0 +1,799 @@

# @durable-streams/client

TypeScript client for the Electric Durable Streams protocol.

## Overview

The Durable Streams client provides two main APIs:

1. **`stream()` function** - A fetch-like read-only API for consuming streams
2. **`DurableStream` class** - A handle for read/write operations on a stream

## Key Features

- **Automatic Batching**: Multiple `append()` calls are automatically batched together when a POST is in-flight, significantly improving throughput for high-frequency writes
- **Streaming Reads**: `stream()` and `DurableStream.stream()` provide rich consumption options (promises, ReadableStreams, subscribers)
- **Resumable**: Offset-based reads let you resume from any point
- **Real-time**: Long-poll and SSE modes for live tailing with catch-up from any offset

## Usage

### Read-only: Using `stream()` (fetch-like API)

The `stream()` function provides a simple, fetch-like interface for reading from streams:

```typescript
import { stream } from "@durable-streams/client"

// Connect and get a StreamResponse
const res = await stream<{ message: string }>({
  url: "https://streams.example.com/my-account/chat/room-1",
  headers: {
    Authorization: `Bearer ${process.env.DS_TOKEN!}`,
  },
  offset: savedOffset, // optional: resume from offset
  live: "auto", // default: behavior driven by consumption method
})

// Accumulate all JSON items until up-to-date
const items = await res.json()
console.log("All items:", items)

// Or stream live with a subscriber
res.subscribeJson(async (batch) => {
  for (const item of batch.items) {
    console.log("item:", item)
    saveOffset(batch.offset) // persist for resumption
  }
})
```

### StreamResponse consumption methods

The `StreamResponse` object returned by `stream()` offers multiple ways to consume data:

```typescript
// Promise helpers (accumulate until first upToDate)
const bytes = await res.body() // Uint8Array
const items = await res.json() // Array<TJson>
const text = await res.text() // string

// ReadableStreams
const byteStream = res.bodyStream() // ReadableStream<Uint8Array>
const jsonStream = res.jsonStream() // ReadableStream<TJson>
const textStream = res.textStream() // ReadableStream<string>

// Subscribers (with backpressure)
const unsubscribe = res.subscribeJson(async (batch) => {
  await processBatch(batch.items)
})
const unsubscribe2 = res.subscribeBytes(async (chunk) => {
  await processBytes(chunk.data)
})
const unsubscribe3 = res.subscribeText(async (chunk) => {
  await processText(chunk.text)
})
```

### Read/Write: Using `DurableStream`

For write operations or when you need a persistent handle:

```typescript
import { DurableStream } from "@durable-streams/client"

// Create a new stream
const handle = await DurableStream.create({
  url: "https://streams.example.com/my-account/chat/room-1",
  headers: {
    Authorization: `Bearer ${process.env.DS_TOKEN!}`,
  },
  contentType: "application/json",
  ttlSeconds: 3600,
})

// Append data
await handle.append(JSON.stringify({ type: "message", text: "Hello" }), {
  seq: "writer-1-000001",
})

// Read using the new stream() API
const res = await handle.stream<{ type: string; text: string }>()
res.subscribeJson(async (batch) => {
  for (const item of batch.items) {
    console.log("message:", item.text)
  }
})
```

### Read from "now" (skip existing data)

```typescript
// HEAD gives you the current tail offset if the server exposes it
const handle = await DurableStream.connect({
  url,
  headers: { Authorization: `Bearer ${token}` },
})
const { offset } = await handle.head()

// Read only new data from that point on
const res = await handle.stream({ offset })
res.subscribeBytes(async (chunk) => {
  console.log("new data:", new TextDecoder().decode(chunk.data))
})
```

### Read catch-up only (no live updates)

```typescript
// Read existing data only, stop when up-to-date
const res = await stream({
  url: "https://streams.example.com/my-stream",
  live: false,
})

const text = await res.text()
console.log("All existing data:", text)
```

## API

### `stream(options): Promise<StreamResponse>`

Creates a fetch-like streaming session:

```typescript
const res = await stream<TJson>({
  url: string | URL, // Stream URL
  headers?: HeadersRecord, // Headers (static or function-based)
  params?: ParamsRecord, // Query params (static or function-based)
  signal?: AbortSignal, // Cancellation
  fetch?: typeof fetch, // Custom fetch implementation
  backoffOptions?: BackoffOptions, // Retry backoff configuration
  offset?: Offset, // Starting offset (default: start of stream)
  live?: LiveMode, // Live mode (default: "auto")
  json?: boolean, // Force JSON mode
  onError?: StreamErrorHandler, // Error handler
})
```
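
For example, a session can be tied to an `AbortController` via `signal` and can carry extra query parameters via `params`. This is a minimal sketch; the URL and the `tenant` param are placeholders:

```typescript
const controller = new AbortController()

const res = await stream<{ message: string }>({
  url: "https://streams.example.com/my-stream",
  params: { tenant: "acme" }, // added to the request query string
  signal: controller.signal, // cancels the session when aborted
})

// Stop the session (and any in-flight request) after 30 seconds
setTimeout(() => controller.abort(), 30_000)

res.subscribeJson(async (batch) => {
  for (const item of batch.items) {
    console.log(item.message)
  }
})
```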

### `DurableStream`

```typescript
class DurableStream {
  readonly url: string
  readonly contentType?: string

  constructor(opts: DurableStreamConstructorOptions)

  // Static methods
  static create(opts: CreateOptions): Promise<DurableStream>
  static connect(opts: DurableStreamOptions): Promise<DurableStream>
  static head(opts: DurableStreamOptions): Promise<HeadResult>
  static delete(opts: DurableStreamOptions): Promise<void>

  // Instance methods
  head(opts?: { signal?: AbortSignal }): Promise<HeadResult>
  create(opts?: CreateOptions): Promise<this>
  delete(opts?: { signal?: AbortSignal }): Promise<void>
  append(
    body: BodyInit | Uint8Array | string,
    opts?: AppendOptions
  ): Promise<void>
  appendStream(
    source: AsyncIterable<Uint8Array | string>,
    opts?: AppendOptions
  ): Promise<void>

  // Fetch-like read API
  stream<TJson>(opts?: StreamOptions): Promise<StreamResponse<TJson>>
}
```

### Live Modes

```typescript
// "auto" (default): behavior driven by consumption method
// - Promise helpers (body/json/text): stop after upToDate
// - Streams/subscribers: continue with long-poll

// false: catch-up only, stop at first upToDate
const res = await stream({ url, live: false })

// "long-poll": explicit long-poll mode for live updates
const res = await stream({ url, live: "long-poll" })

// "sse": explicit SSE mode for live updates
const res = await stream({ url, live: "sse" })
```
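
To make the `"auto"` comments above concrete, the same options behave differently depending on how the response is consumed. A minimal sketch, where `url` and `processBatch` are placeholders:

```typescript
// live: "auto" - the consumption method decides when the session ends

// Promise helper: resolves once the first upToDate is reached
const snapshot = await stream<{ id: number }>({ url, live: "auto" })
const items = await snapshot.json()
console.log("caught up with", items.length, "items")

// Subscriber on another "auto" session: keeps long-polling for new data
const tail = await stream<{ id: number }>({ url, live: "auto" })
tail.subscribeJson(async (batch) => {
  await processBatch(batch.items) // called again whenever new items arrive
})
```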

### Headers and Params

Headers and params support both static values and functions (sync or async) for dynamic values like authentication tokens.

```typescript
// Static headers
{
  headers: {
    Authorization: "Bearer my-token",
    "X-Custom-Header": "value",
  }
}

// Function-based headers (sync)
{
  headers: {
    Authorization: () => `Bearer ${getCurrentToken()}`,
    "X-Tenant-Id": () => getCurrentTenant(),
  }
}

// Async function headers (for refreshing tokens)
{
  headers: {
    Authorization: async () => {
      const token = await refreshToken()
      return `Bearer ${token}`
    }
  }
}

// Mix static and function headers
{
  headers: {
    "X-Static": "always-the-same",
    Authorization: async () => `Bearer ${await getToken()}`,
  }
}

// Query params work the same way
{
  params: {
    tenant: "static-tenant",
    region: () => getCurrentRegion(),
    token: async () => await getSessionToken(),
  }
}
```

### Error Handling

```typescript
import { stream, FetchError, DurableStreamError } from "@durable-streams/client"

const res = await stream({
  url: "https://streams.example.com/my-stream",
  headers: {
    Authorization: "Bearer my-token",
  },
  onError: async (error) => {
    if (error instanceof FetchError) {
      if (error.status === 401) {
        const newToken = await refreshAuthToken()
        return { headers: { Authorization: `Bearer ${newToken}` } }
      }
    }
    if (error instanceof DurableStreamError) {
      console.error(`Stream error: ${error.code}`)
    }
    return {} // Retry with same params
  },
})
```

## StreamResponse Methods

The `StreamResponse` object provides multiple ways to consume stream data. All methods respect the `live` mode setting.

### Promise Helpers

These methods accumulate data until the stream is up-to-date, then resolve.

#### `body(): Promise<Uint8Array>`

Accumulates all bytes until up-to-date.

```typescript
const res = await stream({ url, live: false })
const bytes = await res.body()
console.log("Total bytes:", bytes.length)

// Process as needed
const text = new TextDecoder().decode(bytes)
```

#### `json(): Promise<Array<TJson>>`

Accumulates all JSON items until up-to-date. Only works with JSON content.

```typescript
const res = await stream<{ id: number; name: string }>({
  url,
  live: false,
})
const items = await res.json()

for (const item of items) {
  console.log(`User ${item.id}: ${item.name}`)
}
```

#### `text(): Promise<string>`

Accumulates all text until up-to-date.

```typescript
const res = await stream({ url, live: false })
const text = await res.text()
console.log("Full content:", text)
```

### ReadableStreams

Web Streams API for piping to other streams or using with streaming APIs. ReadableStreams can be consumed using either `getReader()` or `for await...of` syntax.

> **Safari/iOS Compatibility**: The client ensures all returned streams are async-iterable by defining `[Symbol.asyncIterator]` on stream instances when missing. This allows `for await...of` consumption without requiring a global polyfill, while preserving `instanceof ReadableStream` behavior.
>
> **Derived streams**: Streams created via `.pipeThrough()` or similar transformations are NOT automatically patched. Use the exported `asAsyncIterableReadableStream()` helper:
>
> ```typescript
> import { asAsyncIterableReadableStream } from "@durable-streams/client"
>
> const derived = res.bodyStream().pipeThrough(myTransform)
> const iterable = asAsyncIterableReadableStream(derived)
> for await (const chunk of iterable) { ... }
> ```

#### `bodyStream(): ReadableStream<Uint8Array> & AsyncIterable<Uint8Array>`

Raw bytes as a ReadableStream.

**Using `getReader()`:**

```typescript
const res = await stream({ url, live: false })
const readable = res.bodyStream()

const reader = readable.getReader()
while (true) {
  const { done, value } = await reader.read()
  if (done) break
  console.log("Received:", value.length, "bytes")
}
```

**Using `for await...of`:**

```typescript
const res = await stream({ url, live: false })

for await (const chunk of res.bodyStream()) {
  console.log("Received:", chunk.length, "bytes")
}
```

**Piping to a file (Node.js):**

```typescript
import fs from "node:fs"
import { Readable } from "node:stream"
import { pipeline } from "node:stream/promises"

const res = await stream({ url, live: false })
await pipeline(
  Readable.fromWeb(res.bodyStream()),
  fs.createWriteStream("output.bin")
)
```

#### `jsonStream(): ReadableStream<TJson> & AsyncIterable<TJson>`

Individual JSON items as a ReadableStream.

**Using `getReader()`:**

```typescript
const res = await stream<{ id: number }>({ url, live: false })
const readable = res.jsonStream()

const reader = readable.getReader()
while (true) {
  const { done, value } = await reader.read()
  if (done) break
  console.log("Item:", value)
}
```

**Using `for await...of`:**

```typescript
const res = await stream<{ id: number; name: string }>({ url, live: false })

for await (const item of res.jsonStream()) {
  console.log(`User ${item.id}: ${item.name}`)
}
```

#### `textStream(): ReadableStream<string> & AsyncIterable<string>`

Text chunks as a ReadableStream.

**Using `getReader()`:**

```typescript
const res = await stream({ url, live: false })
const readable = res.textStream()

const reader = readable.getReader()
while (true) {
  const { done, value } = await reader.read()
  if (done) break
  console.log("Text chunk:", value)
}
```

**Using `for await...of`:**

```typescript
const res = await stream({ url, live: false })

for await (const text of res.textStream()) {
  console.log("Text chunk:", text)
}
```

**Using with Response API:**

```typescript
// Response bodies must be byte streams, so re-encode the text chunks first
const res = await stream({ url, live: false })
const textResponse = new Response(
  res.textStream().pipeThrough(new TextEncoderStream())
)
const fullText = await textResponse.text()
```

### Subscribers

Subscribers provide callback-based consumption with backpressure: the next chunk isn't fetched until your callback's promise resolves. Each `subscribe*` method returns an unsubscribe function.

#### `subscribeJson(callback): () => void`

Subscribe to JSON batches with metadata. Provides backpressure-aware consumption.

```typescript
const res = await stream<{ event: string }>({ url, live: "auto" })

const unsubscribe = res.subscribeJson(async (batch) => {
  // Process items - next batch waits until this resolves
  for (const item of batch.items) {
    await processEvent(item)
  }
  await saveCheckpoint(batch.offset)
})

// Later: stop receiving updates
setTimeout(() => {
  unsubscribe()
}, 60000)
```

#### `subscribeBytes(callback): () => void`

Subscribe to byte chunks with metadata.

```typescript
const res = await stream({ url, live: "auto" })

const unsubscribe = res.subscribeBytes(async (chunk) => {
  console.log("Received bytes:", chunk.data.length)
  console.log("Offset:", chunk.offset)
  console.log("Up to date:", chunk.upToDate)

  await writeToFile(chunk.data)
  await saveCheckpoint(chunk.offset)
})
```

#### `subscribeText(callback): () => void`

Subscribe to text chunks with metadata.

```typescript
const res = await stream({ url, live: "auto" })

const unsubscribe = res.subscribeText(async (chunk) => {
  console.log("Text:", chunk.text)
  console.log("Offset:", chunk.offset)

  await appendToLog(chunk.text)
})
```

### Lifecycle

#### `cancel(reason?: unknown): void`

Cancel the stream session. Aborts any pending requests.

```typescript
const res = await stream({ url, live: "auto" })

// Start consuming
res.subscribeBytes(async (chunk) => {
  console.log("Chunk:", chunk)
})

// Cancel after 10 seconds
setTimeout(() => {
  res.cancel("Timeout")
}, 10000)
```

#### `closed: Promise<void>`

Promise that resolves when the session is complete or cancelled.

```typescript
const res = await stream({ url, live: false })

// Start consuming in background
const consumer = res.text()

// Wait for completion
await res.closed
console.log("Stream fully consumed")
```

### State Properties

```typescript
const res = await stream({ url })

res.url // The stream URL
res.contentType // Content-Type from response headers
res.live // The live mode ("auto", "long-poll", "sse", or false)
res.startOffset // The starting offset passed to stream()
res.offset // Current offset (updates as data is consumed)
res.cursor // Cursor for collapsing (if provided by server)
res.upToDate // Whether we've caught up to the stream head
```
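
These properties can back a simple checkpoint: drain the catch-up data, then persist `res.offset` so a later session can resume from it. A minimal sketch, where `saveCheckpoint` is a placeholder for your own storage:

```typescript
const res = await stream({ url, live: false })

// Drain the existing data
const text = await res.text()
console.log("up to date:", res.upToDate, "at offset", res.offset)

// Persist the offset; pass it as `offset` next time to resume from here
await saveCheckpoint(res.offset)
```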

---

## DurableStream Methods

### Static Methods

#### `DurableStream.create(opts): Promise<DurableStream>`

Create a new stream on the server.

```typescript
const handle = await DurableStream.create({
  url: "https://streams.example.com/my-stream",
  headers: {
    Authorization: "Bearer my-token",
  },
  contentType: "application/json",
  ttlSeconds: 3600, // Optional: auto-delete after 1 hour
})

await handle.append('{"hello": "world"}')
```

#### `DurableStream.connect(opts): Promise<DurableStream>`

Connect to an existing stream (validates it exists via HEAD).

```typescript
const handle = await DurableStream.connect({
  url: "https://streams.example.com/my-stream",
  headers: {
    Authorization: "Bearer my-token",
  },
})

console.log("Content-Type:", handle.contentType)
```

#### `DurableStream.head(opts): Promise<HeadResult>`

Get stream metadata without creating a handle.

```typescript
const metadata = await DurableStream.head({
  url: "https://streams.example.com/my-stream",
  headers: {
    Authorization: "Bearer my-token",
  },
})

console.log("Offset:", metadata.offset)
console.log("Content-Type:", metadata.contentType)
```

#### `DurableStream.delete(opts): Promise<void>`

Delete a stream without creating a handle.

```typescript
await DurableStream.delete({
  url: "https://streams.example.com/my-stream",
  headers: {
    Authorization: "Bearer my-token",
  },
})
```

### Instance Methods

#### `head(opts?): Promise<HeadResult>`

Get metadata for this stream.

```typescript
const handle = new DurableStream({
  url,
  headers: { Authorization: `Bearer ${token}` },
})
const metadata = await handle.head()

console.log("Current offset:", metadata.offset)
```

#### `create(opts?): Promise<this>`

Create this stream on the server.

```typescript
const handle = new DurableStream({
  url,
  headers: { Authorization: `Bearer ${token}` },
})
await handle.create({
  contentType: "text/plain",
  ttlSeconds: 7200,
})
```

#### `delete(opts?): Promise<void>`

Delete this stream.

```typescript
const handle = new DurableStream({
  url,
  headers: { Authorization: `Bearer ${token}` },
})
await handle.delete()
```

#### `append(body, opts?): Promise<void>`

Append data to the stream. By default, **automatic batching is enabled**: multiple `append()` calls made while a POST is in-flight will be batched together into a single request. This significantly improves throughput for high-frequency writes.

```typescript
const handle = await DurableStream.connect({
  url,
  headers: { Authorization: `Bearer ${token}` },
})

// Append string
await handle.append("Hello, world!")

// Append with sequence number for ordering
await handle.append("Message 1", { seq: "writer-1-001" })
await handle.append("Message 2", { seq: "writer-1-002" })

// For JSON streams, append objects directly (serialized automatically)
await handle.append({ event: "click", x: 100, y: 200 })

// Batching happens automatically - these may be sent in a single request
await Promise.all([
  handle.append({ event: "msg1" }),
  handle.append({ event: "msg2" }),
  handle.append({ event: "msg3" }),
])
```

**Batching behavior** (see the sketch after this list):

- **JSON mode** (`contentType: "application/json"`): Multiple values are sent as a JSON array `[val1, val2, ...]`
- **Byte mode**: Binary data is concatenated
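
A minimal sketch of the JSON-mode case, reusing `handle` from the example above; the exact request boundaries depend on timing, so the body shown in the comments is illustrative:

```typescript
// On a JSON stream (contentType: "application/json"), overlapping appends
// are coalesced: while a POST is in-flight, later values queue up.
await Promise.all([
  handle.append({ event: "msg1" }), // may start a POST immediately
  handle.append({ event: "msg2" }), // queued while that POST is in-flight
  handle.append({ event: "msg3" }), // queued as well
])
// Queued values are then flushed as one request whose body is a JSON array,
// e.g. [{"event":"msg2"},{"event":"msg3"}].
// In byte mode, queued chunks are concatenated into one body instead.
```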

**Disabling batching:**

If you need to ensure each append is sent immediately (e.g., for precise timing or debugging):

```typescript
const handle = new DurableStream({
  url,
  batching: false, // Disable automatic batching
})
```

#### `appendStream(source, opts?): Promise<void>`

Append streaming data from an async iterable or ReadableStream. This method supports piping from any source.

```typescript
const handle = await DurableStream.connect({
  url,
  headers: { Authorization: `Bearer ${token}` },
})

// From async generator
async function* generateData() {
  for (let i = 0; i < 100; i++) {
    yield `Line ${i}\n`
  }
}
await handle.appendStream(generateData())

// From ReadableStream
const readable = new ReadableStream({
  start(controller) {
    controller.enqueue("chunk 1")
    controller.enqueue("chunk 2")
    controller.close()
  },
})
await handle.appendStream(readable)

// Pipe from a fetch response body
const response = await fetch("https://example.com/data")
await handle.appendStream(response.body!)
```

#### `writable(opts?): WritableStream<Uint8Array | string>`

Create a WritableStream that can receive piped data. Useful for stream composition:

```typescript
const handle = await DurableStream.connect({
  url,
  headers: { Authorization: `Bearer ${token}` },
})

// Pipe from any ReadableStream
await someReadableStream.pipeTo(handle.writable())

// Pipe through a transform
const readable = inputStream.pipeThrough(new TextEncoderStream())
await readable.pipeTo(handle.writable())
```

#### `stream(opts?): Promise<StreamResponse>`

Start a read session (same as the standalone `stream()` function).

```typescript
const handle = await DurableStream.connect({
  url,
  headers: { Authorization: `Bearer ${token}` },
})

const res = await handle.stream<{ message: string }>({
  offset: savedOffset,
  live: "auto",
})

res.subscribeJson(async (batch) => {
  for (const item of batch.items) {
    console.log(item.message)
  }
})
```

---

## Types

Key types exported from the package (see the usage sketch after this list):

- `Offset` - Opaque string for stream position
- `StreamResponse` - Response object from stream()
- `ByteChunk` - `{ data: Uint8Array, offset: Offset, upToDate: boolean, cursor?: string }`
- `JsonBatch<T>` - `{ items: T[], offset: Offset, upToDate: boolean, cursor?: string }`
- `TextChunk` - `{ text: string, offset: Offset, upToDate: boolean, cursor?: string }`
- `HeadResult` - Metadata from HEAD requests
- `DurableStreamError` - Protocol-level errors with codes
- `FetchError` - Transport/network errors
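
For orientation, the shapes above compose like this in a typed consumer. A minimal sketch, assuming the types are exported as listed; `persistOffset` and the URL are placeholders:

```typescript
import {
  stream,
  DurableStreamError,
  FetchError,
  type JsonBatch,
  type Offset,
} from "@durable-streams/client"

type ChatMessage = { text: string }

async function persistOffset(offset: Offset): Promise<void> {
  // placeholder: write the opaque offset string to your own storage
}

const res = await stream<ChatMessage>({
  url: "https://streams.example.com/my-stream",
  onError: async (error) => {
    if (error instanceof FetchError) console.error("transport error", error.status)
    if (error instanceof DurableStreamError) console.error("protocol error", error.code)
    return {} // retry with the same params
  },
})

res.subscribeJson(async (batch: JsonBatch<ChatMessage>) => {
  for (const item of batch.items) console.log(item.text)
  await persistOffset(batch.offset) // resume later by passing this as `offset`
})
```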

## License

Apache-2.0