@durable-streams/client 0.1.4 → 0.2.0

This diff shows the changes between two publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in the respective public registry.
package/src/stream-api.ts CHANGED
@@ -14,7 +14,12 @@ import {
14
14
  import { DurableStreamError, FetchBackoffAbortError } from "./error"
15
15
  import { BackoffDefaults, createFetchWithBackoff } from "./fetch"
16
16
  import { StreamResponseImpl } from "./response"
17
- import { handleErrorResponse, resolveHeaders, resolveParams } from "./utils"
17
+ import {
18
+ handleErrorResponse,
19
+ resolveHeaders,
20
+ resolveParams,
21
+ warnIfUsingHttpInBrowser,
22
+ } from "./utils"
18
23
  import type { LiveMode, Offset, StreamOptions, StreamResponse } from "./types"
19
24
 
20
25
  /**
@@ -41,7 +46,7 @@ import type { LiveMode, Offset, StreamOptions, StreamResponse } from "./types"
41
46
  * url,
42
47
  * auth,
43
48
  * offset: savedOffset,
44
- * live: "auto",
49
+ * live: true,
45
50
  * })
46
51
  * live.subscribeJson(async (batch) => {
47
52
  * for (const item of batch.items) {
@@ -119,6 +124,9 @@ async function streamInternal<TJson = unknown>(
119
124
  // Normalize URL
120
125
  const url = options.url instanceof URL ? options.url.toString() : options.url
121
126
 
127
+ // Warn if using HTTP in browser (can cause connection limit issues)
128
+ warnIfUsingHttpInBrowser(url, options.warnOnHttp)
129
+
122
130
  // Build the first request
123
131
  const fetchUrl = new URL(url)
124
132
 
@@ -127,7 +135,8 @@ async function streamInternal<TJson = unknown>(
127
135
  fetchUrl.searchParams.set(OFFSET_QUERY_PARAM, startOffset)
128
136
 
129
137
  // Set live query param for explicit modes
130
- const live: LiveMode = options.live ?? `auto`
138
+ // true means auto-select (no query param, handled by consumption method)
139
+ const live: LiveMode = options.live ?? true
131
140
  if (live === `long-poll` || live === `sse`) {
132
141
  fetchUrl.searchParams.set(LIVE_QUERY_PARAM, live)
133
142
  }
@@ -191,16 +200,20 @@ async function streamInternal<TJson = unknown>(
191
200
  const fetchNext = async (
192
201
  offset: Offset,
193
202
  cursor: string | undefined,
194
- signal: AbortSignal
203
+ signal: AbortSignal,
204
+ resumingFromPause?: boolean
195
205
  ): Promise<Response> => {
196
206
  const nextUrl = new URL(url)
197
207
  nextUrl.searchParams.set(OFFSET_QUERY_PARAM, offset)
198
208
 
199
- // For subsequent requests in auto mode, use long-poll
200
- if (live === `auto` || live === `long-poll`) {
201
- nextUrl.searchParams.set(LIVE_QUERY_PARAM, `long-poll`)
202
- } else if (live === `sse`) {
203
- nextUrl.searchParams.set(LIVE_QUERY_PARAM, `sse`)
209
+ // For subsequent requests, set live mode unless resuming from pause
210
+ // (resuming from pause needs immediate response for UI status)
211
+ if (!resumingFromPause) {
212
+ if (live === `sse`) {
213
+ nextUrl.searchParams.set(LIVE_QUERY_PARAM, `sse`)
214
+ } else if (live === true || live === `long-poll`) {
215
+ nextUrl.searchParams.set(LIVE_QUERY_PARAM, `long-poll`)
216
+ }
204
217
  }
205
218
 
206
219
  if (cursor) {
package/src/stream.ts CHANGED
@@ -11,6 +11,7 @@ import {
11
11
  InvalidSignalError,
12
12
  MissingStreamUrlError,
13
13
  } from "./error"
14
+ import { IdempotentProducer } from "./idempotent-producer"
14
15
  import {
15
16
  SSE_COMPATIBLE_CONTENT_TYPES,
16
17
  STREAM_EXPIRES_AT_HEADER,
@@ -37,6 +38,7 @@ import type {
37
38
  CreateOptions,
38
39
  HeadResult,
39
40
  HeadersRecord,
41
+ IdempotentProducerOptions,
40
42
  MaybePromise,
41
43
  ParamsRecord,
42
44
  StreamErrorHandler,
@@ -49,7 +51,7 @@ import type {
49
51
  * Queued message for batching.
50
52
  */
51
53
  interface QueuedMessage {
52
- data: unknown
54
+ data: Uint8Array | string
53
55
  seq?: string
54
56
  contentType?: string
55
57
  signal?: AbortSignal
@@ -71,10 +73,7 @@ function normalizeContentType(contentType: string | undefined): string {
71
73
  */
72
74
  function isPromiseLike(value: unknown): value is PromiseLike<unknown> {
73
75
  return (
74
- value !== null &&
75
- typeof value === `object` &&
76
- `then` in value &&
77
- typeof (value as PromiseLike<unknown>).then === `function`
76
+ value != null && typeof (value as PromiseLike<unknown>).then === `function`
78
77
  )
79
78
  }
80
79
 
@@ -121,7 +120,7 @@ export interface DurableStreamOptions extends StreamHandleOptions {
121
120
  * });
122
121
  *
123
122
  * // Write data
124
- * await stream.append({ message: "hello" });
123
+ * await stream.append(JSON.stringify({ message: "hello" }));
125
124
  *
126
125
  * // Read with the new API
127
126
  * const res = await stream.stream<{ message: string }>();
@@ -350,23 +349,27 @@ export class DurableStream {
350
349
  * a POST is in-flight will be batched together into a single request.
351
350
  * This significantly improves throughput for high-frequency writes.
352
351
  *
353
- * - `body` may be Uint8Array, string, or any JSON-serializable value (for JSON streams).
354
- * - `body` may also be a Promise that resolves to any of the above types.
352
+ * - `body` must be string or Uint8Array.
353
+ * - For JSON streams, pass pre-serialized JSON strings.
354
+ * - `body` may also be a Promise that resolves to string or Uint8Array.
355
355
  * - Strings are encoded as UTF-8.
356
356
  * - `seq` (if provided) is sent as stream-seq (writer coordination).
357
357
  *
358
358
  * @example
359
359
  * ```typescript
360
- * // Direct value
361
- * await stream.append({ message: "hello" });
360
+ * // JSON stream - pass pre-serialized JSON
361
+ * await stream.append(JSON.stringify({ message: "hello" }));
362
+ *
363
+ * // Byte stream
364
+ * await stream.append("raw text data");
365
+ * await stream.append(new Uint8Array([1, 2, 3]));
362
366
  *
363
367
  * // Promise value - awaited before buffering
364
368
  * await stream.append(fetchData());
365
- * await stream.append(Promise.all([a, b, c]));
366
369
  * ```
367
370
  */
368
371
  async append(
369
- body: BodyInit | Uint8Array | string | unknown,
372
+ body: Uint8Array | string | Promise<Uint8Array | string>,
370
373
  opts?: AppendOptions
371
374
  ): Promise<void> {
372
375
  // Await promises before buffering
@@ -382,7 +385,7 @@ export class DurableStream {
382
385
  * Direct append without batching (used when batching is disabled).
383
386
  */
384
387
  async #appendDirect(
385
- body: BodyInit | Uint8Array | string | unknown,
388
+ body: Uint8Array | string,
386
389
  opts?: AppendOptions
387
390
  ): Promise<void> {
388
391
  const { requestHeaders, fetchUrl } = await this.#buildRequest()
@@ -398,9 +401,11 @@ export class DurableStream {
398
401
  }
399
402
 
400
403
  // For JSON mode, wrap body in array to match protocol (server flattens one level)
404
+ // Input is pre-serialized JSON string
401
405
  const isJson = normalizeContentType(contentType) === `application/json`
402
- const bodyToEncode = isJson ? [body] : body
403
- const encodedBody = encodeBody(bodyToEncode)
406
+ const bodyStr =
407
+ typeof body === `string` ? body : new TextDecoder().decode(body)
408
+ const encodedBody: BodyInit = isJson ? `[${bodyStr}]` : bodyStr
404
409
 
405
410
  const response = await this.#fetchClient(fetchUrl.toString(), {
406
411
  method: `POST`,
@@ -418,7 +423,7 @@ export class DurableStream {
418
423
  * Append with batching - buffers messages and sends them in batches.
419
424
  */
420
425
  async #appendWithBatching(
421
- body: unknown,
426
+ body: Uint8Array | string,
422
427
  opts?: AppendOptions
423
428
  ): Promise<void> {
424
429
  return new Promise<void>((resolve, reject) => {
@@ -511,29 +516,17 @@ export class DurableStream {
511
516
  // For JSON mode: always send as array (server flattens one level)
512
517
  // Single append: [value] → server stores value
513
518
  // Multiple appends: [val1, val2] → server stores val1, val2
514
- const values = batch.map((m) => m.data)
515
- batchedBody = JSON.stringify(values)
519
+ // Input is pre-serialized JSON strings, join them into an array
520
+ const jsonStrings = batch.map((m) =>
521
+ typeof m.data === `string` ? m.data : new TextDecoder().decode(m.data)
522
+ )
523
+ batchedBody = `[${jsonStrings.join(`,`)}]`
516
524
  } else {
517
- // For byte mode: concatenate all chunks
518
- const totalSize = batch.reduce((sum, m) => {
519
- const size =
520
- typeof m.data === `string`
521
- ? new TextEncoder().encode(m.data).length
522
- : (m.data as Uint8Array).length
523
- return sum + size
524
- }, 0)
525
-
526
- const concatenated = new Uint8Array(totalSize)
527
- let offset = 0
528
- for (const msg of batch) {
529
- const bytes =
530
- typeof msg.data === `string`
531
- ? new TextEncoder().encode(msg.data)
532
- : (msg.data as Uint8Array)
533
- concatenated.set(bytes, offset)
534
- offset += bytes.length
535
- }
536
- batchedBody = concatenated
525
+ // For byte mode: concatenate all chunks as a string
526
+ const strings = batch.map((m) =>
527
+ typeof m.data === `string` ? m.data : new TextDecoder().decode(m.data)
528
+ )
529
+ batchedBody = strings.join(``)
537
530
  }
538
531
 
539
532
  // Combine signals: stream-level signal + any per-message signals
@@ -634,6 +627,11 @@ export class DurableStream {
634
627
  * Returns a WritableStream that can be used with `pipeTo()` or
635
628
  * `pipeThrough()` from any ReadableStream source.
636
629
  *
630
+ * Uses IdempotentProducer internally for:
631
+ * - Automatic batching (controlled by lingerMs, maxBatchBytes)
632
+ * - Exactly-once delivery semantics
633
+ * - Streaming writes (doesn't buffer entire content in memory)
634
+ *
637
635
  * @example
638
636
  * ```typescript
639
637
  * // Pipe from fetch response
@@ -643,32 +641,55 @@ export class DurableStream {
643
641
  * // Pipe through a transform
644
642
  * const readable = someStream.pipeThrough(new TextEncoderStream());
645
643
  * await readable.pipeTo(stream.writable());
644
+ *
645
+ * // With custom producer options
646
+ * await source.pipeTo(stream.writable({
647
+ * producerId: "my-producer",
648
+ * lingerMs: 10,
649
+ * maxBatchBytes: 64 * 1024,
650
+ * }));
646
651
  * ```
647
652
  */
648
- writable(opts?: AppendOptions): WritableStream<Uint8Array | string> {
649
- const chunks: Array<Uint8Array | string> = []
650
- const stream = this
653
+ writable(
654
+ opts?: Pick<
655
+ IdempotentProducerOptions,
656
+ `lingerMs` | `maxBatchBytes` | `onError`
657
+ > & {
658
+ producerId?: string
659
+ signal?: AbortSignal
660
+ }
661
+ ): WritableStream<Uint8Array | string> {
662
+ // Generate a random producer ID if not provided
663
+ const producerId =
664
+ opts?.producerId ?? `writable-${crypto.randomUUID().slice(0, 8)}`
665
+
666
+ // Track async errors to surface in close() so pipeTo() rejects on failure
667
+ let writeError: Error | null = null
668
+
669
+ const producer = new IdempotentProducer(this, producerId, {
670
+ autoClaim: true, // Ephemeral producer, auto-claim epoch
671
+ lingerMs: opts?.lingerMs,
672
+ maxBatchBytes: opts?.maxBatchBytes,
673
+ onError: (error) => {
674
+ if (!writeError) writeError = error // Capture first error
675
+ opts?.onError?.(error) // Still call user's handler
676
+ },
677
+ signal: opts?.signal ?? this.#options.signal,
678
+ })
651
679
 
652
680
  return new WritableStream<Uint8Array | string>({
653
681
  write(chunk) {
654
- chunks.push(chunk)
682
+ producer.append(chunk)
655
683
  },
656
684
  async close() {
657
- if (chunks.length > 0) {
658
- // Create a ReadableStream from collected chunks
659
- const readable = new ReadableStream<Uint8Array | string>({
660
- start(controller) {
661
- for (const chunk of chunks) {
662
- controller.enqueue(chunk)
663
- }
664
- controller.close()
665
- },
666
- })
667
- await stream.appendStream(readable, opts)
668
- }
685
+ await producer.flush()
686
+ await producer.close()
687
+ if (writeError) throw writeError // Causes pipeTo() to reject
669
688
  },
670
- abort(reason) {
671
- console.error(`WritableStream aborted:`, reason)
689
+ abort(_reason) {
690
+ producer.close().catch((err) => {
691
+ opts?.onError?.(err) // Report instead of swallowing
692
+ })
672
693
  },
673
694
  })
674
695
  }
package/src/types.ts CHANGED
@@ -62,11 +62,11 @@ export type ParamsRecord = {
62
62
  /**
63
63
  * Live mode for reading from a stream.
64
64
  * - false: Catch-up only, stop at first `upToDate`
65
- * - "auto": Behavior driven by consumption method (default)
65
+ * - true: Auto-select best mode (SSE for JSON streams, long-poll for binary)
66
66
  * - "long-poll": Explicit long-poll mode for live updates
67
67
  * - "sse": Explicit server-sent events for live updates
68
68
  */
69
- export type LiveMode = false | `auto` | `long-poll` | `sse`
69
+ export type LiveMode = boolean | `long-poll` | `sse`
70
70
 
71
71
  // ============================================================================
72
72
  // Stream Options (Read API)
@@ -136,7 +136,7 @@ export interface StreamOptions {
136
136
  /**
137
137
  * Live mode behavior:
138
138
  * - false: Catch-up only, stop at first `upToDate`
139
- * - "auto" (default): Behavior driven by consumption method
139
+ * - true (default): Auto-select best mode (SSE for JSON, long-poll for binary)
140
140
  * - "long-poll": Explicit long-poll mode for live updates
141
141
  * - "sse": Explicit server-sent events for live updates
142
142
  */
@@ -518,6 +518,7 @@ export type DurableStreamErrorCode =
518
518
  | `RATE_LIMITED`
519
519
  | `ALREADY_CONSUMED`
520
520
  | `ALREADY_CLOSED`
521
+ | `PARSE_ERROR`
521
522
  | `UNKNOWN`
522
523
 
523
524
  /**
@@ -659,21 +660,21 @@ export interface StreamResponse<TJson = unknown> {
659
660
  *
660
661
  * Use this for resuming reads after a disconnect or saving checkpoints.
661
662
  */
662
- offset: Offset
663
+ readonly offset: Offset
663
664
 
664
665
  /**
665
666
  * Stream cursor for CDN collapsing (stream-cursor header).
666
667
  *
667
668
  * Updated after each chunk is delivered to the consumer.
668
669
  */
669
- cursor?: string
670
+ readonly cursor?: string
670
671
 
671
672
  /**
672
673
  * Whether we've reached the current end of the stream (stream-up-to-date header).
673
674
  *
674
675
  * Updated after each chunk is delivered to the consumer.
675
676
  */
676
- upToDate: boolean
677
+ readonly upToDate: boolean
677
678
 
678
679
  // =================================
679
680
  // 1) Accumulating helpers (Promise)
@@ -682,20 +683,20 @@ export interface StreamResponse<TJson = unknown> {
682
683
 
683
684
  /**
684
685
  * Accumulate raw bytes until first `upToDate` batch, then resolve.
685
- * When used with `live: "auto"`, signals the session to stop after upToDate.
686
+ * When used with `live: true`, signals the session to stop after upToDate.
686
687
  */
687
688
  body: () => Promise<Uint8Array>
688
689
 
689
690
  /**
690
691
  * Accumulate JSON *items* across batches into a single array, resolve at `upToDate`.
691
692
  * Only valid in JSON-mode; throws otherwise.
692
- * When used with `live: "auto"`, signals the session to stop after upToDate.
693
+ * When used with `live: true`, signals the session to stop after upToDate.
693
694
  */
694
695
  json: <T = TJson>() => Promise<Array<T>>
695
696
 
696
697
  /**
697
698
  * Accumulate text chunks into a single string, resolve at `upToDate`.
698
- * When used with `live: "auto"`, signals the session to stop after upToDate.
699
+ * When used with `live: true`, signals the session to stop after upToDate.
699
700
  */
700
701
  text: () => Promise<string>
701
702
 
@@ -737,24 +738,35 @@ export interface StreamResponse<TJson = unknown> {
737
738
  /**
738
739
  * Subscribe to JSON batches as they arrive.
739
740
  * Returns unsubscribe function.
741
+ *
742
+ * The subscriber can be sync or async. If async, backpressure is applied
743
+ * (the next batch waits for the previous callback to complete).
740
744
  */
741
745
  subscribeJson: <T = TJson>(
742
- subscriber: (batch: JsonBatch<T>) => Promise<void>
746
+ subscriber: (batch: JsonBatch<T>) => void | Promise<void>
743
747
  ) => () => void
744
748
 
745
749
  /**
746
750
  * Subscribe to raw byte chunks as they arrive.
747
751
  * Returns unsubscribe function.
752
+ *
753
+ * The subscriber can be sync or async. If async, backpressure is applied
754
+ * (the next chunk waits for the previous callback to complete).
748
755
  */
749
756
  subscribeBytes: (
750
- subscriber: (chunk: ByteChunk) => Promise<void>
757
+ subscriber: (chunk: ByteChunk) => void | Promise<void>
751
758
  ) => () => void
752
759
 
753
760
  /**
754
761
  * Subscribe to text chunks as they arrive.
755
762
  * Returns unsubscribe function.
763
+ *
764
+ * The subscriber can be sync or async. If async, backpressure is applied
765
+ * (the next chunk waits for the previous callback to complete).
756
766
  */
757
- subscribeText: (subscriber: (chunk: TextChunk) => Promise<void>) => () => void
767
+ subscribeText: (
768
+ subscriber: (chunk: TextChunk) => void | Promise<void>
769
+ ) => () => void
758
770
 
759
771
  // =====================
760
772
  // 4) Lifecycle