@durable-streams/client 0.1.5 → 0.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -46,6 +46,11 @@ const STREAM_CURSOR_HEADER = `Stream-Cursor`;
46
46
  */
47
47
  const STREAM_UP_TO_DATE_HEADER = `Stream-Up-To-Date`;
48
48
  /**
49
+ * Response/request header indicating stream is closed (EOF).
50
+ * When present with value "true", the stream is permanently closed.
51
+ */
52
+ const STREAM_CLOSED_HEADER = `Stream-Closed`;
53
+ /**
49
54
  * Request header for writer coordination sequence.
50
55
  * Monotonic, lexicographic. If lower than last appended seq -> 409 Conflict.
51
56
  */
@@ -94,8 +99,17 @@ const LIVE_QUERY_PARAM = `live`;
94
99
  */
95
100
  const CURSOR_QUERY_PARAM = `cursor`;
96
101
  /**
97
- * Content types that support SSE mode.
98
- * SSE is only valid for text/* or application/json streams.
102
+ * Response header indicating SSE data encoding (e.g., base64 for binary streams).
103
+ */
104
+ const STREAM_SSE_DATA_ENCODING_HEADER = `stream-sse-data-encoding`;
105
+ /**
106
+ * SSE control event field for stream closed state.
107
+ * Note: Different from HTTP header name (camelCase vs Header-Case).
108
+ */
109
+ const SSE_CLOSED_FIELD = `streamClosed`;
110
+ /**
111
+ * Content types that are natively compatible with SSE (UTF-8 text).
112
+ * Binary content types are also supported via automatic base64 encoding.
99
113
  */
100
114
  const SSE_COMPATIBLE_CONTENT_TYPES = [`text/`, `application/json`];
101
115
  /**
@@ -225,6 +239,23 @@ var MissingStreamUrlError = class extends Error {
225
239
  }
226
240
  };
227
241
  /**
242
+ * Error thrown when attempting to append to a closed stream.
243
+ */
244
+ var StreamClosedError = class extends DurableStreamError {
245
+ code = `STREAM_CLOSED`;
246
+ status = 409;
247
+ streamClosed = true;
248
+ /**
249
+ * The final offset of the stream, if available from the response.
250
+ */
251
+ finalOffset;
252
+ constructor(url, finalOffset) {
253
+ super(`Cannot append to closed stream`, `STREAM_CLOSED`, 409, url);
254
+ this.name = `StreamClosedError`;
255
+ this.finalOffset = finalOffset;
256
+ }
257
+ };
258
+ /**
228
259
  * Error thrown when signal option is invalid.
229
260
  */
230
261
  var InvalidSignalError = class extends Error {
@@ -504,13 +535,19 @@ async function* parseSSEStream(stream$1, signal) {
504
535
  type: `control`,
505
536
  streamNextOffset: control.streamNextOffset,
506
537
  streamCursor: control.streamCursor,
507
- upToDate: control.upToDate
538
+ upToDate: control.upToDate,
539
+ streamClosed: control.streamClosed
508
540
  };
509
- } catch {}
541
+ } catch (err) {
542
+ const preview = dataStr.length > 100 ? dataStr.slice(0, 100) + `...` : dataStr;
543
+ throw new DurableStreamError(`Failed to parse SSE control event: ${err instanceof Error ? err.message : String(err)}. Data: ${preview}`, `PARSE_ERROR`);
544
+ }
510
545
  }
511
546
  currentEvent = { data: [] };
512
- } else if (line.startsWith(`event:`)) currentEvent.type = line.slice(6).trim();
513
- else if (line.startsWith(`data:`)) {
547
+ } else if (line.startsWith(`event:`)) {
548
+ const eventType = line.slice(6);
549
+ currentEvent.type = eventType.startsWith(` `) ? eventType.slice(1) : eventType;
550
+ } else if (line.startsWith(`data:`)) {
514
551
  const content = line.slice(5);
515
552
  currentEvent.data.push(content.startsWith(` `) ? content.slice(1) : content);
516
553
  }
@@ -529,9 +566,13 @@ async function* parseSSEStream(stream$1, signal) {
529
566
  type: `control`,
530
567
  streamNextOffset: control.streamNextOffset,
531
568
  streamCursor: control.streamCursor,
532
- upToDate: control.upToDate
569
+ upToDate: control.upToDate,
570
+ streamClosed: control.streamClosed
533
571
  };
534
- } catch {}
572
+ } catch (err) {
573
+ const preview = dataStr.length > 100 ? dataStr.slice(0, 100) + `...` : dataStr;
574
+ throw new DurableStreamError(`Failed to parse SSE control event: ${err instanceof Error ? err.message : String(err)}. Data: ${preview}`, `PARSE_ERROR`);
575
+ }
535
576
  }
536
577
  } finally {
537
578
  reader.releaseLock();
@@ -557,9 +598,10 @@ var StreamResponseImpl = class {
557
598
  #statusText;
558
599
  #ok;
559
600
  #isLoading;
560
- offset;
561
- cursor;
562
- upToDate;
601
+ #offset;
602
+ #cursor;
603
+ #upToDate;
604
+ #streamClosed;
563
605
  #isJsonMode;
564
606
  #abortController;
565
607
  #fetchNext;
@@ -579,15 +621,17 @@ var StreamResponseImpl = class {
579
621
  #lastSSEConnectionStartTime;
580
622
  #consecutiveShortSSEConnections = 0;
581
623
  #sseFallbackToLongPoll = false;
624
+ #encoding;
582
625
  #responseStream;
583
626
  constructor(config) {
584
627
  this.url = config.url;
585
628
  this.contentType = config.contentType;
586
629
  this.live = config.live;
587
630
  this.startOffset = config.startOffset;
588
- this.offset = config.initialOffset;
589
- this.cursor = config.initialCursor;
590
- this.upToDate = config.initialUpToDate;
631
+ this.#offset = config.initialOffset;
632
+ this.#cursor = config.initialCursor;
633
+ this.#upToDate = config.initialUpToDate;
634
+ this.#streamClosed = config.initialStreamClosed;
591
635
  this.#headers = config.firstResponse.headers;
592
636
  this.#status = config.firstResponse.status;
593
637
  this.#statusText = config.firstResponse.statusText;
@@ -604,6 +648,7 @@ var StreamResponseImpl = class {
604
648
  backoffMaxDelay: config.sseResilience?.backoffMaxDelay ?? 5e3,
605
649
  logWarnings: config.sseResilience?.logWarnings ?? true
606
650
  };
651
+ this.#encoding = config.encoding;
607
652
  this.#closed = new Promise((resolve, reject) => {
608
653
  this.#closedResolve = resolve;
609
654
  this.#closedReject = reject;
@@ -678,6 +723,18 @@ var StreamResponseImpl = class {
678
723
  get isLoading() {
679
724
  return this.#isLoading;
680
725
  }
726
+ get offset() {
727
+ return this.#offset;
728
+ }
729
+ get cursor() {
730
+ return this.#cursor;
731
+ }
732
+ get upToDate() {
733
+ return this.#upToDate;
734
+ }
735
+ get streamClosed() {
736
+ return this.#streamClosed;
737
+ }
681
738
  #ensureJsonMode() {
682
739
  if (!this.#isJsonMode) throw new DurableStreamError(`JSON methods are only valid for JSON-mode streams. Content-Type is "${this.contentType}" and json hint was not set.`, `BAD_REQUEST`);
683
740
  }
@@ -699,11 +756,12 @@ var StreamResponseImpl = class {
699
756
  }
700
757
  /**
701
758
  * Determine if we should continue with live updates based on live mode
702
- * and whether we've received upToDate.
759
+ * and whether we've received upToDate or streamClosed.
703
760
  */
704
761
  #shouldContinueLive() {
705
762
  if (this.#stopAfterUpToDate && this.upToDate) return false;
706
763
  if (this.live === false) return false;
764
+ if (this.#streamClosed) return false;
707
765
  return true;
708
766
  }
709
767
  /**
@@ -711,10 +769,12 @@ var StreamResponseImpl = class {
711
769
  */
712
770
  #updateStateFromResponse(response) {
713
771
  const offset = response.headers.get(STREAM_OFFSET_HEADER);
714
- if (offset) this.offset = offset;
772
+ if (offset) this.#offset = offset;
715
773
  const cursor = response.headers.get(STREAM_CURSOR_HEADER);
716
- if (cursor) this.cursor = cursor;
717
- this.upToDate = response.headers.has(STREAM_UP_TO_DATE_HEADER);
774
+ if (cursor) this.#cursor = cursor;
775
+ this.#upToDate = response.headers.has(STREAM_UP_TO_DATE_HEADER);
776
+ const streamClosedHeader = response.headers.get(STREAM_CLOSED_HEADER);
777
+ if (streamClosedHeader?.toLowerCase() === `true`) this.#streamClosed = true;
718
778
  this.#headers = response.headers;
719
779
  this.#status = response.status;
720
780
  this.#statusText = response.statusText;
@@ -722,7 +782,7 @@ var StreamResponseImpl = class {
722
782
  }
723
783
  /**
724
784
  * Extract stream metadata from Response headers.
725
- * Used by subscriber APIs to get the correct offset/cursor/upToDate for each
785
+ * Used by subscriber APIs to get the correct offset/cursor/upToDate/streamClosed for each
726
786
  * specific Response, rather than reading from `this` which may be stale due to
727
787
  * ReadableStream prefetching or timing issues.
728
788
  */
@@ -730,24 +790,74 @@ var StreamResponseImpl = class {
730
790
  const offset = response.headers.get(STREAM_OFFSET_HEADER);
731
791
  const cursor = response.headers.get(STREAM_CURSOR_HEADER);
732
792
  const upToDate = response.headers.has(STREAM_UP_TO_DATE_HEADER);
793
+ const streamClosed = response.headers.get(STREAM_CLOSED_HEADER)?.toLowerCase() === `true`;
733
794
  return {
734
795
  offset: offset ?? this.offset,
735
796
  cursor: cursor ?? this.cursor,
736
- upToDate
797
+ upToDate,
798
+ streamClosed: streamClosed || this.streamClosed
737
799
  };
738
800
  }
739
801
  /**
802
+ * Decode base64 string to Uint8Array.
803
+ * Per protocol: concatenate data lines, remove \n and \r, then decode.
804
+ */
805
+ #decodeBase64(base64Str) {
806
+ const cleaned = base64Str.replace(/[\n\r]/g, ``);
807
+ if (cleaned.length === 0) return new Uint8Array(0);
808
+ if (cleaned.length % 4 !== 0) throw new DurableStreamError(`Invalid base64 data: length ${cleaned.length} is not a multiple of 4`, `PARSE_ERROR`);
809
+ try {
810
+ if (typeof Buffer !== `undefined`) return new Uint8Array(Buffer.from(cleaned, `base64`));
811
+ else {
812
+ const binaryStr = atob(cleaned);
813
+ const bytes = new Uint8Array(binaryStr.length);
814
+ for (let i = 0; i < binaryStr.length; i++) bytes[i] = binaryStr.charCodeAt(i);
815
+ return bytes;
816
+ }
817
+ } catch (err) {
818
+ throw new DurableStreamError(`Failed to decode base64 data: ${err instanceof Error ? err.message : String(err)}`, `PARSE_ERROR`);
819
+ }
820
+ }
821
+ /**
740
822
  * Create a synthetic Response from SSE data with proper headers.
741
- * Includes offset/cursor/upToDate in headers so subscribers can read them.
823
+ * Includes offset/cursor/upToDate/streamClosed in headers so subscribers can read them.
742
824
  */
743
- #createSSESyntheticResponse(data, offset, cursor, upToDate) {
825
+ #createSSESyntheticResponse(data, offset, cursor, upToDate, streamClosed) {
826
+ return this.#createSSESyntheticResponseFromParts([data], offset, cursor, upToDate, streamClosed);
827
+ }
828
+ /**
829
+ * Create a synthetic Response from multiple SSE data parts.
830
+ * For base64 mode, each part is independently encoded, so we decode each
831
+ * separately and concatenate the binary results.
832
+ * For text mode, parts are simply concatenated as strings.
833
+ */
834
+ #createSSESyntheticResponseFromParts(dataParts, offset, cursor, upToDate, streamClosed) {
744
835
  const headers = {
745
836
  "content-type": this.contentType ?? `application/json`,
746
837
  [STREAM_OFFSET_HEADER]: String(offset)
747
838
  };
748
839
  if (cursor) headers[STREAM_CURSOR_HEADER] = cursor;
749
840
  if (upToDate) headers[STREAM_UP_TO_DATE_HEADER] = `true`;
750
- return new Response(data, {
841
+ if (streamClosed) headers[STREAM_CLOSED_HEADER] = `true`;
842
+ let body;
843
+ if (this.#encoding === `base64`) {
844
+ const decodedParts = dataParts.filter((part) => part.length > 0).map((part) => this.#decodeBase64(part));
845
+ if (decodedParts.length === 0) body = new ArrayBuffer(0);
846
+ else if (decodedParts.length === 1) {
847
+ const decoded = decodedParts[0];
848
+ body = decoded.buffer.slice(decoded.byteOffset, decoded.byteOffset + decoded.byteLength);
849
+ } else {
850
+ const totalLength = decodedParts.reduce((sum, part) => sum + part.length, 0);
851
+ const combined = new Uint8Array(totalLength);
852
+ let offset$1 = 0;
853
+ for (const part of decodedParts) {
854
+ combined.set(part, offset$1);
855
+ offset$1 += part.length;
856
+ }
857
+ body = combined.buffer;
858
+ }
859
+ } else body = dataParts.join(``);
860
+ return new Response(body, {
751
861
  status: 200,
752
862
  headers
753
863
  });
@@ -756,9 +866,13 @@ var StreamResponseImpl = class {
756
866
  * Update instance state from an SSE control event.
757
867
  */
758
868
  #updateStateFromSSEControl(controlEvent) {
759
- this.offset = controlEvent.streamNextOffset;
760
- if (controlEvent.streamCursor) this.cursor = controlEvent.streamCursor;
761
- if (controlEvent.upToDate !== void 0) this.upToDate = controlEvent.upToDate;
869
+ this.#offset = controlEvent.streamNextOffset;
870
+ if (controlEvent.streamCursor) this.#cursor = controlEvent.streamCursor;
871
+ if (controlEvent.upToDate !== void 0) this.#upToDate = controlEvent.upToDate;
872
+ if (controlEvent.streamClosed) {
873
+ this.#streamClosed = true;
874
+ this.#upToDate = true;
875
+ }
762
876
  }
763
877
  /**
764
878
  * Mark the start of an SSE connection for duration tracking.
@@ -831,19 +945,29 @@ var StreamResponseImpl = class {
831
945
  }
832
946
  if (event.type === `data`) return this.#processSSEDataEvent(event.data, sseEventIterator);
833
947
  this.#updateStateFromSSEControl(event);
948
+ if (event.upToDate) {
949
+ const response = this.#createSSESyntheticResponse(``, event.streamNextOffset, event.streamCursor, true, event.streamClosed ?? false);
950
+ return {
951
+ type: `response`,
952
+ response
953
+ };
954
+ }
834
955
  return { type: `continue` };
835
956
  }
836
957
  /**
837
958
  * Process an SSE data event by waiting for its corresponding control event.
838
959
  * In SSE protocol, control events come AFTER data events.
839
960
  * Multiple data events may arrive before a single control event - we buffer them.
961
+ *
962
+ * For base64 mode, each data event is independently base64 encoded, so we
963
+ * collect them as an array and decode each separately.
840
964
  */
841
965
  async #processSSEDataEvent(pendingData, sseEventIterator) {
842
- let bufferedData = pendingData;
966
+ const bufferedDataParts = [pendingData];
843
967
  while (true) {
844
968
  const { done: controlDone, value: controlEvent } = await sseEventIterator.next();
845
969
  if (controlDone) {
846
- const response = this.#createSSESyntheticResponse(bufferedData, this.offset, this.cursor, this.upToDate);
970
+ const response = this.#createSSESyntheticResponseFromParts(bufferedDataParts, this.offset, this.cursor, this.upToDate, this.streamClosed);
847
971
  try {
848
972
  const newIterator = await this.#trySSEReconnect();
849
973
  return {
@@ -860,13 +984,13 @@ var StreamResponseImpl = class {
860
984
  }
861
985
  if (controlEvent.type === `control`) {
862
986
  this.#updateStateFromSSEControl(controlEvent);
863
- const response = this.#createSSESyntheticResponse(bufferedData, controlEvent.streamNextOffset, controlEvent.streamCursor, controlEvent.upToDate ?? false);
987
+ const response = this.#createSSESyntheticResponseFromParts(bufferedDataParts, controlEvent.streamNextOffset, controlEvent.streamCursor, controlEvent.upToDate ?? false, controlEvent.streamClosed ?? false);
864
988
  return {
865
989
  type: `response`,
866
990
  response
867
991
  };
868
992
  }
869
- bufferedData += controlEvent.data;
993
+ bufferedDataParts.push(controlEvent.data);
870
994
  }
871
995
  }
872
996
  /**
@@ -1024,7 +1148,13 @@ var StreamResponseImpl = class {
1024
1148
  const wasUpToDate = this.upToDate;
1025
1149
  const text = await result.value.text();
1026
1150
  const content = text.trim() || `[]`;
1027
- const parsed = JSON.parse(content);
1151
+ let parsed;
1152
+ try {
1153
+ parsed = JSON.parse(content);
1154
+ } catch (err) {
1155
+ const preview = content.length > 100 ? content.slice(0, 100) + `...` : content;
1156
+ throw new DurableStreamError(`Failed to parse JSON response: ${err instanceof Error ? err.message : String(err)}. Data: ${preview}`, `PARSE_ERROR`);
1157
+ }
1028
1158
  if (Array.isArray(parsed)) items.push(...parsed);
1029
1159
  else items.push(parsed);
1030
1160
  if (wasUpToDate) break;
@@ -1121,7 +1251,13 @@ var StreamResponseImpl = class {
1121
1251
  }
1122
1252
  const text = await response.text();
1123
1253
  const content = text.trim() || `[]`;
1124
- const parsed = JSON.parse(content);
1254
+ let parsed;
1255
+ try {
1256
+ parsed = JSON.parse(content);
1257
+ } catch (err) {
1258
+ const preview = content.length > 100 ? content.slice(0, 100) + `...` : content;
1259
+ throw new DurableStreamError(`Failed to parse JSON response: ${err instanceof Error ? err.message : String(err)}. Data: ${preview}`, `PARSE_ERROR`);
1260
+ }
1125
1261
  pendingItems = Array.isArray(parsed) ? parsed : [parsed];
1126
1262
  if (pendingItems.length > 0) controller.enqueue(pendingItems.shift());
1127
1263
  },
@@ -1157,16 +1293,23 @@ var StreamResponseImpl = class {
1157
1293
  while (!result.done) {
1158
1294
  if (abortController.signal.aborted) break;
1159
1295
  const response = result.value;
1160
- const { offset, cursor, upToDate } = this.#getMetadataFromResponse(response);
1296
+ const { offset, cursor, upToDate, streamClosed } = this.#getMetadataFromResponse(response);
1161
1297
  const text = await response.text();
1162
1298
  const content = text.trim() || `[]`;
1163
- const parsed = JSON.parse(content);
1299
+ let parsed;
1300
+ try {
1301
+ parsed = JSON.parse(content);
1302
+ } catch (err) {
1303
+ const preview = content.length > 100 ? content.slice(0, 100) + `...` : content;
1304
+ throw new DurableStreamError(`Failed to parse JSON response: ${err instanceof Error ? err.message : String(err)}. Data: ${preview}`, `PARSE_ERROR`);
1305
+ }
1164
1306
  const items = Array.isArray(parsed) ? parsed : [parsed];
1165
1307
  await subscriber({
1166
1308
  items,
1167
1309
  offset,
1168
1310
  cursor,
1169
- upToDate
1311
+ upToDate,
1312
+ streamClosed
1170
1313
  });
1171
1314
  result = await reader.read();
1172
1315
  }
@@ -1196,13 +1339,14 @@ var StreamResponseImpl = class {
1196
1339
  while (!result.done) {
1197
1340
  if (abortController.signal.aborted) break;
1198
1341
  const response = result.value;
1199
- const { offset, cursor, upToDate } = this.#getMetadataFromResponse(response);
1342
+ const { offset, cursor, upToDate, streamClosed } = this.#getMetadataFromResponse(response);
1200
1343
  const buffer = await response.arrayBuffer();
1201
1344
  await subscriber({
1202
1345
  data: new Uint8Array(buffer),
1203
1346
  offset,
1204
1347
  cursor,
1205
- upToDate
1348
+ upToDate,
1349
+ streamClosed
1206
1350
  });
1207
1351
  result = await reader.read();
1208
1352
  }
@@ -1232,13 +1376,14 @@ var StreamResponseImpl = class {
1232
1376
  while (!result.done) {
1233
1377
  if (abortController.signal.aborted) break;
1234
1378
  const response = result.value;
1235
- const { offset, cursor, upToDate } = this.#getMetadataFromResponse(response);
1379
+ const { offset, cursor, upToDate, streamClosed } = this.#getMetadataFromResponse(response);
1236
1380
  const text = await response.text();
1237
1381
  await subscriber({
1238
1382
  text,
1239
1383
  offset,
1240
1384
  cursor,
1241
- upToDate
1385
+ upToDate,
1386
+ streamClosed
1242
1387
  });
1243
1388
  result = await reader.read();
1244
1389
  }
@@ -1289,6 +1434,11 @@ async function handleErrorResponse(response, url, context) {
1289
1434
  const status = response.status;
1290
1435
  if (status === 404) throw new DurableStreamError(`Stream not found: ${url}`, `NOT_FOUND`, 404);
1291
1436
  if (status === 409) {
1437
+ const streamClosedHeader = response.headers.get(STREAM_CLOSED_HEADER);
1438
+ if (streamClosedHeader?.toLowerCase() === `true`) {
1439
+ const finalOffset = response.headers.get(STREAM_OFFSET_HEADER) ?? void 0;
1440
+ throw new StreamClosedError(url, finalOffset);
1441
+ }
1292
1442
  const message = context?.operation === `create` ? `Stream already exists: ${url}` : `Sequence conflict: seq is lower than last appended`;
1293
1443
  const code = context?.operation === `create` ? `CONFLICT_EXISTS` : `CONFLICT_SEQ`;
1294
1444
  throw new DurableStreamError(message, code, 409);
@@ -1405,7 +1555,7 @@ function _resetHttpWarningForTesting() {
1405
1555
  * url,
1406
1556
  * auth,
1407
1557
  * offset: savedOffset,
1408
- * live: "auto",
1558
+ * live: true,
1409
1559
  * })
1410
1560
  * live.subscribeJson(async (batch) => {
1411
1561
  * for (const item of batch.items) {
@@ -1446,10 +1596,11 @@ async function stream(options) {
1446
1596
  */
1447
1597
  async function streamInternal(options) {
1448
1598
  const url = options.url instanceof URL ? options.url.toString() : options.url;
1599
+ warnIfUsingHttpInBrowser(url, options.warnOnHttp);
1449
1600
  const fetchUrl = new URL(url);
1450
1601
  const startOffset = options.offset ?? `-1`;
1451
1602
  fetchUrl.searchParams.set(OFFSET_QUERY_PARAM, startOffset);
1452
- const live = options.live ?? `auto`;
1603
+ const live = options.live ?? true;
1453
1604
  if (live === `long-poll` || live === `sse`) fetchUrl.searchParams.set(LIVE_QUERY_PARAM, live);
1454
1605
  const params = await resolveParams(options.params);
1455
1606
  for (const [key, value] of Object.entries(params)) fetchUrl.searchParams.set(key, value);
@@ -1474,13 +1625,16 @@ async function streamInternal(options) {
1474
1625
  const initialOffset = firstResponse.headers.get(STREAM_OFFSET_HEADER) ?? startOffset;
1475
1626
  const initialCursor = firstResponse.headers.get(STREAM_CURSOR_HEADER) ?? void 0;
1476
1627
  const initialUpToDate = firstResponse.headers.has(STREAM_UP_TO_DATE_HEADER);
1628
+ const initialStreamClosed = firstResponse.headers.get(STREAM_CLOSED_HEADER)?.toLowerCase() === `true`;
1477
1629
  const isJsonMode = options.json === true || (contentType?.includes(`application/json`) ?? false);
1630
+ const sseDataEncoding = firstResponse.headers.get(STREAM_SSE_DATA_ENCODING_HEADER);
1631
+ const encoding = sseDataEncoding === `base64` ? `base64` : void 0;
1478
1632
  const fetchNext = async (offset, cursor, signal, resumingFromPause) => {
1479
1633
  const nextUrl = new URL(url);
1480
1634
  nextUrl.searchParams.set(OFFSET_QUERY_PARAM, offset);
1481
1635
  if (!resumingFromPause) {
1482
- if (live === `auto` || live === `long-poll`) nextUrl.searchParams.set(LIVE_QUERY_PARAM, `long-poll`);
1483
- else if (live === `sse`) nextUrl.searchParams.set(LIVE_QUERY_PARAM, `sse`);
1636
+ if (live === `sse`) nextUrl.searchParams.set(LIVE_QUERY_PARAM, `sse`);
1637
+ else if (live === true || live === `long-poll`) nextUrl.searchParams.set(LIVE_QUERY_PARAM, `long-poll`);
1484
1638
  }
1485
1639
  if (cursor) nextUrl.searchParams.set(`cursor`, cursor);
1486
1640
  const nextParams = await resolveParams(options.params);
@@ -1519,936 +1673,1126 @@ async function streamInternal(options) {
1519
1673
  initialOffset,
1520
1674
  initialCursor,
1521
1675
  initialUpToDate,
1676
+ initialStreamClosed,
1522
1677
  firstResponse,
1523
1678
  abortController,
1524
1679
  fetchNext,
1525
1680
  startSSE,
1526
- sseResilience: options.sseResilience
1681
+ sseResilience: options.sseResilience,
1682
+ encoding
1527
1683
  });
1528
1684
  }
1529
1685
 
1530
1686
  //#endregion
1531
- //#region src/stream.ts
1687
+ //#region src/idempotent-producer.ts
1688
+ /**
1689
+ * Error thrown when a producer's epoch is stale (zombie fencing).
1690
+ */
1691
+ var StaleEpochError = class extends Error {
1692
+ /**
1693
+ * The current epoch on the server.
1694
+ */
1695
+ currentEpoch;
1696
+ constructor(currentEpoch) {
1697
+ super(`Producer epoch is stale. Current server epoch: ${currentEpoch}. Call restart() or create a new producer with a higher epoch.`);
1698
+ this.name = `StaleEpochError`;
1699
+ this.currentEpoch = currentEpoch;
1700
+ }
1701
+ };
1702
+ /**
1703
+ * Error thrown when an unrecoverable sequence gap is detected.
1704
+ *
1705
+ * With maxInFlight > 1, HTTP requests can arrive out of order at the server,
1706
+ * causing temporary 409 responses. The client automatically handles these
1707
+ * by waiting for earlier sequences to complete, then retrying.
1708
+ *
1709
+ * This error is only thrown when the gap cannot be resolved (e.g., the
1710
+ * expected sequence is >= our sequence, indicating a true protocol violation).
1711
+ */
1712
+ var SequenceGapError = class extends Error {
1713
+ expectedSeq;
1714
+ receivedSeq;
1715
+ constructor(expectedSeq, receivedSeq) {
1716
+ super(`Producer sequence gap: expected ${expectedSeq}, received ${receivedSeq}`);
1717
+ this.name = `SequenceGapError`;
1718
+ this.expectedSeq = expectedSeq;
1719
+ this.receivedSeq = receivedSeq;
1720
+ }
1721
+ };
1532
1722
  /**
1533
1723
  * Normalize content-type by extracting the media type (before any semicolon).
1534
- * Handles cases like "application/json; charset=utf-8".
1535
1724
  */
1536
1725
  function normalizeContentType$1(contentType) {
1537
1726
  if (!contentType) return ``;
1538
1727
  return contentType.split(`;`)[0].trim().toLowerCase();
1539
1728
  }
1540
1729
  /**
1541
- * Check if a value is a Promise or Promise-like (thenable).
1542
- */
1543
- function isPromiseLike(value) {
1544
- return value !== null && typeof value === `object` && `then` in value && typeof value.then === `function`;
1545
- }
1546
- /**
1547
- * A handle to a remote durable stream for read/write operations.
1730
+ * An idempotent producer for exactly-once writes to a durable stream.
1548
1731
  *
1549
- * This is a lightweight, reusable handle - not a persistent connection.
1550
- * It does not automatically start reading or listening.
1551
- * Create sessions as needed via stream().
1732
+ * Features:
1733
+ * - Fire-and-forget: append() returns immediately, batches in background
1734
+ * - Exactly-once: server deduplicates using (producerId, epoch, seq)
1735
+ * - Batching: multiple appends batched into single HTTP request
1736
+ * - Pipelining: up to maxInFlight concurrent batches
1737
+ * - Zombie fencing: stale producers rejected via epoch validation
1552
1738
  *
1553
1739
  * @example
1554
1740
  * ```typescript
1555
- * // Create a new stream
1556
- * const stream = await DurableStream.create({
1557
- * url: "https://streams.example.com/my-stream",
1558
- * headers: { Authorization: "Bearer my-token" },
1559
- * contentType: "application/json"
1741
+ * const stream = new DurableStream({ url: "https://..." });
1742
+ * const producer = new IdempotentProducer(stream, "order-service-1", {
1743
+ * epoch: 0,
1744
+ * autoClaim: true,
1560
1745
  * });
1561
1746
  *
1562
- * // Write data
1563
- * await stream.append({ message: "hello" });
1747
+ * // Fire-and-forget writes (synchronous, returns immediately)
1748
+ * producer.append("message 1");
1749
+ * producer.append("message 2");
1564
1750
  *
1565
- * // Read with the new API
1566
- * const res = await stream.stream<{ message: string }>();
1567
- * res.subscribeJson(async (batch) => {
1568
- * for (const item of batch.items) {
1569
- * console.log(item.message);
1570
- * }
1571
- * });
1751
+ * // Ensure all messages are delivered before shutdown
1752
+ * await producer.flush();
1753
+ * await producer.close();
1572
1754
  * ```
1573
1755
  */
1574
- var DurableStream = class DurableStream {
1575
- /**
1576
- * The URL of the durable stream.
1577
- */
1578
- url;
1579
- /**
1580
- * The content type of the stream (populated after connect/head/read).
1581
- */
1582
- contentType;
1583
- #options;
1756
+ var IdempotentProducer = class {
1757
+ #stream;
1758
+ #producerId;
1759
+ #epoch;
1760
+ #nextSeq = 0;
1761
+ #autoClaim;
1762
+ #maxBatchBytes;
1763
+ #lingerMs;
1584
1764
  #fetchClient;
1765
+ #signal;
1585
1766
  #onError;
1586
- #batchingEnabled;
1767
+ #pendingBatch = [];
1768
+ #batchBytes = 0;
1769
+ #lingerTimeout = null;
1587
1770
  #queue;
1588
- #buffer = [];
1589
- /**
1590
- * Create a cold handle to a stream.
1591
- * No network IO is performed by the constructor.
1592
- */
1593
- constructor(opts) {
1594
- validateOptions(opts);
1595
- const urlStr = opts.url instanceof URL ? opts.url.toString() : opts.url;
1596
- this.url = urlStr;
1597
- this.#options = {
1598
- ...opts,
1599
- url: urlStr
1600
- };
1601
- this.#onError = opts.onError;
1602
- if (opts.contentType) this.contentType = opts.contentType;
1603
- this.#batchingEnabled = opts.batching !== false;
1604
- if (this.#batchingEnabled) this.#queue = fastq.default.promise(this.#batchWorker.bind(this), 1);
1605
- const baseFetchClient = opts.fetch ?? ((...args) => fetch(...args));
1606
- const backOffOpts = { ...opts.backoffOptions ?? BackoffDefaults };
1607
- const fetchWithBackoffClient = createFetchWithBackoff(baseFetchClient, backOffOpts);
1608
- this.#fetchClient = createFetchWithConsumedBody(fetchWithBackoffClient);
1609
- }
1771
+ #maxInFlight;
1772
+ #closed = false;
1773
+ #closeResult = null;
1774
+ #pendingFinalMessage;
1775
+ #epochClaimed;
1776
+ #seqState = new Map();
1610
1777
  /**
1611
- * Create a new stream (create-only PUT) and return a handle.
1612
- * Fails with DurableStreamError(code="CONFLICT_EXISTS") if it already exists.
1778
+ * Create an idempotent producer for a stream.
1779
+ *
1780
+ * @param stream - The DurableStream to write to
1781
+ * @param producerId - Stable identifier for this producer (e.g., "order-service-1")
1782
+ * @param opts - Producer options
1613
1783
  */
1614
- static async create(opts) {
1615
- const stream$1 = new DurableStream(opts);
1616
- await stream$1.create({
1617
- contentType: opts.contentType,
1618
- ttlSeconds: opts.ttlSeconds,
1619
- expiresAt: opts.expiresAt,
1620
- body: opts.body
1621
- });
1622
- return stream$1;
1784
+ constructor(stream$1, producerId, opts) {
1785
+ const epoch = opts?.epoch ?? 0;
1786
+ const maxBatchBytes = opts?.maxBatchBytes ?? 1024 * 1024;
1787
+ const maxInFlight = opts?.maxInFlight ?? 5;
1788
+ const lingerMs = opts?.lingerMs ?? 5;
1789
+ if (epoch < 0) throw new Error(`epoch must be >= 0`);
1790
+ if (maxBatchBytes <= 0) throw new Error(`maxBatchBytes must be > 0`);
1791
+ if (maxInFlight <= 0) throw new Error(`maxInFlight must be > 0`);
1792
+ if (lingerMs < 0) throw new Error(`lingerMs must be >= 0`);
1793
+ this.#stream = stream$1;
1794
+ this.#producerId = producerId;
1795
+ this.#epoch = epoch;
1796
+ this.#autoClaim = opts?.autoClaim ?? false;
1797
+ this.#maxBatchBytes = maxBatchBytes;
1798
+ this.#lingerMs = lingerMs;
1799
+ this.#signal = opts?.signal;
1800
+ this.#onError = opts?.onError;
1801
+ this.#fetchClient = opts?.fetch ?? ((...args) => fetch(...args));
1802
+ this.#maxInFlight = maxInFlight;
1803
+ this.#epochClaimed = !this.#autoClaim;
1804
+ this.#queue = fastq.default.promise(this.#batchWorker.bind(this), this.#maxInFlight);
1805
+ if (this.#signal) this.#signal.addEventListener(`abort`, () => {
1806
+ this.#rejectPendingBatch(new DurableStreamError(`Producer aborted`, `ALREADY_CLOSED`, void 0, void 0));
1807
+ }, { once: true });
1623
1808
  }
1624
1809
  /**
1625
- * Validate that a stream exists and fetch metadata via HEAD.
1626
- * Returns a handle with contentType populated (if sent by server).
1810
+ * Append data to the stream.
1627
1811
  *
1628
- * **Important**: This only performs a HEAD request for validation - it does
1629
- * NOT open a session or start reading data. To read from the stream, call
1630
- * `stream()` on the returned handle.
1812
+ * This is fire-and-forget: returns immediately after adding to the batch.
1813
+ * The message is batched and sent when:
1814
+ * - maxBatchBytes is reached
1815
+ * - lingerMs elapses
1816
+ * - flush() is called
1817
+ *
1818
+ * Errors are reported via onError callback if configured. Use flush() to
1819
+ * wait for all pending messages to be sent.
1820
+ *
1821
+ * For JSON streams, pass pre-serialized JSON strings.
1822
+ * For byte streams, pass string or Uint8Array.
1823
+ *
1824
+ * @param body - Data to append (string or Uint8Array)
1631
1825
  *
1632
1826
  * @example
1633
1827
  * ```typescript
1634
- * // Validate stream exists before reading
1635
- * const handle = await DurableStream.connect({ url })
1636
- * const res = await handle.stream() // Now actually read
1828
+ * // JSON stream
1829
+ * producer.append(JSON.stringify({ message: "hello" }));
1830
+ *
1831
+ * // Byte stream
1832
+ * producer.append("raw text data");
1833
+ * producer.append(new Uint8Array([1, 2, 3]));
1637
1834
  * ```
1638
1835
  */
1639
- static async connect(opts) {
1640
- const stream$1 = new DurableStream(opts);
1641
- await stream$1.head();
1642
- return stream$1;
1836
+ append(body) {
1837
+ if (this.#closed) throw new DurableStreamError(`Producer is closed`, `ALREADY_CLOSED`, void 0, void 0);
1838
+ let bytes;
1839
+ if (typeof body === `string`) bytes = new TextEncoder().encode(body);
1840
+ else if (body instanceof Uint8Array) bytes = body;
1841
+ else throw new DurableStreamError(`append() requires string or Uint8Array. For objects, use JSON.stringify().`, `BAD_REQUEST`, 400, void 0);
1842
+ this.#pendingBatch.push({ body: bytes });
1843
+ this.#batchBytes += bytes.length;
1844
+ if (this.#batchBytes >= this.#maxBatchBytes) this.#enqueuePendingBatch();
1845
+ else if (!this.#lingerTimeout) this.#lingerTimeout = setTimeout(() => {
1846
+ this.#lingerTimeout = null;
1847
+ if (this.#pendingBatch.length > 0) this.#enqueuePendingBatch();
1848
+ }, this.#lingerMs);
1643
1849
  }
1644
1850
  /**
1645
- * HEAD metadata for a stream without creating a handle.
1851
+ * Send any pending batch immediately and wait for all in-flight batches.
1852
+ *
1853
+ * Call this before shutdown to ensure all messages are delivered.
1646
1854
  */
1647
- static async head(opts) {
1648
- const stream$1 = new DurableStream(opts);
1649
- return stream$1.head();
1855
+ async flush() {
1856
+ if (this.#lingerTimeout) {
1857
+ clearTimeout(this.#lingerTimeout);
1858
+ this.#lingerTimeout = null;
1859
+ }
1860
+ if (this.#pendingBatch.length > 0) this.#enqueuePendingBatch();
1861
+ await this.#queue.drained();
1650
1862
  }
1651
1863
  /**
1652
- * Delete a stream without creating a handle.
1864
+ * Stop the producer without closing the underlying stream.
1865
+ *
1866
+ * Use this when you want to:
1867
+ * - Hand off writing to another producer
1868
+ * - Keep the stream open for future writes
1869
+ * - Stop this producer but not signal EOF to readers
1870
+ *
1871
+ * Flushes any pending messages before detaching.
1872
+ * After calling detach(), further append() calls will throw.
1653
1873
  */
1654
- static async delete(opts) {
1655
- const stream$1 = new DurableStream(opts);
1656
- return stream$1.delete();
1657
- }
1658
- /**
1659
- * HEAD metadata for this stream.
1874
+ async detach() {
1875
+ if (this.#closed) return;
1876
+ this.#closed = true;
1877
+ try {
1878
+ await this.flush();
1879
+ } catch {}
1880
+ }
1881
+ /**
1882
+ * Flush pending messages and close the underlying stream (EOF).
1883
+ *
1884
+ * This is the typical way to end a producer session. It:
1885
+ * 1. Flushes all pending messages
1886
+ * 2. Optionally appends a final message
1887
+ * 3. Closes the stream (no further appends permitted)
1888
+ *
1889
+ * **Idempotent**: Unlike `DurableStream.close({ body })`, this method is
1890
+ * idempotent even with a final message because it uses producer headers
1891
+ * for deduplication. Safe to retry on network failures.
1892
+ *
1893
+ * @param finalMessage - Optional final message to append atomically with close
1894
+ * @returns CloseResult with the final offset
1660
1895
  */
1661
- async head(opts) {
1662
- const { requestHeaders, fetchUrl } = await this.#buildRequest();
1663
- const response = await this.#fetchClient(fetchUrl.toString(), {
1664
- method: `HEAD`,
1665
- headers: requestHeaders,
1666
- signal: opts?.signal ?? this.#options.signal
1667
- });
1668
- if (!response.ok) await handleErrorResponse(response, this.url);
1669
- const contentType = response.headers.get(`content-type`) ?? void 0;
1670
- const offset = response.headers.get(STREAM_OFFSET_HEADER) ?? void 0;
1671
- const etag = response.headers.get(`etag`) ?? void 0;
1672
- const cacheControl = response.headers.get(`cache-control`) ?? void 0;
1673
- if (contentType) this.contentType = contentType;
1674
- return {
1675
- exists: true,
1676
- contentType,
1677
- offset,
1678
- etag,
1679
- cacheControl
1680
- };
1896
+ async close(finalMessage) {
1897
+ if (this.#closed) {
1898
+ if (this.#closeResult) return this.#closeResult;
1899
+ await this.flush();
1900
+ const result$1 = await this.#doClose(this.#pendingFinalMessage);
1901
+ this.#closeResult = result$1;
1902
+ return result$1;
1903
+ }
1904
+ this.#closed = true;
1905
+ this.#pendingFinalMessage = finalMessage;
1906
+ await this.flush();
1907
+ const result = await this.#doClose(finalMessage);
1908
+ this.#closeResult = result;
1909
+ return result;
1681
1910
  }
1682
1911
  /**
1683
- * Create this stream (create-only PUT) using the URL/auth from the handle.
1912
+ * Actually close the stream with optional final message.
1913
+ * Uses producer headers for idempotency.
1684
1914
  */
1685
- async create(opts) {
1686
- const { requestHeaders, fetchUrl } = await this.#buildRequest();
1687
- const contentType = opts?.contentType ?? this.#options.contentType;
1688
- if (contentType) requestHeaders[`content-type`] = contentType;
1689
- if (opts?.ttlSeconds !== void 0) requestHeaders[STREAM_TTL_HEADER] = String(opts.ttlSeconds);
1690
- if (opts?.expiresAt) requestHeaders[STREAM_EXPIRES_AT_HEADER] = opts.expiresAt;
1691
- const body = encodeBody(opts?.body);
1692
- const response = await this.#fetchClient(fetchUrl.toString(), {
1693
- method: `PUT`,
1694
- headers: requestHeaders,
1915
+ async #doClose(finalMessage) {
1916
+ const contentType = this.#stream.contentType ?? `application/octet-stream`;
1917
+ const isJson = normalizeContentType$1(contentType) === `application/json`;
1918
+ let body;
1919
+ if (finalMessage !== void 0) {
1920
+ const bodyBytes = typeof finalMessage === `string` ? new TextEncoder().encode(finalMessage) : finalMessage;
1921
+ if (isJson) {
1922
+ const jsonStr = new TextDecoder().decode(bodyBytes);
1923
+ body = `[${jsonStr}]`;
1924
+ } else body = bodyBytes;
1925
+ }
1926
+ const seqForThisRequest = this.#nextSeq;
1927
+ const headers = {
1928
+ "content-type": contentType,
1929
+ [PRODUCER_ID_HEADER]: this.#producerId,
1930
+ [PRODUCER_EPOCH_HEADER]: this.#epoch.toString(),
1931
+ [PRODUCER_SEQ_HEADER]: seqForThisRequest.toString(),
1932
+ [STREAM_CLOSED_HEADER]: `true`
1933
+ };
1934
+ const response = await this.#fetchClient(this.#stream.url, {
1935
+ method: `POST`,
1936
+ headers,
1695
1937
  body,
1696
- signal: this.#options.signal
1938
+ signal: this.#signal
1697
1939
  });
1698
- if (!response.ok) await handleErrorResponse(response, this.url, { operation: `create` });
1699
- const responseContentType = response.headers.get(`content-type`);
1700
- if (responseContentType) this.contentType = responseContentType;
1701
- else if (contentType) this.contentType = contentType;
1702
- return this;
1940
+ if (response.status === 204) {
1941
+ this.#nextSeq = seqForThisRequest + 1;
1942
+ const finalOffset = response.headers.get(STREAM_OFFSET_HEADER) ?? ``;
1943
+ return { finalOffset };
1944
+ }
1945
+ if (response.status === 200) {
1946
+ this.#nextSeq = seqForThisRequest + 1;
1947
+ const finalOffset = response.headers.get(STREAM_OFFSET_HEADER) ?? ``;
1948
+ return { finalOffset };
1949
+ }
1950
+ if (response.status === 403) {
1951
+ const currentEpochStr = response.headers.get(PRODUCER_EPOCH_HEADER);
1952
+ const currentEpoch = currentEpochStr ? parseInt(currentEpochStr, 10) : this.#epoch;
1953
+ if (this.#autoClaim) {
1954
+ const newEpoch = currentEpoch + 1;
1955
+ this.#epoch = newEpoch;
1956
+ this.#nextSeq = 0;
1957
+ return this.#doClose(finalMessage);
1958
+ }
1959
+ throw new StaleEpochError(currentEpoch);
1960
+ }
1961
+ const error = await FetchError.fromResponse(response, this.#stream.url);
1962
+ throw error;
1703
1963
  }
1704
1964
  /**
1705
- * Delete this stream.
1965
+ * Increment epoch and reset sequence.
1966
+ *
1967
+ * Call this when restarting the producer to establish a new session.
1968
+ * Flushes any pending messages first.
1706
1969
  */
1707
- async delete(opts) {
1708
- const { requestHeaders, fetchUrl } = await this.#buildRequest();
1709
- const response = await this.#fetchClient(fetchUrl.toString(), {
1710
- method: `DELETE`,
1711
- headers: requestHeaders,
1712
- signal: opts?.signal ?? this.#options.signal
1713
- });
1714
- if (!response.ok) await handleErrorResponse(response, this.url);
1970
+ async restart() {
1971
+ await this.flush();
1972
+ this.#epoch++;
1973
+ this.#nextSeq = 0;
1715
1974
  }
1716
1975
  /**
1717
- * Append a single payload to the stream.
1718
- *
1719
- * When batching is enabled (default), multiple append() calls made while
1720
- * a POST is in-flight will be batched together into a single request.
1721
- * This significantly improves throughput for high-frequency writes.
1722
- *
1723
- * - `body` may be Uint8Array, string, or any JSON-serializable value (for JSON streams).
1724
- * - `body` may also be a Promise that resolves to any of the above types.
1725
- * - Strings are encoded as UTF-8.
1726
- * - `seq` (if provided) is sent as stream-seq (writer coordination).
1727
- *
1728
- * @example
1729
- * ```typescript
1730
- * // Direct value
1731
- * await stream.append({ message: "hello" });
1732
- *
1733
- * // Promise value - awaited before buffering
1734
- * await stream.append(fetchData());
1735
- * await stream.append(Promise.all([a, b, c]));
1736
- * ```
1976
+ * Current epoch for this producer.
1737
1977
  */
1738
- async append(body, opts) {
1739
- const resolvedBody = isPromiseLike(body) ? await body : body;
1740
- if (this.#batchingEnabled && this.#queue) return this.#appendWithBatching(resolvedBody, opts);
1741
- return this.#appendDirect(resolvedBody, opts);
1978
+ get epoch() {
1979
+ return this.#epoch;
1742
1980
  }
1743
1981
  /**
1744
- * Direct append without batching (used when batching is disabled).
1982
+ * Next sequence number to be assigned.
1745
1983
  */
1746
- async #appendDirect(body, opts) {
1747
- const { requestHeaders, fetchUrl } = await this.#buildRequest();
1748
- const contentType = opts?.contentType ?? this.#options.contentType ?? this.contentType;
1749
- if (contentType) requestHeaders[`content-type`] = contentType;
1750
- if (opts?.seq) requestHeaders[STREAM_SEQ_HEADER] = opts.seq;
1751
- const isJson = normalizeContentType$1(contentType) === `application/json`;
1752
- const bodyToEncode = isJson ? [body] : body;
1753
- const encodedBody = encodeBody(bodyToEncode);
1754
- const response = await this.#fetchClient(fetchUrl.toString(), {
1755
- method: `POST`,
1756
- headers: requestHeaders,
1757
- body: encodedBody,
1758
- signal: opts?.signal ?? this.#options.signal
1759
- });
1760
- if (!response.ok) await handleErrorResponse(response, this.url);
1984
+ get nextSeq() {
1985
+ return this.#nextSeq;
1761
1986
  }
1762
1987
  /**
1763
- * Append with batching - buffers messages and sends them in batches.
1988
+ * Number of messages in the current pending batch.
1764
1989
  */
1765
- async #appendWithBatching(body, opts) {
1766
- return new Promise((resolve, reject) => {
1767
- this.#buffer.push({
1768
- data: body,
1769
- seq: opts?.seq,
1770
- contentType: opts?.contentType,
1771
- signal: opts?.signal,
1772
- resolve,
1773
- reject
1774
- });
1775
- if (this.#queue.idle()) {
1776
- const batch = this.#buffer.splice(0);
1777
- this.#queue.push(batch).catch((err) => {
1778
- for (const msg of batch) msg.reject(err);
1779
- });
1780
- }
1990
+ get pendingCount() {
1991
+ return this.#pendingBatch.length;
1992
+ }
1993
+ /**
1994
+ * Number of batches currently in flight.
1995
+ */
1996
+ get inFlightCount() {
1997
+ return this.#queue.length();
1998
+ }
1999
+ /**
2000
+ * Enqueue the current pending batch for processing.
2001
+ */
2002
+ #enqueuePendingBatch() {
2003
+ if (this.#pendingBatch.length === 0) return;
2004
+ const batch = this.#pendingBatch;
2005
+ const seq = this.#nextSeq;
2006
+ this.#pendingBatch = [];
2007
+ this.#batchBytes = 0;
2008
+ this.#nextSeq++;
2009
+ if (this.#autoClaim && !this.#epochClaimed && this.#queue.length() > 0) this.#queue.drained().then(() => {
2010
+ this.#queue.push({
2011
+ batch,
2012
+ seq
2013
+ }).catch(() => {});
1781
2014
  });
2015
+ else this.#queue.push({
2016
+ batch,
2017
+ seq
2018
+ }).catch(() => {});
1782
2019
  }
1783
2020
  /**
1784
- * Batch worker - processes batches of messages.
2021
+ * Batch worker - processes batches via fastq.
1785
2022
  */
1786
- async #batchWorker(batch) {
2023
+ async #batchWorker(task) {
2024
+ const { batch, seq } = task;
2025
+ const epoch = this.#epoch;
1787
2026
  try {
1788
- await this.#sendBatch(batch);
1789
- for (const msg of batch) msg.resolve();
1790
- if (this.#buffer.length > 0) {
1791
- const nextBatch = this.#buffer.splice(0);
1792
- this.#queue.push(nextBatch).catch((err) => {
1793
- for (const msg of nextBatch) msg.reject(err);
1794
- });
1795
- }
2027
+ await this.#doSendBatch(batch, seq, epoch);
2028
+ if (!this.#epochClaimed) this.#epochClaimed = true;
2029
+ this.#signalSeqComplete(epoch, seq, void 0);
1796
2030
  } catch (error) {
1797
- for (const msg of batch) msg.reject(error);
1798
- for (const msg of this.#buffer) msg.reject(error);
1799
- this.#buffer = [];
2031
+ this.#signalSeqComplete(epoch, seq, error);
2032
+ if (this.#onError) this.#onError(error);
1800
2033
  throw error;
1801
2034
  }
1802
2035
  }
1803
2036
  /**
1804
- * Send a batch of messages as a single POST request.
2037
+ * Signal that a sequence has completed (success or failure).
1805
2038
  */
1806
- async #sendBatch(batch) {
1807
- if (batch.length === 0) return;
1808
- const { requestHeaders, fetchUrl } = await this.#buildRequest();
1809
- const contentType = batch[0]?.contentType ?? this.#options.contentType ?? this.contentType;
1810
- if (contentType) requestHeaders[`content-type`] = contentType;
1811
- let highestSeq;
1812
- for (let i = batch.length - 1; i >= 0; i--) if (batch[i].seq !== void 0) {
1813
- highestSeq = batch[i].seq;
1814
- break;
1815
- }
1816
- if (highestSeq) requestHeaders[STREAM_SEQ_HEADER] = highestSeq;
1817
- const isJson = normalizeContentType$1(contentType) === `application/json`;
1818
- let batchedBody;
1819
- if (isJson) {
1820
- const values = batch.map((m) => m.data);
1821
- batchedBody = JSON.stringify(values);
1822
- } else {
1823
- const totalSize = batch.reduce((sum, m) => {
1824
- const size = typeof m.data === `string` ? new TextEncoder().encode(m.data).length : m.data.length;
1825
- return sum + size;
1826
- }, 0);
1827
- const concatenated = new Uint8Array(totalSize);
1828
- let offset = 0;
1829
- for (const msg of batch) {
1830
- const bytes = typeof msg.data === `string` ? new TextEncoder().encode(msg.data) : msg.data;
1831
- concatenated.set(bytes, offset);
1832
- offset += bytes.length;
1833
- }
1834
- batchedBody = concatenated;
2039
+ #signalSeqComplete(epoch, seq, error) {
2040
+ let epochMap = this.#seqState.get(epoch);
2041
+ if (!epochMap) {
2042
+ epochMap = new Map();
2043
+ this.#seqState.set(epoch, epochMap);
1835
2044
  }
1836
- const signals = [];
1837
- if (this.#options.signal) signals.push(this.#options.signal);
1838
- for (const msg of batch) if (msg.signal) signals.push(msg.signal);
1839
- const combinedSignal = signals.length > 0 ? AbortSignal.any(signals) : void 0;
1840
- const response = await this.#fetchClient(fetchUrl.toString(), {
1841
- method: `POST`,
1842
- headers: requestHeaders,
1843
- body: batchedBody,
1844
- signal: combinedSignal
2045
+ const state = epochMap.get(seq);
2046
+ if (state) {
2047
+ state.resolved = true;
2048
+ state.error = error;
2049
+ for (const waiter of state.waiters) waiter(error);
2050
+ state.waiters = [];
2051
+ } else epochMap.set(seq, {
2052
+ resolved: true,
2053
+ error,
2054
+ waiters: []
1845
2055
  });
1846
- if (!response.ok) await handleErrorResponse(response, this.url);
2056
+ const cleanupThreshold = seq - this.#maxInFlight * 3;
2057
+ if (cleanupThreshold > 0) {
2058
+ for (const oldSeq of epochMap.keys()) if (oldSeq < cleanupThreshold) epochMap.delete(oldSeq);
2059
+ }
1847
2060
  }
1848
2061
  /**
1849
- * Append a streaming body to the stream.
1850
- *
1851
- * Supports piping from any ReadableStream or async iterable:
1852
- * - `source` yields Uint8Array or string chunks.
1853
- * - Strings are encoded as UTF-8; no delimiters are added.
1854
- * - Internally uses chunked transfer or HTTP/2 streaming.
1855
- *
1856
- * @example
1857
- * ```typescript
1858
- * // Pipe from a ReadableStream
1859
- * const readable = new ReadableStream({
1860
- * start(controller) {
1861
- * controller.enqueue("chunk 1");
1862
- * controller.enqueue("chunk 2");
1863
- * controller.close();
1864
- * }
1865
- * });
1866
- * await stream.appendStream(readable);
1867
- *
1868
- * // Pipe from an async generator
1869
- * async function* generate() {
1870
- * yield "line 1\n";
1871
- * yield "line 2\n";
1872
- * }
1873
- * await stream.appendStream(generate());
1874
- *
1875
- * // Pipe from fetch response body
1876
- * const response = await fetch("https://example.com/data");
1877
- * await stream.appendStream(response.body!);
1878
- * ```
2062
+ * Wait for a specific sequence to complete.
2063
+ * Returns immediately if already completed.
2064
+ * Throws if the sequence failed.
1879
2065
  */
1880
- async appendStream(source, opts) {
1881
- const { requestHeaders, fetchUrl } = await this.#buildRequest();
1882
- const contentType = opts?.contentType ?? this.#options.contentType ?? this.contentType;
1883
- if (contentType) requestHeaders[`content-type`] = contentType;
1884
- if (opts?.seq) requestHeaders[STREAM_SEQ_HEADER] = opts.seq;
1885
- const body = toReadableStream(source);
1886
- const response = await this.#fetchClient(fetchUrl.toString(), {
1887
- method: `POST`,
1888
- headers: requestHeaders,
1889
- body,
1890
- duplex: `half`,
1891
- signal: opts?.signal ?? this.#options.signal
2066
+ #waitForSeq(epoch, seq) {
2067
+ let epochMap = this.#seqState.get(epoch);
2068
+ if (!epochMap) {
2069
+ epochMap = new Map();
2070
+ this.#seqState.set(epoch, epochMap);
2071
+ }
2072
+ const state = epochMap.get(seq);
2073
+ if (state?.resolved) {
2074
+ if (state.error) return Promise.reject(state.error);
2075
+ return Promise.resolve();
2076
+ }
2077
+ return new Promise((resolve, reject) => {
2078
+ const waiter = (err) => {
2079
+ if (err) reject(err);
2080
+ else resolve();
2081
+ };
2082
+ if (state) state.waiters.push(waiter);
2083
+ else epochMap.set(seq, {
2084
+ resolved: false,
2085
+ waiters: [waiter]
2086
+ });
1892
2087
  });
1893
- if (!response.ok) await handleErrorResponse(response, this.url);
1894
2088
  }
1895
2089
  /**
1896
- * Create a writable stream that pipes data to this durable stream.
1897
- *
1898
- * Returns a WritableStream that can be used with `pipeTo()` or
1899
- * `pipeThrough()` from any ReadableStream source.
1900
- *
1901
- * @example
1902
- * ```typescript
1903
- * // Pipe from fetch response
1904
- * const response = await fetch("https://example.com/data");
1905
- * await response.body!.pipeTo(stream.writable());
1906
- *
1907
- * // Pipe through a transform
1908
- * const readable = someStream.pipeThrough(new TextEncoderStream());
1909
- * await readable.pipeTo(stream.writable());
1910
- * ```
2090
+ * Actually send the batch to the server.
2091
+ * Handles auto-claim retry on 403 (stale epoch) if autoClaim is enabled.
2092
+ * Does NOT implement general retry/backoff for network errors or 5xx responses.
1911
2093
  */
1912
- writable(opts) {
1913
- const chunks = [];
1914
- const stream$1 = this;
1915
- return new WritableStream({
1916
- write(chunk) {
1917
- chunks.push(chunk);
1918
- },
1919
- async close() {
1920
- if (chunks.length > 0) {
1921
- const readable = new ReadableStream({ start(controller) {
1922
- for (const chunk of chunks) controller.enqueue(chunk);
1923
- controller.close();
1924
- } });
1925
- await stream$1.appendStream(readable, opts);
1926
- }
1927
- },
1928
- abort(reason) {
1929
- console.error(`WritableStream aborted:`, reason);
2094
+ async #doSendBatch(batch, seq, epoch) {
2095
+ const contentType = this.#stream.contentType ?? `application/octet-stream`;
2096
+ const isJson = normalizeContentType$1(contentType) === `application/json`;
2097
+ let batchedBody;
2098
+ if (isJson) {
2099
+ const jsonStrings = batch.map((e) => new TextDecoder().decode(e.body));
2100
+ batchedBody = `[${jsonStrings.join(`,`)}]`;
2101
+ } else {
2102
+ const totalSize = batch.reduce((sum, e) => sum + e.body.length, 0);
2103
+ const concatenated = new Uint8Array(totalSize);
2104
+ let offset = 0;
2105
+ for (const entry of batch) {
2106
+ concatenated.set(entry.body, offset);
2107
+ offset += entry.body.length;
1930
2108
  }
1931
- });
1932
- }
1933
- /**
1934
- * Start a fetch-like streaming session against this handle's URL/headers/params.
1935
- * The first request is made inside this method; it resolves when we have
1936
- * a valid first response, or rejects on errors.
1937
- *
1938
- * Call-specific headers and params are merged with handle-level ones,
1939
- * with call-specific values taking precedence.
1940
- *
1941
- * @example
1942
- * ```typescript
1943
- * const handle = await DurableStream.connect({
1944
- * url,
1945
- * headers: { Authorization: `Bearer ${token}` }
1946
- * });
1947
- * const res = await handle.stream<{ message: string }>();
1948
- *
1949
- * // Accumulate all JSON items
1950
- * const items = await res.json();
1951
- *
1952
- * // Or stream live with ReadableStream
1953
- * const reader = res.jsonStream().getReader();
1954
- * let result = await reader.read();
1955
- * while (!result.done) {
1956
- * console.log(result.value);
1957
- * result = await reader.read();
1958
- * }
1959
- *
1960
- * // Or use subscriber for backpressure-aware consumption
1961
- * res.subscribeJson(async (batch) => {
1962
- * for (const item of batch.items) {
1963
- * console.log(item);
1964
- * }
1965
- * });
1966
- * ```
1967
- */
1968
- async stream(options) {
1969
- if (options?.live === `sse` && this.contentType) {
1970
- const isSSECompatible = SSE_COMPATIBLE_CONTENT_TYPES.some((prefix) => this.contentType.startsWith(prefix));
1971
- if (!isSSECompatible) throw new DurableStreamError(`SSE is not supported for content-type: ${this.contentType}`, `SSE_NOT_SUPPORTED`, 400);
2109
+ batchedBody = concatenated;
1972
2110
  }
1973
- const mergedHeaders = {
1974
- ...this.#options.headers,
1975
- ...options?.headers
1976
- };
1977
- const mergedParams = {
1978
- ...this.#options.params,
1979
- ...options?.params
2111
+ const url = this.#stream.url;
2112
+ const headers = {
2113
+ "content-type": contentType,
2114
+ [PRODUCER_ID_HEADER]: this.#producerId,
2115
+ [PRODUCER_EPOCH_HEADER]: epoch.toString(),
2116
+ [PRODUCER_SEQ_HEADER]: seq.toString()
1980
2117
  };
1981
- return stream({
1982
- url: this.url,
1983
- headers: mergedHeaders,
1984
- params: mergedParams,
1985
- signal: options?.signal ?? this.#options.signal,
1986
- fetch: this.#options.fetch,
1987
- backoffOptions: this.#options.backoffOptions,
1988
- offset: options?.offset,
1989
- live: options?.live,
1990
- json: options?.json,
1991
- onError: options?.onError ?? this.#onError,
1992
- warnOnHttp: options?.warnOnHttp ?? this.#options.warnOnHttp
2118
+ const response = await this.#fetchClient(url, {
2119
+ method: `POST`,
2120
+ headers,
2121
+ body: batchedBody,
2122
+ signal: this.#signal
1993
2123
  });
2124
+ if (response.status === 204) return {
2125
+ offset: ``,
2126
+ duplicate: true
2127
+ };
2128
+ if (response.status === 200) {
2129
+ const resultOffset = response.headers.get(STREAM_OFFSET_HEADER) ?? ``;
2130
+ return {
2131
+ offset: resultOffset,
2132
+ duplicate: false
2133
+ };
2134
+ }
2135
+ if (response.status === 403) {
2136
+ const currentEpochStr = response.headers.get(PRODUCER_EPOCH_HEADER);
2137
+ const currentEpoch = currentEpochStr ? parseInt(currentEpochStr, 10) : epoch;
2138
+ if (this.#autoClaim) {
2139
+ const newEpoch = currentEpoch + 1;
2140
+ this.#epoch = newEpoch;
2141
+ this.#nextSeq = 1;
2142
+ return this.#doSendBatch(batch, 0, newEpoch);
2143
+ }
2144
+ throw new StaleEpochError(currentEpoch);
2145
+ }
2146
+ if (response.status === 409) {
2147
+ const expectedSeqStr = response.headers.get(PRODUCER_EXPECTED_SEQ_HEADER);
2148
+ const expectedSeq = expectedSeqStr ? parseInt(expectedSeqStr, 10) : 0;
2149
+ if (expectedSeq < seq) {
2150
+ const waitPromises = [];
2151
+ for (let s = expectedSeq; s < seq; s++) waitPromises.push(this.#waitForSeq(epoch, s));
2152
+ await Promise.all(waitPromises);
2153
+ return this.#doSendBatch(batch, seq, epoch);
2154
+ }
2155
+ const receivedSeqStr = response.headers.get(PRODUCER_RECEIVED_SEQ_HEADER);
2156
+ const receivedSeq = receivedSeqStr ? parseInt(receivedSeqStr, 10) : seq;
2157
+ throw new SequenceGapError(expectedSeq, receivedSeq);
2158
+ }
2159
+ if (response.status === 400) {
2160
+ const error$1 = await DurableStreamError.fromResponse(response, url);
2161
+ throw error$1;
2162
+ }
2163
+ const error = await FetchError.fromResponse(response, url);
2164
+ throw error;
1994
2165
  }
1995
2166
  /**
1996
- * Build request headers and URL.
2167
+ * Clear pending batch and report error.
1997
2168
  */
1998
- async #buildRequest() {
1999
- const requestHeaders = await resolveHeaders(this.#options.headers);
2000
- const fetchUrl = new URL(this.url);
2001
- const params = await resolveParams(this.#options.params);
2002
- for (const [key, value] of Object.entries(params)) fetchUrl.searchParams.set(key, value);
2003
- return {
2004
- requestHeaders,
2005
- fetchUrl
2006
- };
2169
+ #rejectPendingBatch(error) {
2170
+ if (this.#onError && this.#pendingBatch.length > 0) this.#onError(error);
2171
+ this.#pendingBatch = [];
2172
+ this.#batchBytes = 0;
2173
+ if (this.#lingerTimeout) {
2174
+ clearTimeout(this.#lingerTimeout);
2175
+ this.#lingerTimeout = null;
2176
+ }
2007
2177
  }
2008
2178
  };
2179
+
2180
+ //#endregion
2181
+ //#region src/stream.ts
2009
2182
  /**
2010
- * Encode a body value to the appropriate format.
2011
- * Strings are encoded as UTF-8.
2012
- * Objects are JSON-serialized.
2183
+ * Normalize content-type by extracting the media type (before any semicolon).
2184
+ * Handles cases like "application/json; charset=utf-8".
2013
2185
  */
2014
- function encodeBody(body) {
2015
- if (body === void 0) return void 0;
2016
- if (typeof body === `string`) return new TextEncoder().encode(body);
2017
- if (body instanceof Uint8Array) return body;
2018
- if (body instanceof Blob || body instanceof FormData || body instanceof ReadableStream || body instanceof ArrayBuffer || ArrayBuffer.isView(body)) return body;
2019
- return new TextEncoder().encode(JSON.stringify(body));
2186
+ function normalizeContentType(contentType) {
2187
+ if (!contentType) return ``;
2188
+ return contentType.split(`;`)[0].trim().toLowerCase();
2020
2189
  }
2021
2190
  /**
2022
- * Convert an async iterable to a ReadableStream.
2191
+ * Check if a value is a Promise or Promise-like (thenable).
2023
2192
  */
2024
- function toReadableStream(source) {
2025
- if (source instanceof ReadableStream) return source.pipeThrough(new TransformStream({ transform(chunk, controller) {
2026
- if (typeof chunk === `string`) controller.enqueue(new TextEncoder().encode(chunk));
2027
- else controller.enqueue(chunk);
2028
- } }));
2029
- const encoder = new TextEncoder();
2030
- const iterator = source[Symbol.asyncIterator]();
2031
- return new ReadableStream({
2032
- async pull(controller) {
2033
- try {
2034
- const { done, value } = await iterator.next();
2035
- if (done) controller.close();
2036
- else if (typeof value === `string`) controller.enqueue(encoder.encode(value));
2037
- else controller.enqueue(value);
2038
- } catch (e) {
2039
- controller.error(e);
2040
- }
2041
- },
2042
- cancel() {
2043
- iterator.return?.();
2044
- }
2045
- });
2046
- }
2047
- /**
2048
- * Validate stream options.
2049
- */
2050
- function validateOptions(options) {
2051
- if (!options.url) throw new MissingStreamUrlError();
2052
- if (options.signal && !(options.signal instanceof AbortSignal)) throw new InvalidSignalError();
2053
- warnIfUsingHttpInBrowser(options.url, options.warnOnHttp);
2054
- }
2055
-
2056
- //#endregion
2057
- //#region src/idempotent-producer.ts
2058
- /**
2059
- * Error thrown when a producer's epoch is stale (zombie fencing).
2060
- */
2061
- var StaleEpochError = class extends Error {
2062
- /**
2063
- * The current epoch on the server.
2064
- */
2065
- currentEpoch;
2066
- constructor(currentEpoch) {
2067
- super(`Producer epoch is stale. Current server epoch: ${currentEpoch}. Call restart() or create a new producer with a higher epoch.`);
2068
- this.name = `StaleEpochError`;
2069
- this.currentEpoch = currentEpoch;
2070
- }
2071
- };
2072
- /**
2073
- * Error thrown when an unrecoverable sequence gap is detected.
2074
- *
2075
- * With maxInFlight > 1, HTTP requests can arrive out of order at the server,
2076
- * causing temporary 409 responses. The client automatically handles these
2077
- * by waiting for earlier sequences to complete, then retrying.
2078
- *
2079
- * This error is only thrown when the gap cannot be resolved (e.g., the
2080
- * expected sequence is >= our sequence, indicating a true protocol violation).
2081
- */
2082
- var SequenceGapError = class extends Error {
2083
- expectedSeq;
2084
- receivedSeq;
2085
- constructor(expectedSeq, receivedSeq) {
2086
- super(`Producer sequence gap: expected ${expectedSeq}, received ${receivedSeq}`);
2087
- this.name = `SequenceGapError`;
2088
- this.expectedSeq = expectedSeq;
2089
- this.receivedSeq = receivedSeq;
2090
- }
2091
- };
2092
- /**
2093
- * Normalize content-type by extracting the media type (before any semicolon).
2094
- */
2095
- function normalizeContentType(contentType) {
2096
- if (!contentType) return ``;
2097
- return contentType.split(`;`)[0].trim().toLowerCase();
2193
+ function isPromiseLike(value) {
2194
+ return value != null && typeof value.then === `function`;
2098
2195
  }
2099
2196
/**
 * A handle to a remote durable stream for read/write operations.
 *
 * This is a lightweight, reusable handle - not a persistent connection.
 * It does not automatically start reading or listening.
 * Create sessions as needed via stream().
 *
 * @example
 * ```typescript
 * // Create a new stream
 * const stream = await DurableStream.create({
 *   url: "https://streams.example.com/my-stream",
 *   headers: { Authorization: "Bearer my-token" },
 *   contentType: "application/json"
 * });
 *
 * // Write data
 * await stream.append(JSON.stringify({ message: "hello" }));
 *
 * // Read with the new API
 * const res = await stream.stream<{ message: string }>();
 * res.subscribeJson(async (batch) => {
 *   for (const item of batch.items) {
 *     console.log(item.message);
 *   }
 * });
 * ```
 */
var DurableStream = class DurableStream {
	/**
	* The URL of the durable stream.
	*/
	url;
	/**
	* The content type of the stream (populated after connect/head/read).
	*/
	contentType;
	// Normalized constructor options (url coerced to string).
	#options;
	// Fetch client wrapped with backoff + consumed-body handling.
	#fetchClient;
	// Default onError forwarded to stream() sessions.
	#onError;
	// True unless opts.batching === false.
	#batchingEnabled;
	// fastq promise queue, concurrency 1 — only one POST in flight at a time.
	#queue;
	// Messages accumulated while a batch POST is in flight.
	#buffer = [];
	/**
	* Create a cold handle to a stream.
	* No network IO is performed by the constructor.
	*/
	constructor(opts) {
		validateOptions(opts);
		const urlStr = opts.url instanceof URL ? opts.url.toString() : opts.url;
		this.url = urlStr;
		this.#options = {
			...opts,
			url: urlStr
		};
		this.#onError = opts.onError;
		if (opts.contentType) this.contentType = opts.contentType;
		this.#batchingEnabled = opts.batching !== false;
		if (this.#batchingEnabled) this.#queue = fastq.default.promise(this.#batchWorker.bind(this), 1);
		const baseFetchClient = opts.fetch ?? ((...args) => fetch(...args));
		const backOffOpts = { ...opts.backoffOptions ?? BackoffDefaults };
		const fetchWithBackoffClient = createFetchWithBackoff(baseFetchClient, backOffOpts);
		this.#fetchClient = createFetchWithConsumedBody(fetchWithBackoffClient);
	}
	/**
	* Create a new stream (create-only PUT) and return a handle.
	* Fails with DurableStreamError(code="CONFLICT_EXISTS") if it already exists.
	*/
	static async create(opts) {
		const stream$1 = new DurableStream(opts);
		await stream$1.create({
			contentType: opts.contentType,
			ttlSeconds: opts.ttlSeconds,
			expiresAt: opts.expiresAt,
			body: opts.body,
			closed: opts.closed
		});
		return stream$1;
	}
	/**
	* Validate that a stream exists and fetch metadata via HEAD.
	* Returns a handle with contentType populated (if sent by server).
	*
	* **Important**: This only performs a HEAD request for validation - it does
	* NOT open a session or start reading data. To read from the stream, call
	* `stream()` on the returned handle.
	*
	* @example
	* ```typescript
	* // Validate stream exists before reading
	* const handle = await DurableStream.connect({ url })
	* const res = await handle.stream() // Now actually read
	* ```
	*/
	static async connect(opts) {
		const stream$1 = new DurableStream(opts);
		await stream$1.head();
		return stream$1;
	}
	/**
	* HEAD metadata for a stream without creating a handle.
	*/
	static async head(opts) {
		const stream$1 = new DurableStream(opts);
		return stream$1.head();
	}
	/**
	* Delete a stream without creating a handle.
	*/
	static async delete(opts) {
		const stream$1 = new DurableStream(opts);
		return stream$1.delete();
	}
	/**
	* HEAD metadata for this stream.
	* Side effect: updates this.contentType when the server reports one.
	* @returns exists/contentType/offset/etag/cacheControl/streamClosed
	*/
	async head(opts) {
		const { requestHeaders, fetchUrl } = await this.#buildRequest();
		const response = await this.#fetchClient(fetchUrl.toString(), {
			method: `HEAD`,
			headers: requestHeaders,
			signal: opts?.signal ?? this.#options.signal
		});
		if (!response.ok) await handleErrorResponse(response, this.url);
		const contentType = response.headers.get(`content-type`) ?? void 0;
		const offset = response.headers.get(STREAM_OFFSET_HEADER) ?? void 0;
		const etag = response.headers.get(`etag`) ?? void 0;
		const cacheControl = response.headers.get(`cache-control`) ?? void 0;
		const streamClosed = response.headers.get(STREAM_CLOSED_HEADER)?.toLowerCase() === `true`;
		if (contentType) this.contentType = contentType;
		return {
			exists: true,
			contentType,
			offset,
			etag,
			cacheControl,
			streamClosed
		};
	}
	/**
	* Create this stream (create-only PUT) using the URL/auth from the handle.
	* Optional TTL/expiry/closed flags are sent as request headers; an optional
	* initial body is encoded via encodeBody().
	* @returns this (for chaining)
	*/
	async create(opts) {
		const { requestHeaders, fetchUrl } = await this.#buildRequest();
		const contentType = opts?.contentType ?? this.#options.contentType;
		if (contentType) requestHeaders[`content-type`] = contentType;
		if (opts?.ttlSeconds !== void 0) requestHeaders[STREAM_TTL_HEADER] = String(opts.ttlSeconds);
		if (opts?.expiresAt) requestHeaders[STREAM_EXPIRES_AT_HEADER] = opts.expiresAt;
		if (opts?.closed) requestHeaders[STREAM_CLOSED_HEADER] = `true`;
		const body = encodeBody(opts?.body);
		const response = await this.#fetchClient(fetchUrl.toString(), {
			method: `PUT`,
			headers: requestHeaders,
			body,
			signal: this.#options.signal
		});
		if (!response.ok) await handleErrorResponse(response, this.url, { operation: `create` });
		// Prefer the server-reported content type; fall back to what we sent.
		const responseContentType = response.headers.get(`content-type`);
		if (responseContentType) this.contentType = responseContentType;
		else if (contentType) this.contentType = contentType;
		return this;
	}
	/**
	* Delete this stream.
	*/
	async delete(opts) {
		const { requestHeaders, fetchUrl } = await this.#buildRequest();
		const response = await this.#fetchClient(fetchUrl.toString(), {
			method: `DELETE`,
			headers: requestHeaders,
			signal: opts?.signal ?? this.#options.signal
		});
		if (!response.ok) await handleErrorResponse(response, this.url);
	}
	/**
	* Close the stream, optionally with a final message.
	*
	* After closing:
	* - No further appends are permitted (server returns 409)
	* - Readers can observe the closed state and treat it as EOF
	* - The stream's data remains fully readable
	*
	* Closing is:
	* - **Durable**: The closed state is persisted
	* - **Monotonic**: Once closed, a stream cannot be reopened
	*
	* **Idempotency:**
	* - `close()` without body: Idempotent — safe to call multiple times
	* - `close({ body })` with body: NOT idempotent — throws `StreamClosedError`
	*   if stream is already closed (use `IdempotentProducer.close()` for
	*   idempotent close-with-body semantics)
	*
	* @returns CloseResult with the final offset
	* @throws StreamClosedError if called with body on an already-closed stream
	*/
	async close(opts) {
		const { requestHeaders, fetchUrl } = await this.#buildRequest();
		const contentType = opts?.contentType ?? this.#options.contentType ?? this.contentType;
		if (contentType) requestHeaders[`content-type`] = contentType;
		requestHeaders[STREAM_CLOSED_HEADER] = `true`;
		let body;
		if (opts?.body !== void 0) {
			const isJson = normalizeContentType(contentType) === `application/json`;
			if (isJson) {
				// JSON streams send bodies as a one-element JSON array.
				const bodyStr = typeof opts.body === `string` ? opts.body : new TextDecoder().decode(opts.body);
				body = `[${bodyStr}]`;
			} else body = typeof opts.body === `string` ? opts.body : opts.body;
			// NOTE(review): both arms of the ternary above are opts.body —
			// non-string bodies are passed through unchanged. Confirm intended.
		}
		const response = await this.#fetchClient(fetchUrl.toString(), {
			method: `POST`,
			headers: requestHeaders,
			body,
			signal: opts?.signal ?? this.#options.signal
		});
		// 409 + Stream-Closed: true means the stream was already closed.
		if (response.status === 409) {
			const isClosed = response.headers.get(STREAM_CLOSED_HEADER)?.toLowerCase() === `true`;
			if (isClosed) {
				const finalOffset$1 = response.headers.get(STREAM_OFFSET_HEADER) ?? void 0;
				throw new StreamClosedError(this.url, finalOffset$1);
			}
		}
		if (!response.ok) await handleErrorResponse(response, this.url);
		const finalOffset = response.headers.get(STREAM_OFFSET_HEADER) ?? ``;
		return { finalOffset };
	}
	/**
	* Append a single payload to the stream.
	*
	* When batching is enabled (default), multiple append() calls made while
	* a POST is in-flight will be batched together into a single request.
	* This significantly improves throughput for high-frequency writes.
	*
	* - `body` must be string or Uint8Array.
	* - For JSON streams, pass pre-serialized JSON strings.
	* - `body` may also be a Promise that resolves to string or Uint8Array.
	* - Strings are encoded as UTF-8.
	* - `seq` (if provided) is sent as stream-seq (writer coordination).
	*
	* @example
	* ```typescript
	* // JSON stream - pass pre-serialized JSON
	* await stream.append(JSON.stringify({ message: "hello" }));
	*
	* // Byte stream
	* await stream.append("raw text data");
	* await stream.append(new Uint8Array([1, 2, 3]));
	*
	* // Promise value - awaited before buffering
	* await stream.append(fetchData());
	* ```
	*/
	async append(body, opts) {
		const resolvedBody = isPromiseLike(body) ? await body : body;
		if (this.#batchingEnabled && this.#queue) return this.#appendWithBatching(resolvedBody, opts);
		return this.#appendDirect(resolvedBody, opts);
	}
	/**
	* Direct append without batching (used when batching is disabled).
	*/
	async #appendDirect(body, opts) {
		const { requestHeaders, fetchUrl } = await this.#buildRequest();
		const contentType = opts?.contentType ?? this.#options.contentType ?? this.contentType;
		if (contentType) requestHeaders[`content-type`] = contentType;
		if (opts?.seq) requestHeaders[STREAM_SEQ_HEADER] = opts.seq;
		const isJson = normalizeContentType(contentType) === `application/json`;
		let encodedBody;
		if (isJson) {
			// JSON streams wrap the payload in a one-element array.
			const bodyStr = typeof body === `string` ? body : new TextDecoder().decode(body);
			encodedBody = `[${bodyStr}]`;
		} else if (typeof body === `string`) encodedBody = body;
		// slice() copies just the view's bytes out of the backing buffer.
		else encodedBody = body.buffer.slice(body.byteOffset, body.byteOffset + body.byteLength);
		const response = await this.#fetchClient(fetchUrl.toString(), {
			method: `POST`,
			headers: requestHeaders,
			body: encodedBody,
			signal: opts?.signal ?? this.#options.signal
		});
		if (!response.ok) await handleErrorResponse(response, this.url);
	}
	/**
	* Append with batching - buffers messages and sends them in batches.
	* If the queue is idle the buffered messages are flushed immediately;
	* otherwise #batchWorker picks them up after the in-flight POST finishes.
	*/
	async #appendWithBatching(body, opts) {
		return new Promise((resolve, reject) => {
			this.#buffer.push({
				data: body,
				seq: opts?.seq,
				contentType: opts?.contentType,
				signal: opts?.signal,
				resolve,
				reject
			});
			if (this.#queue.idle()) {
				const batch = this.#buffer.splice(0);
				this.#queue.push(batch).catch((err) => {
					for (const msg of batch) msg.reject(err);
				});
			}
		});
	}
	/**
	* Batch worker - processes batches of messages.
	* On success, resolves every message in the batch and hands the buffered
	* backlog to the queue; on failure, rejects the batch AND the backlog.
	*/
	async #batchWorker(batch) {
		try {
			await this.#sendBatch(batch);
			for (const msg of batch) msg.resolve();
			if (this.#buffer.length > 0) {
				const nextBatch = this.#buffer.splice(0);
				this.#queue.push(nextBatch).catch((err) => {
					for (const msg of nextBatch) msg.reject(err);
				});
			}
		} catch (error) {
			for (const msg of batch) msg.reject(error);
			for (const msg of this.#buffer) msg.reject(error);
			this.#buffer = [];
			throw error;
		}
	}
	/**
	* Send a batch of messages as a single POST request.
	* JSON streams are joined into one JSON array; byte streams are
	* concatenated (strings UTF-8 encoded when mixed with Uint8Arrays).
	*/
	async #sendBatch(batch) {
		if (batch.length === 0) return;
		const { requestHeaders, fetchUrl } = await this.#buildRequest();
		const contentType = batch[0]?.contentType ?? this.#options.contentType ?? this.contentType;
		if (contentType) requestHeaders[`content-type`] = contentType;
		// Use the last explicitly-provided seq in the batch for writer coordination.
		let highestSeq;
		for (let i = batch.length - 1; i >= 0; i--) if (batch[i].seq !== void 0) {
			highestSeq = batch[i].seq;
			break;
		}
		if (highestSeq) requestHeaders[STREAM_SEQ_HEADER] = highestSeq;
		const isJson = normalizeContentType(contentType) === `application/json`;
		let batchedBody;
		if (isJson) {
			const jsonStrings = batch.map((m) => typeof m.data === `string` ? m.data : new TextDecoder().decode(m.data));
			batchedBody = `[${jsonStrings.join(`,`)}]`;
		} else {
			const hasUint8Array = batch.some((m) => m.data instanceof Uint8Array);
			const hasString = batch.some((m) => typeof m.data === `string`);
			if (hasUint8Array && !hasString) {
				// All-binary batch: concatenate raw bytes.
				const chunks = batch.map((m) => m.data);
				const totalLength = chunks.reduce((sum, c) => sum + c.length, 0);
				const combined = new Uint8Array(totalLength);
				let offset = 0;
				for (const chunk of chunks) {
					combined.set(chunk, offset);
					offset += chunk.length;
				}
				batchedBody = combined;
			} else if (hasString && !hasUint8Array) batchedBody = batch.map((m) => m.data).join(``);
			else {
				// Mixed batch: encode strings to UTF-8 and concatenate everything.
				const encoder = new TextEncoder();
				const chunks = batch.map((m) => typeof m.data === `string` ? encoder.encode(m.data) : m.data);
				const totalLength = chunks.reduce((sum, c) => sum + c.length, 0);
				const combined = new Uint8Array(totalLength);
				let offset = 0;
				for (const chunk of chunks) {
					combined.set(chunk, offset);
					offset += chunk.length;
				}
				batchedBody = combined;
			}
		}
		// Aborting if the handle signal OR any message's signal fires.
		const signals = [];
		if (this.#options.signal) signals.push(this.#options.signal);
		for (const msg of batch) if (msg.signal) signals.push(msg.signal);
		const combinedSignal = signals.length > 0 ? AbortSignal.any(signals) : void 0;
		const response = await this.#fetchClient(fetchUrl.toString(), {
			method: `POST`,
			headers: requestHeaders,
			body: batchedBody,
			signal: combinedSignal
		});
		if (!response.ok) await handleErrorResponse(response, this.url);
	}
	/**
	* Append a streaming body to the stream.
	*
	* Supports piping from any ReadableStream or async iterable:
	* - `source` yields Uint8Array or string chunks.
	* - Strings are encoded as UTF-8; no delimiters are added.
	* - Internally uses chunked transfer or HTTP/2 streaming.
	*
	* @example
	* ```typescript
	* // Pipe from a ReadableStream
	* const readable = new ReadableStream({
	*   start(controller) {
	*     controller.enqueue("chunk 1");
	*     controller.enqueue("chunk 2");
	*     controller.close();
	*   }
	* });
	* await stream.appendStream(readable);
	*
	* // Pipe from an async generator
	* async function* generate() {
	*   yield "line 1\n";
	*   yield "line 2\n";
	* }
	* await stream.appendStream(generate());
	*
	* // Pipe from fetch response body
	* const response = await fetch("https://example.com/data");
	* await stream.appendStream(response.body!);
	* ```
	*/
	async appendStream(source, opts) {
		const { requestHeaders, fetchUrl } = await this.#buildRequest();
		const contentType = opts?.contentType ?? this.#options.contentType ?? this.contentType;
		if (contentType) requestHeaders[`content-type`] = contentType;
		if (opts?.seq) requestHeaders[STREAM_SEQ_HEADER] = opts.seq;
		const body = toReadableStream(source);
		const response = await this.#fetchClient(fetchUrl.toString(), {
			method: `POST`,
			headers: requestHeaders,
			body,
			// Required by fetch for request-body streaming.
			duplex: `half`,
			signal: opts?.signal ?? this.#options.signal
		});
		if (!response.ok) await handleErrorResponse(response, this.url);
	}
	/**
	* Create a writable stream that pipes data to this durable stream.
	*
	* Returns a WritableStream that can be used with `pipeTo()` or
	* `pipeThrough()` from any ReadableStream source.
	*
	* Uses IdempotentProducer internally for:
	* - Automatic batching (controlled by lingerMs, maxBatchBytes)
	* - Exactly-once delivery semantics
	* - Streaming writes (doesn't buffer entire content in memory)
	*
	* @example
	* ```typescript
	* // Pipe from fetch response
	* const response = await fetch("https://example.com/data");
	* await response.body!.pipeTo(stream.writable());
	*
	* // Pipe through a transform
	* const readable = someStream.pipeThrough(new TextEncoderStream());
	* await readable.pipeTo(stream.writable());
	*
	* // With custom producer options
	* await source.pipeTo(stream.writable({
	*   producerId: "my-producer",
	*   lingerMs: 10,
	*   maxBatchBytes: 64 * 1024,
	* }));
	* ```
	*/
	writable(opts) {
		const producerId = opts?.producerId ?? `writable-${crypto.randomUUID().slice(0, 8)}`;
		// First producer error is remembered and rethrown from close().
		let writeError = null;
		const producer = new IdempotentProducer(this, producerId, {
			autoClaim: true,
			lingerMs: opts?.lingerMs,
			maxBatchBytes: opts?.maxBatchBytes,
			onError: (error) => {
				if (!writeError) writeError = error;
				opts?.onError?.(error);
			},
			signal: opts?.signal ?? this.#options.signal
		});
		return new WritableStream({
			write(chunk) {
				// Fire-and-forget; delivery errors surface via onError/close().
				producer.append(chunk);
			},
			async close() {
				await producer.close();
				if (writeError) throw writeError;
			},
			abort(_reason) {
				producer.detach().catch((err) => {
					opts?.onError?.(err);
				});
			}
		});
	}
	/**
	* Start a fetch-like streaming session against this handle's URL/headers/params.
	* The first request is made inside this method; it resolves when we have
	* a valid first response, or rejects on errors.
	*
	* Call-specific headers and params are merged with handle-level ones,
	* with call-specific values taking precedence.
	*
	* @example
	* ```typescript
	* const handle = await DurableStream.connect({
	*   url,
	*   headers: { Authorization: `Bearer ${token}` }
	* });
	* const res = await handle.stream<{ message: string }>();
	*
	* // Accumulate all JSON items
	* const items = await res.json();
	*
	* // Or stream live with ReadableStream
	* const reader = res.jsonStream().getReader();
	* let result = await reader.read();
	* while (!result.done) {
	*   console.log(result.value);
	*   result = await reader.read();
	* }
	*
	* // Or use subscriber for backpressure-aware consumption
	* res.subscribeJson(async (batch) => {
	*   for (const item of batch.items) {
	*     console.log(item);
	*   }
	* });
	* ```
	*/
	async stream(options) {
		const mergedHeaders = {
			...this.#options.headers,
			...options?.headers
		};
		const mergedParams = {
			...this.#options.params,
			...options?.params
		};
		return stream({
			url: this.url,
			headers: mergedHeaders,
			params: mergedParams,
			signal: options?.signal ?? this.#options.signal,
			fetch: this.#options.fetch,
			backoffOptions: this.#options.backoffOptions,
			offset: options?.offset,
			live: options?.live,
			json: options?.json,
			onError: options?.onError ?? this.#onError,
			warnOnHttp: options?.warnOnHttp ?? this.#options.warnOnHttp
		});
	}
	/**
	* Build request headers and URL.
	* Resolves possibly-async header/param factories and applies params
	* as query-string entries on a fresh URL.
	*/
	async #buildRequest() {
		const requestHeaders = await resolveHeaders(this.#options.headers);
		const fetchUrl = new URL(this.url);
		const params = await resolveParams(this.#options.params);
		for (const [key, value] of Object.entries(params)) fetchUrl.searchParams.set(key, value);
		return {
			requestHeaders,
			fetchUrl
		};
	}
};
2750
/**
 * Normalize a body value into something fetch() can send.
 * - undefined passes through (no body)
 * - strings are UTF-8 encoded
 * - binary/stream-like bodies (Uint8Array, Blob, FormData, ReadableStream,
 *   ArrayBuffer, typed-array views) pass through untouched
 * - everything else is JSON-serialized, then UTF-8 encoded
 */
function encodeBody(body) {
	if (body === void 0) return void 0;
	if (typeof body === `string`) return new TextEncoder().encode(body);
	const isPassthrough = body instanceof Uint8Array || body instanceof Blob || body instanceof FormData || body instanceof ReadableStream || body instanceof ArrayBuffer || ArrayBuffer.isView(body);
	if (isPassthrough) return body;
	return new TextEncoder().encode(JSON.stringify(body));
}
2762
/**
 * Adapt a ReadableStream or async iterable into a byte ReadableStream.
 * String chunks are UTF-8 encoded; other chunks pass through as-is.
 */
function toReadableStream(source) {
	const encodeChunk = (chunk) => typeof chunk === `string` ? new TextEncoder().encode(chunk) : chunk;
	if (source instanceof ReadableStream) {
		const normalize = new TransformStream({ transform(chunk, controller) {
			controller.enqueue(encodeChunk(chunk));
		} });
		return source.pipeThrough(normalize);
	}
	const iterator = source[Symbol.asyncIterator]();
	return new ReadableStream({
		async pull(controller) {
			try {
				const step = await iterator.next();
				if (step.done) {
					controller.close();
					return;
				}
				controller.enqueue(encodeChunk(step.value));
			} catch (err) {
				controller.error(err);
			}
		},
		cancel() {
			// Let generator-style sources run their cleanup.
			iterator.return?.();
		}
	});
}
2788
/**
 * Validate handle options before constructing a DurableStream.
 * Throws MissingStreamUrlError when no url is given and InvalidSignalError
 * when a non-AbortSignal is passed; also warns about plain-HTTP use in
 * browser contexts.
 */
function validateOptions(options) {
	if (!options.url) throw new MissingStreamUrlError();
	const { signal } = options;
	if (signal && !(signal instanceof AbortSignal)) throw new InvalidSignalError();
	warnIfUsingHttpInBrowser(options.url, options.warnOnHttp);
}
2452
2796
 
2453
2797
  //#endregion
2454
2798
  exports.BackoffDefaults = BackoffDefaults
@@ -2468,7 +2812,9 @@ exports.PRODUCER_EXPECTED_SEQ_HEADER = PRODUCER_EXPECTED_SEQ_HEADER
2468
2812
  exports.PRODUCER_ID_HEADER = PRODUCER_ID_HEADER
2469
2813
  exports.PRODUCER_RECEIVED_SEQ_HEADER = PRODUCER_RECEIVED_SEQ_HEADER
2470
2814
  exports.PRODUCER_SEQ_HEADER = PRODUCER_SEQ_HEADER
2815
+ exports.SSE_CLOSED_FIELD = SSE_CLOSED_FIELD
2471
2816
  exports.SSE_COMPATIBLE_CONTENT_TYPES = SSE_COMPATIBLE_CONTENT_TYPES
2817
+ exports.STREAM_CLOSED_HEADER = STREAM_CLOSED_HEADER
2472
2818
  exports.STREAM_CURSOR_HEADER = STREAM_CURSOR_HEADER
2473
2819
  exports.STREAM_EXPIRES_AT_HEADER = STREAM_EXPIRES_AT_HEADER
2474
2820
  exports.STREAM_OFFSET_HEADER = STREAM_OFFSET_HEADER
@@ -2477,6 +2823,7 @@ exports.STREAM_TTL_HEADER = STREAM_TTL_HEADER
2477
2823
  exports.STREAM_UP_TO_DATE_HEADER = STREAM_UP_TO_DATE_HEADER
2478
2824
  exports.SequenceGapError = SequenceGapError
2479
2825
  exports.StaleEpochError = StaleEpochError
2826
+ exports.StreamClosedError = StreamClosedError
2480
2827
  exports._resetHttpWarningForTesting = _resetHttpWarningForTesting
2481
2828
  exports.asAsyncIterableReadableStream = asAsyncIterableReadableStream
2482
2829
  exports.createFetchWithBackoff = createFetchWithBackoff