@durable-streams/client 0.1.5 → 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +212 -18
- package/dist/index.cjs +1152 -805
- package/dist/index.d.cts +201 -33
- package/dist/index.d.ts +201 -33
- package/dist/index.js +1150 -806
- package/package.json +2 -2
- package/src/constants.ts +19 -2
- package/src/error.ts +20 -0
- package/src/idempotent-producer.ts +195 -43
- package/src/index.ts +7 -0
- package/src/response.ts +245 -35
- package/src/sse.ts +27 -5
- package/src/stream-api.ts +30 -10
- package/src/stream.ts +213 -71
- package/src/types.ts +97 -12
- package/src/utils.ts +10 -1
package/dist/index.js
CHANGED
|
@@ -22,6 +22,11 @@ const STREAM_CURSOR_HEADER = `Stream-Cursor`;
|
|
|
22
22
|
*/
|
|
23
23
|
const STREAM_UP_TO_DATE_HEADER = `Stream-Up-To-Date`;
|
|
24
24
|
/**
|
|
25
|
+
* Response/request header indicating stream is closed (EOF).
|
|
26
|
+
* When present with value "true", the stream is permanently closed.
|
|
27
|
+
*/
|
|
28
|
+
const STREAM_CLOSED_HEADER = `Stream-Closed`;
|
|
29
|
+
/**
|
|
25
30
|
* Request header for writer coordination sequence.
|
|
26
31
|
* Monotonic, lexicographic. If lower than last appended seq -> 409 Conflict.
|
|
27
32
|
*/
|
|
@@ -70,8 +75,17 @@ const LIVE_QUERY_PARAM = `live`;
|
|
|
70
75
|
*/
|
|
71
76
|
const CURSOR_QUERY_PARAM = `cursor`;
|
|
72
77
|
/**
|
|
73
|
-
*
|
|
74
|
-
|
|
78
|
+
* Response header indicating SSE data encoding (e.g., base64 for binary streams).
|
|
79
|
+
*/
|
|
80
|
+
const STREAM_SSE_DATA_ENCODING_HEADER = `stream-sse-data-encoding`;
|
|
81
|
+
/**
|
|
82
|
+
* SSE control event field for stream closed state.
|
|
83
|
+
* Note: Different from HTTP header name (camelCase vs Header-Case).
|
|
84
|
+
*/
|
|
85
|
+
const SSE_CLOSED_FIELD = `streamClosed`;
|
|
86
|
+
/**
|
|
87
|
+
* Content types that are natively compatible with SSE (UTF-8 text).
|
|
88
|
+
* Binary content types are also supported via automatic base64 encoding.
|
|
75
89
|
*/
|
|
76
90
|
const SSE_COMPATIBLE_CONTENT_TYPES = [`text/`, `application/json`];
|
|
77
91
|
/**
|
|
@@ -201,6 +215,23 @@ var MissingStreamUrlError = class extends Error {
|
|
|
201
215
|
}
|
|
202
216
|
};
|
|
203
217
|
/**
|
|
218
|
+
* Error thrown when attempting to append to a closed stream.
|
|
219
|
+
*/
|
|
220
|
+
var StreamClosedError = class extends DurableStreamError {
|
|
221
|
+
code = `STREAM_CLOSED`;
|
|
222
|
+
status = 409;
|
|
223
|
+
streamClosed = true;
|
|
224
|
+
/**
|
|
225
|
+
* The final offset of the stream, if available from the response.
|
|
226
|
+
*/
|
|
227
|
+
finalOffset;
|
|
228
|
+
constructor(url, finalOffset) {
|
|
229
|
+
super(`Cannot append to closed stream`, `STREAM_CLOSED`, 409, url);
|
|
230
|
+
this.name = `StreamClosedError`;
|
|
231
|
+
this.finalOffset = finalOffset;
|
|
232
|
+
}
|
|
233
|
+
};
|
|
234
|
+
/**
|
|
204
235
|
* Error thrown when signal option is invalid.
|
|
205
236
|
*/
|
|
206
237
|
var InvalidSignalError = class extends Error {
|
|
@@ -480,13 +511,19 @@ async function* parseSSEStream(stream$1, signal) {
|
|
|
480
511
|
type: `control`,
|
|
481
512
|
streamNextOffset: control.streamNextOffset,
|
|
482
513
|
streamCursor: control.streamCursor,
|
|
483
|
-
upToDate: control.upToDate
|
|
514
|
+
upToDate: control.upToDate,
|
|
515
|
+
streamClosed: control.streamClosed
|
|
484
516
|
};
|
|
485
|
-
} catch {
|
|
517
|
+
} catch (err) {
|
|
518
|
+
const preview = dataStr.length > 100 ? dataStr.slice(0, 100) + `...` : dataStr;
|
|
519
|
+
throw new DurableStreamError(`Failed to parse SSE control event: ${err instanceof Error ? err.message : String(err)}. Data: ${preview}`, `PARSE_ERROR`);
|
|
520
|
+
}
|
|
486
521
|
}
|
|
487
522
|
currentEvent = { data: [] };
|
|
488
|
-
} else if (line.startsWith(`event:`))
|
|
489
|
-
|
|
523
|
+
} else if (line.startsWith(`event:`)) {
|
|
524
|
+
const eventType = line.slice(6);
|
|
525
|
+
currentEvent.type = eventType.startsWith(` `) ? eventType.slice(1) : eventType;
|
|
526
|
+
} else if (line.startsWith(`data:`)) {
|
|
490
527
|
const content = line.slice(5);
|
|
491
528
|
currentEvent.data.push(content.startsWith(` `) ? content.slice(1) : content);
|
|
492
529
|
}
|
|
@@ -505,9 +542,13 @@ async function* parseSSEStream(stream$1, signal) {
|
|
|
505
542
|
type: `control`,
|
|
506
543
|
streamNextOffset: control.streamNextOffset,
|
|
507
544
|
streamCursor: control.streamCursor,
|
|
508
|
-
upToDate: control.upToDate
|
|
545
|
+
upToDate: control.upToDate,
|
|
546
|
+
streamClosed: control.streamClosed
|
|
509
547
|
};
|
|
510
|
-
} catch {
|
|
548
|
+
} catch (err) {
|
|
549
|
+
const preview = dataStr.length > 100 ? dataStr.slice(0, 100) + `...` : dataStr;
|
|
550
|
+
throw new DurableStreamError(`Failed to parse SSE control event: ${err instanceof Error ? err.message : String(err)}. Data: ${preview}`, `PARSE_ERROR`);
|
|
551
|
+
}
|
|
511
552
|
}
|
|
512
553
|
} finally {
|
|
513
554
|
reader.releaseLock();
|
|
@@ -533,9 +574,10 @@ var StreamResponseImpl = class {
|
|
|
533
574
|
#statusText;
|
|
534
575
|
#ok;
|
|
535
576
|
#isLoading;
|
|
536
|
-
offset;
|
|
537
|
-
cursor;
|
|
538
|
-
upToDate;
|
|
577
|
+
#offset;
|
|
578
|
+
#cursor;
|
|
579
|
+
#upToDate;
|
|
580
|
+
#streamClosed;
|
|
539
581
|
#isJsonMode;
|
|
540
582
|
#abortController;
|
|
541
583
|
#fetchNext;
|
|
@@ -555,15 +597,17 @@ var StreamResponseImpl = class {
|
|
|
555
597
|
#lastSSEConnectionStartTime;
|
|
556
598
|
#consecutiveShortSSEConnections = 0;
|
|
557
599
|
#sseFallbackToLongPoll = false;
|
|
600
|
+
#encoding;
|
|
558
601
|
#responseStream;
|
|
559
602
|
constructor(config) {
|
|
560
603
|
this.url = config.url;
|
|
561
604
|
this.contentType = config.contentType;
|
|
562
605
|
this.live = config.live;
|
|
563
606
|
this.startOffset = config.startOffset;
|
|
564
|
-
this
|
|
565
|
-
this
|
|
566
|
-
this
|
|
607
|
+
this.#offset = config.initialOffset;
|
|
608
|
+
this.#cursor = config.initialCursor;
|
|
609
|
+
this.#upToDate = config.initialUpToDate;
|
|
610
|
+
this.#streamClosed = config.initialStreamClosed;
|
|
567
611
|
this.#headers = config.firstResponse.headers;
|
|
568
612
|
this.#status = config.firstResponse.status;
|
|
569
613
|
this.#statusText = config.firstResponse.statusText;
|
|
@@ -580,6 +624,7 @@ var StreamResponseImpl = class {
|
|
|
580
624
|
backoffMaxDelay: config.sseResilience?.backoffMaxDelay ?? 5e3,
|
|
581
625
|
logWarnings: config.sseResilience?.logWarnings ?? true
|
|
582
626
|
};
|
|
627
|
+
this.#encoding = config.encoding;
|
|
583
628
|
this.#closed = new Promise((resolve, reject) => {
|
|
584
629
|
this.#closedResolve = resolve;
|
|
585
630
|
this.#closedReject = reject;
|
|
@@ -654,6 +699,18 @@ var StreamResponseImpl = class {
|
|
|
654
699
|
get isLoading() {
|
|
655
700
|
return this.#isLoading;
|
|
656
701
|
}
|
|
702
|
+
get offset() {
|
|
703
|
+
return this.#offset;
|
|
704
|
+
}
|
|
705
|
+
get cursor() {
|
|
706
|
+
return this.#cursor;
|
|
707
|
+
}
|
|
708
|
+
get upToDate() {
|
|
709
|
+
return this.#upToDate;
|
|
710
|
+
}
|
|
711
|
+
get streamClosed() {
|
|
712
|
+
return this.#streamClosed;
|
|
713
|
+
}
|
|
657
714
|
#ensureJsonMode() {
|
|
658
715
|
if (!this.#isJsonMode) throw new DurableStreamError(`JSON methods are only valid for JSON-mode streams. Content-Type is "${this.contentType}" and json hint was not set.`, `BAD_REQUEST`);
|
|
659
716
|
}
|
|
@@ -675,11 +732,12 @@ var StreamResponseImpl = class {
|
|
|
675
732
|
}
|
|
676
733
|
/**
|
|
677
734
|
* Determine if we should continue with live updates based on live mode
|
|
678
|
-
* and whether we've received upToDate.
|
|
735
|
+
* and whether we've received upToDate or streamClosed.
|
|
679
736
|
*/
|
|
680
737
|
#shouldContinueLive() {
|
|
681
738
|
if (this.#stopAfterUpToDate && this.upToDate) return false;
|
|
682
739
|
if (this.live === false) return false;
|
|
740
|
+
if (this.#streamClosed) return false;
|
|
683
741
|
return true;
|
|
684
742
|
}
|
|
685
743
|
/**
|
|
@@ -687,10 +745,12 @@ var StreamResponseImpl = class {
|
|
|
687
745
|
*/
|
|
688
746
|
#updateStateFromResponse(response) {
|
|
689
747
|
const offset = response.headers.get(STREAM_OFFSET_HEADER);
|
|
690
|
-
if (offset) this
|
|
748
|
+
if (offset) this.#offset = offset;
|
|
691
749
|
const cursor = response.headers.get(STREAM_CURSOR_HEADER);
|
|
692
|
-
if (cursor) this
|
|
693
|
-
this
|
|
750
|
+
if (cursor) this.#cursor = cursor;
|
|
751
|
+
this.#upToDate = response.headers.has(STREAM_UP_TO_DATE_HEADER);
|
|
752
|
+
const streamClosedHeader = response.headers.get(STREAM_CLOSED_HEADER);
|
|
753
|
+
if (streamClosedHeader?.toLowerCase() === `true`) this.#streamClosed = true;
|
|
694
754
|
this.#headers = response.headers;
|
|
695
755
|
this.#status = response.status;
|
|
696
756
|
this.#statusText = response.statusText;
|
|
@@ -698,7 +758,7 @@ var StreamResponseImpl = class {
|
|
|
698
758
|
}
|
|
699
759
|
/**
|
|
700
760
|
* Extract stream metadata from Response headers.
|
|
701
|
-
* Used by subscriber APIs to get the correct offset/cursor/upToDate for each
|
|
761
|
+
* Used by subscriber APIs to get the correct offset/cursor/upToDate/streamClosed for each
|
|
702
762
|
* specific Response, rather than reading from `this` which may be stale due to
|
|
703
763
|
* ReadableStream prefetching or timing issues.
|
|
704
764
|
*/
|
|
@@ -706,24 +766,74 @@ var StreamResponseImpl = class {
|
|
|
706
766
|
const offset = response.headers.get(STREAM_OFFSET_HEADER);
|
|
707
767
|
const cursor = response.headers.get(STREAM_CURSOR_HEADER);
|
|
708
768
|
const upToDate = response.headers.has(STREAM_UP_TO_DATE_HEADER);
|
|
769
|
+
const streamClosed = response.headers.get(STREAM_CLOSED_HEADER)?.toLowerCase() === `true`;
|
|
709
770
|
return {
|
|
710
771
|
offset: offset ?? this.offset,
|
|
711
772
|
cursor: cursor ?? this.cursor,
|
|
712
|
-
upToDate
|
|
773
|
+
upToDate,
|
|
774
|
+
streamClosed: streamClosed || this.streamClosed
|
|
713
775
|
};
|
|
714
776
|
}
|
|
715
777
|
/**
|
|
778
|
+
* Decode base64 string to Uint8Array.
|
|
779
|
+
* Per protocol: concatenate data lines, remove \n and \r, then decode.
|
|
780
|
+
*/
|
|
781
|
+
#decodeBase64(base64Str) {
|
|
782
|
+
const cleaned = base64Str.replace(/[\n\r]/g, ``);
|
|
783
|
+
if (cleaned.length === 0) return new Uint8Array(0);
|
|
784
|
+
if (cleaned.length % 4 !== 0) throw new DurableStreamError(`Invalid base64 data: length ${cleaned.length} is not a multiple of 4`, `PARSE_ERROR`);
|
|
785
|
+
try {
|
|
786
|
+
if (typeof Buffer !== `undefined`) return new Uint8Array(Buffer.from(cleaned, `base64`));
|
|
787
|
+
else {
|
|
788
|
+
const binaryStr = atob(cleaned);
|
|
789
|
+
const bytes = new Uint8Array(binaryStr.length);
|
|
790
|
+
for (let i = 0; i < binaryStr.length; i++) bytes[i] = binaryStr.charCodeAt(i);
|
|
791
|
+
return bytes;
|
|
792
|
+
}
|
|
793
|
+
} catch (err) {
|
|
794
|
+
throw new DurableStreamError(`Failed to decode base64 data: ${err instanceof Error ? err.message : String(err)}`, `PARSE_ERROR`);
|
|
795
|
+
}
|
|
796
|
+
}
|
|
797
|
+
/**
|
|
716
798
|
* Create a synthetic Response from SSE data with proper headers.
|
|
717
|
-
* Includes offset/cursor/upToDate in headers so subscribers can read them.
|
|
799
|
+
* Includes offset/cursor/upToDate/streamClosed in headers so subscribers can read them.
|
|
718
800
|
*/
|
|
719
|
-
#createSSESyntheticResponse(data, offset, cursor, upToDate) {
|
|
801
|
+
#createSSESyntheticResponse(data, offset, cursor, upToDate, streamClosed) {
|
|
802
|
+
return this.#createSSESyntheticResponseFromParts([data], offset, cursor, upToDate, streamClosed);
|
|
803
|
+
}
|
|
804
|
+
/**
|
|
805
|
+
* Create a synthetic Response from multiple SSE data parts.
|
|
806
|
+
* For base64 mode, each part is independently encoded, so we decode each
|
|
807
|
+
* separately and concatenate the binary results.
|
|
808
|
+
* For text mode, parts are simply concatenated as strings.
|
|
809
|
+
*/
|
|
810
|
+
#createSSESyntheticResponseFromParts(dataParts, offset, cursor, upToDate, streamClosed) {
|
|
720
811
|
const headers = {
|
|
721
812
|
"content-type": this.contentType ?? `application/json`,
|
|
722
813
|
[STREAM_OFFSET_HEADER]: String(offset)
|
|
723
814
|
};
|
|
724
815
|
if (cursor) headers[STREAM_CURSOR_HEADER] = cursor;
|
|
725
816
|
if (upToDate) headers[STREAM_UP_TO_DATE_HEADER] = `true`;
|
|
726
|
-
|
|
817
|
+
if (streamClosed) headers[STREAM_CLOSED_HEADER] = `true`;
|
|
818
|
+
let body;
|
|
819
|
+
if (this.#encoding === `base64`) {
|
|
820
|
+
const decodedParts = dataParts.filter((part) => part.length > 0).map((part) => this.#decodeBase64(part));
|
|
821
|
+
if (decodedParts.length === 0) body = new ArrayBuffer(0);
|
|
822
|
+
else if (decodedParts.length === 1) {
|
|
823
|
+
const decoded = decodedParts[0];
|
|
824
|
+
body = decoded.buffer.slice(decoded.byteOffset, decoded.byteOffset + decoded.byteLength);
|
|
825
|
+
} else {
|
|
826
|
+
const totalLength = decodedParts.reduce((sum, part) => sum + part.length, 0);
|
|
827
|
+
const combined = new Uint8Array(totalLength);
|
|
828
|
+
let offset$1 = 0;
|
|
829
|
+
for (const part of decodedParts) {
|
|
830
|
+
combined.set(part, offset$1);
|
|
831
|
+
offset$1 += part.length;
|
|
832
|
+
}
|
|
833
|
+
body = combined.buffer;
|
|
834
|
+
}
|
|
835
|
+
} else body = dataParts.join(``);
|
|
836
|
+
return new Response(body, {
|
|
727
837
|
status: 200,
|
|
728
838
|
headers
|
|
729
839
|
});
|
|
@@ -732,9 +842,13 @@ var StreamResponseImpl = class {
|
|
|
732
842
|
* Update instance state from an SSE control event.
|
|
733
843
|
*/
|
|
734
844
|
#updateStateFromSSEControl(controlEvent) {
|
|
735
|
-
this
|
|
736
|
-
if (controlEvent.streamCursor) this
|
|
737
|
-
if (controlEvent.upToDate !== void 0) this
|
|
845
|
+
this.#offset = controlEvent.streamNextOffset;
|
|
846
|
+
if (controlEvent.streamCursor) this.#cursor = controlEvent.streamCursor;
|
|
847
|
+
if (controlEvent.upToDate !== void 0) this.#upToDate = controlEvent.upToDate;
|
|
848
|
+
if (controlEvent.streamClosed) {
|
|
849
|
+
this.#streamClosed = true;
|
|
850
|
+
this.#upToDate = true;
|
|
851
|
+
}
|
|
738
852
|
}
|
|
739
853
|
/**
|
|
740
854
|
* Mark the start of an SSE connection for duration tracking.
|
|
@@ -807,19 +921,29 @@ var StreamResponseImpl = class {
|
|
|
807
921
|
}
|
|
808
922
|
if (event.type === `data`) return this.#processSSEDataEvent(event.data, sseEventIterator);
|
|
809
923
|
this.#updateStateFromSSEControl(event);
|
|
924
|
+
if (event.upToDate) {
|
|
925
|
+
const response = this.#createSSESyntheticResponse(``, event.streamNextOffset, event.streamCursor, true, event.streamClosed ?? false);
|
|
926
|
+
return {
|
|
927
|
+
type: `response`,
|
|
928
|
+
response
|
|
929
|
+
};
|
|
930
|
+
}
|
|
810
931
|
return { type: `continue` };
|
|
811
932
|
}
|
|
812
933
|
/**
|
|
813
934
|
* Process an SSE data event by waiting for its corresponding control event.
|
|
814
935
|
* In SSE protocol, control events come AFTER data events.
|
|
815
936
|
* Multiple data events may arrive before a single control event - we buffer them.
|
|
937
|
+
*
|
|
938
|
+
* For base64 mode, each data event is independently base64 encoded, so we
|
|
939
|
+
* collect them as an array and decode each separately.
|
|
816
940
|
*/
|
|
817
941
|
async #processSSEDataEvent(pendingData, sseEventIterator) {
|
|
818
|
-
|
|
942
|
+
const bufferedDataParts = [pendingData];
|
|
819
943
|
while (true) {
|
|
820
944
|
const { done: controlDone, value: controlEvent } = await sseEventIterator.next();
|
|
821
945
|
if (controlDone) {
|
|
822
|
-
const response = this.#
|
|
946
|
+
const response = this.#createSSESyntheticResponseFromParts(bufferedDataParts, this.offset, this.cursor, this.upToDate, this.streamClosed);
|
|
823
947
|
try {
|
|
824
948
|
const newIterator = await this.#trySSEReconnect();
|
|
825
949
|
return {
|
|
@@ -836,13 +960,13 @@ var StreamResponseImpl = class {
|
|
|
836
960
|
}
|
|
837
961
|
if (controlEvent.type === `control`) {
|
|
838
962
|
this.#updateStateFromSSEControl(controlEvent);
|
|
839
|
-
const response = this.#
|
|
963
|
+
const response = this.#createSSESyntheticResponseFromParts(bufferedDataParts, controlEvent.streamNextOffset, controlEvent.streamCursor, controlEvent.upToDate ?? false, controlEvent.streamClosed ?? false);
|
|
840
964
|
return {
|
|
841
965
|
type: `response`,
|
|
842
966
|
response
|
|
843
967
|
};
|
|
844
968
|
}
|
|
845
|
-
|
|
969
|
+
bufferedDataParts.push(controlEvent.data);
|
|
846
970
|
}
|
|
847
971
|
}
|
|
848
972
|
/**
|
|
@@ -1000,7 +1124,13 @@ var StreamResponseImpl = class {
|
|
|
1000
1124
|
const wasUpToDate = this.upToDate;
|
|
1001
1125
|
const text = await result.value.text();
|
|
1002
1126
|
const content = text.trim() || `[]`;
|
|
1003
|
-
|
|
1127
|
+
let parsed;
|
|
1128
|
+
try {
|
|
1129
|
+
parsed = JSON.parse(content);
|
|
1130
|
+
} catch (err) {
|
|
1131
|
+
const preview = content.length > 100 ? content.slice(0, 100) + `...` : content;
|
|
1132
|
+
throw new DurableStreamError(`Failed to parse JSON response: ${err instanceof Error ? err.message : String(err)}. Data: ${preview}`, `PARSE_ERROR`);
|
|
1133
|
+
}
|
|
1004
1134
|
if (Array.isArray(parsed)) items.push(...parsed);
|
|
1005
1135
|
else items.push(parsed);
|
|
1006
1136
|
if (wasUpToDate) break;
|
|
@@ -1097,7 +1227,13 @@ var StreamResponseImpl = class {
|
|
|
1097
1227
|
}
|
|
1098
1228
|
const text = await response.text();
|
|
1099
1229
|
const content = text.trim() || `[]`;
|
|
1100
|
-
|
|
1230
|
+
let parsed;
|
|
1231
|
+
try {
|
|
1232
|
+
parsed = JSON.parse(content);
|
|
1233
|
+
} catch (err) {
|
|
1234
|
+
const preview = content.length > 100 ? content.slice(0, 100) + `...` : content;
|
|
1235
|
+
throw new DurableStreamError(`Failed to parse JSON response: ${err instanceof Error ? err.message : String(err)}. Data: ${preview}`, `PARSE_ERROR`);
|
|
1236
|
+
}
|
|
1101
1237
|
pendingItems = Array.isArray(parsed) ? parsed : [parsed];
|
|
1102
1238
|
if (pendingItems.length > 0) controller.enqueue(pendingItems.shift());
|
|
1103
1239
|
},
|
|
@@ -1133,16 +1269,23 @@ var StreamResponseImpl = class {
|
|
|
1133
1269
|
while (!result.done) {
|
|
1134
1270
|
if (abortController.signal.aborted) break;
|
|
1135
1271
|
const response = result.value;
|
|
1136
|
-
const { offset, cursor, upToDate } = this.#getMetadataFromResponse(response);
|
|
1272
|
+
const { offset, cursor, upToDate, streamClosed } = this.#getMetadataFromResponse(response);
|
|
1137
1273
|
const text = await response.text();
|
|
1138
1274
|
const content = text.trim() || `[]`;
|
|
1139
|
-
|
|
1275
|
+
let parsed;
|
|
1276
|
+
try {
|
|
1277
|
+
parsed = JSON.parse(content);
|
|
1278
|
+
} catch (err) {
|
|
1279
|
+
const preview = content.length > 100 ? content.slice(0, 100) + `...` : content;
|
|
1280
|
+
throw new DurableStreamError(`Failed to parse JSON response: ${err instanceof Error ? err.message : String(err)}. Data: ${preview}`, `PARSE_ERROR`);
|
|
1281
|
+
}
|
|
1140
1282
|
const items = Array.isArray(parsed) ? parsed : [parsed];
|
|
1141
1283
|
await subscriber({
|
|
1142
1284
|
items,
|
|
1143
1285
|
offset,
|
|
1144
1286
|
cursor,
|
|
1145
|
-
upToDate
|
|
1287
|
+
upToDate,
|
|
1288
|
+
streamClosed
|
|
1146
1289
|
});
|
|
1147
1290
|
result = await reader.read();
|
|
1148
1291
|
}
|
|
@@ -1172,13 +1315,14 @@ var StreamResponseImpl = class {
|
|
|
1172
1315
|
while (!result.done) {
|
|
1173
1316
|
if (abortController.signal.aborted) break;
|
|
1174
1317
|
const response = result.value;
|
|
1175
|
-
const { offset, cursor, upToDate } = this.#getMetadataFromResponse(response);
|
|
1318
|
+
const { offset, cursor, upToDate, streamClosed } = this.#getMetadataFromResponse(response);
|
|
1176
1319
|
const buffer = await response.arrayBuffer();
|
|
1177
1320
|
await subscriber({
|
|
1178
1321
|
data: new Uint8Array(buffer),
|
|
1179
1322
|
offset,
|
|
1180
1323
|
cursor,
|
|
1181
|
-
upToDate
|
|
1324
|
+
upToDate,
|
|
1325
|
+
streamClosed
|
|
1182
1326
|
});
|
|
1183
1327
|
result = await reader.read();
|
|
1184
1328
|
}
|
|
@@ -1208,13 +1352,14 @@ var StreamResponseImpl = class {
|
|
|
1208
1352
|
while (!result.done) {
|
|
1209
1353
|
if (abortController.signal.aborted) break;
|
|
1210
1354
|
const response = result.value;
|
|
1211
|
-
const { offset, cursor, upToDate } = this.#getMetadataFromResponse(response);
|
|
1355
|
+
const { offset, cursor, upToDate, streamClosed } = this.#getMetadataFromResponse(response);
|
|
1212
1356
|
const text = await response.text();
|
|
1213
1357
|
await subscriber({
|
|
1214
1358
|
text,
|
|
1215
1359
|
offset,
|
|
1216
1360
|
cursor,
|
|
1217
|
-
upToDate
|
|
1361
|
+
upToDate,
|
|
1362
|
+
streamClosed
|
|
1218
1363
|
});
|
|
1219
1364
|
result = await reader.read();
|
|
1220
1365
|
}
|
|
@@ -1265,6 +1410,11 @@ async function handleErrorResponse(response, url, context) {
|
|
|
1265
1410
|
const status = response.status;
|
|
1266
1411
|
if (status === 404) throw new DurableStreamError(`Stream not found: ${url}`, `NOT_FOUND`, 404);
|
|
1267
1412
|
if (status === 409) {
|
|
1413
|
+
const streamClosedHeader = response.headers.get(STREAM_CLOSED_HEADER);
|
|
1414
|
+
if (streamClosedHeader?.toLowerCase() === `true`) {
|
|
1415
|
+
const finalOffset = response.headers.get(STREAM_OFFSET_HEADER) ?? void 0;
|
|
1416
|
+
throw new StreamClosedError(url, finalOffset);
|
|
1417
|
+
}
|
|
1268
1418
|
const message = context?.operation === `create` ? `Stream already exists: ${url}` : `Sequence conflict: seq is lower than last appended`;
|
|
1269
1419
|
const code = context?.operation === `create` ? `CONFLICT_EXISTS` : `CONFLICT_SEQ`;
|
|
1270
1420
|
throw new DurableStreamError(message, code, 409);
|
|
@@ -1381,7 +1531,7 @@ function _resetHttpWarningForTesting() {
|
|
|
1381
1531
|
* url,
|
|
1382
1532
|
* auth,
|
|
1383
1533
|
* offset: savedOffset,
|
|
1384
|
-
* live:
|
|
1534
|
+
* live: true,
|
|
1385
1535
|
* })
|
|
1386
1536
|
* live.subscribeJson(async (batch) => {
|
|
1387
1537
|
* for (const item of batch.items) {
|
|
@@ -1422,10 +1572,11 @@ async function stream(options) {
|
|
|
1422
1572
|
*/
|
|
1423
1573
|
async function streamInternal(options) {
|
|
1424
1574
|
const url = options.url instanceof URL ? options.url.toString() : options.url;
|
|
1575
|
+
warnIfUsingHttpInBrowser(url, options.warnOnHttp);
|
|
1425
1576
|
const fetchUrl = new URL(url);
|
|
1426
1577
|
const startOffset = options.offset ?? `-1`;
|
|
1427
1578
|
fetchUrl.searchParams.set(OFFSET_QUERY_PARAM, startOffset);
|
|
1428
|
-
const live = options.live ??
|
|
1579
|
+
const live = options.live ?? true;
|
|
1429
1580
|
if (live === `long-poll` || live === `sse`) fetchUrl.searchParams.set(LIVE_QUERY_PARAM, live);
|
|
1430
1581
|
const params = await resolveParams(options.params);
|
|
1431
1582
|
for (const [key, value] of Object.entries(params)) fetchUrl.searchParams.set(key, value);
|
|
@@ -1450,13 +1601,16 @@ async function streamInternal(options) {
|
|
|
1450
1601
|
const initialOffset = firstResponse.headers.get(STREAM_OFFSET_HEADER) ?? startOffset;
|
|
1451
1602
|
const initialCursor = firstResponse.headers.get(STREAM_CURSOR_HEADER) ?? void 0;
|
|
1452
1603
|
const initialUpToDate = firstResponse.headers.has(STREAM_UP_TO_DATE_HEADER);
|
|
1604
|
+
const initialStreamClosed = firstResponse.headers.get(STREAM_CLOSED_HEADER)?.toLowerCase() === `true`;
|
|
1453
1605
|
const isJsonMode = options.json === true || (contentType?.includes(`application/json`) ?? false);
|
|
1606
|
+
const sseDataEncoding = firstResponse.headers.get(STREAM_SSE_DATA_ENCODING_HEADER);
|
|
1607
|
+
const encoding = sseDataEncoding === `base64` ? `base64` : void 0;
|
|
1454
1608
|
const fetchNext = async (offset, cursor, signal, resumingFromPause) => {
|
|
1455
1609
|
const nextUrl = new URL(url);
|
|
1456
1610
|
nextUrl.searchParams.set(OFFSET_QUERY_PARAM, offset);
|
|
1457
1611
|
if (!resumingFromPause) {
|
|
1458
|
-
if (live === `
|
|
1459
|
-
else if (live === `
|
|
1612
|
+
if (live === `sse`) nextUrl.searchParams.set(LIVE_QUERY_PARAM, `sse`);
|
|
1613
|
+
else if (live === true || live === `long-poll`) nextUrl.searchParams.set(LIVE_QUERY_PARAM, `long-poll`);
|
|
1460
1614
|
}
|
|
1461
1615
|
if (cursor) nextUrl.searchParams.set(`cursor`, cursor);
|
|
1462
1616
|
const nextParams = await resolveParams(options.params);
|
|
@@ -1495,936 +1649,1126 @@ async function streamInternal(options) {
|
|
|
1495
1649
|
initialOffset,
|
|
1496
1650
|
initialCursor,
|
|
1497
1651
|
initialUpToDate,
|
|
1652
|
+
initialStreamClosed,
|
|
1498
1653
|
firstResponse,
|
|
1499
1654
|
abortController,
|
|
1500
1655
|
fetchNext,
|
|
1501
1656
|
startSSE,
|
|
1502
|
-
sseResilience: options.sseResilience
|
|
1657
|
+
sseResilience: options.sseResilience,
|
|
1658
|
+
encoding
|
|
1503
1659
|
});
|
|
1504
1660
|
}
|
|
1505
1661
|
|
|
1506
1662
|
//#endregion
|
|
1507
|
-
//#region src/
|
|
1663
|
+
//#region src/idempotent-producer.ts
|
|
1664
|
+
/**
|
|
1665
|
+
* Error thrown when a producer's epoch is stale (zombie fencing).
|
|
1666
|
+
*/
|
|
1667
|
+
var StaleEpochError = class extends Error {
|
|
1668
|
+
/**
|
|
1669
|
+
* The current epoch on the server.
|
|
1670
|
+
*/
|
|
1671
|
+
currentEpoch;
|
|
1672
|
+
constructor(currentEpoch) {
|
|
1673
|
+
super(`Producer epoch is stale. Current server epoch: ${currentEpoch}. Call restart() or create a new producer with a higher epoch.`);
|
|
1674
|
+
this.name = `StaleEpochError`;
|
|
1675
|
+
this.currentEpoch = currentEpoch;
|
|
1676
|
+
}
|
|
1677
|
+
};
|
|
1678
|
+
/**
|
|
1679
|
+
* Error thrown when an unrecoverable sequence gap is detected.
|
|
1680
|
+
*
|
|
1681
|
+
* With maxInFlight > 1, HTTP requests can arrive out of order at the server,
|
|
1682
|
+
* causing temporary 409 responses. The client automatically handles these
|
|
1683
|
+
* by waiting for earlier sequences to complete, then retrying.
|
|
1684
|
+
*
|
|
1685
|
+
* This error is only thrown when the gap cannot be resolved (e.g., the
|
|
1686
|
+
* expected sequence is >= our sequence, indicating a true protocol violation).
|
|
1687
|
+
*/
|
|
1688
|
+
var SequenceGapError = class extends Error {
|
|
1689
|
+
expectedSeq;
|
|
1690
|
+
receivedSeq;
|
|
1691
|
+
constructor(expectedSeq, receivedSeq) {
|
|
1692
|
+
super(`Producer sequence gap: expected ${expectedSeq}, received ${receivedSeq}`);
|
|
1693
|
+
this.name = `SequenceGapError`;
|
|
1694
|
+
this.expectedSeq = expectedSeq;
|
|
1695
|
+
this.receivedSeq = receivedSeq;
|
|
1696
|
+
}
|
|
1697
|
+
};
|
|
1508
1698
|
/**
|
|
1509
1699
|
* Normalize content-type by extracting the media type (before any semicolon).
|
|
1510
|
-
* Handles cases like "application/json; charset=utf-8".
|
|
1511
1700
|
*/
|
|
1512
1701
|
function normalizeContentType$1(contentType) {
|
|
1513
1702
|
if (!contentType) return ``;
|
|
1514
1703
|
return contentType.split(`;`)[0].trim().toLowerCase();
|
|
1515
1704
|
}
|
|
1516
1705
|
/**
|
|
1517
|
-
*
|
|
1518
|
-
*/
|
|
1519
|
-
function isPromiseLike(value) {
|
|
1520
|
-
return value !== null && typeof value === `object` && `then` in value && typeof value.then === `function`;
|
|
1521
|
-
}
|
|
1522
|
-
/**
|
|
1523
|
-
* A handle to a remote durable stream for read/write operations.
|
|
1706
|
+
* An idempotent producer for exactly-once writes to a durable stream.
|
|
1524
1707
|
*
|
|
1525
|
-
*
|
|
1526
|
-
*
|
|
1527
|
-
*
|
|
1708
|
+
* Features:
|
|
1709
|
+
* - Fire-and-forget: append() returns immediately, batches in background
|
|
1710
|
+
* - Exactly-once: server deduplicates using (producerId, epoch, seq)
|
|
1711
|
+
* - Batching: multiple appends batched into single HTTP request
|
|
1712
|
+
* - Pipelining: up to maxInFlight concurrent batches
|
|
1713
|
+
* - Zombie fencing: stale producers rejected via epoch validation
|
|
1528
1714
|
*
|
|
1529
1715
|
* @example
|
|
1530
1716
|
* ```typescript
|
|
1531
|
-
*
|
|
1532
|
-
* const
|
|
1533
|
-
*
|
|
1534
|
-
*
|
|
1535
|
-
* contentType: "application/json"
|
|
1717
|
+
* const stream = new DurableStream({ url: "https://..." });
|
|
1718
|
+
* const producer = new IdempotentProducer(stream, "order-service-1", {
|
|
1719
|
+
* epoch: 0,
|
|
1720
|
+
* autoClaim: true,
|
|
1536
1721
|
* });
|
|
1537
1722
|
*
|
|
1538
|
-
* //
|
|
1539
|
-
*
|
|
1723
|
+
* // Fire-and-forget writes (synchronous, returns immediately)
|
|
1724
|
+
* producer.append("message 1");
|
|
1725
|
+
* producer.append("message 2");
|
|
1540
1726
|
*
|
|
1541
|
-
* //
|
|
1542
|
-
*
|
|
1543
|
-
*
|
|
1544
|
-
* for (const item of batch.items) {
|
|
1545
|
-
* console.log(item.message);
|
|
1546
|
-
* }
|
|
1547
|
-
* });
|
|
1727
|
+
* // Ensure all messages are delivered before shutdown
|
|
1728
|
+
* await producer.flush();
|
|
1729
|
+
* await producer.close();
|
|
1548
1730
|
* ```
|
|
1549
1731
|
*/
|
|
1550
|
-
var
|
|
1551
|
-
|
|
1552
|
-
|
|
1553
|
-
|
|
1554
|
-
|
|
1555
|
-
|
|
1556
|
-
|
|
1557
|
-
|
|
1558
|
-
contentType;
|
|
1559
|
-
#options;
|
|
1732
|
+
var IdempotentProducer = class {
|
|
1733
|
+
#stream;
|
|
1734
|
+
#producerId;
|
|
1735
|
+
#epoch;
|
|
1736
|
+
#nextSeq = 0;
|
|
1737
|
+
#autoClaim;
|
|
1738
|
+
#maxBatchBytes;
|
|
1739
|
+
#lingerMs;
|
|
1560
1740
|
#fetchClient;
|
|
1741
|
+
#signal;
|
|
1561
1742
|
#onError;
|
|
1562
|
-
#
|
|
1743
|
+
#pendingBatch = [];
|
|
1744
|
+
#batchBytes = 0;
|
|
1745
|
+
#lingerTimeout = null;
|
|
1563
1746
|
#queue;
|
|
1564
|
-
#
|
|
1565
|
-
|
|
1566
|
-
|
|
1567
|
-
|
|
1568
|
-
|
|
1569
|
-
|
|
1570
|
-
validateOptions(opts);
|
|
1571
|
-
const urlStr = opts.url instanceof URL ? opts.url.toString() : opts.url;
|
|
1572
|
-
this.url = urlStr;
|
|
1573
|
-
this.#options = {
|
|
1574
|
-
...opts,
|
|
1575
|
-
url: urlStr
|
|
1576
|
-
};
|
|
1577
|
-
this.#onError = opts.onError;
|
|
1578
|
-
if (opts.contentType) this.contentType = opts.contentType;
|
|
1579
|
-
this.#batchingEnabled = opts.batching !== false;
|
|
1580
|
-
if (this.#batchingEnabled) this.#queue = fastq.promise(this.#batchWorker.bind(this), 1);
|
|
1581
|
-
const baseFetchClient = opts.fetch ?? ((...args) => fetch(...args));
|
|
1582
|
-
const backOffOpts = { ...opts.backoffOptions ?? BackoffDefaults };
|
|
1583
|
-
const fetchWithBackoffClient = createFetchWithBackoff(baseFetchClient, backOffOpts);
|
|
1584
|
-
this.#fetchClient = createFetchWithConsumedBody(fetchWithBackoffClient);
|
|
1585
|
-
}
|
|
1747
|
+
#maxInFlight;
|
|
1748
|
+
#closed = false;
|
|
1749
|
+
#closeResult = null;
|
|
1750
|
+
#pendingFinalMessage;
|
|
1751
|
+
#epochClaimed;
|
|
1752
|
+
#seqState = new Map();
|
|
1586
1753
|
/**
|
|
1587
|
-
* Create
|
|
1588
|
-
*
|
|
1754
|
+
* Create an idempotent producer for a stream.
|
|
1755
|
+
*
|
|
1756
|
+
* @param stream - The DurableStream to write to
|
|
1757
|
+
* @param producerId - Stable identifier for this producer (e.g., "order-service-1")
|
|
1758
|
+
* @param opts - Producer options
|
|
1589
1759
|
*/
|
|
1590
|
-
|
|
1591
|
-
const
|
|
1592
|
-
|
|
1593
|
-
|
|
1594
|
-
|
|
1595
|
-
|
|
1596
|
-
|
|
1597
|
-
|
|
1598
|
-
|
|
1760
|
+
constructor(stream$1, producerId, opts) {
|
|
1761
|
+
const epoch = opts?.epoch ?? 0;
|
|
1762
|
+
const maxBatchBytes = opts?.maxBatchBytes ?? 1024 * 1024;
|
|
1763
|
+
const maxInFlight = opts?.maxInFlight ?? 5;
|
|
1764
|
+
const lingerMs = opts?.lingerMs ?? 5;
|
|
1765
|
+
if (epoch < 0) throw new Error(`epoch must be >= 0`);
|
|
1766
|
+
if (maxBatchBytes <= 0) throw new Error(`maxBatchBytes must be > 0`);
|
|
1767
|
+
if (maxInFlight <= 0) throw new Error(`maxInFlight must be > 0`);
|
|
1768
|
+
if (lingerMs < 0) throw new Error(`lingerMs must be >= 0`);
|
|
1769
|
+
this.#stream = stream$1;
|
|
1770
|
+
this.#producerId = producerId;
|
|
1771
|
+
this.#epoch = epoch;
|
|
1772
|
+
this.#autoClaim = opts?.autoClaim ?? false;
|
|
1773
|
+
this.#maxBatchBytes = maxBatchBytes;
|
|
1774
|
+
this.#lingerMs = lingerMs;
|
|
1775
|
+
this.#signal = opts?.signal;
|
|
1776
|
+
this.#onError = opts?.onError;
|
|
1777
|
+
this.#fetchClient = opts?.fetch ?? ((...args) => fetch(...args));
|
|
1778
|
+
this.#maxInFlight = maxInFlight;
|
|
1779
|
+
this.#epochClaimed = !this.#autoClaim;
|
|
1780
|
+
this.#queue = fastq.promise(this.#batchWorker.bind(this), this.#maxInFlight);
|
|
1781
|
+
if (this.#signal) this.#signal.addEventListener(`abort`, () => {
|
|
1782
|
+
this.#rejectPendingBatch(new DurableStreamError(`Producer aborted`, `ALREADY_CLOSED`, void 0, void 0));
|
|
1783
|
+
}, { once: true });
|
|
1599
1784
|
}
|
|
1600
1785
|
/**
|
|
1601
|
-
*
|
|
1602
|
-
* Returns a handle with contentType populated (if sent by server).
|
|
1786
|
+
* Append data to the stream.
|
|
1603
1787
|
*
|
|
1604
|
-
*
|
|
1605
|
-
*
|
|
1606
|
-
*
|
|
1788
|
+
* This is fire-and-forget: returns immediately after adding to the batch.
|
|
1789
|
+
* The message is batched and sent when:
|
|
1790
|
+
* - maxBatchBytes is reached
|
|
1791
|
+
* - lingerMs elapses
|
|
1792
|
+
* - flush() is called
|
|
1793
|
+
*
|
|
1794
|
+
* Errors are reported via onError callback if configured. Use flush() to
|
|
1795
|
+
* wait for all pending messages to be sent.
|
|
1796
|
+
*
|
|
1797
|
+
* For JSON streams, pass pre-serialized JSON strings.
|
|
1798
|
+
* For byte streams, pass string or Uint8Array.
|
|
1799
|
+
*
|
|
1800
|
+
* @param body - Data to append (string or Uint8Array)
|
|
1607
1801
|
*
|
|
1608
1802
|
* @example
|
|
1609
1803
|
* ```typescript
|
|
1610
|
-
* //
|
|
1611
|
-
*
|
|
1612
|
-
*
|
|
1804
|
+
* // JSON stream
|
|
1805
|
+
* producer.append(JSON.stringify({ message: "hello" }));
|
|
1806
|
+
*
|
|
1807
|
+
* // Byte stream
|
|
1808
|
+
* producer.append("raw text data");
|
|
1809
|
+
* producer.append(new Uint8Array([1, 2, 3]));
|
|
1613
1810
|
* ```
|
|
1614
1811
|
*/
|
|
1615
|
-
|
|
1616
|
-
|
|
1617
|
-
|
|
1618
|
-
|
|
1812
|
+
append(body) {
|
|
1813
|
+
if (this.#closed) throw new DurableStreamError(`Producer is closed`, `ALREADY_CLOSED`, void 0, void 0);
|
|
1814
|
+
let bytes;
|
|
1815
|
+
if (typeof body === `string`) bytes = new TextEncoder().encode(body);
|
|
1816
|
+
else if (body instanceof Uint8Array) bytes = body;
|
|
1817
|
+
else throw new DurableStreamError(`append() requires string or Uint8Array. For objects, use JSON.stringify().`, `BAD_REQUEST`, 400, void 0);
|
|
1818
|
+
this.#pendingBatch.push({ body: bytes });
|
|
1819
|
+
this.#batchBytes += bytes.length;
|
|
1820
|
+
if (this.#batchBytes >= this.#maxBatchBytes) this.#enqueuePendingBatch();
|
|
1821
|
+
else if (!this.#lingerTimeout) this.#lingerTimeout = setTimeout(() => {
|
|
1822
|
+
this.#lingerTimeout = null;
|
|
1823
|
+
if (this.#pendingBatch.length > 0) this.#enqueuePendingBatch();
|
|
1824
|
+
}, this.#lingerMs);
|
|
1619
1825
|
}
|
|
1620
1826
|
/**
|
|
1621
|
-
*
|
|
1827
|
+
* Send any pending batch immediately and wait for all in-flight batches.
|
|
1828
|
+
*
|
|
1829
|
+
* Call this before shutdown to ensure all messages are delivered.
|
|
1622
1830
|
*/
|
|
1623
|
-
|
|
1624
|
-
|
|
1625
|
-
|
|
1831
|
+
async flush() {
|
|
1832
|
+
if (this.#lingerTimeout) {
|
|
1833
|
+
clearTimeout(this.#lingerTimeout);
|
|
1834
|
+
this.#lingerTimeout = null;
|
|
1835
|
+
}
|
|
1836
|
+
if (this.#pendingBatch.length > 0) this.#enqueuePendingBatch();
|
|
1837
|
+
await this.#queue.drained();
|
|
1626
1838
|
}
|
|
1627
1839
|
/**
|
|
1628
|
-
*
|
|
1840
|
+
* Stop the producer without closing the underlying stream.
|
|
1841
|
+
*
|
|
1842
|
+
* Use this when you want to:
|
|
1843
|
+
* - Hand off writing to another producer
|
|
1844
|
+
* - Keep the stream open for future writes
|
|
1845
|
+
* - Stop this producer but not signal EOF to readers
|
|
1846
|
+
*
|
|
1847
|
+
* Flushes any pending messages before detaching.
|
|
1848
|
+
* After calling detach(), further append() calls will throw.
|
|
1629
1849
|
*/
|
|
1630
|
-
|
|
1631
|
-
|
|
1632
|
-
|
|
1633
|
-
|
|
1634
|
-
|
|
1635
|
-
|
|
1850
|
+
async detach() {
|
|
1851
|
+
if (this.#closed) return;
|
|
1852
|
+
this.#closed = true;
|
|
1853
|
+
try {
|
|
1854
|
+
await this.flush();
|
|
1855
|
+
} catch {}
|
|
1856
|
+
}
|
|
1857
|
+
/**
|
|
1858
|
+
* Flush pending messages and close the underlying stream (EOF).
|
|
1859
|
+
*
|
|
1860
|
+
* This is the typical way to end a producer session. It:
|
|
1861
|
+
* 1. Flushes all pending messages
|
|
1862
|
+
* 2. Optionally appends a final message
|
|
1863
|
+
* 3. Closes the stream (no further appends permitted)
|
|
1864
|
+
*
|
|
1865
|
+
* **Idempotent**: Unlike `DurableStream.close({ body })`, this method is
|
|
1866
|
+
* idempotent even with a final message because it uses producer headers
|
|
1867
|
+
* for deduplication. Safe to retry on network failures.
|
|
1868
|
+
*
|
|
1869
|
+
* @param finalMessage - Optional final message to append atomically with close
|
|
1870
|
+
* @returns CloseResult with the final offset
|
|
1636
1871
|
*/
|
|
1637
|
-
async
|
|
1638
|
-
|
|
1639
|
-
|
|
1640
|
-
|
|
1641
|
-
|
|
1642
|
-
|
|
1643
|
-
|
|
1644
|
-
|
|
1645
|
-
|
|
1646
|
-
|
|
1647
|
-
|
|
1648
|
-
const
|
|
1649
|
-
|
|
1650
|
-
return
|
|
1651
|
-
exists: true,
|
|
1652
|
-
contentType,
|
|
1653
|
-
offset,
|
|
1654
|
-
etag,
|
|
1655
|
-
cacheControl
|
|
1656
|
-
};
|
|
1872
|
+
async close(finalMessage) {
|
|
1873
|
+
if (this.#closed) {
|
|
1874
|
+
if (this.#closeResult) return this.#closeResult;
|
|
1875
|
+
await this.flush();
|
|
1876
|
+
const result$1 = await this.#doClose(this.#pendingFinalMessage);
|
|
1877
|
+
this.#closeResult = result$1;
|
|
1878
|
+
return result$1;
|
|
1879
|
+
}
|
|
1880
|
+
this.#closed = true;
|
|
1881
|
+
this.#pendingFinalMessage = finalMessage;
|
|
1882
|
+
await this.flush();
|
|
1883
|
+
const result = await this.#doClose(finalMessage);
|
|
1884
|
+
this.#closeResult = result;
|
|
1885
|
+
return result;
|
|
1657
1886
|
}
|
|
1658
1887
|
/**
|
|
1659
|
-
*
|
|
1888
|
+
* Actually close the stream with optional final message.
|
|
1889
|
+
* Uses producer headers for idempotency.
|
|
1660
1890
|
*/
|
|
1661
|
-
async
|
|
1662
|
-
const
|
|
1663
|
-
const
|
|
1664
|
-
|
|
1665
|
-
if (
|
|
1666
|
-
|
|
1667
|
-
|
|
1668
|
-
|
|
1669
|
-
|
|
1670
|
-
|
|
1891
|
+
async #doClose(finalMessage) {
|
|
1892
|
+
const contentType = this.#stream.contentType ?? `application/octet-stream`;
|
|
1893
|
+
const isJson = normalizeContentType$1(contentType) === `application/json`;
|
|
1894
|
+
let body;
|
|
1895
|
+
if (finalMessage !== void 0) {
|
|
1896
|
+
const bodyBytes = typeof finalMessage === `string` ? new TextEncoder().encode(finalMessage) : finalMessage;
|
|
1897
|
+
if (isJson) {
|
|
1898
|
+
const jsonStr = new TextDecoder().decode(bodyBytes);
|
|
1899
|
+
body = `[${jsonStr}]`;
|
|
1900
|
+
} else body = bodyBytes;
|
|
1901
|
+
}
|
|
1902
|
+
const seqForThisRequest = this.#nextSeq;
|
|
1903
|
+
const headers = {
|
|
1904
|
+
"content-type": contentType,
|
|
1905
|
+
[PRODUCER_ID_HEADER]: this.#producerId,
|
|
1906
|
+
[PRODUCER_EPOCH_HEADER]: this.#epoch.toString(),
|
|
1907
|
+
[PRODUCER_SEQ_HEADER]: seqForThisRequest.toString(),
|
|
1908
|
+
[STREAM_CLOSED_HEADER]: `true`
|
|
1909
|
+
};
|
|
1910
|
+
const response = await this.#fetchClient(this.#stream.url, {
|
|
1911
|
+
method: `POST`,
|
|
1912
|
+
headers,
|
|
1671
1913
|
body,
|
|
1672
|
-
signal: this.#
|
|
1914
|
+
signal: this.#signal
|
|
1673
1915
|
});
|
|
1674
|
-
if (
|
|
1675
|
-
|
|
1676
|
-
|
|
1677
|
-
|
|
1678
|
-
|
|
1916
|
+
if (response.status === 204) {
|
|
1917
|
+
this.#nextSeq = seqForThisRequest + 1;
|
|
1918
|
+
const finalOffset = response.headers.get(STREAM_OFFSET_HEADER) ?? ``;
|
|
1919
|
+
return { finalOffset };
|
|
1920
|
+
}
|
|
1921
|
+
if (response.status === 200) {
|
|
1922
|
+
this.#nextSeq = seqForThisRequest + 1;
|
|
1923
|
+
const finalOffset = response.headers.get(STREAM_OFFSET_HEADER) ?? ``;
|
|
1924
|
+
return { finalOffset };
|
|
1925
|
+
}
|
|
1926
|
+
if (response.status === 403) {
|
|
1927
|
+
const currentEpochStr = response.headers.get(PRODUCER_EPOCH_HEADER);
|
|
1928
|
+
const currentEpoch = currentEpochStr ? parseInt(currentEpochStr, 10) : this.#epoch;
|
|
1929
|
+
if (this.#autoClaim) {
|
|
1930
|
+
const newEpoch = currentEpoch + 1;
|
|
1931
|
+
this.#epoch = newEpoch;
|
|
1932
|
+
this.#nextSeq = 0;
|
|
1933
|
+
return this.#doClose(finalMessage);
|
|
1934
|
+
}
|
|
1935
|
+
throw new StaleEpochError(currentEpoch);
|
|
1936
|
+
}
|
|
1937
|
+
const error = await FetchError.fromResponse(response, this.#stream.url);
|
|
1938
|
+
throw error;
|
|
1679
1939
|
}
|
|
1680
1940
|
/**
|
|
1681
|
-
*
|
|
1941
|
+
* Increment epoch and reset sequence.
|
|
1942
|
+
*
|
|
1943
|
+
* Call this when restarting the producer to establish a new session.
|
|
1944
|
+
* Flushes any pending messages first.
|
|
1682
1945
|
*/
|
|
1683
|
-
async
|
|
1684
|
-
|
|
1685
|
-
|
|
1686
|
-
|
|
1687
|
-
headers: requestHeaders,
|
|
1688
|
-
signal: opts?.signal ?? this.#options.signal
|
|
1689
|
-
});
|
|
1690
|
-
if (!response.ok) await handleErrorResponse(response, this.url);
|
|
1946
|
+
async restart() {
|
|
1947
|
+
await this.flush();
|
|
1948
|
+
this.#epoch++;
|
|
1949
|
+
this.#nextSeq = 0;
|
|
1691
1950
|
}
|
|
1692
1951
|
/**
|
|
1693
|
-
*
|
|
1694
|
-
*
|
|
1695
|
-
* When batching is enabled (default), multiple append() calls made while
|
|
1696
|
-
* a POST is in-flight will be batched together into a single request.
|
|
1697
|
-
* This significantly improves throughput for high-frequency writes.
|
|
1698
|
-
*
|
|
1699
|
-
* - `body` may be Uint8Array, string, or any JSON-serializable value (for JSON streams).
|
|
1700
|
-
* - `body` may also be a Promise that resolves to any of the above types.
|
|
1701
|
-
* - Strings are encoded as UTF-8.
|
|
1702
|
-
* - `seq` (if provided) is sent as stream-seq (writer coordination).
|
|
1703
|
-
*
|
|
1704
|
-
* @example
|
|
1705
|
-
* ```typescript
|
|
1706
|
-
* // Direct value
|
|
1707
|
-
* await stream.append({ message: "hello" });
|
|
1708
|
-
*
|
|
1709
|
-
* // Promise value - awaited before buffering
|
|
1710
|
-
* await stream.append(fetchData());
|
|
1711
|
-
* await stream.append(Promise.all([a, b, c]));
|
|
1712
|
-
* ```
|
|
1952
|
+
* Current epoch for this producer.
|
|
1713
1953
|
*/
|
|
1714
|
-
|
|
1715
|
-
|
|
1716
|
-
if (this.#batchingEnabled && this.#queue) return this.#appendWithBatching(resolvedBody, opts);
|
|
1717
|
-
return this.#appendDirect(resolvedBody, opts);
|
|
1954
|
+
get epoch() {
|
|
1955
|
+
return this.#epoch;
|
|
1718
1956
|
}
|
|
1719
1957
|
/**
|
|
1720
|
-
*
|
|
1958
|
+
* Next sequence number to be assigned.
|
|
1721
1959
|
*/
|
|
1722
|
-
|
|
1723
|
-
|
|
1724
|
-
const contentType = opts?.contentType ?? this.#options.contentType ?? this.contentType;
|
|
1725
|
-
if (contentType) requestHeaders[`content-type`] = contentType;
|
|
1726
|
-
if (opts?.seq) requestHeaders[STREAM_SEQ_HEADER] = opts.seq;
|
|
1727
|
-
const isJson = normalizeContentType$1(contentType) === `application/json`;
|
|
1728
|
-
const bodyToEncode = isJson ? [body] : body;
|
|
1729
|
-
const encodedBody = encodeBody(bodyToEncode);
|
|
1730
|
-
const response = await this.#fetchClient(fetchUrl.toString(), {
|
|
1731
|
-
method: `POST`,
|
|
1732
|
-
headers: requestHeaders,
|
|
1733
|
-
body: encodedBody,
|
|
1734
|
-
signal: opts?.signal ?? this.#options.signal
|
|
1735
|
-
});
|
|
1736
|
-
if (!response.ok) await handleErrorResponse(response, this.url);
|
|
1960
|
+
get nextSeq() {
|
|
1961
|
+
return this.#nextSeq;
|
|
1737
1962
|
}
|
|
1738
1963
|
/**
|
|
1739
|
-
*
|
|
1964
|
+
* Number of messages in the current pending batch.
|
|
1740
1965
|
*/
|
|
1741
|
-
|
|
1742
|
-
return
|
|
1743
|
-
|
|
1744
|
-
|
|
1745
|
-
|
|
1746
|
-
|
|
1747
|
-
|
|
1748
|
-
|
|
1749
|
-
|
|
1750
|
-
|
|
1751
|
-
|
|
1752
|
-
|
|
1753
|
-
|
|
1754
|
-
|
|
1755
|
-
|
|
1756
|
-
|
|
1966
|
+
get pendingCount() {
|
|
1967
|
+
return this.#pendingBatch.length;
|
|
1968
|
+
}
|
|
1969
|
+
/**
|
|
1970
|
+
* Number of batches currently in flight.
|
|
1971
|
+
*/
|
|
1972
|
+
get inFlightCount() {
|
|
1973
|
+
return this.#queue.length();
|
|
1974
|
+
}
|
|
1975
|
+
/**
|
|
1976
|
+
* Enqueue the current pending batch for processing.
|
|
1977
|
+
*/
|
|
1978
|
+
#enqueuePendingBatch() {
|
|
1979
|
+
if (this.#pendingBatch.length === 0) return;
|
|
1980
|
+
const batch = this.#pendingBatch;
|
|
1981
|
+
const seq = this.#nextSeq;
|
|
1982
|
+
this.#pendingBatch = [];
|
|
1983
|
+
this.#batchBytes = 0;
|
|
1984
|
+
this.#nextSeq++;
|
|
1985
|
+
if (this.#autoClaim && !this.#epochClaimed && this.#queue.length() > 0) this.#queue.drained().then(() => {
|
|
1986
|
+
this.#queue.push({
|
|
1987
|
+
batch,
|
|
1988
|
+
seq
|
|
1989
|
+
}).catch(() => {});
|
|
1757
1990
|
});
|
|
1991
|
+
else this.#queue.push({
|
|
1992
|
+
batch,
|
|
1993
|
+
seq
|
|
1994
|
+
}).catch(() => {});
|
|
1758
1995
|
}
|
|
1759
1996
|
/**
|
|
1760
|
-
* Batch worker - processes batches
|
|
1997
|
+
* Batch worker - processes batches via fastq.
|
|
1761
1998
|
*/
|
|
1762
|
-
async #batchWorker(
|
|
1999
|
+
async #batchWorker(task) {
|
|
2000
|
+
const { batch, seq } = task;
|
|
2001
|
+
const epoch = this.#epoch;
|
|
1763
2002
|
try {
|
|
1764
|
-
await this.#
|
|
1765
|
-
|
|
1766
|
-
|
|
1767
|
-
const nextBatch = this.#buffer.splice(0);
|
|
1768
|
-
this.#queue.push(nextBatch).catch((err) => {
|
|
1769
|
-
for (const msg of nextBatch) msg.reject(err);
|
|
1770
|
-
});
|
|
1771
|
-
}
|
|
2003
|
+
await this.#doSendBatch(batch, seq, epoch);
|
|
2004
|
+
if (!this.#epochClaimed) this.#epochClaimed = true;
|
|
2005
|
+
this.#signalSeqComplete(epoch, seq, void 0);
|
|
1772
2006
|
} catch (error) {
|
|
1773
|
-
|
|
1774
|
-
|
|
1775
|
-
this.#buffer = [];
|
|
2007
|
+
this.#signalSeqComplete(epoch, seq, error);
|
|
2008
|
+
if (this.#onError) this.#onError(error);
|
|
1776
2009
|
throw error;
|
|
1777
2010
|
}
|
|
1778
2011
|
}
|
|
1779
2012
|
/**
|
|
1780
|
-
*
|
|
2013
|
+
* Signal that a sequence has completed (success or failure).
|
|
1781
2014
|
*/
|
|
1782
|
-
|
|
1783
|
-
|
|
1784
|
-
|
|
1785
|
-
|
|
1786
|
-
|
|
1787
|
-
let highestSeq;
|
|
1788
|
-
for (let i = batch.length - 1; i >= 0; i--) if (batch[i].seq !== void 0) {
|
|
1789
|
-
highestSeq = batch[i].seq;
|
|
1790
|
-
break;
|
|
1791
|
-
}
|
|
1792
|
-
if (highestSeq) requestHeaders[STREAM_SEQ_HEADER] = highestSeq;
|
|
1793
|
-
const isJson = normalizeContentType$1(contentType) === `application/json`;
|
|
1794
|
-
let batchedBody;
|
|
1795
|
-
if (isJson) {
|
|
1796
|
-
const values = batch.map((m) => m.data);
|
|
1797
|
-
batchedBody = JSON.stringify(values);
|
|
1798
|
-
} else {
|
|
1799
|
-
const totalSize = batch.reduce((sum, m) => {
|
|
1800
|
-
const size = typeof m.data === `string` ? new TextEncoder().encode(m.data).length : m.data.length;
|
|
1801
|
-
return sum + size;
|
|
1802
|
-
}, 0);
|
|
1803
|
-
const concatenated = new Uint8Array(totalSize);
|
|
1804
|
-
let offset = 0;
|
|
1805
|
-
for (const msg of batch) {
|
|
1806
|
-
const bytes = typeof msg.data === `string` ? new TextEncoder().encode(msg.data) : msg.data;
|
|
1807
|
-
concatenated.set(bytes, offset);
|
|
1808
|
-
offset += bytes.length;
|
|
1809
|
-
}
|
|
1810
|
-
batchedBody = concatenated;
|
|
2015
|
+
#signalSeqComplete(epoch, seq, error) {
|
|
2016
|
+
let epochMap = this.#seqState.get(epoch);
|
|
2017
|
+
if (!epochMap) {
|
|
2018
|
+
epochMap = new Map();
|
|
2019
|
+
this.#seqState.set(epoch, epochMap);
|
|
1811
2020
|
}
|
|
1812
|
-
const
|
|
1813
|
-
if (
|
|
1814
|
-
|
|
1815
|
-
|
|
1816
|
-
|
|
1817
|
-
|
|
1818
|
-
|
|
1819
|
-
|
|
1820
|
-
|
|
2021
|
+
const state = epochMap.get(seq);
|
|
2022
|
+
if (state) {
|
|
2023
|
+
state.resolved = true;
|
|
2024
|
+
state.error = error;
|
|
2025
|
+
for (const waiter of state.waiters) waiter(error);
|
|
2026
|
+
state.waiters = [];
|
|
2027
|
+
} else epochMap.set(seq, {
|
|
2028
|
+
resolved: true,
|
|
2029
|
+
error,
|
|
2030
|
+
waiters: []
|
|
1821
2031
|
});
|
|
1822
|
-
|
|
2032
|
+
const cleanupThreshold = seq - this.#maxInFlight * 3;
|
|
2033
|
+
if (cleanupThreshold > 0) {
|
|
2034
|
+
for (const oldSeq of epochMap.keys()) if (oldSeq < cleanupThreshold) epochMap.delete(oldSeq);
|
|
2035
|
+
}
|
|
1823
2036
|
}
|
|
1824
2037
|
/**
|
|
1825
|
-
*
|
|
1826
|
-
*
|
|
1827
|
-
*
|
|
1828
|
-
* - `source` yields Uint8Array or string chunks.
|
|
1829
|
-
* - Strings are encoded as UTF-8; no delimiters are added.
|
|
1830
|
-
* - Internally uses chunked transfer or HTTP/2 streaming.
|
|
1831
|
-
*
|
|
1832
|
-
* @example
|
|
1833
|
-
* ```typescript
|
|
1834
|
-
* // Pipe from a ReadableStream
|
|
1835
|
-
* const readable = new ReadableStream({
|
|
1836
|
-
* start(controller) {
|
|
1837
|
-
* controller.enqueue("chunk 1");
|
|
1838
|
-
* controller.enqueue("chunk 2");
|
|
1839
|
-
* controller.close();
|
|
1840
|
-
* }
|
|
1841
|
-
* });
|
|
1842
|
-
* await stream.appendStream(readable);
|
|
1843
|
-
*
|
|
1844
|
-
* // Pipe from an async generator
|
|
1845
|
-
* async function* generate() {
|
|
1846
|
-
* yield "line 1\n";
|
|
1847
|
-
* yield "line 2\n";
|
|
1848
|
-
* }
|
|
1849
|
-
* await stream.appendStream(generate());
|
|
1850
|
-
*
|
|
1851
|
-
* // Pipe from fetch response body
|
|
1852
|
-
* const response = await fetch("https://example.com/data");
|
|
1853
|
-
* await stream.appendStream(response.body!);
|
|
1854
|
-
* ```
|
|
2038
|
+
* Wait for a specific sequence to complete.
|
|
2039
|
+
* Returns immediately if already completed.
|
|
2040
|
+
* Throws if the sequence failed.
|
|
1855
2041
|
*/
|
|
1856
|
-
|
|
1857
|
-
|
|
1858
|
-
|
|
1859
|
-
|
|
1860
|
-
|
|
1861
|
-
|
|
1862
|
-
const
|
|
1863
|
-
|
|
1864
|
-
|
|
1865
|
-
|
|
1866
|
-
|
|
1867
|
-
|
|
2042
|
+
#waitForSeq(epoch, seq) {
|
|
2043
|
+
let epochMap = this.#seqState.get(epoch);
|
|
2044
|
+
if (!epochMap) {
|
|
2045
|
+
epochMap = new Map();
|
|
2046
|
+
this.#seqState.set(epoch, epochMap);
|
|
2047
|
+
}
|
|
2048
|
+
const state = epochMap.get(seq);
|
|
2049
|
+
if (state?.resolved) {
|
|
2050
|
+
if (state.error) return Promise.reject(state.error);
|
|
2051
|
+
return Promise.resolve();
|
|
2052
|
+
}
|
|
2053
|
+
return new Promise((resolve, reject) => {
|
|
2054
|
+
const waiter = (err) => {
|
|
2055
|
+
if (err) reject(err);
|
|
2056
|
+
else resolve();
|
|
2057
|
+
};
|
|
2058
|
+
if (state) state.waiters.push(waiter);
|
|
2059
|
+
else epochMap.set(seq, {
|
|
2060
|
+
resolved: false,
|
|
2061
|
+
waiters: [waiter]
|
|
2062
|
+
});
|
|
1868
2063
|
});
|
|
1869
|
-
if (!response.ok) await handleErrorResponse(response, this.url);
|
|
1870
2064
|
}
|
|
1871
2065
|
/**
|
|
1872
|
-
*
|
|
1873
|
-
*
|
|
1874
|
-
*
|
|
1875
|
-
* `pipeThrough()` from any ReadableStream source.
|
|
1876
|
-
*
|
|
1877
|
-
* @example
|
|
1878
|
-
* ```typescript
|
|
1879
|
-
* // Pipe from fetch response
|
|
1880
|
-
* const response = await fetch("https://example.com/data");
|
|
1881
|
-
* await response.body!.pipeTo(stream.writable());
|
|
1882
|
-
*
|
|
1883
|
-
* // Pipe through a transform
|
|
1884
|
-
* const readable = someStream.pipeThrough(new TextEncoderStream());
|
|
1885
|
-
* await readable.pipeTo(stream.writable());
|
|
1886
|
-
* ```
|
|
2066
|
+
* Actually send the batch to the server.
|
|
2067
|
+
* Handles auto-claim retry on 403 (stale epoch) if autoClaim is enabled.
|
|
2068
|
+
* Does NOT implement general retry/backoff for network errors or 5xx responses.
|
|
1887
2069
|
*/
|
|
1888
|
-
|
|
1889
|
-
const
|
|
1890
|
-
const
|
|
1891
|
-
|
|
1892
|
-
|
|
1893
|
-
|
|
1894
|
-
}
|
|
1895
|
-
|
|
1896
|
-
|
|
1897
|
-
|
|
1898
|
-
|
|
1899
|
-
|
|
1900
|
-
|
|
1901
|
-
|
|
1902
|
-
}
|
|
1903
|
-
},
|
|
1904
|
-
abort(reason) {
|
|
1905
|
-
console.error(`WritableStream aborted:`, reason);
|
|
2070
|
+
async #doSendBatch(batch, seq, epoch) {
|
|
2071
|
+
const contentType = this.#stream.contentType ?? `application/octet-stream`;
|
|
2072
|
+
const isJson = normalizeContentType$1(contentType) === `application/json`;
|
|
2073
|
+
let batchedBody;
|
|
2074
|
+
if (isJson) {
|
|
2075
|
+
const jsonStrings = batch.map((e) => new TextDecoder().decode(e.body));
|
|
2076
|
+
batchedBody = `[${jsonStrings.join(`,`)}]`;
|
|
2077
|
+
} else {
|
|
2078
|
+
const totalSize = batch.reduce((sum, e) => sum + e.body.length, 0);
|
|
2079
|
+
const concatenated = new Uint8Array(totalSize);
|
|
2080
|
+
let offset = 0;
|
|
2081
|
+
for (const entry of batch) {
|
|
2082
|
+
concatenated.set(entry.body, offset);
|
|
2083
|
+
offset += entry.body.length;
|
|
1906
2084
|
}
|
|
1907
|
-
|
|
1908
|
-
}
|
|
1909
|
-
/**
|
|
1910
|
-
* Start a fetch-like streaming session against this handle's URL/headers/params.
|
|
1911
|
-
* The first request is made inside this method; it resolves when we have
|
|
1912
|
-
* a valid first response, or rejects on errors.
|
|
1913
|
-
*
|
|
1914
|
-
* Call-specific headers and params are merged with handle-level ones,
|
|
1915
|
-
* with call-specific values taking precedence.
|
|
1916
|
-
*
|
|
1917
|
-
* @example
|
|
1918
|
-
* ```typescript
|
|
1919
|
-
* const handle = await DurableStream.connect({
|
|
1920
|
-
* url,
|
|
1921
|
-
* headers: { Authorization: `Bearer ${token}` }
|
|
1922
|
-
* });
|
|
1923
|
-
* const res = await handle.stream<{ message: string }>();
|
|
1924
|
-
*
|
|
1925
|
-
* // Accumulate all JSON items
|
|
1926
|
-
* const items = await res.json();
|
|
1927
|
-
*
|
|
1928
|
-
* // Or stream live with ReadableStream
|
|
1929
|
-
* const reader = res.jsonStream().getReader();
|
|
1930
|
-
* let result = await reader.read();
|
|
1931
|
-
* while (!result.done) {
|
|
1932
|
-
* console.log(result.value);
|
|
1933
|
-
* result = await reader.read();
|
|
1934
|
-
* }
|
|
1935
|
-
*
|
|
1936
|
-
* // Or use subscriber for backpressure-aware consumption
|
|
1937
|
-
* res.subscribeJson(async (batch) => {
|
|
1938
|
-
* for (const item of batch.items) {
|
|
1939
|
-
* console.log(item);
|
|
1940
|
-
* }
|
|
1941
|
-
* });
|
|
1942
|
-
* ```
|
|
1943
|
-
*/
|
|
1944
|
-
async stream(options) {
|
|
1945
|
-
if (options?.live === `sse` && this.contentType) {
|
|
1946
|
-
const isSSECompatible = SSE_COMPATIBLE_CONTENT_TYPES.some((prefix) => this.contentType.startsWith(prefix));
|
|
1947
|
-
if (!isSSECompatible) throw new DurableStreamError(`SSE is not supported for content-type: ${this.contentType}`, `SSE_NOT_SUPPORTED`, 400);
|
|
2085
|
+
batchedBody = concatenated;
|
|
1948
2086
|
}
|
|
1949
|
-
const
|
|
1950
|
-
|
|
1951
|
-
|
|
1952
|
-
|
|
1953
|
-
|
|
1954
|
-
|
|
1955
|
-
...options?.params
|
|
2087
|
+
const url = this.#stream.url;
|
|
2088
|
+
const headers = {
|
|
2089
|
+
"content-type": contentType,
|
|
2090
|
+
[PRODUCER_ID_HEADER]: this.#producerId,
|
|
2091
|
+
[PRODUCER_EPOCH_HEADER]: epoch.toString(),
|
|
2092
|
+
[PRODUCER_SEQ_HEADER]: seq.toString()
|
|
1956
2093
|
};
|
|
1957
|
-
|
|
1958
|
-
|
|
1959
|
-
headers
|
|
1960
|
-
|
|
1961
|
-
signal:
|
|
1962
|
-
fetch: this.#options.fetch,
|
|
1963
|
-
backoffOptions: this.#options.backoffOptions,
|
|
1964
|
-
offset: options?.offset,
|
|
1965
|
-
live: options?.live,
|
|
1966
|
-
json: options?.json,
|
|
1967
|
-
onError: options?.onError ?? this.#onError,
|
|
1968
|
-
warnOnHttp: options?.warnOnHttp ?? this.#options.warnOnHttp
|
|
2094
|
+
const response = await this.#fetchClient(url, {
|
|
2095
|
+
method: `POST`,
|
|
2096
|
+
headers,
|
|
2097
|
+
body: batchedBody,
|
|
2098
|
+
signal: this.#signal
|
|
1969
2099
|
});
|
|
2100
|
+
if (response.status === 204) return {
|
|
2101
|
+
offset: ``,
|
|
2102
|
+
duplicate: true
|
|
2103
|
+
};
|
|
2104
|
+
if (response.status === 200) {
|
|
2105
|
+
const resultOffset = response.headers.get(STREAM_OFFSET_HEADER) ?? ``;
|
|
2106
|
+
return {
|
|
2107
|
+
offset: resultOffset,
|
|
2108
|
+
duplicate: false
|
|
2109
|
+
};
|
|
2110
|
+
}
|
|
2111
|
+
if (response.status === 403) {
|
|
2112
|
+
const currentEpochStr = response.headers.get(PRODUCER_EPOCH_HEADER);
|
|
2113
|
+
const currentEpoch = currentEpochStr ? parseInt(currentEpochStr, 10) : epoch;
|
|
2114
|
+
if (this.#autoClaim) {
|
|
2115
|
+
const newEpoch = currentEpoch + 1;
|
|
2116
|
+
this.#epoch = newEpoch;
|
|
2117
|
+
this.#nextSeq = 1;
|
|
2118
|
+
return this.#doSendBatch(batch, 0, newEpoch);
|
|
2119
|
+
}
|
|
2120
|
+
throw new StaleEpochError(currentEpoch);
|
|
2121
|
+
}
|
|
2122
|
+
if (response.status === 409) {
|
|
2123
|
+
const expectedSeqStr = response.headers.get(PRODUCER_EXPECTED_SEQ_HEADER);
|
|
2124
|
+
const expectedSeq = expectedSeqStr ? parseInt(expectedSeqStr, 10) : 0;
|
|
2125
|
+
if (expectedSeq < seq) {
|
|
2126
|
+
const waitPromises = [];
|
|
2127
|
+
for (let s = expectedSeq; s < seq; s++) waitPromises.push(this.#waitForSeq(epoch, s));
|
|
2128
|
+
await Promise.all(waitPromises);
|
|
2129
|
+
return this.#doSendBatch(batch, seq, epoch);
|
|
2130
|
+
}
|
|
2131
|
+
const receivedSeqStr = response.headers.get(PRODUCER_RECEIVED_SEQ_HEADER);
|
|
2132
|
+
const receivedSeq = receivedSeqStr ? parseInt(receivedSeqStr, 10) : seq;
|
|
2133
|
+
throw new SequenceGapError(expectedSeq, receivedSeq);
|
|
2134
|
+
}
|
|
2135
|
+
if (response.status === 400) {
|
|
2136
|
+
const error$1 = await DurableStreamError.fromResponse(response, url);
|
|
2137
|
+
throw error$1;
|
|
2138
|
+
}
|
|
2139
|
+
const error = await FetchError.fromResponse(response, url);
|
|
2140
|
+
throw error;
|
|
1970
2141
|
}
|
|
1971
2142
|
/**
|
|
1972
|
-
*
|
|
2143
|
+
* Clear pending batch and report error.
|
|
1973
2144
|
*/
|
|
1974
|
-
|
|
1975
|
-
|
|
1976
|
-
|
|
1977
|
-
|
|
1978
|
-
|
|
1979
|
-
|
|
1980
|
-
|
|
1981
|
-
|
|
1982
|
-
};
|
|
2145
|
+
#rejectPendingBatch(error) {
|
|
2146
|
+
if (this.#onError && this.#pendingBatch.length > 0) this.#onError(error);
|
|
2147
|
+
this.#pendingBatch = [];
|
|
2148
|
+
this.#batchBytes = 0;
|
|
2149
|
+
if (this.#lingerTimeout) {
|
|
2150
|
+
clearTimeout(this.#lingerTimeout);
|
|
2151
|
+
this.#lingerTimeout = null;
|
|
2152
|
+
}
|
|
1983
2153
|
}
|
|
1984
2154
|
};
|
|
2155
|
+
|
|
2156
|
+
//#endregion
|
|
2157
|
+
//#region src/stream.ts
|
|
1985
2158
|
/**
|
|
1986
|
-
*
|
|
1987
|
-
*
|
|
1988
|
-
* Objects are JSON-serialized.
|
|
2159
|
+
* Normalize content-type by extracting the media type (before any semicolon).
|
|
2160
|
+
* Handles cases like "application/json; charset=utf-8".
|
|
1989
2161
|
*/
|
|
1990
|
-
function
|
|
1991
|
-
if (
|
|
1992
|
-
|
|
1993
|
-
if (body instanceof Uint8Array) return body;
|
|
1994
|
-
if (body instanceof Blob || body instanceof FormData || body instanceof ReadableStream || body instanceof ArrayBuffer || ArrayBuffer.isView(body)) return body;
|
|
1995
|
-
return new TextEncoder().encode(JSON.stringify(body));
|
|
2162
|
+
function normalizeContentType(contentType) {
|
|
2163
|
+
if (!contentType) return ``;
|
|
2164
|
+
return contentType.split(`;`)[0].trim().toLowerCase();
|
|
1996
2165
|
}
|
|
1997
2166
|
/**
|
|
1998
|
-
*
|
|
2167
|
+
* Check if a value is a Promise or Promise-like (thenable).
|
|
1999
2168
|
*/
|
|
2000
|
-
function
|
|
2001
|
-
|
|
2002
|
-
if (typeof chunk === `string`) controller.enqueue(new TextEncoder().encode(chunk));
|
|
2003
|
-
else controller.enqueue(chunk);
|
|
2004
|
-
} }));
|
|
2005
|
-
const encoder = new TextEncoder();
|
|
2006
|
-
const iterator = source[Symbol.asyncIterator]();
|
|
2007
|
-
return new ReadableStream({
|
|
2008
|
-
async pull(controller) {
|
|
2009
|
-
try {
|
|
2010
|
-
const { done, value } = await iterator.next();
|
|
2011
|
-
if (done) controller.close();
|
|
2012
|
-
else if (typeof value === `string`) controller.enqueue(encoder.encode(value));
|
|
2013
|
-
else controller.enqueue(value);
|
|
2014
|
-
} catch (e) {
|
|
2015
|
-
controller.error(e);
|
|
2016
|
-
}
|
|
2017
|
-
},
|
|
2018
|
-
cancel() {
|
|
2019
|
-
iterator.return?.();
|
|
2020
|
-
}
|
|
2021
|
-
});
|
|
2022
|
-
}
|
|
2023
|
-
/**
|
|
2024
|
-
* Validate stream options.
|
|
2025
|
-
*/
|
|
2026
|
-
function validateOptions(options) {
|
|
2027
|
-
if (!options.url) throw new MissingStreamUrlError();
|
|
2028
|
-
if (options.signal && !(options.signal instanceof AbortSignal)) throw new InvalidSignalError();
|
|
2029
|
-
warnIfUsingHttpInBrowser(options.url, options.warnOnHttp);
|
|
2030
|
-
}
|
|
2031
|
-
|
|
2032
|
-
//#endregion
|
|
2033
|
-
//#region src/idempotent-producer.ts
|
|
2034
|
-
/**
|
|
2035
|
-
* Error thrown when a producer's epoch is stale (zombie fencing).
|
|
2036
|
-
*/
|
|
2037
|
-
var StaleEpochError = class extends Error {
|
|
2038
|
-
/**
|
|
2039
|
-
* The current epoch on the server.
|
|
2040
|
-
*/
|
|
2041
|
-
currentEpoch;
|
|
2042
|
-
constructor(currentEpoch) {
|
|
2043
|
-
super(`Producer epoch is stale. Current server epoch: ${currentEpoch}. Call restart() or create a new producer with a higher epoch.`);
|
|
2044
|
-
this.name = `StaleEpochError`;
|
|
2045
|
-
this.currentEpoch = currentEpoch;
|
|
2046
|
-
}
|
|
2047
|
-
};
|
|
2048
|
-
/**
|
|
2049
|
-
* Error thrown when an unrecoverable sequence gap is detected.
|
|
2050
|
-
*
|
|
2051
|
-
* With maxInFlight > 1, HTTP requests can arrive out of order at the server,
|
|
2052
|
-
* causing temporary 409 responses. The client automatically handles these
|
|
2053
|
-
* by waiting for earlier sequences to complete, then retrying.
|
|
2054
|
-
*
|
|
2055
|
-
* This error is only thrown when the gap cannot be resolved (e.g., the
|
|
2056
|
-
* expected sequence is >= our sequence, indicating a true protocol violation).
|
|
2057
|
-
*/
|
|
2058
|
-
var SequenceGapError = class extends Error {
|
|
2059
|
-
expectedSeq;
|
|
2060
|
-
receivedSeq;
|
|
2061
|
-
constructor(expectedSeq, receivedSeq) {
|
|
2062
|
-
super(`Producer sequence gap: expected ${expectedSeq}, received ${receivedSeq}`);
|
|
2063
|
-
this.name = `SequenceGapError`;
|
|
2064
|
-
this.expectedSeq = expectedSeq;
|
|
2065
|
-
this.receivedSeq = receivedSeq;
|
|
2066
|
-
}
|
|
2067
|
-
};
|
|
2068
|
-
/**
|
|
2069
|
-
* Normalize content-type by extracting the media type (before any semicolon).
|
|
2070
|
-
*/
|
|
2071
|
-
function normalizeContentType(contentType) {
|
|
2072
|
-
if (!contentType) return ``;
|
|
2073
|
-
return contentType.split(`;`)[0].trim().toLowerCase();
|
|
2169
|
+
function isPromiseLike(value) {
|
|
2170
|
+
return value != null && typeof value.then === `function`;
|
|
2074
2171
|
}
|
|
2075
2172
|
/**
|
|
2076
|
-
*
|
|
2173
|
+
* A handle to a remote durable stream for read/write operations.
|
|
2077
2174
|
*
|
|
2078
|
-
*
|
|
2079
|
-
*
|
|
2080
|
-
*
|
|
2081
|
-
* - Batching: multiple appends batched into single HTTP request
|
|
2082
|
-
* - Pipelining: up to maxInFlight concurrent batches
|
|
2083
|
-
* - Zombie fencing: stale producers rejected via epoch validation
|
|
2175
|
+
* This is a lightweight, reusable handle - not a persistent connection.
|
|
2176
|
+
* It does not automatically start reading or listening.
|
|
2177
|
+
* Create sessions as needed via stream().
|
|
2084
2178
|
*
|
|
2085
2179
|
* @example
|
|
2086
2180
|
* ```typescript
|
|
2087
|
-
*
|
|
2088
|
-
* const
|
|
2089
|
-
*
|
|
2090
|
-
*
|
|
2181
|
+
* // Create a new stream
|
|
2182
|
+
* const stream = await DurableStream.create({
|
|
2183
|
+
* url: "https://streams.example.com/my-stream",
|
|
2184
|
+
* headers: { Authorization: "Bearer my-token" },
|
|
2185
|
+
* contentType: "application/json"
|
|
2091
2186
|
* });
|
|
2092
2187
|
*
|
|
2093
|
-
* //
|
|
2094
|
-
*
|
|
2095
|
-
* producer.append("message 2");
|
|
2188
|
+
* // Write data
|
|
2189
|
+
* await stream.append(JSON.stringify({ message: "hello" }));
|
|
2096
2190
|
*
|
|
2097
|
-
* //
|
|
2098
|
-
* await
|
|
2099
|
-
*
|
|
2191
|
+
* // Read with the new API
|
|
2192
|
+
* const res = await stream.stream<{ message: string }>();
|
|
2193
|
+
* res.subscribeJson(async (batch) => {
|
|
2194
|
+
* for (const item of batch.items) {
|
|
2195
|
+
* console.log(item.message);
|
|
2196
|
+
* }
|
|
2197
|
+
* });
|
|
2100
2198
|
* ```
|
|
2101
2199
|
*/
|
|
2102
|
-
var
|
|
2103
|
-
|
|
2104
|
-
|
|
2105
|
-
|
|
2106
|
-
|
|
2107
|
-
|
|
2108
|
-
|
|
2109
|
-
|
|
2200
|
+
var DurableStream = class DurableStream {
|
|
2201
|
+
/**
|
|
2202
|
+
* The URL of the durable stream.
|
|
2203
|
+
*/
|
|
2204
|
+
url;
|
|
2205
|
+
/**
|
|
2206
|
+
* The content type of the stream (populated after connect/head/read).
|
|
2207
|
+
*/
|
|
2208
|
+
contentType;
|
|
2209
|
+
#options;
|
|
2110
2210
|
#fetchClient;
|
|
2111
|
-
#signal;
|
|
2112
2211
|
#onError;
|
|
2113
|
-
#
|
|
2114
|
-
#batchBytes = 0;
|
|
2115
|
-
#lingerTimeout = null;
|
|
2212
|
+
#batchingEnabled;
|
|
2116
2213
|
#queue;
|
|
2117
|
-
#
|
|
2118
|
-
#closed = false;
|
|
2119
|
-
#epochClaimed;
|
|
2120
|
-
#seqState = new Map();
|
|
2214
|
+
#buffer = [];
|
|
2121
2215
|
/**
|
|
2122
|
-
* Create
|
|
2123
|
-
*
|
|
2124
|
-
* @param stream - The DurableStream to write to
|
|
2125
|
-
* @param producerId - Stable identifier for this producer (e.g., "order-service-1")
|
|
2126
|
-
* @param opts - Producer options
|
|
2216
|
+
* Create a cold handle to a stream.
|
|
2217
|
+
* No network IO is performed by the constructor.
|
|
2127
2218
|
*/
|
|
2128
|
-
constructor(
|
|
2129
|
-
|
|
2130
|
-
|
|
2131
|
-
this
|
|
2132
|
-
this.#
|
|
2133
|
-
|
|
2134
|
-
|
|
2135
|
-
|
|
2136
|
-
this.#onError = opts
|
|
2137
|
-
this
|
|
2138
|
-
this.#
|
|
2139
|
-
this.#
|
|
2140
|
-
|
|
2141
|
-
|
|
2142
|
-
|
|
2143
|
-
|
|
2219
|
+
constructor(opts) {
|
|
2220
|
+
validateOptions(opts);
|
|
2221
|
+
const urlStr = opts.url instanceof URL ? opts.url.toString() : opts.url;
|
|
2222
|
+
this.url = urlStr;
|
|
2223
|
+
this.#options = {
|
|
2224
|
+
...opts,
|
|
2225
|
+
url: urlStr
|
|
2226
|
+
};
|
|
2227
|
+
this.#onError = opts.onError;
|
|
2228
|
+
if (opts.contentType) this.contentType = opts.contentType;
|
|
2229
|
+
this.#batchingEnabled = opts.batching !== false;
|
|
2230
|
+
if (this.#batchingEnabled) this.#queue = fastq.promise(this.#batchWorker.bind(this), 1);
|
|
2231
|
+
const baseFetchClient = opts.fetch ?? ((...args) => fetch(...args));
|
|
2232
|
+
const backOffOpts = { ...opts.backoffOptions ?? BackoffDefaults };
|
|
2233
|
+
const fetchWithBackoffClient = createFetchWithBackoff(baseFetchClient, backOffOpts);
|
|
2234
|
+
this.#fetchClient = createFetchWithConsumedBody(fetchWithBackoffClient);
|
|
2144
2235
|
}
|
|
2145
2236
|
/**
|
|
2146
|
-
*
|
|
2147
|
-
*
|
|
2148
|
-
* This is fire-and-forget: returns immediately after adding to the batch.
|
|
2149
|
-
* The message is batched and sent when:
|
|
2150
|
-
* - maxBatchBytes is reached
|
|
2151
|
-
* - lingerMs elapses
|
|
2152
|
-
* - flush() is called
|
|
2153
|
-
*
|
|
2154
|
-
* Errors are reported via onError callback if configured. Use flush() to
|
|
2155
|
-
* wait for all pending messages to be sent.
|
|
2156
|
-
*
|
|
2157
|
-
* For JSON streams, pass native objects (which will be serialized internally).
|
|
2158
|
-
* For byte streams, pass string or Uint8Array.
|
|
2159
|
-
*
|
|
2160
|
-
* @param body - Data to append (object for JSON streams, string or Uint8Array for byte streams)
|
|
2237
|
+
* Create a new stream (create-only PUT) and return a handle.
|
|
2238
|
+
* Fails with DurableStreamError(code="CONFLICT_EXISTS") if it already exists.
|
|
2161
2239
|
*/
|
|
2162
|
-
|
|
2163
|
-
|
|
2164
|
-
|
|
2165
|
-
|
|
2166
|
-
|
|
2167
|
-
|
|
2168
|
-
|
|
2169
|
-
|
|
2170
|
-
data = body;
|
|
2171
|
-
} else {
|
|
2172
|
-
if (typeof body === `string`) bytes = new TextEncoder().encode(body);
|
|
2173
|
-
else if (body instanceof Uint8Array) bytes = body;
|
|
2174
|
-
else throw new DurableStreamError(`Non-JSON streams require string or Uint8Array`, `BAD_REQUEST`, 400, void 0);
|
|
2175
|
-
data = bytes;
|
|
2176
|
-
}
|
|
2177
|
-
this.#pendingBatch.push({
|
|
2178
|
-
data,
|
|
2179
|
-
body: bytes
|
|
2240
|
+
static async create(opts) {
|
|
2241
|
+
const stream$1 = new DurableStream(opts);
|
|
2242
|
+
await stream$1.create({
|
|
2243
|
+
contentType: opts.contentType,
|
|
2244
|
+
ttlSeconds: opts.ttlSeconds,
|
|
2245
|
+
expiresAt: opts.expiresAt,
|
|
2246
|
+
body: opts.body,
|
|
2247
|
+
closed: opts.closed
|
|
2180
2248
|
});
|
|
2181
|
-
|
|
2182
|
-
if (this.#batchBytes >= this.#maxBatchBytes) this.#enqueuePendingBatch();
|
|
2183
|
-
else if (!this.#lingerTimeout) this.#lingerTimeout = setTimeout(() => {
|
|
2184
|
-
this.#lingerTimeout = null;
|
|
2185
|
-
if (this.#pendingBatch.length > 0) this.#enqueuePendingBatch();
|
|
2186
|
-
}, this.#lingerMs);
|
|
2187
|
-
}
|
|
2188
|
-
/**
|
|
2189
|
-
* Send any pending batch immediately and wait for all in-flight batches.
|
|
2190
|
-
*
|
|
2191
|
-
* Call this before shutdown to ensure all messages are delivered.
|
|
2192
|
-
*/
|
|
2193
|
-
async flush() {
|
|
2194
|
-
if (this.#lingerTimeout) {
|
|
2195
|
-
clearTimeout(this.#lingerTimeout);
|
|
2196
|
-
this.#lingerTimeout = null;
|
|
2197
|
-
}
|
|
2198
|
-
if (this.#pendingBatch.length > 0) this.#enqueuePendingBatch();
|
|
2199
|
-
await this.#queue.drained();
|
|
2249
|
+
return stream$1;
|
|
2200
2250
|
}
|
|
2201
2251
|
/**
|
|
2202
|
-
*
|
|
2252
|
+
* Validate that a stream exists and fetch metadata via HEAD.
|
|
2253
|
+
* Returns a handle with contentType populated (if sent by server).
|
|
2203
2254
|
*
|
|
2204
|
-
*
|
|
2205
|
-
|
|
2206
|
-
|
|
2207
|
-
if (this.#closed) return;
|
|
2208
|
-
this.#closed = true;
|
|
2209
|
-
try {
|
|
2210
|
-
await this.flush();
|
|
2211
|
-
} catch {}
|
|
2212
|
-
}
|
|
2213
|
-
/**
|
|
2214
|
-
* Increment epoch and reset sequence.
|
|
2255
|
+
* **Important**: This only performs a HEAD request for validation - it does
|
|
2256
|
+
* NOT open a session or start reading data. To read from the stream, call
|
|
2257
|
+
* `stream()` on the returned handle.
|
|
2215
2258
|
*
|
|
2216
|
-
*
|
|
2217
|
-
*
|
|
2259
|
+
* @example
|
|
2260
|
+
* ```typescript
|
|
2261
|
+
* // Validate stream exists before reading
|
|
2262
|
+
* const handle = await DurableStream.connect({ url })
|
|
2263
|
+
* const res = await handle.stream() // Now actually read
|
|
2264
|
+
* ```
|
|
2218
2265
|
*/
|
|
2219
|
-
async
|
|
2220
|
-
|
|
2221
|
-
|
|
2222
|
-
|
|
2266
|
+
static async connect(opts) {
|
|
2267
|
+
const stream$1 = new DurableStream(opts);
|
|
2268
|
+
await stream$1.head();
|
|
2269
|
+
return stream$1;
|
|
2223
2270
|
}
|
|
2224
2271
|
/**
|
|
2225
|
-
*
|
|
2272
|
+
* HEAD metadata for a stream without creating a handle.
|
|
2226
2273
|
*/
|
|
2227
|
-
|
|
2228
|
-
|
|
2274
|
+
static async head(opts) {
|
|
2275
|
+
const stream$1 = new DurableStream(opts);
|
|
2276
|
+
return stream$1.head();
|
|
2229
2277
|
}
|
|
2230
2278
|
/**
|
|
2231
|
-
*
|
|
2279
|
+
* Delete a stream without creating a handle.
|
|
2232
2280
|
*/
|
|
2233
|
-
|
|
2234
|
-
|
|
2281
|
+
static async delete(opts) {
|
|
2282
|
+
const stream$1 = new DurableStream(opts);
|
|
2283
|
+
return stream$1.delete();
|
|
2235
2284
|
}
|
|
2236
2285
|
/**
|
|
2237
|
-
*
|
|
2286
|
+
* HEAD metadata for this stream.
|
|
2238
2287
|
*/
|
|
2239
|
-
|
|
2240
|
-
|
|
2241
|
-
|
|
2288
|
+
async head(opts) {
|
|
2289
|
+
const { requestHeaders, fetchUrl } = await this.#buildRequest();
|
|
2290
|
+
const response = await this.#fetchClient(fetchUrl.toString(), {
|
|
2291
|
+
method: `HEAD`,
|
|
2292
|
+
headers: requestHeaders,
|
|
2293
|
+
signal: opts?.signal ?? this.#options.signal
|
|
2294
|
+
});
|
|
2295
|
+
if (!response.ok) await handleErrorResponse(response, this.url);
|
|
2296
|
+
const contentType = response.headers.get(`content-type`) ?? void 0;
|
|
2297
|
+
const offset = response.headers.get(STREAM_OFFSET_HEADER) ?? void 0;
|
|
2298
|
+
const etag = response.headers.get(`etag`) ?? void 0;
|
|
2299
|
+
const cacheControl = response.headers.get(`cache-control`) ?? void 0;
|
|
2300
|
+
const streamClosed = response.headers.get(STREAM_CLOSED_HEADER)?.toLowerCase() === `true`;
|
|
2301
|
+
if (contentType) this.contentType = contentType;
|
|
2302
|
+
return {
|
|
2303
|
+
exists: true,
|
|
2304
|
+
contentType,
|
|
2305
|
+
offset,
|
|
2306
|
+
etag,
|
|
2307
|
+
cacheControl,
|
|
2308
|
+
streamClosed
|
|
2309
|
+
};
|
|
2310
|
+
}
|
|
2242
2311
|
/**
|
|
2243
|
-
*
|
|
2312
|
+
* Create this stream (create-only PUT) using the URL/auth from the handle.
|
|
2244
2313
|
*/
|
|
2245
|
-
|
|
2246
|
-
|
|
2314
|
+
async create(opts) {
|
|
2315
|
+
const { requestHeaders, fetchUrl } = await this.#buildRequest();
|
|
2316
|
+
const contentType = opts?.contentType ?? this.#options.contentType;
|
|
2317
|
+
if (contentType) requestHeaders[`content-type`] = contentType;
|
|
2318
|
+
if (opts?.ttlSeconds !== void 0) requestHeaders[STREAM_TTL_HEADER] = String(opts.ttlSeconds);
|
|
2319
|
+
if (opts?.expiresAt) requestHeaders[STREAM_EXPIRES_AT_HEADER] = opts.expiresAt;
|
|
2320
|
+
if (opts?.closed) requestHeaders[STREAM_CLOSED_HEADER] = `true`;
|
|
2321
|
+
const body = encodeBody(opts?.body);
|
|
2322
|
+
const response = await this.#fetchClient(fetchUrl.toString(), {
|
|
2323
|
+
method: `PUT`,
|
|
2324
|
+
headers: requestHeaders,
|
|
2325
|
+
body,
|
|
2326
|
+
signal: this.#options.signal
|
|
2327
|
+
});
|
|
2328
|
+
if (!response.ok) await handleErrorResponse(response, this.url, { operation: `create` });
|
|
2329
|
+
const responseContentType = response.headers.get(`content-type`);
|
|
2330
|
+
if (responseContentType) this.contentType = responseContentType;
|
|
2331
|
+
else if (contentType) this.contentType = contentType;
|
|
2332
|
+
return this;
|
|
2247
2333
|
}
|
|
2248
2334
|
/**
|
|
2249
|
-
*
|
|
2335
|
+
* Delete this stream.
|
|
2250
2336
|
*/
|
|
2251
|
-
|
|
2252
|
-
|
|
2253
|
-
const
|
|
2254
|
-
|
|
2255
|
-
|
|
2256
|
-
|
|
2257
|
-
this.#nextSeq++;
|
|
2258
|
-
if (this.#autoClaim && !this.#epochClaimed && this.#queue.length() > 0) this.#queue.drained().then(() => {
|
|
2259
|
-
this.#queue.push({
|
|
2260
|
-
batch,
|
|
2261
|
-
seq
|
|
2262
|
-
}).catch(() => {});
|
|
2337
|
+
async delete(opts) {
|
|
2338
|
+
const { requestHeaders, fetchUrl } = await this.#buildRequest();
|
|
2339
|
+
const response = await this.#fetchClient(fetchUrl.toString(), {
|
|
2340
|
+
method: `DELETE`,
|
|
2341
|
+
headers: requestHeaders,
|
|
2342
|
+
signal: opts?.signal ?? this.#options.signal
|
|
2263
2343
|
});
|
|
2264
|
-
|
|
2265
|
-
batch,
|
|
2266
|
-
seq
|
|
2267
|
-
}).catch(() => {});
|
|
2344
|
+
if (!response.ok) await handleErrorResponse(response, this.url);
|
|
2268
2345
|
}
|
|
2269
2346
|
/**
|
|
2270
|
-
*
|
|
2347
|
+
* Close the stream, optionally with a final message.
|
|
2348
|
+
*
|
|
2349
|
+
* After closing:
|
|
2350
|
+
* - No further appends are permitted (server returns 409)
|
|
2351
|
+
* - Readers can observe the closed state and treat it as EOF
|
|
2352
|
+
* - The stream's data remains fully readable
|
|
2353
|
+
*
|
|
2354
|
+
* Closing is:
|
|
2355
|
+
* - **Durable**: The closed state is persisted
|
|
2356
|
+
* - **Monotonic**: Once closed, a stream cannot be reopened
|
|
2357
|
+
*
|
|
2358
|
+
* **Idempotency:**
|
|
2359
|
+
* - `close()` without body: Idempotent — safe to call multiple times
|
|
2360
|
+
* - `close({ body })` with body: NOT idempotent — throws `StreamClosedError`
|
|
2361
|
+
* if stream is already closed (use `IdempotentProducer.close()` for
|
|
2362
|
+
* idempotent close-with-body semantics)
|
|
2363
|
+
*
|
|
2364
|
+
* @returns CloseResult with the final offset
|
|
2365
|
+
* @throws StreamClosedError if called with body on an already-closed stream
|
|
2271
2366
|
*/
|
|
2272
|
-
async
|
|
2273
|
-
const {
|
|
2274
|
-
const
|
|
2275
|
-
|
|
2276
|
-
|
|
2277
|
-
|
|
2278
|
-
|
|
2279
|
-
|
|
2280
|
-
|
|
2281
|
-
|
|
2282
|
-
|
|
2367
|
+
async close(opts) {
|
|
2368
|
+
const { requestHeaders, fetchUrl } = await this.#buildRequest();
|
|
2369
|
+
const contentType = opts?.contentType ?? this.#options.contentType ?? this.contentType;
|
|
2370
|
+
if (contentType) requestHeaders[`content-type`] = contentType;
|
|
2371
|
+
requestHeaders[STREAM_CLOSED_HEADER] = `true`;
|
|
2372
|
+
let body;
|
|
2373
|
+
if (opts?.body !== void 0) {
|
|
2374
|
+
const isJson = normalizeContentType(contentType) === `application/json`;
|
|
2375
|
+
if (isJson) {
|
|
2376
|
+
const bodyStr = typeof opts.body === `string` ? opts.body : new TextDecoder().decode(opts.body);
|
|
2377
|
+
body = `[${bodyStr}]`;
|
|
2378
|
+
} else body = typeof opts.body === `string` ? opts.body : opts.body;
|
|
2379
|
+
}
|
|
2380
|
+
const response = await this.#fetchClient(fetchUrl.toString(), {
|
|
2381
|
+
method: `POST`,
|
|
2382
|
+
headers: requestHeaders,
|
|
2383
|
+
body,
|
|
2384
|
+
signal: opts?.signal ?? this.#options.signal
|
|
2385
|
+
});
|
|
2386
|
+
if (response.status === 409) {
|
|
2387
|
+
const isClosed = response.headers.get(STREAM_CLOSED_HEADER)?.toLowerCase() === `true`;
|
|
2388
|
+
if (isClosed) {
|
|
2389
|
+
const finalOffset$1 = response.headers.get(STREAM_OFFSET_HEADER) ?? void 0;
|
|
2390
|
+
throw new StreamClosedError(this.url, finalOffset$1);
|
|
2391
|
+
}
|
|
2283
2392
|
}
|
|
2393
|
+
if (!response.ok) await handleErrorResponse(response, this.url);
|
|
2394
|
+
const finalOffset = response.headers.get(STREAM_OFFSET_HEADER) ?? ``;
|
|
2395
|
+
return { finalOffset };
|
|
2284
2396
|
}
|
|
2285
2397
|
/**
|
|
2286
|
-
*
|
|
2398
|
+
* Append a single payload to the stream.
|
|
2399
|
+
*
|
|
2400
|
+
* When batching is enabled (default), multiple append() calls made while
|
|
2401
|
+
* a POST is in-flight will be batched together into a single request.
|
|
2402
|
+
* This significantly improves throughput for high-frequency writes.
|
|
2403
|
+
*
|
|
2404
|
+
* - `body` must be string or Uint8Array.
|
|
2405
|
+
* - For JSON streams, pass pre-serialized JSON strings.
|
|
2406
|
+
* - `body` may also be a Promise that resolves to string or Uint8Array.
|
|
2407
|
+
* - Strings are encoded as UTF-8.
|
|
2408
|
+
* - `seq` (if provided) is sent as stream-seq (writer coordination).
|
|
2409
|
+
*
|
|
2410
|
+
* @example
|
|
2411
|
+
* ```typescript
|
|
2412
|
+
* // JSON stream - pass pre-serialized JSON
|
|
2413
|
+
* await stream.append(JSON.stringify({ message: "hello" }));
|
|
2414
|
+
*
|
|
2415
|
+
* // Byte stream
|
|
2416
|
+
* await stream.append("raw text data");
|
|
2417
|
+
* await stream.append(new Uint8Array([1, 2, 3]));
|
|
2418
|
+
*
|
|
2419
|
+
* // Promise value - awaited before buffering
|
|
2420
|
+
* await stream.append(fetchData());
|
|
2421
|
+
* ```
|
|
2287
2422
|
*/
|
|
2288
|
-
|
|
2289
|
-
|
|
2290
|
-
if (
|
|
2291
|
-
|
|
2292
|
-
|
|
2293
|
-
|
|
2294
|
-
|
|
2295
|
-
|
|
2296
|
-
|
|
2297
|
-
|
|
2298
|
-
|
|
2299
|
-
|
|
2300
|
-
|
|
2301
|
-
|
|
2302
|
-
|
|
2303
|
-
|
|
2423
|
+
async append(body, opts) {
|
|
2424
|
+
const resolvedBody = isPromiseLike(body) ? await body : body;
|
|
2425
|
+
if (this.#batchingEnabled && this.#queue) return this.#appendWithBatching(resolvedBody, opts);
|
|
2426
|
+
return this.#appendDirect(resolvedBody, opts);
|
|
2427
|
+
}
|
|
2428
|
+
/**
|
|
2429
|
+
* Direct append without batching (used when batching is disabled).
|
|
2430
|
+
*/
|
|
2431
|
+
async #appendDirect(body, opts) {
|
|
2432
|
+
const { requestHeaders, fetchUrl } = await this.#buildRequest();
|
|
2433
|
+
const contentType = opts?.contentType ?? this.#options.contentType ?? this.contentType;
|
|
2434
|
+
if (contentType) requestHeaders[`content-type`] = contentType;
|
|
2435
|
+
if (opts?.seq) requestHeaders[STREAM_SEQ_HEADER] = opts.seq;
|
|
2436
|
+
const isJson = normalizeContentType(contentType) === `application/json`;
|
|
2437
|
+
let encodedBody;
|
|
2438
|
+
if (isJson) {
|
|
2439
|
+
const bodyStr = typeof body === `string` ? body : new TextDecoder().decode(body);
|
|
2440
|
+
encodedBody = `[${bodyStr}]`;
|
|
2441
|
+
} else if (typeof body === `string`) encodedBody = body;
|
|
2442
|
+
else encodedBody = body.buffer.slice(body.byteOffset, body.byteOffset + body.byteLength);
|
|
2443
|
+
const response = await this.#fetchClient(fetchUrl.toString(), {
|
|
2444
|
+
method: `POST`,
|
|
2445
|
+
headers: requestHeaders,
|
|
2446
|
+
body: encodedBody,
|
|
2447
|
+
signal: opts?.signal ?? this.#options.signal
|
|
2304
2448
|
});
|
|
2305
|
-
|
|
2306
|
-
if (cleanupThreshold > 0) {
|
|
2307
|
-
for (const oldSeq of epochMap.keys()) if (oldSeq < cleanupThreshold) epochMap.delete(oldSeq);
|
|
2308
|
-
}
|
|
2449
|
+
if (!response.ok) await handleErrorResponse(response, this.url);
|
|
2309
2450
|
}
|
|
2310
2451
|
/**
|
|
2311
|
-
*
|
|
2312
|
-
* Returns immediately if already completed.
|
|
2313
|
-
* Throws if the sequence failed.
|
|
2452
|
+
* Append with batching - buffers messages and sends them in batches.
|
|
2314
2453
|
*/
|
|
2315
|
-
#
|
|
2316
|
-
let epochMap = this.#seqState.get(epoch);
|
|
2317
|
-
if (!epochMap) {
|
|
2318
|
-
epochMap = new Map();
|
|
2319
|
-
this.#seqState.set(epoch, epochMap);
|
|
2320
|
-
}
|
|
2321
|
-
const state = epochMap.get(seq);
|
|
2322
|
-
if (state?.resolved) {
|
|
2323
|
-
if (state.error) return Promise.reject(state.error);
|
|
2324
|
-
return Promise.resolve();
|
|
2325
|
-
}
|
|
2454
|
+
async #appendWithBatching(body, opts) {
|
|
2326
2455
|
return new Promise((resolve, reject) => {
|
|
2327
|
-
|
|
2328
|
-
|
|
2329
|
-
|
|
2330
|
-
|
|
2331
|
-
|
|
2332
|
-
|
|
2333
|
-
|
|
2334
|
-
waiters: [waiter]
|
|
2456
|
+
this.#buffer.push({
|
|
2457
|
+
data: body,
|
|
2458
|
+
seq: opts?.seq,
|
|
2459
|
+
contentType: opts?.contentType,
|
|
2460
|
+
signal: opts?.signal,
|
|
2461
|
+
resolve,
|
|
2462
|
+
reject
|
|
2335
2463
|
});
|
|
2464
|
+
if (this.#queue.idle()) {
|
|
2465
|
+
const batch = this.#buffer.splice(0);
|
|
2466
|
+
this.#queue.push(batch).catch((err) => {
|
|
2467
|
+
for (const msg of batch) msg.reject(err);
|
|
2468
|
+
});
|
|
2469
|
+
}
|
|
2336
2470
|
});
|
|
2337
2471
|
}
|
|
2338
2472
|
/**
|
|
2339
|
-
*
|
|
2340
|
-
* Handles auto-claim retry on 403 (stale epoch) if autoClaim is enabled.
|
|
2341
|
-
* Does NOT implement general retry/backoff for network errors or 5xx responses.
|
|
2473
|
+
* Batch worker - processes batches of messages.
|
|
2342
2474
|
*/
|
|
2343
|
-
async #
|
|
2344
|
-
|
|
2475
|
+
async #batchWorker(batch) {
|
|
2476
|
+
try {
|
|
2477
|
+
await this.#sendBatch(batch);
|
|
2478
|
+
for (const msg of batch) msg.resolve();
|
|
2479
|
+
if (this.#buffer.length > 0) {
|
|
2480
|
+
const nextBatch = this.#buffer.splice(0);
|
|
2481
|
+
this.#queue.push(nextBatch).catch((err) => {
|
|
2482
|
+
for (const msg of nextBatch) msg.reject(err);
|
|
2483
|
+
});
|
|
2484
|
+
}
|
|
2485
|
+
} catch (error) {
|
|
2486
|
+
for (const msg of batch) msg.reject(error);
|
|
2487
|
+
for (const msg of this.#buffer) msg.reject(error);
|
|
2488
|
+
this.#buffer = [];
|
|
2489
|
+
throw error;
|
|
2490
|
+
}
|
|
2491
|
+
}
|
|
2492
|
+
/**
|
|
2493
|
+
* Send a batch of messages as a single POST request.
|
|
2494
|
+
*/
|
|
2495
|
+
async #sendBatch(batch) {
|
|
2496
|
+
if (batch.length === 0) return;
|
|
2497
|
+
const { requestHeaders, fetchUrl } = await this.#buildRequest();
|
|
2498
|
+
const contentType = batch[0]?.contentType ?? this.#options.contentType ?? this.contentType;
|
|
2499
|
+
if (contentType) requestHeaders[`content-type`] = contentType;
|
|
2500
|
+
let highestSeq;
|
|
2501
|
+
for (let i = batch.length - 1; i >= 0; i--) if (batch[i].seq !== void 0) {
|
|
2502
|
+
highestSeq = batch[i].seq;
|
|
2503
|
+
break;
|
|
2504
|
+
}
|
|
2505
|
+
if (highestSeq) requestHeaders[STREAM_SEQ_HEADER] = highestSeq;
|
|
2345
2506
|
const isJson = normalizeContentType(contentType) === `application/json`;
|
|
2346
2507
|
let batchedBody;
|
|
2347
2508
|
if (isJson) {
|
|
2348
|
-
const
|
|
2349
|
-
batchedBody =
|
|
2509
|
+
const jsonStrings = batch.map((m) => typeof m.data === `string` ? m.data : new TextDecoder().decode(m.data));
|
|
2510
|
+
batchedBody = `[${jsonStrings.join(`,`)}]`;
|
|
2350
2511
|
} else {
|
|
2351
|
-
const
|
|
2352
|
-
const
|
|
2353
|
-
|
|
2354
|
-
|
|
2355
|
-
|
|
2356
|
-
|
|
2512
|
+
const hasUint8Array = batch.some((m) => m.data instanceof Uint8Array);
|
|
2513
|
+
const hasString = batch.some((m) => typeof m.data === `string`);
|
|
2514
|
+
if (hasUint8Array && !hasString) {
|
|
2515
|
+
const chunks = batch.map((m) => m.data);
|
|
2516
|
+
const totalLength = chunks.reduce((sum, c) => sum + c.length, 0);
|
|
2517
|
+
const combined = new Uint8Array(totalLength);
|
|
2518
|
+
let offset = 0;
|
|
2519
|
+
for (const chunk of chunks) {
|
|
2520
|
+
combined.set(chunk, offset);
|
|
2521
|
+
offset += chunk.length;
|
|
2522
|
+
}
|
|
2523
|
+
batchedBody = combined;
|
|
2524
|
+
} else if (hasString && !hasUint8Array) batchedBody = batch.map((m) => m.data).join(``);
|
|
2525
|
+
else {
|
|
2526
|
+
const encoder = new TextEncoder();
|
|
2527
|
+
const chunks = batch.map((m) => typeof m.data === `string` ? encoder.encode(m.data) : m.data);
|
|
2528
|
+
const totalLength = chunks.reduce((sum, c) => sum + c.length, 0);
|
|
2529
|
+
const combined = new Uint8Array(totalLength);
|
|
2530
|
+
let offset = 0;
|
|
2531
|
+
for (const chunk of chunks) {
|
|
2532
|
+
combined.set(chunk, offset);
|
|
2533
|
+
offset += chunk.length;
|
|
2534
|
+
}
|
|
2535
|
+
batchedBody = combined;
|
|
2357
2536
|
}
|
|
2358
|
-
batchedBody = concatenated;
|
|
2359
2537
|
}
|
|
2360
|
-
const
|
|
2361
|
-
|
|
2362
|
-
|
|
2363
|
-
|
|
2364
|
-
|
|
2365
|
-
[PRODUCER_SEQ_HEADER]: seq.toString()
|
|
2366
|
-
};
|
|
2367
|
-
const response = await this.#fetchClient(url, {
|
|
2538
|
+
const signals = [];
|
|
2539
|
+
if (this.#options.signal) signals.push(this.#options.signal);
|
|
2540
|
+
for (const msg of batch) if (msg.signal) signals.push(msg.signal);
|
|
2541
|
+
const combinedSignal = signals.length > 0 ? AbortSignal.any(signals) : void 0;
|
|
2542
|
+
const response = await this.#fetchClient(fetchUrl.toString(), {
|
|
2368
2543
|
method: `POST`,
|
|
2369
|
-
headers,
|
|
2544
|
+
headers: requestHeaders,
|
|
2370
2545
|
body: batchedBody,
|
|
2371
|
-
signal:
|
|
2546
|
+
signal: combinedSignal
|
|
2372
2547
|
});
|
|
2373
|
-
if (response.
|
|
2374
|
-
|
|
2375
|
-
|
|
2376
|
-
|
|
2377
|
-
|
|
2378
|
-
|
|
2379
|
-
|
|
2380
|
-
|
|
2381
|
-
|
|
2382
|
-
|
|
2383
|
-
|
|
2384
|
-
|
|
2385
|
-
|
|
2386
|
-
|
|
2387
|
-
|
|
2388
|
-
|
|
2389
|
-
|
|
2390
|
-
|
|
2391
|
-
|
|
2392
|
-
|
|
2393
|
-
|
|
2394
|
-
|
|
2395
|
-
|
|
2396
|
-
|
|
2397
|
-
|
|
2398
|
-
|
|
2399
|
-
|
|
2400
|
-
|
|
2401
|
-
|
|
2402
|
-
|
|
2548
|
+
if (!response.ok) await handleErrorResponse(response, this.url);
|
|
2549
|
+
}
|
|
2550
|
+
/**
|
|
2551
|
+
* Append a streaming body to the stream.
|
|
2552
|
+
*
|
|
2553
|
+
* Supports piping from any ReadableStream or async iterable:
|
|
2554
|
+
* - `source` yields Uint8Array or string chunks.
|
|
2555
|
+
* - Strings are encoded as UTF-8; no delimiters are added.
|
|
2556
|
+
* - Internally uses chunked transfer or HTTP/2 streaming.
|
|
2557
|
+
*
|
|
2558
|
+
* @example
|
|
2559
|
+
* ```typescript
|
|
2560
|
+
* // Pipe from a ReadableStream
|
|
2561
|
+
* const readable = new ReadableStream({
|
|
2562
|
+
* start(controller) {
|
|
2563
|
+
* controller.enqueue("chunk 1");
|
|
2564
|
+
* controller.enqueue("chunk 2");
|
|
2565
|
+
* controller.close();
|
|
2566
|
+
* }
|
|
2567
|
+
* });
|
|
2568
|
+
* await stream.appendStream(readable);
|
|
2569
|
+
*
|
|
2570
|
+
* // Pipe from an async generator
|
|
2571
|
+
* async function* generate() {
|
|
2572
|
+
* yield "line 1\n";
|
|
2573
|
+
* yield "line 2\n";
|
|
2574
|
+
* }
|
|
2575
|
+
* await stream.appendStream(generate());
|
|
2576
|
+
*
|
|
2577
|
+
* // Pipe from fetch response body
|
|
2578
|
+
* const response = await fetch("https://example.com/data");
|
|
2579
|
+
* await stream.appendStream(response.body!);
|
|
2580
|
+
* ```
|
|
2581
|
+
*/
|
|
2582
|
+
async appendStream(source, opts) {
|
|
2583
|
+
const { requestHeaders, fetchUrl } = await this.#buildRequest();
|
|
2584
|
+
const contentType = opts?.contentType ?? this.#options.contentType ?? this.contentType;
|
|
2585
|
+
if (contentType) requestHeaders[`content-type`] = contentType;
|
|
2586
|
+
if (opts?.seq) requestHeaders[STREAM_SEQ_HEADER] = opts.seq;
|
|
2587
|
+
const body = toReadableStream(source);
|
|
2588
|
+
const response = await this.#fetchClient(fetchUrl.toString(), {
|
|
2589
|
+
method: `POST`,
|
|
2590
|
+
headers: requestHeaders,
|
|
2591
|
+
body,
|
|
2592
|
+
duplex: `half`,
|
|
2593
|
+
signal: opts?.signal ?? this.#options.signal
|
|
2594
|
+
});
|
|
2595
|
+
if (!response.ok) await handleErrorResponse(response, this.url);
|
|
2596
|
+
}
|
|
2597
|
+
/**
|
|
2598
|
+
* Create a writable stream that pipes data to this durable stream.
|
|
2599
|
+
*
|
|
2600
|
+
* Returns a WritableStream that can be used with `pipeTo()` or
|
|
2601
|
+
* `pipeThrough()` from any ReadableStream source.
|
|
2602
|
+
*
|
|
2603
|
+
* Uses IdempotentProducer internally for:
|
|
2604
|
+
* - Automatic batching (controlled by lingerMs, maxBatchBytes)
|
|
2605
|
+
* - Exactly-once delivery semantics
|
|
2606
|
+
* - Streaming writes (doesn't buffer entire content in memory)
|
|
2607
|
+
*
|
|
2608
|
+
* @example
|
|
2609
|
+
* ```typescript
|
|
2610
|
+
* // Pipe from fetch response
|
|
2611
|
+
* const response = await fetch("https://example.com/data");
|
|
2612
|
+
* await response.body!.pipeTo(stream.writable());
|
|
2613
|
+
*
|
|
2614
|
+
* // Pipe through a transform
|
|
2615
|
+
* const readable = someStream.pipeThrough(new TextEncoderStream());
|
|
2616
|
+
* await readable.pipeTo(stream.writable());
|
|
2617
|
+
*
|
|
2618
|
+
* // With custom producer options
|
|
2619
|
+
* await source.pipeTo(stream.writable({
|
|
2620
|
+
* producerId: "my-producer",
|
|
2621
|
+
* lingerMs: 10,
|
|
2622
|
+
* maxBatchBytes: 64 * 1024,
|
|
2623
|
+
* }));
|
|
2624
|
+
* ```
|
|
2625
|
+
*/
|
|
2626
|
+
writable(opts) {
|
|
2627
|
+
const producerId = opts?.producerId ?? `writable-${crypto.randomUUID().slice(0, 8)}`;
|
|
2628
|
+
let writeError = null;
|
|
2629
|
+
const producer = new IdempotentProducer(this, producerId, {
|
|
2630
|
+
autoClaim: true,
|
|
2631
|
+
lingerMs: opts?.lingerMs,
|
|
2632
|
+
maxBatchBytes: opts?.maxBatchBytes,
|
|
2633
|
+
onError: (error) => {
|
|
2634
|
+
if (!writeError) writeError = error;
|
|
2635
|
+
opts?.onError?.(error);
|
|
2636
|
+
},
|
|
2637
|
+
signal: opts?.signal ?? this.#options.signal
|
|
2638
|
+
});
|
|
2639
|
+
return new WritableStream({
|
|
2640
|
+
write(chunk) {
|
|
2641
|
+
producer.append(chunk);
|
|
2642
|
+
},
|
|
2643
|
+
async close() {
|
|
2644
|
+
await producer.close();
|
|
2645
|
+
if (writeError) throw writeError;
|
|
2646
|
+
},
|
|
2647
|
+
abort(_reason) {
|
|
2648
|
+
producer.detach().catch((err) => {
|
|
2649
|
+
opts?.onError?.(err);
|
|
2650
|
+
});
|
|
2403
2651
|
}
|
|
2404
|
-
|
|
2405
|
-
const receivedSeq = receivedSeqStr ? parseInt(receivedSeqStr, 10) : seq;
|
|
2406
|
-
throw new SequenceGapError(expectedSeq, receivedSeq);
|
|
2407
|
-
}
|
|
2408
|
-
if (response.status === 400) {
|
|
2409
|
-
const error$1 = await DurableStreamError.fromResponse(response, url);
|
|
2410
|
-
throw error$1;
|
|
2411
|
-
}
|
|
2412
|
-
const error = await FetchError.fromResponse(response, url);
|
|
2413
|
-
throw error;
|
|
2652
|
+
});
|
|
2414
2653
|
}
|
|
2415
2654
|
/**
|
|
2416
|
-
*
|
|
2655
|
+
* Start a fetch-like streaming session against this handle's URL/headers/params.
|
|
2656
|
+
* The first request is made inside this method; it resolves when we have
|
|
2657
|
+
* a valid first response, or rejects on errors.
|
|
2658
|
+
*
|
|
2659
|
+
* Call-specific headers and params are merged with handle-level ones,
|
|
2660
|
+
* with call-specific values taking precedence.
|
|
2661
|
+
*
|
|
2662
|
+
* @example
|
|
2663
|
+
* ```typescript
|
|
2664
|
+
* const handle = await DurableStream.connect({
|
|
2665
|
+
* url,
|
|
2666
|
+
* headers: { Authorization: `Bearer ${token}` }
|
|
2667
|
+
* });
|
|
2668
|
+
* const res = await handle.stream<{ message: string }>();
|
|
2669
|
+
*
|
|
2670
|
+
* // Accumulate all JSON items
|
|
2671
|
+
* const items = await res.json();
|
|
2672
|
+
*
|
|
2673
|
+
* // Or stream live with ReadableStream
|
|
2674
|
+
* const reader = res.jsonStream().getReader();
|
|
2675
|
+
* let result = await reader.read();
|
|
2676
|
+
* while (!result.done) {
|
|
2677
|
+
* console.log(result.value);
|
|
2678
|
+
* result = await reader.read();
|
|
2679
|
+
* }
|
|
2680
|
+
*
|
|
2681
|
+
* // Or use subscriber for backpressure-aware consumption
|
|
2682
|
+
* res.subscribeJson(async (batch) => {
|
|
2683
|
+
* for (const item of batch.items) {
|
|
2684
|
+
* console.log(item);
|
|
2685
|
+
* }
|
|
2686
|
+
* });
|
|
2687
|
+
* ```
|
|
2417
2688
|
*/
|
|
2418
|
-
|
|
2419
|
-
|
|
2420
|
-
|
|
2421
|
-
|
|
2422
|
-
|
|
2423
|
-
|
|
2424
|
-
this.#
|
|
2425
|
-
|
|
2689
|
+
async stream(options) {
|
|
2690
|
+
const mergedHeaders = {
|
|
2691
|
+
...this.#options.headers,
|
|
2692
|
+
...options?.headers
|
|
2693
|
+
};
|
|
2694
|
+
const mergedParams = {
|
|
2695
|
+
...this.#options.params,
|
|
2696
|
+
...options?.params
|
|
2697
|
+
};
|
|
2698
|
+
return stream({
|
|
2699
|
+
url: this.url,
|
|
2700
|
+
headers: mergedHeaders,
|
|
2701
|
+
params: mergedParams,
|
|
2702
|
+
signal: options?.signal ?? this.#options.signal,
|
|
2703
|
+
fetch: this.#options.fetch,
|
|
2704
|
+
backoffOptions: this.#options.backoffOptions,
|
|
2705
|
+
offset: options?.offset,
|
|
2706
|
+
live: options?.live,
|
|
2707
|
+
json: options?.json,
|
|
2708
|
+
onError: options?.onError ?? this.#onError,
|
|
2709
|
+
warnOnHttp: options?.warnOnHttp ?? this.#options.warnOnHttp
|
|
2710
|
+
});
|
|
2711
|
+
}
|
|
2712
|
+
/**
|
|
2713
|
+
* Build request headers and URL.
|
|
2714
|
+
*/
|
|
2715
|
+
async #buildRequest() {
|
|
2716
|
+
const requestHeaders = await resolveHeaders(this.#options.headers);
|
|
2717
|
+
const fetchUrl = new URL(this.url);
|
|
2718
|
+
const params = await resolveParams(this.#options.params);
|
|
2719
|
+
for (const [key, value] of Object.entries(params)) fetchUrl.searchParams.set(key, value);
|
|
2720
|
+
return {
|
|
2721
|
+
requestHeaders,
|
|
2722
|
+
fetchUrl
|
|
2723
|
+
};
|
|
2426
2724
|
}
|
|
2427
2725
|
};
|
|
2726
|
+
/**
 * Normalize a request body into a fetch-compatible payload.
 * - `undefined` passes through untouched
 * - strings become UTF-8 bytes
 * - binary/stream-like values (Uint8Array, Blob, FormData, ReadableStream,
 *   ArrayBuffer, typed-array views) are forwarded unchanged
 * - anything else is JSON-serialized, then UTF-8 encoded
 */
function encodeBody(body) {
	if (body === void 0) return void 0;
	if (typeof body === `string`) return new TextEncoder().encode(body);
	if (body instanceof Uint8Array) return body;
	const isPassthrough = body instanceof Blob
		|| body instanceof FormData
		|| body instanceof ReadableStream
		|| body instanceof ArrayBuffer
		|| ArrayBuffer.isView(body);
	if (isPassthrough) return body;
	return new TextEncoder().encode(JSON.stringify(body));
}
|
|
2738
|
+
/**
 * Convert an async iterable (or an existing ReadableStream) into a byte-oriented
 * ReadableStream: string chunks are UTF-8 encoded, all other chunks pass through.
 * Cancelling the returned stream closes the underlying iterator.
 */
function toReadableStream(source) {
	const encoder = new TextEncoder();
	const asBytes = (chunk) => typeof chunk === `string` ? encoder.encode(chunk) : chunk;
	if (source instanceof ReadableStream) {
		// Already a stream: just re-encode string chunks on the way through.
		return source.pipeThrough(new TransformStream({
			transform(chunk, controller) {
				controller.enqueue(asBytes(chunk));
			}
		}));
	}
	const iterator = source[Symbol.asyncIterator]();
	return new ReadableStream({
		async pull(controller) {
			try {
				const next = await iterator.next();
				if (next.done) {
					controller.close();
					return;
				}
				controller.enqueue(asBytes(next.value));
			} catch (e) {
				controller.error(e);
			}
		},
		cancel() {
			// Give the iterator a chance to release resources.
			iterator.return?.();
		}
	});
}
|
|
2764
|
+
/**
 * Validate stream options before any network work begins.
 * Throws MissingStreamUrlError when no URL is provided and InvalidSignalError
 * when a signal is present but is not an AbortSignal; also emits the
 * plain-HTTP browser warning for the given URL.
 */
function validateOptions(options) {
	const { url, signal, warnOnHttp } = options;
	if (!url) throw new MissingStreamUrlError();
	const signalIsInvalid = Boolean(signal) && !(signal instanceof AbortSignal);
	if (signalIsInvalid) throw new InvalidSignalError();
	warnIfUsingHttpInBrowser(url, warnOnHttp);
}
|
|
2428
2772
|
|
|
2429
2773
|
//#endregion
|
|
2430
|
-
export { BackoffDefaults, CURSOR_QUERY_PARAM, DURABLE_STREAM_PROTOCOL_QUERY_PARAMS, DurableStream, DurableStreamError, FetchBackoffAbortError, FetchError, IdempotentProducer, InvalidSignalError, LIVE_QUERY_PARAM, MissingStreamUrlError, OFFSET_QUERY_PARAM, PRODUCER_EPOCH_HEADER, PRODUCER_EXPECTED_SEQ_HEADER, PRODUCER_ID_HEADER, PRODUCER_RECEIVED_SEQ_HEADER, PRODUCER_SEQ_HEADER, SSE_COMPATIBLE_CONTENT_TYPES, STREAM_CURSOR_HEADER, STREAM_EXPIRES_AT_HEADER, STREAM_OFFSET_HEADER, STREAM_SEQ_HEADER, STREAM_TTL_HEADER, STREAM_UP_TO_DATE_HEADER, SequenceGapError, StaleEpochError, _resetHttpWarningForTesting, asAsyncIterableReadableStream, createFetchWithBackoff, createFetchWithConsumedBody, stream, warnIfUsingHttpInBrowser };
|
|
2774
|
+
export { BackoffDefaults, CURSOR_QUERY_PARAM, DURABLE_STREAM_PROTOCOL_QUERY_PARAMS, DurableStream, DurableStreamError, FetchBackoffAbortError, FetchError, IdempotentProducer, InvalidSignalError, LIVE_QUERY_PARAM, MissingStreamUrlError, OFFSET_QUERY_PARAM, PRODUCER_EPOCH_HEADER, PRODUCER_EXPECTED_SEQ_HEADER, PRODUCER_ID_HEADER, PRODUCER_RECEIVED_SEQ_HEADER, PRODUCER_SEQ_HEADER, SSE_CLOSED_FIELD, SSE_COMPATIBLE_CONTENT_TYPES, STREAM_CLOSED_HEADER, STREAM_CURSOR_HEADER, STREAM_EXPIRES_AT_HEADER, STREAM_OFFSET_HEADER, STREAM_SEQ_HEADER, STREAM_TTL_HEADER, STREAM_UP_TO_DATE_HEADER, SequenceGapError, StaleEpochError, StreamClosedError, _resetHttpWarningForTesting, asAsyncIterableReadableStream, createFetchWithBackoff, createFetchWithConsumedBody, stream, warnIfUsingHttpInBrowser };
|