@durable-streams/client 0.1.4 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +11 -10
- package/dist/index.cjs +954 -795
- package/dist/index.d.cts +63 -25
- package/dist/index.d.ts +63 -25
- package/dist/index.js +954 -795
- package/package.json +2 -2
- package/src/idempotent-producer.ts +51 -38
- package/src/response.ts +258 -23
- package/src/sse.ts +17 -4
- package/src/stream-api.ts +22 -9
- package/src/stream.ts +77 -56
- package/src/types.ts +24 -12
package/dist/index.cjs
CHANGED
|
@@ -506,7 +506,10 @@ async function* parseSSEStream(stream$1, signal) {
|
|
|
506
506
|
streamCursor: control.streamCursor,
|
|
507
507
|
upToDate: control.upToDate
|
|
508
508
|
};
|
|
509
|
-
} catch {
|
|
509
|
+
} catch (err) {
|
|
510
|
+
const preview = dataStr.length > 100 ? dataStr.slice(0, 100) + `...` : dataStr;
|
|
511
|
+
throw new DurableStreamError(`Failed to parse SSE control event: ${err instanceof Error ? err.message : String(err)}. Data: ${preview}`, `PARSE_ERROR`);
|
|
512
|
+
}
|
|
510
513
|
}
|
|
511
514
|
currentEvent = { data: [] };
|
|
512
515
|
} else if (line.startsWith(`event:`)) currentEvent.type = line.slice(6).trim();
|
|
@@ -531,7 +534,10 @@ async function* parseSSEStream(stream$1, signal) {
|
|
|
531
534
|
streamCursor: control.streamCursor,
|
|
532
535
|
upToDate: control.upToDate
|
|
533
536
|
};
|
|
534
|
-
} catch {
|
|
537
|
+
} catch (err) {
|
|
538
|
+
const preview = dataStr.length > 100 ? dataStr.slice(0, 100) + `...` : dataStr;
|
|
539
|
+
throw new DurableStreamError(`Failed to parse SSE control event: ${err instanceof Error ? err.message : String(err)}. Data: ${preview}`, `PARSE_ERROR`);
|
|
540
|
+
}
|
|
535
541
|
}
|
|
536
542
|
} finally {
|
|
537
543
|
reader.releaseLock();
|
|
@@ -541,6 +547,10 @@ async function* parseSSEStream(stream$1, signal) {
|
|
|
541
547
|
//#endregion
|
|
542
548
|
//#region src/response.ts
|
|
543
549
|
/**
|
|
550
|
+
* Constant used as abort reason when pausing the stream due to visibility change.
|
|
551
|
+
*/
|
|
552
|
+
const PAUSE_STREAM = `PAUSE_STREAM`;
|
|
553
|
+
/**
|
|
544
554
|
* Implementation of the StreamResponse interface.
|
|
545
555
|
*/
|
|
546
556
|
var StreamResponseImpl = class {
|
|
@@ -553,9 +563,9 @@ var StreamResponseImpl = class {
|
|
|
553
563
|
#statusText;
|
|
554
564
|
#ok;
|
|
555
565
|
#isLoading;
|
|
556
|
-
offset;
|
|
557
|
-
cursor;
|
|
558
|
-
upToDate;
|
|
566
|
+
#offset;
|
|
567
|
+
#cursor;
|
|
568
|
+
#upToDate;
|
|
559
569
|
#isJsonMode;
|
|
560
570
|
#abortController;
|
|
561
571
|
#fetchNext;
|
|
@@ -565,6 +575,12 @@ var StreamResponseImpl = class {
|
|
|
565
575
|
#closed;
|
|
566
576
|
#stopAfterUpToDate = false;
|
|
567
577
|
#consumptionMethod = null;
|
|
578
|
+
#state = `active`;
|
|
579
|
+
#requestAbortController;
|
|
580
|
+
#unsubscribeFromVisibilityChanges;
|
|
581
|
+
#pausePromise;
|
|
582
|
+
#pauseResolve;
|
|
583
|
+
#justResumedFromPause = false;
|
|
568
584
|
#sseResilience;
|
|
569
585
|
#lastSSEConnectionStartTime;
|
|
570
586
|
#consecutiveShortSSEConnections = 0;
|
|
@@ -575,9 +591,9 @@ var StreamResponseImpl = class {
|
|
|
575
591
|
this.contentType = config.contentType;
|
|
576
592
|
this.live = config.live;
|
|
577
593
|
this.startOffset = config.startOffset;
|
|
578
|
-
this
|
|
579
|
-
this
|
|
580
|
-
this
|
|
594
|
+
this.#offset = config.initialOffset;
|
|
595
|
+
this.#cursor = config.initialCursor;
|
|
596
|
+
this.#upToDate = config.initialUpToDate;
|
|
581
597
|
this.#headers = config.firstResponse.headers;
|
|
582
598
|
this.#status = config.firstResponse.status;
|
|
583
599
|
this.#statusText = config.firstResponse.statusText;
|
|
@@ -599,6 +615,59 @@ var StreamResponseImpl = class {
|
|
|
599
615
|
this.#closedReject = reject;
|
|
600
616
|
});
|
|
601
617
|
this.#responseStream = this.#createResponseStream(config.firstResponse);
|
|
618
|
+
this.#abortController.signal.addEventListener(`abort`, () => {
|
|
619
|
+
this.#requestAbortController?.abort(this.#abortController.signal.reason);
|
|
620
|
+
this.#pauseResolve?.();
|
|
621
|
+
this.#pausePromise = void 0;
|
|
622
|
+
this.#pauseResolve = void 0;
|
|
623
|
+
}, { once: true });
|
|
624
|
+
this.#subscribeToVisibilityChanges();
|
|
625
|
+
}
|
|
626
|
+
/**
|
|
627
|
+
* Subscribe to document visibility changes to pause/resume syncing.
|
|
628
|
+
* When the page is hidden, we pause to save battery and bandwidth.
|
|
629
|
+
* When visible again, we resume syncing.
|
|
630
|
+
*/
|
|
631
|
+
#subscribeToVisibilityChanges() {
|
|
632
|
+
if (typeof document === `object` && typeof document.hidden === `boolean` && typeof document.addEventListener === `function`) {
|
|
633
|
+
const visibilityHandler = () => {
|
|
634
|
+
if (document.hidden) this.#pause();
|
|
635
|
+
else this.#resume();
|
|
636
|
+
};
|
|
637
|
+
document.addEventListener(`visibilitychange`, visibilityHandler);
|
|
638
|
+
this.#unsubscribeFromVisibilityChanges = () => {
|
|
639
|
+
if (typeof document === `object`) document.removeEventListener(`visibilitychange`, visibilityHandler);
|
|
640
|
+
};
|
|
641
|
+
if (document.hidden) this.#pause();
|
|
642
|
+
}
|
|
643
|
+
}
|
|
644
|
+
/**
|
|
645
|
+
* Pause the stream when page becomes hidden.
|
|
646
|
+
* Aborts any in-flight request to free resources.
|
|
647
|
+
* Creates a promise that pull() will await while paused.
|
|
648
|
+
*/
|
|
649
|
+
#pause() {
|
|
650
|
+
if (this.#state === `active`) {
|
|
651
|
+
this.#state = `pause-requested`;
|
|
652
|
+
this.#pausePromise = new Promise((resolve) => {
|
|
653
|
+
this.#pauseResolve = resolve;
|
|
654
|
+
});
|
|
655
|
+
this.#requestAbortController?.abort(PAUSE_STREAM);
|
|
656
|
+
}
|
|
657
|
+
}
|
|
658
|
+
/**
|
|
659
|
+
* Resume the stream when page becomes visible.
|
|
660
|
+
* Resolves the pause promise to unblock pull().
|
|
661
|
+
*/
|
|
662
|
+
#resume() {
|
|
663
|
+
if (this.#state === `paused` || this.#state === `pause-requested`) {
|
|
664
|
+
if (this.#abortController.signal.aborted) return;
|
|
665
|
+
this.#state = `active`;
|
|
666
|
+
this.#justResumedFromPause = true;
|
|
667
|
+
this.#pauseResolve?.();
|
|
668
|
+
this.#pausePromise = void 0;
|
|
669
|
+
this.#pauseResolve = void 0;
|
|
670
|
+
}
|
|
602
671
|
}
|
|
603
672
|
get headers() {
|
|
604
673
|
return this.#headers;
|
|
@@ -615,13 +684,24 @@ var StreamResponseImpl = class {
|
|
|
615
684
|
get isLoading() {
|
|
616
685
|
return this.#isLoading;
|
|
617
686
|
}
|
|
687
|
+
get offset() {
|
|
688
|
+
return this.#offset;
|
|
689
|
+
}
|
|
690
|
+
get cursor() {
|
|
691
|
+
return this.#cursor;
|
|
692
|
+
}
|
|
693
|
+
get upToDate() {
|
|
694
|
+
return this.#upToDate;
|
|
695
|
+
}
|
|
618
696
|
#ensureJsonMode() {
|
|
619
697
|
if (!this.#isJsonMode) throw new DurableStreamError(`JSON methods are only valid for JSON-mode streams. Content-Type is "${this.contentType}" and json hint was not set.`, `BAD_REQUEST`);
|
|
620
698
|
}
|
|
621
699
|
#markClosed() {
|
|
700
|
+
this.#unsubscribeFromVisibilityChanges?.();
|
|
622
701
|
this.#closedResolve();
|
|
623
702
|
}
|
|
624
703
|
#markError(err) {
|
|
704
|
+
this.#unsubscribeFromVisibilityChanges?.();
|
|
625
705
|
this.#closedReject(err);
|
|
626
706
|
}
|
|
627
707
|
/**
|
|
@@ -646,10 +726,10 @@ var StreamResponseImpl = class {
|
|
|
646
726
|
*/
|
|
647
727
|
#updateStateFromResponse(response) {
|
|
648
728
|
const offset = response.headers.get(STREAM_OFFSET_HEADER);
|
|
649
|
-
if (offset) this
|
|
729
|
+
if (offset) this.#offset = offset;
|
|
650
730
|
const cursor = response.headers.get(STREAM_CURSOR_HEADER);
|
|
651
|
-
if (cursor) this
|
|
652
|
-
this
|
|
731
|
+
if (cursor) this.#cursor = cursor;
|
|
732
|
+
this.#upToDate = response.headers.has(STREAM_UP_TO_DATE_HEADER);
|
|
653
733
|
this.#headers = response.headers;
|
|
654
734
|
this.#status = response.status;
|
|
655
735
|
this.#statusText = response.statusText;
|
|
@@ -691,9 +771,9 @@ var StreamResponseImpl = class {
|
|
|
691
771
|
* Update instance state from an SSE control event.
|
|
692
772
|
*/
|
|
693
773
|
#updateStateFromSSEControl(controlEvent) {
|
|
694
|
-
this
|
|
695
|
-
if (controlEvent.streamCursor) this
|
|
696
|
-
if (controlEvent.upToDate !== void 0) this
|
|
774
|
+
this.#offset = controlEvent.streamNextOffset;
|
|
775
|
+
if (controlEvent.streamCursor) this.#cursor = controlEvent.streamCursor;
|
|
776
|
+
if (controlEvent.upToDate !== void 0) this.#upToDate = controlEvent.upToDate;
|
|
697
777
|
}
|
|
698
778
|
/**
|
|
699
779
|
* Mark the start of an SSE connection for duration tracking.
|
|
@@ -734,8 +814,9 @@ var StreamResponseImpl = class {
|
|
|
734
814
|
const delayOrNull = await this.#handleSSEConnectionEnd();
|
|
735
815
|
if (delayOrNull === null) return null;
|
|
736
816
|
this.#markSSEConnectionStart();
|
|
737
|
-
|
|
738
|
-
|
|
817
|
+
this.#requestAbortController = new AbortController();
|
|
818
|
+
const newSSEResponse = await this.#startSSE(this.offset, this.cursor, this.#requestAbortController.signal);
|
|
819
|
+
if (newSSEResponse.body) return parseSSEStream(newSSEResponse.body, this.#requestAbortController.signal);
|
|
739
820
|
return null;
|
|
740
821
|
}
|
|
741
822
|
/**
|
|
@@ -821,7 +902,8 @@ var StreamResponseImpl = class {
|
|
|
821
902
|
const isSSE = firstResponse.headers.get(`content-type`)?.includes(`text/event-stream`) ?? false;
|
|
822
903
|
if (isSSE && firstResponse.body) {
|
|
823
904
|
this.#markSSEConnectionStart();
|
|
824
|
-
|
|
905
|
+
this.#requestAbortController = new AbortController();
|
|
906
|
+
sseEventIterator = parseSSEStream(firstResponse.body, this.#requestAbortController.signal);
|
|
825
907
|
} else {
|
|
826
908
|
controller.enqueue(firstResponse);
|
|
827
909
|
if (this.upToDate && !this.#shouldContinueLive()) {
|
|
@@ -832,33 +914,63 @@ var StreamResponseImpl = class {
|
|
|
832
914
|
return;
|
|
833
915
|
}
|
|
834
916
|
}
|
|
835
|
-
if (sseEventIterator)
|
|
836
|
-
|
|
837
|
-
|
|
838
|
-
|
|
839
|
-
|
|
840
|
-
controller.enqueue(result.response);
|
|
841
|
-
return;
|
|
842
|
-
case `closed`:
|
|
917
|
+
if (sseEventIterator) {
|
|
918
|
+
if (this.#state === `pause-requested` || this.#state === `paused`) {
|
|
919
|
+
this.#state = `paused`;
|
|
920
|
+
if (this.#pausePromise) await this.#pausePromise;
|
|
921
|
+
if (this.#abortController.signal.aborted) {
|
|
843
922
|
this.#markClosed();
|
|
844
923
|
controller.close();
|
|
845
924
|
return;
|
|
846
|
-
|
|
847
|
-
|
|
848
|
-
|
|
925
|
+
}
|
|
926
|
+
const newIterator = await this.#trySSEReconnect();
|
|
927
|
+
if (newIterator) sseEventIterator = newIterator;
|
|
928
|
+
else {
|
|
929
|
+
this.#markClosed();
|
|
930
|
+
controller.close();
|
|
849
931
|
return;
|
|
850
|
-
|
|
851
|
-
|
|
852
|
-
|
|
932
|
+
}
|
|
933
|
+
}
|
|
934
|
+
while (true) {
|
|
935
|
+
const result = await this.#processSSEEvents(sseEventIterator);
|
|
936
|
+
switch (result.type) {
|
|
937
|
+
case `response`:
|
|
938
|
+
if (result.newIterator) sseEventIterator = result.newIterator;
|
|
939
|
+
controller.enqueue(result.response);
|
|
940
|
+
return;
|
|
941
|
+
case `closed`:
|
|
942
|
+
this.#markClosed();
|
|
943
|
+
controller.close();
|
|
944
|
+
return;
|
|
945
|
+
case `error`:
|
|
946
|
+
this.#markError(result.error);
|
|
947
|
+
controller.error(result.error);
|
|
948
|
+
return;
|
|
949
|
+
case `continue`:
|
|
950
|
+
if (result.newIterator) sseEventIterator = result.newIterator;
|
|
951
|
+
continue;
|
|
952
|
+
}
|
|
853
953
|
}
|
|
854
954
|
}
|
|
855
955
|
if (this.#shouldContinueLive()) {
|
|
956
|
+
if (this.#state === `pause-requested` || this.#state === `paused`) {
|
|
957
|
+
this.#state = `paused`;
|
|
958
|
+
if (this.#pausePromise) await this.#pausePromise;
|
|
959
|
+
if (this.#abortController.signal.aborted) {
|
|
960
|
+
this.#markClosed();
|
|
961
|
+
controller.close();
|
|
962
|
+
return;
|
|
963
|
+
}
|
|
964
|
+
}
|
|
856
965
|
if (this.#abortController.signal.aborted) {
|
|
857
966
|
this.#markClosed();
|
|
858
967
|
controller.close();
|
|
859
968
|
return;
|
|
860
969
|
}
|
|
861
|
-
const
|
|
970
|
+
const resumingFromPause = this.#justResumedFromPause;
|
|
971
|
+
this.#justResumedFromPause = false;
|
|
972
|
+
this.#requestAbortController = new AbortController();
|
|
973
|
+
const response = await this.#fetchNext(this.offset, this.cursor, this.#requestAbortController.signal, resumingFromPause);
|
|
862
974
|
this.#updateStateFromResponse(response);
|
|
863
975
|
controller.enqueue(response);
|
|
864
976
|
return;
|
|
@@ -866,6 +978,10 @@ var StreamResponseImpl = class {
|
|
|
866
978
|
this.#markClosed();
|
|
867
979
|
controller.close();
|
|
868
980
|
} catch (err) {
|
|
981
|
+
if (this.#requestAbortController?.signal.aborted && this.#requestAbortController.signal.reason === PAUSE_STREAM) {
|
|
982
|
+
if (this.#state === `pause-requested`) this.#state = `paused`;
|
|
983
|
+
return;
|
|
984
|
+
}
|
|
869
985
|
if (this.#abortController.signal.aborted) {
|
|
870
986
|
this.#markClosed();
|
|
871
987
|
controller.close();
|
|
@@ -877,6 +993,7 @@ var StreamResponseImpl = class {
|
|
|
877
993
|
},
|
|
878
994
|
cancel: () => {
|
|
879
995
|
this.#abortController.abort();
|
|
996
|
+
this.#unsubscribeFromVisibilityChanges?.();
|
|
880
997
|
this.#markClosed();
|
|
881
998
|
}
|
|
882
999
|
});
|
|
@@ -922,7 +1039,13 @@ var StreamResponseImpl = class {
|
|
|
922
1039
|
const wasUpToDate = this.upToDate;
|
|
923
1040
|
const text = await result.value.text();
|
|
924
1041
|
const content = text.trim() || `[]`;
|
|
925
|
-
|
|
1042
|
+
let parsed;
|
|
1043
|
+
try {
|
|
1044
|
+
parsed = JSON.parse(content);
|
|
1045
|
+
} catch (err) {
|
|
1046
|
+
const preview = content.length > 100 ? content.slice(0, 100) + `...` : content;
|
|
1047
|
+
throw new DurableStreamError(`Failed to parse JSON response: ${err instanceof Error ? err.message : String(err)}. Data: ${preview}`, `PARSE_ERROR`);
|
|
1048
|
+
}
|
|
926
1049
|
if (Array.isArray(parsed)) items.push(...parsed);
|
|
927
1050
|
else items.push(parsed);
|
|
928
1051
|
if (wasUpToDate) break;
|
|
@@ -1019,7 +1142,13 @@ var StreamResponseImpl = class {
|
|
|
1019
1142
|
}
|
|
1020
1143
|
const text = await response.text();
|
|
1021
1144
|
const content = text.trim() || `[]`;
|
|
1022
|
-
|
|
1145
|
+
let parsed;
|
|
1146
|
+
try {
|
|
1147
|
+
parsed = JSON.parse(content);
|
|
1148
|
+
} catch (err) {
|
|
1149
|
+
const preview = content.length > 100 ? content.slice(0, 100) + `...` : content;
|
|
1150
|
+
throw new DurableStreamError(`Failed to parse JSON response: ${err instanceof Error ? err.message : String(err)}. Data: ${preview}`, `PARSE_ERROR`);
|
|
1151
|
+
}
|
|
1023
1152
|
pendingItems = Array.isArray(parsed) ? parsed : [parsed];
|
|
1024
1153
|
if (pendingItems.length > 0) controller.enqueue(pendingItems.shift());
|
|
1025
1154
|
},
|
|
@@ -1058,7 +1187,13 @@ var StreamResponseImpl = class {
|
|
|
1058
1187
|
const { offset, cursor, upToDate } = this.#getMetadataFromResponse(response);
|
|
1059
1188
|
const text = await response.text();
|
|
1060
1189
|
const content = text.trim() || `[]`;
|
|
1061
|
-
|
|
1190
|
+
let parsed;
|
|
1191
|
+
try {
|
|
1192
|
+
parsed = JSON.parse(content);
|
|
1193
|
+
} catch (err) {
|
|
1194
|
+
const preview = content.length > 100 ? content.slice(0, 100) + `...` : content;
|
|
1195
|
+
throw new DurableStreamError(`Failed to parse JSON response: ${err instanceof Error ? err.message : String(err)}. Data: ${preview}`, `PARSE_ERROR`);
|
|
1196
|
+
}
|
|
1062
1197
|
const items = Array.isArray(parsed) ? parsed : [parsed];
|
|
1063
1198
|
await subscriber({
|
|
1064
1199
|
items,
|
|
@@ -1158,6 +1293,7 @@ var StreamResponseImpl = class {
|
|
|
1158
1293
|
}
|
|
1159
1294
|
cancel(reason) {
|
|
1160
1295
|
this.#abortController.abort(reason);
|
|
1296
|
+
this.#unsubscribeFromVisibilityChanges?.();
|
|
1161
1297
|
this.#markClosed();
|
|
1162
1298
|
}
|
|
1163
1299
|
get closed() {
|
|
@@ -1302,7 +1438,7 @@ function _resetHttpWarningForTesting() {
|
|
|
1302
1438
|
* url,
|
|
1303
1439
|
* auth,
|
|
1304
1440
|
* offset: savedOffset,
|
|
1305
|
-
* live:
|
|
1441
|
+
* live: true,
|
|
1306
1442
|
* })
|
|
1307
1443
|
* live.subscribeJson(async (batch) => {
|
|
1308
1444
|
* for (const item of batch.items) {
|
|
@@ -1343,10 +1479,11 @@ async function stream(options) {
|
|
|
1343
1479
|
*/
|
|
1344
1480
|
async function streamInternal(options) {
|
|
1345
1481
|
const url = options.url instanceof URL ? options.url.toString() : options.url;
|
|
1482
|
+
warnIfUsingHttpInBrowser(url, options.warnOnHttp);
|
|
1346
1483
|
const fetchUrl = new URL(url);
|
|
1347
1484
|
const startOffset = options.offset ?? `-1`;
|
|
1348
1485
|
fetchUrl.searchParams.set(OFFSET_QUERY_PARAM, startOffset);
|
|
1349
|
-
const live = options.live ??
|
|
1486
|
+
const live = options.live ?? true;
|
|
1350
1487
|
if (live === `long-poll` || live === `sse`) fetchUrl.searchParams.set(LIVE_QUERY_PARAM, live);
|
|
1351
1488
|
const params = await resolveParams(options.params);
|
|
1352
1489
|
for (const [key, value] of Object.entries(params)) fetchUrl.searchParams.set(key, value);
|
|
@@ -1372,11 +1509,13 @@ async function streamInternal(options) {
|
|
|
1372
1509
|
const initialCursor = firstResponse.headers.get(STREAM_CURSOR_HEADER) ?? void 0;
|
|
1373
1510
|
const initialUpToDate = firstResponse.headers.has(STREAM_UP_TO_DATE_HEADER);
|
|
1374
1511
|
const isJsonMode = options.json === true || (contentType?.includes(`application/json`) ?? false);
|
|
1375
|
-
const fetchNext = async (offset, cursor, signal) => {
|
|
1512
|
+
const fetchNext = async (offset, cursor, signal, resumingFromPause) => {
|
|
1376
1513
|
const nextUrl = new URL(url);
|
|
1377
1514
|
nextUrl.searchParams.set(OFFSET_QUERY_PARAM, offset);
|
|
1378
|
-
if (
|
|
1379
|
-
|
|
1515
|
+
if (!resumingFromPause) {
|
|
1516
|
+
if (live === `sse`) nextUrl.searchParams.set(LIVE_QUERY_PARAM, `sse`);
|
|
1517
|
+
else if (live === true || live === `long-poll`) nextUrl.searchParams.set(LIVE_QUERY_PARAM, `long-poll`);
|
|
1518
|
+
}
|
|
1380
1519
|
if (cursor) nextUrl.searchParams.set(`cursor`, cursor);
|
|
1381
1520
|
const nextParams = await resolveParams(options.params);
|
|
1382
1521
|
for (const [key, value] of Object.entries(nextParams)) nextUrl.searchParams.set(key, value);
|
|
@@ -1423,927 +1562,947 @@ async function streamInternal(options) {
|
|
|
1423
1562
|
}
|
|
1424
1563
|
|
|
1425
1564
|
//#endregion
|
|
1426
|
-
//#region src/
|
|
1565
|
+
//#region src/idempotent-producer.ts
|
|
1566
|
+
/**
|
|
1567
|
+
* Error thrown when a producer's epoch is stale (zombie fencing).
|
|
1568
|
+
*/
|
|
1569
|
+
var StaleEpochError = class extends Error {
|
|
1570
|
+
/**
|
|
1571
|
+
* The current epoch on the server.
|
|
1572
|
+
*/
|
|
1573
|
+
currentEpoch;
|
|
1574
|
+
constructor(currentEpoch) {
|
|
1575
|
+
super(`Producer epoch is stale. Current server epoch: ${currentEpoch}. Call restart() or create a new producer with a higher epoch.`);
|
|
1576
|
+
this.name = `StaleEpochError`;
|
|
1577
|
+
this.currentEpoch = currentEpoch;
|
|
1578
|
+
}
|
|
1579
|
+
};
|
|
1580
|
+
/**
|
|
1581
|
+
* Error thrown when an unrecoverable sequence gap is detected.
|
|
1582
|
+
*
|
|
1583
|
+
* With maxInFlight > 1, HTTP requests can arrive out of order at the server,
|
|
1584
|
+
* causing temporary 409 responses. The client automatically handles these
|
|
1585
|
+
* by waiting for earlier sequences to complete, then retrying.
|
|
1586
|
+
*
|
|
1587
|
+
* This error is only thrown when the gap cannot be resolved (e.g., the
|
|
1588
|
+
* expected sequence is >= our sequence, indicating a true protocol violation).
|
|
1589
|
+
*/
|
|
1590
|
+
var SequenceGapError = class extends Error {
|
|
1591
|
+
expectedSeq;
|
|
1592
|
+
receivedSeq;
|
|
1593
|
+
constructor(expectedSeq, receivedSeq) {
|
|
1594
|
+
super(`Producer sequence gap: expected ${expectedSeq}, received ${receivedSeq}`);
|
|
1595
|
+
this.name = `SequenceGapError`;
|
|
1596
|
+
this.expectedSeq = expectedSeq;
|
|
1597
|
+
this.receivedSeq = receivedSeq;
|
|
1598
|
+
}
|
|
1599
|
+
};
|
|
1427
1600
|
/**
|
|
1428
1601
|
* Normalize content-type by extracting the media type (before any semicolon).
|
|
1429
|
-
* Handles cases like "application/json; charset=utf-8".
|
|
1430
1602
|
*/
|
|
1431
1603
|
function normalizeContentType$1(contentType) {
|
|
1432
1604
|
if (!contentType) return ``;
|
|
1433
1605
|
return contentType.split(`;`)[0].trim().toLowerCase();
|
|
1434
1606
|
}
|
|
1435
1607
|
/**
|
|
1436
|
-
*
|
|
1437
|
-
*/
|
|
1438
|
-
function isPromiseLike(value) {
|
|
1439
|
-
return value !== null && typeof value === `object` && `then` in value && typeof value.then === `function`;
|
|
1440
|
-
}
|
|
1441
|
-
/**
|
|
1442
|
-
* A handle to a remote durable stream for read/write operations.
|
|
1608
|
+
* An idempotent producer for exactly-once writes to a durable stream.
|
|
1443
1609
|
*
|
|
1444
|
-
*
|
|
1445
|
-
*
|
|
1446
|
-
*
|
|
1610
|
+
* Features:
|
|
1611
|
+
* - Fire-and-forget: append() returns immediately, batches in background
|
|
1612
|
+
* - Exactly-once: server deduplicates using (producerId, epoch, seq)
|
|
1613
|
+
* - Batching: multiple appends batched into single HTTP request
|
|
1614
|
+
* - Pipelining: up to maxInFlight concurrent batches
|
|
1615
|
+
* - Zombie fencing: stale producers rejected via epoch validation
|
|
1447
1616
|
*
|
|
1448
1617
|
* @example
|
|
1449
1618
|
* ```typescript
|
|
1450
|
-
*
|
|
1451
|
-
* const
|
|
1452
|
-
*
|
|
1453
|
-
*
|
|
1454
|
-
* contentType: "application/json"
|
|
1619
|
+
* const stream = new DurableStream({ url: "https://..." });
|
|
1620
|
+
* const producer = new IdempotentProducer(stream, "order-service-1", {
|
|
1621
|
+
* epoch: 0,
|
|
1622
|
+
* autoClaim: true,
|
|
1455
1623
|
* });
|
|
1456
1624
|
*
|
|
1457
|
-
* //
|
|
1458
|
-
*
|
|
1625
|
+
* // Fire-and-forget writes (synchronous, returns immediately)
|
|
1626
|
+
* producer.append("message 1");
|
|
1627
|
+
* producer.append("message 2");
|
|
1459
1628
|
*
|
|
1460
|
-
* //
|
|
1461
|
-
*
|
|
1462
|
-
*
|
|
1463
|
-
* for (const item of batch.items) {
|
|
1464
|
-
* console.log(item.message);
|
|
1465
|
-
* }
|
|
1466
|
-
* });
|
|
1629
|
+
* // Ensure all messages are delivered before shutdown
|
|
1630
|
+
* await producer.flush();
|
|
1631
|
+
* await producer.close();
|
|
1467
1632
|
* ```
|
|
1468
1633
|
*/
|
|
1469
|
-
var
|
|
1470
|
-
|
|
1471
|
-
|
|
1472
|
-
|
|
1473
|
-
|
|
1474
|
-
|
|
1475
|
-
|
|
1476
|
-
|
|
1477
|
-
contentType;
|
|
1478
|
-
#options;
|
|
1634
|
+
var IdempotentProducer = class {
|
|
1635
|
+
#stream;
|
|
1636
|
+
#producerId;
|
|
1637
|
+
#epoch;
|
|
1638
|
+
#nextSeq = 0;
|
|
1639
|
+
#autoClaim;
|
|
1640
|
+
#maxBatchBytes;
|
|
1641
|
+
#lingerMs;
|
|
1479
1642
|
#fetchClient;
|
|
1643
|
+
#signal;
|
|
1480
1644
|
#onError;
|
|
1481
|
-
#
|
|
1645
|
+
#pendingBatch = [];
|
|
1646
|
+
#batchBytes = 0;
|
|
1647
|
+
#lingerTimeout = null;
|
|
1482
1648
|
#queue;
|
|
1483
|
-
#
|
|
1484
|
-
|
|
1485
|
-
|
|
1486
|
-
|
|
1487
|
-
*/
|
|
1488
|
-
constructor(opts) {
|
|
1489
|
-
validateOptions(opts);
|
|
1490
|
-
const urlStr = opts.url instanceof URL ? opts.url.toString() : opts.url;
|
|
1491
|
-
this.url = urlStr;
|
|
1492
|
-
this.#options = {
|
|
1493
|
-
...opts,
|
|
1494
|
-
url: urlStr
|
|
1495
|
-
};
|
|
1496
|
-
this.#onError = opts.onError;
|
|
1497
|
-
if (opts.contentType) this.contentType = opts.contentType;
|
|
1498
|
-
this.#batchingEnabled = opts.batching !== false;
|
|
1499
|
-
if (this.#batchingEnabled) this.#queue = fastq.default.promise(this.#batchWorker.bind(this), 1);
|
|
1500
|
-
const baseFetchClient = opts.fetch ?? ((...args) => fetch(...args));
|
|
1501
|
-
const backOffOpts = { ...opts.backoffOptions ?? BackoffDefaults };
|
|
1502
|
-
const fetchWithBackoffClient = createFetchWithBackoff(baseFetchClient, backOffOpts);
|
|
1503
|
-
this.#fetchClient = createFetchWithConsumedBody(fetchWithBackoffClient);
|
|
1504
|
-
}
|
|
1649
|
+
#maxInFlight;
|
|
1650
|
+
#closed = false;
|
|
1651
|
+
#epochClaimed;
|
|
1652
|
+
#seqState = new Map();
|
|
1505
1653
|
/**
|
|
1506
|
-
* Create
|
|
1507
|
-
*
|
|
1654
|
+
* Create an idempotent producer for a stream.
|
|
1655
|
+
*
|
|
1656
|
+
* @param stream - The DurableStream to write to
|
|
1657
|
+
* @param producerId - Stable identifier for this producer (e.g., "order-service-1")
|
|
1658
|
+
* @param opts - Producer options
|
|
1508
1659
|
*/
|
|
1509
|
-
|
|
1510
|
-
const
|
|
1511
|
-
|
|
1512
|
-
|
|
1513
|
-
|
|
1514
|
-
|
|
1515
|
-
|
|
1516
|
-
|
|
1517
|
-
|
|
1660
|
+
constructor(stream$1, producerId, opts) {
|
|
1661
|
+
const epoch = opts?.epoch ?? 0;
|
|
1662
|
+
const maxBatchBytes = opts?.maxBatchBytes ?? 1024 * 1024;
|
|
1663
|
+
const maxInFlight = opts?.maxInFlight ?? 5;
|
|
1664
|
+
const lingerMs = opts?.lingerMs ?? 5;
|
|
1665
|
+
if (epoch < 0) throw new Error(`epoch must be >= 0`);
|
|
1666
|
+
if (maxBatchBytes <= 0) throw new Error(`maxBatchBytes must be > 0`);
|
|
1667
|
+
if (maxInFlight <= 0) throw new Error(`maxInFlight must be > 0`);
|
|
1668
|
+
if (lingerMs < 0) throw new Error(`lingerMs must be >= 0`);
|
|
1669
|
+
this.#stream = stream$1;
|
|
1670
|
+
this.#producerId = producerId;
|
|
1671
|
+
this.#epoch = epoch;
|
|
1672
|
+
this.#autoClaim = opts?.autoClaim ?? false;
|
|
1673
|
+
this.#maxBatchBytes = maxBatchBytes;
|
|
1674
|
+
this.#lingerMs = lingerMs;
|
|
1675
|
+
this.#signal = opts?.signal;
|
|
1676
|
+
this.#onError = opts?.onError;
|
|
1677
|
+
this.#fetchClient = opts?.fetch ?? ((...args) => fetch(...args));
|
|
1678
|
+
this.#maxInFlight = maxInFlight;
|
|
1679
|
+
this.#epochClaimed = !this.#autoClaim;
|
|
1680
|
+
this.#queue = fastq.default.promise(this.#batchWorker.bind(this), this.#maxInFlight);
|
|
1681
|
+
if (this.#signal) this.#signal.addEventListener(`abort`, () => {
|
|
1682
|
+
this.#rejectPendingBatch(new DurableStreamError(`Producer aborted`, `ALREADY_CLOSED`, void 0, void 0));
|
|
1683
|
+
}, { once: true });
|
|
1518
1684
|
}
|
|
1519
1685
|
/**
|
|
1520
|
-
*
|
|
1521
|
-
* Returns a handle with contentType populated (if sent by server).
|
|
1686
|
+
* Append data to the stream.
|
|
1522
1687
|
*
|
|
1523
|
-
*
|
|
1524
|
-
*
|
|
1525
|
-
*
|
|
1688
|
+
* This is fire-and-forget: returns immediately after adding to the batch.
|
|
1689
|
+
* The message is batched and sent when:
|
|
1690
|
+
* - maxBatchBytes is reached
|
|
1691
|
+
* - lingerMs elapses
|
|
1692
|
+
* - flush() is called
|
|
1693
|
+
*
|
|
1694
|
+
* Errors are reported via onError callback if configured. Use flush() to
|
|
1695
|
+
* wait for all pending messages to be sent.
|
|
1696
|
+
*
|
|
1697
|
+
* For JSON streams, pass pre-serialized JSON strings.
|
|
1698
|
+
* For byte streams, pass string or Uint8Array.
|
|
1699
|
+
*
|
|
1700
|
+
* @param body - Data to append (string or Uint8Array)
|
|
1526
1701
|
*
|
|
1527
1702
|
* @example
|
|
1528
1703
|
* ```typescript
|
|
1529
|
-
* //
|
|
1530
|
-
*
|
|
1531
|
-
*
|
|
1704
|
+
* // JSON stream
|
|
1705
|
+
* producer.append(JSON.stringify({ message: "hello" }));
|
|
1706
|
+
*
|
|
1707
|
+
* // Byte stream
|
|
1708
|
+
* producer.append("raw text data");
|
|
1709
|
+
* producer.append(new Uint8Array([1, 2, 3]));
|
|
1532
1710
|
* ```
|
|
1533
1711
|
*/
|
|
1534
|
-
|
|
1535
|
-
|
|
1536
|
-
|
|
1537
|
-
|
|
1712
|
+
append(body) {
|
|
1713
|
+
if (this.#closed) throw new DurableStreamError(`Producer is closed`, `ALREADY_CLOSED`, void 0, void 0);
|
|
1714
|
+
let bytes;
|
|
1715
|
+
if (typeof body === `string`) bytes = new TextEncoder().encode(body);
|
|
1716
|
+
else if (body instanceof Uint8Array) bytes = body;
|
|
1717
|
+
else throw new DurableStreamError(`append() requires string or Uint8Array. For objects, use JSON.stringify().`, `BAD_REQUEST`, 400, void 0);
|
|
1718
|
+
this.#pendingBatch.push({ body: bytes });
|
|
1719
|
+
this.#batchBytes += bytes.length;
|
|
1720
|
+
if (this.#batchBytes >= this.#maxBatchBytes) this.#enqueuePendingBatch();
|
|
1721
|
+
else if (!this.#lingerTimeout) this.#lingerTimeout = setTimeout(() => {
|
|
1722
|
+
this.#lingerTimeout = null;
|
|
1723
|
+
if (this.#pendingBatch.length > 0) this.#enqueuePendingBatch();
|
|
1724
|
+
}, this.#lingerMs);
|
|
1538
1725
|
}
|
|
1539
1726
|
/**
|
|
1540
|
-
*
|
|
1727
|
+
* Send any pending batch immediately and wait for all in-flight batches.
|
|
1728
|
+
*
|
|
1729
|
+
* Call this before shutdown to ensure all messages are delivered.
|
|
1541
1730
|
*/
|
|
1542
|
-
|
|
1543
|
-
|
|
1544
|
-
|
|
1731
|
+
async flush() {
|
|
1732
|
+
if (this.#lingerTimeout) {
|
|
1733
|
+
clearTimeout(this.#lingerTimeout);
|
|
1734
|
+
this.#lingerTimeout = null;
|
|
1735
|
+
}
|
|
1736
|
+
if (this.#pendingBatch.length > 0) this.#enqueuePendingBatch();
|
|
1737
|
+
await this.#queue.drained();
|
|
1545
1738
|
}
|
|
1546
1739
|
/**
|
|
1547
|
-
*
|
|
1740
|
+
* Flush pending messages and close the producer.
|
|
1741
|
+
*
|
|
1742
|
+
* After calling close(), further append() calls will throw.
|
|
1548
1743
|
*/
|
|
1549
|
-
|
|
1550
|
-
|
|
1551
|
-
|
|
1744
|
+
async close() {
|
|
1745
|
+
if (this.#closed) return;
|
|
1746
|
+
this.#closed = true;
|
|
1747
|
+
try {
|
|
1748
|
+
await this.flush();
|
|
1749
|
+
} catch {}
|
|
1552
1750
|
}
|
|
1553
1751
|
/**
|
|
1554
|
-
*
|
|
1752
|
+
* Increment epoch and reset sequence.
|
|
1753
|
+
*
|
|
1754
|
+
* Call this when restarting the producer to establish a new session.
|
|
1755
|
+
* Flushes any pending messages first.
|
|
1555
1756
|
*/
|
|
1556
|
-
async
|
|
1557
|
-
|
|
1558
|
-
|
|
1559
|
-
|
|
1560
|
-
headers: requestHeaders,
|
|
1561
|
-
signal: opts?.signal ?? this.#options.signal
|
|
1562
|
-
});
|
|
1563
|
-
if (!response.ok) await handleErrorResponse(response, this.url);
|
|
1564
|
-
const contentType = response.headers.get(`content-type`) ?? void 0;
|
|
1565
|
-
const offset = response.headers.get(STREAM_OFFSET_HEADER) ?? void 0;
|
|
1566
|
-
const etag = response.headers.get(`etag`) ?? void 0;
|
|
1567
|
-
const cacheControl = response.headers.get(`cache-control`) ?? void 0;
|
|
1568
|
-
if (contentType) this.contentType = contentType;
|
|
1569
|
-
return {
|
|
1570
|
-
exists: true,
|
|
1571
|
-
contentType,
|
|
1572
|
-
offset,
|
|
1573
|
-
etag,
|
|
1574
|
-
cacheControl
|
|
1575
|
-
};
|
|
1757
|
+
async restart() {
|
|
1758
|
+
await this.flush();
|
|
1759
|
+
this.#epoch++;
|
|
1760
|
+
this.#nextSeq = 0;
|
|
1576
1761
|
}
|
|
1577
1762
|
/**
|
|
1578
|
-
*
|
|
1763
|
+
* Current epoch for this producer.
|
|
1579
1764
|
*/
|
|
1580
|
-
|
|
1581
|
-
|
|
1582
|
-
const contentType = opts?.contentType ?? this.#options.contentType;
|
|
1583
|
-
if (contentType) requestHeaders[`content-type`] = contentType;
|
|
1584
|
-
if (opts?.ttlSeconds !== void 0) requestHeaders[STREAM_TTL_HEADER] = String(opts.ttlSeconds);
|
|
1585
|
-
if (opts?.expiresAt) requestHeaders[STREAM_EXPIRES_AT_HEADER] = opts.expiresAt;
|
|
1586
|
-
const body = encodeBody(opts?.body);
|
|
1587
|
-
const response = await this.#fetchClient(fetchUrl.toString(), {
|
|
1588
|
-
method: `PUT`,
|
|
1589
|
-
headers: requestHeaders,
|
|
1590
|
-
body,
|
|
1591
|
-
signal: this.#options.signal
|
|
1592
|
-
});
|
|
1593
|
-
if (!response.ok) await handleErrorResponse(response, this.url, { operation: `create` });
|
|
1594
|
-
const responseContentType = response.headers.get(`content-type`);
|
|
1595
|
-
if (responseContentType) this.contentType = responseContentType;
|
|
1596
|
-
else if (contentType) this.contentType = contentType;
|
|
1597
|
-
return this;
|
|
1765
|
+
get epoch() {
|
|
1766
|
+
return this.#epoch;
|
|
1598
1767
|
}
|
|
1599
1768
|
/**
|
|
1600
|
-
*
|
|
1769
|
+
* Next sequence number to be assigned.
|
|
1601
1770
|
*/
|
|
1602
|
-
|
|
1603
|
-
|
|
1604
|
-
const response = await this.#fetchClient(fetchUrl.toString(), {
|
|
1605
|
-
method: `DELETE`,
|
|
1606
|
-
headers: requestHeaders,
|
|
1607
|
-
signal: opts?.signal ?? this.#options.signal
|
|
1608
|
-
});
|
|
1609
|
-
if (!response.ok) await handleErrorResponse(response, this.url);
|
|
1771
|
+
get nextSeq() {
|
|
1772
|
+
return this.#nextSeq;
|
|
1610
1773
|
}
|
|
1611
1774
|
/**
|
|
1612
|
-
*
|
|
1613
|
-
*
|
|
1614
|
-
* When batching is enabled (default), multiple append() calls made while
|
|
1615
|
-
* a POST is in-flight will be batched together into a single request.
|
|
1616
|
-
* This significantly improves throughput for high-frequency writes.
|
|
1617
|
-
*
|
|
1618
|
-
* - `body` may be Uint8Array, string, or any JSON-serializable value (for JSON streams).
|
|
1619
|
-
* - `body` may also be a Promise that resolves to any of the above types.
|
|
1620
|
-
* - Strings are encoded as UTF-8.
|
|
1621
|
-
* - `seq` (if provided) is sent as stream-seq (writer coordination).
|
|
1622
|
-
*
|
|
1623
|
-
* @example
|
|
1624
|
-
* ```typescript
|
|
1625
|
-
* // Direct value
|
|
1626
|
-
* await stream.append({ message: "hello" });
|
|
1627
|
-
*
|
|
1628
|
-
* // Promise value - awaited before buffering
|
|
1629
|
-
* await stream.append(fetchData());
|
|
1630
|
-
* await stream.append(Promise.all([a, b, c]));
|
|
1631
|
-
* ```
|
|
1775
|
+
* Number of messages in the current pending batch.
|
|
1632
1776
|
*/
|
|
1633
|
-
|
|
1634
|
-
|
|
1635
|
-
if (this.#batchingEnabled && this.#queue) return this.#appendWithBatching(resolvedBody, opts);
|
|
1636
|
-
return this.#appendDirect(resolvedBody, opts);
|
|
1777
|
+
get pendingCount() {
|
|
1778
|
+
return this.#pendingBatch.length;
|
|
1637
1779
|
}
|
|
1638
1780
|
/**
|
|
1639
|
-
*
|
|
1781
|
+
* Number of batches currently in flight.
|
|
1640
1782
|
*/
|
|
1641
|
-
|
|
1642
|
-
|
|
1643
|
-
const contentType = opts?.contentType ?? this.#options.contentType ?? this.contentType;
|
|
1644
|
-
if (contentType) requestHeaders[`content-type`] = contentType;
|
|
1645
|
-
if (opts?.seq) requestHeaders[STREAM_SEQ_HEADER] = opts.seq;
|
|
1646
|
-
const isJson = normalizeContentType$1(contentType) === `application/json`;
|
|
1647
|
-
const bodyToEncode = isJson ? [body] : body;
|
|
1648
|
-
const encodedBody = encodeBody(bodyToEncode);
|
|
1649
|
-
const response = await this.#fetchClient(fetchUrl.toString(), {
|
|
1650
|
-
method: `POST`,
|
|
1651
|
-
headers: requestHeaders,
|
|
1652
|
-
body: encodedBody,
|
|
1653
|
-
signal: opts?.signal ?? this.#options.signal
|
|
1654
|
-
});
|
|
1655
|
-
if (!response.ok) await handleErrorResponse(response, this.url);
|
|
1783
|
+
get inFlightCount() {
|
|
1784
|
+
return this.#queue.length();
|
|
1656
1785
|
}
|
|
1657
1786
|
/**
|
|
1658
|
-
*
|
|
1787
|
+
* Enqueue the current pending batch for processing.
|
|
1659
1788
|
*/
|
|
1660
|
-
|
|
1661
|
-
|
|
1662
|
-
|
|
1663
|
-
|
|
1664
|
-
|
|
1665
|
-
|
|
1666
|
-
|
|
1667
|
-
|
|
1668
|
-
|
|
1669
|
-
|
|
1670
|
-
|
|
1671
|
-
|
|
1672
|
-
this.#queue.push(batch).catch((err) => {
|
|
1673
|
-
for (const msg of batch) msg.reject(err);
|
|
1674
|
-
});
|
|
1675
|
-
}
|
|
1789
|
+
#enqueuePendingBatch() {
|
|
1790
|
+
if (this.#pendingBatch.length === 0) return;
|
|
1791
|
+
const batch = this.#pendingBatch;
|
|
1792
|
+
const seq = this.#nextSeq;
|
|
1793
|
+
this.#pendingBatch = [];
|
|
1794
|
+
this.#batchBytes = 0;
|
|
1795
|
+
this.#nextSeq++;
|
|
1796
|
+
if (this.#autoClaim && !this.#epochClaimed && this.#queue.length() > 0) this.#queue.drained().then(() => {
|
|
1797
|
+
this.#queue.push({
|
|
1798
|
+
batch,
|
|
1799
|
+
seq
|
|
1800
|
+
}).catch(() => {});
|
|
1676
1801
|
});
|
|
1802
|
+
else this.#queue.push({
|
|
1803
|
+
batch,
|
|
1804
|
+
seq
|
|
1805
|
+
}).catch(() => {});
|
|
1677
1806
|
}
|
|
1678
1807
|
/**
|
|
1679
|
-
* Batch worker - processes batches
|
|
1808
|
+
* Batch worker - processes batches via fastq.
|
|
1680
1809
|
*/
|
|
1681
|
-
async #batchWorker(
|
|
1810
|
+
async #batchWorker(task) {
|
|
1811
|
+
const { batch, seq } = task;
|
|
1812
|
+
const epoch = this.#epoch;
|
|
1682
1813
|
try {
|
|
1683
|
-
await this.#
|
|
1684
|
-
|
|
1685
|
-
|
|
1686
|
-
const nextBatch = this.#buffer.splice(0);
|
|
1687
|
-
this.#queue.push(nextBatch).catch((err) => {
|
|
1688
|
-
for (const msg of nextBatch) msg.reject(err);
|
|
1689
|
-
});
|
|
1690
|
-
}
|
|
1814
|
+
await this.#doSendBatch(batch, seq, epoch);
|
|
1815
|
+
if (!this.#epochClaimed) this.#epochClaimed = true;
|
|
1816
|
+
this.#signalSeqComplete(epoch, seq, void 0);
|
|
1691
1817
|
} catch (error) {
|
|
1692
|
-
|
|
1693
|
-
|
|
1694
|
-
this.#buffer = [];
|
|
1818
|
+
this.#signalSeqComplete(epoch, seq, error);
|
|
1819
|
+
if (this.#onError) this.#onError(error);
|
|
1695
1820
|
throw error;
|
|
1696
1821
|
}
|
|
1697
1822
|
}
|
|
1698
1823
|
/**
|
|
1699
|
-
*
|
|
1824
|
+
* Signal that a sequence has completed (success or failure).
|
|
1700
1825
|
*/
|
|
1701
|
-
|
|
1702
|
-
|
|
1703
|
-
|
|
1704
|
-
|
|
1705
|
-
|
|
1706
|
-
let highestSeq;
|
|
1707
|
-
for (let i = batch.length - 1; i >= 0; i--) if (batch[i].seq !== void 0) {
|
|
1708
|
-
highestSeq = batch[i].seq;
|
|
1709
|
-
break;
|
|
1826
|
+
#signalSeqComplete(epoch, seq, error) {
|
|
1827
|
+
let epochMap = this.#seqState.get(epoch);
|
|
1828
|
+
if (!epochMap) {
|
|
1829
|
+
epochMap = new Map();
|
|
1830
|
+
this.#seqState.set(epoch, epochMap);
|
|
1710
1831
|
}
|
|
1711
|
-
|
|
1832
|
+
const state = epochMap.get(seq);
|
|
1833
|
+
if (state) {
|
|
1834
|
+
state.resolved = true;
|
|
1835
|
+
state.error = error;
|
|
1836
|
+
for (const waiter of state.waiters) waiter(error);
|
|
1837
|
+
state.waiters = [];
|
|
1838
|
+
} else epochMap.set(seq, {
|
|
1839
|
+
resolved: true,
|
|
1840
|
+
error,
|
|
1841
|
+
waiters: []
|
|
1842
|
+
});
|
|
1843
|
+
const cleanupThreshold = seq - this.#maxInFlight * 3;
|
|
1844
|
+
if (cleanupThreshold > 0) {
|
|
1845
|
+
for (const oldSeq of epochMap.keys()) if (oldSeq < cleanupThreshold) epochMap.delete(oldSeq);
|
|
1846
|
+
}
|
|
1847
|
+
}
|
|
1848
|
+
/**
|
|
1849
|
+
* Wait for a specific sequence to complete.
|
|
1850
|
+
* Returns immediately if already completed.
|
|
1851
|
+
* Throws if the sequence failed.
|
|
1852
|
+
*/
|
|
1853
|
+
#waitForSeq(epoch, seq) {
|
|
1854
|
+
let epochMap = this.#seqState.get(epoch);
|
|
1855
|
+
if (!epochMap) {
|
|
1856
|
+
epochMap = new Map();
|
|
1857
|
+
this.#seqState.set(epoch, epochMap);
|
|
1858
|
+
}
|
|
1859
|
+
const state = epochMap.get(seq);
|
|
1860
|
+
if (state?.resolved) {
|
|
1861
|
+
if (state.error) return Promise.reject(state.error);
|
|
1862
|
+
return Promise.resolve();
|
|
1863
|
+
}
|
|
1864
|
+
return new Promise((resolve, reject) => {
|
|
1865
|
+
const waiter = (err) => {
|
|
1866
|
+
if (err) reject(err);
|
|
1867
|
+
else resolve();
|
|
1868
|
+
};
|
|
1869
|
+
if (state) state.waiters.push(waiter);
|
|
1870
|
+
else epochMap.set(seq, {
|
|
1871
|
+
resolved: false,
|
|
1872
|
+
waiters: [waiter]
|
|
1873
|
+
});
|
|
1874
|
+
});
|
|
1875
|
+
}
|
|
1876
|
+
/**
|
|
1877
|
+
* Actually send the batch to the server.
|
|
1878
|
+
* Handles auto-claim retry on 403 (stale epoch) if autoClaim is enabled.
|
|
1879
|
+
* Does NOT implement general retry/backoff for network errors or 5xx responses.
|
|
1880
|
+
*/
|
|
1881
|
+
async #doSendBatch(batch, seq, epoch) {
|
|
1882
|
+
const contentType = this.#stream.contentType ?? `application/octet-stream`;
|
|
1712
1883
|
const isJson = normalizeContentType$1(contentType) === `application/json`;
|
|
1713
1884
|
let batchedBody;
|
|
1714
1885
|
if (isJson) {
|
|
1715
|
-
const
|
|
1716
|
-
batchedBody =
|
|
1886
|
+
const jsonStrings = batch.map((e) => new TextDecoder().decode(e.body));
|
|
1887
|
+
batchedBody = `[${jsonStrings.join(`,`)}]`;
|
|
1717
1888
|
} else {
|
|
1718
|
-
const totalSize = batch.reduce((sum,
|
|
1719
|
-
const size = typeof m.data === `string` ? new TextEncoder().encode(m.data).length : m.data.length;
|
|
1720
|
-
return sum + size;
|
|
1721
|
-
}, 0);
|
|
1889
|
+
const totalSize = batch.reduce((sum, e) => sum + e.body.length, 0);
|
|
1722
1890
|
const concatenated = new Uint8Array(totalSize);
|
|
1723
1891
|
let offset = 0;
|
|
1724
|
-
for (const
|
|
1725
|
-
|
|
1726
|
-
|
|
1727
|
-
offset += bytes.length;
|
|
1892
|
+
for (const entry of batch) {
|
|
1893
|
+
concatenated.set(entry.body, offset);
|
|
1894
|
+
offset += entry.body.length;
|
|
1728
1895
|
}
|
|
1729
1896
|
batchedBody = concatenated;
|
|
1730
1897
|
}
|
|
1731
|
-
const
|
|
1732
|
-
|
|
1733
|
-
|
|
1734
|
-
|
|
1735
|
-
|
|
1898
|
+
const url = this.#stream.url;
|
|
1899
|
+
const headers = {
|
|
1900
|
+
"content-type": contentType,
|
|
1901
|
+
[PRODUCER_ID_HEADER]: this.#producerId,
|
|
1902
|
+
[PRODUCER_EPOCH_HEADER]: epoch.toString(),
|
|
1903
|
+
[PRODUCER_SEQ_HEADER]: seq.toString()
|
|
1904
|
+
};
|
|
1905
|
+
const response = await this.#fetchClient(url, {
|
|
1736
1906
|
method: `POST`,
|
|
1737
|
-
headers
|
|
1907
|
+
headers,
|
|
1738
1908
|
body: batchedBody,
|
|
1739
|
-
signal:
|
|
1909
|
+
signal: this.#signal
|
|
1740
1910
|
});
|
|
1741
|
-
if (
|
|
1742
|
-
|
|
1743
|
-
|
|
1744
|
-
|
|
1745
|
-
|
|
1746
|
-
|
|
1747
|
-
|
|
1748
|
-
|
|
1749
|
-
|
|
1750
|
-
|
|
1751
|
-
|
|
1752
|
-
|
|
1753
|
-
|
|
1754
|
-
|
|
1755
|
-
|
|
1756
|
-
|
|
1757
|
-
|
|
1758
|
-
|
|
1759
|
-
|
|
1760
|
-
* });
|
|
1761
|
-
* await stream.appendStream(readable);
|
|
1762
|
-
*
|
|
1763
|
-
* // Pipe from an async generator
|
|
1764
|
-
* async function* generate() {
|
|
1765
|
-
* yield "line 1\n";
|
|
1766
|
-
* yield "line 2\n";
|
|
1767
|
-
* }
|
|
1768
|
-
* await stream.appendStream(generate());
|
|
1769
|
-
*
|
|
1770
|
-
* // Pipe from fetch response body
|
|
1771
|
-
* const response = await fetch("https://example.com/data");
|
|
1772
|
-
* await stream.appendStream(response.body!);
|
|
1773
|
-
* ```
|
|
1774
|
-
*/
|
|
1775
|
-
async appendStream(source, opts) {
|
|
1776
|
-
const { requestHeaders, fetchUrl } = await this.#buildRequest();
|
|
1777
|
-
const contentType = opts?.contentType ?? this.#options.contentType ?? this.contentType;
|
|
1778
|
-
if (contentType) requestHeaders[`content-type`] = contentType;
|
|
1779
|
-
if (opts?.seq) requestHeaders[STREAM_SEQ_HEADER] = opts.seq;
|
|
1780
|
-
const body = toReadableStream(source);
|
|
1781
|
-
const response = await this.#fetchClient(fetchUrl.toString(), {
|
|
1782
|
-
method: `POST`,
|
|
1783
|
-
headers: requestHeaders,
|
|
1784
|
-
body,
|
|
1785
|
-
duplex: `half`,
|
|
1786
|
-
signal: opts?.signal ?? this.#options.signal
|
|
1787
|
-
});
|
|
1788
|
-
if (!response.ok) await handleErrorResponse(response, this.url);
|
|
1789
|
-
}
|
|
1790
|
-
/**
|
|
1791
|
-
* Create a writable stream that pipes data to this durable stream.
|
|
1792
|
-
*
|
|
1793
|
-
* Returns a WritableStream that can be used with `pipeTo()` or
|
|
1794
|
-
* `pipeThrough()` from any ReadableStream source.
|
|
1795
|
-
*
|
|
1796
|
-
* @example
|
|
1797
|
-
* ```typescript
|
|
1798
|
-
* // Pipe from fetch response
|
|
1799
|
-
* const response = await fetch("https://example.com/data");
|
|
1800
|
-
* await response.body!.pipeTo(stream.writable());
|
|
1801
|
-
*
|
|
1802
|
-
* // Pipe through a transform
|
|
1803
|
-
* const readable = someStream.pipeThrough(new TextEncoderStream());
|
|
1804
|
-
* await readable.pipeTo(stream.writable());
|
|
1805
|
-
* ```
|
|
1806
|
-
*/
|
|
1807
|
-
writable(opts) {
|
|
1808
|
-
const chunks = [];
|
|
1809
|
-
const stream$1 = this;
|
|
1810
|
-
return new WritableStream({
|
|
1811
|
-
write(chunk) {
|
|
1812
|
-
chunks.push(chunk);
|
|
1813
|
-
},
|
|
1814
|
-
async close() {
|
|
1815
|
-
if (chunks.length > 0) {
|
|
1816
|
-
const readable = new ReadableStream({ start(controller) {
|
|
1817
|
-
for (const chunk of chunks) controller.enqueue(chunk);
|
|
1818
|
-
controller.close();
|
|
1819
|
-
} });
|
|
1820
|
-
await stream$1.appendStream(readable, opts);
|
|
1821
|
-
}
|
|
1822
|
-
},
|
|
1823
|
-
abort(reason) {
|
|
1824
|
-
console.error(`WritableStream aborted:`, reason);
|
|
1911
|
+
if (response.status === 204) return {
|
|
1912
|
+
offset: ``,
|
|
1913
|
+
duplicate: true
|
|
1914
|
+
};
|
|
1915
|
+
if (response.status === 200) {
|
|
1916
|
+
const resultOffset = response.headers.get(STREAM_OFFSET_HEADER) ?? ``;
|
|
1917
|
+
return {
|
|
1918
|
+
offset: resultOffset,
|
|
1919
|
+
duplicate: false
|
|
1920
|
+
};
|
|
1921
|
+
}
|
|
1922
|
+
if (response.status === 403) {
|
|
1923
|
+
const currentEpochStr = response.headers.get(PRODUCER_EPOCH_HEADER);
|
|
1924
|
+
const currentEpoch = currentEpochStr ? parseInt(currentEpochStr, 10) : epoch;
|
|
1925
|
+
if (this.#autoClaim) {
|
|
1926
|
+
const newEpoch = currentEpoch + 1;
|
|
1927
|
+
this.#epoch = newEpoch;
|
|
1928
|
+
this.#nextSeq = 1;
|
|
1929
|
+
return this.#doSendBatch(batch, 0, newEpoch);
|
|
1825
1930
|
}
|
|
1826
|
-
|
|
1827
|
-
}
|
|
1828
|
-
/**
|
|
1829
|
-
* Start a fetch-like streaming session against this handle's URL/headers/params.
|
|
1830
|
-
* The first request is made inside this method; it resolves when we have
|
|
1831
|
-
* a valid first response, or rejects on errors.
|
|
1832
|
-
*
|
|
1833
|
-
* Call-specific headers and params are merged with handle-level ones,
|
|
1834
|
-
* with call-specific values taking precedence.
|
|
1835
|
-
*
|
|
1836
|
-
* @example
|
|
1837
|
-
* ```typescript
|
|
1838
|
-
* const handle = await DurableStream.connect({
|
|
1839
|
-
* url,
|
|
1840
|
-
* headers: { Authorization: `Bearer ${token}` }
|
|
1841
|
-
* });
|
|
1842
|
-
* const res = await handle.stream<{ message: string }>();
|
|
1843
|
-
*
|
|
1844
|
-
* // Accumulate all JSON items
|
|
1845
|
-
* const items = await res.json();
|
|
1846
|
-
*
|
|
1847
|
-
* // Or stream live with ReadableStream
|
|
1848
|
-
* const reader = res.jsonStream().getReader();
|
|
1849
|
-
* let result = await reader.read();
|
|
1850
|
-
* while (!result.done) {
|
|
1851
|
-
* console.log(result.value);
|
|
1852
|
-
* result = await reader.read();
|
|
1853
|
-
* }
|
|
1854
|
-
*
|
|
1855
|
-
* // Or use subscriber for backpressure-aware consumption
|
|
1856
|
-
* res.subscribeJson(async (batch) => {
|
|
1857
|
-
* for (const item of batch.items) {
|
|
1858
|
-
* console.log(item);
|
|
1859
|
-
* }
|
|
1860
|
-
* });
|
|
1861
|
-
* ```
|
|
1862
|
-
*/
|
|
1863
|
-
async stream(options) {
|
|
1864
|
-
if (options?.live === `sse` && this.contentType) {
|
|
1865
|
-
const isSSECompatible = SSE_COMPATIBLE_CONTENT_TYPES.some((prefix) => this.contentType.startsWith(prefix));
|
|
1866
|
-
if (!isSSECompatible) throw new DurableStreamError(`SSE is not supported for content-type: ${this.contentType}`, `SSE_NOT_SUPPORTED`, 400);
|
|
1931
|
+
throw new StaleEpochError(currentEpoch);
|
|
1867
1932
|
}
|
|
1868
|
-
|
|
1869
|
-
|
|
1870
|
-
|
|
1871
|
-
|
|
1872
|
-
|
|
1873
|
-
|
|
1874
|
-
|
|
1875
|
-
|
|
1876
|
-
|
|
1877
|
-
|
|
1878
|
-
|
|
1879
|
-
|
|
1880
|
-
|
|
1881
|
-
|
|
1882
|
-
|
|
1883
|
-
|
|
1884
|
-
|
|
1885
|
-
|
|
1886
|
-
|
|
1887
|
-
warnOnHttp: options?.warnOnHttp ?? this.#options.warnOnHttp
|
|
1888
|
-
});
|
|
1933
|
+
if (response.status === 409) {
|
|
1934
|
+
const expectedSeqStr = response.headers.get(PRODUCER_EXPECTED_SEQ_HEADER);
|
|
1935
|
+
const expectedSeq = expectedSeqStr ? parseInt(expectedSeqStr, 10) : 0;
|
|
1936
|
+
if (expectedSeq < seq) {
|
|
1937
|
+
const waitPromises = [];
|
|
1938
|
+
for (let s = expectedSeq; s < seq; s++) waitPromises.push(this.#waitForSeq(epoch, s));
|
|
1939
|
+
await Promise.all(waitPromises);
|
|
1940
|
+
return this.#doSendBatch(batch, seq, epoch);
|
|
1941
|
+
}
|
|
1942
|
+
const receivedSeqStr = response.headers.get(PRODUCER_RECEIVED_SEQ_HEADER);
|
|
1943
|
+
const receivedSeq = receivedSeqStr ? parseInt(receivedSeqStr, 10) : seq;
|
|
1944
|
+
throw new SequenceGapError(expectedSeq, receivedSeq);
|
|
1945
|
+
}
|
|
1946
|
+
if (response.status === 400) {
|
|
1947
|
+
const error$1 = await DurableStreamError.fromResponse(response, url);
|
|
1948
|
+
throw error$1;
|
|
1949
|
+
}
|
|
1950
|
+
const error = await FetchError.fromResponse(response, url);
|
|
1951
|
+
throw error;
|
|
1889
1952
|
}
|
|
1890
1953
|
/**
|
|
1891
|
-
*
|
|
1954
|
+
* Clear pending batch and report error.
|
|
1892
1955
|
*/
|
|
1893
|
-
|
|
1894
|
-
|
|
1895
|
-
|
|
1896
|
-
|
|
1897
|
-
|
|
1898
|
-
|
|
1899
|
-
|
|
1900
|
-
|
|
1901
|
-
};
|
|
1956
|
+
#rejectPendingBatch(error) {
|
|
1957
|
+
if (this.#onError && this.#pendingBatch.length > 0) this.#onError(error);
|
|
1958
|
+
this.#pendingBatch = [];
|
|
1959
|
+
this.#batchBytes = 0;
|
|
1960
|
+
if (this.#lingerTimeout) {
|
|
1961
|
+
clearTimeout(this.#lingerTimeout);
|
|
1962
|
+
this.#lingerTimeout = null;
|
|
1963
|
+
}
|
|
1902
1964
|
}
|
|
1903
1965
|
};
|
|
1904
|
-
/**
|
|
1905
|
-
* Encode a body value to the appropriate format.
|
|
1906
|
-
* Strings are encoded as UTF-8.
|
|
1907
|
-
* Objects are JSON-serialized.
|
|
1908
|
-
*/
|
|
1909
|
-
function encodeBody(body) {
|
|
1910
|
-
if (body === void 0) return void 0;
|
|
1911
|
-
if (typeof body === `string`) return new TextEncoder().encode(body);
|
|
1912
|
-
if (body instanceof Uint8Array) return body;
|
|
1913
|
-
if (body instanceof Blob || body instanceof FormData || body instanceof ReadableStream || body instanceof ArrayBuffer || ArrayBuffer.isView(body)) return body;
|
|
1914
|
-
return new TextEncoder().encode(JSON.stringify(body));
|
|
1915
|
-
}
|
|
1916
|
-
/**
|
|
1917
|
-
* Convert an async iterable to a ReadableStream.
|
|
1918
|
-
*/
|
|
1919
|
-
function toReadableStream(source) {
|
|
1920
|
-
if (source instanceof ReadableStream) return source.pipeThrough(new TransformStream({ transform(chunk, controller) {
|
|
1921
|
-
if (typeof chunk === `string`) controller.enqueue(new TextEncoder().encode(chunk));
|
|
1922
|
-
else controller.enqueue(chunk);
|
|
1923
|
-
} }));
|
|
1924
|
-
const encoder = new TextEncoder();
|
|
1925
|
-
const iterator = source[Symbol.asyncIterator]();
|
|
1926
|
-
return new ReadableStream({
|
|
1927
|
-
async pull(controller) {
|
|
1928
|
-
try {
|
|
1929
|
-
const { done, value } = await iterator.next();
|
|
1930
|
-
if (done) controller.close();
|
|
1931
|
-
else if (typeof value === `string`) controller.enqueue(encoder.encode(value));
|
|
1932
|
-
else controller.enqueue(value);
|
|
1933
|
-
} catch (e) {
|
|
1934
|
-
controller.error(e);
|
|
1935
|
-
}
|
|
1936
|
-
},
|
|
1937
|
-
cancel() {
|
|
1938
|
-
iterator.return?.();
|
|
1939
|
-
}
|
|
1940
|
-
});
|
|
1941
|
-
}
|
|
1942
|
-
/**
|
|
1943
|
-
* Validate stream options.
|
|
1944
|
-
*/
|
|
1945
|
-
function validateOptions(options) {
|
|
1946
|
-
if (!options.url) throw new MissingStreamUrlError();
|
|
1947
|
-
if (options.signal && !(options.signal instanceof AbortSignal)) throw new InvalidSignalError();
|
|
1948
|
-
warnIfUsingHttpInBrowser(options.url, options.warnOnHttp);
|
|
1949
|
-
}
|
|
1950
1966
|
|
|
1951
1967
|
//#endregion
|
|
1952
|
-
//#region src/
|
|
1953
|
-
/**
|
|
1954
|
-
* Error thrown when a producer's epoch is stale (zombie fencing).
|
|
1955
|
-
*/
|
|
1956
|
-
var StaleEpochError = class extends Error {
|
|
1957
|
-
/**
|
|
1958
|
-
* The current epoch on the server.
|
|
1959
|
-
*/
|
|
1960
|
-
currentEpoch;
|
|
1961
|
-
constructor(currentEpoch) {
|
|
1962
|
-
super(`Producer epoch is stale. Current server epoch: ${currentEpoch}. Call restart() or create a new producer with a higher epoch.`);
|
|
1963
|
-
this.name = `StaleEpochError`;
|
|
1964
|
-
this.currentEpoch = currentEpoch;
|
|
1965
|
-
}
|
|
1966
|
-
};
|
|
1967
|
-
/**
|
|
1968
|
-
* Error thrown when an unrecoverable sequence gap is detected.
|
|
1969
|
-
*
|
|
1970
|
-
* With maxInFlight > 1, HTTP requests can arrive out of order at the server,
|
|
1971
|
-
* causing temporary 409 responses. The client automatically handles these
|
|
1972
|
-
* by waiting for earlier sequences to complete, then retrying.
|
|
1973
|
-
*
|
|
1974
|
-
* This error is only thrown when the gap cannot be resolved (e.g., the
|
|
1975
|
-
* expected sequence is >= our sequence, indicating a true protocol violation).
|
|
1976
|
-
*/
|
|
1977
|
-
var SequenceGapError = class extends Error {
|
|
1978
|
-
expectedSeq;
|
|
1979
|
-
receivedSeq;
|
|
1980
|
-
constructor(expectedSeq, receivedSeq) {
|
|
1981
|
-
super(`Producer sequence gap: expected ${expectedSeq}, received ${receivedSeq}`);
|
|
1982
|
-
this.name = `SequenceGapError`;
|
|
1983
|
-
this.expectedSeq = expectedSeq;
|
|
1984
|
-
this.receivedSeq = receivedSeq;
|
|
1985
|
-
}
|
|
1986
|
-
};
|
|
1968
|
+
//#region src/stream.ts
|
|
1987
1969
|
/**
|
|
1988
1970
|
* Normalize content-type by extracting the media type (before any semicolon).
|
|
1971
|
+
* Handles cases like "application/json; charset=utf-8".
|
|
1989
1972
|
*/
|
|
1990
1973
|
function normalizeContentType(contentType) {
|
|
1991
1974
|
if (!contentType) return ``;
|
|
1992
1975
|
return contentType.split(`;`)[0].trim().toLowerCase();
|
|
1993
1976
|
}
|
|
1994
1977
|
/**
|
|
1995
|
-
*
|
|
1978
|
+
* Check if a value is a Promise or Promise-like (thenable).
|
|
1979
|
+
*/
|
|
1980
|
+
function isPromiseLike(value) {
|
|
1981
|
+
return value != null && typeof value.then === `function`;
|
|
1982
|
+
}
|
|
1983
|
+
/**
|
|
1984
|
+
* A handle to a remote durable stream for read/write operations.
|
|
1996
1985
|
*
|
|
1997
|
-
*
|
|
1998
|
-
*
|
|
1999
|
-
*
|
|
2000
|
-
* - Batching: multiple appends batched into single HTTP request
|
|
2001
|
-
* - Pipelining: up to maxInFlight concurrent batches
|
|
2002
|
-
* - Zombie fencing: stale producers rejected via epoch validation
|
|
1986
|
+
* This is a lightweight, reusable handle - not a persistent connection.
|
|
1987
|
+
* It does not automatically start reading or listening.
|
|
1988
|
+
* Create sessions as needed via stream().
|
|
2003
1989
|
*
|
|
2004
1990
|
* @example
|
|
2005
1991
|
* ```typescript
|
|
2006
|
-
*
|
|
2007
|
-
* const
|
|
2008
|
-
*
|
|
2009
|
-
*
|
|
1992
|
+
* // Create a new stream
|
|
1993
|
+
* const stream = await DurableStream.create({
|
|
1994
|
+
* url: "https://streams.example.com/my-stream",
|
|
1995
|
+
* headers: { Authorization: "Bearer my-token" },
|
|
1996
|
+
* contentType: "application/json"
|
|
2010
1997
|
* });
|
|
2011
1998
|
*
|
|
2012
|
-
* //
|
|
2013
|
-
*
|
|
2014
|
-
* producer.append("message 2");
|
|
1999
|
+
* // Write data
|
|
2000
|
+
* await stream.append(JSON.stringify({ message: "hello" }));
|
|
2015
2001
|
*
|
|
2016
|
-
* //
|
|
2017
|
-
* await
|
|
2018
|
-
*
|
|
2002
|
+
* // Read with the new API
|
|
2003
|
+
* const res = await stream.stream<{ message: string }>();
|
|
2004
|
+
* res.subscribeJson(async (batch) => {
|
|
2005
|
+
* for (const item of batch.items) {
|
|
2006
|
+
* console.log(item.message);
|
|
2007
|
+
* }
|
|
2008
|
+
* });
|
|
2019
2009
|
* ```
|
|
2020
2010
|
*/
|
|
2021
|
-
var
|
|
2022
|
-
|
|
2023
|
-
|
|
2024
|
-
|
|
2025
|
-
|
|
2026
|
-
|
|
2027
|
-
|
|
2028
|
-
|
|
2011
|
+
var DurableStream = class DurableStream {
|
|
2012
|
+
/**
|
|
2013
|
+
* The URL of the durable stream.
|
|
2014
|
+
*/
|
|
2015
|
+
url;
|
|
2016
|
+
/**
|
|
2017
|
+
* The content type of the stream (populated after connect/head/read).
|
|
2018
|
+
*/
|
|
2019
|
+
contentType;
|
|
2020
|
+
#options;
|
|
2029
2021
|
#fetchClient;
|
|
2030
|
-
#signal;
|
|
2031
2022
|
#onError;
|
|
2032
|
-
#
|
|
2033
|
-
#batchBytes = 0;
|
|
2034
|
-
#lingerTimeout = null;
|
|
2023
|
+
#batchingEnabled;
|
|
2035
2024
|
#queue;
|
|
2036
|
-
#
|
|
2037
|
-
#closed = false;
|
|
2038
|
-
#epochClaimed;
|
|
2039
|
-
#seqState = new Map();
|
|
2025
|
+
#buffer = [];
|
|
2040
2026
|
/**
|
|
2041
|
-
* Create
|
|
2042
|
-
*
|
|
2043
|
-
* @param stream - The DurableStream to write to
|
|
2044
|
-
* @param producerId - Stable identifier for this producer (e.g., "order-service-1")
|
|
2045
|
-
* @param opts - Producer options
|
|
2027
|
+
* Create a cold handle to a stream.
|
|
2028
|
+
* No network IO is performed by the constructor.
|
|
2046
2029
|
*/
|
|
2047
|
-
constructor(
|
|
2048
|
-
|
|
2049
|
-
|
|
2050
|
-
this
|
|
2051
|
-
this.#
|
|
2052
|
-
|
|
2053
|
-
|
|
2054
|
-
|
|
2055
|
-
this.#onError = opts
|
|
2056
|
-
this
|
|
2057
|
-
this.#
|
|
2058
|
-
this.#
|
|
2059
|
-
|
|
2060
|
-
|
|
2061
|
-
|
|
2062
|
-
|
|
2030
|
+
constructor(opts) {
|
|
2031
|
+
validateOptions(opts);
|
|
2032
|
+
const urlStr = opts.url instanceof URL ? opts.url.toString() : opts.url;
|
|
2033
|
+
this.url = urlStr;
|
|
2034
|
+
this.#options = {
|
|
2035
|
+
...opts,
|
|
2036
|
+
url: urlStr
|
|
2037
|
+
};
|
|
2038
|
+
this.#onError = opts.onError;
|
|
2039
|
+
if (opts.contentType) this.contentType = opts.contentType;
|
|
2040
|
+
this.#batchingEnabled = opts.batching !== false;
|
|
2041
|
+
if (this.#batchingEnabled) this.#queue = fastq.default.promise(this.#batchWorker.bind(this), 1);
|
|
2042
|
+
const baseFetchClient = opts.fetch ?? ((...args) => fetch(...args));
|
|
2043
|
+
const backOffOpts = { ...opts.backoffOptions ?? BackoffDefaults };
|
|
2044
|
+
const fetchWithBackoffClient = createFetchWithBackoff(baseFetchClient, backOffOpts);
|
|
2045
|
+
this.#fetchClient = createFetchWithConsumedBody(fetchWithBackoffClient);
|
|
2063
2046
|
}
|
|
2064
2047
|
/**
|
|
2065
|
-
*
|
|
2066
|
-
*
|
|
2067
|
-
* This is fire-and-forget: returns immediately after adding to the batch.
|
|
2068
|
-
* The message is batched and sent when:
|
|
2069
|
-
* - maxBatchBytes is reached
|
|
2070
|
-
* - lingerMs elapses
|
|
2071
|
-
* - flush() is called
|
|
2072
|
-
*
|
|
2073
|
-
* Errors are reported via onError callback if configured. Use flush() to
|
|
2074
|
-
* wait for all pending messages to be sent.
|
|
2075
|
-
*
|
|
2076
|
-
* For JSON streams, pass native objects (which will be serialized internally).
|
|
2077
|
-
* For byte streams, pass string or Uint8Array.
|
|
2078
|
-
*
|
|
2079
|
-
* @param body - Data to append (object for JSON streams, string or Uint8Array for byte streams)
|
|
2048
|
+
* Create a new stream (create-only PUT) and return a handle.
|
|
2049
|
+
* Fails with DurableStreamError(code="CONFLICT_EXISTS") if it already exists.
|
|
2080
2050
|
*/
|
|
2081
|
-
|
|
2082
|
-
|
|
2083
|
-
|
|
2084
|
-
|
|
2085
|
-
|
|
2086
|
-
|
|
2087
|
-
|
|
2088
|
-
bytes = new TextEncoder().encode(json);
|
|
2089
|
-
data = body;
|
|
2090
|
-
} else {
|
|
2091
|
-
if (typeof body === `string`) bytes = new TextEncoder().encode(body);
|
|
2092
|
-
else if (body instanceof Uint8Array) bytes = body;
|
|
2093
|
-
else throw new DurableStreamError(`Non-JSON streams require string or Uint8Array`, `BAD_REQUEST`, 400, void 0);
|
|
2094
|
-
data = bytes;
|
|
2095
|
-
}
|
|
2096
|
-
this.#pendingBatch.push({
|
|
2097
|
-
data,
|
|
2098
|
-
body: bytes
|
|
2051
|
+
static async create(opts) {
|
|
2052
|
+
const stream$1 = new DurableStream(opts);
|
|
2053
|
+
await stream$1.create({
|
|
2054
|
+
contentType: opts.contentType,
|
|
2055
|
+
ttlSeconds: opts.ttlSeconds,
|
|
2056
|
+
expiresAt: opts.expiresAt,
|
|
2057
|
+
body: opts.body
|
|
2099
2058
|
});
|
|
2100
|
-
|
|
2101
|
-
if (this.#batchBytes >= this.#maxBatchBytes) this.#enqueuePendingBatch();
|
|
2102
|
-
else if (!this.#lingerTimeout) this.#lingerTimeout = setTimeout(() => {
|
|
2103
|
-
this.#lingerTimeout = null;
|
|
2104
|
-
if (this.#pendingBatch.length > 0) this.#enqueuePendingBatch();
|
|
2105
|
-
}, this.#lingerMs);
|
|
2059
|
+
return stream$1;
|
|
2106
2060
|
}
|
|
2107
2061
|
/**
|
|
2108
|
-
*
|
|
2062
|
+
* Validate that a stream exists and fetch metadata via HEAD.
|
|
2063
|
+
* Returns a handle with contentType populated (if sent by server).
|
|
2109
2064
|
*
|
|
2110
|
-
*
|
|
2065
|
+
* **Important**: This only performs a HEAD request for validation - it does
|
|
2066
|
+
* NOT open a session or start reading data. To read from the stream, call
|
|
2067
|
+
* `stream()` on the returned handle.
|
|
2068
|
+
*
|
|
2069
|
+
* @example
|
|
2070
|
+
* ```typescript
|
|
2071
|
+
* // Validate stream exists before reading
|
|
2072
|
+
* const handle = await DurableStream.connect({ url })
|
|
2073
|
+
* const res = await handle.stream() // Now actually read
|
|
2074
|
+
* ```
|
|
2111
2075
|
*/
|
|
2112
|
-
async
|
|
2113
|
-
|
|
2114
|
-
|
|
2115
|
-
|
|
2116
|
-
|
|
2117
|
-
|
|
2118
|
-
|
|
2076
|
+
static async connect(opts) {
|
|
2077
|
+
const stream$1 = new DurableStream(opts);
|
|
2078
|
+
await stream$1.head();
|
|
2079
|
+
return stream$1;
|
|
2080
|
+
}
|
|
2081
|
+
/**
|
|
2082
|
+
* HEAD metadata for a stream without creating a handle.
|
|
2083
|
+
*/
|
|
2084
|
+
static async head(opts) {
|
|
2085
|
+
const stream$1 = new DurableStream(opts);
|
|
2086
|
+
return stream$1.head();
|
|
2119
2087
|
}
|
|
2120
2088
|
/**
|
|
2121
|
-
*
|
|
2122
|
-
*
|
|
2123
|
-
* After calling close(), further append() calls will throw.
|
|
2089
|
+
* Delete a stream without creating a handle.
|
|
2124
2090
|
*/
|
|
2125
|
-
async
|
|
2126
|
-
|
|
2127
|
-
|
|
2128
|
-
try {
|
|
2129
|
-
await this.flush();
|
|
2130
|
-
} catch {}
|
|
2091
|
+
static async delete(opts) {
|
|
2092
|
+
const stream$1 = new DurableStream(opts);
|
|
2093
|
+
return stream$1.delete();
|
|
2131
2094
|
}
|
|
2132
2095
|
/**
|
|
2133
|
-
*
|
|
2134
|
-
*
|
|
2135
|
-
* Call this when restarting the producer to establish a new session.
|
|
2136
|
-
* Flushes any pending messages first.
|
|
2096
|
+
* HEAD metadata for this stream.
|
|
2137
2097
|
*/
|
|
2138
|
-
async
|
|
2139
|
-
await this
|
|
2140
|
-
this.#
|
|
2141
|
-
|
|
2098
|
+
async head(opts) {
|
|
2099
|
+
const { requestHeaders, fetchUrl } = await this.#buildRequest();
|
|
2100
|
+
const response = await this.#fetchClient(fetchUrl.toString(), {
|
|
2101
|
+
method: `HEAD`,
|
|
2102
|
+
headers: requestHeaders,
|
|
2103
|
+
signal: opts?.signal ?? this.#options.signal
|
|
2104
|
+
});
|
|
2105
|
+
if (!response.ok) await handleErrorResponse(response, this.url);
|
|
2106
|
+
const contentType = response.headers.get(`content-type`) ?? void 0;
|
|
2107
|
+
const offset = response.headers.get(STREAM_OFFSET_HEADER) ?? void 0;
|
|
2108
|
+
const etag = response.headers.get(`etag`) ?? void 0;
|
|
2109
|
+
const cacheControl = response.headers.get(`cache-control`) ?? void 0;
|
|
2110
|
+
if (contentType) this.contentType = contentType;
|
|
2111
|
+
return {
|
|
2112
|
+
exists: true,
|
|
2113
|
+
contentType,
|
|
2114
|
+
offset,
|
|
2115
|
+
etag,
|
|
2116
|
+
cacheControl
|
|
2117
|
+
};
|
|
2142
2118
|
}
|
|
2143
2119
|
/**
|
|
2144
|
-
*
|
|
2120
|
+
* Create this stream (create-only PUT) using the URL/auth from the handle.
|
|
2145
2121
|
*/
|
|
2146
|
-
|
|
2147
|
-
|
|
2122
|
+
async create(opts) {
|
|
2123
|
+
const { requestHeaders, fetchUrl } = await this.#buildRequest();
|
|
2124
|
+
const contentType = opts?.contentType ?? this.#options.contentType;
|
|
2125
|
+
if (contentType) requestHeaders[`content-type`] = contentType;
|
|
2126
|
+
if (opts?.ttlSeconds !== void 0) requestHeaders[STREAM_TTL_HEADER] = String(opts.ttlSeconds);
|
|
2127
|
+
if (opts?.expiresAt) requestHeaders[STREAM_EXPIRES_AT_HEADER] = opts.expiresAt;
|
|
2128
|
+
const body = encodeBody(opts?.body);
|
|
2129
|
+
const response = await this.#fetchClient(fetchUrl.toString(), {
|
|
2130
|
+
method: `PUT`,
|
|
2131
|
+
headers: requestHeaders,
|
|
2132
|
+
body,
|
|
2133
|
+
signal: this.#options.signal
|
|
2134
|
+
});
|
|
2135
|
+
if (!response.ok) await handleErrorResponse(response, this.url, { operation: `create` });
|
|
2136
|
+
const responseContentType = response.headers.get(`content-type`);
|
|
2137
|
+
if (responseContentType) this.contentType = responseContentType;
|
|
2138
|
+
else if (contentType) this.contentType = contentType;
|
|
2139
|
+
return this;
|
|
2148
2140
|
}
|
|
2149
2141
|
/**
|
|
2150
|
-
*
|
|
2142
|
+
* Delete this stream.
|
|
2151
2143
|
*/
|
|
2152
|
-
|
|
2153
|
-
|
|
2144
|
+
async delete(opts) {
|
|
2145
|
+
const { requestHeaders, fetchUrl } = await this.#buildRequest();
|
|
2146
|
+
const response = await this.#fetchClient(fetchUrl.toString(), {
|
|
2147
|
+
method: `DELETE`,
|
|
2148
|
+
headers: requestHeaders,
|
|
2149
|
+
signal: opts?.signal ?? this.#options.signal
|
|
2150
|
+
});
|
|
2151
|
+
if (!response.ok) await handleErrorResponse(response, this.url);
|
|
2154
2152
|
}
|
|
2155
2153
|
/**
|
|
2156
|
-
*
|
|
2154
|
+
* Append a single payload to the stream.
|
|
2155
|
+
*
|
|
2156
|
+
* When batching is enabled (default), multiple append() calls made while
|
|
2157
|
+
* a POST is in-flight will be batched together into a single request.
|
|
2158
|
+
* This significantly improves throughput for high-frequency writes.
|
|
2159
|
+
*
|
|
2160
|
+
* - `body` must be string or Uint8Array.
|
|
2161
|
+
* - For JSON streams, pass pre-serialized JSON strings.
|
|
2162
|
+
* - `body` may also be a Promise that resolves to string or Uint8Array.
|
|
2163
|
+
* - Strings are encoded as UTF-8.
|
|
2164
|
+
* - `seq` (if provided) is sent as stream-seq (writer coordination).
|
|
2165
|
+
*
|
|
2166
|
+
* @example
|
|
2167
|
+
* ```typescript
|
|
2168
|
+
* // JSON stream - pass pre-serialized JSON
|
|
2169
|
+
* await stream.append(JSON.stringify({ message: "hello" }));
|
|
2170
|
+
*
|
|
2171
|
+
* // Byte stream
|
|
2172
|
+
* await stream.append("raw text data");
|
|
2173
|
+
* await stream.append(new Uint8Array([1, 2, 3]));
|
|
2174
|
+
*
|
|
2175
|
+
* // Promise value - awaited before buffering
|
|
2176
|
+
* await stream.append(fetchData());
|
|
2177
|
+
* ```
|
|
2157
2178
|
*/
|
|
2158
|
-
|
|
2159
|
-
|
|
2179
|
+
async append(body, opts) {
|
|
2180
|
+
const resolvedBody = isPromiseLike(body) ? await body : body;
|
|
2181
|
+
if (this.#batchingEnabled && this.#queue) return this.#appendWithBatching(resolvedBody, opts);
|
|
2182
|
+
return this.#appendDirect(resolvedBody, opts);
|
|
2160
2183
|
}
|
|
2161
2184
|
/**
|
|
2162
|
-
*
|
|
2185
|
+
* Direct append without batching (used when batching is disabled).
|
|
2163
2186
|
*/
|
|
2164
|
-
|
|
2165
|
-
|
|
2187
|
+
async #appendDirect(body, opts) {
|
|
2188
|
+
const { requestHeaders, fetchUrl } = await this.#buildRequest();
|
|
2189
|
+
const contentType = opts?.contentType ?? this.#options.contentType ?? this.contentType;
|
|
2190
|
+
if (contentType) requestHeaders[`content-type`] = contentType;
|
|
2191
|
+
if (opts?.seq) requestHeaders[STREAM_SEQ_HEADER] = opts.seq;
|
|
2192
|
+
const isJson = normalizeContentType(contentType) === `application/json`;
|
|
2193
|
+
const bodyStr = typeof body === `string` ? body : new TextDecoder().decode(body);
|
|
2194
|
+
const encodedBody = isJson ? `[${bodyStr}]` : bodyStr;
|
|
2195
|
+
const response = await this.#fetchClient(fetchUrl.toString(), {
|
|
2196
|
+
method: `POST`,
|
|
2197
|
+
headers: requestHeaders,
|
|
2198
|
+
body: encodedBody,
|
|
2199
|
+
signal: opts?.signal ?? this.#options.signal
|
|
2200
|
+
});
|
|
2201
|
+
if (!response.ok) await handleErrorResponse(response, this.url);
|
|
2166
2202
|
}
|
|
2167
2203
|
/**
|
|
2168
|
-
*
|
|
2204
|
+
* Append with batching - buffers messages and sends them in batches.
|
|
2169
2205
|
*/
|
|
2170
|
-
#
|
|
2171
|
-
|
|
2172
|
-
|
|
2173
|
-
|
|
2174
|
-
|
|
2175
|
-
|
|
2176
|
-
|
|
2177
|
-
|
|
2178
|
-
|
|
2179
|
-
|
|
2180
|
-
|
|
2181
|
-
|
|
2206
|
+
async #appendWithBatching(body, opts) {
|
|
2207
|
+
return new Promise((resolve, reject) => {
|
|
2208
|
+
this.#buffer.push({
|
|
2209
|
+
data: body,
|
|
2210
|
+
seq: opts?.seq,
|
|
2211
|
+
contentType: opts?.contentType,
|
|
2212
|
+
signal: opts?.signal,
|
|
2213
|
+
resolve,
|
|
2214
|
+
reject
|
|
2215
|
+
});
|
|
2216
|
+
if (this.#queue.idle()) {
|
|
2217
|
+
const batch = this.#buffer.splice(0);
|
|
2218
|
+
this.#queue.push(batch).catch((err) => {
|
|
2219
|
+
for (const msg of batch) msg.reject(err);
|
|
2220
|
+
});
|
|
2221
|
+
}
|
|
2182
2222
|
});
|
|
2183
|
-
else this.#queue.push({
|
|
2184
|
-
batch,
|
|
2185
|
-
seq
|
|
2186
|
-
}).catch(() => {});
|
|
2187
2223
|
}
|
|
2188
2224
|
/**
|
|
2189
|
-
* Batch worker - processes batches
|
|
2225
|
+
* Batch worker - processes batches of messages.
|
|
2190
2226
|
*/
|
|
2191
|
-
async #batchWorker(
|
|
2192
|
-
const { batch, seq } = task;
|
|
2193
|
-
const epoch = this.#epoch;
|
|
2227
|
+
async #batchWorker(batch) {
|
|
2194
2228
|
try {
|
|
2195
|
-
await this.#
|
|
2196
|
-
|
|
2197
|
-
this.#
|
|
2229
|
+
await this.#sendBatch(batch);
|
|
2230
|
+
for (const msg of batch) msg.resolve();
|
|
2231
|
+
if (this.#buffer.length > 0) {
|
|
2232
|
+
const nextBatch = this.#buffer.splice(0);
|
|
2233
|
+
this.#queue.push(nextBatch).catch((err) => {
|
|
2234
|
+
for (const msg of nextBatch) msg.reject(err);
|
|
2235
|
+
});
|
|
2236
|
+
}
|
|
2198
2237
|
} catch (error) {
|
|
2199
|
-
|
|
2200
|
-
|
|
2238
|
+
for (const msg of batch) msg.reject(error);
|
|
2239
|
+
for (const msg of this.#buffer) msg.reject(error);
|
|
2240
|
+
this.#buffer = [];
|
|
2201
2241
|
throw error;
|
|
2202
2242
|
}
|
|
2203
2243
|
}
|
|
2204
2244
|
/**
|
|
2205
|
-
*
|
|
2245
|
+
* Send a batch of messages as a single POST request.
|
|
2206
2246
|
*/
|
|
2207
|
-
#
|
|
2208
|
-
|
|
2209
|
-
|
|
2210
|
-
|
|
2211
|
-
|
|
2247
|
+
async #sendBatch(batch) {
|
|
2248
|
+
if (batch.length === 0) return;
|
|
2249
|
+
const { requestHeaders, fetchUrl } = await this.#buildRequest();
|
|
2250
|
+
const contentType = batch[0]?.contentType ?? this.#options.contentType ?? this.contentType;
|
|
2251
|
+
if (contentType) requestHeaders[`content-type`] = contentType;
|
|
2252
|
+
let highestSeq;
|
|
2253
|
+
for (let i = batch.length - 1; i >= 0; i--) if (batch[i].seq !== void 0) {
|
|
2254
|
+
highestSeq = batch[i].seq;
|
|
2255
|
+
break;
|
|
2212
2256
|
}
|
|
2213
|
-
|
|
2214
|
-
|
|
2215
|
-
|
|
2216
|
-
|
|
2217
|
-
|
|
2218
|
-
|
|
2219
|
-
} else
|
|
2220
|
-
|
|
2221
|
-
|
|
2222
|
-
|
|
2257
|
+
if (highestSeq) requestHeaders[STREAM_SEQ_HEADER] = highestSeq;
|
|
2258
|
+
const isJson = normalizeContentType(contentType) === `application/json`;
|
|
2259
|
+
let batchedBody;
|
|
2260
|
+
if (isJson) {
|
|
2261
|
+
const jsonStrings = batch.map((m) => typeof m.data === `string` ? m.data : new TextDecoder().decode(m.data));
|
|
2262
|
+
batchedBody = `[${jsonStrings.join(`,`)}]`;
|
|
2263
|
+
} else {
|
|
2264
|
+
const strings = batch.map((m) => typeof m.data === `string` ? m.data : new TextDecoder().decode(m.data));
|
|
2265
|
+
batchedBody = strings.join(``);
|
|
2266
|
+
}
|
|
2267
|
+
const signals = [];
|
|
2268
|
+
if (this.#options.signal) signals.push(this.#options.signal);
|
|
2269
|
+
for (const msg of batch) if (msg.signal) signals.push(msg.signal);
|
|
2270
|
+
const combinedSignal = signals.length > 0 ? AbortSignal.any(signals) : void 0;
|
|
2271
|
+
const response = await this.#fetchClient(fetchUrl.toString(), {
|
|
2272
|
+
method: `POST`,
|
|
2273
|
+
headers: requestHeaders,
|
|
2274
|
+
body: batchedBody,
|
|
2275
|
+
signal: combinedSignal
|
|
2276
|
+
});
|
|
2277
|
+
if (!response.ok) await handleErrorResponse(response, this.url);
|
|
2278
|
+
}
|
|
2279
|
+
/**
|
|
2280
|
+
* Append a streaming body to the stream.
|
|
2281
|
+
*
|
|
2282
|
+
* Supports piping from any ReadableStream or async iterable:
|
|
2283
|
+
* - `source` yields Uint8Array or string chunks.
|
|
2284
|
+
* - Strings are encoded as UTF-8; no delimiters are added.
|
|
2285
|
+
* - Internally uses chunked transfer or HTTP/2 streaming.
|
|
2286
|
+
*
|
|
2287
|
+
* @example
|
|
2288
|
+
* ```typescript
|
|
2289
|
+
* // Pipe from a ReadableStream
|
|
2290
|
+
* const readable = new ReadableStream({
|
|
2291
|
+
* start(controller) {
|
|
2292
|
+
* controller.enqueue("chunk 1");
|
|
2293
|
+
* controller.enqueue("chunk 2");
|
|
2294
|
+
* controller.close();
|
|
2295
|
+
* }
|
|
2296
|
+
* });
|
|
2297
|
+
* await stream.appendStream(readable);
|
|
2298
|
+
*
|
|
2299
|
+
* // Pipe from an async generator
|
|
2300
|
+
* async function* generate() {
|
|
2301
|
+
* yield "line 1\n";
|
|
2302
|
+
* yield "line 2\n";
|
|
2303
|
+
* }
|
|
2304
|
+
* await stream.appendStream(generate());
|
|
2305
|
+
*
|
|
2306
|
+
* // Pipe from fetch response body
|
|
2307
|
+
* const response = await fetch("https://example.com/data");
|
|
2308
|
+
* await stream.appendStream(response.body!);
|
|
2309
|
+
* ```
|
|
2310
|
+
*/
|
|
2311
|
+
async appendStream(source, opts) {
|
|
2312
|
+
const { requestHeaders, fetchUrl } = await this.#buildRequest();
|
|
2313
|
+
const contentType = opts?.contentType ?? this.#options.contentType ?? this.contentType;
|
|
2314
|
+
if (contentType) requestHeaders[`content-type`] = contentType;
|
|
2315
|
+
if (opts?.seq) requestHeaders[STREAM_SEQ_HEADER] = opts.seq;
|
|
2316
|
+
const body = toReadableStream(source);
|
|
2317
|
+
const response = await this.#fetchClient(fetchUrl.toString(), {
|
|
2318
|
+
method: `POST`,
|
|
2319
|
+
headers: requestHeaders,
|
|
2320
|
+
body,
|
|
2321
|
+
duplex: `half`,
|
|
2322
|
+
signal: opts?.signal ?? this.#options.signal
|
|
2223
2323
|
});
|
|
2224
|
-
|
|
2225
|
-
if (cleanupThreshold > 0) {
|
|
2226
|
-
for (const oldSeq of epochMap.keys()) if (oldSeq < cleanupThreshold) epochMap.delete(oldSeq);
|
|
2227
|
-
}
|
|
2324
|
+
if (!response.ok) await handleErrorResponse(response, this.url);
|
|
2228
2325
|
}
|
|
2229
2326
|
/**
|
|
2230
|
-
*
|
|
2231
|
-
*
|
|
2232
|
-
*
|
|
2327
|
+
* Create a writable stream that pipes data to this durable stream.
|
|
2328
|
+
*
|
|
2329
|
+
* Returns a WritableStream that can be used with `pipeTo()` or
|
|
2330
|
+
* `pipeThrough()` from any ReadableStream source.
|
|
2331
|
+
*
|
|
2332
|
+
* Uses IdempotentProducer internally for:
|
|
2333
|
+
* - Automatic batching (controlled by lingerMs, maxBatchBytes)
|
|
2334
|
+
* - Exactly-once delivery semantics
|
|
2335
|
+
* - Streaming writes (doesn't buffer entire content in memory)
|
|
2336
|
+
*
|
|
2337
|
+
* @example
|
|
2338
|
+
* ```typescript
|
|
2339
|
+
* // Pipe from fetch response
|
|
2340
|
+
* const response = await fetch("https://example.com/data");
|
|
2341
|
+
* await response.body!.pipeTo(stream.writable());
|
|
2342
|
+
*
|
|
2343
|
+
* // Pipe through a transform
|
|
2344
|
+
* const readable = someStream.pipeThrough(new TextEncoderStream());
|
|
2345
|
+
* await readable.pipeTo(stream.writable());
|
|
2346
|
+
*
|
|
2347
|
+
* // With custom producer options
|
|
2348
|
+
* await source.pipeTo(stream.writable({
|
|
2349
|
+
* producerId: "my-producer",
|
|
2350
|
+
* lingerMs: 10,
|
|
2351
|
+
* maxBatchBytes: 64 * 1024,
|
|
2352
|
+
* }));
|
|
2353
|
+
* ```
|
|
2233
2354
|
*/
|
|
2234
|
-
|
|
2235
|
-
|
|
2236
|
-
|
|
2237
|
-
|
|
2238
|
-
|
|
2239
|
-
|
|
2240
|
-
|
|
2241
|
-
|
|
2242
|
-
|
|
2243
|
-
|
|
2244
|
-
|
|
2245
|
-
|
|
2246
|
-
|
|
2247
|
-
|
|
2248
|
-
|
|
2249
|
-
|
|
2250
|
-
|
|
2251
|
-
|
|
2252
|
-
|
|
2253
|
-
|
|
2254
|
-
|
|
2355
|
+
writable(opts) {
|
|
2356
|
+
const producerId = opts?.producerId ?? `writable-${crypto.randomUUID().slice(0, 8)}`;
|
|
2357
|
+
let writeError = null;
|
|
2358
|
+
const producer = new IdempotentProducer(this, producerId, {
|
|
2359
|
+
autoClaim: true,
|
|
2360
|
+
lingerMs: opts?.lingerMs,
|
|
2361
|
+
maxBatchBytes: opts?.maxBatchBytes,
|
|
2362
|
+
onError: (error) => {
|
|
2363
|
+
if (!writeError) writeError = error;
|
|
2364
|
+
opts?.onError?.(error);
|
|
2365
|
+
},
|
|
2366
|
+
signal: opts?.signal ?? this.#options.signal
|
|
2367
|
+
});
|
|
2368
|
+
return new WritableStream({
|
|
2369
|
+
write(chunk) {
|
|
2370
|
+
producer.append(chunk);
|
|
2371
|
+
},
|
|
2372
|
+
async close() {
|
|
2373
|
+
await producer.flush();
|
|
2374
|
+
await producer.close();
|
|
2375
|
+
if (writeError) throw writeError;
|
|
2376
|
+
},
|
|
2377
|
+
abort(_reason) {
|
|
2378
|
+
producer.close().catch((err) => {
|
|
2379
|
+
opts?.onError?.(err);
|
|
2380
|
+
});
|
|
2381
|
+
}
|
|
2255
2382
|
});
|
|
2256
2383
|
}
|
|
2257
2384
|
/**
|
|
2258
|
-
*
|
|
2259
|
-
*
|
|
2260
|
-
*
|
|
2385
|
+
* Start a fetch-like streaming session against this handle's URL/headers/params.
|
|
2386
|
+
* The first request is made inside this method; it resolves when we have
|
|
2387
|
+
* a valid first response, or rejects on errors.
|
|
2388
|
+
*
|
|
2389
|
+
* Call-specific headers and params are merged with handle-level ones,
|
|
2390
|
+
* with call-specific values taking precedence.
|
|
2391
|
+
*
|
|
2392
|
+
* @example
|
|
2393
|
+
* ```typescript
|
|
2394
|
+
* const handle = await DurableStream.connect({
|
|
2395
|
+
* url,
|
|
2396
|
+
* headers: { Authorization: `Bearer ${token}` }
|
|
2397
|
+
* });
|
|
2398
|
+
* const res = await handle.stream<{ message: string }>();
|
|
2399
|
+
*
|
|
2400
|
+
* // Accumulate all JSON items
|
|
2401
|
+
* const items = await res.json();
|
|
2402
|
+
*
|
|
2403
|
+
* // Or stream live with ReadableStream
|
|
2404
|
+
* const reader = res.jsonStream().getReader();
|
|
2405
|
+
* let result = await reader.read();
|
|
2406
|
+
* while (!result.done) {
|
|
2407
|
+
* console.log(result.value);
|
|
2408
|
+
* result = await reader.read();
|
|
2409
|
+
* }
|
|
2410
|
+
*
|
|
2411
|
+
* // Or use subscriber for backpressure-aware consumption
|
|
2412
|
+
* res.subscribeJson(async (batch) => {
|
|
2413
|
+
* for (const item of batch.items) {
|
|
2414
|
+
* console.log(item);
|
|
2415
|
+
* }
|
|
2416
|
+
* });
|
|
2417
|
+
* ```
|
|
2261
2418
|
*/
|
|
2262
|
-
async
|
|
2263
|
-
|
|
2264
|
-
|
|
2265
|
-
|
|
2266
|
-
if (isJson) {
|
|
2267
|
-
const values = batch.map((e) => e.data);
|
|
2268
|
-
batchedBody = JSON.stringify(values);
|
|
2269
|
-
} else {
|
|
2270
|
-
const totalSize = batch.reduce((sum, e) => sum + e.body.length, 0);
|
|
2271
|
-
const concatenated = new Uint8Array(totalSize);
|
|
2272
|
-
let offset = 0;
|
|
2273
|
-
for (const entry of batch) {
|
|
2274
|
-
concatenated.set(entry.body, offset);
|
|
2275
|
-
offset += entry.body.length;
|
|
2276
|
-
}
|
|
2277
|
-
batchedBody = concatenated;
|
|
2419
|
+
async stream(options) {
|
|
2420
|
+
if (options?.live === `sse` && this.contentType) {
|
|
2421
|
+
const isSSECompatible = SSE_COMPATIBLE_CONTENT_TYPES.some((prefix) => this.contentType.startsWith(prefix));
|
|
2422
|
+
if (!isSSECompatible) throw new DurableStreamError(`SSE is not supported for content-type: ${this.contentType}`, `SSE_NOT_SUPPORTED`, 400);
|
|
2278
2423
|
}
|
|
2279
|
-
const
|
|
2280
|
-
|
|
2281
|
-
|
|
2282
|
-
[PRODUCER_ID_HEADER]: this.#producerId,
|
|
2283
|
-
[PRODUCER_EPOCH_HEADER]: epoch.toString(),
|
|
2284
|
-
[PRODUCER_SEQ_HEADER]: seq.toString()
|
|
2424
|
+
const mergedHeaders = {
|
|
2425
|
+
...this.#options.headers,
|
|
2426
|
+
...options?.headers
|
|
2285
2427
|
};
|
|
2286
|
-
const
|
|
2287
|
-
|
|
2288
|
-
|
|
2289
|
-
body: batchedBody,
|
|
2290
|
-
signal: this.#signal
|
|
2291
|
-
});
|
|
2292
|
-
if (response.status === 204) return {
|
|
2293
|
-
offset: ``,
|
|
2294
|
-
duplicate: true
|
|
2428
|
+
const mergedParams = {
|
|
2429
|
+
...this.#options.params,
|
|
2430
|
+
...options?.params
|
|
2295
2431
|
};
|
|
2296
|
-
|
|
2297
|
-
|
|
2298
|
-
|
|
2299
|
-
|
|
2300
|
-
|
|
2301
|
-
|
|
2302
|
-
|
|
2303
|
-
|
|
2304
|
-
|
|
2305
|
-
|
|
2306
|
-
|
|
2307
|
-
|
|
2308
|
-
|
|
2309
|
-
this.#nextSeq = 1;
|
|
2310
|
-
return this.#doSendBatch(batch, 0, newEpoch);
|
|
2311
|
-
}
|
|
2312
|
-
throw new StaleEpochError(currentEpoch);
|
|
2313
|
-
}
|
|
2314
|
-
if (response.status === 409) {
|
|
2315
|
-
const expectedSeqStr = response.headers.get(PRODUCER_EXPECTED_SEQ_HEADER);
|
|
2316
|
-
const expectedSeq = expectedSeqStr ? parseInt(expectedSeqStr, 10) : 0;
|
|
2317
|
-
if (expectedSeq < seq) {
|
|
2318
|
-
const waitPromises = [];
|
|
2319
|
-
for (let s = expectedSeq; s < seq; s++) waitPromises.push(this.#waitForSeq(epoch, s));
|
|
2320
|
-
await Promise.all(waitPromises);
|
|
2321
|
-
return this.#doSendBatch(batch, seq, epoch);
|
|
2322
|
-
}
|
|
2323
|
-
const receivedSeqStr = response.headers.get(PRODUCER_RECEIVED_SEQ_HEADER);
|
|
2324
|
-
const receivedSeq = receivedSeqStr ? parseInt(receivedSeqStr, 10) : seq;
|
|
2325
|
-
throw new SequenceGapError(expectedSeq, receivedSeq);
|
|
2326
|
-
}
|
|
2327
|
-
if (response.status === 400) {
|
|
2328
|
-
const error$1 = await DurableStreamError.fromResponse(response, url);
|
|
2329
|
-
throw error$1;
|
|
2330
|
-
}
|
|
2331
|
-
const error = await FetchError.fromResponse(response, url);
|
|
2332
|
-
throw error;
|
|
2432
|
+
return stream({
|
|
2433
|
+
url: this.url,
|
|
2434
|
+
headers: mergedHeaders,
|
|
2435
|
+
params: mergedParams,
|
|
2436
|
+
signal: options?.signal ?? this.#options.signal,
|
|
2437
|
+
fetch: this.#options.fetch,
|
|
2438
|
+
backoffOptions: this.#options.backoffOptions,
|
|
2439
|
+
offset: options?.offset,
|
|
2440
|
+
live: options?.live,
|
|
2441
|
+
json: options?.json,
|
|
2442
|
+
onError: options?.onError ?? this.#onError,
|
|
2443
|
+
warnOnHttp: options?.warnOnHttp ?? this.#options.warnOnHttp
|
|
2444
|
+
});
|
|
2333
2445
|
}
|
|
2334
2446
|
/**
|
|
2335
|
-
*
|
|
2447
|
+
* Build request headers and URL.
|
|
2336
2448
|
*/
|
|
2337
|
-
#
|
|
2338
|
-
|
|
2339
|
-
|
|
2340
|
-
|
|
2341
|
-
|
|
2342
|
-
|
|
2343
|
-
|
|
2344
|
-
|
|
2449
|
+
async #buildRequest() {
|
|
2450
|
+
const requestHeaders = await resolveHeaders(this.#options.headers);
|
|
2451
|
+
const fetchUrl = new URL(this.url);
|
|
2452
|
+
const params = await resolveParams(this.#options.params);
|
|
2453
|
+
for (const [key, value] of Object.entries(params)) fetchUrl.searchParams.set(key, value);
|
|
2454
|
+
return {
|
|
2455
|
+
requestHeaders,
|
|
2456
|
+
fetchUrl
|
|
2457
|
+
};
|
|
2345
2458
|
}
|
|
2346
2459
|
};
|
|
2460
|
+
/**
|
|
2461
|
+
* Encode a body value to the appropriate format.
|
|
2462
|
+
* Strings are encoded as UTF-8.
|
|
2463
|
+
* Objects are JSON-serialized.
|
|
2464
|
+
*/
|
|
2465
|
+
/**
 * Normalize a user-supplied body into something fetch can send.
 * - undefined stays undefined (no body)
 * - strings are UTF-8 encoded
 * - Uint8Array and other fetch-native body types (Blob, FormData,
 *   ReadableStream, ArrayBuffer, typed-array views) pass through unchanged
 * - anything else is JSON-serialized, then UTF-8 encoded
 */
function encodeBody(body) {
	if (body === void 0) return void 0;
	if (typeof body === `string`) return new TextEncoder().encode(body);
	if (body instanceof Uint8Array) return body;
	const isNativeBody =
		body instanceof Blob ||
		body instanceof FormData ||
		body instanceof ReadableStream ||
		body instanceof ArrayBuffer ||
		ArrayBuffer.isView(body);
	if (isNativeBody) return body;
	return new TextEncoder().encode(JSON.stringify(body));
}
|
|
2472
|
+
/**
|
|
2473
|
+
* Convert an async iterable to a ReadableStream.
|
|
2474
|
+
*/
|
|
2475
|
+
/**
 * Adapt a ReadableStream or async iterable into a byte ReadableStream.
 *
 * String chunks are UTF-8 encoded; Uint8Array chunks pass through as-is.
 * Cancelling the returned stream closes the underlying async iterator via
 * its return() method when one exists.
 */
function toReadableStream(source) {
	if (source instanceof ReadableStream) {
		// Already a stream: just normalize string chunks to bytes.
		const normalize = new TransformStream({
			transform(chunk, controller) {
				controller.enqueue(typeof chunk === `string` ? new TextEncoder().encode(chunk) : chunk);
			}
		});
		return source.pipeThrough(normalize);
	}
	const encoder = new TextEncoder();
	const iterator = source[Symbol.asyncIterator]();
	return new ReadableStream({
		async pull(controller) {
			try {
				const step = await iterator.next();
				if (step.done) {
					controller.close();
					return;
				}
				const chunk = step.value;
				controller.enqueue(typeof chunk === `string` ? encoder.encode(chunk) : chunk);
			} catch (e) {
				controller.error(e);
			}
		},
		cancel() {
			iterator.return?.();
		}
	});
}
|
|
2498
|
+
/**
|
|
2499
|
+
* Validate stream options.
|
|
2500
|
+
*/
|
|
2501
|
+
/**
 * Validate stream handle options before use.
 * Throws MissingStreamUrlError when `url` is absent and InvalidSignalError
 * when `signal` is set but is not an AbortSignal; also warns (via
 * warnIfUsingHttpInBrowser) when a plain-HTTP URL is used in a browser.
 */
function validateOptions(options) {
	if (!options.url) throw new MissingStreamUrlError();
	const { signal } = options;
	if (signal && !(signal instanceof AbortSignal)) throw new InvalidSignalError();
	warnIfUsingHttpInBrowser(options.url, options.warnOnHttp);
}
|
|
2347
2506
|
|
|
2348
2507
|
//#endregion
|
|
2349
2508
|
exports.BackoffDefaults = BackoffDefaults
|