@durable-streams/client 0.1.5 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +11 -10
- package/dist/index.cjs +772 -718
- package/dist/index.d.cts +63 -25
- package/dist/index.d.ts +63 -25
- package/dist/index.js +772 -718
- package/package.json +2 -2
- package/src/idempotent-producer.ts +51 -38
- package/src/response.ts +69 -18
- package/src/sse.ts +17 -4
- package/src/stream-api.ts +17 -10
- package/src/stream.ts +77 -56
- package/src/types.ts +24 -12
package/dist/index.js
CHANGED
@@ -482,7 +482,10 @@ async function* parseSSEStream(stream$1, signal) {
 streamCursor: control.streamCursor,
 upToDate: control.upToDate
 };
-} catch {
+} catch (err) {
+const preview = dataStr.length > 100 ? dataStr.slice(0, 100) + `...` : dataStr;
+throw new DurableStreamError(`Failed to parse SSE control event: ${err instanceof Error ? err.message : String(err)}. Data: ${preview}`, `PARSE_ERROR`);
+}
 }
 currentEvent = { data: [] };
 } else if (line.startsWith(`event:`)) currentEvent.type = line.slice(6).trim();
@@ -507,7 +510,10 @@ async function* parseSSEStream(stream$1, signal) {
 streamCursor: control.streamCursor,
 upToDate: control.upToDate
 };
-} catch {
+} catch (err) {
+const preview = dataStr.length > 100 ? dataStr.slice(0, 100) + `...` : dataStr;
+throw new DurableStreamError(`Failed to parse SSE control event: ${err instanceof Error ? err.message : String(err)}. Data: ${preview}`, `PARSE_ERROR`);
+}
 }
 } finally {
 reader.releaseLock();
@@ -533,9 +539,9 @@ var StreamResponseImpl = class {
 #statusText;
 #ok;
 #isLoading;
-offset;
-cursor;
-upToDate;
+#offset;
+#cursor;
+#upToDate;
 #isJsonMode;
 #abortController;
 #fetchNext;
@@ -561,9 +567,9 @@ var StreamResponseImpl = class {
 this.contentType = config.contentType;
 this.live = config.live;
 this.startOffset = config.startOffset;
-this
-this
-this
+this.#offset = config.initialOffset;
+this.#cursor = config.initialCursor;
+this.#upToDate = config.initialUpToDate;
 this.#headers = config.firstResponse.headers;
 this.#status = config.firstResponse.status;
 this.#statusText = config.firstResponse.statusText;
@@ -654,6 +660,15 @@ var StreamResponseImpl = class {
 get isLoading() {
 return this.#isLoading;
 }
+get offset() {
+return this.#offset;
+}
+get cursor() {
+return this.#cursor;
+}
+get upToDate() {
+return this.#upToDate;
+}
 #ensureJsonMode() {
 if (!this.#isJsonMode) throw new DurableStreamError(`JSON methods are only valid for JSON-mode streams. Content-Type is "${this.contentType}" and json hint was not set.`, `BAD_REQUEST`);
 }
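The `offset`, `cursor`, and `upToDate` fields are now exposed as read-only getters backed by private state that the client refreshes from response headers and SSE control events. A minimal sketch of reading them from a session, assuming the package exports the `stream()` entry point used in the JSDoc examples later in this diff (not a confirmed public API):

```typescript
import { stream } from "@durable-streams/client";

// Sketch only: the URL is a placeholder.
const res = await stream({ url: "https://streams.example.com/my-stream" });

// 0.2.0: read-only getters (the old writable public fields are gone).
console.log(res.offset, res.cursor, res.upToDate);
```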
@@ -687,10 +702,10 @@ var StreamResponseImpl = class {
 */
 #updateStateFromResponse(response) {
 const offset = response.headers.get(STREAM_OFFSET_HEADER);
-if (offset) this
+if (offset) this.#offset = offset;
 const cursor = response.headers.get(STREAM_CURSOR_HEADER);
-if (cursor) this
-this
+if (cursor) this.#cursor = cursor;
+this.#upToDate = response.headers.has(STREAM_UP_TO_DATE_HEADER);
 this.#headers = response.headers;
 this.#status = response.status;
 this.#statusText = response.statusText;
@@ -732,9 +747,9 @@ var StreamResponseImpl = class {
 * Update instance state from an SSE control event.
 */
 #updateStateFromSSEControl(controlEvent) {
-this
-if (controlEvent.streamCursor) this
-if (controlEvent.upToDate !== void 0) this
+this.#offset = controlEvent.streamNextOffset;
+if (controlEvent.streamCursor) this.#cursor = controlEvent.streamCursor;
+if (controlEvent.upToDate !== void 0) this.#upToDate = controlEvent.upToDate;
 }
 /**
 * Mark the start of an SSE connection for duration tracking.
@@ -1000,7 +1015,13 @@ var StreamResponseImpl = class {
 const wasUpToDate = this.upToDate;
 const text = await result.value.text();
 const content = text.trim() || `[]`;
-
+let parsed;
+try {
+parsed = JSON.parse(content);
+} catch (err) {
+const preview = content.length > 100 ? content.slice(0, 100) + `...` : content;
+throw new DurableStreamError(`Failed to parse JSON response: ${err instanceof Error ? err.message : String(err)}. Data: ${preview}`, `PARSE_ERROR`);
+}
 if (Array.isArray(parsed)) items.push(...parsed);
 else items.push(parsed);
 if (wasUpToDate) break;
@@ -1097,7 +1118,13 @@ var StreamResponseImpl = class {
 }
 const text = await response.text();
 const content = text.trim() || `[]`;
-
+let parsed;
+try {
+parsed = JSON.parse(content);
+} catch (err) {
+const preview = content.length > 100 ? content.slice(0, 100) + `...` : content;
+throw new DurableStreamError(`Failed to parse JSON response: ${err instanceof Error ? err.message : String(err)}. Data: ${preview}`, `PARSE_ERROR`);
+}
 pendingItems = Array.isArray(parsed) ? parsed : [parsed];
 if (pendingItems.length > 0) controller.enqueue(pendingItems.shift());
 },
@@ -1136,7 +1163,13 @@ var StreamResponseImpl = class {
 const { offset, cursor, upToDate } = this.#getMetadataFromResponse(response);
 const text = await response.text();
 const content = text.trim() || `[]`;
-
+let parsed;
+try {
+parsed = JSON.parse(content);
+} catch (err) {
+const preview = content.length > 100 ? content.slice(0, 100) + `...` : content;
+throw new DurableStreamError(`Failed to parse JSON response: ${err instanceof Error ? err.message : String(err)}. Data: ${preview}`, `PARSE_ERROR`);
+}
 const items = Array.isArray(parsed) ? parsed : [parsed];
 await subscriber({
 items,
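All three JSON read paths above now fail loudly: a malformed body surfaces as a `DurableStreamError` with code `PARSE_ERROR`, carrying the underlying parse message plus a preview of at most 100 characters of the offending data. A hedged sketch of handling that on the consumer side, assuming `DurableStreamError` is exported and exposes the code passed as its second constructor argument:

```typescript
import { DurableStreamError } from "@durable-streams/client";

try {
  // `readJsonBody()` is a hypothetical stand-in for whichever JSON read path
  // you use (e.g. collecting items from a JSON-mode response).
  await readJsonBody();
} catch (err) {
  if (err instanceof DurableStreamError && err.code === `PARSE_ERROR`) {
    // New in 0.2.0: message includes the parse failure and a <=100 char data preview.
    console.error(`Stream returned malformed JSON:`, err.message);
  } else {
    throw err;
  }
}
```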
@@ -1381,7 +1414,7 @@ function _resetHttpWarningForTesting() {
 * url,
 * auth,
 * offset: savedOffset,
-* live:
+* live: true,
 * })
 * live.subscribeJson(async (batch) => {
 * for (const item of batch.items) {
@@ -1422,10 +1455,11 @@ async function stream(options) {
 */
 async function streamInternal(options) {
 const url = options.url instanceof URL ? options.url.toString() : options.url;
+warnIfUsingHttpInBrowser(url, options.warnOnHttp);
 const fetchUrl = new URL(url);
 const startOffset = options.offset ?? `-1`;
 fetchUrl.searchParams.set(OFFSET_QUERY_PARAM, startOffset);
-const live = options.live ??
+const live = options.live ?? true;
 if (live === `long-poll` || live === `sse`) fetchUrl.searchParams.set(LIVE_QUERY_PARAM, live);
 const params = await resolveParams(options.params);
 for (const [key, value] of Object.entries(params)) fetchUrl.searchParams.set(key, value);
@@ -1455,8 +1489,8 @@ async function streamInternal(options) {
 const nextUrl = new URL(url);
 nextUrl.searchParams.set(OFFSET_QUERY_PARAM, offset);
 if (!resumingFromPause) {
-if (live === `
-else if (live === `
+if (live === `sse`) nextUrl.searchParams.set(LIVE_QUERY_PARAM, `sse`);
+else if (live === true || live === `long-poll`) nextUrl.searchParams.set(LIVE_QUERY_PARAM, `long-poll`);
 }
 if (cursor) nextUrl.searchParams.set(`cursor`, cursor);
 const nextParams = await resolveParams(options.params);
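Two reader-side behaviour changes land in these hunks: `warnIfUsingHttpInBrowser` now also runs for direct `stream()` calls, and `live` defaults to `true`, which follow-up requests translate into long-poll mode unless SSE was requested explicitly. A small sketch of the resulting call sites; the exported `stream()` function and the `live: false` opt-out are assumptions based on the option handling shown above:

```typescript
import { stream } from "@durable-streams/client";

const url = "https://streams.example.com/my-stream"; // placeholder

// 0.2.0: omitting `live` now behaves like `live: true` -> long-poll follow-ups.
const tail = await stream({ url, offset: `-1` });

// Explicit modes still work as before.
const sse = await stream({ url, live: `sse` });
const catchUpOnly = await stream({ url, live: false }); // assumed opt-out
```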
@@ -1504,405 +1538,822 @@ async function streamInternal(options) {
 }
 
 //#endregion
-//#region src/
+//#region src/idempotent-producer.ts
+/**
+* Error thrown when a producer's epoch is stale (zombie fencing).
+*/
+var StaleEpochError = class extends Error {
+/**
+* The current epoch on the server.
+*/
+currentEpoch;
+constructor(currentEpoch) {
+super(`Producer epoch is stale. Current server epoch: ${currentEpoch}. Call restart() or create a new producer with a higher epoch.`);
+this.name = `StaleEpochError`;
+this.currentEpoch = currentEpoch;
+}
+};
+/**
+* Error thrown when an unrecoverable sequence gap is detected.
+*
+* With maxInFlight > 1, HTTP requests can arrive out of order at the server,
+* causing temporary 409 responses. The client automatically handles these
+* by waiting for earlier sequences to complete, then retrying.
+*
+* This error is only thrown when the gap cannot be resolved (e.g., the
+* expected sequence is >= our sequence, indicating a true protocol violation).
+*/
+var SequenceGapError = class extends Error {
+expectedSeq;
+receivedSeq;
+constructor(expectedSeq, receivedSeq) {
+super(`Producer sequence gap: expected ${expectedSeq}, received ${receivedSeq}`);
+this.name = `SequenceGapError`;
+this.expectedSeq = expectedSeq;
+this.receivedSeq = receivedSeq;
+}
+};
 /**
 * Normalize content-type by extracting the media type (before any semicolon).
-* Handles cases like "application/json; charset=utf-8".
 */
 function normalizeContentType$1(contentType) {
 if (!contentType) return ``;
 return contentType.split(`;`)[0].trim().toLowerCase();
 }
 /**
-*
-*/
-function isPromiseLike(value) {
-return value !== null && typeof value === `object` && `then` in value && typeof value.then === `function`;
-}
-/**
-* A handle to a remote durable stream for read/write operations.
+* An idempotent producer for exactly-once writes to a durable stream.
 *
-*
-*
-*
+* Features:
+* - Fire-and-forget: append() returns immediately, batches in background
+* - Exactly-once: server deduplicates using (producerId, epoch, seq)
+* - Batching: multiple appends batched into single HTTP request
+* - Pipelining: up to maxInFlight concurrent batches
+* - Zombie fencing: stale producers rejected via epoch validation
 *
 * @example
 * ```typescript
-*
-* const
-*
-*
-* contentType: "application/json"
+* const stream = new DurableStream({ url: "https://..." });
+* const producer = new IdempotentProducer(stream, "order-service-1", {
+* epoch: 0,
+* autoClaim: true,
 * });
 *
-* //
-*
+* // Fire-and-forget writes (synchronous, returns immediately)
+* producer.append("message 1");
+* producer.append("message 2");
 *
-* //
-*
-*
-* for (const item of batch.items) {
-* console.log(item.message);
-* }
-* });
+* // Ensure all messages are delivered before shutdown
+* await producer.flush();
+* await producer.close();
 * ```
 */
-var
-
-
-
-
-
-
-
-contentType;
-#options;
+var IdempotentProducer = class {
+#stream;
+#producerId;
+#epoch;
+#nextSeq = 0;
+#autoClaim;
+#maxBatchBytes;
+#lingerMs;
 #fetchClient;
+#signal;
 #onError;
-#
+#pendingBatch = [];
+#batchBytes = 0;
+#lingerTimeout = null;
 #queue;
-#
-
-
-
-*/
-constructor(opts) {
-validateOptions(opts);
-const urlStr = opts.url instanceof URL ? opts.url.toString() : opts.url;
-this.url = urlStr;
-this.#options = {
-...opts,
-url: urlStr
-};
-this.#onError = opts.onError;
-if (opts.contentType) this.contentType = opts.contentType;
-this.#batchingEnabled = opts.batching !== false;
-if (this.#batchingEnabled) this.#queue = fastq.promise(this.#batchWorker.bind(this), 1);
-const baseFetchClient = opts.fetch ?? ((...args) => fetch(...args));
-const backOffOpts = { ...opts.backoffOptions ?? BackoffDefaults };
-const fetchWithBackoffClient = createFetchWithBackoff(baseFetchClient, backOffOpts);
-this.#fetchClient = createFetchWithConsumedBody(fetchWithBackoffClient);
-}
+#maxInFlight;
+#closed = false;
+#epochClaimed;
+#seqState = new Map();
 /**
-* Create
-*
+* Create an idempotent producer for a stream.
+*
+* @param stream - The DurableStream to write to
+* @param producerId - Stable identifier for this producer (e.g., "order-service-1")
+* @param opts - Producer options
 */
-
-const
-
-
-
-
-
-
-
+constructor(stream$1, producerId, opts) {
+const epoch = opts?.epoch ?? 0;
+const maxBatchBytes = opts?.maxBatchBytes ?? 1024 * 1024;
+const maxInFlight = opts?.maxInFlight ?? 5;
+const lingerMs = opts?.lingerMs ?? 5;
+if (epoch < 0) throw new Error(`epoch must be >= 0`);
+if (maxBatchBytes <= 0) throw new Error(`maxBatchBytes must be > 0`);
+if (maxInFlight <= 0) throw new Error(`maxInFlight must be > 0`);
+if (lingerMs < 0) throw new Error(`lingerMs must be >= 0`);
+this.#stream = stream$1;
+this.#producerId = producerId;
+this.#epoch = epoch;
+this.#autoClaim = opts?.autoClaim ?? false;
+this.#maxBatchBytes = maxBatchBytes;
+this.#lingerMs = lingerMs;
+this.#signal = opts?.signal;
+this.#onError = opts?.onError;
+this.#fetchClient = opts?.fetch ?? ((...args) => fetch(...args));
+this.#maxInFlight = maxInFlight;
+this.#epochClaimed = !this.#autoClaim;
+this.#queue = fastq.promise(this.#batchWorker.bind(this), this.#maxInFlight);
+if (this.#signal) this.#signal.addEventListener(`abort`, () => {
+this.#rejectPendingBatch(new DurableStreamError(`Producer aborted`, `ALREADY_CLOSED`, void 0, void 0));
+}, { once: true });
 }
 /**
-*
-* Returns a handle with contentType populated (if sent by server).
+* Append data to the stream.
 *
-*
-*
-*
+* This is fire-and-forget: returns immediately after adding to the batch.
+* The message is batched and sent when:
+* - maxBatchBytes is reached
+* - lingerMs elapses
+* - flush() is called
+*
+* Errors are reported via onError callback if configured. Use flush() to
+* wait for all pending messages to be sent.
+*
+* For JSON streams, pass pre-serialized JSON strings.
+* For byte streams, pass string or Uint8Array.
+*
+* @param body - Data to append (string or Uint8Array)
 *
 * @example
 * ```typescript
-* //
-*
-*
+* // JSON stream
+* producer.append(JSON.stringify({ message: "hello" }));
+*
+* // Byte stream
+* producer.append("raw text data");
+* producer.append(new Uint8Array([1, 2, 3]));
 * ```
 */
-
-
-
-
+append(body) {
+if (this.#closed) throw new DurableStreamError(`Producer is closed`, `ALREADY_CLOSED`, void 0, void 0);
+let bytes;
+if (typeof body === `string`) bytes = new TextEncoder().encode(body);
+else if (body instanceof Uint8Array) bytes = body;
+else throw new DurableStreamError(`append() requires string or Uint8Array. For objects, use JSON.stringify().`, `BAD_REQUEST`, 400, void 0);
+this.#pendingBatch.push({ body: bytes });
+this.#batchBytes += bytes.length;
+if (this.#batchBytes >= this.#maxBatchBytes) this.#enqueuePendingBatch();
+else if (!this.#lingerTimeout) this.#lingerTimeout = setTimeout(() => {
+this.#lingerTimeout = null;
+if (this.#pendingBatch.length > 0) this.#enqueuePendingBatch();
+}, this.#lingerMs);
 }
 /**
-*
+* Send any pending batch immediately and wait for all in-flight batches.
+*
+* Call this before shutdown to ensure all messages are delivered.
 */
-
-
-
+async flush() {
+if (this.#lingerTimeout) {
+clearTimeout(this.#lingerTimeout);
+this.#lingerTimeout = null;
+}
+if (this.#pendingBatch.length > 0) this.#enqueuePendingBatch();
+await this.#queue.drained();
 }
 /**
-*
+* Flush pending messages and close the producer.
+*
+* After calling close(), further append() calls will throw.
 */
-
-
-
+async close() {
+if (this.#closed) return;
+this.#closed = true;
+try {
+await this.flush();
+} catch {}
 }
 /**
-*
+* Increment epoch and reset sequence.
+*
+* Call this when restarting the producer to establish a new session.
+* Flushes any pending messages first.
 */
-async
-
-
-
-
-
-
-if (!response.ok) await handleErrorResponse(response, this.url);
-const contentType = response.headers.get(`content-type`) ?? void 0;
-const offset = response.headers.get(STREAM_OFFSET_HEADER) ?? void 0;
-const etag = response.headers.get(`etag`) ?? void 0;
-const cacheControl = response.headers.get(`cache-control`) ?? void 0;
-if (contentType) this.contentType = contentType;
-return {
-exists: true,
-contentType,
-offset,
-etag,
-cacheControl
-};
-}
-/**
-* Create this stream (create-only PUT) using the URL/auth from the handle.
+async restart() {
+await this.flush();
+this.#epoch++;
+this.#nextSeq = 0;
+}
+/**
+* Current epoch for this producer.
 */
-
-
-const contentType = opts?.contentType ?? this.#options.contentType;
-if (contentType) requestHeaders[`content-type`] = contentType;
-if (opts?.ttlSeconds !== void 0) requestHeaders[STREAM_TTL_HEADER] = String(opts.ttlSeconds);
-if (opts?.expiresAt) requestHeaders[STREAM_EXPIRES_AT_HEADER] = opts.expiresAt;
-const body = encodeBody(opts?.body);
-const response = await this.#fetchClient(fetchUrl.toString(), {
-method: `PUT`,
-headers: requestHeaders,
-body,
-signal: this.#options.signal
-});
-if (!response.ok) await handleErrorResponse(response, this.url, { operation: `create` });
-const responseContentType = response.headers.get(`content-type`);
-if (responseContentType) this.contentType = responseContentType;
-else if (contentType) this.contentType = contentType;
-return this;
+get epoch() {
+return this.#epoch;
 }
 /**
-*
+* Next sequence number to be assigned.
 */
-
-
-const response = await this.#fetchClient(fetchUrl.toString(), {
-method: `DELETE`,
-headers: requestHeaders,
-signal: opts?.signal ?? this.#options.signal
-});
-if (!response.ok) await handleErrorResponse(response, this.url);
+get nextSeq() {
+return this.#nextSeq;
 }
 /**
-*
-*
-* When batching is enabled (default), multiple append() calls made while
-* a POST is in-flight will be batched together into a single request.
-* This significantly improves throughput for high-frequency writes.
-*
-* - `body` may be Uint8Array, string, or any JSON-serializable value (for JSON streams).
-* - `body` may also be a Promise that resolves to any of the above types.
-* - Strings are encoded as UTF-8.
-* - `seq` (if provided) is sent as stream-seq (writer coordination).
-*
-* @example
-* ```typescript
-* // Direct value
-* await stream.append({ message: "hello" });
-*
-* // Promise value - awaited before buffering
-* await stream.append(fetchData());
-* await stream.append(Promise.all([a, b, c]));
-* ```
+* Number of messages in the current pending batch.
 */
-
-
-if (this.#batchingEnabled && this.#queue) return this.#appendWithBatching(resolvedBody, opts);
-return this.#appendDirect(resolvedBody, opts);
+get pendingCount() {
+return this.#pendingBatch.length;
 }
 /**
-*
+* Number of batches currently in flight.
 */
-
-
-const contentType = opts?.contentType ?? this.#options.contentType ?? this.contentType;
-if (contentType) requestHeaders[`content-type`] = contentType;
-if (opts?.seq) requestHeaders[STREAM_SEQ_HEADER] = opts.seq;
-const isJson = normalizeContentType$1(contentType) === `application/json`;
-const bodyToEncode = isJson ? [body] : body;
-const encodedBody = encodeBody(bodyToEncode);
-const response = await this.#fetchClient(fetchUrl.toString(), {
-method: `POST`,
-headers: requestHeaders,
-body: encodedBody,
-signal: opts?.signal ?? this.#options.signal
-});
-if (!response.ok) await handleErrorResponse(response, this.url);
+get inFlightCount() {
+return this.#queue.length();
 }
 /**
-*
+* Enqueue the current pending batch for processing.
 */
-
-
-
-
-
-
-
-
-
-
-
-
-this.#queue.push(batch).catch((err) => {
-for (const msg of batch) msg.reject(err);
-});
-}
+#enqueuePendingBatch() {
+if (this.#pendingBatch.length === 0) return;
+const batch = this.#pendingBatch;
+const seq = this.#nextSeq;
+this.#pendingBatch = [];
+this.#batchBytes = 0;
+this.#nextSeq++;
+if (this.#autoClaim && !this.#epochClaimed && this.#queue.length() > 0) this.#queue.drained().then(() => {
+this.#queue.push({
+batch,
+seq
+}).catch(() => {});
 });
+else this.#queue.push({
+batch,
+seq
+}).catch(() => {});
 }
 /**
-* Batch worker - processes batches
+* Batch worker - processes batches via fastq.
 */
-async #batchWorker(
+async #batchWorker(task) {
+const { batch, seq } = task;
+const epoch = this.#epoch;
 try {
-await this.#
-
-
-const nextBatch = this.#buffer.splice(0);
-this.#queue.push(nextBatch).catch((err) => {
-for (const msg of nextBatch) msg.reject(err);
-});
-}
+await this.#doSendBatch(batch, seq, epoch);
+if (!this.#epochClaimed) this.#epochClaimed = true;
+this.#signalSeqComplete(epoch, seq, void 0);
 } catch (error) {
-
-
-this.#buffer = [];
+this.#signalSeqComplete(epoch, seq, error);
+if (this.#onError) this.#onError(error);
 throw error;
 }
 }
 /**
-*
+* Signal that a sequence has completed (success or failure).
 */
-
-
-
-
-
-let highestSeq;
-for (let i = batch.length - 1; i >= 0; i--) if (batch[i].seq !== void 0) {
-highestSeq = batch[i].seq;
-break;
+#signalSeqComplete(epoch, seq, error) {
+let epochMap = this.#seqState.get(epoch);
+if (!epochMap) {
+epochMap = new Map();
+this.#seqState.set(epoch, epochMap);
 }
-
+const state = epochMap.get(seq);
+if (state) {
+state.resolved = true;
+state.error = error;
+for (const waiter of state.waiters) waiter(error);
+state.waiters = [];
+} else epochMap.set(seq, {
+resolved: true,
+error,
+waiters: []
+});
+const cleanupThreshold = seq - this.#maxInFlight * 3;
+if (cleanupThreshold > 0) {
+for (const oldSeq of epochMap.keys()) if (oldSeq < cleanupThreshold) epochMap.delete(oldSeq);
+}
+}
+/**
+* Wait for a specific sequence to complete.
+* Returns immediately if already completed.
+* Throws if the sequence failed.
+*/
+#waitForSeq(epoch, seq) {
+let epochMap = this.#seqState.get(epoch);
+if (!epochMap) {
+epochMap = new Map();
+this.#seqState.set(epoch, epochMap);
+}
+const state = epochMap.get(seq);
+if (state?.resolved) {
+if (state.error) return Promise.reject(state.error);
+return Promise.resolve();
+}
+return new Promise((resolve, reject) => {
+const waiter = (err) => {
+if (err) reject(err);
+else resolve();
+};
+if (state) state.waiters.push(waiter);
+else epochMap.set(seq, {
+resolved: false,
+waiters: [waiter]
+});
+});
+}
+/**
+* Actually send the batch to the server.
+* Handles auto-claim retry on 403 (stale epoch) if autoClaim is enabled.
+* Does NOT implement general retry/backoff for network errors or 5xx responses.
+*/
+async #doSendBatch(batch, seq, epoch) {
+const contentType = this.#stream.contentType ?? `application/octet-stream`;
 const isJson = normalizeContentType$1(contentType) === `application/json`;
 let batchedBody;
 if (isJson) {
-const
-batchedBody =
+const jsonStrings = batch.map((e) => new TextDecoder().decode(e.body));
+batchedBody = `[${jsonStrings.join(`,`)}]`;
 } else {
-const totalSize = batch.reduce((sum,
-const size = typeof m.data === `string` ? new TextEncoder().encode(m.data).length : m.data.length;
-return sum + size;
-}, 0);
+const totalSize = batch.reduce((sum, e) => sum + e.body.length, 0);
 const concatenated = new Uint8Array(totalSize);
 let offset = 0;
-for (const
-
-
-offset += bytes.length;
+for (const entry of batch) {
+concatenated.set(entry.body, offset);
+offset += entry.body.length;
 }
 batchedBody = concatenated;
 }
-const
-
-
-
-
+const url = this.#stream.url;
+const headers = {
+"content-type": contentType,
+[PRODUCER_ID_HEADER]: this.#producerId,
+[PRODUCER_EPOCH_HEADER]: epoch.toString(),
+[PRODUCER_SEQ_HEADER]: seq.toString()
+};
+const response = await this.#fetchClient(url, {
 method: `POST`,
-headers
+headers,
 body: batchedBody,
-signal:
+signal: this.#signal
 });
-if (
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+if (response.status === 204) return {
+offset: ``,
+duplicate: true
+};
+if (response.status === 200) {
+const resultOffset = response.headers.get(STREAM_OFFSET_HEADER) ?? ``;
+return {
+offset: resultOffset,
+duplicate: false
+};
+}
+if (response.status === 403) {
+const currentEpochStr = response.headers.get(PRODUCER_EPOCH_HEADER);
+const currentEpoch = currentEpochStr ? parseInt(currentEpochStr, 10) : epoch;
+if (this.#autoClaim) {
+const newEpoch = currentEpoch + 1;
+this.#epoch = newEpoch;
+this.#nextSeq = 1;
+return this.#doSendBatch(batch, 0, newEpoch);
+}
+throw new StaleEpochError(currentEpoch);
+}
+if (response.status === 409) {
+const expectedSeqStr = response.headers.get(PRODUCER_EXPECTED_SEQ_HEADER);
+const expectedSeq = expectedSeqStr ? parseInt(expectedSeqStr, 10) : 0;
+if (expectedSeq < seq) {
+const waitPromises = [];
+for (let s = expectedSeq; s < seq; s++) waitPromises.push(this.#waitForSeq(epoch, s));
+await Promise.all(waitPromises);
+return this.#doSendBatch(batch, seq, epoch);
+}
+const receivedSeqStr = response.headers.get(PRODUCER_RECEIVED_SEQ_HEADER);
+const receivedSeq = receivedSeqStr ? parseInt(receivedSeqStr, 10) : seq;
+throw new SequenceGapError(expectedSeq, receivedSeq);
+}
+if (response.status === 400) {
+const error$1 = await DurableStreamError.fromResponse(response, url);
+throw error$1;
+}
+const error = await FetchError.fromResponse(response, url);
+throw error;
+}
+/**
+* Clear pending batch and report error.
+*/
+#rejectPendingBatch(error) {
+if (this.#onError && this.#pendingBatch.length > 0) this.#onError(error);
+this.#pendingBatch = [];
+this.#batchBytes = 0;
+if (this.#lingerTimeout) {
+clearTimeout(this.#lingerTimeout);
+this.#lingerTimeout = null;
+}
+}
+};
+
+//#endregion
+//#region src/stream.ts
+/**
+* Normalize content-type by extracting the media type (before any semicolon).
+* Handles cases like "application/json; charset=utf-8".
+*/
+function normalizeContentType(contentType) {
+if (!contentType) return ``;
+return contentType.split(`;`)[0].trim().toLowerCase();
+}
+/**
+* Check if a value is a Promise or Promise-like (thenable).
+*/
+function isPromiseLike(value) {
+return value != null && typeof value.then === `function`;
+}
+/**
+* A handle to a remote durable stream for read/write operations.
+*
+* This is a lightweight, reusable handle - not a persistent connection.
+* It does not automatically start reading or listening.
+* Create sessions as needed via stream().
+*
+* @example
+* ```typescript
+* // Create a new stream
+* const stream = await DurableStream.create({
+* url: "https://streams.example.com/my-stream",
+* headers: { Authorization: "Bearer my-token" },
+* contentType: "application/json"
+* });
+*
+* // Write data
+* await stream.append(JSON.stringify({ message: "hello" }));
+*
+* // Read with the new API
+* const res = await stream.stream<{ message: string }>();
+* res.subscribeJson(async (batch) => {
+* for (const item of batch.items) {
+* console.log(item.message);
+* }
+* });
+* ```
+*/
+var DurableStream = class DurableStream {
+/**
+* The URL of the durable stream.
+*/
+url;
+/**
+* The content type of the stream (populated after connect/head/read).
+*/
+contentType;
+#options;
+#fetchClient;
+#onError;
+#batchingEnabled;
+#queue;
+#buffer = [];
+/**
+* Create a cold handle to a stream.
+* No network IO is performed by the constructor.
+*/
+constructor(opts) {
+validateOptions(opts);
+const urlStr = opts.url instanceof URL ? opts.url.toString() : opts.url;
+this.url = urlStr;
+this.#options = {
+...opts,
+url: urlStr
+};
+this.#onError = opts.onError;
+if (opts.contentType) this.contentType = opts.contentType;
+this.#batchingEnabled = opts.batching !== false;
+if (this.#batchingEnabled) this.#queue = fastq.promise(this.#batchWorker.bind(this), 1);
+const baseFetchClient = opts.fetch ?? ((...args) => fetch(...args));
+const backOffOpts = { ...opts.backoffOptions ?? BackoffDefaults };
+const fetchWithBackoffClient = createFetchWithBackoff(baseFetchClient, backOffOpts);
+this.#fetchClient = createFetchWithConsumedBody(fetchWithBackoffClient);
+}
+/**
+* Create a new stream (create-only PUT) and return a handle.
+* Fails with DurableStreamError(code="CONFLICT_EXISTS") if it already exists.
+*/
+static async create(opts) {
+const stream$1 = new DurableStream(opts);
+await stream$1.create({
+contentType: opts.contentType,
+ttlSeconds: opts.ttlSeconds,
+expiresAt: opts.expiresAt,
+body: opts.body
+});
+return stream$1;
+}
+/**
+* Validate that a stream exists and fetch metadata via HEAD.
+* Returns a handle with contentType populated (if sent by server).
+*
+* **Important**: This only performs a HEAD request for validation - it does
+* NOT open a session or start reading data. To read from the stream, call
+* `stream()` on the returned handle.
+*
+* @example
+* ```typescript
+* // Validate stream exists before reading
+* const handle = await DurableStream.connect({ url })
+* const res = await handle.stream() // Now actually read
 * ```
 */
-async
+static async connect(opts) {
+const stream$1 = new DurableStream(opts);
+await stream$1.head();
+return stream$1;
+}
+/**
+* HEAD metadata for a stream without creating a handle.
+*/
+static async head(opts) {
+const stream$1 = new DurableStream(opts);
+return stream$1.head();
+}
+/**
+* Delete a stream without creating a handle.
+*/
+static async delete(opts) {
+const stream$1 = new DurableStream(opts);
+return stream$1.delete();
+}
+/**
+* HEAD metadata for this stream.
+*/
+async head(opts) {
 const { requestHeaders, fetchUrl } = await this.#buildRequest();
-const
+const response = await this.#fetchClient(fetchUrl.toString(), {
+method: `HEAD`,
+headers: requestHeaders,
+signal: opts?.signal ?? this.#options.signal
+});
+if (!response.ok) await handleErrorResponse(response, this.url);
+const contentType = response.headers.get(`content-type`) ?? void 0;
+const offset = response.headers.get(STREAM_OFFSET_HEADER) ?? void 0;
+const etag = response.headers.get(`etag`) ?? void 0;
+const cacheControl = response.headers.get(`cache-control`) ?? void 0;
+if (contentType) this.contentType = contentType;
+return {
+exists: true,
+contentType,
+offset,
+etag,
+cacheControl
+};
+}
+/**
+* Create this stream (create-only PUT) using the URL/auth from the handle.
+*/
+async create(opts) {
+const { requestHeaders, fetchUrl } = await this.#buildRequest();
+const contentType = opts?.contentType ?? this.#options.contentType;
 if (contentType) requestHeaders[`content-type`] = contentType;
-if (opts?.
-
+if (opts?.ttlSeconds !== void 0) requestHeaders[STREAM_TTL_HEADER] = String(opts.ttlSeconds);
+if (opts?.expiresAt) requestHeaders[STREAM_EXPIRES_AT_HEADER] = opts.expiresAt;
+const body = encodeBody(opts?.body);
 const response = await this.#fetchClient(fetchUrl.toString(), {
-method: `
+method: `PUT`,
 headers: requestHeaders,
 body,
-
+signal: this.#options.signal
+});
+if (!response.ok) await handleErrorResponse(response, this.url, { operation: `create` });
+const responseContentType = response.headers.get(`content-type`);
+if (responseContentType) this.contentType = responseContentType;
+else if (contentType) this.contentType = contentType;
+return this;
+}
+/**
+* Delete this stream.
+*/
+async delete(opts) {
+const { requestHeaders, fetchUrl } = await this.#buildRequest();
+const response = await this.#fetchClient(fetchUrl.toString(), {
+method: `DELETE`,
+headers: requestHeaders,
 signal: opts?.signal ?? this.#options.signal
 });
 if (!response.ok) await handleErrorResponse(response, this.url);
 }
 /**
-*
+* Append a single payload to the stream.
 *
-*
-*
+* When batching is enabled (default), multiple append() calls made while
+* a POST is in-flight will be batched together into a single request.
+* This significantly improves throughput for high-frequency writes.
+*
+* - `body` must be string or Uint8Array.
+* - For JSON streams, pass pre-serialized JSON strings.
+* - `body` may also be a Promise that resolves to string or Uint8Array.
+* - Strings are encoded as UTF-8.
+* - `seq` (if provided) is sent as stream-seq (writer coordination).
 *
 * @example
 * ```typescript
-* //
-*
-* await response.body!.pipeTo(stream.writable());
+* // JSON stream - pass pre-serialized JSON
+* await stream.append(JSON.stringify({ message: "hello" }));
 *
-* //
-*
-* await
+* // Byte stream
+* await stream.append("raw text data");
+* await stream.append(new Uint8Array([1, 2, 3]));
+*
+* // Promise value - awaited before buffering
+* await stream.append(fetchData());
 * ```
 */
-
-const
-
-return
+async append(body, opts) {
+const resolvedBody = isPromiseLike(body) ? await body : body;
+if (this.#batchingEnabled && this.#queue) return this.#appendWithBatching(resolvedBody, opts);
+return this.#appendDirect(resolvedBody, opts);
+}
+/**
+* Direct append without batching (used when batching is disabled).
+*/
+async #appendDirect(body, opts) {
+const { requestHeaders, fetchUrl } = await this.#buildRequest();
+const contentType = opts?.contentType ?? this.#options.contentType ?? this.contentType;
+if (contentType) requestHeaders[`content-type`] = contentType;
+if (opts?.seq) requestHeaders[STREAM_SEQ_HEADER] = opts.seq;
+const isJson = normalizeContentType(contentType) === `application/json`;
+const bodyStr = typeof body === `string` ? body : new TextDecoder().decode(body);
+const encodedBody = isJson ? `[${bodyStr}]` : bodyStr;
+const response = await this.#fetchClient(fetchUrl.toString(), {
+method: `POST`,
+headers: requestHeaders,
+body: encodedBody,
+signal: opts?.signal ?? this.#options.signal
+});
+if (!response.ok) await handleErrorResponse(response, this.url);
+}
+/**
+* Append with batching - buffers messages and sends them in batches.
+*/
+async #appendWithBatching(body, opts) {
+return new Promise((resolve, reject) => {
+this.#buffer.push({
+data: body,
+seq: opts?.seq,
+contentType: opts?.contentType,
+signal: opts?.signal,
+resolve,
+reject
+});
+if (this.#queue.idle()) {
+const batch = this.#buffer.splice(0);
+this.#queue.push(batch).catch((err) => {
+for (const msg of batch) msg.reject(err);
+});
+}
+});
+}
+/**
+* Batch worker - processes batches of messages.
+*/
+async #batchWorker(batch) {
+try {
+await this.#sendBatch(batch);
+for (const msg of batch) msg.resolve();
+if (this.#buffer.length > 0) {
+const nextBatch = this.#buffer.splice(0);
+this.#queue.push(nextBatch).catch((err) => {
+for (const msg of nextBatch) msg.reject(err);
+});
+}
+} catch (error) {
+for (const msg of batch) msg.reject(error);
+for (const msg of this.#buffer) msg.reject(error);
+this.#buffer = [];
+throw error;
+}
+}
+/**
+* Send a batch of messages as a single POST request.
+*/
+async #sendBatch(batch) {
+if (batch.length === 0) return;
+const { requestHeaders, fetchUrl } = await this.#buildRequest();
+const contentType = batch[0]?.contentType ?? this.#options.contentType ?? this.contentType;
+if (contentType) requestHeaders[`content-type`] = contentType;
+let highestSeq;
+for (let i = batch.length - 1; i >= 0; i--) if (batch[i].seq !== void 0) {
+highestSeq = batch[i].seq;
+break;
+}
+if (highestSeq) requestHeaders[STREAM_SEQ_HEADER] = highestSeq;
+const isJson = normalizeContentType(contentType) === `application/json`;
+let batchedBody;
+if (isJson) {
+const jsonStrings = batch.map((m) => typeof m.data === `string` ? m.data : new TextDecoder().decode(m.data));
+batchedBody = `[${jsonStrings.join(`,`)}]`;
+} else {
+const strings = batch.map((m) => typeof m.data === `string` ? m.data : new TextDecoder().decode(m.data));
+batchedBody = strings.join(``);
+}
+const signals = [];
+if (this.#options.signal) signals.push(this.#options.signal);
+for (const msg of batch) if (msg.signal) signals.push(msg.signal);
+const combinedSignal = signals.length > 0 ? AbortSignal.any(signals) : void 0;
+const response = await this.#fetchClient(fetchUrl.toString(), {
+method: `POST`,
+headers: requestHeaders,
+body: batchedBody,
+signal: combinedSignal
+});
+if (!response.ok) await handleErrorResponse(response, this.url);
+}
+/**
+* Append a streaming body to the stream.
+*
+* Supports piping from any ReadableStream or async iterable:
+* - `source` yields Uint8Array or string chunks.
+* - Strings are encoded as UTF-8; no delimiters are added.
+* - Internally uses chunked transfer or HTTP/2 streaming.
+*
+* @example
+* ```typescript
+* // Pipe from a ReadableStream
+* const readable = new ReadableStream({
+* start(controller) {
+* controller.enqueue("chunk 1");
+* controller.enqueue("chunk 2");
+* controller.close();
+* }
+* });
+* await stream.appendStream(readable);
+*
+* // Pipe from an async generator
+* async function* generate() {
+* yield "line 1\n";
+* yield "line 2\n";
+* }
+* await stream.appendStream(generate());
+*
+* // Pipe from fetch response body
+* const response = await fetch("https://example.com/data");
+* await stream.appendStream(response.body!);
+* ```
+*/
+async appendStream(source, opts) {
+const { requestHeaders, fetchUrl } = await this.#buildRequest();
+const contentType = opts?.contentType ?? this.#options.contentType ?? this.contentType;
+if (contentType) requestHeaders[`content-type`] = contentType;
+if (opts?.seq) requestHeaders[STREAM_SEQ_HEADER] = opts.seq;
+const body = toReadableStream(source);
+const response = await this.#fetchClient(fetchUrl.toString(), {
+method: `POST`,
+headers: requestHeaders,
+body,
+duplex: `half`,
+signal: opts?.signal ?? this.#options.signal
+});
+if (!response.ok) await handleErrorResponse(response, this.url);
+}
+/**
+* Create a writable stream that pipes data to this durable stream.
+*
+* Returns a WritableStream that can be used with `pipeTo()` or
+* `pipeThrough()` from any ReadableStream source.
+*
+* Uses IdempotentProducer internally for:
+* - Automatic batching (controlled by lingerMs, maxBatchBytes)
+* - Exactly-once delivery semantics
+* - Streaming writes (doesn't buffer entire content in memory)
+*
+* @example
+* ```typescript
+* // Pipe from fetch response
+* const response = await fetch("https://example.com/data");
+* await response.body!.pipeTo(stream.writable());
+*
+* // Pipe through a transform
+* const readable = someStream.pipeThrough(new TextEncoderStream());
+* await readable.pipeTo(stream.writable());
+*
+* // With custom producer options
+* await source.pipeTo(stream.writable({
+* producerId: "my-producer",
+* lingerMs: 10,
+* maxBatchBytes: 64 * 1024,
+* }));
+* ```
+*/
+writable(opts) {
+const producerId = opts?.producerId ?? `writable-${crypto.randomUUID().slice(0, 8)}`;
+let writeError = null;
+const producer = new IdempotentProducer(this, producerId, {
+autoClaim: true,
+lingerMs: opts?.lingerMs,
+maxBatchBytes: opts?.maxBatchBytes,
+onError: (error) => {
+if (!writeError) writeError = error;
+opts?.onError?.(error);
+},
+signal: opts?.signal ?? this.#options.signal
+});
+return new WritableStream({
 write(chunk) {
-
+producer.append(chunk);
 },
 async close() {
-
-
-
-controller.close();
-} });
-await stream$1.appendStream(readable, opts);
-}
+await producer.flush();
+await producer.close();
+if (writeError) throw writeError;
 },
-abort(
-
+abort(_reason) {
+producer.close().catch((err) => {
+opts?.onError?.(err);
+});
 }
 });
 }
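The hunk above moves `IdempotentProducer` ahead of `DurableStream` in the bundle and rewrites it around byte-only batches, explicit option validation, per-epoch sequence tracking, and 403/409 handling (`StaleEpochError`, `SequenceGapError`), while `writable()` becomes a thin wrapper over the producer that records the first error and rethrows it on close. A hedged end-to-end sketch of the new shape, using only APIs visible in this diff (export names are assumed, values are illustrative):

```typescript
import { DurableStream, IdempotentProducer, StaleEpochError } from "@durable-streams/client";

const stream = new DurableStream({ url: "https://streams.example.com/orders" }); // placeholder URL

const producer = new IdempotentProducer(stream, "order-service-1", {
  epoch: 0,
  autoClaim: true,           // retry with a bumped epoch on 403 instead of throwing
  maxBatchBytes: 256 * 1024, // illustrative tuning values
  lingerMs: 10,
  maxInFlight: 2,
  onError: (err) => {
    if (err instanceof StaleEpochError) {
      // Another instance fenced us out; err.currentEpoch is the server's epoch.
      console.error(`fenced at epoch ${err.currentEpoch}`);
    }
  },
});

// 0.2.0: append() takes pre-serialized strings (or Uint8Array), not objects.
producer.append(JSON.stringify({ orderId: 1 }));
producer.append(JSON.stringify({ orderId: 2 }));

// Drain before shutdown: flush() waits for pending and in-flight batches,
// close() flushes and then rejects further append() calls.
await producer.flush();
await producer.close();
```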
@@ -2029,402 +2480,5 @@ function validateOptions(options) {
 warnIfUsingHttpInBrowser(options.url, options.warnOnHttp);
 }
 
-//#endregion
-//#region src/idempotent-producer.ts
-/**
-* Error thrown when a producer's epoch is stale (zombie fencing).
-*/
-var StaleEpochError = class extends Error {
-/**
-* The current epoch on the server.
-*/
-currentEpoch;
-constructor(currentEpoch) {
-super(`Producer epoch is stale. Current server epoch: ${currentEpoch}. Call restart() or create a new producer with a higher epoch.`);
-this.name = `StaleEpochError`;
-this.currentEpoch = currentEpoch;
-}
-};
-/**
-* Error thrown when an unrecoverable sequence gap is detected.
-*
-* With maxInFlight > 1, HTTP requests can arrive out of order at the server,
-* causing temporary 409 responses. The client automatically handles these
-* by waiting for earlier sequences to complete, then retrying.
-*
-* This error is only thrown when the gap cannot be resolved (e.g., the
-* expected sequence is >= our sequence, indicating a true protocol violation).
-*/
-var SequenceGapError = class extends Error {
-expectedSeq;
-receivedSeq;
-constructor(expectedSeq, receivedSeq) {
-super(`Producer sequence gap: expected ${expectedSeq}, received ${receivedSeq}`);
-this.name = `SequenceGapError`;
-this.expectedSeq = expectedSeq;
-this.receivedSeq = receivedSeq;
-}
-};
-/**
-* Normalize content-type by extracting the media type (before any semicolon).
-*/
-function normalizeContentType(contentType) {
-if (!contentType) return ``;
-return contentType.split(`;`)[0].trim().toLowerCase();
-}
-/**
-* An idempotent producer for exactly-once writes to a durable stream.
-*
-* Features:
-* - Fire-and-forget: append() returns immediately, batches in background
-* - Exactly-once: server deduplicates using (producerId, epoch, seq)
-* - Batching: multiple appends batched into single HTTP request
-* - Pipelining: up to maxInFlight concurrent batches
-* - Zombie fencing: stale producers rejected via epoch validation
-*
-* @example
-* ```typescript
-* const stream = new DurableStream({ url: "https://..." });
-* const producer = new IdempotentProducer(stream, "order-service-1", {
-* epoch: 0,
-* autoClaim: true,
-* });
-*
-* // Fire-and-forget writes (synchronous, returns immediately)
-* producer.append("message 1");
-* producer.append("message 2");
-*
-* // Ensure all messages are delivered before shutdown
-* await producer.flush();
-* await producer.close();
-* ```
-*/
-var IdempotentProducer = class {
-#stream;
-#producerId;
-#epoch;
-#nextSeq = 0;
-#autoClaim;
-#maxBatchBytes;
-#lingerMs;
-#fetchClient;
-#signal;
-#onError;
-#pendingBatch = [];
-#batchBytes = 0;
-#lingerTimeout = null;
-#queue;
-#maxInFlight;
-#closed = false;
-#epochClaimed;
-#seqState = new Map();
-/**
-* Create an idempotent producer for a stream.
-*
-* @param stream - The DurableStream to write to
-* @param producerId - Stable identifier for this producer (e.g., "order-service-1")
-* @param opts - Producer options
-*/
-constructor(stream$1, producerId, opts) {
-this.#stream = stream$1;
-this.#producerId = producerId;
-this.#epoch = opts?.epoch ?? 0;
-this.#autoClaim = opts?.autoClaim ?? false;
-this.#maxBatchBytes = opts?.maxBatchBytes ?? 1024 * 1024;
-this.#lingerMs = opts?.lingerMs ?? 5;
-this.#signal = opts?.signal;
-this.#onError = opts?.onError;
-this.#fetchClient = opts?.fetch ?? ((...args) => fetch(...args));
-this.#maxInFlight = opts?.maxInFlight ?? 5;
-this.#epochClaimed = !this.#autoClaim;
-this.#queue = fastq.promise(this.#batchWorker.bind(this), this.#maxInFlight);
-if (this.#signal) this.#signal.addEventListener(`abort`, () => {
-this.#rejectPendingBatch(new DurableStreamError(`Producer aborted`, `ALREADY_CLOSED`, void 0, void 0));
-}, { once: true });
-}
-/**
-* Append data to the stream.
-*
-* This is fire-and-forget: returns immediately after adding to the batch.
-* The message is batched and sent when:
-* - maxBatchBytes is reached
-* - lingerMs elapses
-* - flush() is called
-*
-* Errors are reported via onError callback if configured. Use flush() to
-* wait for all pending messages to be sent.
-*
-* For JSON streams, pass native objects (which will be serialized internally).
-* For byte streams, pass string or Uint8Array.
-*
-* @param body - Data to append (object for JSON streams, string or Uint8Array for byte streams)
-*/
-append(body) {
-if (this.#closed) throw new DurableStreamError(`Producer is closed`, `ALREADY_CLOSED`, void 0, void 0);
-const isJson = normalizeContentType(this.#stream.contentType) === `application/json`;
-let bytes;
-let data;
-if (isJson) {
-const json = JSON.stringify(body);
-bytes = new TextEncoder().encode(json);
-data = body;
-} else {
-if (typeof body === `string`) bytes = new TextEncoder().encode(body);
-else if (body instanceof Uint8Array) bytes = body;
-else throw new DurableStreamError(`Non-JSON streams require string or Uint8Array`, `BAD_REQUEST`, 400, void 0);
-data = bytes;
-}
-this.#pendingBatch.push({
-data,
-body: bytes
-});
-this.#batchBytes += bytes.length;
-if (this.#batchBytes >= this.#maxBatchBytes) this.#enqueuePendingBatch();
-else if (!this.#lingerTimeout) this.#lingerTimeout = setTimeout(() => {
|
|
2184
|
-
this.#lingerTimeout = null;
|
|
2185
|
-
if (this.#pendingBatch.length > 0) this.#enqueuePendingBatch();
|
|
2186
|
-
}, this.#lingerMs);
|
|
2187
|
-
}
|
|
2188
|
-
/**
|
|
2189
|
-
* Send any pending batch immediately and wait for all in-flight batches.
|
|
2190
|
-
*
|
|
2191
|
-
* Call this before shutdown to ensure all messages are delivered.
|
|
2192
|
-
*/
|
|
2193
|
-
async flush() {
|
|
2194
|
-
if (this.#lingerTimeout) {
|
|
2195
|
-
clearTimeout(this.#lingerTimeout);
|
|
2196
|
-
this.#lingerTimeout = null;
|
|
2197
|
-
}
|
|
2198
|
-
if (this.#pendingBatch.length > 0) this.#enqueuePendingBatch();
|
|
2199
|
-
await this.#queue.drained();
|
|
2200
|
-
}
|
|
2201
|
-
/**
|
|
2202
|
-
* Flush pending messages and close the producer.
|
|
2203
|
-
*
|
|
2204
|
-
* After calling close(), further append() calls will throw.
|
|
2205
|
-
*/
|
|
2206
|
-
async close() {
|
|
2207
|
-
if (this.#closed) return;
|
|
2208
|
-
this.#closed = true;
|
|
2209
|
-
try {
|
|
2210
|
-
await this.flush();
|
|
2211
|
-
} catch {}
|
|
2212
|
-
}
|
|
2213
|
-
/**
|
|
2214
|
-
* Increment epoch and reset sequence.
|
|
2215
|
-
*
|
|
2216
|
-
* Call this when restarting the producer to establish a new session.
|
|
2217
|
-
* Flushes any pending messages first.
|
|
2218
|
-
*/
|
|
2219
|
-
async restart() {
|
|
2220
|
-
await this.flush();
|
|
2221
|
-
this.#epoch++;
|
|
2222
|
-
this.#nextSeq = 0;
|
|
2223
|
-
}
|
|
2224
|
-
/**
|
|
2225
|
-
* Current epoch for this producer.
|
|
2226
|
-
*/
|
|
2227
|
-
get epoch() {
|
|
2228
|
-
return this.#epoch;
|
|
2229
|
-
}
|
|
2230
|
-
/**
|
|
2231
|
-
* Next sequence number to be assigned.
|
|
2232
|
-
*/
|
|
2233
|
-
get nextSeq() {
|
|
2234
|
-
return this.#nextSeq;
|
|
2235
|
-
}
|
|
2236
|
-
/**
|
|
2237
|
-
* Number of messages in the current pending batch.
|
|
2238
|
-
*/
|
|
2239
|
-
get pendingCount() {
|
|
2240
|
-
return this.#pendingBatch.length;
|
|
2241
|
-
}
|
|
2242
|
-
/**
|
|
2243
|
-
* Number of batches currently in flight.
|
|
2244
|
-
*/
|
|
2245
|
-
get inFlightCount() {
|
|
2246
|
-
return this.#queue.length();
|
|
2247
|
-
}
|
|
2248
|
-
/**
|
|
2249
|
-
* Enqueue the current pending batch for processing.
|
|
2250
|
-
*/
|
|
2251
|
-
#enqueuePendingBatch() {
|
|
2252
|
-
if (this.#pendingBatch.length === 0) return;
|
|
2253
|
-
const batch = this.#pendingBatch;
|
|
2254
|
-
const seq = this.#nextSeq;
|
|
2255
|
-
this.#pendingBatch = [];
|
|
2256
|
-
this.#batchBytes = 0;
|
|
2257
|
-
this.#nextSeq++;
|
|
2258
|
-
if (this.#autoClaim && !this.#epochClaimed && this.#queue.length() > 0) this.#queue.drained().then(() => {
|
|
2259
|
-
this.#queue.push({
|
|
2260
|
-
batch,
|
|
2261
|
-
seq
|
|
2262
|
-
}).catch(() => {});
|
|
2263
|
-
});
|
|
2264
|
-
else this.#queue.push({
|
|
2265
|
-
batch,
|
|
2266
|
-
seq
|
|
2267
|
-
}).catch(() => {});
|
|
2268
|
-
}
|
|
2269
|
-
/**
|
|
2270
|
-
* Batch worker - processes batches via fastq.
|
|
2271
|
-
*/
|
|
2272
|
-
async #batchWorker(task) {
|
|
2273
|
-
const { batch, seq } = task;
|
|
2274
|
-
const epoch = this.#epoch;
|
|
2275
|
-
try {
|
|
2276
|
-
await this.#doSendBatch(batch, seq, epoch);
|
|
2277
|
-
if (!this.#epochClaimed) this.#epochClaimed = true;
|
|
2278
|
-
this.#signalSeqComplete(epoch, seq, void 0);
|
|
2279
|
-
} catch (error) {
|
|
2280
|
-
this.#signalSeqComplete(epoch, seq, error);
|
|
2281
|
-
if (this.#onError) this.#onError(error);
|
|
2282
|
-
throw error;
|
|
2283
|
-
}
|
|
2284
|
-
}
|
|
2285
|
-
/**
|
|
2286
|
-
* Signal that a sequence has completed (success or failure).
|
|
2287
|
-
*/
|
|
2288
|
-
#signalSeqComplete(epoch, seq, error) {
|
|
2289
|
-
let epochMap = this.#seqState.get(epoch);
|
|
2290
|
-
if (!epochMap) {
|
|
2291
|
-
epochMap = new Map();
|
|
2292
|
-
this.#seqState.set(epoch, epochMap);
|
|
2293
|
-
}
|
|
2294
|
-
const state = epochMap.get(seq);
|
|
2295
|
-
if (state) {
|
|
2296
|
-
state.resolved = true;
|
|
2297
|
-
state.error = error;
|
|
2298
|
-
for (const waiter of state.waiters) waiter(error);
|
|
2299
|
-
state.waiters = [];
|
|
2300
|
-
} else epochMap.set(seq, {
|
|
2301
|
-
resolved: true,
|
|
2302
|
-
error,
|
|
2303
|
-
waiters: []
|
|
2304
|
-
});
|
|
2305
|
-
const cleanupThreshold = seq - this.#maxInFlight * 3;
|
|
2306
|
-
if (cleanupThreshold > 0) {
|
|
2307
|
-
for (const oldSeq of epochMap.keys()) if (oldSeq < cleanupThreshold) epochMap.delete(oldSeq);
|
|
2308
|
-
}
|
|
2309
|
-
}
|
|
2310
|
-
/**
|
|
2311
|
-
* Wait for a specific sequence to complete.
|
|
2312
|
-
* Returns immediately if already completed.
|
|
2313
|
-
* Throws if the sequence failed.
|
|
2314
|
-
*/
|
|
2315
|
-
#waitForSeq(epoch, seq) {
|
|
2316
|
-
let epochMap = this.#seqState.get(epoch);
|
|
2317
|
-
if (!epochMap) {
|
|
2318
|
-
epochMap = new Map();
|
|
2319
|
-
this.#seqState.set(epoch, epochMap);
|
|
2320
|
-
}
|
|
2321
|
-
const state = epochMap.get(seq);
|
|
2322
|
-
if (state?.resolved) {
|
|
2323
|
-
if (state.error) return Promise.reject(state.error);
|
|
2324
|
-
return Promise.resolve();
|
|
2325
|
-
}
|
|
2326
|
-
return new Promise((resolve, reject) => {
|
|
2327
|
-
const waiter = (err) => {
|
|
2328
|
-
if (err) reject(err);
|
|
2329
|
-
else resolve();
|
|
2330
|
-
};
|
|
2331
|
-
if (state) state.waiters.push(waiter);
|
|
2332
|
-
else epochMap.set(seq, {
|
|
2333
|
-
resolved: false,
|
|
2334
|
-
waiters: [waiter]
|
|
2335
|
-
});
|
|
2336
|
-
});
|
|
2337
|
-
}
|
|
2338
|
-
/**
|
|
2339
|
-
* Actually send the batch to the server.
|
|
2340
|
-
* Handles auto-claim retry on 403 (stale epoch) if autoClaim is enabled.
|
|
2341
|
-
* Does NOT implement general retry/backoff for network errors or 5xx responses.
|
|
2342
|
-
*/
|
|
2343
|
-
async #doSendBatch(batch, seq, epoch) {
|
|
2344
|
-
const contentType = this.#stream.contentType ?? `application/octet-stream`;
|
|
2345
|
-
const isJson = normalizeContentType(contentType) === `application/json`;
|
|
2346
|
-
let batchedBody;
|
|
2347
|
-
if (isJson) {
|
|
2348
|
-
const values = batch.map((e) => e.data);
|
|
2349
|
-
batchedBody = JSON.stringify(values);
|
|
2350
|
-
} else {
|
|
2351
|
-
const totalSize = batch.reduce((sum, e) => sum + e.body.length, 0);
|
|
2352
|
-
const concatenated = new Uint8Array(totalSize);
|
|
2353
|
-
let offset = 0;
|
|
2354
|
-
for (const entry of batch) {
|
|
2355
|
-
concatenated.set(entry.body, offset);
|
|
2356
|
-
offset += entry.body.length;
|
|
2357
|
-
}
|
|
2358
|
-
batchedBody = concatenated;
|
|
2359
|
-
}
|
|
2360
|
-
const url = this.#stream.url;
|
|
2361
|
-
const headers = {
|
|
2362
|
-
"content-type": contentType,
|
|
2363
|
-
[PRODUCER_ID_HEADER]: this.#producerId,
|
|
2364
|
-
[PRODUCER_EPOCH_HEADER]: epoch.toString(),
|
|
2365
|
-
[PRODUCER_SEQ_HEADER]: seq.toString()
|
|
2366
|
-
};
|
|
2367
|
-
const response = await this.#fetchClient(url, {
|
|
2368
|
-
method: `POST`,
|
|
2369
|
-
headers,
|
|
2370
|
-
body: batchedBody,
|
|
2371
|
-
signal: this.#signal
|
|
2372
|
-
});
|
|
2373
|
-
if (response.status === 204) return {
|
|
2374
|
-
offset: ``,
|
|
2375
|
-
duplicate: true
|
|
2376
|
-
};
|
|
2377
|
-
if (response.status === 200) {
|
|
2378
|
-
const resultOffset = response.headers.get(STREAM_OFFSET_HEADER) ?? ``;
|
|
2379
|
-
return {
|
|
2380
|
-
offset: resultOffset,
|
|
2381
|
-
duplicate: false
|
|
2382
|
-
};
|
|
2383
|
-
}
|
|
2384
|
-
if (response.status === 403) {
|
|
2385
|
-
const currentEpochStr = response.headers.get(PRODUCER_EPOCH_HEADER);
|
|
2386
|
-
const currentEpoch = currentEpochStr ? parseInt(currentEpochStr, 10) : epoch;
|
|
2387
|
-
if (this.#autoClaim) {
|
|
2388
|
-
const newEpoch = currentEpoch + 1;
|
|
2389
|
-
this.#epoch = newEpoch;
|
|
2390
|
-
this.#nextSeq = 1;
|
|
2391
|
-
return this.#doSendBatch(batch, 0, newEpoch);
|
|
2392
|
-
}
|
|
2393
|
-
throw new StaleEpochError(currentEpoch);
|
|
2394
|
-
}
|
|
2395
|
-
if (response.status === 409) {
|
|
2396
|
-
const expectedSeqStr = response.headers.get(PRODUCER_EXPECTED_SEQ_HEADER);
|
|
2397
|
-
const expectedSeq = expectedSeqStr ? parseInt(expectedSeqStr, 10) : 0;
|
|
2398
|
-
if (expectedSeq < seq) {
|
|
2399
|
-
const waitPromises = [];
|
|
2400
|
-
for (let s = expectedSeq; s < seq; s++) waitPromises.push(this.#waitForSeq(epoch, s));
|
|
2401
|
-
await Promise.all(waitPromises);
|
|
2402
|
-
return this.#doSendBatch(batch, seq, epoch);
|
|
2403
|
-
}
|
|
2404
|
-
const receivedSeqStr = response.headers.get(PRODUCER_RECEIVED_SEQ_HEADER);
|
|
2405
|
-
const receivedSeq = receivedSeqStr ? parseInt(receivedSeqStr, 10) : seq;
|
|
2406
|
-
throw new SequenceGapError(expectedSeq, receivedSeq);
|
|
2407
|
-
}
|
|
2408
|
-
if (response.status === 400) {
|
|
2409
|
-
const error$1 = await DurableStreamError.fromResponse(response, url);
|
|
2410
|
-
throw error$1;
|
|
2411
|
-
}
|
|
2412
|
-
const error = await FetchError.fromResponse(response, url);
|
|
2413
|
-
throw error;
|
|
2414
|
-
}
|
|
2415
|
-
/**
|
|
2416
|
-
* Clear pending batch and report error.
|
|
2417
|
-
*/
|
|
2418
|
-
#rejectPendingBatch(error) {
|
|
2419
|
-
if (this.#onError && this.#pendingBatch.length > 0) this.#onError(error);
|
|
2420
|
-
this.#pendingBatch = [];
|
|
2421
|
-
this.#batchBytes = 0;
|
|
2422
|
-
if (this.#lingerTimeout) {
|
|
2423
|
-
clearTimeout(this.#lingerTimeout);
|
|
2424
|
-
this.#lingerTimeout = null;
|
|
2425
|
-
}
|
|
2426
|
-
}
|
|
2427
|
-
};
|
|
2428
|
-
|
|
2429
2483
|
//#endregion
|
|
2430
2484
|
export { BackoffDefaults, CURSOR_QUERY_PARAM, DURABLE_STREAM_PROTOCOL_QUERY_PARAMS, DurableStream, DurableStreamError, FetchBackoffAbortError, FetchError, IdempotentProducer, InvalidSignalError, LIVE_QUERY_PARAM, MissingStreamUrlError, OFFSET_QUERY_PARAM, PRODUCER_EPOCH_HEADER, PRODUCER_EXPECTED_SEQ_HEADER, PRODUCER_ID_HEADER, PRODUCER_RECEIVED_SEQ_HEADER, PRODUCER_SEQ_HEADER, SSE_COMPATIBLE_CONTENT_TYPES, STREAM_CURSOR_HEADER, STREAM_EXPIRES_AT_HEADER, STREAM_OFFSET_HEADER, STREAM_SEQ_HEADER, STREAM_TTL_HEADER, STREAM_UP_TO_DATE_HEADER, SequenceGapError, StaleEpochError, _resetHttpWarningForTesting, asAsyncIterableReadableStream, createFetchWithBackoff, createFetchWithConsumedBody, stream, warnIfUsingHttpInBrowser };
|
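Although the hunk above removes the `IdempotentProducer` bundle code from this position in `dist/index.js`, the unchanged `export` line shows that `IdempotentProducer`, `StaleEpochError`, and `SequenceGapError` are still part of the public surface in 0.2.0. The sketch below is illustrative only, based on the JSDoc visible in the diff; the stream URL and the error-handler logic are made up, and exact option types may differ from the published type definitions.

```typescript
import {
  DurableStream,
  IdempotentProducer,
  SequenceGapError,
  StaleEpochError,
} from "@durable-streams/client";

// Hypothetical endpoint; any durable-streams URL would do.
const stream = new DurableStream({ url: "https://example.com/streams/orders" });

const producer = new IdempotentProducer(stream, "order-service-1", {
  epoch: 0,
  autoClaim: true,
  // append() is fire-and-forget, so delivery failures surface via onError.
  onError: (err) => {
    if (err instanceof StaleEpochError) {
      // Zombie fencing: another producer claimed a higher epoch.
      console.error(`fenced: server epoch is ${err.currentEpoch}`);
    } else if (err instanceof SequenceGapError) {
      // Unrecoverable gap: expected vs. received sequence numbers.
      console.error(`gap: expected ${err.expectedSeq}, received ${err.receivedSeq}`);
    } else {
      console.error(err);
    }
  },
});

producer.append("message 1");
producer.append("message 2");

// Ensure every pending and in-flight batch is delivered before shutdown.
await producer.flush();
await producer.close();
```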