@durable-streams/client 0.1.2 → 0.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +175 -5
- package/dist/index.cjs +510 -5
- package/dist/index.d.cts +254 -1
- package/dist/index.d.ts +254 -1
- package/dist/index.js +500 -5
- package/package.json +2 -2
- package/src/constants.ts +31 -0
- package/src/idempotent-producer.ts +642 -0
- package/src/index.ts +24 -0
- package/src/sse.ts +3 -0
- package/src/stream.ts +13 -1
- package/src/types.ts +111 -0
- package/src/utils.ts +120 -0
package/dist/index.cjs
CHANGED
|
@@ -59,6 +59,28 @@ const STREAM_TTL_HEADER = `Stream-TTL`;
|
|
|
59
59
|
*/
|
|
60
60
|
const STREAM_EXPIRES_AT_HEADER = `Stream-Expires-At`;
|
|
61
61
|
/**
|
|
62
|
+
* Request header for producer ID (client-supplied stable identifier).
|
|
63
|
+
*/
|
|
64
|
+
const PRODUCER_ID_HEADER = `Producer-Id`;
|
|
65
|
+
/**
|
|
66
|
+
* Request/response header for producer epoch.
|
|
67
|
+
* Client-declared, server-validated monotonically increasing.
|
|
68
|
+
*/
|
|
69
|
+
const PRODUCER_EPOCH_HEADER = `Producer-Epoch`;
|
|
70
|
+
/**
|
|
71
|
+
* Request header for producer sequence number.
|
|
72
|
+
* Monotonically increasing per epoch, per-batch (not per-message).
|
|
73
|
+
*/
|
|
74
|
+
const PRODUCER_SEQ_HEADER = `Producer-Seq`;
|
|
75
|
+
/**
|
|
76
|
+
* Response header indicating expected sequence number on 409 Conflict.
|
|
77
|
+
*/
|
|
78
|
+
const PRODUCER_EXPECTED_SEQ_HEADER = `Producer-Expected-Seq`;
|
|
79
|
+
/**
|
|
80
|
+
* Response header indicating received sequence number on 409 Conflict.
|
|
81
|
+
*/
|
|
82
|
+
const PRODUCER_RECEIVED_SEQ_HEADER = `Producer-Received-Seq`;
|
|
83
|
+
/**
|
|
62
84
|
* Query parameter for starting offset.
|
|
63
85
|
*/
|
|
64
86
|
const OFFSET_QUERY_PARAM = `offset`;
|
|
@@ -466,6 +488,7 @@ async function* parseSSEStream(stream$1, signal) {
|
|
|
466
488
|
const { done, value } = await reader.read();
|
|
467
489
|
if (done) break;
|
|
468
490
|
buffer += decoder.decode(value, { stream: true });
|
|
491
|
+
buffer = buffer.replace(/\r\n/g, `\n`).replace(/\r/g, `\n`);
|
|
469
492
|
const lines = buffer.split(`\n`);
|
|
470
493
|
buffer = lines.pop() ?? ``;
|
|
471
494
|
for (const line of lines) if (line === ``) {
|
|
@@ -1180,6 +1203,78 @@ async function resolveParams(params) {
|
|
|
1180
1203
|
else resolved[key] = value;
|
|
1181
1204
|
return resolved;
|
|
1182
1205
|
}
|
|
1206
|
+
/** Origins that have already produced the HTTP warning (warn at most once per origin). */
const warnedOrigins = new Set();
/**
 * Safely read NODE_ENV without triggering "process is not defined" errors.
 * Works in both browser and Node.js environments.
 */
function getNodeEnvSafely() {
	const hasProcess = typeof process !== `undefined`;
	return hasProcess ? process.env?.NODE_ENV : void 0;
}
/**
 * Check if we're in a browser environment (i.e. a global `window` exists).
 */
function isBrowserEnvironment() {
	return !(typeof globalThis.window === `undefined`);
}
/**
 * Get window.location.href safely, returning undefined if not available.
 */
function getWindowLocationHref() {
	const win = globalThis.window;
	if (typeof win === `undefined`) return void 0;
	if (typeof win.location === `undefined`) return void 0;
	return win.location.href;
}
/**
 * Resolve a URL string, handling relative URLs in browser environments.
 * Returns undefined if the URL cannot be parsed (absolutely or against
 * window.location.href).
 */
function resolveUrlMaybe(urlString) {
	// First attempt: treat the string as an absolute URL.
	try {
		return new URL(urlString);
	} catch {}
	// Second attempt: resolve relative to the current page, when one exists.
	const base = getWindowLocationHref();
	if (!base) return void 0;
	try {
		return new URL(urlString, base);
	} catch {
		return void 0;
	}
}
/**
 * Warn if using HTTP (not HTTPS) URL in a browser environment.
 * HTTP typically limits browsers to ~6 concurrent connections per origin under HTTP/1.1,
 * which can cause slow streams and app freezes with multiple active streams.
 *
 * Features:
 * - Warns only once per origin to prevent log spam
 * - Handles relative URLs by resolving against window.location.href
 * - Safe to call in Node.js environments (no-op)
 * - Skips warning during tests (NODE_ENV=test)
 */
function warnIfUsingHttpInBrowser(url, warnOnHttp) {
	if (warnOnHttp === false) return;
	if (getNodeEnvSafely() === `test`) return;
	if (!isBrowserEnvironment()) return;
	if (typeof console === `undefined` || typeof console.warn !== `function`) return;
	const urlStr = url instanceof URL ? url.toString() : url;
	const parsedUrl = resolveUrlMaybe(urlStr);
	if (!parsedUrl || parsedUrl.protocol !== `http:`) return;
	if (warnedOrigins.has(parsedUrl.origin)) return;
	warnedOrigins.add(parsedUrl.origin);
	console.warn("[DurableStream] Using HTTP (not HTTPS) typically limits browsers to ~6 concurrent connections per origin under HTTP/1.1. This can cause slow streams and app freezes with multiple active streams. Use HTTPS for HTTP/2 support. See https://electric-sql.com/r/electric-http2 for more information.");
}
/**
 * Reset the HTTP warning state. Only exported for testing purposes.
 * @internal
 */
function _resetHttpWarningForTesting() {
	warnedOrigins.clear();
}
|
|
1183
1278
|
|
|
1184
1279
|
//#endregion
|
|
1185
1280
|
//#region src/stream-api.ts
|
|
@@ -1333,7 +1428,7 @@ async function streamInternal(options) {
|
|
|
1333
1428
|
* Normalize content-type by extracting the media type (before any semicolon).
|
|
1334
1429
|
* Handles cases like "application/json; charset=utf-8".
|
|
1335
1430
|
*/
|
|
1336
|
-
function normalizeContentType(contentType) {
|
|
1431
|
+
function normalizeContentType$1(contentType) {
|
|
1337
1432
|
if (!contentType) return ``;
|
|
1338
1433
|
return contentType.split(`;`)[0].trim().toLowerCase();
|
|
1339
1434
|
}
|
|
@@ -1399,6 +1494,7 @@ var DurableStream = class DurableStream {
|
|
|
1399
1494
|
url: urlStr
|
|
1400
1495
|
};
|
|
1401
1496
|
this.#onError = opts.onError;
|
|
1497
|
+
if (opts.contentType) this.contentType = opts.contentType;
|
|
1402
1498
|
this.#batchingEnabled = opts.batching !== false;
|
|
1403
1499
|
if (this.#batchingEnabled) this.#queue = fastq.default.promise(this.#batchWorker.bind(this), 1);
|
|
1404
1500
|
const baseFetchClient = opts.fetch ?? ((...args) => fetch(...args));
|
|
@@ -1547,7 +1643,7 @@ var DurableStream = class DurableStream {
|
|
|
1547
1643
|
const contentType = opts?.contentType ?? this.#options.contentType ?? this.contentType;
|
|
1548
1644
|
if (contentType) requestHeaders[`content-type`] = contentType;
|
|
1549
1645
|
if (opts?.seq) requestHeaders[STREAM_SEQ_HEADER] = opts.seq;
|
|
1550
|
-
const isJson = normalizeContentType(contentType) === `application/json`;
|
|
1646
|
+
const isJson = normalizeContentType$1(contentType) === `application/json`;
|
|
1551
1647
|
const bodyToEncode = isJson ? [body] : body;
|
|
1552
1648
|
const encodedBody = encodeBody(bodyToEncode);
|
|
1553
1649
|
const response = await this.#fetchClient(fetchUrl.toString(), {
|
|
@@ -1613,7 +1709,7 @@ var DurableStream = class DurableStream {
|
|
|
1613
1709
|
break;
|
|
1614
1710
|
}
|
|
1615
1711
|
if (highestSeq) requestHeaders[STREAM_SEQ_HEADER] = highestSeq;
|
|
1616
|
-
const isJson = normalizeContentType(contentType) === `application/json`;
|
|
1712
|
+
const isJson = normalizeContentType$1(contentType) === `application/json`;
|
|
1617
1713
|
let batchedBody;
|
|
1618
1714
|
if (isJson) {
|
|
1619
1715
|
const values = batch.map((m) => m.data);
|
|
@@ -1787,7 +1883,8 @@ var DurableStream = class DurableStream {
|
|
|
1787
1883
|
offset: options?.offset,
|
|
1788
1884
|
live: options?.live,
|
|
1789
1885
|
json: options?.json,
|
|
1790
|
-
onError: options?.onError ?? this.#onError
|
|
1886
|
+
onError: options?.onError ?? this.#onError,
|
|
1887
|
+
warnOnHttp: options?.warnOnHttp ?? this.#options.warnOnHttp
|
|
1791
1888
|
});
|
|
1792
1889
|
}
|
|
1793
1890
|
/**
|
|
@@ -1848,7 +1945,405 @@ function toReadableStream(source) {
|
|
|
1848
1945
|
function validateOptions(options) {
|
|
1849
1946
|
if (!options.url) throw new MissingStreamUrlError();
|
|
1850
1947
|
if (options.signal && !(options.signal instanceof AbortSignal)) throw new InvalidSignalError();
|
|
1948
|
+
warnIfUsingHttpInBrowser(options.url, options.warnOnHttp);
|
|
1949
|
+
}
|
|
1950
|
+
|
|
1951
|
+
//#endregion
|
|
1952
|
+
//#region src/idempotent-producer.ts
|
|
1953
|
+
/**
 * Error thrown when a producer's epoch is stale (zombie fencing).
 */
var StaleEpochError = class extends Error {
	/**
	 * The current epoch on the server.
	 */
	currentEpoch;
	constructor(currentEpoch) {
		const message = `Producer epoch is stale. Current server epoch: ${currentEpoch}. Call restart() or create a new producer with a higher epoch.`;
		super(message);
		this.currentEpoch = currentEpoch;
		this.name = `StaleEpochError`;
	}
};
|
|
1967
|
+
/**
 * Error thrown when an unrecoverable sequence gap is detected.
 *
 * With maxInFlight > 1, HTTP requests can arrive out of order at the server,
 * causing temporary 409 responses. The client automatically handles these
 * by waiting for earlier sequences to complete, then retrying.
 *
 * This error is only thrown when the gap cannot be resolved (e.g., the
 * expected sequence is >= our sequence, indicating a true protocol violation).
 */
var SequenceGapError = class extends Error {
	expectedSeq;
	receivedSeq;
	constructor(expectedSeq, receivedSeq) {
		super(`Producer sequence gap: expected ${expectedSeq}, received ${receivedSeq}`);
		Object.assign(this, { expectedSeq, receivedSeq });
		this.name = `SequenceGapError`;
	}
};
|
|
1987
|
+
/**
 * Normalize content-type by extracting the media type (before any semicolon),
 * trimmed and lowercased. A nullish/empty input yields the empty string.
 */
function normalizeContentType(contentType) {
	if (!contentType) return ``;
	const [mediaType] = contentType.split(`;`);
	return mediaType.trim().toLowerCase();
}
|
|
1994
|
+
/**
 * An idempotent producer for exactly-once writes to a durable stream.
 *
 * Features:
 * - Fire-and-forget: append() returns immediately, batches in background
 * - Exactly-once: server deduplicates using (producerId, epoch, seq)
 * - Batching: multiple appends batched into single HTTP request
 * - Pipelining: up to maxInFlight concurrent batches
 * - Zombie fencing: stale producers rejected via epoch validation
 *
 * @example
 * ```typescript
 * const stream = new DurableStream({ url: "https://..." });
 * const producer = new IdempotentProducer(stream, "order-service-1", {
 *   epoch: 0,
 *   autoClaim: true,
 * });
 *
 * // Fire-and-forget writes (synchronous, returns immediately)
 * producer.append("message 1");
 * producer.append("message 2");
 *
 * // Ensure all messages are delivered before shutdown
 * await producer.flush();
 * await producer.close();
 * ```
 */
var IdempotentProducer = class {
	// Target DurableStream; provides .url and .contentType.
	#stream;
	// Client-supplied stable identifier, sent as the Producer-Id header.
	#producerId;
	// Current epoch; bumped by restart() and by auto-claim on 403.
	#epoch;
	// Next batch sequence number within the current epoch.
	#nextSeq = 0;
	#autoClaim;
	#maxBatchBytes;
	#lingerMs;
	#fetchClient;
	#signal;
	#onError;
	// Messages accumulated since the last batch was enqueued.
	#pendingBatch = [];
	// Byte size of #pendingBatch (encoded bodies only).
	#batchBytes = 0;
	#lingerTimeout = null;
	// fastq promise queue; concurrency = #maxInFlight.
	#queue;
	#maxInFlight;
	#closed = false;
	// False until the first successful send when autoClaim is on; gates
	// pipelining so the claiming batch goes out alone (see #enqueuePendingBatch).
	#epochClaimed;
	// epoch -> (seq -> { resolved, error, waiters }); used to serialize 409 retries.
	#seqState = new Map();
	/**
	 * Create an idempotent producer for a stream.
	 *
	 * @param stream - The DurableStream to write to
	 * @param producerId - Stable identifier for this producer (e.g., "order-service-1")
	 * @param opts - Producer options
	 */
	constructor(stream$1, producerId, opts) {
		this.#stream = stream$1;
		this.#producerId = producerId;
		this.#epoch = opts?.epoch ?? 0;
		this.#autoClaim = opts?.autoClaim ?? false;
		// Default batch ceiling: 1 MiB of encoded payload.
		this.#maxBatchBytes = opts?.maxBatchBytes ?? 1024 * 1024;
		this.#lingerMs = opts?.lingerMs ?? 5;
		this.#signal = opts?.signal;
		this.#onError = opts?.onError;
		this.#fetchClient = opts?.fetch ?? ((...args) => fetch(...args));
		this.#maxInFlight = opts?.maxInFlight ?? 5;
		this.#epochClaimed = !this.#autoClaim;
		this.#queue = fastq.default.promise(this.#batchWorker.bind(this), this.#maxInFlight);
		// On abort, drop the un-sent pending batch and notify via onError.
		// NOTE(review): in-flight fetches are cancelled separately via the same
		// signal passed to fetch in #doSendBatch.
		if (this.#signal) this.#signal.addEventListener(`abort`, () => {
			this.#rejectPendingBatch(new DurableStreamError(`Producer aborted`, `ALREADY_CLOSED`, void 0, void 0));
		}, { once: true });
	}
	/**
	 * Append data to the stream.
	 *
	 * This is fire-and-forget: returns immediately after adding to the batch.
	 * The message is batched and sent when:
	 * - maxBatchBytes is reached
	 * - lingerMs elapses
	 * - flush() is called
	 *
	 * Errors are reported via onError callback if configured. Use flush() to
	 * wait for all pending messages to be sent.
	 *
	 * For JSON streams, pass native objects (which will be serialized internally).
	 * For byte streams, pass string or Uint8Array.
	 *
	 * @param body - Data to append (object for JSON streams, string or Uint8Array for byte streams)
	 * @throws DurableStreamError if the producer is closed, or if a non-JSON
	 *         stream receives a body that is neither string nor Uint8Array
	 */
	append(body) {
		if (this.#closed) throw new DurableStreamError(`Producer is closed`, `ALREADY_CLOSED`, void 0, void 0);
		const isJson = normalizeContentType(this.#stream.contentType) === `application/json`;
		let bytes;
		let data;
		if (isJson) {
			// JSON: keep the native value for later array-batching; encode only
			// to measure size against maxBatchBytes.
			const json = JSON.stringify(body);
			bytes = new TextEncoder().encode(json);
			data = body;
		} else {
			if (typeof body === `string`) bytes = new TextEncoder().encode(body);
			else if (body instanceof Uint8Array) bytes = body;
			else throw new DurableStreamError(`Non-JSON streams require string or Uint8Array`, `BAD_REQUEST`, 400, void 0);
			data = bytes;
		}
		this.#pendingBatch.push({
			data,
			body: bytes
		});
		this.#batchBytes += bytes.length;
		// Size-triggered send; otherwise arm a single linger timer.
		if (this.#batchBytes >= this.#maxBatchBytes) this.#enqueuePendingBatch();
		else if (!this.#lingerTimeout) this.#lingerTimeout = setTimeout(() => {
			this.#lingerTimeout = null;
			if (this.#pendingBatch.length > 0) this.#enqueuePendingBatch();
		}, this.#lingerMs);
	}
	/**
	 * Send any pending batch immediately and wait for all in-flight batches.
	 *
	 * Call this before shutdown to ensure all messages are delivered.
	 */
	async flush() {
		if (this.#lingerTimeout) {
			clearTimeout(this.#lingerTimeout);
			this.#lingerTimeout = null;
		}
		if (this.#pendingBatch.length > 0) this.#enqueuePendingBatch();
		await this.#queue.drained();
	}
	/**
	 * Flush pending messages and close the producer.
	 *
	 * After calling close(), further append() calls will throw.
	 * Flush failures are intentionally swallowed here; they are already
	 * reported via onError by the batch worker.
	 */
	async close() {
		if (this.#closed) return;
		this.#closed = true;
		try {
			await this.flush();
		} catch {}
	}
	/**
	 * Increment epoch and reset sequence.
	 *
	 * Call this when restarting the producer to establish a new session.
	 * Flushes any pending messages first.
	 */
	async restart() {
		await this.flush();
		this.#epoch++;
		this.#nextSeq = 0;
	}
	/**
	 * Current epoch for this producer.
	 */
	get epoch() {
		return this.#epoch;
	}
	/**
	 * Next sequence number to be assigned.
	 */
	get nextSeq() {
		return this.#nextSeq;
	}
	/**
	 * Number of messages in the current pending batch.
	 */
	get pendingCount() {
		return this.#pendingBatch.length;
	}
	/**
	 * Number of batches currently in flight.
	 */
	get inFlightCount() {
		return this.#queue.length();
	}
	/**
	 * Enqueue the current pending batch for processing.
	 * Assigns the next seq and resets the pending buffer synchronously, so
	 * concurrent appends start a fresh batch.
	 */
	#enqueuePendingBatch() {
		if (this.#pendingBatch.length === 0) return;
		const batch = this.#pendingBatch;
		const seq = this.#nextSeq;
		this.#pendingBatch = [];
		this.#batchBytes = 0;
		this.#nextSeq++;
		// Until the epoch is claimed (autoClaim), avoid pipelining: wait for the
		// queue to drain before pushing, so the claiming send goes out alone.
		if (this.#autoClaim && !this.#epochClaimed && this.#queue.length() > 0) this.#queue.drained().then(() => {
			this.#queue.push({
				batch,
				seq
			}).catch(() => {});
		});
		else this.#queue.push({
			batch,
			seq
		}).catch(() => {});
	}
	/**
	 * Batch worker - processes batches via fastq.
	 * Signals seq completion (success or failure) so 409-blocked later
	 * batches can proceed, and reports failures via onError.
	 */
	async #batchWorker(task) {
		const { batch, seq } = task;
		const epoch = this.#epoch;
		try {
			await this.#doSendBatch(batch, seq, epoch);
			if (!this.#epochClaimed) this.#epochClaimed = true;
			this.#signalSeqComplete(epoch, seq, void 0);
		} catch (error) {
			this.#signalSeqComplete(epoch, seq, error);
			if (this.#onError) this.#onError(error);
			throw error;
		}
	}
	/**
	 * Signal that a sequence has completed (success or failure).
	 * Wakes any #waitForSeq waiters and prunes entries older than
	 * maxInFlight * 3 sequences to bound memory.
	 */
	#signalSeqComplete(epoch, seq, error) {
		let epochMap = this.#seqState.get(epoch);
		if (!epochMap) {
			epochMap = new Map();
			this.#seqState.set(epoch, epochMap);
		}
		const state = epochMap.get(seq);
		if (state) {
			state.resolved = true;
			state.error = error;
			for (const waiter of state.waiters) waiter(error);
			state.waiters = [];
		} else epochMap.set(seq, {
			resolved: true,
			error,
			waiters: []
		});
		const cleanupThreshold = seq - this.#maxInFlight * 3;
		if (cleanupThreshold > 0) {
			for (const oldSeq of epochMap.keys()) if (oldSeq < cleanupThreshold) epochMap.delete(oldSeq);
		}
	}
	/**
	 * Wait for a specific sequence to complete.
	 * Returns immediately if already completed.
	 * Throws if the sequence failed.
	 */
	#waitForSeq(epoch, seq) {
		let epochMap = this.#seqState.get(epoch);
		if (!epochMap) {
			epochMap = new Map();
			this.#seqState.set(epoch, epochMap);
		}
		const state = epochMap.get(seq);
		if (state?.resolved) {
			if (state.error) return Promise.reject(state.error);
			return Promise.resolve();
		}
		return new Promise((resolve, reject) => {
			const waiter = (err) => {
				if (err) reject(err);
				else resolve();
			};
			if (state) state.waiters.push(waiter);
			else epochMap.set(seq, {
				resolved: false,
				waiters: [waiter]
			});
		});
	}
	/**
	 * Actually send the batch to the server.
	 * Handles auto-claim retry on 403 (stale epoch) if autoClaim is enabled.
	 * Does NOT implement general retry/backoff for network errors or 5xx responses.
	 *
	 * NOTE(review): on auto-claim the batch is resent as seq 0 under the new
	 * epoch and #nextSeq is reset to 1; batches already in flight keep their
	 * old (now stale) epoch/seq — presumably the server rejects or the 409
	 * path resolves them. Confirm against server semantics.
	 */
	async #doSendBatch(batch, seq, epoch) {
		const contentType = this.#stream.contentType ?? `application/octet-stream`;
		const isJson = normalizeContentType(contentType) === `application/json`;
		let batchedBody;
		if (isJson) {
			// JSON batches are sent as a single JSON array of the native values.
			const values = batch.map((e) => e.data);
			batchedBody = JSON.stringify(values);
		} else {
			// Byte batches are concatenated into one contiguous buffer.
			const totalSize = batch.reduce((sum, e) => sum + e.body.length, 0);
			const concatenated = new Uint8Array(totalSize);
			let offset = 0;
			for (const entry of batch) {
				concatenated.set(entry.body, offset);
				offset += entry.body.length;
			}
			batchedBody = concatenated;
		}
		const url = this.#stream.url;
		const headers = {
			"content-type": contentType,
			[PRODUCER_ID_HEADER]: this.#producerId,
			[PRODUCER_EPOCH_HEADER]: epoch.toString(),
			[PRODUCER_SEQ_HEADER]: seq.toString()
		};
		const response = await this.#fetchClient(url, {
			method: `POST`,
			headers,
			body: batchedBody,
			signal: this.#signal
		});
		// 204: server saw this (producerId, epoch, seq) before — deduplicated.
		if (response.status === 204) return {
			offset: ``,
			duplicate: true
		};
		if (response.status === 200) {
			const resultOffset = response.headers.get(STREAM_OFFSET_HEADER) ?? ``;
			return {
				offset: resultOffset,
				duplicate: false
			};
		}
		// 403: epoch fenced. Auto-claim bumps past the server's epoch and
		// retries; otherwise surface StaleEpochError to the caller.
		if (response.status === 403) {
			const currentEpochStr = response.headers.get(PRODUCER_EPOCH_HEADER);
			const currentEpoch = currentEpochStr ? parseInt(currentEpochStr, 10) : epoch;
			if (this.#autoClaim) {
				const newEpoch = currentEpoch + 1;
				this.#epoch = newEpoch;
				this.#nextSeq = 1;
				return this.#doSendBatch(batch, 0, newEpoch);
			}
			throw new StaleEpochError(currentEpoch);
		}
		// 409: out-of-order arrival. If the server expects an earlier seq,
		// wait for those sends to finish, then retry; otherwise it is an
		// unrecoverable gap.
		if (response.status === 409) {
			const expectedSeqStr = response.headers.get(PRODUCER_EXPECTED_SEQ_HEADER);
			const expectedSeq = expectedSeqStr ? parseInt(expectedSeqStr, 10) : 0;
			if (expectedSeq < seq) {
				const waitPromises = [];
				for (let s = expectedSeq; s < seq; s++) waitPromises.push(this.#waitForSeq(epoch, s));
				await Promise.all(waitPromises);
				return this.#doSendBatch(batch, seq, epoch);
			}
			const receivedSeqStr = response.headers.get(PRODUCER_RECEIVED_SEQ_HEADER);
			const receivedSeq = receivedSeqStr ? parseInt(receivedSeqStr, 10) : seq;
			throw new SequenceGapError(expectedSeq, receivedSeq);
		}
		if (response.status === 400) {
			const error$1 = await DurableStreamError.fromResponse(response, url);
			throw error$1;
		}
		const error = await FetchError.fromResponse(response, url);
		throw error;
	}
	/**
	 * Clear pending batch and report error.
	 * Used by the abort handler; only invokes onError when messages are
	 * actually being dropped.
	 */
	#rejectPendingBatch(error) {
		if (this.#onError && this.#pendingBatch.length > 0) this.#onError(error);
		this.#pendingBatch = [];
		this.#batchBytes = 0;
		if (this.#lingerTimeout) {
			clearTimeout(this.#lingerTimeout);
			this.#lingerTimeout = null;
		}
	}
};
|
|
1852
2347
|
|
|
1853
2348
|
//#endregion
|
|
1854
2349
|
exports.BackoffDefaults = BackoffDefaults
|
|
@@ -1858,10 +2353,16 @@ exports.DurableStream = DurableStream
|
|
|
1858
2353
|
exports.DurableStreamError = DurableStreamError
|
|
1859
2354
|
exports.FetchBackoffAbortError = FetchBackoffAbortError
|
|
1860
2355
|
exports.FetchError = FetchError
|
|
2356
|
+
exports.IdempotentProducer = IdempotentProducer
|
|
1861
2357
|
exports.InvalidSignalError = InvalidSignalError
|
|
1862
2358
|
exports.LIVE_QUERY_PARAM = LIVE_QUERY_PARAM
|
|
1863
2359
|
exports.MissingStreamUrlError = MissingStreamUrlError
|
|
1864
2360
|
exports.OFFSET_QUERY_PARAM = OFFSET_QUERY_PARAM
|
|
2361
|
+
exports.PRODUCER_EPOCH_HEADER = PRODUCER_EPOCH_HEADER
|
|
2362
|
+
exports.PRODUCER_EXPECTED_SEQ_HEADER = PRODUCER_EXPECTED_SEQ_HEADER
|
|
2363
|
+
exports.PRODUCER_ID_HEADER = PRODUCER_ID_HEADER
|
|
2364
|
+
exports.PRODUCER_RECEIVED_SEQ_HEADER = PRODUCER_RECEIVED_SEQ_HEADER
|
|
2365
|
+
exports.PRODUCER_SEQ_HEADER = PRODUCER_SEQ_HEADER
|
|
1865
2366
|
exports.SSE_COMPATIBLE_CONTENT_TYPES = SSE_COMPATIBLE_CONTENT_TYPES
|
|
1866
2367
|
exports.STREAM_CURSOR_HEADER = STREAM_CURSOR_HEADER
|
|
1867
2368
|
exports.STREAM_EXPIRES_AT_HEADER = STREAM_EXPIRES_AT_HEADER
|
|
@@ -1869,7 +2370,11 @@ exports.STREAM_OFFSET_HEADER = STREAM_OFFSET_HEADER
|
|
|
1869
2370
|
exports.STREAM_SEQ_HEADER = STREAM_SEQ_HEADER
|
|
1870
2371
|
exports.STREAM_TTL_HEADER = STREAM_TTL_HEADER
|
|
1871
2372
|
exports.STREAM_UP_TO_DATE_HEADER = STREAM_UP_TO_DATE_HEADER
|
|
2373
|
+
exports.SequenceGapError = SequenceGapError
|
|
2374
|
+
exports.StaleEpochError = StaleEpochError
|
|
2375
|
+
exports._resetHttpWarningForTesting = _resetHttpWarningForTesting
|
|
1872
2376
|
exports.asAsyncIterableReadableStream = asAsyncIterableReadableStream
|
|
1873
2377
|
exports.createFetchWithBackoff = createFetchWithBackoff
|
|
1874
2378
|
exports.createFetchWithConsumedBody = createFetchWithConsumedBody
|
|
1875
|
-
exports.stream = stream
|
|
2379
|
+
exports.stream = stream
|
|
2380
|
+
exports.warnIfUsingHttpInBrowser = warnIfUsingHttpInBrowser
|