@durable-streams/client 0.1.3 → 0.1.4
- package/README.md +168 -4
- package/dist/index.cjs +431 -3
- package/dist/index.d.cts +218 -1
- package/dist/index.d.ts +218 -1
- package/dist/index.js +424 -4
- package/package.json +2 -2
- package/src/constants.ts +31 -0
- package/src/idempotent-producer.ts +642 -0
- package/src/index.ts +21 -0
- package/src/stream.ts +5 -0
- package/src/types.ts +93 -0
package/dist/index.js
CHANGED
```diff
@@ -35,6 +35,28 @@ const STREAM_TTL_HEADER = `Stream-TTL`;
  */
 const STREAM_EXPIRES_AT_HEADER = `Stream-Expires-At`;
 /**
+ * Request header for producer ID (client-supplied stable identifier).
+ */
+const PRODUCER_ID_HEADER = `Producer-Id`;
+/**
+ * Request/response header for producer epoch.
+ * Client-declared, server-validated monotonically increasing.
+ */
+const PRODUCER_EPOCH_HEADER = `Producer-Epoch`;
+/**
+ * Request header for producer sequence number.
+ * Monotonically increasing per epoch, per-batch (not per-message).
+ */
+const PRODUCER_SEQ_HEADER = `Producer-Seq`;
+/**
+ * Response header indicating expected sequence number on 409 Conflict.
+ */
+const PRODUCER_EXPECTED_SEQ_HEADER = `Producer-Expected-Seq`;
+/**
+ * Response header indicating received sequence number on 409 Conflict.
+ */
+const PRODUCER_RECEIVED_SEQ_HEADER = `Producer-Received-Seq`;
+/**
  * Query parameter for starting offset.
  */
 const OFFSET_QUERY_PARAM = `offset`;
```
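A minimal sketch of how these headers appear on the wire. The endpoint URL and payload are placeholders, not part of this diff; only the header names and the 409 contract come from the code above:

```typescript
// Hypothetical stream endpoint; header names match the new constants.
const res = await fetch("https://streams.example.com/v1/orders", {
  method: "POST",
  headers: {
    "content-type": "application/json",
    "Producer-Id": "order-service-1", // PRODUCER_ID_HEADER
    "Producer-Epoch": "0",            // PRODUCER_EPOCH_HEADER
    "Producer-Seq": "7",              // PRODUCER_SEQ_HEADER: one seq per batch
  },
  body: JSON.stringify([{ orderId: 1 }, { orderId: 2 }]), // one batch, one seq
});
if (res.status === 409) {
  // On a sequence conflict the server reports both sides of the gap.
  console.log(res.headers.get("Producer-Expected-Seq"));
  console.log(res.headers.get("Producer-Received-Seq"));
}
```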
```diff
@@ -1382,7 +1404,7 @@ async function streamInternal(options) {
  * Normalize content-type by extracting the media type (before any semicolon).
  * Handles cases like "application/json; charset=utf-8".
  */
-function normalizeContentType(contentType) {
+function normalizeContentType$1(contentType) {
   if (!contentType) return ``;
   return contentType.split(`;`)[0].trim().toLowerCase();
 }
@@ -1448,6 +1470,7 @@ var DurableStream = class DurableStream {
     url: urlStr
   };
   this.#onError = opts.onError;
+  if (opts.contentType) this.contentType = opts.contentType;
   this.#batchingEnabled = opts.batching !== false;
   if (this.#batchingEnabled) this.#queue = fastq.promise(this.#batchWorker.bind(this), 1);
   const baseFetchClient = opts.fetch ?? ((...args) => fetch(...args));
```
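The new `contentType` constructor option matters for the idempotent producer added later in this diff: `IdempotentProducer#append()` decides between JSON and byte encoding by reading `stream.contentType`, so declaring it at construction time makes object appends work immediately. A minimal sketch; the URL is a placeholder:

```typescript
import { DurableStream, IdempotentProducer } from "@durable-streams/client";

// Declaring the content type up front seeds stream.contentType,
// which append() consults to choose JSON serialization.
const stream = new DurableStream({
  url: "https://streams.example.com/v1/orders", // placeholder
  contentType: "application/json",
});

const producer = new IdempotentProducer(stream, "order-service-1");
producer.append({ orderId: 42, status: "created" }); // serialized as JSON
await producer.flush();
```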
```diff
@@ -1596,7 +1619,7 @@ var DurableStream = class DurableStream {
   const contentType = opts?.contentType ?? this.#options.contentType ?? this.contentType;
   if (contentType) requestHeaders[`content-type`] = contentType;
   if (opts?.seq) requestHeaders[STREAM_SEQ_HEADER] = opts.seq;
-  const isJson = normalizeContentType(contentType) === `application/json`;
+  const isJson = normalizeContentType$1(contentType) === `application/json`;
   const bodyToEncode = isJson ? [body] : body;
   const encodedBody = encodeBody(bodyToEncode);
   const response = await this.#fetchClient(fetchUrl.toString(), {
@@ -1662,7 +1685,7 @@ var DurableStream = class DurableStream {
       break;
     }
   if (highestSeq) requestHeaders[STREAM_SEQ_HEADER] = highestSeq;
-  const isJson = normalizeContentType(contentType) === `application/json`;
+  const isJson = normalizeContentType$1(contentType) === `application/json`;
   let batchedBody;
   if (isJson) {
     const values = batch.map((m) => m.data);
```
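Note the framing convention visible in both hunks: for JSON streams the request body is always an array. A single write is wrapped as `[body]` and a batch is the array of queued values, so the server sees uniform batch framing regardless of batch size. Illustratively (values are placeholders):

```typescript
// Both writes produce an array-framed JSON body.
const single = JSON.stringify([{ a: 1 }]);            // one append, wrapped
const batched = JSON.stringify([{ a: 1 }, { a: 2 }]); // two batched appends
```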
```diff
@@ -1902,4 +1925,401 @@ function validateOptions(options) {
 }
 
 //#endregion
-
+//#region src/idempotent-producer.ts
+/**
+ * Error thrown when a producer's epoch is stale (zombie fencing).
+ */
+var StaleEpochError = class extends Error {
+  /**
+   * The current epoch on the server.
+   */
+  currentEpoch;
+  constructor(currentEpoch) {
+    super(`Producer epoch is stale. Current server epoch: ${currentEpoch}. Call restart() or create a new producer with a higher epoch.`);
+    this.name = `StaleEpochError`;
+    this.currentEpoch = currentEpoch;
+  }
+};
+/**
+ * Error thrown when an unrecoverable sequence gap is detected.
+ *
+ * With maxInFlight > 1, HTTP requests can arrive out of order at the server,
+ * causing temporary 409 responses. The client automatically handles these
+ * by waiting for earlier sequences to complete, then retrying.
+ *
+ * This error is only thrown when the gap cannot be resolved (e.g., the
+ * expected sequence is >= our sequence, indicating a true protocol violation).
+ */
+var SequenceGapError = class extends Error {
+  expectedSeq;
+  receivedSeq;
+  constructor(expectedSeq, receivedSeq) {
+    super(`Producer sequence gap: expected ${expectedSeq}, received ${receivedSeq}`);
+    this.name = `SequenceGapError`;
+    this.expectedSeq = expectedSeq;
+    this.receivedSeq = receivedSeq;
+  }
+};
+/**
+ * Normalize content-type by extracting the media type (before any semicolon).
+ */
+function normalizeContentType(contentType) {
+  if (!contentType) return ``;
+  return contentType.split(`;`)[0].trim().toLowerCase();
+}
+/**
+ * An idempotent producer for exactly-once writes to a durable stream.
+ *
+ * Features:
+ * - Fire-and-forget: append() returns immediately, batches in background
+ * - Exactly-once: server deduplicates using (producerId, epoch, seq)
+ * - Batching: multiple appends batched into single HTTP request
+ * - Pipelining: up to maxInFlight concurrent batches
+ * - Zombie fencing: stale producers rejected via epoch validation
+ *
+ * @example
+ * ```typescript
+ * const stream = new DurableStream({ url: "https://..." });
+ * const producer = new IdempotentProducer(stream, "order-service-1", {
+ *   epoch: 0,
+ *   autoClaim: true,
+ * });
+ *
+ * // Fire-and-forget writes (synchronous, returns immediately)
+ * producer.append("message 1");
+ * producer.append("message 2");
+ *
+ * // Ensure all messages are delivered before shutdown
+ * await producer.flush();
+ * await producer.close();
+ * ```
+ */
+var IdempotentProducer = class {
+  #stream;
+  #producerId;
+  #epoch;
+  #nextSeq = 0;
+  #autoClaim;
+  #maxBatchBytes;
+  #lingerMs;
+  #fetchClient;
+  #signal;
+  #onError;
+  #pendingBatch = [];
+  #batchBytes = 0;
+  #lingerTimeout = null;
+  #queue;
+  #maxInFlight;
+  #closed = false;
+  #epochClaimed;
+  #seqState = new Map();
+  /**
+   * Create an idempotent producer for a stream.
+   *
+   * @param stream - The DurableStream to write to
+   * @param producerId - Stable identifier for this producer (e.g., "order-service-1")
+   * @param opts - Producer options
+   */
+  constructor(stream$1, producerId, opts) {
+    this.#stream = stream$1;
+    this.#producerId = producerId;
+    this.#epoch = opts?.epoch ?? 0;
+    this.#autoClaim = opts?.autoClaim ?? false;
+    this.#maxBatchBytes = opts?.maxBatchBytes ?? 1024 * 1024;
+    this.#lingerMs = opts?.lingerMs ?? 5;
+    this.#signal = opts?.signal;
+    this.#onError = opts?.onError;
+    this.#fetchClient = opts?.fetch ?? ((...args) => fetch(...args));
+    this.#maxInFlight = opts?.maxInFlight ?? 5;
+    this.#epochClaimed = !this.#autoClaim;
+    this.#queue = fastq.promise(this.#batchWorker.bind(this), this.#maxInFlight);
+    if (this.#signal) this.#signal.addEventListener(`abort`, () => {
+      this.#rejectPendingBatch(new DurableStreamError(`Producer aborted`, `ALREADY_CLOSED`, void 0, void 0));
+    }, { once: true });
+  }
+  /**
+   * Append data to the stream.
+   *
+   * This is fire-and-forget: returns immediately after adding to the batch.
+   * The message is batched and sent when:
+   * - maxBatchBytes is reached
+   * - lingerMs elapses
+   * - flush() is called
+   *
+   * Errors are reported via onError callback if configured. Use flush() to
+   * wait for all pending messages to be sent.
+   *
+   * For JSON streams, pass native objects (which will be serialized internally).
+   * For byte streams, pass string or Uint8Array.
+   *
+   * @param body - Data to append (object for JSON streams, string or Uint8Array for byte streams)
+   */
+  append(body) {
+    if (this.#closed) throw new DurableStreamError(`Producer is closed`, `ALREADY_CLOSED`, void 0, void 0);
+    const isJson = normalizeContentType(this.#stream.contentType) === `application/json`;
+    let bytes;
+    let data;
+    if (isJson) {
+      const json = JSON.stringify(body);
+      bytes = new TextEncoder().encode(json);
+      data = body;
+    } else {
+      if (typeof body === `string`) bytes = new TextEncoder().encode(body);
+      else if (body instanceof Uint8Array) bytes = body;
+      else throw new DurableStreamError(`Non-JSON streams require string or Uint8Array`, `BAD_REQUEST`, 400, void 0);
+      data = bytes;
+    }
+    this.#pendingBatch.push({
+      data,
+      body: bytes
+    });
+    this.#batchBytes += bytes.length;
+    if (this.#batchBytes >= this.#maxBatchBytes) this.#enqueuePendingBatch();
+    else if (!this.#lingerTimeout) this.#lingerTimeout = setTimeout(() => {
+      this.#lingerTimeout = null;
+      if (this.#pendingBatch.length > 0) this.#enqueuePendingBatch();
+    }, this.#lingerMs);
+  }
+  /**
+   * Send any pending batch immediately and wait for all in-flight batches.
+   *
+   * Call this before shutdown to ensure all messages are delivered.
+   */
+  async flush() {
+    if (this.#lingerTimeout) {
+      clearTimeout(this.#lingerTimeout);
+      this.#lingerTimeout = null;
+    }
+    if (this.#pendingBatch.length > 0) this.#enqueuePendingBatch();
+    await this.#queue.drained();
+  }
+  /**
+   * Flush pending messages and close the producer.
+   *
+   * After calling close(), further append() calls will throw.
+   */
+  async close() {
+    if (this.#closed) return;
+    this.#closed = true;
+    try {
+      await this.flush();
+    } catch {}
+  }
+  /**
+   * Increment epoch and reset sequence.
+   *
+   * Call this when restarting the producer to establish a new session.
+   * Flushes any pending messages first.
+   */
+  async restart() {
+    await this.flush();
+    this.#epoch++;
+    this.#nextSeq = 0;
+  }
+  /**
+   * Current epoch for this producer.
+   */
+  get epoch() {
+    return this.#epoch;
+  }
+  /**
+   * Next sequence number to be assigned.
+   */
+  get nextSeq() {
+    return this.#nextSeq;
+  }
+  /**
+   * Number of messages in the current pending batch.
+   */
+  get pendingCount() {
+    return this.#pendingBatch.length;
+  }
+  /**
+   * Number of batches currently in flight.
+   */
+  get inFlightCount() {
+    return this.#queue.length();
+  }
+  /**
+   * Enqueue the current pending batch for processing.
+   */
+  #enqueuePendingBatch() {
+    if (this.#pendingBatch.length === 0) return;
+    const batch = this.#pendingBatch;
+    const seq = this.#nextSeq;
+    this.#pendingBatch = [];
+    this.#batchBytes = 0;
+    this.#nextSeq++;
+    if (this.#autoClaim && !this.#epochClaimed && this.#queue.length() > 0) this.#queue.drained().then(() => {
+      this.#queue.push({
+        batch,
+        seq
+      }).catch(() => {});
+    });
+    else this.#queue.push({
+      batch,
+      seq
+    }).catch(() => {});
+  }
+  /**
+   * Batch worker - processes batches via fastq.
+   */
+  async #batchWorker(task) {
+    const { batch, seq } = task;
+    const epoch = this.#epoch;
+    try {
+      await this.#doSendBatch(batch, seq, epoch);
+      if (!this.#epochClaimed) this.#epochClaimed = true;
+      this.#signalSeqComplete(epoch, seq, void 0);
+    } catch (error) {
+      this.#signalSeqComplete(epoch, seq, error);
+      if (this.#onError) this.#onError(error);
+      throw error;
+    }
+  }
+  /**
+   * Signal that a sequence has completed (success or failure).
+   */
+  #signalSeqComplete(epoch, seq, error) {
+    let epochMap = this.#seqState.get(epoch);
+    if (!epochMap) {
+      epochMap = new Map();
+      this.#seqState.set(epoch, epochMap);
+    }
+    const state = epochMap.get(seq);
+    if (state) {
+      state.resolved = true;
+      state.error = error;
+      for (const waiter of state.waiters) waiter(error);
+      state.waiters = [];
+    } else epochMap.set(seq, {
+      resolved: true,
+      error,
+      waiters: []
+    });
+    const cleanupThreshold = seq - this.#maxInFlight * 3;
+    if (cleanupThreshold > 0) {
+      for (const oldSeq of epochMap.keys()) if (oldSeq < cleanupThreshold) epochMap.delete(oldSeq);
+    }
+  }
+  /**
+   * Wait for a specific sequence to complete.
+   * Returns immediately if already completed.
+   * Throws if the sequence failed.
+   */
+  #waitForSeq(epoch, seq) {
+    let epochMap = this.#seqState.get(epoch);
+    if (!epochMap) {
+      epochMap = new Map();
+      this.#seqState.set(epoch, epochMap);
+    }
+    const state = epochMap.get(seq);
+    if (state?.resolved) {
+      if (state.error) return Promise.reject(state.error);
+      return Promise.resolve();
+    }
+    return new Promise((resolve, reject) => {
+      const waiter = (err) => {
+        if (err) reject(err);
+        else resolve();
+      };
+      if (state) state.waiters.push(waiter);
+      else epochMap.set(seq, {
+        resolved: false,
+        waiters: [waiter]
+      });
+    });
+  }
+  /**
+   * Actually send the batch to the server.
+   * Handles auto-claim retry on 403 (stale epoch) if autoClaim is enabled.
+   * Does NOT implement general retry/backoff for network errors or 5xx responses.
+   */
+  async #doSendBatch(batch, seq, epoch) {
+    const contentType = this.#stream.contentType ?? `application/octet-stream`;
+    const isJson = normalizeContentType(contentType) === `application/json`;
+    let batchedBody;
+    if (isJson) {
+      const values = batch.map((e) => e.data);
+      batchedBody = JSON.stringify(values);
+    } else {
+      const totalSize = batch.reduce((sum, e) => sum + e.body.length, 0);
+      const concatenated = new Uint8Array(totalSize);
+      let offset = 0;
+      for (const entry of batch) {
+        concatenated.set(entry.body, offset);
+        offset += entry.body.length;
+      }
+      batchedBody = concatenated;
+    }
+    const url = this.#stream.url;
+    const headers = {
+      "content-type": contentType,
+      [PRODUCER_ID_HEADER]: this.#producerId,
+      [PRODUCER_EPOCH_HEADER]: epoch.toString(),
+      [PRODUCER_SEQ_HEADER]: seq.toString()
+    };
+    const response = await this.#fetchClient(url, {
+      method: `POST`,
+      headers,
+      body: batchedBody,
+      signal: this.#signal
+    });
+    if (response.status === 204) return {
+      offset: ``,
+      duplicate: true
+    };
+    if (response.status === 200) {
+      const resultOffset = response.headers.get(STREAM_OFFSET_HEADER) ?? ``;
+      return {
+        offset: resultOffset,
+        duplicate: false
+      };
+    }
+    if (response.status === 403) {
+      const currentEpochStr = response.headers.get(PRODUCER_EPOCH_HEADER);
+      const currentEpoch = currentEpochStr ? parseInt(currentEpochStr, 10) : epoch;
+      if (this.#autoClaim) {
+        const newEpoch = currentEpoch + 1;
+        this.#epoch = newEpoch;
+        this.#nextSeq = 1;
+        return this.#doSendBatch(batch, 0, newEpoch);
+      }
+      throw new StaleEpochError(currentEpoch);
+    }
+    if (response.status === 409) {
+      const expectedSeqStr = response.headers.get(PRODUCER_EXPECTED_SEQ_HEADER);
+      const expectedSeq = expectedSeqStr ? parseInt(expectedSeqStr, 10) : 0;
+      if (expectedSeq < seq) {
+        const waitPromises = [];
+        for (let s = expectedSeq; s < seq; s++) waitPromises.push(this.#waitForSeq(epoch, s));
+        await Promise.all(waitPromises);
+        return this.#doSendBatch(batch, seq, epoch);
+      }
+      const receivedSeqStr = response.headers.get(PRODUCER_RECEIVED_SEQ_HEADER);
+      const receivedSeq = receivedSeqStr ? parseInt(receivedSeqStr, 10) : seq;
+      throw new SequenceGapError(expectedSeq, receivedSeq);
+    }
+    if (response.status === 400) {
+      const error$1 = await DurableStreamError.fromResponse(response, url);
+      throw error$1;
+    }
+    const error = await FetchError.fromResponse(response, url);
+    throw error;
+  }
+  /**
+   * Clear pending batch and report error.
+   */
+  #rejectPendingBatch(error) {
+    if (this.#onError && this.#pendingBatch.length > 0) this.#onError(error);
+    this.#pendingBatch = [];
+    this.#batchBytes = 0;
+    if (this.#lingerTimeout) {
+      clearTimeout(this.#lingerTimeout);
+      this.#lingerTimeout = null;
+    }
+  }
+};
+
+//#endregion
+export { BackoffDefaults, CURSOR_QUERY_PARAM, DURABLE_STREAM_PROTOCOL_QUERY_PARAMS, DurableStream, DurableStreamError, FetchBackoffAbortError, FetchError, IdempotentProducer, InvalidSignalError, LIVE_QUERY_PARAM, MissingStreamUrlError, OFFSET_QUERY_PARAM, PRODUCER_EPOCH_HEADER, PRODUCER_EXPECTED_SEQ_HEADER, PRODUCER_ID_HEADER, PRODUCER_RECEIVED_SEQ_HEADER, PRODUCER_SEQ_HEADER, SSE_COMPATIBLE_CONTENT_TYPES, STREAM_CURSOR_HEADER, STREAM_EXPIRES_AT_HEADER, STREAM_OFFSET_HEADER, STREAM_SEQ_HEADER, STREAM_TTL_HEADER, STREAM_UP_TO_DATE_HEADER, SequenceGapError, StaleEpochError, _resetHttpWarningForTesting, asAsyncIterableReadableStream, createFetchWithBackoff, createFetchWithConsumedBody, stream, warnIfUsingHttpInBrowser };
```
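Because `append()` is fire-and-forget, failures from `#doSendBatch` surface through the `onError` callback rather than a rejected promise. A sketch of handling the two new error classes; the URL and epoch are placeholders:

```typescript
import {
  DurableStream,
  IdempotentProducer,
  SequenceGapError,
  StaleEpochError,
} from "@durable-streams/client";

const stream = new DurableStream({ url: "https://streams.example.com/v1/orders" }); // placeholder
const producer = new IdempotentProducer(stream, "order-service-1", {
  epoch: 3, // illustrative
  onError: (err) => {
    if (err instanceof StaleEpochError) {
      // Another instance claimed a higher epoch; this producer is fenced.
      console.error(`fenced: server epoch is ${err.currentEpoch}`);
    } else if (err instanceof SequenceGapError) {
      // Unrecoverable gap (expected >= our seq): a true protocol violation.
      console.error(`gap: expected ${err.expectedSeq}, received ${err.receivedSeq}`);
    }
  },
});

producer.append("event");
await producer.close(); // flushes; subsequent append() calls throw
```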
package/package.json
CHANGED
```diff
@@ -1,7 +1,7 @@
 {
   "name": "@durable-streams/client",
   "description": "TypeScript client for the Durable Streams protocol",
-  "version": "0.1.3",
+  "version": "0.1.4",
   "author": "Durable Stream contributors",
   "license": "Apache-2.0",
   "repository": {
@@ -47,7 +47,7 @@
   "devDependencies": {
     "fast-check": "^4.4.0",
     "tsdown": "^0.9.0",
-    "@durable-streams/server": "0.1.
+    "@durable-streams/server": "0.1.5"
   },
   "engines": {
     "node": ">=18.0.0"
```
package/src/constants.ts
CHANGED
```diff
@@ -46,6 +46,37 @@ export const STREAM_TTL_HEADER = `Stream-TTL`
  */
 export const STREAM_EXPIRES_AT_HEADER = `Stream-Expires-At`
 
+// ============================================================================
+// Idempotent Producer Headers
+// ============================================================================
+
+/**
+ * Request header for producer ID (client-supplied stable identifier).
+ */
+export const PRODUCER_ID_HEADER = `Producer-Id`
+
+/**
+ * Request/response header for producer epoch.
+ * Client-declared, server-validated monotonically increasing.
+ */
+export const PRODUCER_EPOCH_HEADER = `Producer-Epoch`
+
+/**
+ * Request header for producer sequence number.
+ * Monotonically increasing per epoch, per-batch (not per-message).
+ */
+export const PRODUCER_SEQ_HEADER = `Producer-Seq`
+
+/**
+ * Response header indicating expected sequence number on 409 Conflict.
+ */
+export const PRODUCER_EXPECTED_SEQ_HEADER = `Producer-Expected-Seq`
+
+/**
+ * Response header indicating received sequence number on 409 Conflict.
+ */
+export const PRODUCER_RECEIVED_SEQ_HEADER = `Producer-Received-Seq`
+
 // ============================================================================
 // Query Parameters
 // ============================================================================
```
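As the `Producer-Seq` comment notes, sequence numbers are assigned per batch, not per message. A sketch of what that means observationally, using the producer's public getters (defaults in this release: `lingerMs` 5, `maxBatchBytes` 1 MiB; the URL is a placeholder):

```typescript
import { DurableStream, IdempotentProducer } from "@durable-streams/client";

const stream = new DurableStream({ url: "https://streams.example.com/v1/orders" }); // placeholder
const producer = new IdempotentProducer(stream, "svc-1");

producer.append("a");
producer.append("b");
producer.append("c");               // three messages, one pending batch
console.log(producer.pendingCount); // 3
console.log(producer.nextSeq);      // 0 — nothing enqueued yet

await producer.flush();             // one POST with Producer-Seq: 0
console.log(producer.nextSeq);      // 1 — the seq advanced once per batch
```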