@durable-streams/server 0.1.3 → 0.1.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -35,6 +35,10 @@ const __durable_streams_state = __toESM(require("@durable-streams/state"));
 
  //#region src/store.ts
  /**
+ * TTL for in-memory producer state cleanup (7 days).
+ */
+ const PRODUCER_STATE_TTL_MS = 7 * 24 * 60 * 60 * 1e3;
+ /**
  * Normalize content-type by extracting the media type (before any semicolon).
  * Handles cases like "application/json; charset=utf-8".
  */
@@ -81,13 +85,15 @@ function formatJsonResponse(data) {
  const wrapped = `[${text}]`;
  return new TextEncoder().encode(wrapped);
  }
- /**
- * In-memory store for durable streams.
- */
  var StreamStore = class {
  streams = new Map();
  pendingLongPolls = [];
  /**
+ * Per-producer locks for serializing validation+append operations.
+ * Key: "{streamPath}:{producerId}"
+ */
+ producerLocks = new Map();
+ /**
  * Check if a stream is expired based on TTL or Expires-At.
  */
  isExpired(stream) {
@@ -163,6 +169,108 @@ var StreamStore = class {
  return this.streams.delete(path);
  }
  /**
+ * Validate producer state WITHOUT mutating.
+ * Returns proposed state to commit after successful append.
+ * Implements Kafka-style idempotent producer validation.
+ *
+ * IMPORTANT: This function does NOT mutate producer state. The caller must
+ * call commitProducerState() after successful append to apply the mutation.
+ * This ensures atomicity: if append fails (e.g., JSON validation), producer
+ * state is not incorrectly advanced.
+ */
+ validateProducer(stream, producerId, epoch, seq) {
+ if (!stream.producers) stream.producers = new Map();
+ this.cleanupExpiredProducers(stream);
+ const state = stream.producers.get(producerId);
+ const now = Date.now();
+ if (!state) {
+ if (seq !== 0) return {
+ status: `sequence_gap`,
+ expectedSeq: 0,
+ receivedSeq: seq
+ };
+ return {
+ status: `accepted`,
+ isNew: true,
+ producerId,
+ proposedState: {
+ epoch,
+ lastSeq: 0,
+ lastUpdated: now
+ }
+ };
+ }
+ if (epoch < state.epoch) return {
+ status: `stale_epoch`,
+ currentEpoch: state.epoch
+ };
+ if (epoch > state.epoch) {
+ if (seq !== 0) return { status: `invalid_epoch_seq` };
+ return {
+ status: `accepted`,
+ isNew: true,
+ producerId,
+ proposedState: {
+ epoch,
+ lastSeq: 0,
+ lastUpdated: now
+ }
+ };
+ }
+ if (seq <= state.lastSeq) return {
+ status: `duplicate`,
+ lastSeq: state.lastSeq
+ };
+ if (seq === state.lastSeq + 1) return {
+ status: `accepted`,
+ isNew: false,
+ producerId,
+ proposedState: {
+ epoch,
+ lastSeq: seq,
+ lastUpdated: now
+ }
+ };
+ return {
+ status: `sequence_gap`,
+ expectedSeq: state.lastSeq + 1,
+ receivedSeq: seq
+ };
+ }
+ /**
+ * Commit producer state after successful append.
+ * This is the only place where producer state is mutated.
+ */
+ commitProducerState(stream, result) {
+ if (result.status !== `accepted`) return;
+ stream.producers.set(result.producerId, result.proposedState);
+ }
+ /**
+ * Clean up expired producer states from a stream.
+ */
+ cleanupExpiredProducers(stream) {
+ if (!stream.producers) return;
+ const now = Date.now();
+ for (const [id, state] of stream.producers) if (now - state.lastUpdated > PRODUCER_STATE_TTL_MS) stream.producers.delete(id);
+ }
+ /**
+ * Acquire a lock for serialized producer operations.
+ * Returns a release function.
+ */
+ async acquireProducerLock(path, producerId) {
+ const lockKey = `${path}:${producerId}`;
+ while (this.producerLocks.has(lockKey)) await this.producerLocks.get(lockKey);
+ let releaseLock;
+ const lockPromise = new Promise((resolve) => {
+ releaseLock = resolve;
+ });
+ this.producerLocks.set(lockKey, lockPromise);
+ return () => {
+ this.producerLocks.delete(lockKey);
+ releaseLock();
+ };
+ }
+ /**
  * Append data to a stream.
  * @throws Error if stream doesn't exist or is expired
  * @throws Error if seq is lower than lastSeq
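The acquireProducerLock method added in the hunk above serializes concurrent validate+append work for a single producer by chaining promises through a Map rather than using a real mutex. A minimal standalone sketch of that same pattern, with illustrative names that are not part of the package:

const locks = new Map();

async function withLock(key, fn) {
  // Queue behind the current holder of this key, then take the key ourselves.
  while (locks.has(key)) await locks.get(key);
  let release;
  locks.set(key, new Promise((resolve) => {
    release = resolve;
  }));
  try {
    return await fn();
  } finally {
    locks.delete(key);
    release();
  }
}

async function doAppend(n) {
  // Placeholder for the validateProducer + append + commitProducerState sequence.
  await new Promise((resolve) => setTimeout(resolve, 10));
  console.log(`append ${n} done`);
}

// Both calls use the same key, so they run strictly one after the other.
withLock(`/orders:worker-1`, () => doAppend(1));
withLock(`/orders:worker-1`, () => doAppend(2));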
@@ -176,15 +284,56 @@ var StreamStore = class {
  const streamType = normalizeContentType(stream.contentType);
  if (providedType !== streamType) throw new Error(`Content-type mismatch: expected ${stream.contentType}, got ${options.contentType}`);
  }
+ let producerResult;
+ if (options.producerId !== void 0 && options.producerEpoch !== void 0 && options.producerSeq !== void 0) {
+ producerResult = this.validateProducer(stream, options.producerId, options.producerEpoch, options.producerSeq);
+ if (producerResult.status !== `accepted`) return {
+ message: null,
+ producerResult
+ };
+ }
  if (options.seq !== void 0) {
  if (stream.lastSeq !== void 0 && options.seq <= stream.lastSeq) throw new Error(`Sequence conflict: ${options.seq} <= ${stream.lastSeq}`);
- stream.lastSeq = options.seq;
  }
  const message = this.appendToStream(stream, data);
+ if (producerResult) this.commitProducerState(stream, producerResult);
+ if (options.seq !== void 0) stream.lastSeq = options.seq;
  this.notifyLongPolls(path);
+ if (producerResult) return {
+ message,
+ producerResult
+ };
  return message;
  }
  /**
+ * Append with producer serialization for concurrent request handling.
+ * This ensures that validation+append is atomic per producer.
+ */
+ async appendWithProducer(path, data, options) {
+ if (!options.producerId) {
+ const result = this.append(path, data, options);
+ if (`message` in result) return result;
+ return { message: result };
+ }
+ const releaseLock = await this.acquireProducerLock(path, options.producerId);
+ try {
+ const result = this.append(path, data, options);
+ if (`message` in result) return result;
+ return { message: result };
+ } finally {
+ releaseLock();
+ }
+ }
+ /**
+ * Get the current epoch for a producer on a stream.
+ * Returns undefined if the producer doesn't exist or stream not found.
+ */
+ getProducerEpoch(path, producerId) {
+ const stream = this.getIfNotExpired(path);
+ if (!stream?.producers) return void 0;
+ return stream.producers.get(producerId)?.epoch;
+ }
+ /**
  * Read messages from a stream starting at the given offset.
  * @throws Error if stream doesn't exist or is expired
  */
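A hedged sketch of how a caller might consume the result shape returned by the new appendWithProducer. It assumes a store instance whose stream at /logs already exists with a matching JSON content type; the path, producer id, and values are illustrative:

const data = new TextEncoder().encode(JSON.stringify({ hello: `world` }));
const result = await store.appendWithProducer(`/logs`, data, {
  contentType: `application/json`,
  producerId: `worker-1`,
  producerEpoch: 0,
  producerSeq: 0
});
if (result.producerResult && result.producerResult.status !== `accepted`) {
  // e.g. `duplicate` (safe to ignore) or `sequence_gap` (resend from expectedSeq)
  console.warn(`append rejected:`, result.producerResult.status);
} else {
  console.log(`appended at offset`, result.message.offset);
}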
@@ -545,6 +694,11 @@ var FileBackedStreamStore = class {
  fileHandlePool;
  pendingLongPolls = [];
  dataDir;
+ /**
+ * Per-producer locks for serializing validation+append operations.
+ * Key: "{streamPath}:{producerId}"
+ */
+ producerLocks = new Map();
  constructor(options) {
  this.dataDir = options.dataDir;
  this.db = (0, lmdb.open)({
@@ -626,6 +780,11 @@ var FileBackedStreamStore = class {
  * Convert LMDB metadata to Stream object.
  */
  streamMetaToStream(meta) {
+ let producers;
+ if (meta.producers) {
+ producers = new Map();
+ for (const [id, state] of Object.entries(meta.producers)) producers.set(id, { ...state });
+ }
  return {
  path: meta.path,
  contentType: meta.contentType,
@@ -634,10 +793,103 @@ var FileBackedStreamStore = class {
  lastSeq: meta.lastSeq,
  ttlSeconds: meta.ttlSeconds,
  expiresAt: meta.expiresAt,
- createdAt: meta.createdAt
+ createdAt: meta.createdAt,
+ producers
+ };
+ }
+ /**
+ * Validate producer state WITHOUT mutating.
+ * Returns proposed state to commit after successful append.
+ *
+ * IMPORTANT: This function does NOT mutate producer state. The caller must
+ * commit the proposedState after successful append (file write + fsync + LMDB).
+ * This ensures atomicity: if any step fails, producer state is not advanced.
+ */
+ validateProducer(meta, producerId, epoch, seq) {
+ if (!meta.producers) meta.producers = {};
+ const state = meta.producers[producerId];
+ const now = Date.now();
+ if (!state) {
+ if (seq !== 0) return {
+ status: `sequence_gap`,
+ expectedSeq: 0,
+ receivedSeq: seq
+ };
+ return {
+ status: `accepted`,
+ isNew: true,
+ producerId,
+ proposedState: {
+ epoch,
+ lastSeq: 0,
+ lastUpdated: now
+ }
+ };
+ }
+ if (epoch < state.epoch) return {
+ status: `stale_epoch`,
+ currentEpoch: state.epoch
+ };
+ if (epoch > state.epoch) {
+ if (seq !== 0) return { status: `invalid_epoch_seq` };
+ return {
+ status: `accepted`,
+ isNew: true,
+ producerId,
+ proposedState: {
+ epoch,
+ lastSeq: 0,
+ lastUpdated: now
+ }
+ };
+ }
+ if (seq <= state.lastSeq) return {
+ status: `duplicate`,
+ lastSeq: state.lastSeq
+ };
+ if (seq === state.lastSeq + 1) return {
+ status: `accepted`,
+ isNew: false,
+ producerId,
+ proposedState: {
+ epoch,
+ lastSeq: seq,
+ lastUpdated: now
+ }
+ };
+ return {
+ status: `sequence_gap`,
+ expectedSeq: state.lastSeq + 1,
+ receivedSeq: seq
  };
  }
  /**
+ * Acquire a lock for serialized producer operations.
+ * Returns a release function.
+ */
+ async acquireProducerLock(streamPath, producerId) {
+ const lockKey = `${streamPath}:${producerId}`;
+ while (this.producerLocks.has(lockKey)) await this.producerLocks.get(lockKey);
+ let releaseLock;
+ const lockPromise = new Promise((resolve) => {
+ releaseLock = resolve;
+ });
+ this.producerLocks.set(lockKey, lockPromise);
+ return () => {
+ this.producerLocks.delete(lockKey);
+ releaseLock();
+ };
+ }
+ /**
+ * Get the current epoch for a producer on a stream.
+ * Returns undefined if the producer doesn't exist or stream not found.
+ */
+ getProducerEpoch(streamPath, producerId) {
+ const meta = this.getMetaIfNotExpired(streamPath);
+ if (!meta?.producers) return void 0;
+ return meta.producers[producerId]?.epoch;
+ }
+ /**
  * Check if a stream is expired based on TTL or Expires-At.
  */
  isExpired(meta) {
@@ -747,6 +999,14 @@ var FileBackedStreamStore = class {
  const streamType = normalizeContentType(streamMeta.contentType);
  if (providedType !== streamType) throw new Error(`Content-type mismatch: expected ${streamMeta.contentType}, got ${options.contentType}`);
  }
+ let producerResult;
+ if (options.producerId !== void 0 && options.producerEpoch !== void 0 && options.producerSeq !== void 0) {
+ producerResult = this.validateProducer(streamMeta, options.producerId, options.producerEpoch, options.producerSeq);
+ if (producerResult.status !== `accepted`) return {
+ message: null,
+ producerResult
+ };
+ }
  if (options.seq !== void 0) {
  if (streamMeta.lastSeq !== void 0 && options.seq <= streamMeta.lastSeq) throw new Error(`Sequence conflict: ${options.seq} <= ${streamMeta.lastSeq}`);
  }
@@ -782,17 +1042,43 @@ var FileBackedStreamStore = class {
  timestamp: Date.now()
  };
  await this.fileHandlePool.fsyncFile(segmentPath);
+ const updatedProducers = { ...streamMeta.producers };
+ if (producerResult && producerResult.status === `accepted`) updatedProducers[producerResult.producerId] = producerResult.proposedState;
  const updatedMeta = {
  ...streamMeta,
  currentOffset: newOffset,
  lastSeq: options.seq ?? streamMeta.lastSeq,
- totalBytes: streamMeta.totalBytes + processedData.length + 5
+ totalBytes: streamMeta.totalBytes + processedData.length + 5,
+ producers: updatedProducers
  };
  const key = `stream:${streamPath}`;
  this.db.putSync(key, updatedMeta);
  this.notifyLongPolls(streamPath);
+ if (producerResult) return {
+ message,
+ producerResult
+ };
  return message;
  }
+ /**
+ * Append with producer serialization for concurrent request handling.
+ * This ensures that validation+append is atomic per producer.
+ */
+ async appendWithProducer(streamPath, data, options) {
+ if (!options.producerId) {
+ const result = await this.append(streamPath, data, options);
+ if (result && `message` in result) return result;
+ return { message: result };
+ }
+ const releaseLock = await this.acquireProducerLock(streamPath, options.producerId);
+ try {
+ const result = await this.append(streamPath, data, options);
+ if (result && `message` in result) return result;
+ return { message: result };
+ } finally {
+ releaseLock();
+ }
+ }
  read(streamPath, offset) {
  const streamMeta = this.getMetaIfNotExpired(streamPath);
  if (!streamMeta) throw new Error(`Stream not found: ${streamPath}`);
@@ -1066,6 +1352,11 @@ const STREAM_UP_TO_DATE_HEADER = `Stream-Up-To-Date`;
  const STREAM_SEQ_HEADER = `Stream-Seq`;
  const STREAM_TTL_HEADER = `Stream-TTL`;
  const STREAM_EXPIRES_AT_HEADER = `Stream-Expires-At`;
+ const PRODUCER_ID_HEADER = `Producer-Id`;
+ const PRODUCER_EPOCH_HEADER = `Producer-Epoch`;
+ const PRODUCER_SEQ_HEADER = `Producer-Seq`;
+ const PRODUCER_EXPECTED_SEQ_HEADER = `Producer-Expected-Seq`;
+ const PRODUCER_RECEIVED_SEQ_HEADER = `Producer-Received-Seq`;
  const SSE_OFFSET_FIELD = `streamNextOffset`;
  const SSE_CURSOR_FIELD = `streamCursor`;
  const SSE_UP_TO_DATE_FIELD = `upToDate`;
@@ -1075,10 +1366,12 @@ const CURSOR_QUERY_PARAM = `cursor`;
  /**
  * Encode data for SSE format.
  * Per SSE spec, each line in the payload needs its own "data:" prefix.
- * Newlines in the payload become separate data: lines.
+ * Line terminators in the payload (CR, LF, or CRLF) become separate data: lines.
+ * This prevents CRLF injection attacks where malicious payloads could inject
+ * fake SSE events using CR-only line terminators.
  */
  function encodeSSEData(payload) {
- const lines = payload.split(`\n`);
+ const lines = payload.split(/\r\n|\r|\n/);
  return lines.map((line) => `data: ${line}`).join(`\n`) + `\n\n`;
  }
  /**
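The encodeSSEData change above switches from splitting on LF only to splitting on CR, LF, or CRLF. A standalone illustration of the effect (the function body is copied verbatim; the payload is illustrative):

function encodeSSEData(payload) {
  const lines = payload.split(/\r\n|\r|\n/);
  return lines.map((line) => `data: ${line}`).join(`\n`) + `\n\n`;
}
// A CR-only terminator now becomes a line break inside `data:` lines instead of
// smuggling a raw line into the SSE frame:
console.log(encodeSSEData(`hello\revent: fake`));
// -> "data: hello\ndata: event: fake\n\n"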
@@ -1117,8 +1410,8 @@ var DurableStreamTestServer = class {
  _url = null;
  activeSSEResponses = new Set();
  isShuttingDown = false;
- /** Injected errors for testing retry/resilience */
- injectedErrors = new Map();
+ /** Injected faults for testing retry/resilience */
+ injectedFaults = new Map();
  constructor(options = {}) {
  if (options.dataDir) this.store = new FileBackedStreamStore({ dataDir: options.dataDir });
  else this.store = new StreamStore();
@@ -1203,30 +1496,71 @@ var DurableStreamTestServer = class {
  /**
  * Inject an error to be returned on the next N requests to a path.
  * Used for testing retry/resilience behavior.
+ * @deprecated Use injectFault for full fault injection capabilities
  */
  injectError(path, status, count = 1, retryAfter) {
- this.injectedErrors.set(path, {
+ this.injectedFaults.set(path, {
  status,
  count,
  retryAfter
  });
  }
  /**
- * Clear all injected errors.
+ * Inject a fault to be triggered on the next N requests to a path.
+ * Supports various fault types: delays, connection drops, body corruption, etc.
+ */
+ injectFault(path, fault) {
+ this.injectedFaults.set(path, {
+ count: 1,
+ ...fault
+ });
+ }
+ /**
+ * Clear all injected faults.
+ */
+ clearInjectedFaults() {
+ this.injectedFaults.clear();
+ }
+ /**
+ * Check if there's an injected fault for this path/method and consume it.
+ * Returns the fault config if one should be triggered, null otherwise.
  */
- clearInjectedErrors() {
- this.injectedErrors.clear();
+ consumeInjectedFault(path, method) {
+ const fault = this.injectedFaults.get(path);
+ if (!fault) return null;
+ if (fault.method && fault.method.toUpperCase() !== method.toUpperCase()) return null;
+ if (fault.probability !== void 0 && Math.random() > fault.probability) return null;
+ fault.count--;
+ if (fault.count <= 0) this.injectedFaults.delete(path);
+ return fault;
  }
  /**
- * Check if there's an injected error for this path and consume it.
- * Returns the error config if one should be returned, null otherwise.
+ * Apply delay from fault config (including jitter).
  */
- consumeInjectedError(path) {
- const error = this.injectedErrors.get(path);
- if (!error) return null;
- error.count--;
- if (error.count <= 0) this.injectedErrors.delete(path);
- return error;
+ async applyFaultDelay(fault) {
+ if (fault.delayMs !== void 0 && fault.delayMs > 0) {
+ const jitter = fault.jitterMs ? Math.random() * fault.jitterMs : 0;
+ await new Promise((resolve) => setTimeout(resolve, fault.delayMs + jitter));
+ }
+ }
+ /**
+ * Apply body modifications from stored fault (truncation, corruption).
+ * Returns modified body, or original if no modifications needed.
+ */
+ applyFaultBodyModification(res, body) {
+ const fault = res._injectedFault;
+ if (!fault) return body;
+ let modified = body;
+ if (fault.truncateBodyBytes !== void 0 && modified.length > fault.truncateBodyBytes) modified = modified.slice(0, fault.truncateBodyBytes);
+ if (fault.corruptBody && modified.length > 0) {
+ modified = new Uint8Array(modified);
+ const numCorrupt = Math.max(1, Math.floor(modified.length * .03));
+ for (let i = 0; i < numCorrupt; i++) {
+ const pos = Math.floor(Math.random() * modified.length);
+ modified[pos] = modified[pos] ^ 1 << Math.floor(Math.random() * 8);
+ }
+ }
+ return modified;
  }
  async handleRequest(req, res) {
  const url = new URL(req.url ?? `/`, `http://${req.headers.host}`);
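A hedged sketch of driving the new injectFault API from a test. The server instance, paths, and values are illustrative; the option names follow the fault-handling code above:

// Respond 503 (with retry-after: 1) to the next two POSTs on this path.
server.injectFault(`/streams/orders`, {
  status: 503,
  retryAfter: 1,
  count: 2,
  method: `POST`
});
// Delay the next response by ~250ms (plus up to 100ms jitter), then truncate its body.
server.injectFault(`/streams/metrics`, {
  delayMs: 250,
  jitterMs: 100,
  truncateBodyBytes: 16
});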
@@ -1234,8 +1568,10 @@ var DurableStreamTestServer = class {
  const method = req.method?.toUpperCase();
  res.setHeader(`access-control-allow-origin`, `*`);
  res.setHeader(`access-control-allow-methods`, `GET, POST, PUT, DELETE, HEAD, OPTIONS`);
- res.setHeader(`access-control-allow-headers`, `content-type, authorization, Stream-Seq, Stream-TTL, Stream-Expires-At`);
- res.setHeader(`access-control-expose-headers`, `Stream-Next-Offset, Stream-Cursor, Stream-Up-To-Date, etag, content-type, content-encoding, vary`);
+ res.setHeader(`access-control-allow-headers`, `content-type, authorization, Stream-Seq, Stream-TTL, Stream-Expires-At, Producer-Id, Producer-Epoch, Producer-Seq`);
+ res.setHeader(`access-control-expose-headers`, `Stream-Next-Offset, Stream-Cursor, Stream-Up-To-Date, Producer-Epoch, Producer-Seq, Producer-Expected-Seq, Producer-Received-Seq, etag, content-type, content-encoding, vary`);
+ res.setHeader(`x-content-type-options`, `nosniff`);
+ res.setHeader(`cross-origin-resource-policy`, `cross-origin`);
  if (method === `OPTIONS`) {
  res.writeHead(204);
  res.end();
@@ -1245,13 +1581,21 @@ var DurableStreamTestServer = class {
  await this.handleTestInjectError(method, req, res);
  return;
  }
- const injectedError = this.consumeInjectedError(path);
- if (injectedError) {
- const headers = { "content-type": `text/plain` };
- if (injectedError.retryAfter !== void 0) headers[`retry-after`] = injectedError.retryAfter.toString();
- res.writeHead(injectedError.status, headers);
- res.end(`Injected error for testing`);
- return;
+ const fault = this.consumeInjectedFault(path, method ?? `GET`);
+ if (fault) {
+ await this.applyFaultDelay(fault);
+ if (fault.dropConnection) {
+ res.socket?.destroy();
+ return;
+ }
+ if (fault.status !== void 0) {
+ const headers = { "content-type": `text/plain` };
+ if (fault.retryAfter !== void 0) headers[`retry-after`] = fault.retryAfter.toString();
+ res.writeHead(fault.status, headers);
+ res.end(`Injected error for testing`);
+ return;
+ }
+ if (fault.truncateBodyBytes !== void 0 || fault.corruptBody) res._injectedFault = fault;
  }
  try {
  switch (method) {
@@ -1366,7 +1710,10 @@ var DurableStreamTestServer = class {
  res.end();
  return;
  }
- const headers = { [STREAM_OFFSET_HEADER]: stream.currentOffset };
+ const headers = {
+ [STREAM_OFFSET_HEADER]: stream.currentOffset,
+ "cache-control": `no-store`
+ };
  if (stream.contentType) headers[`content-type`] = stream.contentType;
  headers[`etag`] = `"${Buffer.from(path).toString(`base64`)}:-1:${stream.currentOffset}"`;
  res.writeHead(200, headers);
@@ -1397,7 +1744,7 @@ var DurableStreamTestServer = class {
  res.end(`Multiple offset parameters not allowed`);
  return;
  }
- const validOffsetPattern = /^(-1|\d+_\d+)$/;
+ const validOffsetPattern = /^(-1|now|\d+_\d+)$/;
  if (!validOffsetPattern.test(offset)) {
  res.writeHead(400, { "content-type": `text/plain` });
  res.end(`Invalid offset format`);
@@ -1410,17 +1757,32 @@ var DurableStreamTestServer = class {
  return;
  }
  if (live === `sse`) {
- await this.handleSSE(path, stream, offset, cursor, res);
+ const sseOffset = offset === `now` ? stream.currentOffset : offset;
+ await this.handleSSE(path, stream, sseOffset, cursor, res);
  return;
  }
- let { messages, upToDate } = this.store.read(path, offset);
- const clientIsCaughtUp = offset && offset === stream.currentOffset;
+ const effectiveOffset = offset === `now` ? stream.currentOffset : offset;
+ if (offset === `now` && live !== `long-poll`) {
+ const headers$1 = {
+ [STREAM_OFFSET_HEADER]: stream.currentOffset,
+ [STREAM_UP_TO_DATE_HEADER]: `true`,
+ [`cache-control`]: `no-store`
+ };
+ if (stream.contentType) headers$1[`content-type`] = stream.contentType;
+ const isJsonMode = stream.contentType?.includes(`application/json`);
+ const responseBody = isJsonMode ? `[]` : ``;
+ res.writeHead(200, headers$1);
+ res.end(responseBody);
+ return;
+ }
+ let { messages, upToDate } = this.store.read(path, effectiveOffset);
+ const clientIsCaughtUp = effectiveOffset && effectiveOffset === stream.currentOffset || offset === `now`;
  if (live === `long-poll` && clientIsCaughtUp && messages.length === 0) {
- const result = await this.store.waitForMessages(path, offset, this.options.longPollTimeout);
+ const result = await this.store.waitForMessages(path, effectiveOffset ?? stream.currentOffset, this.options.longPollTimeout);
  if (result.timedOut) {
  const responseCursor = generateResponseCursor(cursor, this.options.cursorOptions);
  res.writeHead(204, {
- [STREAM_OFFSET_HEADER]: offset,
+ [STREAM_OFFSET_HEADER]: effectiveOffset ?? stream.currentOffset,
  [STREAM_UP_TO_DATE_HEADER]: `true`,
  [STREAM_CURSOR_HEADER]: responseCursor
  });
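A hedged sketch of what the new offset=now behavior looks like from a client. The URL, port, and stream path are illustrative, and the lowercased header name assumes STREAM_UP_TO_DATE_HEADER maps to the Stream-Up-To-Date header exposed above:

// A plain GET at offset=now skips history: empty body (or `[]` for JSON streams)
// plus the current offset in the headers, so the client can start tailing from "now".
const res = await fetch(`http://localhost:4437/streams/orders?offset=now`);
console.log(res.status);                            // 200
console.log(res.headers.get(`stream-up-to-date`));  // "true"
console.log(await res.text());                      // "" or "[]"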
@@ -1457,6 +1819,7 @@ var DurableStreamTestServer = class {
  headers[`vary`] = `accept-encoding`;
  }
  }
+ finalData = this.applyFaultBodyModification(res, finalData);
  res.writeHead(200, headers);
  res.end(Buffer.from(finalData));
  }
@@ -1469,7 +1832,9 @@ var DurableStreamTestServer = class {
  "content-type": `text/event-stream`,
  "cache-control": `no-cache`,
  connection: `keep-alive`,
- "access-control-allow-origin": `*`
+ "access-control-allow-origin": `*`,
+ "x-content-type-options": `nosniff`,
+ "cross-origin-resource-policy": `cross-origin`
  });
  let currentOffset = initialOffset;
  let isConnected = true;
@@ -1525,6 +1890,9 @@ var DurableStreamTestServer = class {
  async handleAppend(path, req, res) {
  const contentType = req.headers[`content-type`];
  const seq = req.headers[STREAM_SEQ_HEADER.toLowerCase()];
+ const producerId = req.headers[PRODUCER_ID_HEADER.toLowerCase()];
+ const producerEpochStr = req.headers[PRODUCER_EPOCH_HEADER.toLowerCase()];
+ const producerSeqStr = req.headers[PRODUCER_SEQ_HEADER.toLowerCase()];
  const body = await this.readBody(req);
  if (body.length === 0) {
  res.writeHead(400, { "content-type": `text/plain` });
@@ -1536,11 +1904,97 @@ var DurableStreamTestServer = class {
  res.end(`Content-Type header is required`);
  return;
  }
- const message = await Promise.resolve(this.store.append(path, body, {
+ const hasProducerHeaders = producerId !== void 0 || producerEpochStr !== void 0 || producerSeqStr !== void 0;
+ const hasAllProducerHeaders = producerId !== void 0 && producerEpochStr !== void 0 && producerSeqStr !== void 0;
+ if (hasProducerHeaders && !hasAllProducerHeaders) {
+ res.writeHead(400, { "content-type": `text/plain` });
+ res.end(`All producer headers (Producer-Id, Producer-Epoch, Producer-Seq) must be provided together`);
+ return;
+ }
+ if (hasAllProducerHeaders && producerId === ``) {
+ res.writeHead(400, { "content-type": `text/plain` });
+ res.end(`Invalid Producer-Id: must not be empty`);
+ return;
+ }
+ const STRICT_INTEGER_REGEX = /^\d+$/;
+ let producerEpoch;
+ let producerSeq;
+ if (hasAllProducerHeaders) {
+ if (!STRICT_INTEGER_REGEX.test(producerEpochStr)) {
+ res.writeHead(400, { "content-type": `text/plain` });
+ res.end(`Invalid Producer-Epoch: must be a non-negative integer`);
+ return;
+ }
+ producerEpoch = Number(producerEpochStr);
+ if (!Number.isSafeInteger(producerEpoch)) {
+ res.writeHead(400, { "content-type": `text/plain` });
+ res.end(`Invalid Producer-Epoch: must be a non-negative integer`);
+ return;
+ }
+ if (!STRICT_INTEGER_REGEX.test(producerSeqStr)) {
+ res.writeHead(400, { "content-type": `text/plain` });
+ res.end(`Invalid Producer-Seq: must be a non-negative integer`);
+ return;
+ }
+ producerSeq = Number(producerSeqStr);
+ if (!Number.isSafeInteger(producerSeq)) {
+ res.writeHead(400, { "content-type": `text/plain` });
+ res.end(`Invalid Producer-Seq: must be a non-negative integer`);
+ return;
+ }
+ }
+ const appendOptions = {
  seq,
- contentType
- }));
- res.writeHead(200, { [STREAM_OFFSET_HEADER]: message.offset });
+ contentType,
+ producerId,
+ producerEpoch,
+ producerSeq
+ };
+ let result;
+ if (producerId !== void 0) result = await this.store.appendWithProducer(path, body, appendOptions);
+ else result = await Promise.resolve(this.store.append(path, body, appendOptions));
+ if (result && typeof result === `object` && `producerResult` in result) {
+ const { message: message$1, producerResult } = result;
+ if (!producerResult || producerResult.status === `accepted`) {
+ const responseHeaders = { [STREAM_OFFSET_HEADER]: message$1.offset };
+ if (producerEpoch !== void 0) responseHeaders[PRODUCER_EPOCH_HEADER] = producerEpoch.toString();
+ if (producerSeq !== void 0) responseHeaders[PRODUCER_SEQ_HEADER] = producerSeq.toString();
+ res.writeHead(200, responseHeaders);
+ res.end();
+ return;
+ }
+ switch (producerResult.status) {
+ case `duplicate`:
+ res.writeHead(204, {
+ [PRODUCER_EPOCH_HEADER]: producerEpoch.toString(),
+ [PRODUCER_SEQ_HEADER]: producerResult.lastSeq.toString()
+ });
+ res.end();
+ return;
+ case `stale_epoch`: {
+ res.writeHead(403, {
+ "content-type": `text/plain`,
+ [PRODUCER_EPOCH_HEADER]: producerResult.currentEpoch.toString()
+ });
+ res.end(`Stale producer epoch`);
+ return;
+ }
+ case `invalid_epoch_seq`:
+ res.writeHead(400, { "content-type": `text/plain` });
+ res.end(`New epoch must start with sequence 0`);
+ return;
+ case `sequence_gap`:
+ res.writeHead(409, {
+ "content-type": `text/plain`,
+ [PRODUCER_EXPECTED_SEQ_HEADER]: producerResult.expectedSeq.toString(),
+ [PRODUCER_RECEIVED_SEQ_HEADER]: producerResult.receivedSeq.toString()
+ });
+ res.end(`Producer sequence gap`);
+ return;
+ }
+ }
+ const message = result;
+ res.writeHead(204, { [STREAM_OFFSET_HEADER]: message.offset });
  res.end();
  }
  /**
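A hedged sketch of an append that uses the new producer headers. The URL is illustrative and the HTTP method is an assumption (the routing that reaches handleAppend is not shown in this diff), but the response codes follow the handler above:

const res = await fetch(`http://localhost:4437/streams/orders`, {
  method: `POST`,  // assumed append method; not confirmed by this hunk
  headers: {
    "content-type": `application/json`,
    "producer-id": `worker-1`,
    "producer-epoch": `0`,
    "producer-seq": `0`
  },
  body: JSON.stringify({ hello: `world` })
});
// 200 accepted, 204 duplicate Producer-Seq (a safe retry), 403 stale Producer-Epoch,
// 409 sequence gap (Producer-Expected-Seq says what to send next).
console.log(res.status);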
@@ -1571,12 +2025,29 @@ var DurableStreamTestServer = class {
  const body = await this.readBody(req);
  try {
  const config = JSON.parse(new TextDecoder().decode(body));
- if (!config.path || !config.status) {
+ if (!config.path) {
  res.writeHead(400, { "content-type": `text/plain` });
- res.end(`Missing required fields: path, status`);
+ res.end(`Missing required field: path`);
  return;
  }
- this.injectError(config.path, config.status, config.count ?? 1, config.retryAfter);
+ const hasFaultType = config.status !== void 0 || config.delayMs !== void 0 || config.dropConnection || config.truncateBodyBytes !== void 0 || config.corruptBody;
+ if (!hasFaultType) {
+ res.writeHead(400, { "content-type": `text/plain` });
+ res.end(`Must specify at least one fault type: status, delayMs, dropConnection, truncateBodyBytes, or corruptBody`);
+ return;
+ }
+ this.injectFault(config.path, {
+ status: config.status,
+ count: config.count ?? 1,
+ retryAfter: config.retryAfter,
+ delayMs: config.delayMs,
+ dropConnection: config.dropConnection,
+ truncateBodyBytes: config.truncateBodyBytes,
+ probability: config.probability,
+ method: config.method,
+ corruptBody: config.corruptBody,
+ jitterMs: config.jitterMs
+ });
  res.writeHead(200, { "content-type": `application/json` });
  res.end(JSON.stringify({ ok: true }));
  } catch {
@@ -1584,7 +2055,7 @@ var DurableStreamTestServer = class {
  res.end(`Invalid JSON body`);
  }
  } else if (method === `DELETE`) {
- this.clearInjectedErrors();
+ this.clearInjectedFaults();
  res.writeHead(200, { "content-type": `application/json` });
  res.end(JSON.stringify({ ok: true }));
  } else {