@durable-streams/server 0.1.4 → 0.1.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -12,6 +12,10 @@ import { createStateSchema } from "@durable-streams/state";
 
  //#region src/store.ts
  /**
+ * TTL for in-memory producer state cleanup (7 days).
+ */
+ const PRODUCER_STATE_TTL_MS = 7 * 24 * 60 * 60 * 1e3;
+ /**
  * Normalize content-type by extracting the media type (before any semicolon).
  * Handles cases like "application/json; charset=utf-8".
  */
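
For reference, the new TTL constant works out to 604,800,000 ms (7 days of 24 hours, 60 minutes, 60 seconds, 1000 ms each); the line below only restates that arithmetic and is not part of the diff:

// 7 days expressed in milliseconds
const PRODUCER_STATE_TTL_MS = 7 * 24 * 60 * 60 * 1e3; // = 604800000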
@@ -58,13 +62,15 @@ function formatJsonResponse(data) {
  const wrapped = `[${text}]`;
  return new TextEncoder().encode(wrapped);
  }
- /**
- * In-memory store for durable streams.
- */
  var StreamStore = class {
  streams = new Map();
  pendingLongPolls = [];
  /**
+ * Per-producer locks for serializing validation+append operations.
+ * Key: "{streamPath}:{producerId}"
+ */
+ producerLocks = new Map();
+ /**
  * Check if a stream is expired based on TTL or Expires-At.
  */
  isExpired(stream) {
@@ -140,6 +146,108 @@ var StreamStore = class {
  return this.streams.delete(path$2);
  }
  /**
+ * Validate producer state WITHOUT mutating.
+ * Returns proposed state to commit after successful append.
+ * Implements Kafka-style idempotent producer validation.
+ *
+ * IMPORTANT: This function does NOT mutate producer state. The caller must
+ * call commitProducerState() after successful append to apply the mutation.
+ * This ensures atomicity: if append fails (e.g., JSON validation), producer
+ * state is not incorrectly advanced.
+ */
+ validateProducer(stream, producerId, epoch, seq) {
+ if (!stream.producers) stream.producers = new Map();
+ this.cleanupExpiredProducers(stream);
+ const state = stream.producers.get(producerId);
+ const now = Date.now();
+ if (!state) {
+ if (seq !== 0) return {
+ status: `sequence_gap`,
+ expectedSeq: 0,
+ receivedSeq: seq
+ };
+ return {
+ status: `accepted`,
+ isNew: true,
+ producerId,
+ proposedState: {
+ epoch,
+ lastSeq: 0,
+ lastUpdated: now
+ }
+ };
+ }
+ if (epoch < state.epoch) return {
+ status: `stale_epoch`,
+ currentEpoch: state.epoch
+ };
+ if (epoch > state.epoch) {
+ if (seq !== 0) return { status: `invalid_epoch_seq` };
+ return {
+ status: `accepted`,
+ isNew: true,
+ producerId,
+ proposedState: {
+ epoch,
+ lastSeq: 0,
+ lastUpdated: now
+ }
+ };
+ }
+ if (seq <= state.lastSeq) return {
+ status: `duplicate`,
+ lastSeq: state.lastSeq
+ };
+ if (seq === state.lastSeq + 1) return {
+ status: `accepted`,
+ isNew: false,
+ producerId,
+ proposedState: {
+ epoch,
+ lastSeq: seq,
+ lastUpdated: now
+ }
+ };
+ return {
+ status: `sequence_gap`,
+ expectedSeq: state.lastSeq + 1,
+ receivedSeq: seq
+ };
+ }
+ /**
+ * Commit producer state after successful append.
+ * This is the only place where producer state is mutated.
+ */
+ commitProducerState(stream, result) {
+ if (result.status !== `accepted`) return;
+ stream.producers.set(result.producerId, result.proposedState);
+ }
+ /**
+ * Clean up expired producer states from a stream.
+ */
+ cleanupExpiredProducers(stream) {
+ if (!stream.producers) return;
+ const now = Date.now();
+ for (const [id, state] of stream.producers) if (now - state.lastUpdated > PRODUCER_STATE_TTL_MS) stream.producers.delete(id);
+ }
+ /**
+ * Acquire a lock for serialized producer operations.
+ * Returns a release function.
+ */
+ async acquireProducerLock(path$2, producerId) {
+ const lockKey = `${path$2}:${producerId}`;
+ while (this.producerLocks.has(lockKey)) await this.producerLocks.get(lockKey);
+ let releaseLock;
+ const lockPromise = new Promise((resolve) => {
+ releaseLock = resolve;
+ });
+ this.producerLocks.set(lockKey, lockPromise);
+ return () => {
+ this.producerLocks.delete(lockKey);
+ releaseLock();
+ };
+ }
+ /**
  * Append data to a stream.
  * @throws Error if stream doesn't exist or is expired
  * @throws Error if seq is lower than lastSeq
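
The producer lock added above is a promise-chaining mutex: each caller awaits the promise left by the previous holder, then installs its own promise and returns a release callback. A minimal standalone sketch of the same pattern (illustrative only, not code shipped in the package):

// Per-key async mutex, mirroring producerLocks/acquireProducerLock above.
const locks = new Map();

async function acquireLock(key) {
  // Wait until the current holder (if any) releases.
  while (locks.has(key)) await locks.get(key);
  let release;
  const lockPromise = new Promise((resolve) => {
    release = resolve;
  });
  locks.set(key, lockPromise);
  return () => {
    locks.delete(key);
    release();
  };
}

// Serializes async critical sections per key.
async function withLock(key, fn) {
  const release = await acquireLock(key);
  try {
    return await fn();
  } finally {
    release();
  }
}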
@@ -153,15 +261,56 @@ var StreamStore = class {
  const streamType = normalizeContentType(stream.contentType);
  if (providedType !== streamType) throw new Error(`Content-type mismatch: expected ${stream.contentType}, got ${options.contentType}`);
  }
+ let producerResult;
+ if (options.producerId !== void 0 && options.producerEpoch !== void 0 && options.producerSeq !== void 0) {
+ producerResult = this.validateProducer(stream, options.producerId, options.producerEpoch, options.producerSeq);
+ if (producerResult.status !== `accepted`) return {
+ message: null,
+ producerResult
+ };
+ }
  if (options.seq !== void 0) {
  if (stream.lastSeq !== void 0 && options.seq <= stream.lastSeq) throw new Error(`Sequence conflict: ${options.seq} <= ${stream.lastSeq}`);
- stream.lastSeq = options.seq;
  }
  const message = this.appendToStream(stream, data);
+ if (producerResult) this.commitProducerState(stream, producerResult);
+ if (options.seq !== void 0) stream.lastSeq = options.seq;
  this.notifyLongPolls(path$2);
+ if (producerResult) return {
+ message,
+ producerResult
+ };
  return message;
  }
  /**
+ * Append with producer serialization for concurrent request handling.
+ * This ensures that validation+append is atomic per producer.
+ */
+ async appendWithProducer(path$2, data, options) {
+ if (!options.producerId) {
+ const result = this.append(path$2, data, options);
+ if (`message` in result) return result;
+ return { message: result };
+ }
+ const releaseLock = await this.acquireProducerLock(path$2, options.producerId);
+ try {
+ const result = this.append(path$2, data, options);
+ if (`message` in result) return result;
+ return { message: result };
+ } finally {
+ releaseLock();
+ }
+ }
+ /**
+ * Get the current epoch for a producer on a stream.
+ * Returns undefined if the producer doesn't exist or stream not found.
+ */
+ getProducerEpoch(path$2, producerId) {
+ const stream = this.getIfNotExpired(path$2);
+ if (!stream?.producers) return void 0;
+ return stream.producers.get(producerId)?.epoch;
+ }
+ /**
  * Read messages from a stream starting at the given offset.
  * @throws Error if stream doesn't exist or is expired
  */
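
As a rough usage sketch of the new in-memory API (assumes `store` is a StreamStore with a JSON stream already created at `/logs/app`, and that `data` holds the encoded payload; names like `worker-1` are placeholders):

const options = {
  contentType: `application/json`,
  producerId: `worker-1`,
  producerEpoch: 0,
  producerSeq: 0
};

// First write with (epoch 0, seq 0) is accepted and committed.
const first = await store.appendWithProducer(`/logs/app`, data, options);
// first.producerResult.status === `accepted`

// Retrying the exact same (epoch, seq) pair is reported as a duplicate
// instead of being appended a second time.
const retry = await store.appendWithProducer(`/logs/app`, data, options);
// retry.message === null, retry.producerResult.status === `duplicate`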
@@ -522,6 +671,11 @@ var FileBackedStreamStore = class {
  fileHandlePool;
  pendingLongPolls = [];
  dataDir;
+ /**
+ * Per-producer locks for serializing validation+append operations.
+ * Key: "{streamPath}:{producerId}"
+ */
+ producerLocks = new Map();
  constructor(options) {
  this.dataDir = options.dataDir;
  this.db = open({
@@ -603,6 +757,11 @@ var FileBackedStreamStore = class {
  * Convert LMDB metadata to Stream object.
  */
  streamMetaToStream(meta) {
+ let producers;
+ if (meta.producers) {
+ producers = new Map();
+ for (const [id, state] of Object.entries(meta.producers)) producers.set(id, { ...state });
+ }
  return {
  path: meta.path,
  contentType: meta.contentType,
@@ -611,10 +770,103 @@ var FileBackedStreamStore = class {
  lastSeq: meta.lastSeq,
  ttlSeconds: meta.ttlSeconds,
  expiresAt: meta.expiresAt,
- createdAt: meta.createdAt
+ createdAt: meta.createdAt,
+ producers
  };
  }
  /**
+ * Validate producer state WITHOUT mutating.
+ * Returns proposed state to commit after successful append.
+ *
+ * IMPORTANT: This function does NOT mutate producer state. The caller must
+ * commit the proposedState after successful append (file write + fsync + LMDB).
+ * This ensures atomicity: if any step fails, producer state is not advanced.
+ */
+ validateProducer(meta, producerId, epoch, seq) {
+ if (!meta.producers) meta.producers = {};
+ const state = meta.producers[producerId];
+ const now = Date.now();
+ if (!state) {
+ if (seq !== 0) return {
+ status: `sequence_gap`,
+ expectedSeq: 0,
+ receivedSeq: seq
+ };
+ return {
+ status: `accepted`,
+ isNew: true,
+ producerId,
+ proposedState: {
+ epoch,
+ lastSeq: 0,
+ lastUpdated: now
+ }
+ };
+ }
+ if (epoch < state.epoch) return {
+ status: `stale_epoch`,
+ currentEpoch: state.epoch
+ };
+ if (epoch > state.epoch) {
+ if (seq !== 0) return { status: `invalid_epoch_seq` };
+ return {
+ status: `accepted`,
+ isNew: true,
+ producerId,
+ proposedState: {
+ epoch,
+ lastSeq: 0,
+ lastUpdated: now
+ }
+ };
+ }
+ if (seq <= state.lastSeq) return {
+ status: `duplicate`,
+ lastSeq: state.lastSeq
+ };
+ if (seq === state.lastSeq + 1) return {
+ status: `accepted`,
+ isNew: false,
+ producerId,
+ proposedState: {
+ epoch,
+ lastSeq: seq,
+ lastUpdated: now
+ }
+ };
+ return {
+ status: `sequence_gap`,
+ expectedSeq: state.lastSeq + 1,
+ receivedSeq: seq
+ };
+ }
+ /**
+ * Acquire a lock for serialized producer operations.
+ * Returns a release function.
+ */
+ async acquireProducerLock(streamPath, producerId) {
+ const lockKey = `${streamPath}:${producerId}`;
+ while (this.producerLocks.has(lockKey)) await this.producerLocks.get(lockKey);
+ let releaseLock;
+ const lockPromise = new Promise((resolve) => {
+ releaseLock = resolve;
+ });
+ this.producerLocks.set(lockKey, lockPromise);
+ return () => {
+ this.producerLocks.delete(lockKey);
+ releaseLock();
+ };
+ }
+ /**
+ * Get the current epoch for a producer on a stream.
+ * Returns undefined if the producer doesn't exist or stream not found.
+ */
+ getProducerEpoch(streamPath, producerId) {
+ const meta = this.getMetaIfNotExpired(streamPath);
+ if (!meta?.producers) return void 0;
+ return meta.producers[producerId]?.epoch;
+ }
+ /**
  * Check if a stream is expired based on TTL or Expires-At.
  */
  isExpired(meta) {
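
In the file-backed store the producer table is persisted inside the LMDB stream metadata as a plain object keyed by producer id, and streamMetaToStream rehydrates it into a Map for in-memory use. A hedged sketch of what that metadata might look like (field values invented, unrelated fields omitted):

// Hypothetical persisted record stored under the `stream:${streamPath}` key.
const meta = {
  path: `/logs/app`,
  contentType: `application/json`,
  createdAt: 1700000000000,
  producers: {
    "worker-1": { epoch: 2, lastSeq: 41, lastUpdated: 1700000123456 }
  }
};
// After streamMetaToStream(meta):
// stream.producers.get(`worker-1`) → { epoch: 2, lastSeq: 41, lastUpdated: ... }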
@@ -724,6 +976,14 @@ var FileBackedStreamStore = class {
  const streamType = normalizeContentType(streamMeta.contentType);
  if (providedType !== streamType) throw new Error(`Content-type mismatch: expected ${streamMeta.contentType}, got ${options.contentType}`);
  }
+ let producerResult;
+ if (options.producerId !== void 0 && options.producerEpoch !== void 0 && options.producerSeq !== void 0) {
+ producerResult = this.validateProducer(streamMeta, options.producerId, options.producerEpoch, options.producerSeq);
+ if (producerResult.status !== `accepted`) return {
+ message: null,
+ producerResult
+ };
+ }
  if (options.seq !== void 0) {
  if (streamMeta.lastSeq !== void 0 && options.seq <= streamMeta.lastSeq) throw new Error(`Sequence conflict: ${options.seq} <= ${streamMeta.lastSeq}`);
  }
@@ -759,17 +1019,43 @@ var FileBackedStreamStore = class {
  timestamp: Date.now()
  };
  await this.fileHandlePool.fsyncFile(segmentPath);
+ const updatedProducers = { ...streamMeta.producers };
+ if (producerResult && producerResult.status === `accepted`) updatedProducers[producerResult.producerId] = producerResult.proposedState;
  const updatedMeta = {
  ...streamMeta,
  currentOffset: newOffset,
  lastSeq: options.seq ?? streamMeta.lastSeq,
- totalBytes: streamMeta.totalBytes + processedData.length + 5
+ totalBytes: streamMeta.totalBytes + processedData.length + 5,
+ producers: updatedProducers
  };
  const key = `stream:${streamPath}`;
  this.db.putSync(key, updatedMeta);
  this.notifyLongPolls(streamPath);
+ if (producerResult) return {
+ message,
+ producerResult
+ };
  return message;
  }
+ /**
+ * Append with producer serialization for concurrent request handling.
+ * This ensures that validation+append is atomic per producer.
+ */
+ async appendWithProducer(streamPath, data, options) {
+ if (!options.producerId) {
+ const result = await this.append(streamPath, data, options);
+ if (result && `message` in result) return result;
+ return { message: result };
+ }
+ const releaseLock = await this.acquireProducerLock(streamPath, options.producerId);
+ try {
+ const result = await this.append(streamPath, data, options);
+ if (result && `message` in result) return result;
+ return { message: result };
+ } finally {
+ releaseLock();
+ }
+ }
  read(streamPath, offset) {
  const streamMeta = this.getMetaIfNotExpired(streamPath);
  if (!streamMeta) throw new Error(`Stream not found: ${streamPath}`);
@@ -1043,6 +1329,11 @@ const STREAM_UP_TO_DATE_HEADER = `Stream-Up-To-Date`;
  const STREAM_SEQ_HEADER = `Stream-Seq`;
  const STREAM_TTL_HEADER = `Stream-TTL`;
  const STREAM_EXPIRES_AT_HEADER = `Stream-Expires-At`;
+ const PRODUCER_ID_HEADER = `Producer-Id`;
+ const PRODUCER_EPOCH_HEADER = `Producer-Epoch`;
+ const PRODUCER_SEQ_HEADER = `Producer-Seq`;
+ const PRODUCER_EXPECTED_SEQ_HEADER = `Producer-Expected-Seq`;
+ const PRODUCER_RECEIVED_SEQ_HEADER = `Producer-Received-Seq`;
  const SSE_OFFSET_FIELD = `streamNextOffset`;
  const SSE_CURSOR_FIELD = `streamCursor`;
  const SSE_UP_TO_DATE_FIELD = `upToDate`;
@@ -1254,8 +1545,8 @@ var DurableStreamTestServer = class {
  const method = req.method?.toUpperCase();
  res.setHeader(`access-control-allow-origin`, `*`);
  res.setHeader(`access-control-allow-methods`, `GET, POST, PUT, DELETE, HEAD, OPTIONS`);
- res.setHeader(`access-control-allow-headers`, `content-type, authorization, Stream-Seq, Stream-TTL, Stream-Expires-At`);
- res.setHeader(`access-control-expose-headers`, `Stream-Next-Offset, Stream-Cursor, Stream-Up-To-Date, etag, content-type, content-encoding, vary`);
+ res.setHeader(`access-control-allow-headers`, `content-type, authorization, Stream-Seq, Stream-TTL, Stream-Expires-At, Producer-Id, Producer-Epoch, Producer-Seq`);
+ res.setHeader(`access-control-expose-headers`, `Stream-Next-Offset, Stream-Cursor, Stream-Up-To-Date, Producer-Epoch, Producer-Seq, Producer-Expected-Seq, Producer-Received-Seq, etag, content-type, content-encoding, vary`);
  res.setHeader(`x-content-type-options`, `nosniff`);
  res.setHeader(`cross-origin-resource-policy`, `cross-origin`);
  if (method === `OPTIONS`) {
@@ -1430,7 +1721,7 @@ var DurableStreamTestServer = class {
  res.end(`Multiple offset parameters not allowed`);
  return;
  }
- const validOffsetPattern = /^(-1|\d+_\d+)$/;
+ const validOffsetPattern = /^(-1|now|\d+_\d+)$/;
  if (!validOffsetPattern.test(offset)) {
  res.writeHead(400, { "content-type": `text/plain` });
  res.end(`Invalid offset format`);
@@ -1443,17 +1734,32 @@ var DurableStreamTestServer = class {
  return;
  }
  if (live === `sse`) {
- await this.handleSSE(path$2, stream, offset, cursor, res);
+ const sseOffset = offset === `now` ? stream.currentOffset : offset;
+ await this.handleSSE(path$2, stream, sseOffset, cursor, res);
  return;
  }
- let { messages, upToDate } = this.store.read(path$2, offset);
- const clientIsCaughtUp = offset && offset === stream.currentOffset;
+ const effectiveOffset = offset === `now` ? stream.currentOffset : offset;
+ if (offset === `now` && live !== `long-poll`) {
+ const headers$1 = {
+ [STREAM_OFFSET_HEADER]: stream.currentOffset,
+ [STREAM_UP_TO_DATE_HEADER]: `true`,
+ [`cache-control`]: `no-store`
+ };
+ if (stream.contentType) headers$1[`content-type`] = stream.contentType;
+ const isJsonMode = stream.contentType?.includes(`application/json`);
+ const responseBody = isJsonMode ? `[]` : ``;
+ res.writeHead(200, headers$1);
+ res.end(responseBody);
+ return;
+ }
+ let { messages, upToDate } = this.store.read(path$2, effectiveOffset);
+ const clientIsCaughtUp = effectiveOffset && effectiveOffset === stream.currentOffset || offset === `now`;
  if (live === `long-poll` && clientIsCaughtUp && messages.length === 0) {
- const result = await this.store.waitForMessages(path$2, offset, this.options.longPollTimeout);
+ const result = await this.store.waitForMessages(path$2, effectiveOffset ?? stream.currentOffset, this.options.longPollTimeout);
  if (result.timedOut) {
  const responseCursor = generateResponseCursor(cursor, this.options.cursorOptions);
  res.writeHead(204, {
- [STREAM_OFFSET_HEADER]: offset,
+ [STREAM_OFFSET_HEADER]: effectiveOffset ?? stream.currentOffset,
  [STREAM_UP_TO_DATE_HEADER]: `true`,
  [STREAM_CURSOR_HEADER]: responseCursor
  });
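
With this change the offset grammar accepts `now` alongside `-1` and segment_offset values, and a plain (non long-poll, non-SSE) read at `offset=now` short-circuits: it returns 200 with an empty body (`[]` for JSON streams) and the stream's current offset, so a client can start tailing without replaying history. A rough client-side sketch (the base URL and route are placeholders; the header name follows the exposed `Stream-Next-Offset` header):

// Illustrative only; substitute the real server URL and stream path.
const res = await fetch(`http://localhost:4437/logs/app?offset=now`);
// res.status === 200, body is `[]` for a JSON stream
const next = res.headers.get(`Stream-Next-Offset`);
// A follow-up long-poll or SSE request can resume from `next`.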
@@ -1561,6 +1867,9 @@ var DurableStreamTestServer = class {
  async handleAppend(path$2, req, res) {
  const contentType = req.headers[`content-type`];
  const seq = req.headers[STREAM_SEQ_HEADER.toLowerCase()];
+ const producerId = req.headers[PRODUCER_ID_HEADER.toLowerCase()];
+ const producerEpochStr = req.headers[PRODUCER_EPOCH_HEADER.toLowerCase()];
+ const producerSeqStr = req.headers[PRODUCER_SEQ_HEADER.toLowerCase()];
  const body = await this.readBody(req);
  if (body.length === 0) {
  res.writeHead(400, { "content-type": `text/plain` });
@@ -1572,10 +1881,96 @@ var DurableStreamTestServer = class {
  res.end(`Content-Type header is required`);
  return;
  }
- const message = await Promise.resolve(this.store.append(path$2, body, {
+ const hasProducerHeaders = producerId !== void 0 || producerEpochStr !== void 0 || producerSeqStr !== void 0;
+ const hasAllProducerHeaders = producerId !== void 0 && producerEpochStr !== void 0 && producerSeqStr !== void 0;
+ if (hasProducerHeaders && !hasAllProducerHeaders) {
+ res.writeHead(400, { "content-type": `text/plain` });
+ res.end(`All producer headers (Producer-Id, Producer-Epoch, Producer-Seq) must be provided together`);
+ return;
+ }
+ if (hasAllProducerHeaders && producerId === ``) {
+ res.writeHead(400, { "content-type": `text/plain` });
+ res.end(`Invalid Producer-Id: must not be empty`);
+ return;
+ }
+ const STRICT_INTEGER_REGEX = /^\d+$/;
+ let producerEpoch;
+ let producerSeq;
+ if (hasAllProducerHeaders) {
+ if (!STRICT_INTEGER_REGEX.test(producerEpochStr)) {
+ res.writeHead(400, { "content-type": `text/plain` });
+ res.end(`Invalid Producer-Epoch: must be a non-negative integer`);
+ return;
+ }
+ producerEpoch = Number(producerEpochStr);
+ if (!Number.isSafeInteger(producerEpoch)) {
+ res.writeHead(400, { "content-type": `text/plain` });
+ res.end(`Invalid Producer-Epoch: must be a non-negative integer`);
+ return;
+ }
+ if (!STRICT_INTEGER_REGEX.test(producerSeqStr)) {
+ res.writeHead(400, { "content-type": `text/plain` });
+ res.end(`Invalid Producer-Seq: must be a non-negative integer`);
+ return;
+ }
+ producerSeq = Number(producerSeqStr);
+ if (!Number.isSafeInteger(producerSeq)) {
+ res.writeHead(400, { "content-type": `text/plain` });
+ res.end(`Invalid Producer-Seq: must be a non-negative integer`);
+ return;
+ }
+ }
+ const appendOptions = {
  seq,
- contentType
- }));
+ contentType,
+ producerId,
+ producerEpoch,
+ producerSeq
+ };
+ let result;
+ if (producerId !== void 0) result = await this.store.appendWithProducer(path$2, body, appendOptions);
+ else result = await Promise.resolve(this.store.append(path$2, body, appendOptions));
+ if (result && typeof result === `object` && `producerResult` in result) {
+ const { message: message$1, producerResult } = result;
+ if (!producerResult || producerResult.status === `accepted`) {
+ const responseHeaders = { [STREAM_OFFSET_HEADER]: message$1.offset };
+ if (producerEpoch !== void 0) responseHeaders[PRODUCER_EPOCH_HEADER] = producerEpoch.toString();
+ if (producerSeq !== void 0) responseHeaders[PRODUCER_SEQ_HEADER] = producerSeq.toString();
+ res.writeHead(200, responseHeaders);
+ res.end();
+ return;
+ }
+ switch (producerResult.status) {
+ case `duplicate`:
+ res.writeHead(204, {
+ [PRODUCER_EPOCH_HEADER]: producerEpoch.toString(),
+ [PRODUCER_SEQ_HEADER]: producerResult.lastSeq.toString()
+ });
+ res.end();
+ return;
+ case `stale_epoch`: {
+ res.writeHead(403, {
+ "content-type": `text/plain`,
+ [PRODUCER_EPOCH_HEADER]: producerResult.currentEpoch.toString()
+ });
+ res.end(`Stale producer epoch`);
+ return;
+ }
+ case `invalid_epoch_seq`:
+ res.writeHead(400, { "content-type": `text/plain` });
+ res.end(`New epoch must start with sequence 0`);
+ return;
+ case `sequence_gap`:
+ res.writeHead(409, {
+ "content-type": `text/plain`,
+ [PRODUCER_EXPECTED_SEQ_HEADER]: producerResult.expectedSeq.toString(),
+ [PRODUCER_RECEIVED_SEQ_HEADER]: producerResult.receivedSeq.toString()
+ });
+ res.end(`Producer sequence gap`);
+ return;
+ }
+ }
+ const message = result;
  res.writeHead(204, { [STREAM_OFFSET_HEADER]: message.offset });
  res.end();
  }
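
Taken together, handleAppend maps the producer validation outcomes onto HTTP responses: 200 with Producer-Epoch and Producer-Seq echoed for an accepted write, 204 for a duplicate, 403 with the current epoch for a stale epoch, 400 when a new epoch does not start at sequence 0, and 409 with Producer-Expected-Seq/Producer-Received-Seq for a gap. A hedged client-side sketch of the happy path plus an idempotent retry (HTTP method, base URL, and stream path are assumptions, not taken from the package docs):

// Illustrative only.
const url = `http://localhost:4437/logs/app`;
const headers = {
  "content-type": `application/json`,
  "Producer-Id": `worker-1`,
  "Producer-Epoch": `0`,
  "Producer-Seq": `0`
};
const body = JSON.stringify({ hello: `world` });

const first = await fetch(url, { method: `POST`, headers, body });
// 200; Producer-Epoch/Producer-Seq echoed, Stream-Next-Offset set.

const retry = await fetch(url, { method: `POST`, headers, body });
// 204; the duplicate (epoch, seq) is acknowledged but not appended again.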
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@durable-streams/server",
- "version": "0.1.4",
+ "version": "0.1.6",
  "description": "Node.js reference server implementation for Durable Streams",
  "author": "Durable Stream contributors",
  "license": "Apache-2.0",
@@ -39,15 +39,15 @@
  "dependencies": {
  "@neophi/sieve-cache": "^1.0.0",
  "lmdb": "^3.3.0",
- "@durable-streams/client": "0.1.3",
- "@durable-streams/state": "0.1.3"
+ "@durable-streams/client": "0.1.5",
+ "@durable-streams/state": "0.1.5"
  },
  "devDependencies": {
  "@types/node": "^22.0.0",
  "tsdown": "^0.9.0",
  "typescript": "^5.0.0",
  "vitest": "^4.0.0",
- "@durable-streams/server-conformance-tests": "0.1.6"
+ "@durable-streams/server-conformance-tests": "0.1.8"
  },
  "files": [
  "dist",