@durable-streams/server 0.1.4 → 0.1.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +412 -17
- package/dist/index.d.cts +140 -9
- package/dist/index.d.ts +140 -9
- package/dist/index.js +412 -17
- package/package.json +4 -4
- package/src/file-store.ts +238 -10
- package/src/server.ts +202 -17
- package/src/store.ts +272 -7
- package/src/types.ts +46 -0
package/dist/index.cjs
CHANGED
@@ -35,6 +35,10 @@ const __durable_streams_state = __toESM(require("@durable-streams/state"));
 
 //#region src/store.ts
 /**
+* TTL for in-memory producer state cleanup (7 days).
+*/
+const PRODUCER_STATE_TTL_MS = 7 * 24 * 60 * 60 * 1e3;
+/**
 * Normalize content-type by extracting the media type (before any semicolon).
 * Handles cases like "application/json; charset=utf-8".
 */
@@ -81,13 +85,15 @@ function formatJsonResponse(data) {
 const wrapped = `[${text}]`;
 return new TextEncoder().encode(wrapped);
 }
-/**
-* In-memory store for durable streams.
-*/
 var StreamStore = class {
 streams = new Map();
 pendingLongPolls = [];
 /**
+* Per-producer locks for serializing validation+append operations.
+* Key: "{streamPath}:{producerId}"
+*/
+producerLocks = new Map();
+/**
 * Check if a stream is expired based on TTL or Expires-At.
 */
 isExpired(stream) {
@@ -163,6 +169,108 @@ var StreamStore = class {
 return this.streams.delete(path);
 }
 /**
+* Validate producer state WITHOUT mutating.
+* Returns proposed state to commit after successful append.
+* Implements Kafka-style idempotent producer validation.
+*
+* IMPORTANT: This function does NOT mutate producer state. The caller must
+* call commitProducerState() after successful append to apply the mutation.
+* This ensures atomicity: if append fails (e.g., JSON validation), producer
+* state is not incorrectly advanced.
+*/
+validateProducer(stream, producerId, epoch, seq) {
+if (!stream.producers) stream.producers = new Map();
+this.cleanupExpiredProducers(stream);
+const state = stream.producers.get(producerId);
+const now = Date.now();
+if (!state) {
+if (seq !== 0) return {
+status: `sequence_gap`,
+expectedSeq: 0,
+receivedSeq: seq
+};
+return {
+status: `accepted`,
+isNew: true,
+producerId,
+proposedState: {
+epoch,
+lastSeq: 0,
+lastUpdated: now
+}
+};
+}
+if (epoch < state.epoch) return {
+status: `stale_epoch`,
+currentEpoch: state.epoch
+};
+if (epoch > state.epoch) {
+if (seq !== 0) return { status: `invalid_epoch_seq` };
+return {
+status: `accepted`,
+isNew: true,
+producerId,
+proposedState: {
+epoch,
+lastSeq: 0,
+lastUpdated: now
+}
+};
+}
+if (seq <= state.lastSeq) return {
+status: `duplicate`,
+lastSeq: state.lastSeq
+};
+if (seq === state.lastSeq + 1) return {
+status: `accepted`,
+isNew: false,
+producerId,
+proposedState: {
+epoch,
+lastSeq: seq,
+lastUpdated: now
+}
+};
+return {
+status: `sequence_gap`,
+expectedSeq: state.lastSeq + 1,
+receivedSeq: seq
+};
+}
+/**
+* Commit producer state after successful append.
+* This is the only place where producer state is mutated.
+*/
+commitProducerState(stream, result) {
+if (result.status !== `accepted`) return;
+stream.producers.set(result.producerId, result.proposedState);
+}
+/**
+* Clean up expired producer states from a stream.
+*/
+cleanupExpiredProducers(stream) {
+if (!stream.producers) return;
+const now = Date.now();
+for (const [id, state] of stream.producers) if (now - state.lastUpdated > PRODUCER_STATE_TTL_MS) stream.producers.delete(id);
+}
+/**
+* Acquire a lock for serialized producer operations.
+* Returns a release function.
+*/
+async acquireProducerLock(path, producerId) {
+const lockKey = `${path}:${producerId}`;
+while (this.producerLocks.has(lockKey)) await this.producerLocks.get(lockKey);
+let releaseLock;
+const lockPromise = new Promise((resolve) => {
+releaseLock = resolve;
+});
+this.producerLocks.set(lockKey, lockPromise);
+return () => {
+this.producerLocks.delete(lockKey);
+releaseLock();
+};
+}
+/**
 * Append data to a stream.
 * @throws Error if stream doesn't exist or is expired
 * @throws Error if seq is lower than lastSeq
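This hunk is the core of the 0.1.5 change: `validateProducer` is a pure decision function over the producer's `(epoch, seq)`, and `commitProducerState` applies the proposed state only after the append itself has succeeded. A condensed TypeScript sketch of the same decision table, using simplified illustrative types rather than the package's actual exports:

```ts
// Simplified sketch of the validation table above; types and names are
// illustrative, not the package's public API.
type ProducerState = { epoch: number; lastSeq: number; lastUpdated: number };

type ValidationResult =
  | { status: "accepted"; isNew: boolean; proposedState: ProducerState }
  | { status: "duplicate"; lastSeq: number }
  | { status: "stale_epoch"; currentEpoch: number }
  | { status: "invalid_epoch_seq" }
  | { status: "sequence_gap"; expectedSeq: number; receivedSeq: number };

function validate(state: ProducerState | undefined, epoch: number, seq: number): ValidationResult {
  const now = Date.now();
  if (!state) {
    // Unknown producer: its first append must carry seq 0.
    return seq === 0
      ? { status: "accepted", isNew: true, proposedState: { epoch, lastSeq: 0, lastUpdated: now } }
      : { status: "sequence_gap", expectedSeq: 0, receivedSeq: seq };
  }
  if (epoch < state.epoch) return { status: "stale_epoch", currentEpoch: state.epoch };
  if (epoch > state.epoch) {
    // A newer epoch (e.g. after a producer restart) must restart its sequence at 0.
    return seq === 0
      ? { status: "accepted", isNew: true, proposedState: { epoch, lastSeq: 0, lastUpdated: now } }
      : { status: "invalid_epoch_seq" };
  }
  if (seq <= state.lastSeq) return { status: "duplicate", lastSeq: state.lastSeq };
  if (seq === state.lastSeq + 1)
    return { status: "accepted", isNew: false, proposedState: { epoch, lastSeq: seq, lastUpdated: now } };
  return { status: "sequence_gap", expectedSeq: state.lastSeq + 1, receivedSeq: seq };
}
```

The same table is repeated for the file-backed store later in this diff, operating on a plain object in the LMDB metadata instead of a Map.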
@@ -176,15 +284,56 @@ var StreamStore = class {
 const streamType = normalizeContentType(stream.contentType);
 if (providedType !== streamType) throw new Error(`Content-type mismatch: expected ${stream.contentType}, got ${options.contentType}`);
 }
+let producerResult;
+if (options.producerId !== void 0 && options.producerEpoch !== void 0 && options.producerSeq !== void 0) {
+producerResult = this.validateProducer(stream, options.producerId, options.producerEpoch, options.producerSeq);
+if (producerResult.status !== `accepted`) return {
+message: null,
+producerResult
+};
+}
 if (options.seq !== void 0) {
 if (stream.lastSeq !== void 0 && options.seq <= stream.lastSeq) throw new Error(`Sequence conflict: ${options.seq} <= ${stream.lastSeq}`);
-stream.lastSeq = options.seq;
 }
 const message = this.appendToStream(stream, data);
+if (producerResult) this.commitProducerState(stream, producerResult);
+if (options.seq !== void 0) stream.lastSeq = options.seq;
 this.notifyLongPolls(path);
+if (producerResult) return {
+message,
+producerResult
+};
 return message;
 }
 /**
+* Append with producer serialization for concurrent request handling.
+* This ensures that validation+append is atomic per producer.
+*/
+async appendWithProducer(path, data, options) {
+if (!options.producerId) {
+const result = this.append(path, data, options);
+if (`message` in result) return result;
+return { message: result };
+}
+const releaseLock = await this.acquireProducerLock(path, options.producerId);
+try {
+const result = this.append(path, data, options);
+if (`message` in result) return result;
+return { message: result };
+} finally {
+releaseLock();
+}
+}
+/**
+* Get the current epoch for a producer on a stream.
+* Returns undefined if the producer doesn't exist or stream not found.
+*/
+getProducerEpoch(path, producerId) {
+const stream = this.getIfNotExpired(path);
+if (!stream?.producers) return void 0;
+return stream.producers.get(producerId)?.epoch;
+}
+/**
 * Read messages from a stream starting at the given offset.
 * @throws Error if stream doesn't exist or is expired
 */
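`appendWithProducer` wraps `append` in the per-producer lock and normalizes the return value to `{ message, producerResult? }`, so a retry of the same `(producerId, epoch, seq)` is reported as a duplicate rather than appended twice. A hedged usage sketch; the structural types below are illustrative, not the package's exported types:

```ts
// Illustrative structural types; the real types ship with @durable-streams/server.
interface ProducerAppendResult {
  message: unknown;
  producerResult?: { status: string; [key: string]: unknown };
}
interface ProducerStore {
  appendWithProducer(
    path: string,
    data: Uint8Array,
    options: Record<string, unknown>,
  ): Promise<ProducerAppendResult>;
}

async function appendTwice(store: ProducerStore, path: string): Promise<void> {
  const data = new TextEncoder().encode(`{"n":1}`);
  const options = {
    contentType: "application/json",
    producerId: "worker-1", // hypothetical producer identity
    producerEpoch: 0,
    producerSeq: 0,
  };
  const first = await store.appendWithProducer(path, data, options);
  const retry = await store.appendWithProducer(path, data, options); // blind retry
  console.log(first.producerResult?.status); // "accepted"
  console.log(retry.producerResult?.status); // "duplicate"; nothing is re-appended
}
```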
@@ -545,6 +694,11 @@ var FileBackedStreamStore = class {
 fileHandlePool;
 pendingLongPolls = [];
 dataDir;
+/**
+* Per-producer locks for serializing validation+append operations.
+* Key: "{streamPath}:{producerId}"
+*/
+producerLocks = new Map();
 constructor(options) {
 this.dataDir = options.dataDir;
 this.db = (0, lmdb.open)({
@@ -626,6 +780,11 @@ var FileBackedStreamStore = class {
 * Convert LMDB metadata to Stream object.
 */
 streamMetaToStream(meta) {
+let producers;
+if (meta.producers) {
+producers = new Map();
+for (const [id, state] of Object.entries(meta.producers)) producers.set(id, { ...state });
+}
 return {
 path: meta.path,
 contentType: meta.contentType,
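For the file-backed store, producer state is persisted inside the LMDB stream metadata as a plain record and rebuilt into a Map when the metadata is converted to a Stream object. A sketch of the two shapes being bridged above (field names are taken from this diff; everything else is illustrative):

```ts
// Field names as they appear in this diff; the rest is illustrative.
type ProducerState = { epoch: number; lastSeq: number; lastUpdated: number };

// Persisted in LMDB metadata: a plain, JSON-serializable record keyed by producer id.
type PersistedProducers = Record<string, ProducerState>;

// Used in memory on the Stream object: a Map, rebuilt entry by entry.
function toProducerMap(persisted?: PersistedProducers): Map<string, ProducerState> | undefined {
  if (!persisted) return undefined;
  const producers = new Map<string, ProducerState>();
  for (const [id, state] of Object.entries(persisted)) producers.set(id, { ...state });
  return producers;
}
```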
@@ -634,10 +793,103 @@ var FileBackedStreamStore = class {
 lastSeq: meta.lastSeq,
 ttlSeconds: meta.ttlSeconds,
 expiresAt: meta.expiresAt,
-createdAt: meta.createdAt
+createdAt: meta.createdAt,
+producers
 };
 }
 /**
+* Validate producer state WITHOUT mutating.
+* Returns proposed state to commit after successful append.
+*
+* IMPORTANT: This function does NOT mutate producer state. The caller must
+* commit the proposedState after successful append (file write + fsync + LMDB).
+* This ensures atomicity: if any step fails, producer state is not advanced.
+*/
+validateProducer(meta, producerId, epoch, seq) {
+if (!meta.producers) meta.producers = {};
+const state = meta.producers[producerId];
+const now = Date.now();
+if (!state) {
+if (seq !== 0) return {
+status: `sequence_gap`,
+expectedSeq: 0,
+receivedSeq: seq
+};
+return {
+status: `accepted`,
+isNew: true,
+producerId,
+proposedState: {
+epoch,
+lastSeq: 0,
+lastUpdated: now
+}
+};
+}
+if (epoch < state.epoch) return {
+status: `stale_epoch`,
+currentEpoch: state.epoch
+};
+if (epoch > state.epoch) {
+if (seq !== 0) return { status: `invalid_epoch_seq` };
+return {
+status: `accepted`,
+isNew: true,
+producerId,
+proposedState: {
+epoch,
+lastSeq: 0,
+lastUpdated: now
+}
+};
+}
+if (seq <= state.lastSeq) return {
+status: `duplicate`,
+lastSeq: state.lastSeq
+};
+if (seq === state.lastSeq + 1) return {
+status: `accepted`,
+isNew: false,
+producerId,
+proposedState: {
+epoch,
+lastSeq: seq,
+lastUpdated: now
+}
+};
+return {
+status: `sequence_gap`,
+expectedSeq: state.lastSeq + 1,
+receivedSeq: seq
+};
+}
+/**
+* Acquire a lock for serialized producer operations.
+* Returns a release function.
+*/
+async acquireProducerLock(streamPath, producerId) {
+const lockKey = `${streamPath}:${producerId}`;
+while (this.producerLocks.has(lockKey)) await this.producerLocks.get(lockKey);
+let releaseLock;
+const lockPromise = new Promise((resolve) => {
+releaseLock = resolve;
+});
+this.producerLocks.set(lockKey, lockPromise);
+return () => {
+this.producerLocks.delete(lockKey);
+releaseLock();
+};
+}
+/**
+* Get the current epoch for a producer on a stream.
+* Returns undefined if the producer doesn't exist or stream not found.
+*/
+getProducerEpoch(streamPath, producerId) {
+const meta = this.getMetaIfNotExpired(streamPath);
+if (!meta?.producers) return void 0;
+return meta.producers[producerId]?.epoch;
+}
+/**
 * Check if a stream is expired based on TTL or Expires-At.
 */
 isExpired(meta) {
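Both stores serialize validate-then-append per producer with the same promise-based mutex keyed by `{streamPath}:{producerId}`. A self-contained sketch of the pattern (names are illustrative, not the package's API):

```ts
// Minimal promise-chain mutex mirroring acquireProducerLock above (illustrative).
const producerLocks = new Map<string, Promise<void>>();

async function withProducerLock<T>(
  streamPath: string,
  producerId: string,
  fn: () => Promise<T>,
): Promise<T> {
  const lockKey = `${streamPath}:${producerId}`;
  // Wait for any in-flight operation on this producer to finish.
  while (producerLocks.has(lockKey)) await producerLocks.get(lockKey);
  let release!: () => void;
  producerLocks.set(lockKey, new Promise<void>((resolve) => (release = resolve)));
  try {
    return await fn();
  } finally {
    producerLocks.delete(lockKey);
    release();
  }
}
```

Because the has-check and the set happen in the same synchronous turn, concurrent callers queue up one behind the other instead of both taking the lock.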
@@ -747,6 +999,14 @@ var FileBackedStreamStore = class {
 const streamType = normalizeContentType(streamMeta.contentType);
 if (providedType !== streamType) throw new Error(`Content-type mismatch: expected ${streamMeta.contentType}, got ${options.contentType}`);
 }
+let producerResult;
+if (options.producerId !== void 0 && options.producerEpoch !== void 0 && options.producerSeq !== void 0) {
+producerResult = this.validateProducer(streamMeta, options.producerId, options.producerEpoch, options.producerSeq);
+if (producerResult.status !== `accepted`) return {
+message: null,
+producerResult
+};
+}
 if (options.seq !== void 0) {
 if (streamMeta.lastSeq !== void 0 && options.seq <= streamMeta.lastSeq) throw new Error(`Sequence conflict: ${options.seq} <= ${streamMeta.lastSeq}`);
 }
@@ -782,17 +1042,43 @@ var FileBackedStreamStore = class {
 timestamp: Date.now()
 };
 await this.fileHandlePool.fsyncFile(segmentPath);
+const updatedProducers = { ...streamMeta.producers };
+if (producerResult && producerResult.status === `accepted`) updatedProducers[producerResult.producerId] = producerResult.proposedState;
 const updatedMeta = {
 ...streamMeta,
 currentOffset: newOffset,
 lastSeq: options.seq ?? streamMeta.lastSeq,
-totalBytes: streamMeta.totalBytes + processedData.length + 5
+totalBytes: streamMeta.totalBytes + processedData.length + 5,
+producers: updatedProducers
 };
 const key = `stream:${streamPath}`;
 this.db.putSync(key, updatedMeta);
 this.notifyLongPolls(streamPath);
+if (producerResult) return {
+message,
+producerResult
+};
 return message;
 }
+/**
+* Append with producer serialization for concurrent request handling.
+* This ensures that validation+append is atomic per producer.
+*/
+async appendWithProducer(streamPath, data, options) {
+if (!options.producerId) {
+const result = await this.append(streamPath, data, options);
+if (result && `message` in result) return result;
+return { message: result };
+}
+const releaseLock = await this.acquireProducerLock(streamPath, options.producerId);
+try {
+const result = await this.append(streamPath, data, options);
+if (result && `message` in result) return result;
+return { message: result };
+} finally {
+releaseLock();
+}
+}
 read(streamPath, offset) {
 const streamMeta = this.getMetaIfNotExpired(streamPath);
 if (!streamMeta) throw new Error(`Stream not found: ${streamPath}`);
@@ -1066,6 +1352,11 @@ const STREAM_UP_TO_DATE_HEADER = `Stream-Up-To-Date`;
 const STREAM_SEQ_HEADER = `Stream-Seq`;
 const STREAM_TTL_HEADER = `Stream-TTL`;
 const STREAM_EXPIRES_AT_HEADER = `Stream-Expires-At`;
+const PRODUCER_ID_HEADER = `Producer-Id`;
+const PRODUCER_EPOCH_HEADER = `Producer-Epoch`;
+const PRODUCER_SEQ_HEADER = `Producer-Seq`;
+const PRODUCER_EXPECTED_SEQ_HEADER = `Producer-Expected-Seq`;
+const PRODUCER_RECEIVED_SEQ_HEADER = `Producer-Received-Seq`;
 const SSE_OFFSET_FIELD = `streamNextOffset`;
 const SSE_CURSOR_FIELD = `streamCursor`;
 const SSE_UP_TO_DATE_FIELD = `upToDate`;
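With the Producer-* headers added to the CORS allow and expose lists in the next hunk, a browser client can both send them on an append and read them back from the response. A rough sketch; the URL, stream path, and the use of POST for appends are assumptions, not something this diff spells out:

```ts
// Placeholder URL; assumes a DurableStreamTestServer reachable from the browser.
async function appendFromBrowser(): Promise<void> {
  const res = await fetch("http://localhost:3000/my-stream", {
    method: "POST",
    headers: {
      "content-type": "application/json",
      "Producer-Id": "browser-tab-1",
      "Producer-Epoch": "0",
      "Producer-Seq": "0",
    },
    body: JSON.stringify({ hello: "world" }),
  });
  // Readable cross-origin only because they appear in access-control-expose-headers.
  console.log(res.status, res.headers.get("Producer-Seq"), res.headers.get("Stream-Next-Offset"));
}
```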
@@ -1277,8 +1568,8 @@ var DurableStreamTestServer = class {
 const method = req.method?.toUpperCase();
 res.setHeader(`access-control-allow-origin`, `*`);
 res.setHeader(`access-control-allow-methods`, `GET, POST, PUT, DELETE, HEAD, OPTIONS`);
-res.setHeader(`access-control-allow-headers`, `content-type, authorization, Stream-Seq, Stream-TTL, Stream-Expires-At`);
-res.setHeader(`access-control-expose-headers`, `Stream-Next-Offset, Stream-Cursor, Stream-Up-To-Date, etag, content-type, content-encoding, vary`);
+res.setHeader(`access-control-allow-headers`, `content-type, authorization, Stream-Seq, Stream-TTL, Stream-Expires-At, Producer-Id, Producer-Epoch, Producer-Seq`);
+res.setHeader(`access-control-expose-headers`, `Stream-Next-Offset, Stream-Cursor, Stream-Up-To-Date, Producer-Epoch, Producer-Seq, Producer-Expected-Seq, Producer-Received-Seq, etag, content-type, content-encoding, vary`);
 res.setHeader(`x-content-type-options`, `nosniff`);
 res.setHeader(`cross-origin-resource-policy`, `cross-origin`);
 if (method === `OPTIONS`) {
@@ -1453,7 +1744,7 @@ var DurableStreamTestServer = class {
 res.end(`Multiple offset parameters not allowed`);
 return;
 }
-const validOffsetPattern = /^(-1|\d+_\d+)$/;
+const validOffsetPattern = /^(-1|now|\d+_\d+)$/;
 if (!validOffsetPattern.test(offset)) {
 res.writeHead(400, { "content-type": `text/plain` });
 res.end(`Invalid offset format`);
@@ -1466,17 +1757,32 @@
 return;
 }
 if (live === `sse`) {
-
+const sseOffset = offset === `now` ? stream.currentOffset : offset;
+await this.handleSSE(path, stream, sseOffset, cursor, res);
 return;
 }
-
-
+const effectiveOffset = offset === `now` ? stream.currentOffset : offset;
+if (offset === `now` && live !== `long-poll`) {
+const headers$1 = {
+[STREAM_OFFSET_HEADER]: stream.currentOffset,
+[STREAM_UP_TO_DATE_HEADER]: `true`,
+[`cache-control`]: `no-store`
+};
+if (stream.contentType) headers$1[`content-type`] = stream.contentType;
+const isJsonMode = stream.contentType?.includes(`application/json`);
+const responseBody = isJsonMode ? `[]` : ``;
+res.writeHead(200, headers$1);
+res.end(responseBody);
+return;
+}
+let { messages, upToDate } = this.store.read(path, effectiveOffset);
+const clientIsCaughtUp = effectiveOffset && effectiveOffset === stream.currentOffset || offset === `now`;
 if (live === `long-poll` && clientIsCaughtUp && messages.length === 0) {
-const result = await this.store.waitForMessages(path,
+const result = await this.store.waitForMessages(path, effectiveOffset ?? stream.currentOffset, this.options.longPollTimeout);
 if (result.timedOut) {
 const responseCursor = generateResponseCursor(cursor, this.options.cursorOptions);
 res.writeHead(204, {
-[STREAM_OFFSET_HEADER]:
+[STREAM_OFFSET_HEADER]: effectiveOffset ?? stream.currentOffset,
 [STREAM_UP_TO_DATE_HEADER]: `true`,
 [STREAM_CURSOR_HEADER]: responseCursor
 });
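The `now` offset accepted above lets a reader skip history: a plain GET with `offset=now` returns an empty body (`[]` for JSON streams) along with the current offset and `Stream-Up-To-Date: true`, while long-poll and SSE readers simply start tailing from the head of the stream. A rough client sketch (the URL is a placeholder; the `Stream-Next-Offset` header name is taken from the expose list earlier in this diff):

```ts
// Placeholder URL; tail a stream starting from "now" rather than from history.
async function tailFromNow(streamUrl: string): Promise<void> {
  // Catch-up request: no historical messages, just the current position.
  const head = await fetch(`${streamUrl}?offset=now`);
  console.log(head.status, head.headers.get("Stream-Up-To-Date")); // 200 "true"
  const next = head.headers.get("Stream-Next-Offset");
  // Follow new messages only; a long-poll that times out responds with 204.
  const poll = await fetch(`${streamUrl}?offset=${next}&live=long-poll`);
  console.log(poll.status);
}
```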
@@ -1584,6 +1890,9 @@ var DurableStreamTestServer = class {
 async handleAppend(path, req, res) {
 const contentType = req.headers[`content-type`];
 const seq = req.headers[STREAM_SEQ_HEADER.toLowerCase()];
+const producerId = req.headers[PRODUCER_ID_HEADER.toLowerCase()];
+const producerEpochStr = req.headers[PRODUCER_EPOCH_HEADER.toLowerCase()];
+const producerSeqStr = req.headers[PRODUCER_SEQ_HEADER.toLowerCase()];
 const body = await this.readBody(req);
 if (body.length === 0) {
 res.writeHead(400, { "content-type": `text/plain` });
@@ -1595,10 +1904,96 @@ var DurableStreamTestServer = class {
 res.end(`Content-Type header is required`);
 return;
 }
-const
+const hasProducerHeaders = producerId !== void 0 || producerEpochStr !== void 0 || producerSeqStr !== void 0;
+const hasAllProducerHeaders = producerId !== void 0 && producerEpochStr !== void 0 && producerSeqStr !== void 0;
+if (hasProducerHeaders && !hasAllProducerHeaders) {
+res.writeHead(400, { "content-type": `text/plain` });
+res.end(`All producer headers (Producer-Id, Producer-Epoch, Producer-Seq) must be provided together`);
+return;
+}
+if (hasAllProducerHeaders && producerId === ``) {
+res.writeHead(400, { "content-type": `text/plain` });
+res.end(`Invalid Producer-Id: must not be empty`);
+return;
+}
+const STRICT_INTEGER_REGEX = /^\d+$/;
+let producerEpoch;
+let producerSeq;
+if (hasAllProducerHeaders) {
+if (!STRICT_INTEGER_REGEX.test(producerEpochStr)) {
+res.writeHead(400, { "content-type": `text/plain` });
+res.end(`Invalid Producer-Epoch: must be a non-negative integer`);
+return;
+}
+producerEpoch = Number(producerEpochStr);
+if (!Number.isSafeInteger(producerEpoch)) {
+res.writeHead(400, { "content-type": `text/plain` });
+res.end(`Invalid Producer-Epoch: must be a non-negative integer`);
+return;
+}
+if (!STRICT_INTEGER_REGEX.test(producerSeqStr)) {
+res.writeHead(400, { "content-type": `text/plain` });
+res.end(`Invalid Producer-Seq: must be a non-negative integer`);
+return;
+}
+producerSeq = Number(producerSeqStr);
+if (!Number.isSafeInteger(producerSeq)) {
+res.writeHead(400, { "content-type": `text/plain` });
+res.end(`Invalid Producer-Seq: must be a non-negative integer`);
+return;
+}
+}
+const appendOptions = {
 seq,
-contentType
-
+contentType,
+producerId,
+producerEpoch,
+producerSeq
+};
+let result;
+if (producerId !== void 0) result = await this.store.appendWithProducer(path, body, appendOptions);
+else result = await Promise.resolve(this.store.append(path, body, appendOptions));
+if (result && typeof result === `object` && `producerResult` in result) {
+const { message: message$1, producerResult } = result;
+if (!producerResult || producerResult.status === `accepted`) {
+const responseHeaders = { [STREAM_OFFSET_HEADER]: message$1.offset };
+if (producerEpoch !== void 0) responseHeaders[PRODUCER_EPOCH_HEADER] = producerEpoch.toString();
+if (producerSeq !== void 0) responseHeaders[PRODUCER_SEQ_HEADER] = producerSeq.toString();
+res.writeHead(200, responseHeaders);
+res.end();
+return;
+}
+switch (producerResult.status) {
+case `duplicate`:
+res.writeHead(204, {
+[PRODUCER_EPOCH_HEADER]: producerEpoch.toString(),
+[PRODUCER_SEQ_HEADER]: producerResult.lastSeq.toString()
+});
+res.end();
+return;
+case `stale_epoch`: {
+res.writeHead(403, {
+"content-type": `text/plain`,
+[PRODUCER_EPOCH_HEADER]: producerResult.currentEpoch.toString()
+});
+res.end(`Stale producer epoch`);
+return;
+}
+case `invalid_epoch_seq`:
+res.writeHead(400, { "content-type": `text/plain` });
+res.end(`New epoch must start with sequence 0`);
+return;
+case `sequence_gap`:
+res.writeHead(409, {
+"content-type": `text/plain`,
+[PRODUCER_EXPECTED_SEQ_HEADER]: producerResult.expectedSeq.toString(),
+[PRODUCER_RECEIVED_SEQ_HEADER]: producerResult.receivedSeq.toString()
+});
+res.end(`Producer sequence gap`);
+return;
+}
+}
+const message = result;
 res.writeHead(204, { [STREAM_OFFSET_HEADER]: message.offset });
 res.end();
 }
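handleAppend now maps producer validation outcomes onto HTTP statuses: 200 with echoed Producer-Epoch/Producer-Seq for an accepted producer append, 204 with the last committed sequence for a duplicate, 403 plus the current epoch for a stale epoch, 400 for malformed headers or a new epoch that does not start at sequence 0, and 409 with Producer-Expected-Seq/Producer-Received-Seq for a gap. A sketch of how a client might interpret those statuses (the URL and function name are illustrative):

```ts
// Illustrative client-side handling of the producer status codes implemented above.
async function appendWithProducer(
  url: string,
  body: string,
  producerId: string,
  epoch: number,
  seq: number,
): Promise<"accepted" | "duplicate"> {
  const res = await fetch(url, {
    method: "POST",
    headers: {
      "content-type": "application/json",
      "Producer-Id": producerId,
      "Producer-Epoch": String(epoch),
      "Producer-Seq": String(seq),
    },
    body,
  });
  if (res.status === 200) return "accepted";
  if (res.status === 204) return "duplicate"; // already appended; safe to move on
  if (res.status === 409)
    throw new Error(`sequence gap: server expects seq ${res.headers.get("Producer-Expected-Seq")}`);
  if (res.status === 403)
    throw new Error(`stale epoch: current epoch is ${res.headers.get("Producer-Epoch")}`);
  throw new Error(`append rejected: ${res.status} ${await res.text()}`);
}
```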