@durable-streams/server 0.1.3 → 0.1.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +518 -47
- package/dist/index.d.cts +191 -15
- package/dist/index.d.ts +191 -15
- package/dist/index.js +518 -47
- package/package.json +5 -5
- package/src/file-store.ts +238 -10
- package/src/server.ts +398 -61
- package/src/store.ts +272 -7
- package/src/types.ts +46 -0
package/dist/index.js
CHANGED
@@ -12,6 +12,10 @@ import { createStateSchema } from "@durable-streams/state";
 
 //#region src/store.ts
 /**
+ * TTL for in-memory producer state cleanup (7 days).
+ */
+const PRODUCER_STATE_TTL_MS = 7 * 24 * 60 * 60 * 1e3;
+/**
  * Normalize content-type by extracting the media type (before any semicolon).
  * Handles cases like "application/json; charset=utf-8".
  */
@@ -58,13 +62,15 @@ function formatJsonResponse(data) {
 const wrapped = `[${text}]`;
 return new TextEncoder().encode(wrapped);
 }
-/**
- * In-memory store for durable streams.
- */
 var StreamStore = class {
 streams = new Map();
 pendingLongPolls = [];
 /**
+ * Per-producer locks for serializing validation+append operations.
+ * Key: "{streamPath}:{producerId}"
+ */
+producerLocks = new Map();
+/**
  * Check if a stream is expired based on TTL or Expires-At.
  */
 isExpired(stream) {
@@ -140,6 +146,108 @@ var StreamStore = class {
 return this.streams.delete(path$2);
 }
 /**
+ * Validate producer state WITHOUT mutating.
+ * Returns proposed state to commit after successful append.
+ * Implements Kafka-style idempotent producer validation.
+ *
+ * IMPORTANT: This function does NOT mutate producer state. The caller must
+ * call commitProducerState() after successful append to apply the mutation.
+ * This ensures atomicity: if append fails (e.g., JSON validation), producer
+ * state is not incorrectly advanced.
+ */
+validateProducer(stream, producerId, epoch, seq) {
+if (!stream.producers) stream.producers = new Map();
+this.cleanupExpiredProducers(stream);
+const state = stream.producers.get(producerId);
+const now = Date.now();
+if (!state) {
+if (seq !== 0) return {
+status: `sequence_gap`,
+expectedSeq: 0,
+receivedSeq: seq
+};
+return {
+status: `accepted`,
+isNew: true,
+producerId,
+proposedState: {
+epoch,
+lastSeq: 0,
+lastUpdated: now
+}
+};
+}
+if (epoch < state.epoch) return {
+status: `stale_epoch`,
+currentEpoch: state.epoch
+};
+if (epoch > state.epoch) {
+if (seq !== 0) return { status: `invalid_epoch_seq` };
+return {
+status: `accepted`,
+isNew: true,
+producerId,
+proposedState: {
+epoch,
+lastSeq: 0,
+lastUpdated: now
+}
+};
+}
+if (seq <= state.lastSeq) return {
+status: `duplicate`,
+lastSeq: state.lastSeq
+};
+if (seq === state.lastSeq + 1) return {
+status: `accepted`,
+isNew: false,
+producerId,
+proposedState: {
+epoch,
+lastSeq: seq,
+lastUpdated: now
+}
+};
+return {
+status: `sequence_gap`,
+expectedSeq: state.lastSeq + 1,
+receivedSeq: seq
+};
+}
+/**
+ * Commit producer state after successful append.
+ * This is the only place where producer state is mutated.
+ */
+commitProducerState(stream, result) {
+if (result.status !== `accepted`) return;
+stream.producers.set(result.producerId, result.proposedState);
+}
+/**
+ * Clean up expired producer states from a stream.
+ */
+cleanupExpiredProducers(stream) {
+if (!stream.producers) return;
+const now = Date.now();
+for (const [id, state] of stream.producers) if (now - state.lastUpdated > PRODUCER_STATE_TTL_MS) stream.producers.delete(id);
+}
+/**
+ * Acquire a lock for serialized producer operations.
+ * Returns a release function.
+ */
+async acquireProducerLock(path$2, producerId) {
+const lockKey = `${path$2}:${producerId}`;
+while (this.producerLocks.has(lockKey)) await this.producerLocks.get(lockKey);
+let releaseLock;
+const lockPromise = new Promise((resolve) => {
+releaseLock = resolve;
+});
+this.producerLocks.set(lockKey, lockPromise);
+return () => {
+this.producerLocks.delete(lockKey);
+releaseLock();
+};
+}
+/**
  * Append data to a stream.
  * @throws Error if stream doesn't exist or is expired
  * @throws Error if seq is lower than lastSeq
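The `validateProducer` method added above is a small per-producer state machine over `(epoch, seq)`, and it deliberately returns a `proposedState` instead of mutating, so the store only commits it via `commitProducerState` once the append has actually succeeded. The sketch below restates those rules as a standalone function; the helper name `classifyProducerRequest` and the plain-object state shape are illustrative, not part of the package's API.

```js
// Standalone restatement of the validation rules in validateProducer above.
// `state` is the stored { epoch, lastSeq } for a producer, or undefined if unknown.
function classifyProducerRequest(state, epoch, seq) {
  if (!state) {
    // Unknown producer: its first request must start at seq 0.
    if (seq !== 0) return { status: "sequence_gap", expectedSeq: 0, receivedSeq: seq };
    return { status: "accepted", proposedState: { epoch, lastSeq: 0 } };
  }
  if (epoch < state.epoch) return { status: "stale_epoch", currentEpoch: state.epoch };
  if (epoch > state.epoch) {
    // A newer epoch (e.g. after a producer restart) must also begin at seq 0.
    if (seq !== 0) return { status: "invalid_epoch_seq" };
    return { status: "accepted", proposedState: { epoch, lastSeq: 0 } };
  }
  if (seq <= state.lastSeq) return { status: "duplicate", lastSeq: state.lastSeq };
  if (seq === state.lastSeq + 1) return { status: "accepted", proposedState: { epoch, lastSeq: seq } };
  return { status: "sequence_gap", expectedSeq: state.lastSeq + 1, receivedSeq: seq };
}

// A retried request that reuses an already-acknowledged seq is reported as a
// duplicate rather than being appended twice.
console.log(classifyProducerRequest({ epoch: 1, lastSeq: 3 }, 1, 3).status); // "duplicate"
```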
@@ -153,15 +261,56 @@ var StreamStore = class {
 const streamType = normalizeContentType(stream.contentType);
 if (providedType !== streamType) throw new Error(`Content-type mismatch: expected ${stream.contentType}, got ${options.contentType}`);
 }
+let producerResult;
+if (options.producerId !== void 0 && options.producerEpoch !== void 0 && options.producerSeq !== void 0) {
+producerResult = this.validateProducer(stream, options.producerId, options.producerEpoch, options.producerSeq);
+if (producerResult.status !== `accepted`) return {
+message: null,
+producerResult
+};
+}
 if (options.seq !== void 0) {
 if (stream.lastSeq !== void 0 && options.seq <= stream.lastSeq) throw new Error(`Sequence conflict: ${options.seq} <= ${stream.lastSeq}`);
-stream.lastSeq = options.seq;
 }
 const message = this.appendToStream(stream, data);
+if (producerResult) this.commitProducerState(stream, producerResult);
+if (options.seq !== void 0) stream.lastSeq = options.seq;
 this.notifyLongPolls(path$2);
+if (producerResult) return {
+message,
+producerResult
+};
 return message;
 }
 /**
+ * Append with producer serialization for concurrent request handling.
+ * This ensures that validation+append is atomic per producer.
+ */
+async appendWithProducer(path$2, data, options) {
+if (!options.producerId) {
+const result = this.append(path$2, data, options);
+if (`message` in result) return result;
+return { message: result };
+}
+const releaseLock = await this.acquireProducerLock(path$2, options.producerId);
+try {
+const result = this.append(path$2, data, options);
+if (`message` in result) return result;
+return { message: result };
+} finally {
+releaseLock();
+}
+}
+/**
+ * Get the current epoch for a producer on a stream.
+ * Returns undefined if the producer doesn't exist or stream not found.
+ */
+getProducerEpoch(path$2, producerId) {
+const stream = this.getIfNotExpired(path$2);
+if (!stream?.producers) return void 0;
+return stream.producers.get(producerId)?.epoch;
+}
+/**
  * Read messages from a stream starting at the given offset.
  * @throws Error if stream doesn't exist or is expired
  */
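`appendWithProducer` wraps the existing synchronous `append` in a per-key lock from `acquireProducerLock`, which is a promise-chain mutex: waiters await the stored promise in a loop, then install their own promise and hand back a release callback. A minimal standalone version of that pattern (the `withLock` helper below is hypothetical, shown only to illustrate the locking discipline used here):

```js
// Per-key mutex in the style of acquireProducerLock/appendWithProducer above.
// locks: Map<string, Promise<void>> keyed by "{streamPath}:{producerId}".
async function withLock(locks, key, fn) {
  while (locks.has(key)) await locks.get(key); // wait for the current holder
  let release;
  locks.set(key, new Promise((resolve) => { release = resolve; }));
  try {
    return await fn(); // validate + append happens while the key is held
  } finally {
    locks.delete(key);
    release(); // wake any queued waiters
  }
}

// Two concurrent appends for the same producer key run strictly one at a time,
// so validation never races with the append it is validating.
const locks = new Map();
await Promise.all([
  withLock(locks, `/logs:worker-42`, async () => {/* first append */}),
  withLock(locks, `/logs:worker-42`, async () => {/* runs after the first */}),
]);
```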
@@ -522,6 +671,11 @@ var FileBackedStreamStore = class {
 fileHandlePool;
 pendingLongPolls = [];
 dataDir;
+/**
+ * Per-producer locks for serializing validation+append operations.
+ * Key: "{streamPath}:{producerId}"
+ */
+producerLocks = new Map();
 constructor(options) {
 this.dataDir = options.dataDir;
 this.db = open({
@@ -603,6 +757,11 @@ var FileBackedStreamStore = class {
  * Convert LMDB metadata to Stream object.
  */
 streamMetaToStream(meta) {
+let producers;
+if (meta.producers) {
+producers = new Map();
+for (const [id, state] of Object.entries(meta.producers)) producers.set(id, { ...state });
+}
 return {
 path: meta.path,
 contentType: meta.contentType,
@@ -611,10 +770,103 @@ var FileBackedStreamStore = class {
 lastSeq: meta.lastSeq,
 ttlSeconds: meta.ttlSeconds,
 expiresAt: meta.expiresAt,
-createdAt: meta.createdAt
+createdAt: meta.createdAt,
+producers
+};
+}
+/**
+ * Validate producer state WITHOUT mutating.
+ * Returns proposed state to commit after successful append.
+ *
+ * IMPORTANT: This function does NOT mutate producer state. The caller must
+ * commit the proposedState after successful append (file write + fsync + LMDB).
+ * This ensures atomicity: if any step fails, producer state is not advanced.
+ */
+validateProducer(meta, producerId, epoch, seq) {
+if (!meta.producers) meta.producers = {};
+const state = meta.producers[producerId];
+const now = Date.now();
+if (!state) {
+if (seq !== 0) return {
+status: `sequence_gap`,
+expectedSeq: 0,
+receivedSeq: seq
+};
+return {
+status: `accepted`,
+isNew: true,
+producerId,
+proposedState: {
+epoch,
+lastSeq: 0,
+lastUpdated: now
+}
+};
+}
+if (epoch < state.epoch) return {
+status: `stale_epoch`,
+currentEpoch: state.epoch
+};
+if (epoch > state.epoch) {
+if (seq !== 0) return { status: `invalid_epoch_seq` };
+return {
+status: `accepted`,
+isNew: true,
+producerId,
+proposedState: {
+epoch,
+lastSeq: 0,
+lastUpdated: now
+}
+};
+}
+if (seq <= state.lastSeq) return {
+status: `duplicate`,
+lastSeq: state.lastSeq
+};
+if (seq === state.lastSeq + 1) return {
+status: `accepted`,
+isNew: false,
+producerId,
+proposedState: {
+epoch,
+lastSeq: seq,
+lastUpdated: now
+}
+};
+return {
+status: `sequence_gap`,
+expectedSeq: state.lastSeq + 1,
+receivedSeq: seq
 };
 }
 /**
+ * Acquire a lock for serialized producer operations.
+ * Returns a release function.
+ */
+async acquireProducerLock(streamPath, producerId) {
+const lockKey = `${streamPath}:${producerId}`;
+while (this.producerLocks.has(lockKey)) await this.producerLocks.get(lockKey);
+let releaseLock;
+const lockPromise = new Promise((resolve) => {
+releaseLock = resolve;
+});
+this.producerLocks.set(lockKey, lockPromise);
+return () => {
+this.producerLocks.delete(lockKey);
+releaseLock();
+};
+}
+/**
+ * Get the current epoch for a producer on a stream.
+ * Returns undefined if the producer doesn't exist or stream not found.
+ */
+getProducerEpoch(streamPath, producerId) {
+const meta = this.getMetaIfNotExpired(streamPath);
+if (!meta?.producers) return void 0;
+return meta.producers[producerId]?.epoch;
+}
+/**
  * Check if a stream is expired based on TTL or Expires-At.
  */
 isExpired(meta) {
@@ -724,6 +976,14 @@ var FileBackedStreamStore = class {
 const streamType = normalizeContentType(streamMeta.contentType);
 if (providedType !== streamType) throw new Error(`Content-type mismatch: expected ${streamMeta.contentType}, got ${options.contentType}`);
 }
+let producerResult;
+if (options.producerId !== void 0 && options.producerEpoch !== void 0 && options.producerSeq !== void 0) {
+producerResult = this.validateProducer(streamMeta, options.producerId, options.producerEpoch, options.producerSeq);
+if (producerResult.status !== `accepted`) return {
+message: null,
+producerResult
+};
+}
 if (options.seq !== void 0) {
 if (streamMeta.lastSeq !== void 0 && options.seq <= streamMeta.lastSeq) throw new Error(`Sequence conflict: ${options.seq} <= ${streamMeta.lastSeq}`);
 }
@@ -759,17 +1019,43 @@ var FileBackedStreamStore = class {
 timestamp: Date.now()
 };
 await this.fileHandlePool.fsyncFile(segmentPath);
+const updatedProducers = { ...streamMeta.producers };
+if (producerResult && producerResult.status === `accepted`) updatedProducers[producerResult.producerId] = producerResult.proposedState;
 const updatedMeta = {
 ...streamMeta,
 currentOffset: newOffset,
 lastSeq: options.seq ?? streamMeta.lastSeq,
-totalBytes: streamMeta.totalBytes + processedData.length + 5
+totalBytes: streamMeta.totalBytes + processedData.length + 5,
+producers: updatedProducers
 };
 const key = `stream:${streamPath}`;
 this.db.putSync(key, updatedMeta);
 this.notifyLongPolls(streamPath);
+if (producerResult) return {
+message,
+producerResult
+};
 return message;
 }
+/**
+ * Append with producer serialization for concurrent request handling.
+ * This ensures that validation+append is atomic per producer.
+ */
+async appendWithProducer(streamPath, data, options) {
+if (!options.producerId) {
+const result = await this.append(streamPath, data, options);
+if (result && `message` in result) return result;
+return { message: result };
+}
+const releaseLock = await this.acquireProducerLock(streamPath, options.producerId);
+try {
+const result = await this.append(streamPath, data, options);
+if (result && `message` in result) return result;
+return { message: result };
+} finally {
+releaseLock();
+}
+}
 read(streamPath, offset) {
 const streamMeta = this.getMetaIfNotExpired(streamPath);
 if (!streamMeta) throw new Error(`Stream not found: ${streamPath}`);
@@ -1043,6 +1329,11 @@ const STREAM_UP_TO_DATE_HEADER = `Stream-Up-To-Date`;
 const STREAM_SEQ_HEADER = `Stream-Seq`;
 const STREAM_TTL_HEADER = `Stream-TTL`;
 const STREAM_EXPIRES_AT_HEADER = `Stream-Expires-At`;
+const PRODUCER_ID_HEADER = `Producer-Id`;
+const PRODUCER_EPOCH_HEADER = `Producer-Epoch`;
+const PRODUCER_SEQ_HEADER = `Producer-Seq`;
+const PRODUCER_EXPECTED_SEQ_HEADER = `Producer-Expected-Seq`;
+const PRODUCER_RECEIVED_SEQ_HEADER = `Producer-Received-Seq`;
 const SSE_OFFSET_FIELD = `streamNextOffset`;
 const SSE_CURSOR_FIELD = `streamCursor`;
 const SSE_UP_TO_DATE_FIELD = `upToDate`;
@@ -1052,10 +1343,12 @@ const CURSOR_QUERY_PARAM = `cursor`;
 /**
  * Encode data for SSE format.
  * Per SSE spec, each line in the payload needs its own "data:" prefix.
- *
+ * Line terminators in the payload (CR, LF, or CRLF) become separate data: lines.
+ * This prevents CRLF injection attacks where malicious payloads could inject
+ * fake SSE events using CR-only line terminators.
  */
 function encodeSSEData(payload) {
-const lines = payload.split(
+const lines = payload.split(/\r\n|\r|\n/);
 return lines.map((line) => `data: ${line}`).join(`\n`) + `\n\n`;
 }
 /**
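The `encodeSSEData` change swaps the old single-delimiter split for `/\r\n|\r|\n/`, so a bare carriage return inside a payload becomes just another `data:` line instead of a raw CR that downstream parsers could treat as an event boundary. A quick standalone check of the new behaviour (same two lines re-typed here):

```js
// Same splitting logic as the patched encodeSSEData.
function encodeSSEData(payload) {
  const lines = payload.split(/\r\n|\r|\n/);
  return lines.map((line) => `data: ${line}`).join(`\n`) + `\n\n`;
}

// A CR-only terminator can no longer smuggle "event: fake" out of the data
// field; it simply starts a second data: line.
console.log(JSON.stringify(encodeSSEData(`hello\revent: fake`)));
// -> "data: hello\ndata: event: fake\n\n"
```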
@@ -1094,8 +1387,8 @@ var DurableStreamTestServer = class {
 _url = null;
 activeSSEResponses = new Set();
 isShuttingDown = false;
-/** Injected
-
+/** Injected faults for testing retry/resilience */
+injectedFaults = new Map();
 constructor(options = {}) {
 if (options.dataDir) this.store = new FileBackedStreamStore({ dataDir: options.dataDir });
 else this.store = new StreamStore();
@@ -1180,30 +1473,71 @@ var DurableStreamTestServer = class {
 /**
  * Inject an error to be returned on the next N requests to a path.
  * Used for testing retry/resilience behavior.
+ * @deprecated Use injectFault for full fault injection capabilities
  */
 injectError(path$2, status, count = 1, retryAfter) {
-this.
+this.injectedFaults.set(path$2, {
 status,
 count,
 retryAfter
 });
 }
 /**
- *
+ * Inject a fault to be triggered on the next N requests to a path.
+ * Supports various fault types: delays, connection drops, body corruption, etc.
+ */
+injectFault(path$2, fault) {
+this.injectedFaults.set(path$2, {
+count: 1,
+...fault
+});
+}
+/**
+ * Clear all injected faults.
+ */
+clearInjectedFaults() {
+this.injectedFaults.clear();
+}
+/**
+ * Check if there's an injected fault for this path/method and consume it.
+ * Returns the fault config if one should be triggered, null otherwise.
  */
-
-this.
+consumeInjectedFault(path$2, method) {
+const fault = this.injectedFaults.get(path$2);
+if (!fault) return null;
+if (fault.method && fault.method.toUpperCase() !== method.toUpperCase()) return null;
+if (fault.probability !== void 0 && Math.random() > fault.probability) return null;
+fault.count--;
+if (fault.count <= 0) this.injectedFaults.delete(path$2);
+return fault;
 }
 /**
- *
- * Returns the error config if one should be returned, null otherwise.
+ * Apply delay from fault config (including jitter).
  */
-
-
-
-
-
-
+async applyFaultDelay(fault) {
+if (fault.delayMs !== void 0 && fault.delayMs > 0) {
+const jitter = fault.jitterMs ? Math.random() * fault.jitterMs : 0;
+await new Promise((resolve) => setTimeout(resolve, fault.delayMs + jitter));
+}
+}
+/**
+ * Apply body modifications from stored fault (truncation, corruption).
+ * Returns modified body, or original if no modifications needed.
+ */
+applyFaultBodyModification(res, body) {
+const fault = res._injectedFault;
+if (!fault) return body;
+let modified = body;
+if (fault.truncateBodyBytes !== void 0 && modified.length > fault.truncateBodyBytes) modified = modified.slice(0, fault.truncateBodyBytes);
+if (fault.corruptBody && modified.length > 0) {
+modified = new Uint8Array(modified);
+const numCorrupt = Math.max(1, Math.floor(modified.length * .03));
+for (let i = 0; i < numCorrupt; i++) {
+const pos = Math.floor(Math.random() * modified.length);
+modified[pos] = modified[pos] ^ 1 << Math.floor(Math.random() * 8);
+}
+}
+return modified;
 }
 async handleRequest(req, res) {
 const url = new URL(req.url ?? `/`, `http://${req.headers.host}`);
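The new fault-injection surface stores at most one pending fault per path (`injectFault`), pops it per matching request (`consumeInjectedFault`), and applies it as a delay, an error status, a dropped socket, or a response-body mutation. A usage sketch against the test server; the import name and the start/listen step are assumptions, since only the class body appears in this diff:

```js
import { DurableStreamTestServer } from "@durable-streams/server"; // assumed export name

const server = new DurableStreamTestServer();
// Assumed: start the server and create the /logs stream via whatever API the
// package documents; neither step is visible in this diff.

// Fail the next two GETs of /logs with a 503 + Retry-After, after an
// artificial 250 ms delay (plus up to 100 ms of jitter).
server.injectFault(`/logs`, {
  method: `GET`,
  status: 503,
  retryAfter: 1,
  count: 2,
  delayMs: 250,
  jitterMs: 100,
});

// Other fields understood by the handler: dropConnection, truncateBodyBytes,
// corruptBody, and probability (0..1 chance of triggering per request).
server.clearInjectedFaults();
```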
@@ -1211,8 +1545,10 @@ var DurableStreamTestServer = class {
 const method = req.method?.toUpperCase();
 res.setHeader(`access-control-allow-origin`, `*`);
 res.setHeader(`access-control-allow-methods`, `GET, POST, PUT, DELETE, HEAD, OPTIONS`);
-res.setHeader(`access-control-allow-headers`, `content-type, authorization, Stream-Seq, Stream-TTL, Stream-Expires-At`);
-res.setHeader(`access-control-expose-headers`, `Stream-Next-Offset, Stream-Cursor, Stream-Up-To-Date, etag, content-type, content-encoding, vary`);
+res.setHeader(`access-control-allow-headers`, `content-type, authorization, Stream-Seq, Stream-TTL, Stream-Expires-At, Producer-Id, Producer-Epoch, Producer-Seq`);
+res.setHeader(`access-control-expose-headers`, `Stream-Next-Offset, Stream-Cursor, Stream-Up-To-Date, Producer-Epoch, Producer-Seq, Producer-Expected-Seq, Producer-Received-Seq, etag, content-type, content-encoding, vary`);
+res.setHeader(`x-content-type-options`, `nosniff`);
+res.setHeader(`cross-origin-resource-policy`, `cross-origin`);
 if (method === `OPTIONS`) {
 res.writeHead(204);
 res.end();
@@ -1222,13 +1558,21 @@ var DurableStreamTestServer = class {
 await this.handleTestInjectError(method, req, res);
 return;
 }
-const
-if (
-
-if (
-
-
-
+const fault = this.consumeInjectedFault(path$2, method ?? `GET`);
+if (fault) {
+await this.applyFaultDelay(fault);
+if (fault.dropConnection) {
+res.socket?.destroy();
+return;
+}
+if (fault.status !== void 0) {
+const headers = { "content-type": `text/plain` };
+if (fault.retryAfter !== void 0) headers[`retry-after`] = fault.retryAfter.toString();
+res.writeHead(fault.status, headers);
+res.end(`Injected error for testing`);
+return;
+}
+if (fault.truncateBodyBytes !== void 0 || fault.corruptBody) res._injectedFault = fault;
 }
 try {
 switch (method) {
@@ -1343,7 +1687,10 @@ var DurableStreamTestServer = class {
 res.end();
 return;
 }
-const headers = {
+const headers = {
+[STREAM_OFFSET_HEADER]: stream.currentOffset,
+"cache-control": `no-store`
+};
 if (stream.contentType) headers[`content-type`] = stream.contentType;
 headers[`etag`] = `"${Buffer.from(path$2).toString(`base64`)}:-1:${stream.currentOffset}"`;
 res.writeHead(200, headers);
@@ -1374,7 +1721,7 @@ var DurableStreamTestServer = class {
 res.end(`Multiple offset parameters not allowed`);
 return;
 }
-const validOffsetPattern = /^(-1|\d+_\d+)$/;
+const validOffsetPattern = /^(-1|now|\d+_\d+)$/;
 if (!validOffsetPattern.test(offset)) {
 res.writeHead(400, { "content-type": `text/plain` });
 res.end(`Invalid offset format`);
@@ -1387,17 +1734,32 @@ var DurableStreamTestServer = class {
 return;
 }
 if (live === `sse`) {
-
+const sseOffset = offset === `now` ? stream.currentOffset : offset;
+await this.handleSSE(path$2, stream, sseOffset, cursor, res);
 return;
 }
-
-
+const effectiveOffset = offset === `now` ? stream.currentOffset : offset;
+if (offset === `now` && live !== `long-poll`) {
+const headers$1 = {
+[STREAM_OFFSET_HEADER]: stream.currentOffset,
+[STREAM_UP_TO_DATE_HEADER]: `true`,
+[`cache-control`]: `no-store`
+};
+if (stream.contentType) headers$1[`content-type`] = stream.contentType;
+const isJsonMode = stream.contentType?.includes(`application/json`);
+const responseBody = isJsonMode ? `[]` : ``;
+res.writeHead(200, headers$1);
+res.end(responseBody);
+return;
+}
+let { messages, upToDate } = this.store.read(path$2, effectiveOffset);
+const clientIsCaughtUp = effectiveOffset && effectiveOffset === stream.currentOffset || offset === `now`;
 if (live === `long-poll` && clientIsCaughtUp && messages.length === 0) {
-const result = await this.store.waitForMessages(path$2,
+const result = await this.store.waitForMessages(path$2, effectiveOffset ?? stream.currentOffset, this.options.longPollTimeout);
 if (result.timedOut) {
 const responseCursor = generateResponseCursor(cursor, this.options.cursorOptions);
 res.writeHead(204, {
-[STREAM_OFFSET_HEADER]:
+[STREAM_OFFSET_HEADER]: effectiveOffset ?? stream.currentOffset,
 [STREAM_UP_TO_DATE_HEADER]: `true`,
 [STREAM_CURSOR_HEADER]: responseCursor
 });
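`offset=now` is now a valid offset: for a plain GET it short-circuits to an empty, up-to-date response at the head of the stream, while SSE and long-poll readers start waiting from the head. A hedged client-side sketch; the base URL is hypothetical and the response header name (`Stream-Next-Offset`) is inferred from the CORS expose list rather than shown directly in this hunk:

```js
// Assumes a running server and an existing application/json stream at /logs.
const baseUrl = `http://localhost:4437`; // hypothetical address
const res = await fetch(`${baseUrl}/logs?offset=now`);

console.log(res.status);                            // 200
console.log(res.headers.get(`stream-up-to-date`));  // "true"
console.log(res.headers.get(`stream-next-offset`)); // current head offset, e.g. "0_123"
console.log(await res.text());                      // "[]" for JSON streams, "" otherwise

// Tailing from "now": pass the returned head offset (or use offset=now with
// live=long-poll / live=sse) to receive only data appended after this point.
```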
@@ -1434,6 +1796,7 @@ var DurableStreamTestServer = class {
 headers[`vary`] = `accept-encoding`;
 }
 }
+finalData = this.applyFaultBodyModification(res, finalData);
 res.writeHead(200, headers);
 res.end(Buffer.from(finalData));
 }
@@ -1446,7 +1809,9 @@ var DurableStreamTestServer = class {
 "content-type": `text/event-stream`,
 "cache-control": `no-cache`,
 connection: `keep-alive`,
-"access-control-allow-origin":
+"access-control-allow-origin": `*`,
+"x-content-type-options": `nosniff`,
+"cross-origin-resource-policy": `cross-origin`
 });
 let currentOffset = initialOffset;
 let isConnected = true;
@@ -1502,6 +1867,9 @@ var DurableStreamTestServer = class {
 async handleAppend(path$2, req, res) {
 const contentType = req.headers[`content-type`];
 const seq = req.headers[STREAM_SEQ_HEADER.toLowerCase()];
+const producerId = req.headers[PRODUCER_ID_HEADER.toLowerCase()];
+const producerEpochStr = req.headers[PRODUCER_EPOCH_HEADER.toLowerCase()];
+const producerSeqStr = req.headers[PRODUCER_SEQ_HEADER.toLowerCase()];
 const body = await this.readBody(req);
 if (body.length === 0) {
 res.writeHead(400, { "content-type": `text/plain` });
@@ -1513,11 +1881,97 @@ var DurableStreamTestServer = class {
 res.end(`Content-Type header is required`);
 return;
 }
-const
+const hasProducerHeaders = producerId !== void 0 || producerEpochStr !== void 0 || producerSeqStr !== void 0;
+const hasAllProducerHeaders = producerId !== void 0 && producerEpochStr !== void 0 && producerSeqStr !== void 0;
+if (hasProducerHeaders && !hasAllProducerHeaders) {
+res.writeHead(400, { "content-type": `text/plain` });
+res.end(`All producer headers (Producer-Id, Producer-Epoch, Producer-Seq) must be provided together`);
+return;
+}
+if (hasAllProducerHeaders && producerId === ``) {
+res.writeHead(400, { "content-type": `text/plain` });
+res.end(`Invalid Producer-Id: must not be empty`);
+return;
+}
+const STRICT_INTEGER_REGEX = /^\d+$/;
+let producerEpoch;
+let producerSeq;
+if (hasAllProducerHeaders) {
+if (!STRICT_INTEGER_REGEX.test(producerEpochStr)) {
+res.writeHead(400, { "content-type": `text/plain` });
+res.end(`Invalid Producer-Epoch: must be a non-negative integer`);
+return;
+}
+producerEpoch = Number(producerEpochStr);
+if (!Number.isSafeInteger(producerEpoch)) {
+res.writeHead(400, { "content-type": `text/plain` });
+res.end(`Invalid Producer-Epoch: must be a non-negative integer`);
+return;
+}
+if (!STRICT_INTEGER_REGEX.test(producerSeqStr)) {
+res.writeHead(400, { "content-type": `text/plain` });
+res.end(`Invalid Producer-Seq: must be a non-negative integer`);
+return;
+}
+producerSeq = Number(producerSeqStr);
+if (!Number.isSafeInteger(producerSeq)) {
+res.writeHead(400, { "content-type": `text/plain` });
+res.end(`Invalid Producer-Seq: must be a non-negative integer`);
+return;
+}
+}
+const appendOptions = {
 seq,
-contentType
-
-
+contentType,
+producerId,
+producerEpoch,
+producerSeq
+};
+let result;
+if (producerId !== void 0) result = await this.store.appendWithProducer(path$2, body, appendOptions);
+else result = await Promise.resolve(this.store.append(path$2, body, appendOptions));
+if (result && typeof result === `object` && `producerResult` in result) {
+const { message: message$1, producerResult } = result;
+if (!producerResult || producerResult.status === `accepted`) {
+const responseHeaders = { [STREAM_OFFSET_HEADER]: message$1.offset };
+if (producerEpoch !== void 0) responseHeaders[PRODUCER_EPOCH_HEADER] = producerEpoch.toString();
+if (producerSeq !== void 0) responseHeaders[PRODUCER_SEQ_HEADER] = producerSeq.toString();
+res.writeHead(200, responseHeaders);
+res.end();
+return;
+}
+switch (producerResult.status) {
+case `duplicate`:
+res.writeHead(204, {
+[PRODUCER_EPOCH_HEADER]: producerEpoch.toString(),
+[PRODUCER_SEQ_HEADER]: producerResult.lastSeq.toString()
+});
+res.end();
+return;
+case `stale_epoch`: {
+res.writeHead(403, {
+"content-type": `text/plain`,
+[PRODUCER_EPOCH_HEADER]: producerResult.currentEpoch.toString()
+});
+res.end(`Stale producer epoch`);
+return;
+}
+case `invalid_epoch_seq`:
+res.writeHead(400, { "content-type": `text/plain` });
+res.end(`New epoch must start with sequence 0`);
+return;
+case `sequence_gap`:
+res.writeHead(409, {
+"content-type": `text/plain`,
+[PRODUCER_EXPECTED_SEQ_HEADER]: producerResult.expectedSeq.toString(),
+[PRODUCER_RECEIVED_SEQ_HEADER]: producerResult.receivedSeq.toString()
+});
+res.end(`Producer sequence gap`);
+return;
+}
+}
+const message = result;
+res.writeHead(204, { [STREAM_OFFSET_HEADER]: message.offset });
 res.end();
 }
 /**
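On the wire, the idempotent-producer protocol added to `handleAppend` uses three request headers and maps the validation outcomes to status codes: 200 accepted, 204 duplicate, 403 stale epoch (with the current `Producer-Epoch`), 409 sequence gap (with `Producer-Expected-Seq`/`Producer-Received-Seq`), and 400 for malformed or incomplete headers. A client sketch; the HTTP method used for appends (POST here) and the URL are assumptions, while the headers and status codes come from the code above:

```js
// Append one record with producer headers (POST is assumed here).
const res = await fetch(`http://localhost:4437/logs`, {
  method: `POST`,
  headers: {
    "content-type": `application/json`,
    "producer-id": `worker-42`,
    "producer-epoch": `1`,
    "producer-seq": `0`, // a new producer or epoch must start at 0
  },
  body: JSON.stringify({ msg: `hello` }),
});

switch (res.status) {
  case 200: // accepted; Producer-Epoch / Producer-Seq echoed back
  case 204: // duplicate seq: already stored, safe to treat as success
    break;
  case 403: // stale_epoch: fenced out; current epoch is in Producer-Epoch
    throw new Error(`fenced: epoch ${res.headers.get(`producer-epoch`)}`);
  case 409: // sequence_gap: resend starting from Producer-Expected-Seq
    console.log(res.headers.get(`producer-expected-seq`));
    break;
  default:  // 400: missing/invalid producer headers or bad epoch/seq combination
    console.log(await res.text());
}
```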
@@ -1548,12 +2002,29 @@ var DurableStreamTestServer = class {
 const body = await this.readBody(req);
 try {
 const config = JSON.parse(new TextDecoder().decode(body));
-if (!config.path
+if (!config.path) {
 res.writeHead(400, { "content-type": `text/plain` });
-res.end(`Missing required
+res.end(`Missing required field: path`);
 return;
 }
-
+const hasFaultType = config.status !== void 0 || config.delayMs !== void 0 || config.dropConnection || config.truncateBodyBytes !== void 0 || config.corruptBody;
+if (!hasFaultType) {
+res.writeHead(400, { "content-type": `text/plain` });
+res.end(`Must specify at least one fault type: status, delayMs, dropConnection, truncateBodyBytes, or corruptBody`);
+return;
+}
+this.injectFault(config.path, {
+status: config.status,
+count: config.count ?? 1,
+retryAfter: config.retryAfter,
+delayMs: config.delayMs,
+dropConnection: config.dropConnection,
+truncateBodyBytes: config.truncateBodyBytes,
+probability: config.probability,
+method: config.method,
+corruptBody: config.corruptBody,
+jitterMs: config.jitterMs
+});
 res.writeHead(200, { "content-type": `application/json` });
 res.end(JSON.stringify({ ok: true }));
 } catch {
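The same fault options can be driven over HTTP through the test endpoint handled above: the request body must be JSON with a `path` plus at least one fault field, and a DELETE to the endpoint clears every injected fault. The endpoint's route and write method are not visible in this diff, so both are placeholders below:

```js
// Placeholder route and method: neither is shown in this diff.
const INJECT_ENDPOINT = `http://localhost:4437/__test/inject-error`;

await fetch(INJECT_ENDPOINT, {
  method: `POST`,
  headers: { "content-type": `application/json` },
  body: JSON.stringify({
    path: `/logs`,        // required
    dropConnection: true, // at least one fault field is required
    count: 1,
    probability: 1,
  }),
});

// DELETE clears all injected faults (maps to clearInjectedFaults()).
await fetch(INJECT_ENDPOINT, { method: `DELETE` });
```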
@@ -1561,7 +2032,7 @@ var DurableStreamTestServer = class {
 res.end(`Invalid JSON body`);
 }
 } else if (method === `DELETE`) {
-this.
+this.clearInjectedFaults();
 res.writeHead(200, { "content-type": `application/json` });
 res.end(JSON.stringify({ ok: true }));
 } else {