@durable-streams/server 0.1.4 → 0.1.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/file-store.ts CHANGED
@@ -15,8 +15,24 @@ import {
   normalizeContentType,
   processJsonAppend,
 } from "./store"
+import type { AppendOptions, AppendResult } from "./store"
 import type { Database } from "lmdb"
-import type { PendingLongPoll, Stream, StreamMessage } from "./types"
+import type {
+  PendingLongPoll,
+  ProducerState,
+  ProducerValidationResult,
+  Stream,
+  StreamMessage,
+} from "./types"
+
+/**
+ * Serializable producer state for LMDB storage.
+ */
+interface SerializableProducerState {
+  epoch: number
+  lastSeq: number
+  lastUpdated: number
+}

 /**
  * Stream metadata stored in LMDB.
@@ -37,6 +53,11 @@ interface StreamMetadata {
  * This allows safe async deletion and immediate reuse of stream paths.
  */
   directoryName: string
+  /**
+   * Producer states for idempotent writes.
+   * Stored as a plain object for LMDB serialization.
+   */
+  producers?: Record<string, SerializableProducerState>
 }

 /**
@@ -168,6 +189,11 @@ export class FileBackedStreamStore {
   private fileHandlePool: FileHandlePool
   private pendingLongPolls: Array<PendingLongPoll> = []
   private dataDir: string
+  /**
+   * Per-producer locks for serializing validation+append operations.
+   * Key: "{streamPath}:{producerId}"
+   */
+  private producerLocks = new Map<string, Promise<unknown>>()

   constructor(options: FileBackedStreamStoreOptions) {
     this.dataDir = options.dataDir
@@ -320,6 +346,15 @@ export class FileBackedStreamStore {
    * Convert LMDB metadata to Stream object.
    */
   private streamMetaToStream(meta: StreamMetadata): Stream {
+    // Convert producers from object to Map if present
+    let producers: Map<string, ProducerState> | undefined
+    if (meta.producers) {
+      producers = new Map()
+      for (const [id, state] of Object.entries(meta.producers)) {
+        producers.set(id, { ...state })
+      }
+    }
+
     return {
       path: meta.path,
       contentType: meta.contentType,
@@ -329,9 +364,132 @@ export class FileBackedStreamStore {
       ttlSeconds: meta.ttlSeconds,
       expiresAt: meta.expiresAt,
       createdAt: meta.createdAt,
+      producers,
     }
   }

+  /**
+   * Validate producer state WITHOUT mutating.
+   * Returns proposed state to commit after successful append.
+   *
+   * IMPORTANT: This function does NOT mutate producer state. The caller must
+   * commit the proposedState after successful append (file write + fsync + LMDB).
+   * This ensures atomicity: if any step fails, producer state is not advanced.
+   */
+  private validateProducer(
+    meta: StreamMetadata,
+    producerId: string,
+    epoch: number,
+    seq: number
+  ): ProducerValidationResult {
+    // Initialize producers map if needed (safe - just ensures map exists)
+    if (!meta.producers) {
+      meta.producers = {}
+    }
+
+    const state = meta.producers[producerId]
+    const now = Date.now()
+
+    // New producer - accept if seq is 0
+    if (!state) {
+      if (seq !== 0) {
+        return {
+          status: `sequence_gap`,
+          expectedSeq: 0,
+          receivedSeq: seq,
+        }
+      }
+      // Return proposed state, don't mutate yet
+      return {
+        status: `accepted`,
+        isNew: true,
+        producerId,
+        proposedState: { epoch, lastSeq: 0, lastUpdated: now },
+      }
+    }
+
+    // Epoch validation (client-declared, server-validated)
+    if (epoch < state.epoch) {
+      return { status: `stale_epoch`, currentEpoch: state.epoch }
+    }
+
+    if (epoch > state.epoch) {
+      // New epoch must start at seq=0
+      if (seq !== 0) {
+        return { status: `invalid_epoch_seq` }
+      }
+      // Return proposed state for new epoch, don't mutate yet
+      return {
+        status: `accepted`,
+        isNew: true,
+        producerId,
+        proposedState: { epoch, lastSeq: 0, lastUpdated: now },
+      }
+    }
+
+    // Same epoch: sequence validation
+    if (seq <= state.lastSeq) {
+      return { status: `duplicate`, lastSeq: state.lastSeq }
+    }
+
+    if (seq === state.lastSeq + 1) {
+      // Return proposed state, don't mutate yet
+      return {
+        status: `accepted`,
+        isNew: false,
+        producerId,
+        proposedState: { epoch, lastSeq: seq, lastUpdated: now },
+      }
+    }
+
+    // Sequence gap
+    return {
+      status: `sequence_gap`,
+      expectedSeq: state.lastSeq + 1,
+      receivedSeq: seq,
+    }
+  }
+
+  /**
+   * Acquire a lock for serialized producer operations.
+   * Returns a release function.
+   */
+  private async acquireProducerLock(
+    streamPath: string,
+    producerId: string
+  ): Promise<() => void> {
+    const lockKey = `${streamPath}:${producerId}`
+
+    // Wait for any existing lock
+    while (this.producerLocks.has(lockKey)) {
+      await this.producerLocks.get(lockKey)
+    }
+
+    // Create our lock
+    let releaseLock: () => void
+    const lockPromise = new Promise<void>((resolve) => {
+      releaseLock = resolve
+    })
+    this.producerLocks.set(lockKey, lockPromise)
+
+    return () => {
+      this.producerLocks.delete(lockKey)
+      releaseLock!()
+    }
+  }
+
+  /**
+   * Get the current epoch for a producer on a stream.
+   * Returns undefined if the producer doesn't exist or stream not found.
+   */
+  getProducerEpoch(streamPath: string, producerId: string): number | undefined {
+    const meta = this.getMetaIfNotExpired(streamPath)
+    if (!meta?.producers) {
+      return undefined
+    }
+    return meta.producers[producerId]?.epoch
+  }
+
   /**
    * Check if a stream is expired based on TTL or Expires-At.
    */
@@ -528,12 +686,8 @@ export class FileBackedStreamStore {
   async append(
     streamPath: string,
     data: Uint8Array,
-    options: {
-      seq?: string
-      contentType?: string
-      isInitialCreate?: boolean
-    } = {}
-  ): Promise<StreamMessage | null> {
+    options: AppendOptions & { isInitialCreate?: boolean } = {}
+  ): Promise<StreamMessage | AppendResult | null> {
     const streamMeta = this.getMetaIfNotExpired(streamPath)

     if (!streamMeta) {
@@ -551,7 +705,32 @@ export class FileBackedStreamStore {
       }
     }

-    // Check sequence for writer coordination
+    // Handle producer validation FIRST if producer headers are present
+    // This must happen before Stream-Seq check so that retries with both
+    // producer headers AND Stream-Seq can return 204 (duplicate) instead of
+    // failing the Stream-Seq conflict check.
+    let producerResult: ProducerValidationResult | undefined
+    if (
+      options.producerId !== undefined &&
+      options.producerEpoch !== undefined &&
+      options.producerSeq !== undefined
+    ) {
+      producerResult = this.validateProducer(
+        streamMeta,
+        options.producerId,
+        options.producerEpoch,
+        options.producerSeq
+      )
+
+      // Return early for non-accepted results (duplicate, stale epoch, gap)
+      // IMPORTANT: Return 204 for duplicate BEFORE Stream-Seq check
+      if (producerResult.status !== `accepted`) {
+        return { message: null, producerResult }
+      }
+    }
+
+    // Check sequence for writer coordination (Stream-Seq, separate from Producer-Seq)
+    // This happens AFTER producer validation so retries can be deduplicated
     if (options.seq !== undefined) {
       if (
         streamMeta.lastSeq !== undefined &&
@@ -620,12 +799,19 @@ export class FileBackedStreamStore {
     // 3. Flush to disk (blocks here until durable)
     await this.fileHandlePool.fsyncFile(segmentPath)

-    // 4. Update LMDB metadata (only after flush, so metadata reflects durability)
+    // 4. Update LMDB metadata atomically (only after flush, so metadata reflects durability)
+    // This includes both the offset update and producer state update
+    // Producer state is committed HERE (not in validateProducer) for atomicity
+    const updatedProducers = { ...streamMeta.producers }
+    if (producerResult && producerResult.status === `accepted`) {
+      updatedProducers[producerResult.producerId] = producerResult.proposedState
+    }
     const updatedMeta: StreamMetadata = {
       ...streamMeta,
       currentOffset: newOffset,
       lastSeq: options.seq ?? streamMeta.lastSeq,
       totalBytes: streamMeta.totalBytes + processedData.length + 5, // +4 for length, +1 for newline
+      producers: updatedProducers,
     }
     const key = `stream:${streamPath}`
     this.db.putSync(key, updatedMeta)
@@ -633,10 +819,52 @@ export class FileBackedStreamStore {
     // 5. Notify long-polls (data is now readable from disk)
     this.notifyLongPolls(streamPath)

-    // 6. Return (client knows data is durable)
+    // 6. Return AppendResult if producer headers were used
+    if (producerResult) {
+      return {
+        message,
+        producerResult,
+      }
+    }
+
     return message
   }

+  /**
+   * Append with producer serialization for concurrent request handling.
+   * This ensures that validation+append is atomic per producer.
+   */
+  async appendWithProducer(
+    streamPath: string,
+    data: Uint8Array,
+    options: AppendOptions
+  ): Promise<AppendResult> {
+    if (!options.producerId) {
+      // No producer - just do a normal append
+      const result = await this.append(streamPath, data, options)
+      if (result && `message` in result) {
+        return result
+      }
+      return { message: result }
+    }
+
+    // Acquire lock for this producer
+    const releaseLock = await this.acquireProducerLock(
+      streamPath,
+      options.producerId
+    )
+
+    try {
+      const result = await this.append(streamPath, data, options)
+      if (result && `message` in result) {
+        return result
+      }
+      return { message: result }
+    } finally {
+      releaseLock()
+    }
+  }
+
   read(
     streamPath: string,
     offset?: string
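
The per-producer lock added above is a small promise-chain mutex: waiters loop on the map entry, and the release callback deletes the key before resolving. Below is a rough standalone sketch of the same pattern; `KeyedLock` and `demo` are hypothetical names for illustration only, not code from this package.

```ts
// Minimal per-key mutex in the style of acquireProducerLock/appendWithProducer.
class KeyedLock {
  private locks = new Map<string, Promise<unknown>>()

  async acquire(key: string): Promise<() => void> {
    // Wait until no one else holds this key
    while (this.locks.has(key)) {
      await this.locks.get(key)
    }
    let release!: () => void
    this.locks.set(key, new Promise<void>((resolve) => (release = resolve)))
    return () => {
      this.locks.delete(key)
      release()
    }
  }
}

async function demo() {
  const lock = new KeyedLock()
  const log: Array<string> = []

  // Two concurrent "appends" for the same {streamPath}:{producerId} key
  // run strictly one after the other.
  await Promise.all(
    [1, 2].map(async (n) => {
      const release = await lock.acquire(`/logs/app:producer-a`)
      try {
        log.push(`start ${n}`)
        await new Promise((r) => setTimeout(r, 10)) // simulated validate+append
        log.push(`end ${n}`)
      } finally {
        release()
      }
    })
  )
  console.log(log) // [ 'start 1', 'end 1', 'start 2', 'end 2' ]
}

demo()
```

Serializing validate+append per producer is what lets `validateProducer` stay a pure check: the proposed state is only committed to LMDB after the fsync succeeds, and no concurrent request for the same producer can interleave between validation and commit.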
package/src/server.ts CHANGED
@@ -19,6 +19,13 @@ const STREAM_SEQ_HEADER = `Stream-Seq`
 const STREAM_TTL_HEADER = `Stream-TTL`
 const STREAM_EXPIRES_AT_HEADER = `Stream-Expires-At`

+// Idempotent producer headers
+const PRODUCER_ID_HEADER = `Producer-Id`
+const PRODUCER_EPOCH_HEADER = `Producer-Epoch`
+const PRODUCER_SEQ_HEADER = `Producer-Seq`
+const PRODUCER_EXPECTED_SEQ_HEADER = `Producer-Expected-Seq`
+const PRODUCER_RECEIVED_SEQ_HEADER = `Producer-Received-Seq`
+
 // SSE control event fields (Protocol Section 5.7)
 const SSE_OFFSET_FIELD = `streamNextOffset`
 const SSE_CURSOR_FIELD = `streamCursor`
@@ -398,11 +405,11 @@ export class DurableStreamTestServer {
     )
     res.setHeader(
       `access-control-allow-headers`,
-      `content-type, authorization, Stream-Seq, Stream-TTL, Stream-Expires-At`
+      `content-type, authorization, Stream-Seq, Stream-TTL, Stream-Expires-At, Producer-Id, Producer-Epoch, Producer-Seq`
     )
     res.setHeader(
       `access-control-expose-headers`,
-      `Stream-Next-Offset, Stream-Cursor, Stream-Up-To-Date, etag, content-type, content-encoding, vary`
+      `Stream-Next-Offset, Stream-Cursor, Stream-Up-To-Date, Producer-Epoch, Producer-Seq, Producer-Expected-Seq, Producer-Received-Seq, etag, content-type, content-encoding, vary`
     )

     // Browser security headers (Protocol Section 10.7)
@@ -679,9 +686,9 @@ export class DurableStreamTestServer {
       return
     }

-    // Validate offset format: must be "-1" or match our offset format (digits_digits)
+    // Validate offset format: must be "-1", "now", or match our offset format (digits_digits)
     // This prevents path traversal, injection attacks, and invalid characters
-    const validOffsetPattern = /^(-1|\d+_\d+)$/
+    const validOffsetPattern = /^(-1|now|\d+_\d+)$/
     if (!validOffsetPattern.test(offset)) {
       res.writeHead(400, { "content-type": `text/plain` })
       res.end(`Invalid offset format`)
@@ -700,23 +707,57 @@ export class DurableStreamTestServer {

     // Handle SSE mode
     if (live === `sse`) {
-      await this.handleSSE(path, stream, offset!, cursor, res)
+      // For SSE with offset=now, convert to actual tail offset
+      const sseOffset = offset === `now` ? stream.currentOffset : offset!
+      await this.handleSSE(path, stream, sseOffset, cursor, res)
+      return
+    }
+
+    // For offset=now, convert to actual tail offset
+    // This allows long-poll to immediately start waiting for new data
+    const effectiveOffset = offset === `now` ? stream.currentOffset : offset
+
+    // Handle catch-up mode offset=now: return empty response with tail offset
+    // For long-poll mode, we fall through to wait for new data instead
+    if (offset === `now` && live !== `long-poll`) {
+      const headers: Record<string, string> = {
+        [STREAM_OFFSET_HEADER]: stream.currentOffset,
+        [STREAM_UP_TO_DATE_HEADER]: `true`,
+        // Prevent caching - tail offset changes with each append
+        [`cache-control`]: `no-store`,
+      }
+
+      if (stream.contentType) {
+        headers[`content-type`] = stream.contentType
+      }
+
+      // No ETag for offset=now responses - Cache-Control: no-store makes ETag unnecessary
+      // and some CDNs may behave unexpectedly with both headers
+
+      // For JSON mode, return empty array; otherwise empty body
+      const isJsonMode = stream.contentType?.includes(`application/json`)
+      const responseBody = isJsonMode ? `[]` : ``
+
+      res.writeHead(200, headers)
+      res.end(responseBody)
       return
     }

     // Read current messages
-    let { messages, upToDate } = this.store.read(path, offset)
+    let { messages, upToDate } = this.store.read(path, effectiveOffset)

     // Only wait in long-poll if:
     // 1. long-poll mode is enabled
-    // 2. Client provided an offset (not first request)
+    // 2. Client provided an offset (not first request) OR used offset=now
     // 3. Client's offset matches current offset (already caught up)
     // 4. No new messages
-    const clientIsCaughtUp = offset && offset === stream.currentOffset
+    const clientIsCaughtUp =
+      (effectiveOffset && effectiveOffset === stream.currentOffset) ||
+      offset === `now`
     if (live === `long-poll` && clientIsCaughtUp && messages.length === 0) {
       const result = await this.store.waitForMessages(
         path,
-        offset,
+        effectiveOffset ?? stream.currentOffset,
         this.options.longPollTimeout
       )

@@ -728,7 +769,7 @@ export class DurableStreamTestServer {
         this.options.cursorOptions
       )
       res.writeHead(204, {
-        [STREAM_OFFSET_HEADER]: offset,
+        [STREAM_OFFSET_HEADER]: effectiveOffset ?? stream.currentOffset,
         [STREAM_UP_TO_DATE_HEADER]: `true`,
         [STREAM_CURSOR_HEADER]: responseCursor,
       })
@@ -941,6 +982,17 @@ export class DurableStreamTestServer {
       | string
       | undefined

+    // Extract producer headers
+    const producerId = req.headers[PRODUCER_ID_HEADER.toLowerCase()] as
+      | string
+      | undefined
+    const producerEpochStr = req.headers[
+      PRODUCER_EPOCH_HEADER.toLowerCase()
+    ] as string | undefined
+    const producerSeqStr = req.headers[PRODUCER_SEQ_HEADER.toLowerCase()] as
+      | string
+      | undefined
+
     const body = await this.readBody(req)

     if (body.length === 0) {
@@ -956,15 +1008,148 @@ export class DurableStreamTestServer {
       return
     }

-    // Support both sync (StreamStore) and async (FileBackedStreamStore) append
-    // Note: append returns null only for empty arrays with isInitialCreate=true,
-    // which doesn't apply to POST requests (those throw on empty arrays)
-    const message = await Promise.resolve(
-      this.store.append(path, body, { seq, contentType })
-    )
+    // Validate producer headers - all three must be present together or none
+    // Also reject empty producer ID
+    const hasProducerHeaders =
+      producerId !== undefined ||
+      producerEpochStr !== undefined ||
+      producerSeqStr !== undefined
+    const hasAllProducerHeaders =
+      producerId !== undefined &&
+      producerEpochStr !== undefined &&
+      producerSeqStr !== undefined
+
+    if (hasProducerHeaders && !hasAllProducerHeaders) {
+      res.writeHead(400, { "content-type": `text/plain` })
+      res.end(
+        `All producer headers (Producer-Id, Producer-Epoch, Producer-Seq) must be provided together`
+      )
+      return
+    }
+
+    if (hasAllProducerHeaders && producerId === ``) {
+      res.writeHead(400, { "content-type": `text/plain` })
+      res.end(`Invalid Producer-Id: must not be empty`)
+      return
+    }
+
+    // Parse and validate producer epoch and seq as integers
+    // Use strict digit-only validation to reject values like "1abc" or "1e3"
+    const STRICT_INTEGER_REGEX = /^\d+$/
+    let producerEpoch: number | undefined
+    let producerSeq: number | undefined
+    if (hasAllProducerHeaders) {
+      if (!STRICT_INTEGER_REGEX.test(producerEpochStr)) {
+        res.writeHead(400, { "content-type": `text/plain` })
+        res.end(`Invalid Producer-Epoch: must be a non-negative integer`)
+        return
+      }
+      producerEpoch = Number(producerEpochStr)
+      if (!Number.isSafeInteger(producerEpoch)) {
+        res.writeHead(400, { "content-type": `text/plain` })
+        res.end(`Invalid Producer-Epoch: must be a non-negative integer`)
+        return
+      }
+
+      if (!STRICT_INTEGER_REGEX.test(producerSeqStr)) {
+        res.writeHead(400, { "content-type": `text/plain` })
+        res.end(`Invalid Producer-Seq: must be a non-negative integer`)
+        return
+      }
+      producerSeq = Number(producerSeqStr)
+      if (!Number.isSafeInteger(producerSeq)) {
+        res.writeHead(400, { "content-type": `text/plain` })
+        res.end(`Invalid Producer-Seq: must be a non-negative integer`)
+        return
+      }
+    }
+
+    // Build append options
+    const appendOptions = {
+      seq,
+      contentType,
+      producerId,
+      producerEpoch,
+      producerSeq,
+    }
+
+    // Use appendWithProducer for serialized producer operations
+    let result
+    if (producerId !== undefined) {
+      result = await this.store.appendWithProducer(path, body, appendOptions)
+    } else {
+      result = await Promise.resolve(
+        this.store.append(path, body, appendOptions)
+      )
+    }
+
+    // Handle AppendResult with producer validation
+    if (result && typeof result === `object` && `producerResult` in result) {
+      const { message, producerResult } = result
+
+      if (!producerResult || producerResult.status === `accepted`) {
+        // Success - return offset
+        const responseHeaders: Record<string, string> = {
+          [STREAM_OFFSET_HEADER]: message!.offset,
+        }
+        // Echo back the producer epoch and seq (highest accepted)
+        if (producerEpoch !== undefined) {
+          responseHeaders[PRODUCER_EPOCH_HEADER] = producerEpoch.toString()
+        }
+        if (producerSeq !== undefined) {
+          responseHeaders[PRODUCER_SEQ_HEADER] = producerSeq.toString()
+        }
+        res.writeHead(200, responseHeaders)
+        res.end()
+        return
+      }
+
+      // Handle producer validation failures
+      switch (producerResult.status) {
+        case `duplicate`:
+          // 204 No Content for duplicates (idempotent success)
+          // Return Producer-Seq as highest accepted (per PROTOCOL.md)
+          res.writeHead(204, {
+            [PRODUCER_EPOCH_HEADER]: producerEpoch!.toString(),
+            [PRODUCER_SEQ_HEADER]: producerResult.lastSeq.toString(),
+          })
+          res.end()
+          return
+
+        case `stale_epoch`: {
+          // 403 Forbidden for stale epochs (zombie fencing)
+          res.writeHead(403, {
+            "content-type": `text/plain`,
+            [PRODUCER_EPOCH_HEADER]: producerResult.currentEpoch.toString(),
+          })
+          res.end(`Stale producer epoch`)
+          return
+        }
+
+        case `invalid_epoch_seq`:
+          // 400 Bad Request for epoch increase with seq != 0
+          res.writeHead(400, { "content-type": `text/plain` })
+          res.end(`New epoch must start with sequence 0`)
+          return
+
+        case `sequence_gap`:
+          // 409 Conflict for sequence gaps
+          res.writeHead(409, {
+            "content-type": `text/plain`,
+            [PRODUCER_EXPECTED_SEQ_HEADER]:
+              producerResult.expectedSeq.toString(),
+            [PRODUCER_RECEIVED_SEQ_HEADER]:
+              producerResult.receivedSeq.toString(),
+          })
+          res.end(`Producer sequence gap`)
+          return
+      }
+    }

+    // Standard append (no producer) - result is StreamMessage
+    const message = result as { offset: string }
     res.writeHead(204, {
-      [STREAM_OFFSET_HEADER]: message!.offset,
+      [STREAM_OFFSET_HEADER]: message.offset,
     })
     res.end()
   }
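
For context, a rough sketch of how a writer might exercise the new producer headers from the client side. This is not code from the package; the base URL, port, stream path, and payload are invented for illustration, while the header names and status codes follow the server changes above.

```ts
// Hypothetical idempotent-append client against the POST handler patched in this diff.
async function appendIdempotent(
  baseUrl: string,
  streamPath: string,
  body: string,
  producerId: string,
  epoch: number,
  seq: number
): Promise<void> {
  for (let attempt = 0; attempt < 3; attempt++) {
    const res = await fetch(`${baseUrl}${streamPath}`, {
      method: `POST`,
      headers: {
        "content-type": `application/json`,
        "Producer-Id": producerId,
        "Producer-Epoch": String(epoch),
        "Producer-Seq": String(seq),
      },
      body,
    })

    if (res.status === 200 || res.status === 204) {
      // 200 = newly accepted (echoes Producer-Epoch/Producer-Seq),
      // 204 = duplicate of an already-accepted seq: safe to treat as success
      return
    }
    if (res.status === 409) {
      // Sequence gap: the server reports what it expected vs. received
      throw new Error(
        `gap: expected ${res.headers.get(`Producer-Expected-Seq`)}, ` +
          `sent ${res.headers.get(`Producer-Received-Seq`)}`
      )
    }
    if (res.status === 403) {
      // Stale epoch: a newer producer instance has fenced this one off
      throw new Error(`fenced: current epoch ${res.headers.get(`Producer-Epoch`)}`)
    }
    if (res.status === 400) {
      throw new Error(await res.text())
    }
    // Transient failure (network hiccup, 5xx): retry with the SAME epoch and seq;
    // the server's producer state turns the replay into a 204 duplicate.
  }
  throw new Error(`append failed after retries`)
}

// appendIdempotent(`http://localhost:4437`, `/streams/demo`, `{"n":1}`, `producer-a`, 1, 0)
```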