@electric-sql/client 1.5.1 → 1.5.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/client.ts CHANGED
@@ -51,6 +51,7 @@ import {
  REPLICA_PARAM,
  FORCE_DISCONNECT_AND_REFRESH,
  PAUSE_STREAM,
+ SYSTEM_WAKE,
  EXPERIMENTAL_LIVE_SSE_QUERY_PARAM,
  LIVE_SSE_QUERY_PARAM,
  ELECTRIC_PROTOCOL_QUERY_PARAMS,
@@ -72,6 +73,13 @@ import {
  import { expiredShapesCache } from './expired-shapes-cache'
  import { upToDateTracker } from './up-to-date-tracker'
  import { SnapshotTracker } from './snapshot-tracker'
+ import {
+ createInitialState,
+ ErrorState,
+ PausedState,
+ ShapeStreamState,
+ } from './shape-stream-state'
+ import { PauseLock } from './pause-lock'

  const RESERVED_PARAMS: Set<ReservedParamKeys> = new Set([
  LIVE_CACHE_BUSTER_QUERY_PARAM,
@@ -567,52 +575,60 @@ export class ShapeStream<T extends Row<unknown> = Row>
  >()

  #started = false
- #state = `active` as `active` | `pause-requested` | `paused`
- #lastOffset: Offset
- #liveCacheBuster: string // Seconds since our Electric Epoch 😎
- #lastSyncedAt?: number // unix time
- #isUpToDate: boolean = false
- #isMidStream: boolean = true
+ #syncState: ShapeStreamState
  #connected: boolean = false
- #shapeHandle?: string
  #mode: LogMode
- #schema?: Schema
  #onError?: ShapeStreamErrorHandler
  #requestAbortController?: AbortController
- #isRefreshing = false
+ #refreshCount = 0
+ #snapshotCounter = 0
+
+ get #isRefreshing(): boolean {
+ return this.#refreshCount > 0
+ }
  #tickPromise?: Promise<void>
  #tickPromiseResolver?: () => void
  #tickPromiseRejecter?: (reason?: unknown) => void
  #messageChain = Promise.resolve<void[]>([]) // promise chain for incoming messages
  #snapshotTracker = new SnapshotTracker()
- #activeSnapshotRequests = 0 // counter for concurrent snapshot requests
- #midStreamPromise?: Promise<void>
- #midStreamPromiseResolver?: () => void
- #lastSeenCursor?: string // Last seen cursor from previous session (used to detect cached responses)
+ #pauseLock: PauseLock
  #currentFetchUrl?: URL // Current fetch URL for computing shape key
  #lastSseConnectionStartTime?: number
  #minSseConnectionDuration = 1000 // Minimum expected SSE connection duration (1 second)
- #consecutiveShortSseConnections = 0
  #maxShortSseConnections = 3 // Fall back to long polling after this many short connections
- #sseFallbackToLongPolling = false
  #sseBackoffBaseDelay = 100 // Base delay for exponential backoff (ms)
  #sseBackoffMaxDelay = 5000 // Maximum delay cap (ms)
  #unsubscribeFromVisibilityChanges?: () => void
- #staleCacheBuster?: string // Cache buster set when stale CDN response detected, used on retry requests to bypass cache
- #staleCacheRetryCount = 0
+ #unsubscribeFromWakeDetection?: () => void
  #maxStaleCacheRetries = 3

- // Derived state: we're in replay mode if we have a last seen cursor
- get #replayMode(): boolean {
- return this.#lastSeenCursor !== undefined
- }
-
  constructor(options: ShapeStreamOptions<GetExtensions<T>>) {
  this.options = { subscribe: true, ...options }
  validateOptions(this.options)
- this.#lastOffset = this.options.offset ?? `-1`
- this.#liveCacheBuster = ``
- this.#shapeHandle = this.options.handle
+ this.#syncState = createInitialState({
+ offset: this.options.offset ?? `-1`,
+ handle: this.options.handle,
+ })
+
+ this.#pauseLock = new PauseLock({
+ onAcquired: () => {
+ this.#syncState = this.#syncState.pause()
+ if (this.#started) {
+ this.#requestAbortController?.abort(PAUSE_STREAM)
+ }
+ },
+ onReleased: () => {
+ if (!this.#started) return
+ if (this.options.signal?.aborted) return
+ // Don't transition syncState here — let #requestShape handle
+ // the PausedState→previous transition so it can detect
+ // resumingFromPause and avoid live long-polling.
+ this.#start().catch(() => {
+ // Errors from #start are handled internally via onError.
+ // This catch prevents unhandled promise rejection in Node/Bun.
+ })
+ },
+ })

  // Build transformer chain: columnMapper.decode -> transformer
  // columnMapper transforms column names, transformer transforms values
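Note: the new `pause-lock.ts` module itself is not part of this diff, so the sketch below is only an illustration of the contract implied by the call sites in this file (the `PauseLock` constructor above, plus `acquire`, `release`, `releaseAllMatching`, and `isPaused` further down): a reason-keyed lock whose callbacks fire only on the first acquire and the last release. The internals shown here are assumptions, not the published implementation.

// Illustrative sketch only; pause-lock.ts is not included in this diff.
type PauseLockCallbacks = {
  onAcquired: () => void
  onReleased: () => void
}

class PauseLockSketch {
  #holders = new Set<string>() // assumed internal representation
  #callbacks: PauseLockCallbacks

  constructor(callbacks: PauseLockCallbacks) {
    this.#callbacks = callbacks
  }

  get isPaused(): boolean {
    return this.#holders.size > 0
  }

  acquire(reason: string): void {
    const wasPaused = this.isPaused
    this.#holders.add(reason)
    // Only the unpaused -> paused edge notifies the stream.
    if (!wasPaused) this.#callbacks.onAcquired()
  }

  release(reason: string): void {
    if (!this.#holders.delete(reason)) return
    // Only the last holder releasing resumes the stream.
    if (!this.isPaused) this.#callbacks.onReleased()
  }

  releaseAllMatching(prefix: string): void {
    // Drops matching holders without firing onReleased (assumed prefix
    // matching), mirroring the comment in #reset further down.
    for (const holder of [...this.#holders]) {
      if (holder.startsWith(prefix)) this.#holders.delete(holder)
    }
  }
}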
@@ -666,10 +682,11 @@ export class ShapeStream<T extends Row<unknown> = Row>
  this.#fetchClient = createFetchWithConsumedMessages(this.#sseFetchClient)

  this.#subscribeToVisibilityChanges()
+ this.#subscribeToWakeDetection()
  }

  get shapeHandle() {
- return this.#shapeHandle
+ return this.#syncState.handle
  }

  get error() {
@@ -677,11 +694,11 @@ export class ShapeStream<T extends Row<unknown> = Row>
  }

  get isUpToDate() {
- return this.#isUpToDate
+ return this.#syncState.isUpToDate
  }

  get lastOffset() {
- return this.#lastOffset
+ return this.#syncState.offset
  }

  get mode() {
@@ -695,6 +712,9 @@ export class ShapeStream<T extends Row<unknown> = Row>
  await this.#requestShape()
  } catch (err) {
  this.#error = err
+ if (err instanceof Error) {
+ this.#syncState = this.#syncState.toErrorState(err)
+ }

  // Check if onError handler wants to retry
  if (this.#onError) {
@@ -722,6 +742,9 @@ export class ShapeStream<T extends Row<unknown> = Row>

  // Clear the error since we're retrying
  this.#error = null
+ if (this.#syncState instanceof ErrorState) {
+ this.#syncState = this.#syncState.retry()
+ }

  // Restart from current offset
  this.#started = false
@@ -733,8 +756,7 @@ export class ShapeStream<T extends Row<unknown> = Row>
  if (err instanceof Error) {
  this.#sendErrorToSubscribers(err)
  }
- this.#connected = false
- this.#tickPromiseRejecter?.()
+ this.#teardown()
  return
  }

@@ -743,31 +765,34 @@ export class ShapeStream<T extends Row<unknown> = Row>
  if (err instanceof Error) {
  this.#sendErrorToSubscribers(err)
  }
- this.#connected = false
- this.#tickPromiseRejecter?.()
+ this.#teardown()
  throw err
  }

- // Normal completion, clean up
+ this.#teardown()
+ }
+
+ #teardown() {
  this.#connected = false
  this.#tickPromiseRejecter?.()
+ this.#unsubscribeFromWakeDetection?.()
  }

  async #requestShape(): Promise<void> {
- if (this.#state === `pause-requested`) {
- this.#state = `paused`
- return
- }
+ if (this.#pauseLock.isPaused) return

  if (
  !this.options.subscribe &&
- (this.options.signal?.aborted || this.#isUpToDate)
+ (this.options.signal?.aborted || this.#syncState.isUpToDate)
  ) {
  return
  }

- const resumingFromPause = this.#state === `paused`
- this.#state = `active`
+ let resumingFromPause = false
+ if (this.#syncState instanceof PausedState) {
+ resumingFromPause = true
+ this.#syncState = this.#syncState.resume()
+ }

  const { url, signal } = this.options
  const { fetchUrl, requestHeaders } = await this.#constructUrl(
@@ -777,6 +802,17 @@ export class ShapeStream<T extends Row<unknown> = Row>
  const abortListener = await this.#createAbortListener(signal)
  const requestAbortController = this.#requestAbortController! // we know that it is not undefined because it is set by `this.#createAbortListener`

+ // Re-check after async setup — the lock may have been acquired
+ // during URL construction or abort controller creation (e.g., by
+ // requestSnapshot), when the abort controller didn't exist yet.
+ if (this.#pauseLock.isPaused) {
+ if (abortListener && signal) {
+ signal.removeEventListener(`abort`, abortListener)
+ }
+ this.#requestAbortController = undefined
+ return
+ }
+
  try {
  await this.#fetchShape({
  fetchUrl,
@@ -785,31 +821,20 @@ export class ShapeStream<T extends Row<unknown> = Row>
  resumingFromPause,
  })
  } catch (e) {
- // Handle abort error triggered by refresh
+ const abortReason = requestAbortController.signal.reason
+ const isRestartAbort =
+ requestAbortController.signal.aborted &&
+ (abortReason === FORCE_DISCONNECT_AND_REFRESH ||
+ abortReason === SYSTEM_WAKE)
+
  if (
  (e instanceof FetchError || e instanceof FetchBackoffAbortError) &&
- requestAbortController.signal.aborted &&
- requestAbortController.signal.reason === FORCE_DISCONNECT_AND_REFRESH
+ isRestartAbort
  ) {
- // Start a new request
  return this.#requestShape()
  }

  if (e instanceof FetchBackoffAbortError) {
- // Check current state - it may have changed due to concurrent pause/resume calls
- // from the visibility change handler during the async fetch operation.
- // TypeScript's flow analysis doesn't account for concurrent state changes.
- const currentState = this.#state as
- | `active`
- | `pause-requested`
- | `paused`
- if (
- requestAbortController.signal.aborted &&
- requestAbortController.signal.reason === PAUSE_STREAM &&
- currentState === `pause-requested`
- ) {
- this.#state = `paused`
- }
  return // interrupted
  }

@@ -830,13 +855,13 @@ export class ShapeStream<T extends Row<unknown> = Row>
  // consistent cache buster

  // Store the current shape URL as expired to avoid future 409s
- if (this.#shapeHandle) {
+ if (this.#syncState.handle) {
  const shapeKey = canonicalShapeKey(fetchUrl)
- expiredShapesCache.markExpired(shapeKey, this.#shapeHandle)
+ expiredShapesCache.markExpired(shapeKey, this.#syncState.handle)
  }

  const newShapeHandle =
- e.headers[SHAPE_HANDLE_HEADER] || `${this.#shapeHandle!}-next`
+ e.headers[SHAPE_HANDLE_HEADER] || `${this.#syncState.handle!}-next`
  this.#reset(newShapeHandle)

  // must refetch control message might be in a list or not depending
@@ -991,31 +1016,15 @@ export class ShapeStream<T extends Row<unknown> = Row>
  }
  }

- // Add Electric's internal parameters
- fetchUrl.searchParams.set(OFFSET_QUERY_PARAM, this.#lastOffset)
+ // Add state-specific parameters (offset, handle, live cache busters, etc.)
+ this.#syncState.applyUrlParams(fetchUrl, {
+ isSnapshotRequest: subsetParams !== undefined,
+ // Don't long-poll when resuming from pause or refreshing — avoids
+ // a 20s hold during which `isConnected` would be false
+ canLongPoll: !this.#isRefreshing && !resumingFromPause,
+ })
  fetchUrl.searchParams.set(LOG_MODE_QUERY_PARAM, this.#mode)

- // Snapshot requests (with subsetParams) should never use live polling
- const isSnapshotRequest = subsetParams !== undefined
-
- if (this.#isUpToDate && !isSnapshotRequest) {
- // If we are resuming from a paused state, we don't want to perform a live request
- // because it could be a long poll that holds for 20sec
- // and during all that time `isConnected` will be false
- if (!this.#isRefreshing && !resumingFromPause) {
- fetchUrl.searchParams.set(LIVE_QUERY_PARAM, `true`)
- }
- fetchUrl.searchParams.set(
- LIVE_CACHE_BUSTER_QUERY_PARAM,
- this.#liveCacheBuster
- )
- }
-
- if (this.#shapeHandle) {
- // This should probably be a header for better cache breaking?
- fetchUrl.searchParams.set(SHAPE_HANDLE_QUERY_PARAM, this.#shapeHandle!)
- }
-
  // Add cache buster for shapes known to be expired to prevent 409s
  const shapeKey = canonicalShapeKey(fetchUrl)
  const expiredHandle = expiredShapesCache.getExpiredHandle(shapeKey)
@@ -1023,15 +1032,6 @@ export class ShapeStream<T extends Row<unknown> = Row>
  fetchUrl.searchParams.set(EXPIRED_HANDLE_QUERY_PARAM, expiredHandle)
  }

- // Add random cache buster if we received a stale response from CDN
- // This forces a fresh request bypassing the misconfigured CDN cache
- if (this.#staleCacheBuster) {
- fetchUrl.searchParams.set(
- CACHE_BUSTER_QUERY_PARAM,
- this.#staleCacheBuster
- )
- }
-
  // sort query params in-place for stable URLs and improved cache hits
  fetchUrl.searchParams.sort()

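Note: the parameter construction that previously lived inline here now delegates to `applyUrlParams` on the sync state, and `shape-stream-state.ts` is not included in this diff. The sketch below is a rough, assumed reconstruction of what that method consolidates, based on the removed inline logic above; the constant values and the state field names (including `staleCacheBuster`) are placeholders, not the published API.

// Rough sketch only; shape-stream-state.ts is not part of this diff.
// Placeholder values stand in for the constants client.ts imports.
const OFFSET_QUERY_PARAM = `offset` // placeholder value
const LIVE_QUERY_PARAM = `live` // placeholder value
const LIVE_CACHE_BUSTER_QUERY_PARAM = `cursor` // placeholder value
const SHAPE_HANDLE_QUERY_PARAM = `handle` // placeholder value
const CACHE_BUSTER_QUERY_PARAM = `cache_buster` // placeholder value

interface UrlParamState {
  offset: string
  handle?: string
  liveCacheBuster: string
  isUpToDate: boolean
  staleCacheBuster?: string // assumed to have moved onto the state
}

function applyUrlParamsSketch(
  state: UrlParamState,
  fetchUrl: URL,
  opts: { isSnapshotRequest: boolean; canLongPoll: boolean }
): void {
  fetchUrl.searchParams.set(OFFSET_QUERY_PARAM, state.offset)

  if (state.isUpToDate && !opts.isSnapshotRequest) {
    // Long-poll only when the caller allows it (not refreshing and not
    // resuming from pause), mirroring the inline checks removed above.
    if (opts.canLongPoll) {
      fetchUrl.searchParams.set(LIVE_QUERY_PARAM, `true`)
    }
    fetchUrl.searchParams.set(LIVE_CACHE_BUSTER_QUERY_PARAM, state.liveCacheBuster)
  }

  if (state.handle) {
    fetchUrl.searchParams.set(SHAPE_HANDLE_QUERY_PARAM, state.handle)
  }

  if (state.staleCacheBuster) {
    fetchUrl.searchParams.set(CACHE_BUSTER_QUERY_PARAM, state.staleCacheBuster)
  }
}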
@@ -1062,165 +1062,132 @@ export class ShapeStream<T extends Row<unknown> = Row>
  }
  }

- async #onInitialResponse(response: Response) {
+ /**
+ * Processes response metadata (headers, status) and updates sync state.
+ * Returns `true` if the response body should be processed by the caller,
+ * or `false` if the response was ignored (stale) and the body should be skipped.
+ * Throws on stale-retry (to trigger a retry with cache buster).
+ */
+ async #onInitialResponse(response: Response): Promise<boolean> {
  const { headers, status } = response
  const shapeHandle = headers.get(SHAPE_HANDLE_HEADER)
- if (shapeHandle) {
- // Don't accept a handle we know is expired - this can happen if a
- // proxy serves a stale cached response despite the expired_handle
- // cache buster parameter
- const shapeKey = this.#currentFetchUrl
- ? canonicalShapeKey(this.#currentFetchUrl)
- : null
- const expiredHandle = shapeKey
- ? expiredShapesCache.getExpiredHandle(shapeKey)
- : null
- if (shapeHandle !== expiredHandle) {
- this.#shapeHandle = shapeHandle
- // Clear cache buster after successful response with valid handle
- if (this.#staleCacheBuster) {
- this.#staleCacheBuster = undefined
- this.#staleCacheRetryCount = 0
- }
- } else if (this.#shapeHandle === undefined) {
- // We received a stale response from cache and don't have a handle yet.
- // Instead of accepting the stale handle, throw an error to trigger a retry
- // with a random cache buster to bypass the CDN cache.
- this.#staleCacheRetryCount++
- // Cancel the response body to release the connection before retrying
- await response.body?.cancel()
- if (this.#staleCacheRetryCount > this.#maxStaleCacheRetries) {
- throw new FetchError(
- 502,
- undefined,
- undefined,
- {},
- this.#currentFetchUrl?.toString() ?? ``,
- `CDN continues serving stale cached responses after ${this.#maxStaleCacheRetries} retry attempts. ` +
- `This indicates a severe proxy/CDN misconfiguration. ` +
- `Check that your proxy includes all query parameters (especially 'handle' and 'offset') in its cache key. ` +
- `For more information visit the troubleshooting guide: https://electric-sql.com/docs/guides/troubleshooting`
- )
- }
- console.warn(
- `[Electric] Received stale cached response with expired shape handle. ` +
- `This should not happen and indicates a proxy/CDN caching misconfiguration. ` +
- `The response contained handle "${shapeHandle}" which was previously marked as expired. ` +
- `Check that your proxy includes all query parameters (especially 'handle' and 'offset') in its cache key. ` +
- `For more information visit the troubleshooting guide: https://electric-sql.com/docs/guides/troubleshooting ` +
- `Retrying with a random cache buster to bypass the stale cache (attempt ${this.#staleCacheRetryCount}/${this.#maxStaleCacheRetries}).`
- )
- // Generate a random cache buster for the retry
- this.#staleCacheBuster = `${Date.now()}-${Math.random().toString(36).substring(2, 9)}`
- throw new StaleCacheError(
- `Received stale cached response with expired handle "${shapeHandle}". ` +
- `This indicates a proxy/CDN caching misconfiguration. ` +
- `Check that your proxy includes all query parameters (especially 'handle' and 'offset') in its cache key.`
- )
- } else {
- // We already have a valid handle, so ignore the stale response entirely
- // to prevent a mismatch between our current handle and the stale offset.
- console.warn(
- `[Electric] Received stale cached response with expired shape handle. ` +
- `This should not happen and indicates a proxy/CDN caching misconfiguration. ` +
- `The response contained handle "${shapeHandle}" which was previously marked as expired. ` +
+ const shapeKey = this.#currentFetchUrl
+ ? canonicalShapeKey(this.#currentFetchUrl)
+ : null
+ const expiredHandle = shapeKey
+ ? expiredShapesCache.getExpiredHandle(shapeKey)
+ : null
+
+ const transition = this.#syncState.handleResponseMetadata({
+ status,
+ responseHandle: shapeHandle,
+ responseOffset: headers.get(CHUNK_LAST_OFFSET_HEADER) as Offset | null,
+ responseCursor: headers.get(LIVE_CACHE_BUSTER_HEADER),
+ responseSchema: getSchemaFromHeaders(headers),
+ expiredHandle,
+ now: Date.now(),
+ maxStaleCacheRetries: this.#maxStaleCacheRetries,
+ createCacheBuster: () =>
+ `${Date.now()}-${Math.random().toString(36).substring(2, 9)}`,
+ })
+
+ this.#syncState = transition.state
+
+ if (transition.action === `stale-retry`) {
+ // Cancel the response body to release the connection before retrying.
+ await response.body?.cancel()
+ if (transition.exceededMaxRetries) {
+ throw new FetchError(
+ 502,
+ undefined,
+ undefined,
+ {},
+ this.#currentFetchUrl?.toString() ?? ``,
+ `CDN continues serving stale cached responses after ${this.#maxStaleCacheRetries} retry attempts. ` +
+ `This indicates a severe proxy/CDN misconfiguration. ` +
  `Check that your proxy includes all query parameters (especially 'handle' and 'offset') in its cache key. ` +
- `Ignoring the stale response and continuing with handle "${this.#shapeHandle}".`
+ `For more information visit the troubleshooting guide: https://electric-sql.com/docs/guides/troubleshooting`
  )
- return
  }
+ console.warn(
+ `[Electric] Received stale cached response with expired shape handle. ` +
+ `This should not happen and indicates a proxy/CDN caching misconfiguration. ` +
+ `The response contained handle "${shapeHandle}" which was previously marked as expired. ` +
+ `Check that your proxy includes all query parameters (especially 'handle' and 'offset') in its cache key. ` +
+ `For more information visit the troubleshooting guide: https://electric-sql.com/docs/guides/troubleshooting ` +
+ `Retrying with a random cache buster to bypass the stale cache (attempt ${this.#syncState.staleCacheRetryCount}/${this.#maxStaleCacheRetries}).`
+ )
+ throw new StaleCacheError(
+ `Received stale cached response with expired handle "${shapeHandle}". ` +
+ `This indicates a proxy/CDN caching misconfiguration. ` +
+ `Check that your proxy includes all query parameters (especially 'handle' and 'offset') in its cache key.`
+ )
  }

- const lastOffset = headers.get(CHUNK_LAST_OFFSET_HEADER)
- if (lastOffset) {
- this.#lastOffset = lastOffset as Offset
- }
-
- const liveCacheBuster = headers.get(LIVE_CACHE_BUSTER_HEADER)
- if (liveCacheBuster) {
- this.#liveCacheBuster = liveCacheBuster
+ if (transition.action === `ignored`) {
+ // We already have a valid handle, so ignore the entire stale response
+ // (both metadata and body) to prevent a mismatch between our current
+ // handle and the stale data.
+ console.warn(
+ `[Electric] Received stale cached response with expired shape handle. ` +
+ `This should not happen and indicates a proxy/CDN caching misconfiguration. ` +
+ `The response contained handle "${shapeHandle}" which was previously marked as expired. ` +
+ `Check that your proxy includes all query parameters (especially 'handle' and 'offset') in its cache key. ` +
+ `Ignoring the stale response and continuing with handle "${this.#syncState.handle}".`
+ )
+ return false
  }

- this.#schema = this.#schema ?? getSchemaFromHeaders(headers)
-
- // NOTE: 204s are deprecated, the Electric server should not
- // send these in latest versions but this is here for backwards
- // compatibility
- if (status === 204) {
- // There's no content so we are live and up to date
- this.#lastSyncedAt = Date.now()
- }
+ return true
  }

  async #onMessages(batch: Array<Message<T>>, isSseMessage = false) {
- // Update isUpToDate
- if (batch.length > 0) {
- // Set isMidStream to true when we receive any data
- this.#isMidStream = true
-
- const lastMessage = batch[batch.length - 1]
- if (isUpToDateMessage(lastMessage)) {
- if (isSseMessage) {
- // Only use the offset from the up-to-date message if this was an SSE message.
- // If we would use this offset from a regular fetch, then it will be wrong
- // and we will get an "offset is out of bounds for this shape" error
- const offset = getOffset(lastMessage)
- if (offset) {
- this.#lastOffset = offset
- }
- }
- this.#lastSyncedAt = Date.now()
- this.#isUpToDate = true
- // Set isMidStream to false when we see an up-to-date message
- this.#isMidStream = false
- // Resolve the promise waiting for mid-stream to end
- this.#midStreamPromiseResolver?.()
-
- // Check if we should suppress this up-to-date notification
- // to prevent multiple renders from cached responses
- if (this.#replayMode && !isSseMessage) {
- // We're in replay mode (replaying cached responses during initial sync).
- // Check if the cursor has changed - cursors are time-based and always
- // increment, so a new cursor means fresh data from the server.
- const currentCursor = this.#liveCacheBuster
-
- if (currentCursor === this.#lastSeenCursor) {
- // Same cursor as previous session - suppress this up-to-date notification.
- // Exit replay mode after first suppression to ensure we don't get stuck
- // if CDN keeps returning the same cursor indefinitely.
- this.#lastSeenCursor = undefined
- return
- }
- }
+ if (batch.length === 0) return
+
+ const lastMessage = batch[batch.length - 1]
+ const hasUpToDateMessage = isUpToDateMessage(lastMessage)
+ const upToDateOffset = hasUpToDateMessage
+ ? getOffset(lastMessage)
+ : undefined
+
+ const transition = this.#syncState.handleMessageBatch({
+ hasMessages: true,
+ hasUpToDateMessage,
+ isSse: isSseMessage,
+ upToDateOffset,
+ now: Date.now(),
+ currentCursor: this.#syncState.liveCacheBuster,
+ })
+ this.#syncState = transition.state

- // We're either:
- // 1. Not in replay mode (normal operation), or
- // 2. This is a live/SSE message (always fresh), or
- // 3. Cursor has changed (exited replay mode with fresh data)
- // In all cases, notify subscribers and record the up-to-date.
- this.#lastSeenCursor = undefined // Exit replay mode
+ if (hasUpToDateMessage) {
+ if (transition.suppressBatch) {
+ return
+ }

- if (this.#currentFetchUrl) {
- const shapeKey = canonicalShapeKey(this.#currentFetchUrl)
- upToDateTracker.recordUpToDate(shapeKey, this.#liveCacheBuster)
- }
+ if (this.#currentFetchUrl) {
+ const shapeKey = canonicalShapeKey(this.#currentFetchUrl)
+ upToDateTracker.recordUpToDate(
+ shapeKey,
+ this.#syncState.liveCacheBuster
+ )
  }
+ }

- // Filter messages using snapshot tracker
- const messagesToProcess = batch.filter((message) => {
- if (isChangeMessage(message)) {
- return !this.#snapshotTracker.shouldRejectMessage(message)
- }
- return true // Always process control messages
- })
+ // Filter messages using snapshot tracker
+ const messagesToProcess = batch.filter((message) => {
+ if (isChangeMessage(message)) {
+ return !this.#snapshotTracker.shouldRejectMessage(message)
+ }
+ return true // Always process control messages
+ })

- await this.#publish(messagesToProcess)
- }
+ await this.#publish(messagesToProcess)
  }

  /**
  * Fetches the shape from the server using either long polling or SSE.
- * Upon receiving a successfull response, the #onInitialResponse method is called.
+ * Upon receiving a successful response, the #onInitialResponse method is called.
  * Afterwards, the #onMessages method is called for all the incoming updates.
  * @param opts - The options for the request.
  * @returns A promise that resolves when the request is complete (i.e. the long poll receives a response or the SSE connection is closed).
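Note: a pattern worth calling out in the rewritten handlers above is that the state object performs no side effects of its own. Methods such as `handleResponseMetadata` and `handleMessageBatch` return a transition (the next `state` plus a description of what happened), and the stream applies `transition.state` and then decides whether to warn, throw, or skip the body. The sketch below illustrates that shape with invented names; it is not the real `ShapeStreamState` API, which is not part of this diff.

// Minimal sketch of the "pure transition" pattern; all names are invented.
type Transition<S, A extends string> = { state: S; action: A }

class CounterStateSketch {
  constructor(readonly staleRetries = 0) {}

  handleStaleResponse(opts: {
    maxRetries: number
  }): Transition<CounterStateSketch, `stale-retry` | `give-up`> {
    const next = new CounterStateSketch(this.staleRetries + 1)
    return {
      state: next,
      // The state only describes what happened; the caller decides
      // whether to warn, throw, or retry, as #onInitialResponse does.
      action: next.staleRetries > opts.maxRetries ? `give-up` : `stale-retry`,
    }
  }
}

// Caller side, mirroring how client.ts consumes transitions:
let state = new CounterStateSketch()
const transition = state.handleStaleResponse({ maxRetries: 3 })
state = transition.state
if (transition.action === `give-up`) {
  console.warn(`too many stale responses`)
}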
@@ -1237,22 +1204,22 @@ export class ShapeStream<T extends Row<unknown> = Row>
  // Check if we should enter replay mode (replaying cached responses)
  // This happens when we're starting fresh (offset=-1 or before first up-to-date)
  // and there's a recent up-to-date in localStorage (< 60s)
- if (!this.#isUpToDate && !this.#replayMode) {
+ if (!this.#syncState.isUpToDate && this.#syncState.canEnterReplayMode()) {
  const shapeKey = canonicalShapeKey(opts.fetchUrl)
  const lastSeenCursor = upToDateTracker.shouldEnterReplayMode(shapeKey)
  if (lastSeenCursor) {
  // Enter replay mode and store the last seen cursor
- this.#lastSeenCursor = lastSeenCursor
+ this.#syncState = this.#syncState.enterReplayMode(lastSeenCursor)
  }
  }

  const useSse = this.options.liveSse ?? this.options.experimentalLiveSse
  if (
- this.#isUpToDate &&
- useSse &&
- !this.#isRefreshing &&
- !opts.resumingFromPause &&
- !this.#sseFallbackToLongPolling
+ this.#syncState.shouldUseSse({
+ liveSseEnabled: !!useSse,
+ isRefreshing: this.#isRefreshing,
+ resumingFromPause: !!opts.resumingFromPause,
+ })
  ) {
  opts.fetchUrl.searchParams.set(EXPERIMENTAL_LIVE_SSE_QUERY_PARAM, `true`)
  opts.fetchUrl.searchParams.set(LIVE_SSE_QUERY_PARAM, `true`)
@@ -1274,9 +1241,10 @@ export class ShapeStream<T extends Row<unknown> = Row>
  })

  this.#connected = true
- await this.#onInitialResponse(response)
+ const shouldProcessBody = await this.#onInitialResponse(response)
+ if (!shouldProcessBody) return

- const schema = this.#schema! // we know that it is not undefined because it is set by `this.#onInitialResponse`
+ const schema = this.#syncState.schema! // we know that it is not undefined because it is set by `this.#onInitialResponse`
  const res = await response.text()
  const messages = res || `[]`
  const batch = this.#messageParser.parse<Array<Message<T>>>(messages, schema)
@@ -1301,6 +1269,7 @@ export class ShapeStream<T extends Row<unknown> = Row>
  Accept: `text/event-stream`,
  }

+ let ignoredStaleResponse = false
  try {
  let buffer: Array<Message<T>> = []
  await fetchEventSource(fetchUrl.toString(), {
@@ -1308,12 +1277,16 @@ export class ShapeStream<T extends Row<unknown> = Row>
  fetch,
  onopen: async (response: Response) => {
  this.#connected = true
- await this.#onInitialResponse(response)
+ const shouldProcessBody = await this.#onInitialResponse(response)
+ if (!shouldProcessBody) {
+ ignoredStaleResponse = true
+ throw new Error(`stale response ignored`)
+ }
  },
  onmessage: (event: EventSourceMessage) => {
  if (event.data) {
  // event.data is a single JSON object
- const schema = this.#schema! // we know that it is not undefined because it is set in onopen when we call this.#onInitialResponse
+ const schema = this.#syncState.schema! // we know that it is not undefined because it is set in onopen when we call this.#onInitialResponse
  const message = this.#messageParser.parse<Message<T>>(
  event.data,
  schema
@@ -1335,14 +1308,14 @@ export class ShapeStream<T extends Row<unknown> = Row>
  signal: requestAbortController.signal,
  })
  } catch (error) {
+ if (ignoredStaleResponse) {
+ // Stale response was ignored in onopen — let the fetch loop retry
+ return
+ }
  if (requestAbortController.signal.aborted) {
- // During an SSE request, the fetch might have succeeded
- // and we are parsing the incoming stream.
- // If the abort happens while we're parsing the stream,
- // then it won't be caught by our `createFetchWithBackoff` wrapper
- // and instead we will get a raw AbortError here
- // which we need to turn into a `FetchBackoffAbortError`
- // such that #start handles it correctly.`
+ // An abort during SSE stream parsing produces a raw AbortError
+ // instead of going through createFetchWithBackoff -- wrap it so
+ // #start handles it correctly.
  throw new FetchBackoffAbortError()
  }
  throw error
@@ -1353,65 +1326,32 @@ export class ShapeStream<T extends Row<unknown> = Row>
  const connectionDuration = Date.now() - this.#lastSseConnectionStartTime!
  const wasAborted = requestAbortController.signal.aborted

- if (connectionDuration < this.#minSseConnectionDuration && !wasAborted) {
- // Connection was too short - likely a cached response or misconfiguration
- this.#consecutiveShortSseConnections++
-
- if (
- this.#consecutiveShortSseConnections >= this.#maxShortSseConnections
- ) {
- // Too many short connections - fall back to long polling
- this.#sseFallbackToLongPolling = true
- console.warn(
- `[Electric] SSE connections are closing immediately (possibly due to proxy buffering or misconfiguration). ` +
- `Falling back to long polling. ` +
- `Your proxy must support streaming SSE responses (not buffer the complete response). ` +
- `Configuration: Nginx add 'X-Accel-Buffering: no', Caddy add 'flush_interval -1' to reverse_proxy. ` +
- `Note: Do NOT disable caching entirely - Electric uses cache headers to enable request collapsing for efficiency.`
- )
- } else {
- // Add exponential backoff with full jitter to prevent tight infinite loop
- // Formula: random(0, min(cap, base * 2^attempt))
- const maxDelay = Math.min(
- this.#sseBackoffMaxDelay,
- this.#sseBackoffBaseDelay *
- Math.pow(2, this.#consecutiveShortSseConnections)
- )
- const delayMs = Math.floor(Math.random() * maxDelay)
- await new Promise((resolve) => setTimeout(resolve, delayMs))
- }
- } else if (connectionDuration >= this.#minSseConnectionDuration) {
- // Connection was healthy - reset counter
- this.#consecutiveShortSseConnections = 0
- }
- }
- }
-
- #pause() {
- if (this.#started && this.#state === `active`) {
- this.#state = `pause-requested`
- this.#requestAbortController?.abort(PAUSE_STREAM)
- }
- }
-
- #resume() {
- if (
- this.#started &&
- (this.#state === `paused` || this.#state === `pause-requested`)
- ) {
- // Don't resume if the user's signal is already aborted
- // This can happen if the signal was aborted while we were paused
- // (e.g., TanStack DB collection was GC'd)
- if (this.options.signal?.aborted) {
- return
- }
+ const transition = this.#syncState.handleSseConnectionClosed({
+ connectionDuration,
+ wasAborted,
+ minConnectionDuration: this.#minSseConnectionDuration,
+ maxShortConnections: this.#maxShortSseConnections,
+ })
+ this.#syncState = transition.state

- // If we're resuming from pause-requested state, we need to set state back to active
- // to prevent the pause from completing
- if (this.#state === `pause-requested`) {
- this.#state = `active`
+ if (transition.fellBackToLongPolling) {
+ console.warn(
+ `[Electric] SSE connections are closing immediately (possibly due to proxy buffering or misconfiguration). ` +
+ `Falling back to long polling. ` +
+ `Your proxy must support streaming SSE responses (not buffer the complete response). ` +
+ `Configuration: Nginx add 'X-Accel-Buffering: no', Caddy add 'flush_interval -1' to reverse_proxy. ` +
+ `Note: Do NOT disable caching entirely - Electric uses cache headers to enable request collapsing for efficiency.`
+ )
+ } else if (transition.wasShortConnection) {
+ // Exponential backoff with full jitter: random(0, min(cap, base * 2^attempt))
+ const maxDelay = Math.min(
+ this.#sseBackoffMaxDelay,
+ this.#sseBackoffBaseDelay *
+ Math.pow(2, this.#syncState.consecutiveShortSseConnections)
+ )
+ const delayMs = Math.floor(Math.random() * maxDelay)
+ await new Promise((resolve) => setTimeout(resolve, delayMs))
  }
- this.#start()
  }
  }

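Note: the backoff math itself is unchanged by this release; only the attempt counter moved into the sync state. For reference, a standalone sketch of the full-jitter formula named in the comment above, using the same defaults as `#sseBackoffBaseDelay` (100 ms) and `#sseBackoffMaxDelay` (5000 ms):

// Full-jitter exponential backoff: random(0, min(cap, base * 2^attempt)).
function fullJitterDelay(
  attempt: number,
  baseDelayMs = 100,
  maxDelayMs = 5000
): number {
  const cap = Math.min(maxDelayMs, baseDelayMs * Math.pow(2, attempt))
  return Math.floor(Math.random() * cap)
}

async function backoff(attempt: number): Promise<void> {
  const delayMs = fullJitterDelay(attempt)
  await new Promise((resolve) => setTimeout(resolve, delayMs))
}

// e.g. attempt 1 -> random delay in [0, 200), attempt 3 -> [0, 800),
// attempt 6 and above -> capped at [0, 5000).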
@@ -1432,17 +1372,18 @@ export class ShapeStream<T extends Row<unknown> = Row>
  unsubscribeAll(): void {
  this.#subscribers.clear()
  this.#unsubscribeFromVisibilityChanges?.()
+ this.#unsubscribeFromWakeDetection?.()
  }

- /** Unix time at which we last synced. Undefined when `isLoading` is true. */
+ /** Unix time at which we last synced. Undefined until first successful up-to-date. */
  lastSyncedAt(): number | undefined {
- return this.#lastSyncedAt
+ return this.#syncState.lastSyncedAt
  }

  /** Time elapsed since last sync (in ms). Infinity if we did not yet sync. */
  lastSynced(): number {
- if (this.#lastSyncedAt === undefined) return Infinity
- return Date.now() - this.#lastSyncedAt
+ if (this.#syncState.lastSyncedAt === undefined) return Infinity
+ return Date.now() - this.#syncState.lastSyncedAt
  }

  /** Indicates if we are connected to the Electric sync service. */
@@ -1450,9 +1391,9 @@ export class ShapeStream<T extends Row<unknown> = Row>
  return this.#connected
  }

- /** True during initial fetch. False afterwise. */
+ /** True during initial fetch. False afterwards. */
  isLoading(): boolean {
- return !this.#isUpToDate
+ return !this.#syncState.isUpToDate
  }

  hasStarted(): boolean {
@@ -1460,11 +1401,16 @@ export class ShapeStream<T extends Row<unknown> = Row>
  }

  isPaused(): boolean {
- return this.#state === `paused`
+ return this.#pauseLock.isPaused
  }

  /** Await the next tick of the request loop */
  async #nextTick() {
+ if (this.#pauseLock.isPaused) {
+ throw new Error(
+ `Cannot wait for next tick while PauseLock is held — this would deadlock because the request loop is paused`
+ )
+ }
  if (this.#tickPromise) {
  return this.#tickPromise
  }
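Note: the new guard in `#nextTick` exists because the tick promise is resolved only by the request loop, and the request loop does not run while the pause lock is held, so awaiting the next tick in that situation could never resolve. A minimal, hypothetical illustration of that hazard (independent of the real class):

// Hypothetical illustration of the deadlock #nextTick now guards against.
class LoopSketch {
  #paused = false
  #tickResolver?: () => void

  pause() {
    this.#paused = true
  }

  // The loop resolves the tick promise; it never runs while paused.
  runOneTick() {
    if (this.#paused) return
    this.#tickResolver?.()
    this.#tickResolver = undefined
  }

  nextTick(): Promise<void> {
    if (this.#paused) {
      // Without this check the returned promise could never resolve,
      // because runOneTick() is a no-op while paused.
      throw new Error(`would deadlock: loop is paused`)
    }
    return new Promise((resolve) => {
      this.#tickResolver = resolve
    })
  }
}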
@@ -1480,24 +1426,6 @@ export class ShapeStream<T extends Row<unknown> = Row>
  return this.#tickPromise
  }

- /** Await until we're not in the middle of a stream (i.e., until we see an up-to-date message) */
- async #waitForStreamEnd() {
- if (!this.#isMidStream) {
- return
- }
- if (this.#midStreamPromise) {
- return this.#midStreamPromise
- }
- this.#midStreamPromise = new Promise((resolve) => {
- this.#midStreamPromiseResolver = resolve
- })
- this.#midStreamPromise.finally(() => {
- this.#midStreamPromise = undefined
- this.#midStreamPromiseResolver = undefined
- })
- return this.#midStreamPromise
- }
-
  /**
  * Refreshes the shape stream.
  * This preemptively aborts any ongoing long poll and reconnects without
@@ -1505,14 +1433,20 @@ export class ShapeStream<T extends Row<unknown> = Row>
  * latest LSN from Postgres at that point in time.
  */
  async forceDisconnectAndRefresh(): Promise<void> {
- this.#isRefreshing = true
- if (this.#isUpToDate && !this.#requestAbortController?.signal.aborted) {
- // If we are "up to date", any current request will be a "live" request
- // and needs to be aborted
- this.#requestAbortController?.abort(FORCE_DISCONNECT_AND_REFRESH)
+ this.#refreshCount++
+ try {
+ if (
+ this.#syncState.isUpToDate &&
+ !this.#requestAbortController?.signal.aborted
+ ) {
+ // If we are "up to date", any current request will be a "live" request
+ // and needs to be aborted
+ this.#requestAbortController?.abort(FORCE_DISCONNECT_AND_REFRESH)
+ }
+ await this.#nextTick()
+ } finally {
+ this.#refreshCount--
  }
- await this.#nextTick()
- this.#isRefreshing = false
  }

  async #publish(messages: Message<T>[]): Promise<void[]> {
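Note: replacing the old `#isRefreshing` boolean with `#refreshCount` plus a derived getter makes overlapping `forceDisconnectAndRefresh()` calls safe: the first call to finish can no longer clear the flag while a second refresh is still awaiting its tick. A small hypothetical illustration of the difference:

// Hypothetical illustration: a counter survives overlapping refreshes,
// a boolean does not.
class RefreshCounterSketch {
  #refreshCount = 0

  get isRefreshing(): boolean {
    return this.#refreshCount > 0
  }

  async refresh(waitMs: number): Promise<void> {
    this.#refreshCount++
    try {
      await new Promise((resolve) => setTimeout(resolve, waitMs))
    } finally {
      this.#refreshCount--
    }
  }
}

async function demo() {
  const s = new RefreshCounterSketch()
  const fast = s.refresh(10)
  const slow = s.refresh(100)
  await fast
  // With a boolean flag this would already read false; with the counter it
  // stays true until the slower refresh finishes too.
  console.log(s.isRefreshing) // true
  await slow
  console.log(s.isRefreshing) // false
}

demo()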
@@ -1543,17 +1477,21 @@ export class ShapeStream<T extends Row<unknown> = Row>
  })
  }

- #subscribeToVisibilityChanges() {
- if (
+ #hasBrowserVisibilityAPI(): boolean {
+ return (
  typeof document === `object` &&
  typeof document.hidden === `boolean` &&
  typeof document.addEventListener === `function`
- ) {
+ )
+ }
+
+ #subscribeToVisibilityChanges() {
+ if (this.#hasBrowserVisibilityAPI()) {
  const visibilityHandler = () => {
  if (document.hidden) {
- this.#pause()
+ this.#pauseLock.acquire(`visibility`)
  } else {
- this.#resume()
+ this.#pauseLock.release(`visibility`)
  }
  }

@@ -1566,25 +1504,68 @@ export class ShapeStream<T extends Row<unknown> = Row>
  }
  }

+ /**
+ * Detects system wake from sleep using timer gap detection.
+ * When the system sleeps, setInterval timers are paused. On wake,
+ * the elapsed wall-clock time since the last tick will be much larger
+ * than the interval period, indicating the system was asleep.
+ *
+ * Only active in non-browser environments (Bun, Node.js) where
+ * `document.visibilitychange` is not available. In browsers,
+ * `#subscribeToVisibilityChanges` handles this instead. Without wake
+ * detection, in-flight HTTP requests (long-poll or SSE) may hang until
+ * the OS TCP timeout.
+ */
+ #subscribeToWakeDetection() {
+ if (this.#hasBrowserVisibilityAPI()) return
+
+ const INTERVAL_MS = 2_000
+ const WAKE_THRESHOLD_MS = 4_000
+
+ let lastTickTime = Date.now()
+
+ const timer = setInterval(() => {
+ const now = Date.now()
+ const elapsed = now - lastTickTime
+ lastTickTime = now
+
+ if (elapsed > INTERVAL_MS + WAKE_THRESHOLD_MS) {
+ if (!this.#pauseLock.isPaused && this.#requestAbortController) {
+ this.#refreshCount++
+ this.#requestAbortController.abort(SYSTEM_WAKE)
+ // Wake handler is synchronous (setInterval callback) so we can't
+ // use try/finally + await like forceDisconnectAndRefresh. Instead,
+ // decrement via queueMicrotask — safe because the abort triggers
+ // #requestShape to re-run, which reads #isRefreshing synchronously
+ // before the microtask fires.
+ queueMicrotask(() => {
+ this.#refreshCount--
+ })
+ }
+ }
+ }, INTERVAL_MS)
+
+ // Ensure the timer doesn't prevent the process from exiting
+ if (typeof timer === `object` && `unref` in timer) {
+ timer.unref()
+ }
+
+ this.#unsubscribeFromWakeDetection = () => {
+ clearInterval(timer)
+ }
+ }
+
  /**
  * Resets the state of the stream, optionally with a provided
  * shape handle
  */
  #reset(handle?: string) {
- this.#lastOffset = `-1`
- this.#liveCacheBuster = ``
- this.#shapeHandle = handle
- this.#isUpToDate = false
- this.#isMidStream = true
+ this.#syncState = this.#syncState.markMustRefetch(handle)
  this.#connected = false
- this.#schema = undefined
- this.#activeSnapshotRequests = 0
- // Reset SSE fallback state to try SSE again after reset
- this.#consecutiveShortSseConnections = 0
- this.#sseFallbackToLongPolling = false
- // Reset stale cache retry state
- this.#staleCacheBuster = undefined
- this.#staleCacheRetryCount = 0
+ // releaseAllMatching intentionally doesn't fire onReleased — it's called
+ // from within the running stream loop (#requestShape's 409 handler), so
+ // the stream is already active and doesn't need a resume signal.
+ this.#pauseLock.releaseAllMatching(`snapshot`)
  }

  /**
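Note: the timer-gap technique used by `#subscribeToWakeDetection` above can be demonstrated in isolation. A repeating 2-second interval whose observed gap exceeds the interval plus a 4-second threshold implies the process or machine was suspended. The sketch below is a standalone, hypothetical helper (the `onWake` callback is invented; in client.ts the wake path instead aborts the in-flight request with `SYSTEM_WAKE`):

// Standalone sketch of timer-gap wake detection (same 2s / 4s constants).
function watchForSystemWake(onWake: (gapMs: number) => void): () => void {
  const INTERVAL_MS = 2_000
  const WAKE_THRESHOLD_MS = 4_000
  let lastTick = Date.now()

  const timer = setInterval(() => {
    const now = Date.now()
    const elapsed = now - lastTick
    lastTick = now
    // Timers don't fire while the system sleeps, so a gap well beyond the
    // interval means we most likely just woke up.
    if (elapsed > INTERVAL_MS + WAKE_THRESHOLD_MS) {
      onWake(elapsed)
    }
  }, INTERVAL_MS)

  // In Node/Bun, don't keep the process alive just for this watcher.
  const timerWithUnref = timer as unknown as { unref?: () => void }
  timerWithUnref.unref?.()

  return () => clearInterval(timer)
}

// Usage:
const stop = watchForSystemWake((gapMs) => {
  console.log(`system likely woke from sleep; timer gap was ${gapMs}ms`)
})
// later: stop()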
@@ -1610,22 +1591,30 @@ export class ShapeStream<T extends Row<unknown> = Row>
  `Snapshot requests are not supported in ${this.#mode} mode, as the consumer is guaranteed to observe all data`
  )
  }
- // We shouldn't be getting a snapshot on a shape that's not started
- if (!this.#started) await this.#start()
+ // Start the stream if not started fire-and-forget like subscribe() does.
+ // We must NOT await #start() because it runs the full request loop. The
+ // PauseLock acquire below will abort the in-flight request, and the
+ // re-check guard in #requestShape handles the race.
+ if (!this.#started) {
+ this.#start().catch(() => {})
+ }

- // Wait until we're not mid-stream before pausing
- // This ensures we don't pause in the middle of a transaction
- await this.#waitForStreamEnd()
+ const snapshotReason = `snapshot-${++this.#snapshotCounter}`

- // Pause the stream if this is the first snapshot request
- this.#activeSnapshotRequests++
+ this.#pauseLock.acquire(snapshotReason)

- try {
- if (this.#activeSnapshotRequests === 1) {
- // Currently this cannot throw, but in case it can later it's in this try block to not have a stuck counter
- this.#pause()
- }
+ // Warn if the snapshot holds the pause lock for too long — this likely
+ // indicates a hung fetch or leaked lock. Visibility pauses are
+ // intentionally long-lived so the warning lives here, not in PauseLock.
+ const snapshotWarnTimer = setTimeout(() => {
+ console.warn(
+ `[Electric] Snapshot "${snapshotReason}" has held the pause lock for 30s — ` +
+ `possible hung request or leaked lock. ` +
+ `Current holders: ${[...new Set([snapshotReason])].join(`, `)}`
+ )
+ }, 30_000)

+ try {
  const { metadata, data } = await this.fetchSnapshot(opts)

  const dataWithEndBoundary = (data as Array<Message<T>>).concat([
@@ -1644,11 +1633,8 @@ export class ShapeStream<T extends Row<unknown> = Row>
  data,
  }
  } finally {
- // Resume the stream if this was the last snapshot request
- this.#activeSnapshotRequests--
- if (this.#activeSnapshotRequests === 0) {
- this.#resume()
- }
+ clearTimeout(snapshotWarnTimer)
+ this.#pauseLock.release(snapshotReason)
  }
  }

@@ -1691,7 +1677,7 @@ export class ShapeStream<T extends Row<unknown> = Row>
  }

  // Capture handle before fetch to avoid race conditions if it changes during the request
- const usedHandle = this.#shapeHandle
+ const usedHandle = this.#syncState.handle

  let response: Response
  try {
@@ -1700,15 +1686,18 @@ export class ShapeStream<T extends Row<unknown> = Row>
  // Handle 409 "must-refetch" - shape handle changed/expired.
  // The fetch wrapper throws FetchError for non-OK responses, so we catch here.
  // Unlike #requestShape, we don't call #reset() here as that would
- // clear #activeSnapshotRequests and break requestSnapshot's pause/resume logic.
+ // clear the pause lock and break requestSnapshot's pause/resume logic.
  if (e instanceof FetchError && e.status === 409) {
  if (usedHandle) {
  const shapeKey = canonicalShapeKey(fetchUrl)
  expiredShapesCache.markExpired(shapeKey, usedHandle)
  }

- this.#shapeHandle =
+ // For snapshot 409s, only update the handle — don't reset offset/schema/etc.
+ // The main stream is paused and should not be disturbed.
+ const nextHandle =
  e.headers[SHAPE_HANDLE_HEADER] || `${usedHandle ?? `handle`}-next`
+ this.#syncState = this.#syncState.withHandle(nextHandle)

  return this.fetchSnapshot(opts)
  }
@@ -1721,7 +1710,7 @@ export class ShapeStream<T extends Row<unknown> = Row>
  }

  const schema: Schema =
- this.#schema ??
+ this.#syncState.schema ??
  getSchemaFromHeaders(response.headers, {
  required: true,
  url: fetchUrl.toString(),