@electric-sql/client 1.5.2 → 1.5.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/client.ts CHANGED
@@ -73,6 +73,13 @@ import {
  import { expiredShapesCache } from './expired-shapes-cache'
  import { upToDateTracker } from './up-to-date-tracker'
  import { SnapshotTracker } from './snapshot-tracker'
+ import {
+ createInitialState,
+ ErrorState,
+ PausedState,
+ ShapeStreamState,
+ } from './shape-stream-state'
+ import { PauseLock } from './pause-lock'

  const RESERVED_PARAMS: Set<ReservedParamKeys> = new Set([
  LIVE_CACHE_BUSTER_QUERY_PARAM,
@@ -568,53 +575,60 @@ export class ShapeStream<T extends Row<unknown> = Row>
  >()

  #started = false
- #state = `active` as `active` | `pause-requested` | `paused`
- #lastOffset: Offset
- #liveCacheBuster: string // Seconds since our Electric Epoch 😎
- #lastSyncedAt?: number // unix time
- #isUpToDate: boolean = false
- #isMidStream: boolean = true
+ #syncState: ShapeStreamState
  #connected: boolean = false
- #shapeHandle?: string
  #mode: LogMode
- #schema?: Schema
  #onError?: ShapeStreamErrorHandler
  #requestAbortController?: AbortController
- #isRefreshing = false
+ #refreshCount = 0
+ #snapshotCounter = 0
+
+ get #isRefreshing(): boolean {
+ return this.#refreshCount > 0
+ }
  #tickPromise?: Promise<void>
  #tickPromiseResolver?: () => void
  #tickPromiseRejecter?: (reason?: unknown) => void
  #messageChain = Promise.resolve<void[]>([]) // promise chain for incoming messages
  #snapshotTracker = new SnapshotTracker()
- #activeSnapshotRequests = 0 // counter for concurrent snapshot requests
- #midStreamPromise?: Promise<void>
- #midStreamPromiseResolver?: () => void
- #lastSeenCursor?: string // Last seen cursor from previous session (used to detect cached responses)
+ #pauseLock: PauseLock
  #currentFetchUrl?: URL // Current fetch URL for computing shape key
  #lastSseConnectionStartTime?: number
  #minSseConnectionDuration = 1000 // Minimum expected SSE connection duration (1 second)
- #consecutiveShortSseConnections = 0
  #maxShortSseConnections = 3 // Fall back to long polling after this many short connections
- #sseFallbackToLongPolling = false
  #sseBackoffBaseDelay = 100 // Base delay for exponential backoff (ms)
  #sseBackoffMaxDelay = 5000 // Maximum delay cap (ms)
  #unsubscribeFromVisibilityChanges?: () => void
  #unsubscribeFromWakeDetection?: () => void
- #staleCacheBuster?: string // Cache buster set when stale CDN response detected, used on retry requests to bypass cache
- #staleCacheRetryCount = 0
  #maxStaleCacheRetries = 3

- // Derived state: we're in replay mode if we have a last seen cursor
- get #replayMode(): boolean {
- return this.#lastSeenCursor !== undefined
- }
-
  constructor(options: ShapeStreamOptions<GetExtensions<T>>) {
  this.options = { subscribe: true, ...options }
  validateOptions(this.options)
- this.#lastOffset = this.options.offset ?? `-1`
- this.#liveCacheBuster = ``
- this.#shapeHandle = this.options.handle
+ this.#syncState = createInitialState({
+ offset: this.options.offset ?? `-1`,
+ handle: this.options.handle,
+ })
+
+ this.#pauseLock = new PauseLock({
+ onAcquired: () => {
+ this.#syncState = this.#syncState.pause()
+ if (this.#started) {
+ this.#requestAbortController?.abort(PAUSE_STREAM)
+ }
+ },
+ onReleased: () => {
+ if (!this.#started) return
+ if (this.options.signal?.aborted) return
+ // Don't transition syncState here — let #requestShape handle
+ // the PausedState→previous transition so it can detect
+ // resumingFromPause and avoid live long-polling.
+ this.#start().catch(() => {
+ // Errors from #start are handled internally via onError.
+ // This catch prevents unhandled promise rejection in Node/Bun.
+ })
+ },
+ })

  // Build transformer chain: columnMapper.decode -> transformer
  // columnMapper transforms column names, transformer transforms values
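The PauseLock constructed above is defined in './pause-lock', which is not part of this diff. Judging only from the call sites in this file (acquire/release with a reason string, isPaused, releaseAllMatching, and the onAcquired/onReleased callbacks), a minimal holder-tracking sketch could look like the following; this is an assumption drawn from the usage here, not the published implementation:

type PauseLockCallbacks = {
  onAcquired: () => void
  onReleased: () => void
}

class PauseLock {
  #holders = new Set<string>()

  constructor(private readonly callbacks: PauseLockCallbacks) {}

  get isPaused(): boolean {
    return this.#holders.size > 0
  }

  acquire(reason: string): void {
    const wasPaused = this.isPaused
    this.#holders.add(reason)
    // Notify only on the first holder so the in-flight request is aborted once.
    if (!wasPaused) this.callbacks.onAcquired()
  }

  release(reason: string): void {
    if (!this.#holders.delete(reason)) return
    // Notify only when the last holder is gone so the stream resumes once.
    if (!this.isPaused) this.callbacks.onReleased()
  }

  releaseAllMatching(prefix: string): void {
    // Drop matching holders without firing onReleased; the 409 reset path
    // further down calls this while the request loop is already running.
    for (const holder of this.#holders) {
      if (holder.startsWith(prefix)) this.#holders.delete(holder)
    }
  }
}

Treating pause as a set of named holders lets the visibility handler and any number of concurrent snapshot requests pause the stream independently without overwriting each other's state.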
@@ -672,7 +686,7 @@ export class ShapeStream<T extends Row<unknown> = Row>
  }

  get shapeHandle() {
- return this.#shapeHandle
+ return this.#syncState.handle
  }

  get error() {
@@ -680,11 +694,11 @@ export class ShapeStream<T extends Row<unknown> = Row>
  }

  get isUpToDate() {
- return this.#isUpToDate
+ return this.#syncState.isUpToDate
  }

  get lastOffset() {
- return this.#lastOffset
+ return this.#syncState.offset
  }

  get mode() {
@@ -698,6 +712,9 @@ export class ShapeStream<T extends Row<unknown> = Row>
  await this.#requestShape()
  } catch (err) {
  this.#error = err
+ if (err instanceof Error) {
+ this.#syncState = this.#syncState.toErrorState(err)
+ }

  // Check if onError handler wants to retry
  if (this.#onError) {
@@ -725,6 +742,9 @@ export class ShapeStream<T extends Row<unknown> = Row>

  // Clear the error since we're retrying
  this.#error = null
+ if (this.#syncState instanceof ErrorState) {
+ this.#syncState = this.#syncState.retry()
+ }

  // Restart from current offset
  this.#started = false
@@ -736,9 +756,7 @@ export class ShapeStream<T extends Row<unknown> = Row>
  if (err instanceof Error) {
  this.#sendErrorToSubscribers(err)
  }
- this.#connected = false
- this.#tickPromiseRejecter?.()
- this.#unsubscribeFromWakeDetection?.()
+ this.#teardown()
  return
  }

@@ -747,33 +765,34 @@ export class ShapeStream<T extends Row<unknown> = Row>
  if (err instanceof Error) {
  this.#sendErrorToSubscribers(err)
  }
- this.#connected = false
- this.#tickPromiseRejecter?.()
- this.#unsubscribeFromWakeDetection?.()
+ this.#teardown()
  throw err
  }

- // Normal completion, clean up
+ this.#teardown()
+ }
+
+ #teardown() {
  this.#connected = false
  this.#tickPromiseRejecter?.()
  this.#unsubscribeFromWakeDetection?.()
  }

  async #requestShape(): Promise<void> {
- if (this.#state === `pause-requested`) {
- this.#state = `paused`
- return
- }
+ if (this.#pauseLock.isPaused) return

  if (
  !this.options.subscribe &&
- (this.options.signal?.aborted || this.#isUpToDate)
+ (this.options.signal?.aborted || this.#syncState.isUpToDate)
  ) {
  return
  }

- const resumingFromPause = this.#state === `paused`
- this.#state = `active`
+ let resumingFromPause = false
+ if (this.#syncState instanceof PausedState) {
+ resumingFromPause = true
+ this.#syncState = this.#syncState.resume()
+ }

  const { url, signal } = this.options
  const { fetchUrl, requestHeaders } = await this.#constructUrl(
@@ -783,6 +802,17 @@ export class ShapeStream<T extends Row<unknown> = Row>
  const abortListener = await this.#createAbortListener(signal)
  const requestAbortController = this.#requestAbortController! // we know that it is not undefined because it is set by `this.#createAbortListener`

+ // Re-check after async setup — the lock may have been acquired
+ // during URL construction or abort controller creation (e.g., by
+ // requestSnapshot), when the abort controller didn't exist yet.
+ if (this.#pauseLock.isPaused) {
+ if (abortListener && signal) {
+ signal.removeEventListener(`abort`, abortListener)
+ }
+ this.#requestAbortController = undefined
+ return
+ }
+
  try {
  await this.#fetchShape({
  fetchUrl,
@@ -805,20 +835,6 @@ export class ShapeStream<T extends Row<unknown> = Row>
  }

  if (e instanceof FetchBackoffAbortError) {
- // Check current state - it may have changed due to concurrent pause/resume calls
- // from the visibility change handler during the async fetch operation.
- // TypeScript's flow analysis doesn't account for concurrent state changes.
- const currentState = this.#state as
- | `active`
- | `pause-requested`
- | `paused`
- if (
- requestAbortController.signal.aborted &&
- requestAbortController.signal.reason === PAUSE_STREAM &&
- currentState === `pause-requested`
- ) {
- this.#state = `paused`
- }
  return // interrupted
  }

@@ -839,13 +855,13 @@ export class ShapeStream<T extends Row<unknown> = Row>
  // consistent cache buster

  // Store the current shape URL as expired to avoid future 409s
- if (this.#shapeHandle) {
+ if (this.#syncState.handle) {
  const shapeKey = canonicalShapeKey(fetchUrl)
- expiredShapesCache.markExpired(shapeKey, this.#shapeHandle)
+ expiredShapesCache.markExpired(shapeKey, this.#syncState.handle)
  }

  const newShapeHandle =
- e.headers[SHAPE_HANDLE_HEADER] || `${this.#shapeHandle!}-next`
+ e.headers[SHAPE_HANDLE_HEADER] || `${this.#syncState.handle!}-next`
  this.#reset(newShapeHandle)

  // must refetch control message might be in a list or not depending
@@ -1000,31 +1016,15 @@ export class ShapeStream<T extends Row<unknown> = Row>
  }
  }

- // Add Electric's internal parameters
- fetchUrl.searchParams.set(OFFSET_QUERY_PARAM, this.#lastOffset)
+ // Add state-specific parameters (offset, handle, live cache busters, etc.)
+ this.#syncState.applyUrlParams(fetchUrl, {
+ isSnapshotRequest: subsetParams !== undefined,
+ // Don't long-poll when resuming from pause or refreshing — avoids
+ // a 20s hold during which `isConnected` would be false
+ canLongPoll: !this.#isRefreshing && !resumingFromPause,
+ })
  fetchUrl.searchParams.set(LOG_MODE_QUERY_PARAM, this.#mode)

- // Snapshot requests (with subsetParams) should never use live polling
- const isSnapshotRequest = subsetParams !== undefined
-
- if (this.#isUpToDate && !isSnapshotRequest) {
- // If we are resuming from a paused state, we don't want to perform a live request
- // because it could be a long poll that holds for 20sec
- // and during all that time `isConnected` will be false
- if (!this.#isRefreshing && !resumingFromPause) {
- fetchUrl.searchParams.set(LIVE_QUERY_PARAM, `true`)
- }
- fetchUrl.searchParams.set(
- LIVE_CACHE_BUSTER_QUERY_PARAM,
- this.#liveCacheBuster
- )
- }
-
- if (this.#shapeHandle) {
- // This should probably be a header for better cache breaking?
- fetchUrl.searchParams.set(SHAPE_HANDLE_QUERY_PARAM, this.#shapeHandle!)
- }
-
  // Add cache buster for shapes known to be expired to prevent 409s
  const shapeKey = canonicalShapeKey(fetchUrl)
  const expiredHandle = expiredShapesCache.getExpiredHandle(shapeKey)
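applyUrlParams moves the removed inline parameter logic into the state object, which now owns the offset, handle and live cache buster. './shape-stream-state' is not shown in this diff, so the following is only a rough sketch of what the up-to-date branch might do, reconstructed from the removed lines; the query-param constants are the ones already imported by client.ts and are declared here only so the sketch type-checks:

// Constants assumed to match the ones used in client.ts above.
declare const OFFSET_QUERY_PARAM: string
declare const LIVE_QUERY_PARAM: string
declare const LIVE_CACHE_BUSTER_QUERY_PARAM: string
declare const SHAPE_HANDLE_QUERY_PARAM: string

interface ApplyUrlParamsOptions {
  isSnapshotRequest: boolean
  canLongPoll: boolean
}

interface SyncStateSnapshot {
  offset: string
  handle?: string
  liveCacheBuster: string
  isUpToDate: boolean
}

// Sketch: how a state object could serialise itself into the fetch URL,
// mirroring the inline branch this diff removes.
function applyUrlParams(
  state: SyncStateSnapshot,
  fetchUrl: URL,
  opts: ApplyUrlParamsOptions
): void {
  fetchUrl.searchParams.set(OFFSET_QUERY_PARAM, state.offset)
  if (state.isUpToDate && !opts.isSnapshotRequest) {
    // Snapshot requests never long-poll; paused or refreshing streams skip it too.
    if (opts.canLongPoll) fetchUrl.searchParams.set(LIVE_QUERY_PARAM, `true`)
    fetchUrl.searchParams.set(LIVE_CACHE_BUSTER_QUERY_PARAM, state.liveCacheBuster)
  }
  if (state.handle) {
    fetchUrl.searchParams.set(SHAPE_HANDLE_QUERY_PARAM, state.handle)
  }
}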
@@ -1032,15 +1032,6 @@ export class ShapeStream<T extends Row<unknown> = Row>
  fetchUrl.searchParams.set(EXPIRED_HANDLE_QUERY_PARAM, expiredHandle)
  }

- // Add random cache buster if we received a stale response from CDN
- // This forces a fresh request bypassing the misconfigured CDN cache
- if (this.#staleCacheBuster) {
- fetchUrl.searchParams.set(
- CACHE_BUSTER_QUERY_PARAM,
- this.#staleCacheBuster
- )
- }
-
  // sort query params in-place for stable URLs and improved cache hits
  fetchUrl.searchParams.sort()

@@ -1071,165 +1062,132 @@ export class ShapeStream<T extends Row<unknown> = Row>
  }
  }

- async #onInitialResponse(response: Response) {
+ /**
+ * Processes response metadata (headers, status) and updates sync state.
+ * Returns `true` if the response body should be processed by the caller,
+ * or `false` if the response was ignored (stale) and the body should be skipped.
+ * Throws on stale-retry (to trigger a retry with cache buster).
+ */
+ async #onInitialResponse(response: Response): Promise<boolean> {
  const { headers, status } = response
  const shapeHandle = headers.get(SHAPE_HANDLE_HEADER)
- if (shapeHandle) {
- // Don't accept a handle we know is expired - this can happen if a
- // proxy serves a stale cached response despite the expired_handle
- // cache buster parameter
- const shapeKey = this.#currentFetchUrl
- ? canonicalShapeKey(this.#currentFetchUrl)
- : null
- const expiredHandle = shapeKey
- ? expiredShapesCache.getExpiredHandle(shapeKey)
- : null
- if (shapeHandle !== expiredHandle) {
- this.#shapeHandle = shapeHandle
- // Clear cache buster after successful response with valid handle
- if (this.#staleCacheBuster) {
- this.#staleCacheBuster = undefined
- this.#staleCacheRetryCount = 0
- }
- } else if (this.#shapeHandle === undefined) {
- // We received a stale response from cache and don't have a handle yet.
- // Instead of accepting the stale handle, throw an error to trigger a retry
- // with a random cache buster to bypass the CDN cache.
- this.#staleCacheRetryCount++
- // Cancel the response body to release the connection before retrying
- await response.body?.cancel()
- if (this.#staleCacheRetryCount > this.#maxStaleCacheRetries) {
- throw new FetchError(
- 502,
- undefined,
- undefined,
- {},
- this.#currentFetchUrl?.toString() ?? ``,
- `CDN continues serving stale cached responses after ${this.#maxStaleCacheRetries} retry attempts. ` +
- `This indicates a severe proxy/CDN misconfiguration. ` +
- `Check that your proxy includes all query parameters (especially 'handle' and 'offset') in its cache key. ` +
- `For more information visit the troubleshooting guide: https://electric-sql.com/docs/guides/troubleshooting`
- )
- }
- console.warn(
- `[Electric] Received stale cached response with expired shape handle. ` +
- `This should not happen and indicates a proxy/CDN caching misconfiguration. ` +
- `The response contained handle "${shapeHandle}" which was previously marked as expired. ` +
- `Check that your proxy includes all query parameters (especially 'handle' and 'offset') in its cache key. ` +
- `For more information visit the troubleshooting guide: https://electric-sql.com/docs/guides/troubleshooting ` +
- `Retrying with a random cache buster to bypass the stale cache (attempt ${this.#staleCacheRetryCount}/${this.#maxStaleCacheRetries}).`
- )
- // Generate a random cache buster for the retry
- this.#staleCacheBuster = `${Date.now()}-${Math.random().toString(36).substring(2, 9)}`
- throw new StaleCacheError(
- `Received stale cached response with expired handle "${shapeHandle}". ` +
- `This indicates a proxy/CDN caching misconfiguration. ` +
- `Check that your proxy includes all query parameters (especially 'handle' and 'offset') in its cache key.`
- )
- } else {
- // We already have a valid handle, so ignore the stale response entirely
- // to prevent a mismatch between our current handle and the stale offset.
- console.warn(
- `[Electric] Received stale cached response with expired shape handle. ` +
- `This should not happen and indicates a proxy/CDN caching misconfiguration. ` +
- `The response contained handle "${shapeHandle}" which was previously marked as expired. ` +
+ const shapeKey = this.#currentFetchUrl
+ ? canonicalShapeKey(this.#currentFetchUrl)
+ : null
+ const expiredHandle = shapeKey
+ ? expiredShapesCache.getExpiredHandle(shapeKey)
+ : null
+
+ const transition = this.#syncState.handleResponseMetadata({
+ status,
+ responseHandle: shapeHandle,
+ responseOffset: headers.get(CHUNK_LAST_OFFSET_HEADER) as Offset | null,
+ responseCursor: headers.get(LIVE_CACHE_BUSTER_HEADER),
+ responseSchema: getSchemaFromHeaders(headers),
+ expiredHandle,
+ now: Date.now(),
+ maxStaleCacheRetries: this.#maxStaleCacheRetries,
+ createCacheBuster: () =>
+ `${Date.now()}-${Math.random().toString(36).substring(2, 9)}`,
+ })
+
+ this.#syncState = transition.state
+
+ if (transition.action === `stale-retry`) {
+ // Cancel the response body to release the connection before retrying.
+ await response.body?.cancel()
+ if (transition.exceededMaxRetries) {
+ throw new FetchError(
+ 502,
+ undefined,
+ undefined,
+ {},
+ this.#currentFetchUrl?.toString() ?? ``,
+ `CDN continues serving stale cached responses after ${this.#maxStaleCacheRetries} retry attempts. ` +
+ `This indicates a severe proxy/CDN misconfiguration. ` +
  `Check that your proxy includes all query parameters (especially 'handle' and 'offset') in its cache key. ` +
- `Ignoring the stale response and continuing with handle "${this.#shapeHandle}".`
+ `For more information visit the troubleshooting guide: https://electric-sql.com/docs/guides/troubleshooting`
  )
- return
  }
+ console.warn(
+ `[Electric] Received stale cached response with expired shape handle. ` +
+ `This should not happen and indicates a proxy/CDN caching misconfiguration. ` +
+ `The response contained handle "${shapeHandle}" which was previously marked as expired. ` +
+ `Check that your proxy includes all query parameters (especially 'handle' and 'offset') in its cache key. ` +
+ `For more information visit the troubleshooting guide: https://electric-sql.com/docs/guides/troubleshooting ` +
+ `Retrying with a random cache buster to bypass the stale cache (attempt ${this.#syncState.staleCacheRetryCount}/${this.#maxStaleCacheRetries}).`
+ )
+ throw new StaleCacheError(
+ `Received stale cached response with expired handle "${shapeHandle}". ` +
+ `This indicates a proxy/CDN caching misconfiguration. ` +
+ `Check that your proxy includes all query parameters (especially 'handle' and 'offset') in its cache key.`
+ )
  }

- const lastOffset = headers.get(CHUNK_LAST_OFFSET_HEADER)
- if (lastOffset) {
- this.#lastOffset = lastOffset as Offset
- }
-
- const liveCacheBuster = headers.get(LIVE_CACHE_BUSTER_HEADER)
- if (liveCacheBuster) {
- this.#liveCacheBuster = liveCacheBuster
+ if (transition.action === `ignored`) {
+ // We already have a valid handle, so ignore the entire stale response
+ // (both metadata and body) to prevent a mismatch between our current
+ // handle and the stale data.
+ console.warn(
+ `[Electric] Received stale cached response with expired shape handle. ` +
+ `This should not happen and indicates a proxy/CDN caching misconfiguration. ` +
+ `The response contained handle "${shapeHandle}" which was previously marked as expired. ` +
+ `Check that your proxy includes all query parameters (especially 'handle' and 'offset') in its cache key. ` +
+ `Ignoring the stale response and continuing with handle "${this.#syncState.handle}".`
+ )
+ return false
  }

- this.#schema = this.#schema ?? getSchemaFromHeaders(headers)
-
- // NOTE: 204s are deprecated, the Electric server should not
- // send these in latest versions but this is here for backwards
- // compatibility
- if (status === 204) {
- // There's no content so we are live and up to date
- this.#lastSyncedAt = Date.now()
- }
+ return true
  }

  async #onMessages(batch: Array<Message<T>>, isSseMessage = false) {
- // Update isUpToDate
- if (batch.length > 0) {
- // Set isMidStream to true when we receive any data
- this.#isMidStream = true
-
- const lastMessage = batch[batch.length - 1]
- if (isUpToDateMessage(lastMessage)) {
- if (isSseMessage) {
- // Only use the offset from the up-to-date message if this was an SSE message.
- // If we would use this offset from a regular fetch, then it will be wrong
- // and we will get an "offset is out of bounds for this shape" error
- const offset = getOffset(lastMessage)
- if (offset) {
- this.#lastOffset = offset
- }
- }
- this.#lastSyncedAt = Date.now()
- this.#isUpToDate = true
- // Set isMidStream to false when we see an up-to-date message
- this.#isMidStream = false
- // Resolve the promise waiting for mid-stream to end
- this.#midStreamPromiseResolver?.()
-
- // Check if we should suppress this up-to-date notification
- // to prevent multiple renders from cached responses
- if (this.#replayMode && !isSseMessage) {
- // We're in replay mode (replaying cached responses during initial sync).
- // Check if the cursor has changed - cursors are time-based and always
- // increment, so a new cursor means fresh data from the server.
- const currentCursor = this.#liveCacheBuster
-
- if (currentCursor === this.#lastSeenCursor) {
- // Same cursor as previous session - suppress this up-to-date notification.
- // Exit replay mode after first suppression to ensure we don't get stuck
- // if CDN keeps returning the same cursor indefinitely.
- this.#lastSeenCursor = undefined
- return
- }
- }
+ if (batch.length === 0) return
+
+ const lastMessage = batch[batch.length - 1]
+ const hasUpToDateMessage = isUpToDateMessage(lastMessage)
+ const upToDateOffset = hasUpToDateMessage
+ ? getOffset(lastMessage)
+ : undefined
+
+ const transition = this.#syncState.handleMessageBatch({
+ hasMessages: true,
+ hasUpToDateMessage,
+ isSse: isSseMessage,
+ upToDateOffset,
+ now: Date.now(),
+ currentCursor: this.#syncState.liveCacheBuster,
+ })
+ this.#syncState = transition.state

- // We're either:
- // 1. Not in replay mode (normal operation), or
- // 2. This is a live/SSE message (always fresh), or
- // 3. Cursor has changed (exited replay mode with fresh data)
- // In all cases, notify subscribers and record the up-to-date.
- this.#lastSeenCursor = undefined // Exit replay mode
+ if (hasUpToDateMessage) {
+ if (transition.suppressBatch) {
+ return
+ }

- if (this.#currentFetchUrl) {
- const shapeKey = canonicalShapeKey(this.#currentFetchUrl)
- upToDateTracker.recordUpToDate(shapeKey, this.#liveCacheBuster)
- }
+ if (this.#currentFetchUrl) {
+ const shapeKey = canonicalShapeKey(this.#currentFetchUrl)
+ upToDateTracker.recordUpToDate(
+ shapeKey,
+ this.#syncState.liveCacheBuster
+ )
  }
+ }

- // Filter messages using snapshot tracker
- const messagesToProcess = batch.filter((message) => {
- if (isChangeMessage(message)) {
- return !this.#snapshotTracker.shouldRejectMessage(message)
- }
- return true // Always process control messages
- })
+ // Filter messages using snapshot tracker
+ const messagesToProcess = batch.filter((message) => {
+ if (isChangeMessage(message)) {
+ return !this.#snapshotTracker.shouldRejectMessage(message)
+ }
+ return true // Always process control messages
+ })

- await this.#publish(messagesToProcess)
- }
+ await this.#publish(messagesToProcess)
  }

  /**
  * Fetches the shape from the server using either long polling or SSE.
- * Upon receiving a successfull response, the #onInitialResponse method is called.
+ * Upon receiving a successful response, the #onInitialResponse method is called.
  * Afterwards, the #onMessages method is called for all the incoming updates.
  * @param opts - The options for the request.
  * @returns A promise that resolves when the request is complete (i.e. the long poll receives a response or the SSE connection is closed).
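handleResponseMetadata and handleMessageBatch both return a transition object that the stream stores and then branches on. Their real types live in './shape-stream-state' and are not included in this diff; inferred purely from the fields read above, they might look roughly like this sketch:

import type { ShapeStreamState } from './shape-stream-state'

// Assumed shapes, inferred from the call sites in #onInitialResponse and
// #onMessages; the actual definitions are not part of this diff.
type ResponseMetadataTransition = {
  state: ShapeStreamState
  // `ok`: process the body; `ignored`: drop the stale response;
  // `stale-retry`: cancel the body and refetch with a cache buster.
  action: `ok` | `ignored` | `stale-retry`
  exceededMaxRetries?: boolean
}

type MessageBatchTransition = {
  state: ShapeStreamState
  // True when the up-to-date batch replays a cached response from a previous
  // session and should not be re-published to subscribers.
  suppressBatch?: boolean
}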
@@ -1246,22 +1204,22 @@ export class ShapeStream<T extends Row<unknown> = Row>
  // Check if we should enter replay mode (replaying cached responses)
  // This happens when we're starting fresh (offset=-1 or before first up-to-date)
  // and there's a recent up-to-date in localStorage (< 60s)
- if (!this.#isUpToDate && !this.#replayMode) {
+ if (!this.#syncState.isUpToDate && this.#syncState.canEnterReplayMode()) {
  const shapeKey = canonicalShapeKey(opts.fetchUrl)
  const lastSeenCursor = upToDateTracker.shouldEnterReplayMode(shapeKey)
  if (lastSeenCursor) {
  // Enter replay mode and store the last seen cursor
- this.#lastSeenCursor = lastSeenCursor
+ this.#syncState = this.#syncState.enterReplayMode(lastSeenCursor)
  }
  }

  const useSse = this.options.liveSse ?? this.options.experimentalLiveSse
  if (
- this.#isUpToDate &&
- useSse &&
- !this.#isRefreshing &&
- !opts.resumingFromPause &&
- !this.#sseFallbackToLongPolling
+ this.#syncState.shouldUseSse({
+ liveSseEnabled: !!useSse,
+ isRefreshing: this.#isRefreshing,
+ resumingFromPause: !!opts.resumingFromPause,
+ })
  ) {
  opts.fetchUrl.searchParams.set(EXPERIMENTAL_LIVE_SSE_QUERY_PARAM, `true`)
  opts.fetchUrl.searchParams.set(LIVE_SSE_QUERY_PARAM, `true`)
@@ -1283,9 +1241,10 @@ export class ShapeStream<T extends Row<unknown> = Row>
  })

  this.#connected = true
- await this.#onInitialResponse(response)
+ const shouldProcessBody = await this.#onInitialResponse(response)
+ if (!shouldProcessBody) return

- const schema = this.#schema! // we know that it is not undefined because it is set by `this.#onInitialResponse`
+ const schema = this.#syncState.schema! // we know that it is not undefined because it is set by `this.#onInitialResponse`
  const res = await response.text()
  const messages = res || `[]`
  const batch = this.#messageParser.parse<Array<Message<T>>>(messages, schema)
@@ -1310,6 +1269,7 @@ export class ShapeStream<T extends Row<unknown> = Row>
  Accept: `text/event-stream`,
  }

+ let ignoredStaleResponse = false
  try {
  let buffer: Array<Message<T>> = []
  await fetchEventSource(fetchUrl.toString(), {
@@ -1317,12 +1277,16 @@ export class ShapeStream<T extends Row<unknown> = Row>
  fetch,
  onopen: async (response: Response) => {
  this.#connected = true
- await this.#onInitialResponse(response)
+ const shouldProcessBody = await this.#onInitialResponse(response)
+ if (!shouldProcessBody) {
+ ignoredStaleResponse = true
+ throw new Error(`stale response ignored`)
+ }
  },
  onmessage: (event: EventSourceMessage) => {
  if (event.data) {
  // event.data is a single JSON object
- const schema = this.#schema! // we know that it is not undefined because it is set in onopen when we call this.#onInitialResponse
+ const schema = this.#syncState.schema! // we know that it is not undefined because it is set in onopen when we call this.#onInitialResponse
  const message = this.#messageParser.parse<Message<T>>(
  event.data,
  schema
@@ -1344,14 +1308,14 @@ export class ShapeStream<T extends Row<unknown> = Row>
  signal: requestAbortController.signal,
  })
  } catch (error) {
+ if (ignoredStaleResponse) {
+ // Stale response was ignored in onopen — let the fetch loop retry
+ return
+ }
  if (requestAbortController.signal.aborted) {
- // During an SSE request, the fetch might have succeeded
- // and we are parsing the incoming stream.
- // If the abort happens while we're parsing the stream,
- // then it won't be caught by our `createFetchWithBackoff` wrapper
- // and instead we will get a raw AbortError here
- // which we need to turn into a `FetchBackoffAbortError`
- // such that #start handles it correctly.`
+ // An abort during SSE stream parsing produces a raw AbortError
+ // instead of going through createFetchWithBackoff -- wrap it so
+ // #start handles it correctly.
  throw new FetchBackoffAbortError()
  }
  throw error
@@ -1362,65 +1326,32 @@ export class ShapeStream<T extends Row<unknown> = Row>
  const connectionDuration = Date.now() - this.#lastSseConnectionStartTime!
  const wasAborted = requestAbortController.signal.aborted

- if (connectionDuration < this.#minSseConnectionDuration && !wasAborted) {
- // Connection was too short - likely a cached response or misconfiguration
- this.#consecutiveShortSseConnections++
-
- if (
- this.#consecutiveShortSseConnections >= this.#maxShortSseConnections
- ) {
- // Too many short connections - fall back to long polling
- this.#sseFallbackToLongPolling = true
- console.warn(
- `[Electric] SSE connections are closing immediately (possibly due to proxy buffering or misconfiguration). ` +
- `Falling back to long polling. ` +
- `Your proxy must support streaming SSE responses (not buffer the complete response). ` +
- `Configuration: Nginx add 'X-Accel-Buffering: no', Caddy add 'flush_interval -1' to reverse_proxy. ` +
- `Note: Do NOT disable caching entirely - Electric uses cache headers to enable request collapsing for efficiency.`
- )
- } else {
- // Add exponential backoff with full jitter to prevent tight infinite loop
- // Formula: random(0, min(cap, base * 2^attempt))
- const maxDelay = Math.min(
- this.#sseBackoffMaxDelay,
- this.#sseBackoffBaseDelay *
- Math.pow(2, this.#consecutiveShortSseConnections)
- )
- const delayMs = Math.floor(Math.random() * maxDelay)
- await new Promise((resolve) => setTimeout(resolve, delayMs))
- }
- } else if (connectionDuration >= this.#minSseConnectionDuration) {
- // Connection was healthy - reset counter
- this.#consecutiveShortSseConnections = 0
- }
- }
- }
-
- #pause() {
- if (this.#started && this.#state === `active`) {
- this.#state = `pause-requested`
- this.#requestAbortController?.abort(PAUSE_STREAM)
- }
- }
-
- #resume() {
- if (
- this.#started &&
- (this.#state === `paused` || this.#state === `pause-requested`)
- ) {
- // Don't resume if the user's signal is already aborted
- // This can happen if the signal was aborted while we were paused
- // (e.g., TanStack DB collection was GC'd)
- if (this.options.signal?.aborted) {
- return
- }
+ const transition = this.#syncState.handleSseConnectionClosed({
+ connectionDuration,
+ wasAborted,
+ minConnectionDuration: this.#minSseConnectionDuration,
+ maxShortConnections: this.#maxShortSseConnections,
+ })
+ this.#syncState = transition.state

- // If we're resuming from pause-requested state, we need to set state back to active
- // to prevent the pause from completing
- if (this.#state === `pause-requested`) {
- this.#state = `active`
+ if (transition.fellBackToLongPolling) {
+ console.warn(
+ `[Electric] SSE connections are closing immediately (possibly due to proxy buffering or misconfiguration). ` +
+ `Falling back to long polling. ` +
+ `Your proxy must support streaming SSE responses (not buffer the complete response). ` +
+ `Configuration: Nginx add 'X-Accel-Buffering: no', Caddy add 'flush_interval -1' to reverse_proxy. ` +
+ `Note: Do NOT disable caching entirely - Electric uses cache headers to enable request collapsing for efficiency.`
+ )
+ } else if (transition.wasShortConnection) {
+ // Exponential backoff with full jitter: random(0, min(cap, base * 2^attempt))
+ const maxDelay = Math.min(
+ this.#sseBackoffMaxDelay,
+ this.#sseBackoffBaseDelay *
+ Math.pow(2, this.#syncState.consecutiveShortSseConnections)
+ )
+ const delayMs = Math.floor(Math.random() * maxDelay)
+ await new Promise((resolve) => setTimeout(resolve, delayMs))
  }
- this.#start()
  }
  }

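The short-connection branch keeps the original full-jitter backoff: the delay is drawn uniformly from [0, min(cap, base * 2^attempt)). With the defaults declared on the class (base 100 ms, cap 5000 ms, fallback after 3 short connections), only the first two short connections actually sleep before the third triggers the long-polling fallback; a standalone sketch of the arithmetic:

const base = 100 // mirrors #sseBackoffBaseDelay
const cap = 5000 // mirrors #sseBackoffMaxDelay
const maxShort = 3 // mirrors #maxShortSseConnections

for (let attempt = 1; attempt < maxShort; attempt++) {
  // attempt 1 -> ceiling 200 ms, attempt 2 -> ceiling 400 ms; the third
  // consecutive short connection falls back to long polling instead.
  const ceiling = Math.min(cap, base * 2 ** attempt)
  const delayMs = Math.floor(Math.random() * ceiling)
  console.log({ attempt, ceiling, delayMs })
}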
@@ -1444,15 +1375,15 @@ export class ShapeStream<T extends Row<unknown> = Row>
  this.#unsubscribeFromWakeDetection?.()
  }

- /** Unix time at which we last synced. Undefined when `isLoading` is true. */
+ /** Unix time at which we last synced. Undefined until first successful up-to-date. */
  lastSyncedAt(): number | undefined {
- return this.#lastSyncedAt
+ return this.#syncState.lastSyncedAt
  }

  /** Time elapsed since last sync (in ms). Infinity if we did not yet sync. */
  lastSynced(): number {
- if (this.#lastSyncedAt === undefined) return Infinity
- return Date.now() - this.#lastSyncedAt
+ if (this.#syncState.lastSyncedAt === undefined) return Infinity
+ return Date.now() - this.#syncState.lastSyncedAt
  }

  /** Indicates if we are connected to the Electric sync service. */
@@ -1460,9 +1391,9 @@ export class ShapeStream<T extends Row<unknown> = Row>
  return this.#connected
  }

- /** True during initial fetch. False afterwise. */
+ /** True during initial fetch. False afterwards. */
  isLoading(): boolean {
- return !this.#isUpToDate
+ return !this.#syncState.isUpToDate
  }

  hasStarted(): boolean {
@@ -1470,11 +1401,16 @@ export class ShapeStream<T extends Row<unknown> = Row>
  }

  isPaused(): boolean {
- return this.#state === `paused`
+ return this.#pauseLock.isPaused
  }

  /** Await the next tick of the request loop */
  async #nextTick() {
+ if (this.#pauseLock.isPaused) {
+ throw new Error(
+ `Cannot wait for next tick while PauseLock is held — this would deadlock because the request loop is paused`
+ )
+ }
  if (this.#tickPromise) {
  return this.#tickPromise
  }
@@ -1490,24 +1426,6 @@ export class ShapeStream<T extends Row<unknown> = Row>
  return this.#tickPromise
  }

- /** Await until we're not in the middle of a stream (i.e., until we see an up-to-date message) */
- async #waitForStreamEnd() {
- if (!this.#isMidStream) {
- return
- }
- if (this.#midStreamPromise) {
- return this.#midStreamPromise
- }
- this.#midStreamPromise = new Promise((resolve) => {
- this.#midStreamPromiseResolver = resolve
- })
- this.#midStreamPromise.finally(() => {
- this.#midStreamPromise = undefined
- this.#midStreamPromiseResolver = undefined
- })
- return this.#midStreamPromise
- }
-
  /**
  * Refreshes the shape stream.
  * This preemptively aborts any ongoing long poll and reconnects without
@@ -1515,14 +1433,20 @@ export class ShapeStream<T extends Row<unknown> = Row>
  * latest LSN from Postgres at that point in time.
  */
  async forceDisconnectAndRefresh(): Promise<void> {
- this.#isRefreshing = true
- if (this.#isUpToDate && !this.#requestAbortController?.signal.aborted) {
- // If we are "up to date", any current request will be a "live" request
- // and needs to be aborted
- this.#requestAbortController?.abort(FORCE_DISCONNECT_AND_REFRESH)
+ this.#refreshCount++
+ try {
+ if (
+ this.#syncState.isUpToDate &&
+ !this.#requestAbortController?.signal.aborted
+ ) {
+ // If we are "up to date", any current request will be a "live" request
+ // and needs to be aborted
+ this.#requestAbortController?.abort(FORCE_DISCONNECT_AND_REFRESH)
+ }
+ await this.#nextTick()
+ } finally {
+ this.#refreshCount--
  }
- await this.#nextTick()
- this.#isRefreshing = false
  }

  async #publish(messages: Message<T>[]): Promise<void[]> {
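Replacing the #isRefreshing boolean with a counter means overlapping forceDisconnectAndRefresh calls no longer clear each other's flag: the stream reports refreshing until every caller's finally block has decremented. A hypothetical usage sketch (the stream's construction is elided):

import { ShapeStream } from '@electric-sql/client'

declare const stream: ShapeStream // an already-started stream, details elided

// Both calls increment the refresh counter; #isRefreshing stays true until
// the slower of the two ticks completes, so neither refresh is cut short.
await Promise.all([
  stream.forceDisconnectAndRefresh(),
  stream.forceDisconnectAndRefresh(),
])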
@@ -1565,9 +1489,9 @@ export class ShapeStream<T extends Row<unknown> = Row>
  if (this.#hasBrowserVisibilityAPI()) {
  const visibilityHandler = () => {
  if (document.hidden) {
- this.#pause()
+ this.#pauseLock.acquire(`visibility`)
  } else {
- this.#resume()
+ this.#pauseLock.release(`visibility`)
  }
  }

@@ -1606,11 +1530,16 @@ export class ShapeStream<T extends Row<unknown> = Row>
  lastTickTime = now

  if (elapsed > INTERVAL_MS + WAKE_THRESHOLD_MS) {
- if (this.#state === `active` && this.#requestAbortController) {
- this.#isRefreshing = true
+ if (!this.#pauseLock.isPaused && this.#requestAbortController) {
+ this.#refreshCount++
  this.#requestAbortController.abort(SYSTEM_WAKE)
+ // Wake handler is synchronous (setInterval callback) so we can't
+ // use try/finally + await like forceDisconnectAndRefresh. Instead,
+ // decrement via queueMicrotask — safe because the abort triggers
+ // #requestShape to re-run, which reads #isRefreshing synchronously
+ // before the microtask fires.
  queueMicrotask(() => {
- this.#isRefreshing = false
+ this.#refreshCount--
  })
  }
  }
@@ -1631,20 +1560,12 @@ export class ShapeStream<T extends Row<unknown> = Row>
  * shape handle
  */
  #reset(handle?: string) {
- this.#lastOffset = `-1`
- this.#liveCacheBuster = ``
- this.#shapeHandle = handle
- this.#isUpToDate = false
- this.#isMidStream = true
+ this.#syncState = this.#syncState.markMustRefetch(handle)
  this.#connected = false
- this.#schema = undefined
- this.#activeSnapshotRequests = 0
- // Reset SSE fallback state to try SSE again after reset
- this.#consecutiveShortSseConnections = 0
- this.#sseFallbackToLongPolling = false
- // Reset stale cache retry state
- this.#staleCacheBuster = undefined
- this.#staleCacheRetryCount = 0
+ // releaseAllMatching intentionally doesn't fire onReleased — it's called
+ // from within the running stream loop (#requestShape's 409 handler), so
+ // the stream is already active and doesn't need a resume signal.
+ this.#pauseLock.releaseAllMatching(`snapshot`)
  }

  /**
@@ -1670,22 +1591,30 @@ export class ShapeStream<T extends Row<unknown> = Row>
  `Snapshot requests are not supported in ${this.#mode} mode, as the consumer is guaranteed to observe all data`
  )
  }
- // We shouldn't be getting a snapshot on a shape that's not started
- if (!this.#started) await this.#start()
+ // Start the stream if not started fire-and-forget like subscribe() does.
+ // We must NOT await #start() because it runs the full request loop. The
+ // PauseLock acquire below will abort the in-flight request, and the
+ // re-check guard in #requestShape handles the race.
+ if (!this.#started) {
+ this.#start().catch(() => {})
+ }

- // Wait until we're not mid-stream before pausing
- // This ensures we don't pause in the middle of a transaction
- await this.#waitForStreamEnd()
+ const snapshotReason = `snapshot-${++this.#snapshotCounter}`

- // Pause the stream if this is the first snapshot request
- this.#activeSnapshotRequests++
+ this.#pauseLock.acquire(snapshotReason)

- try {
- if (this.#activeSnapshotRequests === 1) {
- // Currently this cannot throw, but in case it can later it's in this try block to not have a stuck counter
- this.#pause()
- }
+ // Warn if the snapshot holds the pause lock for too long — this likely
+ // indicates a hung fetch or leaked lock. Visibility pauses are
+ // intentionally long-lived so the warning lives here, not in PauseLock.
+ const snapshotWarnTimer = setTimeout(() => {
+ console.warn(
+ `[Electric] Snapshot "${snapshotReason}" has held the pause lock for 30s — ` +
+ `possible hung request or leaked lock. ` +
+ `Current holders: ${[...new Set([snapshotReason])].join(`, `)}`
+ )
+ }, 30_000)

+ try {
  const { metadata, data } = await this.fetchSnapshot(opts)

  const dataWithEndBoundary = (data as Array<Message<T>>).concat([
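Each snapshot request now takes its own named hold on the pause lock ("snapshot-1", "snapshot-2", ...), so concurrent snapshots keep the main request loop paused until the last one releases, and a visibility pause can coexist with them. A hypothetical usage sketch, assuming requestSnapshot is part of the stream's public surface as the diff suggests:

import { ShapeStream } from '@electric-sql/client'

declare const stream: ShapeStream // an already-started stream, details elided
declare const optsA: Parameters<ShapeStream['requestSnapshot']>[0]
declare const optsB: Parameters<ShapeStream['requestSnapshot']>[0]

// The two calls hold distinct reasons on the pause lock, mirroring the old
// #activeSnapshotRequests counter; the stream resumes after both release.
const [first, second] = await Promise.all([
  stream.requestSnapshot(optsA),
  stream.requestSnapshot(optsB),
])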
@@ -1704,11 +1633,8 @@ export class ShapeStream<T extends Row<unknown> = Row>
  data,
  }
  } finally {
- // Resume the stream if this was the last snapshot request
- this.#activeSnapshotRequests--
- if (this.#activeSnapshotRequests === 0) {
- this.#resume()
- }
+ clearTimeout(snapshotWarnTimer)
+ this.#pauseLock.release(snapshotReason)
  }
  }

@@ -1751,7 +1677,7 @@ export class ShapeStream<T extends Row<unknown> = Row>
  }

  // Capture handle before fetch to avoid race conditions if it changes during the request
- const usedHandle = this.#shapeHandle
+ const usedHandle = this.#syncState.handle

  let response: Response
  try {
@@ -1760,15 +1686,18 @@ export class ShapeStream<T extends Row<unknown> = Row>
  // Handle 409 "must-refetch" - shape handle changed/expired.
  // The fetch wrapper throws FetchError for non-OK responses, so we catch here.
  // Unlike #requestShape, we don't call #reset() here as that would
- // clear #activeSnapshotRequests and break requestSnapshot's pause/resume logic.
+ // clear the pause lock and break requestSnapshot's pause/resume logic.
  if (e instanceof FetchError && e.status === 409) {
  if (usedHandle) {
  const shapeKey = canonicalShapeKey(fetchUrl)
  expiredShapesCache.markExpired(shapeKey, usedHandle)
  }

- this.#shapeHandle =
+ // For snapshot 409s, only update the handle — don't reset offset/schema/etc.
+ // The main stream is paused and should not be disturbed.
+ const nextHandle =
  e.headers[SHAPE_HANDLE_HEADER] || `${usedHandle ?? `handle`}-next`
+ this.#syncState = this.#syncState.withHandle(nextHandle)

  return this.fetchSnapshot(opts)
  }
@@ -1781,7 +1710,7 @@ export class ShapeStream<T extends Row<unknown> = Row>
  }

  const schema: Schema =
- this.#schema ??
+ this.#syncState.schema ??
  getSchemaFromHeaders(response.headers, {
  required: true,
  url: fetchUrl.toString(),