@electric-sql/client 1.1.1 → 1.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +74 -12
- package/dist/cjs/index.cjs +83 -65
- package/dist/cjs/index.cjs.map +1 -1
- package/dist/cjs/index.d.cts +45 -18
- package/dist/index.browser.mjs +3 -3
- package/dist/index.browser.mjs.map +1 -1
- package/dist/index.d.ts +45 -18
- package/dist/index.legacy-esm.js +83 -65
- package/dist/index.legacy-esm.js.map +1 -1
- package/dist/index.mjs +83 -65
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
- package/src/client.ts +142 -21
- package/src/constants.ts +1 -0
- package/src/fetch.ts +37 -88
package/src/client.ts
CHANGED
@@ -296,10 +296,51 @@ export interface ShapeStreamOptions<T = never> {
 
   /**
    * A function for handling shapestream errors.
-   *
-   *
-   *
-   *
+   *
+   * **Automatic retries**: The client automatically retries 5xx server errors, network
+   * errors, and 429 rate limits with exponential backoff. The `onError` callback is
+   * only invoked after these automatic retries are exhausted, or for non-retryable
+   * errors like 4xx client errors.
+   *
+   * When not provided, non-retryable errors will be thrown and syncing will stop.
+   *
+   * **Return value behavior**:
+   * - Return an **object** (RetryOpts or empty `{}`) to retry syncing:
+   *   - `{}` - Retry with the same params and headers
+   *   - `{ params }` - Retry with modified params
+   *   - `{ headers }` - Retry with modified headers (e.g., refreshed auth token)
+   *   - `{ params, headers }` - Retry with both modified
+   * - Return **void** or **undefined** to stop the stream permanently
+   *
+   * **Important**: If you want syncing to continue after an error (e.g., to retry
+   * on network failures), you MUST return at least an empty object `{}`. Simply
+   * logging the error and returning nothing will stop syncing.
+   *
+   * Supports async functions that return `Promise<void | RetryOpts>`.
+   *
+   * @example
+   * ```typescript
+   * // Retry on network errors, stop on others
+   * onError: (error) => {
+   *   console.error('Stream error:', error)
+   *   if (error instanceof FetchError && error.status >= 500) {
+   *     return {} // Retry with same params
+   *   }
+   *   // Return void to stop on other errors
+   * }
+   * ```
+   *
+   * @example
+   * ```typescript
+   * // Refresh auth token on 401
+   * onError: async (error) => {
+   *   if (error instanceof FetchError && error.status === 401) {
+   *     const newToken = await refreshAuthToken()
+   *     return { headers: { Authorization: `Bearer ${newToken}` } }
+   *   }
+   *   return {} // Retry other errors
+   * }
+   * ```
    */
   onError?: ShapeStreamErrorHandler
 }
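Putting the two documented behaviours together, a handler wired into a stream might look like the sketch below. The `url`, `params` and `refreshAuthToken()` values are illustrative placeholders, not part of this diff; `ShapeStream` and `FetchError` are the package's existing exports.

```typescript
import { ShapeStream, FetchError } from '@electric-sql/client'

// Hypothetical token refresh helper - not part of the package.
async function refreshAuthToken(): Promise<string> {
  const res = await fetch(`https://auth.example.com/token`, { method: `POST` })
  const { token } = await res.json()
  return token
}

const stream = new ShapeStream({
  url: `https://electric.example.com/v1/shape`,
  params: { table: `todos` },
  onError: async (error) => {
    if (error instanceof FetchError && error.status === 401) {
      // Retry with a refreshed Authorization header
      const token = await refreshAuthToken()
      return { headers: { Authorization: `Bearer ${token}` } }
    }
    if (error instanceof FetchError && error.status >= 500) {
      return {} // Retry with the same params and headers
    }
    // Returning nothing stops the stream permanently
  },
})
```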
@@ -441,6 +482,13 @@ export class ShapeStream<T extends Row<unknown> = Row>
   #activeSnapshotRequests = 0 // counter for concurrent snapshot requests
   #midStreamPromise?: Promise<void>
   #midStreamPromiseResolver?: () => void
+  #lastSseConnectionStartTime?: number
+  #minSseConnectionDuration = 1000 // Minimum expected SSE connection duration (1 second)
+  #consecutiveShortSseConnections = 0
+  #maxShortSseConnections = 3 // Fall back to long polling after this many short connections
+  #sseFallbackToLongPolling = false
+  #sseBackoffBaseDelay = 100 // Base delay for exponential backoff (ms)
+  #sseBackoffMaxDelay = 5000 // Maximum delay cap (ms)
 
   constructor(options: ShapeStreamOptions<GetExtensions<T>>) {
     this.options = { subscribe: true, ...options }
@@ -507,32 +555,61 @@ export class ShapeStream<T extends Row<unknown> = Row>
       await this.#requestShape()
     } catch (err) {
       this.#error = err
+
+      // Check if onError handler wants to retry
      if (this.#onError) {
         const retryOpts = await this.#onError(err as Error)
-
-
-
-
-
+        // Guard against null (typeof null === "object" in JavaScript)
+        if (retryOpts && typeof retryOpts === `object`) {
+          // Update params/headers but don't reset offset
+          // We want to continue from where we left off, not refetch everything
+          if (retryOpts.params) {
+            // Merge new params with existing params to preserve other parameters
+            this.options.params = {
+              ...(this.options.params ?? {}),
+              ...retryOpts.params,
+            }
           }
 
-          if (
-
+          if (retryOpts.headers) {
+            // Merge new headers with existing headers to preserve other headers
+            this.options.headers = {
+              ...(this.options.headers ?? {}),
+              ...retryOpts.headers,
+            }
           }
 
-          //
+          // Clear the error since we're retrying
+          this.#error = null
+
+          // Restart from current offset
           this.#started = false
-          this.#start()
+          await this.#start()
+          return
         }
+        // onError returned void, meaning it doesn't want to retry
+        // This is an unrecoverable error, notify subscribers
+        if (err instanceof Error) {
+          this.#sendErrorToSubscribers(err)
+        }
+        this.#connected = false
+        this.#tickPromiseRejecter?.()
        return
      }
 
-      //
-      throw
-
+      // No onError handler provided, this is an unrecoverable error
+      // Notify subscribers and throw
+      if (err instanceof Error) {
+        this.#sendErrorToSubscribers(err)
+      }
       this.#connected = false
       this.#tickPromiseRejecter?.()
+      throw err
     }
+
+    // Normal completion, clean up
+    this.#connected = false
+    this.#tickPromiseRejecter?.()
   }
 
   async #requestShape(): Promise<void> {
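The retry path shallow-merges the returned `params`/`headers` over the existing options and keeps the current offset, so a retry resumes from where the stream left off rather than refetching. A minimal standalone illustration of that merge behaviour (not code from the package):

```typescript
type Params = Record<string, string>

// Mirrors the spread logic in #start: existing values first, retry values override.
function mergeRetryParams(existing: Params | undefined, retry: Params): Params {
  return { ...(existing ?? {}), ...retry }
}

// Example: only `where` is replaced; `table` is preserved and the offset is untouched.
const merged = mergeRetryParams(
  { table: `todos`, where: `completed = false` },
  { where: `completed = true` }
)
// merged is { table: 'todos', where: 'completed = true' }
```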
@@ -613,12 +690,10 @@ export class ShapeStream<T extends Row<unknown> = Row>
       )
       return this.#requestShape()
     } else {
-      // Notify subscribers
-      this.#sendErrorToSubscribers(e)
-
       // errors that have reached this point are not actionable without
       // additional user input, such as 400s or failures to read the
-      // body of a response, so we exit the loop
+      // body of a response, so we exit the loop and let #start handle it
+      // Note: We don't notify subscribers here because onError might recover
       throw e
     }
   } finally {
@@ -832,7 +907,8 @@ export class ShapeStream<T extends Row<unknown> = Row>
       this.#isUpToDate &&
       useSse &&
       !this.#isRefreshing &&
-      !opts.resumingFromPause
+      !opts.resumingFromPause &&
+      !this.#sseFallbackToLongPolling
     ) {
       opts.fetchUrl.searchParams.set(EXPERIMENTAL_LIVE_SSE_QUERY_PARAM, `true`)
       opts.fetchUrl.searchParams.set(LIVE_SSE_QUERY_PARAM, `true`)
@@ -871,6 +947,10 @@ export class ShapeStream<T extends Row<unknown> = Row>
   }): Promise<void> {
     const { fetchUrl, requestAbortController, headers } = opts
     const fetch = this.#sseFetchClient
+
+    // Track when the SSE connection starts
+    this.#lastSseConnectionStartTime = Date.now()
+
     try {
       let buffer: Array<Message<T>> = []
       await fetchEventSource(fetchUrl.toString(), {
@@ -916,6 +996,44 @@ export class ShapeStream<T extends Row<unknown> = Row>
         throw new FetchBackoffAbortError()
       }
       throw error
+    } finally {
+      // Check if the SSE connection closed too quickly
+      // This can happen when responses are cached or when the proxy/server
+      // is misconfigured for SSE and closes the connection immediately
+      const connectionDuration = Date.now() - this.#lastSseConnectionStartTime!
+      const wasAborted = requestAbortController.signal.aborted
+
+      if (connectionDuration < this.#minSseConnectionDuration && !wasAborted) {
+        // Connection was too short - likely a cached response or misconfiguration
+        this.#consecutiveShortSseConnections++
+
+        if (
+          this.#consecutiveShortSseConnections >= this.#maxShortSseConnections
+        ) {
+          // Too many short connections - fall back to long polling
+          this.#sseFallbackToLongPolling = true
+          console.warn(
+            `[Electric] SSE connections are closing immediately (possibly due to proxy buffering or misconfiguration). ` +
+              `Falling back to long polling. ` +
+              `Your proxy must support streaming SSE responses (not buffer the complete response). ` +
+              `Configuration: Nginx add 'X-Accel-Buffering: no', Caddy add 'flush_interval -1' to reverse_proxy. ` +
+              `Note: Do NOT disable caching entirely - Electric uses cache headers to enable request collapsing for efficiency.`
+          )
+        } else {
+          // Add exponential backoff with full jitter to prevent tight infinite loop
+          // Formula: random(0, min(cap, base * 2^attempt))
+          const maxDelay = Math.min(
+            this.#sseBackoffMaxDelay,
+            this.#sseBackoffBaseDelay *
+              Math.pow(2, this.#consecutiveShortSseConnections)
+          )
+          const delayMs = Math.floor(Math.random() * maxDelay)
+          await new Promise((resolve) => setTimeout(resolve, delayMs))
+        }
+      } else if (connectionDuration >= this.#minSseConnectionDuration) {
+        // Connection was healthy - reset counter
+        this.#consecutiveShortSseConnections = 0
+      }
     }
   }
 
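Between short-lived SSE reconnects the client now sleeps according to the full-jitter formula noted in the comment above. A standalone sketch of the same calculation, using the defaults introduced in this release (100 ms base, 5 s cap); the function name is illustrative:

```typescript
// random(0, min(cap, base * 2^attempt)) - full jitter, matching the finally block above
function shortSseReconnectDelay(
  attempt: number,
  baseDelayMs = 100, // #sseBackoffBaseDelay
  maxDelayMs = 5000 // #sseBackoffMaxDelay
): number {
  const cap = Math.min(maxDelayMs, baseDelayMs * Math.pow(2, attempt))
  return Math.floor(Math.random() * cap)
}

// attempt 1 -> up to 200 ms, attempt 3 -> up to 800 ms, attempt 6+ -> capped at 5 s
console.log(shortSseReconnectDelay(1), shortSseReconnectDelay(6))
```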
@@ -1090,6 +1208,9 @@ export class ShapeStream<T extends Row<unknown> = Row>
     this.#connected = false
     this.#schema = undefined
     this.#activeSnapshotRequests = 0
+    // Reset SSE fallback state to try SSE again after reset
+    this.#consecutiveShortSseConnections = 0
+    this.#sseFallbackToLongPolling = false
   }
 
   /**
package/src/constants.ts
CHANGED
@@ -30,6 +30,7 @@ export const SUBSET_PARAM_WHERE_PARAMS = `subset__params`
 // Query parameters that should be passed through when proxying Electric requests
 export const ELECTRIC_PROTOCOL_QUERY_PARAMS: Array<string> = [
   LIVE_QUERY_PARAM,
+  LIVE_SSE_QUERY_PARAM,
   SHAPE_HANDLE_QUERY_PARAM,
   OFFSET_QUERY_PARAM,
   LIVE_CACHE_BUSTER_QUERY_PARAM,
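Forwarding `LIVE_SSE_QUERY_PARAM` matters for anyone proxying Electric requests: a proxy that only passes through `ELECTRIC_PROTOCOL_QUERY_PARAMS` would otherwise strip the SSE flag and silently force long polling. Below is a sketch of such a pass-through handler; it assumes the constant is importable from the package root, and the upstream URL and table name are placeholder values.

```typescript
import { ELECTRIC_PROTOCOL_QUERY_PARAMS } from '@electric-sql/client'

// Assumed upstream Electric URL - replace with your own deployment.
const ELECTRIC_URL = `http://localhost:3000`

export async function proxyShapeRequest(request: Request): Promise<Response> {
  const incoming = new URL(request.url)
  const upstream = new URL(`${ELECTRIC_URL}/v1/shape`)

  // Copy through only the protocol-level params (now including the SSE flag)
  // so that SSE mode survives the proxy hop.
  for (const param of ELECTRIC_PROTOCOL_QUERY_PARAMS) {
    const value = incoming.searchParams.get(param)
    if (value !== null) upstream.searchParams.set(param, value)
  }

  // Shape definition and auth checks would be applied server-side here.
  upstream.searchParams.set(`table`, `todos`)

  return fetch(upstream)
}
```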
package/src/fetch.ts
CHANGED
@@ -38,21 +38,8 @@ export interface BackoffOptions {
    * Maximum number of retry attempts before giving up.
    * Set to Infinity (default) for indefinite retries - needed for offline scenarios
    * where clients may go offline and come back later.
-   *
-   * The retry budget provides protection against retry storms even with infinite retries.
    */
   maxRetries?: number
-  /**
-   * Percentage of requests that can be retries (0.1 = 10%)
-   *
-   * This is the primary load shedding mechanism. It limits the *rate* of retries,
-   * not the total count. Even with infinite retries, at most 10% of your traffic
-   * will be retries, preventing retry storms from amplifying server load.
-   *
-   * The budget resets every 60 seconds, so a temporary spike of errors won't
-   * permanently exhaust the budget.
-   */
-  retryBudgetPercent?: number
 }
 
 export const BackoffDefaults = {
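For callers that were setting `retryBudgetPercent`, the field should simply be dropped; `maxRetries` is now the only client-side limit on retry volume. A sketch, assuming `BackoffOptions` and `BackoffDefaults` remain exported from the package root as in 1.1.1:

```typescript
import { BackoffDefaults, type BackoffOptions } from '@electric-sql/client'

// Valid in 1.1.3: retry volume is bounded by maxRetries alone.
const backoffOptions: BackoffOptions = {
  ...BackoffDefaults,
  maxRetries: 20, // stop after 20 attempts instead of retrying forever
}

// No longer valid: `retryBudgetPercent` has been removed from BackoffOptions.
// const legacy: BackoffOptions = { ...BackoffDefaults, retryBudgetPercent: 0.1 }
```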
@@ -60,7 +47,31 @@
   maxDelay: 60_000, // Cap at 60s - reasonable for long-lived connections
   multiplier: 1.3,
   maxRetries: Infinity, // Retry forever - clients may go offline and come back
-
+}
+
+/**
+ * Parse Retry-After header value and return delay in milliseconds
+ * Supports both delta-seconds format and HTTP-date format
+ * Returns 0 if header is not present or invalid
+ */
+export function parseRetryAfterHeader(retryAfter: string | undefined): number {
+  if (!retryAfter) return 0
+
+  // Try parsing as seconds (delta-seconds format)
+  const retryAfterSec = Number(retryAfter)
+  if (Number.isFinite(retryAfterSec) && retryAfterSec > 0) {
+    return retryAfterSec * 1000
+  }
+
+  // Try parsing as HTTP-date
+  const retryDate = Date.parse(retryAfter)
+  if (!isNaN(retryDate)) {
+    // Handle clock skew: clamp to non-negative, cap at reasonable max
+    const deltaMs = retryDate - Date.now()
+    return Math.max(0, Math.min(deltaMs, 3600_000)) // Cap at 1 hour
+  }
+
+  return 0
 }
 
 export function createFetchWithBackoff(
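Assuming the new helper is in scope (it is exported from `src/fetch.ts`; this diff does not show whether it is re-exported from the package root), it behaves as follows:

```typescript
// Assumes parseRetryAfterHeader is in scope, e.g. imported from the package's
// fetch module - the import path itself is not shown in this diff.
parseRetryAfterHeader(undefined) // 0 - no header, no server-imposed minimum
parseRetryAfterHeader(`120`) // 120_000 - delta-seconds converted to ms
parseRetryAfterHeader(`not-a-date`) // 0 - unparsable values are ignored

// HTTP-date values are measured against Date.now() and clamped to [0, 1 hour]:
parseRetryAfterHeader(new Date(Date.now() + 30_000).toUTCString()) // roughly 30_000
```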
@@ -74,37 +85,7 @@
     debug = false,
     onFailedAttempt,
     maxRetries = Infinity,
-    retryBudgetPercent = 0.1,
   } = backoffOptions
-
-  // Retry budget tracking (closure-scoped)
-  // Resets every minute to prevent retry storms
-  let totalRequests = 0
-  let totalRetries = 0
-  let budgetResetTime = Date.now() + 60_000
-
-  function checkRetryBudget(percent: number): boolean {
-    const now = Date.now()
-    if (now > budgetResetTime) {
-      totalRequests = 0
-      totalRetries = 0
-      budgetResetTime = now + 60_000
-    }
-
-    totalRequests++
-
-    // Allow retries for first 10 requests to avoid cold start issues
-    if (totalRequests < 10) return true
-
-    const currentRetryRate = totalRetries / totalRequests
-    const hasCapacity = currentRetryRate < percent
-
-    if (hasCapacity) {
-      totalRetries++
-    }
-
-    return hasCapacity
-  }
   return async (...args: Parameters<typeof fetch>): Promise<Response> => {
     const url = args[0]
     const options = args[1]
@@ -116,8 +97,6 @@
     try {
       const result = await fetchClient(...args)
       if (result.ok) {
-        // Reset backoff on successful request
-        delay = initialDelay
         return result
       }
 
@@ -137,9 +116,9 @@
         // Any client errors cannot be backed off on, leave it to the caller to handle.
         throw e
       } else {
-        // Check
+        // Check max retries
         attempt++
-        if (attempt
+        if (attempt > maxRetries) {
           if (debug) {
             console.log(
               `Max retries reached (${attempt}/${maxRetries}), giving up`
@@ -148,50 +127,20 @@
           throw e
         }
 
-        // Check retry budget - this is our primary load shedding mechanism
-        // It limits the *rate* of retries (10% of traffic) not the count
-        // This prevents retry storms even with infinite retries
-        if (!checkRetryBudget(retryBudgetPercent)) {
-          if (debug) {
-            console.log(
-              `Retry budget exhausted (attempt ${attempt}), backing off`
-            )
-          }
-          // Wait for maxDelay before checking budget again
-          // This prevents tight retry loops when budget is exhausted
-          await new Promise((resolve) => setTimeout(resolve, maxDelay))
-          // Don't throw - continue retrying after the wait
-          // This allows offline clients to eventually reconnect
-          continue
-        }
-
         // Calculate wait time honoring server-driven backoff as a floor
         // Precedence: max(serverMinimum, min(clientMaxDelay, backoffWithJitter))
 
         // 1. Parse server-provided Retry-After (if present)
-
-
-
-
-
-
-
-
-
-
-            const retryDate = Date.parse(retryAfter)
-            if (!isNaN(retryDate)) {
-              // Handle clock skew: clamp to non-negative, cap at reasonable max
-              const deltaMs = retryDate - Date.now()
-              serverMinimumMs = Math.max(0, Math.min(deltaMs, 3600_000)) // Cap at 1 hour
-            }
-          }
-        }
-      }
-
-        // 2. Calculate client backoff with full jitter
-        const jitter = Math.random() * delay
-        const clientBackoffMs = Math.min(jitter, maxDelay)
+        const serverMinimumMs =
+          e instanceof FetchError && e.headers
+            ? parseRetryAfterHeader(e.headers[`retry-after`])
+            : 0
+
+        // 2. Calculate client backoff with full jitter strategy
+        // Full jitter: random_between(0, min(cap, exponential_backoff))
+        // See: https://aws.amazon.com/blogs/architecture/exponential-backoff-and-jitter/
+        const jitter = Math.random() * delay // random value between 0 and current delay
+        const clientBackoffMs = Math.min(jitter, maxDelay) // cap at maxDelay
 
         // 3. Server minimum is the floor, client cap is the ceiling
        const waitMs = Math.max(serverMinimumMs, clientBackoffMs)
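The precedence rule - the server's `Retry-After` is a floor, and the jittered client backoff is capped at `maxDelay` - can be expressed on its own as the following sketch (not package code):

```typescript
// waitMs = max(serverMinimumMs, min(maxDelay, random(0, currentDelay)))
function backoffWaitMs(
  currentDelay: number, // exponentially growing client delay
  maxDelay: number, // client-side cap, 60_000 ms by default
  serverMinimumMs: number // parsed Retry-After, 0 when absent
): number {
  const jitter = Math.random() * currentDelay
  const clientBackoffMs = Math.min(jitter, maxDelay)
  return Math.max(serverMinimumMs, clientBackoffMs)
}

// A 429 carrying `Retry-After: 30` enforces at least 30s even early in the backoff:
backoffWaitMs(1_000, 60_000, 30_000) // always >= 30_000
```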