@electric-sql/client 1.1.1 → 1.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +74 -12
- package/dist/cjs/index.cjs +83 -65
- package/dist/cjs/index.cjs.map +1 -1
- package/dist/cjs/index.d.cts +45 -18
- package/dist/index.browser.mjs +3 -3
- package/dist/index.browser.mjs.map +1 -1
- package/dist/index.d.ts +45 -18
- package/dist/index.legacy-esm.js +83 -65
- package/dist/index.legacy-esm.js.map +1 -1
- package/dist/index.mjs +83 -65
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
- package/src/client.ts +142 -21
- package/src/constants.ts +1 -0
- package/src/fetch.ts +37 -88
package/dist/index.d.ts
CHANGED
```diff
@@ -145,28 +145,14 @@ interface BackoffOptions {
     * Maximum number of retry attempts before giving up.
     * Set to Infinity (default) for indefinite retries - needed for offline scenarios
     * where clients may go offline and come back later.
-    *
-    * The retry budget provides protection against retry storms even with infinite retries.
     */
    maxRetries?: number;
-   /**
-    * Percentage of requests that can be retries (0.1 = 10%)
-    *
-    * This is the primary load shedding mechanism. It limits the *rate* of retries,
-    * not the total count. Even with infinite retries, at most 10% of your traffic
-    * will be retries, preventing retry storms from amplifying server load.
-    *
-    * The budget resets every 60 seconds, so a temporary spike of errors won't
-    * permanently exhaust the budget.
-    */
-   retryBudgetPercent?: number;
 }
 declare const BackoffDefaults: {
     initialDelay: number;
     maxDelay: number;
     multiplier: number;
     maxRetries: number;
-    retryBudgetPercent: number;
 };
 
 declare const LIVE_CACHE_BUSTER_QUERY_PARAM = "cursor";
```
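With `retryBudgetPercent` gone, `BackoffOptions` is back to the delay, multiplier, and retry-count fields declared above. A minimal sketch of tuning them on a stream, assuming `ShapeStreamOptions` still accepts a `backoffOptions` field and that `BackoffDefaults` is exported as declared; the URL and table below are placeholders:

```typescript
import { ShapeStream, BackoffDefaults } from '@electric-sql/client'

// Placeholder endpoint and table; `backoffOptions` is assumed to be an accepted option.
const stream = new ShapeStream({
  url: 'https://example.com/v1/shape',
  params: { table: 'items' },
  backoffOptions: {
    ...BackoffDefaults,     // initialDelay, maxDelay, multiplier, maxRetries
    maxRetries: Infinity,   // the default: keep retrying across offline periods
    // retryBudgetPercent no longer exists on BackoffOptions as of this release
  },
})
```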
```diff
@@ -318,10 +304,51 @@ interface ShapeStreamOptions<T = never> {
     transformer?: TransformFunction<T>;
     /**
      * A function for handling shapestream errors.
-     *
-     *
-     *
-     *
+     *
+     * **Automatic retries**: The client automatically retries 5xx server errors, network
+     * errors, and 429 rate limits with exponential backoff. The `onError` callback is
+     * only invoked after these automatic retries are exhausted, or for non-retryable
+     * errors like 4xx client errors.
+     *
+     * When not provided, non-retryable errors will be thrown and syncing will stop.
+     *
+     * **Return value behavior**:
+     * - Return an **object** (RetryOpts or empty `{}`) to retry syncing:
+     *   - `{}` - Retry with the same params and headers
+     *   - `{ params }` - Retry with modified params
+     *   - `{ headers }` - Retry with modified headers (e.g., refreshed auth token)
+     *   - `{ params, headers }` - Retry with both modified
+     * - Return **void** or **undefined** to stop the stream permanently
+     *
+     * **Important**: If you want syncing to continue after an error (e.g., to retry
+     * on network failures), you MUST return at least an empty object `{}`. Simply
+     * logging the error and returning nothing will stop syncing.
+     *
+     * Supports async functions that return `Promise<void | RetryOpts>`.
+     *
+     * @example
+     * ```typescript
+     * // Retry on network errors, stop on others
+     * onError: (error) => {
+     *   console.error('Stream error:', error)
+     *   if (error instanceof FetchError && error.status >= 500) {
+     *     return {} // Retry with same params
+     *   }
+     *   // Return void to stop on other errors
+     * }
+     * ```
+     *
+     * @example
+     * ```typescript
+     * // Refresh auth token on 401
+     * onError: async (error) => {
+     *   if (error instanceof FetchError && error.status === 401) {
+     *     const newToken = await refreshAuthToken()
+     *     return { headers: { Authorization: `Bearer ${newToken}` } }
+     *   }
+     *   return {} // Retry other errors
+     * }
+     * ```
      */
     onError?: ShapeStreamErrorHandler;
 }
```
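Putting the documented behaviour together, a stream wired up with this handler could look like the sketch below. This is illustrative only: the endpoint and table are placeholders and `refreshAuthToken` is a hypothetical helper, not part of the package.

```typescript
import { ShapeStream, FetchError } from '@electric-sql/client'

// Hypothetical token-refresh helper; substitute your own auth logic.
declare function refreshAuthToken(): Promise<string>

const stream = new ShapeStream({
  url: 'https://example.com/v1/shape',  // placeholder Electric endpoint
  params: { table: 'items' },           // placeholder table
  onError: async (error) => {
    if (error instanceof FetchError && error.status === 401) {
      const token = await refreshAuthToken()
      return { headers: { Authorization: `Bearer ${token}` } } // retry with fresh token
    }
    console.error('Stream stopped:', error)
    // Returning nothing stops syncing for any other non-retryable error.
  },
})
```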
package/dist/index.legacy-esm.js
CHANGED
```diff
@@ -300,6 +300,7 @@ var SUBSET_PARAM_ORDER_BY = `subset__order_by`;
 var SUBSET_PARAM_WHERE_PARAMS = `subset__params`;
 var ELECTRIC_PROTOCOL_QUERY_PARAMS = [
   LIVE_QUERY_PARAM,
+  LIVE_SSE_QUERY_PARAM,
   SHAPE_HANDLE_QUERY_PARAM,
   OFFSET_QUERY_PARAM,
   LIVE_CACHE_BUSTER_QUERY_PARAM,
@@ -319,11 +320,22 @@ var BackoffDefaults = {
   maxDelay: 6e4,
   // Cap at 60s - reasonable for long-lived connections
   multiplier: 1.3,
-  maxRetries: Infinity,
+  maxRetries: Infinity
   // Retry forever - clients may go offline and come back
-  retryBudgetPercent: 0.1
-  // 10% retry budget prevents amplification
 };
+function parseRetryAfterHeader(retryAfter) {
+  if (!retryAfter) return 0;
+  const retryAfterSec = Number(retryAfter);
+  if (Number.isFinite(retryAfterSec) && retryAfterSec > 0) {
+    return retryAfterSec * 1e3;
+  }
+  const retryDate = Date.parse(retryAfter);
+  if (!isNaN(retryDate)) {
+    const deltaMs = retryDate - Date.now();
+    return Math.max(0, Math.min(deltaMs, 36e5));
+  }
+  return 0;
+}
 function createFetchWithBackoff(fetchClient, backoffOptions = BackoffDefaults) {
   const {
     initialDelay,
```
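The new `parseRetryAfterHeader` helper centralises the `Retry-After` handling that the backoff loop previously did inline (removed in the hunks that follow). A standalone TypeScript sketch of the same logic with example inputs; `parseRetryAfterMs` is an illustrative name, not the package export:

```typescript
// Accepts either delta-seconds ("120") or an HTTP-date, returns a wait in ms.
// Date-based values are capped at one hour; absent or invalid headers yield 0.
function parseRetryAfterMs(retryAfter: string | undefined): number {
  if (!retryAfter) return 0
  const seconds = Number(retryAfter)
  if (Number.isFinite(seconds) && seconds > 0) return seconds * 1000
  const date = Date.parse(retryAfter)
  if (!Number.isNaN(date)) return Math.max(0, Math.min(date - Date.now(), 3_600_000))
  return 0
}

parseRetryAfterMs('120')                            // 120000
parseRetryAfterMs('Wed, 21 Oct 2026 07:28:00 GMT')  // ms until that date, capped at 1h
parseRetryAfterMs(undefined)                        // 0
```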
```diff
@@ -331,28 +343,8 @@ function createFetchWithBackoff(fetchClient, backoffOptions = BackoffDefaults) {
     multiplier,
     debug = false,
     onFailedAttempt,
-    maxRetries = Infinity,
-    retryBudgetPercent = 0.1
+    maxRetries = Infinity
   } = backoffOptions;
-  let totalRequests = 0;
-  let totalRetries = 0;
-  let budgetResetTime = Date.now() + 6e4;
-  function checkRetryBudget(percent) {
-    const now = Date.now();
-    if (now > budgetResetTime) {
-      totalRequests = 0;
-      totalRetries = 0;
-      budgetResetTime = now + 6e4;
-    }
-    totalRequests++;
-    if (totalRequests < 10) return true;
-    const currentRetryRate = totalRetries / totalRequests;
-    const hasCapacity = currentRetryRate < percent;
-    if (hasCapacity) {
-      totalRetries++;
-    }
-    return hasCapacity;
-  }
   return async (...args) => {
     var _a;
     const url = args[0];
@@ -363,7 +355,6 @@ function createFetchWithBackoff(fetchClient, backoffOptions = BackoffDefaults) {
     try {
       const result = await fetchClient(...args);
       if (result.ok) {
-        delay = initialDelay;
         return result;
       }
       const err = await FetchError.fromResponse(result, url.toString());
@@ -376,7 +367,7 @@ function createFetchWithBackoff(fetchClient, backoffOptions = BackoffDefaults) {
        throw e;
      } else {
        attempt++;
-        if (attempt
+        if (attempt > maxRetries) {
          if (debug) {
            console.log(
              `Max retries reached (${attempt}/${maxRetries}), giving up`
@@ -384,31 +375,7 @@ function createFetchWithBackoff(fetchClient, backoffOptions = BackoffDefaults) {
          }
          throw e;
        }
-
-        if (debug) {
-          console.log(
-            `Retry budget exhausted (attempt ${attempt}), backing off`
-          );
-        }
-        await new Promise((resolve) => setTimeout(resolve, maxDelay));
-        continue;
-      }
-      let serverMinimumMs = 0;
-      if (e instanceof FetchError && e.headers) {
-        const retryAfter = e.headers[`retry-after`];
-        if (retryAfter) {
-          const retryAfterSec = Number(retryAfter);
-          if (Number.isFinite(retryAfterSec) && retryAfterSec > 0) {
-            serverMinimumMs = retryAfterSec * 1e3;
-          } else {
-            const retryDate = Date.parse(retryAfter);
-            if (!isNaN(retryDate)) {
-              const deltaMs = retryDate - Date.now();
-              serverMinimumMs = Math.max(0, Math.min(deltaMs, 36e5));
-            }
-          }
-        }
-      }
+        const serverMinimumMs = e instanceof FetchError && e.headers ? parseRetryAfterHeader(e.headers[`retry-after`]) : 0;
        const jitter = Math.random() * delay;
        const clientBackoffMs = Math.min(jitter, maxDelay);
        const waitMs = Math.max(serverMinimumMs, clientBackoffMs);
```
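The simplified backoff loop now combines a jittered client delay with any server-specified minimum from `Retry-After`. A small illustrative sketch of that wait computation; the function name and the note about `multiplier` are mine, not the package's:

```typescript
// Sketch of the wait computation visible in the hunk above.
// The client never waits less than the server's Retry-After hint, and its own
// contribution is a full-jitter sample of the current delay, capped at maxDelay.
function computeWaitMs(delay: number, maxDelay: number, serverMinimumMs: number): number {
  const jitter = Math.random() * delay
  const clientBackoffMs = Math.min(jitter, maxDelay)
  return Math.max(serverMinimumMs, clientBackoffMs)
}

// Presumably `delay` itself grows by `multiplier` between attempts elsewhere in
// the loop (that part of the function is outside this hunk).
computeWaitMs(1_000, 60_000, 0)      // up to ~1s of jittered client backoff
computeWaitMs(1_000, 60_000, 5_000)  // at least 5s, honouring Retry-After
```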
```diff
@@ -784,8 +751,9 @@ function canonicalShapeKey(url) {
   cleanUrl.searchParams.sort();
   return cleanUrl.toString();
 }
-var _error, _fetchClient2, _sseFetchClient, _messageParser, _subscribers, _started, _state, _lastOffset, _liveCacheBuster, _lastSyncedAt, _isUpToDate, _isMidStream, _connected, _shapeHandle, _mode, _schema, _onError, _requestAbortController, _isRefreshing, _tickPromise, _tickPromiseResolver, _tickPromiseRejecter, _messageChain, _snapshotTracker, _activeSnapshotRequests, _midStreamPromise, _midStreamPromiseResolver, _ShapeStream_instances, start_fn, requestShape_fn, constructUrl_fn, createAbortListener_fn, onInitialResponse_fn, onMessages_fn, fetchShape_fn, requestShapeLongPoll_fn, requestShapeSSE_fn, pause_fn, resume_fn, nextTick_fn, waitForStreamEnd_fn, publish_fn, sendErrorToSubscribers_fn, subscribeToVisibilityChanges_fn, reset_fn, fetchSnapshot_fn;
+var _error, _fetchClient2, _sseFetchClient, _messageParser, _subscribers, _started, _state, _lastOffset, _liveCacheBuster, _lastSyncedAt, _isUpToDate, _isMidStream, _connected, _shapeHandle, _mode, _schema, _onError, _requestAbortController, _isRefreshing, _tickPromise, _tickPromiseResolver, _tickPromiseRejecter, _messageChain, _snapshotTracker, _activeSnapshotRequests, _midStreamPromise, _midStreamPromiseResolver, _lastSseConnectionStartTime, _minSseConnectionDuration, _consecutiveShortSseConnections, _maxShortSseConnections, _sseFallbackToLongPolling, _sseBackoffBaseDelay, _sseBackoffMaxDelay, _ShapeStream_instances, start_fn, requestShape_fn, constructUrl_fn, createAbortListener_fn, onInitialResponse_fn, onMessages_fn, fetchShape_fn, requestShapeLongPoll_fn, requestShapeSSE_fn, pause_fn, resume_fn, nextTick_fn, waitForStreamEnd_fn, publish_fn, sendErrorToSubscribers_fn, subscribeToVisibilityChanges_fn, reset_fn, fetchSnapshot_fn;
 var ShapeStream = class {
+  // Maximum delay cap (ms)
   constructor(options) {
     __privateAdd(this, _ShapeStream_instances);
     __privateAdd(this, _error, null);
@@ -819,6 +787,16 @@ var ShapeStream = class {
     // counter for concurrent snapshot requests
     __privateAdd(this, _midStreamPromise);
     __privateAdd(this, _midStreamPromiseResolver);
+    __privateAdd(this, _lastSseConnectionStartTime);
+    __privateAdd(this, _minSseConnectionDuration, 1e3);
+    // Minimum expected SSE connection duration (1 second)
+    __privateAdd(this, _consecutiveShortSseConnections, 0);
+    __privateAdd(this, _maxShortSseConnections, 3);
+    // Fall back to long polling after this many short connections
+    __privateAdd(this, _sseFallbackToLongPolling, false);
+    __privateAdd(this, _sseBackoffBaseDelay, 100);
+    // Base delay for exponential backoff (ms)
+    __privateAdd(this, _sseBackoffMaxDelay, 5e3);
     var _a, _b, _c, _d;
     this.options = __spreadValues({ subscribe: true }, options);
     validateOptions(this.options);
@@ -990,9 +968,16 @@ _snapshotTracker = new WeakMap();
 _activeSnapshotRequests = new WeakMap();
 _midStreamPromise = new WeakMap();
 _midStreamPromiseResolver = new WeakMap();
+_lastSseConnectionStartTime = new WeakMap();
+_minSseConnectionDuration = new WeakMap();
+_consecutiveShortSseConnections = new WeakMap();
+_maxShortSseConnections = new WeakMap();
+_sseFallbackToLongPolling = new WeakMap();
+_sseBackoffBaseDelay = new WeakMap();
+_sseBackoffMaxDelay = new WeakMap();
 _ShapeStream_instances = new WeakSet();
 start_fn = async function() {
-  var _a;
+  var _a, _b, _c, _d, _e;
   __privateSet(this, _started, true);
   try {
     await __privateMethod(this, _ShapeStream_instances, requestShape_fn).call(this);
@@ -1000,24 +985,34 @@ start_fn = async function() {
     __privateSet(this, _error, err);
     if (__privateGet(this, _onError)) {
       const retryOpts = await __privateGet(this, _onError).call(this, err);
-      if (typeof retryOpts === `object`) {
-
-
-        this.options.params = retryOpts.params;
+      if (retryOpts && typeof retryOpts === `object`) {
+        if (retryOpts.params) {
+          this.options.params = __spreadValues(__spreadValues({}, (_a = this.options.params) != null ? _a : {}), retryOpts.params);
         }
-        if (
-        this.options.headers = retryOpts.headers;
+        if (retryOpts.headers) {
+          this.options.headers = __spreadValues(__spreadValues({}, (_b = this.options.headers) != null ? _b : {}), retryOpts.headers);
         }
+        __privateSet(this, _error, null);
         __privateSet(this, _started, false);
-        __privateMethod(this, _ShapeStream_instances, start_fn).call(this);
+        await __privateMethod(this, _ShapeStream_instances, start_fn).call(this);
+        return;
+      }
+      if (err instanceof Error) {
+        __privateMethod(this, _ShapeStream_instances, sendErrorToSubscribers_fn).call(this, err);
       }
+      __privateSet(this, _connected, false);
+      (_c = __privateGet(this, _tickPromiseRejecter)) == null ? void 0 : _c.call(this);
       return;
     }
-
-
+    if (err instanceof Error) {
+      __privateMethod(this, _ShapeStream_instances, sendErrorToSubscribers_fn).call(this, err);
+    }
     __privateSet(this, _connected, false);
-    (
+    (_d = __privateGet(this, _tickPromiseRejecter)) == null ? void 0 : _d.call(this);
+    throw err;
   }
+  __privateSet(this, _connected, false);
+  (_e = __privateGet(this, _tickPromiseRejecter)) == null ? void 0 : _e.call(this);
 };
 requestShape_fn = async function() {
   var _a, _b;
```
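One behavioural change worth noting in `start_fn`: `params` and `headers` returned from `onError` are now shallow-merged over the existing options rather than replacing them wholesale, and `retryOpts` must be truthy before it is treated as a retry. A plain TypeScript sketch of that merge; the `RetryOpts` shape here is simplified for illustration:

```typescript
// Illustration of the merge behaviour in start_fn above (no private fields).
// Returned options are spread *over* the existing params/headers, so unrelated
// keys are preserved and only the returned keys are overwritten.
type RetryOpts = { params?: Record<string, string>; headers?: Record<string, string> }

function applyRetryOpts(
  options: { params?: Record<string, string>; headers?: Record<string, string> },
  retryOpts: RetryOpts
): void {
  if (retryOpts.params) {
    options.params = { ...(options.params ?? {}), ...retryOpts.params }
  }
  if (retryOpts.headers) {
    options.headers = { ...(options.headers ?? {}), ...retryOpts.headers }
  }
}

// e.g. existing headers { 'X-Client': 'web', Authorization: 'Bearer old' }
// plus retryOpts { headers: { Authorization: 'Bearer new' } }
// => { 'X-Client': 'web', Authorization: 'Bearer new' }
```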
```diff
@@ -1062,7 +1057,6 @@ requestShape_fn = async function() {
       await __privateMethod(this, _ShapeStream_instances, publish_fn).call(this, Array.isArray(e.json) ? e.json : [e.json]);
       return __privateMethod(this, _ShapeStream_instances, requestShape_fn).call(this);
     } else {
-      __privateMethod(this, _ShapeStream_instances, sendErrorToSubscribers_fn).call(this, e);
       throw e;
     }
   } finally {
@@ -1204,7 +1198,7 @@ onMessages_fn = async function(batch, isSseMessage = false) {
 fetchShape_fn = async function(opts) {
   var _a;
   const useSse = (_a = this.options.liveSse) != null ? _a : this.options.experimentalLiveSse;
-  if (__privateGet(this, _isUpToDate) && useSse && !__privateGet(this, _isRefreshing) && !opts.resumingFromPause) {
+  if (__privateGet(this, _isUpToDate) && useSse && !__privateGet(this, _isRefreshing) && !opts.resumingFromPause && !__privateGet(this, _sseFallbackToLongPolling)) {
     opts.fetchUrl.searchParams.set(EXPERIMENTAL_LIVE_SSE_QUERY_PARAM, `true`);
     opts.fetchUrl.searchParams.set(LIVE_SSE_QUERY_PARAM, `true`);
     return __privateMethod(this, _ShapeStream_instances, requestShapeSSE_fn).call(this, opts);
@@ -1228,6 +1222,7 @@ requestShapeLongPoll_fn = async function(opts) {
 requestShapeSSE_fn = async function(opts) {
   const { fetchUrl, requestAbortController, headers } = opts;
   const fetch2 = __privateGet(this, _sseFetchClient);
+  __privateSet(this, _lastSseConnectionStartTime, Date.now());
   try {
     let buffer = [];
     await fetchEventSource(fetchUrl.toString(), {
@@ -1261,6 +1256,27 @@ requestShapeSSE_fn = async function(opts) {
       throw new FetchBackoffAbortError();
     }
     throw error;
+  } finally {
+    const connectionDuration = Date.now() - __privateGet(this, _lastSseConnectionStartTime);
+    const wasAborted = requestAbortController.signal.aborted;
+    if (connectionDuration < __privateGet(this, _minSseConnectionDuration) && !wasAborted) {
+      __privateWrapper(this, _consecutiveShortSseConnections)._++;
+      if (__privateGet(this, _consecutiveShortSseConnections) >= __privateGet(this, _maxShortSseConnections)) {
+        __privateSet(this, _sseFallbackToLongPolling, true);
+        console.warn(
+          `[Electric] SSE connections are closing immediately (possibly due to proxy buffering or misconfiguration). Falling back to long polling. Your proxy must support streaming SSE responses (not buffer the complete response). Configuration: Nginx add 'X-Accel-Buffering: no', Caddy add 'flush_interval -1' to reverse_proxy. Note: Do NOT disable caching entirely - Electric uses cache headers to enable request collapsing for efficiency.`
+        );
+      } else {
+        const maxDelay = Math.min(
+          __privateGet(this, _sseBackoffMaxDelay),
+          __privateGet(this, _sseBackoffBaseDelay) * Math.pow(2, __privateGet(this, _consecutiveShortSseConnections))
+        );
+        const delayMs = Math.floor(Math.random() * maxDelay);
+        await new Promise((resolve) => setTimeout(resolve, delayMs));
+      }
+    } else if (connectionDuration >= __privateGet(this, _minSseConnectionDuration)) {
+      __privateSet(this, _consecutiveShortSseConnections, 0);
+    }
   }
 };
 pause_fn = function() {
```
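The `finally` block above adds a short-connection heuristic for SSE. Distilled into a standalone sketch with the constants from the diff; the names and structure here are illustrative, not the package's internals:

```typescript
// An SSE connection that closes within 1s, without being aborted, counts as "short".
// After 3 consecutive short connections the stream falls back to long polling;
// otherwise it retries SSE after a jittered exponential delay (100ms base, 5s cap).
const MIN_SSE_CONNECTION_MS = 1_000
const MAX_SHORT_SSE_CONNECTIONS = 3
const SSE_BACKOFF_BASE_MS = 100
const SSE_BACKOFF_MAX_MS = 5_000

let consecutiveShort = 0
let fallBackToLongPolling = false

async function onSseConnectionClosed(durationMs: number, wasAborted: boolean): Promise<void> {
  if (durationMs < MIN_SSE_CONNECTION_MS && !wasAborted) {
    consecutiveShort++
    if (consecutiveShort >= MAX_SHORT_SSE_CONNECTIONS) {
      fallBackToLongPolling = true // subsequent fetches skip SSE; a stream reset clears this
    } else {
      const cap = Math.min(SSE_BACKOFF_MAX_MS, SSE_BACKOFF_BASE_MS * 2 ** consecutiveShort)
      await new Promise((resolve) => setTimeout(resolve, Math.floor(Math.random() * cap)))
    }
  } else if (durationMs >= MIN_SSE_CONNECTION_MS) {
    consecutiveShort = 0 // a healthy connection resets the counter
  }
}
```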
```diff
@@ -1352,6 +1368,8 @@ reset_fn = function(handle) {
   __privateSet(this, _connected, false);
   __privateSet(this, _schema, void 0);
   __privateSet(this, _activeSnapshotRequests, 0);
+  __privateSet(this, _consecutiveShortSseConnections, 0);
+  __privateSet(this, _sseFallbackToLongPolling, false);
 };
 fetchSnapshot_fn = async function(url, headers) {
   const response = await __privateGet(this, _fetchClient2).call(this, url.toString(), { headers });
```