@electric-sql/client 1.1.1 → 1.1.2
This diff shows the changes between these two publicly released package versions, as they appear in the public registry. It is provided for informational purposes only.
- package/dist/cjs/index.cjs +73 -90
- package/dist/cjs/index.cjs.map +1 -1
- package/dist/cjs/index.d.cts +0 -22
- package/dist/index.browser.mjs +3 -3
- package/dist/index.browser.mjs.map +1 -1
- package/dist/index.d.ts +0 -22
- package/dist/index.legacy-esm.js +73 -90
- package/dist/index.legacy-esm.js.map +1 -1
- package/dist/index.mjs +73 -90
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
- package/src/client.ts +97 -17
- package/src/fetch.ts +9 -126
package/dist/index.d.ts
CHANGED
@@ -135,38 +135,16 @@ interface BackoffOptions {
     initialDelay: number;
     /**
      * Maximum retry delay in milliseconds
-     * After reaching this, delay stays constant (e.g., retry every 60s)
      */
     maxDelay: number;
     multiplier: number;
     onFailedAttempt?: () => void;
     debug?: boolean;
-    /**
-     * Maximum number of retry attempts before giving up.
-     * Set to Infinity (default) for indefinite retries - needed for offline scenarios
-     * where clients may go offline and come back later.
-     *
-     * The retry budget provides protection against retry storms even with infinite retries.
-     */
-    maxRetries?: number;
-    /**
-     * Percentage of requests that can be retries (0.1 = 10%)
-     *
-     * This is the primary load shedding mechanism. It limits the *rate* of retries,
-     * not the total count. Even with infinite retries, at most 10% of your traffic
-     * will be retries, preventing retry storms from amplifying server load.
-     *
-     * The budget resets every 60 seconds, so a temporary spike of errors won't
-     * permanently exhaust the budget.
-     */
-    retryBudgetPercent?: number;
 }
 declare const BackoffDefaults: {
     initialDelay: number;
     maxDelay: number;
     multiplier: number;
-    maxRetries: number;
-    retryBudgetPercent: number;
 };
 
 declare const LIVE_CACHE_BUSTER_QUERY_PARAM = "cursor";
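With this release BackoffOptions is back to five fields: initialDelay, maxDelay, multiplier, onFailedAttempt and debug; maxRetries and retryBudgetPercent are gone. A minimal usage sketch, not part of the diff: it assumes createFetchWithBackoff is exported from the package entry point alongside these typings, and uses a placeholder URL.

    // Sketch: configuring retries with the 1.1.2 BackoffOptions shape.
    import { createFetchWithBackoff } from '@electric-sql/client'

    const fetchWithBackoff = createFetchWithBackoff(fetch, {
      initialDelay: 100,  // first retry after ~100ms
      maxDelay: 10_000,   // delay is capped at 10s (the restored default)
      multiplier: 1.3,    // exponential growth factor between attempts
      debug: true,        // log each retry attempt
      onFailedAttempt: () => console.log('attempt failed, retrying'),
      // maxRetries and retryBudgetPercent are no longer accepted in 1.1.2
    })

    const res = await fetchWithBackoff('https://example.com/v1/shape?table=items')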
package/dist/index.legacy-esm.js
CHANGED
@@ -316,13 +316,8 @@ var ELECTRIC_PROTOCOL_QUERY_PARAMS = [
 var HTTP_RETRY_STATUS_CODES = [429];
 var BackoffDefaults = {
   initialDelay: 100,
-  maxDelay: 6e4,
-
-  multiplier: 1.3,
-  maxRetries: Infinity,
-  // Retry forever - clients may go offline and come back
-  retryBudgetPercent: 0.1
-  // 10% retry budget prevents amplification
+  maxDelay: 1e4,
+  multiplier: 1.3
 };
 function createFetchWithBackoff(fetchClient, backoffOptions = BackoffDefaults) {
   const {
@@ -330,29 +325,8 @@ function createFetchWithBackoff(fetchClient, backoffOptions = BackoffDefaults) {
     maxDelay,
     multiplier,
     debug = false,
-    onFailedAttempt,
-    maxRetries = Infinity,
-    retryBudgetPercent = 0.1
+    onFailedAttempt
   } = backoffOptions;
-  let totalRequests = 0;
-  let totalRetries = 0;
-  let budgetResetTime = Date.now() + 6e4;
-  function checkRetryBudget(percent) {
-    const now = Date.now();
-    if (now > budgetResetTime) {
-      totalRequests = 0;
-      totalRetries = 0;
-      budgetResetTime = now + 6e4;
-    }
-    totalRequests++;
-    if (totalRequests < 10) return true;
-    const currentRetryRate = totalRetries / totalRequests;
-    const hasCapacity = currentRetryRate < percent;
-    if (hasCapacity) {
-      totalRetries++;
-    }
-    return hasCapacity;
-  }
   return async (...args) => {
     var _a;
     const url = args[0];
@@ -362,10 +336,7 @@ function createFetchWithBackoff(fetchClient, backoffOptions = BackoffDefaults) {
     while (true) {
       try {
         const result = await fetchClient(...args);
-        if (result.ok) {
-          delay = initialDelay;
-          return result;
-        }
+        if (result.ok) return result;
         const err = await FetchError.fromResponse(result, url.toString());
         throw err;
       } catch (e) {
@@ -375,51 +346,12 @@ function createFetchWithBackoff(fetchClient, backoffOptions = BackoffDefaults) {
         } else if (e instanceof FetchError && !HTTP_RETRY_STATUS_CODES.includes(e.status) && e.status >= 400 && e.status < 500) {
           throw e;
         } else {
-
-
-            if (debug) {
-              console.log(
-                `Max retries reached (${attempt}/${maxRetries}), giving up`
-              );
-            }
-            throw e;
-          }
-          if (!checkRetryBudget(retryBudgetPercent)) {
-            if (debug) {
-              console.log(
-                `Retry budget exhausted (attempt ${attempt}), backing off`
-              );
-            }
-            await new Promise((resolve) => setTimeout(resolve, maxDelay));
-            continue;
-          }
-          let serverMinimumMs = 0;
-          if (e instanceof FetchError && e.headers) {
-            const retryAfter = e.headers[`retry-after`];
-            if (retryAfter) {
-              const retryAfterSec = Number(retryAfter);
-              if (Number.isFinite(retryAfterSec) && retryAfterSec > 0) {
-                serverMinimumMs = retryAfterSec * 1e3;
-              } else {
-                const retryDate = Date.parse(retryAfter);
-                if (!isNaN(retryDate)) {
-                  const deltaMs = retryDate - Date.now();
-                  serverMinimumMs = Math.max(0, Math.min(deltaMs, 36e5));
-                }
-              }
-            }
-          }
-          const jitter = Math.random() * delay;
-          const clientBackoffMs = Math.min(jitter, maxDelay);
-          const waitMs = Math.max(serverMinimumMs, clientBackoffMs);
+          await new Promise((resolve) => setTimeout(resolve, delay));
+          delay = Math.min(delay * multiplier, maxDelay);
           if (debug) {
-
-            console.log(
-              `Retry attempt #${attempt} after ${waitMs}ms (${source}, serverMin=${serverMinimumMs}ms, clientBackoff=${clientBackoffMs}ms)`
-            );
+            attempt++;
+            console.log(`Retry attempt #${attempt} after ${delay}ms`);
           }
-          await new Promise((resolve) => setTimeout(resolve, waitMs));
-          delay = Math.min(delay * multiplier, maxDelay);
         }
       }
     }
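The block deleted above is the whole 1.1.1 retry-budget mechanism. Restated as a standalone sketch (same logic as the deleted checkRetryBudget; the makeRetryBudget wrapper name is just for illustration): a retry is only allowed while retries account for less than the given fraction of requests seen in the current 60-second window, and while fewer than 10 requests have been counted, retries are always allowed.

    // Sketch of the removed rate-based retry budget.
    function makeRetryBudget(percent: number) {
      let totalRequests = 0
      let totalRetries = 0
      let budgetResetTime = Date.now() + 60_000
      return function canRetry(): boolean {
        const now = Date.now()
        if (now > budgetResetTime) {
          // The budget window resets every 60 seconds.
          totalRequests = 0
          totalRetries = 0
          budgetResetTime = now + 60_000
        }
        totalRequests++
        if (totalRequests < 10) return true // too small a sample to throttle
        const currentRetryRate = totalRetries / totalRequests
        const hasCapacity = currentRetryRate < percent
        if (hasCapacity) totalRetries++
        return hasCapacity
      }
    }

    const canRetry = makeRetryBudget(0.1) // at most ~10% of requests may be retries

What 1.1.2 keeps is the simple loop visible on the + side of the hunk: wait delay ms, then grow it via delay = min(delay * multiplier, maxDelay), with no attempt cap and no budget.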
@@ -784,8 +716,9 @@ function canonicalShapeKey(url) {
   cleanUrl.searchParams.sort();
   return cleanUrl.toString();
 }
-var _error, _fetchClient2, _sseFetchClient, _messageParser, _subscribers, _started, _state, _lastOffset, _liveCacheBuster, _lastSyncedAt, _isUpToDate, _isMidStream, _connected, _shapeHandle, _mode, _schema, _onError, _requestAbortController, _isRefreshing, _tickPromise, _tickPromiseResolver, _tickPromiseRejecter, _messageChain, _snapshotTracker, _activeSnapshotRequests, _midStreamPromise, _midStreamPromiseResolver, _ShapeStream_instances, start_fn, requestShape_fn, constructUrl_fn, createAbortListener_fn, onInitialResponse_fn, onMessages_fn, fetchShape_fn, requestShapeLongPoll_fn, requestShapeSSE_fn, pause_fn, resume_fn, nextTick_fn, waitForStreamEnd_fn, publish_fn, sendErrorToSubscribers_fn, subscribeToVisibilityChanges_fn, reset_fn, fetchSnapshot_fn;
+var _error, _fetchClient2, _sseFetchClient, _messageParser, _subscribers, _started, _state, _lastOffset, _liveCacheBuster, _lastSyncedAt, _isUpToDate, _isMidStream, _connected, _shapeHandle, _mode, _schema, _onError, _requestAbortController, _isRefreshing, _tickPromise, _tickPromiseResolver, _tickPromiseRejecter, _messageChain, _snapshotTracker, _activeSnapshotRequests, _midStreamPromise, _midStreamPromiseResolver, _lastSseConnectionStartTime, _minSseConnectionDuration, _consecutiveShortSseConnections, _maxShortSseConnections, _sseFallbackToLongPolling, _sseBackoffBaseDelay, _sseBackoffMaxDelay, _ShapeStream_instances, start_fn, requestShape_fn, constructUrl_fn, createAbortListener_fn, onInitialResponse_fn, onMessages_fn, fetchShape_fn, requestShapeLongPoll_fn, requestShapeSSE_fn, pause_fn, resume_fn, nextTick_fn, waitForStreamEnd_fn, publish_fn, sendErrorToSubscribers_fn, subscribeToVisibilityChanges_fn, reset_fn, fetchSnapshot_fn;
 var ShapeStream = class {
+  // Maximum delay cap (ms)
   constructor(options) {
     __privateAdd(this, _ShapeStream_instances);
     __privateAdd(this, _error, null);
@@ -819,6 +752,16 @@ var ShapeStream = class {
     // counter for concurrent snapshot requests
     __privateAdd(this, _midStreamPromise);
     __privateAdd(this, _midStreamPromiseResolver);
+    __privateAdd(this, _lastSseConnectionStartTime);
+    __privateAdd(this, _minSseConnectionDuration, 1e3);
+    // Minimum expected SSE connection duration (1 second)
+    __privateAdd(this, _consecutiveShortSseConnections, 0);
+    __privateAdd(this, _maxShortSseConnections, 3);
+    // Fall back to long polling after this many short connections
+    __privateAdd(this, _sseFallbackToLongPolling, false);
+    __privateAdd(this, _sseBackoffBaseDelay, 100);
+    // Base delay for exponential backoff (ms)
+    __privateAdd(this, _sseBackoffMaxDelay, 5e3);
     var _a, _b, _c, _d;
     this.options = __spreadValues({ subscribe: true }, options);
     validateOptions(this.options);
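The new private fields above parameterise an SSE health check: a connection that closes in under 1 second without being aborted locally counts as "short", and after 3 consecutive short connections the stream stops using SSE. A hedged usage sketch, assuming the public option is named liveSse as the bundle reads from this.options (URL and table are placeholders):

    import { ShapeStream } from '@electric-sql/client'

    const stream = new ShapeStream({
      url: 'http://localhost:3000/v1/shape',
      params: { table: 'items' },
      liveSse: true, // switch to Server-Sent Events once the shape is up to date
    })

    stream.subscribe((messages) => {
      // If SSE connections keep closing within ~1s, the client warns once and
      // silently falls back to long polling for this stream.
      console.log(messages)
    })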
@@ -990,9 +933,16 @@ _snapshotTracker = new WeakMap();
 _activeSnapshotRequests = new WeakMap();
 _midStreamPromise = new WeakMap();
 _midStreamPromiseResolver = new WeakMap();
+_lastSseConnectionStartTime = new WeakMap();
+_minSseConnectionDuration = new WeakMap();
+_consecutiveShortSseConnections = new WeakMap();
+_maxShortSseConnections = new WeakMap();
+_sseFallbackToLongPolling = new WeakMap();
+_sseBackoffBaseDelay = new WeakMap();
+_sseBackoffMaxDelay = new WeakMap();
 _ShapeStream_instances = new WeakSet();
 start_fn = async function() {
-  var _a;
+  var _a, _b, _c, _d, _e;
   __privateSet(this, _started, true);
   try {
     await __privateMethod(this, _ShapeStream_instances, requestShape_fn).call(this);
@@ -1000,24 +950,34 @@ start_fn = async function() {
     __privateSet(this, _error, err);
     if (__privateGet(this, _onError)) {
       const retryOpts = await __privateGet(this, _onError).call(this, err);
-      if (typeof retryOpts === `object`) {
-
-
-        this.options.params = retryOpts.params;
+      if (retryOpts && typeof retryOpts === `object`) {
+        if (retryOpts.params) {
+          this.options.params = __spreadValues(__spreadValues({}, (_a = this.options.params) != null ? _a : {}), retryOpts.params);
         }
-        if (
-        this.options.headers = retryOpts.headers;
+        if (retryOpts.headers) {
+          this.options.headers = __spreadValues(__spreadValues({}, (_b = this.options.headers) != null ? _b : {}), retryOpts.headers);
         }
+        __privateSet(this, _error, null);
         __privateSet(this, _started, false);
-        __privateMethod(this, _ShapeStream_instances, start_fn).call(this);
+        await __privateMethod(this, _ShapeStream_instances, start_fn).call(this);
+        return;
+      }
+      if (err instanceof Error) {
+        __privateMethod(this, _ShapeStream_instances, sendErrorToSubscribers_fn).call(this, err);
       }
+      __privateSet(this, _connected, false);
+      (_c = __privateGet(this, _tickPromiseRejecter)) == null ? void 0 : _c.call(this);
       return;
     }
-
-
+    if (err instanceof Error) {
+      __privateMethod(this, _ShapeStream_instances, sendErrorToSubscribers_fn).call(this, err);
+    }
     __privateSet(this, _connected, false);
-    (_a = __privateGet(this, _tickPromiseRejecter)) == null ? void 0 : _a.call(this);
+    (_d = __privateGet(this, _tickPromiseRejecter)) == null ? void 0 : _d.call(this);
+    throw err;
   }
+  __privateSet(this, _connected, false);
+  (_e = __privateGet(this, _tickPromiseRejecter)) == null ? void 0 : _e.call(this);
 };
 requestShape_fn = async function() {
   var _a, _b;
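This hunk also changes what an onError handler's return value does: returned params and headers are now merged into the existing options (and the stored error is cleared) before the stream restarts, instead of replacing the options outright. A hedged sketch of a handler that refreshes an auth header; the refreshToken helper, URL and table are illustrative:

    import { ShapeStream } from '@electric-sql/client'

    // Hypothetical helper standing in for your auth flow.
    async function refreshToken(): Promise<string> {
      return 'fresh-token'
    }

    const stream = new ShapeStream({
      url: 'http://localhost:3000/v1/shape',
      params: { table: 'items' },
      headers: { 'X-Client-Version': '1.1.2' },
      onError: async (error) => {
        // Returning an object restarts the stream. In 1.1.2 the existing
        // X-Client-Version header is kept and only Authorization is updated;
        // in 1.1.1 the headers object would have been replaced wholesale.
        return { headers: { Authorization: `Bearer ${await refreshToken()}` } }
      },
    })

    stream.subscribe((messages) => console.log(messages))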
@@ -1062,7 +1022,6 @@ requestShape_fn = async function() {
       await __privateMethod(this, _ShapeStream_instances, publish_fn).call(this, Array.isArray(e.json) ? e.json : [e.json]);
       return __privateMethod(this, _ShapeStream_instances, requestShape_fn).call(this);
     } else {
-      __privateMethod(this, _ShapeStream_instances, sendErrorToSubscribers_fn).call(this, e);
       throw e;
     }
   } finally {
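With the call deleted above, requestShape_fn no longer notifies subscribers before rethrowing; the notification now happens in start_fn, as shown earlier in this diff. Sketch of where that callback is registered, i.e. the second argument to subscribe (URL and table are placeholders):

    import { ShapeStream } from '@electric-sql/client'

    const stream = new ShapeStream({
      url: 'http://localhost:3000/v1/shape',
      params: { table: 'items' },
    })

    stream.subscribe(
      (messages) => console.log('messages', messages),
      (error) => console.error('stream error', error) // reached via sendErrorToSubscribers_fn
    )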
@@ -1204,7 +1163,7 @@ onMessages_fn = async function(batch, isSseMessage = false) {
 fetchShape_fn = async function(opts) {
   var _a;
   const useSse = (_a = this.options.liveSse) != null ? _a : this.options.experimentalLiveSse;
-  if (__privateGet(this, _isUpToDate) && useSse && !__privateGet(this, _isRefreshing) && !opts.resumingFromPause) {
+  if (__privateGet(this, _isUpToDate) && useSse && !__privateGet(this, _isRefreshing) && !opts.resumingFromPause && !__privateGet(this, _sseFallbackToLongPolling)) {
     opts.fetchUrl.searchParams.set(EXPERIMENTAL_LIVE_SSE_QUERY_PARAM, `true`);
     opts.fetchUrl.searchParams.set(LIVE_SSE_QUERY_PARAM, `true`);
     return __privateMethod(this, _ShapeStream_instances, requestShapeSSE_fn).call(this, opts);
@@ -1228,6 +1187,7 @@ requestShapeLongPoll_fn = async function(opts) {
 requestShapeSSE_fn = async function(opts) {
   const { fetchUrl, requestAbortController, headers } = opts;
   const fetch2 = __privateGet(this, _sseFetchClient);
+  __privateSet(this, _lastSseConnectionStartTime, Date.now());
   try {
     let buffer = [];
     await fetchEventSource(fetchUrl.toString(), {
@@ -1261,6 +1221,27 @@ requestShapeSSE_fn = async function(opts) {
       throw new FetchBackoffAbortError();
     }
     throw error;
+  } finally {
+    const connectionDuration = Date.now() - __privateGet(this, _lastSseConnectionStartTime);
+    const wasAborted = requestAbortController.signal.aborted;
+    if (connectionDuration < __privateGet(this, _minSseConnectionDuration) && !wasAborted) {
+      __privateWrapper(this, _consecutiveShortSseConnections)._++;
+      if (__privateGet(this, _consecutiveShortSseConnections) >= __privateGet(this, _maxShortSseConnections)) {
+        __privateSet(this, _sseFallbackToLongPolling, true);
+        console.warn(
+          `[Electric] SSE connections are closing immediately (possibly due to proxy buffering or misconfiguration). Falling back to long polling. Your proxy must support streaming SSE responses (not buffer the complete response). Configuration: Nginx add 'X-Accel-Buffering: no', Caddy add 'flush_interval -1' to reverse_proxy. Note: Do NOT disable caching entirely - Electric uses cache headers to enable request collapsing for efficiency.`
+        );
+      } else {
+        const maxDelay = Math.min(
+          __privateGet(this, _sseBackoffMaxDelay),
+          __privateGet(this, _sseBackoffBaseDelay) * Math.pow(2, __privateGet(this, _consecutiveShortSseConnections))
+        );
+        const delayMs = Math.floor(Math.random() * maxDelay);
+        await new Promise((resolve) => setTimeout(resolve, delayMs));
+      }
+    } else if (connectionDuration >= __privateGet(this, _minSseConnectionDuration)) {
+      __privateSet(this, _consecutiveShortSseConnections, 0);
+    }
   }
 };
 pause_fn = function() {
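The finally block added above is the heart of the fallback heuristic. Restated as a pure function with the constructor defaults from earlier in the diff (base 100ms, cap 5000ms; the function name is illustrative): after the nth consecutive short-lived connection the client sleeps a fully jittered, exponentially growing delay, and after the third it gives up on SSE for this stream and long-polls instead.

    // Sketch of the delay applied after the nth consecutive short SSE connection.
    function shortSseReconnectDelay(consecutiveShortConnections: number): number {
      const baseDelay = 100   // _sseBackoffBaseDelay
      const maxDelay = 5_000  // _sseBackoffMaxDelay
      const cap = Math.min(maxDelay, baseDelay * Math.pow(2, consecutiveShortConnections))
      return Math.floor(Math.random() * cap) // full jitter: uniform in [0, cap)
    }

    // e.g. 1st short connection: 0-200ms, 2nd: 0-400ms; after the 3rd the
    // stream sets _sseFallbackToLongPolling and stops retrying SSE.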
@@ -1352,6 +1333,8 @@ reset_fn = function(handle) {
   __privateSet(this, _connected, false);
   __privateSet(this, _schema, void 0);
   __privateSet(this, _activeSnapshotRequests, 0);
+  __privateSet(this, _consecutiveShortSseConnections, 0);
+  __privateSet(this, _sseFallbackToLongPolling, false);
 };
 fetchSnapshot_fn = async function(url, headers) {
   const response = await __privateGet(this, _fetchClient2).call(this, url.toString(), { headers });