@powersync/web 0.0.0-dev-20260414110516 → 0.0.0-dev-20260504100448
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/2075a31bb151adbb9767.wasm +0 -0
- package/dist/3322bc84de986b63c2cd.wasm +0 -0
- package/dist/8e97452e297be23b5e50.wasm +0 -0
- package/dist/fbc178b70d530e8ce02b.wasm +0 -0
- package/dist/index.umd.js +4289 -4786
- package/dist/index.umd.js.map +1 -1
- package/dist/worker/SharedSyncImplementation.umd.js +37 -798
- package/dist/worker/SharedSyncImplementation.umd.js.map +1 -1
- package/dist/worker/WASQLiteDB.umd.js +99 -737
- package/dist/worker/WASQLiteDB.umd.js.map +1 -1
- package/dist/worker/node_modules_pnpm_journeyapps_wa-sqlite_1_7_0_node_modules_journeyapps_wa-sqlite_dist_mc-wa-s-9af0a7.umd.js +31 -0
- package/dist/worker/node_modules_pnpm_journeyapps_wa-sqlite_1_7_0_node_modules_journeyapps_wa-sqlite_dist_mc-wa-s-9af0a7.umd.js.map +1 -0
- package/dist/worker/node_modules_pnpm_journeyapps_wa-sqlite_1_7_0_node_modules_journeyapps_wa-sqlite_dist_mc-wa-s-bbf5a9.umd.js +31 -0
- package/dist/worker/node_modules_pnpm_journeyapps_wa-sqlite_1_7_0_node_modules_journeyapps_wa-sqlite_dist_mc-wa-s-bbf5a9.umd.js.map +1 -0
- package/dist/worker/node_modules_pnpm_journeyapps_wa-sqlite_1_7_0_node_modules_journeyapps_wa-sqlite_dist_wa-sqli-c26e0f.umd.js +31 -0
- package/dist/worker/{node_modules_pnpm_journeyapps_wa-sqlite_1_5_0_node_modules_journeyapps_wa-sqlite_dist_wa-sqli-cc5fcc.umd.js.map → node_modules_pnpm_journeyapps_wa-sqlite_1_7_0_node_modules_journeyapps_wa-sqlite_dist_wa-sqli-c26e0f.umd.js.map} +1 -1
- package/dist/worker/node_modules_pnpm_journeyapps_wa-sqlite_1_7_0_node_modules_journeyapps_wa-sqlite_dist_wa-sqlite_mjs.umd.js +31 -0
- package/dist/worker/{node_modules_pnpm_journeyapps_wa-sqlite_1_5_0_node_modules_journeyapps_wa-sqlite_dist_wa-sqlite_mjs.umd.js.map → node_modules_pnpm_journeyapps_wa-sqlite_1_7_0_node_modules_journeyapps_wa-sqlite_dist_wa-sqlite_mjs.umd.js.map} +1 -1
- package/dist/worker/node_modules_pnpm_journeyapps_wa-sqlite_1_7_0_node_modules_journeyapps_wa-sqlite_src_examples-2fb422.umd.js +3562 -0
- package/dist/worker/node_modules_pnpm_journeyapps_wa-sqlite_1_7_0_node_modules_journeyapps_wa-sqlite_src_examples-2fb422.umd.js.map +1 -0
- package/dist/worker/{node_modules_pnpm_journeyapps_wa-sqlite_1_5_0_node_modules_journeyapps_wa-sqlite_src_examples-0df390.umd.js → node_modules_pnpm_journeyapps_wa-sqlite_1_7_0_node_modules_journeyapps_wa-sqlite_src_examples-96fb23.umd.js} +16 -16
- package/dist/worker/{node_modules_pnpm_journeyapps_wa-sqlite_1_5_0_node_modules_journeyapps_wa-sqlite_src_examples-0df390.umd.js.map → node_modules_pnpm_journeyapps_wa-sqlite_1_7_0_node_modules_journeyapps_wa-sqlite_src_examples-96fb23.umd.js.map} +1 -1
- package/dist/worker/{node_modules_pnpm_journeyapps_wa-sqlite_1_5_0_node_modules_journeyapps_wa-sqlite_src_examples-151024.umd.js → node_modules_pnpm_journeyapps_wa-sqlite_1_7_0_node_modules_journeyapps_wa-sqlite_src_examples-c89911.umd.js} +12 -12
- package/dist/worker/{node_modules_pnpm_journeyapps_wa-sqlite_1_5_0_node_modules_journeyapps_wa-sqlite_src_examples-151024.umd.js.map → node_modules_pnpm_journeyapps_wa-sqlite_1_7_0_node_modules_journeyapps_wa-sqlite_src_examples-c89911.umd.js.map} +1 -1
- package/dist/worker/{node_modules_pnpm_journeyapps_wa-sqlite_1_5_0_node_modules_journeyapps_wa-sqlite_src_examples-c01ef0.umd.js → node_modules_pnpm_journeyapps_wa-sqlite_1_7_0_node_modules_journeyapps_wa-sqlite_src_examples-ec4eb1.umd.js} +14 -14
- package/dist/worker/{node_modules_pnpm_journeyapps_wa-sqlite_1_5_0_node_modules_journeyapps_wa-sqlite_src_examples-c01ef0.umd.js.map → node_modules_pnpm_journeyapps_wa-sqlite_1_7_0_node_modules_journeyapps_wa-sqlite_src_examples-ec4eb1.umd.js.map} +1 -1
- package/lib/package.json +4 -7
- package/lib/src/db/PowerSyncDatabase.d.ts +1 -1
- package/lib/src/db/PowerSyncDatabase.js +0 -8
- package/lib/src/db/adapters/AsyncWebAdapter.d.ts +13 -3
- package/lib/src/db/adapters/AsyncWebAdapter.js +115 -21
- package/lib/src/db/adapters/wa-sqlite/DatabaseServer.js +1 -2
- package/lib/src/db/adapters/wa-sqlite/RawSqliteConnection.js +1 -1
- package/lib/src/db/adapters/wa-sqlite/WASQLiteOpenFactory.d.ts +16 -2
- package/lib/src/db/adapters/wa-sqlite/WASQLiteOpenFactory.js +60 -38
- package/lib/src/db/adapters/wa-sqlite/vfs.d.ts +7 -18
- package/lib/src/db/adapters/wa-sqlite/vfs.js +34 -49
- package/lib/src/db/sync/SSRWebStreamingSyncImplementation.d.ts +4 -0
- package/lib/src/db/sync/SSRWebStreamingSyncImplementation.js +4 -0
- package/lib/src/db/sync/SharedWebStreamingSyncImplementation.d.ts +0 -1
- package/lib/src/db/sync/SharedWebStreamingSyncImplementation.js +0 -3
- package/lib/src/db/sync/WebRemote.d.ts +1 -3
- package/lib/src/db/sync/WebRemote.js +0 -12
- package/lib/src/worker/db/MultiDatabaseServer.js +4 -1
- package/lib/src/worker/db/open-worker-database.js +2 -2
- package/lib/src/worker/sync/SharedSyncImplementation.d.ts +0 -2
- package/lib/src/worker/sync/SharedSyncImplementation.js +4 -16
- package/lib/src/worker/sync/WorkerClient.d.ts +0 -1
- package/lib/src/worker/sync/WorkerClient.js +0 -3
- package/lib/tsconfig.tsbuildinfo +1 -1
- package/package.json +5 -8
- package/src/db/PowerSyncDatabase.ts +1 -9
- package/src/db/adapters/AsyncWebAdapter.ts +138 -22
- package/src/db/adapters/wa-sqlite/DatabaseServer.ts +4 -2
- package/src/db/adapters/wa-sqlite/RawSqliteConnection.ts +4 -1
- package/src/db/adapters/wa-sqlite/WASQLiteOpenFactory.ts +89 -44
- package/src/db/adapters/wa-sqlite/vfs.ts +33 -49
- package/src/db/sync/SSRWebStreamingSyncImplementation.ts +5 -0
- package/src/db/sync/SharedWebStreamingSyncImplementation.ts +0 -4
- package/src/db/sync/WebRemote.ts +0 -16
- package/src/worker/db/MultiDatabaseServer.ts +4 -1
- package/src/worker/db/open-worker-database.ts +2 -2
- package/src/worker/sync/SharedSyncImplementation.ts +4 -18
- package/src/worker/sync/WorkerClient.ts +0 -4
- package/dist/26d61ca9f5694d064635.wasm +0 -0
- package/dist/b4c6283dc473b6b3fd24.wasm +0 -0
- package/dist/c78985091a0b22aaef03.wasm +0 -0
- package/dist/ca59e199e1138b553fad.wasm +0 -0
- package/dist/worker/node_modules_pnpm_bson_6_10_4_node_modules_bson_lib_bson_mjs.umd.js +0 -4646
- package/dist/worker/node_modules_pnpm_bson_6_10_4_node_modules_bson_lib_bson_mjs.umd.js.map +0 -1
- package/dist/worker/node_modules_pnpm_journeyapps_wa-sqlite_1_5_0_node_modules_journeyapps_wa-sqlite_dist_mc-wa-s-b9c070.umd.js +0 -31
- package/dist/worker/node_modules_pnpm_journeyapps_wa-sqlite_1_5_0_node_modules_journeyapps_wa-sqlite_dist_mc-wa-s-b9c070.umd.js.map +0 -1
- package/dist/worker/node_modules_pnpm_journeyapps_wa-sqlite_1_5_0_node_modules_journeyapps_wa-sqlite_dist_mc-wa-s-c99c07.umd.js +0 -31
- package/dist/worker/node_modules_pnpm_journeyapps_wa-sqlite_1_5_0_node_modules_journeyapps_wa-sqlite_dist_mc-wa-s-c99c07.umd.js.map +0 -1
- package/dist/worker/node_modules_pnpm_journeyapps_wa-sqlite_1_5_0_node_modules_journeyapps_wa-sqlite_dist_wa-sqli-cc5fcc.umd.js +0 -31
- package/dist/worker/node_modules_pnpm_journeyapps_wa-sqlite_1_5_0_node_modules_journeyapps_wa-sqlite_dist_wa-sqlite_mjs.umd.js +0 -31
@@ -652,7 +652,6 @@ class WebFetchProvider extends _powersync_common__WEBPACK_IMPORTED_MODULE_0__.Fe
 class WebRemote extends _powersync_common__WEBPACK_IMPORTED_MODULE_0__.AbstractRemote {
 connector;
 logger;
-_bson;
 constructor(connector, logger = _powersync_common__WEBPACK_IMPORTED_MODULE_0__.DEFAULT_REMOTE_LOGGER, options) {
 super(connector, logger, {
 ...(options ?? {}),
@@ -671,17 +670,6 @@ class WebRemote extends _powersync_common__WEBPACK_IMPORTED_MODULE_0__.AbstractR
 }
 return ua.join(' ');
 }
-async getBSON() {
-if (this._bson) {
-return this._bson;
-}
-/**
-* Dynamic import to be used only when needed.
-*/
-const { BSON } = await __webpack_require__.e(/*! import() */ "node_modules_pnpm_bson_6_10_4_node_modules_bson_lib_bson_mjs").then(__webpack_require__.bind(__webpack_require__, /*! bson */ "../../node_modules/.pnpm/bson@6.10.4/node_modules/bson/lib/bson.mjs"));
-this._bson = BSON;
-return this._bson;
-}
 }
 
 
@@ -1100,9 +1088,6 @@ class SharedSyncImplementation extends _powersync_common__WEBPACK_IMPORTED_MODUL
 logger: this.logger
 });
 }
-get lastSyncedAt() {
-return this.connectionManager.syncStreamImplementation?.lastSyncedAt;
-}
 get isConnected() {
 return this.connectionManager.syncStreamImplementation?.isConnected ?? false;
 }
@@ -1268,11 +1253,6 @@ class SharedSyncImplementation extends _powersync_common__WEBPACK_IMPORTED_MODUL
 sync.triggerCrudUpload();
 });
 }
-async hasCompletedSync() {
-return this.withSyncImplementation(async (sync) => {
-return sync.hasCompletedSync();
-});
-}
 async getWriteCheckpoint() {
 return this.withSyncImplementation(async (sync) => {
 return sync.getWriteCheckpoint();
@@ -1490,14 +1470,10 @@ class SharedSyncImplementation extends _powersync_common__WEBPACK_IMPORTED_MODUL
 // We can poke the crud in case we missed any updates.
 const impl = sharedSync.connectionManager.syncStreamImplementation;
 impl?.triggerCrudUpload();
-
-
-
-
-* StreamingSyncImplementation to reconnect. It would be nicer if we could trigger
-* this reconnect earlier.
-* This reconnect is not required for IndexedDB.
-*/
+// The Rust client implementation stores sync state on the connection level. Reopening the database causes a
+// disruption of the connection state and forces us to reconnect. We want to do that as soon as possible to
+// minimize downtime.
+impl?.markConnectionMayHaveChanged();
 }
 }
 async readLock(fn, options) {
@@ -1640,9 +1616,6 @@ class WorkerClient {
 getWriteCheckpoint() {
 return this.sync.getWriteCheckpoint();
 }
-hasCompletedSync() {
-return this.sync.hasCompletedSync();
-}
 connect(options) {
 return this.sync.connect(options);
 }
@@ -1726,9 +1699,6 @@ __webpack_require__.r(__webpack_exports__);
 /* harmony export */ MEMORY_TRIGGER_CLAIM_MANAGER: () => (/* binding */ MEMORY_TRIGGER_CLAIM_MANAGER),
 /* harmony export */ Mutex: () => (/* binding */ Mutex),
 /* harmony export */ OnChangeQueryProcessor: () => (/* binding */ OnChangeQueryProcessor),
-/* harmony export */ OpType: () => (/* binding */ OpType),
-/* harmony export */ OpTypeEnum: () => (/* binding */ OpTypeEnum),
-/* harmony export */ OplogEntry: () => (/* binding */ OplogEntry),
 /* harmony export */ PSInternalTable: () => (/* binding */ PSInternalTable),
 /* harmony export */ PowerSyncControlCommand: () => (/* binding */ PowerSyncControlCommand),
 /* harmony export */ RowUpdateType: () => (/* binding */ RowUpdateType),
@@ -1736,8 +1706,6 @@ __webpack_require__.r(__webpack_exports__);
 /* harmony export */ Semaphore: () => (/* binding */ Semaphore),
 /* harmony export */ SqliteBucketStorage: () => (/* binding */ SqliteBucketStorage),
 /* harmony export */ SyncClientImplementation: () => (/* binding */ SyncClientImplementation),
-/* harmony export */ SyncDataBatch: () => (/* binding */ SyncDataBatch),
-/* harmony export */ SyncDataBucket: () => (/* binding */ SyncDataBucket),
 /* harmony export */ SyncProgress: () => (/* binding */ SyncProgress),
 /* harmony export */ SyncStatus: () => (/* binding */ SyncStatus),
 /* harmony export */ SyncStreamConnectionMethod: () => (/* binding */ SyncStreamConnectionMethod),
@@ -1755,18 +1723,10 @@ __webpack_require__.r(__webpack_exports__);
 /* harmony export */ createLogger: () => (/* binding */ createLogger),
 /* harmony export */ extractTableUpdates: () => (/* binding */ extractTableUpdates),
 /* harmony export */ isBatchedUpdateNotification: () => (/* binding */ isBatchedUpdateNotification),
-/* harmony export */ isContinueCheckpointRequest: () => (/* binding */ isContinueCheckpointRequest),
 /* harmony export */ isDBAdapter: () => (/* binding */ isDBAdapter),
 /* harmony export */ isPowerSyncDatabaseOptionsWithSettings: () => (/* binding */ isPowerSyncDatabaseOptionsWithSettings),
 /* harmony export */ isSQLOpenFactory: () => (/* binding */ isSQLOpenFactory),
 /* harmony export */ isSQLOpenOptions: () => (/* binding */ isSQLOpenOptions),
-/* harmony export */ isStreamingKeepalive: () => (/* binding */ isStreamingKeepalive),
-/* harmony export */ isStreamingSyncCheckpoint: () => (/* binding */ isStreamingSyncCheckpoint),
-/* harmony export */ isStreamingSyncCheckpointComplete: () => (/* binding */ isStreamingSyncCheckpointComplete),
-/* harmony export */ isStreamingSyncCheckpointDiff: () => (/* binding */ isStreamingSyncCheckpointDiff),
-/* harmony export */ isStreamingSyncCheckpointPartiallyComplete: () => (/* binding */ isStreamingSyncCheckpointPartiallyComplete),
-/* harmony export */ isStreamingSyncData: () => (/* binding */ isStreamingSyncData),
-/* harmony export */ isSyncNewCheckpointRequest: () => (/* binding */ isSyncNewCheckpointRequest),
 /* harmony export */ parseQuery: () => (/* binding */ parseQuery),
 /* harmony export */ runOnSchemaChange: () => (/* binding */ runOnSchemaChange),
 /* harmony export */ sanitizeSQL: () => (/* binding */ sanitizeSQL),
@@ -3904,16 +3864,12 @@ class SyncStatus {
 *
 * This returns null when the database is currently being opened and we don't have reliable information about all
 * included streams yet.
-*
-* @experimental Sync streams are currently in alpha.
 */
 get syncStreams() {
 return this.options.dataFlow?.internalStreamSubscriptions?.map((core) => new SyncStreamStatusView(this, core));
 }
 /**
 * If the `stream` appears in {@link syncStreams}, returns the current status for that stream.
-*
-* @experimental Sync streams are currently in alpha.
 */
 forStream(stream) {
 const asJson = JSON.stringify(stream.parameters);
@@ -5266,103 +5222,6 @@ class AbortOperation extends Error {
 }
 }
 
-var OpTypeEnum;
-(function (OpTypeEnum) {
-OpTypeEnum[OpTypeEnum["CLEAR"] = 1] = "CLEAR";
-OpTypeEnum[OpTypeEnum["MOVE"] = 2] = "MOVE";
-OpTypeEnum[OpTypeEnum["PUT"] = 3] = "PUT";
-OpTypeEnum[OpTypeEnum["REMOVE"] = 4] = "REMOVE";
-})(OpTypeEnum || (OpTypeEnum = {}));
-/**
-* Used internally for sync buckets.
-*/
-class OpType {
-value;
-static fromJSON(jsonValue) {
-return new OpType(OpTypeEnum[jsonValue]);
-}
-constructor(value) {
-this.value = value;
-}
-toJSON() {
-return Object.entries(OpTypeEnum).find(([, value]) => value === this.value)[0];
-}
-}
-
-class OplogEntry {
-op_id;
-op;
-checksum;
-subkey;
-object_type;
-object_id;
-data;
-static fromRow(row) {
-return new OplogEntry(row.op_id, OpType.fromJSON(row.op), row.checksum, row.subkey, row.object_type, row.object_id, row.data);
-}
-constructor(op_id, op, checksum, subkey, object_type, object_id, data) {
-this.op_id = op_id;
-this.op = op;
-this.checksum = checksum;
-this.subkey = subkey;
-this.object_type = object_type;
-this.object_id = object_id;
-this.data = data;
-}
-toJSON(fixedKeyEncoding = false) {
-return {
-op_id: this.op_id,
-op: this.op.toJSON(),
-object_type: this.object_type,
-object_id: this.object_id,
-checksum: this.checksum,
-data: this.data,
-// Older versions of the JS SDK used to always JSON.stringify here. That has always been wrong,
-// but we need to migrate gradually to not break existing databases.
-subkey: fixedKeyEncoding ? this.subkey : JSON.stringify(this.subkey)
-};
-}
-}
-
-class SyncDataBucket {
-bucket;
-data;
-has_more;
-after;
-next_after;
-static fromRow(row) {
-return new SyncDataBucket(row.bucket, row.data.map((entry) => OplogEntry.fromRow(entry)), row.has_more ?? false, row.after, row.next_after);
-}
-constructor(bucket, data,
-/**
-* True if the response does not contain all the data for this bucket, and another request must be made.
-*/
-has_more,
-/**
-* The `after` specified in the request.
-*/
-after,
-/**
-* Use this for the next request.
-*/
-next_after) {
-this.bucket = bucket;
-this.data = data;
-this.has_more = has_more;
-this.after = after;
-this.next_after = next_after;
-}
-toJSON(fixedKeyEncoding = false) {
-return {
-bucket: this.bucket,
-has_more: this.has_more,
-after: this.after,
-next_after: this.next_after,
-data: this.data.map((entry) => entry.toJSON(fixedKeyEncoding))
-};
-}
-}
-
 var buffer = {};
 
 var base64Js = {};
@@ -12518,7 +12377,7 @@ function requireDist () {
 
 var distExports = requireDist();
 
-var version = "1.
+var version = "1.52.0";
 var PACKAGE = {
 version: version};
 
@@ -13075,22 +12934,12 @@ class AbstractRemote {
 * Returns a data stream of sync line data, fetched via RSocket-over-WebSocket.
 *
 * The only mechanism to abort the returned stream is to use the abort signal in {@link SocketSyncStreamOptions}.
-*
-* @param bson A BSON encoder and decoder. When set, the data stream will be requested with a BSON payload
-* (required for compatibility with older sync services).
 */
-async socketStreamRaw(options
+async socketStreamRaw(options) {
 const { path, fetchStrategy = FetchStrategy.Buffered } = options;
-const mimeType =
+const mimeType = 'application/json';
 function toBuffer(js) {
-
-if (bson != null) {
-contents = bson.serialize(js);
-}
-else {
-contents = JSON.stringify(js);
-}
-return bufferExports.Buffer.from(contents);
+return bufferExports.Buffer.from(JSON.stringify(js));
 }
 const syncQueueRequestSize = fetchStrategy == FetchStrategy.Buffered ? 10 : 1;
 const request = await this.buildRequest(path);
@@ -13392,32 +13241,6 @@ function coreStatusToJs(status) {
 };
 }
 
-function isStreamingSyncData(line) {
-return line.data != null;
-}
-function isStreamingKeepalive(line) {
-return line.token_expires_in != null;
-}
-function isStreamingSyncCheckpoint(line) {
-return line.checkpoint != null;
-}
-function isStreamingSyncCheckpointComplete(line) {
-return line.checkpoint_complete != null;
-}
-function isStreamingSyncCheckpointPartiallyComplete(line) {
-return line.partial_checkpoint_complete != null;
-}
-function isStreamingSyncCheckpointDiff(line) {
-return line.checkpoint_diff != null;
-}
-function isContinueCheckpointRequest(request) {
-return (Array.isArray(request.buckets) &&
-typeof request.checkpoint_token == 'string');
-}
-function isSyncNewCheckpointRequest(request) {
-return typeof request.request_checkpoint == 'object';
-}
-
 var LockType;
 (function (LockType) {
 LockType["CRUD"] = "crud";
@@ -13430,35 +13253,21 @@ var SyncStreamConnectionMethod;
 })(SyncStreamConnectionMethod || (SyncStreamConnectionMethod = {}));
 var SyncClientImplementation;
 (function (SyncClientImplementation) {
-/**
-* Decodes and handles sync lines received from the sync service in JavaScript.
-*
-* This is the default option.
-*
-* @deprecated We recommend the {@link RUST} client implementation for all apps. If you have issues with
-* the Rust client, please file an issue or reach out to us. The JavaScript client will be removed in a future
-* version of the PowerSync SDK.
-*/
-SyncClientImplementation["JAVASCRIPT"] = "js";
 /**
 * This implementation offloads the sync line decoding and handling into the PowerSync
 * core extension.
 *
-* This
-* recommended client implementation for all apps.
+* This is the only option, as an older JavaScript client implementation has been removed from the SDK.
 *
 * ## Compatibility warning
 *
 * The Rust sync client stores sync data in a format that is slightly different than the one used
-* by the old
-*
-* Further, the {@link JAVASCRIPT} client in recent versions of the PowerSync JS SDK (starting from
-* the version introducing {@link RUST} as an option) also supports the new format, so you can switch
-* back to {@link JAVASCRIPT} later.
+* by the old JavaScript client. When adopting the {@link RUST} client on existing databases, the PowerSync SDK will
+* migrate the format automatically.
 *
-*
-*
-*
+* SDK versions supporting both the JavaScript and the Rust client support both formats with the JavaScript client
+* implementaiton. However, downgrading to an SDK version that only supports the JavaScript client would not be
+* possible anymore. Problematic SDK versions have been released before 2025-06-09.
 */
 SyncClientImplementation["RUST"] = "rust";
 })(SyncClientImplementation || (SyncClientImplementation = {}));
@@ -13481,13 +13290,7 @@ const DEFAULT_STREAM_CONNECTION_OPTIONS = {
 serializedSchema: undefined,
 includeDefaultStreams: true
 };
-// The priority we assume when we receive checkpoint lines where no priority is set.
-// This is the default priority used by the sync service, but can be set to an arbitrary
-// value since sync services without priorities also won't send partial sync completion
-// messages.
-const FALLBACK_PRIORITY = 3;
 class AbstractStreamingSyncImplementation extends BaseObserver {
-_lastSyncedAt;
 options;
 abortController;
 // In rare cases, mostly for tests, uploads can be triggered without being properly connected.
@@ -13497,6 +13300,7 @@ class AbstractStreamingSyncImplementation extends BaseObserver {
 streamingSyncPromise;
 logger;
 activeStreams;
+connectionMayHaveChanged = false;
 isUploadingCrud = false;
 notifyCompletedUploads;
 handleActiveStreamsChange;
@@ -13576,9 +13380,6 @@ class AbstractStreamingSyncImplementation extends BaseObserver {
 this.crudUpdateListener = undefined;
 this.uploadAbortController?.abort();
 }
-async hasCompletedSync() {
-return this.options.adapter.hasCompletedSync();
-}
 async getWriteCheckpoint() {
 const clientId = await this.options.adapter.getClientId();
 let path = `/write-checkpoint2.json?client_id=${clientId}`;
@@ -13660,7 +13461,7 @@ The next upload iteration will be delayed.`);
 });
 }
 }
-this.uploadAbortController =
+this.uploadAbortController = undefined;
 }
 });
 }
@@ -13776,6 +13577,11 @@ The next upload iteration will be delayed.`);
 shouldDelayRetry = false;
 // A disconnect was requested, we should not delay since there is no explicit retry
 }
+else if (this.connectionMayHaveChanged && ex.message?.indexOf('No iteration is active') >= 0) {
+this.connectionMayHaveChanged = false;
+this.logger.info('Sync error after changed connection, retrying immediately');
+shouldDelayRetry = false;
+}
 else {
 this.logger.error(ex);
 }
@@ -13806,17 +13612,14 @@ The next upload iteration will be delayed.`);
 // Mark as disconnected if here
 this.updateSyncStatus({ connected: false, connecting: false });
 }
-
-
-
-
-
-
-
-
-localDescriptions.set(entry.bucket, null);
-}
-return [req, localDescriptions];
+markConnectionMayHaveChanged() {
+// By setting this field, we'll immediately retry if the next sync event causes an error triggered by us not having
+// an active sync iteration on the connection in use.
+this.connectionMayHaveChanged = true;
+// This triggers a `powersync_control` invocation if a sync iteration is currently active. This is a cheap call to
+// make when no subscriptions have actually changed, we're mainly interested in this immediately throwing if no
+// iteration is active. That allows us to reconnect ASAP, instead of having to wait for the next sync line.
+this.handleActiveStreamsChange?.();
 }
 /**
 * Older versions of the JS SDK used to encode subkeys as JSON in {@link OplogEntry.toJSON}.
@@ -13859,19 +13662,13 @@ The next upload iteration will be delayed.`);
 }
 const clientImplementation = resolvedOptions.clientImplementation;
 this.updateSyncStatus({ clientImplementation });
-
-
-return null;
-}
-else {
-await this.requireKeyFormat(true);
-return await this.rustSyncIteration(signal, resolvedOptions);
-}
+await this.requireKeyFormat(true);
+return await this.rustSyncIteration(signal, resolvedOptions);
 }
 });
 }
 async receiveSyncLines(data) {
-const { options, connection
+const { options, connection } = data;
 const remote = this.options.remote;
 if (connection.connectionMethod == SyncStreamConnectionMethod.HTTP) {
 return await remote.fetchStream(options);
@@ -13880,232 +13677,8 @@ The next upload iteration will be delayed.`);
 return await this.options.remote.socketStreamRaw({
 ...options,
 ...{ fetchStrategy: connection.fetchStrategy }
-}
-}
-}
-async legacyStreamingSyncIteration(signal, resolvedOptions) {
-const rawTables = resolvedOptions.serializedSchema?.raw_tables;
-if (rawTables != null && rawTables.length) {
-this.logger.warn('Raw tables require the Rust-based sync client. The JS client will ignore them.');
-}
-if (this.activeStreams.length) {
-this.logger.error('Sync streams require `clientImplementation: SyncClientImplementation.RUST` when connecting.');
-}
-this.logger.debug('Streaming sync iteration started');
-this.options.adapter.startSession();
-let [req, bucketMap] = await this.collectLocalBucketState();
-let targetCheckpoint = null;
-// A checkpoint that has been validated but not applied (e.g. due to pending local writes)
-let pendingValidatedCheckpoint = null;
-const clientId = await this.options.adapter.getClientId();
-const usingFixedKeyFormat = await this.requireKeyFormat(false);
-this.logger.debug('Requesting stream from server');
-const syncOptions = {
-path: '/sync/stream',
-abortSignal: signal,
-data: {
-buckets: req,
-include_checksum: true,
-raw_data: true,
-parameters: resolvedOptions.params,
-app_metadata: resolvedOptions.appMetadata,
-client_id: clientId
-}
-};
-const bson = await this.options.remote.getBSON();
-const source = await this.receiveSyncLines({
-options: syncOptions,
-connection: resolvedOptions,
-bson
-});
-const stream = injectable(map(source, (line) => {
-if (typeof line == 'string') {
-return JSON.parse(line);
-}
-else {
-return bson.deserialize(line);
-}
-}));
-this.logger.debug('Stream established. Processing events');
-this.notifyCompletedUploads = () => {
-stream.inject({ crud_upload_completed: null });
-};
-while (true) {
-const { value: line, done } = await stream.next();
-if (done) {
-// The stream has closed while waiting
-return;
-}
-if ('crud_upload_completed' in line) {
-if (pendingValidatedCheckpoint != null) {
-const { applied, endIteration } = await this.applyCheckpoint(pendingValidatedCheckpoint);
-if (applied) {
-pendingValidatedCheckpoint = null;
-}
-else if (endIteration) {
-break;
-}
-}
-continue;
-}
-// A connection is active and messages are being received
-if (!this.syncStatus.connected) {
-// There is a connection now
-Promise.resolve().then(() => this.triggerCrudUpload());
-this.updateSyncStatus({
-connected: true
-});
-}
-if (isStreamingSyncCheckpoint(line)) {
-targetCheckpoint = line.checkpoint;
-// New checkpoint - existing validated checkpoint is no longer valid
-pendingValidatedCheckpoint = null;
-const bucketsToDelete = new Set(bucketMap.keys());
-const newBuckets = new Map();
-for (const checksum of line.checkpoint.buckets) {
-newBuckets.set(checksum.bucket, {
-name: checksum.bucket,
-priority: checksum.priority ?? FALLBACK_PRIORITY
-});
-bucketsToDelete.delete(checksum.bucket);
-}
-if (bucketsToDelete.size > 0) {
-this.logger.debug('Removing buckets', [...bucketsToDelete]);
-}
-bucketMap = newBuckets;
-await this.options.adapter.removeBuckets([...bucketsToDelete]);
-await this.options.adapter.setTargetCheckpoint(targetCheckpoint);
-await this.updateSyncStatusForStartingCheckpoint(targetCheckpoint);
-}
-else if (isStreamingSyncCheckpointComplete(line)) {
-const result = await this.applyCheckpoint(targetCheckpoint);
-if (result.endIteration) {
-return;
-}
-else if (!result.applied) {
-// "Could not apply checkpoint due to local data". We need to retry after
-// finishing uploads.
-pendingValidatedCheckpoint = targetCheckpoint;
-}
-else {
-// Nothing to retry later. This would likely already be null from the last
-// checksum or checksum_diff operation, but we make sure.
-pendingValidatedCheckpoint = null;
-}
-}
-else if (isStreamingSyncCheckpointPartiallyComplete(line)) {
-const priority = line.partial_checkpoint_complete.priority;
-this.logger.debug('Partial checkpoint complete', priority);
-const result = await this.options.adapter.syncLocalDatabase(targetCheckpoint, priority);
-if (!result.checkpointValid) {
-// This means checksums failed. Start again with a new checkpoint.
-// TODO: better back-off
-await new Promise((resolve) => setTimeout(resolve, 50));
-return;
-}
-else if (!result.ready) ;
-else {
-// We'll keep on downloading, but can report that this priority is synced now.
-this.logger.debug('partial checkpoint validation succeeded');
-// All states with a higher priority can be deleted since this partial sync includes them.
-const priorityStates = this.syncStatus.priorityStatusEntries.filter((s) => s.priority <= priority);
-priorityStates.push({
-priority,
-lastSyncedAt: new Date(),
-hasSynced: true
-});
-this.updateSyncStatus({
-connected: true,
-priorityStatusEntries: priorityStates
-});
-}
-}
-else if (isStreamingSyncCheckpointDiff(line)) {
-// TODO: It may be faster to just keep track of the diff, instead of the entire checkpoint
-if (targetCheckpoint == null) {
-throw new Error('Checkpoint diff without previous checkpoint');
-}
-// New checkpoint - existing validated checkpoint is no longer valid
-pendingValidatedCheckpoint = null;
-const diff = line.checkpoint_diff;
-const newBuckets = new Map();
-for (const checksum of targetCheckpoint.buckets) {
-newBuckets.set(checksum.bucket, checksum);
-}
-for (const checksum of diff.updated_buckets) {
-newBuckets.set(checksum.bucket, checksum);
-}
-for (const bucket of diff.removed_buckets) {
-newBuckets.delete(bucket);
-}
-const newCheckpoint = {
-last_op_id: diff.last_op_id,
-buckets: [...newBuckets.values()],
-write_checkpoint: diff.write_checkpoint
-};
-targetCheckpoint = newCheckpoint;
-await this.updateSyncStatusForStartingCheckpoint(targetCheckpoint);
-bucketMap = new Map();
-newBuckets.forEach((checksum, name) => bucketMap.set(name, {
-name: checksum.bucket,
-priority: checksum.priority ?? FALLBACK_PRIORITY
-}));
-const bucketsToDelete = diff.removed_buckets;
-if (bucketsToDelete.length > 0) {
-this.logger.debug('Remove buckets', bucketsToDelete);
-}
-await this.options.adapter.removeBuckets(bucketsToDelete);
-await this.options.adapter.setTargetCheckpoint(targetCheckpoint);
-}
-else if (isStreamingSyncData(line)) {
-const { data } = line;
-const previousProgress = this.syncStatus.dataFlowStatus.downloadProgress;
-let updatedProgress = null;
-if (previousProgress) {
-updatedProgress = { ...previousProgress };
-const progressForBucket = updatedProgress[data.bucket];
-if (progressForBucket) {
-updatedProgress[data.bucket] = {
-...progressForBucket,
-since_last: progressForBucket.since_last + data.data.length
-};
-}
-}
-this.updateSyncStatus({
-dataFlow: {
-downloading: true,
-downloadProgress: updatedProgress
-}
-});
-await this.options.adapter.saveSyncData({ buckets: [SyncDataBucket.fromRow(data)] }, usingFixedKeyFormat);
-}
-else if (isStreamingKeepalive(line)) {
-const remaining_seconds = line.token_expires_in;
-if (remaining_seconds == 0) {
-// Connection would be closed automatically right after this
-this.logger.debug('Token expiring; reconnect');
-/**
-* For a rare case where the backend connector does not update the token
-* (uses the same one), this should have some delay.
-*/
-await this.delayRetry();
-return;
-}
-else if (remaining_seconds < 30) {
-this.logger.debug('Token will expire soon; reconnect');
-// Pre-emptively refresh the token
-this.options.remote.invalidateCredentials();
-return;
-}
-this.triggerCrudUpload();
-}
-else {
-this.logger.debug('Received unknown sync line', line);
-}
+});
 }
-this.logger.debug('Stream input empty');
-// Connection closed. Likely due to auth issue.
-return;
 }
 async rustSyncIteration(signal, resolvedOptions) {
 const syncImplementation = this;
@@ -14178,6 +13751,10 @@ The next upload iteration will be delayed.`);
 const rawResponse = await adapter.control(op, payload ?? null);
 const logger = syncImplementation.logger;
 logger.trace('powersync_control', op, payload == null || typeof payload == 'string' ? payload : '<bytes>', rawResponse);
+if (op != PowerSyncControlCommand.STOP) {
+// Evidently we have a working connection here, otherwise powersync_control would have failed.
+syncImplementation.connectionMayHaveChanged = false;
+}
 await handleInstructions(JSON.parse(rawResponse));
 }
 async function handleInstruction(instruction) {
@@ -14264,68 +13841,6 @@ The next upload iteration will be delayed.`);
 }
 return { immediateRestart: hideDisconnectOnRestart };
 }
-async updateSyncStatusForStartingCheckpoint(checkpoint) {
-const localProgress = await this.options.adapter.getBucketOperationProgress();
-const progress = {};
-let invalidated = false;
-for (const bucket of checkpoint.buckets) {
-const savedProgress = localProgress[bucket.bucket];
-const atLast = savedProgress?.atLast ?? 0;
-const sinceLast = savedProgress?.sinceLast ?? 0;
-progress[bucket.bucket] = {
-// The fallback priority doesn't matter here, but 3 is the one newer versions of the sync service
-// will use by default.
-priority: bucket.priority ?? 3,
-at_last: atLast,
-since_last: sinceLast,
-target_count: bucket.count ?? 0
-};
-if (bucket.count != null && bucket.count < atLast + sinceLast) {
-// Either due to a defrag / sync rule deploy or a compaction operation, the size
-// of the bucket shrank so much that the local ops exceed the ops in the updated
-// bucket. We can't prossibly report progress in this case (it would overshoot 100%).
-invalidated = true;
-}
-}
-if (invalidated) {
-for (const bucket in progress) {
-const bucketProgress = progress[bucket];
-bucketProgress.at_last = 0;
-bucketProgress.since_last = 0;
-}
-}
-this.updateSyncStatus({
-dataFlow: {
-downloading: true,
-downloadProgress: progress
-}
-});
-}
-async applyCheckpoint(checkpoint) {
-let result = await this.options.adapter.syncLocalDatabase(checkpoint);
-if (!result.checkpointValid) {
-this.logger.debug(`Checksum mismatch in checkpoint ${checkpoint.last_op_id}, will reconnect`);
-// This means checksums failed. Start again with a new checkpoint.
-// TODO: better back-off
-await new Promise((resolve) => setTimeout(resolve, 50));
-return { applied: false, endIteration: true };
-}
-else if (!result.ready) {
-this.logger.debug(`Could not apply checkpoint ${checkpoint.last_op_id} due to local data. We will retry applying the checkpoint after that upload is completed.`);
-return { applied: false, endIteration: false };
-}
-this.logger.debug(`Applied checkpoint ${checkpoint.last_op_id}`, checkpoint);
-this.updateSyncStatus({
-connected: true,
-lastSyncedAt: new Date(),
-dataFlow: {
-downloading: false,
-downloadProgress: null,
-downloadError: undefined
-}
-});
-return { applied: true, endIteration: false };
-}
 updateSyncStatus(options) {
 const updatedStatus = new SyncStatus({
 connected: options.connected ?? this.syncStatus.connected,
@@ -15864,14 +15379,12 @@ class SqliteBucketStorage extends BaseObserver {
 db;
 logger;
 tableNames;
-_hasCompletedSync;
 updateListener;
 _clientId;
 constructor(db, logger = Logger.get('SqliteBucketStorage')) {
 super();
 this.db = db;
 this.logger = logger;
-this._hasCompletedSync = false;
 this.tableNames = new Set();
 this.updateListener = db.registerListener({
 tablesUpdated: (update) => {
@@ -15883,7 +15396,6 @@ class SqliteBucketStorage extends BaseObserver {
 });
 }
 async init() {
-this._hasCompletedSync = false;
 const existingTableRows = await this.db.getAll(`SELECT name FROM sqlite_master WHERE type='table' AND name GLOB 'ps_data_*'`);
 for (const row of existingTableRows ?? []) {
 this.tableNames.add(row.name);
@@ -15905,156 +15417,6 @@ class SqliteBucketStorage extends BaseObserver {
 getMaxOpId() {
 return MAX_OP_ID;
 }
-/**
-* Reset any caches.
-*/
-startSession() { }
-async getBucketStates() {
-const result = await this.db.getAll("SELECT name as bucket, cast(last_op as TEXT) as op_id FROM ps_buckets WHERE pending_delete = 0 AND name != '$local'");
-return result;
-}
-async getBucketOperationProgress() {
-const rows = await this.db.getAll('SELECT name, count_at_last, count_since_last FROM ps_buckets');
-return Object.fromEntries(rows.map((r) => [r.name, { atLast: r.count_at_last, sinceLast: r.count_since_last }]));
-}
-async saveSyncData(batch, fixedKeyFormat = false) {
-await this.writeTransaction(async (tx) => {
-for (const b of batch.buckets) {
-await tx.execute('INSERT INTO powersync_operations(op, data) VALUES(?, ?)', [
-'save',
-JSON.stringify({ buckets: [b.toJSON(fixedKeyFormat)] })
-]);
-this.logger.debug(`Saved batch of data for bucket: ${b.bucket}, operations: ${b.data.length}`);
-}
-});
-}
-async removeBuckets(buckets) {
-for (const bucket of buckets) {
-await this.deleteBucket(bucket);
-}
-}
-/**
-* Mark a bucket for deletion.
-*/
-async deleteBucket(bucket) {
-await this.writeTransaction(async (tx) => {
-await tx.execute('INSERT INTO powersync_operations(op, data) VALUES(?, ?)', ['delete_bucket', bucket]);
-});
-this.logger.debug(`Done deleting bucket ${bucket}`);
-}
-async hasCompletedSync() {
-if (this._hasCompletedSync) {
-return true;
-}
-const r = await this.db.get(`SELECT powersync_last_synced_at() as synced_at`);
-const completed = r.synced_at != null;
-if (completed) {
-this._hasCompletedSync = true;
-}
-return completed;
-}
-async syncLocalDatabase(checkpoint, priority) {
-const r = await this.validateChecksums(checkpoint, priority);
-if (!r.checkpointValid) {
-this.logger.error('Checksums failed for', r.checkpointFailures);
-for (const b of r.checkpointFailures ?? []) {
-await this.deleteBucket(b);
-}
-return { ready: false, checkpointValid: false, checkpointFailures: r.checkpointFailures };
-}
-if (priority == null) {
-this.logger.debug(`Validated checksums checkpoint ${checkpoint.last_op_id}`);
-}
-else {
-this.logger.debug(`Validated checksums for partial checkpoint ${checkpoint.last_op_id}, priority ${priority}`);
-}
-let buckets = checkpoint.buckets;
-if (priority !== undefined) {
-buckets = buckets.filter((b) => hasMatchingPriority(priority, b));
-}
-const bucketNames = buckets.map((b) => b.bucket);
-await this.writeTransaction(async (tx) => {
-await tx.execute(`UPDATE ps_buckets SET last_op = ? WHERE name IN (SELECT json_each.value FROM json_each(?))`, [
-checkpoint.last_op_id,
-JSON.stringify(bucketNames)
-]);
-if (priority == null && checkpoint.write_checkpoint) {
-await tx.execute("UPDATE ps_buckets SET last_op = ? WHERE name = '$local'", [checkpoint.write_checkpoint]);
-}
-});
-const valid = await this.updateObjectsFromBuckets(checkpoint, priority);
-if (!valid) {
-return { ready: false, checkpointValid: true };
-}
-return {
-ready: true,
-checkpointValid: true
-};
-}
-/**
-* Atomically update the local state to the current checkpoint.
-*
-* This includes creating new tables, dropping old tables, and copying data over from the oplog.
-*/
-async updateObjectsFromBuckets(checkpoint, priority) {
-let arg = '';
-if (priority !== undefined) {
-const affectedBuckets = [];
-for (const desc of checkpoint.buckets) {
-if (hasMatchingPriority(priority, desc)) {
-affectedBuckets.push(desc.bucket);
-}
-}
-arg = JSON.stringify({ priority, buckets: affectedBuckets });
-}
-return this.writeTransaction(async (tx) => {
-const { insertId: result } = await tx.execute('INSERT INTO powersync_operations(op, data) VALUES(?, ?)', [
-'sync_local',
-arg
-]);
-if (result == 1) {
-if (priority == null) {
-const bucketToCount = Object.fromEntries(checkpoint.buckets.map((b) => [b.bucket, b.count]));
-// The two parameters could be replaced with one, but: https://github.com/powersync-ja/better-sqlite3/pull/6
-const jsonBucketCount = JSON.stringify(bucketToCount);
-await tx.execute("UPDATE ps_buckets SET count_since_last = 0, count_at_last = ?->name WHERE name != '$local' AND ?->name IS NOT NULL", [jsonBucketCount, jsonBucketCount]);
-}
-return true;
-}
-else {
-return false;
-}
-});
-}
-async validateChecksums(checkpoint, priority) {
-if (priority !== undefined) {
-// Only validate the buckets within the priority we care about
-const newBuckets = checkpoint.buckets.filter((cs) => hasMatchingPriority(priority, cs));
-checkpoint = { ...checkpoint, buckets: newBuckets };
-}
-const rs = await this.db.execute('SELECT powersync_validate_checkpoint(?) as result', [
-JSON.stringify({ ...checkpoint })
-]);
-const resultItem = rs.rows?.item(0);
-if (!resultItem) {
-return {
-checkpointValid: false,
-ready: false,
-checkpointFailures: []
-};
-}
-const result = JSON.parse(resultItem['result']);
-if (result['valid']) {
-return { ready: true, checkpointValid: true };
-}
-else {
-return {
-checkpointValid: false,
-ready: false,
-checkpointFailures: result['failed_buckets']
-};
-}
-}
 async updateLocalTarget(cb) {
 const rs1 = await this.db.getAll("SELECT target_op FROM ps_buckets WHERE name = '$local' AND target_op = CAST(? as INTEGER)", [MAX_OP_ID]);
 if (!rs1.length) {
@@ -16145,12 +15507,6 @@ class SqliteBucketStorage extends BaseObserver {
 async writeTransaction(callback, options) {
 return this.db.writeTransaction(callback, options);
 }
-/**
-* Set a target checkpoint.
-*/
-async setTargetCheckpoint(checkpoint) {
-// No-op for now
-}
 async control(op, payload) {
 return await this.writeTransaction(async (tx) => {
 const [[raw]] = await tx.executeRaw('SELECT powersync_control(?, ?)', [op, payload]);
@@ -16174,20 +15530,6 @@ class SqliteBucketStorage extends BaseObserver {
 }
 static _subkeyMigrationKey = 'powersync_js_migrated_subkeys';
 }
-function hasMatchingPriority(priority, bucket) {
-return bucket.priority != null && bucket.priority <= priority;
-}
-
-// TODO JSON
-class SyncDataBatch {
-buckets;
-static fromJSON(json) {
-return new SyncDataBatch(json.buckets.map((bucket) => SyncDataBucket.fromRow(bucket)));
-}
-constructor(buckets) {
-this.buckets = buckets;
-}
-}
 
 /**
 * Thrown when an underlying database connection is closed.
@@ -16247,10 +15589,8 @@ class Schema {
 * developer instead of automatically by PowerSync.
 * Since raw tables are not backed by JSON, running complex queries on them may be more efficient. Further, they allow
 * using client-side table and column constraints.
-* Note that raw tables are only supported when using the new `SyncClientImplementation.rust` sync client.
 *
 * @param tables An object of (table name, raw table definition) entries.
-* @experimental Note that the raw tables API is still experimental and may change in the future.
 */
 withRawTables(tables) {
 for (const [name, rawTableDefinition] of Object.entries(tables)) {
@@ -16485,9 +15825,6 @@ const parseQuery = (query, parameters) => {
 /******/ return module.exports;
 /******/ }
 /******/
-/******/ // expose the modules object (__webpack_modules__)
-/******/ __webpack_require__.m = __webpack_modules__;
-/******/
 /************************************************************************/
 /******/ /* webpack/runtime/define property getters */
 /******/ (() => {
@@ -16501,40 +15838,6 @@ const parseQuery = (query, parameters) => {
 /******/ };
 /******/ })();
 /******/
-/******/ /* webpack/runtime/ensure chunk */
-/******/ (() => {
-/******/ __webpack_require__.f = {};
-/******/ // This file contains only the entry chunk.
-/******/ // The chunk loading function for additional chunks
-/******/ __webpack_require__.e = (chunkId) => {
-/******/ return Promise.all(Object.keys(__webpack_require__.f).reduce((promises, key) => {
-/******/ __webpack_require__.f[key](chunkId, promises);
-/******/ return promises;
-/******/ }, []));
-/******/ };
-/******/ })();
-/******/
-/******/ /* webpack/runtime/get javascript chunk filename */
-/******/ (() => {
-/******/ // This function allow to reference async chunks
-/******/ __webpack_require__.u = (chunkId) => {
-/******/ // return url for filenames based on template
-/******/ return "worker/" + chunkId + ".umd.js";
-/******/ };
-/******/ })();
-/******/
-/******/ /* webpack/runtime/global */
-/******/ (() => {
-/******/ __webpack_require__.g = (function() {
-/******/ if (typeof globalThis === 'object') return globalThis;
-/******/ try {
-/******/ return this || new Function('return this')();
-/******/ } catch (e) {
-/******/ if (typeof window === 'object') return window;
-/******/ }
-/******/ })();
-/******/ })();
-/******/
 /******/ /* webpack/runtime/hasOwnProperty shorthand */
 /******/ (() => {
 /******/ __webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))
@@ -16551,70 +15854,6 @@ const parseQuery = (query, parameters) => {
 /******/ };
 /******/ })();
 /******/
-/******/ /* webpack/runtime/publicPath */
-/******/ (() => {
-/******/ var scriptUrl;
-/******/ if (__webpack_require__.g.importScripts) scriptUrl = __webpack_require__.g.location + "";
-/******/ var document = __webpack_require__.g.document;
-/******/ if (!scriptUrl && document) {
-/******/ if (document.currentScript && document.currentScript.tagName.toUpperCase() === 'SCRIPT')
-/******/ scriptUrl = document.currentScript.src;
-/******/ if (!scriptUrl) {
-/******/ var scripts = document.getElementsByTagName("script");
-/******/ if(scripts.length) {
-/******/ var i = scripts.length - 1;
-/******/ while (i > -1 && (!scriptUrl || !/^http(s?):/.test(scriptUrl))) scriptUrl = scripts[i--].src;
-/******/ }
-/******/ }
-/******/ }
-/******/ // When supporting browsers where an automatic publicPath is not supported you must specify an output.publicPath manually via configuration
-/******/ // or pass an empty string ("") and set the __webpack_public_path__ variable from your code to use your own logic.
-/******/ if (!scriptUrl) throw new Error("Automatic publicPath is not supported in this browser");
-/******/ scriptUrl = scriptUrl.replace(/^blob:/, "").replace(/#.*$/, "").replace(/\?.*$/, "").replace(/\/[^\/]+$/, "/");
-/******/ __webpack_require__.p = scriptUrl + "../";
-/******/ })();
-/******/
-/******/ /* webpack/runtime/importScripts chunk loading */
-/******/ (() => {
-/******/ // no baseURI
-/******/
-/******/ // object to store loaded chunks
-/******/ // "1" means "already loaded"
-/******/ var installedChunks = {
-/******/ "SharedSyncImplementation": 1
-/******/ };
-/******/
-/******/ // importScripts chunk loading
-/******/ var installChunk = (data) => {
-/******/ var [chunkIds, moreModules, runtime] = data;
-/******/ for(var moduleId in moreModules) {
-/******/ if(__webpack_require__.o(moreModules, moduleId)) {
-/******/ __webpack_require__.m[moduleId] = moreModules[moduleId];
-/******/ }
-/******/ }
-/******/ if(runtime) runtime(__webpack_require__);
-/******/ while(chunkIds.length)
-/******/ installedChunks[chunkIds.pop()] = 1;
-/******/ parentChunkLoadingFunction(data);
-/******/ };
-/******/ __webpack_require__.f.i = (chunkId, promises) => {
-/******/ // "1" is the signal for "already loaded"
-/******/ if(!installedChunks[chunkId]) {
-/******/ if(true) { // all chunks have JS
-/******/ importScripts(__webpack_require__.p + __webpack_require__.u(chunkId));
-/******/ }
-/******/ }
-/******/ };
-/******/
-/******/ var chunkLoadingGlobal = self["webpackChunksdk_web"] = self["webpackChunksdk_web"] || [];
-/******/ var parentChunkLoadingFunction = chunkLoadingGlobal.push.bind(chunkLoadingGlobal);
-/******/ chunkLoadingGlobal.push = installChunk;
-/******/
-/******/ // no HMR
-/******/
-/******/ // no HMR manifest
-/******/ })();
-/******/
 /************************************************************************/
 var __webpack_exports__ = {};
 // This entry needs to be wrapped in an IIFE because it needs to be isolated against other modules in the chunk.