@powersync/common 0.0.0-dev-20260503073249 → 0.0.0-dev-20260504100448

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/dist/bundle.cjs +14 -659
  2. package/dist/bundle.cjs.map +1 -1
  3. package/dist/bundle.mjs +15 -648
  4. package/dist/bundle.mjs.map +1 -1
  5. package/dist/bundle.node.cjs +14 -659
  6. package/dist/bundle.node.cjs.map +1 -1
  7. package/dist/bundle.node.mjs +15 -648
  8. package/dist/bundle.node.mjs.map +1 -1
  9. package/dist/index.d.cts +19 -358
  10. package/legacy/sync_protocol.d.ts +103 -0
  11. package/lib/client/sync/bucket/BucketStorageAdapter.d.ts +1 -63
  12. package/lib/client/sync/bucket/BucketStorageAdapter.js.map +1 -1
  13. package/lib/client/sync/bucket/SqliteBucketStorage.d.ts +1 -28
  14. package/lib/client/sync/bucket/SqliteBucketStorage.js +0 -162
  15. package/lib/client/sync/bucket/SqliteBucketStorage.js.map +1 -1
  16. package/lib/client/sync/stream/AbstractRemote.d.ts +2 -12
  17. package/lib/client/sync/stream/AbstractRemote.js +3 -13
  18. package/lib/client/sync/stream/AbstractRemote.js.map +1 -1
  19. package/lib/client/sync/stream/AbstractStreamingSyncImplementation.d.ts +9 -35
  20. package/lib/client/sync/stream/AbstractStreamingSyncImplementation.js +11 -338
  21. package/lib/client/sync/stream/AbstractStreamingSyncImplementation.js.map +1 -1
  22. package/lib/client/sync/stream/JsonValue.d.ts +7 -0
  23. package/lib/client/sync/stream/JsonValue.js +2 -0
  24. package/lib/client/sync/stream/JsonValue.js.map +1 -0
  25. package/lib/client/sync/stream/core-instruction.d.ts +1 -2
  26. package/lib/client/sync/stream/core-instruction.js.map +1 -1
  27. package/lib/index.d.ts +1 -5
  28. package/lib/index.js +1 -5
  29. package/lib/index.js.map +1 -1
  30. package/package.json +7 -4
  31. package/src/client/sync/bucket/BucketStorageAdapter.ts +1 -70
  32. package/src/client/sync/bucket/SqliteBucketStorage.ts +1 -197
  33. package/src/client/sync/stream/AbstractRemote.ts +5 -27
  34. package/src/client/sync/stream/AbstractStreamingSyncImplementation.ts +22 -402
  35. package/src/client/sync/stream/JsonValue.ts +8 -0
  36. package/src/client/sync/stream/core-instruction.ts +1 -2
  37. package/src/index.ts +1 -5
  38. package/lib/client/sync/bucket/OpType.d.ts +0 -16
  39. package/lib/client/sync/bucket/OpType.js +0 -23
  40. package/lib/client/sync/bucket/OpType.js.map +0 -1
  41. package/lib/client/sync/bucket/OplogEntry.d.ts +0 -23
  42. package/lib/client/sync/bucket/OplogEntry.js +0 -36
  43. package/lib/client/sync/bucket/OplogEntry.js.map +0 -1
  44. package/lib/client/sync/bucket/SyncDataBatch.d.ts +0 -6
  45. package/lib/client/sync/bucket/SyncDataBatch.js +0 -12
  46. package/lib/client/sync/bucket/SyncDataBatch.js.map +0 -1
  47. package/lib/client/sync/bucket/SyncDataBucket.d.ts +0 -40
  48. package/lib/client/sync/bucket/SyncDataBucket.js +0 -40
  49. package/lib/client/sync/bucket/SyncDataBucket.js.map +0 -1
  50. package/lib/client/sync/stream/streaming-sync-types.d.ts +0 -143
  51. package/lib/client/sync/stream/streaming-sync-types.js +0 -26
  52. package/lib/client/sync/stream/streaming-sync-types.js.map +0 -1
  53. package/src/client/sync/bucket/OpType.ts +0 -23
  54. package/src/client/sync/bucket/OplogEntry.ts +0 -50
  55. package/src/client/sync/bucket/SyncDataBatch.ts +0 -11
  56. package/src/client/sync/bucket/SyncDataBucket.ts +0 -49
  57. package/src/client/sync/stream/streaming-sync-types.ts +0 -210
package/dist/bundle.mjs CHANGED
@@ -3487,103 +3487,6 @@ class AbortOperation extends Error {
3487
3487
  }
3488
3488
  }
3489
3489
 
3490
- var OpTypeEnum;
3491
- (function (OpTypeEnum) {
3492
- OpTypeEnum[OpTypeEnum["CLEAR"] = 1] = "CLEAR";
3493
- OpTypeEnum[OpTypeEnum["MOVE"] = 2] = "MOVE";
3494
- OpTypeEnum[OpTypeEnum["PUT"] = 3] = "PUT";
3495
- OpTypeEnum[OpTypeEnum["REMOVE"] = 4] = "REMOVE";
3496
- })(OpTypeEnum || (OpTypeEnum = {}));
3497
- /**
3498
- * Used internally for sync buckets.
3499
- */
3500
- class OpType {
3501
- value;
3502
- static fromJSON(jsonValue) {
3503
- return new OpType(OpTypeEnum[jsonValue]);
3504
- }
3505
- constructor(value) {
3506
- this.value = value;
3507
- }
3508
- toJSON() {
3509
- return Object.entries(OpTypeEnum).find(([, value]) => value === this.value)[0];
3510
- }
3511
- }
3512
-
3513
- class OplogEntry {
3514
- op_id;
3515
- op;
3516
- checksum;
3517
- subkey;
3518
- object_type;
3519
- object_id;
3520
- data;
3521
- static fromRow(row) {
3522
- return new OplogEntry(row.op_id, OpType.fromJSON(row.op), row.checksum, row.subkey, row.object_type, row.object_id, row.data);
3523
- }
3524
- constructor(op_id, op, checksum, subkey, object_type, object_id, data) {
3525
- this.op_id = op_id;
3526
- this.op = op;
3527
- this.checksum = checksum;
3528
- this.subkey = subkey;
3529
- this.object_type = object_type;
3530
- this.object_id = object_id;
3531
- this.data = data;
3532
- }
3533
- toJSON(fixedKeyEncoding = false) {
3534
- return {
3535
- op_id: this.op_id,
3536
- op: this.op.toJSON(),
3537
- object_type: this.object_type,
3538
- object_id: this.object_id,
3539
- checksum: this.checksum,
3540
- data: this.data,
3541
- // Older versions of the JS SDK used to always JSON.stringify here. That has always been wrong,
3542
- // but we need to migrate gradually to not break existing databases.
3543
- subkey: fixedKeyEncoding ? this.subkey : JSON.stringify(this.subkey)
3544
- };
3545
- }
3546
- }
3547
-
3548
- class SyncDataBucket {
3549
- bucket;
3550
- data;
3551
- has_more;
3552
- after;
3553
- next_after;
3554
- static fromRow(row) {
3555
- return new SyncDataBucket(row.bucket, row.data.map((entry) => OplogEntry.fromRow(entry)), row.has_more ?? false, row.after, row.next_after);
3556
- }
3557
- constructor(bucket, data,
3558
- /**
3559
- * True if the response does not contain all the data for this bucket, and another request must be made.
3560
- */
3561
- has_more,
3562
- /**
3563
- * The `after` specified in the request.
3564
- */
3565
- after,
3566
- /**
3567
- * Use this for the next request.
3568
- */
3569
- next_after) {
3570
- this.bucket = bucket;
3571
- this.data = data;
3572
- this.has_more = has_more;
3573
- this.after = after;
3574
- this.next_after = next_after;
3575
- }
3576
- toJSON(fixedKeyEncoding = false) {
3577
- return {
3578
- bucket: this.bucket,
3579
- has_more: this.has_more,
3580
- after: this.after,
3581
- next_after: this.next_after,
3582
- data: this.data.map((entry) => entry.toJSON(fixedKeyEncoding))
3583
- };
3584
- }
3585
- }
3586
-
3587
3490
  var buffer = {};
3588
3491
 
3589
3492
  var base64Js = {};
@@ -11296,22 +11199,12 @@ class AbstractRemote {
11296
11199
  * Returns a data stream of sync line data, fetched via RSocket-over-WebSocket.
11297
11200
  *
11298
11201
  * The only mechanism to abort the returned stream is to use the abort signal in {@link SocketSyncStreamOptions}.
11299
- *
11300
- * @param bson A BSON encoder and decoder. When set, the data stream will be requested with a BSON payload
11301
- * (required for compatibility with older sync services).
11302
11202
  */
11303
- async socketStreamRaw(options, bson) {
11203
+ async socketStreamRaw(options) {
11304
11204
  const { path, fetchStrategy = FetchStrategy.Buffered } = options;
11305
- const mimeType = bson == null ? 'application/json' : 'application/bson';
11205
+ const mimeType = 'application/json';
11306
11206
  function toBuffer(js) {
11307
- let contents;
11308
- if (bson != null) {
11309
- contents = bson.serialize(js);
11310
- }
11311
- else {
11312
- contents = JSON.stringify(js);
11313
- }
11314
- return bufferExports.Buffer.from(contents);
11207
+ return bufferExports.Buffer.from(JSON.stringify(js));
11315
11208
  }
11316
11209
  const syncQueueRequestSize = fetchStrategy == FetchStrategy.Buffered ? 10 : 1;
11317
11210
  const request = await this.buildRequest(path);
@@ -11613,32 +11506,6 @@ function coreStatusToJs(status) {
11613
11506
  };
11614
11507
  }
11615
11508
 
11616
- function isStreamingSyncData(line) {
11617
- return line.data != null;
11618
- }
11619
- function isStreamingKeepalive(line) {
11620
- return line.token_expires_in != null;
11621
- }
11622
- function isStreamingSyncCheckpoint(line) {
11623
- return line.checkpoint != null;
11624
- }
11625
- function isStreamingSyncCheckpointComplete(line) {
11626
- return line.checkpoint_complete != null;
11627
- }
11628
- function isStreamingSyncCheckpointPartiallyComplete(line) {
11629
- return line.partial_checkpoint_complete != null;
11630
- }
11631
- function isStreamingSyncCheckpointDiff(line) {
11632
- return line.checkpoint_diff != null;
11633
- }
11634
- function isContinueCheckpointRequest(request) {
11635
- return (Array.isArray(request.buckets) &&
11636
- typeof request.checkpoint_token == 'string');
11637
- }
11638
- function isSyncNewCheckpointRequest(request) {
11639
- return typeof request.request_checkpoint == 'object';
11640
- }
11641
-
11642
11509
  var LockType;
11643
11510
  (function (LockType) {
11644
11511
  LockType["CRUD"] = "crud";
@@ -11651,35 +11518,21 @@ var SyncStreamConnectionMethod;
11651
11518
  })(SyncStreamConnectionMethod || (SyncStreamConnectionMethod = {}));
11652
11519
  var SyncClientImplementation;
11653
11520
  (function (SyncClientImplementation) {
11654
- /**
11655
- * Decodes and handles sync lines received from the sync service in JavaScript.
11656
- *
11657
- * This is the default option.
11658
- *
11659
- * @deprecated We recommend the {@link RUST} client implementation for all apps. If you have issues with
11660
- * the Rust client, please file an issue or reach out to us. The JavaScript client will be removed in a future
11661
- * version of the PowerSync SDK.
11662
- */
11663
- SyncClientImplementation["JAVASCRIPT"] = "js";
11664
11521
  /**
11665
11522
  * This implementation offloads the sync line decoding and handling into the PowerSync
11666
11523
  * core extension.
11667
11524
  *
11668
- * This option is more performant than the {@link JAVASCRIPT} client, enabled by default and the
11669
- * recommended client implementation for all apps.
11525
+ * This is the only option, as an older JavaScript client implementation has been removed from the SDK.
11670
11526
  *
11671
11527
  * ## Compatibility warning
11672
11528
  *
11673
11529
  * The Rust sync client stores sync data in a format that is slightly different than the one used
11674
- * by the old {@link JAVASCRIPT} implementation. When adopting the {@link RUST} client on existing
11675
- * databases, the PowerSync SDK will migrate the format automatically.
11676
- * Further, the {@link JAVASCRIPT} client in recent versions of the PowerSync JS SDK (starting from
11677
- * the version introducing {@link RUST} as an option) also supports the new format, so you can switch
11678
- * back to {@link JAVASCRIPT} later.
11530
+ * by the old JavaScript client. When adopting the {@link RUST} client on existing databases, the PowerSync SDK will
11531
+ * migrate the format automatically.
11679
11532
  *
11680
- * __However__: Upgrading the SDK version, then adopting {@link RUST} as a sync client and later
11681
- * downgrading the SDK to an older version (necessarily using the JavaScript-based implementation then)
11682
- * can lead to sync issues.
11533
+ * SDK versions supporting both the JavaScript and the Rust client support both formats with the JavaScript client
11534
+ * implementaiton. However, downgrading to an SDK version that only supports the JavaScript client would not be
11535
+ * possible anymore. Problematic SDK versions have been released before 2025-06-09.
11683
11536
  */
11684
11537
  SyncClientImplementation["RUST"] = "rust";
11685
11538
  })(SyncClientImplementation || (SyncClientImplementation = {}));
@@ -11702,13 +11555,7 @@ const DEFAULT_STREAM_CONNECTION_OPTIONS = {
11702
11555
  serializedSchema: undefined,
11703
11556
  includeDefaultStreams: true
11704
11557
  };
11705
- // The priority we assume when we receive checkpoint lines where no priority is set.
11706
- // This is the default priority used by the sync service, but can be set to an arbitrary
11707
- // value since sync services without priorities also won't send partial sync completion
11708
- // messages.
11709
- const FALLBACK_PRIORITY = 3;
11710
11558
  class AbstractStreamingSyncImplementation extends BaseObserver {
11711
- _lastSyncedAt;
11712
11559
  options;
11713
11560
  abortController;
11714
11561
  // In rare cases, mostly for tests, uploads can be triggered without being properly connected.
@@ -11798,9 +11645,6 @@ class AbstractStreamingSyncImplementation extends BaseObserver {
11798
11645
  this.crudUpdateListener = undefined;
11799
11646
  this.uploadAbortController?.abort();
11800
11647
  }
11801
- async hasCompletedSync() {
11802
- return this.options.adapter.hasCompletedSync();
11803
- }
11804
11648
  async getWriteCheckpoint() {
11805
11649
  const clientId = await this.options.adapter.getClientId();
11806
11650
  let path = `/write-checkpoint2.json?client_id=${clientId}`;
@@ -11882,7 +11726,7 @@ The next upload iteration will be delayed.`);
11882
11726
  });
11883
11727
  }
11884
11728
  }
11885
- this.uploadAbortController = null;
11729
+ this.uploadAbortController = undefined;
11886
11730
  }
11887
11731
  });
11888
11732
  }
@@ -12042,18 +11886,6 @@ The next upload iteration will be delayed.`);
12042
11886
  // iteration is active. That allows us to reconnect ASAP, instead of having to wait for the next sync line.
12043
11887
  this.handleActiveStreamsChange?.();
12044
11888
  }
12045
- async collectLocalBucketState() {
12046
- const bucketEntries = await this.options.adapter.getBucketStates();
12047
- const req = bucketEntries.map((entry) => ({
12048
- name: entry.bucket,
12049
- after: entry.op_id
12050
- }));
12051
- const localDescriptions = new Map();
12052
- for (const entry of bucketEntries) {
12053
- localDescriptions.set(entry.bucket, null);
12054
- }
12055
- return [req, localDescriptions];
12056
- }
12057
11889
  /**
12058
11890
  * Older versions of the JS SDK used to encode subkeys as JSON in {@link OplogEntry.toJSON}.
12059
11891
  * Because subkeys are always strings, this leads to quotes being added around them in `ps_oplog`.
@@ -12095,19 +11927,13 @@ The next upload iteration will be delayed.`);
12095
11927
  }
12096
11928
  const clientImplementation = resolvedOptions.clientImplementation;
12097
11929
  this.updateSyncStatus({ clientImplementation });
12098
- if (clientImplementation == SyncClientImplementation.JAVASCRIPT) {
12099
- await this.legacyStreamingSyncIteration(signal, resolvedOptions);
12100
- return null;
12101
- }
12102
- else {
12103
- await this.requireKeyFormat(true);
12104
- return await this.rustSyncIteration(signal, resolvedOptions);
12105
- }
11930
+ await this.requireKeyFormat(true);
11931
+ return await this.rustSyncIteration(signal, resolvedOptions);
12106
11932
  }
12107
11933
  });
12108
11934
  }
12109
11935
  async receiveSyncLines(data) {
12110
- const { options, connection, bson } = data;
11936
+ const { options, connection } = data;
12111
11937
  const remote = this.options.remote;
12112
11938
  if (connection.connectionMethod == SyncStreamConnectionMethod.HTTP) {
12113
11939
  return await remote.fetchStream(options);
@@ -12116,232 +11942,8 @@ The next upload iteration will be delayed.`);
12116
11942
  return await this.options.remote.socketStreamRaw({
12117
11943
  ...options,
12118
11944
  ...{ fetchStrategy: connection.fetchStrategy }
12119
- }, bson);
12120
- }
12121
- }
12122
- async legacyStreamingSyncIteration(signal, resolvedOptions) {
12123
- const rawTables = resolvedOptions.serializedSchema?.raw_tables;
12124
- if (rawTables != null && rawTables.length) {
12125
- this.logger.warn('Raw tables require the Rust-based sync client. The JS client will ignore them.');
12126
- }
12127
- if (this.activeStreams.length) {
12128
- this.logger.error('Sync streams require `clientImplementation: SyncClientImplementation.RUST` when connecting.');
12129
- }
12130
- this.logger.debug('Streaming sync iteration started');
12131
- this.options.adapter.startSession();
12132
- let [req, bucketMap] = await this.collectLocalBucketState();
12133
- let targetCheckpoint = null;
12134
- // A checkpoint that has been validated but not applied (e.g. due to pending local writes)
12135
- let pendingValidatedCheckpoint = null;
12136
- const clientId = await this.options.adapter.getClientId();
12137
- const usingFixedKeyFormat = await this.requireKeyFormat(false);
12138
- this.logger.debug('Requesting stream from server');
12139
- const syncOptions = {
12140
- path: '/sync/stream',
12141
- abortSignal: signal,
12142
- data: {
12143
- buckets: req,
12144
- include_checksum: true,
12145
- raw_data: true,
12146
- parameters: resolvedOptions.params,
12147
- app_metadata: resolvedOptions.appMetadata,
12148
- client_id: clientId
12149
- }
12150
- };
12151
- const bson = await this.options.remote.getBSON();
12152
- const source = await this.receiveSyncLines({
12153
- options: syncOptions,
12154
- connection: resolvedOptions,
12155
- bson
12156
- });
12157
- const stream = injectable(map(source, (line) => {
12158
- if (typeof line == 'string') {
12159
- return JSON.parse(line);
12160
- }
12161
- else {
12162
- return bson.deserialize(line);
12163
- }
12164
- }));
12165
- this.logger.debug('Stream established. Processing events');
12166
- this.notifyCompletedUploads = () => {
12167
- stream.inject({ crud_upload_completed: null });
12168
- };
12169
- while (true) {
12170
- const { value: line, done } = await stream.next();
12171
- if (done) {
12172
- // The stream has closed while waiting
12173
- return;
12174
- }
12175
- if ('crud_upload_completed' in line) {
12176
- if (pendingValidatedCheckpoint != null) {
12177
- const { applied, endIteration } = await this.applyCheckpoint(pendingValidatedCheckpoint);
12178
- if (applied) {
12179
- pendingValidatedCheckpoint = null;
12180
- }
12181
- else if (endIteration) {
12182
- break;
12183
- }
12184
- }
12185
- continue;
12186
- }
12187
- // A connection is active and messages are being received
12188
- if (!this.syncStatus.connected) {
12189
- // There is a connection now
12190
- Promise.resolve().then(() => this.triggerCrudUpload());
12191
- this.updateSyncStatus({
12192
- connected: true
12193
- });
12194
- }
12195
- if (isStreamingSyncCheckpoint(line)) {
12196
- targetCheckpoint = line.checkpoint;
12197
- // New checkpoint - existing validated checkpoint is no longer valid
12198
- pendingValidatedCheckpoint = null;
12199
- const bucketsToDelete = new Set(bucketMap.keys());
12200
- const newBuckets = new Map();
12201
- for (const checksum of line.checkpoint.buckets) {
12202
- newBuckets.set(checksum.bucket, {
12203
- name: checksum.bucket,
12204
- priority: checksum.priority ?? FALLBACK_PRIORITY
12205
- });
12206
- bucketsToDelete.delete(checksum.bucket);
12207
- }
12208
- if (bucketsToDelete.size > 0) {
12209
- this.logger.debug('Removing buckets', [...bucketsToDelete]);
12210
- }
12211
- bucketMap = newBuckets;
12212
- await this.options.adapter.removeBuckets([...bucketsToDelete]);
12213
- await this.options.adapter.setTargetCheckpoint(targetCheckpoint);
12214
- await this.updateSyncStatusForStartingCheckpoint(targetCheckpoint);
12215
- }
12216
- else if (isStreamingSyncCheckpointComplete(line)) {
12217
- const result = await this.applyCheckpoint(targetCheckpoint);
12218
- if (result.endIteration) {
12219
- return;
12220
- }
12221
- else if (!result.applied) {
12222
- // "Could not apply checkpoint due to local data". We need to retry after
12223
- // finishing uploads.
12224
- pendingValidatedCheckpoint = targetCheckpoint;
12225
- }
12226
- else {
12227
- // Nothing to retry later. This would likely already be null from the last
12228
- // checksum or checksum_diff operation, but we make sure.
12229
- pendingValidatedCheckpoint = null;
12230
- }
12231
- }
12232
- else if (isStreamingSyncCheckpointPartiallyComplete(line)) {
12233
- const priority = line.partial_checkpoint_complete.priority;
12234
- this.logger.debug('Partial checkpoint complete', priority);
12235
- const result = await this.options.adapter.syncLocalDatabase(targetCheckpoint, priority);
12236
- if (!result.checkpointValid) {
12237
- // This means checksums failed. Start again with a new checkpoint.
12238
- // TODO: better back-off
12239
- await new Promise((resolve) => setTimeout(resolve, 50));
12240
- return;
12241
- }
12242
- else if (!result.ready) ;
12243
- else {
12244
- // We'll keep on downloading, but can report that this priority is synced now.
12245
- this.logger.debug('partial checkpoint validation succeeded');
12246
- // All states with a higher priority can be deleted since this partial sync includes them.
12247
- const priorityStates = this.syncStatus.priorityStatusEntries.filter((s) => s.priority <= priority);
12248
- priorityStates.push({
12249
- priority,
12250
- lastSyncedAt: new Date(),
12251
- hasSynced: true
12252
- });
12253
- this.updateSyncStatus({
12254
- connected: true,
12255
- priorityStatusEntries: priorityStates
12256
- });
12257
- }
12258
- }
12259
- else if (isStreamingSyncCheckpointDiff(line)) {
12260
- // TODO: It may be faster to just keep track of the diff, instead of the entire checkpoint
12261
- if (targetCheckpoint == null) {
12262
- throw new Error('Checkpoint diff without previous checkpoint');
12263
- }
12264
- // New checkpoint - existing validated checkpoint is no longer valid
12265
- pendingValidatedCheckpoint = null;
12266
- const diff = line.checkpoint_diff;
12267
- const newBuckets = new Map();
12268
- for (const checksum of targetCheckpoint.buckets) {
12269
- newBuckets.set(checksum.bucket, checksum);
12270
- }
12271
- for (const checksum of diff.updated_buckets) {
12272
- newBuckets.set(checksum.bucket, checksum);
12273
- }
12274
- for (const bucket of diff.removed_buckets) {
12275
- newBuckets.delete(bucket);
12276
- }
12277
- const newCheckpoint = {
12278
- last_op_id: diff.last_op_id,
12279
- buckets: [...newBuckets.values()],
12280
- write_checkpoint: diff.write_checkpoint
12281
- };
12282
- targetCheckpoint = newCheckpoint;
12283
- await this.updateSyncStatusForStartingCheckpoint(targetCheckpoint);
12284
- bucketMap = new Map();
12285
- newBuckets.forEach((checksum, name) => bucketMap.set(name, {
12286
- name: checksum.bucket,
12287
- priority: checksum.priority ?? FALLBACK_PRIORITY
12288
- }));
12289
- const bucketsToDelete = diff.removed_buckets;
12290
- if (bucketsToDelete.length > 0) {
12291
- this.logger.debug('Remove buckets', bucketsToDelete);
12292
- }
12293
- await this.options.adapter.removeBuckets(bucketsToDelete);
12294
- await this.options.adapter.setTargetCheckpoint(targetCheckpoint);
12295
- }
12296
- else if (isStreamingSyncData(line)) {
12297
- const { data } = line;
12298
- const previousProgress = this.syncStatus.dataFlowStatus.downloadProgress;
12299
- let updatedProgress = null;
12300
- if (previousProgress) {
12301
- updatedProgress = { ...previousProgress };
12302
- const progressForBucket = updatedProgress[data.bucket];
12303
- if (progressForBucket) {
12304
- updatedProgress[data.bucket] = {
12305
- ...progressForBucket,
12306
- since_last: progressForBucket.since_last + data.data.length
12307
- };
12308
- }
12309
- }
12310
- this.updateSyncStatus({
12311
- dataFlow: {
12312
- downloading: true,
12313
- downloadProgress: updatedProgress
12314
- }
12315
- });
12316
- await this.options.adapter.saveSyncData({ buckets: [SyncDataBucket.fromRow(data)] }, usingFixedKeyFormat);
12317
- }
12318
- else if (isStreamingKeepalive(line)) {
12319
- const remaining_seconds = line.token_expires_in;
12320
- if (remaining_seconds == 0) {
12321
- // Connection would be closed automatically right after this
12322
- this.logger.debug('Token expiring; reconnect');
12323
- /**
12324
- * For a rare case where the backend connector does not update the token
12325
- * (uses the same one), this should have some delay.
12326
- */
12327
- await this.delayRetry();
12328
- return;
12329
- }
12330
- else if (remaining_seconds < 30) {
12331
- this.logger.debug('Token will expire soon; reconnect');
12332
- // Pre-emptively refresh the token
12333
- this.options.remote.invalidateCredentials();
12334
- return;
12335
- }
12336
- this.triggerCrudUpload();
12337
- }
12338
- else {
12339
- this.logger.debug('Received unknown sync line', line);
12340
- }
11945
+ });
12341
11946
  }
12342
- this.logger.debug('Stream input empty');
12343
- // Connection closed. Likely due to auth issue.
12344
- return;
12345
11947
  }
12346
11948
  async rustSyncIteration(signal, resolvedOptions) {
12347
11949
  const syncImplementation = this;
@@ -12504,68 +12106,6 @@ The next upload iteration will be delayed.`);
12504
12106
  }
12505
12107
  return { immediateRestart: hideDisconnectOnRestart };
12506
12108
  }
12507
- async updateSyncStatusForStartingCheckpoint(checkpoint) {
12508
- const localProgress = await this.options.adapter.getBucketOperationProgress();
12509
- const progress = {};
12510
- let invalidated = false;
12511
- for (const bucket of checkpoint.buckets) {
12512
- const savedProgress = localProgress[bucket.bucket];
12513
- const atLast = savedProgress?.atLast ?? 0;
12514
- const sinceLast = savedProgress?.sinceLast ?? 0;
12515
- progress[bucket.bucket] = {
12516
- // The fallback priority doesn't matter here, but 3 is the one newer versions of the sync service
12517
- // will use by default.
12518
- priority: bucket.priority ?? 3,
12519
- at_last: atLast,
12520
- since_last: sinceLast,
12521
- target_count: bucket.count ?? 0
12522
- };
12523
- if (bucket.count != null && bucket.count < atLast + sinceLast) {
12524
- // Either due to a defrag / sync rule deploy or a compaction operation, the size
12525
- // of the bucket shrank so much that the local ops exceed the ops in the updated
12526
- // bucket. We can't prossibly report progress in this case (it would overshoot 100%).
12527
- invalidated = true;
12528
- }
12529
- }
12530
- if (invalidated) {
12531
- for (const bucket in progress) {
12532
- const bucketProgress = progress[bucket];
12533
- bucketProgress.at_last = 0;
12534
- bucketProgress.since_last = 0;
12535
- }
12536
- }
12537
- this.updateSyncStatus({
12538
- dataFlow: {
12539
- downloading: true,
12540
- downloadProgress: progress
12541
- }
12542
- });
12543
- }
12544
- async applyCheckpoint(checkpoint) {
12545
- let result = await this.options.adapter.syncLocalDatabase(checkpoint);
12546
- if (!result.checkpointValid) {
12547
- this.logger.debug(`Checksum mismatch in checkpoint ${checkpoint.last_op_id}, will reconnect`);
12548
- // This means checksums failed. Start again with a new checkpoint.
12549
- // TODO: better back-off
12550
- await new Promise((resolve) => setTimeout(resolve, 50));
12551
- return { applied: false, endIteration: true };
12552
- }
12553
- else if (!result.ready) {
12554
- this.logger.debug(`Could not apply checkpoint ${checkpoint.last_op_id} due to local data. We will retry applying the checkpoint after that upload is completed.`);
12555
- return { applied: false, endIteration: false };
12556
- }
12557
- this.logger.debug(`Applied checkpoint ${checkpoint.last_op_id}`, checkpoint);
12558
- this.updateSyncStatus({
12559
- connected: true,
12560
- lastSyncedAt: new Date(),
12561
- dataFlow: {
12562
- downloading: false,
12563
- downloadProgress: null,
12564
- downloadError: undefined
12565
- }
12566
- });
12567
- return { applied: true, endIteration: false };
12568
- }
12569
12109
  updateSyncStatus(options) {
12570
12110
  const updatedStatus = new SyncStatus({
12571
12111
  connected: options.connected ?? this.syncStatus.connected,
@@ -14104,14 +13644,12 @@ class SqliteBucketStorage extends BaseObserver {
14104
13644
  db;
14105
13645
  logger;
14106
13646
  tableNames;
14107
- _hasCompletedSync;
14108
13647
  updateListener;
14109
13648
  _clientId;
14110
13649
  constructor(db, logger = Logger.get('SqliteBucketStorage')) {
14111
13650
  super();
14112
13651
  this.db = db;
14113
13652
  this.logger = logger;
14114
- this._hasCompletedSync = false;
14115
13653
  this.tableNames = new Set();
14116
13654
  this.updateListener = db.registerListener({
14117
13655
  tablesUpdated: (update) => {
@@ -14123,7 +13661,6 @@ class SqliteBucketStorage extends BaseObserver {
14123
13661
  });
14124
13662
  }
14125
13663
  async init() {
14126
- this._hasCompletedSync = false;
14127
13664
  const existingTableRows = await this.db.getAll(`SELECT name FROM sqlite_master WHERE type='table' AND name GLOB 'ps_data_*'`);
14128
13665
  for (const row of existingTableRows ?? []) {
14129
13666
  this.tableNames.add(row.name);
@@ -14145,156 +13682,6 @@ class SqliteBucketStorage extends BaseObserver {
14145
13682
  getMaxOpId() {
14146
13683
  return MAX_OP_ID;
14147
13684
  }
14148
- /**
14149
- * Reset any caches.
14150
- */
14151
- startSession() { }
14152
- async getBucketStates() {
14153
- const result = await this.db.getAll("SELECT name as bucket, cast(last_op as TEXT) as op_id FROM ps_buckets WHERE pending_delete = 0 AND name != '$local'");
14154
- return result;
14155
- }
14156
- async getBucketOperationProgress() {
14157
- const rows = await this.db.getAll('SELECT name, count_at_last, count_since_last FROM ps_buckets');
14158
- return Object.fromEntries(rows.map((r) => [r.name, { atLast: r.count_at_last, sinceLast: r.count_since_last }]));
14159
- }
14160
- async saveSyncData(batch, fixedKeyFormat = false) {
14161
- await this.writeTransaction(async (tx) => {
14162
- for (const b of batch.buckets) {
14163
- await tx.execute('INSERT INTO powersync_operations(op, data) VALUES(?, ?)', [
14164
- 'save',
14165
- JSON.stringify({ buckets: [b.toJSON(fixedKeyFormat)] })
14166
- ]);
14167
- this.logger.debug(`Saved batch of data for bucket: ${b.bucket}, operations: ${b.data.length}`);
14168
- }
14169
- });
14170
- }
14171
- async removeBuckets(buckets) {
14172
- for (const bucket of buckets) {
14173
- await this.deleteBucket(bucket);
14174
- }
14175
- }
14176
- /**
14177
- * Mark a bucket for deletion.
14178
- */
14179
- async deleteBucket(bucket) {
14180
- await this.writeTransaction(async (tx) => {
14181
- await tx.execute('INSERT INTO powersync_operations(op, data) VALUES(?, ?)', ['delete_bucket', bucket]);
14182
- });
14183
- this.logger.debug(`Done deleting bucket ${bucket}`);
14184
- }
14185
- async hasCompletedSync() {
14186
- if (this._hasCompletedSync) {
14187
- return true;
14188
- }
14189
- const r = await this.db.get(`SELECT powersync_last_synced_at() as synced_at`);
14190
- const completed = r.synced_at != null;
14191
- if (completed) {
14192
- this._hasCompletedSync = true;
14193
- }
14194
- return completed;
14195
- }
14196
- async syncLocalDatabase(checkpoint, priority) {
14197
- const r = await this.validateChecksums(checkpoint, priority);
14198
- if (!r.checkpointValid) {
14199
- this.logger.error('Checksums failed for', r.checkpointFailures);
14200
- for (const b of r.checkpointFailures ?? []) {
14201
- await this.deleteBucket(b);
14202
- }
14203
- return { ready: false, checkpointValid: false, checkpointFailures: r.checkpointFailures };
14204
- }
14205
- if (priority == null) {
14206
- this.logger.debug(`Validated checksums checkpoint ${checkpoint.last_op_id}`);
14207
- }
14208
- else {
14209
- this.logger.debug(`Validated checksums for partial checkpoint ${checkpoint.last_op_id}, priority ${priority}`);
14210
- }
14211
- let buckets = checkpoint.buckets;
14212
- if (priority !== undefined) {
14213
- buckets = buckets.filter((b) => hasMatchingPriority(priority, b));
14214
- }
14215
- const bucketNames = buckets.map((b) => b.bucket);
14216
- await this.writeTransaction(async (tx) => {
14217
- await tx.execute(`UPDATE ps_buckets SET last_op = ? WHERE name IN (SELECT json_each.value FROM json_each(?))`, [
14218
- checkpoint.last_op_id,
14219
- JSON.stringify(bucketNames)
14220
- ]);
14221
- if (priority == null && checkpoint.write_checkpoint) {
14222
- await tx.execute("UPDATE ps_buckets SET last_op = ? WHERE name = '$local'", [checkpoint.write_checkpoint]);
14223
- }
14224
- });
14225
- const valid = await this.updateObjectsFromBuckets(checkpoint, priority);
14226
- if (!valid) {
14227
- return { ready: false, checkpointValid: true };
14228
- }
14229
- return {
14230
- ready: true,
14231
- checkpointValid: true
14232
- };
14233
- }
14234
- /**
14235
- * Atomically update the local state to the current checkpoint.
14236
- *
14237
- * This includes creating new tables, dropping old tables, and copying data over from the oplog.
14238
- */
14239
- async updateObjectsFromBuckets(checkpoint, priority) {
14240
- let arg = '';
14241
- if (priority !== undefined) {
14242
- const affectedBuckets = [];
14243
- for (const desc of checkpoint.buckets) {
14244
- if (hasMatchingPriority(priority, desc)) {
14245
- affectedBuckets.push(desc.bucket);
14246
- }
14247
- }
14248
- arg = JSON.stringify({ priority, buckets: affectedBuckets });
14249
- }
14250
- return this.writeTransaction(async (tx) => {
14251
- const { insertId: result } = await tx.execute('INSERT INTO powersync_operations(op, data) VALUES(?, ?)', [
14252
- 'sync_local',
14253
- arg
14254
- ]);
14255
- if (result == 1) {
14256
- if (priority == null) {
14257
- const bucketToCount = Object.fromEntries(checkpoint.buckets.map((b) => [b.bucket, b.count]));
14258
- // The two parameters could be replaced with one, but: https://github.com/powersync-ja/better-sqlite3/pull/6
14259
- const jsonBucketCount = JSON.stringify(bucketToCount);
14260
- await tx.execute("UPDATE ps_buckets SET count_since_last = 0, count_at_last = ?->name WHERE name != '$local' AND ?->name IS NOT NULL", [jsonBucketCount, jsonBucketCount]);
14261
- }
14262
- return true;
14263
- }
14264
- else {
14265
- return false;
14266
- }
14267
- });
14268
- }
14269
- async validateChecksums(checkpoint, priority) {
14270
- if (priority !== undefined) {
14271
- // Only validate the buckets within the priority we care about
14272
- const newBuckets = checkpoint.buckets.filter((cs) => hasMatchingPriority(priority, cs));
14273
- checkpoint = { ...checkpoint, buckets: newBuckets };
14274
- }
14275
- const rs = await this.db.execute('SELECT powersync_validate_checkpoint(?) as result', [
14276
- JSON.stringify({ ...checkpoint })
14277
- ]);
14278
- const resultItem = rs.rows?.item(0);
14279
- if (!resultItem) {
14280
- return {
14281
- checkpointValid: false,
14282
- ready: false,
14283
- checkpointFailures: []
14284
- };
14285
- }
14286
- const result = JSON.parse(resultItem['result']);
14287
- if (result['valid']) {
14288
- return { ready: true, checkpointValid: true };
14289
- }
14290
- else {
14291
- return {
14292
- checkpointValid: false,
14293
- ready: false,
14294
- checkpointFailures: result['failed_buckets']
14295
- };
14296
- }
14297
- }
14298
13685
  async updateLocalTarget(cb) {
14299
13686
  const rs1 = await this.db.getAll("SELECT target_op FROM ps_buckets WHERE name = '$local' AND target_op = CAST(? as INTEGER)", [MAX_OP_ID]);
14300
13687
  if (!rs1.length) {
@@ -14385,12 +13772,6 @@ class SqliteBucketStorage extends BaseObserver {
14385
13772
  async writeTransaction(callback, options) {
14386
13773
  return this.db.writeTransaction(callback, options);
14387
13774
  }
14388
- /**
14389
- * Set a target checkpoint.
14390
- */
14391
- async setTargetCheckpoint(checkpoint) {
14392
- // No-op for now
14393
- }
14394
13775
  async control(op, payload) {
14395
13776
  return await this.writeTransaction(async (tx) => {
14396
13777
  const [[raw]] = await tx.executeRaw('SELECT powersync_control(?, ?)', [op, payload]);
@@ -14414,20 +13795,6 @@ class SqliteBucketStorage extends BaseObserver {
14414
13795
  }
14415
13796
  static _subkeyMigrationKey = 'powersync_js_migrated_subkeys';
14416
13797
  }
14417
- function hasMatchingPriority(priority, bucket) {
14418
- return bucket.priority != null && bucket.priority <= priority;
14419
- }
14420
-
14421
- // TODO JSON
14422
- class SyncDataBatch {
14423
- buckets;
14424
- static fromJSON(json) {
14425
- return new SyncDataBatch(json.buckets.map((bucket) => SyncDataBucket.fromRow(bucket)));
14426
- }
14427
- constructor(buckets) {
14428
- this.buckets = buckets;
14429
- }
14430
- }
14431
13798
 
14432
13799
  /**
14433
13800
  * Thrown when an underlying database connection is closed.
@@ -14685,5 +14052,5 @@ const parseQuery = (query, parameters) => {
14685
14052
  return { sqlStatement, parameters: parameters };
14686
14053
  };
14687
14054
 
14688
- export { ATTACHMENT_TABLE, AbortOperation, AbstractPowerSyncDatabase, AbstractPowerSyncDatabaseOpenFactory, AbstractQueryProcessor, AbstractRemote, AbstractStreamingSyncImplementation, ArrayComparator, AttachmentContext, AttachmentQueue, AttachmentService, AttachmentState, AttachmentTable, BaseObserver, Column, ColumnType, ConnectionClosedError, ConnectionManager, ControlledExecutor, CrudBatch, CrudEntry, CrudTransaction, DBAdapterDefaultMixin, DBGetUtilsDefaultMixin, DEFAULT_CRUD_BATCH_LIMIT, DEFAULT_CRUD_UPLOAD_THROTTLE_MS, DEFAULT_INDEX_COLUMN_OPTIONS, DEFAULT_INDEX_OPTIONS, DEFAULT_LOCK_TIMEOUT_MS, DEFAULT_POWERSYNC_CLOSE_OPTIONS, DEFAULT_POWERSYNC_DB_OPTIONS, DEFAULT_REMOTE_LOGGER, DEFAULT_REMOTE_OPTIONS, DEFAULT_RETRY_DELAY_MS, DEFAULT_ROW_COMPARATOR, DEFAULT_STREAMING_SYNC_OPTIONS, DEFAULT_STREAM_CONNECTION_OPTIONS, DEFAULT_SYNC_CLIENT_IMPLEMENTATION, DEFAULT_TABLE_OPTIONS, DEFAULT_WATCH_QUERY_OPTIONS, DEFAULT_WATCH_THROTTLE_MS, DiffTriggerOperation, DifferentialQueryProcessor, EMPTY_DIFFERENTIAL, EncodingType, FalsyComparator, FetchImplementationProvider, FetchStrategy, GetAllQuery, Index, IndexedColumn, InvalidSQLCharacters, LockType, LogLevel, MAX_AMOUNT_OF_COLUMNS, MAX_OP_ID, MEMORY_TRIGGER_CLAIM_MANAGER, Mutex, OnChangeQueryProcessor, OpType, OpTypeEnum, OplogEntry, PSInternalTable, PowerSyncControlCommand, RowUpdateType, Schema, Semaphore, SqliteBucketStorage, SyncClientImplementation, SyncDataBatch, SyncDataBucket, SyncProgress, SyncStatus, SyncStreamConnectionMethod, SyncingService, Table, TableV2, TriggerManagerImpl, UpdateType, UploadQueueStats, WatchedQueryListenerEvent, attachmentFromSql, column, compilableQueryWatch, createBaseLogger, createLogger, extractTableUpdates, isBatchedUpdateNotification, isContinueCheckpointRequest, isDBAdapter, isPowerSyncDatabaseOptionsWithSettings, isSQLOpenFactory, isSQLOpenOptions, isStreamingKeepalive, isStreamingSyncCheckpoint, isStreamingSyncCheckpointComplete, isStreamingSyncCheckpointDiff, 
isStreamingSyncCheckpointPartiallyComplete, isStreamingSyncData, isSyncNewCheckpointRequest, parseQuery, runOnSchemaChange, sanitizeSQL, sanitizeUUID, timeoutSignal };
14055
+ export { ATTACHMENT_TABLE, AbortOperation, AbstractPowerSyncDatabase, AbstractPowerSyncDatabaseOpenFactory, AbstractQueryProcessor, AbstractRemote, AbstractStreamingSyncImplementation, ArrayComparator, AttachmentContext, AttachmentQueue, AttachmentService, AttachmentState, AttachmentTable, BaseObserver, Column, ColumnType, ConnectionClosedError, ConnectionManager, ControlledExecutor, CrudBatch, CrudEntry, CrudTransaction, DBAdapterDefaultMixin, DBGetUtilsDefaultMixin, DEFAULT_CRUD_BATCH_LIMIT, DEFAULT_CRUD_UPLOAD_THROTTLE_MS, DEFAULT_INDEX_COLUMN_OPTIONS, DEFAULT_INDEX_OPTIONS, DEFAULT_LOCK_TIMEOUT_MS, DEFAULT_POWERSYNC_CLOSE_OPTIONS, DEFAULT_POWERSYNC_DB_OPTIONS, DEFAULT_REMOTE_LOGGER, DEFAULT_REMOTE_OPTIONS, DEFAULT_RETRY_DELAY_MS, DEFAULT_ROW_COMPARATOR, DEFAULT_STREAMING_SYNC_OPTIONS, DEFAULT_STREAM_CONNECTION_OPTIONS, DEFAULT_SYNC_CLIENT_IMPLEMENTATION, DEFAULT_TABLE_OPTIONS, DEFAULT_WATCH_QUERY_OPTIONS, DEFAULT_WATCH_THROTTLE_MS, DiffTriggerOperation, DifferentialQueryProcessor, EMPTY_DIFFERENTIAL, EncodingType, FalsyComparator, FetchImplementationProvider, FetchStrategy, GetAllQuery, Index, IndexedColumn, InvalidSQLCharacters, LockType, LogLevel, MAX_AMOUNT_OF_COLUMNS, MAX_OP_ID, MEMORY_TRIGGER_CLAIM_MANAGER, Mutex, OnChangeQueryProcessor, PSInternalTable, PowerSyncControlCommand, RowUpdateType, Schema, Semaphore, SqliteBucketStorage, SyncClientImplementation, SyncProgress, SyncStatus, SyncStreamConnectionMethod, SyncingService, Table, TableV2, TriggerManagerImpl, UpdateType, UploadQueueStats, WatchedQueryListenerEvent, attachmentFromSql, column, compilableQueryWatch, createBaseLogger, createLogger, extractTableUpdates, isBatchedUpdateNotification, isDBAdapter, isPowerSyncDatabaseOptionsWithSettings, isSQLOpenFactory, isSQLOpenOptions, parseQuery, runOnSchemaChange, sanitizeSQL, sanitizeUUID, timeoutSignal };
14689
14056
  //# sourceMappingURL=bundle.mjs.map