@powersync/common 0.0.0-dev-20260503073249 → 0.0.0-dev-20260504100448

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57) hide show
  1. package/dist/bundle.cjs +14 -659
  2. package/dist/bundle.cjs.map +1 -1
  3. package/dist/bundle.mjs +15 -648
  4. package/dist/bundle.mjs.map +1 -1
  5. package/dist/bundle.node.cjs +14 -659
  6. package/dist/bundle.node.cjs.map +1 -1
  7. package/dist/bundle.node.mjs +15 -648
  8. package/dist/bundle.node.mjs.map +1 -1
  9. package/dist/index.d.cts +19 -358
  10. package/legacy/sync_protocol.d.ts +103 -0
  11. package/lib/client/sync/bucket/BucketStorageAdapter.d.ts +1 -63
  12. package/lib/client/sync/bucket/BucketStorageAdapter.js.map +1 -1
  13. package/lib/client/sync/bucket/SqliteBucketStorage.d.ts +1 -28
  14. package/lib/client/sync/bucket/SqliteBucketStorage.js +0 -162
  15. package/lib/client/sync/bucket/SqliteBucketStorage.js.map +1 -1
  16. package/lib/client/sync/stream/AbstractRemote.d.ts +2 -12
  17. package/lib/client/sync/stream/AbstractRemote.js +3 -13
  18. package/lib/client/sync/stream/AbstractRemote.js.map +1 -1
  19. package/lib/client/sync/stream/AbstractStreamingSyncImplementation.d.ts +9 -35
  20. package/lib/client/sync/stream/AbstractStreamingSyncImplementation.js +11 -338
  21. package/lib/client/sync/stream/AbstractStreamingSyncImplementation.js.map +1 -1
  22. package/lib/client/sync/stream/JsonValue.d.ts +7 -0
  23. package/lib/client/sync/stream/JsonValue.js +2 -0
  24. package/lib/client/sync/stream/JsonValue.js.map +1 -0
  25. package/lib/client/sync/stream/core-instruction.d.ts +1 -2
  26. package/lib/client/sync/stream/core-instruction.js.map +1 -1
  27. package/lib/index.d.ts +1 -5
  28. package/lib/index.js +1 -5
  29. package/lib/index.js.map +1 -1
  30. package/package.json +7 -4
  31. package/src/client/sync/bucket/BucketStorageAdapter.ts +1 -70
  32. package/src/client/sync/bucket/SqliteBucketStorage.ts +1 -197
  33. package/src/client/sync/stream/AbstractRemote.ts +5 -27
  34. package/src/client/sync/stream/AbstractStreamingSyncImplementation.ts +22 -402
  35. package/src/client/sync/stream/JsonValue.ts +8 -0
  36. package/src/client/sync/stream/core-instruction.ts +1 -2
  37. package/src/index.ts +1 -5
  38. package/lib/client/sync/bucket/OpType.d.ts +0 -16
  39. package/lib/client/sync/bucket/OpType.js +0 -23
  40. package/lib/client/sync/bucket/OpType.js.map +0 -1
  41. package/lib/client/sync/bucket/OplogEntry.d.ts +0 -23
  42. package/lib/client/sync/bucket/OplogEntry.js +0 -36
  43. package/lib/client/sync/bucket/OplogEntry.js.map +0 -1
  44. package/lib/client/sync/bucket/SyncDataBatch.d.ts +0 -6
  45. package/lib/client/sync/bucket/SyncDataBatch.js +0 -12
  46. package/lib/client/sync/bucket/SyncDataBatch.js.map +0 -1
  47. package/lib/client/sync/bucket/SyncDataBucket.d.ts +0 -40
  48. package/lib/client/sync/bucket/SyncDataBucket.js +0 -40
  49. package/lib/client/sync/bucket/SyncDataBucket.js.map +0 -1
  50. package/lib/client/sync/stream/streaming-sync-types.d.ts +0 -143
  51. package/lib/client/sync/stream/streaming-sync-types.js +0 -26
  52. package/lib/client/sync/stream/streaming-sync-types.js.map +0 -1
  53. package/src/client/sync/bucket/OpType.ts +0 -23
  54. package/src/client/sync/bucket/OplogEntry.ts +0 -50
  55. package/src/client/sync/bucket/SyncDataBatch.ts +0 -11
  56. package/src/client/sync/bucket/SyncDataBucket.ts +0 -49
  57. package/src/client/sync/stream/streaming-sync-types.ts +0 -210
@@ -1,35 +1,15 @@
1
1
  import Logger, { ILogger } from 'js-logger';
2
2
 
3
- import { InternalProgressInformation } from '../../../db/crud/SyncProgress.js';
4
3
  import { SyncStatus, SyncStatusOptions } from '../../../db/crud/SyncStatus.js';
5
4
  import { AbortOperation } from '../../../utils/AbortOperation.js';
6
5
  import { BaseListener, BaseObserver, BaseObserverInterface, Disposable } from '../../../utils/BaseObserver.js';
7
6
  import { throttleLeadingTrailing } from '../../../utils/async.js';
8
- import {
9
- BucketChecksum,
10
- BucketDescription,
11
- BucketStorageAdapter,
12
- Checkpoint,
13
- PowerSyncControlCommand
14
- } from '../bucket/BucketStorageAdapter.js';
7
+ import { BucketStorageAdapter, PowerSyncControlCommand } from '../bucket/BucketStorageAdapter.js';
15
8
  import { CrudEntry } from '../bucket/CrudEntry.js';
16
- import { SyncDataBucket } from '../bucket/SyncDataBucket.js';
17
9
  import { AbstractRemote, FetchStrategy, SyncStreamOptions } from './AbstractRemote.js';
18
10
  import { EstablishSyncStream, Instruction, coreStatusToJs } from './core-instruction.js';
19
- import {
20
- BucketRequest,
21
- StreamingSyncLine,
22
- StreamingSyncLineOrCrudUploadComplete,
23
- StreamingSyncRequestParameterType,
24
- isStreamingKeepalive,
25
- isStreamingSyncCheckpoint,
26
- isStreamingSyncCheckpointComplete,
27
- isStreamingSyncCheckpointDiff,
28
- isStreamingSyncCheckpointPartiallyComplete,
29
- isStreamingSyncData
30
- } from './streaming-sync-types.js';
31
11
  import { injectable, InjectableIterator, map, SimpleAsyncIterator } from '../../../utils/stream_transform.js';
32
- import type { BSON } from 'bson';
12
+ import { StreamingSyncRequestParameterType } from './JsonValue.js';
33
13
 
34
14
  export enum LockType {
35
15
  CRUD = 'crud',
@@ -42,35 +22,21 @@ export enum SyncStreamConnectionMethod {
42
22
  }
43
23
 
44
24
  export enum SyncClientImplementation {
45
- /**
46
- * Decodes and handles sync lines received from the sync service in JavaScript.
47
- *
48
- * This is the default option.
49
- *
50
- * @deprecated We recommend the {@link RUST} client implementation for all apps. If you have issues with
51
- * the Rust client, please file an issue or reach out to us. The JavaScript client will be removed in a future
52
- * version of the PowerSync SDK.
53
- */
54
- JAVASCRIPT = 'js',
55
25
  /**
56
26
  * This implementation offloads the sync line decoding and handling into the PowerSync
57
27
  * core extension.
58
28
  *
59
- * This option is more performant than the {@link JAVASCRIPT} client, enabled by default and the
60
- * recommended client implementation for all apps.
29
+ * This is the only option, as an older JavaScript client implementation has been removed from the SDK.
61
30
  *
62
31
  * ## Compatibility warning
63
32
  *
64
33
  * The Rust sync client stores sync data in a format that is slightly different than the one used
65
- * by the old {@link JAVASCRIPT} implementation. When adopting the {@link RUST} client on existing
66
- * databases, the PowerSync SDK will migrate the format automatically.
67
- * Further, the {@link JAVASCRIPT} client in recent versions of the PowerSync JS SDK (starting from
68
- * the version introducing {@link RUST} as an option) also supports the new format, so you can switch
69
- * back to {@link JAVASCRIPT} later.
34
+ * by the old JavaScript client. When adopting the {@link RUST} client on existing databases, the PowerSync SDK will
35
+ * migrate the format automatically.
70
36
  *
71
- * __However__: Upgrading the SDK version, then adopting {@link RUST} as a sync client and later
72
- * downgrading the SDK to an older version (necessarily using the JavaScript-based implementation then)
73
- * can lead to sync issues.
37
+ * SDK versions supporting both the JavaScript and the Rust client support both formats with the JavaScript client
38
+ * implementation. However, downgrading to an SDK version that only supports the JavaScript client would not be
39
+ * possible anymore. Problematic SDK versions have been released before 2025-06-09.
74
40
  */
75
41
  RUST = 'rust'
76
42
  }
@@ -130,11 +96,7 @@ export interface BaseConnectionOptions {
130
96
  appMetadata?: Record<string, string>;
131
97
 
132
98
  /**
133
- * Whether to use a JavaScript implementation to handle received sync lines from the sync
134
- * service, or whether this work should be offloaded to the PowerSync core extension.
135
- *
136
- * This defaults to the JavaScript implementation ({@link SyncClientImplementation.JAVASCRIPT})
137
- * since the ({@link SyncClientImplementation.RUST}) implementation is experimental at the moment.
99
+ * @deprecated The Rust sync client is used unconditionally, so this option can't be configured.
138
100
  */
139
101
  clientImplementation?: SyncClientImplementation;
140
102
 
@@ -200,9 +162,7 @@ export interface StreamingSyncImplementation
200
162
  */
201
163
  disconnect(): Promise<void>;
202
164
  getWriteCheckpoint: () => Promise<string>;
203
- hasCompletedSync: () => Promise<boolean>;
204
165
  isConnected: boolean;
205
- lastSyncedAt?: Date;
206
166
  syncStatus: SyncStatus;
207
167
  triggerCrudUpload: () => void;
208
168
  waitForReady(): Promise<void>;
@@ -247,12 +207,11 @@ export abstract class AbstractStreamingSyncImplementation
247
207
  extends BaseObserver<StreamingSyncImplementationListener>
248
208
  implements StreamingSyncImplementation
249
209
  {
250
- protected _lastSyncedAt: Date | null;
251
210
  protected options: AbstractStreamingSyncImplementationOptions;
252
211
  protected abortController: AbortController | null;
253
212
  // In rare cases, mostly for tests, uploads can be triggered without being properly connected.
254
213
  // This allows ensuring that all upload processes can be aborted.
255
- protected uploadAbortController: AbortController | null;
214
+ protected uploadAbortController: AbortController | undefined;
256
215
  protected crudUpdateListener?: () => void;
257
216
  protected streamingSyncPromise?: Promise<void>;
258
217
  protected logger: ILogger;
@@ -304,7 +263,7 @@ export abstract class AbstractStreamingSyncImplementation
304
263
  * Match only the partial status options provided in the
305
264
  * matching status
306
265
  */
307
- const matchPartialObject = (compA: object, compB: object) => {
266
+ const matchPartialObject = (compA: object, compB: any): any => {
308
267
  return Object.entries(compA).every(([key, value]) => {
309
268
  const comparisonBValue = compB[key];
310
269
  if (typeof value == 'object' && typeof comparisonBValue == 'object') {
@@ -354,10 +313,6 @@ export abstract class AbstractStreamingSyncImplementation
354
313
 
355
314
  abstract obtainLock<T>(lockOptions: LockOptions<T>): Promise<T>;
356
315
 
357
- async hasCompletedSync() {
358
- return this.options.adapter.hasCompletedSync();
359
- }
360
-
361
316
  async getWriteCheckpoint(): Promise<string> {
362
317
  const clientId = await this.options.adapter.getClientId();
363
318
  let path = `/write-checkpoint2.json?client_id=${clientId}`;
@@ -428,7 +383,7 @@ The next upload iteration will be delayed.`);
428
383
  this.updateSyncStatus({
429
384
  dataFlow: {
430
385
  uploading: false,
431
- uploadError: ex
386
+ uploadError: ex as Error
432
387
  }
433
388
  });
434
389
  await this.delayRetry(controller.signal);
@@ -437,7 +392,7 @@ The next upload iteration will be delayed.`);
437
392
  break;
438
393
  }
439
394
  this.logger.debug(
440
- `Caught exception when uploading. Upload will retry after a delay. Exception: ${ex.message}`
395
+ `Caught exception when uploading. Upload will retry after a delay. Exception: ${(ex as Error).message}`
441
396
  );
442
397
  } finally {
443
398
  this.updateSyncStatus({
@@ -447,7 +402,7 @@ The next upload iteration will be delayed.`);
447
402
  });
448
403
  }
449
404
  }
450
- this.uploadAbortController = null;
405
+ this.uploadAbortController = undefined;
451
406
  }
452
407
  });
453
408
  }
@@ -584,7 +539,7 @@ The next upload iteration will be delayed.`);
584
539
 
585
540
  this.updateSyncStatus({
586
541
  dataFlow: {
587
- downloadError: ex
542
+ downloadError: ex as Error
588
543
  }
589
544
  });
590
545
  } finally {
@@ -624,20 +579,6 @@ The next upload iteration will be delayed.`);
624
579
  this.handleActiveStreamsChange?.();
625
580
  }
626
581
 
627
- private async collectLocalBucketState(): Promise<[BucketRequest[], Map<string, BucketDescription | null>]> {
628
- const bucketEntries = await this.options.adapter.getBucketStates();
629
- const req: BucketRequest[] = bucketEntries.map((entry) => ({
630
- name: entry.bucket,
631
- after: entry.op_id
632
- }));
633
- const localDescriptions = new Map<string, BucketDescription | null>();
634
- for (const entry of bucketEntries) {
635
- localDescriptions.set(entry.bucket, null);
636
- }
637
-
638
- return [req, localDescriptions];
639
- }
640
-
641
582
  /**
642
583
  * Older versions of the JS SDK used to encode subkeys as JSON in {@link OplogEntry.toJSON}.
643
584
  * Because subkeys are always strings, this leads to quotes being added around them in `ps_oplog`.
@@ -688,13 +629,8 @@ The next upload iteration will be delayed.`);
688
629
  const clientImplementation = resolvedOptions.clientImplementation;
689
630
  this.updateSyncStatus({ clientImplementation });
690
631
 
691
- if (clientImplementation == SyncClientImplementation.JAVASCRIPT) {
692
- await this.legacyStreamingSyncIteration(signal, resolvedOptions);
693
- return null;
694
- } else {
695
- await this.requireKeyFormat(true);
696
- return await this.rustSyncIteration(signal, resolvedOptions);
697
- }
632
+ await this.requireKeyFormat(true);
633
+ return await this.rustSyncIteration(signal, resolvedOptions);
698
634
  }
699
635
  });
700
636
  }
@@ -702,260 +638,18 @@ The next upload iteration will be delayed.`);
702
638
  private async receiveSyncLines(data: {
703
639
  options: SyncStreamOptions;
704
640
  connection: RequiredPowerSyncConnectionOptions;
705
- bson?: typeof BSON;
706
641
  }): Promise<SimpleAsyncIterator<Uint8Array | string>> {
707
- const { options, connection, bson } = data;
642
+ const { options, connection } = data;
708
643
  const remote = this.options.remote;
709
644
 
710
645
  if (connection.connectionMethod == SyncStreamConnectionMethod.HTTP) {
711
646
  return await remote.fetchStream(options);
712
647
  } else {
713
- return await this.options.remote.socketStreamRaw(
714
- {
715
- ...options,
716
- ...{ fetchStrategy: connection.fetchStrategy }
717
- },
718
- bson
719
- );
720
- }
721
- }
722
-
723
- private async legacyStreamingSyncIteration(signal: AbortSignal, resolvedOptions: RequiredPowerSyncConnectionOptions) {
724
- const rawTables = resolvedOptions.serializedSchema?.raw_tables;
725
- if (rawTables != null && rawTables.length) {
726
- this.logger.warn('Raw tables require the Rust-based sync client. The JS client will ignore them.');
727
- }
728
- if (this.activeStreams.length) {
729
- this.logger.error('Sync streams require `clientImplementation: SyncClientImplementation.RUST` when connecting.');
730
- }
731
-
732
- this.logger.debug('Streaming sync iteration started');
733
- this.options.adapter.startSession();
734
- let [req, bucketMap] = await this.collectLocalBucketState();
735
-
736
- let targetCheckpoint: Checkpoint | null = null;
737
- // A checkpoint that has been validated but not applied (e.g. due to pending local writes)
738
- let pendingValidatedCheckpoint: Checkpoint | null = null;
739
-
740
- const clientId = await this.options.adapter.getClientId();
741
- const usingFixedKeyFormat = await this.requireKeyFormat(false);
742
-
743
- this.logger.debug('Requesting stream from server');
744
-
745
- const syncOptions: SyncStreamOptions = {
746
- path: '/sync/stream',
747
- abortSignal: signal,
748
- data: {
749
- buckets: req,
750
- include_checksum: true,
751
- raw_data: true,
752
- parameters: resolvedOptions.params,
753
- app_metadata: resolvedOptions.appMetadata,
754
- client_id: clientId
755
- }
756
- };
757
-
758
- const bson = await this.options.remote.getBSON();
759
- const source = await this.receiveSyncLines({
760
- options: syncOptions,
761
- connection: resolvedOptions,
762
- bson
763
- });
764
- const stream: InjectableIterator<StreamingSyncLineOrCrudUploadComplete> = injectable(
765
- map(source, (line) => {
766
- if (typeof line == 'string') {
767
- return JSON.parse(line) as StreamingSyncLine;
768
- } else {
769
- return bson.deserialize(line) as StreamingSyncLine;
770
- }
771
- })
772
- );
773
-
774
- this.logger.debug('Stream established. Processing events');
775
-
776
- this.notifyCompletedUploads = () => {
777
- stream.inject({ crud_upload_completed: null });
778
- };
779
-
780
- while (true) {
781
- const { value: line, done } = await stream.next();
782
- if (done) {
783
- // The stream has closed while waiting
784
- return;
785
- }
786
-
787
- if ('crud_upload_completed' in line) {
788
- if (pendingValidatedCheckpoint != null) {
789
- const { applied, endIteration } = await this.applyCheckpoint(pendingValidatedCheckpoint);
790
- if (applied) {
791
- pendingValidatedCheckpoint = null;
792
- } else if (endIteration) {
793
- break;
794
- }
795
- }
796
-
797
- continue;
798
- }
799
-
800
- // A connection is active and messages are being received
801
- if (!this.syncStatus.connected) {
802
- // There is a connection now
803
- Promise.resolve().then(() => this.triggerCrudUpload());
804
- this.updateSyncStatus({
805
- connected: true
806
- });
807
- }
808
-
809
- if (isStreamingSyncCheckpoint(line)) {
810
- targetCheckpoint = line.checkpoint;
811
- // New checkpoint - existing validated checkpoint is no longer valid
812
- pendingValidatedCheckpoint = null;
813
- const bucketsToDelete = new Set<string>(bucketMap.keys());
814
- const newBuckets = new Map<string, BucketDescription>();
815
- for (const checksum of line.checkpoint.buckets) {
816
- newBuckets.set(checksum.bucket, {
817
- name: checksum.bucket,
818
- priority: checksum.priority ?? FALLBACK_PRIORITY
819
- });
820
- bucketsToDelete.delete(checksum.bucket);
821
- }
822
- if (bucketsToDelete.size > 0) {
823
- this.logger.debug('Removing buckets', [...bucketsToDelete]);
824
- }
825
- bucketMap = newBuckets;
826
- await this.options.adapter.removeBuckets([...bucketsToDelete]);
827
- await this.options.adapter.setTargetCheckpoint(targetCheckpoint);
828
- await this.updateSyncStatusForStartingCheckpoint(targetCheckpoint);
829
- } else if (isStreamingSyncCheckpointComplete(line)) {
830
- const result = await this.applyCheckpoint(targetCheckpoint!);
831
- if (result.endIteration) {
832
- return;
833
- } else if (!result.applied) {
834
- // "Could not apply checkpoint due to local data". We need to retry after
835
- // finishing uploads.
836
- pendingValidatedCheckpoint = targetCheckpoint;
837
- } else {
838
- // Nothing to retry later. This would likely already be null from the last
839
- // checksum or checksum_diff operation, but we make sure.
840
- pendingValidatedCheckpoint = null;
841
- }
842
- } else if (isStreamingSyncCheckpointPartiallyComplete(line)) {
843
- const priority = line.partial_checkpoint_complete.priority;
844
- this.logger.debug('Partial checkpoint complete', priority);
845
- const result = await this.options.adapter.syncLocalDatabase(targetCheckpoint!, priority);
846
- if (!result.checkpointValid) {
847
- // This means checksums failed. Start again with a new checkpoint.
848
- // TODO: better back-off
849
- await new Promise((resolve) => setTimeout(resolve, 50));
850
- return;
851
- } else if (!result.ready) {
852
- // If we have pending uploads, we can't complete new checkpoints outside of priority 0.
853
- // We'll resolve this for a complete checkpoint.
854
- } else {
855
- // We'll keep on downloading, but can report that this priority is synced now.
856
- this.logger.debug('partial checkpoint validation succeeded');
857
-
858
- // All states with a higher priority can be deleted since this partial sync includes them.
859
- const priorityStates = this.syncStatus.priorityStatusEntries.filter((s) => s.priority <= priority);
860
- priorityStates.push({
861
- priority,
862
- lastSyncedAt: new Date(),
863
- hasSynced: true
864
- });
865
-
866
- this.updateSyncStatus({
867
- connected: true,
868
- priorityStatusEntries: priorityStates
869
- });
870
- }
871
- } else if (isStreamingSyncCheckpointDiff(line)) {
872
- // TODO: It may be faster to just keep track of the diff, instead of the entire checkpoint
873
- if (targetCheckpoint == null) {
874
- throw new Error('Checkpoint diff without previous checkpoint');
875
- }
876
- // New checkpoint - existing validated checkpoint is no longer valid
877
- pendingValidatedCheckpoint = null;
878
- const diff = line.checkpoint_diff;
879
- const newBuckets = new Map<string, BucketChecksum>();
880
- for (const checksum of targetCheckpoint.buckets) {
881
- newBuckets.set(checksum.bucket, checksum);
882
- }
883
- for (const checksum of diff.updated_buckets) {
884
- newBuckets.set(checksum.bucket, checksum);
885
- }
886
- for (const bucket of diff.removed_buckets) {
887
- newBuckets.delete(bucket);
888
- }
889
-
890
- const newCheckpoint: Checkpoint = {
891
- last_op_id: diff.last_op_id,
892
- buckets: [...newBuckets.values()],
893
- write_checkpoint: diff.write_checkpoint
894
- };
895
- targetCheckpoint = newCheckpoint;
896
- await this.updateSyncStatusForStartingCheckpoint(targetCheckpoint);
897
-
898
- bucketMap = new Map();
899
- newBuckets.forEach((checksum, name) =>
900
- bucketMap.set(name, {
901
- name: checksum.bucket,
902
- priority: checksum.priority ?? FALLBACK_PRIORITY
903
- })
904
- );
905
-
906
- const bucketsToDelete = diff.removed_buckets;
907
- if (bucketsToDelete.length > 0) {
908
- this.logger.debug('Remove buckets', bucketsToDelete);
909
- }
910
- await this.options.adapter.removeBuckets(bucketsToDelete);
911
- await this.options.adapter.setTargetCheckpoint(targetCheckpoint);
912
- } else if (isStreamingSyncData(line)) {
913
- const { data } = line;
914
- const previousProgress = this.syncStatus.dataFlowStatus.downloadProgress;
915
- let updatedProgress: InternalProgressInformation | null = null;
916
- if (previousProgress) {
917
- updatedProgress = { ...previousProgress };
918
- const progressForBucket = updatedProgress[data.bucket];
919
- if (progressForBucket) {
920
- updatedProgress[data.bucket] = {
921
- ...progressForBucket,
922
- since_last: progressForBucket.since_last + data.data.length
923
- };
924
- }
925
- }
926
-
927
- this.updateSyncStatus({
928
- dataFlow: {
929
- downloading: true,
930
- downloadProgress: updatedProgress
931
- }
932
- });
933
- await this.options.adapter.saveSyncData({ buckets: [SyncDataBucket.fromRow(data)] }, usingFixedKeyFormat);
934
- } else if (isStreamingKeepalive(line)) {
935
- const remaining_seconds = line.token_expires_in;
936
- if (remaining_seconds == 0) {
937
- // Connection would be closed automatically right after this
938
- this.logger.debug('Token expiring; reconnect');
939
- /**
940
- * For a rare case where the backend connector does not update the token
941
- * (uses the same one), this should have some delay.
942
- */
943
- await this.delayRetry();
944
- return;
945
- } else if (remaining_seconds < 30) {
946
- this.logger.debug('Token will expire soon; reconnect');
947
- // Pre-emptively refresh the token
948
- this.options.remote.invalidateCredentials();
949
- return;
950
- }
951
- this.triggerCrudUpload();
952
- } else {
953
- this.logger.debug('Received unknown sync line', line);
954
- }
648
+ return await this.options.remote.socketStreamRaw({
649
+ ...options,
650
+ ...{ fetchStrategy: connection.fetchStrategy }
651
+ });
955
652
  }
956
- this.logger.debug('Stream input empty');
957
- // Connection closed. Likely due to auth issue.
958
- return;
959
653
  }
960
654
 
961
655
  private async rustSyncIteration(
@@ -1147,80 +841,6 @@ The next upload iteration will be delayed.`);
1147
841
  return { immediateRestart: hideDisconnectOnRestart };
1148
842
  }
1149
843
 
1150
- private async updateSyncStatusForStartingCheckpoint(checkpoint: Checkpoint) {
1151
- const localProgress = await this.options.adapter.getBucketOperationProgress();
1152
- const progress: InternalProgressInformation = {};
1153
- let invalidated = false;
1154
-
1155
- for (const bucket of checkpoint.buckets) {
1156
- const savedProgress = localProgress[bucket.bucket];
1157
- const atLast = savedProgress?.atLast ?? 0;
1158
- const sinceLast = savedProgress?.sinceLast ?? 0;
1159
-
1160
- progress[bucket.bucket] = {
1161
- // The fallback priority doesn't matter here, but 3 is the one newer versions of the sync service
1162
- // will use by default.
1163
- priority: bucket.priority ?? 3,
1164
- at_last: atLast,
1165
- since_last: sinceLast,
1166
- target_count: bucket.count ?? 0
1167
- };
1168
-
1169
- if (bucket.count != null && bucket.count < atLast + sinceLast) {
1170
- // Either due to a defrag / sync rule deploy or a compaction operation, the size
1171
- // of the bucket shrank so much that the local ops exceed the ops in the updated
1172
- // bucket. We can't prossibly report progress in this case (it would overshoot 100%).
1173
- invalidated = true;
1174
- }
1175
- }
1176
-
1177
- if (invalidated) {
1178
- for (const bucket in progress) {
1179
- const bucketProgress = progress[bucket];
1180
- bucketProgress.at_last = 0;
1181
- bucketProgress.since_last = 0;
1182
- }
1183
- }
1184
-
1185
- this.updateSyncStatus({
1186
- dataFlow: {
1187
- downloading: true,
1188
- downloadProgress: progress
1189
- }
1190
- });
1191
- }
1192
-
1193
- private async applyCheckpoint(checkpoint: Checkpoint) {
1194
- let result = await this.options.adapter.syncLocalDatabase(checkpoint);
1195
-
1196
- if (!result.checkpointValid) {
1197
- this.logger.debug(`Checksum mismatch in checkpoint ${checkpoint.last_op_id}, will reconnect`);
1198
- // This means checksums failed. Start again with a new checkpoint.
1199
- // TODO: better back-off
1200
- await new Promise((resolve) => setTimeout(resolve, 50));
1201
- return { applied: false, endIteration: true };
1202
- } else if (!result.ready) {
1203
- this.logger.debug(
1204
- `Could not apply checkpoint ${checkpoint.last_op_id} due to local data. We will retry applying the checkpoint after that upload is completed.`
1205
- );
1206
-
1207
- return { applied: false, endIteration: false };
1208
- }
1209
-
1210
- this.logger.debug(`Applied checkpoint ${checkpoint.last_op_id}`, checkpoint);
1211
- this.updateSyncStatus({
1212
- connected: true,
1213
- lastSyncedAt: new Date(),
1214
- dataFlow: {
1215
- downloading: false,
1216
- downloadProgress: null,
1217
- downloadError: undefined
1218
- }
1219
- });
1220
-
1221
- return { applied: true, endIteration: false };
1222
- }
1223
-
1224
844
  protected updateSyncStatus(options: SyncStatusOptions) {
1225
845
  const updatedStatus = new SyncStatus({
1226
846
  connected: options.connected ?? this.syncStatus.connected,
@@ -0,0 +1,8 @@
1
+ type JSONValue = string | number | boolean | null | undefined | JSONObject | JSONArray;
2
+
3
+ interface JSONObject {
4
+ [key: string]: JSONValue;
5
+ }
6
+ type JSONArray = JSONValue[];
7
+
8
+ export type StreamingSyncRequestParameterType = JSONValue;
@@ -1,4 +1,3 @@
1
- import { StreamingSyncRequest } from './streaming-sync-types.js';
2
1
  import * as sync_status from '../../../db/crud/SyncStatus.js';
3
2
  import { FULL_SYNC_PRIORITY } from '../../../db/crud/SyncProgress.js';
4
3
 
@@ -21,7 +20,7 @@ export interface LogLine {
21
20
  }
22
21
 
23
22
  export interface EstablishSyncStream {
24
- request: StreamingSyncRequest;
23
+ request: unknown;
25
24
  }
26
25
 
27
26
  export interface UpdateSyncStatus {
package/src/index.ts CHANGED
@@ -20,14 +20,10 @@ export * from './client/sync/bucket/BucketStorageAdapter.js';
20
20
  export * from './client/sync/bucket/CrudBatch.js';
21
21
  export { CrudEntry, OpId, UpdateType } from './client/sync/bucket/CrudEntry.js';
22
22
  export * from './client/sync/bucket/CrudTransaction.js';
23
- export * from './client/sync/bucket/OplogEntry.js';
24
- export * from './client/sync/bucket/OpType.js';
25
23
  export * from './client/sync/bucket/SqliteBucketStorage.js';
26
- export * from './client/sync/bucket/SyncDataBatch.js';
27
- export * from './client/sync/bucket/SyncDataBucket.js';
28
24
  export * from './client/sync/stream/AbstractRemote.js';
29
25
  export * from './client/sync/stream/AbstractStreamingSyncImplementation.js';
30
- export * from './client/sync/stream/streaming-sync-types.js';
26
+ export * from './client/sync/stream/JsonValue.js';
31
27
  export * from './client/sync/sync-streams.js';
32
28
 
33
29
  export * from './client/ConnectionManager.js';
@@ -1,16 +0,0 @@
1
- export declare enum OpTypeEnum {
2
- CLEAR = 1,
3
- MOVE = 2,
4
- PUT = 3,
5
- REMOVE = 4
6
- }
7
- export type OpTypeJSON = string;
8
- /**
9
- * Used internally for sync buckets.
10
- */
11
- export declare class OpType {
12
- value: OpTypeEnum;
13
- static fromJSON(jsonValue: OpTypeJSON): OpType;
14
- constructor(value: OpTypeEnum);
15
- toJSON(): string;
16
- }
@@ -1,23 +0,0 @@
1
- export var OpTypeEnum;
2
- (function (OpTypeEnum) {
3
- OpTypeEnum[OpTypeEnum["CLEAR"] = 1] = "CLEAR";
4
- OpTypeEnum[OpTypeEnum["MOVE"] = 2] = "MOVE";
5
- OpTypeEnum[OpTypeEnum["PUT"] = 3] = "PUT";
6
- OpTypeEnum[OpTypeEnum["REMOVE"] = 4] = "REMOVE";
7
- })(OpTypeEnum || (OpTypeEnum = {}));
8
- /**
9
- * Used internally for sync buckets.
10
- */
11
- export class OpType {
12
- value;
13
- static fromJSON(jsonValue) {
14
- return new OpType(OpTypeEnum[jsonValue]);
15
- }
16
- constructor(value) {
17
- this.value = value;
18
- }
19
- toJSON() {
20
- return Object.entries(OpTypeEnum).find(([, value]) => value === this.value)[0];
21
- }
22
- }
23
- //# sourceMappingURL=OpType.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"OpType.js","sourceRoot":"","sources":["../../../../src/client/sync/bucket/OpType.ts"],"names":[],"mappings":"AAAA,MAAM,CAAN,IAAY,UAKX;AALD,WAAY,UAAU;IACpB,6CAAS,CAAA;IACT,2CAAQ,CAAA;IACR,yCAAO,CAAA;IACP,+CAAU,CAAA;AACZ,CAAC,EALW,UAAU,KAAV,UAAU,QAKrB;AAID;;GAEG;AACH,MAAM,OAAO,MAAM;IAKE;IAJnB,MAAM,CAAC,QAAQ,CAAC,SAAqB;QACnC,OAAO,IAAI,MAAM,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,CAAC;IAC3C,CAAC;IAED,YAAmB,KAAiB;QAAjB,UAAK,GAAL,KAAK,CAAY;IAAG,CAAC;IAExC,MAAM;QACJ,OAAO,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,KAAK,KAAK,IAAI,CAAC,KAAK,CAAE,CAAC,CAAC,CAAC,CAAC;IAClF,CAAC;CACF"}
@@ -1,23 +0,0 @@
1
- import { OpId } from './CrudEntry.js';
2
- import { OpType, OpTypeJSON } from './OpType.js';
3
- export interface OplogEntryJSON {
4
- checksum: number;
5
- data?: string;
6
- object_id?: string;
7
- object_type?: string;
8
- op_id: string;
9
- op: OpTypeJSON;
10
- subkey?: string;
11
- }
12
- export declare class OplogEntry {
13
- op_id: OpId;
14
- op: OpType;
15
- checksum: number;
16
- subkey?: string | undefined;
17
- object_type?: string | undefined;
18
- object_id?: string | undefined;
19
- data?: string | undefined;
20
- static fromRow(row: OplogEntryJSON): OplogEntry;
21
- constructor(op_id: OpId, op: OpType, checksum: number, subkey?: string | undefined, object_type?: string | undefined, object_id?: string | undefined, data?: string | undefined);
22
- toJSON(fixedKeyEncoding?: boolean): OplogEntryJSON;
23
- }