@powersync/common 1.24.0 → 1.26.0

This diff shows the content of publicly available package versions that have been released to one of the supported registries. The information in this diff is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
@@ -10,7 +10,7 @@ import { PowerSyncBackendConnector } from './connection/PowerSyncBackendConnecto
10
10
  import { BucketStorageAdapter } from './sync/bucket/BucketStorageAdapter.js';
11
11
  import { CrudBatch } from './sync/bucket/CrudBatch.js';
12
12
  import { CrudTransaction } from './sync/bucket/CrudTransaction.js';
13
- import { type AdditionalConnectionOptions, type PowerSyncConnectionOptions, StreamingSyncImplementation, StreamingSyncImplementationListener, type RequiredAdditionalConnectionOptions } from './sync/stream/AbstractStreamingSyncImplementation.js';
13
+ import { StreamingSyncImplementation, StreamingSyncImplementationListener, type AdditionalConnectionOptions, type PowerSyncConnectionOptions, type RequiredAdditionalConnectionOptions } from './sync/stream/AbstractStreamingSyncImplementation.js';
14
14
  export interface DisconnectAndClearOptions {
15
15
  /** When set to false, data in local-only tables is preserved. */
16
16
  clearLocal?: boolean;
@@ -153,9 +153,18 @@ export declare abstract class AbstractPowerSyncDatabase extends BaseObserver<Pow
153
153
  */
154
154
  waitForReady(): Promise<void>;
155
155
  /**
156
+ * Wait for the first sync operation to complete.
157
+ *
158
+ * @argument request Either an abort signal (after which the promise will complete regardless of
159
+ * whether a full sync was completed) or an object providing an abort signal and a priority target.
160
+ * When a priority target is set, the promise may complete when all buckets with the given (or higher)
161
+ * priorities have been synchronized. This can be earlier than a complete sync.
156
162
  * @returns A promise which will resolve once the first full sync has completed.
157
163
  */
158
- waitForFirstSync(signal?: AbortSignal): Promise<void>;
164
+ waitForFirstSync(request?: AbortSignal | {
165
+ signal?: AbortSignal;
166
+ priority?: number;
167
+ }): Promise<void>;
159
168
  /**
160
169
  * Allows for extended implementations to execute custom initialization
161
170
  * logic as part of the total init process
@@ -256,6 +265,7 @@ export declare abstract class AbstractPowerSyncDatabase extends BaseObserver<Pow
256
265
  * and optionally return results.
257
266
  */
258
267
  execute(sql: string, parameters?: any[]): Promise<QueryResult>;
268
+ executeRaw(sql: string, parameters?: any[]): Promise<any[][]>;
259
269
  /**
260
270
  * Execute a write query (INSERT/UPDATE/DELETE) multiple times with each parameter set
261
271
  * and optionally return results.
@@ -9,12 +9,12 @@ import { ControlledExecutor } from '../utils/ControlledExecutor.js';
9
9
  import { mutexRunExclusive } from '../utils/mutex.js';
10
10
  import { throttleTrailing } from '../utils/throttle.js';
11
11
  import { isDBAdapter, isSQLOpenFactory, isSQLOpenOptions } from './SQLOpenFactory.js';
12
+ import { runOnSchemaChange } from './runOnSchemaChange.js';
12
13
  import { PSInternalTable } from './sync/bucket/BucketStorageAdapter.js';
13
14
  import { CrudBatch } from './sync/bucket/CrudBatch.js';
14
15
  import { CrudEntry } from './sync/bucket/CrudEntry.js';
15
16
  import { CrudTransaction } from './sync/bucket/CrudTransaction.js';
16
17
  import { DEFAULT_CRUD_UPLOAD_THROTTLE_MS, DEFAULT_RETRY_DELAY_MS } from './sync/stream/AbstractStreamingSyncImplementation.js';
17
- import { runOnSchemaChange } from './runOnSchemaChange.js';
18
18
  const POWERSYNC_TABLE_MATCH = /(^ps_data__|^ps_data_local__)/;
19
19
  const DEFAULT_DISCONNECT_CLEAR_OPTIONS = {
20
20
  clearLocal: true
@@ -42,6 +42,10 @@ export const DEFAULT_LOCK_TIMEOUT_MS = 120_000; // 2 mins
42
42
  export const isPowerSyncDatabaseOptionsWithSettings = (test) => {
43
43
  return typeof test == 'object' && isSQLOpenOptions(test.database);
44
44
  };
45
+ /**
46
+ * The priority used by the core extension to indicate that a full sync was completed.
47
+ */
48
+ const FULL_SYNC_PRIORITY = 2147483647;
45
49
  export class AbstractPowerSyncDatabase extends BaseObserver {
46
50
  options;
47
51
  /**
@@ -127,16 +131,27 @@ export class AbstractPowerSyncDatabase extends BaseObserver {
127
131
  await this._isReadyPromise;
128
132
  }
129
133
  /**
134
+ * Wait for the first sync operation to complete.
135
+ *
136
+ * @argument request Either an abort signal (after which the promise will complete regardless of
137
+ * whether a full sync was completed) or an object providing an abort signal and a priority target.
138
+ * When a priority target is set, the promise may complete when all buckets with the given (or higher)
139
+ * priorities have been synchronized. This can be earlier than a complete sync.
130
140
  * @returns A promise which will resolve once the first full sync has completed.
131
141
  */
132
- async waitForFirstSync(signal) {
133
- if (this.currentStatus.hasSynced) {
142
+ async waitForFirstSync(request) {
143
+ const signal = request instanceof AbortSignal ? request : request?.signal;
144
+ const priority = request && 'priority' in request ? request.priority : undefined;
145
+ const statusMatches = priority === undefined
146
+ ? (status) => status.hasSynced
147
+ : (status) => status.statusForPriority(priority).hasSynced;
148
+ if (statusMatches(this.currentStatus)) {
134
149
  return;
135
150
  }
136
151
  return new Promise((resolve) => {
137
152
  const dispose = this.registerListener({
138
153
  statusChanged: (status) => {
139
- if (status.hasSynced) {
154
+ if (statusMatches(status)) {
140
155
  dispose();
141
156
  resolve();
142
157
  }
@@ -177,19 +192,36 @@ export class AbstractPowerSyncDatabase extends BaseObserver {
177
192
  .map((n) => parseInt(n));
178
193
  }
179
194
  catch (e) {
180
- throw new Error(`Unsupported powersync extension version. Need >=0.2.0 <1.0.0, got: ${this.sdkVersion}. Details: ${e.message}`);
195
+ throw new Error(`Unsupported powersync extension version. Need >=0.3.11 <1.0.0, got: ${this.sdkVersion}. Details: ${e.message}`);
181
196
  }
182
- // Validate >=0.2.0 <1.0.0
183
- if (versionInts[0] != 0 || versionInts[1] < 2 || versionInts[2] < 0) {
184
- throw new Error(`Unsupported powersync extension version. Need >=0.2.0 <1.0.0, got: ${this.sdkVersion}`);
197
+ // Validate >=0.3.11 <1.0.0
198
+ if (versionInts[0] != 0 || versionInts[1] < 3 || (versionInts[1] == 3 && versionInts[2] < 11)) {
199
+ throw new Error(`Unsupported powersync extension version. Need >=0.3.11 <1.0.0, got: ${this.sdkVersion}`);
185
200
  }
186
201
  }
187
202
  async updateHasSynced() {
188
- const result = await this.database.get('SELECT powersync_last_synced_at() as synced_at');
189
- const hasSynced = result.synced_at != null;
190
- const syncedAt = result.synced_at != null ? new Date(result.synced_at + 'Z') : undefined;
191
- if (hasSynced != this.currentStatus.hasSynced) {
192
- this.currentStatus = new SyncStatus({ ...this.currentStatus.toJSON(), hasSynced, lastSyncedAt: syncedAt });
203
+ const result = await this.database.getAll('SELECT priority, last_synced_at FROM ps_sync_state ORDER BY priority DESC');
204
+ let lastCompleteSync;
205
+ const priorityStatusEntries = [];
206
+ for (const { priority, last_synced_at } of result) {
207
+ const parsedDate = new Date(last_synced_at + 'Z');
208
+ if (priority == FULL_SYNC_PRIORITY) {
209
+ // This lowest-possible priority represents a complete sync.
210
+ lastCompleteSync = parsedDate;
211
+ }
212
+ else {
213
+ priorityStatusEntries.push({ priority, hasSynced: true, lastSyncedAt: parsedDate });
214
+ }
215
+ }
216
+ const hasSynced = lastCompleteSync != null;
217
+ const updatedStatus = new SyncStatus({
218
+ ...this.currentStatus.toJSON(),
219
+ hasSynced,
220
+ priorityStatusEntries,
221
+ lastSyncedAt: lastCompleteSync
222
+ });
223
+ if (!updatedStatus.isEqual(this.currentStatus)) {
224
+ this.currentStatus = updatedStatus;
193
225
  this.iterateListeners((l) => l.statusChanged?.(this.currentStatus));
194
226
  }
195
227
  }
@@ -245,7 +277,7 @@ export class AbstractPowerSyncDatabase extends BaseObserver {
245
277
  const { retryDelayMs, crudUploadThrottleMs } = this.resolvedConnectionOptions(options);
246
278
  this.syncStreamImplementation = this.generateSyncStreamImplementation(connector, {
247
279
  retryDelayMs,
248
- crudUploadThrottleMs,
280
+ crudUploadThrottleMs
249
281
  });
250
282
  this.syncStatusListenerDisposer = this.syncStreamImplementation.registerListener({
251
283
  statusChanged: (status) => {
@@ -302,12 +334,15 @@ export class AbstractPowerSyncDatabase extends BaseObserver {
302
334
  */
303
335
  async close(options = DEFAULT_POWERSYNC_CLOSE_OPTIONS) {
304
336
  await this.waitForReady();
337
+ if (this.closed) {
338
+ return;
339
+ }
305
340
  const { disconnect } = options;
306
341
  if (disconnect) {
307
342
  await this.disconnect();
308
343
  }
309
344
  await this.syncStreamImplementation?.dispose();
310
- this.database.close();
345
+ await this.database.close();
311
346
  this.closed = true;
312
347
  }
313
348
  /**
@@ -424,6 +459,10 @@ export class AbstractPowerSyncDatabase extends BaseObserver {
424
459
  await this.waitForReady();
425
460
  return this.database.execute(sql, parameters);
426
461
  }
462
+ async executeRaw(sql, parameters) {
463
+ await this.waitForReady();
464
+ return this.database.executeRaw(sql, parameters);
465
+ }
427
466
  /**
428
467
  * Execute a write query (INSERT/UPDATE/DELETE) multiple times with each parameter set
429
468
  * and optionally return results.
@@ -6,6 +6,9 @@ export interface SQLOpenOptions {
6
6
  dbFilename: string;
7
7
  /**
8
8
  * Directory where the database file is located.
9
+ *
10
+ * When set, the directory must exist when the database is opened, it will
11
+ * not be created automatically.
9
12
  */
10
13
  dbLocation?: string;
11
14
  /**
@@ -2,6 +2,10 @@ import { BaseListener, BaseObserver, Disposable } from '../../../utils/BaseObser
2
2
  import { CrudBatch } from './CrudBatch.js';
3
3
  import { CrudEntry, OpId } from './CrudEntry.js';
4
4
  import { SyncDataBatch } from './SyncDataBatch.js';
5
+ export interface BucketDescription {
6
+ name: string;
7
+ priority: number;
8
+ }
5
9
  export interface Checkpoint {
6
10
  last_op_id: OpId;
7
11
  buckets: BucketChecksum[];
@@ -25,6 +29,7 @@ export interface SyncLocalDatabaseResult {
25
29
  }
26
30
  export interface BucketChecksum {
27
31
  bucket: string;
32
+ priority?: number;
28
33
  /**
29
34
  * 32-bit unsigned hash.
30
35
  */
@@ -51,7 +56,7 @@ export interface BucketStorageAdapter extends BaseObserver<BucketStorageListener
51
56
  setTargetCheckpoint(checkpoint: Checkpoint): Promise<void>;
52
57
  startSession(): void;
53
58
  getBucketStates(): Promise<BucketState[]>;
54
- syncLocalDatabase(checkpoint: Checkpoint): Promise<{
59
+ syncLocalDatabase(checkpoint: Checkpoint, priority?: number): Promise<{
55
60
  checkpointValid: boolean;
56
61
  ready: boolean;
57
62
  failures?: any[];
@@ -37,14 +37,14 @@ export declare class SqliteBucketStorage extends BaseObserver<BucketStorageListe
37
37
  */
38
38
  private deleteBucket;
39
39
  hasCompletedSync(): Promise<boolean>;
40
- syncLocalDatabase(checkpoint: Checkpoint): Promise<SyncLocalDatabaseResult>;
40
+ syncLocalDatabase(checkpoint: Checkpoint, priority?: number): Promise<SyncLocalDatabaseResult>;
41
41
  /**
42
42
  * Atomically update the local state to the current checkpoint.
43
43
  *
44
44
  * This includes creating new tables, dropping old tables, and copying data over from the oplog.
45
45
  */
46
46
  private updateObjectsFromBuckets;
47
- validateChecksums(checkpoint: Checkpoint): Promise<SyncLocalDatabaseResult>;
47
+ validateChecksums(checkpoint: Checkpoint, priority: number | undefined): Promise<SyncLocalDatabaseResult>;
48
48
  /**
49
49
  * Force a compact, for tests.
50
50
  */
@@ -106,8 +106,8 @@ export class SqliteBucketStorage extends BaseObserver {
106
106
  }
107
107
  return completed;
108
108
  }
109
- async syncLocalDatabase(checkpoint) {
110
- const r = await this.validateChecksums(checkpoint);
109
+ async syncLocalDatabase(checkpoint, priority) {
110
+ const r = await this.validateChecksums(checkpoint, priority);
111
111
  if (!r.checkpointValid) {
112
112
  this.logger.error('Checksums failed for', r.checkpointFailures);
113
113
  for (const b of r.checkpointFailures ?? []) {
@@ -115,17 +115,21 @@ export class SqliteBucketStorage extends BaseObserver {
115
115
  }
116
116
  return { ready: false, checkpointValid: false, checkpointFailures: r.checkpointFailures };
117
117
  }
118
- const bucketNames = checkpoint.buckets.map((b) => b.bucket);
118
+ const buckets = checkpoint.buckets;
119
+ if (priority !== undefined) {
120
+ buckets.filter((b) => hasMatchingPriority(priority, b));
121
+ }
122
+ const bucketNames = buckets.map((b) => b.bucket);
119
123
  await this.writeTransaction(async (tx) => {
120
124
  await tx.execute(`UPDATE ps_buckets SET last_op = ? WHERE name IN (SELECT json_each.value FROM json_each(?))`, [
121
125
  checkpoint.last_op_id,
122
126
  JSON.stringify(bucketNames)
123
127
  ]);
124
- if (checkpoint.write_checkpoint) {
128
+ if (priority == null && checkpoint.write_checkpoint) {
125
129
  await tx.execute("UPDATE ps_buckets SET last_op = ? WHERE name = '$local'", [checkpoint.write_checkpoint]);
126
130
  }
127
131
  });
128
- const valid = await this.updateObjectsFromBuckets(checkpoint);
132
+ const valid = await this.updateObjectsFromBuckets(checkpoint, priority);
129
133
  if (!valid) {
130
134
  this.logger.debug('Not at a consistent checkpoint - cannot update local db');
131
135
  return { ready: false, checkpointValid: true };
@@ -141,19 +145,36 @@ export class SqliteBucketStorage extends BaseObserver {
141
145
  *
142
146
  * This includes creating new tables, dropping old tables, and copying data over from the oplog.
143
147
  */
144
- async updateObjectsFromBuckets(checkpoint) {
148
+ async updateObjectsFromBuckets(checkpoint, priority) {
149
+ let arg = '';
150
+ if (priority !== undefined) {
151
+ const affectedBuckets = [];
152
+ for (const desc of checkpoint.buckets) {
153
+ if (hasMatchingPriority(priority, desc)) {
154
+ affectedBuckets.push(desc.bucket);
155
+ }
156
+ }
157
+ arg = JSON.stringify({ priority, buckets: affectedBuckets });
158
+ }
145
159
  return this.writeTransaction(async (tx) => {
146
160
  const { insertId: result } = await tx.execute('INSERT INTO powersync_operations(op, data) VALUES(?, ?)', [
147
161
  'sync_local',
148
- ''
162
+ arg
149
163
  ]);
150
164
  return result == 1;
151
165
  });
152
166
  }
153
- async validateChecksums(checkpoint) {
154
- const rs = await this.db.execute('SELECT powersync_validate_checkpoint(?) as result', [JSON.stringify(checkpoint)]);
167
+ async validateChecksums(checkpoint, priority) {
168
+ if (priority !== undefined) {
169
+ // Only validate the buckets within the priority we care about
170
+ const newBuckets = checkpoint.buckets.filter((cs) => hasMatchingPriority(priority, cs));
171
+ checkpoint = { ...checkpoint, buckets: newBuckets };
172
+ }
173
+ const rs = await this.db.execute('SELECT powersync_validate_checkpoint(?) as result', [
174
+ JSON.stringify({ ...checkpoint })
175
+ ]);
155
176
  const resultItem = rs.rows?.item(0);
156
- this.logger.debug('validateChecksums result item', resultItem);
177
+ this.logger.debug('validateChecksums priority, checkpoint, result item', priority, checkpoint, resultItem);
157
178
  if (!resultItem) {
158
179
  return {
159
180
  checkpointValid: false,
@@ -304,3 +325,6 @@ export class SqliteBucketStorage extends BaseObserver {
304
325
  // No-op for now
305
326
  }
306
327
  }
328
+ function hasMatchingPriority(priority, bucket) {
329
+ return bucket.priority != null && bucket.priority <= priority;
330
+ }
@@ -3,10 +3,11 @@ import ndjsonStream from 'can-ndjson-stream';
3
3
  import Logger from 'js-logger';
4
4
  import { RSocketConnector } from 'rsocket-core';
5
5
  import { WebsocketClientTransport } from 'rsocket-websocket-client';
6
+ import PACKAGE from '../../../../package.json' with { type: 'json' };
6
7
  import { AbortOperation } from '../../../utils/AbortOperation.js';
7
8
  import { DataStream } from '../../../utils/DataStream.js';
8
- import { version as POWERSYNC_JS_VERSION } from '../../../../package.json';
9
9
  const POWERSYNC_TRAILING_SLASH_MATCH = /\/+$/;
10
+ const POWERSYNC_JS_VERSION = PACKAGE.version;
10
11
  // Refresh at least 30 sec before it expires
11
12
  const REFRESH_CREDENTIALS_SAFETY_PERIOD_MS = 30_000;
12
13
  const SYNC_QUEUE_REQUEST_LOW_WATER = 5;
@@ -99,6 +99,7 @@ export interface StreamingSyncImplementation extends BaseObserver<StreamingSyncI
99
99
  triggerCrudUpload: () => void;
100
100
  waitForReady(): Promise<void>;
101
101
  waitForStatus(status: SyncStatusOptions): Promise<void>;
102
+ waitUntilStatusMatches(predicate: (status: SyncStatus) => boolean): Promise<void>;
102
103
  }
103
104
  export declare const DEFAULT_CRUD_UPLOAD_THROTTLE_MS = 1000;
104
105
  export declare const DEFAULT_RETRY_DELAY_MS = 5000;
@@ -120,6 +121,7 @@ export declare abstract class AbstractStreamingSyncImplementation extends BaseOb
120
121
  constructor(options: AbstractStreamingSyncImplementationOptions);
121
122
  waitForReady(): Promise<void>;
122
123
  waitForStatus(status: SyncStatusOptions): Promise<void>;
124
+ waitUntilStatusMatches(predicate: (status: SyncStatus) => boolean): Promise<void>;
123
125
  get lastSyncedAt(): Date | undefined;
124
126
  get isConnected(): boolean;
125
127
  protected get logger(): Logger.ILogger;
@@ -134,6 +136,7 @@ export declare abstract class AbstractStreamingSyncImplementation extends BaseOb
134
136
  * @deprecated use [connect instead]
135
137
  */
136
138
  streamingSync(signal?: AbortSignal, options?: PowerSyncConnectionOptions): Promise<void>;
139
+ private collectLocalBucketState;
137
140
  protected streamingSyncIteration(signal: AbortSignal, options?: PowerSyncConnectionOptions): Promise<{
138
141
  retry?: boolean;
139
142
  }>;
@@ -5,7 +5,7 @@ import { BaseObserver } from '../../../utils/BaseObserver.js';
5
5
  import { throttleLeadingTrailing } from '../../../utils/throttle.js';
6
6
  import { SyncDataBucket } from '../bucket/SyncDataBucket.js';
7
7
  import { FetchStrategy } from './AbstractRemote.js';
8
- import { isStreamingKeepalive, isStreamingSyncCheckpoint, isStreamingSyncCheckpointComplete, isStreamingSyncCheckpointDiff, isStreamingSyncData } from './streaming-sync-types.js';
8
+ import { isStreamingKeepalive, isStreamingSyncCheckpoint, isStreamingSyncCheckpointComplete, isStreamingSyncCheckpointDiff, isStreamingSyncCheckpointPartiallyComplete, isStreamingSyncData } from './streaming-sync-types.js';
9
9
  export var LockType;
10
10
  (function (LockType) {
11
11
  LockType["CRUD"] = "crud";
@@ -28,6 +28,11 @@ export const DEFAULT_STREAM_CONNECTION_OPTIONS = {
28
28
  fetchStrategy: FetchStrategy.Buffered,
29
29
  params: {}
30
30
  };
31
+ // The priority we assume when we receive checkpoint lines where no priority is set.
32
+ // This is the default priority used by the sync service, but can be set to an arbitrary
33
+ // value since sync services without priorities also won't send partial sync completion
34
+ // messages.
35
+ const FALLBACK_PRIORITY = 3;
31
36
  export class AbstractStreamingSyncImplementation extends BaseObserver {
32
37
  _lastSyncedAt;
33
38
  options;
@@ -58,23 +63,32 @@ export class AbstractStreamingSyncImplementation extends BaseObserver {
58
63
  }
59
64
  async waitForReady() { }
60
65
  waitForStatus(status) {
66
+ return this.waitUntilStatusMatches((currentStatus) => {
67
+ /**
68
+ * Match only the partial status options provided in the
69
+ * matching status
70
+ */
71
+ const matchPartialObject = (compA, compB) => {
72
+ return Object.entries(compA).every(([key, value]) => {
73
+ const comparisonBValue = compB[key];
74
+ if (typeof value == 'object' && typeof comparisonBValue == 'object') {
75
+ return matchPartialObject(value, comparisonBValue);
76
+ }
77
+ return value == comparisonBValue;
78
+ });
79
+ };
80
+ return matchPartialObject(status, currentStatus);
81
+ });
82
+ }
83
+ waitUntilStatusMatches(predicate) {
61
84
  return new Promise((resolve) => {
85
+ if (predicate(this.syncStatus)) {
86
+ resolve();
87
+ return;
88
+ }
62
89
  const l = this.registerListener({
63
90
  statusChanged: (updatedStatus) => {
64
- /**
65
- * Match only the partial status options provided in the
66
- * matching status
67
- */
68
- const matchPartialObject = (compA, compB) => {
69
- return Object.entries(compA).every(([key, value]) => {
70
- const comparisonBValue = compB[key];
71
- if (typeof value == 'object' && typeof comparisonBValue == 'object') {
72
- return matchPartialObject(value, comparisonBValue);
73
- }
74
- return value == comparisonBValue;
75
- });
76
- };
77
- if (matchPartialObject(status, updatedStatus.toJSON())) {
91
+ if (predicate(updatedStatus)) {
78
92
  resolve();
79
93
  l?.();
80
94
  }
@@ -303,6 +317,18 @@ The next upload iteration will be delayed.`);
303
317
  // Mark as disconnected if here
304
318
  this.updateSyncStatus({ connected: false, connecting: false });
305
319
  }
320
+ async collectLocalBucketState() {
321
+ const bucketEntries = await this.options.adapter.getBucketStates();
322
+ const req = bucketEntries.map((entry) => ({
323
+ name: entry.bucket,
324
+ after: entry.op_id
325
+ }));
326
+ const localDescriptions = new Map();
327
+ for (const entry of bucketEntries) {
328
+ localDescriptions.set(entry.bucket, null);
329
+ }
330
+ return [req, localDescriptions];
331
+ }
306
332
  async streamingSyncIteration(signal, options) {
307
333
  return await this.obtainLock({
308
334
  type: LockType.SYNC,
@@ -314,20 +340,11 @@ The next upload iteration will be delayed.`);
314
340
  };
315
341
  this.logger.debug('Streaming sync iteration started');
316
342
  this.options.adapter.startSession();
317
- const bucketEntries = await this.options.adapter.getBucketStates();
318
- const initialBuckets = new Map();
319
- bucketEntries.forEach((entry) => {
320
- initialBuckets.set(entry.bucket, entry.op_id);
321
- });
322
- const req = Array.from(initialBuckets.entries()).map(([bucket, after]) => ({
323
- name: bucket,
324
- after: after
325
- }));
343
+ let [req, bucketMap] = await this.collectLocalBucketState();
326
344
  // These are compared by reference
327
345
  let targetCheckpoint = null;
328
346
  let validatedCheckpoint = null;
329
347
  let appliedCheckpoint = null;
330
- let bucketSet = new Set(initialBuckets.keys());
331
348
  const clientId = await this.options.adapter.getClientId();
332
349
  this.logger.debug('Requesting stream from server');
333
350
  const syncOptions = {
@@ -368,16 +385,19 @@ The next upload iteration will be delayed.`);
368
385
  }
369
386
  if (isStreamingSyncCheckpoint(line)) {
370
387
  targetCheckpoint = line.checkpoint;
371
- const bucketsToDelete = new Set(bucketSet);
372
- const newBuckets = new Set();
388
+ const bucketsToDelete = new Set(bucketMap.keys());
389
+ const newBuckets = new Map();
373
390
  for (const checksum of line.checkpoint.buckets) {
374
- newBuckets.add(checksum.bucket);
391
+ newBuckets.set(checksum.bucket, {
392
+ name: checksum.bucket,
393
+ priority: checksum.priority ?? FALLBACK_PRIORITY
394
+ });
375
395
  bucketsToDelete.delete(checksum.bucket);
376
396
  }
377
397
  if (bucketsToDelete.size > 0) {
378
398
  this.logger.debug('Removing buckets', [...bucketsToDelete]);
379
399
  }
380
- bucketSet = newBuckets;
400
+ bucketMap = newBuckets;
381
401
  await this.options.adapter.removeBuckets([...bucketsToDelete]);
382
402
  await this.options.adapter.setTargetCheckpoint(targetCheckpoint);
383
403
  }
@@ -408,6 +428,35 @@ The next upload iteration will be delayed.`);
408
428
  }
409
429
  validatedCheckpoint = targetCheckpoint;
410
430
  }
431
+ else if (isStreamingSyncCheckpointPartiallyComplete(line)) {
432
+ const priority = line.partial_checkpoint_complete.priority;
433
+ this.logger.debug('Partial checkpoint complete', priority);
434
+ const result = await this.options.adapter.syncLocalDatabase(targetCheckpoint, priority);
435
+ if (!result.checkpointValid) {
436
+ // This means checksums failed. Start again with a new checkpoint.
437
+ // TODO: better back-off
438
+ await new Promise((resolve) => setTimeout(resolve, 50));
439
+ return { retry: true };
440
+ }
441
+ else if (!result.ready) {
442
+ // Need more data for a consistent partial sync within a priority - continue waiting.
443
+ }
444
+ else {
445
+ // We'll keep on downloading, but can report that this priority is synced now.
446
+ this.logger.debug('partial checkpoint validation succeeded');
447
+ // All states with a higher priority can be deleted since this partial sync includes them.
448
+ const priorityStates = this.syncStatus.priorityStatusEntries.filter((s) => s.priority <= priority);
449
+ priorityStates.push({
450
+ priority,
451
+ lastSyncedAt: new Date(),
452
+ hasSynced: true
453
+ });
454
+ this.updateSyncStatus({
455
+ connected: true,
456
+ priorityStatusEntries: priorityStates
457
+ });
458
+ }
459
+ }
411
460
  else if (isStreamingSyncCheckpointDiff(line)) {
412
461
  // TODO: It may be faster to just keep track of the diff, instead of the entire checkpoint
413
462
  if (targetCheckpoint == null) {
@@ -430,7 +479,11 @@ The next upload iteration will be delayed.`);
430
479
  write_checkpoint: diff.write_checkpoint
431
480
  };
432
481
  targetCheckpoint = newCheckpoint;
433
- bucketSet = new Set(newBuckets.keys());
482
+ bucketMap = new Map();
483
+ newBuckets.forEach((checksum, name) => bucketMap.set(name, {
484
+ name: checksum.bucket,
485
+ priority: checksum.priority ?? FALLBACK_PRIORITY
486
+ }));
434
487
  const bucketsToDelete = diff.removed_buckets;
435
488
  if (bucketsToDelete.length > 0) {
436
489
  this.logger.debug('Remove buckets', bucketsToDelete);
@@ -466,7 +519,8 @@ The next upload iteration will be delayed.`);
466
519
  if (targetCheckpoint === appliedCheckpoint) {
467
520
  this.updateSyncStatus({
468
521
  connected: true,
469
- lastSyncedAt: new Date()
522
+ lastSyncedAt: new Date(),
523
+ priorityStatusEntries: []
470
524
  });
471
525
  }
472
526
  else if (validatedCheckpoint === targetCheckpoint) {
@@ -486,6 +540,7 @@ The next upload iteration will be delayed.`);
486
540
  this.updateSyncStatus({
487
541
  connected: true,
488
542
  lastSyncedAt: new Date(),
543
+ priorityStatusEntries: [],
489
544
  dataFlow: {
490
545
  downloading: false
491
546
  }
@@ -508,7 +563,8 @@ The next upload iteration will be delayed.`);
508
563
  dataFlow: {
509
564
  ...this.syncStatus.dataFlowStatus,
510
565
  ...options.dataFlow
511
- }
566
+ },
567
+ priorityStatusEntries: options.priorityStatusEntries ?? this.syncStatus.priorityStatusEntries
512
568
  });
513
569
  if (!this.syncStatus.isEqual(updatedStatus)) {
514
570
  this.syncStatus = updatedStatus;
@@ -90,11 +90,17 @@ export interface StreamingSyncCheckpointComplete {
90
90
  last_op_id: OpId;
91
91
  };
92
92
  }
93
+ export interface StreamingSyncCheckpointPartiallyComplete {
94
+ partial_checkpoint_complete: {
95
+ priority: number;
96
+ last_op_id: OpId;
97
+ };
98
+ }
93
99
  export interface StreamingSyncKeepalive {
94
100
  /** If specified, token expires in this many seconds. */
95
101
  token_expires_in: number;
96
102
  }
97
- export type StreamingSyncLine = StreamingSyncDataJSON | StreamingSyncCheckpoint | StreamingSyncCheckpointDiff | StreamingSyncCheckpointComplete | StreamingSyncKeepalive;
103
+ export type StreamingSyncLine = StreamingSyncDataJSON | StreamingSyncCheckpoint | StreamingSyncCheckpointDiff | StreamingSyncCheckpointComplete | StreamingSyncCheckpointPartiallyComplete | StreamingSyncKeepalive;
98
104
  export interface BucketRequest {
99
105
  name: string;
100
106
  /**
@@ -106,6 +112,7 @@ export declare function isStreamingSyncData(line: StreamingSyncLine): line is St
106
112
  export declare function isStreamingKeepalive(line: StreamingSyncLine): line is StreamingSyncKeepalive;
107
113
  export declare function isStreamingSyncCheckpoint(line: StreamingSyncLine): line is StreamingSyncCheckpoint;
108
114
  export declare function isStreamingSyncCheckpointComplete(line: StreamingSyncLine): line is StreamingSyncCheckpointComplete;
115
+ export declare function isStreamingSyncCheckpointPartiallyComplete(line: StreamingSyncLine): line is StreamingSyncCheckpointPartiallyComplete;
109
116
  export declare function isStreamingSyncCheckpointDiff(line: StreamingSyncLine): line is StreamingSyncCheckpointDiff;
110
117
  export declare function isContinueCheckpointRequest(request: SyncRequest): request is ContinueCheckpointRequest;
111
118
  export declare function isSyncNewCheckpointRequest(request: SyncRequest): request is SyncNewCheckpointRequest;
@@ -10,6 +10,9 @@ export function isStreamingSyncCheckpoint(line) {
10
10
  export function isStreamingSyncCheckpointComplete(line) {
11
11
  return line.checkpoint_complete != null;
12
12
  }
13
+ export function isStreamingSyncCheckpointPartiallyComplete(line) {
14
+ return line.partial_checkpoint_complete != null;
15
+ }
13
16
  export function isStreamingSyncCheckpointDiff(line) {
14
17
  return line.checkpoint_diff != null;
15
18
  }
@@ -39,6 +39,21 @@ export interface DBGetUtils {
39
39
  export interface LockContext extends DBGetUtils {
40
40
  /** Execute a single write statement. */
41
41
  execute: (query: string, params?: any[] | undefined) => Promise<QueryResult>;
42
+ /**
43
+ * Execute a single write statement and return raw results.
44
+ * Unlike `execute`, which returns an object with structured key-value pairs,
45
+ * `executeRaw` returns a nested array of raw values, where each row is
46
+ * represented as an array of column values without field names.
47
+ *
48
+ * Example result:
49
+ *
50
+ * ```[ [ '1', 'list 1', '33', 'Post content', '1' ] ]```
51
+ *
52
+ * Where as `execute`'s `rows._array` would have been:
53
+ *
54
+ * ```[ { id: '33', name: 'list 1', content: 'Post content', list_id: '1' } ]```
55
+ */
56
+ executeRaw: (query: string, params?: any[] | undefined) => Promise<any[][]>;
42
57
  }
43
58
  export interface Transaction extends LockContext {
44
59
  /** Commit multiple changes to the local DB using the Transaction context. */
@@ -82,8 +97,9 @@ export interface DBLockOptions {
82
97
  timeoutMs?: number;
83
98
  }
84
99
  export interface DBAdapter extends BaseObserverInterface<DBAdapterListener>, DBGetUtils {
85
- close: () => void;
100
+ close: () => void | Promise<void>;
86
101
  execute: (query: string, params?: any[]) => Promise<QueryResult>;
102
+ executeRaw: (query: string, params?: any[]) => Promise<any[][]>;
87
103
  executeBatch: (query: string, params?: any[][]) => Promise<QueryResult>;
88
104
  name: string;
89
105
  readLock: <T>(fn: (tx: LockContext) => Promise<T>, options?: DBLockOptions) => Promise<T>;