@powersync/service-module-mongodb-storage 0.5.1 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3,15 +3,15 @@ import { SqlEventDescriptor, SqliteRow, SqlSyncRules } from '@powersync/service-
 import * as bson from 'bson';
 
 import {
+  BaseObserver,
   container,
-  DisposableObserver,
   ErrorCode,
   errors,
   logger,
   ReplicationAssertionError,
   ServiceError
 } from '@powersync/lib-services-framework';
-import { SaveOperationTag, storage, utils } from '@powersync/service-core';
+import { deserializeBson, SaveOperationTag, storage, utils } from '@powersync/service-core';
 import * as timers from 'node:timers/promises';
 import { PowerSyncMongo } from './db.js';
 import { CurrentBucket, CurrentDataDocument, SourceKey, SyncRuleDocument } from './models.js';
@@ -49,7 +49,7 @@ export interface MongoBucketBatchOptions {
 }
 
 export class MongoBucketBatch
-  extends DisposableObserver<storage.BucketBatchStorageListener>
+  extends BaseObserver<storage.BucketBatchStorageListener>
   implements storage.BucketStorageBatch
 {
   private readonly client: mongo.MongoClient;
@@ -322,10 +322,7 @@ export class MongoBucketBatch
       existing_buckets = result.buckets;
       existing_lookups = result.lookups;
       if (this.storeCurrentData) {
-        const data = bson.deserialize(
-          (result.data as mongo.Binary).buffer,
-          storage.BSON_DESERIALIZE_OPTIONS
-        ) as SqliteRow;
+        const data = deserializeBson((result.data as mongo.Binary).buffer) as SqliteRow;
         after = storage.mergeToast(after!, data);
       }
     }
@@ -610,7 +607,7 @@ export class MongoBucketBatch
 
   async [Symbol.asyncDispose]() {
     await this.session.endSession();
-    super[Symbol.dispose]();
+    super.clearListeners();
   }
 
   private lastWaitingLogThottled = 0;
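
The batch class now extends BaseObserver instead of DisposableObserver, so there is no inherited [Symbol.dispose] to forward to; listener cleanup happens explicitly via clearListeners() inside [Symbol.asyncDispose]. Below is a minimal sketch of that lifecycle; the observer class is an illustrative stand-in with an assumed register/clear shape, not the actual @powersync/lib-services-framework implementation.

    // Illustrative stand-in for BaseObserver (assumed shape: register + clear).
    class ObserverSketch<TListener extends object> {
      private listeners = new Set<Partial<TListener>>();

      registerListener(listener: Partial<TListener>): () => void {
        this.listeners.add(listener);
        return () => void this.listeners.delete(listener);
      }

      clearListeners(): void {
        this.listeners.clear();
      }
    }

    class BatchSketch extends ObserverSketch<{ flushed: () => void }> {
      async [Symbol.asyncDispose]() {
        // Release external resources first (the real batch ends its MongoDB session),
        // then drop listeners explicitly - nothing is disposed implicitly any more.
        this.clearListeners();
      }
    }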
@@ -1,20 +1,36 @@
 import * as lib_mongo from '@powersync/lib-service-mongodb';
 import { mongo } from '@powersync/lib-service-mongodb';
-import { DisposableObserver, logger, ServiceAssertionError } from '@powersync/lib-services-framework';
-import { storage, utils } from '@powersync/service-core';
+import {
+  BaseObserver,
+  ErrorCode,
+  logger,
+  ServiceAssertionError,
+  ServiceError
+} from '@powersync/lib-services-framework';
+import {
+  BroadcastIterable,
+  CHECKPOINT_INVALIDATE_ALL,
+  CheckpointChanges,
+  GetCheckpointChangesOptions,
+  ReplicationCheckpoint,
+  storage,
+  utils,
+  WatchWriteCheckpointOptions
+} from '@powersync/service-core';
 import { SqliteJsonRow, SqliteJsonValue, SqlSyncRules } from '@powersync/service-sync-rules';
 import * as bson from 'bson';
+import { wrapWithAbort } from 'ix/asynciterable/operators/withabort.js';
 import * as timers from 'timers/promises';
 import { MongoBucketStorage } from '../MongoBucketStorage.js';
 import { PowerSyncMongo } from './db.js';
-import { BucketDataDocument, BucketDataKey, SourceKey } from './models.js';
+import { BucketDataDocument, BucketDataKey, SourceKey, SyncRuleCheckpointState, SyncRuleDocument } from './models.js';
 import { MongoBucketBatch } from './MongoBucketBatch.js';
 import { MongoCompactor } from './MongoCompactor.js';
 import { MongoWriteCheckpointAPI } from './MongoWriteCheckpointAPI.js';
 import { idPrefixFilter, mapOpEntry, readSingleBatch } from './util.js';
 
 export class MongoSyncBucketStorage
-  extends DisposableObserver<storage.SyncRulesBucketStorageListener>
+  extends BaseObserver<storage.SyncRulesBucketStorageListener>
   implements storage.SyncRulesBucketStorage
 {
   private readonly db: PowerSyncMongo;
@@ -324,7 +340,7 @@ export class MongoSyncBucketStorage
 
     // Ordered by _id, meaning buckets are grouped together
     for (let rawData of data) {
-      const row = bson.deserialize(rawData, storage.BSON_DESERIALIZE_OPTIONS) as BucketDataDocument;
+      const row = bson.deserialize(rawData, storage.BSON_DESERIALIZE_INTERNAL_OPTIONS) as BucketDataDocument;
      const bucket = row._id.b;
 
      if (currentBatch == null || currentBatch.bucket != bucket || batchSize >= sizeLimit) {
@@ -608,4 +624,232 @@ export class MongoSyncBucketStorage
   async compact(options?: storage.CompactOptions) {
     return new MongoCompactor(this.db, this.group_id, options).compact();
   }
+
+  private makeActiveCheckpoint(doc: SyncRuleCheckpointState | null) {
+    return {
+      checkpoint: utils.timestampToOpId(doc?.last_checkpoint ?? 0n),
+      lsn: doc?.last_checkpoint_lsn ?? null
+    };
+  }
+
+  /**
+   * Instance-wide watch on the latest available checkpoint (op_id + lsn).
+   */
+  private async *watchActiveCheckpoint(signal: AbortSignal): AsyncIterable<ReplicationCheckpoint> {
+    // Use this form instead of (doc: SyncRuleCheckpointState | null = null),
+    // otherwise we get weird "doc: never" issues.
+    let doc = null as SyncRuleCheckpointState | null;
+    let clusterTime = null as mongo.Timestamp | null;
+    const syncRulesId = this.group_id;
+
+    await this.db.client.withSession(async (session) => {
+      doc = await this.db.sync_rules.findOne(
+        {
+          _id: syncRulesId,
+          state: storage.SyncRuleState.ACTIVE
+        },
+        {
+          session,
+          sort: { _id: -1 },
+          limit: 1,
+          projection: {
+            _id: 1,
+            state: 1,
+            last_checkpoint: 1,
+            last_checkpoint_lsn: 1
+          }
+        }
+      );
+      const time = session.clusterTime?.clusterTime ?? null;
+      clusterTime = time;
+    });
+    if (clusterTime == null) {
+      throw new ServiceError(ErrorCode.PSYNC_S2401, 'Could not get clusterTime');
+    }
+
+    if (signal.aborted) {
+      return;
+    }
+
+    if (doc == null) {
+      // Sync rules not present or not active.
+      // Abort the connections - clients will have to retry later.
+      // Should this error instead?
+      return;
+    }
+
+    yield this.makeActiveCheckpoint(doc);
+
+    // We only watch changes to the active sync rules.
+    // If it changes to inactive, we abort and restart with the new sync rules.
+
+    const pipeline = this.getChangeStreamPipeline();
+
+    const stream = this.db.sync_rules.watch(pipeline, {
+      // Start at the cluster time where we got the initial doc, to make sure
+      // we don't skip any updates.
+      // This may result in the first operation being a duplicate, but we filter
+      // it out anyway.
+      startAtOperationTime: clusterTime
+    });
+
+    signal.addEventListener(
+      'abort',
+      () => {
+        stream.close();
+      },
+      { once: true }
+    );
+
+    let lastOp: storage.ReplicationCheckpoint | null = null;
+    let lastDoc: SyncRuleCheckpointState | null = doc;
+
+    for await (const update of stream.stream()) {
+      if (signal.aborted) {
+        break;
+      }
+      if (update.operationType != 'insert' && update.operationType != 'update' && update.operationType != 'replace') {
+        continue;
+      }
+
+      const doc = await this.getOperationDoc(lastDoc, update as lib_mongo.mongo.ChangeStreamDocument<SyncRuleDocument>);
+      if (doc == null) {
+        // Irrelevant update
+        continue;
+      }
+      if (doc.state != storage.SyncRuleState.ACTIVE) {
+        // Sync rules have changed - abort and restart.
+        // Should this error instead?
+        break;
+      }
+
+      lastDoc = doc;
+
+      const op = this.makeActiveCheckpoint(doc);
+      // Check for LSN / checkpoint changes - ignore other metadata changes
+      if (lastOp == null || op.lsn != lastOp.lsn || op.checkpoint != lastOp.checkpoint) {
+        lastOp = op;
+        yield op;
+      }
+    }
+  }
+
+  // Nothing is done here until a subscriber starts to iterate
+  private readonly sharedIter = new BroadcastIterable((signal) => {
+    return this.watchActiveCheckpoint(signal);
+  });
+
+  /**
+   * User-specific watch on the latest checkpoint and/or write checkpoint.
+   */
+  async *watchWriteCheckpoint(options: WatchWriteCheckpointOptions): AsyncIterable<storage.StorageCheckpointUpdate> {
+    const { user_id, signal } = options;
+    let lastCheckpoint: utils.OpId | null = null;
+    let lastWriteCheckpoint: bigint | null = null;
+
+    const iter = wrapWithAbort(this.sharedIter, signal);
+    for await (const event of iter) {
+      const { checkpoint, lsn } = event;
+
+      // lsn changes are not important by itself.
+      // What is important is:
+      // 1. checkpoint (op_id) changes.
+      // 2. write checkpoint changes for the specific user
+
+      const lsnFilters: Record<string, string> = lsn ? { 1: lsn } : {};
+
+      const currentWriteCheckpoint = await this.lastWriteCheckpoint({
+        user_id,
+        heads: {
+          ...lsnFilters
+        }
+      });
+
+      if (currentWriteCheckpoint == lastWriteCheckpoint && checkpoint == lastCheckpoint) {
+        // No change - wait for next one
+        // In some cases, many LSNs may be produced in a short time.
+        // Add a delay to throttle the write checkpoint lookup a bit.
+        await timers.setTimeout(20 + 10 * Math.random());
+        continue;
+      }
+
+      const updates: CheckpointChanges =
+        lastCheckpoint == null
+          ? {
+              invalidateDataBuckets: true,
+              invalidateParameterBuckets: true,
+              updatedDataBuckets: [],
+              updatedParameterBucketDefinitions: []
+            }
+          : await this.getCheckpointChanges({
+              lastCheckpoint: lastCheckpoint,
+              nextCheckpoint: checkpoint
+            });
+
+      lastWriteCheckpoint = currentWriteCheckpoint;
+      lastCheckpoint = checkpoint;
+
+      yield {
+        base: event,
+        writeCheckpoint: currentWriteCheckpoint,
+        update: updates
+      };
+    }
+  }
+
+  private async getOperationDoc(
+    lastDoc: SyncRuleCheckpointState,
+    update: lib_mongo.mongo.ChangeStreamDocument<SyncRuleDocument>
+  ): Promise<SyncRuleCheckpointState | null> {
+    if (update.operationType == 'insert' || update.operationType == 'replace') {
+      return update.fullDocument;
+    } else if (update.operationType == 'update') {
+      const updatedFields = update.updateDescription.updatedFields ?? {};
+      if (lastDoc._id != update.documentKey._id) {
+        throw new ServiceAssertionError(`Sync rules id mismatch: ${lastDoc._id} != ${update.documentKey._id}`);
+      }
+
+      const mergedDoc: SyncRuleCheckpointState = {
+        _id: lastDoc._id,
+        last_checkpoint: updatedFields.last_checkpoint ?? lastDoc.last_checkpoint,
+        last_checkpoint_lsn: updatedFields.last_checkpoint_lsn ?? lastDoc.last_checkpoint_lsn,
+        state: updatedFields.state ?? lastDoc.state
+      };
+
+      return mergedDoc;
+    } else {
+      // Unknown event type
+      return null;
+    }
+  }
+
+  private getChangeStreamPipeline() {
+    const syncRulesId = this.group_id;
+    const pipeline: mongo.Document[] = [
+      {
+        $match: {
+          'documentKey._id': syncRulesId,
+          operationType: { $in: ['insert', 'update', 'replace'] }
+        }
+      },
+      {
+        $project: {
+          operationType: 1,
+          'documentKey._id': 1,
+          'updateDescription.updatedFields.state': 1,
+          'updateDescription.updatedFields.last_checkpoint': 1,
+          'updateDescription.updatedFields.last_checkpoint_lsn': 1,
+          'fullDocument._id': 1,
+          'fullDocument.state': 1,
+          'fullDocument.last_checkpoint': 1,
+          'fullDocument.last_checkpoint_lsn': 1
+        }
+      }
+    ];
+    return pipeline;
+  }
+
+  async getCheckpointChanges(options: GetCheckpointChangesOptions): Promise<CheckpointChanges> {
+    return CHECKPOINT_INVALIDATE_ALL;
+  }
 }
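
The bulk of the new MongoSyncBucketStorage code is a shared change-stream watch (watchActiveCheckpoint, fanned out through a BroadcastIterable) plus the per-user watchWriteCheckpoint generator. A hedged consumption sketch follows; the function name, import path, and logging are illustrative, and it assumes user_id and signal are the only required options (they are the fields destructured in the hunk above), while base/writeCheckpoint/update are the yielded fields shown above.

    import { MongoSyncBucketStorage } from './MongoSyncBucketStorage.js';

    // Follow checkpoint updates for one user until aborted.
    async function followCheckpoints(bucketStorage: MongoSyncBucketStorage, userId: string) {
      const controller = new AbortController();
      let seen = 0;

      for await (const update of bucketStorage.watchWriteCheckpoint({
        user_id: userId,
        signal: controller.signal
      })) {
        // `base` is the shared active checkpoint (op_id + lsn), `writeCheckpoint`
        // is this user's latest write checkpoint, and `update` carries the
        // bucket/parameter invalidation info computed per checkpoint change.
        console.log(update.base.checkpoint, update.base.lsn, update.writeCheckpoint, update.update);

        // Aborting ends this subscriber's iteration (wrapWithAbort); the shared
        // BroadcastIterable keeps running while other subscribers remain.
        if (++seen >= 10) {
          controller.abort();
        }
      }
    }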
@@ -41,7 +41,7 @@ export class PowerSyncMongo {
     this.client = client;
 
     const db = client.db(options?.database, {
-      ...storage.BSON_DESERIALIZE_OPTIONS
+      ...storage.BSON_DESERIALIZE_INTERNAL_OPTIONS
     });
     this.db = db;
 
@@ -141,6 +141,11 @@ export interface SyncRuleDocument {
   content: string;
 }
 
+export type SyncRuleCheckpointState = Pick<
+  SyncRuleDocument,
+  'last_checkpoint' | 'last_checkpoint_lsn' | '_id' | 'state'
+>;
+
 export interface CustomWriteCheckpointDocument {
   _id: bson.ObjectId;
   user_id: string;
@@ -9,6 +9,7 @@ exports[`sync - mongodb > compacting data - invalidate checkpoint 1`] = `
           "bucket": "mybucket[]",
           "checksum": -93886621,
           "count": 2,
+          "priority": 3,
         },
       ],
       "last_op_id": "2",
@@ -44,6 +45,7 @@ exports[`sync - mongodb > compacting data - invalidate checkpoint 2`] = `
           "bucket": "mybucket[]",
           "checksum": 499012468,
           "count": 4,
+          "priority": 3,
         },
       ],
       "write_checkpoint": undefined,
@@ -102,6 +104,7 @@ exports[`sync - mongodb > expiring token 1`] = `
           "bucket": "mybucket[]",
           "checksum": 0,
           "count": 0,
+          "priority": 3,
         },
       ],
       "last_op_id": "0",
@@ -124,6 +127,80 @@ exports[`sync - mongodb > expiring token 2`] = `
 ]
 `;
 
+exports[`sync - mongodb > sync buckets in order 1`] = `
+[
+  {
+    "checkpoint": {
+      "buckets": [
+        {
+          "bucket": "b0[]",
+          "checksum": 920318466,
+          "count": 1,
+          "priority": 2,
+        },
+        {
+          "bucket": "b1[]",
+          "checksum": -1382098757,
+          "count": 1,
+          "priority": 1,
+        },
+      ],
+      "last_op_id": "2",
+      "write_checkpoint": undefined,
+    },
+  },
+  {
+    "data": {
+      "after": "0",
+      "bucket": "b1[]",
+      "data": [
+        {
+          "checksum": 2912868539n,
+          "data": "{"id":"earlier","description":"Test 2"}",
+          "object_id": "earlier",
+          "object_type": "test",
+          "op": "PUT",
+          "op_id": "2",
+          "subkey": "0dfe86bd-d15b-5fd0-9c7b-a31693030ee0",
+        },
+      ],
+      "has_more": false,
+      "next_after": "2",
+    },
+  },
+  {
+    "partial_checkpoint_complete": {
+      "last_op_id": "2",
+      "priority": 1,
+    },
+  },
+  {
+    "data": {
+      "after": "0",
+      "bucket": "b0[]",
+      "data": [
+        {
+          "checksum": 920318466n,
+          "data": "{"id":"t1","description":"Test 1"}",
+          "object_id": "t1",
+          "object_type": "test",
+          "op": "PUT",
+          "op_id": "1",
+          "subkey": "e5aa2ddc-1328-58fa-a000-0b5ed31eaf1a",
+        },
+      ],
+      "has_more": false,
+      "next_after": "1",
+    },
+  },
+  {
+    "checkpoint_complete": {
+      "last_op_id": "2",
+    },
+  },
+]
+`;
+
 exports[`sync - mongodb > sync global data 1`] = `
 [
   {
@@ -133,6 +210,7 @@ exports[`sync - mongodb > sync global data 1`] = `
           "bucket": "mybucket[]",
           "checksum": -93886621,
           "count": 2,
+          "priority": 3,
         },
       ],
       "last_op_id": "2",
@@ -184,6 +262,7 @@ exports[`sync - mongodb > sync legacy non-raw data 1`] = `
           "bucket": "mybucket[]",
           "checksum": -852817836,
           "count": 1,
+          "priority": 3,
         },
       ],
       "last_op_id": "1",
@@ -231,6 +310,7 @@ exports[`sync - mongodb > sync updates to global data 1`] = `
           "bucket": "mybucket[]",
           "checksum": 0,
           "count": 0,
+          "priority": 3,
         },
       ],
       "last_op_id": "0",
@@ -256,6 +336,7 @@ exports[`sync - mongodb > sync updates to global data 2`] = `
           "bucket": "mybucket[]",
           "checksum": 920318466,
           "count": 1,
+          "priority": 3,
         },
       ],
       "write_checkpoint": undefined,
@@ -299,6 +380,7 @@ exports[`sync - mongodb > sync updates to global data 3`] = `
           "bucket": "mybucket[]",
           "checksum": -93886621,
           "count": 2,
+          "priority": 3,
         },
       ],
       "write_checkpoint": undefined,
@@ -19,7 +19,7 @@ describe('sync - mongodb', () => {
       - SELECT id, description FROM "%"
     `
   );
-  using factory = await INITIALIZED_MONGO_STORAGE_FACTORY();
+  await using factory = await INITIALIZED_MONGO_STORAGE_FACTORY();
   const bucketStorage = factory.getInstance(sync_rules);
 
   const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
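
The test change from `using` to `await using` is presumably because the storage factory is now async-disposable: `await using` awaits [Symbol.asyncDispose] when the enclosing scope exits, whereas plain `using` only invokes a synchronous [Symbol.dispose]. A minimal sketch of that difference, independent of the PowerSync types:

    class FactorySketch {
      async [Symbol.asyncDispose]() {
        // e.g. end MongoDB sessions / close the client
        console.log('async cleanup');
      }
    }

    async function run() {
      // The awaited disposer runs when `run` returns or throws.
      await using factory = new FactorySketch();
      // ... use factory ...
    }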