@powersync/service-module-mongodb-storage 0.13.1 → 0.14.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. package/CHANGELOG.md +41 -0
  2. package/dist/migrations/db/migrations/1770213298299-storage-version.d.ts +3 -0
  3. package/dist/migrations/db/migrations/1770213298299-storage-version.js +29 -0
  4. package/dist/migrations/db/migrations/1770213298299-storage-version.js.map +1 -0
  5. package/dist/storage/MongoBucketStorage.d.ts +7 -15
  6. package/dist/storage/MongoBucketStorage.js +13 -51
  7. package/dist/storage/MongoBucketStorage.js.map +1 -1
  8. package/dist/storage/implementation/MongoChecksums.d.ts +5 -2
  9. package/dist/storage/implementation/MongoChecksums.js +7 -4
  10. package/dist/storage/implementation/MongoChecksums.js.map +1 -1
  11. package/dist/storage/implementation/MongoCompactor.d.ts +12 -0
  12. package/dist/storage/implementation/MongoCompactor.js +86 -27
  13. package/dist/storage/implementation/MongoCompactor.js.map +1 -1
  14. package/dist/storage/implementation/MongoPersistedSyncRulesContent.d.ts +2 -12
  15. package/dist/storage/implementation/MongoPersistedSyncRulesContent.js +24 -24
  16. package/dist/storage/implementation/MongoPersistedSyncRulesContent.js.map +1 -1
  17. package/dist/storage/implementation/MongoSyncBucketStorage.d.ts +4 -2
  18. package/dist/storage/implementation/MongoSyncBucketStorage.js +4 -1
  19. package/dist/storage/implementation/MongoSyncBucketStorage.js.map +1 -1
  20. package/dist/storage/implementation/models.d.ts +13 -1
  21. package/dist/storage/implementation/models.js +9 -1
  22. package/dist/storage/implementation/models.js.map +1 -1
  23. package/dist/storage/storage-index.d.ts +0 -1
  24. package/dist/storage/storage-index.js +0 -1
  25. package/dist/storage/storage-index.js.map +1 -1
  26. package/dist/utils/test-utils.d.ts +3 -4
  27. package/dist/utils/test-utils.js +2 -2
  28. package/dist/utils/test-utils.js.map +1 -1
  29. package/package.json +7 -7
  30. package/src/migrations/db/migrations/1770213298299-storage-version.ts +44 -0
  31. package/src/storage/MongoBucketStorage.ts +21 -59
  32. package/src/storage/implementation/MongoChecksums.ts +14 -6
  33. package/src/storage/implementation/MongoCompactor.ts +102 -29
  34. package/src/storage/implementation/MongoPersistedSyncRulesContent.ts +26 -32
  35. package/src/storage/implementation/MongoSyncBucketStorage.ts +16 -5
  36. package/src/storage/implementation/models.ts +25 -1
  37. package/src/storage/storage-index.ts +0 -1
  38. package/src/utils/test-utils.ts +3 -4
  39. package/test/src/__snapshots__/storage_sync.test.ts.snap +1116 -21
  40. package/test/src/connection-report-storage.test.ts +6 -2
  41. package/test/src/storage_compacting.test.ts +28 -22
  42. package/test/src/storage_sync.test.ts +27 -14
  43. package/test/src/util.ts +3 -0
  44. package/test/tsconfig.json +3 -7
  45. package/tsconfig.tsbuildinfo +1 -1
  46. package/dist/storage/implementation/MongoPersistedSyncRules.d.ts +0 -10
  47. package/dist/storage/implementation/MongoPersistedSyncRules.js +0 -17
  48. package/dist/storage/implementation/MongoPersistedSyncRules.js.map +0 -1
  49. package/src/storage/implementation/MongoPersistedSyncRules.ts +0 -20
  50. package/test/src/__snapshots__/storage.test.ts.snap +0 -25
@@ -1,8 +1,6 @@
1
- import { SqlSyncRules } from '@powersync/service-sync-rules';
2
-
3
1
  import { GetIntanceOptions, storage } from '@powersync/service-core';
4
2
 
5
- import { BaseObserver, ErrorCode, logger, ServiceError } from '@powersync/lib-services-framework';
3
+ import { ErrorCode, ServiceError } from '@powersync/lib-services-framework';
6
4
  import { v4 as uuid } from 'uuid';
7
5
 
8
6
  import * as lib_mongo from '@powersync/lib-service-mongodb';
@@ -11,13 +9,15 @@ import { mongo } from '@powersync/lib-service-mongodb';
11
9
  import { PowerSyncMongo } from './implementation/db.js';
12
10
  import { SyncRuleDocument } from './implementation/models.js';
13
11
  import { MongoPersistedSyncRulesContent } from './implementation/MongoPersistedSyncRulesContent.js';
14
- import { MongoSyncBucketStorage, MongoSyncBucketStorageOptions } from './implementation/MongoSyncBucketStorage.js';
12
+ import { MongoSyncBucketStorage } from './implementation/MongoSyncBucketStorage.js';
15
13
  import { generateSlotName } from '../utils/util.js';
14
+ import { MongoChecksumOptions } from './implementation/MongoChecksums.js';
15
+
16
+ export interface MongoBucketStorageOptions {
17
+ checksumOptions?: Omit<MongoChecksumOptions, 'storageConfig'>;
18
+ }
16
19
 
17
- export class MongoBucketStorage
18
- extends BaseObserver<storage.BucketStorageFactoryListener>
19
- implements storage.BucketStorageFactory
20
- {
20
+ export class MongoBucketStorage extends storage.BucketStorageFactory {
21
21
  private readonly client: mongo.MongoClient;
22
22
  private readonly session: mongo.ClientSession;
23
23
  // TODO: This is still Postgres specific and needs to be reworked
@@ -32,7 +32,7 @@ export class MongoBucketStorage
32
32
  options: {
33
33
  slot_name_prefix: string;
34
34
  },
35
- private internalOptions?: MongoSyncBucketStorageOptions
35
+ private internalOptions?: MongoBucketStorageOptions
36
36
  ) {
37
37
  super();
38
38
  this.client = db.client;
@@ -50,10 +50,15 @@ export class MongoBucketStorage
50
50
  if ((typeof id as any) == 'bigint') {
51
51
  id = Number(id);
52
52
  }
53
- const storage = new MongoSyncBucketStorage(this, id, syncRules, slot_name, undefined, this.internalOptions);
53
+ const storageConfig = (syncRules as MongoPersistedSyncRulesContent).getStorageConfig();
54
+ const storage = new MongoSyncBucketStorage(this, id, syncRules, slot_name, undefined, {
55
+ ...this.internalOptions,
56
+ storageConfig
57
+ });
54
58
  if (!options?.skipLifecycleHooks) {
55
59
  this.iterateListeners((cb) => cb.syncStorageCreated?.(storage));
56
60
  }
61
+
57
62
  storage.registerListener({
58
63
  batchStarted: (batch) => {
59
64
  batch.registerListener({
@@ -81,33 +86,13 @@ export class MongoBucketStorage
81
86
  };
82
87
  }
83
88
 
84
- async configureSyncRules(options: storage.UpdateSyncRulesOptions) {
85
- const next = await this.getNextSyncRulesContent();
86
- const active = await this.getActiveSyncRulesContent();
87
-
88
- if (next?.sync_rules_content == options.content) {
89
- logger.info('Sync rules from configuration unchanged');
90
- return { updated: false };
91
- } else if (next == null && active?.sync_rules_content == options.content) {
92
- logger.info('Sync rules from configuration unchanged');
93
- return { updated: false };
94
- } else {
95
- logger.info('Sync rules updated from configuration');
96
- const persisted_sync_rules = await this.updateSyncRules(options);
97
- return { updated: true, persisted_sync_rules, lock: persisted_sync_rules.current_lock ?? undefined };
98
- }
99
- }
100
-
101
89
  async restartReplication(sync_rules_group_id: number) {
102
90
  const next = await this.getNextSyncRulesContent();
103
91
  const active = await this.getActiveSyncRulesContent();
104
92
 
105
93
  if (next != null && next.id == sync_rules_group_id) {
106
94
  // We need to redo the "next" sync rules
107
- await this.updateSyncRules({
108
- content: next.sync_rules_content,
109
- validate: false
110
- });
95
+ await this.updateSyncRules(next.asUpdateOptions());
111
96
  // Pro-actively stop replicating
112
97
  await this.db.sync_rules.updateOne(
113
98
  {
@@ -123,10 +108,7 @@ export class MongoBucketStorage
123
108
  await this.db.notifyCheckpoint();
124
109
  } else if (next == null && active?.id == sync_rules_group_id) {
125
110
  // Slot removed for "active" sync rules, while there is no "next" one.
126
- await this.updateSyncRules({
127
- content: active.sync_rules_content,
128
- validate: false
129
- });
111
+ await this.updateSyncRules(active.asUpdateOptions());
130
112
 
131
113
  // In this case we keep the old one as active for clients, so that that existing clients
132
114
  // can still get the latest data while we replicate the new ones.
@@ -163,19 +145,6 @@ export class MongoBucketStorage
163
145
  }
164
146
 
165
147
  async updateSyncRules(options: storage.UpdateSyncRulesOptions): Promise<MongoPersistedSyncRulesContent> {
166
- if (options.validate) {
167
- // Parse and validate before applying any changes
168
- SqlSyncRules.fromYaml(options.content, {
169
- // No schema-based validation at this point
170
- schema: undefined,
171
- defaultSchema: 'not_applicable', // Not needed for validation
172
- throwOnError: true
173
- });
174
- } else {
175
- // We do not validate sync rules at this point.
176
- // That is done when using the sync rules, so that the diagnostics API can report the errors.
177
- }
178
-
179
148
  let rules: MongoPersistedSyncRulesContent | undefined = undefined;
180
149
 
181
150
  await this.session.withTransaction(async () => {
@@ -205,9 +174,12 @@ export class MongoBucketStorage
205
174
  const id = Number(id_doc!.op_id);
206
175
  const slot_name = generateSlotName(this.slot_name_prefix, id);
207
176
 
177
+ const storageVersion = options.storageVersion ?? storage.CURRENT_STORAGE_VERSION;
208
178
  const doc: SyncRuleDocument = {
209
179
  _id: id,
210
- content: options.content,
180
+ storage_version: storageVersion,
181
+ content: options.config.yaml,
182
+ serialized_plan: options.config.plan,
211
183
  last_checkpoint: null,
212
184
  last_checkpoint_lsn: null,
213
185
  no_checkpoint_before: null,
@@ -246,11 +218,6 @@ export class MongoBucketStorage
246
218
  return new MongoPersistedSyncRulesContent(this.db, doc);
247
219
  }
248
220
 
249
- async getActiveSyncRules(options: storage.ParseSyncRulesOptions): Promise<storage.PersistedSyncRules | null> {
250
- const content = await this.getActiveSyncRulesContent();
251
- return content?.parsed(options) ?? null;
252
- }
253
-
254
221
  async getNextSyncRulesContent(): Promise<MongoPersistedSyncRulesContent | null> {
255
222
  const doc = await this.db.sync_rules.findOne(
256
223
  {
@@ -265,11 +232,6 @@ export class MongoBucketStorage
265
232
  return new MongoPersistedSyncRulesContent(this.db, doc);
266
233
  }
267
234
 
268
- async getNextSyncRules(options: storage.ParseSyncRulesOptions): Promise<storage.PersistedSyncRules | null> {
269
- const content = await this.getNextSyncRulesContent();
270
- return content?.parsed(options) ?? null;
271
- }
272
-
273
235
  async getReplicatingSyncRules(): Promise<storage.PersistedSyncRulesContent[]> {
274
236
  const docs = await this.db.sync_rules
275
237
  .find({
@@ -13,6 +13,7 @@ import {
13
13
  PartialOrFullChecksum
14
14
  } from '@powersync/service-core';
15
15
  import { PowerSyncMongo } from './db.js';
16
+ import { StorageConfig } from './models.js';
16
17
 
17
18
  /**
18
19
  * Checksum calculation options, primarily for tests.
@@ -27,6 +28,8 @@ export interface MongoChecksumOptions {
27
28
  * Limit on the number of documents to calculate a checksum on at a time.
28
29
  */
29
30
  operationBatchLimit?: number;
31
+
32
+ storageConfig: StorageConfig;
30
33
  }
31
34
 
32
35
  const DEFAULT_BUCKET_BATCH_LIMIT = 200;
@@ -43,12 +46,15 @@ const DEFAULT_OPERATION_BATCH_LIMIT = 50_000;
43
46
  */
44
47
  export class MongoChecksums {
45
48
  private _cache: ChecksumCache | undefined;
49
+ private readonly storageConfig: StorageConfig;
46
50
 
47
51
  constructor(
48
52
  private db: PowerSyncMongo,
49
53
  private group_id: number,
50
- private options?: MongoChecksumOptions
51
- ) {}
54
+ private options: MongoChecksumOptions
55
+ ) {
56
+ this.storageConfig = options.storageConfig;
57
+ }
52
58
 
53
59
  /**
54
60
  * Lazy-instantiated cache.
@@ -222,6 +228,11 @@ export class MongoChecksums {
222
228
  });
223
229
  }
224
230
 
231
+ // Historically, checksum may be stored as 'int' or 'double'.
232
+ // More recently, this should be a 'long'.
233
+ // $toLong ensures that we always sum it as a long, avoiding inaccuracies in the calculations.
234
+ const checksumLong = this.storageConfig.longChecksums ? '$checksum' : { $toLong: '$checksum' };
235
+
225
236
  // Aggregate over a max of `batchLimit` operations at a time.
226
237
  // Let's say we have 3 buckets (A, B, C), each with 10 operations, and our batch limit is 12.
227
238
  // Then we'll do three batches:
@@ -245,10 +256,7 @@ export class MongoChecksums {
245
256
  {
246
257
  $group: {
247
258
  _id: '$_id.b',
248
- // Historically, checksum may be stored as 'int' or 'double'.
249
- // More recently, this should be a 'long'.
250
- // $toLong ensures that we always sum it as a long, avoiding inaccuracies in the calculations.
251
- checksum_total: { $sum: { $toLong: '$checksum' } },
259
+ checksum_total: { $sum: checksumLong },
252
260
  count: { $sum: 1 },
253
261
  has_clear_op: {
254
262
  $max: {
@@ -63,6 +63,7 @@ const DEFAULT_MOVE_BATCH_LIMIT = 2000;
63
63
  const DEFAULT_MOVE_BATCH_QUERY_LIMIT = 10_000;
64
64
  const DEFAULT_MIN_BUCKET_CHANGES = 10;
65
65
  const DEFAULT_MIN_CHANGE_RATIO = 0.1;
66
+ const DIRTY_BUCKET_SCAN_BATCH_SIZE = 2_000;
66
67
 
67
68
  /** This default is primarily for tests. */
68
69
  const DEFAULT_MEMORY_LIMIT_MB = 64;
@@ -485,19 +486,14 @@ export class MongoCompactor {
485
486
  */
486
487
  async populateChecksums(options: { minBucketChanges: number }): Promise<PopulateChecksumCacheResults> {
487
488
  let count = 0;
488
- for await (let buckets of this.dirtyBucketBatches({
489
- minBucketChanges: options.minBucketChanges,
490
- minChangeRatio: 0
491
- })) {
492
- if (this.signal?.aborted) {
489
+ while (!this.signal?.aborted) {
490
+ const buckets = await this.dirtyBucketBatchForChecksums(options);
491
+ if (buckets.length == 0 || this.signal?.aborted) {
492
+ // All done
493
493
  break;
494
494
  }
495
- if (buckets.length == 0) {
496
- continue;
497
- }
498
495
 
499
496
  const start = Date.now();
500
- logger.info(`Calculating checksums for batch of ${buckets.length} buckets, starting at ${buckets[0].bucket}`);
501
497
 
502
498
  // Filter batch by estimated bucket size, to reduce possibility of timeouts
503
499
  let checkBuckets: typeof buckets = [];
@@ -509,9 +505,12 @@ export class MongoCompactor {
509
505
  break;
510
506
  }
511
507
  }
508
+ logger.info(
509
+ `Calculating checksums for batch of ${buckets.length} buckets, estimated count of ${totalCountEstimate}`
510
+ );
512
511
  await this.updateChecksumsBatch(checkBuckets.map((b) => b.bucket));
513
512
  logger.info(`Updated checksums for batch of ${checkBuckets.length} buckets in ${Date.now() - start}ms`);
514
- count += buckets.length;
513
+ count += checkBuckets.length;
515
514
  }
516
515
  return { buckets: count };
517
516
  }
@@ -540,31 +539,60 @@ export class MongoCompactor {
540
539
  let lastId = { g: this.group_id, b: new mongo.MinKey() as any };
541
540
  const maxId = { g: this.group_id, b: new mongo.MaxKey() as any };
542
541
  while (true) {
543
- const batch = await this.db.bucket_state
544
- .find(
545
- {
546
- _id: { $gt: lastId, $lt: maxId },
547
- 'estimate_since_compact.count': { $gte: options.minBucketChanges }
548
- },
549
- {
550
- projection: {
551
- _id: 1,
552
- estimate_since_compact: 1,
553
- compacted_state: 1
542
+ // To avoid timeouts from too many buckets not meeting the minBucketChanges criteria, we use an aggregation pipeline
543
+ // to scan a fixed batch of buckets at a time, but only return buckets that meet the criteria, rather than limiting
544
+ // on the output number.
545
+ const [result] = await this.db.bucket_state
546
+ .aggregate<{
547
+ buckets: Pick<BucketStateDocument, '_id' | 'estimate_since_compact' | 'compacted_state'>[];
548
+ cursor: Pick<BucketStateDocument, '_id'>[];
549
+ }>(
550
+ [
551
+ {
552
+ $match: {
553
+ _id: { $gt: lastId, $lt: maxId }
554
+ }
554
555
  },
555
- sort: {
556
- _id: 1
556
+ {
557
+ $sort: { _id: 1 }
557
558
  },
558
- limit: 2000,
559
- maxTimeMS: MONGO_OPERATION_TIMEOUT_MS
560
- }
559
+ {
560
+ // Scan a fixed number of docs each query so sparse matches don't block progress.
561
+ $limit: DIRTY_BUCKET_SCAN_BATCH_SIZE
562
+ },
563
+ {
564
+ $facet: {
565
+ // This is the results for the batch
566
+ buckets: [
567
+ {
568
+ $match: {
569
+ 'estimate_since_compact.count': { $gte: options.minBucketChanges }
570
+ }
571
+ },
572
+ {
573
+ $project: {
574
+ _id: 1,
575
+ estimate_since_compact: 1,
576
+ compacted_state: 1
577
+ }
578
+ }
579
+ ],
580
+ // This is used for the next query.
581
+ cursor: [{ $sort: { _id: -1 } }, { $limit: 1 }, { $project: { _id: 1 } }]
582
+ }
583
+ }
584
+ ],
585
+ { maxTimeMS: MONGO_OPERATION_TIMEOUT_MS }
561
586
  )
562
587
  .toArray();
563
- if (batch.length == 0) {
588
+
589
+ const cursor = result?.cursor?.[0];
590
+ if (cursor == null) {
564
591
  break;
565
592
  }
566
- lastId = batch[batch.length - 1]._id;
567
- const mapped = batch.map((b) => {
593
+ lastId = cursor._id;
594
+
595
+ const mapped = (result?.buckets ?? []).map((b) => {
568
596
  const updatedCount = b.estimate_since_compact?.count ?? 0;
569
597
  const totalCount = (b.compacted_state?.count ?? 0) + updatedCount;
570
598
  const updatedBytes = b.estimate_since_compact?.bytes ?? 0;
@@ -584,6 +612,51 @@ export class MongoCompactor {
584
612
  }
585
613
  }
586
614
 
615
+ /**
616
+ * Returns a batch of dirty buckets - buckets with most changes first.
617
+ *
618
+ * This cannot be used to iterate on its own - the client is expected to process these buckets and
619
+ * set estimate_since_compact.count: 0 when done, before fetching the next batch.
620
+ *
621
+ * Unlike dirtyBucketBatches, used for compacting, this is specifically designed to be resumable after a restart,
622
+ * since it is used as the last step for initial replication.
623
+ *
624
+ * We currently don't get new data while doing populateChecksums, so we don't need to worry about buckets changing while processing.
625
+ */
626
+ private async dirtyBucketBatchForChecksums(options: {
627
+ minBucketChanges: number;
628
+ }): Promise<{ bucket: string; estimatedCount: number }[]> {
629
+ if (options.minBucketChanges <= 0) {
630
+ throw new ReplicationAssertionError('minBucketChanges must be >= 1');
631
+ }
632
+ // We make use of an index on {_id.g: 1, 'estimate_since_compact.count': -1}
633
+ const dirtyBuckets = await this.db.bucket_state
634
+ .find(
635
+ {
636
+ '_id.g': this.group_id,
637
+ 'estimate_since_compact.count': { $gte: options.minBucketChanges }
638
+ },
639
+ {
640
+ projection: {
641
+ _id: 1,
642
+ estimate_since_compact: 1,
643
+ compacted_state: 1
644
+ },
645
+ sort: {
646
+ 'estimate_since_compact.count': -1
647
+ },
648
+ limit: 200,
649
+ maxTimeMS: MONGO_OPERATION_TIMEOUT_MS
650
+ }
651
+ )
652
+ .toArray();
653
+
654
+ return dirtyBuckets.map((bucket) => ({
655
+ bucket: bucket._id.b,
656
+ estimatedCount: bucket.estimate_since_compact!.count + (bucket.compacted_state?.count ?? 0)
657
+ }));
658
+ }
659
+
587
660
  private async updateChecksumsBatch(buckets: string[]) {
588
661
  const checksums = await this.storage.checksums.computePartialChecksumsDirect(
589
662
  buckets.map((bucket) => {
@@ -1,48 +1,42 @@
1
1
  import { mongo } from '@powersync/lib-service-mongodb';
2
2
  import { storage } from '@powersync/service-core';
3
- import { SqlSyncRules } from '@powersync/service-sync-rules';
4
- import { MongoPersistedSyncRules } from './MongoPersistedSyncRules.js';
5
3
  import { MongoSyncRulesLock } from './MongoSyncRulesLock.js';
6
4
  import { PowerSyncMongo } from './db.js';
7
- import { SyncRuleDocument } from './models.js';
8
-
9
- export class MongoPersistedSyncRulesContent implements storage.PersistedSyncRulesContent {
10
- public readonly slot_name: string;
11
-
12
- public readonly id: number;
13
- public readonly sync_rules_content: string;
14
- public readonly last_checkpoint_lsn: string | null;
15
- public readonly last_fatal_error: string | null;
16
- public readonly last_fatal_error_ts: Date | null;
17
- public readonly last_keepalive_ts: Date | null;
18
- public readonly last_checkpoint_ts: Date | null;
19
- public readonly active: boolean;
5
+ import { getMongoStorageConfig, SyncRuleDocument } from './models.js';
6
+ import { ErrorCode, ServiceError } from '@powersync/lib-services-framework';
20
7
 
8
+ export class MongoPersistedSyncRulesContent extends storage.PersistedSyncRulesContent {
21
9
  public current_lock: MongoSyncRulesLock | null = null;
22
10
 
23
11
  constructor(
24
12
  private db: PowerSyncMongo,
25
13
  doc: mongo.WithId<SyncRuleDocument>
26
14
  ) {
27
- this.id = doc._id;
28
- this.sync_rules_content = doc.content;
29
- this.last_checkpoint_lsn = doc.last_checkpoint_lsn;
30
- // Handle legacy values
31
- this.slot_name = doc.slot_name ?? `powersync_${this.id}`;
32
- this.last_fatal_error = doc.last_fatal_error;
33
- this.last_fatal_error_ts = doc.last_fatal_error_ts;
34
- this.last_checkpoint_ts = doc.last_checkpoint_ts;
35
- this.last_keepalive_ts = doc.last_keepalive_ts;
36
- this.active = doc.state == 'ACTIVE';
15
+ super({
16
+ id: doc._id,
17
+ sync_rules_content: doc.content,
18
+ compiled_plan: doc.serialized_plan ?? null,
19
+ last_checkpoint_lsn: doc.last_checkpoint_lsn,
20
+ // Handle legacy values
21
+ slot_name: doc.slot_name ?? `powersync_${doc._id}`,
22
+ last_fatal_error: doc.last_fatal_error,
23
+ last_fatal_error_ts: doc.last_fatal_error_ts,
24
+ last_checkpoint_ts: doc.last_checkpoint_ts,
25
+ last_keepalive_ts: doc.last_keepalive_ts,
26
+ active: doc.state == 'ACTIVE',
27
+ storageVersion: doc.storage_version ?? storage.LEGACY_STORAGE_VERSION
28
+ });
37
29
  }
38
30
 
39
- parsed(options: storage.ParseSyncRulesOptions) {
40
- return new MongoPersistedSyncRules(
41
- this.id,
42
- SqlSyncRules.fromYaml(this.sync_rules_content, options),
43
- this.last_checkpoint_lsn,
44
- this.slot_name
45
- );
31
+ getStorageConfig() {
32
+ const storageConfig = getMongoStorageConfig(this.storageVersion);
33
+ if (storageConfig == null) {
34
+ throw new ServiceError(
35
+ ErrorCode.PSYNC_S1005,
36
+ `Unsupported storage version ${this.storageVersion} for sync rules ${this.id}`
37
+ );
38
+ }
39
+ return storageConfig;
46
40
  }
47
41
 
48
42
  async lock() {
@@ -32,7 +32,14 @@ import * as timers from 'timers/promises';
32
32
  import { idPrefixFilter, mapOpEntry, readSingleBatch, setSessionSnapshotTime } from '../../utils/util.js';
33
33
  import { MongoBucketStorage } from '../MongoBucketStorage.js';
34
34
  import { PowerSyncMongo } from './db.js';
35
- import { BucketDataDocument, BucketDataKey, BucketStateDocument, SourceKey, SourceTableDocument } from './models.js';
35
+ import {
36
+ BucketDataDocument,
37
+ BucketDataKey,
38
+ BucketStateDocument,
39
+ SourceKey,
40
+ SourceTableDocument,
41
+ StorageConfig
42
+ } from './models.js';
36
43
  import { MongoBucketBatch } from './MongoBucketBatch.js';
37
44
  import { MongoChecksumOptions, MongoChecksums } from './MongoChecksums.js';
38
45
  import { MongoCompactor } from './MongoCompactor.js';
@@ -40,7 +47,8 @@ import { MongoParameterCompactor } from './MongoParameterCompactor.js';
40
47
  import { MongoWriteCheckpointAPI } from './MongoWriteCheckpointAPI.js';
41
48
 
42
49
  export interface MongoSyncBucketStorageOptions {
43
- checksumOptions?: MongoChecksumOptions;
50
+ checksumOptions?: Omit<MongoChecksumOptions, 'storageConfig'>;
51
+ storageConfig: StorageConfig;
44
52
  }
45
53
 
46
54
  /**
@@ -69,12 +77,15 @@ export class MongoSyncBucketStorage
69
77
  public readonly group_id: number,
70
78
  private readonly sync_rules: storage.PersistedSyncRulesContent,
71
79
  public readonly slot_name: string,
72
- writeCheckpointMode?: storage.WriteCheckpointMode,
73
- options?: MongoSyncBucketStorageOptions
80
+ writeCheckpointMode: storage.WriteCheckpointMode | undefined,
81
+ options: MongoSyncBucketStorageOptions
74
82
  ) {
75
83
  super();
76
84
  this.db = factory.db;
77
- this.checksums = new MongoChecksums(this.db, this.group_id, options?.checksumOptions);
85
+ this.checksums = new MongoChecksums(this.db, this.group_id, {
86
+ ...options.checksumOptions,
87
+ storageConfig: options?.storageConfig
88
+ });
78
89
  this.writeCheckpointAPI = new MongoWriteCheckpointAPI({
79
90
  db: this.db,
80
91
  mode: writeCheckpointMode ?? storage.WriteCheckpointMode.MANAGED,
@@ -1,4 +1,4 @@
1
- import { InternalOpId, storage } from '@powersync/service-core';
1
+ import { InternalOpId, SerializedSyncPlan, storage } from '@powersync/service-core';
2
2
  import { SqliteJsonValue } from '@powersync/service-sync-rules';
3
3
  import * as bson from 'bson';
4
4
  import { event_types } from '@powersync/service-types';
@@ -199,11 +199,35 @@ export interface SyncRuleDocument {
199
199
  last_fatal_error_ts: Date | null;
200
200
 
201
201
  content: string;
202
+ serialized_plan?: SerializedSyncPlan | null;
202
203
 
203
204
  lock?: {
204
205
  id: string;
205
206
  expires_at: Date;
206
207
  } | null;
208
+
209
+ storage_version?: number;
210
+ }
211
+
212
+ export interface StorageConfig extends storage.StorageVersionConfig {
213
+ /**
214
+ * When true, bucket_data.checksum is guaranteed to be persisted as a Long.
215
+ *
216
+ * When false, it could also have been persisted as an Int32 or Double, in which case it must be converted to
217
+ * a Long before summing.
218
+ */
219
+ longChecksums: boolean;
220
+ }
221
+
222
+ const LONG_CHECKSUMS_STORAGE_VERSION = 2;
223
+
224
+ export function getMongoStorageConfig(storageVersion: number): StorageConfig | undefined {
225
+ const baseConfig = storage.STORAGE_VERSION_CONFIG[storageVersion];
226
+ if (baseConfig == null) {
227
+ return undefined;
228
+ }
229
+
230
+ return { ...baseConfig, longChecksums: storageVersion >= LONG_CHECKSUMS_STORAGE_VERSION };
207
231
  }
208
232
 
209
233
  export interface CheckpointEventDocument {
@@ -2,7 +2,6 @@ export * from './implementation/db.js';
2
2
  export * from './implementation/models.js';
3
3
  export * from './implementation/MongoBucketBatch.js';
4
4
  export * from './implementation/MongoIdSequence.js';
5
- export * from './implementation/MongoPersistedSyncRules.js';
6
5
  export * from './implementation/MongoPersistedSyncRulesContent.js';
7
6
  export * from './implementation/MongoStorageProvider.js';
8
7
  export * from './implementation/MongoSyncBucketStorage.js';
@@ -1,14 +1,13 @@
1
1
  import { mongo } from '@powersync/lib-service-mongodb';
2
- import { PowerSyncMongo } from '../storage/implementation/db.js';
3
2
  import { TestStorageOptions } from '@powersync/service-core';
3
+ import { MongoBucketStorage, MongoBucketStorageOptions } from '../storage/MongoBucketStorage.js';
4
4
  import { MongoReportStorage } from '../storage/MongoReportStorage.js';
5
- import { MongoBucketStorage } from '../storage/MongoBucketStorage.js';
6
- import { MongoSyncBucketStorageOptions } from '../storage/implementation/MongoSyncBucketStorage.js';
5
+ import { PowerSyncMongo } from '../storage/implementation/db.js';
7
6
 
8
7
  export type MongoTestStorageOptions = {
9
8
  url: string;
10
9
  isCI: boolean;
11
- internalOptions?: MongoSyncBucketStorageOptions;
10
+ internalOptions?: MongoBucketStorageOptions;
12
11
  };
13
12
 
14
13
  export function mongoTestStorageFactoryGenerator(factoryOptions: MongoTestStorageOptions) {