@powersync/service-module-mongodb-storage 0.0.0-dev-20251015143910 → 0.0.0-dev-20251030082344

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. package/CHANGELOG.md +26 -12
  2. package/dist/index.d.ts +0 -1
  3. package/dist/index.js +0 -1
  4. package/dist/index.js.map +1 -1
  5. package/dist/migrations/db/migrations/1760433882550-bucket-state-index2.js +25 -0
  6. package/dist/migrations/db/migrations/1760433882550-bucket-state-index2.js.map +1 -0
  7. package/dist/storage/MongoBucketStorage.js +1 -1
  8. package/dist/storage/MongoBucketStorage.js.map +1 -1
  9. package/dist/storage/implementation/MongoBucketBatch.js +1 -1
  10. package/dist/storage/implementation/MongoBucketBatch.js.map +1 -1
  11. package/dist/storage/implementation/MongoCompactor.d.ts +13 -3
  12. package/dist/storage/implementation/MongoCompactor.js +86 -90
  13. package/dist/storage/implementation/MongoCompactor.js.map +1 -1
  14. package/dist/storage/implementation/MongoStorageProvider.d.ts +1 -1
  15. package/dist/storage/implementation/MongoStorageProvider.js +3 -7
  16. package/dist/storage/implementation/MongoStorageProvider.js.map +1 -1
  17. package/dist/storage/implementation/MongoSyncBucketStorage.d.ts +2 -2
  18. package/dist/storage/implementation/MongoSyncBucketStorage.js +16 -5
  19. package/dist/storage/implementation/MongoSyncBucketStorage.js.map +1 -1
  20. package/dist/storage/implementation/MongoTestStorageFactoryGenerator.d.ts +9 -0
  21. package/dist/storage/implementation/MongoTestStorageFactoryGenerator.js +20 -0
  22. package/dist/storage/implementation/MongoTestStorageFactoryGenerator.js.map +1 -0
  23. package/dist/storage/implementation/MongoWriteCheckpointAPI.js +6 -2
  24. package/dist/storage/implementation/MongoWriteCheckpointAPI.js.map +1 -1
  25. package/dist/storage/implementation/PersistedBatch.js +1 -1
  26. package/dist/storage/implementation/PersistedBatch.js.map +1 -1
  27. package/dist/storage/implementation/db.d.ts +3 -4
  28. package/dist/storage/implementation/db.js +9 -14
  29. package/dist/storage/implementation/db.js.map +1 -1
  30. package/dist/storage/implementation/models.d.ts +0 -3
  31. package/dist/{utils → storage/implementation}/util.d.ts +7 -2
  32. package/dist/{utils → storage/implementation}/util.js +16 -1
  33. package/dist/storage/implementation/util.js.map +1 -0
  34. package/dist/storage/storage-index.d.ts +2 -3
  35. package/dist/storage/storage-index.js +2 -3
  36. package/dist/storage/storage-index.js.map +1 -1
  37. package/package.json +9 -9
  38. package/src/index.ts +0 -1
  39. package/src/migrations/db/migrations/{1752661449910-connection-reporting.ts → 1760433882550-bucket-state-index2.ts} +6 -30
  40. package/src/storage/MongoBucketStorage.ts +1 -1
  41. package/src/storage/implementation/MongoBucketBatch.ts +1 -1
  42. package/src/storage/implementation/MongoCompactor.ts +100 -96
  43. package/src/storage/implementation/MongoStorageProvider.ts +4 -9
  44. package/src/storage/implementation/MongoSyncBucketStorage.ts +19 -7
  45. package/src/storage/implementation/MongoTestStorageFactoryGenerator.ts +32 -0
  46. package/src/storage/implementation/MongoWriteCheckpointAPI.ts +6 -2
  47. package/src/storage/implementation/PersistedBatch.ts +1 -1
  48. package/src/storage/implementation/db.ts +12 -16
  49. package/src/storage/implementation/models.ts +0 -3
  50. package/src/{utils → storage/implementation}/util.ts +19 -3
  51. package/src/storage/storage-index.ts +2 -3
  52. package/test/src/storage.test.ts +51 -3
  53. package/test/src/storage_compacting.test.ts +17 -2
  54. package/test/src/util.ts +2 -6
  55. package/tsconfig.tsbuildinfo +1 -1
  56. package/dist/migrations/db/migrations/1752661449910-connection-reporting.js +0 -36
  57. package/dist/migrations/db/migrations/1752661449910-connection-reporting.js.map +0 -1
  58. package/dist/storage/MongoReportStorage.d.ts +0 -17
  59. package/dist/storage/MongoReportStorage.js +0 -152
  60. package/dist/storage/MongoReportStorage.js.map +0 -1
  61. package/dist/utils/test-utils.d.ts +0 -13
  62. package/dist/utils/test-utils.js +0 -40
  63. package/dist/utils/test-utils.js.map +0 -1
  64. package/dist/utils/util.js.map +0 -1
  65. package/dist/utils/utils-index.d.ts +0 -2
  66. package/dist/utils/utils-index.js +0 -3
  67. package/dist/utils/utils-index.js.map +0 -1
  68. package/src/storage/MongoReportStorage.ts +0 -174
  69. package/src/utils/test-utils.ts +0 -57
  70. package/src/utils/utils-index.ts +0 -2
  71. package/test/src/__snapshots__/connection-report-storage.test.ts.snap +0 -215
  72. package/test/src/connection-report-storage.test.ts +0 -133
  73. package/dist/migrations/db/migrations/{1752661449910-connection-reporting.d.ts → 1760433882550-bucket-state-index2.d.ts} +0 -0
package/dist/storage/storage-index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"storage-index.js","sourceRoot":"","sources":["../../src/storage/storage-index.ts"],"names":[],"mappings":"AAAA,cAAc,wBAAwB,CAAC;AACvC,cAAc,4BAA4B,CAAC;AAC3C,cAAc,sCAAsC,CAAC;AACrD,cAAc,qCAAqC,CAAC;AACpD,cAAc,6CAA6C,CAAC;AAC5D,cAAc,oDAAoD,CAAC;AACnE,cAAc,0CAA0C,CAAC;AACzD,cAAc,4CAA4C,CAAC;AAC3D,cAAc,wCAAwC,CAAC;AACvD,cAAc,oCAAoC,CAAC;AACnD,cAAc,oCAAoC,CAAC;AACnD,cAAc,kBAAkB,CAAC;AACjC,cAAc,yBAAyB,CAAC;AACxC,cAAc,yBAAyB,CAAC;AACxC,OAAO,KAAK,UAAU,MAAM,wBAAwB,CAAC"}
+ {"version":3,"file":"storage-index.js","sourceRoot":"","sources":["../../src/storage/storage-index.ts"],"names":[],"mappings":"AAAA,cAAc,wBAAwB,CAAC;AACvC,cAAc,4BAA4B,CAAC;AAC3C,cAAc,sCAAsC,CAAC;AACrD,cAAc,qCAAqC,CAAC;AACpD,cAAc,6CAA6C,CAAC;AAC5D,cAAc,oDAAoD,CAAC;AACnE,cAAc,0CAA0C,CAAC;AACzD,cAAc,4CAA4C,CAAC;AAC3D,cAAc,wCAAwC,CAAC;AACvD,cAAc,sDAAsD,CAAC;AACrE,cAAc,oCAAoC,CAAC;AACnD,cAAc,oCAAoC,CAAC;AACnD,cAAc,0BAA0B,CAAC;AACzC,cAAc,yBAAyB,CAAC"}
package/package.json CHANGED
@@ -2,7 +2,7 @@
   "name": "@powersync/service-module-mongodb-storage",
   "repository": "https://github.com/powersync-ja/powersync-service",
   "types": "dist/index.d.ts",
-  "version": "0.0.0-dev-20251015143910",
+  "version": "0.0.0-dev-20251030082344",
   "main": "dist/index.js",
   "license": "FSL-1.1-ALv2",
   "type": "module",
@@ -22,20 +22,20 @@
     }
   },
   "dependencies": {
-    "bson": "^6.10.3",
+    "bson": "^6.10.4",
     "ix": "^5.0.0",
     "lru-cache": "^10.2.2",
     "ts-codec": "^1.3.0",
     "uuid": "^11.1.0",
-    "@powersync/lib-service-mongodb": "0.0.0-dev-20251015143910",
-    "@powersync/lib-services-framework": "0.0.0-dev-20251015143910",
-    "@powersync/service-core": "0.0.0-dev-20251015143910",
-    "@powersync/service-types": "0.0.0-dev-20251015143910",
-    "@powersync/service-jsonbig": "0.17.11",
-    "@powersync/service-sync-rules": "0.0.0-dev-20251015143910"
+    "@powersync/lib-service-mongodb": "0.0.0-dev-20251030082344",
+    "@powersync/lib-services-framework": "0.0.0-dev-20251030082344",
+    "@powersync/service-core": "0.0.0-dev-20251030082344",
+    "@powersync/service-jsonbig": "0.0.0-dev-20251030082344",
+    "@powersync/service-sync-rules": "0.0.0-dev-20251030082344",
+    "@powersync/service-types": "0.0.0-dev-20251030082344"
   },
   "devDependencies": {
-    "@powersync/service-core-tests": "0.0.0-dev-20251015143910"
+    "@powersync/service-core-tests": "0.0.0-dev-20251030082344"
   },
   "scripts": {
     "build": "tsc -b",
package/src/index.ts CHANGED
@@ -5,4 +5,3 @@ export * as storage from './storage/storage-index.js';
 
 export * from './types/types.js';
 export * as types from './types/types.js';
-export * as utils from './utils/utils-index.js';
package/src/migrations/db/migrations/{1752661449910-connection-reporting.ts → 1760433882550-bucket-state-index2.ts} RENAMED
@@ -2,6 +2,8 @@ import { migrations } from '@powersync/service-core';
 import * as storage from '../../../storage/storage-index.js';
 import { MongoStorageConfig } from '../../../types/types.js';
 
+const INDEX_NAME = 'dirty_buckets';
+
 export const up: migrations.PowerSyncMigrationFunction = async (context) => {
   const {
     service_context: { configuration }
@@ -9,35 +11,7 @@ export const up: migrations.PowerSyncMigrationFunction = async (context) => {
   const db = storage.createPowerSyncMongo(configuration.storage as MongoStorageConfig);
 
   try {
-    await db.createConnectionReportingCollection();
-
-    await db.connection_report_events.createIndex(
-      {
-        connected_at: 1,
-        jwt_exp: 1,
-        disconnected_at: 1
-      },
-      { name: 'connection_list_index' }
-    );
-
-    await db.connection_report_events.createIndex(
-      {
-        user_id: 1
-      },
-      { name: 'connection_user_id_index' }
-    );
-    await db.connection_report_events.createIndex(
-      {
-        client_id: 1
-      },
-      { name: 'connection_client_id_index' }
-    );
-    await db.connection_report_events.createIndex(
-      {
-        sdk: 1
-      },
-      { name: 'connection_index' }
-    );
+    await db.createBucketStateIndex2();
   } finally {
     await db.client.close();
   }
@@ -51,7 +25,9 @@ export const down: migrations.PowerSyncMigrationFunction = async (context) => {
   const db = storage.createPowerSyncMongo(configuration.storage as MongoStorageConfig);
 
   try {
-    await db.db.dropCollection('connection_report_events');
+    if (await db.bucket_state.indexExists(INDEX_NAME)) {
+      await db.bucket_state.dropIndex(INDEX_NAME);
+    }
   } finally {
     await db.client.close();
   }
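
The index this migration provisions via `createBucketStateIndex2()` is defined in the `db.ts` changes further down in this diff. A minimal standalone sketch of the equivalent driver call (the client setup and database name are illustrative, not part of the package):

```ts
import { MongoClient } from 'mongodb';

// Sketch only: mirrors the createBucketStateIndex2() implementation from
// db.ts below. The descending count component lets the compactor read the
// "dirtiest" buckets first.
async function createDirtyCountIndex(url: string) {
  const client = new MongoClient(url);
  try {
    await client
      .db('powersync') // illustrative database name
      .collection('bucket_state')
      .createIndex({ '_id.g': 1, 'estimate_since_compact.count': -1 }, { name: 'dirty_count' });
  } finally {
    await client.close();
  }
}
```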
package/src/storage/MongoBucketStorage.ts CHANGED
@@ -12,7 +12,7 @@ import { PowerSyncMongo } from './implementation/db.js';
 import { SyncRuleDocument } from './implementation/models.js';
 import { MongoPersistedSyncRulesContent } from './implementation/MongoPersistedSyncRulesContent.js';
 import { MongoSyncBucketStorage, MongoSyncBucketStorageOptions } from './implementation/MongoSyncBucketStorage.js';
-import { generateSlotName } from '../utils/util.js';
+import { generateSlotName } from './implementation/util.js';
 
 export class MongoBucketStorage
   extends BaseObserver<storage.BucketStorageFactoryListener>
package/src/storage/implementation/MongoBucketBatch.ts CHANGED
@@ -28,7 +28,7 @@ import { MongoIdSequence } from './MongoIdSequence.js';
 import { batchCreateCustomWriteCheckpoints } from './MongoWriteCheckpointAPI.js';
 import { cacheKey, OperationBatch, RecordOperation } from './OperationBatch.js';
 import { PersistedBatch } from './PersistedBatch.js';
-import { idPrefixFilter } from '../../utils/util.js';
+import { idPrefixFilter } from './util.js';
 
 /**
  * 15MB
package/src/storage/implementation/MongoCompactor.ts CHANGED
@@ -1,6 +1,13 @@
 import { mongo, MONGO_OPERATION_TIMEOUT_MS } from '@powersync/lib-service-mongodb';
 import { logger, ReplicationAssertionError, ServiceAssertionError } from '@powersync/lib-services-framework';
-import { addChecksums, InternalOpId, isPartialChecksum, storage, utils } from '@powersync/service-core';
+import {
+  addChecksums,
+  InternalOpId,
+  isPartialChecksum,
+  PopulateChecksumCacheResults,
+  storage,
+  utils
+} from '@powersync/service-core';
 
 import { PowerSyncMongo } from './db.js';
 import { BucketDataDocument, BucketDataKey, BucketStateDocument } from './models.js';
@@ -10,6 +17,7 @@ import { cacheKey } from './OperationBatch.js';
 interface CurrentBucketState {
   /** Bucket name */
   bucket: string;
+
   /**
    * Rows seen in the bucket, with the last op_id of each.
    */
@@ -96,67 +104,56 @@ export class MongoCompactor {
         // We can make this more efficient later on by iterating
         // through the buckets in a single query.
         // That makes batching more tricky, so we leave for later.
-        await this.compactInternal(bucket);
+        await this.compactSingleBucket(bucket);
       }
     } else {
-      await this.compactInternal(undefined);
+      await this.compactDirtyBuckets();
     }
   }
 
-  async compactInternal(bucket: string | undefined) {
-    const idLimitBytes = this.idLimitBytes;
+  private async compactDirtyBuckets() {
+    while (!this.signal?.aborted) {
+      // Process all buckets with 1 or more changes since last time
+      const buckets = await this.dirtyBucketBatch({ minBucketChanges: 1 });
+      if (buckets.length == 0) {
+        // All done
+        break;
+      }
+      for (let bucket of buckets) {
+        await this.compactSingleBucket(bucket);
+      }
+    }
+  }
 
-    let currentState: CurrentBucketState | null = null;
+  private async compactSingleBucket(bucket: string) {
+    const idLimitBytes = this.idLimitBytes;
 
-    let bucketLower: string | mongo.MinKey;
-    let bucketUpper: string | mongo.MaxKey;
+    let currentState: CurrentBucketState = {
+      bucket,
+      seen: new Map(),
+      trackingSize: 0,
+      lastNotPut: null,
+      opsSincePut: 0,
 
-    if (bucket == null) {
-      bucketLower = new mongo.MinKey();
-      bucketUpper = new mongo.MaxKey();
-    } else if (bucket.includes('[')) {
-      // Exact bucket name
-      bucketLower = bucket;
-      bucketUpper = bucket;
-    } else {
-      // Bucket definition name
-      bucketLower = `${bucket}[`;
-      bucketUpper = `${bucket}[\uFFFF`;
-    }
+      checksum: 0,
+      opCount: 0,
+      opBytes: 0
+    };
 
     // Constant lower bound
     const lowerBound: BucketDataKey = {
       g: this.group_id,
-      b: bucketLower as string,
+      b: bucket,
       o: new mongo.MinKey() as any
     };
 
     // Upper bound is adjusted for each batch
     let upperBound: BucketDataKey = {
       g: this.group_id,
-      b: bucketUpper as string,
+      b: bucket,
       o: new mongo.MaxKey() as any
     };
 
-    const doneWithBucket = async () => {
-      if (currentState == null) {
-        return;
-      }
-      // Free memory before clearing bucket
-      currentState.seen.clear();
-      if (currentState.lastNotPut != null && currentState.opsSincePut >= 1) {
-        logger.info(
-          `Inserting CLEAR at ${this.group_id}:${currentState.bucket}:${currentState.lastNotPut} to remove ${currentState.opsSincePut} operations`
-        );
-        // Need flush() before clear()
-        await this.flush();
-        await this.clearBucket(currentState);
-      }
-
-      // Do this _after_ clearBucket so that we have accurate counts.
-      this.updateBucketChecksums(currentState);
-    };
-
     while (!this.signal?.aborted) {
       // Query one batch at a time, to avoid cursor timeouts
       const cursor = this.db.bucket_data.aggregate<BucketDataDocument & { size: number | bigint }>(
@@ -184,7 +181,11 @@
           }
         }
       ],
-      { batchSize: this.moveBatchQueryLimit }
+      {
+        // batchSize is 1 more than limit to auto-close the cursor.
+        // See https://github.com/mongodb/node-mongodb-native/pull/4580
+        batchSize: this.moveBatchQueryLimit + 1
+      }
     );
     // We don't limit to a single batch here, since that often causes MongoDB to scan through more than it returns.
     // Instead, we load up to the limit.
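
This `batchSize: limit + 1` pattern recurs throughout the rest of this diff. A minimal standalone sketch of the idea (the collection name and query are illustrative): when the batch size exceeds the limit, the server can exhaust and close the cursor in its first reply, so the driver never needs a separate killCursors round trip.

```ts
import { MongoClient } from 'mongodb';

// Illustrative sketch, not part of the package: fetch up to `limit` documents
// in a single batch. With batchSize > limit, the first reply already exhausts
// the cursor, so the server closes it and no killCursors command is sent.
async function fetchSingleBatch(client: MongoClient, limit: number) {
  return await client
    .db('powersync')
    .collection('bucket_data')
    .find(
      {},
      {
        limit,
        batchSize: limit + 1, // one more than limit to auto-close the cursor
        singleBatch: true
      }
    )
    .toArray();
}
```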
@@ -199,22 +200,6 @@
       upperBound = batch[batch.length - 1]._id;
 
       for (let doc of batch) {
-        if (currentState == null || doc._id.b != currentState.bucket) {
-          await doneWithBucket();
-
-          currentState = {
-            bucket: doc._id.b,
-            seen: new Map(),
-            trackingSize: 0,
-            lastNotPut: null,
-            opsSincePut: 0,
-
-            checksum: 0,
-            opCount: 0,
-            opBytes: 0
-          };
-        }
-
         if (doc._id.o > this.maxOpId) {
           continue;
         }
@@ -285,12 +270,22 @@
         }
       }
 
-      if (currentState != null) {
-        logger.info(`Processed batch of length ${batch.length} current bucket: ${currentState.bucket}`);
-      }
+      logger.info(`Processed batch of length ${batch.length} current bucket: ${bucket}`);
+    }
+
+    // Free memory before clearing bucket
+    currentState.seen.clear();
+    if (currentState.lastNotPut != null && currentState.opsSincePut >= 1) {
+      logger.info(
+        `Inserting CLEAR at ${this.group_id}:${bucket}:${currentState.lastNotPut} to remove ${currentState.opsSincePut} operations`
+      );
+      // Need flush() before clear()
+      await this.flush();
+      await this.clearBucket(currentState);
     }
 
-    await doneWithBucket();
+    // Do this _after_ clearBucket so that we have accurate counts.
+    this.updateBucketChecksums(currentState);
 
     // Need another flush after updateBucketChecksums()
     await this.flush();
@@ -478,50 +473,55 @@
   /**
    * Subset of compact, only populating checksums where relevant.
    */
-  async populateChecksums() {
-    // This is updated after each batch
-    let lowerBound: BucketStateDocument['_id'] = {
-      g: this.group_id,
-      b: new mongo.MinKey() as any
-    };
-    // This is static
-    const upperBound: BucketStateDocument['_id'] = {
-      g: this.group_id,
-      b: new mongo.MaxKey() as any
-    };
+  async populateChecksums(options: { minBucketChanges: number }): Promise<PopulateChecksumCacheResults> {
+    let count = 0;
     while (!this.signal?.aborted) {
-      // By filtering buckets, we effectively make this "resumeable".
-      const filter: mongo.Filter<BucketStateDocument> = {
-        _id: {
-          $gt: lowerBound,
-          $lt: upperBound
-        },
-        compacted_state: { $exists: false }
-      };
+      const buckets = await this.dirtyBucketBatch(options);
+      if (buckets.length == 0) {
+        // All done
+        break;
+      }
+      const start = Date.now();
+      logger.info(`Calculating checksums for batch of ${buckets.length} buckets, starting at ${buckets[0]}`);
+
+      await this.updateChecksumsBatch(buckets);
+      logger.info(`Updated checksums for batch of ${buckets.length} buckets in ${Date.now() - start}ms`);
+      count += buckets.length;
+    }
+    return { buckets: count };
+  }
 
-      const bucketsWithoutChecksums = await this.db.bucket_state
-        .find(filter, {
+  /**
+   * Returns a batch of dirty buckets - buckets with most changes first.
+   *
+   * This cannot be used to iterate on its own - the client is expected to process these buckets and
+   * set estimate_since_compact.count: 0 when done, before fetching the next batch.
+   */
+  private async dirtyBucketBatch(options: { minBucketChanges: number }): Promise<string[]> {
+    if (options.minBucketChanges <= 0) {
+      throw new ReplicationAssertionError('minBucketChanges must be >= 1');
+    }
+    // We make use of an index on {_id.g: 1, 'estimate_since_compact.count': -1}
+    const dirtyBuckets = await this.db.bucket_state
+      .find(
+        {
+          '_id.g': this.group_id,
+          'estimate_since_compact.count': { $gte: options.minBucketChanges }
+        },
+        {
          projection: {
            _id: 1
          },
          sort: {
-            _id: 1
+            'estimate_since_compact.count': -1
          },
          limit: 5_000,
          maxTimeMS: MONGO_OPERATION_TIMEOUT_MS
-        })
-        .toArray();
-      if (bucketsWithoutChecksums.length == 0) {
-        // All done
-        break;
-      }
-
-      logger.info(`Calculating checksums for batch of ${bucketsWithoutChecksums.length} buckets`);
-
-      await this.updateChecksumsBatch(bucketsWithoutChecksums.map((b) => b._id.b));
+        }
+      )
+      .toArray();
 
-      lowerBound = bucketsWithoutChecksums[bucketsWithoutChecksums.length - 1]._id;
-    }
+    return dirtyBuckets.map((bucket) => bucket._id.b);
   }
 
   private async updateChecksumsBatch(buckets: string[]) {
@@ -555,6 +555,10 @@
             count: bucketChecksum.count,
             checksum: BigInt(bucketChecksum.checksum),
             bytes: null
+          },
+          estimate_since_compact: {
+            count: 0,
+            bytes: 0
           }
         }
       },
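
The doc comment on `dirtyBucketBatch()` above describes an easy-to-miss contract: the query always returns the buckets with the highest `estimate_since_compact.count`, so each batch must be processed and its counters reset (as `updateChecksumsBatch` does via the `estimate_since_compact: { count: 0, bytes: 0 }` update above) before the next fetch. A minimal sketch of that loop, with hypothetical `fetchDirtyBuckets`/`processAndReset` helpers standing in for the private methods:

```ts
// Sketch of the iterate-process-reset contract; the names are illustrative,
// not the package's API.
type DirtyBucketSource = {
  fetchDirtyBuckets(minBucketChanges: number): Promise<string[]>;
  processAndReset(buckets: string[]): Promise<void>; // must zero estimate_since_compact.count
};

async function drainDirtyBuckets(source: DirtyBucketSource, signal?: AbortSignal) {
  while (!signal?.aborted) {
    const buckets = await source.fetchDirtyBuckets(1);
    if (buckets.length == 0) {
      break; // all done
    }
    // If processAndReset did not reset the counters, this loop would fetch
    // the same buckets forever.
    await source.processAndReset(buckets);
  }
}
```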
package/src/storage/implementation/MongoStorageProvider.ts CHANGED
@@ -4,9 +4,8 @@ import { POWERSYNC_VERSION, storage } from '@powersync/service-core';
 import { MongoStorageConfig } from '../../types/types.js';
 import { MongoBucketStorage } from '../MongoBucketStorage.js';
 import { PowerSyncMongo } from './db.js';
-import { MongoReportStorage } from '../MongoReportStorage.js';
 
-export class MongoStorageProvider implements storage.StorageProvider {
+export class MongoStorageProvider implements storage.BucketStorageProvider {
   get type() {
     return lib_mongo.MONGO_CONNECTION_TYPE;
   }
@@ -38,19 +37,15 @@
     await client.connect();
 
     const database = new PowerSyncMongo(client, { database: resolvedConfig.storage.database });
-    const syncStorageFactory = new MongoBucketStorage(database, {
+    const factory = new MongoBucketStorage(database, {
       // TODO currently need the entire resolved config due to this
       slot_name_prefix: resolvedConfig.slot_name_prefix
     });
-
-    // Storage factory for reports
-    const reportStorageFactory = new MongoReportStorage(database);
     return {
-      storage: syncStorageFactory,
-      reportStorage: reportStorageFactory,
+      storage: factory,
       shutDown: async () => {
         shuttingDown = true;
-        await syncStorageFactory[Symbol.asyncDispose]();
+        await factory[Symbol.asyncDispose]();
         await client.close();
       },
       tearDown: () => {
package/src/storage/implementation/MongoSyncBucketStorage.ts CHANGED
@@ -16,6 +16,8 @@ import {
   InternalOpId,
   internalToExternalOpId,
   maxLsn,
+  PopulateChecksumCacheOptions,
+  PopulateChecksumCacheResults,
   ProtocolOpId,
   ReplicationCheckpoint,
   storage,
@@ -35,8 +37,7 @@ import { MongoChecksumOptions, MongoChecksums } from './MongoChecksums.js';
 import { MongoCompactor } from './MongoCompactor.js';
 import { MongoParameterCompactor } from './MongoParameterCompactor.js';
 import { MongoWriteCheckpointAPI } from './MongoWriteCheckpointAPI.js';
-import { idPrefixFilter, mapOpEntry, readSingleBatch, setSessionSnapshotTime } from '../../utils/util.js';
-
+import { idPrefixFilter, mapOpEntry, readSingleBatch, setSessionSnapshotTime } from './util.js';
 
 export interface MongoSyncBucketStorageOptions {
   checksumOptions?: MongoChecksumOptions;
@@ -404,7 +405,9 @@
         limit: batchLimit,
         // Increase batch size above the default 101, so that we can fill an entire batch in
         // one go.
-        batchSize: batchLimit,
+        // batchSize is 1 more than limit to auto-close the cursor.
+        // See https://github.com/mongodb/node-mongodb-native/pull/4580
+        batchSize: batchLimit + 1,
         // Raw mode is returns an array of Buffer instead of parsed documents.
         // We use it so that:
         // 1. We can calculate the document size accurately without serializing again.
@@ -664,7 +667,7 @@
     }
   }
 
-  async populatePersistentChecksumCache(options: Required<Pick<CompactOptions, 'signal' | 'maxOpId'>>): Promise<void> {
+  async populatePersistentChecksumCache(options: PopulateChecksumCacheOptions): Promise<PopulateChecksumCacheResults> {
     logger.info(`Populating persistent checksum cache...`);
     const start = Date.now();
     // We do a minimal compact here.
@@ -675,9 +678,14 @@
       memoryLimitMB: 0
     });
 
-    await compactor.populateChecksums();
+    const result = await compactor.populateChecksums({
+      // There are cases with millions of small buckets, in which case it can take very long to
+      // populate the checksums, with minimal benefit. We skip the small buckets here.
+      minBucketChanges: options.minBucketChanges ?? 10
+    });
    const duration = Date.now() - start;
    logger.info(`Populated persistent checksum cache in ${(duration / 1000).toFixed(1)}s`);
+    return result;
   }
 
   /**
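
Tying the two changes together, callers can now bound the work and read back how many buckets were processed. A hypothetical call site, assuming `PopulateChecksumCacheOptions` carries the `signal`/`maxOpId` fields from the old signature plus the new optional `minBucketChanges`:

```ts
// Hypothetical call site; bucketStorage and checkpoint are assumed to exist
// in the surrounding code.
const controller = new AbortController();
const result = await bucketStorage.populatePersistentChecksumCache({
  signal: controller.signal,
  maxOpId: checkpoint,
  // Buckets with fewer changes than this are skipped (defaults to 10 above).
  minBucketChanges: 10
});
logger.info(`Cached checksums for ${result.buckets} buckets`);
```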
@@ -906,7 +914,9 @@
           '_id.b': 1
         },
         limit: limit + 1,
-        batchSize: limit + 1,
+        // batchSize is 1 more than limit to auto-close the cursor.
+        // See https://github.com/mongodb/node-mongodb-native/pull/4580
+        batchSize: limit + 2,
         singleBatch: true
       }
     )
@@ -936,7 +946,9 @@
           lookup: 1
         },
         limit: limit + 1,
-        batchSize: limit + 1,
+        // batchSize is 1 more than limit to auto-close the cursor.
+        // See https://github.com/mongodb/node-mongodb-native/pull/4580
+        batchSize: limit + 2,
         singleBatch: true
       }
     )
package/src/storage/implementation/MongoTestStorageFactoryGenerator.ts ADDED
@@ -0,0 +1,32 @@
+import { TestStorageOptions } from '@powersync/service-core';
+import { MongoBucketStorage } from '../MongoBucketStorage.js';
+import { connectMongoForTests } from './util.js';
+import { MongoSyncBucketStorageOptions } from './MongoSyncBucketStorage.js';
+
+export type MongoTestStorageOptions = {
+  url: string;
+  isCI: boolean;
+  internalOptions?: MongoSyncBucketStorageOptions;
+};
+
+export const MongoTestStorageFactoryGenerator = (factoryOptions: MongoTestStorageOptions) => {
+  return async (options?: TestStorageOptions) => {
+    const db = connectMongoForTests(factoryOptions.url, factoryOptions.isCI);
+
+    // None of the tests insert data into this collection, so it was never created
+    if (!(await db.db.listCollections({ name: db.bucket_parameters.collectionName }).hasNext())) {
+      await db.db.createCollection('bucket_parameters');
+    }
+
+    if (!options?.doNotClear) {
+      await db.clear();
+    }
+
+    // Full migrations are not currently run for tests, so we manually create the important ones
+    await db.createCheckpointEventsCollection();
+    await db.createBucketStateIndex();
+    await db.createBucketStateIndex2();
+
+    return new MongoBucketStorage(db, { slot_name_prefix: 'test_' }, factoryOptions.internalOptions);
+  };
+};
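
A hypothetical test-setup sketch using the new generator (the connection URL and option values are illustrative):

```ts
// Hypothetical usage; the URL points at a local test instance.
const factory = MongoTestStorageFactoryGenerator({
  url: 'mongodb://localhost:27017/powersync_test',
  isCI: false
});

// Each call yields a MongoBucketStorage backed by a cleared database
// (pass { doNotClear: true } to keep existing data between tests).
const storageFactory = await factory({ doNotClear: false });
```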
package/src/storage/implementation/MongoWriteCheckpointAPI.ts CHANGED
@@ -111,7 +111,9 @@ export class MongoWriteCheckpointAPI implements storage.WriteCheckpointAPI {
       },
       {
         limit: limit + 1,
-        batchSize: limit + 1,
+        // batchSize is 1 more than limit to auto-close the cursor.
+        // See https://github.com/mongodb/node-mongodb-native/pull/4580
+        batchSize: limit + 2,
         singleBatch: true
       }
     )
@@ -140,7 +142,9 @@
       },
       {
         limit: limit + 1,
-        batchSize: limit + 1,
+        // batchSize is 1 more than limit to auto-close the cursor.
+        // See https://github.com/mongodb/node-mongodb-native/pull/4580
+        batchSize: limit + 2,
         singleBatch: true
       }
     )
package/src/storage/implementation/PersistedBatch.ts CHANGED
@@ -16,7 +16,7 @@ import {
   CurrentDataDocument,
   SourceKey
 } from './models.js';
-import { replicaIdToSubkey } from '../../utils/util.js';
+import { replicaIdToSubkey } from './util.js';
 
 /**
  * Maximum size of operations we write in a single transaction.
package/src/storage/implementation/db.ts CHANGED
@@ -8,7 +8,6 @@ import {
   BucketParameterDocument,
   BucketStateDocument,
   CheckpointEventDocument,
-  ClientConnectionDocument,
   CurrentDataDocument,
   CustomWriteCheckpointDocument,
   IdSequenceDocument,
@@ -38,7 +37,6 @@ export class PowerSyncMongo {
   readonly locks: mongo.Collection<lib_mongo.locks.Lock>;
   readonly bucket_state: mongo.Collection<BucketStateDocument>;
   readonly checkpoint_events: mongo.Collection<CheckpointEventDocument>;
-  readonly connection_report_events: mongo.Collection<ClientConnectionDocument>;
 
   readonly client: mongo.MongoClient;
   readonly db: mongo.Db;
@@ -63,7 +61,6 @@
     this.locks = this.db.collection('locks');
     this.bucket_state = this.db.collection('bucket_state');
    this.checkpoint_events = this.db.collection('checkpoint_events');
-    this.connection_report_events = this.db.collection('connection_report_events');
   }
 
   /**
@@ -134,28 +131,27 @@
   /**
    * Only use in migrations and tests.
    */
-  async createConnectionReportingCollection() {
-    const existingCollections = await this.db
-      .listCollections({ name: 'connection_report_events' }, { nameOnly: false })
-      .toArray();
-    const collection = existingCollections[0];
-    if (collection != null) {
-      return;
-    }
-    await this.db.createCollection('connection_report_events');
+  async createBucketStateIndex() {
+    // TODO: Implement a better mechanism to use migrations in tests
+    await this.bucket_state.createIndex(
+      {
+        '_id.g': 1,
+        last_op: 1
+      },
+      { name: 'bucket_updates', unique: true }
+    );
   }
-
   /**
    * Only use in migrations and tests.
    */
-  async createBucketStateIndex() {
+  async createBucketStateIndex2() {
     // TODO: Implement a better mechanism to use migrations in tests
     await this.bucket_state.createIndex(
       {
         '_id.g': 1,
-        last_op: 1
+        'estimate_since_compact.count': -1
       },
-      { name: 'bucket_updates', unique: true }
+      { name: 'dirty_count' }
     );
   }
 }
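
Putting the `db.ts` changes in context, this is the approximate `bucket_state` document shape implied by this diff. It is a sketch inferred from the hunks above, not the full `BucketStateDocument` definition from `models.ts`, and the field types marked as assumed are guesses:

```ts
// Partial sketch of a bucket_state document, inferred from the hunks above.
interface BucketStateSketch {
  _id: {
    g: number; // sync rules group id (assumed type)
    b: string; // bucket name
  };
  last_op: bigint; // assumed type; served by the unique 'bucket_updates' index
  // Persistent checksum cache, written by updateChecksumsBatch():
  compacted_state?: {
    count: number;
    checksum: bigint;
    bytes: number | null;
  };
  // Change counters since the last compact; served by the 'dirty_count' index.
  estimate_since_compact?: {
    count: number;
    bytes: number;
  };
}
```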
package/src/storage/implementation/models.ts CHANGED
@@ -1,7 +1,6 @@
 import { InternalOpId, storage } from '@powersync/service-core';
 import { SqliteJsonValue } from '@powersync/service-sync-rules';
 import * as bson from 'bson';
-import { event_types } from '@powersync/service-types';
 
 /**
  * Replica id uniquely identifying a row on the source database.
@@ -239,5 +238,3 @@ export interface InstanceDocument {
   // The instance UUID
   _id: string;
 }
-
-export interface ClientConnectionDocument extends event_types.ClientConnection {}