@powersync/service-module-mongodb-storage 0.0.0-dev-20250828134335 → 0.0.0-dev-20250901073220

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. package/CHANGELOG.md +22 -5
  2. package/dist/index.d.ts +1 -0
  3. package/dist/index.js +1 -0
  4. package/dist/index.js.map +1 -1
  5. package/dist/migrations/db/migrations/1752661449910-connection-reporting.d.ts +3 -0
  6. package/dist/migrations/db/migrations/1752661449910-connection-reporting.js +36 -0
  7. package/dist/migrations/db/migrations/1752661449910-connection-reporting.js.map +1 -0
  8. package/dist/storage/MongoBucketStorage.js +1 -1
  9. package/dist/storage/MongoBucketStorage.js.map +1 -1
  10. package/dist/storage/MongoReportStorage.d.ts +18 -0
  11. package/dist/storage/MongoReportStorage.js +154 -0
  12. package/dist/storage/MongoReportStorage.js.map +1 -0
  13. package/dist/storage/implementation/MongoBucketBatch.js +1 -1
  14. package/dist/storage/implementation/MongoBucketBatch.js.map +1 -1
  15. package/dist/storage/implementation/MongoChecksums.d.ts +34 -0
  16. package/dist/storage/implementation/MongoChecksums.js +274 -0
  17. package/dist/storage/implementation/MongoChecksums.js.map +1 -0
  18. package/dist/storage/implementation/MongoCompactor.js +26 -29
  19. package/dist/storage/implementation/MongoCompactor.js.map +1 -1
  20. package/dist/storage/implementation/MongoStorageProvider.d.ts +1 -1
  21. package/dist/storage/implementation/MongoStorageProvider.js +7 -3
  22. package/dist/storage/implementation/MongoStorageProvider.js.map +1 -1
  23. package/dist/storage/implementation/MongoSyncBucketStorage.d.ts +2 -11
  24. package/dist/storage/implementation/MongoSyncBucketStorage.js +7 -207
  25. package/dist/storage/implementation/MongoSyncBucketStorage.js.map +1 -1
  26. package/dist/storage/implementation/PersistedBatch.js +1 -1
  27. package/dist/storage/implementation/PersistedBatch.js.map +1 -1
  28. package/dist/storage/implementation/db.d.ts +6 -1
  29. package/dist/storage/implementation/db.js +16 -0
  30. package/dist/storage/implementation/db.js.map +1 -1
  31. package/dist/storage/implementation/models.d.ts +4 -1
  32. package/dist/storage/storage-index.d.ts +3 -2
  33. package/dist/storage/storage-index.js +3 -2
  34. package/dist/storage/storage-index.js.map +1 -1
  35. package/dist/utils/test-utils.d.ts +11 -0
  36. package/dist/utils/test-utils.js +40 -0
  37. package/dist/utils/test-utils.js.map +1 -0
  38. package/dist/{storage/implementation → utils}/util.d.ts +2 -34
  39. package/dist/{storage/implementation → utils}/util.js +0 -54
  40. package/dist/utils/util.js.map +1 -0
  41. package/dist/utils/utils-index.d.ts +2 -0
  42. package/dist/utils/utils-index.js +3 -0
  43. package/dist/utils/utils-index.js.map +1 -0
  44. package/package.json +7 -7
  45. package/src/index.ts +1 -0
  46. package/src/migrations/db/migrations/1752661449910-connection-reporting.ts +58 -0
  47. package/src/storage/MongoBucketStorage.ts +1 -1
  48. package/src/storage/MongoReportStorage.ts +177 -0
  49. package/src/storage/implementation/MongoBucketBatch.ts +1 -1
  50. package/src/storage/implementation/MongoChecksums.ts +320 -0
  51. package/src/storage/implementation/MongoCompactor.ts +56 -56
  52. package/src/storage/implementation/MongoStorageProvider.ts +9 -4
  53. package/src/storage/implementation/MongoSyncBucketStorage.ts +7 -255
  54. package/src/storage/implementation/PersistedBatch.ts +1 -1
  55. package/src/storage/implementation/db.ts +18 -0
  56. package/src/storage/implementation/models.ts +4 -1
  57. package/src/storage/storage-index.ts +3 -2
  58. package/src/utils/test-utils.ts +55 -0
  59. package/src/{storage/implementation → utils}/util.ts +2 -59
  60. package/src/utils/utils-index.ts +2 -0
  61. package/test/src/__snapshots__/connection-report-storage.test.ts.snap +215 -0
  62. package/test/src/connection-report-storage.test.ts +133 -0
  63. package/test/src/util.ts +6 -2
  64. package/tsconfig.tsbuildinfo +1 -1
  65. package/dist/storage/implementation/MongoTestStorageFactoryGenerator.d.ts +0 -7
  66. package/dist/storage/implementation/MongoTestStorageFactoryGenerator.js +0 -18
  67. package/dist/storage/implementation/MongoTestStorageFactoryGenerator.js.map +0 -1
  68. package/dist/storage/implementation/util.js.map +0 -1
  69. package/src/storage/implementation/MongoTestStorageFactoryGenerator.ts +0 -28
@@ -2,17 +2,12 @@ import * as lib_mongo from '@powersync/lib-service-mongodb';
 import { mongo } from '@powersync/lib-service-mongodb';
 import {
   BaseObserver,
-  DatabaseQueryError,
-  ErrorCode,
   logger,
   ReplicationAbortedError,
   ServiceAssertionError
 } from '@powersync/lib-services-framework';
 import {
-  addBucketChecksums,
-  addPartialChecksums,
   BroadcastIterable,
-  BucketChecksum,
   CHECKPOINT_INVALIDATE_ALL,
   CheckpointChanges,
   CompactOptions,
@@ -20,10 +15,7 @@ import {
   GetCheckpointChangesOptions,
   InternalOpId,
   internalToExternalOpId,
-  isPartialChecksum,
   maxLsn,
-  PartialChecksum,
-  PartialOrFullChecksum,
   ProtocolOpId,
   ReplicationCheckpoint,
   storage,
@@ -39,28 +31,19 @@ import { MongoBucketStorage } from '../MongoBucketStorage.js';
 import { PowerSyncMongo } from './db.js';
 import { BucketDataDocument, BucketDataKey, BucketStateDocument, SourceKey, SourceTableDocument } from './models.js';
 import { MongoBucketBatch } from './MongoBucketBatch.js';
+import { MongoChecksums } from './MongoChecksums.js';
 import { MongoCompactor } from './MongoCompactor.js';
 import { MongoParameterCompactor } from './MongoParameterCompactor.js';
 import { MongoWriteCheckpointAPI } from './MongoWriteCheckpointAPI.js';
-import {
-  CHECKSUM_QUERY_GROUP_STAGE,
-  checksumFromAggregate,
-  idPrefixFilter,
-  mapOpEntry,
-  readSingleBatch,
-  setSessionSnapshotTime
-} from './util.js';
+import { idPrefixFilter, mapOpEntry, readSingleBatch, setSessionSnapshotTime } from '../../utils/util.js';
+

 export class MongoSyncBucketStorage
   extends BaseObserver<storage.SyncRulesBucketStorageListener>
   implements storage.SyncRulesBucketStorage
 {
   private readonly db: PowerSyncMongo;
-  private checksumCache = new storage.ChecksumCache({
-    fetchChecksums: (batch) => {
-      return this.getChecksumsInternal(batch);
-    }
-  });
+  readonly checksums: MongoChecksums;

   private parsedSyncRulesCache: { parsed: SqlSyncRules; options: storage.ParseSyncRulesOptions } | undefined;
   private writeCheckpointAPI: MongoWriteCheckpointAPI;
@@ -74,6 +57,7 @@ export class MongoSyncBucketStorage
   ) {
     super();
     this.db = factory.db;
+    this.checksums = new MongoChecksums(this.db, this.group_id);
     this.writeCheckpointAPI = new MongoWriteCheckpointAPI({
       db: this.db,
       mode: writeCheckpointMode,
@@ -503,243 +487,11 @@ export class MongoSyncBucketStorage
   }

   async getChecksums(checkpoint: utils.InternalOpId, buckets: string[]): Promise<utils.ChecksumMap> {
-    return this.checksumCache.getChecksumMap(checkpoint, buckets);
+    return this.checksums.getChecksums(checkpoint, buckets);
   }

   clearChecksumCache() {
-    this.checksumCache.clear();
-  }
-
-  private async getChecksumsInternal(batch: storage.FetchPartialBucketChecksum[]): Promise<storage.PartialChecksumMap> {
-    if (batch.length == 0) {
-      return new Map();
-    }
-
-    const preFilters: any[] = [];
-    for (let request of batch) {
-      if (request.start == null) {
-        preFilters.push({
-          _id: {
-            g: this.group_id,
-            b: request.bucket
-          },
-          'compacted_state.op_id': { $exists: true, $lte: request.end }
-        });
-      }
-    }
-
-    const preStates = new Map<string, { opId: InternalOpId; checksum: BucketChecksum }>();
-
-    if (preFilters.length > 0) {
-      // For un-cached bucket checksums, attempt to use the compacted state first.
-      const states = await this.db.bucket_state
-        .find({
-          $or: preFilters
-        })
-        .toArray();
-      for (let state of states) {
-        const compactedState = state.compacted_state!;
-        preStates.set(state._id.b, {
-          opId: compactedState.op_id,
-          checksum: {
-            bucket: state._id.b,
-            checksum: Number(compactedState.checksum),
-            count: compactedState.count
-          }
-        });
-      }
-    }
-
-    const mappedRequests = batch.map((request) => {
-      let start = request.start;
-      if (start == null) {
-        const preState = preStates.get(request.bucket);
-        if (preState != null) {
-          start = preState.opId;
-        }
-      }
-      return {
-        ...request,
-        start
-      };
-    });
-
-    const queriedChecksums = await this.queryPartialChecksums(mappedRequests);
-
-    return new Map<string, storage.PartialOrFullChecksum>(
-      batch.map((request) => {
-        const bucket = request.bucket;
-        // Could be null if this is either (1) a partial request, or (2) no compacted checksum was available
-        const preState = preStates.get(bucket);
-        // Could be null if we got no data
-        const partialChecksum = queriedChecksums.get(bucket);
-        const merged = addPartialChecksums(bucket, preState?.checksum ?? null, partialChecksum ?? null);
-
-        return [bucket, merged];
-      })
-    );
-  }
-
-  async queryPartialChecksums(batch: storage.FetchPartialBucketChecksum[]): Promise<storage.PartialChecksumMap> {
-    try {
-      return await this.queryPartialChecksumsInternal(batch);
-    } catch (e) {
-      if (e.codeName == 'MaxTimeMSExpired') {
-        logger.warn(`Checksum query timed out; falling back to slower version`, e);
-        // Timeout - try the slower but more robust version
-        return await this.queryPartialChecksumsFallback(batch);
-      }
-      throw lib_mongo.mapQueryError(e, 'while reading checksums');
-    }
-  }
-
-  private async queryPartialChecksumsInternal(
-    batch: storage.FetchPartialBucketChecksum[]
-  ): Promise<storage.PartialChecksumMap> {
-    const filters: any[] = [];
-    for (let request of batch) {
-      filters.push({
-        _id: {
-          $gt: {
-            g: this.group_id,
-            b: request.bucket,
-            o: request.start ?? new bson.MinKey()
-          },
-          $lte: {
-            g: this.group_id,
-            b: request.bucket,
-            o: request.end
-          }
-        }
-      });
-    }
-
-    const aggregate = await this.db.bucket_data
-      .aggregate(
-        [
-          {
-            $match: {
-              $or: filters
-            }
-          },
-          CHECKSUM_QUERY_GROUP_STAGE
-        ],
-        { session: undefined, readConcern: 'snapshot', maxTimeMS: lib_mongo.MONGO_CHECKSUM_TIMEOUT_MS }
-      )
-      // Don't map the error here - we want to keep timeout errors as-is
-      .toArray();
-
-    const partialChecksums = new Map<string, storage.PartialOrFullChecksum>(
-      aggregate.map((doc) => {
-        const bucket = doc._id;
-        return [bucket, checksumFromAggregate(doc)];
-      })
-    );
-
-    return new Map<string, storage.PartialOrFullChecksum>(
-      batch.map((request) => {
-        const bucket = request.bucket;
-        // Could be null if we got no data
-        let partialChecksum = partialChecksums.get(bucket);
-        if (partialChecksum == null) {
-          partialChecksum = {
-            bucket,
-            partialCount: 0,
-            partialChecksum: 0
-          };
-        }
-        if (request.start == null && isPartialChecksum(partialChecksum)) {
-          partialChecksum = {
-            bucket,
-            count: partialChecksum.partialCount,
-            checksum: partialChecksum.partialChecksum
-          };
-        }
-
-        return [bucket, partialChecksum];
-      })
-    );
-  }
-
-  /**
-   * Checksums for large buckets can run over the query timeout.
-   * To avoid this, we query in batches.
-   * This version can handle larger amounts of data, but is slower, especially for many buckets.
-   */
-  async queryPartialChecksumsFallback(
-    batch: storage.FetchPartialBucketChecksum[]
-  ): Promise<storage.PartialChecksumMap> {
-    const partialChecksums = new Map<string, storage.PartialOrFullChecksum>();
-    for (let request of batch) {
-      const checksum = await this.slowChecksum(request);
-      partialChecksums.set(request.bucket, checksum);
-    }
-
-    return partialChecksums;
-  }
-
-  private async slowChecksum(request: storage.FetchPartialBucketChecksum): Promise<PartialOrFullChecksum> {
-    const batchLimit = 50_000;
-
-    let lowerBound = 0n;
-    const bucket = request.bucket;
-
-    let runningChecksum: PartialOrFullChecksum = {
-      bucket,
-      partialCount: 0,
-      partialChecksum: 0
-    };
-    if (request.start == null) {
-      runningChecksum = {
-        bucket,
-        count: 0,
-        checksum: 0
-      };
-    }
-
-    while (true) {
-      const filter = {
-        _id: {
-          $gt: {
-            g: this.group_id,
-            b: bucket,
-            o: lowerBound
-          },
-          $lte: {
-            g: this.group_id,
-            b: bucket,
-            o: request.end
-          }
-        }
-      };
-      const docs = await this.db.bucket_data
-        .aggregate(
-          [
-            {
-              $match: filter
-            },
-            // sort and limit _before_ grouping
-            { $sort: { _id: 1 } },
-            { $limit: batchLimit },
-            CHECKSUM_QUERY_GROUP_STAGE
-          ],
-          { session: undefined, readConcern: 'snapshot', maxTimeMS: lib_mongo.MONGO_CHECKSUM_TIMEOUT_MS }
-        )
-        .toArray();
-      const doc = docs[0];
-      if (doc == null) {
-        return runningChecksum;
-      }
-      const partial = checksumFromAggregate(doc);
-      runningChecksum = addPartialChecksums(bucket, runningChecksum, partial);
-      const isFinal = doc.count != batchLimit;
-      if (isFinal) {
-        break;
-      } else {
-        lowerBound = doc.last_op;
-      }
-    }
-    return runningChecksum;
+    this.checksums.clearCache();
   }

   async terminate(options?: storage.TerminateOptions) {
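
Note: the checksum cache and the aggregation logic removed above now live in the new `MongoChecksums` class (`src/storage/implementation/MongoChecksums.ts`, +320 lines, not shown in full in this diff). The call sites above only pin down the following shape; this is a sketch inferred from them, not the actual implementation:

```ts
import { utils } from '@powersync/service-core';
import { PowerSyncMongo } from './db.js';

// Interface implied by the call sites in this diff (constructor,
// getChecksums, clearCache); the real class also contains the query
// and timeout-fallback logic moved out of MongoSyncBucketStorage.
declare class MongoChecksums {
  constructor(db: PowerSyncMongo, group_id: number);
  getChecksums(checkpoint: utils.InternalOpId, buckets: string[]): Promise<utils.ChecksumMap>;
  clearCache(): void;
}
```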
@@ -16,7 +16,7 @@ import {
   CurrentDataDocument,
   SourceKey
 } from './models.js';
-import { replicaIdToSubkey } from './util.js';
+import { replicaIdToSubkey } from '../../utils/util.js';

 /**
  * Maximum size of operations we write in a single transaction.
@@ -8,6 +8,7 @@ import {
   BucketParameterDocument,
   BucketStateDocument,
   CheckpointEventDocument,
+  ClientConnectionDocument,
   CurrentDataDocument,
   CustomWriteCheckpointDocument,
   IdSequenceDocument,
@@ -37,6 +38,7 @@ export class PowerSyncMongo {
   readonly locks: mongo.Collection<lib_mongo.locks.Lock>;
   readonly bucket_state: mongo.Collection<BucketStateDocument>;
   readonly checkpoint_events: mongo.Collection<CheckpointEventDocument>;
+  readonly connection_report_events: mongo.Collection<ClientConnectionDocument>;

   readonly client: mongo.MongoClient;
   readonly db: mongo.Db;
@@ -61,6 +63,7 @@ export class PowerSyncMongo {
     this.locks = this.db.collection('locks');
     this.bucket_state = this.db.collection('bucket_state');
     this.checkpoint_events = this.db.collection('checkpoint_events');
+    this.connection_report_events = this.db.collection('connection_report_events');
   }

   /**
@@ -78,6 +81,7 @@ export class PowerSyncMongo {
     await this.locks.deleteMany({});
     await this.bucket_state.deleteMany({});
     await this.custom_write_checkpoints.deleteMany({});
+    await this.connection_report_events.deleteMany({});
   }

   /**
@@ -127,6 +131,20 @@ export class PowerSyncMongo {
       max: 50 // max number of documents
     });
   }
+
+  /**
+   * Only use in migrations and tests.
+   */
+  async createConnectionReportingCollection() {
+    const existingCollections = await this.db
+      .listCollections({ name: 'connection_report_events' }, { nameOnly: false })
+      .toArray();
+    const collection = existingCollections[0];
+    if (collection != null) {
+      return;
+    }
+    await this.db.createCollection('connection_report_events');
+  }
 }

 export function createPowerSyncMongo(config: MongoStorageConfig, options?: lib_mongo.MongoConnectionOptions) {
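
Note: `createConnectionReportingCollection()` is idempotent, and per its doc comment is intended only for migrations and tests. A plausible usage sketch for the new `1752661449910-connection-reporting` migration (the migration file is listed in this diff but its body is not shown; the connection handling below is illustrative only):

```ts
import { mongo } from '@powersync/lib-service-mongodb';
import { PowerSyncMongo } from '../../../storage/implementation/db.js';

// Hypothetical 'up' step: ensure the reporting collection exists.
// Safe to re-run - createConnectionReportingCollection returns early
// when 'connection_report_events' is already present.
export async function up(url: string) {
  const client = new mongo.MongoClient(url);
  try {
    const db = new PowerSyncMongo(client);
    await db.createConnectionReportingCollection();
  } finally {
    await client.close();
  }
}
```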
@@ -1,6 +1,7 @@
 import { InternalOpId, storage } from '@powersync/service-core';
 import { SqliteJsonValue } from '@powersync/service-sync-rules';
 import * as bson from 'bson';
+import { event_types } from '@powersync/service-types';

 /**
  * Replica id uniquely identifying a row on the source database.
@@ -106,7 +107,7 @@ export interface BucketStateDocument {
     op_id: InternalOpId;
     count: number;
     checksum: bigint;
-    bytes: number;
+    bytes: number | null;
   };

   estimate_since_compact?: {
@@ -234,3 +235,5 @@ export interface InstanceDocument {
   // The instance UUID
   _id: string;
 }
+
+export interface ClientConnectionDocument extends event_types.ClientConnection {}
@@ -7,8 +7,9 @@ export * from './implementation/MongoPersistedSyncRulesContent.js';
 export * from './implementation/MongoStorageProvider.js';
 export * from './implementation/MongoSyncBucketStorage.js';
 export * from './implementation/MongoSyncRulesLock.js';
-export * from './implementation/MongoTestStorageFactoryGenerator.js';
 export * from './implementation/OperationBatch.js';
 export * from './implementation/PersistedBatch.js';
-export * from './implementation/util.js';
+export * from '../utils/util.js';
 export * from './MongoBucketStorage.js';
+export * from './MongoReportStorage.js';
+export * as test_utils from '../utils/test-utils.js';
@@ -0,0 +1,55 @@
+import { mongo } from '@powersync/lib-service-mongodb';
+import { PowerSyncMongo } from '../storage/implementation/db.js';
+import { TestStorageOptions } from '@powersync/service-core';
+import { MongoReportStorage } from '../storage/MongoReportStorage.js';
+import { MongoBucketStorage } from '../storage/MongoBucketStorage.js';
+
+export type MongoTestStorageOptions = {
+  url: string;
+  isCI: boolean;
+};
+
+export function mongoTestStorageFactoryGenerator(factoryOptions: MongoTestStorageOptions) {
+  return async (options?: TestStorageOptions) => {
+    const db = connectMongoForTests(factoryOptions.url, factoryOptions.isCI);
+
+    // None of the tests insert data into this collection, so it was never created
+    if (!(await db.db.listCollections({ name: db.bucket_parameters.collectionName }).hasNext())) {
+      await db.db.createCollection('bucket_parameters');
+    }
+
+    // Full migrations are not currently run for tests, so we manually create this
+    await db.createCheckpointEventsCollection();
+
+    if (!options?.doNotClear) {
+      await db.clear();
+    }
+
+    return new MongoBucketStorage(db, { slot_name_prefix: 'test_' });
+  };
+}
+
+export function mongoTestReportStorageFactoryGenerator(factoryOptions: MongoTestStorageOptions) {
+  return async (options?: TestStorageOptions) => {
+    const db = connectMongoForTests(factoryOptions.url, factoryOptions.isCI);
+
+    await db.createConnectionReportingCollection();
+
+    if (!options?.doNotClear) {
+      await db.clear();
+    }
+
+    return new MongoReportStorage(db);
+  };
+}
+
+export const connectMongoForTests = (url: string, isCI: boolean) => {
+  // Short timeout for tests, to fail fast when the server is not available.
+  // Slightly longer timeouts for CI, to avoid arbitrary test failures
+  const client = new mongo.MongoClient(url, {
+    connectTimeoutMS: isCI ? 15_000 : 5_000,
+    socketTimeoutMS: isCI ? 15_000 : 5_000,
+    serverSelectionTimeoutMS: isCI ? 15_000 : 2_500
+  });
+  return new PowerSyncMongo(client);
+};
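
Note: a usage sketch for the relocated test factory, assuming a vitest-style test and a local MongoDB URL (both assumptions; this package's own tests live in `test/src/` and are not shown here):

```ts
import { describe, expect, it } from 'vitest';
// Re-exported via storage-index as `test_utils` (see the storage-index.ts hunk above).
import { test_utils } from '@powersync/service-module-mongodb-storage';

const factory = test_utils.mongoTestStorageFactoryGenerator({
  url: 'mongodb://localhost:27017/powersync_test', // assumed local test instance
  isCI: false
});

describe('bucket storage', () => {
  it('provides a cleared storage instance', async () => {
    // Pass { doNotClear: true } to keep data from a previous run.
    const bucketStorage = await factory();
    expect(bucketStorage).toBeDefined();
  });
});
```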
@@ -3,11 +3,9 @@ import * as crypto from 'crypto';
 import * as uuid from 'uuid';

 import { mongo } from '@powersync/lib-service-mongodb';
-import { BucketChecksum, PartialChecksum, PartialOrFullChecksum, storage, utils } from '@powersync/service-core';
-
-import { PowerSyncMongo } from './db.js';
-import { BucketDataDocument } from './models.js';
+import { storage, utils } from '@powersync/service-core';
 import { ServiceAssertionError } from '@powersync/lib-services-framework';
+import { BucketDataDocument } from '../storage/implementation/models.js';

 export function idPrefixFilter<T>(prefix: Partial<T>, rest: (keyof T)[]): mongo.Condition<T> {
   let filter = {
@@ -105,20 +103,6 @@ export function replicaIdToSubkey(table: bson.ObjectId, id: storage.ReplicaId):
   }
 }

-/**
- * Helper for unit tests
- */
-export const connectMongoForTests = (url: string, isCI: boolean) => {
-  // Short timeout for tests, to fail fast when the server is not available.
-  // Slightly longer timeouts for CI, to avoid arbitrary test failures
-  const client = new mongo.MongoClient(url, {
-    connectTimeoutMS: isCI ? 15_000 : 5_000,
-    socketTimeoutMS: isCI ? 15_000 : 5_000,
-    serverSelectionTimeoutMS: isCI ? 15_000 : 2_500
-  });
-  return new PowerSyncMongo(client);
-};
-
 export function setSessionSnapshotTime(session: mongo.ClientSession, time: bson.Timestamp) {
   // This is a workaround for the lack of direct support for snapshot reads in the MongoDB driver.
   if (!session.snapshotEnabled) {
@@ -130,44 +114,3 @@ export function setSessionSnapshotTime(session: mongo.ClientSession, time: bson.
     throw new ServiceAssertionError(`Session snapshotTime is already set`);
   }
 }
-
-export const CHECKSUM_QUERY_GROUP_STAGE = {
-  $group: {
-    _id: '$_id.b',
-    // Historically, checksum may be stored as 'int' or 'double'.
-    // More recently, this should be a 'long'.
-    // $toLong ensures that we always sum it as a long, avoiding inaccuracies in the calculations.
-    checksum_total: { $sum: { $toLong: '$checksum' } },
-    count: { $sum: 1 },
-    has_clear_op: {
-      $max: {
-        $cond: [{ $eq: ['$op', 'CLEAR'] }, 1, 0]
-      }
-    },
-    last_op: { $max: '$_id.o' }
-  }
-};
-
-/**
- * Convert output of CHECKSUM_QUERY_GROUP_STAGE into a checksum.
- */
-export function checksumFromAggregate(doc: bson.Document): PartialOrFullChecksum {
-  const partialChecksum = Number(BigInt(doc.checksum_total) & 0xffffffffn) & 0xffffffff;
-  const bucket = doc._id;
-
-  if (doc.has_clear_op == 1) {
-    return {
-      // full checksum - replaces any previous one
-      bucket,
-      checksum: partialChecksum,
-      count: doc.count
-    } satisfies BucketChecksum;
-  } else {
-    return {
-      // partial checksum - is added to a previous one
-      bucket,
-      partialCount: doc.count,
-      partialChecksum
-    } satisfies PartialChecksum;
-  }
-}
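
Note: the removed `checksumFromAggregate` (now part of `MongoChecksums`) reduces the summed 64-bit aggregate to a signed 32-bit bucket checksum via `Number(BigInt(total) & 0xffffffffn) & 0xffffffff`. A small worked example of that truncation, for reference:

```ts
// Mirrors the truncation in checksumFromAggregate above: keep the low
// 32 bits, then reinterpret as signed via the int32 coercion of `&`.
function toInt32Checksum(checksumTotal: bigint): number {
  return Number(checksumTotal & 0xffffffffn) & 0xffffffff;
}

console.log(toInt32Checksum(0x23456789n)); // 591751049 (fits in int32, unchanged)
console.log(toInt32Checksum(0x1_ffff_ffffn)); // -1 (high bits dropped, sign bit set)
console.log(toInt32Checksum(-1n)); // -1 (negative sums wrap to the same value)
```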
@@ -0,0 +1,2 @@
+export * as test_utils from './test-utils.js';
+export * from './util.js';