@powersync/service-module-mongodb-storage 0.0.0-dev-20250910154512 → 0.0.0-dev-20251030082344
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +35 -11
- package/dist/index.d.ts +0 -1
- package/dist/index.js +0 -1
- package/dist/index.js.map +1 -1
- package/dist/migrations/db/migrations/1760433882550-bucket-state-index2.js +25 -0
- package/dist/migrations/db/migrations/1760433882550-bucket-state-index2.js.map +1 -0
- package/dist/storage/MongoBucketStorage.js +1 -1
- package/dist/storage/MongoBucketStorage.js.map +1 -1
- package/dist/storage/implementation/MongoBucketBatch.js +1 -1
- package/dist/storage/implementation/MongoBucketBatch.js.map +1 -1
- package/dist/storage/implementation/MongoCompactor.d.ts +13 -3
- package/dist/storage/implementation/MongoCompactor.js +86 -90
- package/dist/storage/implementation/MongoCompactor.js.map +1 -1
- package/dist/storage/implementation/MongoStorageProvider.d.ts +1 -1
- package/dist/storage/implementation/MongoStorageProvider.js +3 -7
- package/dist/storage/implementation/MongoStorageProvider.js.map +1 -1
- package/dist/storage/implementation/MongoSyncBucketStorage.d.ts +2 -2
- package/dist/storage/implementation/MongoSyncBucketStorage.js +62 -19
- package/dist/storage/implementation/MongoSyncBucketStorage.js.map +1 -1
- package/dist/storage/implementation/MongoTestStorageFactoryGenerator.d.ts +9 -0
- package/dist/storage/implementation/MongoTestStorageFactoryGenerator.js +20 -0
- package/dist/storage/implementation/MongoTestStorageFactoryGenerator.js.map +1 -0
- package/dist/storage/implementation/MongoWriteCheckpointAPI.js +6 -2
- package/dist/storage/implementation/MongoWriteCheckpointAPI.js.map +1 -1
- package/dist/storage/implementation/PersistedBatch.js +1 -1
- package/dist/storage/implementation/PersistedBatch.js.map +1 -1
- package/dist/storage/implementation/db.d.ts +3 -4
- package/dist/storage/implementation/db.js +9 -14
- package/dist/storage/implementation/db.js.map +1 -1
- package/dist/storage/implementation/models.d.ts +0 -3
- package/dist/{utils → storage/implementation}/util.d.ts +7 -2
- package/dist/{utils → storage/implementation}/util.js +16 -1
- package/dist/storage/implementation/util.js.map +1 -0
- package/dist/storage/storage-index.d.ts +2 -3
- package/dist/storage/storage-index.js +2 -3
- package/dist/storage/storage-index.js.map +1 -1
- package/package.json +9 -9
- package/src/index.ts +0 -1
- package/src/migrations/db/migrations/{1752661449910-connection-reporting.ts → 1760433882550-bucket-state-index2.ts} +6 -30
- package/src/storage/MongoBucketStorage.ts +1 -1
- package/src/storage/implementation/MongoBucketBatch.ts +1 -1
- package/src/storage/implementation/MongoCompactor.ts +100 -96
- package/src/storage/implementation/MongoStorageProvider.ts +4 -9
- package/src/storage/implementation/MongoSyncBucketStorage.ts +64 -21
- package/src/storage/implementation/MongoTestStorageFactoryGenerator.ts +32 -0
- package/src/storage/implementation/MongoWriteCheckpointAPI.ts +6 -2
- package/src/storage/implementation/PersistedBatch.ts +1 -1
- package/src/storage/implementation/db.ts +12 -16
- package/src/storage/implementation/models.ts +0 -3
- package/src/{utils → storage/implementation}/util.ts +19 -3
- package/src/storage/storage-index.ts +2 -3
- package/test/src/storage.test.ts +51 -3
- package/test/src/storage_compacting.test.ts +17 -2
- package/test/src/util.ts +2 -6
- package/tsconfig.tsbuildinfo +1 -1
- package/dist/migrations/db/migrations/1752661449910-connection-reporting.js +0 -36
- package/dist/migrations/db/migrations/1752661449910-connection-reporting.js.map +0 -1
- package/dist/storage/MongoReportStorage.d.ts +0 -17
- package/dist/storage/MongoReportStorage.js +0 -152
- package/dist/storage/MongoReportStorage.js.map +0 -1
- package/dist/utils/test-utils.d.ts +0 -13
- package/dist/utils/test-utils.js +0 -40
- package/dist/utils/test-utils.js.map +0 -1
- package/dist/utils/util.js.map +0 -1
- package/dist/utils/utils-index.d.ts +0 -2
- package/dist/utils/utils-index.js +0 -3
- package/dist/utils/utils-index.js.map +0 -1
- package/src/storage/MongoReportStorage.ts +0 -174
- package/src/utils/test-utils.ts +0 -57
- package/src/utils/utils-index.ts +0 -2
- package/test/src/__snapshots__/connection-report-storage.test.ts.snap +0 -215
- package/test/src/connection-report-storage.test.ts +0 -133
- /package/dist/migrations/db/migrations/{1752661449910-connection-reporting.d.ts → 1760433882550-bucket-state-index2.d.ts} +0 -0
package/src/storage/implementation/MongoSyncBucketStorage.ts
CHANGED

@@ -16,6 +16,8 @@ import {
   InternalOpId,
   internalToExternalOpId,
   maxLsn,
+  PopulateChecksumCacheOptions,
+  PopulateChecksumCacheResults,
   ProtocolOpId,
   ReplicationCheckpoint,
   storage,
@@ -35,13 +37,23 @@ import { MongoChecksumOptions, MongoChecksums } from './MongoChecksums.js';
 import { MongoCompactor } from './MongoCompactor.js';
 import { MongoParameterCompactor } from './MongoParameterCompactor.js';
 import { MongoWriteCheckpointAPI } from './MongoWriteCheckpointAPI.js';
-import { idPrefixFilter, mapOpEntry, readSingleBatch, setSessionSnapshotTime } from '
-
+import { idPrefixFilter, mapOpEntry, readSingleBatch, setSessionSnapshotTime } from './util.js';

 export interface MongoSyncBucketStorageOptions {
   checksumOptions?: MongoChecksumOptions;
 }

+/**
+ * Only keep checkpoints around for a minute, before fetching a fresh one.
+ *
+ * The reason is that we keep a MongoDB snapshot reference (clusterTime) with the checkpoint,
+ * and they expire after 5 minutes by default. This is an issue if the checkpoint stream is idle,
+ * but new clients connect and use an outdated checkpoint snapshot for parameter queries.
+ *
+ * These will be filtered out for existing clients, so should not create significant overhead.
+ */
+const CHECKPOINT_TIMEOUT_MS = 60_000;
+
 export class MongoSyncBucketStorage
   extends BaseObserver<storage.SyncRulesBucketStorageListener>
   implements storage.SyncRulesBucketStorage
@@ -393,7 +405,9 @@ export class MongoSyncBucketStorage
         limit: batchLimit,
         // Increase batch size above the default 101, so that we can fill an entire batch in
         // one go.
-        batchSize
+        // batchSize is 1 more than limit to auto-close the cursor.
+        // See https://github.com/mongodb/node-mongodb-native/pull/4580
+        batchSize: batchLimit + 1,
         // Raw mode is returns an array of Buffer instead of parsed documents.
         // We use it so that:
         // 1. We can calculate the document size accurately without serializing again.
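The same batchSize-one-above-limit trick is applied to several find() calls in this diff. The sketch below is an illustration of the idea only, not code from this package; the collection and function name are placeholders.

import { Collection, Document } from 'mongodb';

// Illustrative sketch: with batchSize set one higher than limit, the server can report the
// cursor as exhausted on the first (and only) batch, so the driver closes it without a
// separate killCursors round trip (per the node-mongodb-native PR linked above).
async function readFullBatch(collection: Collection<Document>, batchLimit: number): Promise<Document[]> {
  const cursor = collection.find(
    {},
    {
      limit: batchLimit,
      batchSize: batchLimit + 1 // one more than the limit, so the cursor auto-closes
    }
  );
  return await cursor.toArray();
}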
@@ -653,7 +667,7 @@ export class MongoSyncBucketStorage
     }
   }

-  async populatePersistentChecksumCache(options:
+  async populatePersistentChecksumCache(options: PopulateChecksumCacheOptions): Promise<PopulateChecksumCacheResults> {
     logger.info(`Populating persistent checksum cache...`);
     const start = Date.now();
     // We do a minimal compact here.
@@ -664,9 +678,14 @@ export class MongoSyncBucketStorage
       memoryLimitMB: 0
     });

-    await compactor.populateChecksums(
+    const result = await compactor.populateChecksums({
+      // There are cases with millions of small buckets, in which case it can take very long to
+      // populate the checksums, with minimal benefit. We skip the small buckets here.
+      minBucketChanges: options.minBucketChanges ?? 10
+    });
     const duration = Date.now() - start;
     logger.info(`Populated persistent checksum cache in ${(duration / 1000).toFixed(1)}s`);
+    return result;
   }

   /**
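A hedged sketch of how a caller might use the updated method, mirroring the storage_compacting test further down in this diff. The function name and import path are assumptions; only the option names (maxOpId, minBucketChanges) and the buckets result field come from this diff.

import { MongoSyncBucketStorage } from './MongoSyncBucketStorage.js';

// Warm the persistent checksum cache for every bucket with at least one change since the
// last compact. When minBucketChanges is omitted, the default threshold above is 10.
async function warmChecksumCache(bucketStorage: MongoSyncBucketStorage): Promise<void> {
  const { checkpoint } = await bucketStorage.getCheckpoint();
  const result = await bucketStorage.populatePersistentChecksumCache({
    maxOpId: checkpoint,
    minBucketChanges: 1
  });
  console.log(`Cached checksums for ${result.buckets} bucket(s)`);
}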
@@ -681,25 +700,45 @@ export class MongoSyncBucketStorage

     // We only watch changes to the active sync rules.
     // If it changes to inactive, we abort and restart with the new sync rules.
-
+    try {
+      while (true) {
+        // If the stream is idle, we wait a max of a minute (CHECKPOINT_TIMEOUT_MS)
+        // before we get another checkpoint, to avoid stale checkpoint snapshots.
+        const timeout = timers
+          .setTimeout(CHECKPOINT_TIMEOUT_MS, { done: false }, { signal })
+          .catch(() => ({ done: true }));
+        try {
+          const result = await Promise.race([stream.next(), timeout]);
+          if (result.done) {
+            break;
+          }
+        } catch (e) {
+          if (e.name == 'AbortError') {
+            break;
+          }
+          throw e;
+        }

-
-
-
-
+        if (signal.aborted) {
+          // Would likely have been caught by the signal on the timeout or the upstream stream, but we check here anyway
+          break;
+        }

-
-
-
-
-
-
+        const op = await this.getCheckpointInternal();
+        if (op == null) {
+          // Sync rules have changed - abort and restart.
+          // We do a soft close of the stream here - no error
+          break;
+        }

-
-
-
+        // Previously, we only yielded when the checkpoint or lsn changed.
+        // However, we always want to use the latest snapshotTime, so we skip that filtering here.
+        // That filtering could be added in the per-user streams if needed, but in general the capped collection
+        // should already only contain useful changes in most cases.
         yield op;
       }
+    } finally {
+      await stream.return(null);
     }
   }

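The loop above races the checkpoint event stream against a Node timers/promises timeout so that an idle stream still refreshes its checkpoint snapshot. Below is a minimal, generic sketch of that race using a plain AsyncIterator rather than the package's checkpoint stream; the function and parameter names are placeholders.

import { setTimeout as delay } from 'node:timers/promises';

// Minimal sketch: the timeout resolves to a "not done" marker so an idle source still
// produces a tick; aborting the signal rejects the timeout, which we map to "done" so
// the loop exits cleanly.
async function* withIdleTicks<T>(
  source: AsyncIterator<T>,
  idleMs: number,
  signal: AbortSignal
): AsyncGenerator<T | null> {
  while (!signal.aborted) {
    const timeout = delay(idleMs, { done: false as const, value: null }, { signal }).catch(() => ({
      done: true as const,
      value: null
    }));
    const result = await Promise.race([source.next(), timeout]);
    if (result.done) {
      break;
    }
    // Either a real item from the source, or null for an idle tick.
    yield result.value;
  }
}

In the actual method above, an idle tick simply falls through to getCheckpointInternal(), so a fresh checkpoint (with a fresh snapshot time) is yielded at least once per CHECKPOINT_TIMEOUT_MS.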
@@ -875,7 +914,9 @@ export class MongoSyncBucketStorage
           '_id.b': 1
         },
         limit: limit + 1,
-        batchSize
+        // batchSize is 1 more than limit to auto-close the cursor.
+        // See https://github.com/mongodb/node-mongodb-native/pull/4580
+        batchSize: limit + 2,
         singleBatch: true
       }
     )
@@ -905,7 +946,9 @@ export class MongoSyncBucketStorage
           lookup: 1
         },
         limit: limit + 1,
-        batchSize
+        // batchSize is 1 more than limit to auto-close the cursor.
+        // See https://github.com/mongodb/node-mongodb-native/pull/4580
+        batchSize: limit + 2,
         singleBatch: true
       }
     )
package/src/storage/implementation/MongoTestStorageFactoryGenerator.ts
ADDED

@@ -0,0 +1,32 @@
+import { TestStorageOptions } from '@powersync/service-core';
+import { MongoBucketStorage } from '../MongoBucketStorage.js';
+import { connectMongoForTests } from './util.js';
+import { MongoSyncBucketStorageOptions } from './MongoSyncBucketStorage.js';
+
+export type MongoTestStorageOptions = {
+  url: string;
+  isCI: boolean;
+  internalOptions?: MongoSyncBucketStorageOptions;
+};
+
+export const MongoTestStorageFactoryGenerator = (factoryOptions: MongoTestStorageOptions) => {
+  return async (options?: TestStorageOptions) => {
+    const db = connectMongoForTests(factoryOptions.url, factoryOptions.isCI);
+
+    // None of the tests insert data into this collection, so it was never created
+    if (!(await db.db.listCollections({ name: db.bucket_parameters.collectionName }).hasNext())) {
+      await db.db.createCollection('bucket_parameters');
+    }
+
+    if (!options?.doNotClear) {
+      await db.clear();
+    }
+
+    // Full migrations are not currently run for tests, so we manually create the important ones
+    await db.createCheckpointEventsCollection();
+    await db.createBucketStateIndex();
+    await db.createBucketStateIndex2();
+
+    return new MongoBucketStorage(db, { slot_name_prefix: 'test_' }, factoryOptions.internalOptions);
+  };
+};
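For context, this is how the test suite consumes the new factory (it mirrors the test/src/util.ts change at the end of this diff); env.MONGO_TEST_URL and env.CI come from the tests' environment helper.

import { MongoTestStorageFactoryGenerator } from '@module/storage/implementation/MongoTestStorageFactoryGenerator.js';
import { env } from './env.js';

export const INITIALIZED_MONGO_STORAGE_FACTORY = MongoTestStorageFactoryGenerator({
  url: env.MONGO_TEST_URL,
  isCI: env.CI
});

// Each call connects, clears existing data (unless doNotClear is set), creates the
// required collections/indexes, and returns a fresh MongoBucketStorage:
// const storage = await INITIALIZED_MONGO_STORAGE_FACTORY();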
package/src/storage/implementation/MongoWriteCheckpointAPI.ts
CHANGED

@@ -111,7 +111,9 @@ export class MongoWriteCheckpointAPI implements storage.WriteCheckpointAPI {
       },
       {
         limit: limit + 1,
-        batchSize
+        // batchSize is 1 more than limit to auto-close the cursor.
+        // See https://github.com/mongodb/node-mongodb-native/pull/4580
+        batchSize: limit + 2,
         singleBatch: true
       }
     )

@@ -140,7 +142,9 @@ export class MongoWriteCheckpointAPI implements storage.WriteCheckpointAPI {
       },
       {
         limit: limit + 1,
-        batchSize
+        // batchSize is 1 more than limit to auto-close the cursor.
+        // See https://github.com/mongodb/node-mongodb-native/pull/4580
+        batchSize: limit + 2,
         singleBatch: true
       }
     )
package/src/storage/implementation/db.ts
CHANGED

@@ -8,7 +8,6 @@ import {
   BucketParameterDocument,
   BucketStateDocument,
   CheckpointEventDocument,
-  ClientConnectionDocument,
   CurrentDataDocument,
   CustomWriteCheckpointDocument,
   IdSequenceDocument,

@@ -38,7 +37,6 @@ export class PowerSyncMongo {
   readonly locks: mongo.Collection<lib_mongo.locks.Lock>;
   readonly bucket_state: mongo.Collection<BucketStateDocument>;
   readonly checkpoint_events: mongo.Collection<CheckpointEventDocument>;
-  readonly connection_report_events: mongo.Collection<ClientConnectionDocument>;

   readonly client: mongo.MongoClient;
   readonly db: mongo.Db;

@@ -63,7 +61,6 @@ export class PowerSyncMongo {
     this.locks = this.db.collection('locks');
     this.bucket_state = this.db.collection('bucket_state');
     this.checkpoint_events = this.db.collection('checkpoint_events');
-    this.connection_report_events = this.db.collection('connection_report_events');
   }

   /**

@@ -134,28 +131,27 @@ export class PowerSyncMongo {
   /**
    * Only use in migrations and tests.
    */
-  async
-
-
-
-
-
-
-
-
+  async createBucketStateIndex() {
+    // TODO: Implement a better mechanism to use migrations in tests
+    await this.bucket_state.createIndex(
+      {
+        '_id.g': 1,
+        last_op: 1
+      },
+      { name: 'bucket_updates', unique: true }
+    );
   }
-
   /**
    * Only use in migrations and tests.
    */
-  async
+  async createBucketStateIndex2() {
     // TODO: Implement a better mechanism to use migrations in tests
     await this.bucket_state.createIndex(
       {
         '_id.g': 1,
-
+        'estimate_since_compact.count': -1
       },
-      { name: '
+      { name: 'dirty_count' }
     );
   }
 }
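The new dirty_count index orders buckets within a sync-rules group by estimate_since_compact.count, descending. The query shape below is a speculative illustration of what such an index can serve (for example, finding buckets worth compacting); it is not code from this package, and the threshold is made up.

import { Collection, Document } from 'mongodb';

// Hypothetical query served by the dirty_count index: within one sync-rules group
// (_id.g), list the buckets with the most changes since the last compact.
async function dirtiestBuckets(bucketState: Collection<Document>, group: number) {
  return await bucketState
    .find({ '_id.g': group, 'estimate_since_compact.count': { $gte: 10 } })
    .sort({ 'estimate_since_compact.count': -1 })
    .limit(100)
    .toArray();
}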
package/src/storage/implementation/models.ts
CHANGED

@@ -1,7 +1,6 @@
 import { InternalOpId, storage } from '@powersync/service-core';
 import { SqliteJsonValue } from '@powersync/service-sync-rules';
 import * as bson from 'bson';
-import { event_types } from '@powersync/service-types';

 /**
  * Replica id uniquely identifying a row on the source database.

@@ -239,5 +238,3 @@ export interface InstanceDocument {
   // The instance UUID
   _id: string;
 }
-
-export interface ClientConnectionDocument extends event_types.ClientConnection {}
package/src/{utils → storage/implementation}/util.ts
CHANGED

@@ -3,9 +3,11 @@ import * as crypto from 'crypto';
 import * as uuid from 'uuid';

 import { mongo } from '@powersync/lib-service-mongodb';
-import { storage, utils } from '@powersync/service-core';
+import { BucketChecksum, PartialChecksum, PartialOrFullChecksum, storage, utils } from '@powersync/service-core';
+
+import { PowerSyncMongo } from './db.js';
+import { BucketDataDocument } from './models.js';
 import { ServiceAssertionError } from '@powersync/lib-services-framework';
-import { BucketDataDocument } from '../storage/implementation/models.js';

 export function idPrefixFilter<T>(prefix: Partial<T>, rest: (keyof T)[]): mongo.Condition<T> {
   let filter = {

@@ -39,7 +41,7 @@ export function generateSlotName(prefix: string, sync_rules_id: number) {
 * However, that makes `has_more` detection very difficult, since the cursor is always closed
 * after the first batch. Instead, we do a workaround to only fetch a single batch below.
 *
-* For this to be effective, set batchSize = limit in the find command.
+* For this to be effective, set batchSize = limit + 1 in the find command.
 */
 export async function readSingleBatch<T>(cursor: mongo.AbstractCursor<T>): Promise<{ data: T[]; hasMore: boolean }> {
   try {

@@ -103,6 +105,20 @@ export function replicaIdToSubkey(table: bson.ObjectId, id: storage.ReplicaId):
   }
 }

+/**
+ * Helper for unit tests
+ */
+export const connectMongoForTests = (url: string, isCI: boolean) => {
+  // Short timeout for tests, to fail fast when the server is not available.
+  // Slightly longer timeouts for CI, to avoid arbitrary test failures
+  const client = new mongo.MongoClient(url, {
+    connectTimeoutMS: isCI ? 15_000 : 5_000,
+    socketTimeoutMS: isCI ? 15_000 : 5_000,
+    serverSelectionTimeoutMS: isCI ? 15_000 : 2_500
+  });
+  return new PowerSyncMongo(client);
+};
+
 export function setSessionSnapshotTime(session: mongo.ClientSession, time: bson.Timestamp) {
   // This is a workaround for the lack of direct support for snapshot reads in the MongoDB driver.
   if (!session.snapshotEnabled) {
package/src/storage/storage-index.ts
CHANGED

@@ -7,9 +7,8 @@ export * from './implementation/MongoPersistedSyncRulesContent.js';
 export * from './implementation/MongoStorageProvider.js';
 export * from './implementation/MongoSyncBucketStorage.js';
 export * from './implementation/MongoSyncRulesLock.js';
+export * from './implementation/MongoTestStorageFactoryGenerator.js';
 export * from './implementation/OperationBatch.js';
 export * from './implementation/PersistedBatch.js';
-export * from '
+export * from './implementation/util.js';
 export * from './MongoBucketStorage.js';
-export * from './MongoReportStorage.js';
-export * as test_utils from '../utils/test-utils.js';
package/test/src/storage.test.ts
CHANGED

@@ -2,7 +2,8 @@ import { register } from '@powersync/service-core-tests';
 import { describe } from 'vitest';
 import { INITIALIZED_MONGO_STORAGE_FACTORY } from './util.js';
 import { env } from './env.js';
-import {
+import { MongoTestStorageFactoryGenerator } from '@module/storage/implementation/MongoTestStorageFactoryGenerator.js';
+import { MongoChecksumOptions } from '@module/storage/implementation/MongoChecksums.js';

 describe('Mongo Sync Bucket Storage - Parameters', () =>
   register.registerDataStorageParameterTests(INITIALIZED_MONGO_STORAGE_FACTORY));

@@ -17,7 +18,7 @@ describe('Sync Bucket Validation', register.registerBucketValidationTests);

 describe('Mongo Sync Bucket Storage - split operations', () =>
   register.registerDataStorageDataTests(
-
+    MongoTestStorageFactoryGenerator({
       url: env.MONGO_TEST_URL,
       isCI: env.CI,
       internalOptions: {

@@ -31,7 +32,7 @@ describe('Mongo Sync Bucket Storage - split operations', () =>

 describe('Mongo Sync Bucket Storage - split buckets', () =>
   register.registerDataStorageDataTests(
-
+    MongoTestStorageFactoryGenerator({
       url: env.MONGO_TEST_URL,
       isCI: env.CI,
       internalOptions: {

@@ -42,3 +43,50 @@ describe('Mongo Sync Bucket Storage - split buckets', () =>
       }
     })
   ));
+
+describe('Mongo Sync Bucket Storage - checksum calculations', () => {
+  // This test tests 4 buckets x 4 operations in each.
+  // We specifically use operationBatchLimit that does not have factors in common with 4,
+  // as well some that do.
+  const params: MongoChecksumOptions[] = [
+    {
+      bucketBatchLimit: 100,
+      operationBatchLimit: 3
+    },
+
+    {
+      bucketBatchLimit: 10,
+      operationBatchLimit: 7
+    },
+
+    {
+      bucketBatchLimit: 3,
+      operationBatchLimit: 1
+    },
+    {
+      bucketBatchLimit: 1,
+      operationBatchLimit: 3
+    },
+    {
+      bucketBatchLimit: 2,
+      operationBatchLimit: 4
+    },
+    {
+      bucketBatchLimit: 4,
+      operationBatchLimit: 12
+    }
+  ];
+  for (let options of params) {
+    describe(`${options.bucketBatchLimit}|${options.operationBatchLimit}`, () => {
+      register.testChecksumBatching(
+        MongoTestStorageFactoryGenerator({
+          url: env.MONGO_TEST_URL,
+          isCI: env.CI,
+          internalOptions: {
+            checksumOptions: options
+          }
+        })
+      );
+    });
+  }
+});
|
@@ -97,10 +97,25 @@ bucket_definitions:
|
|
|
97
97
|
await populate(bucketStorage);
|
|
98
98
|
const { checkpoint } = await bucketStorage.getCheckpoint();
|
|
99
99
|
|
|
100
|
-
|
|
100
|
+
// Default is to small small numbers - should be a no-op
|
|
101
|
+
const result0 = await bucketStorage.populatePersistentChecksumCache({
|
|
102
|
+
maxOpId: checkpoint
|
|
103
|
+
});
|
|
104
|
+
expect(result0.buckets).toEqual(0);
|
|
105
|
+
|
|
106
|
+
// This should cache the checksums for the two buckets
|
|
107
|
+
const result1 = await bucketStorage.populatePersistentChecksumCache({
|
|
108
|
+
maxOpId: checkpoint,
|
|
109
|
+
minBucketChanges: 1
|
|
110
|
+
});
|
|
111
|
+
expect(result1.buckets).toEqual(2);
|
|
112
|
+
|
|
113
|
+
// This should be a no-op, as the checksums are already cached
|
|
114
|
+
const result2 = await bucketStorage.populatePersistentChecksumCache({
|
|
101
115
|
maxOpId: checkpoint,
|
|
102
|
-
|
|
116
|
+
minBucketChanges: 1
|
|
103
117
|
});
|
|
118
|
+
expect(result2.buckets).toEqual(0);
|
|
104
119
|
|
|
105
120
|
const checksumAfter = await bucketStorage.getChecksums(checkpoint, ['by_user2["u1"]', 'by_user2["u2"]']);
|
|
106
121
|
expect(checksumAfter.get('by_user2["u1"]')).toEqual({
|
package/test/src/util.ts
CHANGED

@@ -1,12 +1,8 @@
 import { env } from './env.js';
-import { mongoTestReportStorageFactoryGenerator, mongoTestStorageFactoryGenerator } from '@module/utils/test-utils.js';

-
-  url: env.MONGO_TEST_URL,
-  isCI: env.CI
-});
+import { MongoTestStorageFactoryGenerator } from '@module/storage/implementation/MongoTestStorageFactoryGenerator.js';

-export const
+export const INITIALIZED_MONGO_STORAGE_FACTORY = MongoTestStorageFactoryGenerator({
   url: env.MONGO_TEST_URL,
   isCI: env.CI
 });
|