@powersync/service-module-mongodb-storage 0.0.0-dev-20260225160713 → 0.0.0-dev-20260313100403
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +47 -6
- package/dist/storage/MongoBucketStorage.js +16 -3
- package/dist/storage/MongoBucketStorage.js.map +1 -1
- package/dist/storage/MongoReportStorage.d.ts +3 -3
- package/dist/storage/MongoReportStorage.js +5 -5
- package/dist/storage/MongoReportStorage.js.map +1 -1
- package/dist/storage/implementation/MongoBucketBatch.d.ts +13 -11
- package/dist/storage/implementation/MongoBucketBatch.js +208 -127
- package/dist/storage/implementation/MongoBucketBatch.js.map +1 -1
- package/dist/storage/implementation/MongoChecksums.d.ts +4 -4
- package/dist/storage/implementation/MongoChecksums.js +1 -0
- package/dist/storage/implementation/MongoChecksums.js.map +1 -1
- package/dist/storage/implementation/MongoCompactor.d.ts +2 -2
- package/dist/storage/implementation/MongoCompactor.js +10 -9
- package/dist/storage/implementation/MongoCompactor.js.map +1 -1
- package/dist/storage/implementation/MongoParameterCompactor.d.ts +2 -2
- package/dist/storage/implementation/MongoParameterCompactor.js.map +1 -1
- package/dist/storage/implementation/MongoPersistedSyncRulesContent.js +2 -7
- package/dist/storage/implementation/MongoPersistedSyncRulesContent.js.map +1 -1
- package/dist/storage/implementation/MongoSyncBucketStorage.d.ts +9 -4
- package/dist/storage/implementation/MongoSyncBucketStorage.js +35 -33
- package/dist/storage/implementation/MongoSyncBucketStorage.js.map +1 -1
- package/dist/storage/implementation/MongoSyncRulesLock.d.ts +3 -3
- package/dist/storage/implementation/MongoSyncRulesLock.js.map +1 -1
- package/dist/storage/implementation/MongoWriteCheckpointAPI.d.ts +4 -4
- package/dist/storage/implementation/MongoWriteCheckpointAPI.js.map +1 -1
- package/dist/storage/implementation/OperationBatch.js +3 -2
- package/dist/storage/implementation/OperationBatch.js.map +1 -1
- package/dist/storage/implementation/PersistedBatch.d.ts +11 -4
- package/dist/storage/implementation/PersistedBatch.js +42 -11
- package/dist/storage/implementation/PersistedBatch.js.map +1 -1
- package/dist/storage/implementation/db.d.ts +35 -1
- package/dist/storage/implementation/db.js +99 -0
- package/dist/storage/implementation/db.js.map +1 -1
- package/dist/storage/implementation/models.d.ts +13 -1
- package/dist/storage/implementation/models.js +2 -1
- package/dist/storage/implementation/models.js.map +1 -1
- package/dist/utils/test-utils.d.ts +4 -1
- package/dist/utils/test-utils.js +15 -12
- package/dist/utils/test-utils.js.map +1 -1
- package/dist/utils/util.d.ts +2 -1
- package/dist/utils/util.js +15 -1
- package/dist/utils/util.js.map +1 -1
- package/package.json +7 -7
- package/src/storage/MongoBucketStorage.ts +29 -8
- package/src/storage/MongoReportStorage.ts +5 -5
- package/src/storage/implementation/MongoBucketBatch.ts +263 -177
- package/src/storage/implementation/MongoChecksums.ts +5 -3
- package/src/storage/implementation/MongoCompactor.ts +13 -12
- package/src/storage/implementation/MongoParameterCompactor.ts +3 -3
- package/src/storage/implementation/MongoPersistedSyncRulesContent.ts +3 -11
- package/src/storage/implementation/MongoSyncBucketStorage.ts +33 -26
- package/src/storage/implementation/MongoSyncRulesLock.ts +3 -3
- package/src/storage/implementation/MongoWriteCheckpointAPI.ts +4 -4
- package/src/storage/implementation/OperationBatch.ts +3 -2
- package/src/storage/implementation/PersistedBatch.ts +42 -11
- package/src/storage/implementation/db.ts +129 -1
- package/src/storage/implementation/models.ts +16 -2
- package/src/utils/test-utils.ts +15 -12
- package/src/utils/util.ts +17 -2
- package/test/src/__snapshots__/{connection-report-storage.test.ts.snap → client-connections-storage.test.ts.snap} +68 -68
- package/test/src/__snapshots__/storage.test.ts.snap +201 -0
- package/test/src/__snapshots__/storage_compacting.test.ts.snap +17 -0
- package/test/src/__snapshots__/storage_sync.test.ts.snap +1111 -16
- package/test/src/{connection-report-storage.test.ts → client-connections-storage.test.ts} +1 -1
- package/test/src/storage.test.ts +9 -7
- package/test/src/storage_compacting.test.ts +54 -45
- package/test/src/storage_sync.test.ts +53 -51
- package/test/src/util.ts +3 -3
- package/tsconfig.tsbuildinfo +1 -1
package/src/storage/implementation/MongoChecksums.ts

@@ -2,6 +2,7 @@ import * as lib_mongo from '@powersync/lib-service-mongodb';
 import {
   addPartialChecksums,
   bson,
+  BucketChecksumRequest,
   BucketChecksum,
   ChecksumCache,
   ChecksumMap,
@@ -12,7 +13,7 @@ import {
   PartialChecksumMap,
   PartialOrFullChecksum
 } from '@powersync/service-core';
-import { PowerSyncMongo } from './db.js';
+import { VersionedPowerSyncMongo } from './db.js';
 import { StorageConfig } from './models.js';
 
 /**
@@ -49,7 +50,7 @@ export class MongoChecksums {
   private readonly storageConfig: StorageConfig;
 
   constructor(
-    private db: PowerSyncMongo,
+    private db: VersionedPowerSyncMongo,
     private group_id: number,
     private options: MongoChecksumOptions
   ) {
@@ -74,7 +75,7 @@ export class MongoChecksums {
    * Calculate checksums, utilizing the cache for partial checkums, and querying the remainder from
    * the database (bucket_state + bucket_data).
    */
-  async getChecksums(checkpoint: InternalOpId, buckets: string[]): Promise<ChecksumMap> {
+  async getChecksums(checkpoint: InternalOpId, buckets: BucketChecksumRequest[]): Promise<ChecksumMap> {
     return this.cache.getChecksumMap(checkpoint, buckets);
   }
 
@@ -298,6 +299,7 @@ export class MongoChecksums {
       const req = requests.get(bucket);
       requests.set(bucket, {
         bucket,
+        source: req!.source,
        start: doc.last_op,
        end: req!.end
      });
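A hedged sketch of the new checksum request shape, not taken from the package itself: the fields below are inferred from this diff (the `requests.set(bucket, { bucket, source, start, end })` call above), so treat the exact `BucketChecksumRequest` definition as an assumption.

```ts
import type { InternalOpId } from '@powersync/service-core';

// Assumed request shape, inferred from the requests.set(...) call in the hunk above.
interface BucketChecksumRequestSketch {
  bucket: string;       // bucket name, e.g. 'by_user["u1"]'
  source: unknown;      // bucket source definition, now threaded through each request
  start?: InternalOpId; // resume point for partial checksum computation
  end: InternalOpId;    // checkpoint to compute up to
}

// Placeholder signature; the real method returns a ChecksumMap.
declare function getChecksums(
  checkpoint: InternalOpId,
  buckets: BucketChecksumRequestSketch[]
): Promise<Map<string, unknown>>;

// Before this release the call took bare bucket names: getChecksums(checkpoint, ['by_user["u1"]'])
async function example(checkpoint: InternalOpId) {
  return getChecksums(checkpoint, [{ bucket: 'by_user["u1"]', source: {}, end: checkpoint }]);
}
```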
package/src/storage/implementation/MongoCompactor.ts

@@ -9,7 +9,7 @@ import {
   utils
 } from '@powersync/service-core';
 
-import { PowerSyncMongo } from './db.js';
+import { VersionedPowerSyncMongo } from './db.js';
 import { BucketDataDocument, BucketDataKey, BucketStateDocument } from './models.js';
 import { MongoSyncBucketStorage } from './MongoSyncBucketStorage.js';
 import { cacheKey } from './OperationBatch.js';
@@ -85,19 +85,19 @@ export class MongoCompactor {
 
   constructor(
     private storage: MongoSyncBucketStorage,
-    private db: PowerSyncMongo,
-    options?: MongoCompactOptions
+    private db: VersionedPowerSyncMongo,
+    options: MongoCompactOptions
   ) {
     this.group_id = storage.group_id;
-    this.idLimitBytes = (options?.memoryLimitMB ?? DEFAULT_MEMORY_LIMIT_MB) * 1024 * 1024;
-    this.moveBatchLimit = options?.moveBatchLimit ?? DEFAULT_MOVE_BATCH_LIMIT;
-    this.moveBatchQueryLimit = options?.moveBatchQueryLimit ?? DEFAULT_MOVE_BATCH_QUERY_LIMIT;
-    this.clearBatchLimit = options?.clearBatchLimit ?? DEFAULT_CLEAR_BATCH_LIMIT;
-    this.minBucketChanges = options?.minBucketChanges ?? DEFAULT_MIN_BUCKET_CHANGES;
-    this.minChangeRatio = options?.minChangeRatio ?? DEFAULT_MIN_CHANGE_RATIO;
-    this.maxOpId = options?.maxOpId ?? 0n;
-    this.buckets = options?.compactBuckets;
-    this.signal = options?.signal;
+    this.idLimitBytes = (options.memoryLimitMB ?? DEFAULT_MEMORY_LIMIT_MB) * 1024 * 1024;
+    this.moveBatchLimit = options.moveBatchLimit ?? DEFAULT_MOVE_BATCH_LIMIT;
+    this.moveBatchQueryLimit = options.moveBatchQueryLimit ?? DEFAULT_MOVE_BATCH_QUERY_LIMIT;
+    this.clearBatchLimit = options.clearBatchLimit ?? DEFAULT_CLEAR_BATCH_LIMIT;
+    this.minBucketChanges = options.minBucketChanges ?? DEFAULT_MIN_BUCKET_CHANGES;
+    this.minChangeRatio = options.minChangeRatio ?? DEFAULT_MIN_CHANGE_RATIO;
+    this.maxOpId = options.maxOpId ?? 0n;
+    this.buckets = options.compactBuckets;
+    this.signal = options.signal;
   }
 
   /**
@@ -662,6 +662,7 @@ export class MongoCompactor {
       buckets.map((bucket) => {
         return {
           bucket,
+          source: {} as any,
           end: this.maxOpId
         };
       })
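A minimal usage sketch of the constructor change above, assuming the option names shown in the diff: `options` is now required (the removed lines read as `options?.` accesses), while each field still falls back to its default.

```ts
import { MongoCompactor } from './MongoCompactor.js';
import { VersionedPowerSyncMongo } from './db.js';
import { MongoSyncBucketStorage } from './MongoSyncBucketStorage.js';

declare const storage: MongoSyncBucketStorage;
declare const db: VersionedPowerSyncMongo;

// An empty object now has to be passed explicitly; every field still falls
// back via `??` (DEFAULT_* constants, 0n for maxOpId, undefined for buckets/signal).
const compactor = new MongoCompactor(storage, db, {});

// Or with explicit tuning:
const tuned = new MongoCompactor(storage, db, {
  memoryLimitMB: 64, // -> idLimitBytes = 64 * 1024 * 1024
  maxOpId: 123n
});
```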
package/src/storage/implementation/MongoParameterCompactor.ts

@@ -1,8 +1,8 @@
+import { mongo } from '@powersync/lib-service-mongodb';
 import { logger } from '@powersync/lib-services-framework';
 import { bson, CompactOptions, InternalOpId } from '@powersync/service-core';
 import { LRUCache } from 'lru-cache';
-import { PowerSyncMongo } from './db.js';
-import { mongo } from '@powersync/lib-service-mongodb';
+import { VersionedPowerSyncMongo } from './db.js';
 import { BucketParameterDocument } from './models.js';
 
 /**
@@ -14,7 +14,7 @@ import { BucketParameterDocument } from './models.js';
  */
 export class MongoParameterCompactor {
   constructor(
-    private db: PowerSyncMongo,
+    private db: VersionedPowerSyncMongo,
     private group_id: number,
     private checkpoint: InternalOpId,
     private options: CompactOptions
package/src/storage/implementation/MongoPersistedSyncRulesContent.ts

@@ -1,9 +1,8 @@
 import { mongo } from '@powersync/lib-service-mongodb';
 import { storage } from '@powersync/service-core';
 import { MongoSyncRulesLock } from './MongoSyncRulesLock.js';
-import { PowerSyncMongo } from './db.js';
+import { PowerSyncMongo, VersionedPowerSyncMongo } from './db.js';
 import { getMongoStorageConfig, SyncRuleDocument } from './models.js';
-import { ErrorCode, ServiceError } from '@powersync/lib-services-framework';
 
 export class MongoPersistedSyncRulesContent extends storage.PersistedSyncRulesContent {
   public current_lock: MongoSyncRulesLock | null = null;
@@ -29,18 +28,11 @@ export class MongoPersistedSyncRulesContent extends storage.PersistedSyncRulesCo
   }
 
   getStorageConfig() {
-    const storageConfig = getMongoStorageConfig(this.storageVersion);
-    if (storageConfig == null) {
-      throw new ServiceError(
-        ErrorCode.PSYNC_S1005,
-        `Unsupported storage version ${this.storageVersion} for sync rules ${this.id}`
-      );
-    }
-    return storageConfig;
+    return getMongoStorageConfig(this.storageVersion);
   }
 
   async lock() {
-    const lock = await MongoSyncRulesLock.createLock(this.db, this);
+    const lock = await MongoSyncRulesLock.createLock(this.db.versioned(this.getStorageConfig()), this);
     this.current_lock = lock;
     return lock;
   }
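A sketch of the resulting flow, with the surrounding wiring assumed: `getStorageConfig()` now delegates to `getMongoStorageConfig()` (the unsupported-version `ServiceError` presumably moved into that helper, since `models.ts` also changed in this release), and the lock is taken on a version-scoped handle.

```ts
import { PowerSyncMongo } from './db.js';
import { getMongoStorageConfig } from './models.js';

declare const db: PowerSyncMongo;
declare const storageVersion: number; // type assumed

const config = getMongoStorageConfig(storageVersion);
// versioned() (added in db.ts further down) scopes collection access to this config:
const versionedDb = db.versioned(config);
// MongoSyncRulesLock.createLock(versionedDb, syncRules) is what lock() now calls.
```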
package/src/storage/implementation/MongoSyncBucketStorage.ts

@@ -31,7 +31,7 @@ import { LRUCache } from 'lru-cache';
 import * as timers from 'timers/promises';
 import { idPrefixFilter, mapOpEntry, readSingleBatch, setSessionSnapshotTime } from '../../utils/util.js';
 import { MongoBucketStorage } from '../MongoBucketStorage.js';
-import { PowerSyncMongo } from './db.js';
+import { VersionedPowerSyncMongo } from './db.js';
 import {
   BucketDataDocument,
   BucketDataKey,
@@ -44,6 +44,7 @@ import { MongoBucketBatch } from './MongoBucketBatch.js';
 import { MongoChecksumOptions, MongoChecksums } from './MongoChecksums.js';
 import { MongoCompactor } from './MongoCompactor.js';
 import { MongoParameterCompactor } from './MongoParameterCompactor.js';
+import { MongoPersistedSyncRulesContent } from './MongoPersistedSyncRulesContent.js';
 import { MongoWriteCheckpointAPI } from './MongoWriteCheckpointAPI.js';
 
 export interface MongoSyncBucketStorageOptions {
@@ -66,7 +67,7 @@ export class MongoSyncBucketStorage
   extends BaseObserver<storage.SyncRulesBucketStorageListener>
   implements storage.SyncRulesBucketStorage
 {
-  private readonly db: PowerSyncMongo;
+  private readonly db: VersionedPowerSyncMongo;
   readonly checksums: MongoChecksums;
 
   private parsedSyncRulesCache: { parsed: HydratedSyncRules; options: storage.ParseSyncRulesOptions } | undefined;
@@ -75,13 +76,13 @@ export class MongoSyncBucketStorage
   constructor(
     public readonly factory: MongoBucketStorage,
     public readonly group_id: number,
-    private readonly sync_rules: storage.PersistedSyncRulesContent,
+    private readonly sync_rules: MongoPersistedSyncRulesContent,
     public readonly slot_name: string,
     writeCheckpointMode: storage.WriteCheckpointMode | undefined,
     options: MongoSyncBucketStorageOptions
   ) {
     super();
-    this.db = factory.db;
+    this.db = factory.db.versioned(sync_rules.getStorageConfig());
     this.checksums = new MongoChecksums(this.db, this.group_id, {
       ...options.checksumOptions,
       storageConfig: options?.storageConfig
@@ -166,10 +167,7 @@ export class MongoSyncBucketStorage
     });
   }
 
-  async startBatch(
-    options: storage.StartBatchOptions,
-    callback: (batch: storage.BucketStorageBatch) => Promise<void>
-  ): Promise<storage.FlushedResult | null> {
+  async createWriter(options: storage.CreateWriterOptions): Promise<storage.BucketStorageBatch> {
     const doc = await this.db.sync_rules.findOne(
       {
         _id: this.group_id
@@ -178,7 +176,7 @@ export class MongoSyncBucketStorage
     );
     const checkpoint_lsn = doc?.last_checkpoint_lsn ?? null;
 
-    const batch = new MongoBucketBatch({
+    const writer = new MongoBucketBatch({
       logger: options.logger,
       db: this.db,
       syncRules: this.sync_rules.parsed(options).hydratedSyncRules(),
@@ -186,21 +184,26 @@ export class MongoSyncBucketStorage
       slotName: this.slot_name,
       lastCheckpointLsn: checkpoint_lsn,
       resumeFromLsn: maxLsn(checkpoint_lsn, doc?.snapshot_lsn),
-      noCheckpointBeforeLsn: doc?.no_checkpoint_before ?? options.zeroLSN,
       keepaliveOp: doc?.keepalive_op ? BigInt(doc.keepalive_op) : null,
       storeCurrentData: options.storeCurrentData,
       skipExistingRows: options.skipExistingRows ?? false,
       markRecordUnavailable: options.markRecordUnavailable
     });
-    this.iterateListeners((cb) => cb.batchStarted?.(batch));
-
-
-
-
-
-
-
-
+    this.iterateListeners((cb) => cb.batchStarted?.(writer));
+    return writer;
+  }
+
+  /**
+   * @deprecated Use `createWriter()` with `await using` instead.
+   */
+  async startBatch(
+    options: storage.CreateWriterOptions,
+    callback: (batch: storage.BucketStorageBatch) => Promise<void>
+  ): Promise<storage.FlushedResult | null> {
+    await using writer = await this.createWriter(options);
+    await callback(writer);
+    await writer.flush();
+    return writer.last_flushed_op != null ? { flushed_op: writer.last_flushed_op } : null;
   }
 
   async resolveTable(options: storage.ResolveTableOptions): Promise<storage.ResolveTableResult> {
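A hedged usage sketch: `createWriter()` hands back the batch writer directly, and the deprecated `startBatch()` wrapper above shows the intended pattern, `await using` (TypeScript 5.2+ explicit resource management), which disposes the writer on scope exit even on errors.

```ts
import { storage } from '@powersync/service-core';
import { MongoSyncBucketStorage } from './MongoSyncBucketStorage.js';

declare const bucketStorage: MongoSyncBucketStorage;
declare const options: storage.CreateWriterOptions;

async function example() {
  // Disposed automatically (even on error) when leaving this scope,
  // mirroring what the deprecated startBatch() wrapper does above.
  await using writer = await bucketStorage.createWriter(options);
  // ...apply replication operations to `writer`...
  await writer.flush();
  return writer.last_flushed_op != null ? { flushed_op: writer.last_flushed_op } : null;
}
```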
@@ -372,19 +375,20 @@ export class MongoSyncBucketStorage
 
   async *getBucketDataBatch(
     checkpoint: utils.InternalOpId,
-    dataBuckets: Map<string, utils.InternalOpId>,
+    dataBuckets: storage.BucketDataRequest[],
     options?: storage.BucketDataBatchOptions
   ): AsyncIterable<storage.SyncBucketDataChunk> {
-    if (dataBuckets.size == 0) {
+    if (dataBuckets.length == 0) {
       return;
     }
     let filters: mongo.Filter<BucketDataDocument>[] = [];
+    const bucketMap = new Map(dataBuckets.map((request) => [request.bucket, request.start]));
 
     if (checkpoint == null) {
       throw new ServiceAssertionError('checkpoint is null');
     }
     const end = checkpoint;
-    for (let [name, start] of dataBuckets.entries()) {
+    for (let { bucket: name, start } of dataBuckets) {
       filters.push({
         _id: {
           $gt: {
@@ -477,7 +481,7 @@ export class MongoSyncBucketStorage
     }
 
     if (start == null) {
-      const startOpId = dataBuckets.get(bucket);
+      const startOpId = bucketMap.get(bucket);
       if (startOpId == null) {
         throw new ServiceAssertionError(`data for unexpected bucket: ${bucket}`);
       }
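A hedged call-site sketch of the signature change above: `dataBuckets` was previously a `Map` of bucket name to start op id and is now an array of `storage.BucketDataRequest` objects, which the method re-indexes into `bucketMap` internally. Fields beyond `bucket` and `start` are assumed.

```ts
import { storage, utils } from '@powersync/service-core';
import { MongoSyncBucketStorage } from './MongoSyncBucketStorage.js';

declare const bucketStorage: MongoSyncBucketStorage;
declare const checkpoint: utils.InternalOpId;

async function example() {
  // Shape assumed beyond `bucket` and `start`, which the diff destructures.
  const requests = [{ bucket: 'by_user["u1"]', start: 0n }] as storage.BucketDataRequest[];
  for await (const chunk of bucketStorage.getBucketDataBatch(checkpoint, requests)) {
    chunk; // a storage.SyncBucketDataChunk per iteration
  }
}
```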
@@ -519,7 +523,10 @@ export class MongoSyncBucketStorage
     }
   }
 
-  async getChecksums(checkpoint: utils.InternalOpId, buckets: string[]): Promise<utils.ChecksumMap> {
+  async getChecksums(
+    checkpoint: utils.InternalOpId,
+    buckets: storage.BucketChecksumRequest[]
+  ): Promise<utils.ChecksumMap> {
     return this.checksums.getChecksums(checkpoint, buckets);
   }
 
@@ -576,7 +583,7 @@ export class MongoSyncBucketStorage
   async clear(options?: storage.ClearStorageOptions): Promise<void> {
     while (true) {
       if (options?.signal?.aborted) {
-        throw new ReplicationAbortedError('Aborted clearing data');
+        throw new ReplicationAbortedError('Aborted clearing data', options.signal.reason);
       }
       try {
         await this.clearIteration();
@@ -631,7 +638,7 @@ export class MongoSyncBucketStorage
       { maxTimeMS: lib_mongo.db.MONGO_CLEAR_OPERATION_TIMEOUT_MS }
     );
 
-    await this.db.current_data.deleteMany(
+    await this.db.common_current_data.deleteMany(
       {
         _id: idPrefixFilter<SourceKey>({ g: this.group_id }, ['t', 'k'])
       },
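A small sketch of the abort change above: `clear()` now forwards the signal's reason into the `ReplicationAbortedError`. Standard `AbortController` usage applies; exactly how the error surfaces the reason (for example as a `cause`) is an assumption.

```ts
import { MongoSyncBucketStorage } from './MongoSyncBucketStorage.js';

declare const bucketStorage: MongoSyncBucketStorage;

async function clearWithShutdownSignal() {
  const controller = new AbortController();
  const pending = bucketStorage.clear({ signal: controller.signal });
  // e.g. on shutdown; the reason now travels with the thrown error:
  controller.abort(new Error('shutting down'));
  await pending; // rejects with ReplicationAbortedError('Aborted clearing data', <reason>)
}
```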
package/src/storage/implementation/MongoSyncRulesLock.ts

@@ -2,7 +2,7 @@ import crypto from 'crypto';
 
 import { ErrorCode, logger, ServiceError } from '@powersync/lib-services-framework';
 import { storage } from '@powersync/service-core';
-import { PowerSyncMongo } from './db.js';
+import { PowerSyncMongo, VersionedPowerSyncMongo } from './db.js';
 
 /**
  * Manages a lock on a sync rules document, so that only one process
@@ -12,7 +12,7 @@ export class MongoSyncRulesLock implements storage.ReplicationLock {
   private readonly refreshInterval: NodeJS.Timeout;
 
   static async createLock(
-    db: PowerSyncMongo,
+    db: VersionedPowerSyncMongo,
     sync_rules: storage.PersistedSyncRulesContent
   ): Promise<MongoSyncRulesLock> {
     const lockId = crypto.randomBytes(8).toString('hex');
@@ -52,7 +52,7 @@ export class MongoSyncRulesLock implements storage.ReplicationLock {
   }
 
   constructor(
-    private db: PowerSyncMongo,
+    private db: VersionedPowerSyncMongo,
     public sync_rules_id: number,
     private lock_id: string
   ) {
package/src/storage/implementation/MongoWriteCheckpointAPI.ts

@@ -1,16 +1,16 @@
 import { mongo } from '@powersync/lib-service-mongodb';
 import * as framework from '@powersync/lib-services-framework';
 import { GetCheckpointChangesOptions, InternalOpId, storage } from '@powersync/service-core';
-import { PowerSyncMongo } from './db.js';
+import { PowerSyncMongo, VersionedPowerSyncMongo } from './db.js';
 
 export type MongoCheckpointAPIOptions = {
-  db: PowerSyncMongo;
+  db: VersionedPowerSyncMongo;
   mode: storage.WriteCheckpointMode;
   sync_rules_id: number;
 };
 
 export class MongoWriteCheckpointAPI implements storage.WriteCheckpointAPI {
-  readonly db: PowerSyncMongo;
+  readonly db: VersionedPowerSyncMongo;
   private _mode: storage.WriteCheckpointMode;
 
   constructor(options: MongoCheckpointAPIOptions) {
@@ -166,7 +166,7 @@ export class MongoWriteCheckpointAPI implements storage.WriteCheckpointAPI {
 }
 
 export async function batchCreateCustomWriteCheckpoints(
-  db: PowerSyncMongo,
+  db: VersionedPowerSyncMongo,
   session: mongo.ClientSession,
   checkpoints: storage.CustomWriteCheckpointOptions[],
   opId: InternalOpId
package/src/storage/implementation/OperationBatch.ts

@@ -2,6 +2,7 @@ import { ToastableSqliteRow } from '@powersync/service-sync-rules';
 import * as bson from 'bson';
 
 import { storage } from '@powersync/service-core';
+import { mongoTableId } from '../storage-index.js';
 
 /**
  * Maximum number of operations in a batch.
@@ -86,8 +87,8 @@ export class RecordOperation {
     const beforeId = record.beforeReplicaId ?? record.afterReplicaId;
     this.afterId = afterId;
     this.beforeId = beforeId;
-    this.internalBeforeKey = cacheKey(record.sourceTable.id, beforeId);
-    this.internalAfterKey = afterId ? cacheKey(record.sourceTable.id, afterId) : null;
+    this.internalBeforeKey = cacheKey(mongoTableId(record.sourceTable.id), beforeId);
+    this.internalAfterKey = afterId ? cacheKey(mongoTableId(record.sourceTable.id), afterId) : null;
 
     this.estimatedSize = estimateRowSize(record.before) + estimateRowSize(record.after);
   }
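A hypothetical sketch, not the package's implementation: every `sourceTable.id` now passes through `mongoTableId(...)` before being used as a cache key or stored as `source_table`, which suggests a narrowing/conversion from the storage-level table id to the `bson.ObjectId` this module persists. A minimal helper along those lines might look like:

```ts
import { ObjectId } from 'bson';

// Hypothetical stand-in for the real mongoTableId from utils/util.ts; the
// assumption is that storage-level ids arrive as ObjectIds or their hex form.
function mongoTableIdSketch(id: string | ObjectId): ObjectId {
  return id instanceof ObjectId ? id : new ObjectId(id);
}
```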
package/src/storage/implementation/PersistedBatch.ts

@@ -5,9 +5,9 @@ import * as bson from 'bson';
 
 import { Logger, logger as defaultLogger } from '@powersync/lib-services-framework';
 import { InternalOpId, storage, utils } from '@powersync/service-core';
-import { currentBucketKey, MAX_ROW_SIZE } from './MongoBucketBatch.js';
+import { currentBucketKey, EMPTY_DATA, MAX_ROW_SIZE } from './MongoBucketBatch.js';
 import { MongoIdSequence } from './MongoIdSequence.js';
-import { PowerSyncMongo } from './db.js';
+import { PowerSyncMongo, VersionedPowerSyncMongo } from './db.js';
 import {
   BucketDataDocument,
   BucketParameterDocument,
@@ -16,7 +16,7 @@ import {
   CurrentDataDocument,
   SourceKey
 } from './models.js';
-import { replicaIdToSubkey } from '../../utils/util.js';
+import { mongoTableId, replicaIdToSubkey } from '../../utils/util.js';
 
 /**
  * Maximum size of operations we write in a single transaction.
@@ -63,6 +63,7 @@ export class PersistedBatch {
   currentSize = 0;
 
   constructor(
+    private db: VersionedPowerSyncMongo,
     private group_id: number,
     writtenSize: number,
     options?: { logger?: Logger }
@@ -132,7 +133,7 @@ export class PersistedBatch {
          o: op_id
        },
        op: 'PUT',
-        source_table: options.table.id,
+        source_table: mongoTableId(options.table.id),
        source_key: options.sourceKey,
        table: k.table,
        row_id: k.id,
@@ -159,7 +160,7 @@ export class PersistedBatch {
          o: op_id
        },
        op: 'REMOVE',
-        source_table: options.table.id,
+        source_table: mongoTableId(options.table.id),
        source_key: options.sourceKey,
        table: bd.table,
        row_id: bd.id,
@@ -208,7 +209,7 @@ export class PersistedBatch {
        _id: op_id,
        key: {
          g: this.group_id,
-          t: sourceTable.id,
+          t: mongoTableId(sourceTable.id),
          k: sourceKey
        },
        lookup: binLookup,
@@ -230,7 +231,7 @@ export class PersistedBatch {
        _id: op_id,
        key: {
          g: this.group_id,
-          t: sourceTable.id,
+          t: mongoTableId(sourceTable.id),
          k: sourceKey
        },
        lookup: lookup,
@@ -243,7 +244,7 @@ export class PersistedBatch {
     }
   }
 
-  deleteCurrentData(id: SourceKey) {
+  hardDeleteCurrentData(id: SourceKey) {
     const op: mongo.AnyBulkWriteOperation<CurrentDataDocument> = {
       deleteOne: {
         filter: { _id: id }
@@ -253,12 +254,41 @@ export class PersistedBatch {
     this.currentSize += 50;
   }
 
+  /**
+   * Mark a current_data document as soft deleted, to delete on the next commit.
+   *
+   * If softDeleteCurrentData is not enabled, this falls back to a hard delete.
+   */
+  softDeleteCurrentData(id: SourceKey, checkpointGreaterThan: bigint) {
+    if (!this.db.storageConfig.softDeleteCurrentData) {
+      this.hardDeleteCurrentData(id);
+      return;
+    }
+    const op: mongo.AnyBulkWriteOperation<CurrentDataDocument> = {
+      updateOne: {
+        filter: { _id: id },
+        update: {
+          $set: {
+            data: EMPTY_DATA,
+            buckets: [],
+            lookups: [],
+            pending_delete: checkpointGreaterThan
+          }
+        },
+        upsert: true
+      }
+    };
+    this.currentData.push(op);
+    this.currentSize += 50;
+  }
+
   upsertCurrentData(id: SourceKey, values: Partial<CurrentDataDocument>) {
     const op: mongo.AnyBulkWriteOperation<CurrentDataDocument> = {
       updateOne: {
         filter: { _id: id },
         update: {
-          $set: values
+          $set: values,
+          $unset: { pending_delete: 1 }
         },
         upsert: true
       }
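A usage sketch of the soft-delete lifecycle introduced above: rather than removing the document, `softDeleteCurrentData` empties it and stamps `pending_delete`, and a later upsert revives it via the `$unset` added to `upsertCurrentData`.

```ts
import { PersistedBatch } from './PersistedBatch.js';
import type { SourceKey } from './models.js';

declare const batch: PersistedBatch;
declare const id: SourceKey;

// Empties the document and stamps it, instead of deleting it outright
// (falls back to hardDeleteCurrentData when softDeleteCurrentData is off):
batch.softDeleteCurrentData(id, 100n);

// Re-writing the same row before cleanup revives it; the
// `$unset: { pending_delete: 1 }` above cancels the pending deletion:
batch.upsertCurrentData(id, { buckets: [], lookups: [] });
```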
@@ -276,7 +306,8 @@ export class PersistedBatch {
     );
   }
 
-  async flush(db: PowerSyncMongo, session: mongo.ClientSession, options?: storage.BucketBatchCommitOptions) {
+  async flush(session: mongo.ClientSession, options?: storage.BucketBatchCommitOptions) {
+    const db = this.db;
     const startAt = performance.now();
     let flushedSomething = false;
     if (this.bucketData.length > 0) {
@@ -297,7 +328,7 @@ export class PersistedBatch {
     }
     if (this.currentData.length > 0) {
       flushedSomething = true;
-      await db.current_data.bulkWrite(this.currentData, {
+      await db.common_current_data.bulkWrite(this.currentData, {
        session,
        // may update and delete data within the same batch - order matters
        ordered: true
package/src/storage/implementation/db.ts

@@ -10,13 +10,16 @@ import {
   CheckpointEventDocument,
   ClientConnectionDocument,
   CurrentDataDocument,
+  CurrentDataDocumentV3,
   CustomWriteCheckpointDocument,
   IdSequenceDocument,
   InstanceDocument,
   SourceTableDocument,
+  StorageConfig,
   SyncRuleDocument,
   WriteCheckpointDocument
 } from './models.js';
+import { ServiceAssertionError } from '@powersync/lib-services-framework';
 
 export interface PowerSyncMongoOptions {
   /**
@@ -27,6 +30,7 @@ export interface PowerSyncMongoOptions {
 
 export class PowerSyncMongo {
   readonly current_data: mongo.Collection<CurrentDataDocument>;
+  readonly v3_current_data: mongo.Collection<CurrentDataDocumentV3>;
   readonly bucket_data: mongo.Collection<BucketDataDocument>;
   readonly bucket_parameters: mongo.Collection<BucketParameterDocument>;
   readonly op_id_sequence: mongo.Collection<IdSequenceDocument>;
@@ -51,7 +55,8 @@ export class PowerSyncMongo {
     });
     this.db = db;
 
-    this.current_data = db.collection
+    this.current_data = db.collection('current_data');
+    this.v3_current_data = db.collection('v3_current_data');
     this.bucket_data = db.collection('bucket_data');
     this.bucket_parameters = db.collection('bucket_parameters');
     this.op_id_sequence = db.collection('op_id_sequence');
@@ -66,11 +71,16 @@ export class PowerSyncMongo {
     this.connection_report_events = this.db.collection('connection_report_events');
   }
 
+  versioned(storageConfig: StorageConfig) {
+    return new VersionedPowerSyncMongo(this, storageConfig);
+  }
+
   /**
    * Clear all collections.
    */
   async clear() {
     await this.current_data.deleteMany({});
+    await this.v3_current_data.deleteMany({});
     await this.bucket_data.deleteMany({});
     await this.bucket_parameters.deleteMany({});
     await this.op_id_sequence.deleteMany({});
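A sketch of the new `versioned()` factory (construction of `StorageConfig` values is assumed): both handles wrap the same client and database, and only collection access is gated, so the two storage versions can coexist in one database.

```ts
import { PowerSyncMongo, VersionedPowerSyncMongo } from './db.js';
import { StorageConfig } from './models.js';

declare const db: PowerSyncMongo;
// StorageConfig fields beyond softDeleteCurrentData are not shown in this diff.
declare const v1Config: StorageConfig; // softDeleteCurrentData: false
declare const v3Config: StorageConfig; // softDeleteCurrentData: true

const v1: VersionedPowerSyncMongo = db.versioned(v1Config);
const v3: VersionedPowerSyncMongo = db.versioned(v3Config);

const a = v1.common_current_data; // -> db.current_data
const b = v3.common_current_data; // -> db.v3_current_data
// v3.v1_current_data would throw ServiceAssertionError (guarded getter in the hunk below).
```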
@@ -171,6 +181,124 @@ export class PowerSyncMongo {
       { name: 'dirty_count' }
     );
   }
+
+  async initializeStorageVersion(storageConfig: StorageConfig) {
+    if (storageConfig.softDeleteCurrentData) {
+      // Initialize the v3_current_data collection, which is used for the new storage version.
+      // No-op if this already exists
+      await this.v3_current_data.createIndex(
+        {
+          '_id.g': 1,
+          pending_delete: 1
+        },
+        {
+          partialFilterExpression: { pending_delete: { $exists: true } },
+          name: 'pending_delete'
+        }
+      );
+    }
+  }
+}
+
+/**
+ * This is similar to PowerSyncMongo, but blocks access to certain collections based on the storage version.
+ */
+export class VersionedPowerSyncMongo {
+  readonly client: mongo.MongoClient;
+  readonly db: mongo.Db;
+
+  readonly storageConfig: StorageConfig;
+  #upstream: PowerSyncMongo;
+
+  constructor(upstream: PowerSyncMongo, storageConfig: StorageConfig) {
+    this.#upstream = upstream;
+    this.client = upstream.client;
+    this.db = upstream.db;
+    this.storageConfig = storageConfig;
+  }
+
+  /**
+   * Uses either `current_data` or `v3_current_data` collection based on the storage version.
+   *
+   * Use in places where it does not matter which version is used.
+   */
+  get common_current_data(): mongo.Collection<CurrentDataDocument> {
+    if (this.storageConfig.softDeleteCurrentData) {
+      return this.#upstream.v3_current_data;
+    } else {
+      return this.#upstream.current_data;
+    }
+  }
+
+  get v1_current_data() {
+    if (this.storageConfig.softDeleteCurrentData) {
+      throw new ServiceAssertionError(
+        'current_data collection should not be used when softDeleteCurrentData is enabled'
+      );
+    }
+    return this.#upstream.current_data;
+  }
+
+  get v3_current_data() {
+    if (!this.storageConfig.softDeleteCurrentData) {
+      throw new ServiceAssertionError(
+        'v3_current_data collection should not be used when softDeleteCurrentData is disabled'
+      );
+    }
+    return this.#upstream.v3_current_data;
+  }
+
+  get bucket_data() {
+    return this.#upstream.bucket_data;
+  }
+
+  get bucket_parameters() {
+    return this.#upstream.bucket_parameters;
+  }
+
+  get op_id_sequence() {
+    return this.#upstream.op_id_sequence;
+  }
+
+  get sync_rules() {
+    return this.#upstream.sync_rules;
+  }
+
+  get source_tables() {
+    return this.#upstream.source_tables;
+  }
+
+  get custom_write_checkpoints() {
+    return this.#upstream.custom_write_checkpoints;
+  }
+
+  get write_checkpoints() {
+    return this.#upstream.write_checkpoints;
+  }
+
+  get instance() {
+    return this.#upstream.instance;
+  }
+
+  get locks() {
+    return this.#upstream.locks;
+  }
+
+  get bucket_state() {
+    return this.#upstream.bucket_state;
+  }
+
+  get checkpoint_events() {
+    return this.#upstream.checkpoint_events;
+  }
+
+  get connection_report_events() {
+    return this.#upstream.connection_report_events;
+  }
+
+  notifyCheckpoint() {
+    return this.#upstream.notifyCheckpoint();
+  }
 }
 
 export function createPowerSyncMongo(config: MongoStorageConfig, options?: lib_mongo.MongoConnectionOptions) {
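A hedged sketch of what the `pending_delete` partial index appears designed for: a cleanup pass that physically removes soft-deleted rows once a commit has passed their checkpoint. The actual cleanup code is not part of this diff; the query below is an assumption shaped to match the index definition.

```ts
import { PowerSyncMongo } from './db.js';

declare const db: PowerSyncMongo;
declare const groupId: number;
declare const committedCheckpoint: bigint;

async function cleanupSoftDeletes() {
  await db.v3_current_data.deleteMany({
    '_id.g': groupId,
    // The partialFilterExpression means only documents with pending_delete set
    // are indexed, so this filter can use the 'pending_delete' index.
    // (bigint vs. BSON Long representation of the stored value is assumed.)
    pending_delete: { $lte: committedCheckpoint }
  });
}
```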