@powersync/service-module-mongodb 0.0.0-dev-20241128134723 → 0.0.0-dev-20241219110735
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +69 -4
- package/dist/db/db-index.d.ts +1 -0
- package/dist/db/db-index.js +2 -0
- package/dist/db/db-index.js.map +1 -0
- package/dist/db/mongo.d.ts +35 -0
- package/dist/db/mongo.js +73 -0
- package/dist/db/mongo.js.map +1 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +2 -0
- package/dist/index.js.map +1 -1
- package/dist/locks/MonogLocks.d.ts +36 -0
- package/dist/locks/MonogLocks.js +83 -0
- package/dist/locks/MonogLocks.js.map +1 -0
- package/dist/migrations/MonogMigrationAgent.d.ts +12 -0
- package/dist/migrations/MonogMigrationAgent.js +25 -0
- package/dist/migrations/MonogMigrationAgent.js.map +1 -0
- package/dist/migrations/db/migrations/1684951997326-init.d.ts +3 -0
- package/dist/migrations/db/migrations/1684951997326-init.js +30 -0
- package/dist/migrations/db/migrations/1684951997326-init.js.map +1 -0
- package/dist/migrations/db/migrations/1688556755264-initial-sync-rules.d.ts +2 -0
- package/dist/migrations/db/migrations/1688556755264-initial-sync-rules.js +5 -0
- package/dist/migrations/db/migrations/1688556755264-initial-sync-rules.js.map +1 -0
- package/dist/migrations/db/migrations/1702295701188-sync-rule-state.d.ts +3 -0
- package/dist/migrations/db/migrations/1702295701188-sync-rule-state.js +54 -0
- package/dist/migrations/db/migrations/1702295701188-sync-rule-state.js.map +1 -0
- package/dist/migrations/db/migrations/1711543888062-write-checkpoint-index.d.ts +3 -0
- package/dist/migrations/db/migrations/1711543888062-write-checkpoint-index.js +26 -0
- package/dist/migrations/db/migrations/1711543888062-write-checkpoint-index.js.map +1 -0
- package/dist/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.d.ts +3 -0
- package/dist/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.js +28 -0
- package/dist/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.js.map +1 -0
- package/dist/migrations/mongo-migration-store.d.ts +7 -0
- package/dist/migrations/mongo-migration-store.js +49 -0
- package/dist/migrations/mongo-migration-store.js.map +1 -0
- package/dist/module/MongoModule.js +15 -4
- package/dist/module/MongoModule.js.map +1 -1
- package/dist/replication/MongoManager.d.ts +1 -1
- package/dist/replication/MongoManager.js +3 -2
- package/dist/replication/MongoManager.js.map +1 -1
- package/dist/storage/MongoBucketStorage.d.ts +48 -0
- package/dist/storage/MongoBucketStorage.js +425 -0
- package/dist/storage/MongoBucketStorage.js.map +1 -0
- package/dist/storage/implementation/MongoBucketBatch.d.ts +72 -0
- package/dist/storage/implementation/MongoBucketBatch.js +681 -0
- package/dist/storage/implementation/MongoBucketBatch.js.map +1 -0
- package/dist/storage/implementation/MongoCompactor.d.ts +40 -0
- package/dist/storage/implementation/MongoCompactor.js +310 -0
- package/dist/storage/implementation/MongoCompactor.js.map +1 -0
- package/dist/storage/implementation/MongoIdSequence.d.ts +12 -0
- package/dist/storage/implementation/MongoIdSequence.js +21 -0
- package/dist/storage/implementation/MongoIdSequence.js.map +1 -0
- package/dist/storage/implementation/MongoPersistedSyncRules.d.ts +9 -0
- package/dist/storage/implementation/MongoPersistedSyncRules.js +9 -0
- package/dist/storage/implementation/MongoPersistedSyncRules.js.map +1 -0
- package/dist/storage/implementation/MongoPersistedSyncRulesContent.d.ts +20 -0
- package/dist/storage/implementation/MongoPersistedSyncRulesContent.js +26 -0
- package/dist/storage/implementation/MongoPersistedSyncRulesContent.js.map +1 -0
- package/dist/storage/implementation/MongoStorageProvider.d.ts +6 -0
- package/dist/storage/implementation/MongoStorageProvider.js +34 -0
- package/dist/storage/implementation/MongoStorageProvider.js.map +1 -0
- package/dist/storage/implementation/MongoSyncBucketStorage.d.ts +36 -0
- package/dist/storage/implementation/MongoSyncBucketStorage.js +529 -0
- package/dist/storage/implementation/MongoSyncBucketStorage.js.map +1 -0
- package/dist/storage/implementation/MongoSyncRulesLock.d.ts +16 -0
- package/dist/storage/implementation/MongoSyncRulesLock.js +65 -0
- package/dist/storage/implementation/MongoSyncRulesLock.js.map +1 -0
- package/dist/storage/implementation/MongoWriteCheckpointAPI.d.ts +20 -0
- package/dist/storage/implementation/MongoWriteCheckpointAPI.js +104 -0
- package/dist/storage/implementation/MongoWriteCheckpointAPI.js.map +1 -0
- package/dist/storage/implementation/OperationBatch.d.ts +34 -0
- package/dist/storage/implementation/OperationBatch.js +119 -0
- package/dist/storage/implementation/OperationBatch.js.map +1 -0
- package/dist/storage/implementation/PersistedBatch.d.ts +46 -0
- package/dist/storage/implementation/PersistedBatch.js +223 -0
- package/dist/storage/implementation/PersistedBatch.js.map +1 -0
- package/dist/storage/implementation/config.d.ts +19 -0
- package/dist/storage/implementation/config.js +26 -0
- package/dist/storage/implementation/config.js.map +1 -0
- package/dist/storage/implementation/db.d.ts +36 -0
- package/dist/storage/implementation/db.js +47 -0
- package/dist/storage/implementation/db.js.map +1 -0
- package/dist/storage/implementation/models.d.ts +139 -0
- package/dist/storage/implementation/models.js +2 -0
- package/dist/storage/implementation/models.js.map +1 -0
- package/dist/storage/implementation/util.d.ts +58 -0
- package/dist/storage/implementation/util.js +196 -0
- package/dist/storage/implementation/util.js.map +1 -0
- package/dist/storage/storage-index.d.ts +14 -0
- package/dist/storage/storage-index.js +15 -0
- package/dist/storage/storage-index.js.map +1 -0
- package/dist/types/types.d.ts +3 -0
- package/dist/types/types.js +4 -1
- package/dist/types/types.js.map +1 -1
- package/package.json +11 -8
- package/src/db/db-index.ts +1 -0
- package/src/db/mongo.ts +81 -0
- package/src/index.ts +4 -0
- package/src/locks/MonogLocks.ts +147 -0
- package/src/migrations/MonogMigrationAgent.ts +39 -0
- package/src/migrations/db/migrations/1684951997326-init.ts +39 -0
- package/src/migrations/db/migrations/1688556755264-initial-sync-rules.ts +5 -0
- package/src/migrations/db/migrations/1702295701188-sync-rule-state.ts +105 -0
- package/src/migrations/db/migrations/1711543888062-write-checkpoint-index.ts +38 -0
- package/src/migrations/db/migrations/1727099539247-custom-write-checkpoint-index.ts +40 -0
- package/src/migrations/mongo-migration-store.ts +62 -0
- package/src/module/MongoModule.ts +18 -4
- package/src/replication/MongoManager.ts +6 -2
- package/src/storage/MongoBucketStorage.ts +530 -0
- package/src/storage/implementation/MongoBucketBatch.ts +893 -0
- package/src/storage/implementation/MongoCompactor.ts +392 -0
- package/src/storage/implementation/MongoIdSequence.ts +24 -0
- package/src/storage/implementation/MongoPersistedSyncRules.ts +16 -0
- package/src/storage/implementation/MongoPersistedSyncRulesContent.ts +49 -0
- package/src/storage/implementation/MongoStorageProvider.ts +42 -0
- package/src/storage/implementation/MongoSyncBucketStorage.ts +612 -0
- package/src/storage/implementation/MongoSyncRulesLock.ts +88 -0
- package/src/storage/implementation/MongoWriteCheckpointAPI.ts +146 -0
- package/src/storage/implementation/OperationBatch.ts +130 -0
- package/src/storage/implementation/PersistedBatch.ts +283 -0
- package/src/storage/implementation/config.ts +40 -0
- package/src/storage/implementation/db.ts +88 -0
- package/src/storage/implementation/models.ts +160 -0
- package/src/storage/implementation/util.ts +209 -0
- package/src/storage/storage-index.ts +14 -0
- package/src/types/types.ts +8 -1
- package/test/src/__snapshots__/storage_sync.test.ts.snap +332 -0
- package/test/src/change_stream.test.ts +34 -33
- package/test/src/change_stream_utils.ts +6 -6
- package/test/src/env.ts +1 -0
- package/test/src/slow_tests.test.ts +4 -4
- package/test/src/storage.test.ts +7 -0
- package/test/src/storage_compacting.test.ts +6 -0
- package/test/src/storage_sync.test.ts +113 -0
- package/test/src/util.ts +20 -7
- package/test/tsconfig.json +4 -0
- package/tsconfig.tsbuildinfo +1 -1
- package/vitest.config.ts +1 -1
@@ -0,0 +1,392 @@
+import { logger } from '@powersync/lib-services-framework';
+import { storage, utils } from '@powersync/service-core';
+import { AnyBulkWriteOperation, MaxKey, MinKey } from 'mongodb';
+import { PowerSyncMongo } from './db.js';
+import { BucketDataDocument, BucketDataKey } from './models.js';
+import { cacheKey } from './OperationBatch.js';
+import { safeBulkWrite } from './util.js';
+
+interface CurrentBucketState {
+  /** Bucket name */
+  bucket: string;
+  /**
+   * Rows seen in the bucket, with the last op_id of each.
+   */
+  seen: Map<string, bigint>;
+  /**
+   * Estimated memory usage of the seen Map.
+   */
+  trackingSize: number;
+
+  /**
+   * Last (lowest) seen op_id that is not a PUT.
+   */
+  lastNotPut: bigint | null;
+
+  /**
+   * Number of REMOVE/MOVE operations seen since lastNotPut.
+   */
+  opsSincePut: number;
+}
+
+/**
+ * Additional options, primarily for testing.
+ */
+export interface MongoCompactOptions extends storage.CompactOptions {
+  /** Minimum of 2 */
+  clearBatchLimit?: number;
+  /** Minimum of 1 */
+  moveBatchLimit?: number;
+  /** Minimum of 1 */
+  moveBatchQueryLimit?: number;
+}
+
+const DEFAULT_CLEAR_BATCH_LIMIT = 5000;
+const DEFAULT_MOVE_BATCH_LIMIT = 2000;
+const DEFAULT_MOVE_BATCH_QUERY_LIMIT = 10_000;
+
+/** This default is primarily for tests. */
+const DEFAULT_MEMORY_LIMIT_MB = 64;
+
+export class MongoCompactor {
+  private updates: AnyBulkWriteOperation<BucketDataDocument>[] = [];
+
+  private idLimitBytes: number;
+  private moveBatchLimit: number;
+  private moveBatchQueryLimit: number;
+  private clearBatchLimit: number;
+  private maxOpId: bigint | undefined;
+  private buckets: string[] | undefined;
+
+  constructor(
+    private db: PowerSyncMongo,
+    private group_id: number,
+    options?: MongoCompactOptions
+  ) {
+    this.idLimitBytes = (options?.memoryLimitMB ?? DEFAULT_MEMORY_LIMIT_MB) * 1024 * 1024;
+    this.moveBatchLimit = options?.moveBatchLimit ?? DEFAULT_MOVE_BATCH_LIMIT;
+    this.moveBatchQueryLimit = options?.moveBatchQueryLimit ?? DEFAULT_MOVE_BATCH_QUERY_LIMIT;
+    this.clearBatchLimit = options?.clearBatchLimit ?? DEFAULT_CLEAR_BATCH_LIMIT;
+    this.maxOpId = options?.maxOpId;
+    this.buckets = options?.compactBuckets;
+  }
+
+  /**
+   * Compact buckets by converting operations into MOVE and/or CLEAR operations.
+   *
+   * See /docs/compacting-operations.md for details.
+   */
+  async compact() {
+    if (this.buckets) {
+      for (let bucket of this.buckets) {
+        // We can make this more efficient later on by iterating
+        // through the buckets in a single query.
+        // That makes batching more tricky, so we leave for later.
+        await this.compactInternal(bucket);
+      }
+    } else {
+      await this.compactInternal(undefined);
+    }
+  }
+
+  async compactInternal(bucket: string | undefined) {
+    const idLimitBytes = this.idLimitBytes;
+
+    let currentState: CurrentBucketState | null = null;
+
+    let bucketLower: string | MinKey;
+    let bucketUpper: string | MaxKey;
+
+    if (bucket == null) {
+      bucketLower = new MinKey();
+      bucketUpper = new MaxKey();
+    } else if (bucket.includes('[')) {
+      // Exact bucket name
+      bucketLower = bucket;
+      bucketUpper = bucket;
+    } else {
+      // Bucket definition name
+      bucketLower = `${bucket}[`;
+      bucketUpper = `${bucket}[\uFFFF`;
+    }
+
+    // Constant lower bound
+    const lowerBound: BucketDataKey = {
+      g: this.group_id,
+      b: bucketLower as string,
+      o: new MinKey() as any
+    };
+
+    // Upper bound is adjusted for each batch
+    let upperBound: BucketDataKey = {
+      g: this.group_id,
+      b: bucketUpper as string,
+      o: new MaxKey() as any
+    };
+
+    while (true) {
+      // Query one batch at a time, to avoid cursor timeouts
+      const batch = await this.db.bucket_data
+        .find(
+          {
+            _id: {
+              $gte: lowerBound,
+              $lt: upperBound
+            }
+          },
+          {
+            projection: {
+              _id: 1,
+              op: 1,
+              table: 1,
+              row_id: 1,
+              source_table: 1,
+              source_key: 1
+            },
+            limit: this.moveBatchQueryLimit,
+            sort: { _id: -1 },
+            singleBatch: true
+          }
+        )
+        .toArray();
+
+      if (batch.length == 0) {
+        // We've reached the end
+        break;
+      }
+
+      // Set upperBound for the next batch
+      upperBound = batch[batch.length - 1]._id;
+
+      for (let doc of batch) {
+        if (currentState == null || doc._id.b != currentState.bucket) {
+          if (currentState != null && currentState.lastNotPut != null && currentState.opsSincePut >= 1) {
+            // Important to flush before clearBucket()
+            await this.flush();
+            logger.info(
+              `Inserting CLEAR at ${this.group_id}:${currentState.bucket}:${currentState.lastNotPut} to remove ${currentState.opsSincePut} operations`
+            );
+
+            const bucket = currentState.bucket;
+            const clearOp = currentState.lastNotPut;
+            // Free memory before clearing bucket
+            currentState = null;
+            await this.clearBucket(bucket, clearOp);
+          }
+          currentState = {
+            bucket: doc._id.b,
+            seen: new Map(),
+            trackingSize: 0,
+            lastNotPut: null,
+            opsSincePut: 0
+          };
+        }
+
+        if (this.maxOpId != null && doc._id.o > this.maxOpId) {
+          continue;
+        }
+
+        let isPersistentPut = doc.op == 'PUT';
+
+        if (doc.op == 'REMOVE' || doc.op == 'PUT') {
+          const key = `${doc.table}/${doc.row_id}/${cacheKey(doc.source_table!, doc.source_key!)}`;
+          const targetOp = currentState.seen.get(key);
+          if (targetOp) {
+            // Will convert to MOVE, so don't count as PUT
+            isPersistentPut = false;
+
+            this.updates.push({
+              updateOne: {
+                filter: {
+                  _id: doc._id
+                },
+                update: {
+                  $set: {
+                    op: 'MOVE',
+                    target_op: targetOp
+                  },
+                  $unset: {
+                    source_table: 1,
+                    source_key: 1,
+                    table: 1,
+                    row_id: 1,
+                    data: 1
+                  }
+                }
+              }
+            });
+          } else {
+            if (currentState.trackingSize >= idLimitBytes) {
+              // Reached memory limit.
+              // Keep the highest seen values in this case.
+            } else {
+              // flatstr reduces the memory usage by flattening the string
+              currentState.seen.set(flatstr(key), doc._id.o);
+              // length + 16 for the string
+              // 24 for the bigint
+              // 50 for map overhead
+              // 50 for additional overhead
+              currentState.trackingSize += key.length + 140;
+            }
+          }
+        }
+
+        if (isPersistentPut) {
+          currentState.lastNotPut = null;
+          currentState.opsSincePut = 0;
+        } else if (doc.op != 'CLEAR') {
+          if (currentState.lastNotPut == null) {
+            currentState.lastNotPut = doc._id.o;
+          }
+          currentState.opsSincePut += 1;
+        }
+
+        if (this.updates.length >= this.moveBatchLimit) {
+          await this.flush();
+        }
+      }
+    }
+
+    await this.flush();
+    currentState?.seen.clear();
+    if (currentState?.lastNotPut != null && currentState?.opsSincePut > 1) {
+      logger.info(
+        `Inserting CLEAR at ${this.group_id}:${currentState.bucket}:${currentState.lastNotPut} to remove ${currentState.opsSincePut} operations`
+      );
+      const bucket = currentState.bucket;
+      const clearOp = currentState.lastNotPut;
+      // Free memory before clearing bucket
+      currentState = null;
+      await this.clearBucket(bucket, clearOp);
+    }
+  }
+
+  private async flush() {
+    if (this.updates.length > 0) {
+      logger.info(`Compacting ${this.updates.length} ops`);
+      await safeBulkWrite(this.db.bucket_data, this.updates, {
+        // Order is not important.
+        // Since checksums are not affected, these operations can happen in any order,
+        // and it's fine if the operations are partially applied.
+        // Each individual operation is atomic.
+        ordered: false
+      });
+      this.updates = [];
+    }
+  }
+
+  /**
+   * Perform a CLEAR compact for a bucket.
+   *
+   * @param bucket bucket name
+   * @param op op_id of the last non-PUT operation, which will be converted to CLEAR.
+   */
+  private async clearBucket(bucket: string, op: bigint) {
+    const opFilter = {
+      _id: {
+        $gte: {
+          g: this.group_id,
+          b: bucket,
+          o: new MinKey() as any
+        },
+        $lte: {
+          g: this.group_id,
+          b: bucket,
+          o: op
+        }
+      }
+    };
+
+    const session = this.db.client.startSession();
+    try {
+      let done = false;
+      while (!done) {
+        // Do the CLEAR operation in batches, with each batch a separate transaction.
+        // The state after each batch is fully consistent.
+        // We need a transaction per batch to make sure checksums stay consistent.
+        await session.withTransaction(
+          async () => {
+            const query = this.db.bucket_data.find(opFilter, {
+              session,
+              sort: { _id: 1 },
+              projection: {
+                _id: 1,
+                op: 1,
+                checksum: 1,
+                target_op: 1
+              },
+              limit: this.clearBatchLimit
+            });
+            let checksum = 0;
+            let lastOpId: BucketDataKey | null = null;
+            let targetOp: bigint | null = null;
+            let gotAnOp = false;
+            for await (let op of query.stream()) {
+              if (op.op == 'MOVE' || op.op == 'REMOVE' || op.op == 'CLEAR') {
+                checksum = utils.addChecksums(checksum, op.checksum);
+                lastOpId = op._id;
+                if (op.op != 'CLEAR') {
+                  gotAnOp = true;
+                }
+                if (op.target_op != null) {
+                  if (targetOp == null || op.target_op > targetOp) {
+                    targetOp = op.target_op;
+                  }
+                }
+              } else {
+                throw new Error(`Unexpected ${op.op} operation at ${op._id.g}:${op._id.b}:${op._id.o}`);
+              }
+            }
+            if (!gotAnOp) {
+              done = true;
+              return;
+            }
+
+            logger.info(`Flushing CLEAR at ${lastOpId?.o}`);
+            await this.db.bucket_data.deleteMany(
+              {
+                _id: {
+                  $gte: {
+                    g: this.group_id,
+                    b: bucket,
+                    o: new MinKey() as any
+                  },
+                  $lte: lastOpId!
+                }
+              },
+              { session }
+            );
+
+            await this.db.bucket_data.insertOne(
+              {
+                _id: lastOpId!,
+                op: 'CLEAR',
+                checksum: checksum,
+                data: null,
+                target_op: targetOp
+              },
+              { session }
+            );
+          },
+          {
+            writeConcern: { w: 'majority' },
+            readConcern: { level: 'snapshot' }
+          }
+        );
+      }
+    } finally {
+      await session.endSession();
+    }
+  }
+}
+
+/**
+ * Flattens string to reduce memory usage (around 320 bytes -> 120 bytes),
+ * at the cost of some upfront CPU usage.
+ *
+ * From: https://github.com/davidmarkclements/flatstr/issues/8
+ */
+function flatstr(s: string) {
+  s.match(/\n/g);
+  return s;
+}
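The compactor above walks `bucket_data` newest-to-oldest: the first PUT/REMOVE seen for a row key is kept, and any older operation for the same key is rewritten as a MOVE. A minimal in-memory sketch of that rule follows, for illustration only; it is not part of the published diff, and `Op` and `compactToMoves` are hypothetical names.

```ts
// Illustrative sketch only: the core MOVE rule applied to an in-memory array
// of operations ordered by op_id (ascending). Not the package's actual API.
interface Op {
  opId: bigint;
  rowKey: string; // e.g. `${table}/${row_id}/${source key}`
  op: 'PUT' | 'REMOVE' | 'MOVE' | 'CLEAR';
}

function compactToMoves(ops: Op[]): void {
  // rowKey -> newest op_id that keeps its PUT/REMOVE
  const seen = new Map<string, bigint>();
  // Walk newest-to-oldest, mirroring the reverse _id sort in MongoCompactor.
  for (let i = ops.length - 1; i >= 0; i--) {
    const op = ops[i];
    if (op.op != 'PUT' && op.op != 'REMOVE') continue;
    if (seen.has(op.rowKey)) {
      // A newer operation for the same row supersedes this one: downgrade to MOVE.
      op.op = 'MOVE';
    } else {
      seen.set(op.rowKey, op.opId);
    }
  }
}
```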
@@ -0,0 +1,24 @@
+/**
+ * Manages op_id or similar sequence in memory.
+ *
+ * This is typically used within a transaction, with the last value persisted
+ * at the end of the transaction.
+ */
+export class MongoIdSequence {
+  private _last: bigint;
+
+  constructor(last: bigint) {
+    if (typeof last != 'bigint') {
+      throw new Error(`BigInt required, got ${last} ${typeof last}`);
+    }
+    this._last = last;
+  }
+
+  next() {
+    return ++this._last;
+  }
+
+  last() {
+    return this._last;
+  }
+}
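For context, `MongoIdSequence` above is a plain in-memory counter. A hypothetical usage sketch (not taken from the package): seed it from the last persisted op_id, allocate inside a transaction, and persist `last()` when the transaction commits.

```ts
import { MongoIdSequence } from './MongoIdSequence.js'; // illustrative import path

// Seed from the last persisted op_id (1000n here is an arbitrary example value).
const seq = new MongoIdSequence(1000n);
const a = seq.next(); // 1001n
const b = seq.next(); // 1002n
console.log(a, b, seq.last()); // 1001n 1002n 1002n
// ...persist seq.last() as part of committing the surrounding transaction.
```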
@@ -0,0 +1,16 @@
+import { SqlSyncRules } from '@powersync/service-sync-rules';
+
+import { storage } from '@powersync/service-core';
+
+export class MongoPersistedSyncRules implements storage.PersistedSyncRules {
+  public readonly slot_name: string;
+
+  constructor(
+    public readonly id: number,
+    public readonly sync_rules: SqlSyncRules,
+    public readonly checkpoint_lsn: string | null,
+    slot_name: string | null
+  ) {
+    this.slot_name = slot_name ?? `powersync_${id}`;
+  }
+}
@@ -0,0 +1,49 @@
+import { storage } from '@powersync/service-core';
+import { SqlSyncRules } from '@powersync/service-sync-rules';
+import * as mongo from 'mongodb';
+import { MongoPersistedSyncRules } from './MongoPersistedSyncRules.js';
+import { MongoSyncRulesLock } from './MongoSyncRulesLock.js';
+import { PowerSyncMongo } from './db.js';
+import { SyncRuleDocument } from './models.js';
+
+export class MongoPersistedSyncRulesContent implements storage.PersistedSyncRulesContent {
+  public readonly slot_name: string;
+
+  public readonly id: number;
+  public readonly sync_rules_content: string;
+  public readonly last_checkpoint_lsn: string | null;
+  public readonly last_fatal_error: string | null;
+  public readonly last_keepalive_ts: Date | null;
+  public readonly last_checkpoint_ts: Date | null;
+
+  public current_lock: MongoSyncRulesLock | null = null;
+
+  constructor(
+    private db: PowerSyncMongo,
+    doc: mongo.WithId<SyncRuleDocument>
+  ) {
+    this.id = doc._id;
+    this.sync_rules_content = doc.content;
+    this.last_checkpoint_lsn = doc.last_checkpoint_lsn;
+    // Handle legacy values
+    this.slot_name = doc.slot_name ?? `powersync_${this.id}`;
+    this.last_fatal_error = doc.last_fatal_error;
+    this.last_checkpoint_ts = doc.last_checkpoint_ts;
+    this.last_keepalive_ts = doc.last_keepalive_ts;
+  }
+
+  parsed(options: storage.ParseSyncRulesOptions) {
+    return new MongoPersistedSyncRules(
+      this.id,
+      SqlSyncRules.fromYaml(this.sync_rules_content, options),
+      this.last_checkpoint_lsn,
+      this.slot_name
+    );
+  }
+
+  async lock() {
+    const lock = await MongoSyncRulesLock.createLock(this.db, this);
+    this.current_lock = lock;
+    return lock;
+  }
+}
@@ -0,0 +1,42 @@
+import { logger } from '@powersync/lib-services-framework';
+import { storage } from '@powersync/service-core';
+import { configFile } from '@powersync/service-types';
+import * as db from '../../db/db-index.js';
+import { MongoBucketStorage } from '../MongoBucketStorage.js';
+import { PowerSyncMongo } from './db.js';
+
+export const MONGO_STORAGE_TYPE = 'mongodb';
+export class MongoStorageProvider implements storage.BucketStorageProvider {
+  get type() {
+    return MONGO_STORAGE_TYPE;
+  }
+
+  async getStorage(options: storage.GetStorageOptions): Promise<storage.ActiveStorage> {
+    const { resolvedConfig } = options;
+
+    const { storage } = resolvedConfig;
+    if (storage.type != MONGO_STORAGE_TYPE) {
+      // This should not be reached since the generation should be managed externally.
+      throw new Error(
+        `Cannot create MongoDB bucket storage with provided config ${storage.type} !== ${MONGO_STORAGE_TYPE}`
+      );
+    }
+
+    const decodedConfig = configFile.MongoStorageConfig.decode(storage as any);
+    const client = db.mongo.createMongoClient(decodedConfig);
+
+    const database = new PowerSyncMongo(client, { database: resolvedConfig.storage.database });
+
+    return {
+      storage: new MongoBucketStorage(database, {
+        // TODO currently need the entire resolved config due to this
+        slot_name_prefix: resolvedConfig.slot_name_prefix
+      }),
+      shutDown: () => client.close(),
+      tearDown: () => {
+        logger.info(`Tearing down storage: ${database.db.namespace}...`);
+        return database.db.dropDatabase();
+      }
+    } satisfies storage.ActiveStorage;
+  }
+}