@powersync/service-module-mongodb-storage 0.3.0 → 0.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +17 -0
- package/dist/migrations/db/migrations/1702295701188-sync-rule-state.js +2 -1
- package/dist/migrations/db/migrations/1702295701188-sync-rule-state.js.map +1 -1
- package/dist/storage/MongoBucketStorage.js +2 -2
- package/dist/storage/MongoBucketStorage.js.map +1 -1
- package/dist/storage/implementation/MongoBucketBatch.js +5 -5
- package/dist/storage/implementation/MongoBucketBatch.js.map +1 -1
- package/dist/storage/implementation/MongoCompactor.js +2 -2
- package/dist/storage/implementation/MongoCompactor.js.map +1 -1
- package/dist/storage/implementation/MongoIdSequence.js +2 -1
- package/dist/storage/implementation/MongoIdSequence.js.map +1 -1
- package/dist/storage/implementation/MongoStorageProvider.js +2 -2
- package/dist/storage/implementation/MongoStorageProvider.js.map +1 -1
- package/dist/storage/implementation/MongoSyncBucketStorage.js +3 -3
- package/dist/storage/implementation/MongoSyncBucketStorage.js.map +1 -1
- package/dist/storage/implementation/MongoSyncRulesLock.js +2 -2
- package/dist/storage/implementation/MongoSyncRulesLock.js.map +1 -1
- package/dist/types/types.d.ts +2 -0
- package/package.json +7 -7
- package/src/migrations/db/migrations/1702295701188-sync-rule-state.ts +2 -1
- package/src/storage/MongoBucketStorage.ts +2 -2
- package/src/storage/implementation/MongoBucketBatch.ts +14 -5
- package/src/storage/implementation/MongoCompactor.ts +4 -2
- package/src/storage/implementation/MongoIdSequence.ts +3 -1
- package/src/storage/implementation/MongoStorageProvider.ts +4 -2
- package/src/storage/implementation/MongoSyncBucketStorage.ts +3 -3
- package/src/storage/implementation/MongoSyncRulesLock.ts +5 -2
- package/test/src/storage_compacting.test.ts +6 -1
- package/tsconfig.tsbuildinfo +1 -1
|
@@ -5,7 +5,7 @@ import * as timers from 'timers/promises';
|
|
|
5
5
|
|
|
6
6
|
import { storage, sync, utils } from '@powersync/service-core';
|
|
7
7
|
|
|
8
|
-
import { DisposableObserver, logger } from '@powersync/lib-services-framework';
|
|
8
|
+
import { DisposableObserver, ErrorCode, logger, ServiceError } from '@powersync/lib-services-framework';
|
|
9
9
|
import { v4 as uuid } from 'uuid';
|
|
10
10
|
|
|
11
11
|
import * as lib_mongo from '@powersync/lib-service-mongodb';
|
|
@@ -433,7 +433,7 @@ export class MongoBucketStorage
|
|
|
433
433
|
clusterTime = time;
|
|
434
434
|
});
|
|
435
435
|
if (clusterTime == null) {
|
|
436
|
-
throw new Error('Could not get clusterTime');
|
|
436
|
+
throw new ServiceError(ErrorCode.PSYNC_S2401, 'Could not get clusterTime');
|
|
437
437
|
}
|
|
438
438
|
|
|
439
439
|
if (signal.aborted) {
|
|
@@ -2,7 +2,16 @@ import { mongo } from '@powersync/lib-service-mongodb';
|
|
|
2
2
|
import { SqlEventDescriptor, SqliteRow, SqlSyncRules } from '@powersync/service-sync-rules';
|
|
3
3
|
import * as bson from 'bson';
|
|
4
4
|
|
|
5
|
-
import {
|
|
5
|
+
import {
|
|
6
|
+
container,
|
|
7
|
+
DisposableObserver,
|
|
8
|
+
ErrorCode,
|
|
9
|
+
errors,
|
|
10
|
+
logger,
|
|
11
|
+
ReplicationAssertionError,
|
|
12
|
+
ServiceAssertionError,
|
|
13
|
+
ServiceError
|
|
14
|
+
} from '@powersync/lib-services-framework';
|
|
6
15
|
import { SaveOperationTag, storage, utils } from '@powersync/service-core';
|
|
7
16
|
import * as timers from 'node:timers/promises';
|
|
8
17
|
import { PowerSyncMongo } from './db.js';
|
|
@@ -140,7 +149,7 @@ export class MongoBucketBatch
|
|
|
140
149
|
this.batch = resumeBatch;
|
|
141
150
|
|
|
142
151
|
if (last_op == null) {
|
|
143
|
-
throw new Error('Unexpected last_op == null');
|
|
152
|
+
throw new ReplicationAssertionError('Unexpected last_op == null');
|
|
144
153
|
}
|
|
145
154
|
|
|
146
155
|
this.persisted_op = last_op;
|
|
@@ -294,7 +303,7 @@ export class MongoBucketBatch
|
|
|
294
303
|
return null;
|
|
295
304
|
}
|
|
296
305
|
} else {
|
|
297
|
-
throw new Error(`${record.tag} not supported with skipExistingRows: true`);
|
|
306
|
+
throw new ReplicationAssertionError(`${record.tag} not supported with skipExistingRows: true`);
|
|
298
307
|
}
|
|
299
308
|
}
|
|
300
309
|
|
|
@@ -348,7 +357,7 @@ export class MongoBucketBatch
|
|
|
348
357
|
afterData = new bson.Binary(bson.serialize(after!));
|
|
349
358
|
// We additionally make sure it's <= 15MB - we need some margin for metadata.
|
|
350
359
|
if (afterData.length() > MAX_ROW_SIZE) {
|
|
351
|
-
throw new Error(`Row too large: ${afterData.length()}`);
|
|
360
|
+
throw new ServiceError(ErrorCode.PSYNC_S1002, `Row too large: ${afterData.length()}`);
|
|
352
361
|
}
|
|
353
362
|
} catch (e) {
|
|
354
363
|
// Replace with empty values, equivalent to TOAST values
|
|
@@ -548,7 +557,7 @@ export class MongoBucketBatch
|
|
|
548
557
|
logger.info(`${this.slot_name} ${description} - try ${flushTry}`);
|
|
549
558
|
}
|
|
550
559
|
if (flushTry > 20 && Date.now() > lastTry) {
|
|
551
|
-
throw new Error('Max transaction tries exceeded');
|
|
560
|
+
throw new ServiceError(ErrorCode.PSYNC_S1402, 'Max transaction tries exceeded');
|
|
552
561
|
}
|
|
553
562
|
|
|
554
563
|
const next_op_id_doc = await this.db.op_id_sequence.findOneAndUpdate(
|
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import { mongo } from '@powersync/lib-service-mongodb';
|
|
2
|
-
import { logger } from '@powersync/lib-services-framework';
|
|
2
|
+
import { logger, ReplicationAssertionError } from '@powersync/lib-services-framework';
|
|
3
3
|
import { storage, utils } from '@powersync/service-core';
|
|
4
4
|
|
|
5
5
|
import { PowerSyncMongo } from './db.js';
|
|
@@ -335,7 +335,9 @@ export class MongoCompactor {
|
|
|
335
335
|
}
|
|
336
336
|
}
|
|
337
337
|
} else {
|
|
338
|
-
throw new Error(`Unexpected ${op.op} operation at ${op._id.g}:${op._id.b}:${op._id.o}`);
|
|
338
|
+
throw new ReplicationAssertionError(
|
|
339
|
+
`Unexpected ${op.op} operation at ${op._id.g}:${op._id.b}:${op._id.o}`
|
|
340
|
+
);
|
|
339
341
|
}
|
|
340
342
|
}
|
|
341
343
|
if (!gotAnOp) {
|
|
@@ -1,3 +1,5 @@
|
|
|
1
|
+
import { ReplicationAssertionError } from '@powersync/lib-services-framework';
|
|
2
|
+
|
|
1
3
|
/**
|
|
2
4
|
* Manages op_id or similar sequence in memory.
|
|
3
5
|
*
|
|
@@ -9,7 +11,7 @@ export class MongoIdSequence {
|
|
|
9
11
|
|
|
10
12
|
constructor(last: bigint) {
|
|
11
13
|
if (typeof last != 'bigint') {
|
|
12
|
-
throw new Error(`BigInt required, got ${last} ${typeof last}`);
|
|
14
|
+
throw new ReplicationAssertionError(`BigInt required, got ${last} ${typeof last}`);
|
|
13
15
|
}
|
|
14
16
|
this._last = last;
|
|
15
17
|
}
|
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import * as lib_mongo from '@powersync/lib-service-mongodb';
|
|
2
|
-
import { logger } from '@powersync/lib-services-framework';
|
|
2
|
+
import { logger, ServiceAssertionError } from '@powersync/lib-services-framework';
|
|
3
3
|
import { storage } from '@powersync/service-core';
|
|
4
4
|
import { MongoStorageConfig } from '../../types/types.js';
|
|
5
5
|
import { MongoBucketStorage } from '../MongoBucketStorage.js';
|
|
@@ -16,7 +16,9 @@ export class MongoStorageProvider implements storage.BucketStorageProvider {
|
|
|
16
16
|
const { storage } = resolvedConfig;
|
|
17
17
|
if (storage.type != this.type) {
|
|
18
18
|
// This should not be reached since the generation should be managed externally.
|
|
19
|
-
throw new Error(`Cannot create MongoDB bucket storage with provided config ${storage.type} !== ${this.type}`);
|
|
19
|
+
throw new ServiceAssertionError(
|
|
20
|
+
`Cannot create MongoDB bucket storage with provided config ${storage.type} !== ${this.type}`
|
|
21
|
+
);
|
|
20
22
|
}
|
|
21
23
|
|
|
22
24
|
const decodedConfig = MongoStorageConfig.decode(storage as any);
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import * as lib_mongo from '@powersync/lib-service-mongodb';
|
|
2
2
|
import { mongo } from '@powersync/lib-service-mongodb';
|
|
3
|
-
import { DisposableObserver, logger } from '@powersync/lib-services-framework';
|
|
3
|
+
import { DisposableObserver, logger, ServiceAssertionError } from '@powersync/lib-services-framework';
|
|
4
4
|
import { storage, utils } from '@powersync/service-core';
|
|
5
5
|
import { SqliteJsonRow, SqliteJsonValue, SqlSyncRules } from '@powersync/service-sync-rules';
|
|
6
6
|
import * as bson from 'bson';
|
|
@@ -344,7 +344,7 @@ export class MongoSyncBucketStorage
|
|
|
344
344
|
|
|
345
345
|
start ??= dataBuckets.get(bucket);
|
|
346
346
|
if (start == null) {
|
|
347
|
-
throw new Error(`data for unexpected bucket: ${bucket}`);
|
|
347
|
+
throw new ServiceAssertionError(`data for unexpected bucket: ${bucket}`);
|
|
348
348
|
}
|
|
349
349
|
currentBatch = {
|
|
350
350
|
bucket,
|
|
@@ -479,7 +479,7 @@ export class MongoSyncBucketStorage
|
|
|
479
479
|
}
|
|
480
480
|
);
|
|
481
481
|
if (doc == null) {
|
|
482
|
-
throw new Error('Cannot find sync rules status');
|
|
482
|
+
throw new ServiceAssertionError('Cannot find sync rules status');
|
|
483
483
|
}
|
|
484
484
|
|
|
485
485
|
return {
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import crypto from 'crypto';
|
|
2
2
|
|
|
3
|
-
import { logger } from '@powersync/lib-services-framework';
|
|
3
|
+
import { ErrorCode, logger, ServiceError } from '@powersync/lib-services-framework';
|
|
4
4
|
import { storage } from '@powersync/service-core';
|
|
5
5
|
import { PowerSyncMongo } from './db.js';
|
|
6
6
|
|
|
@@ -33,7 +33,10 @@ export class MongoSyncRulesLock implements storage.ReplicationLock {
|
|
|
33
33
|
);
|
|
34
34
|
|
|
35
35
|
if (doc == null) {
|
|
36
|
-
throw new Error(`Sync rules: ${sync_rules.id} have been locked by another process for replication.`);
|
|
36
|
+
throw new ServiceError(
|
|
37
|
+
ErrorCode.PSYNC_S1003,
|
|
38
|
+
`Sync rules: ${sync_rules.id} have been locked by another process for replication.`
|
|
39
|
+
);
|
|
37
40
|
}
|
|
38
41
|
return new MongoSyncRulesLock(db, sync_rules.id, lockId);
|
|
39
42
|
}
|
|
@@ -3,4 +3,9 @@ import { register } from '@powersync/service-core-tests';
|
|
|
3
3
|
import { describe } from 'vitest';
|
|
4
4
|
import { INITIALIZED_MONGO_STORAGE_FACTORY } from './util.js';
|
|
5
5
|
|
|
6
|
-
describe('Mongo Sync Bucket Storage Compact', () =>
|
|
6
|
+
describe('Mongo Sync Bucket Storage Compact', () =>
|
|
7
|
+
register.registerCompactTests<MongoCompactOptions>(INITIALIZED_MONGO_STORAGE_FACTORY, {
|
|
8
|
+
clearBatchLimit: 2,
|
|
9
|
+
moveBatchLimit: 1,
|
|
10
|
+
moveBatchQueryLimit: 1
|
|
11
|
+
}));
|