@powersync/service-module-mongodb-storage 0.2.0 → 0.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +32 -0
- package/dist/migrations/db/migrations/1702295701188-sync-rule-state.js +2 -1
- package/dist/migrations/db/migrations/1702295701188-sync-rule-state.js.map +1 -1
- package/dist/storage/MongoBucketStorage.d.ts +1 -0
- package/dist/storage/MongoBucketStorage.js +5 -3
- package/dist/storage/MongoBucketStorage.js.map +1 -1
- package/dist/storage/implementation/MongoBucketBatch.js +5 -5
- package/dist/storage/implementation/MongoBucketBatch.js.map +1 -1
- package/dist/storage/implementation/MongoCompactor.js +2 -2
- package/dist/storage/implementation/MongoCompactor.js.map +1 -1
- package/dist/storage/implementation/MongoIdSequence.js +2 -1
- package/dist/storage/implementation/MongoIdSequence.js.map +1 -1
- package/dist/storage/implementation/MongoStorageProvider.js +11 -7
- package/dist/storage/implementation/MongoStorageProvider.js.map +1 -1
- package/dist/storage/implementation/MongoSyncBucketStorage.js +3 -3
- package/dist/storage/implementation/MongoSyncBucketStorage.js.map +1 -1
- package/dist/storage/implementation/MongoSyncRulesLock.js +2 -2
- package/dist/storage/implementation/MongoSyncRulesLock.js.map +1 -1
- package/dist/types/types.d.ts +2 -0
- package/package.json +7 -7
- package/src/migrations/db/migrations/1702295701188-sync-rule-state.ts +2 -1
- package/src/storage/MongoBucketStorage.ts +6 -3
- package/src/storage/implementation/MongoBucketBatch.ts +14 -5
- package/src/storage/implementation/MongoCompactor.ts +4 -2
- package/src/storage/implementation/MongoIdSequence.ts +3 -1
- package/src/storage/implementation/MongoStorageProvider.ts +13 -8
- package/src/storage/implementation/MongoSyncBucketStorage.ts +3 -3
- package/src/storage/implementation/MongoSyncRulesLock.ts +5 -2
- package/test/src/__snapshots__/storage.test.ts.snap +9 -0
- package/test/src/migrations.test.ts +10 -0
- package/test/src/setup.ts +5 -1
- package/test/src/storage_compacting.test.ts +6 -1
- package/tsconfig.tsbuildinfo +1 -1
package/package.json
CHANGED
@@ -2,7 +2,7 @@
   "name": "@powersync/service-module-mongodb-storage",
   "repository": "https://github.com/powersync-ja/powersync-service",
   "types": "dist/index.d.ts",
-  "version": "0.2.0",
+  "version": "0.3.1",
   "main": "dist/index.js",
   "license": "FSL-1.1-Apache-2.0",
   "type": "module",
@@ -27,16 +27,16 @@
     "ix": "^5.0.0",
     "lru-cache": "^10.2.2",
     "uuid": "^9.0.1",
-    "@powersync/lib-services-framework": "0.
-    "@powersync/service-core": "0.
+    "@powersync/lib-services-framework": "0.5.0",
+    "@powersync/service-core": "0.16.0",
     "@powersync/service-jsonbig": "0.17.10",
-    "@powersync/service-sync-rules": "0.23.
-    "@powersync/service-types": "0.7.
-    "@powersync/lib-service-mongodb": "0.
+    "@powersync/service-sync-rules": "0.23.2",
+    "@powersync/service-types": "0.7.1",
+    "@powersync/lib-service-mongodb": "0.4.0"
   },
   "devDependencies": {
     "@types/uuid": "^9.0.4",
-    "@powersync/service-core-tests": "0.
+    "@powersync/service-core-tests": "0.3.1"
   },
   "scripts": {
     "build": "tsc -b",
package/src/migrations/db/migrations/1702295701188-sync-rule-state.ts
CHANGED

@@ -2,6 +2,7 @@ import * as lib_mongo from '@powersync/lib-service-mongodb';
 import { storage as core_storage, migrations } from '@powersync/service-core';
 import * as storage from '../../../storage/storage-index.js';
 import { MongoStorageConfig } from '../../../types/types.js';
+import { ServiceAssertionError } from '@powersync/lib-services-framework';
 
 interface LegacySyncRulesDocument extends storage.SyncRuleDocument {
   /**
@@ -65,7 +66,7 @@ export const up: migrations.PowerSyncMigrationFunction = async (context) => {
     const remaining = await db.sync_rules.find({ state: null as any }).toArray();
     if (remaining.length > 0) {
       const slots = remaining.map((doc) => doc.slot_name).join(', ');
-      throw new
+      throw new ServiceAssertionError(`Invalid state for sync rules: ${slots}`);
     }
   } finally {
     await db.client.close();
package/src/storage/MongoBucketStorage.ts
CHANGED

@@ -5,7 +5,7 @@ import * as timers from 'timers/promises';
 
 import { storage, sync, utils } from '@powersync/service-core';
 
-import { DisposableObserver, logger } from '@powersync/lib-services-framework';
+import { DisposableObserver, ErrorCode, logger, ServiceError } from '@powersync/lib-services-framework';
 import { v4 as uuid } from 'uuid';
 
 import * as lib_mongo from '@powersync/lib-service-mongodb';
@@ -62,6 +62,10 @@ export class MongoBucketStorage
     this.slot_name_prefix = options.slot_name_prefix;
   }
 
+  async [Symbol.asyncDispose]() {
+    super[Symbol.dispose]();
+  }
+
   getInstance(options: storage.PersistedSyncRulesContent): MongoSyncBucketStorage {
     let { id, slot_name } = options;
     if ((typeof id as any) == 'bigint') {
@@ -106,7 +110,6 @@ export class MongoBucketStorage
 
     // In both the below cases, we create a new sync rules instance.
     // The current one will continue erroring until the next one has finished processing.
-    // TODO: Update
     if (next != null && next.slot_name == slot_name) {
       // We need to redo the "next" sync rules
       await this.updateSyncRules({
@@ -430,7 +433,7 @@ export class MongoBucketStorage
       clusterTime = time;
     });
     if (clusterTime == null) {
-      throw new
+      throw new ServiceError(ErrorCode.PSYNC_S2401, 'Could not get clusterTime');
     }
 
     if (signal.aborted) {
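
MongoBucketStorage now implements `Symbol.asyncDispose`, delegating to the synchronous dispose inherited from `DisposableObserver`. A minimal sketch of releasing such a factory, assuming the ESNext.Disposable lib types (`AsyncDisposable`) are available; the helper below is illustrative and not part of the package:

// Sketch only: run work against an async-disposable factory, then release it.
async function withFactory<T>(factory: AsyncDisposable, run: () => Promise<T>): Promise<T> {
  try {
    return await run();
  } finally {
    // Same call the provider's new shutDown hook makes (see MongoStorageProvider below).
    await factory[Symbol.asyncDispose]();
  }
}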
package/src/storage/implementation/MongoBucketBatch.ts
CHANGED

@@ -2,7 +2,16 @@ import { mongo } from '@powersync/lib-service-mongodb';
 import { SqlEventDescriptor, SqliteRow, SqlSyncRules } from '@powersync/service-sync-rules';
 import * as bson from 'bson';
 
-import {
+import {
+  container,
+  DisposableObserver,
+  ErrorCode,
+  errors,
+  logger,
+  ReplicationAssertionError,
+  ServiceAssertionError,
+  ServiceError
+} from '@powersync/lib-services-framework';
 import { SaveOperationTag, storage, utils } from '@powersync/service-core';
 import * as timers from 'node:timers/promises';
 import { PowerSyncMongo } from './db.js';
@@ -140,7 +149,7 @@ export class MongoBucketBatch
       this.batch = resumeBatch;
 
       if (last_op == null) {
-        throw new
+        throw new ReplicationAssertionError('Unexpected last_op == null');
       }
 
       this.persisted_op = last_op;
@@ -294,7 +303,7 @@ export class MongoBucketBatch
         return null;
       }
     } else {
-      throw new
+      throw new ReplicationAssertionError(`${record.tag} not supported with skipExistingRows: true`);
     }
   }
 
@@ -348,7 +357,7 @@ export class MongoBucketBatch
       afterData = new bson.Binary(bson.serialize(after!));
       // We additionally make sure it's <= 15MB - we need some margin for metadata.
       if (afterData.length() > MAX_ROW_SIZE) {
-        throw new
+        throw new ServiceError(ErrorCode.PSYNC_S1002, `Row too large: ${afterData.length()}`);
       }
     } catch (e) {
       // Replace with empty values, equivalent to TOAST values
@@ -548,7 +557,7 @@ export class MongoBucketBatch
       logger.info(`${this.slot_name} ${description} - try ${flushTry}`);
     }
     if (flushTry > 20 && Date.now() > lastTry) {
-      throw new
+      throw new ServiceError(ErrorCode.PSYNC_S1402, 'Max transaction tries exceeded');
     }
 
     const next_op_id_doc = await this.db.op_id_sequence.findOneAndUpdate(
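
The previously truncated `throw new` sites in this file now use the framework's typed errors: broken internal invariants raise `ReplicationAssertionError`, while expected operational limits raise `ServiceError` with a stable `ErrorCode`. A minimal sketch of the two patterns; the wrapper functions are illustrative, not part of the package:

import { ErrorCode, ReplicationAssertionError, ServiceError } from '@powersync/lib-services-framework';

// Invariant violation: should be unreachable if replication logic is correct.
function requireLastOp(lastOp: bigint | null): bigint {
  if (lastOp == null) {
    throw new ReplicationAssertionError('Unexpected last_op == null');
  }
  return lastOp;
}

// Operational limit: reported with a documented error code.
function checkRowSize(byteLength: number, maxRowSize: number): void {
  if (byteLength > maxRowSize) {
    throw new ServiceError(ErrorCode.PSYNC_S1002, `Row too large: ${byteLength}`);
  }
}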
package/src/storage/implementation/MongoCompactor.ts
CHANGED

@@ -1,5 +1,5 @@
 import { mongo } from '@powersync/lib-service-mongodb';
-import { logger } from '@powersync/lib-services-framework';
+import { logger, ReplicationAssertionError } from '@powersync/lib-services-framework';
 import { storage, utils } from '@powersync/service-core';
 
 import { PowerSyncMongo } from './db.js';
@@ -335,7 +335,9 @@ export class MongoCompactor {
         }
       }
     } else {
-      throw new
+      throw new ReplicationAssertionError(
+        `Unexpected ${op.op} operation at ${op._id.g}:${op._id.b}:${op._id.o}`
+      );
     }
   }
   if (!gotAnOp) {
package/src/storage/implementation/MongoIdSequence.ts
CHANGED

@@ -1,3 +1,5 @@
+import { ReplicationAssertionError } from '@powersync/lib-services-framework';
+
 /**
  * Manages op_id or similar sequence in memory.
  *
@@ -9,7 +11,7 @@ export class MongoIdSequence {
 
   constructor(last: bigint) {
     if (typeof last != 'bigint') {
-      throw new
+      throw new ReplicationAssertionError(`BigInt required, got ${last} ${typeof last}`);
     }
     this._last = last;
   }
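
The constructor guard now raises `ReplicationAssertionError` when the sequence is seeded with anything other than a `bigint`. A small usage sketch; the import path is an assumption based on this package's dist layout:

// Illustrative deep import; the real consumer path may differ.
import { MongoIdSequence } from '@powersync/service-module-mongodb-storage/dist/storage/implementation/MongoIdSequence.js';

const seq = new MongoIdSequence(0n); // ok: seeded with a bigint
// new MongoIdSequence(0 as any);    // throws ReplicationAssertionError: BigInt required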
package/src/storage/implementation/MongoStorageProvider.ts
CHANGED

@@ -1,5 +1,5 @@
 import * as lib_mongo from '@powersync/lib-service-mongodb';
-import { logger } from '@powersync/lib-services-framework';
+import { logger, ServiceAssertionError } from '@powersync/lib-services-framework';
 import { storage } from '@powersync/service-core';
 import { MongoStorageConfig } from '../../types/types.js';
 import { MongoBucketStorage } from '../MongoBucketStorage.js';
@@ -16,20 +16,25 @@ export class MongoStorageProvider implements storage.BucketStorageProvider {
     const { storage } = resolvedConfig;
     if (storage.type != this.type) {
       // This should not be reached since the generation should be managed externally.
-      throw new
+      throw new ServiceAssertionError(
+        `Cannot create MongoDB bucket storage with provided config ${storage.type} !== ${this.type}`
+      );
     }
 
     const decodedConfig = MongoStorageConfig.decode(storage as any);
     const client = lib_mongo.db.createMongoClient(decodedConfig);
 
     const database = new PowerSyncMongo(client, { database: resolvedConfig.storage.database });
-
+    const factory = new MongoBucketStorage(database, {
+      // TODO currently need the entire resolved config due to this
+      slot_name_prefix: resolvedConfig.slot_name_prefix
+    });
     return {
-      storage:
-
-
-
-
+      storage: factory,
+      shutDown: async () => {
+        await factory[Symbol.asyncDispose]();
+        await client.close();
+      },
       tearDown: () => {
         logger.info(`Tearing down storage: ${database.db.namespace}...`);
         return database.db.dropDatabase();
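
The provider now builds the `MongoBucketStorage` factory explicitly and returns a `shutDown` hook that disposes the factory before closing the MongoDB client, alongside the existing `tearDown`. A sketch of the resulting lifecycle from a caller's point of view; the interface shape is inferred from this diff rather than taken from the package's public typings:

// Inferred shape of the value returned by the provider after this change (assumption).
interface ActiveStorage {
  storage: unknown;
  shutDown: () => Promise<void>;
  tearDown: () => Promise<boolean>;
}

async function stopStorage(active: ActiveStorage) {
  // shutDown disposes the factory first, then closes the MongoDB client,
  // in the order shown in the diff above.
  await active.shutDown();
}

async function destroyStorage(active: ActiveStorage) {
  // tearDown drops the backing database; intended for test cleanup.
  await active.tearDown();
}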
package/src/storage/implementation/MongoSyncBucketStorage.ts
CHANGED

@@ -1,6 +1,6 @@
 import * as lib_mongo from '@powersync/lib-service-mongodb';
 import { mongo } from '@powersync/lib-service-mongodb';
-import { DisposableObserver, logger } from '@powersync/lib-services-framework';
+import { DisposableObserver, logger, ServiceAssertionError } from '@powersync/lib-services-framework';
 import { storage, utils } from '@powersync/service-core';
 import { SqliteJsonRow, SqliteJsonValue, SqlSyncRules } from '@powersync/service-sync-rules';
 import * as bson from 'bson';
@@ -344,7 +344,7 @@ export class MongoSyncBucketStorage
 
       start ??= dataBuckets.get(bucket);
       if (start == null) {
-        throw new
+        throw new ServiceAssertionError(`data for unexpected bucket: ${bucket}`);
       }
       currentBatch = {
         bucket,
@@ -479,7 +479,7 @@ export class MongoSyncBucketStorage
       }
     );
     if (doc == null) {
-      throw new
+      throw new ServiceAssertionError('Cannot find sync rules status');
     }
 
     return {
package/src/storage/implementation/MongoSyncRulesLock.ts
CHANGED

@@ -1,6 +1,6 @@
 import crypto from 'crypto';
 
-import { logger } from '@powersync/lib-services-framework';
+import { ErrorCode, logger, ServiceError } from '@powersync/lib-services-framework';
 import { storage } from '@powersync/service-core';
 import { PowerSyncMongo } from './db.js';
 
@@ -33,7 +33,10 @@ export class MongoSyncRulesLock implements storage.ReplicationLock {
     );
 
     if (doc == null) {
-      throw new
+      throw new ServiceError(
+        ErrorCode.PSYNC_S1003,
+        `Sync rules: ${sync_rules.id} have been locked by another process for replication.`
+      );
     }
     return new MongoSyncRulesLock(db, sync_rules.id, lockId);
   }
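
Failing to acquire the replication lock now raises a `ServiceError` with `ErrorCode.PSYNC_S1003` instead of an untyped error. A hedged sketch of retrying around that failure; `acquireLock` stands in for the real lock-acquisition call, and no error-code inspection is shown because the error's public shape is not part of this diff:

import { ServiceError } from '@powersync/lib-services-framework';

// Illustrative retry loop around a lock-acquisition function (assumption).
async function acquireWithRetry<T>(acquireLock: () => Promise<T>, attempts = 5): Promise<T> {
  for (let attempt = 1; ; attempt++) {
    try {
      return await acquireLock();
    } catch (e) {
      // Another process may hold the sync-rules lock; back off and retry.
      if (e instanceof ServiceError && attempt < attempts) {
        await new Promise((resolve) => setTimeout(resolve, 1000 * attempt));
        continue;
      }
      throw e;
    }
  }
}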
package/test/src/migrations.test.ts
ADDED

@@ -0,0 +1,10 @@
+import { register } from '@powersync/service-core-tests';
+import { describe } from 'vitest';
+import { MongoMigrationAgent } from '../../src/migrations/MongoMigrationAgent.js';
+import { env } from './env.js';
+
+const MIGRATION_AGENT_FACTORY = () => {
+  return new MongoMigrationAgent({ type: 'mongodb', uri: env.MONGO_TEST_URL });
+};
+
+describe('Mongo Migrations Store', () => register.registerMigrationTests(MIGRATION_AGENT_FACTORY));
package/test/src/setup.ts
CHANGED
@@ -1,9 +1,13 @@
 import { container } from '@powersync/lib-services-framework';
 import { test_utils } from '@powersync/service-core-tests';
-import { beforeAll } from 'vitest';
+import { beforeAll, beforeEach } from 'vitest';
 
 beforeAll(async () => {
   // Executes for every test file
   container.registerDefaults();
   await test_utils.initMetrics();
 });
+
+beforeEach(async () => {
+  await test_utils.resetMetrics();
+});
package/test/src/storage_compacting.test.ts
CHANGED

@@ -3,4 +3,9 @@ import { register } from '@powersync/service-core-tests';
 import { describe } from 'vitest';
 import { INITIALIZED_MONGO_STORAGE_FACTORY } from './util.js';
 
-describe('Mongo Sync Bucket Storage Compact', () =>
+describe('Mongo Sync Bucket Storage Compact', () =>
+  register.registerCompactTests<MongoCompactOptions>(INITIALIZED_MONGO_STORAGE_FACTORY, {
+    clearBatchLimit: 2,
+    moveBatchLimit: 1,
+    moveBatchQueryLimit: 1
+  }));