@powersync/service-module-mongodb-storage 0.6.0 → 0.6.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +17 -0
- package/dist/storage/MongoBucketStorage.d.ts +3 -3
- package/dist/storage/MongoBucketStorage.js +26 -13
- package/dist/storage/MongoBucketStorage.js.map +1 -1
- package/dist/storage/implementation/MongoCompactor.d.ts +0 -6
- package/dist/storage/implementation/MongoCompactor.js +1 -2
- package/dist/storage/implementation/MongoCompactor.js.map +1 -1
- package/dist/storage/implementation/MongoSyncBucketStorage.js +20 -14
- package/dist/storage/implementation/MongoSyncBucketStorage.js.map +1 -1
- package/dist/storage/implementation/MongoWriteCheckpointAPI.js +1 -2
- package/dist/storage/implementation/MongoWriteCheckpointAPI.js.map +1 -1
- package/dist/storage/implementation/PersistedBatch.js +4 -5
- package/dist/storage/implementation/PersistedBatch.js.map +1 -1
- package/dist/storage/implementation/util.d.ts +0 -14
- package/dist/storage/implementation/util.js +0 -38
- package/dist/storage/implementation/util.js.map +1 -1
- package/package.json +6 -6
- package/src/storage/MongoBucketStorage.ts +31 -14
- package/src/storage/implementation/MongoCompactor.ts +2 -10
- package/src/storage/implementation/MongoSyncBucketStorage.ts +46 -35
- package/src/storage/implementation/MongoWriteCheckpointAPI.ts +1 -3
- package/src/storage/implementation/PersistedBatch.ts +4 -5
- package/src/storage/implementation/util.ts +0 -45
- package/test/src/__snapshots__/storage_sync.test.ts.snap +56 -0
- package/test/src/storage_compacting.test.ts +1 -7
- package/tsconfig.tsbuildinfo +1 -1
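A diff like the one below can presumably be reproduced locally with npm's built-in differ (npm 7+): `npm diff --diff=@powersync/service-module-mongodb-storage@0.6.0 --diff=@powersync/service-module-mongodb-storage@0.6.1`.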
package/dist/storage/implementation/util.js.map
CHANGED

@@ -1 +1 @@
(single-line sourcemap for util.js, regenerated to match the util.ts changes below; the old and new mapping blobs are omitted)
package/package.json
CHANGED

@@ -2,7 +2,7 @@
   "name": "@powersync/service-module-mongodb-storage",
   "repository": "https://github.com/powersync-ja/powersync-service",
   "types": "dist/index.d.ts",
-  "version": "0.6.0",
+  "version": "0.6.1",
   "main": "dist/index.js",
   "license": "FSL-1.1-Apache-2.0",
   "type": "module",
@@ -22,21 +22,21 @@
     }
   },
   "dependencies": {
-    "bson": "^6.
+    "bson": "^6.10.3",
     "ts-codec": "^1.3.0",
     "ix": "^5.0.0",
     "lru-cache": "^10.2.2",
     "uuid": "^9.0.1",
-    "@powersync/lib-services-framework": "0.5.
-    "@powersync/service-core": "1.7.
+    "@powersync/lib-services-framework": "0.5.3",
+    "@powersync/service-core": "1.7.1",
     "@powersync/service-jsonbig": "0.17.10",
     "@powersync/service-sync-rules": "0.24.0",
     "@powersync/service-types": "0.8.0",
-    "@powersync/lib-service-mongodb": "0.4.
+    "@powersync/lib-service-mongodb": "0.4.3"
   },
   "devDependencies": {
     "@types/uuid": "^9.0.4",
-    "@powersync/service-core-tests": "0.
+    "@powersync/service-core-tests": "0.6.0"
   },
   "scripts": {
     "build": "tsc -b",
package/src/storage/MongoBucketStorage.ts
CHANGED

@@ -1,6 +1,6 @@
 import { SqlSyncRules } from '@powersync/service-sync-rules';

-import { storage } from '@powersync/service-core';
+import { GetIntanceOptions, storage } from '@powersync/service-core';

 import { BaseObserver, ErrorCode, logger, ServiceError } from '@powersync/lib-services-framework';
 import { v4 as uuid } from 'uuid';
@@ -44,13 +44,15 @@ export class MongoBucketStorage
     // No-op
   }

-  getInstance(
-    let { id, slot_name } =
+  getInstance(syncRules: storage.PersistedSyncRulesContent, options?: GetIntanceOptions): MongoSyncBucketStorage {
+    let { id, slot_name } = syncRules;
     if ((typeof id as any) == 'bigint') {
       id = Number(id);
     }
-    const storage = new MongoSyncBucketStorage(this, id,
-
+    const storage = new MongoSyncBucketStorage(this, id, syncRules, slot_name);
+    if (!options?.skipLifecycleHooks) {
+      this.iterateListeners((cb) => cb.syncStorageCreated?.(storage));
+    }
     storage.registerListener({
       batchStarted: (batch) => {
         batch.registerListener({
@@ -95,13 +97,11 @@ export class MongoBucketStorage
     }
   }

-  async
+  async restartReplication(sync_rules_group_id: number) {
     const next = await this.getNextSyncRulesContent();
     const active = await this.getActiveSyncRulesContent();

-
-    // The current one will continue erroring until the next one has finished processing.
-    if (next != null && next.slot_name == slot_name) {
+    if (next != null && next.id == sync_rules_group_id) {
       // We need to redo the "next" sync rules
       await this.updateSyncRules({
         content: next.sync_rules_content,
@@ -119,14 +119,17 @@ export class MongoBucketStorage
          }
        }
      );
-    } else if (next == null && active?.
+    } else if (next == null && active?.id == sync_rules_group_id) {
       // Slot removed for "active" sync rules, while there is no "next" one.
       await this.updateSyncRules({
         content: active.sync_rules_content,
         validate: false
       });

-      //
+      // In this case we keep the old one as active for clients, so that existing clients
+      // can still get the latest data while we replicate the new ones.
+      // It will however not replicate anymore.
+
       await this.db.sync_rules.updateOne(
         {
           _id: active.id,
@@ -134,7 +137,21 @@ export class MongoBucketStorage
         },
         {
           $set: {
-            state: storage.SyncRuleState.
+            state: storage.SyncRuleState.ERRORED
+          }
+        }
+      );
+    } else if (next != null && active?.id == sync_rules_group_id) {
+      // Already have next sync rules, but need to stop replicating the active one.
+
+      await this.db.sync_rules.updateOne(
+        {
+          _id: active.id,
+          state: storage.SyncRuleState.ACTIVE
+        },
+        {
+          $set: {
+            state: storage.SyncRuleState.ERRORED
           }
         }
       );
@@ -211,7 +228,7 @@ export class MongoBucketStorage
   async getActiveSyncRulesContent(): Promise<MongoPersistedSyncRulesContent | null> {
     const doc = await this.db.sync_rules.findOne(
       {
-        state: storage.SyncRuleState.ACTIVE
+        state: { $in: [storage.SyncRuleState.ACTIVE, storage.SyncRuleState.ERRORED] }
       },
       { sort: { _id: -1 }, limit: 1 }
     );
@@ -249,7 +266,7 @@ export class MongoBucketStorage
   async getReplicatingSyncRules(): Promise<storage.PersistedSyncRulesContent[]> {
     const docs = await this.db.sync_rules
       .find({
-
+        state: { $in: [storage.SyncRuleState.PROCESSING, storage.SyncRuleState.ACTIVE] }
       })
       .toArray();
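The recurring change in this file is a new ERRORED sync-rule state: rules that failed replication stay visible to read paths (hence the `$in: [ACTIVE, ERRORED]` filters) but stop replicating. The sketch below is not the package's code; it is a hypothetical condensation of the three branches that `restartReplication` now distinguishes by sync-rules group id:

```ts
// Hypothetical sketch only - names mirror the diff, not the real module exports.
type SyncRuleState = 'PROCESSING' | 'ACTIVE' | 'ERRORED';

interface SyncRulesRow {
  id: number;
  state: SyncRuleState;
}

function planRestart(
  groupId: number,
  next: SyncRulesRow | null, // rules still PROCESSING, if any
  active: SyncRulesRow | null // rules currently serving clients
): 'redo-next' | 'mark-active-errored' | 'stop-replicating-active' | 'no-op' {
  if (next != null && next.id === groupId) {
    // The "next" rules failed: reprocess them from scratch.
    return 'redo-next';
  } else if (next == null && active?.id === groupId) {
    // The active rules failed with no replacement yet: keep serving clients
    // from them, but flip their state to ERRORED so replication stops.
    return 'mark-active-errored';
  } else if (next != null && active?.id === groupId) {
    // A replacement is already processing: just stop replicating the active one.
    return 'stop-replicating-active';
  }
  return 'no-op';
}
```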
package/src/storage/implementation/MongoCompactor.ts
CHANGED

@@ -5,7 +5,6 @@ import { storage, utils } from '@powersync/service-core';
 import { PowerSyncMongo } from './db.js';
 import { BucketDataDocument, BucketDataKey } from './models.js';
 import { cacheKey } from './OperationBatch.js';
-import { safeBulkWrite } from './util.js';

 interface CurrentBucketState {
   /** Bucket name */
@@ -33,14 +32,7 @@ interface CurrentBucketState {
 /**
  * Additional options, primarily for testing.
  */
-export interface MongoCompactOptions extends storage.CompactOptions {
-  /** Minimum of 2 */
-  clearBatchLimit?: number;
-  /** Minimum of 1 */
-  moveBatchLimit?: number;
-  /** Minimum of 1 */
-  moveBatchQueryLimit?: number;
-}
+export interface MongoCompactOptions extends storage.CompactOptions {}

 const DEFAULT_CLEAR_BATCH_LIMIT = 5000;
 const DEFAULT_MOVE_BATCH_LIMIT = 2000;
@@ -265,7 +257,7 @@ export class MongoCompactor {
   private async flush() {
     if (this.updates.length > 0) {
       logger.info(`Compacting ${this.updates.length} ops`);
-      await
+      await this.db.bucket_data.bulkWrite(this.updates, {
         // Order is not important.
         // Since checksums are not affected, these operations can happen in any order,
         // and it's fine if the operations are partially applied.
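The batch-limit knobs were dropped from MongoCompactOptions, leaving it an empty extension of storage.CompactOptions; presumably equivalent options now live upstream in service-core. A minimal sketch of the fallback-to-defaults pattern the compactor uses (illustrative names, not the module's actual code):

```ts
// Illustrative only: per-call options falling back to the module-level
// defaults shown in the diff above.
const DEFAULT_CLEAR_BATCH_LIMIT = 5000;
const DEFAULT_MOVE_BATCH_LIMIT = 2000;

interface CompactOptionsLike {
  clearBatchLimit?: number;
  moveBatchLimit?: number;
}

function resolveCompactLimits(options?: CompactOptionsLike) {
  return {
    clearBatchLimit: options?.clearBatchLimit ?? DEFAULT_CLEAR_BATCH_LIMIT,
    moveBatchLimit: options?.moveBatchLimit ?? DEFAULT_MOVE_BATCH_LIMIT
  };
}

// e.g. resolveCompactLimits({ moveBatchLimit: 100 })
//   -> { clearBatchLimit: 5000, moveBatchLimit: 100 }
```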
package/src/storage/implementation/MongoSyncBucketStorage.ts
CHANGED

@@ -13,6 +13,7 @@ import {
   CheckpointChanges,
   GetCheckpointChangesOptions,
   ReplicationCheckpoint,
+  SourceTable,
   storage,
   utils,
   WatchWriteCheckpointOptions
@@ -23,7 +24,14 @@ import { wrapWithAbort } from 'ix/asynciterable/operators/withabort.js';
 import * as timers from 'timers/promises';
 import { MongoBucketStorage } from '../MongoBucketStorage.js';
 import { PowerSyncMongo } from './db.js';
-import {
+import {
+  BucketDataDocument,
+  BucketDataKey,
+  SourceKey,
+  SourceTableDocument,
+  SyncRuleCheckpointState,
+  SyncRuleDocument
+} from './models.js';
 import { MongoBucketBatch } from './MongoBucketBatch.js';
 import { MongoCompactor } from './MongoCompactor.js';
 import { MongoWriteCheckpointAPI } from './MongoWriteCheckpointAPI.js';
@@ -163,17 +171,17 @@ export class MongoSyncBucketStorage
     let result: storage.ResolveTableResult | null = null;
     await this.db.client.withSession(async (session) => {
       const col = this.db.source_tables;
-      let
-
-
-
-
-
-
-
-
-
-      );
+      let filter: Partial<SourceTableDocument> = {
+        group_id: group_id,
+        connection_id: connection_id,
+        schema_name: schema,
+        table_name: table,
+        replica_id_columns2: columns
+      };
+      if (objectId != null) {
+        filter.relation_id = objectId;
+      }
+      let doc = await col.findOne(filter, { session });
       if (doc == null) {
         doc = {
           _id: new bson.ObjectId(),
@@ -202,31 +210,40 @@ export class MongoSyncBucketStorage
       sourceTable.syncData = options.sync_rules.tableSyncsData(sourceTable);
       sourceTable.syncParameters = options.sync_rules.tableSyncsParameters(sourceTable);

+      let dropTables: storage.SourceTable[] = [];
+      // Detect tables that are either renamed, or have different replica_id_columns
+      let truncateFilter = [{ schema_name: schema, table_name: table }] as any[];
+      if (objectId != null) {
+        // Only detect renames if the source uses relation ids.
+        truncateFilter.push({ relation_id: objectId });
+      }
       const truncate = await col
         .find(
           {
             group_id: group_id,
             connection_id: connection_id,
             _id: { $ne: doc._id },
-            $or:
+            $or: truncateFilter
           },
           { session }
         )
         .toArray();
+      dropTables = truncate.map(
+        (doc) =>
+          new storage.SourceTable(
+            doc._id,
+            connection_tag,
+            doc.relation_id,
+            doc.schema_name,
+            doc.table_name,
+            doc.replica_id_columns2?.map((c) => ({ name: c.name, typeOid: c.type_oid, type: c.type })) ?? [],
+            doc.snapshot_done ?? true
+          )
+      );
+
       result = {
         table: sourceTable,
-        dropTables:
-          (doc) =>
-            new storage.SourceTable(
-              doc._id,
-              connection_tag,
-              doc.relation_id ?? 0,
-              doc.schema_name,
-              doc.table_name,
-              doc.replica_id_columns2?.map((c) => ({ name: c.name, typeOid: c.type_oid, type: c.type })) ?? [],
-              doc.snapshot_done ?? true
-            )
-        )
+        dropTables: dropTables
       };
     });
     return result!;
@@ -313,13 +330,7 @@ export class MongoSyncBucketStorage
           // 1. We can calculate the document size accurately without serializing again.
           // 2. We can delay parsing the results until it's needed.
           // We manually use bson.deserialize below
-          raw: true
-
-          // Since we're using raw: true and parsing ourselves later, we don't need bigint
-          // support here.
-          // Disabling due to https://jira.mongodb.org/browse/NODE-6165, and the fact that this
-          // is one of our most common queries.
-          useBigInt64: false
+          raw: true
         }
       ) as unknown as mongo.FindCursor<Buffer>;
@@ -593,7 +604,7 @@ export class MongoSyncBucketStorage
     await this.db.sync_rules.updateMany(
       {
         _id: { $ne: this.group_id },
-        state: storage.SyncRuleState.ACTIVE
+        state: { $in: [storage.SyncRuleState.ACTIVE, storage.SyncRuleState.ERRORED] }
       },
       {
         $set: {
@@ -646,7 +657,7 @@ export class MongoSyncBucketStorage
       doc = await this.db.sync_rules.findOne(
         {
           _id: syncRulesId,
-          state: storage.SyncRuleState.ACTIVE
+          state: { $in: [storage.SyncRuleState.ACTIVE, storage.SyncRuleState.ERRORED] }
         },
         {
           session,
@@ -717,7 +728,7 @@ export class MongoSyncBucketStorage
         // Irrelevant update
         continue;
       }
-      if (doc.state != storage.SyncRuleState.ACTIVE) {
+      if (doc.state != storage.SyncRuleState.ACTIVE && doc.state != storage.SyncRuleState.ERRORED) {
        // Sync rules have changed - abort and restart.
        // Should this error instead?
        break;
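A pattern repeated in this file is building the source-table lookup filter incrementally, adding `relation_id` only when the connector actually supplies relation ids. A self-contained sketch of that pattern (the document shape here is abridged for illustration, not the module's full `SourceTableDocument` model):

```ts
// Sketch only: abridged document shape, not the module's full model.
interface SourceTableDocument {
  group_id: number;
  connection_id: number;
  relation_id?: unknown; // present only for sources that use relation ids
  schema_name: string;
  table_name: string;
}

function buildSourceTableFilter(
  group_id: number,
  connection_id: number,
  schema: string,
  table: string,
  objectId: unknown
): Partial<SourceTableDocument> {
  const filter: Partial<SourceTableDocument> = {
    group_id,
    connection_id,
    schema_name: schema,
    table_name: table
  };
  if (objectId != null) {
    // Only match on relation_id when the connector supplies one.
    filter.relation_id = objectId;
  }
  return filter;
}
```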
package/src/storage/implementation/MongoWriteCheckpointAPI.ts
CHANGED

@@ -1,7 +1,6 @@
 import * as framework from '@powersync/lib-services-framework';
 import { storage } from '@powersync/service-core';
 import { PowerSyncMongo } from './db.js';
-import { safeBulkWrite } from './util.js';

 export type MongoCheckpointAPIOptions = {
   db: PowerSyncMongo;
@@ -127,8 +126,7 @@ export async function batchCreateCustomWriteCheckpoints(
     return;
   }

-  await
-    db.custom_write_checkpoints,
+  await db.custom_write_checkpoints.bulkWrite(
     checkpoints.map((checkpointOptions) => ({
       updateOne: {
         filter: { user_id: checkpointOptions.user_id, sync_rules_id: checkpointOptions.sync_rules_id },
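For context, a self-contained sketch of the upsert-style bulkWrite this function now issues directly against the driver (the filter fields follow the diff above; the document shape beyond them is assumed):

```ts
import { Collection } from 'mongodb';

// Assumed document shape - only user_id/sync_rules_id are visible in the diff.
interface CustomWriteCheckpointDocument {
  user_id: string;
  sync_rules_id: number;
  checkpoint: number;
}

async function batchUpsertCheckpoints(
  col: Collection<CustomWriteCheckpointDocument>,
  checkpoints: CustomWriteCheckpointDocument[]
) {
  if (checkpoints.length === 0) {
    return;
  }
  // One updateOne-with-upsert per checkpoint, keyed on (user_id, sync_rules_id).
  await col.bulkWrite(
    checkpoints.map((c) => ({
      updateOne: {
        filter: { user_id: c.user_id, sync_rules_id: c.sync_rules_id },
        update: { $set: { checkpoint: c.checkpoint } },
        upsert: true
      }
    }))
  );
}
```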
package/src/storage/implementation/PersistedBatch.ts
CHANGED

@@ -15,7 +15,7 @@ import {
   CurrentDataDocument,
   SourceKey
 } from './models.js';
-import { replicaIdToSubkey
+import { replicaIdToSubkey } from './util.js';

 /**
  * Maximum size of operations we write in a single transaction.
@@ -246,22 +246,21 @@ export class PersistedBatch {

   async flush(db: PowerSyncMongo, session: mongo.ClientSession) {
     if (this.bucketData.length > 0) {
-
-      await safeBulkWrite(db.bucket_data, this.bucketData, {
+      await db.bucket_data.bulkWrite(this.bucketData, {
         session,
         // inserts only - order doesn't matter
         ordered: false
       });
     }
     if (this.bucketParameters.length > 0) {
-      await
+      await db.bucket_parameters.bulkWrite(this.bucketParameters, {
         session,
         // inserts only - order doesn't matter
         ordered: false
       });
     }
     if (this.currentData.length > 0) {
-      await
+      await db.current_data.bulkWrite(this.currentData, {
         session,
         // may update and delete data within the same batch - order matters
         ordered: true
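The ordered flags above follow a general MongoDB rule worth spelling out: unordered bulk writes may be applied in any order and continue past individual failures, which is safe for insert-only batches, while current_data mixes updates and deletes of the same documents and therefore needs ordered execution. A generic sketch of the two cases (helper names are illustrative, not part of the package):

```ts
import { AnyBulkWriteOperation, ClientSession, Collection, Document } from 'mongodb';

// Illustrative helpers, not part of the package.
async function flushInsertOnly<T extends Document>(
  col: Collection<T>,
  ops: AnyBulkWriteOperation<T>[],
  session: ClientSession
) {
  if (ops.length === 0) return;
  // inserts only - order doesn't matter, so the server may apply them in any order
  await col.bulkWrite(ops, { session, ordered: false });
}

async function flushMixed<T extends Document>(
  col: Collection<T>,
  ops: AnyBulkWriteOperation<T>[],
  session: ClientSession
) {
  if (ops.length === 0) return;
  // an update followed by a delete of the same document must stay in sequence
  await col.bulkWrite(ops, { session, ordered: true });
}
```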
package/src/storage/implementation/util.ts
CHANGED

@@ -124,48 +124,3 @@ export const connectMongoForTests = (url: string, isCI: boolean) => {
   });
   return new PowerSyncMongo(client);
 };
-
-/**
- * MongoDB bulkWrite internally splits the operations into batches
- * so that no batch exceeds 16MB. However, there are cases where
- * the batch size is very close to 16MB, where additional metadata
- * on the server pushes it over the limit, resulting in this error
- * from the server:
- *
- * > MongoBulkWriteError: BSONObj size: 16814023 (0x1008FC7) is invalid. Size must be between 0 and 16793600(16MB) First element: insert: "bucket_data"
- *
- * We work around the issue by doing our own batching, limiting the
- * batch size to 15MB. This does add additional overhead with
- * BSON.calculateObjectSize.
- */
-export async function safeBulkWrite<T extends mongo.Document>(
-  collection: mongo.Collection<T>,
-  operations: mongo.AnyBulkWriteOperation<T>[],
-  options: mongo.BulkWriteOptions
-) {
-  // Must be below 16MB.
-  // We could probably go a little closer, but 15MB is a safe threshold.
-  const BULK_WRITE_LIMIT = 15 * 1024 * 1024;
-
-  let batch: mongo.AnyBulkWriteOperation<T>[] = [];
-  let currentSize = 0;
-  // Estimated overhead per operation, should be smaller in reality.
-  const keySize = 8;
-  for (let op of operations) {
-    const bsonSize =
-      mongo.BSON.calculateObjectSize(op, {
-        checkKeys: false,
-        ignoreUndefined: true
-      } as any) + keySize;
-    if (batch.length > 0 && currentSize + bsonSize > BULK_WRITE_LIMIT) {
-      await collection.bulkWrite(batch, options);
-      currentSize = 0;
-      batch = [];
-    }
-    batch.push(op);
-    currentSize += bsonSize;
-  }
-  if (batch.length > 0) {
-    await collection.bulkWrite(batch, options);
-  }
-}
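With safeBulkWrite removed, the call sites shown earlier in this diff (MongoCompactor, MongoWriteCheckpointAPI, PersistedBatch) now call Collection.bulkWrite directly. A minimal sketch of the replacement pattern, assuming the driver's internal batch splitting (against maxWriteBatchSize and message-size limits) is now considered sufficient, which is presumably what this release concluded:

```ts
import { AnyBulkWriteOperation, Collection, Document } from 'mongodb';

// Sketch of the post-0.6.1 pattern: no manual 15MB pre-batching, just a
// direct bulkWrite; the driver splits operations into server-acceptable
// batches internally.
async function writeAll<T extends Document>(
  col: Collection<T>,
  ops: AnyBulkWriteOperation<T>[]
) {
  if (ops.length === 0) return;
  await col.bulkWrite(ops, { ordered: false });
}
```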
package/test/src/__snapshots__/storage_sync.test.ts.snap
CHANGED

@@ -127,6 +127,62 @@ exports[`sync - mongodb > expiring token 2`] = `
 ]
 `;

+exports[`sync - mongodb > sends checkpoint complete line for empty checkpoint 1`] = `
+[
+  {
+    "checkpoint": {
+      "buckets": [
+        {
+          "bucket": "mybucket[]",
+          "checksum": -1221282404,
+          "count": 1,
+          "priority": 3,
+        },
+      ],
+      "last_op_id": "1",
+      "write_checkpoint": undefined,
+    },
+  },
+  {
+    "data": {
+      "after": "0",
+      "bucket": "mybucket[]",
+      "data": [
+        {
+          "checksum": 3073684892,
+          "data": "{"id":"t1","description":"sync"}",
+          "object_id": "t1",
+          "object_type": "test",
+          "op": "PUT",
+          "op_id": "1",
+          "subkey": "e5aa2ddc-1328-58fa-a000-0b5ed31eaf1a",
+        },
+      ],
+      "has_more": false,
+      "next_after": "1",
+    },
+  },
+  {
+    "checkpoint_complete": {
+      "last_op_id": "1",
+    },
+  },
+  {
+    "checkpoint_diff": {
+      "last_op_id": "1",
+      "removed_buckets": [],
+      "updated_buckets": [],
+      "write_checkpoint": "1",
+    },
+  },
+  {
+    "checkpoint_complete": {
+      "last_op_id": "1",
+    },
+  },
+]
+`;
+
 exports[`sync - mongodb > sync buckets in order 1`] = `
 [
   {
package/test/src/storage_compacting.test.ts
CHANGED

@@ -1,11 +1,5 @@
-import { MongoCompactOptions } from '@module/storage/implementation/MongoCompactor.js';
 import { register } from '@powersync/service-core-tests';
 import { describe } from 'vitest';
 import { INITIALIZED_MONGO_STORAGE_FACTORY } from './util.js';

-describe('Mongo Sync Bucket Storage Compact', () =>
-  register.registerCompactTests<MongoCompactOptions>(INITIALIZED_MONGO_STORAGE_FACTORY, {
-    clearBatchLimit: 2,
-    moveBatchLimit: 1,
-    moveBatchQueryLimit: 1
-  }));
+describe('Mongo Sync Bucket Storage Compact', () => register.registerCompactTests(INITIALIZED_MONGO_STORAGE_FACTORY));