@powersync/service-module-mongodb-storage 0.0.0-dev-20250214100224 → 0.0.0-dev-20250303114151
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +45 -3
- package/dist/storage/MongoBucketStorage.d.ts +7 -17
- package/dist/storage/MongoBucketStorage.js +48 -183
- package/dist/storage/MongoBucketStorage.js.map +1 -1
- package/dist/storage/implementation/MongoBucketBatch.d.ts +2 -2
- package/dist/storage/implementation/MongoBucketBatch.js +6 -6
- package/dist/storage/implementation/MongoBucketBatch.js.map +1 -1
- package/dist/storage/implementation/MongoCompactor.d.ts +0 -6
- package/dist/storage/implementation/MongoCompactor.js +1 -2
- package/dist/storage/implementation/MongoCompactor.js.map +1 -1
- package/dist/storage/implementation/MongoSyncBucketStorage.d.ts +16 -3
- package/dist/storage/implementation/MongoSyncBucketStorage.js +212 -16
- package/dist/storage/implementation/MongoSyncBucketStorage.js.map +1 -1
- package/dist/storage/implementation/MongoWriteCheckpointAPI.js +1 -2
- package/dist/storage/implementation/MongoWriteCheckpointAPI.js.map +1 -1
- package/dist/storage/implementation/OperationBatch.d.ts +1 -0
- package/dist/storage/implementation/OperationBatch.js +3 -0
- package/dist/storage/implementation/OperationBatch.js.map +1 -1
- package/dist/storage/implementation/PersistedBatch.js +7 -6
- package/dist/storage/implementation/PersistedBatch.js.map +1 -1
- package/dist/storage/implementation/db.js +1 -1
- package/dist/storage/implementation/db.js.map +1 -1
- package/dist/storage/implementation/models.d.ts +1 -0
- package/dist/storage/implementation/util.d.ts +0 -14
- package/dist/storage/implementation/util.js +0 -38
- package/dist/storage/implementation/util.js.map +1 -1
- package/package.json +7 -7
- package/src/storage/MongoBucketStorage.ts +51 -216
- package/src/storage/implementation/MongoBucketBatch.ts +6 -9
- package/src/storage/implementation/MongoCompactor.ts +2 -10
- package/src/storage/implementation/MongoSyncBucketStorage.ts +292 -37
- package/src/storage/implementation/MongoWriteCheckpointAPI.ts +1 -3
- package/src/storage/implementation/OperationBatch.ts +4 -0
- package/src/storage/implementation/PersistedBatch.ts +7 -6
- package/src/storage/implementation/db.ts +1 -1
- package/src/storage/implementation/models.ts +5 -0
- package/src/storage/implementation/util.ts +0 -45
- package/test/src/__snapshots__/storage_sync.test.ts.snap +138 -0
- package/test/src/storage_compacting.test.ts +1 -7
- package/test/src/storage_sync.test.ts +1 -1
- package/tsconfig.tsbuildinfo +1 -1
package/dist/storage/implementation/db.js
CHANGED
@@ -16,7 +16,7 @@ export class PowerSyncMongo
     constructor(client, options) {
        this.client = client;
        const db = client.db(options?.database, {
-            ...storage.
+            ...storage.BSON_DESERIALIZE_INTERNAL_OPTIONS
        });
        this.db = db;
        this.current_data = db.collection('current_data');
package/dist/storage/implementation/db.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"db.js","sourceRoot":"","sources":["../../../src/storage/implementation/db.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,SAAS,MAAM,gCAAgC,CAAC;AAE5D,OAAO,EAAE,OAAO,EAAE,MAAM,yBAAyB,CAAC;AAsBlD,MAAM,OAAO,cAAc;IAChB,YAAY,CAAwC;IACpD,WAAW,CAAuC;IAClD,iBAAiB,CAA4C;IAC7D,cAAc,CAAuC;IACrD,UAAU,CAAqC;IAC/C,aAAa,CAAwC;IACrD,wBAAwB,CAAkD;IAC1E,iBAAiB,CAA4C;IAC7D,QAAQ,CAAqC;IAC7C,KAAK,CAAyC;IAE9C,MAAM,CAAoB;IAC1B,EAAE,CAAW;IAEtB,YAAY,MAAyB,EAAE,OAA+B;QACpE,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QAErB,MAAM,EAAE,GAAG,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,QAAQ,EAAE;YACtC,GAAG,OAAO,CAAC,
+{"version":3,"file":"db.js","sourceRoot":"","sources":["../../../src/storage/implementation/db.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,SAAS,MAAM,gCAAgC,CAAC;AAE5D,OAAO,EAAE,OAAO,EAAE,MAAM,yBAAyB,CAAC;AAsBlD,MAAM,OAAO,cAAc;IAChB,YAAY,CAAwC;IACpD,WAAW,CAAuC;IAClD,iBAAiB,CAA4C;IAC7D,cAAc,CAAuC;IACrD,UAAU,CAAqC;IAC/C,aAAa,CAAwC;IACrD,wBAAwB,CAAkD;IAC1E,iBAAiB,CAA4C;IAC7D,QAAQ,CAAqC;IAC7C,KAAK,CAAyC;IAE9C,MAAM,CAAoB;IAC1B,EAAE,CAAW;IAEtB,YAAY,MAAyB,EAAE,OAA+B;QACpE,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QAErB,MAAM,EAAE,GAAG,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,QAAQ,EAAE;YACtC,GAAG,OAAO,CAAC,iCAAiC;SAC7C,CAAC,CAAC;QACH,IAAI,CAAC,EAAE,GAAG,EAAE,CAAC;QAEb,IAAI,CAAC,YAAY,GAAG,EAAE,CAAC,UAAU,CAAsB,cAAc,CAAC,CAAC;QACvE,IAAI,CAAC,WAAW,GAAG,EAAE,CAAC,UAAU,CAAC,aAAa,CAAC,CAAC;QAChD,IAAI,CAAC,iBAAiB,GAAG,EAAE,CAAC,UAAU,CAAC,mBAAmB,CAAC,CAAC;QAC5D,IAAI,CAAC,cAAc,GAAG,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,CAAC;QACtD,IAAI,CAAC,UAAU,GAAG,EAAE,CAAC,UAAU,CAAC,YAAY,CAAC,CAAC;QAC9C,IAAI,CAAC,aAAa,GAAG,EAAE,CAAC,UAAU,CAAC,eAAe,CAAC,CAAC;QACpD,IAAI,CAAC,wBAAwB,GAAG,EAAE,CAAC,UAAU,CAAC,0BAA0B,CAAC,CAAC;QAC1E,IAAI,CAAC,iBAAiB,GAAG,EAAE,CAAC,UAAU,CAAC,mBAAmB,CAAC,CAAC;QAC5D,IAAI,CAAC,QAAQ,GAAG,EAAE,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;QAC1C,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC;IAC3C,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,KAAK;QACT,MAAM,IAAI,CAAC,YAAY,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC;QACvC,MAAM,IAAI,CAAC,WAAW,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC;QACtC,MAAM,IAAI,CAAC,iBAAiB,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC;QAC5C,MAAM,IAAI,CAAC,cAAc,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC;QACzC,MAAM,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC;QACrC,MAAM,IAAI,CAAC,aAAa,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC;QACxC,MAAM,IAAI,CAAC,iBAAiB,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC;QAC5C,MAAM,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAClC,MAAM,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC;IAClC,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,IAAI;QACR,MAAM,IAAI,CAAC,EAAE,CAAC,YAAY,EAAE,CAAC;IAC/B,CAAC;CACF;AAED,MAAM,UAAU,oBAAoB,CAAC,MAA0B;IAC7D,OAAO,IAAI,cAAc,CAAC,SAAS,CAAC,iBAAiB,CAAC,MAAM,CAAC,EAAE,EAAE,QAAQ,EAAE,MAAM,CAAC,QAAQ,EAAE,CAAC,CAAC;AAChG,CAAC"}
package/dist/storage/implementation/models.d.ts
CHANGED
@@ -122,6 +122,7 @@ export interface SyncRuleDocument
     last_fatal_error: string | null;
     content: string;
 }
+export type SyncRuleCheckpointState = Pick<SyncRuleDocument, 'last_checkpoint' | 'last_checkpoint_lsn' | '_id' | 'state'>;
 export interface CustomWriteCheckpointDocument {
     _id: bson.ObjectId;
     user_id: string;
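The added SyncRuleCheckpointState narrows SyncRuleDocument to just the checkpoint-tracking fields. A minimal sketch (not from the package) of how such a type pairs with a projected query, reusing the sync_rules collection and projection fields that appear elsewhere in this diff:

    import type { SyncRuleCheckpointState } from './models.js';

    // Hypothetical lookup: fetch only the fields covered by SyncRuleCheckpointState.
    const checkpointState = await db.sync_rules.findOne<SyncRuleCheckpointState>(
      { state: 'ACTIVE' },
      { sort: { _id: -1 }, projection: { _id: 1, state: 1, last_checkpoint: 1, last_checkpoint_lsn: 1 } }
    );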
package/dist/storage/implementation/util.d.ts
CHANGED
@@ -30,17 +30,3 @@ export declare const createMongoClient: (url: string, options?: mongo.MongoClien
  * Helper for unit tests
  */
 export declare const connectMongoForTests: (url: string, isCI: boolean) => PowerSyncMongo;
-/**
- * MongoDB bulkWrite internally splits the operations into batches
- * so that no batch exceeds 16MB. However, there are cases where
- * the batch size is very close to 16MB, where additional metadata
- * on the server pushes it over the limit, resulting in this error
- * from the server:
- *
- * > MongoBulkWriteError: BSONObj size: 16814023 (0x1008FC7) is invalid. Size must be between 0 and 16793600(16MB) First element: insert: "bucket_data"
- *
- * We work around the issue by doing our own batching, limiting the
- * batch size to 15MB. This does add additional overhead with
- * BSON.calculateObjectSize.
- */
-export declare function safeBulkWrite<T extends mongo.Document>(collection: mongo.Collection<T>, operations: mongo.AnyBulkWriteOperation<T>[], options: mongo.BulkWriteOptions): Promise<void>;
package/dist/storage/implementation/util.js
CHANGED
@@ -114,42 +114,4 @@ export const connectMongoForTests = (url, isCI) => {
     });
     return new PowerSyncMongo(client);
 };
-/**
- * MongoDB bulkWrite internally splits the operations into batches
- * so that no batch exceeds 16MB. However, there are cases where
- * the batch size is very close to 16MB, where additional metadata
- * on the server pushes it over the limit, resulting in this error
- * from the server:
- *
- * > MongoBulkWriteError: BSONObj size: 16814023 (0x1008FC7) is invalid. Size must be between 0 and 16793600(16MB) First element: insert: "bucket_data"
- *
- * We work around the issue by doing our own batching, limiting the
- * batch size to 15MB. This does add additional overhead with
- * BSON.calculateObjectSize.
- */
-export async function safeBulkWrite(collection, operations, options) {
-    // Must be below 16MB.
-    // We could probably go a little closer, but 15MB is a safe threshold.
-    const BULK_WRITE_LIMIT = 15 * 1024 * 1024;
-    let batch = [];
-    let currentSize = 0;
-    // Estimated overhead per operation, should be smaller in reality.
-    const keySize = 8;
-    for (let op of operations) {
-        const bsonSize = mongo.BSON.calculateObjectSize(op, {
-            checkKeys: false,
-            ignoreUndefined: true
-        }) + keySize;
-        if (batch.length > 0 && currentSize + bsonSize > BULK_WRITE_LIMIT) {
-            await collection.bulkWrite(batch, options);
-            currentSize = 0;
-            batch = [];
-        }
-        batch.push(op);
-        currentSize += bsonSize;
-    }
-    if (batch.length > 0) {
-        await collection.bulkWrite(batch, options);
-    }
-}
 //# sourceMappingURL=util.js.map
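For context on the removal above: safeBulkWrite existed to pre-chunk operations below the server's 16MB BSONObj limit, and MongoCompactor now issues its bulk writes directly on the collection (see the MongoCompactor hunk near the end of this diff). A minimal usage sketch against the removed signature; the Row shape and operations are hypothetical:

    import * as mongo from 'mongodb';
    import { safeBulkWrite } from './util.js';

    type Row = { _id: string; value: number };

    async function saveRows(collection: mongo.Collection<Row>, rows: Row[]) {
      const ops: mongo.AnyBulkWriteOperation<Row>[] = rows.map((row) => ({
        replaceOne: { filter: { _id: row._id }, replacement: row, upsert: true }
      }));
      // The helper split ops into sub-batches of at most ~15MB before each bulkWrite call.
      await safeBulkWrite(collection, ops, { ordered: false });
    }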
package/dist/storage/implementation/util.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"util.js","sourceRoot":"","sources":["../../../src/storage/implementation/util.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AACjC,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAE7B,OAAO,EAAE,KAAK,EAAE,MAAM,gCAAgC,CAAC;AACvD,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,yBAAyB,CAAC;AAEzD,OAAO,EAAE,cAAc,EAAE,MAAM,SAAS,CAAC;AAGzC,MAAM,UAAU,cAAc,CAAI,MAAkB,EAAE,IAAiB;IACrE,IAAI,MAAM,GAAG;QACX,IAAI,EAAE;YACJ,GAAG,MAAM;SACH;QACR,GAAG,EAAE;YACH,GAAG,MAAM;SACH;KACT,CAAC;IAEF,KAAK,IAAI,GAAG,IAAI,IAAI,EAAE,CAAC;QACrB,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;QACrC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;IACtC,CAAC;IAED,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,MAAM,UAAU,gBAAgB,CAAC,MAAc,EAAE,aAAqB;IACpE,MAAM,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;IAC1D,OAAO,GAAG,MAAM,GAAG,aAAa,IAAI,WAAW,EAAE,CAAC;AACpD,CAAC;AAED;;;;;;;;;;GAUG;AACH,MAAM,CAAC,KAAK,UAAU,eAAe,CAAI,MAA2B;IAClE,IAAI,CAAC;QACH,IAAI,IAAS,CAAC;QACd,IAAI,OAAO,GAAG,IAAI,CAAC;QACnB,2CAA2C;QAC3C,MAAM,MAAM,GAAG,MAAM,MAAM,CAAC,OAAO,EAAE,CAAC;QACtC,yCAAyC;QACzC,IAAI,GAAG,MAAM,CAAC,qBAAqB,EAAE,CAAC;QACtC,IAAI,CAAC,MAAM,IAAI,MAAM,CAAC,EAAE,EAAE,MAAM,EAAE,EAAE,CAAC;YACnC,0CAA0C;YAC1C,wEAAwE;YACxE,uEAAuE;YACvE,oCAAoC;YACpC,EAAE;YACF,4EAA4E;YAC5E,2DAA2D;YAC3D,gCAAgC;YAChC,OAAO,GAAG,KAAK,CAAC;QAClB,CAAC;QACD,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC;IAC3B,CAAC;YAAS,CAAC;QACT,iDAAiD;QACjD,uIAAuI;QACvI,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC;YACnB,MAAM,MAAM,CAAC,KAAK,EAAE,CAAC;QACvB,CAAC;IACH,CAAC;AACH,CAAC;AAED,MAAM,UAAU,UAAU,CAAC,GAAuB;IAChD,IAAI,GAAG,CAAC,EAAE,IAAI,KAAK,IAAI,GAAG,CAAC,EAAE,IAAI,QAAQ,EAAE,CAAC;QAC1C,OAAO;YACL,KAAK,EAAE,KAAK,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC;YACvC,EAAE,EAAE,GAAG,CAAC,EAAE;YACV,WAAW,EAAE,GAAG,CAAC,KAAK;YACtB,SAAS,EAAE,GAAG,CAAC,MAAM;YACrB,QAAQ,EAAE,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC;YAC9B,MAAM,EAAE,iBAAiB,CAAC,GAAG,CAAC,YAAa,EAAE,GAAG,CAAC,UAAW,CAAC;YAC7D,IAAI,EAAE,GAAG,CAAC,IAAI;SACf,CAAC;IACJ,CAAC;SAAM,CAAC;QACN,cAAc;QAEd,OAAO;YACL,KAAK,EAAE,KAAK,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC;YACvC,EAAE,EAAE,GAAG,CAAC,EAAE;YACV,QAAQ,EAAE,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC;SAC/B,CAAC;IACJ,CAAC;AACH,CAAC;AAED,MAAM,UAAU,iBAAiB,CAAC,KAAoB,EAAE,EAAqB;IAC3E,IAAI,OAAO,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC;QACvB,mDAAmD;QACnD,OAAO,GAAG,KAAK,CAAC,WAAW,EAAE,IAAI,EAAE,CAAC,WAAW,EAAE,EAAE,CAAC;IACtD,CAAC;SAAM,CAAC;QACN,oCAAoC;QACpC,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,EAAE,KAAK,EAAE,EAAE,EAAE,CAAC,CAAC;QAC3C,OAAO,IAAI,CAAC,EAAE,CAAC,IAAI,EAAE,KAAK,CAAC,YAAY,CAAC,CAAC;IAC3C,CAAC;AACH,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,MAAM,iBAAiB,GAAG,CAAC,GAAW,EAAE,OAAkC,EAAE,EAAE;IACnF,OAAO,IAAI,KAAK,CAAC,WAAW,CAAC,GAAG,EAAE,OAAO,CAAC,CAAC;AAC7C,CAAC,CAAC;AAEF;;GAEG;AACH,MAAM,CAAC,MAAM,oBAAoB,GAAG,CAAC,GAAW,EAAE,IAAa,EAAE,EAAE;IACjE,0EAA0E;IAC1E,oEAAoE;IACpE,MAAM,MAAM,GAAG,iBAAiB,CAAC,GAAG,EAAE;QACpC,gBAAgB,EAAE,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK;QACvC,eAAe,EAAE,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK;QACtC,wBAAwB,EAAE,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK;KAChD,CAAC,CAAC;IACH,OAAO,IAAI,cAAc,CAAC,MAAM,CAAC,CAAC;AACpC,CAAC,CAAC
+{"version":3,"file":"util.js","sourceRoot":"","sources":["../../../src/storage/implementation/util.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AACjC,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAE7B,OAAO,EAAE,KAAK,EAAE,MAAM,gCAAgC,CAAC;AACvD,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,yBAAyB,CAAC;AAEzD,OAAO,EAAE,cAAc,EAAE,MAAM,SAAS,CAAC;AAGzC,MAAM,UAAU,cAAc,CAAI,MAAkB,EAAE,IAAiB;IACrE,IAAI,MAAM,GAAG;QACX,IAAI,EAAE;YACJ,GAAG,MAAM;SACH;QACR,GAAG,EAAE;YACH,GAAG,MAAM;SACH;KACT,CAAC;IAEF,KAAK,IAAI,GAAG,IAAI,IAAI,EAAE,CAAC;QACrB,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;QACrC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;IACtC,CAAC;IAED,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,MAAM,UAAU,gBAAgB,CAAC,MAAc,EAAE,aAAqB;IACpE,MAAM,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;IAC1D,OAAO,GAAG,MAAM,GAAG,aAAa,IAAI,WAAW,EAAE,CAAC;AACpD,CAAC;AAED;;;;;;;;;;GAUG;AACH,MAAM,CAAC,KAAK,UAAU,eAAe,CAAI,MAA2B;IAClE,IAAI,CAAC;QACH,IAAI,IAAS,CAAC;QACd,IAAI,OAAO,GAAG,IAAI,CAAC;QACnB,2CAA2C;QAC3C,MAAM,MAAM,GAAG,MAAM,MAAM,CAAC,OAAO,EAAE,CAAC;QACtC,yCAAyC;QACzC,IAAI,GAAG,MAAM,CAAC,qBAAqB,EAAE,CAAC;QACtC,IAAI,CAAC,MAAM,IAAI,MAAM,CAAC,EAAE,EAAE,MAAM,EAAE,EAAE,CAAC;YACnC,0CAA0C;YAC1C,wEAAwE;YACxE,uEAAuE;YACvE,oCAAoC;YACpC,EAAE;YACF,4EAA4E;YAC5E,2DAA2D;YAC3D,gCAAgC;YAChC,OAAO,GAAG,KAAK,CAAC;QAClB,CAAC;QACD,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC;IAC3B,CAAC;YAAS,CAAC;QACT,iDAAiD;QACjD,uIAAuI;QACvI,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC;YACnB,MAAM,MAAM,CAAC,KAAK,EAAE,CAAC;QACvB,CAAC;IACH,CAAC;AACH,CAAC;AAED,MAAM,UAAU,UAAU,CAAC,GAAuB;IAChD,IAAI,GAAG,CAAC,EAAE,IAAI,KAAK,IAAI,GAAG,CAAC,EAAE,IAAI,QAAQ,EAAE,CAAC;QAC1C,OAAO;YACL,KAAK,EAAE,KAAK,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC;YACvC,EAAE,EAAE,GAAG,CAAC,EAAE;YACV,WAAW,EAAE,GAAG,CAAC,KAAK;YACtB,SAAS,EAAE,GAAG,CAAC,MAAM;YACrB,QAAQ,EAAE,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC;YAC9B,MAAM,EAAE,iBAAiB,CAAC,GAAG,CAAC,YAAa,EAAE,GAAG,CAAC,UAAW,CAAC;YAC7D,IAAI,EAAE,GAAG,CAAC,IAAI;SACf,CAAC;IACJ,CAAC;SAAM,CAAC;QACN,cAAc;QAEd,OAAO;YACL,KAAK,EAAE,KAAK,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC;YACvC,EAAE,EAAE,GAAG,CAAC,EAAE;YACV,QAAQ,EAAE,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC;SAC/B,CAAC;IACJ,CAAC;AACH,CAAC;AAED,MAAM,UAAU,iBAAiB,CAAC,KAAoB,EAAE,EAAqB;IAC3E,IAAI,OAAO,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC;QACvB,mDAAmD;QACnD,OAAO,GAAG,KAAK,CAAC,WAAW,EAAE,IAAI,EAAE,CAAC,WAAW,EAAE,EAAE,CAAC;IACtD,CAAC;SAAM,CAAC;QACN,oCAAoC;QACpC,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,EAAE,KAAK,EAAE,EAAE,EAAE,CAAC,CAAC;QAC3C,OAAO,IAAI,CAAC,EAAE,CAAC,IAAI,EAAE,KAAK,CAAC,YAAY,CAAC,CAAC;IAC3C,CAAC;AACH,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,MAAM,iBAAiB,GAAG,CAAC,GAAW,EAAE,OAAkC,EAAE,EAAE;IACnF,OAAO,IAAI,KAAK,CAAC,WAAW,CAAC,GAAG,EAAE,OAAO,CAAC,CAAC;AAC7C,CAAC,CAAC;AAEF;;GAEG;AACH,MAAM,CAAC,MAAM,oBAAoB,GAAG,CAAC,GAAW,EAAE,IAAa,EAAE,EAAE;IACjE,0EAA0E;IAC1E,oEAAoE;IACpE,MAAM,MAAM,GAAG,iBAAiB,CAAC,GAAG,EAAE;QACpC,gBAAgB,EAAE,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK;QACvC,eAAe,EAAE,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK;QACtC,wBAAwB,EAAE,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK;KAChD,CAAC,CAAC;IACH,OAAO,IAAI,cAAc,CAAC,MAAM,CAAC,CAAC;AACpC,CAAC,CAAC"}
package/package.json
CHANGED
@@ -2,7 +2,7 @@
   "name": "@powersync/service-module-mongodb-storage",
   "repository": "https://github.com/powersync-ja/powersync-service",
   "types": "dist/index.d.ts",
-  "version": "0.0.0-dev-
+  "version": "0.0.0-dev-20250303114151",
   "main": "dist/index.js",
   "license": "FSL-1.1-Apache-2.0",
   "type": "module",
@@ -22,21 +22,21 @@
     }
   },
   "dependencies": {
-    "bson": "^6.
+    "bson": "^6.10.3",
     "ts-codec": "^1.3.0",
     "ix": "^5.0.0",
     "lru-cache": "^10.2.2",
     "uuid": "^9.0.1",
-    "@powersync/lib-services-framework": "0.5.
-    "@powersync/service-core": "0.0.0-dev-
+    "@powersync/lib-services-framework": "0.5.3",
+    "@powersync/service-core": "0.0.0-dev-20250303114151",
     "@powersync/service-jsonbig": "0.17.10",
-    "@powersync/service-sync-rules": "0.
+    "@powersync/service-sync-rules": "0.24.0",
     "@powersync/service-types": "0.8.0",
-    "@powersync/lib-service-mongodb": "0.4.
+    "@powersync/lib-service-mongodb": "0.4.3"
   },
   "devDependencies": {
     "@types/uuid": "^9.0.4",
-    "@powersync/service-core-tests": "0.0.0-dev-
+    "@powersync/service-core-tests": "0.0.0-dev-20250303114151"
   },
   "scripts": {
     "build": "tsc -b",
package/src/storage/MongoBucketStorage.ts
CHANGED
@@ -1,11 +1,8 @@
 import { SqlSyncRules } from '@powersync/service-sync-rules';
-import { wrapWithAbort } from 'ix/asynciterable/operators/withabort.js';
-import { LRUCache } from 'lru-cache/min';
-import * as timers from 'timers/promises';
 
-import {
+import { GetIntanceOptions, storage } from '@powersync/service-core';
 
-import {
+import { BaseObserver, ErrorCode, logger, ServiceError } from '@powersync/lib-services-framework';
 import { v4 as uuid } from 'uuid';
 
 import * as lib_mongo from '@powersync/lib-service-mongodb';
@@ -18,7 +15,7 @@ import { MongoSyncBucketStorage } from './implementation/MongoSyncBucketStorage.
 import { generateSlotName } from './implementation/util.js';
 
 export class MongoBucketStorage
-  extends 
+  extends BaseObserver<storage.BucketStorageFactoryListener>
   implements storage.BucketStorageFactory
 {
   private readonly client: mongo.MongoClient;
@@ -26,26 +23,7 @@ export class MongoBucketStorage
   // TODO: This is still Postgres specific and needs to be reworked
   public readonly slot_name_prefix: string;
 
-  private 
-    max: 3,
-    fetchMethod: async (id) => {
-      const doc2 = await this.db.sync_rules.findOne(
-        {
-          _id: id
-        },
-        { limit: 1 }
-      );
-      if (doc2 == null) {
-        // Deleted in the meantime?
-        return undefined;
-      }
-      const rules = new MongoPersistedSyncRulesContent(this.db, doc2);
-      return this.getInstance(rules);
-    },
-    dispose: (storage) => {
-      storage[Symbol.dispose]();
-    }
-  });
+  private activeStorageCache: MongoSyncBucketStorage | undefined;
 
   public readonly db: PowerSyncMongo;
 
@@ -63,20 +41,21 @@ export class MongoBucketStorage
   }
 
   async [Symbol.asyncDispose]() {
-
+    // No-op
   }
 
-  getInstance(
-    let { id, slot_name } = 
+  getInstance(syncRules: storage.PersistedSyncRulesContent, options?: GetIntanceOptions): MongoSyncBucketStorage {
+    let { id, slot_name } = syncRules;
     if ((typeof id as any) == 'bigint') {
       id = Number(id);
     }
-    const storage = new MongoSyncBucketStorage(this, id, 
-
+    const storage = new MongoSyncBucketStorage(this, id, syncRules, slot_name);
+    if (!options?.skipLifecycleHooks) {
+      this.iterateListeners((cb) => cb.syncStorageCreated?.(storage));
+    }
     storage.registerListener({
       batchStarted: (batch) => {
-
-        batch.registerManagedListener(storage, {
+        batch.registerListener({
           replicationEvent: (payload) => this.iterateListeners((cb) => cb.replicationEvent?.(payload))
         });
       }
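The reworked getInstance notifies factory listeners unless skipLifecycleHooks is set. A hedged sketch of the consumer side, using only names visible in this diff (the handler body is illustrative):

    // BaseObserver exposes registerListener; syncStorageCreated fires per new instance.
    factory.registerListener({
      syncStorageCreated: (syncStorage) => {
        // e.g. attach metrics or logging to the new MongoSyncBucketStorage.
        logger.info(`sync storage created for group ${syncStorage.group_id}`);
      }
    });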
@@ -118,13 +97,11 @@ export class MongoBucketStorage
     }
   }
 
-  async 
+  async restartReplication(sync_rules_group_id: number) {
     const next = await this.getNextSyncRulesContent();
     const active = await this.getActiveSyncRulesContent();
 
-
-    // The current one will continue erroring until the next one has finished processing.
-    if (next != null && next.slot_name == slot_name) {
+    if (next != null && next.id == sync_rules_group_id) {
       // We need to redo the "next" sync rules
       await this.updateSyncRules({
         content: next.sync_rules_content,
@@ -142,14 +119,17 @@ export class MongoBucketStorage
         }
       }
     );
-    } else if (next == null && active?.
+    } else if (next == null && active?.id == sync_rules_group_id) {
       // Slot removed for "active" sync rules, while there is no "next" one.
       await this.updateSyncRules({
         content: active.sync_rules_content,
         validate: false
       });
 
-      // 
+      // In this case we keep the old one as active for clients, so that that existing clients
+      // can still get the latest data while we replicate the new ones.
+      // It will however not replicate anymore.
+
       await this.db.sync_rules.updateOne(
         {
           _id: active.id,
@@ -157,7 +137,21 @@ export class MongoBucketStorage
         },
         {
           $set: {
-            state: storage.SyncRuleState.
+            state: storage.SyncRuleState.ERRORED
+          }
+        }
+      );
+    } else if (next != null && active?.id == sync_rules_group_id) {
+      // Already have next sync rules, but need to stop replicating the active one.
+
+      await this.db.sync_rules.updateOne(
+        {
+          _id: active.id,
+          state: storage.SyncRuleState.ACTIVE
+        },
+        {
+          $set: {
+            state: storage.SyncRuleState.ERRORED
           }
         }
       );
@@ -234,7 +228,7 @@ export class MongoBucketStorage
   async getActiveSyncRulesContent(): Promise<MongoPersistedSyncRulesContent | null> {
     const doc = await this.db.sync_rules.findOne(
       {
-        state: storage.SyncRuleState.ACTIVE
+        state: { $in: [storage.SyncRuleState.ACTIVE, storage.SyncRuleState.ERRORED] }
       },
       { sort: { _id: -1 }, limit: 1 }
     );
@@ -272,7 +266,7 @@ export class MongoBucketStorage
   async getReplicatingSyncRules(): Promise<storage.PersistedSyncRulesContent[]> {
     const docs = await this.db.sync_rules
       .find({
-
+        state: { $in: [storage.SyncRuleState.PROCESSING, storage.SyncRuleState.ACTIVE] }
       })
       .toArray();
 
@@ -293,19 +287,22 @@ export class MongoBucketStorage
     });
   }
 
-  async 
-  const
-
-
-
-      {
-        sort: { _id: -1 },
-        limit: 1,
-        projection: { _id: 1, last_checkpoint: 1, last_checkpoint_lsn: 1 }
-      }
-    );
+  async getActiveStorage(): Promise<MongoSyncBucketStorage | null> {
+    const content = await this.getActiveSyncRulesContent();
+    if (content == null) {
+      return null;
+    }
 
-
+    // It is important that this instance is cached.
+    // Not for the instance construction itself, but to ensure that internal caches on the instance
+    // are re-used properly.
+    if (this.activeStorageCache?.group_id == content.id) {
+      return this.activeStorageCache;
+    } else {
+      const instance = this.getInstance(content);
+      this.activeStorageCache = instance;
+      return instance;
+    }
   }
 
   async getStorageMetrics(): Promise<storage.StorageMetrics> {
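getActiveStorage memoizes on the sync rules group id, as the comments in the hunk note. The observable contract, sketched (illustrative, mirroring the code above):

    // Repeated calls while the same sync rules remain active return the same instance,
    // so per-instance internal caches stay warm:
    const first = await bucketStorageFactory.getActiveStorage();
    const second = await bucketStorageFactory.getActiveStorage();
    // first === second until a different sync rules group becomes active.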
@@ -391,166 +388,4 @@ export class MongoBucketStorage
 
     return instance!._id;
   }
-
-  private makeActiveCheckpoint(doc: SyncRuleDocument | null) {
-    return {
-      checkpoint: utils.timestampToOpId(doc?.last_checkpoint ?? 0n),
-      lsn: doc?.last_checkpoint_lsn ?? null,
-      hasSyncRules() {
-        return doc != null;
-      },
-      getBucketStorage: async () => {
-        if (doc == null) {
-          return null;
-        }
-        return (await this.storageCache.fetch(doc._id)) ?? null;
-      }
-    } satisfies storage.ActiveCheckpoint;
-  }
-
-  /**
-   * Instance-wide watch on the latest available checkpoint (op_id + lsn).
-   */
-  private async *watchActiveCheckpoint(signal: AbortSignal): AsyncIterable<storage.ActiveCheckpoint> {
-    const pipeline: mongo.Document[] = [
-      {
-        $match: {
-          'fullDocument.state': 'ACTIVE',
-          operationType: { $in: ['insert', 'update'] }
-        }
-      },
-      {
-        $project: {
-          operationType: 1,
-          'fullDocument._id': 1,
-          'fullDocument.last_checkpoint': 1,
-          'fullDocument.last_checkpoint_lsn': 1
-        }
-      }
-    ];
-
-    // Use this form instead of (doc: SyncRuleDocument | null = null),
-    // otherwise we get weird "doc: never" issues.
-    let doc = null as SyncRuleDocument | null;
-    let clusterTime = null as mongo.Timestamp | null;
-
-    await this.client.withSession(async (session) => {
-      doc = await this.db.sync_rules.findOne(
-        {
-          state: storage.SyncRuleState.ACTIVE
-        },
-        {
-          session,
-          sort: { _id: -1 },
-          limit: 1,
-          projection: {
-            _id: 1,
-            last_checkpoint: 1,
-            last_checkpoint_lsn: 1
-          }
-        }
-      );
-      const time = session.clusterTime?.clusterTime ?? null;
-      clusterTime = time;
-    });
-    if (clusterTime == null) {
-      throw new ServiceError(ErrorCode.PSYNC_S2401, 'Could not get clusterTime');
-    }
-
-    if (signal.aborted) {
-      return;
-    }
-
-    if (doc) {
-      yield this.makeActiveCheckpoint(doc);
-    }
-
-    const stream = this.db.sync_rules.watch(pipeline, {
-      fullDocument: 'updateLookup',
-      // Start at the cluster time where we got the initial doc, to make sure
-      // we don't skip any updates.
-      // This may result in the first operation being a duplicate, but we filter
-      // it out anyway.
-      startAtOperationTime: clusterTime
-    });
-
-    signal.addEventListener(
-      'abort',
-      () => {
-        stream.close();
-      },
-      { once: true }
-    );
-
-    let lastOp: storage.ActiveCheckpoint | null = null;
-
-    for await (const update of stream.stream()) {
-      if (signal.aborted) {
-        break;
-      }
-      if (update.operationType != 'insert' && update.operationType != 'update') {
-        continue;
-      }
-      const doc = update.fullDocument!;
-      if (doc == null) {
-        continue;
-      }
-
-      const op = this.makeActiveCheckpoint(doc);
-      // Check for LSN / checkpoint changes - ignore other metadata changes
-      if (lastOp == null || op.lsn != lastOp.lsn || op.checkpoint != lastOp.checkpoint) {
-        lastOp = op;
-        yield op;
-      }
-    }
-  }
-
-  // Nothing is done here until a subscriber starts to iterate
-  private readonly sharedIter = new sync.BroadcastIterable((signal) => {
-    return this.watchActiveCheckpoint(signal);
-  });
-
-  /**
-   * User-specific watch on the latest checkpoint and/or write checkpoint.
-   */
-  async *watchWriteCheckpoint(user_id: string, signal: AbortSignal): AsyncIterable<storage.WriteCheckpoint> {
-    let lastCheckpoint: utils.OpId | null = null;
-    let lastWriteCheckpoint: bigint | null = null;
-
-    const iter = wrapWithAbort(this.sharedIter, signal);
-    for await (const cp of iter) {
-      const { checkpoint, lsn } = cp;
-
-      // lsn changes are not important by itself.
-      // What is important is:
-      // 1. checkpoint (op_id) changes.
-      // 2. write checkpoint changes for the specific user
-      const bucketStorage = await cp.getBucketStorage();
-      if (!bucketStorage) {
-        continue;
-      }
-
-      const lsnFilters: Record<string, string> = lsn ? { 1: lsn } : {};
-
-      const currentWriteCheckpoint = await bucketStorage.lastWriteCheckpoint({
-        user_id,
-        heads: {
-          ...lsnFilters
-        }
-      });
-
-      if (currentWriteCheckpoint == lastWriteCheckpoint && checkpoint == lastCheckpoint) {
-        // No change - wait for next one
-        // In some cases, many LSNs may be produced in a short time.
-        // Add a delay to throttle the write checkpoint lookup a bit.
-        await timers.setTimeout(20 + 10 * Math.random());
-        continue;
-      }
-
-      lastWriteCheckpoint = currentWriteCheckpoint;
-      lastCheckpoint = checkpoint;
-
-      yield { base: cp, writeCheckpoint: currentWriteCheckpoint };
-    }
-  }
 }
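The removed watcher used a session's clusterTime to avoid a race between the initial read and the change stream. A minimal standalone sketch of that resume technique (the collection and document shape are hypothetical; the PowerSync types are omitted):

    import * as mongo from 'mongodb';

    async function watchFromRead(client: mongo.MongoClient, collection: mongo.Collection) {
      let clusterTime: mongo.Timestamp | undefined;
      let initial: mongo.WithId<mongo.Document> | null = null;
      await client.withSession(async (session) => {
        // Read the current state inside the session and capture its cluster time.
        initial = await collection.findOne({}, { session });
        clusterTime = session.clusterTime?.clusterTime;
      });
      // Start the stream at that time so nothing between the read and the watch is lost.
      // The first event may duplicate the initial read; callers de-duplicate by comparing fields.
      const stream = collection.watch([], {
        fullDocument: 'updateLookup',
        startAtOperationTime: clusterTime
      });
      return { initial, stream };
    }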
package/src/storage/implementation/MongoBucketBatch.ts
CHANGED
@@ -3,15 +3,15 @@ import { SqlEventDescriptor, SqliteRow, SqlSyncRules } from '@powersync/service-
 import * as bson from 'bson';
 
 import {
+  BaseObserver,
   container,
-  DisposableObserver,
   ErrorCode,
   errors,
   logger,
   ReplicationAssertionError,
   ServiceError
 } from '@powersync/lib-services-framework';
-import { SaveOperationTag, storage, utils } from '@powersync/service-core';
+import { deserializeBson, SaveOperationTag, storage, utils } from '@powersync/service-core';
 import * as timers from 'node:timers/promises';
 import { PowerSyncMongo } from './db.js';
 import { CurrentBucket, CurrentDataDocument, SourceKey, SyncRuleDocument } from './models.js';
@@ -49,7 +49,7 @@ export interface MongoBucketBatchOptions {
 }
 
 export class MongoBucketBatch
-  extends 
+  extends BaseObserver<storage.BucketBatchStorageListener>
   implements storage.BucketStorageBatch
 {
   private readonly client: mongo.MongoClient;
@@ -270,7 +270,7 @@ export class MongoBucketBatch
       }
     }
 
-    return resumeBatch;
+    return resumeBatch?.hasData() ? resumeBatch : null;
   }
 
   private saveOperation(
@@ -322,10 +322,7 @@ export class MongoBucketBatch
         existing_buckets = result.buckets;
         existing_lookups = result.lookups;
         if (this.storeCurrentData) {
-          const data = 
-            (result.data as mongo.Binary).buffer,
-            storage.BSON_DESERIALIZE_OPTIONS
-          ) as SqliteRow;
+          const data = deserializeBson((result.data as mongo.Binary).buffer) as SqliteRow;
           after = storage.mergeToast(after!, data);
         }
       }
@@ -610,7 +607,7 @@ export class MongoBucketBatch
 
   async [Symbol.asyncDispose]() {
     await this.session.endSession();
-    super
+    super.clearListeners();
   }
 
   private lastWaitingLogThottled = 0;
package/src/storage/implementation/MongoCompactor.ts
CHANGED
@@ -5,7 +5,6 @@ import { storage, utils } from '@powersync/service-core';
 import { PowerSyncMongo } from './db.js';
 import { BucketDataDocument, BucketDataKey } from './models.js';
 import { cacheKey } from './OperationBatch.js';
-import { safeBulkWrite } from './util.js';
 
 interface CurrentBucketState {
   /** Bucket name */
@@ -33,14 +32,7 @@ interface CurrentBucketState {
 /**
  * Additional options, primarily for testing.
  */
-export interface MongoCompactOptions extends storage.CompactOptions {
-  /** Minimum of 2 */
-  clearBatchLimit?: number;
-  /** Minimum of 1 */
-  moveBatchLimit?: number;
-  /** Minimum of 1 */
-  moveBatchQueryLimit?: number;
-}
+export interface MongoCompactOptions extends storage.CompactOptions {}
 
 const DEFAULT_CLEAR_BATCH_LIMIT = 5000;
 const DEFAULT_MOVE_BATCH_LIMIT = 2000;
@@ -265,7 +257,7 @@ export class MongoCompactor {
   private async flush() {
     if (this.updates.length > 0) {
       logger.info(`Compacting ${this.updates.length} ops`);
-      await 
+      await this.db.bucket_data.bulkWrite(this.updates, {
         // Order is not important.
        // Since checksums are not affected, these operations can happen in any order,
        // and it's fine if the operations are partially applied.