@powersync/service-module-mongodb-storage 0.6.2 → 0.7.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (28)
  1. package/CHANGELOG.md +27 -0
  2. package/dist/storage/implementation/MongoBucketBatch.d.ts +8 -4
  3. package/dist/storage/implementation/MongoBucketBatch.js +9 -8
  4. package/dist/storage/implementation/MongoBucketBatch.js.map +1 -1
  5. package/dist/storage/implementation/MongoCompactor.js.map +1 -1
  6. package/dist/storage/implementation/MongoStorageProvider.js +3 -1
  7. package/dist/storage/implementation/MongoStorageProvider.js.map +1 -1
  8. package/dist/storage/implementation/MongoSyncBucketStorage.d.ts +4 -4
  9. package/dist/storage/implementation/MongoSyncBucketStorage.js +16 -10
  10. package/dist/storage/implementation/MongoSyncBucketStorage.js.map +1 -1
  11. package/dist/storage/implementation/PersistedBatch.d.ts +2 -2
  12. package/dist/storage/implementation/PersistedBatch.js +10 -2
  13. package/dist/storage/implementation/PersistedBatch.js.map +1 -1
  14. package/dist/storage/implementation/db.d.ts +1 -1
  15. package/dist/storage/implementation/db.js +2 -2
  16. package/dist/storage/implementation/db.js.map +1 -1
  17. package/dist/storage/implementation/util.js +2 -2
  18. package/dist/storage/implementation/util.js.map +1 -1
  19. package/package.json +6 -6
  20. package/src/storage/implementation/MongoBucketBatch.ts +16 -17
  21. package/src/storage/implementation/MongoCompactor.ts +4 -4
  22. package/src/storage/implementation/MongoStorageProvider.ts +3 -1
  23. package/src/storage/implementation/MongoSyncBucketStorage.ts +25 -16
  24. package/src/storage/implementation/PersistedBatch.ts +13 -4
  25. package/src/storage/implementation/db.ts +2 -2
  26. package/src/storage/implementation/util.ts +2 -2
  27. package/test/src/storage_sync.test.ts +3 -3
  28. package/tsconfig.tsbuildinfo +1 -1
package/dist/storage/implementation/util.js CHANGED
@@ -66,7 +66,7 @@ export async function readSingleBatch(cursor) {
 export function mapOpEntry(row) {
     if (row.op == 'PUT' || row.op == 'REMOVE') {
         return {
-            op_id: utils.timestampToOpId(row._id.o),
+            op_id: utils.internalToExternalOpId(row._id.o),
            op: row.op,
            object_type: row.table,
            object_id: row.row_id,
@@ -78,7 +78,7 @@ export function mapOpEntry(row) {
    else {
        // MOVE, CLEAR
        return {
-            op_id: utils.timestampToOpId(row._id.o),
+            op_id: utils.internalToExternalOpId(row._id.o),
            op: row.op,
            checksum: Number(row.checksum)
        };
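The `timestampToOpId` → `internalToExternalOpId` rename runs through this whole release: op ids are `bigint` inside storage and decimal strings on the wire. A minimal sketch of that split, assuming `InternalOpId` is a `bigint` alias and the protocol serializes op ids as strings (the definitions below are illustrative, not the `@powersync/service-core` source):

```ts
// Illustrative shapes only; the real definitions live in @powersync/service-core.
type InternalOpId = bigint; // storage-side op id
type ProtocolOpId = string; // wire-side op id

function internalToExternalOpId(id: InternalOpId): ProtocolOpId {
  // Op ids cross the protocol boundary as decimal strings,
  // since JSON cannot represent 64-bit integers losslessly.
  return id.toString(10);
}

// mapOpEntry above converts at exactly this boundary:
const row = { _id: { o: 123n }, op: 'PUT' as const };
const entry = { op_id: internalToExternalOpId(row._id.o), op: row.op };
// entry.op_id === '123'
```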
package/dist/storage/implementation/util.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"util.js","sourceRoot":"","sources":["../../../src/storage/implementation/util.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AACjC,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAE7B,OAAO,EAAE,KAAK,EAAE,MAAM,gCAAgC,CAAC;AACvD,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,yBAAyB,CAAC;AAEzD,OAAO,EAAE,cAAc,EAAE,MAAM,SAAS,CAAC;AAGzC,MAAM,UAAU,cAAc,CAAI,MAAkB,EAAE,IAAiB;IACrE,IAAI,MAAM,GAAG;QACX,IAAI,EAAE;YACJ,GAAG,MAAM;SACH;QACR,GAAG,EAAE;YACH,GAAG,MAAM;SACH;KACT,CAAC;IAEF,KAAK,IAAI,GAAG,IAAI,IAAI,EAAE,CAAC;QACrB,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;QACrC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;IACtC,CAAC;IAED,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,MAAM,UAAU,gBAAgB,CAAC,MAAc,EAAE,aAAqB;IACpE,MAAM,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;IAC1D,OAAO,GAAG,MAAM,GAAG,aAAa,IAAI,WAAW,EAAE,CAAC;AACpD,CAAC;AAED;;;;;;;;;;GAUG;AACH,MAAM,CAAC,KAAK,UAAU,eAAe,CAAI,MAA2B;IAClE,IAAI,CAAC;QACH,IAAI,IAAS,CAAC;QACd,IAAI,OAAO,GAAG,IAAI,CAAC;QACnB,2CAA2C;QAC3C,MAAM,MAAM,GAAG,MAAM,MAAM,CAAC,OAAO,EAAE,CAAC;QACtC,yCAAyC;QACzC,IAAI,GAAG,MAAM,CAAC,qBAAqB,EAAE,CAAC;QACtC,IAAI,CAAC,MAAM,IAAI,MAAM,CAAC,EAAE,EAAE,MAAM,EAAE,EAAE,CAAC;YACnC,0CAA0C;YAC1C,wEAAwE;YACxE,uEAAuE;YACvE,oCAAoC;YACpC,EAAE;YACF,4EAA4E;YAC5E,2DAA2D;YAC3D,gCAAgC;YAChC,OAAO,GAAG,KAAK,CAAC;QAClB,CAAC;QACD,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC;IAC3B,CAAC;YAAS,CAAC;QACT,iDAAiD;QACjD,uIAAuI;QACvI,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC;YACnB,MAAM,MAAM,CAAC,KAAK,EAAE,CAAC;QACvB,CAAC;IACH,CAAC;AACH,CAAC;AAED,MAAM,UAAU,UAAU,CAAC,GAAuB;IAChD,IAAI,GAAG,CAAC,EAAE,IAAI,KAAK,IAAI,GAAG,CAAC,EAAE,IAAI,QAAQ,EAAE,CAAC;QAC1C,OAAO;YACL,KAAK,EAAE,KAAK,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC;YACvC,EAAE,EAAE,GAAG,CAAC,EAAE;YACV,WAAW,EAAE,GAAG,CAAC,KAAK;YACtB,SAAS,EAAE,GAAG,CAAC,MAAM;YACrB,QAAQ,EAAE,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC;YAC9B,MAAM,EAAE,iBAAiB,CAAC,GAAG,CAAC,YAAa,EAAE,GAAG,CAAC,UAAW,CAAC;YAC7D,IAAI,EAAE,GAAG,CAAC,IAAI;SACf,CAAC;IACJ,CAAC;SAAM,CAAC;QACN,cAAc;QAEd,OAAO;YACL,KAAK,EAAE,KAAK,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC;YACvC,EAAE,EAAE,GAAG,CAAC,EAAE;YACV,QAAQ,EAAE,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC;SAC/B,CAAC;IACJ,CAAC;AACH,CAAC;AAED,MAAM,UAAU,iBAAiB,CAAC,KAAoB,EAAE,EAAqB;IAC3E,IAAI,OAAO,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC;QACvB,mDAAmD;QACnD,OAAO,GAAG,KAAK,CAAC,WAAW,EAAE,IAAI,EAAE,CAAC,WAAW,EAAE,EAAE,CAAC;IACtD,CAAC;SAAM,CAAC;QACN,oCAAoC;QACpC,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,EAAE,KAAK,EAAE,EAAE,EAAE,CAAC,CAAC;QAC3C,OAAO,IAAI,CAAC,EAAE,CAAC,IAAI,EAAE,KAAK,CAAC,YAAY,CAAC,CAAC;IAC3C,CAAC;AACH,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,MAAM,iBAAiB,GAAG,CAAC,GAAW,EAAE,OAAkC,EAAE,EAAE;IACnF,OAAO,IAAI,KAAK,CAAC,WAAW,CAAC,GAAG,EAAE,OAAO,CAAC,CAAC;AAC7C,CAAC,CAAC;AAEF;;GAEG;AACH,MAAM,CAAC,MAAM,oBAAoB,GAAG,CAAC,GAAW,EAAE,IAAa,EAAE,EAAE;IACjE,0EAA0E;IAC1E,oEAAoE;IACpE,MAAM,MAAM,GAAG,iBAAiB,CAAC,GAAG,EAAE;QACpC,gBAAgB,EAAE,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK;QACvC,eAAe,EAAE,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK;QACtC,wBAAwB,EAAE,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK;KAChD,CAAC,CAAC;IACH,OAAO,IAAI,cAAc,CAAC,MAAM,CAAC,CAAC;AACpC,CAAC,CAAC"}
+ {"version":3,"file":"util.js","sourceRoot":"","sources":["../../../src/storage/implementation/util.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AACjC,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAE7B,OAAO,EAAE,KAAK,EAAE,MAAM,gCAAgC,CAAC;AACvD,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,yBAAyB,CAAC;AAEzD,OAAO,EAAE,cAAc,EAAE,MAAM,SAAS,CAAC;AAGzC,MAAM,UAAU,cAAc,CAAI,MAAkB,EAAE,IAAiB;IACrE,IAAI,MAAM,GAAG;QACX,IAAI,EAAE;YACJ,GAAG,MAAM;SACH;QACR,GAAG,EAAE;YACH,GAAG,MAAM;SACH;KACT,CAAC;IAEF,KAAK,IAAI,GAAG,IAAI,IAAI,EAAE,CAAC;QACrB,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;QACrC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;IACtC,CAAC;IAED,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,MAAM,UAAU,gBAAgB,CAAC,MAAc,EAAE,aAAqB;IACpE,MAAM,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;IAC1D,OAAO,GAAG,MAAM,GAAG,aAAa,IAAI,WAAW,EAAE,CAAC;AACpD,CAAC;AAED;;;;;;;;;;GAUG;AACH,MAAM,CAAC,KAAK,UAAU,eAAe,CAAI,MAA2B;IAClE,IAAI,CAAC;QACH,IAAI,IAAS,CAAC;QACd,IAAI,OAAO,GAAG,IAAI,CAAC;QACnB,2CAA2C;QAC3C,MAAM,MAAM,GAAG,MAAM,MAAM,CAAC,OAAO,EAAE,CAAC;QACtC,yCAAyC;QACzC,IAAI,GAAG,MAAM,CAAC,qBAAqB,EAAE,CAAC;QACtC,IAAI,CAAC,MAAM,IAAI,MAAM,CAAC,EAAE,EAAE,MAAM,EAAE,EAAE,CAAC;YACnC,0CAA0C;YAC1C,wEAAwE;YACxE,uEAAuE;YACvE,oCAAoC;YACpC,EAAE;YACF,4EAA4E;YAC5E,2DAA2D;YAC3D,gCAAgC;YAChC,OAAO,GAAG,KAAK,CAAC;QAClB,CAAC;QACD,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC;IAC3B,CAAC;YAAS,CAAC;QACT,iDAAiD;QACjD,uIAAuI;QACvI,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC;YACnB,MAAM,MAAM,CAAC,KAAK,EAAE,CAAC;QACvB,CAAC;IACH,CAAC;AACH,CAAC;AAED,MAAM,UAAU,UAAU,CAAC,GAAuB;IAChD,IAAI,GAAG,CAAC,EAAE,IAAI,KAAK,IAAI,GAAG,CAAC,EAAE,IAAI,QAAQ,EAAE,CAAC;QAC1C,OAAO;YACL,KAAK,EAAE,KAAK,CAAC,sBAAsB,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC;YAC9C,EAAE,EAAE,GAAG,CAAC,EAAE;YACV,WAAW,EAAE,GAAG,CAAC,KAAK;YACtB,SAAS,EAAE,GAAG,CAAC,MAAM;YACrB,QAAQ,EAAE,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC;YAC9B,MAAM,EAAE,iBAAiB,CAAC,GAAG,CAAC,YAAa,EAAE,GAAG,CAAC,UAAW,CAAC;YAC7D,IAAI,EAAE,GAAG,CAAC,IAAI;SACf,CAAC;IACJ,CAAC;SAAM,CAAC;QACN,cAAc;QAEd,OAAO;YACL,KAAK,EAAE,KAAK,CAAC,sBAAsB,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC;YAC9C,EAAE,EAAE,GAAG,CAAC,EAAE;YACV,QAAQ,EAAE,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC;SAC/B,CAAC;IACJ,CAAC;AACH,CAAC;AAED,MAAM,UAAU,iBAAiB,CAAC,KAAoB,EAAE,EAAqB;IAC3E,IAAI,OAAO,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC;QACvB,mDAAmD;QACnD,OAAO,GAAG,KAAK,CAAC,WAAW,EAAE,IAAI,EAAE,CAAC,WAAW,EAAE,EAAE,CAAC;IACtD,CAAC;SAAM,CAAC;QACN,oCAAoC;QACpC,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,EAAE,KAAK,EAAE,EAAE,EAAE,CAAC,CAAC;QAC3C,OAAO,IAAI,CAAC,EAAE,CAAC,IAAI,EAAE,KAAK,CAAC,YAAY,CAAC,CAAC;IAC3C,CAAC;AACH,CAAC;AAED;;GAEG;AACH,MAAM,CAAC,MAAM,iBAAiB,GAAG,CAAC,GAAW,EAAE,OAAkC,EAAE,EAAE;IACnF,OAAO,IAAI,KAAK,CAAC,WAAW,CAAC,GAAG,EAAE,OAAO,CAAC,CAAC;AAC7C,CAAC,CAAC;AAEF;;GAEG;AACH,MAAM,CAAC,MAAM,oBAAoB,GAAG,CAAC,GAAW,EAAE,IAAa,EAAE,EAAE;IACjE,0EAA0E;IAC1E,oEAAoE;IACpE,MAAM,MAAM,GAAG,iBAAiB,CAAC,GAAG,EAAE;QACpC,gBAAgB,EAAE,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK;QACvC,eAAe,EAAE,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK;QACtC,wBAAwB,EAAE,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK;KAChD,CAAC,CAAC;IACH,OAAO,IAAI,cAAc,CAAC,MAAM,CAAC,CAAC;AACpC,CAAC,CAAC"}
package/package.json CHANGED
@@ -2,7 +2,7 @@
   "name": "@powersync/service-module-mongodb-storage",
   "repository": "https://github.com/powersync-ja/powersync-service",
   "types": "dist/index.d.ts",
-  "version": "0.6.2",
+  "version": "0.7.1",
   "main": "dist/index.js",
   "license": "FSL-1.1-Apache-2.0",
   "type": "module",
@@ -28,15 +28,15 @@
     "lru-cache": "^10.2.2",
     "uuid": "^9.0.1",
     "@powersync/lib-services-framework": "0.5.3",
-    "@powersync/service-core": "1.7.2",
+    "@powersync/service-core": "1.8.1",
     "@powersync/service-jsonbig": "0.17.10",
-    "@powersync/service-sync-rules": "0.24.0",
-    "@powersync/service-types": "0.8.0",
-    "@powersync/lib-service-mongodb": "0.4.3"
+    "@powersync/service-sync-rules": "0.24.1",
+    "@powersync/service-types": "0.9.0",
+    "@powersync/lib-service-mongodb": "0.5.1"
   },
   "devDependencies": {
     "@types/uuid": "^9.0.4",
-    "@powersync/service-core-tests": "0.6.1"
+    "@powersync/service-core-tests": "0.7.1"
   },
   "scripts": {
     "build": "tsc -b",
package/src/storage/implementation/MongoBucketBatch.ts CHANGED
@@ -11,7 +11,7 @@ import {
   ReplicationAssertionError,
   ServiceError
 } from '@powersync/lib-services-framework';
-import { deserializeBson, SaveOperationTag, storage, utils } from '@powersync/service-core';
+import { deserializeBson, InternalOpId, SaveOperationTag, storage, utils } from '@powersync/service-core';
 import * as timers from 'node:timers/promises';
 import { PowerSyncMongo } from './db.js';
 import { CurrentBucket, CurrentDataDocument, SourceKey, SyncRuleDocument } from './models.js';
@@ -24,7 +24,7 @@ import { idPrefixFilter } from './util.js';
 /**
  * 15MB
  */
-const MAX_ROW_SIZE = 15 * 1024 * 1024;
+export const MAX_ROW_SIZE = 15 * 1024 * 1024;
 
 // Currently, we can only have a single flush() at a time, since it locks the op_id sequence.
 // While the MongoDB transaction retry mechanism handles this okay, using an in-process Mutex
@@ -39,7 +39,7 @@ export interface MongoBucketBatchOptions {
   groupId: number;
   slotName: string;
   lastCheckpointLsn: string | null;
-  keepaliveOp: string | null;
+  keepaliveOp: InternalOpId | null;
   noCheckpointBeforeLsn: string;
   storeCurrentData: boolean;
   /**
@@ -77,12 +77,12 @@ export class MongoBucketBatch
 
   private no_checkpoint_before_lsn: string;
 
-  private persisted_op: bigint | null = null;
+  private persisted_op: InternalOpId | null = null;
 
   /**
    * For tests only - not for persistence logic.
    */
-  public last_flushed_op: bigint | null = null;
+  public last_flushed_op: InternalOpId | null = null;
 
   constructor(options: MongoBucketBatchOptions) {
     super();
@@ -98,9 +98,7 @@ export class MongoBucketBatch
     this.skipExistingRows = options.skipExistingRows;
     this.batch = new OperationBatch();
 
-    if (options.keepaliveOp) {
-      this.persisted_op = BigInt(options.keepaliveOp);
-    }
+    this.persisted_op = options.keepaliveOp ?? null;
   }
 
   addCustomWriteCheckpoint(checkpoint: storage.BatchedCustomWriteCheckpointOptions): void {
@@ -135,7 +133,7 @@ export class MongoBucketBatch
       return null;
     }
 
-    let last_op: bigint | null = null;
+    let last_op: InternalOpId | null = null;
     let resumeBatch: OperationBatch | null = null;
 
     await this.withReplicationTransaction(`Flushing ${batch.length} ops`, async (session, opSeq) => {
@@ -153,7 +151,7 @@ export class MongoBucketBatch
 
     this.persisted_op = last_op;
     this.last_flushed_op = last_op;
-    return { flushed_op: String(last_op) };
+    return { flushed_op: last_op };
   }
 
   private async replicateBatch(
@@ -776,22 +774,23 @@ export class MongoBucketBatch
   async truncate(sourceTables: storage.SourceTable[]): Promise<storage.FlushedResult | null> {
     await this.flush();
 
-    let last_op: bigint | null = null;
+    let last_op: InternalOpId | null = null;
     for (let table of sourceTables) {
       last_op = await this.truncateSingle(table);
     }
 
     if (last_op) {
       this.persisted_op = last_op;
+      return {
+        flushed_op: last_op
+      };
+    } else {
+      return null;
     }
-
-    return {
-      flushed_op: String(last_op!)
-    };
   }
 
-  async truncateSingle(sourceTable: storage.SourceTable): Promise<bigint> {
-    let last_op: bigint | null = null;
+  async truncateSingle(sourceTable: storage.SourceTable): Promise<InternalOpId> {
+    let last_op: InternalOpId | null = null;
 
     // To avoid too large transactions, we limit the amount of data we delete per transaction.
     // Since we don't use the record data here, we don't have explicit size limits per batch.
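The `truncate()` rewrite above also fixes a return-shape bug: when no table produced an operation, the old `String(last_op!)` would have stringified `null` into the literal `'null'` as `flushed_op`. A hedged sketch of the corrected control flow, with `truncateOne` as a hypothetical stand-in for `truncateSingle`:

```ts
// Sketch only: `truncateOne` stands in for MongoBucketBatch.truncateSingle.
async function truncateAll(
  tables: string[],
  truncateOne: (table: string) => Promise<bigint>
): Promise<{ flushed_op: bigint } | null> {
  let lastOp: bigint | null = null;
  for (const table of tables) {
    lastOp = await truncateOne(table);
  }
  // Old behavior: String(lastOp!) could yield the string 'null' for an
  // empty table list. New behavior: report a flushed op only when
  // something was actually truncated.
  return lastOp != null ? { flushed_op: lastOp } : null;
}
```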
package/src/storage/implementation/MongoCompactor.ts CHANGED
@@ -1,6 +1,6 @@
 import { mongo } from '@powersync/lib-service-mongodb';
 import { logger, ReplicationAssertionError } from '@powersync/lib-services-framework';
-import { storage, utils } from '@powersync/service-core';
+import { InternalOpId, storage, utils } from '@powersync/service-core';
 
 import { PowerSyncMongo } from './db.js';
 import { BucketDataDocument, BucketDataKey } from './models.js';
@@ -12,7 +12,7 @@ interface CurrentBucketState {
   /**
    * Rows seen in the bucket, with the last op_id of each.
    */
-  seen: Map<string, bigint>;
+  seen: Map<string, InternalOpId>;
   /**
    * Estimated memory usage of the seen Map.
    */
@@ -21,7 +21,7 @@ interface CurrentBucketState {
   /**
    * Last (lowest) seen op_id that is not a PUT.
    */
-  lastNotPut: bigint | null;
+  lastNotPut: InternalOpId | null;
 
   /**
    * Number of REMOVE/MOVE operations seen since lastNotPut.
@@ -274,7 +274,7 @@ export class MongoCompactor {
   * @param bucket bucket name
   * @param op op_id of the last non-PUT operation, which will be converted to CLEAR.
   */
-  private async clearBucket(bucket: string, op: bigint) {
+  private async clearBucket(bucket: string, op: InternalOpId) {
     const opFilter = {
       _id: {
         $gte: {
package/src/storage/implementation/MongoStorageProvider.ts CHANGED
@@ -22,7 +22,9 @@ export class MongoStorageProvider implements storage.BucketStorageProvider {
     }
 
     const decodedConfig = MongoStorageConfig.decode(storage as any);
-    const client = lib_mongo.db.createMongoClient(decodedConfig);
+    const client = lib_mongo.db.createMongoClient(decodedConfig, {
+      maxPoolSize: resolvedConfig.storage.max_pool_size ?? 8
+    });
 
     const database = new PowerSyncMongo(client, { database: resolvedConfig.storage.database });
     const factory = new MongoBucketStorage(database, {
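The provider now threads a connection-pool limit from the storage config into the MongoDB client, defaulting to 8 when `max_pool_size` is unset. A minimal sketch of the equivalent plumbing against the stock MongoDB Node.js driver (`createClient` and the config shape are illustrative; the real code goes through `lib_mongo.db.createMongoClient`):

```ts
import { MongoClient } from 'mongodb';

// Illustrative config shape; the service reads max_pool_size from its
// resolved storage configuration.
interface StorageConfig {
  uri: string;
  max_pool_size?: number;
}

function createClient(config: StorageConfig): MongoClient {
  // maxPoolSize caps concurrent connections per client; 8 mirrors the
  // default applied in the diff above.
  return new MongoClient(config.uri, { maxPoolSize: config.max_pool_size ?? 8 });
}
```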
package/src/storage/implementation/MongoSyncBucketStorage.ts CHANGED
@@ -12,6 +12,9 @@ import {
   CHECKPOINT_INVALIDATE_ALL,
   CheckpointChanges,
   GetCheckpointChangesOptions,
+  InternalOpId,
+  internalToExternalOpId,
+  ProtocolOpId,
   ReplicationCheckpoint,
   SourceTable,
   storage,
@@ -119,7 +122,7 @@ export class MongoSyncBucketStorage
       }
     );
     return {
-      checkpoint: utils.timestampToOpId(doc?.last_checkpoint ?? 0n),
+      checkpoint: doc?.last_checkpoint ?? 0n,
       lsn: doc?.last_checkpoint_lsn ?? null
     };
   }
@@ -143,7 +146,7 @@ export class MongoSyncBucketStorage
       slotName: this.slot_name,
       lastCheckpointLsn: checkpoint_lsn,
       noCheckpointBeforeLsn: doc?.no_checkpoint_before ?? options.zeroLSN,
-      keepaliveOp: doc?.keepalive_op ?? null,
+      keepaliveOp: doc?.keepalive_op ? BigInt(doc.keepalive_op) : null,
       storeCurrentData: options.storeCurrentData,
       skipExistingRows: options.skipExistingRows ?? false
     });
@@ -152,7 +155,7 @@ export class MongoSyncBucketStorage
     await callback(batch);
     await batch.flush();
     if (batch.last_flushed_op) {
-      return { flushed_op: String(batch.last_flushed_op) };
+      return { flushed_op: batch.last_flushed_op };
     } else {
       return null;
     }
@@ -249,7 +252,7 @@ export class MongoSyncBucketStorage
     return result!;
   }
 
-  async getParameterSets(checkpoint: utils.OpId, lookups: SqliteJsonValue[][]): Promise<SqliteJsonRow[]> {
+  async getParameterSets(checkpoint: utils.InternalOpId, lookups: SqliteJsonValue[][]): Promise<SqliteJsonRow[]> {
     const lookupFilter = lookups.map((lookup) => {
       return storage.serializeLookup(lookup);
     });
@@ -259,7 +262,7 @@ export class MongoSyncBucketStorage
         $match: {
           'key.g': this.group_id,
           lookup: { $in: lookupFilter },
-          _id: { $lte: BigInt(checkpoint) }
+          _id: { $lte: checkpoint }
         }
       },
       {
@@ -284,8 +287,8 @@ export class MongoSyncBucketStorage
   }
 
   async *getBucketDataBatch(
-    checkpoint: utils.OpId,
-    dataBuckets: Map<string, string>,
+    checkpoint: utils.InternalOpId,
+    dataBuckets: Map<string, InternalOpId>,
     options?: storage.BucketDataBatchOptions
   ): AsyncIterable<storage.SyncBucketDataBatch> {
     if (dataBuckets.size == 0) {
@@ -293,14 +296,17 @@ export class MongoSyncBucketStorage
     }
     let filters: mongo.Filter<BucketDataDocument>[] = [];
 
-    const end = checkpoint ? BigInt(checkpoint) : new bson.MaxKey();
+    if (checkpoint == null) {
+      throw new ServiceAssertionError('checkpoint is null');
+    }
+    const end = checkpoint;
     for (let [name, start] of dataBuckets.entries()) {
       filters.push({
         _id: {
           $gt: {
             g: this.group_id,
             b: name,
-            o: BigInt(start)
+            o: start
           },
           $lte: {
             g: this.group_id,
@@ -347,7 +353,7 @@ export class MongoSyncBucketStorage
 
     let batchSize = 0;
     let currentBatch: utils.SyncBucketData | null = null;
-    let targetOp: bigint | null = null;
+    let targetOp: InternalOpId | null = null;
 
     // Ordered by _id, meaning buckets are grouped together
    for (let rawData of data) {
@@ -355,7 +361,7 @@ export class MongoSyncBucketStorage
       const bucket = row._id.b;
 
       if (currentBatch == null || currentBatch.bucket != bucket || batchSize >= sizeLimit) {
-        let start: string | undefined = undefined;
+        let start: ProtocolOpId | undefined = undefined;
         if (currentBatch != null) {
           if (currentBatch.bucket == bucket) {
             currentBatch.has_more = true;
@@ -369,9 +375,12 @@ export class MongoSyncBucketStorage
           targetOp = null;
         }
 
-        start ??= dataBuckets.get(bucket);
         if (start == null) {
-          throw new ServiceAssertionError(`data for unexpected bucket: ${bucket}`);
+          const startOpId = dataBuckets.get(bucket);
+          if (startOpId == null) {
+            throw new ServiceAssertionError(`data for unexpected bucket: ${bucket}`);
+          }
+          start = internalToExternalOpId(startOpId);
         }
         currentBatch = {
           bucket,
@@ -406,7 +415,7 @@ export class MongoSyncBucketStorage
     }
   }
 
-  async getChecksums(checkpoint: utils.OpId, buckets: string[]): Promise<utils.ChecksumMap> {
+  async getChecksums(checkpoint: utils.InternalOpId, buckets: string[]): Promise<utils.ChecksumMap> {
     return this.checksumCache.getChecksumMap(checkpoint, buckets);
   }
 
@@ -638,7 +647,7 @@ export class MongoSyncBucketStorage
 
   private makeActiveCheckpoint(doc: SyncRuleCheckpointState | null) {
     return {
-      checkpoint: utils.timestampToOpId(doc?.last_checkpoint ?? 0n),
+      checkpoint: doc?.last_checkpoint ?? 0n,
       lsn: doc?.last_checkpoint_lsn ?? null
     };
   }
@@ -755,7 +764,7 @@ export class MongoSyncBucketStorage
   */
  async *watchWriteCheckpoint(options: WatchWriteCheckpointOptions): AsyncIterable<storage.StorageCheckpointUpdate> {
    const { user_id, signal } = options;
-    let lastCheckpoint: utils.OpId | null = null;
+    let lastCheckpoint: utils.InternalOpId | null = null;
    let lastWriteCheckpoint: bigint | null = null;
 
    const iter = wrapWithAbort(this.sharedIter, signal);
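With these signature changes, `getBucketDataBatch`, `getParameterSets`, and `getChecksums` take internal `bigint` op ids instead of decimal strings, while the emitted batches still carry protocol strings such as `next_after`. A hypothetical caller sketch, with `bucketStorage` declared structurally as a placeholder for a `MongoSyncBucketStorage` instance:

```ts
// Hypothetical usage; mirrors the updated tests at the end of this diff.
declare const bucketStorage: {
  getBucketDataBatch(
    checkpoint: bigint,
    dataBuckets: Map<string, bigint>
  ): AsyncIterable<{ batch: { bucket: string; next_after: string } }>;
};

const checkpoint = 100n; // utils.InternalOpId
const starts = new Map<string, bigint>([['global[]', 0n]]);

for await (const chunk of bucketStorage.getBucketDataBatch(checkpoint, starts)) {
  // The wire format is unchanged: next_after is still a ProtocolOpId string,
  // so resuming a bucket means converting back to bigint.
  starts.set(chunk.batch.bucket, BigInt(chunk.batch.next_after));
}
```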
package/src/storage/implementation/PersistedBatch.ts CHANGED
@@ -4,8 +4,8 @@ import { EvaluatedParameters, EvaluatedRow } from '@powersync/service-sync-rules
 import * as bson from 'bson';
 
 import { logger } from '@powersync/lib-services-framework';
-import { storage, utils } from '@powersync/service-core';
-import { currentBucketKey } from './MongoBucketBatch.js';
+import { InternalOpId, storage, utils } from '@powersync/service-core';
+import { currentBucketKey, MAX_ROW_SIZE } from './MongoBucketBatch.js';
 import { MongoIdSequence } from './MongoIdSequence.js';
 import { PowerSyncMongo } from './db.js';
 import {
@@ -52,7 +52,7 @@ export class PersistedBatch {
   /**
    * For debug logging only.
    */
-  debugLastOpId: bigint | null = null;
+  debugLastOpId: InternalOpId | null = null;
 
   /**
    * Very rough estimate of transaction size.
@@ -83,11 +83,20 @@ export class PersistedBatch {
 
     for (const k of options.evaluated) {
       const key = currentBucketKey(k);
-      remaining_buckets.delete(key);
 
       // INSERT
       const recordData = JSONBig.stringify(k.data);
       const checksum = utils.hashData(k.table, k.id, recordData);
+      if (recordData.length > MAX_ROW_SIZE) {
+        // In many cases, the raw data size would have been too large already. But there are cases where
+        // the BSON size is small enough, but the JSON size is too large.
+        // In these cases, we can't store the data, so we skip it, or generate a REMOVE operation if the row
+        // was synced previously.
+        logger.error(`powersync_${this.group_id} Row ${key} too large: ${recordData.length} bytes. Removing.`);
+        continue;
+      }
+
+      remaining_buckets.delete(key);
       this.currentSize += recordData.length + 200;
 
       const op_id = options.op_seq.next();
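This guard skips rows whose JSON serialization exceeds the (now exported) 15MB `MAX_ROW_SIZE`. Moving `remaining_buckets.delete(key)` below the guard is the important part: a skipped key stays in `remaining_buckets`, so the later cleanup pass can emit a REMOVE for a row that was synced previously. A rough sketch of that interaction, under those assumptions (`persistOrSkip` is a hypothetical helper, not the PersistedBatch API):

```ts
const MAX_ROW_SIZE = 15 * 1024 * 1024; // 15MB, as exported from MongoBucketBatch

// Sketch: returns true when the row should be persisted as a PUT.
// Keys left in remainingBuckets are later turned into REMOVE operations.
function persistOrSkip(key: string, recordData: string, remainingBuckets: Set<string>): boolean {
  if (recordData.length > MAX_ROW_SIZE) {
    // The BSON form may fit MongoDB's document limit while the JSON form is
    // still too large to store; skipping leaves `key` in remainingBuckets.
    console.error(`Row ${key} too large: ${recordData.length} bytes. Removing.`);
    return false;
  }
  remainingBuckets.delete(key); // row persisted; no REMOVE needed
  return true;
}
```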
package/src/storage/implementation/db.ts CHANGED
@@ -82,6 +82,6 @@ export class PowerSyncMongo {
   }
 }
 
-export function createPowerSyncMongo(config: MongoStorageConfig) {
-  return new PowerSyncMongo(lib_mongo.createMongoClient(config), { database: config.database });
+export function createPowerSyncMongo(config: MongoStorageConfig, options?: lib_mongo.MongoConnectionOptions) {
+  return new PowerSyncMongo(lib_mongo.createMongoClient(config, options), { database: config.database });
 }
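`createPowerSyncMongo` now forwards optional connection options, which is how the pool-size setting from `MongoStorageProvider` reaches the client. A usage sketch, assuming `MongoConnectionOptions` accepts `maxPoolSize` as shown in the provider diff above (`config` and the declared signature are placeholders):

```ts
declare const config: { database: string }; // placeholder MongoStorageConfig
declare function createPowerSyncMongo(
  config: { database: string },
  options?: { maxPoolSize?: number }
): unknown;

// Options flow straight through to lib_mongo.createMongoClient:
const db = createPowerSyncMongo(config, { maxPoolSize: 8 });
```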
package/src/storage/implementation/util.ts CHANGED
@@ -74,7 +74,7 @@ export async function readSingleBatch<T>(cursor: mongo.FindCursor<T>): Promise<{
 export function mapOpEntry(row: BucketDataDocument): utils.OplogEntry {
   if (row.op == 'PUT' || row.op == 'REMOVE') {
     return {
-      op_id: utils.timestampToOpId(row._id.o),
+      op_id: utils.internalToExternalOpId(row._id.o),
       op: row.op,
       object_type: row.table,
       object_id: row.row_id,
@@ -86,7 +86,7 @@ export function mapOpEntry(row: BucketDataDocument): utils.OplogEntry {
     // MOVE, CLEAR
 
     return {
-      op_id: utils.timestampToOpId(row._id.o),
+      op_id: utils.internalToExternalOpId(row._id.o),
       op: row.op,
       checksum: Number(row.checksum)
     };
package/test/src/storage_sync.test.ts CHANGED
@@ -74,7 +74,7 @@ describe('sync - mongodb', () => {
     const options: storage.BucketDataBatchOptions = {};
 
     const batch1 = await test_utils.fromAsync(
-      bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']]), options)
+      bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]]), options)
     );
     expect(test_utils.getBatchData(batch1)).toEqual([
       { op_id: '1', op: 'PUT', object_id: 'test1', checksum: 2871785649 },
@@ -87,7 +87,7 @@ describe('sync - mongodb', () => {
     });
 
     const batch2 = await test_utils.fromAsync(
-      bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', batch1[0].batch.next_after]]), options)
+      bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', BigInt(batch1[0].batch.next_after)]]), options)
     );
     expect(test_utils.getBatchData(batch2)).toEqual([
       { op_id: '3', op: 'PUT', object_id: 'large2', checksum: 1607205872 }
@@ -99,7 +99,7 @@ describe('sync - mongodb', () => {
     });
 
     const batch3 = await test_utils.fromAsync(
-      bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', batch2[0].batch.next_after]]), options)
+      bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', BigInt(batch2[0].batch.next_after)]]), options)
    );
    expect(test_utils.getBatchData(batch3)).toEqual([
      { op_id: '4', op: 'PUT', object_id: 'test3', checksum: 1359888332 }