@powersync/service-module-mongodb 0.0.0-dev-20241219091224 → 0.0.0-dev-20241219145106

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -11,7 +11,7 @@ import { MongoIdSequence } from './MongoIdSequence.js';
 import { batchCreateCustomWriteCheckpoints } from './MongoWriteCheckpointAPI.js';
 import { cacheKey, OperationBatch, RecordOperation } from './OperationBatch.js';
 import { PersistedBatch } from './PersistedBatch.js';
-import { BSON_DESERIALIZE_OPTIONS, idPrefixFilter, replicaIdEquals, serializeLookup } from './util.js';
+import { idPrefixFilter } from './util.js';

 /**
  * 15MB
@@ -314,7 +314,10 @@ export class MongoBucketBatch
       existing_buckets = result.buckets;
       existing_lookups = result.lookups;
       if (this.storeCurrentData) {
-        const data = bson.deserialize((result.data as mongo.Binary).buffer, BSON_DESERIALIZE_OPTIONS) as SqliteRow;
+        const data = bson.deserialize(
+          (result.data as mongo.Binary).buffer,
+          storage.BSON_DESERIALIZE_OPTIONS
+        ) as SqliteRow;
         after = storage.mergeToast(after!, data);
       }
     }
@@ -370,7 +373,7 @@ export class MongoBucketBatch
     }

     // 2. Save bucket data
-    if (beforeId != null && (afterId == null || !replicaIdEquals(beforeId, afterId))) {
+    if (beforeId != null && (afterId == null || !storage.replicaIdEquals(beforeId, afterId))) {
       // Source ID updated
       if (sourceTable.syncData) {
         // Delete old record
@@ -476,7 +479,7 @@ export class MongoBucketBatch
         existing_lookups
       });
       new_lookups = paramEvaluated.map((p) => {
-        return serializeLookup(p.lookup);
+        return storage.serializeLookup(p.lookup);
       });
     }
   }
@@ -500,7 +503,7 @@ export class MongoBucketBatch
       };
     }

-    if (afterId == null || !replicaIdEquals(beforeId, afterId)) {
+    if (afterId == null || !storage.replicaIdEquals(beforeId, afterId)) {
       // Either a delete (afterId == null), or replaced the old replication id
       batch.deleteCurrentData(before_key);
     }
@@ -12,7 +12,7 @@ import { BucketDataDocument, BucketDataKey, SourceKey } from './models.js';
 import { MongoBucketBatch } from './MongoBucketBatch.js';
 import { MongoCompactor } from './MongoCompactor.js';
 import { MongoWriteCheckpointAPI } from './MongoWriteCheckpointAPI.js';
-import { BSON_DESERIALIZE_OPTIONS, idPrefixFilter, mapOpEntry, readSingleBatch, serializeLookup } from './util.js';
+import { idPrefixFilter, mapOpEntry, readSingleBatch } from './util.js';

 export class MongoSyncBucketStorage
   extends DisposableObserver<storage.SyncRulesBucketStorageListener>
@@ -219,7 +219,7 @@ export class MongoSyncBucketStorage

   async getParameterSets(checkpoint: utils.OpId, lookups: SqliteJsonValue[][]): Promise<SqliteJsonRow[]> {
     const lookupFilter = lookups.map((lookup) => {
-      return serializeLookup(lookup);
+      return storage.serializeLookup(lookup);
     });
     const rows = await this.db.bucket_parameters
       .aggregate([
@@ -325,7 +325,7 @@ export class MongoSyncBucketStorage

     // Ordered by _id, meaning buckets are grouped together
     for (let rawData of data) {
-      const row = bson.deserialize(rawData, BSON_DESERIALIZE_OPTIONS) as BucketDataDocument;
+      const row = bson.deserialize(rawData, storage.BSON_DESERIALIZE_OPTIONS) as BucketDataDocument;
       const bucket = row._id.b;

       if (currentBatch == null || currentBatch.bucket != bucket || batchSize >= sizeLimit) {
@@ -2,7 +2,6 @@ import { ToastableSqliteRow } from '@powersync/service-sync-rules';
 import * as bson from 'bson';

 import { storage } from '@powersync/service-core';
-import { isUUID } from './util.js';

 /**
  * Maximum number of operations in a batch.
@@ -94,7 +93,7 @@ export class RecordOperation {
  * In-memory cache key - must not be persisted.
  */
 export function cacheKey(table: bson.ObjectId, id: storage.ReplicaId) {
-  if (isUUID(id)) {
+  if (storage.isUUID(id)) {
     return `${table.toHexString()}.${id.toHexString()}`;
   } else if (typeof id == 'string') {
     return `${table.toHexString()}.${id}`;
@@ -15,7 +15,7 @@ import {
   CurrentDataDocument,
   SourceKey
 } from './models.js';
-import { replicaIdToSubkey, safeBulkWrite, serializeLookup } from './util.js';
+import { replicaIdToSubkey, safeBulkWrite } from './util.js';

 /**
  * Maximum size of operations we write in a single transaction.
@@ -164,7 +164,7 @@ export class PersistedBatch {

     // 1. Insert new entries
     for (let result of evaluated) {
-      const binLookup = serializeLookup(result.lookup);
+      const binLookup = storage.serializeLookup(result.lookup);
       const hex = binLookup.toString('base64');
       remaining_lookups.delete(hex);

@@ -1,6 +1,6 @@
+import { storage } from '@powersync/service-core';
 import { configFile } from '@powersync/service-types';
 import * as mongo from 'mongodb';
-
 import * as db from '../../db/db-index.js';
 import { Lock } from '../../locks/MonogLocks.js';
 import {
@@ -14,7 +14,6 @@ import {
   SyncRuleDocument,
   WriteCheckpointDocument
 } from './models.js';
-import { BSON_DESERIALIZE_OPTIONS } from './util.js';

 export interface PowerSyncMongoOptions {
   /**
@@ -46,7 +45,7 @@ export class PowerSyncMongo {
     this.client = client;

     const db = client.db(options?.database, {
-      ...BSON_DESERIALIZE_OPTIONS
+      ...storage.BSON_DESERIALIZE_OPTIONS
     });
     this.db = db;

@@ -20,6 +20,15 @@ export interface SourceKey {
   k: ReplicaId;
 }

+export interface BucketDataKey {
+  /** group_id */
+  g: number;
+  /** bucket name */
+  b: string;
+  /** op_id */
+  o: bigint;
+}
+
 export interface CurrentDataDocument {
   _id: SourceKey;
   data: bson.Binary;
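The BucketDataKey interface is only moved up here, above its first use; the next hunk removes the old copy further down in the same file, with the shape unchanged. For shape only, a key might look like this (all values are hypothetical, including the bucket name):

    const key: BucketDataKey = {
      g: 1,          // group_id
      b: 'global[]', // bucket name
      o: 10000n      // op_id
    };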
@@ -40,19 +49,11 @@ export interface BucketParameterDocument {
   bucket_parameters: Record<string, SqliteJsonValue>[];
 }

-export interface BucketDataKey {
-  /** group_id */
-  g: number;
-  /** bucket name */
-  b: string;
-  /** op_id */
-  o: bigint;
-}
 export interface BucketDataDocument {
   _id: BucketDataKey;
   op: OpType;
   source_table?: bson.ObjectId;
-  source_key?: storage.ReplicaId;
+  source_key?: ReplicaId;
   table?: string;
   row_id?: string;
   checksum: number;
@@ -1,27 +1,10 @@
 import { storage, utils } from '@powersync/service-core';
-import { SqliteJsonValue } from '@powersync/service-sync-rules';
 import * as bson from 'bson';
 import * as crypto from 'crypto';
 import * as mongo from 'mongodb';
 import * as uuid from 'uuid';
 import { BucketDataDocument } from './models.js';

-/**
- * Lookup serialization must be number-agnostic. I.e. normalize numbers, instead of preserving numbers.
- * @param lookup
- */
-
-export function serializeLookup(lookup: SqliteJsonValue[]) {
-  const normalized = lookup.map((value) => {
-    if (typeof value == 'number' && Number.isInteger(value)) {
-      return BigInt(value);
-    } else {
-      return value;
-    }
-  });
-  return new bson.Binary(bson.serialize({ l: normalized }));
-}
-
 export function idPrefixFilter<T>(prefix: Partial<T>, rest: (keyof T)[]): mongo.Condition<T> {
   let filter = {
     $gte: {
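The serializeLookup helper removed here is the same one the earlier hunks now consume as storage.serializeLookup from @powersync/service-core. A minimal sketch of why the normalization matters, reusing the removed implementation (lookup values are hypothetical; assumes bson >= 5, which serializes bigint as int64):

    import * as bson from 'bson';

    // Copy of the removed helper, for illustration only; the module now uses
    // storage.serializeLookup instead.
    function serializeLookup(lookup: (string | number | bigint | null)[]) {
      const normalized = lookup.map((value) =>
        typeof value == 'number' && Number.isInteger(value) ? BigInt(value) : value
      );
      return new bson.Binary(bson.serialize({ l: normalized }));
    }

    // "Number-agnostic": an integer-valued number and a bigint of the same value
    // produce byte-identical lookups (42 would otherwise encode as int32, 42n as int64).
    const a = serializeLookup(['user1', 42]);
    const b = serializeLookup(['user1', 42n]);
    console.log(Buffer.from(a.buffer).equals(Buffer.from(b.buffer))); // true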
@@ -85,11 +68,6 @@ export async function readSingleBatch<T>(cursor: mongo.FindCursor<T>): Promise<{
   }
 }

-export const BSON_DESERIALIZE_OPTIONS: bson.DeserializeOptions = {
-  // use bigint instead of Long
-  useBigInt64: true
-};
-
 export function mapOpEntry(row: BucketDataDocument): utils.OplogEntry {
   if (row.op == 'PUT' || row.op == 'REMOVE') {
     return {
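The removed constant was just { useBigInt64: true }, and call sites now spread storage.BSON_DESERIALIZE_OPTIONS instead. A sketch of what the flag changes, assuming bson >= 5 (where useBigInt64 is available):

    import * as bson from 'bson';

    // With an int64 value that does not fit in 53 bits, default deserialization
    // yields a bson.Long wrapper; useBigInt64 yields a native bigint.
    const bytes = bson.serialize({ o: 2n ** 60n });
    console.log(typeof bson.deserialize(bytes).o);                        // 'object' (bson.Long)
    console.log(typeof bson.deserialize(bytes, { useBigInt64: true }).o); // 'bigint'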
@@ -112,28 +90,8 @@ export function mapOpEntry(row: BucketDataDocument): utils.OplogEntry {
   }
 }

-/**
- * Returns true if two ReplicaId values are the same (serializes to the same BSON value).
- */
-export function replicaIdEquals(a: storage.ReplicaId, b: storage.ReplicaId) {
-  if (a === b) {
-    return true;
-  } else if (typeof a == 'string' && typeof b == 'string') {
-    return a == b;
-  } else if (isUUID(a) && isUUID(b)) {
-    return a.equals(b);
-  } else if (a == null && b == null) {
-    return true;
-  } else if ((b == null && a != null) || (a == null && b != null)) {
-    return false;
-  } else {
-    // There are many possible primitive values, this covers them all
-    return (bson.serialize({ id: a }) as Buffer).equals(bson.serialize({ id: b }));
-  }
-}
-
 export function replicaIdToSubkey(table: bson.ObjectId, id: storage.ReplicaId): string {
-  if (isUUID(id)) {
+  if (storage.isUUID(id)) {
     // Special case for UUID for backwards-compatiblity
     return `${table.toHexString()}/${id.toHexString()}`;
   } else {
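replicaIdEquals likewise moves to the shared storage namespace, with the semantics shown in the removed implementation above. A usage sketch (assuming bson.Binary values are valid ReplicaIds in this storage layer):

    import * as bson from 'bson';
    import { storage } from '@powersync/service-core';

    // UUIDs compare by value via UUID.equals(), not by object identity:
    const a = new bson.UUID('550e8400-e29b-41d4-a716-446655440000');
    const b = new bson.UUID('550e8400-e29b-41d4-a716-446655440000');
    console.log(storage.replicaIdEquals(a, b)); // true

    // Other values fall back to comparing their serialized BSON bytes:
    const bin1 = new bson.Binary(Buffer.from([1, 2, 3]));
    const bin2 = new bson.Binary(Buffer.from([1, 2, 3]));
    console.log(storage.replicaIdEquals(bin1, bin2)); // true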
@@ -143,19 +101,6 @@ export function replicaIdToSubkey(table: bson.ObjectId, id: storage.ReplicaId):
   }
 }

-/**
- * True if this is a bson.UUID.
- *
- * Works even with multiple copies of the bson package.
- */
-export function isUUID(value: any): value is bson.UUID {
-  if (value == null || typeof value != 'object') {
-    return false;
-  }
-  const uuid = value as bson.UUID;
-  return uuid._bsontype == 'Binary' && uuid.sub_type == bson.Binary.SUBTYPE_UUID;
-}
-
 /**
  * Helper function for creating a MongoDB client from consumers of this package
  */
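isUUID is the last of the helpers centralized into storage. A usage sketch based on the removed implementation:

    import * as bson from 'bson';
    import { storage } from '@powersync/service-core';

    // Duck-typed: checks _bsontype and the binary subtype instead of instanceof,
    // so it also recognizes UUIDs created by a second loaded copy of the bson package.
    console.log(storage.isUUID(new bson.UUID()));                   // true
    console.log(storage.isUUID(new bson.Binary(Buffer.alloc(16)))); // false (default subtype)
    console.log(storage.isUUID('not-a-uuid'));                      // false (not an object)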