@powersync/service-module-mongodb 0.9.1 → 0.10.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. package/CHANGELOG.md +28 -0
  2. package/dist/api/MongoRouteAPIAdapter.d.ts +1 -1
  3. package/dist/api/MongoRouteAPIAdapter.js +1 -1
  4. package/dist/api/MongoRouteAPIAdapter.js.map +1 -1
  5. package/dist/replication/ChangeStream.d.ts +26 -11
  6. package/dist/replication/ChangeStream.js +556 -300
  7. package/dist/replication/ChangeStream.js.map +1 -1
  8. package/dist/replication/ChangeStreamReplicationJob.d.ts +2 -0
  9. package/dist/replication/ChangeStreamReplicationJob.js +13 -5
  10. package/dist/replication/ChangeStreamReplicationJob.js.map +1 -1
  11. package/dist/replication/ChangeStreamReplicator.d.ts +1 -0
  12. package/dist/replication/ChangeStreamReplicator.js +21 -0
  13. package/dist/replication/ChangeStreamReplicator.js.map +1 -1
  14. package/dist/replication/MongoRelation.d.ts +1 -1
  15. package/dist/replication/MongoRelation.js +4 -0
  16. package/dist/replication/MongoRelation.js.map +1 -1
  17. package/dist/replication/MongoSnapshotQuery.d.ts +26 -0
  18. package/dist/replication/MongoSnapshotQuery.js +56 -0
  19. package/dist/replication/MongoSnapshotQuery.js.map +1 -0
  20. package/dist/replication/replication-utils.d.ts +2 -0
  21. package/dist/replication/replication-utils.js +3 -0
  22. package/dist/replication/replication-utils.js.map +1 -1
  23. package/package.json +8 -8
  24. package/src/api/MongoRouteAPIAdapter.ts +1 -1
  25. package/src/replication/ChangeStream.ts +324 -124
  26. package/src/replication/ChangeStreamReplicationJob.ts +14 -6
  27. package/src/replication/ChangeStreamReplicator.ts +23 -0
  28. package/src/replication/MongoRelation.ts +4 -1
  29. package/src/replication/MongoSnapshotQuery.ts +59 -0
  30. package/src/replication/replication-utils.ts +5 -0
  31. package/test/src/change_stream.test.ts +18 -13
  32. package/test/src/change_stream_utils.ts +45 -20
  33. package/test/src/chunked_snapshot.test.ts +153 -0
  34. package/test/src/resume.test.ts +7 -94
  35. package/test/src/resume_token.test.ts +78 -2
  36. package/test/src/resuming_snapshots.test.ts +138 -0
  37. package/test/src/slow_tests.test.ts +4 -18
  38. package/test/src/util.ts +12 -1
  39. package/tsconfig.tsbuildinfo +1 -1
package/src/replication/ChangeStreamReplicationJob.ts
@@ -1,5 +1,4 @@
-import { isMongoServerError } from '@powersync/lib-service-mongodb';
-import { container } from '@powersync/lib-services-framework';
+import { container, logger as defaultLogger } from '@powersync/lib-services-framework';
 import { replication } from '@powersync/service-core';

 import { ChangeStream, ChangeStreamInvalidatedError } from './ChangeStream.js';
@@ -11,18 +10,21 @@ export interface ChangeStreamReplicationJobOptions extends replication.AbstractR

 export class ChangeStreamReplicationJob extends replication.AbstractReplicationJob {
   private connectionFactory: ConnectionManagerFactory;
+  private lastStream: ChangeStream | null = null;

   constructor(options: ChangeStreamReplicationJobOptions) {
     super(options);
     this.connectionFactory = options.connectionFactory;
+    // We use a custom formatter to process the prefix
+    this.logger = defaultLogger.child({ prefix: `[powersync_${this.storage.group_id}] ` });
   }

   async cleanUp(): Promise<void> {
-    // TODO: Implement?
+    // Nothing needed here
   }

   async keepAlive() {
-    // TODO: Implement?
+    // Nothing needed here
   }

   private get slotName() {
@@ -72,14 +74,16 @@ export class ChangeStreamReplicationJob extends replication.AbstractReplicationJ
         abort_signal: this.abortController.signal,
         storage: this.options.storage,
         metrics: this.options.metrics,
-        connections: connectionManager
+        connections: connectionManager,
+        logger: this.logger
       });
+      this.lastStream = stream;
       await stream.replicate();
     } catch (e) {
       if (this.abortController.signal.aborted) {
         return;
       }
-      this.logger.error(`${this.slotName} Replication error`, e);
+      this.logger.error(`Replication error`, e);
       if (e.cause != null) {
         // Without this additional log, the cause may not be visible in the logs.
         this.logger.error(`cause`, e.cause);
@@ -98,4 +102,8 @@ export class ChangeStreamReplicationJob extends replication.AbstractReplicationJ
       await connectionManager.end();
     }
   }
+
+  async getReplicationLagMillis(): Promise<number | undefined> {
+    return this.lastStream?.getReplicationLagMillis();
+  }
 }
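
The notable change here is the per-job child logger: each replication job now tags its output with the storage group it serves, instead of prefixing the slot name manually in each message. A minimal sketch of the pattern, with an illustrative group id (the actual prefix rendering is handled by the framework's custom formatter, as the comment in the diff notes):

```ts
import { logger as defaultLogger } from '@powersync/lib-services-framework';

// Illustrative: a child logger carrying a `prefix` meta field, so interleaved
// output from concurrent replication jobs stays attributable to its job.
const jobLogger = defaultLogger.child({ prefix: '[powersync_1] ' });

// With the custom formatter applied, this is assumed to log as:
//   [powersync_1] Replication error
jobLogger.error('Replication error');
```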
package/src/replication/ChangeStreamReplicator.ts
@@ -3,6 +3,8 @@ import { ChangeStreamReplicationJob } from './ChangeStreamReplicationJob.js';
 import { ConnectionManagerFactory } from './ConnectionManagerFactory.js';
 import { MongoErrorRateLimiter } from './MongoErrorRateLimiter.js';
 import { MongoModule } from '../module/MongoModule.js';
+import { MongoLSN } from '../common/MongoLSN.js';
+import { timestampToDate } from './replication-utils.js';

 export interface ChangeStreamReplicatorOptions extends replication.AbstractReplicatorOptions {
   connectionFactory: ConnectionManagerFactory;
@@ -39,4 +41,25 @@ export class ChangeStreamReplicator extends replication.AbstractReplicator<Chang
   async testConnection() {
     return await MongoModule.testConnection(this.connectionFactory.dbConnectionConfig);
   }
+
+  async getReplicationLagMillis(): Promise<number | undefined> {
+    const lag = await super.getReplicationLagMillis();
+    if (lag != null) {
+      return lag;
+    }
+
+    // Booting or in an error loop. Check last active replication status.
+    // This includes sync rules in an ERROR state.
+    const content = await this.storage.getActiveSyncRulesContent();
+    if (content == null) {
+      return undefined;
+    }
+    // Measure the lag from the last resume token's time
+    const lsn = content.last_checkpoint_lsn;
+    if (lsn == null) {
+      return undefined;
+    }
+    const { timestamp } = MongoLSN.fromSerialized(lsn);
+    return Date.now() - timestampToDate(timestamp).getTime();
+  }
 }
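
In effect, getReplicationLagMillis() now degrades gracefully: while a ChangeStream is running, lag is measured from the live stream; when the replicator is booting or stuck in an error loop, it falls back to the wall-clock age of the last committed checkpoint's cluster time, so the reported lag keeps growing rather than disappearing.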
package/src/replication/MongoRelation.ts
@@ -20,7 +20,10 @@ export function getMongoRelation(source: mongo.ChangeStreamNameSpace): storage.S
 /**
  * For in-memory cache only.
  */
-export function getCacheIdentifier(source: storage.SourceEntityDescriptor): string {
+export function getCacheIdentifier(source: storage.SourceEntityDescriptor | storage.SourceTable): string {
+  if (source instanceof storage.SourceTable) {
+    return `${source.schema}.${source.table}`;
+  }
   return `${source.schema}.${source.name}`;
 }

package/src/replication/MongoSnapshotQuery.ts
@@ -0,0 +1,59 @@
+import { mongo } from '@powersync/lib-service-mongodb';
+import { ReplicationAssertionError } from '@powersync/lib-services-framework';
+import { bson } from '@powersync/service-core';
+
+/**
+ * Performs a collection snapshot query, chunking by ranges of _id.
+ *
+ * This may miss some rows if they are modified during the snapshot query.
+ * In that case, the change stream replication will pick up those rows afterwards.
+ */
+export class ChunkedSnapshotQuery implements AsyncDisposable {
+  lastKey: any = null;
+  private lastCursor: mongo.FindCursor | null = null;
+  private collection: mongo.Collection;
+  private batchSize: number;
+
+  public constructor(options: { collection: mongo.Collection; batchSize: number; key?: Uint8Array | null }) {
+    this.lastKey = options.key ? bson.deserialize(options.key, { useBigInt64: true })._id : null;
+    this.lastCursor = null;
+    this.collection = options.collection;
+    this.batchSize = options.batchSize;
+  }
+
+  async nextChunk(): Promise<{ docs: mongo.Document[]; lastKey: Uint8Array } | { docs: []; lastKey: null }> {
+    let cursor = this.lastCursor;
+    let newCursor = false;
+    if (cursor == null || cursor.closed) {
+      const filter: mongo.Filter<mongo.Document> = this.lastKey == null ? {} : { _id: { $gt: this.lastKey as any } };
+      cursor = this.collection.find(filter, {
+        batchSize: this.batchSize,
+        readConcern: 'majority',
+        limit: this.batchSize,
+        sort: { _id: 1 }
+      });
+      newCursor = true;
+    }
+    const hasNext = await cursor.hasNext();
+    if (!hasNext) {
+      this.lastCursor = null;
+      if (newCursor) {
+        return { docs: [], lastKey: null };
+      } else {
+        return this.nextChunk();
+      }
+    }
+    const docBatch = cursor.readBufferedDocuments();
+    this.lastCursor = cursor;
+    if (docBatch.length == 0) {
+      throw new ReplicationAssertionError(`MongoDB snapshot query returned an empty batch, but hasNext() was true.`);
+    }
+    const lastKey = docBatch[docBatch.length - 1]._id;
+    this.lastKey = lastKey;
+    return { docs: docBatch, lastKey: bson.serialize({ _id: lastKey }) };
+  }
+
+  async [Symbol.asyncDispose](): Promise<void> {
+    await this.lastCursor?.close();
+  }
+}
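
A usage sketch for the new ChunkedSnapshotQuery. The collection handle, batch size, and persistKey checkpoint hook are illustrative assumptions; the nextChunk() contract (a batch of documents plus a BSON-serialized lastKey, or a null lastKey once a fresh cursor comes up empty) matches the class above:

```ts
import { mongo } from '@powersync/lib-service-mongodb';
import { ChunkedSnapshotQuery } from './MongoSnapshotQuery.js';

// Illustrative driver: snapshot a collection in _id-ordered chunks, persisting
// the serialized lastKey after each chunk so an interrupted snapshot can resume
// by passing the stored key back in via the `key` constructor option.
async function snapshotCollection(
  collection: mongo.Collection,
  resumeKey: Uint8Array | null,
  persistKey: (key: Uint8Array) => Promise<void> // hypothetical checkpoint hook
) {
  await using query = new ChunkedSnapshotQuery({ collection, batchSize: 1000, key: resumeKey });
  while (true) {
    const { docs, lastKey } = await query.nextChunk();
    if (lastKey == null) {
      // A fresh cursor returned no documents: the snapshot is complete.
      break;
    }
    // ... replicate `docs` here ...
    await persistKey(lastKey);
  }
}
```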
package/src/replication/replication-utils.ts
@@ -1,6 +1,7 @@
 import { ErrorCode, ServiceError } from '@powersync/lib-services-framework';
 import { MongoManager } from './MongoManager.js';
 import { PostImagesOption } from '../types/types.js';
+import * as bson from 'bson';

 export const CHECKPOINTS_COLLECTION = '_powersync_checkpoints';

@@ -86,3 +87,7 @@ export async function checkSourceConfiguration(connectionManager: MongoManager):
       .toArray();
   }
 }
+
+export function timestampToDate(timestamp: bson.Timestamp) {
+  return new Date(timestamp.getHighBitsUnsigned() * 1000);
+}
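
The new timestampToDate helper relies on the BSON Timestamp layout: the high 32 bits hold seconds since the Unix epoch, and the low 32 bits hold a per-second ordinal counter, so only the high bits map to wall-clock time. A small sketch with illustrative values:

```ts
import * as bson from 'bson';

// Timestamp.fromBits(lowBits, highBits): low = ordinal counter, high = seconds.
const ts = bson.Timestamp.fromBits(7, 1700000000);

// Same conversion as timestampToDate() above: keep the seconds, drop the counter.
console.log(new Date(ts.getHighBitsUnsigned() * 1000).toISOString());
// 2023-11-14T22:13:20.000Z
```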
package/test/src/change_stream.test.ts
@@ -8,8 +8,7 @@ import { test_utils } from '@powersync/service-core-tests';

 import { PostImagesOption } from '@module/types/types.js';
 import { ChangeStreamTestContext } from './change_stream_utils.js';
-import { env } from './env.js';
-import { INITIALIZED_MONGO_STORAGE_FACTORY, INITIALIZED_POSTGRES_STORAGE_FACTORY } from './util.js';
+import { describeWithStorage } from './util.js';

 const BASIC_SYNC_RULES = `
 bucket_definitions:
@@ -18,12 +17,8 @@ bucket_definitions:
       - SELECT _id as id, description FROM "test_data"
 `;

-describe.skipIf(!env.TEST_MONGO_STORAGE)('change stream - mongodb', { timeout: 20_000 }, function () {
-  defineChangeStreamTests(INITIALIZED_MONGO_STORAGE_FACTORY);
-});
-
-describe.skipIf(!env.TEST_POSTGRES_STORAGE)('change stream - postgres', { timeout: 20_000 }, function () {
-  defineChangeStreamTests(INITIALIZED_POSTGRES_STORAGE_FACTORY);
+describe('change stream', () => {
+  describeWithStorage({ timeout: 20_000 }, defineChangeStreamTests);
 });

 function defineChangeStreamTests(factory: storage.TestStorageFactory) {
@@ -97,7 +92,9 @@ bucket_definitions:
   });

   test('updateLookup - no fullDocument available', async () => {
-    await using context = await ChangeStreamTestContext.open(factory, { postImages: PostImagesOption.OFF });
+    await using context = await ChangeStreamTestContext.open(factory, {
+      mongoOptions: { postImages: PostImagesOption.OFF }
+    });
     const { db, client } = context;
     await context.updateSyncRules(`
 bucket_definitions:
@@ -141,7 +138,9 @@ bucket_definitions:
   test('postImages - autoConfigure', async () => {
     // Similar to the above test, but with postImages enabled.
     // This resolves the consistency issue.
-    await using context = await ChangeStreamTestContext.open(factory, { postImages: PostImagesOption.AUTO_CONFIGURE });
+    await using context = await ChangeStreamTestContext.open(factory, {
+      mongoOptions: { postImages: PostImagesOption.AUTO_CONFIGURE }
+    });
     const { db, client } = context;
     await context.updateSyncRules(`
 bucket_definitions:
@@ -187,7 +186,9 @@ bucket_definitions:
   test('postImages - on', async () => {
     // Similar to postImages - autoConfigure, but does not auto-configure.
     // changeStreamPreAndPostImages must be manually configured.
-    await using context = await ChangeStreamTestContext.open(factory, { postImages: PostImagesOption.READ_ONLY });
+    await using context = await ChangeStreamTestContext.open(factory, {
+      mongoOptions: { postImages: PostImagesOption.READ_ONLY }
+    });
     const { db, client } = context;
     await context.updateSyncRules(`
 bucket_definitions:
@@ -432,7 +433,9 @@ bucket_definitions:
   });

   test('postImages - new collection with postImages enabled', async () => {
-    await using context = await ChangeStreamTestContext.open(factory, { postImages: PostImagesOption.AUTO_CONFIGURE });
+    await using context = await ChangeStreamTestContext.open(factory, {
+      mongoOptions: { postImages: PostImagesOption.AUTO_CONFIGURE }
+    });
     const { db } = context;
     await context.updateSyncRules(`
 bucket_definitions:
@@ -463,7 +466,9 @@ bucket_definitions:
   });

   test('postImages - new collection with postImages disabled', async () => {
-    await using context = await ChangeStreamTestContext.open(factory, { postImages: PostImagesOption.AUTO_CONFIGURE });
+    await using context = await ChangeStreamTestContext.open(factory, {
+      mongoOptions: { postImages: PostImagesOption.AUTO_CONFIGURE }
+    });
     const { db } = context;
     await context.updateSyncRules(`
 bucket_definitions:
package/test/src/change_stream_utils.ts
@@ -4,9 +4,11 @@ import {
   createCoreReplicationMetrics,
   initializeCoreReplicationMetrics,
   InternalOpId,
+  OplogEntry,
   ProtocolOpId,
   ReplicationCheckpoint,
-  SyncRulesBucketStorage
+  SyncRulesBucketStorage,
+  TestStorageOptions
 } from '@powersync/service-core';
 import { METRICS_HELPER, test_utils } from '@powersync/service-core-tests';

@@ -29,17 +31,27 @@ export class ChangeStreamTestContext {
   *
   * This configures all the context, and tears it down afterwards.
   */
-  static async open(factory: () => Promise<BucketStorageFactory>, options?: Partial<NormalizedMongoConnectionConfig>) {
-    const f = await factory();
-    const connectionManager = new MongoManager({ ...TEST_CONNECTION_OPTIONS, ...options });
+  static async open(
+    factory: (options: TestStorageOptions) => Promise<BucketStorageFactory>,
+    options?: {
+      doNotClear?: boolean;
+      mongoOptions?: Partial<NormalizedMongoConnectionConfig>;
+      streamOptions?: Partial<ChangeStreamOptions>;
+    }
+  ) {
+    const f = await factory({ doNotClear: options?.doNotClear });
+    const connectionManager = new MongoManager({ ...TEST_CONNECTION_OPTIONS, ...options?.mongoOptions });

-    await clearTestDb(connectionManager.db);
-    return new ChangeStreamTestContext(f, connectionManager);
+    if (!options?.doNotClear) {
+      await clearTestDb(connectionManager.db);
+    }
+    return new ChangeStreamTestContext(f, connectionManager, options?.streamOptions);
   }

   constructor(
     public factory: BucketStorageFactory,
-    public connectionManager: MongoManager
+    public connectionManager: MongoManager,
+    private streamOptions?: Partial<ChangeStreamOptions>
   ) {
     createCoreReplicationMetrics(METRICS_HELPER.metricsEngine);
     initializeCoreReplicationMetrics(METRICS_HELPER.metricsEngine);
@@ -74,6 +86,16 @@ export class ChangeStreamTestContext {
     return this.storage!;
   }

+  async loadNextSyncRules() {
+    const syncRules = await this.factory.getNextSyncRulesContent();
+    if (syncRules == null) {
+      throw new Error(`Next sync rules not available`);
+    }
+
+    this.storage = this.factory.getInstance(syncRules);
+    return this.storage!;
+  }
+
   get walStream() {
     if (this.storage == null) {
       throw new Error('updateSyncRules() first');
@@ -88,7 +110,8 @@ export class ChangeStreamTestContext {
       abort_signal: this.abortController.signal,
       // Specifically reduce this from the default for tests on MongoDB <= 6.0, otherwise it can take
       // a long time to abort the stream.
-      maxAwaitTimeMS: 200
+      maxAwaitTimeMS: this.streamOptions?.maxAwaitTimeMS ?? 200,
+      snapshotChunkLength: this.streamOptions?.snapshotChunkLength
     };
     this._walStream = new ChangeStream(options);
     return this._walStream!;
@@ -122,23 +145,25 @@ export class ChangeStreamTestContext {
     return test_utils.fromAsync(this.storage!.getBucketDataBatch(checkpoint, map));
   }

-  async getBucketData(
-    bucket: string,
-    start?: ProtocolOpId | InternalOpId | undefined,
-    options?: { timeout?: number; limit?: number; chunkLimitBytes?: number }
-  ) {
+  async getBucketData(bucket: string, start?: ProtocolOpId | InternalOpId | undefined, options?: { timeout?: number }) {
     start ??= 0n;
     if (typeof start == 'string') {
       start = BigInt(start);
     }
-    let checkpoint = await this.getCheckpoint(options);
+    const checkpoint = await this.getCheckpoint(options);
     const map = new Map<string, InternalOpId>([[bucket, start]]);
-    const batch = this.storage!.getBucketDataBatch(checkpoint, map, {
-      limit: options?.limit,
-      chunkLimitBytes: options?.chunkLimitBytes
-    });
-    const batches = await test_utils.fromAsync(batch);
-    return batches[0]?.chunkData.data ?? [];
+    let data: OplogEntry[] = [];
+    while (true) {
+      const batch = this.storage!.getBucketDataBatch(checkpoint, map);
+
+      const batches = await test_utils.fromAsync(batch);
+      data = data.concat(batches[0]?.chunkData.data ?? []);
+      if (batches.length == 0 || !batches[0]!.chunkData.has_more) {
+        break;
+      }
+      map.set(bucket, BigInt(batches[0]!.chunkData.next_after));
+    }
+    return data;
   }

   async getChecksums(buckets: string[], options?: { timeout?: number }) {
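
Note the design change in getBucketData: instead of returning only the first storage batch (with limit and chunkLimitBytes knobs), it now follows chunkData.has_more, advancing the bucket position to next_after on each iteration, so tests always observe the complete bucket contents across chunk boundaries.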
package/test/src/chunked_snapshot.test.ts
@@ -0,0 +1,153 @@
+import { mongo } from '@powersync/lib-service-mongodb';
+import { reduceBucket, TestStorageFactory } from '@powersync/service-core';
+import { METRICS_HELPER } from '@powersync/service-core-tests';
+import { JSONBig } from '@powersync/service-jsonbig';
+import { SqliteJsonValue } from '@powersync/service-sync-rules';
+import * as timers from 'timers/promises';
+import { describe, expect, test } from 'vitest';
+import { ChangeStreamTestContext } from './change_stream_utils.js';
+import { describeWithStorage } from './util.js';
+
+describe('chunked snapshots', () => {
+  describeWithStorage({ timeout: 120_000 }, defineBatchTests);
+});
+
+function defineBatchTests(factory: TestStorageFactory) {
+  // This is not as sensitive to the id type as postgres, but we still test a couple of cases
+  test('chunked snapshot (int32)', async () => {
+    await testChunkedSnapshot({
+      generateId(i) {
+        return i;
+      },
+      idToSqlite(id: number) {
+        return BigInt(id);
+      }
+    });
+  });
+
+  test('chunked snapshot (Timestamp)', async () => {
+    await testChunkedSnapshot({
+      generateId(i) {
+        return mongo.Timestamp.fromBits(Math.floor(i / 1000), i % 1000);
+      },
+      idToSqlite(id: mongo.Timestamp) {
+        return id.toBigInt();
+      }
+    });
+  });
+
+  test('chunked snapshot (compound)', async () => {
+    await testChunkedSnapshot({
+      generateId(i) {
+        return { a: Math.floor(i / 100), b: i % 100 };
+      },
+      idToSqlite(id: any) {
+        return JSON.stringify(id);
+      }
+    });
+  });
+
+  test('chunked snapshot (float)', async () => {
+    await testChunkedSnapshot({
+      generateId(i) {
+        // Floating-point operations are not exact, but it should be consistent at least
+        return i / Math.PI;
+      },
+      idToSqlite(id: any) {
+        return id;
+      }
+    });
+  });
+
+  async function testChunkedSnapshot(options: {
+    generateId: (i: number) => any;
+    idToSqlite?: (id: any) => SqliteJsonValue;
+  }) {
+    // This is not quite as much of an edge case as with Postgres. We do still test that
+    // updates applied while replicating are applied correctly.
+    const idToSqlite = options.idToSqlite ?? ((n) => n);
+    const idToString = (id: any) => String(idToSqlite(id));
+
+    await using context = await ChangeStreamTestContext.open(factory, {
+      // We need to use a smaller chunk size here, so that we can run a query in between chunks
+      streamOptions: { snapshotChunkLength: 100 }
+    });
+
+    await context.updateSyncRules(`bucket_definitions:
+      global:
+        data:
+          - SELECT _id as id, description FROM test_data`);
+    const { db } = context;
+
+    let batch = db.collection('test_data').initializeUnorderedBulkOp();
+
+    // 1. Start with 2k rows...
+    for (let i = 1; i <= 2000; i++) {
+      batch.insert({ _id: options.generateId(i), description: 'foo' });
+    }
+    await batch.execute();
+
+    // 2. Replicate one batch of rows
+    // Our "stopping point" here is not quite deterministic.
+    const p = context.replicateSnapshot();
+
+    const stopAfter = 100;
+    const startRowCount = (await METRICS_HELPER.getMetricValueForTests('powersync_rows_replicated_total')) ?? 0;
+
+    while (true) {
+      const count =
+        ((await METRICS_HELPER.getMetricValueForTests('powersync_rows_replicated_total')) ?? 0) - startRowCount;
+
+      if (count >= stopAfter) {
+        break;
+      }
+      await timers.setTimeout(1);
+    }
+
+    // 3. Update some records
+    const idA = options.generateId(2000);
+    const idB = options.generateId(1);
+    await db.collection('test_data').updateOne({ _id: idA }, { $set: { description: 'bar' } });
+    await db.collection('test_data').updateOne({ _id: idB }, { $set: { description: 'baz' } });

+    // 4. Delete
+    const idC = options.generateId(1999);
+    await db.collection('test_data').deleteOne({ _id: idC });
+
+    // 5. Insert
+    const idD = options.generateId(2001);
+    await db.collection('test_data').insertOne({ _id: idD, description: 'new' });
+
+    // 6. Replicate the rest of the table.
+    await p;
+
+    context.startStreaming();
+
+    const data = await context.getBucketData('global[]');
+    const reduced = reduceBucket(data);
+
+    expect(reduced.find((row) => row.object_id == idToString(idA))?.data).toEqual(
+      JSONBig.stringify({
+        id: idToSqlite(idA),
+        description: 'bar'
+      })
+    );
+
+    expect(reduced.find((row) => row.object_id == idToString(idB))?.data).toEqual(
+      JSONBig.stringify({
+        id: idToSqlite(idB),
+        description: 'baz'
+      })
+    );
+
+    expect(reduced.find((row) => row.object_id == idToString(idC))).toBeUndefined();
+
+    expect(reduced.find((row) => row.object_id == idToString(idD))?.data).toEqual(
+      JSONBig.stringify({
+        id: idToSqlite(idD),
+        description: 'new'
+      })
+    );
+    expect(reduced.length).toEqual(2001);
+  }
+}
package/test/src/resume.test.ts
@@ -1,96 +1,14 @@
-import { MongoLSN, ZERO_LSN } from '@module/common/MongoLSN.js';
-
+import { ChangeStreamInvalidatedError } from '@module/replication/ChangeStream.js';
 import { MongoManager } from '@module/replication/MongoManager.js';
 import { normalizeConnectionConfig } from '@module/types/types.js';
-import { isMongoServerError, mongo } from '@powersync/lib-service-mongodb';
 import { BucketStorageFactory, TestStorageOptions } from '@powersync/service-core';
-import { describe, expect, test, vi } from 'vitest';
+import { describe, expect, test } from 'vitest';
 import { ChangeStreamTestContext } from './change_stream_utils.js';
 import { env } from './env.js';
-import { INITIALIZED_MONGO_STORAGE_FACTORY, INITIALIZED_POSTGRES_STORAGE_FACTORY } from './util.js';
-import { ChangeStreamInvalidatedError } from '@module/replication/ChangeStream.js';
-
-describe('mongo lsn', () => {
-  test('LSN with resume tokens should be comparable', () => {
-    // Values without a resume token should be comparable
-    expect(
-      new MongoLSN({
-        timestamp: mongo.Timestamp.fromNumber(1)
-      }).comparable <
-        new MongoLSN({
-          timestamp: mongo.Timestamp.fromNumber(10)
-        }).comparable
-    ).true;
-
-    // Values with resume tokens should correctly compare
-    expect(
-      new MongoLSN({
-        timestamp: mongo.Timestamp.fromNumber(1),
-        resume_token: { _data: 'resume1' }
-      }).comparable <
-        new MongoLSN({
-          timestamp: mongo.Timestamp.fromNumber(10),
-          resume_token: { _data: 'resume2' }
-        }).comparable
-    ).true;
-
-    // The resume token should not affect comparison
-    expect(
-      new MongoLSN({
-        timestamp: mongo.Timestamp.fromNumber(1),
-        resume_token: { _data: '2' }
-      }).comparable <
-        new MongoLSN({
-          timestamp: mongo.Timestamp.fromNumber(10),
-          resume_token: { _data: '1' }
-        }).comparable
-    ).true;
-
-    // Resume token should not be required for comparison
-    expect(
-      new MongoLSN({
-        timestamp: mongo.Timestamp.fromNumber(10),
-        resume_token: { _data: '2' }
-      }).comparable > // Switching the order to test this case
-        new MongoLSN({
-          timestamp: mongo.Timestamp.fromNumber(9)
-        }).comparable
-    ).true;
+import { describeWithStorage } from './util.js';

-    // Comparison should be backwards compatible with old LSNs
-    expect(
-      new MongoLSN({
-        timestamp: mongo.Timestamp.fromNumber(10),
-        resume_token: { _data: '2' }
-      }).comparable > ZERO_LSN
-    ).true;
-    expect(
-      new MongoLSN({
-        timestamp: mongo.Timestamp.fromNumber(10),
-        resume_token: { _data: '2' }
-      }).comparable >
-        new MongoLSN({
-          timestamp: mongo.Timestamp.fromNumber(1)
-        }).comparable.split('|')[0] // Simulate an old LSN
-    ).true;
-    expect(
-      new MongoLSN({
-        timestamp: mongo.Timestamp.fromNumber(1),
-        resume_token: { _data: '2' }
-      }).comparable <
-        new MongoLSN({
-          timestamp: mongo.Timestamp.fromNumber(10)
-        }).comparable.split('|')[0] // Simulate an old LSN
-    ).true;
-  });
-});
-
-describe.skipIf(!env.TEST_MONGO_STORAGE)('MongoDB resume - mongo storage', () => {
-  defineResumeTest(INITIALIZED_MONGO_STORAGE_FACTORY);
-});
-
-describe.skipIf(!env.TEST_POSTGRES_STORAGE)('MongoDB resume - postgres storage', () => {
-  defineResumeTest(INITIALIZED_POSTGRES_STORAGE_FACTORY);
+describe('mongodb resuming replication', () => {
+  describeWithStorage({}, defineResumeTest);
 });

 function defineResumeTest(factoryGenerator: (options?: TestStorageOptions) => Promise<BucketStorageFactory>) {
@@ -112,13 +30,7 @@ function defineResumeTest(factoryGenerator: (options?: TestStorageOptions) => Pr
     await collection.insertOne({ description: 'test1', num: 1152921504606846976n });

     // Wait for the item above to be replicated. The commit should store a resume token.
-    await vi.waitFor(
-      async () => {
-        const checkpoint = await context.storage?.getCheckpoint();
-        expect(MongoLSN.fromSerialized(checkpoint!.lsn!).resumeToken).exist;
-      },
-      { timeout: 5000 }
-    );
+    await context.getCheckpoint();

     // Done with this context for now
     await context.dispose();
@@ -145,6 +57,7 @@ function defineResumeTest(factoryGenerator: (options?: TestStorageOptions) => Pr
     const activeContent = await factory.getActiveSyncRulesContent();
     context2.storage = factory.getInstance(activeContent!);

+    // If this test times out, it likely didn't throw the expected error here.
     const error = await context2.startStreaming().catch((ex) => ex);
     // The ChangeStreamReplicationJob will detect this and throw a ChangeStreamInvalidatedError
     expect(error).toBeInstanceOf(ChangeStreamInvalidatedError);