@powersync/service-module-mongodb 0.9.1 → 0.10.0

Files changed (39)
  1. package/CHANGELOG.md +21 -0
  2. package/dist/api/MongoRouteAPIAdapter.d.ts +1 -1
  3. package/dist/api/MongoRouteAPIAdapter.js +1 -1
  4. package/dist/api/MongoRouteAPIAdapter.js.map +1 -1
  5. package/dist/replication/ChangeStream.d.ts +26 -11
  6. package/dist/replication/ChangeStream.js +556 -300
  7. package/dist/replication/ChangeStream.js.map +1 -1
  8. package/dist/replication/ChangeStreamReplicationJob.d.ts +2 -0
  9. package/dist/replication/ChangeStreamReplicationJob.js +13 -5
  10. package/dist/replication/ChangeStreamReplicationJob.js.map +1 -1
  11. package/dist/replication/ChangeStreamReplicator.d.ts +1 -0
  12. package/dist/replication/ChangeStreamReplicator.js +21 -0
  13. package/dist/replication/ChangeStreamReplicator.js.map +1 -1
  14. package/dist/replication/MongoRelation.d.ts +1 -1
  15. package/dist/replication/MongoRelation.js +4 -0
  16. package/dist/replication/MongoRelation.js.map +1 -1
  17. package/dist/replication/MongoSnapshotQuery.d.ts +26 -0
  18. package/dist/replication/MongoSnapshotQuery.js +56 -0
  19. package/dist/replication/MongoSnapshotQuery.js.map +1 -0
  20. package/dist/replication/replication-utils.d.ts +2 -0
  21. package/dist/replication/replication-utils.js +3 -0
  22. package/dist/replication/replication-utils.js.map +1 -1
  23. package/package.json +8 -8
  24. package/src/api/MongoRouteAPIAdapter.ts +1 -1
  25. package/src/replication/ChangeStream.ts +324 -124
  26. package/src/replication/ChangeStreamReplicationJob.ts +14 -6
  27. package/src/replication/ChangeStreamReplicator.ts +23 -0
  28. package/src/replication/MongoRelation.ts +4 -1
  29. package/src/replication/MongoSnapshotQuery.ts +59 -0
  30. package/src/replication/replication-utils.ts +5 -0
  31. package/test/src/change_stream.test.ts +18 -13
  32. package/test/src/change_stream_utils.ts +45 -20
  33. package/test/src/chunked_snapshot.test.ts +153 -0
  34. package/test/src/resume.test.ts +7 -94
  35. package/test/src/resume_token.test.ts +78 -2
  36. package/test/src/resuming_snapshots.test.ts +138 -0
  37. package/test/src/slow_tests.test.ts +4 -18
  38. package/test/src/util.ts +12 -1
  39. package/tsconfig.tsbuildinfo +1 -1
package/test/src/resume_token.test.ts CHANGED
@@ -1,5 +1,6 @@
-import { parseResumeTokenTimestamp } from '@module/common/MongoLSN.js';
-import { describe, expect, it } from 'vitest';
+import { MongoLSN, parseResumeTokenTimestamp, ZERO_LSN } from '@module/common/MongoLSN.js';
+import { mongo } from '@powersync/lib-service-mongodb';
+import { describe, expect, it, test } from 'vitest';
 
 describe('parseResumeTokenTimestamp', () => {
   it('parses a valid resume token (1)', () => {
@@ -33,3 +34,78 @@ describe('parseResumeTokenTimestamp', () => {
     expect(() => parseResumeTokenTimestamp(resumeToken)).toThrowError(/^Invalid resume token/);
   });
 });
+
+describe('mongo lsn', () => {
+  test('LSN with resume tokens should be comparable', () => {
+    // Values without a resume token should be comparable
+    expect(
+      new MongoLSN({
+        timestamp: mongo.Timestamp.fromNumber(1)
+      }).comparable <
+        new MongoLSN({
+          timestamp: mongo.Timestamp.fromNumber(10)
+        }).comparable
+    ).toBe(true);
+
+    // Values with resume tokens should correctly compare
+    expect(
+      new MongoLSN({
+        timestamp: mongo.Timestamp.fromNumber(1),
+        resume_token: { _data: 'resume1' }
+      }).comparable <
+        new MongoLSN({
+          timestamp: mongo.Timestamp.fromNumber(10),
+          resume_token: { _data: 'resume2' }
+        }).comparable
+    ).toBe(true);
+
+    // The resume token should not affect comparison
+    expect(
+      new MongoLSN({
+        timestamp: mongo.Timestamp.fromNumber(1),
+        resume_token: { _data: '2' }
+      }).comparable <
+        new MongoLSN({
+          timestamp: mongo.Timestamp.fromNumber(10),
+          resume_token: { _data: '1' }
+        }).comparable
+    ).toBe(true);
+
+    // Resume token should not be required for comparison
+    expect(
+      new MongoLSN({
+        timestamp: mongo.Timestamp.fromNumber(10),
+        resume_token: { _data: '2' }
+      }).comparable > // Switching the order to test this case
+        new MongoLSN({
+          timestamp: mongo.Timestamp.fromNumber(9)
+        }).comparable
+    ).toBe(true);
+
+    // Comparison should be backwards compatible with old LSNs
+    expect(
+      new MongoLSN({
+        timestamp: mongo.Timestamp.fromNumber(10),
+        resume_token: { _data: '2' }
+      }).comparable > ZERO_LSN
+    ).toBe(true);
+    expect(
+      new MongoLSN({
+        timestamp: mongo.Timestamp.fromNumber(10),
+        resume_token: { _data: '2' }
+      }).comparable >
+        new MongoLSN({
+          timestamp: mongo.Timestamp.fromNumber(1)
+        }).comparable.split('|')[0] // Simulate an old LSN
+    ).toBe(true);
+    expect(
+      new MongoLSN({
+        timestamp: mongo.Timestamp.fromNumber(1),
+        resume_token: { _data: '2' }
+      }).comparable <
+        new MongoLSN({
+          timestamp: mongo.Timestamp.fromNumber(10)
+        }).comparable.split('|')[0] // Simulate an old LSN
+    ).toBe(true);
+  });
+});
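The assertions above all hinge on one property of the new `MongoLSN.comparable` string: plain lexicographic comparison must agree with timestamp order, while the resume token rides along after a `|` separator and never influences ordering (which is why `.split('|')[0]` can simulate an old-format LSN, and why comparison against `ZERO_LSN` still works). A minimal sketch of an encoding with that property; the padding width and layout here are assumptions, and the real encoding lives in `@module/common/MongoLSN.js`:

```ts
// Hypothetical sketch, not the actual MongoLSN implementation.
function comparableLsn(timestamp: number, resumeTokenData?: string): string {
  // Fixed-width prefix: lexicographic order on the string == numeric order on the timestamp.
  const prefix = String(timestamp).padStart(16, '0');
  // The resume token sits after '|', so it can never flip a comparison the prefix
  // already decided, and stripping it yields an old-format LSN.
  return resumeTokenData == null ? prefix : `${prefix}|${resumeTokenData}`;
}

const a = comparableLsn(1) < comparableLsn(10); // true
const b = comparableLsn(1, 'resume1') < comparableLsn(10, 'resume2'); // true: decided by the prefix
const c = comparableLsn(10, '2') > comparableLsn(9); // true: token optional on either side
const d = comparableLsn(10, '2') > comparableLsn(1).split('|')[0]; // true: backwards compatible
console.log(a, b, c, d);
```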
package/test/src/resuming_snapshots.test.ts ADDED
@@ -0,0 +1,138 @@
+import { describe, expect, test } from 'vitest';
+import { env } from './env.js';
+import { describeWithStorage } from './util.js';
+import { TestStorageFactory } from '@powersync/service-core';
+import { METRICS_HELPER } from '@powersync/service-core-tests';
+import { ReplicationMetric } from '@powersync/service-types';
+import * as timers from 'node:timers/promises';
+import { ChangeStreamTestContext } from './change_stream_utils.js';
+
+describe.skipIf(!(env.CI || env.SLOW_TESTS))('batch replication', function () {
+  describeWithStorage({ timeout: 240_000 }, function (factory) {
+    test('resuming initial replication (1)', async () => {
+      // Stop early - likely to not include deleted row in first replication attempt.
+      await testResumingReplication(factory, 2000);
+    });
+    test('resuming initial replication (2)', async () => {
+      // Stop late - likely to include deleted row in first replication attempt.
+      await testResumingReplication(factory, 8000);
+    });
+  });
+});
+
+async function testResumingReplication(factory: TestStorageFactory, stopAfter: number) {
+  // This tests interrupting and then resuming initial replication.
+  // We interrupt replication after test_data1 has fully replicated, and
+  // test_data2 has partially replicated.
+  // This test relies on internal behavior that is not 100% deterministic:
+  // 1. We attempt to abort initial replication once a certain number of
+  //    rows have been replicated, but this is not exact. Our only requirement
+  //    is that we have not fully replicated test_data2 yet.
+  // 2. Order of replication is not deterministic, so which specific rows
+  //    have been / have not been replicated at that point is not deterministic.
+  //    We do allow for some variation in the test results to account for this.
+
+  await using context = await ChangeStreamTestContext.open(factory, { streamOptions: { snapshotChunkLength: 1000 } });
+
+  await context.updateSyncRules(`bucket_definitions:
+  global:
+    data:
+      - SELECT _id as id, description FROM test_data1
+      - SELECT _id as id, description FROM test_data2`);
+  const { db } = context;
+
+  let batch = db.collection('test_data1').initializeUnorderedBulkOp();
+  for (let i = 1; i <= 1000; i++) {
+    batch.insert({ _id: i, description: 'foo' });
+  }
+  await batch.execute();
+  batch = db.collection('test_data2').initializeUnorderedBulkOp();
+  for (let i = 1; i <= 10000; i++) {
+    batch.insert({ _id: i, description: 'foo' });
+  }
+  await batch.execute();
+
+  const p = context.replicateSnapshot();
+
+  let done = false;
+
+  const startRowCount = (await METRICS_HELPER.getMetricValueForTests(ReplicationMetric.ROWS_REPLICATED)) ?? 0;
+  try {
+    (async () => {
+      while (!done) {
+        const count =
+          ((await METRICS_HELPER.getMetricValueForTests(ReplicationMetric.ROWS_REPLICATED)) ?? 0) - startRowCount;
+
+        if (count >= stopAfter) {
+          break;
+        }
+        await timers.setTimeout(1);
+      }
+      // This interrupts initial replication
+      await context.dispose();
+    })();
+    // This confirms that initial replication was interrupted
+    await expect(p).rejects.toThrowError();
+    done = true;
+  } finally {
+    done = true;
+  }
+
+  // Bypass the usual "clear db on factory open" step.
+  await using context2 = await ChangeStreamTestContext.open(factory, {
+    doNotClear: true,
+    streamOptions: { snapshotChunkLength: 1000 }
+  });
+
+  // This delete should be using one of the ids already replicated
+  await db.collection('test_data2').deleteOne({ _id: 1 as any });
+  await db.collection('test_data2').updateOne({ _id: 2 as any }, { $set: { description: 'update1' } });
+  await db.collection('test_data2').insertOne({ _id: 10001 as any, description: 'insert1' });
+
+  await context2.loadNextSyncRules();
+  await context2.replicateSnapshot();
+
+  context2.startStreaming();
+  const data = await context2.getBucketData('global[]', undefined, {});
+
+  const deletedRowOps = data.filter((row) => row.object_type == 'test_data2' && row.object_id === '1');
+  const updatedRowOps = data.filter((row) => row.object_type == 'test_data2' && row.object_id === '2');
+  const insertedRowOps = data.filter((row) => row.object_type == 'test_data2' && row.object_id === '10001');
+
+  if (deletedRowOps.length != 0) {
+    // The deleted row was part of the first replication batch,
+    // so it is removed by streaming replication.
+    expect(deletedRowOps.length).toEqual(2);
+    expect(deletedRowOps[1].op).toEqual('REMOVE');
+  } else {
+    // The deleted row was not part of the first replication batch,
+    // so it's not in the resulting ops at all.
+  }
+
+  expect(updatedRowOps.length).toEqual(2);
+  // description for the first op could be 'foo' or 'update1'.
+  // We only test the final version.
+  expect(JSON.parse(updatedRowOps[1].data as string).description).toEqual('update1');
+
+  expect(insertedRowOps.length).toEqual(2);
+  expect(JSON.parse(insertedRowOps[0].data as string).description).toEqual('insert1');
+  expect(JSON.parse(insertedRowOps[1].data as string).description).toEqual('insert1');
+
+  // 1000 of test_data1 during first replication attempt.
+  // N >= 1000 of test_data2 during first replication attempt.
+  // 10000 - N - 1 + 1 of test_data2 during second replication attempt.
+  // An additional update during streaming replication (2x total for this row).
+  // An additional insert during streaming replication (2x total for this row).
+  // If the deleted row was part of the first replication batch, it's removed by streaming replication.
+  // This adds 2 ops.
+  // We expect this to be 11002 for stopAfter: 2000, and 11004 for stopAfter: 8000.
+  // However, this is not deterministic.
+  const expectedCount = 11002 + deletedRowOps.length;
+  expect(data.length).toEqual(expectedCount);
+
+  const replicatedCount =
+    ((await METRICS_HELPER.getMetricValueForTests(ReplicationMetric.ROWS_REPLICATED)) ?? 0) - startRowCount;
+
+  // With resumable replication, there should be no need to re-replicate anything.
+  expect(replicatedCount).toEqual(expectedCount);
+}
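Two things are worth unpacking from this new test. First, the op-count arithmetic: 1000 snapshot PUTs for test_data1 plus 10000 for test_data2, plus one extra streaming PUT each for the updated row (_id: 2) and the inserted row (_id: 10001), gives the 11002 baseline; the deleted row contributes 2 further ops (its snapshot PUT plus a streaming REMOVE) only when it made it into the first batch, hence `11002 + deletedRowOps.length`. Second, the resumability itself comes from the new chunked snapshot machinery (`MongoSnapshotQuery.ts`, exercised directly by `chunked_snapshot.test.ts` in the file list): reading each collection in `_id`-ordered chunks of `snapshotChunkLength` documents lets a restart continue after the last persisted `_id` instead of rescanning. A rough sketch of that idea, with hypothetical names rather than the module's actual API:

```ts
import { mongo } from '@powersync/lib-service-mongodb';

// Hypothetical sketch of _id-paginated snapshotting; names and signature are
// illustrative, not the shipped implementation in src/replication/MongoSnapshotQuery.ts.
async function* chunkedSnapshot(
  collection: mongo.Collection,
  chunkLength: number,
  resumeAfterId?: any // last _id persisted before an interruption, if any
): AsyncGenerator<mongo.Document[]> {
  let lastId = resumeAfterId;
  while (true) {
    const filter = lastId == null ? {} : { _id: { $gt: lastId } };
    // A stable _id sort makes (filter, sort, limit) a cursor that can be
    // re-created from lastId after a crash, instead of restarting from scratch.
    const chunk = await collection.find(filter).sort({ _id: 1 }).limit(chunkLength).toArray();
    if (chunk.length === 0) return;
    yield chunk;
    lastId = chunk[chunk.length - 1]._id;
  }
}
```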
package/test/src/slow_tests.test.ts CHANGED
@@ -6,24 +6,10 @@ import { storage } from '@powersync/service-core';
 
 import { ChangeStreamTestContext, setSnapshotHistorySeconds } from './change_stream_utils.js';
 import { env } from './env.js';
-import { INITIALIZED_MONGO_STORAGE_FACTORY, INITIALIZED_POSTGRES_STORAGE_FACTORY } from './util.js';
-
-describe.skipIf(!env.TEST_MONGO_STORAGE)('change stream slow tests - mongodb', { timeout: 60_000 }, function () {
-  if (env.CI || env.SLOW_TESTS) {
-    defineSlowTests(INITIALIZED_MONGO_STORAGE_FACTORY);
-  } else {
-    // Need something in this file.
-    test('no-op', () => {});
-  }
-});
+import { describeWithStorage } from './util.js';
 
-describe.skipIf(!env.TEST_POSTGRES_STORAGE)('change stream slow tests - postgres', { timeout: 60_000 }, function () {
-  if (env.CI || env.SLOW_TESTS) {
-    defineSlowTests(INITIALIZED_POSTGRES_STORAGE_FACTORY);
-  } else {
-    // Need something in this file.
-    test('no-op', () => {});
-  }
+describe.runIf(env.CI || env.SLOW_TESTS)('change stream slow tests', { timeout: 60_000 }, function () {
+  describeWithStorage({}, defineSlowTests);
 });
 
 function defineSlowTests(factory: storage.TestStorageFactory) {
@@ -96,7 +82,7 @@ bucket_definitions:
     await snapshotPromise;
     context.startStreaming();
 
-    const data = await context.getBucketData('global[]', undefined, { limit: 50_000, chunkLimitBytes: 60_000_000 });
+    const data = await context.getBucketData('global[]');
 
     const preDocuments = data.filter((d: any) => JSON.parse(d.data! as string).description.startsWith('pre')).length;
     const updatedDocuments = data.filter((d: any) =>
package/test/src/util.ts CHANGED
@@ -4,7 +4,8 @@ import * as postgres_storage from '@powersync/service-module-postgres-storage';
 
 import * as types from '@module/types/types.js';
 import { env } from './env.js';
-import { BSON_DESERIALIZE_DATA_OPTIONS } from '@powersync/service-core';
+import { BSON_DESERIALIZE_DATA_OPTIONS, TestStorageFactory } from '@powersync/service-core';
+import { describe, TestOptions } from 'vitest';
 
 export const TEST_URI = env.MONGO_TEST_DATA_URL;
 
@@ -22,6 +23,16 @@ export const INITIALIZED_POSTGRES_STORAGE_FACTORY = postgres_storage.PostgresTes
   url: env.PG_STORAGE_TEST_URL
 });
 
+export function describeWithStorage(options: TestOptions, fn: (factory: TestStorageFactory) => void) {
+  describe.skipIf(!env.TEST_MONGO_STORAGE)(`mongodb storage`, options, function () {
+    fn(INITIALIZED_MONGO_STORAGE_FACTORY);
+  });
+
+  describe.skipIf(!env.TEST_POSTGRES_STORAGE)(`postgres storage`, options, function () {
+    fn(INITIALIZED_POSTGRES_STORAGE_FACTORY);
+  });
+}
+
 export async function clearTestDb(db: mongo.Db) {
   await db.dropDatabase();
 }
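For context, a usage sketch of the new helper (a hypothetical test file mirroring the slow_tests.test.ts refactor above): the suite body is written once and registered per enabled storage backend, replacing the duplicated `describe.skipIf` blocks and the old 'no-op' placeholder test.

```ts
import { describe, test } from 'vitest';
import { TestStorageFactory } from '@powersync/service-core';
import { describeWithStorage } from './util.js';

// Hypothetical consumer: one suite definition, run against every storage
// backend enabled via TEST_MONGO_STORAGE / TEST_POSTGRES_STORAGE.
describe('my replication suite', () => {
  describeWithStorage({ timeout: 60_000 }, (factory: TestStorageFactory) => {
    test('replicates a collection', async () => {
      // ...open a ChangeStreamTestContext with `factory` and assert on bucket data.
    });
  });
});
```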