@powersync/service-module-mongodb 0.0.0-dev-20241111122558 → 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +18 -17
- package/dist/api/MongoRouteAPIAdapter.d.ts +1 -0
- package/dist/api/MongoRouteAPIAdapter.js +54 -21
- package/dist/api/MongoRouteAPIAdapter.js.map +1 -1
- package/dist/replication/ChangeStream.d.ts +23 -2
- package/dist/replication/ChangeStream.js +178 -42
- package/dist/replication/ChangeStream.js.map +1 -1
- package/dist/replication/ChangeStreamReplicationJob.js +7 -4
- package/dist/replication/ChangeStreamReplicationJob.js.map +1 -1
- package/dist/replication/MongoErrorRateLimiter.js +0 -6
- package/dist/replication/MongoErrorRateLimiter.js.map +1 -1
- package/dist/replication/MongoRelation.js +5 -2
- package/dist/replication/MongoRelation.js.map +1 -1
- package/dist/replication/replication-utils.d.ts +1 -0
- package/dist/replication/replication-utils.js +1 -0
- package/dist/replication/replication-utils.js.map +1 -1
- package/dist/types/types.d.ts +35 -0
- package/dist/types/types.js +38 -2
- package/dist/types/types.js.map +1 -1
- package/package.json +6 -9
- package/src/api/MongoRouteAPIAdapter.ts +53 -21
- package/src/replication/ChangeStream.ts +277 -121
- package/src/replication/ChangeStreamReplicationJob.ts +6 -4
- package/src/replication/MongoErrorRateLimiter.ts +1 -8
- package/src/replication/MongoRelation.ts +5 -2
- package/src/replication/replication-utils.ts +2 -1
- package/src/types/types.ts +43 -3
- package/test/src/change_stream.test.ts +442 -231
- package/test/src/change_stream_utils.ts +54 -27
- package/test/src/mongo_test.test.ts +180 -46
- package/test/src/slow_tests.test.ts +109 -0
- package/tsconfig.tsbuildinfo +1 -1
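The bulk of this diff is the rewrite of `package/test/src/change_stream.test.ts` around the new `postImages` option (`PostImagesOption` in `package/src/types/types.ts`, with the `OFF`, `READ_ONLY` and `AUTO_CONFIGURE` values exercised in the tests below). As orientation before reading the diff, the sketch below shows how such an option could map onto MongoDB change stream configuration. It is an illustrative reconstruction, not the module's actual implementation; `watchWithPostImages` is a hypothetical helper.

```ts
import * as mongo from 'mongodb';

type PostImagesOption = 'off' | 'read_only' | 'auto_configure';

async function watchWithPostImages(db: mongo.Db, name: string, postImages: PostImagesOption) {
  if (postImages === 'auto_configure') {
    // collMod enables post-images on an existing collection. Only update events
    // recorded *after* this point carry a post-image, which is why the
    // "new collection with postImages disabled" test below still expects an error.
    await db.command({ collMod: name, changeStreamPreAndPostImages: { enabled: true } });
  }
  // 'off' falls back to updateLookup: the looked-up document can be newer than the
  // event, or already deleted (the "updateLookup - no fullDocument available" test),
  // while 'required' makes the server error out when a post-image is missing.
  const fullDocument = postImages === 'off' ? 'updateLookup' : 'required';
  return db.collection(name).watch([], { fullDocument });
}
```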
package/test/src/change_stream.test.ts
@@ -2,10 +2,11 @@ import { putOp, removeOp } from '@core-tests/stream_utils.js';
 import { MONGO_STORAGE_FACTORY } from '@core-tests/util.js';
 import { BucketStorageFactory } from '@powersync/service-core';
 import * as crypto from 'crypto';
-import { describe, expect, test } from 'vitest';
-import { changeStreamTest } from './change_stream_utils.js';
 import * as mongo from 'mongodb';
 import { setTimeout } from 'node:timers/promises';
+import { describe, expect, test, vi } from 'vitest';
+import { ChangeStreamTestContext } from './change_stream_utils.js';
+import { PostImagesOption } from '@module/types/types.js';

 type StorageFactory = () => Promise<BucketStorageFactory>;

@@ -16,161 +17,270 @@ bucket_definitions:
       - SELECT _id as id, description FROM "test_data"
 `;

-describe(
-
-
-    defineChangeStreamTests(MONGO_STORAGE_FACTORY);
-  },
-  { timeout: 20_000 }
-);
+describe('change stream - mongodb', { timeout: 20_000 }, function () {
+  defineChangeStreamTests(MONGO_STORAGE_FACTORY);
+});

 function defineChangeStreamTests(factory: StorageFactory) {
-  test(
-
-
-
-      await context.updateSyncRules(`
+  test('replicating basic values', async () => {
+    await using context = await ChangeStreamTestContext.open(factory);
+    const { db } = context;
+    await context.updateSyncRules(`
 bucket_definitions:
   global:
     data:
       - SELECT _id as id, description, num FROM "test_data"`);

-
-
+    await db.createCollection('test_data', {
+      changeStreamPreAndPostImages: { enabled: false }
+    });
+    const collection = db.collection('test_data');
+
+    await context.replicateSnapshot();
+
+    context.startStreaming();
+
+    const result = await collection.insertOne({ description: 'test1', num: 1152921504606846976n });
+    const test_id = result.insertedId;
+    await setTimeout(30);
+    await collection.updateOne({ _id: test_id }, { $set: { description: 'test2' } });
+    await setTimeout(30);
+    await collection.replaceOne({ _id: test_id }, { description: 'test3' });
+    await setTimeout(30);
+    await collection.deleteOne({ _id: test_id });
+
+    const data = await context.getBucketData('global[]');
+
+    expect(data).toMatchObject([
+      putOp('test_data', { id: test_id.toHexString(), description: 'test1', num: 1152921504606846976n }),
+      putOp('test_data', { id: test_id.toHexString(), description: 'test2', num: 1152921504606846976n }),
+      putOp('test_data', { id: test_id.toHexString(), description: 'test3' }),
+      removeOp('test_data', test_id.toHexString())
+    ]);
+  });
+
+  test('replicating wildcard', async () => {
+    await using context = await ChangeStreamTestContext.open(factory);
+    const { db } = context;
+    await context.updateSyncRules(`
+bucket_definitions:
+  global:
+    data:
+      - SELECT _id as id, description, num FROM "test_%"`);
+
+    await db.createCollection('test_data', {
+      changeStreamPreAndPostImages: { enabled: false }
+    });
+    const collection = db.collection('test_data');
+
+    const result = await collection.insertOne({ description: 'test1', num: 1152921504606846976n });
+    const test_id = result.insertedId;
+
+    await context.replicateSnapshot();
+
+    context.startStreaming();
+
+    await setTimeout(30);
+    await collection.updateOne({ _id: test_id }, { $set: { description: 'test2' } });
+
+    const data = await context.getBucketData('global[]');
+
+    expect(data).toMatchObject([
+      putOp('test_data', { id: test_id.toHexString(), description: 'test1', num: 1152921504606846976n }),
+      putOp('test_data', { id: test_id.toHexString(), description: 'test2', num: 1152921504606846976n })
+    ]);
+  });
+
+  test('updateLookup - no fullDocument available', async () => {
+    await using context = await ChangeStreamTestContext.open(factory, { postImages: PostImagesOption.OFF });
+    const { db, client } = context;
+    await context.updateSyncRules(`
+bucket_definitions:
+  global:
+    data:
+      - SELECT _id as id, description, num FROM "test_data"`);
+
+    await db.createCollection('test_data', {
+      changeStreamPreAndPostImages: { enabled: false }
+    });
+    const collection = db.collection('test_data');
+
+    await context.replicateSnapshot();
+    context.startStreaming();
+
+    const session = client.startSession();
+    let test_id: mongo.ObjectId | undefined;
+    try {
+      await session.withTransaction(async () => {
+        const result = await collection.insertOne({ description: 'test1', num: 1152921504606846976n }, { session });
+        test_id = result.insertedId;
+        await collection.updateOne({ _id: test_id }, { $set: { description: 'test2' } }, { session });
+        await collection.replaceOne({ _id: test_id }, { description: 'test3' }, { session });
+        await collection.deleteOne({ _id: test_id }, { session });
       });
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        removeOp('test_data', test_id.toHexString())
-      ]);
-    })
-  );
-
-  test(
-    'no fullDocument available',
-    changeStreamTest(factory, async (context) => {
-      const { db, client } = context;
-      await context.updateSyncRules(`
+    } finally {
+      await session.endSession();
+    }
+
+    const data = await context.getBucketData('global[]');
+
+    expect(data).toMatchObject([
+      putOp('test_data', { id: test_id!.toHexString(), description: 'test1', num: 1152921504606846976n }),
+      // fullDocument is not available at the point this is replicated, resulting in it treated as a remove
+      removeOp('test_data', test_id!.toHexString()),
+      putOp('test_data', { id: test_id!.toHexString(), description: 'test3' }),
+      removeOp('test_data', test_id!.toHexString())
+    ]);
+  });
+
+  test('postImages - autoConfigure', async () => {
+    // Similar to the above test, but with postImages enabled.
+    // This resolves the consistency issue.
+    await using context = await ChangeStreamTestContext.open(factory, { postImages: PostImagesOption.AUTO_CONFIGURE });
+    const { db, client } = context;
+    await context.updateSyncRules(`
 bucket_definitions:
   global:
     data:
       - SELECT _id as id, description, num FROM "test_data"`);

-
-
+    await db.createCollection('test_data', {
+      // enabled: false here, but autoConfigure will enable it.
+      changeStreamPreAndPostImages: { enabled: false }
+    });
+    const collection = db.collection('test_data');
+
+    await context.replicateSnapshot();
+
+    context.startStreaming();
+
+    const session = client.startSession();
+    let test_id: mongo.ObjectId | undefined;
+    try {
+      await session.withTransaction(async () => {
+        const result = await collection.insertOne({ description: 'test1', num: 1152921504606846976n }, { session });
+        test_id = result.insertedId;
+        await collection.updateOne({ _id: test_id }, { $set: { description: 'test2' } }, { session });
+        await collection.replaceOne({ _id: test_id }, { description: 'test3' }, { session });
+        await collection.deleteOne({ _id: test_id }, { session });
       });
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    } finally {
+      await session.endSession();
+    }
+
+    const data = await context.getBucketData('global[]');
+
+    expect(data).toMatchObject([
+      putOp('test_data', { id: test_id!.toHexString(), description: 'test1', num: 1152921504606846976n }),
+      // The postImage helps us get this data
+      putOp('test_data', { id: test_id!.toHexString(), description: 'test2', num: 1152921504606846976n }),
+      putOp('test_data', { id: test_id!.toHexString(), description: 'test3' }),
+      removeOp('test_data', test_id!.toHexString())
+    ]);
+  });
+
+  test('postImages - on', async () => {
+    // Similar to postImages - autoConfigure, but does not auto-configure.
+    // changeStreamPreAndPostImages must be manually configured.
+    await using context = await ChangeStreamTestContext.open(factory, { postImages: PostImagesOption.READ_ONLY });
+    const { db, client } = context;
+    await context.updateSyncRules(`
+bucket_definitions:
+  global:
+    data:
+      - SELECT _id as id, description, num FROM "test_data"`);
+
+    await db.createCollection('test_data', {
+      changeStreamPreAndPostImages: { enabled: true }
+    });
+    const collection = db.collection('test_data');
+
+    await context.replicateSnapshot();
+
+    context.startStreaming();
+
+    const session = client.startSession();
+    let test_id: mongo.ObjectId | undefined;
+    try {
+      await session.withTransaction(async () => {
+        const result = await collection.insertOne({ description: 'test1', num: 1152921504606846976n }, { session });
+        test_id = result.insertedId;
+        await collection.updateOne({ _id: test_id }, { $set: { description: 'test2' } }, { session });
+        await collection.replaceOne({ _id: test_id }, { description: 'test3' }, { session });
+        await collection.deleteOne({ _id: test_id }, { session });
+      });
+    } finally {
+      await session.endSession();
+    }
+
+    const data = await context.getBucketData('global[]');
+
+    expect(data).toMatchObject([
+      putOp('test_data', { id: test_id!.toHexString(), description: 'test1', num: 1152921504606846976n }),
+      // The postImage helps us get this data
+      putOp('test_data', { id: test_id!.toHexString(), description: 'test2', num: 1152921504606846976n }),
+      putOp('test_data', { id: test_id!.toHexString(), description: 'test3' }),
+      removeOp('test_data', test_id!.toHexString())
+    ]);
+  });
+
+  test('replicating case sensitive table', async () => {
+    await using context = await ChangeStreamTestContext.open(factory);
+    const { db } = context;
+    await context.updateSyncRules(`
 bucket_definitions:
   global:
     data:
       - SELECT _id as id, description FROM "test_DATA"
 `);

-
+    await context.replicateSnapshot();

-
+    context.startStreaming();

-
-
-
+    const collection = db.collection('test_DATA');
+    const result = await collection.insertOne({ description: 'test1' });
+    const test_id = result.insertedId.toHexString();

-
+    const data = await context.getBucketData('global[]');

-
-
-  );
+    expect(data).toMatchObject([putOp('test_DATA', { id: test_id, description: 'test1' })]);
+  });

-  test(
-
-
-
-      await context.updateSyncRules(`
+  test('replicating large values', async () => {
+    await using context = await ChangeStreamTestContext.open(factory);
+    const { db } = context;
+    await context.updateSyncRules(`
 bucket_definitions:
   global:
     data:
       - SELECT _id as id, name, description FROM "test_data"
 `);

-
-
+    await context.replicateSnapshot();
+    context.startStreaming();

-
+    const largeDescription = crypto.randomBytes(20_000).toString('hex');

-
-
-
+    const collection = db.collection('test_data');
+    const result = await collection.insertOne({ name: 'test1', description: largeDescription });
+    const test_id = result.insertedId;

-
+    await collection.updateOne({ _id: test_id }, { $set: { name: 'test2' } });

-
-
-
-
-
-
-
-
-  );
+    const data = await context.getBucketData('global[]');
+    expect(data.slice(0, 1)).toMatchObject([
+      putOp('test_data', { id: test_id.toHexString(), name: 'test1', description: largeDescription })
+    ]);
+    expect(data.slice(1)).toMatchObject([
+      putOp('test_data', { id: test_id.toHexString(), name: 'test2', description: largeDescription })
+    ]);
+  });

-  test(
-
-
-
-      const syncRuleContent = `
+  test('replicating dropCollection', async () => {
+    await using context = await ChangeStreamTestContext.open(factory);
+    const { db } = context;
+    const syncRuleContent = `
 bucket_definitions:
   global:
     data:
@@ -179,128 +289,229 @@ bucket_definitions:
     parameters: SELECT _id as id FROM test_data WHERE id = token_parameters.user_id
     data: []
 `;
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-      const { db } = context;
-      const syncRuleContent = `
+    await context.updateSyncRules(syncRuleContent);
+    await context.replicateSnapshot();
+    context.startStreaming();
+
+    const collection = db.collection('test_data');
+    const result = await collection.insertOne({ description: 'test1' });
+    const test_id = result.insertedId.toHexString();
+
+    await collection.drop();
+
+    const data = await context.getBucketData('global[]');
+
+    expect(data).toMatchObject([
+      putOp('test_data', { id: test_id, description: 'test1' }),
+      removeOp('test_data', test_id)
+    ]);
+  });
+
+  test('replicating renameCollection', async () => {
+    await using context = await ChangeStreamTestContext.open(factory);
+    const { db } = context;
+    const syncRuleContent = `
 bucket_definitions:
   global:
     data:
       - SELECT _id as id, description FROM "test_data1"
       - SELECT _id as id, description FROM "test_data2"
 `;
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    )
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    await context.updateSyncRules(syncRuleContent);
+    await context.replicateSnapshot();
+    context.startStreaming();
+
+    const collection = db.collection('test_data1');
+    const result = await collection.insertOne({ description: 'test1' });
+    const test_id = result.insertedId.toHexString();
+
+    await collection.rename('test_data2');
+
+    const data = await context.getBucketData('global[]');
+
+    expect(data).toMatchObject([
+      putOp('test_data1', { id: test_id, description: 'test1' }),
+      removeOp('test_data1', test_id),
+      putOp('test_data2', { id: test_id, description: 'test1' })
+    ]);
+  });
+
+  test('initial sync', async () => {
+    await using context = await ChangeStreamTestContext.open(factory);
+    const { db } = context;
+    await context.updateSyncRules(BASIC_SYNC_RULES);
+
+    const collection = db.collection('test_data');
+    const result = await collection.insertOne({ description: 'test1' });
+    const test_id = result.insertedId.toHexString();
+
+    await context.replicateSnapshot();
+    context.startStreaming();
+
+    const data = await context.getBucketData('global[]');
+    expect(data).toMatchObject([putOp('test_data', { id: test_id, description: 'test1' })]);
+  });
+
+  test('large record', async () => {
+    // Test a large update.
+
+    // Without $changeStreamSplitLargeEvent, we get this error:
+    // MongoServerError: PlanExecutor error during aggregation :: caused by :: BSONObj size: 33554925 (0x20001ED) is invalid.
+    // Size must be between 0 and 16793600(16MB)
+
+    await using context = await ChangeStreamTestContext.open(factory);
+    await context.updateSyncRules(`bucket_definitions:
   global:
     data:
-      - SELECT _id as id,
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+      - SELECT _id as id, name, other FROM "test_data"`);
+    const { db } = context;
+
+    await context.replicateSnapshot();
+
+    const collection = db.collection('test_data');
+    const result = await collection.insertOne({ name: 't1' });
+    const test_id = result.insertedId;
+
+    // 12MB field.
+    // The field appears twice in the ChangeStream event, so the total size
+    // is > 16MB.
+
+    // We don't actually have this description field in the sync rules,
+    // That causes other issues, not relevant for this specific test.
+    const largeDescription = crypto.randomBytes(12000000 / 2).toString('hex');
+
+    await collection.updateOne({ _id: test_id }, { $set: { description: largeDescription } });
+    context.startStreaming();
+
+    const data = await context.getBucketData('global[]');
+    expect(data.length).toEqual(2);
+    const row1 = JSON.parse(data[0].data as string);
+    expect(row1).toEqual({ id: test_id.toHexString(), name: 't1' });
+    delete data[0].data;
+    expect(data[0]).toMatchObject({
+      object_id: test_id.toHexString(),
+      object_type: 'test_data',
+      op: 'PUT',
+      op_id: '1'
+    });
+    const row2 = JSON.parse(data[1].data as string);
+    expect(row2).toEqual({ id: test_id.toHexString(), name: 't1' });
+    delete data[1].data;
+    expect(data[1]).toMatchObject({
+      object_id: test_id.toHexString(),
+      object_type: 'test_data',
+      op: 'PUT',
+      op_id: '2'
+    });
+  });
+
+  test('collection not in sync rules', async () => {
+    await using context = await ChangeStreamTestContext.open(factory);
+    const { db } = context;
+    await context.updateSyncRules(BASIC_SYNC_RULES);
+
+    await context.replicateSnapshot();
+
+    context.startStreaming();
+
+    const collection = db.collection('test_donotsync');
+    const result = await collection.insertOne({ description: 'test' });
+
+    const data = await context.getBucketData('global[]');
+
+    expect(data).toMatchObject([]);
+  });
+
+  test('postImages - new collection with postImages enabled', async () => {
+    await using context = await ChangeStreamTestContext.open(factory, { postImages: PostImagesOption.AUTO_CONFIGURE });
+    const { db } = context;
+    await context.updateSyncRules(`
+bucket_definitions:
+  global:
+    data:
+      - SELECT _id as id, description FROM "test_%"`);
+
+    await context.replicateSnapshot();
+
+    await db.createCollection('test_data', {
+      // enabled: true here - everything should work
+      changeStreamPreAndPostImages: { enabled: true }
+    });
+    const collection = db.collection('test_data');
+    const result = await collection.insertOne({ description: 'test1' });
+    const test_id = result.insertedId;
+    await collection.updateOne({ _id: test_id }, { $set: { description: 'test2' } });
+
+    context.startStreaming();
+
+    const data = await context.getBucketData('global[]');
+    expect(data).toMatchObject([
+      putOp('test_data', { id: test_id!.toHexString(), description: 'test1' }),
+      putOp('test_data', { id: test_id!.toHexString(), description: 'test2' })
+    ]);
+  });
+
+  test('postImages - new collection with postImages disabled', async () => {
+    await using context = await ChangeStreamTestContext.open(factory, { postImages: PostImagesOption.AUTO_CONFIGURE });
+    const { db } = context;
+    await context.updateSyncRules(`
+bucket_definitions:
+  global:
+    data:
+      - SELECT _id as id, description FROM "test_data%"`);
+
+    await context.replicateSnapshot();
+
+    await db.createCollection('test_data', {
+      // enabled: false here, but autoConfigure will enable it.
+      // Unfortunately, that is too late, and replication must be restarted.
+      changeStreamPreAndPostImages: { enabled: false }
+    });
+    const collection = db.collection('test_data');
+    const result = await collection.insertOne({ description: 'test1' });
+    const test_id = result.insertedId;
+    await collection.updateOne({ _id: test_id }, { $set: { description: 'test2' } });
+
+    context.startStreaming();
+
+    await expect(() => context.getBucketData('global[]')).rejects.toMatchObject({
+      message: expect.stringContaining('stream was configured to require a post-image for all update events')
+    });
+  });
+
+  test('recover from error', async () => {
+    await using context = await ChangeStreamTestContext.open(factory);
+    const { db } = context;
+    await context.updateSyncRules(`
+bucket_definitions:
+  global:
+    data:
+      - SELECT _id as id, description, num FROM "test_data"`);

-
-
-
-      const { db } = context;
-      await context.updateSyncRules(BASIC_SYNC_RULES);
+    await db.createCollection('test_data', {
+      changeStreamPreAndPostImages: { enabled: false }
+    });

-
+    const collection = db.collection('test_data');
+    await collection.insertOne({ description: 'test1', num: 1152921504606846976n });

-
+    await context.replicateSnapshot();

-
-
+    // Simulate an error
+    await context.storage!.reportError(new Error('simulated error'));
+    expect((await context.factory.getActiveSyncRulesContent())?.last_fatal_error).toEqual('simulated error');

-
+    // startStreaming() should automatically clear the error.
+    context.startStreaming();

-
-
-
+    // getBucketData() creates a checkpoint that clears the error, so we don't do that
+    // Just wait, and check that the error is cleared automatically.
+    await vi.waitUntil(
+      async () => {
+        const error = (await context.factory.getActiveSyncRulesContent())?.last_fatal_error;
+        return error == null;
+      },
+      { timeout: 2_000 }
+    );
+  });
 }
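The `large record` test above depends on the `$changeStreamSplitLargeEvent` aggregation stage to get past the 16MB BSON limit quoted in its comment. The sketch below shows one way a consumer might reassemble the resulting fragments; it is illustrative only, and the `changeEvents` helper plus its merge strategy are hypothetical, not the module's actual code.

```ts
import * as mongo from 'mongodb';

// Illustrative sketch: consuming a change stream with $changeStreamSplitLargeEvent.
// The stage splits any event larger than 16MB into sequential fragments, each
// annotated with splitEvent: { fragment: n, of: m }.
async function* changeEvents(collection: mongo.Collection): AsyncGenerator<mongo.Document> {
  const stream = collection.watch([{ $changeStreamSplitLargeEvent: {} }], {
    fullDocument: 'updateLookup'
  });
  let pending: mongo.Document[] = [];
  for await (const event of stream) {
    const split = (event as mongo.Document).splitEvent as
      | { fragment: number; of: number }
      | undefined;
    if (split == null) {
      yield event; // small event, never split
      continue;
    }
    pending.push(event);
    if (split.fragment === split.of) {
      // Last fragment: later fragments carry the fields that did not fit earlier,
      // so merging the pieces approximates the original event.
      yield Object.assign({}, ...pending);
      pending = [];
    }
  }
}
```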