@powersync/service-core-tests 0.10.3 → 0.11.0
This diff shows the published contents of the two package versions as they appear in their respective public registries. It is provided for informational purposes only.
- package/CHANGELOG.md +41 -0
- package/dist/test-utils/general-utils.js +9 -1
- package/dist/test-utils/general-utils.js.map +1 -1
- package/dist/tests/register-compacting-tests.d.ts +0 -11
- package/dist/tests/register-compacting-tests.js +0 -11
- package/dist/tests/register-compacting-tests.js.map +1 -1
- package/dist/tests/register-data-storage-tests.js +209 -156
- package/dist/tests/register-data-storage-tests.js.map +1 -1
- package/dist/tests/register-parameter-compacting-tests.d.ts +2 -0
- package/dist/tests/register-parameter-compacting-tests.js +227 -0
- package/dist/tests/register-parameter-compacting-tests.js.map +1 -0
- package/dist/tests/register-sync-tests.js +16 -15
- package/dist/tests/register-sync-tests.js.map +1 -1
- package/dist/tests/tests-index.d.ts +1 -0
- package/dist/tests/tests-index.js +1 -0
- package/dist/tests/tests-index.js.map +1 -1
- package/package.json +3 -3
- package/src/test-utils/general-utils.ts +9 -9
- package/src/tests/register-compacting-tests.ts +0 -11
- package/src/tests/register-data-storage-tests.ts +205 -190
- package/src/tests/register-parameter-compacting-tests.ts +172 -0
- package/src/tests/register-sync-tests.ts +16 -15
- package/src/tests/tests-index.ts +1 -0
- package/tsconfig.tsbuildinfo +1 -1
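The hunks below are from `package/src/tests/register-data-storage-tests.ts` (the `dist/` entries carry the compiled equivalents). The recurring change: tests no longer build sync rules with `test_utils.testRules(...)` and hand the result to `factory.getInstance(sync_rules)`; they persist the rules through the storage factory instead. A minimal sketch of the new setup, mirroring the added lines below (the `generateStorageFactory` harness and the `storage`/`test_utils` imports belong to this package's test API and are not shown in the diff):

```ts
// Sketch only — assumes the vitest harness these tests run under.
test('save and load parameters', async () => {
  // 0.11.0 pattern: persist sync rules via the factory, then open an instance.
  await using factory = await generateStorageFactory();
  const syncRules = await factory.updateSyncRules({
    content: `
bucket_definitions:
  mybucket:
    parameters:
      - SELECT group_id FROM test WHERE id = token_parameters.user_id
    data: []
`
  });
  const bucketStorage = factory.getInstance(syncRules);
  // ... write rows and assert on checkpoints, as in the hunks below
});
```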
```diff
--- package/src/tests/register-data-storage-tests.ts (0.10.3)
+++ package/src/tests/register-data-storage-tests.ts (0.11.0)
@@ -34,18 +34,19 @@ const normalizeOplogData = (data: OplogEntry['data']) => {
  */
 export function registerDataStorageTests(generateStorageFactory: storage.TestStorageFactory) {
   test('save and load parameters', async () => {
-    const sync_rules = test_utils.testRules(`
+    await using factory = await generateStorageFactory();
+    const syncRules = await factory.updateSyncRules({
+      content: `
 bucket_definitions:
   mybucket:
     parameters:
       - SELECT group_id FROM test WHERE id1 = token_parameters.user_id OR id2 = token_parameters.user_id
-    data: []
-`
-    );
-    await using factory = await generateStorageFactory();
-    const bucketStorage = factory.getInstance(sync_rules);
+    data: []
+`
+    });
+    const bucketStorage = factory.getInstance(syncRules);
 
-    const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       await batch.save({
         sourceTable: TEST_TABLE,
         tag: storage.SaveOperationTag.INSERT,
```
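Nearly every hunk that follows also adds an explicit `await batch.commit('1/1')` inside `startBatch` before the test takes a checkpoint; the old tests instead captured the batch result and read `result!.flushed_op`. The argument looks like a Postgres-style LSN string. A sketch of the write path, assuming the `TEST_TABLE`/`test_utils` helpers used throughout this file:

```ts
// Write a row, then commit at an explicit replication position (LSN-style string).
await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  await batch.save({
    sourceTable: TEST_TABLE,
    tag: storage.SaveOperationTag.INSERT,
    after: { id: 't1', group_id: 'group1a' }, // illustrative row — columns vary per test
    afterReplicaId: test_utils.rid('t1')
  });

  await batch.commit('1/1');
});
```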
```diff
@@ -69,11 +70,12 @@ bucket_definitions:
         },
         afterReplicaId: test_utils.rid('t1')
       });
+
+      await batch.commit('1/1');
     });
 
-    const
-      ParameterLookup.normalized('mybucket', '1', ['user1'])
-    ]);
+    const checkpoint = await bucketStorage.getCheckpoint();
+    const parameters = await checkpoint.getParameterSets([ParameterLookup.normalized('mybucket', '1', ['user1'])]);
     expect(parameters).toEqual([
       {
         group_id: 'group1a'
@@ -82,20 +84,19 @@ bucket_definitions:
   });
 
   test('it should use the latest version', async () => {
-    const sync_rules = test_utils.testRules(
-      `
+    await using factory = await generateStorageFactory();
+    const syncRules = await factory.updateSyncRules({
+      content: `
 bucket_definitions:
   mybucket:
     parameters:
       - SELECT group_id FROM test WHERE id = token_parameters.user_id
-    data: []
+    data: []
 `
-    );
-
-    await using factory = await generateStorageFactory();
-    const bucketStorage = factory.getInstance(sync_rules);
+    });
+    const bucketStorage = factory.getInstance(syncRules);
 
-    const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       await batch.save({
         sourceTable: TEST_TABLE,
         tag: storage.SaveOperationTag.INSERT,
@@ -105,8 +106,10 @@ bucket_definitions:
         },
         afterReplicaId: test_utils.rid('user1')
       });
+      await batch.commit('1/1');
     });
-    const
+    const checkpoint1 = await bucketStorage.getCheckpoint();
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       await batch.save({
         sourceTable: TEST_TABLE,
         tag: storage.SaveOperationTag.INSERT,
@@ -116,11 +119,11 @@ bucket_definitions:
         },
         afterReplicaId: test_utils.rid('user1')
       });
+      await batch.commit('1/2');
     });
+    const checkpoint2 = await bucketStorage.getCheckpoint();
 
-    const parameters = await
-      ParameterLookup.normalized('mybucket', '1', ['user1'])
-    ]);
+    const parameters = await checkpoint2.getParameterSets([ParameterLookup.normalized('mybucket', '1', ['user1'])]);
     expect(parameters).toEqual([
       {
         group_id: 'group2'
@@ -128,9 +131,7 @@ bucket_definitions:
     ]);
 
     // Use the checkpoint to get older data if relevant
-    const parameters2 = await
-      ParameterLookup.normalized('mybucket', '1', ['user1'])
-    ]);
+    const parameters2 = await checkpoint1.getParameterSets([ParameterLookup.normalized('mybucket', '1', ['user1'])]);
     expect(parameters2).toEqual([
       {
         group_id: 'group1'
@@ -139,20 +140,19 @@ bucket_definitions:
   });
 
   test('it should use the latest version after updates', async () => {
-    const sync_rules = test_utils.testRules(
-      `
+    await using factory = await generateStorageFactory();
+    const syncRules = await factory.updateSyncRules({
+      content: `
 bucket_definitions:
   mybucket:
     parameters:
       - SELECT id AS todo_id
         FROM todos
         WHERE list_id IN token_parameters.list_id
-    data: []
+    data: []
 `
-    );
-
-    await using factory = await generateStorageFactory();
-    const bucketStorage = factory.getInstance(sync_rules);
+    });
+    const bucketStorage = factory.getInstance(syncRules);
 
     const table = test_utils.makeTestTable('todos', ['id', 'list_id']);
 
@@ -176,9 +176,11 @@ bucket_definitions:
         },
         afterReplicaId: test_utils.rid('todo2')
       });
+
+      await batch.commit('1/1');
     });
 
-    const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       // Update the second todo item to now belong to list 1
       await batch.save({
         sourceTable: table,
@@ -189,12 +191,15 @@ bucket_definitions:
         },
         afterReplicaId: test_utils.rid('todo2')
       });
+
+      await batch.commit('1/1');
     });
 
     // We specifically request the todo_ids for both lists.
     // There removal operation for the association of `list2`::`todo2` should not interfere with the new
     // association of `list1`::`todo2`
-    const
+    const checkpoint = await bucketStorage.getCheckpoint();
+    const parameters = await checkpoint.getParameterSets([
       ParameterLookup.normalized('mybucket', '1', ['list1']),
       ParameterLookup.normalized('mybucket', '1', ['list2'])
     ]);
@@ -210,20 +215,19 @@ bucket_definitions:
   });
 
   test('save and load parameters with different number types', async () => {
-    const sync_rules = test_utils.testRules(
-      `
+    await using factory = await generateStorageFactory();
+    const syncRules = await factory.updateSyncRules({
+      content: `
 bucket_definitions:
   mybucket:
     parameters:
       - SELECT group_id FROM test WHERE n1 = token_parameters.n1 and f2 = token_parameters.f2 and f3 = token_parameters.f3
     data: []
 `
-    );
-
-    await using factory = await generateStorageFactory();
-    const bucketStorage = factory.getInstance(sync_rules);
+    });
+    const bucketStorage = factory.getInstance(syncRules);
 
-    const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       await batch.save({
         sourceTable: TEST_TABLE,
         tag: storage.SaveOperationTag.INSERT,
@@ -236,21 +240,23 @@ bucket_definitions:
         },
         afterReplicaId: test_utils.rid('t1')
       });
+
+      await batch.commit('1/1');
     });
 
     const TEST_PARAMS = { group_id: 'group1' };
 
-    const checkpoint =
+    const checkpoint = await bucketStorage.getCheckpoint();
 
-    const parameters1 = await
+    const parameters1 = await checkpoint.getParameterSets([
       ParameterLookup.normalized('mybucket', '1', [314n, 314, 3.14])
     ]);
     expect(parameters1).toEqual([TEST_PARAMS]);
-    const parameters2 = await
+    const parameters2 = await checkpoint.getParameterSets([
       ParameterLookup.normalized('mybucket', '1', [314, 314n, 3.14])
     ]);
     expect(parameters2).toEqual([TEST_PARAMS]);
-    const parameters3 = await
+    const parameters3 = await checkpoint.getParameterSets([
       ParameterLookup.normalized('mybucket', '1', [314n, 314, 3])
     ]);
     expect(parameters3).toEqual([]);
```
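Parameter lookups now go through a checkpoint object: `bucketStorage.getCheckpoint()` replaces the old `result!.flushed_op` op-id, and `getParameterSets` moved from the storage instance onto the checkpoint. Because each `getCheckpoint()` call captures a point-in-time view, the "latest version" tests above hold two checkpoints and query them independently. A sketch using the same names as the hunks above:

```ts
// Read parameter sets at the current checkpoint.
const checkpoint = await bucketStorage.getCheckpoint();
const parameters = await checkpoint.getParameterSets([
  ParameterLookup.normalized('mybucket', '1', ['user1'])
]);
expect(parameters).toEqual([{ group_id: 'group1' }]);
// A checkpoint taken before a later batch still returns the older data.
```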
```diff
@@ -261,20 +267,19 @@ bucket_definitions:
     // This specific case tested here cannot happen with postgres in practice, but we still
     // test this to ensure correct deserialization.
 
-    const sync_rules = test_utils.testRules(
-      `
+    await using factory = await generateStorageFactory();
+    const syncRules = await factory.updateSyncRules({
+      content: `
 bucket_definitions:
   mybucket:
     parameters:
       - SELECT group_id FROM test WHERE n1 = token_parameters.n1
     data: []
 `
-    );
-
-    await using factory = await generateStorageFactory();
-    const bucketStorage = factory.getInstance(sync_rules);
+    });
+    const bucketStorage = factory.getInstance(syncRules);
 
-    const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       await batch.save({
         sourceTable: TEST_TABLE,
         tag: storage.SaveOperationTag.INSERT,
@@ -298,31 +303,33 @@ bucket_definitions:
         },
         afterReplicaId: test_utils.rid('t1')
       });
+
+      await batch.commit('1/1');
     });
 
     const TEST_PARAMS = { group_id: 'group1' };
 
-    const checkpoint =
+    const checkpoint = await bucketStorage.getCheckpoint();
 
-    const parameters1 = await
+    const parameters1 = await checkpoint.getParameterSets([
       ParameterLookup.normalized('mybucket', '1', [1152921504606846976n])
     ]);
     expect(parameters1).toEqual([TEST_PARAMS]);
   });
 
   test('removing row', async () => {
-    const sync_rules = test_utils.testRules(
-      `
+    await using factory = await generateStorageFactory();
+    const syncRules = await factory.updateSyncRules({
+      content: `
 bucket_definitions:
   global:
     data:
       - SELECT id, description FROM "%"
 `
-    );
-    await using factory = await generateStorageFactory();
-    const bucketStorage = factory.getInstance(sync_rules);
+    });
+    const bucketStorage = factory.getInstance(syncRules);
 
-    const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       const sourceTable = TEST_TABLE;
 
       await batch.save({
@@ -339,9 +346,10 @@ bucket_definitions:
         tag: storage.SaveOperationTag.DELETE,
         beforeReplicaId: test_utils.rid('test1')
       });
+      await batch.commit('1/1');
     });
 
-    const checkpoint =
+    const { checkpoint } = await bucketStorage.getCheckpoint();
 
     const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]])));
     const data = batch[0].chunkData.data.map((d) => {
@@ -373,8 +381,9 @@ bucket_definitions:
   test('save and load parameters with workspaceId', async () => {
     const WORKSPACE_TABLE = test_utils.makeTestTable('workspace', ['id']);
 
-    const sync_rules_content = test_utils.testRules(
-      `
+    await using factory = await generateStorageFactory();
+    const syncRules = await factory.updateSyncRules({
+      content: `
 bucket_definitions:
   by_workspace:
     parameters:
@@ -382,13 +391,11 @@ bucket_definitions:
           workspace."userId" = token_parameters.user_id
     data: []
 `
-    );
-    const sync_rules =
-
-    await using factory = await generateStorageFactory();
-    const bucketStorage = factory.getInstance(sync_rules_content);
+    });
+    const sync_rules = syncRules.parsed(test_utils.PARSE_OPTIONS).sync_rules;
+    const bucketStorage = factory.getInstance(syncRules);
 
-    const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       await batch.save({
         sourceTable: WORKSPACE_TABLE,
         tag: storage.SaveOperationTag.INSERT,
@@ -398,9 +405,9 @@ bucket_definitions:
         },
         afterReplicaId: test_utils.rid('workspace1')
       });
+      await batch.commit('1/1');
     });
-
-    const checkpoint = result!.flushed_op;
+    const checkpoint = await bucketStorage.getCheckpoint();
 
     const parameters = new RequestParameters({ sub: 'u1' }, {});
 
@@ -409,12 +416,12 @@ bucket_definitions:
     const lookups = q1.getLookups(parameters);
     expect(lookups).toEqual([ParameterLookup.normalized('by_workspace', '1', ['u1'])]);
 
-    const parameter_sets = await
+    const parameter_sets = await checkpoint.getParameterSets(lookups);
     expect(parameter_sets).toEqual([{ workspace_id: 'workspace1' }]);
 
     const buckets = await sync_rules.getBucketParameterQuerier(parameters).queryDynamicBucketDescriptions({
       getParameterSets(lookups) {
-        return
+        return checkpoint.getParameterSets(lookups);
       }
     });
     expect(buckets).toEqual([{ bucket: 'by_workspace["workspace1"]', priority: 3 }]);
@@ -423,8 +430,9 @@ bucket_definitions:
   test('save and load parameters with dynamic global buckets', async () => {
     const WORKSPACE_TABLE = test_utils.makeTestTable('workspace');
 
-    const sync_rules_content = test_utils.testRules(
-      `
+    await using factory = await generateStorageFactory();
+    const syncRules = await factory.updateSyncRules({
+      content: `
 bucket_definitions:
   by_public_workspace:
     parameters:
@@ -432,13 +440,11 @@ bucket_definitions:
           workspace.visibility = 'public'
     data: []
 `
-    );
-    const sync_rules =
-
-    await using factory = await generateStorageFactory();
-    const bucketStorage = factory.getInstance(sync_rules_content);
+    });
+    const sync_rules = syncRules.parsed(test_utils.PARSE_OPTIONS).sync_rules;
+    const bucketStorage = factory.getInstance(syncRules);
 
-    const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       await batch.save({
         sourceTable: WORKSPACE_TABLE,
         tag: storage.SaveOperationTag.INSERT,
@@ -468,9 +474,11 @@ bucket_definitions:
         },
         afterReplicaId: test_utils.rid('workspace3')
       });
+
+      await batch.commit('1/1');
     });
 
-    const checkpoint =
+    const checkpoint = await bucketStorage.getCheckpoint();
 
     const parameters = new RequestParameters({ sub: 'unknown' }, {});
 
@@ -479,13 +487,13 @@ bucket_definitions:
     const lookups = q1.getLookups(parameters);
     expect(lookups).toEqual([ParameterLookup.normalized('by_public_workspace', '1', [])]);
 
-    const parameter_sets = await
+    const parameter_sets = await checkpoint.getParameterSets(lookups);
     parameter_sets.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
     expect(parameter_sets).toEqual([{ workspace_id: 'workspace1' }, { workspace_id: 'workspace3' }]);
 
     const buckets = await sync_rules.getBucketParameterQuerier(parameters).queryDynamicBucketDescriptions({
       getParameterSets(lookups) {
-        return
+        return checkpoint.getParameterSets(lookups);
       }
     });
     buckets.sort((a, b) => a.bucket.localeCompare(b.bucket));
@@ -498,8 +506,9 @@ bucket_definitions:
   test('multiple parameter queries', async () => {
     const WORKSPACE_TABLE = test_utils.makeTestTable('workspace');
 
-    const sync_rules_content = test_utils.testRules(
-      `
+    await using factory = await generateStorageFactory();
+    const syncRules = await factory.updateSyncRules({
+      content: `
 bucket_definitions:
   by_workspace:
     parameters:
@@ -509,13 +518,11 @@ bucket_definitions:
           workspace.user_id = token_parameters.user_id
     data: []
 `
-    );
-    const sync_rules =
-
-    await using factory = await generateStorageFactory();
-    const bucketStorage = factory.getInstance(sync_rules_content);
+    });
+    const sync_rules = syncRules.parsed(test_utils.PARSE_OPTIONS).sync_rules;
+    const bucketStorage = factory.getInstance(syncRules);
 
-    const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       await batch.save({
         sourceTable: WORKSPACE_TABLE,
         tag: storage.SaveOperationTag.INSERT,
@@ -557,9 +564,11 @@ bucket_definitions:
         },
         afterReplicaId: test_utils.rid('workspace4')
       });
+
+      await batch.commit('1/1');
     });
 
-    const checkpoint =
+    const checkpoint = await bucketStorage.getCheckpoint();
 
     const parameters = new RequestParameters({ sub: 'u1' }, {});
 
@@ -568,7 +577,7 @@ bucket_definitions:
     const lookups1 = q1.getLookups(parameters);
     expect(lookups1).toEqual([ParameterLookup.normalized('by_workspace', '1', [])]);
 
-    const parameter_sets1 = await
+    const parameter_sets1 = await checkpoint.getParameterSets(lookups1);
     parameter_sets1.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
     expect(parameter_sets1).toEqual([{ workspace_id: 'workspace1' }]);
 
@@ -576,7 +585,7 @@ bucket_definitions:
     const lookups2 = q2.getLookups(parameters);
     expect(lookups2).toEqual([ParameterLookup.normalized('by_workspace', '2', ['u1'])]);
 
-    const parameter_sets2 = await
+    const parameter_sets2 = await checkpoint.getParameterSets(lookups2);
     parameter_sets2.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
     expect(parameter_sets2).toEqual([{ workspace_id: 'workspace3' }]);
 
@@ -584,7 +593,7 @@ bucket_definitions:
     const buckets = (
       await sync_rules.getBucketParameterQuerier(parameters).queryDynamicBucketDescriptions({
         getParameterSets(lookups) {
-          return
+          return checkpoint.getParameterSets(lookups);
         }
       })
     ).map((e) => e.bucket);
```
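For the dynamic-bucket tests above, the parsed rules are now derived from the persisted record via `syncRules.parsed(test_utils.PARSE_OPTIONS).sync_rules`, and the querier's `getParameterSets` callback delegates to the checkpoint. Sketch (with `parameters` a `RequestParameters` instance, as in the hunks above):

```ts
const sync_rules = syncRules.parsed(test_utils.PARSE_OPTIONS).sync_rules;
const checkpoint = await bucketStorage.getCheckpoint();

const buckets = await sync_rules.getBucketParameterQuerier(parameters).queryDynamicBucketDescriptions({
  getParameterSets(lookups) {
    // Resolve lookups against storage at this checkpoint.
    return checkpoint.getParameterSets(lookups);
  }
});
```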
```diff
@@ -593,20 +602,19 @@ bucket_definitions:
   });
 
   test('changing client ids', async () => {
-    const sync_rules = test_utils.testRules(
-      `
+    await using factory = await generateStorageFactory();
+    const syncRules = await factory.updateSyncRules({
+      content: `
 bucket_definitions:
   global:
     data:
       - SELECT client_id as id, description FROM "%"
 `
-    );
-
-    await using factory = await generateStorageFactory();
-    const bucketStorage = factory.getInstance(sync_rules);
+    });
+    const bucketStorage = factory.getInstance(syncRules);
 
     const sourceTable = TEST_TABLE;
-    const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       await batch.save({
         sourceTable,
         tag: storage.SaveOperationTag.INSERT,
@@ -638,8 +646,10 @@ bucket_definitions:
         },
         afterReplicaId: test_utils.rid('test2')
       });
+
+      await batch.commit('1/1');
     });
-    const checkpoint =
+    const { checkpoint } = await bucketStorage.getCheckpoint();
     const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]])));
     const data = batch[0].chunkData.data.map((d) => {
       return {
@@ -657,16 +667,16 @@ bucket_definitions:
   });
 
   test('re-apply delete', async () => {
-    const sync_rules = test_utils.testRules(
-      `
+    await using factory = await generateStorageFactory();
+    const syncRules = await factory.updateSyncRules({
+      content: `
 bucket_definitions:
   global:
     data:
       - SELECT id, description FROM "%"
 `
-    );
-    await using factory = await generateStorageFactory();
-    const bucketStorage = factory.getInstance(sync_rules);
+    });
+    const bucketStorage = factory.getInstance(syncRules);
 
     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       const sourceTable = TEST_TABLE;
@@ -690,9 +700,11 @@ bucket_definitions:
         tag: storage.SaveOperationTag.DELETE,
         beforeReplicaId: test_utils.rid('test1')
       });
+
+      await batch.commit('1/1');
     });
 
-    const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       const sourceTable = TEST_TABLE;
 
       await batch.save({
@@ -702,7 +714,7 @@ bucket_definitions:
       });
     });
 
-    const checkpoint =
+    const { checkpoint } = await bucketStorage.getCheckpoint();
 
     const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]])));
     const data = batch[0].chunkData.data.map((d) => {
@@ -732,16 +744,16 @@ bucket_definitions:
   });
 
   test('re-apply update + delete', async () => {
-    const sync_rules = test_utils.testRules(
-      `
+    await using factory = await generateStorageFactory();
+    const syncRules = await factory.updateSyncRules({
+      content: `
 bucket_definitions:
   global:
     data:
       - SELECT id, description FROM "%"
 `
-    );
-    await using factory = await generateStorageFactory();
-    const bucketStorage = factory.getInstance(sync_rules);
+    });
+    const bucketStorage = factory.getInstance(syncRules);
 
     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       const sourceTable = TEST_TABLE;
@@ -785,9 +797,11 @@ bucket_definitions:
         tag: storage.SaveOperationTag.DELETE,
         beforeReplicaId: test_utils.rid('test1')
       });
+
+      await batch.commit('1/1');
     });
 
-    const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
      const sourceTable = TEST_TABLE;
 
       await batch.save({
@@ -815,9 +829,11 @@ bucket_definitions:
         tag: storage.SaveOperationTag.DELETE,
         beforeReplicaId: test_utils.rid('test1')
       });
+
+      await batch.commit('2/1');
     });
 
-    const checkpoint =
+    const { checkpoint } = await bucketStorage.getCheckpoint();
 
     const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]])));
 
@@ -850,18 +866,17 @@ bucket_definitions:
   });
 
   test('truncate parameters', async () => {
-    const sync_rules = test_utils.testRules(
-      `
+    await using factory = await generateStorageFactory();
+    const syncRules = await factory.updateSyncRules({
+      content: `
 bucket_definitions:
   mybucket:
     parameters:
       - SELECT group_id FROM test WHERE id1 = token_parameters.user_id OR id2 = token_parameters.user_id
     data: []
 `
-    );
-
-    await using factory = await generateStorageFactory();
-    const bucketStorage = factory.getInstance(sync_rules);
+    });
+    const bucketStorage = factory.getInstance(syncRules);
 
     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       await batch.save({
@@ -879,11 +894,9 @@ bucket_definitions:
       await batch.truncate([TEST_TABLE]);
     });
 
-    const
+    const checkpoint = await bucketStorage.getCheckpoint();
 
-    const parameters = await
-      ParameterLookup.normalized('mybucket', '1', ['user1'])
-    ]);
+    const parameters = await checkpoint.getParameterSets([ParameterLookup.normalized('mybucket', '1', ['user1'])]);
     expect(parameters).toEqual([]);
   });
 
@@ -896,16 +909,16 @@ bucket_definitions:
     // 1. Not getting the correct "current_data" state for each operation.
     // 2. Output order not being correct.
 
-    const sync_rules = test_utils.testRules(
-      `
+    await using factory = await generateStorageFactory();
+    const syncRules = await factory.updateSyncRules({
+      content: `
 bucket_definitions:
   global:
     data:
       - SELECT id, description FROM "test"
 `
-    );
-    await using factory = await generateStorageFactory();
-    const bucketStorage = factory.getInstance(sync_rules);
+    });
+    const bucketStorage = factory.getInstance(syncRules);
 
     // Pre-setup
     const result1 = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
```
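Tests that read row data rather than parameter sets only need the op-id, so they destructure it from the same call and pass it to `getBucketDataBatch` unchanged:

```ts
// Only the op-id is needed for data-batch reads.
const { checkpoint } = await bucketStorage.getCheckpoint();
const batch = await test_utils.fromAsync(
  bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]]))
);
```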
```diff
@@ -1048,14 +1061,6 @@ bucket_definitions:
   });
 
   test('changed data with replica identity full', async () => {
-    const sync_rules = test_utils.testRules(
-      `
-bucket_definitions:
-  global:
-    data:
-      - SELECT id, description FROM "test"
-`
-    );
     function rid2(id: string, description: string) {
       return getUuidReplicaIdentityBson({ id, description }, [
         { name: 'id', type: 'VARCHAR', typeId: 25 },
@@ -1063,7 +1068,15 @@ bucket_definitions:
       ]);
     }
     await using factory = await generateStorageFactory();
-    const bucketStorage = factory.getInstance(sync_rules);
+    const syncRules = await factory.updateSyncRules({
+      content: `
+bucket_definitions:
+  global:
+    data:
+      - SELECT id, description FROM "test"
+`
+    });
+    const bucketStorage = factory.getInstance(syncRules);
 
     const sourceTable = test_utils.makeTestTable('test', ['id', 'description']);
 
@@ -1155,14 +1168,6 @@ bucket_definitions:
   });
 
   test('unchanged data with replica identity full', async () => {
-    const sync_rules = test_utils.testRules(
-      `
-bucket_definitions:
-  global:
-    data:
-      - SELECT id, description FROM "test"
-`
-    );
     function rid2(id: string, description: string) {
       return getUuidReplicaIdentityBson({ id, description }, [
         { name: 'id', type: 'VARCHAR', typeId: 25 },
@@ -1171,7 +1176,15 @@ bucket_definitions:
     }
 
     await using factory = await generateStorageFactory();
-    const bucketStorage = factory.getInstance(sync_rules);
+    const syncRules = await factory.updateSyncRules({
+      content: `
+bucket_definitions:
+  global:
+    data:
+      - SELECT id, description FROM "test"
+`
+    });
+    const bucketStorage = factory.getInstance(syncRules);
 
     const sourceTable = test_utils.makeTestTable('test', ['id', 'description']);
 
@@ -1260,18 +1273,18 @@ bucket_definitions:
     // but large enough in size to be split over multiple returned batches.
     // The specific batch splits is an implementation detail of the storage driver,
     // and the test will have to updated when other implementations are added.
-    const sync_rules = test_utils.testRules(
-      `
+    await using factory = await generateStorageFactory();
+    const syncRules = await factory.updateSyncRules({
+      content: `
 bucket_definitions:
   global:
     data:
       - SELECT id, description FROM "%"
 `
-    );
-    await using factory = await generateStorageFactory();
-    const bucketStorage = factory.getInstance(sync_rules);
+    });
+    const bucketStorage = factory.getInstance(syncRules);
 
-    const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       const sourceTable = TEST_TABLE;
 
       const largeDescription = '0123456789'.repeat(12_000_00);
@@ -1316,9 +1329,11 @@ bucket_definitions:
         },
         afterReplicaId: test_utils.rid('test3')
       });
+
+      await batch.commit('1/1');
     });
 
-    const checkpoint =
+    const { checkpoint } = await bucketStorage.getCheckpoint();
 
     const options: storage.BucketDataBatchOptions = {
       chunkLimitBytes: 16 * 1024 * 1024
@@ -1367,18 +1382,18 @@ bucket_definitions:
 
   test('long batch', async () => {
     // Test syncing a batch of data that is limited by count.
-    const sync_rules = test_utils.testRules(
-      `
+    await using factory = await generateStorageFactory();
+    const syncRules = await factory.updateSyncRules({
+      content: `
 bucket_definitions:
   global:
     data:
       - SELECT id, description FROM "%"
 `
-    );
-    await using factory = await generateStorageFactory();
-    const bucketStorage = factory.getInstance(sync_rules);
+    });
+    const bucketStorage = factory.getInstance(syncRules);
 
-    const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       const sourceTable = TEST_TABLE;
 
       for (let i = 1; i <= 6; i++) {
@@ -1392,9 +1407,11 @@ bucket_definitions:
           afterReplicaId: `test${i}`
         });
       }
+
+      await batch.commit('1/1');
     });
 
-    const checkpoint =
+    const { checkpoint } = await bucketStorage.getCheckpoint();
 
     const batch1 = await test_utils.oneFromAsync(
       bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]]), { limit: 4 })
@@ -1441,8 +1458,9 @@ bucket_definitions:
 
   describe('batch has_more', () => {
     const setup = async (options: BucketDataBatchOptions) => {
-      const sync_rules = test_utils.testRules(
-        `
+      await using factory = await generateStorageFactory();
+      const syncRules = await factory.updateSyncRules({
+        content: `
 bucket_definitions:
   global1:
     data:
@@ -1451,11 +1469,10 @@ bucket_definitions:
     data:
       - SELECT id, description FROM test WHERE bucket = 'global2'
 `
-      );
-      await using factory = await generateStorageFactory();
-      const bucketStorage = factory.getInstance(sync_rules);
+      });
+      const bucketStorage = factory.getInstance(syncRules);
 
-      const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
        const sourceTable = TEST_TABLE;
 
        for (let i = 1; i <= 10; i++) {
@@ -1470,9 +1487,11 @@ bucket_definitions:
            afterReplicaId: `test${i}`
          });
        }
+
+       await batch.commit('1/1');
      });
 
-      const checkpoint =
+      const { checkpoint } = await bucketStorage.getCheckpoint();
       return await test_utils.fromAsync(
         bucketStorage.getBucketDataBatch(
           checkpoint,
```
```diff
@@ -1603,15 +1622,18 @@ bucket_definitions:
 
     const r = await f.configureSyncRules({ content: 'bucket_definitions: {}', validate: false });
     const storage = f.getInstance(r.persisted_sync_rules!);
-    await storage.
+    await storage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.keepalive('1/0');
+    });
 
     const metrics2 = await f.getStorageMetrics();
     expect(metrics2).toMatchSnapshot();
   });
 
   test('invalidate cached parsed sync rules', async () => {
-    const sync_rules_content = test_utils.testRules(
-      `
+    await using bucketStorageFactory = await generateStorageFactory();
+    const syncRules = await bucketStorageFactory.updateSyncRules({
+      content: `
 bucket_definitions:
   by_workspace:
     parameters:
@@ -1619,10 +1641,8 @@ bucket_definitions:
           workspace."userId" = token_parameters.user_id
     data: []
 `
-    );
-
-    await using bucketStorageFactory = await generateStorageFactory();
-    const syncBucketStorage = bucketStorageFactory.getInstance(sync_rules_content);
+    });
+    const syncBucketStorage = bucketStorageFactory.getInstance(syncRules);
 
     const parsedSchema1 = syncBucketStorage.getParsedSyncRules({
       defaultSchema: 'public'
@@ -1651,12 +1671,11 @@ bucket_definitions:
       content: `
 bucket_definitions:
   mybucket:
-    data: []
+    data: []
       `,
       validate: false
     });
     const bucketStorage = factory.getInstance(r.persisted_sync_rules!);
-    await bucketStorage.autoActivate();
 
     const abortController = new AbortController();
     context.onTestFinished(() => abortController.abort());
@@ -1697,7 +1716,6 @@ bucket_definitions:
       validate: false
     });
     const bucketStorage = factory.getInstance(r.persisted_sync_rules!);
-    await bucketStorage.autoActivate();
 
     const abortController = new AbortController();
     context.onTestFinished(() => abortController.abort());
@@ -1760,7 +1778,6 @@ bucket_definitions:
       validate: false
     });
     const bucketStorage = factory.getInstance(r.persisted_sync_rules!);
-    await bucketStorage.autoActivate();
     bucketStorage.setWriteCheckpointMode(storage.WriteCheckpointMode.CUSTOM);
 
     const abortController = new AbortController();
@@ -1801,7 +1818,6 @@ bucket_definitions:
      validate: false
     });
     const bucketStorage = factory.getInstance(r.persisted_sync_rules!);
-    await bucketStorage.autoActivate();
     bucketStorage.setWriteCheckpointMode(storage.WriteCheckpointMode.CUSTOM);
 
     const abortController = new AbortController();
@@ -1845,7 +1861,6 @@ bucket_definitions:
      validate: false
     });
     const bucketStorage = factory.getInstance(r.persisted_sync_rules!);
-    await bucketStorage.autoActivate();
     bucketStorage.setWriteCheckpointMode(storage.WriteCheckpointMode.CUSTOM);
 
     const abortController = new AbortController();
@@ -1916,17 +1931,17 @@ bucket_definitions:
     // Test syncing a batch of data that is small in count,
     // but large enough in size to be split over multiple returned chunks.
     // Similar to the above test, but splits over 1MB chunks.
-    const sync_rules = test_utils.testRules(
-      `
+    await using factory = await generateStorageFactory();
+    const syncRules = await factory.updateSyncRules({
+      content: `
 bucket_definitions:
   global:
     data:
       - SELECT id FROM test
       - SELECT id FROM test_ignore WHERE false
 `
-    );
-    await using factory = await generateStorageFactory();
-    const bucketStorage = factory.getInstance(sync_rules);
+    });
+    const bucketStorage = factory.getInstance(syncRules);
 
     const sourceTable = test_utils.makeTestTable('test', ['id']);
     const sourceTableIgnore = test_utils.makeTestTable('test_ignore', ['id']);
```