@powersync/service-core-tests 0.4.1 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +31 -0
- package/README.md +1 -1
- package/dist/tests/register-compacting-tests.d.ts +1 -1
- package/dist/tests/register-compacting-tests.js +135 -4
- package/dist/tests/register-compacting-tests.js.map +1 -1
- package/dist/tests/register-data-storage-tests.js +30 -126
- package/dist/tests/register-data-storage-tests.js.map +1 -1
- package/dist/tests/register-sync-tests.js +301 -58
- package/dist/tests/register-sync-tests.js.map +1 -1
- package/package.json +5 -5
- package/src/tests/register-compacting-tests.ts +142 -7
- package/src/tests/register-data-storage-tests.ts +18 -94
- package/src/tests/register-sync-tests.ts +251 -18
- package/tsconfig.tsbuildinfo +1 -1
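
Read together, the source hunks below boil down to three API changes in this release: registerCompactTests() no longer takes a CompactOptions argument (each test now passes its limits straight to bucketStorage.compact()), the parameter-query tests move from a one-shot bucket query on sync_rules to getBucketParameterQuerier(...).queryDynamicBucketDescriptions(...) returning { bucket, priority } descriptions, and sync.streamResponse() replaces its parseOptions field with explicit bucketStorage and syncRules fields.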
package/src/tests/register-compacting-tests.ts

@@ -15,10 +15,7 @@ const TEST_TABLE = test_utils.makeTestTable('test', ['id']);
  * compactTests(() => new MongoStorageFactory(), { clearBatchLimit: 2, moveBatchLimit: 1, moveBatchQueryLimit: 1 }));
  * ```
  */
-export function registerCompactTests(
-  generateStorageFactory: storage.TestStorageFactory,
-  compactOptions: CompactOptions
-) {
+export function registerCompactTests(generateStorageFactory: storage.TestStorageFactory) {
   test('compacting (1)', async () => {
     const sync_rules = test_utils.testRules(`
 bucket_definitions:
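
For a downstream storage implementation this moves the compaction limits out of test registration. A minimal sketch of the call-site migration, reusing the MongoStorageFactory example from the doc comment above (that factory name is illustrative, not shipped by this package):

    // 0.4.1: limits were fixed at registration time
    // registerCompactTests(() => new MongoStorageFactory(), { clearBatchLimit: 2, moveBatchLimit: 1, moveBatchQueryLimit: 1 });

    // 0.6.0: register with the factory only; each test supplies its own limits
    registerCompactTests(() => new MongoStorageFactory());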
@@ -87,7 +84,11 @@ bucket_definitions:
       }
     ]);
 
-    await bucketStorage.compact(compactOptions);
+    await bucketStorage.compact({
+      clearBatchLimit: 2,
+      moveBatchLimit: 1,
+      moveBatchQueryLimit: 1
+    });
 
     const batchAfter = await test_utils.oneFromAsync(
       bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']]))
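
The limits now travel with each compact() call instead. A sketch of the new options object as used in these tests; the exact semantics of the three limits are not documented in this diff, so the comments are informed guesses:

    await bucketStorage.compact({
      clearBatchLimit: 2, // presumably max operations cleared per batch
      moveBatchLimit: 1, // presumably max operations moved per batch
      moveBatchQueryLimit: 1 // presumably page size when querying operations to move
    });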
@@ -204,7 +205,11 @@ bucket_definitions:
       }
     ]);
 
-    await bucketStorage.compact(compactOptions);
+    await bucketStorage.compact({
+      clearBatchLimit: 2,
+      moveBatchLimit: 1,
+      moveBatchQueryLimit: 1
+    });
 
     const batchAfter = await test_utils.oneFromAsync(
       bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']]))
@@ -285,7 +290,11 @@ bucket_definitions:
     });
     const checkpoint2 = result2!.flushed_op;
 
-    await bucketStorage.compact(compactOptions);
+    await bucketStorage.compact({
+      clearBatchLimit: 2,
+      moveBatchLimit: 1,
+      moveBatchQueryLimit: 1
+    });
 
     const batchAfter = await test_utils.oneFromAsync(
       bucketStorage.getBucketDataBatch(checkpoint2, new Map([['global[]', '0']]))
@@ -307,4 +316,130 @@ bucket_definitions:
       checksum: 1874612650
     });
   });
+
+  test('compacting (4)', async () => {
+    const sync_rules = test_utils.testRules(/* yaml */
+      ` bucket_definitions:
+          grouped:
+            # The parameter query here is not important
+            # We specifically don't want to create bucket_parameter records here
+            # since the op_ids for bucket_data could vary between storage implementations.
+            parameters: select 'b' as b
+            data:
+              - select * from test where b = bucket.b`);
+
+    await using factory = await generateStorageFactory();
+    const bucketStorage = factory.getInstance(sync_rules);
+
+    const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      /**
+       * Repeatedly create operations which fall into different buckets.
+       * The bucket operations are purposely interleaved as the op_id increases.
+       * A large amount of operations are created here.
+       * The configured window of compacting operations is 100. This means the initial window will
+       * contain operations from multiple buckets.
+       */
+      for (let count = 0; count < 100; count++) {
+        await batch.save({
+          sourceTable: TEST_TABLE,
+          tag: storage.SaveOperationTag.INSERT,
+          after: {
+            id: 't1',
+            b: 'b1',
+            value: 'start'
+          },
+          afterReplicaId: test_utils.rid('t1')
+        });
+
+        await batch.save({
+          sourceTable: TEST_TABLE,
+          tag: storage.SaveOperationTag.UPDATE,
+          after: {
+            id: 't1',
+            b: 'b1',
+            value: 'intermediate'
+          },
+          afterReplicaId: test_utils.rid('t1')
+        });
+
+        await batch.save({
+          sourceTable: TEST_TABLE,
+          tag: storage.SaveOperationTag.INSERT,
+          after: {
+            id: 't2',
+            b: 'b2',
+            value: 'start'
+          },
+          afterReplicaId: test_utils.rid('t2')
+        });
+
+        await batch.save({
+          sourceTable: TEST_TABLE,
+          tag: storage.SaveOperationTag.UPDATE,
+          after: {
+            id: 't1',
+            b: 'b1',
+            value: 'final'
+          },
+          afterReplicaId: test_utils.rid('t1')
+        });
+
+        await batch.save({
+          sourceTable: TEST_TABLE,
+          tag: storage.SaveOperationTag.UPDATE,
+          after: {
+            id: 't2',
+            b: 'b2',
+            value: 'final'
+          },
+          afterReplicaId: test_utils.rid('t2')
+        });
+      }
+    });
+
+    const checkpoint = result!.flushed_op;
+
+    await bucketStorage.compact({
+      clearBatchLimit: 100,
+      moveBatchLimit: 100,
+      moveBatchQueryLimit: 100 // Larger limit for a larger window of operations
+    });
+
+    const batchAfter = await test_utils.fromAsync(
+      bucketStorage.getBucketDataBatch(
+        checkpoint,
+        new Map([
+          ['grouped["b1"]', '0'],
+          ['grouped["b2"]', '0']
+        ])
+      )
+    );
+    const dataAfter = batchAfter.flatMap((b) => b.batch.data);
+
+    // The op_ids will vary between MongoDB and Postgres storage
+    expect(dataAfter).toMatchObject(
+      expect.arrayContaining([
+        { op_id: '497', op: 'CLEAR', checksum: -937074151 },
+        {
+          op_id: '499',
+          op: 'PUT',
+          object_type: 'test',
+          object_id: 't1',
+          checksum: 52221819,
+          subkey: '6544e3899293153fa7b38331/117ab485-4b42-58a2-ab32-0053a22c3423',
+          data: '{"id":"t1","b":"b1","value":"final"}'
+        },
+        { op_id: '498', op: 'CLEAR', checksum: -234380197 },
+        {
+          op_id: '500',
+          op: 'PUT',
+          object_type: 'test',
+          object_id: 't2',
+          checksum: 2126669493,
+          subkey: '6544e3899293153fa7b38331/ec27c691-b47a-5d92-927a-9944feb89eee',
+          data: '{"id":"t2","b":"b2","value":"final"}'
+        }
+      ])
+    );
+  });
 }
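
The expected op_ids in compacting (4) follow from the loop: 100 iterations × 5 saves = 500 operations, interleaved across the two buckets. Compaction then collapses each bucket's history into a single CLEAR (op 497 for grouped["b1"], op 498 for grouped["b2"]), leaving only the final PUT per row (op 499 for t1, op 500 for t2).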
package/src/tests/register-data-storage-tests.ts

@@ -140,7 +140,7 @@ bucket_definitions:
     );
 
     await using factory = await generateStorageFactory();
-
+    const bucketStorage = factory.getInstance(sync_rules);
 
     const table = test_utils.makeTestTable('todos', ['id', 'list_id']);
 
@@ -394,13 +394,12 @@ bucket_definitions:
     const parameter_sets = await bucketStorage.getParameterSets(checkpoint, lookups);
     expect(parameter_sets).toEqual([{ workspace_id: 'workspace1' }]);
 
-    const buckets = await sync_rules.
+    const buckets = await sync_rules.getBucketParameterQuerier(parameters).queryDynamicBucketDescriptions({
       getParameterSets(lookups) {
         return bucketStorage.getParameterSets(checkpoint, lookups);
-      },
-      parameters
+      }
     });
-    expect(buckets).toEqual(['by_workspace["workspace1"]']);
+    expect(buckets).toEqual([{ bucket: 'by_workspace["workspace1"]', priority: 3 }]);
   });
 
   test('save and load parameters with dynamic global buckets', async () => {
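
The replacement API is a two-step querier. A sketch of the new shape, assembled from the added lines above (the priority of 3 appears to be the default when a bucket definition does not set one):

    const querier = sync_rules.getBucketParameterQuerier(parameters);
    const buckets = await querier.queryDynamicBucketDescriptions({
      getParameterSets(lookups) {
        return bucketStorage.getParameterSets(checkpoint, lookups);
      }
    });
    // => [{ bucket: 'by_workspace["workspace1"]', priority: 3 }]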
@@ -466,14 +465,16 @@ bucket_definitions:
     parameter_sets.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
     expect(parameter_sets).toEqual([{ workspace_id: 'workspace1' }, { workspace_id: 'workspace3' }]);
 
-    const buckets = await sync_rules.
+    const buckets = await sync_rules.getBucketParameterQuerier(parameters).queryDynamicBucketDescriptions({
       getParameterSets(lookups) {
         return bucketStorage.getParameterSets(checkpoint, lookups);
-      },
-      parameters
+      }
     });
-    buckets.sort();
-    expect(buckets).toEqual([
+    buckets.sort((a, b) => a.bucket.localeCompare(b.bucket));
+    expect(buckets).toEqual([
+      { bucket: 'by_public_workspace["workspace1"]', priority: 3 },
+      { bucket: 'by_public_workspace["workspace3"]', priority: 3 }
+    ]);
   });
 
   test('multiple parameter queries', async () => {
@@ -562,12 +563,13 @@ bucket_definitions:
     expect(parameter_sets2).toEqual([{ workspace_id: 'workspace3' }]);
 
     // Test final values - the important part
-    const buckets = await sync_rules.
-      getParameterSets(lookups) {
-        return bucketStorage.getParameterSets(checkpoint, lookups);
-      },
-      parameters
-    });
+    const buckets = (
+      await sync_rules.getBucketParameterQuerier(parameters).queryDynamicBucketDescriptions({
+        getParameterSets(lookups) {
+          return bucketStorage.getParameterSets(checkpoint, lookups);
+        }
+      })
+    ).map((e) => e.bucket);
     buckets.sort();
     expect(buckets).toEqual(['by_workspace["workspace1"]', 'by_workspace["workspace3"]']);
   });
@@ -1415,84 +1417,6 @@ bucket_definitions:
     expect(test_utils.getBatchMeta(batch3)).toEqual(null);
   });
 
-  test('batch should be disposed automatically', async () => {
-    const sync_rules = test_utils.testRules(`
-bucket_definitions:
-  global:
-    data: []
-`);
-
-    await using factory = await generateStorageFactory();
-    const bucketStorage = factory.getInstance(sync_rules);
-
-    let isDisposed = false;
-    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
-      batch.registerListener({
-        disposed: () => {
-          isDisposed = true;
-        }
-      });
-    });
-    expect(isDisposed).true;
-
-    isDisposed = false;
-    let errorCaught = false;
-    try {
-      await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
-        batch.registerListener({
-          disposed: () => {
-            isDisposed = true;
-          }
-        });
-        throw new Error(`Testing exceptions`);
-      });
-    } catch (ex) {
-      errorCaught = true;
-      expect(ex.message.includes('Testing')).true;
-    }
-    expect(errorCaught).true;
-    expect(isDisposed).true;
-  });
-
-  test('batch should be disposed automatically', async () => {
-    const sync_rules = test_utils.testRules(`
-bucket_definitions:
-  global:
-    data: []
-`);
-
-    await using factory = await generateStorageFactory();
-    const bucketStorage = factory.getInstance(sync_rules);
-
-    let isDisposed = false;
-    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
-      batch.registerListener({
-        disposed: () => {
-          isDisposed = true;
-        }
-      });
-    });
-    expect(isDisposed).true;
-
-    isDisposed = false;
-    let errorCaught = false;
-    try {
-      await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
-        batch.registerListener({
-          disposed: () => {
-            isDisposed = true;
-          }
-        });
-        throw new Error(`Testing exceptions`);
-      });
-    } catch (ex) {
-      errorCaught = true;
-      expect(ex.message.includes('Testing')).true;
-    }
-    expect(errorCaught).true;
-    expect(isDisposed).true;
-  });
-
   test('empty storage metrics', async () => {
     await using f = await generateStorageFactory({ dropAll: true });
     const metrics = await f.getStorageMetrics();
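
The removed block above existed twice verbatim in 0.4.1 (the same 'batch should be disposed automatically' test was pasted twice); 0.6.0 drops both copies rather than deduplicating them.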
package/src/tests/register-sync-tests.ts

@@ -67,13 +67,13 @@ export function registerSyncTests(factory: storage.TestStorageFactory) {
     });
 
     const stream = sync.streamResponse({
-
+      bucketStorage: bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
-      parseOptions: test_utils.PARSE_OPTIONS,
       tracker,
       syncParams: new RequestParameters({ sub: '' }, {}),
       token: { exp: Date.now() / 1000 + 10 } as any
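
This same substitution repeats in every streamResponse call site below: parseOptions is gone, and the storage instance plus its parsed rules are passed explicitly. A sketch of the assembled 0.6.0 call shape (the single removed line after streamResponse({ was not preserved by the diff viewer):

    const stream = sync.streamResponse({
      bucketStorage: bucketStorage,
      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
      params: { buckets: [], include_checksum: true, raw_data: true },
      tracker,
      syncParams: new RequestParameters({ sub: '' }, {}),
      token: { exp: Date.now() / 1000 + 10 } as any
    });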
@@ -83,6 +83,239 @@ export function registerSyncTests(factory: storage.TestStorageFactory) {
     expect(lines).toMatchSnapshot();
   });
 
+  test('sync buckets in order', async () => {
+    await using f = await factory();
+
+    const syncRules = await f.updateSyncRules({
+      content: `
+bucket_definitions:
+  b0:
+    priority: 2
+    data:
+      - SELECT * FROM test WHERE LENGTH(id) <= 2;
+  b1:
+    priority: 1
+    data:
+      - SELECT * FROM test WHERE LENGTH(id) > 2;
+`
+    });
+
+    const bucketStorage = f.getInstance(syncRules);
+    await bucketStorage.autoActivate();
+
+    const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 't1',
+          description: 'Test 1'
+        },
+        afterReplicaId: 't1'
+      });
+
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 'earlier',
+          description: 'Test 2'
+        },
+        afterReplicaId: 'earlier'
+      });
+
+      await batch.commit('0/1');
+    });
+
+    const stream = sync.streamResponse({
+      bucketStorage: bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+      params: {
+        buckets: [],
+        include_checksum: true,
+        raw_data: true
+      },
+      tracker,
+      syncParams: new RequestParameters({ sub: '' }, {}),
+      token: { exp: Date.now() / 1000 + 10 } as any
+    });
+
+    const lines = await consumeCheckpointLines(stream);
+    expect(lines).toMatchSnapshot();
+  });
+
+  test('sync interrupts low-priority buckets on new checkpoints', async () => {
+    await using f = await factory();
+
+    const syncRules = await f.updateSyncRules({
+      content: `
+bucket_definitions:
+  b0:
+    priority: 2
+    data:
+      - SELECT * FROM test WHERE LENGTH(id) <= 5;
+  b1:
+    priority: 1
+    data:
+      - SELECT * FROM test WHERE LENGTH(id) > 5;
+`
+    });
+
+    const bucketStorage = f.getInstance(syncRules);
+    await bucketStorage.autoActivate();
+
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      // Initial data: Add one priority row and 10k low-priority rows.
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 'highprio',
+          description: 'High priority row'
+        },
+        afterReplicaId: 'highprio'
+      });
+      for (let i = 0; i < 10_000; i++) {
+        await batch.save({
+          sourceTable: TEST_TABLE,
+          tag: storage.SaveOperationTag.INSERT,
+          after: {
+            id: `${i}`,
+            description: 'low prio'
+          },
+          afterReplicaId: `${i}`
+        });
+      }
+
+      await batch.commit('0/1');
+    });
+
+    const stream = sync.streamResponse({
+      bucketStorage: bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+      params: {
+        buckets: [],
+        include_checksum: true,
+        raw_data: true
+      },
+      tracker,
+      syncParams: new RequestParameters({ sub: '' }, {}),
+      token: { exp: Date.now() / 1000 + 10 } as any
+    });
+
+    let sentCheckpoints = 0;
+    let sentRows = 0;
+
+    for await (let next of stream) {
+      if (typeof next == 'string') {
+        next = JSON.parse(next);
+      }
+      if (typeof next === 'object' && next !== null) {
+        if ('partial_checkpoint_complete' in next) {
+          if (sentCheckpoints == 1) {
+            // Save new data to interrupt the low-priority sync.
+
+            await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+              // Add another high-priority row. This should interrupt the long-running low-priority sync.
+              await batch.save({
+                sourceTable: TEST_TABLE,
+                tag: storage.SaveOperationTag.INSERT,
+                after: {
+                  id: 'highprio2',
+                  description: 'Another high-priority row'
+                },
+                afterReplicaId: 'highprio2'
+              });
+
+              await batch.commit('0/2');
+            });
+          } else {
+            // Low-priority sync from the first checkpoint was interrupted. This should not happen before
+            // 1000 low-priority items were synchronized.
+            expect(sentCheckpoints).toBe(2);
+            expect(sentRows).toBeGreaterThan(1000);
+          }
+        }
+        if ('checkpoint' in next || 'checkpoint_diff' in next) {
+          sentCheckpoints += 1;
+        }
+
+        if ('data' in next) {
+          sentRows += next.data.data.length;
+        }
+        if ('checkpoint_complete' in next) {
+          break;
+        }
+      }
+    }
+
+    expect(sentCheckpoints).toBe(2);
+    expect(sentRows).toBe(10002);
+  });
+
+  test('sends checkpoint complete line for empty checkpoint', async () => {
+    await using f = await factory();
+
+    const syncRules = await f.updateSyncRules({
+      content: BASIC_SYNC_RULES
+    });
+    const bucketStorage = f.getInstance(syncRules);
+    await bucketStorage.autoActivate();
+
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 't1',
+          description: 'sync'
+        },
+        afterReplicaId: 't1'
+      });
+      await batch.commit('0/1');
+    });
+
+    const stream = sync.streamResponse({
+      bucketStorage: bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+      params: {
+        buckets: [],
+        include_checksum: true,
+        raw_data: true
+      },
+      tracker,
+      syncParams: new RequestParameters({ sub: '' }, {}),
+      token: { exp: Date.now() / 1000 + 100000 } as any
+    });
+
+    const lines: any[] = [];
+    let receivedCompletions = 0;
+
+    for await (let next of stream) {
+      if (typeof next == 'string') {
+        next = JSON.parse(next);
+      }
+      lines.push(next);
+
+      if (typeof next === 'object' && next !== null) {
+        if ('checkpoint_complete' in next) {
+          receivedCompletions++;
+          if (receivedCompletions == 1) {
+            // Trigger an empty bucket update.
+            await bucketStorage.createManagedWriteCheckpoint({ user_id: '', heads: { '1': '1/0' } });
+            await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+              await batch.commit('1/0');
+            });
+          } else {
+            break;
+          }
+        }
+      }
+    }
+
+    expect(lines).toMatchSnapshot();
+  });
+
   test('sync legacy non-raw data', async () => {
     const f = await factory();
 
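
The assertions in the interruption test are accounted for by its setup: the first checkpoint contains 1 high-priority plus 10 000 low-priority rows and the second adds 1 more high-priority row, so sentRows ends at 10002, and the two commits yield exactly two checkpoint/checkpoint_diff lines. The partial_checkpoint_complete line that the test keys off appears to signal that every bucket at a given priority level has been fully sent, which gives the stream a safe point to interleave the new checkpoint.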
@@ -109,13 +342,13 @@ export function registerSyncTests(factory: storage.TestStorageFactory) {
     });
 
     const stream = sync.streamResponse({
-
+      bucketStorage: bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: false
       },
-      parseOptions: test_utils.PARSE_OPTIONS,
       tracker,
       syncParams: new RequestParameters({ sub: '' }, {}),
       token: { exp: Date.now() / 1000 + 10 } as any
@@ -134,17 +367,17 @@ export function registerSyncTests(factory: storage.TestStorageFactory) {
       content: BASIC_SYNC_RULES
     });
 
-    const
-    await
+    const bucketStorage = await f.getInstance(syncRules);
+    await bucketStorage.autoActivate();
 
     const stream = sync.streamResponse({
-
+      bucketStorage: bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
-      parseOptions: test_utils.PARSE_OPTIONS,
       tracker,
       syncParams: new RequestParameters({ sub: '' }, {}),
       token: { exp: 0 } as any
@@ -165,13 +398,13 @@ export function registerSyncTests(factory: storage.TestStorageFactory) {
     await bucketStorage.autoActivate();
 
     const stream = sync.streamResponse({
-
+      bucketStorage: bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
-      parseOptions: test_utils.PARSE_OPTIONS,
       tracker,
       syncParams: new RequestParameters({ sub: '' }, {}),
       token: { exp: Date.now() / 1000 + 10 } as any
@@ -222,19 +455,19 @@ export function registerSyncTests(factory: storage.TestStorageFactory) {
       content: BASIC_SYNC_RULES
     });
 
-    const
-    await
+    const bucketStorage = await f.getInstance(syncRules);
+    await bucketStorage.autoActivate();
 
     const exp = Date.now() / 1000 + 0.1;
 
    const stream = sync.streamResponse({
-
+      bucketStorage: bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
      params: {
        buckets: [],
        include_checksum: true,
        raw_data: true
      },
-      parseOptions: test_utils.PARSE_OPTIONS,
      tracker,
      syncParams: new RequestParameters({ sub: '' }, {}),
      token: { exp: exp } as any
@@ -288,13 +521,13 @@ export function registerSyncTests(factory: storage.TestStorageFactory) {
     });
 
     const stream = sync.streamResponse({
-
+      bucketStorage: bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
-      parseOptions: test_utils.PARSE_OPTIONS,
       tracker,
       syncParams: new RequestParameters({ sub: '' }, {}),
       token: { exp: Date.now() / 1000 + 10 } as any
@@ -411,13 +644,13 @@ export function registerSyncTests(factory: storage.TestStorageFactory) {
     });
 
     const params: sync.SyncStreamParameters = {
-
+      bucketStorage: bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
-      parseOptions: test_utils.PARSE_OPTIONS,
       tracker,
       syncParams: new RequestParameters({ sub: 'test' }, {}),
       token: { sub: 'test', exp: Date.now() / 1000 + 10 } as any