@powersync/service-core-tests 0.9.2 → 0.9.4
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
- package/CHANGELOG.md +25 -0
- package/dist/test-utils/general-utils.d.ts +2 -2
- package/dist/test-utils/general-utils.js +3 -3
- package/dist/test-utils/general-utils.js.map +1 -1
- package/dist/tests/register-compacting-tests.js +6 -6
- package/dist/tests/register-compacting-tests.js.map +1 -1
- package/dist/tests/register-data-storage-tests.js +248 -60
- package/dist/tests/register-data-storage-tests.js.map +1 -1
- package/dist/tests/register-sync-tests.js +206 -73
- package/dist/tests/register-sync-tests.js.map +1 -1
- package/package.json +3 -3
- package/src/test-utils/general-utils.ts +6 -6
- package/src/tests/register-compacting-tests.ts +6 -6
- package/src/tests/register-data-storage-tests.ts +230 -13
- package/src/tests/register-sync-tests.ts +136 -2
- package/tsconfig.tsbuildinfo +1 -1
package/src/tests/register-data-storage-tests.ts

@@ -1,6 +1,12 @@
-import {
+import {
+  BucketDataBatchOptions,
+  getUuidReplicaIdentityBson,
+  InternalOpId,
+  OplogEntry,
+  storage
+} from '@powersync/service-core';
 import { ParameterLookup, RequestParameters } from '@powersync/service-sync-rules';
-import { expect, test } from 'vitest';
+import { expect, test, describe, beforeEach } from 'vitest';
 import * as test_utils from '../test-utils/test-utils-index.js';
 
 export const TEST_TABLE = test_utils.makeTestTable('test', ['id']);
@@ -338,7 +344,7 @@ bucket_definitions:
     const checkpoint = result!.flushed_op;
 
     const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]])));
-    const data = batch[0].
+    const data = batch[0].chunkData.data.map((d) => {
       return {
         op: d.op,
         object_id: d.object_id,
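The change repeated in this hunk and the ones that follow is that each entry yielded by `getBucketDataBatch` now carries its payload under a `chunkData` property instead of exposing the chunk fields directly on the entry. A minimal sketch of the adjustment, using only the field names visible in the diff (`chunkData`, `data`, `after`, `next_after`, `has_more`); the interfaces below are illustrative approximations, not the package's actual types:

// Illustrative sketch only: these interfaces approximate the shape implied by the
// diff (an entry wrapping a `chunkData` chunk); they are not the package's types.
interface OplogEntryLike {
  op: string;
  op_id: string;
  object_id?: string;
  checksum: number;
}

interface BucketChunkLike {
  bucket: string;
  data: OplogEntryLike[];
  after: string;
  next_after: string;
  has_more: boolean;
}

interface BucketDataBatchEntryLike {
  chunkData: BucketChunkLike;
}

// Old callers read the chunk fields directly off the entry; with this change
// they go through `chunkData`:
function summarize(batch: BucketDataBatchEntryLike[]) {
  return batch.flatMap((entry) =>
    entry.chunkData.data.map((d) => ({ op: d.op, object_id: d.object_id }))
  );
}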
@@ -635,7 +641,7 @@ bucket_definitions:
     });
     const checkpoint = result!.flushed_op;
     const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]])));
-    const data = batch[0].
+    const data = batch[0].chunkData.data.map((d) => {
       return {
         op: d.op,
         object_id: d.object_id
@@ -699,7 +705,7 @@ bucket_definitions:
     const checkpoint = result!.flushed_op;
 
     const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]])));
-    const data = batch[0].
+    const data = batch[0].chunkData.data.map((d) => {
       return {
         op: d.op,
         object_id: d.object_id,
@@ -815,7 +821,7 @@ bucket_definitions:
 
     const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]])));
 
-    const data = batch[0].
+    const data = batch[0].chunkData.data.map((d) => {
       return {
         op: d.op,
         object_id: d.object_id,
@@ -1014,7 +1020,7 @@ bucket_definitions:
       bucketStorage.getBucketDataBatch(checkpoint2, new Map([['global[]', checkpoint1]]))
     );
 
-    const data = batch[0].
+    const data = batch[0].chunkData.data.map((d) => {
       return {
         op: d.op,
         object_id: d.object_id,
@@ -1113,7 +1119,7 @@ bucket_definitions:
     const batch = await test_utils.fromAsync(
       bucketStorage.getBucketDataBatch(checkpoint3, new Map([['global[]', checkpoint1]]))
     );
-    const data = batch[0].
+    const data = batch[0].chunkData.data.map((d) => {
       return {
         op: d.op,
         object_id: d.object_id,
@@ -1221,7 +1227,7 @@ bucket_definitions:
     const batch = await test_utils.fromAsync(
       bucketStorage.getBucketDataBatch(checkpoint3, new Map([['global[]', checkpoint1]]))
     );
-    const data = batch[0].
+    const data = batch[0].chunkData.data.map((d) => {
       return {
         op: d.op,
         object_id: d.object_id,
@@ -1332,7 +1338,11 @@ bucket_definitions:
     });
 
     const batch2 = await test_utils.fromAsync(
-      bucketStorage.getBucketDataBatch(
+      bucketStorage.getBucketDataBatch(
+        checkpoint,
+        new Map([['global[]', BigInt(batch1[0].chunkData.next_after)]]),
+        options
+      )
     );
     expect(test_utils.getBatchData(batch2)).toEqual([
       { op_id: '3', op: 'PUT', object_id: 'large2', checksum: 1795508474 },
@@ -1345,7 +1355,11 @@ bucket_definitions:
     });
 
     const batch3 = await test_utils.fromAsync(
-      bucketStorage.getBucketDataBatch(
+      bucketStorage.getBucketDataBatch(
+        checkpoint,
+        new Map([['global[]', BigInt(batch2[0].chunkData.next_after)]]),
+        options
+      )
     );
     expect(test_utils.getBatchData(batch3)).toEqual([]);
     expect(test_utils.getBatchMeta(batch3)).toEqual(null);
@@ -1400,7 +1414,7 @@ bucket_definitions:
     });
 
     const batch2 = await test_utils.oneFromAsync(
-      bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', BigInt(batch1.
+      bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', BigInt(batch1.chunkData.next_after)]]), {
        limit: 4
      })
    );
@@ -1416,7 +1430,7 @@ bucket_definitions:
     });
 
     const batch3 = await test_utils.fromAsync(
-      bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', BigInt(batch2.
+      bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', BigInt(batch2.chunkData.next_after)]]), {
        limit: 4
      })
    );
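The pagination hunks above now read the continuation token from `chunkData.next_after`. A hedged sketch of the resulting paging pattern, with the `getBucketDataBatch` signature inferred from the calls in the diff (a checkpoint, a Map of bucket name to start op, and an options object) rather than taken from the package's declarations:

// Sketch of paging through one bucket until the storage reports no more data.
// The storage interface here is an assumption modelled on the calls in the diff.
interface ChunkEntry {
  chunkData: { bucket: string; data: unknown[]; next_after: string; has_more: boolean };
}

interface BucketStorageLike {
  getBucketDataBatch(
    checkpoint: bigint,
    dataBuckets: Map<string, bigint>,
    options?: { limit?: number; chunkLimitBytes?: number }
  ): AsyncIterable<ChunkEntry>;
}

async function readBucket(storage: BucketStorageLike, checkpoint: bigint, bucket: string) {
  let after = 0n;
  const rows: unknown[] = [];
  while (true) {
    let lastChunk: ChunkEntry | undefined;
    for await (const entry of storage.getBucketDataBatch(checkpoint, new Map([[bucket, after]]), { limit: 4 })) {
      rows.push(...entry.chunkData.data);
      lastChunk = entry;
    }
    // Stop when the call yields nothing, or when the last chunk says the bucket is drained.
    if (!lastChunk || !lastChunk.chunkData.has_more) break;
    after = BigInt(lastChunk.chunkData.next_after);
  }
  return rows;
}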
@@ -1425,6 +1439,159 @@ bucket_definitions:
     expect(test_utils.getBatchMeta(batch3)).toEqual(null);
   });
 
+  describe('batch has_more', () => {
+    const setup = async (options: BucketDataBatchOptions) => {
+      const sync_rules = test_utils.testRules(
+        `
+        bucket_definitions:
+          global1:
+            data:
+              - SELECT id, description FROM test WHERE bucket = 'global1'
+          global2:
+            data:
+              - SELECT id, description FROM test WHERE bucket = 'global2'
+        `
+      );
+      await using factory = await generateStorageFactory();
+      const bucketStorage = factory.getInstance(sync_rules);
+
+      const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+        const sourceTable = TEST_TABLE;
+
+        for (let i = 1; i <= 10; i++) {
+          await batch.save({
+            sourceTable,
+            tag: storage.SaveOperationTag.INSERT,
+            after: {
+              id: `test${i}`,
+              description: `test${i}`,
+              bucket: i == 1 ? 'global1' : 'global2'
+            },
+            afterReplicaId: `test${i}`
+          });
+        }
+      });
+
+      const checkpoint = result!.flushed_op;
+      return await test_utils.fromAsync(
+        bucketStorage.getBucketDataBatch(
+          checkpoint,
+          new Map([
+            ['global1[]', 0n],
+            ['global2[]', 0n]
+          ]),
+          options
+        )
+      );
+    };
+
+    test('batch has_more (1)', async () => {
+      const batch = await setup({ limit: 5 });
+      expect(batch.length).toEqual(2);
+
+      expect(batch[0].chunkData.bucket).toEqual('global1[]');
+      expect(batch[1].chunkData.bucket).toEqual('global2[]');
+
+      expect(test_utils.getBatchData(batch[0])).toEqual([
+        { op_id: '1', op: 'PUT', object_id: 'test1', checksum: 2871785649 }
+      ]);
+
+      expect(test_utils.getBatchData(batch[1])).toEqual([
+        { op_id: '2', op: 'PUT', object_id: 'test2', checksum: 730027011 },
+        { op_id: '3', op: 'PUT', object_id: 'test3', checksum: 1359888332 },
+        { op_id: '4', op: 'PUT', object_id: 'test4', checksum: 2049153252 },
+        { op_id: '5', op: 'PUT', object_id: 'test5', checksum: 3686902721 }
+      ]);
+
+      expect(test_utils.getBatchMeta(batch[0])).toEqual({
+        after: '0',
+        has_more: false,
+        next_after: '1'
+      });
+
+      expect(test_utils.getBatchMeta(batch[1])).toEqual({
+        after: '0',
+        has_more: true,
+        next_after: '5'
+      });
+    });
+
+    test('batch has_more (2)', async () => {
+      const batch = await setup({ limit: 11 });
+      expect(batch.length).toEqual(2);
+
+      expect(batch[0].chunkData.bucket).toEqual('global1[]');
+      expect(batch[1].chunkData.bucket).toEqual('global2[]');
+
+      expect(test_utils.getBatchData(batch[0])).toEqual([
+        { op_id: '1', op: 'PUT', object_id: 'test1', checksum: 2871785649 }
+      ]);
+
+      expect(test_utils.getBatchData(batch[1])).toEqual([
+        { op_id: '2', op: 'PUT', object_id: 'test2', checksum: 730027011 },
+        { op_id: '3', op: 'PUT', object_id: 'test3', checksum: 1359888332 },
+        { op_id: '4', op: 'PUT', object_id: 'test4', checksum: 2049153252 },
+        { op_id: '5', op: 'PUT', object_id: 'test5', checksum: 3686902721 },
+        { op_id: '6', op: 'PUT', object_id: 'test6', checksum: 1974820016 },
+        { op_id: '7', op: 'PUT', object_id: 'test7', checksum: 2477637855 },
+        { op_id: '8', op: 'PUT', object_id: 'test8', checksum: 3644033632 },
+        { op_id: '9', op: 'PUT', object_id: 'test9', checksum: 1011055869 },
+        { op_id: '10', op: 'PUT', object_id: 'test10', checksum: 1331456365 }
+      ]);
+
+      expect(test_utils.getBatchMeta(batch[0])).toEqual({
+        after: '0',
+        has_more: false,
+        next_after: '1'
+      });
+
+      expect(test_utils.getBatchMeta(batch[1])).toEqual({
+        after: '0',
+        has_more: false,
+        next_after: '10'
+      });
+    });
+
+    test('batch has_more (3)', async () => {
+      // 50 bytes is more than 1 row, less than 2 rows
+      const batch = await setup({ limit: 3, chunkLimitBytes: 50 });
+
+      expect(batch.length).toEqual(3);
+      expect(batch[0].chunkData.bucket).toEqual('global1[]');
+      expect(batch[1].chunkData.bucket).toEqual('global2[]');
+      expect(batch[2].chunkData.bucket).toEqual('global2[]');
+
+      expect(test_utils.getBatchData(batch[0])).toEqual([
+        { op_id: '1', op: 'PUT', object_id: 'test1', checksum: 2871785649 }
+      ]);
+
+      expect(test_utils.getBatchData(batch[1])).toEqual([
+        { op_id: '2', op: 'PUT', object_id: 'test2', checksum: 730027011 }
+      ]);
+      expect(test_utils.getBatchData(batch[2])).toEqual([
+        { op_id: '3', op: 'PUT', object_id: 'test3', checksum: 1359888332 }
+      ]);
+
+      expect(test_utils.getBatchMeta(batch[0])).toEqual({
+        after: '0',
+        has_more: false,
+        next_after: '1'
+      });
+
+      expect(test_utils.getBatchMeta(batch[1])).toEqual({
+        after: '0',
+        has_more: true,
+        next_after: '2'
+      });
+
+      expect(test_utils.getBatchMeta(batch[2])).toEqual({
+        after: '2',
+        has_more: true,
+        next_after: '3'
+      });
+    });
+  });
+
   test('empty storage metrics', async () => {
     await using f = await generateStorageFactory({ dropAll: true });
     const metrics = await f.getStorageMetrics();
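The new `batch has_more` tests pin down the chunking contract: a request's `limit` is shared across all requested buckets, `chunkLimitBytes` can split a single bucket into several chunks, and each chunk reports `after`, `next_after` and `has_more` for resumption. The helpers `getBatchData` and `getBatchMeta` come from the package's test utilities; the simplified single-chunk sketch below only approximates what they appear to return, based on the assertions above, and is not the actual implementation:

// Simplified, single-entry approximation of the test helpers used above.
type ChunkLike = {
  bucket: string;
  data: Array<{ op_id: string; op: string; object_id: string; checksum: number }>;
  after: string;
  next_after: string;
  has_more: boolean;
};

function getBatchDataSketch(entry: { chunkData: ChunkLike }) {
  // Projects each oplog entry down to the fields the assertions compare.
  return entry.chunkData.data.map((d) => ({
    op_id: d.op_id,
    op: d.op,
    object_id: d.object_id,
    checksum: d.checksum
  }));
}

function getBatchMetaSketch(entry: { chunkData: ChunkLike }) {
  // The resumption metadata: where the chunk started, where to continue, and
  // whether more data remains in this bucket for the same checkpoint.
  const { after, next_after, has_more } = entry.chunkData;
  return { after, has_more, next_after };
}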
@@ -1711,4 +1878,54 @@ bucket_definitions:
       }
     });
   });
+
+  test('op_id initialization edge case', async () => {
+    // Test syncing a batch of data that is small in count,
+    // but large enough in size to be split over multiple returned chunks.
+    // Similar to the above test, but splits over 1MB chunks.
+    const sync_rules = test_utils.testRules(
+      `
+      bucket_definitions:
+        global:
+          data:
+            - SELECT id FROM test
+            - SELECT id FROM test_ignore WHERE false
+      `
+    );
+    await using factory = await generateStorageFactory();
+    const bucketStorage = factory.getInstance(sync_rules);
+
+    const sourceTable = test_utils.makeTestTable('test', ['id']);
+    const sourceTableIgnore = test_utils.makeTestTable('test_ignore', ['id']);
+
+    const result1 = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      // This saves a record to current_data, but not bucket_data.
+      // This causes a checkpoint to be created without increasing the op_id sequence.
+      await batch.save({
+        sourceTable: sourceTableIgnore,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 'test1'
+        },
+        afterReplicaId: test_utils.rid('test1')
+      });
+    });
+
+    const checkpoint1 = result1!.flushed_op;
+
+    const result2 = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.save({
+        sourceTable: sourceTable,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 'test2'
+        },
+        afterReplicaId: test_utils.rid('test2')
+      });
+    });
+
+    const checkpoint2 = result2!.flushed_op;
+    // we expect 0n and 1n, or 1n and 2n.
+    expect(checkpoint2).toBeGreaterThan(checkpoint1);
+  });
 }
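The `op_id initialization edge case` test asserts that `flushed_op` is strictly monotonic across batches even when a batch only touches `current_data` and writes nothing to `bucket_data`. A tiny sketch of that invariant, assuming `flushed_op` is a bigint-valued `InternalOpId` as the `0n`/`1n` comment suggests:

// Invariant sketch: checkpoints must advance even for batches without bucket data.
function assertCheckpointAdvances(previous: bigint, next: bigint): void {
  if (next <= previous) {
    throw new Error(`checkpoint did not advance: ${previous} -> ${next}`);
  }
}

// Mirrors expect(checkpoint2).toBeGreaterThan(checkpoint1) in the test above.
assertCheckpointAdvances(0n, 1n);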
package/src/tests/register-sync-tests.ts

@@ -160,8 +160,7 @@ bucket_definitions:
     expect(lines).toMatchSnapshot();
   });
 
-
-  test.skip('sync interrupts low-priority buckets on new checkpoints', async () => {
+  test('sync interrupts low-priority buckets on new checkpoints', async () => {
     await using f = await factory();
 
     const syncRules = await f.updateSyncRules({
@@ -271,6 +270,141 @@ bucket_definitions:
     expect(sentRows).toBe(10002);
   });
 
+  test('sync interruptions with unrelated data', async () => {
+    await using f = await factory();
+
+    const syncRules = await f.updateSyncRules({
+      content: `
+      bucket_definitions:
+        b0:
+          priority: 2
+          data:
+            - SELECT * FROM test WHERE LENGTH(id) <= 5;
+        b1:
+          priority: 1
+          parameters: SELECT request.user_id() as user_id
+          data:
+            - SELECT * FROM test WHERE LENGTH(id) > 5 AND description = bucket.user_id;
+      `
+    });
+
+    const bucketStorage = f.getInstance(syncRules);
+    await bucketStorage.autoActivate();
+
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      // Initial data: Add one priority row and 10k low-priority rows.
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 'highprio',
+          description: 'user_one'
+        },
+        afterReplicaId: 'highprio'
+      });
+      for (let i = 0; i < 10_000; i++) {
+        await batch.save({
+          sourceTable: TEST_TABLE,
+          tag: storage.SaveOperationTag.INSERT,
+          after: {
+            id: `${i}`,
+            description: 'low prio'
+          },
+          afterReplicaId: `${i}`
+        });
+      }
+
+      await batch.commit('0/1');
+    });
+
+    const stream = sync.streamResponse({
+      syncContext,
+      bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+      params: {
+        buckets: [],
+        include_checksum: true,
+        raw_data: true
+      },
+      tracker,
+      syncParams: new RequestParameters({ sub: 'user_one' }, {}),
+      token: { sub: 'user_one', exp: Date.now() / 1000 + 100000 } as any
+    });
+
+    let sentCheckpoints = 0;
+    let completedCheckpoints = 0;
+    let sentRows = 0;
+
+    // Expected flow:
+    // 1. Stream starts, we receive a checkpoint followed by the one high-prio row and a partial completion.
+    // 2. We insert a new row that is not part of a bucket relevant to this stream.
+    // 3. This means that no interruption happens and we receive all the low-priority data, followed by a checkpoint.
+    // 4. After the checkpoint, add a new row that _is_ relevant for this sync, which should trigger a new iteration.
+
+    for await (let next of stream) {
+      if (typeof next == 'string') {
+        next = JSON.parse(next);
+      }
+      if (typeof next === 'object' && next !== null) {
+        if ('partial_checkpoint_complete' in next) {
+          if (sentCheckpoints == 1) {
+            await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+              // Add a high-priority row that doesn't affect this sync stream.
+              await batch.save({
+                sourceTable: TEST_TABLE,
+                tag: storage.SaveOperationTag.INSERT,
+                after: {
+                  id: 'highprio2',
+                  description: 'user_two'
+                },
+                afterReplicaId: 'highprio2'
+              });
+
+              await batch.commit('0/2');
+            });
+          } else {
+            expect(sentCheckpoints).toBe(2);
+            expect(sentRows).toBe(10002);
+          }
+        }
+        if ('checkpoint' in next || 'checkpoint_diff' in next) {
+          sentCheckpoints += 1;
+        }
+
+        if ('data' in next) {
+          sentRows += next.data.data.length;
+        }
+        if ('checkpoint_complete' in next) {
+          completedCheckpoints++;
+          if (completedCheckpoints == 2) {
+            break;
+          }
+          if (completedCheckpoints == 1) {
+            expect(sentRows).toBe(10001);
+
+            await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+              // Add a high-priority row that affects this sync stream.
+              await batch.save({
+                sourceTable: TEST_TABLE,
+                tag: storage.SaveOperationTag.INSERT,
+                after: {
+                  id: 'highprio3',
+                  description: 'user_one'
+                },
+                afterReplicaId: 'highprio3'
+              });
+
+              await batch.commit('0/3');
+            });
+          }
+        }
+      }
+    }
+
+    expect(sentCheckpoints).toBe(2);
+    expect(sentRows).toBe(10002);
+  });
+
   test('sends checkpoint complete line for empty checkpoint', async () => {
     await using f = await factory();
 
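The new `sync interruptions with unrelated data` test drives `sync.streamResponse` and reacts to individual sync lines. A hedged sketch of that consumption pattern; the union below models only the line shapes the test checks for (`checkpoint`, `checkpoint_diff`, `data`, `partial_checkpoint_complete`, `checkpoint_complete`) and is not the package's actual line types:

// Illustrative consumer of a sync response stream; the SyncLine union is an
// assumption covering only the line shapes referenced in the test above.
type SyncLine =
  | { checkpoint: unknown }
  | { checkpoint_diff: unknown }
  | { data: { data: unknown[] } }
  | { partial_checkpoint_complete: unknown }
  | { checkpoint_complete: unknown };

async function countFirstCheckpoint(stream: AsyncIterable<string | SyncLine | null>) {
  let rows = 0;
  let checkpoints = 0;
  for await (let next of stream) {
    if (typeof next === 'string') {
      next = JSON.parse(next) as SyncLine; // lines may arrive as serialized JSON
    }
    if (next === null || typeof next !== 'object') continue;
    if ('checkpoint' in next || 'checkpoint_diff' in next) checkpoints += 1;
    if ('data' in next) rows += next.data.data.length;
    if ('checkpoint_complete' in next) break; // stop after the first full checkpoint
  }
  return { rows, checkpoints };
}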