@powersync/service-core-tests 0.7.2 → 0.8.0
- package/CHANGELOG.md +16 -0
- package/dist/test-utils/general-utils.d.ts +1 -1
- package/dist/test-utils/general-utils.js +3 -3
- package/dist/test-utils/general-utils.js.map +1 -1
- package/dist/tests/register-data-storage-tests.js +285 -15
- package/dist/tests/register-data-storage-tests.js.map +1 -1
- package/dist/tests/register-sync-tests.js +246 -26
- package/dist/tests/register-sync-tests.js.map +1 -1
- package/package.json +3 -3
- package/src/test-utils/general-utils.ts +3 -3
- package/src/tests/register-data-storage-tests.ts +265 -15
- package/src/tests/register-sync-tests.ts +231 -5
- package/tsconfig.tsbuildinfo +1 -1
package/src/tests/register-sync-tests.ts
@@ -1,4 +1,11 @@
-import {
+import {
+  CheckpointLine,
+  storage,
+  StreamingSyncCheckpoint,
+  StreamingSyncCheckpointDiff,
+  sync,
+  utils
+} from '@powersync/service-core';
 import { JSONBig } from '@powersync/service-jsonbig';
 import { RequestParameters } from '@powersync/service-sync-rules';
 import path from 'path';
@@ -398,7 +405,7 @@ bucket_definitions:
     expect(lines).toMatchSnapshot();
   });
 
-  test('sync updates to global data', async () => {
+  test('sync updates to global data', async (context) => {
     await using f = await factory();
 
     const syncRules = await f.updateSyncRules({
@@ -422,6 +429,9 @@ bucket_definitions:
       token: { exp: Date.now() / 1000 + 10 } as any
     });
     const iter = stream[Symbol.asyncIterator]();
+    context.onTestFinished(() => {
+      iter.return?.();
+    });
 
     expect(await getCheckpointLines(iter)).toMatchSnapshot();
 
@@ -456,11 +466,221 @@ bucket_definitions:
     });
 
     expect(await getCheckpointLines(iter)).toMatchSnapshot();
+  });
+
+  test('sync updates to parameter query only', async (context) => {
+    await using f = await factory();
+
+    const syncRules = await f.updateSyncRules({
+      content: `bucket_definitions:
+  by_user:
+    parameters: select users.id as user_id from users where users.id = request.user_id()
+    data:
+      - select * from lists where user_id = bucket.user_id
+`
+    });
+
+    const usersTable = test_utils.makeTestTable('users', ['id']);
+    const listsTable = test_utils.makeTestTable('lists', ['id']);
+
+    const bucketStorage = await f.getInstance(syncRules);
+    await bucketStorage.autoActivate();
+
+    const stream = sync.streamResponse({
+      syncContext,
+      bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+      params: {
+        buckets: [],
+        include_checksum: true,
+        raw_data: true
+      },
+      tracker,
+      syncParams: new RequestParameters({ sub: 'user1' }, {}),
+      token: { exp: Date.now() / 1000 + 100 } as any
+    });
+    const iter = stream[Symbol.asyncIterator]();
+    context.onTestFinished(() => {
+      iter.return?.();
+    });
+
+    // Initial empty checkpoint
+    const checkpoint1 = await getCheckpointLines(iter);
+    expect((checkpoint1[0] as StreamingSyncCheckpoint).checkpoint?.buckets?.map((b) => b.bucket)).toEqual([]);
+    expect(checkpoint1).toMatchSnapshot();
+
+    // Add user
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.save({
+        sourceTable: usersTable,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 'user1',
+          name: 'User 1'
+        },
+        afterReplicaId: 'user1'
+      });
+
+      await batch.commit('0/1');
+    });
+
+    const checkpoint2 = await getCheckpointLines(iter);
+    expect(
+      (checkpoint2[0] as StreamingSyncCheckpointDiff).checkpoint_diff?.updated_buckets?.map((b) => b.bucket)
+    ).toEqual(['by_user["user1"]']);
+    expect(checkpoint2).toMatchSnapshot();
+  });
+
+  test('sync updates to data query only', async (context) => {
+    await using f = await factory();
+
+    const syncRules = await f.updateSyncRules({
+      content: `bucket_definitions:
+  by_user:
+    parameters: select users.id as user_id from users where users.id = request.user_id()
+    data:
+      - select * from lists where user_id = bucket.user_id
+`
+    });
+
+    const usersTable = test_utils.makeTestTable('users', ['id']);
+    const listsTable = test_utils.makeTestTable('lists', ['id']);
+
+    const bucketStorage = await f.getInstance(syncRules);
+    await bucketStorage.autoActivate();
+
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.save({
+        sourceTable: usersTable,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 'user1',
+          name: 'User 1'
+        },
+        afterReplicaId: 'user1'
+      });
+
+      await batch.commit('0/1');
+    });
+
+    const stream = sync.streamResponse({
+      syncContext,
+      bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+      params: {
+        buckets: [],
+        include_checksum: true,
+        raw_data: true
+      },
+      tracker,
+      syncParams: new RequestParameters({ sub: 'user1' }, {}),
+      token: { exp: Date.now() / 1000 + 100 } as any
+    });
+    const iter = stream[Symbol.asyncIterator]();
+    context.onTestFinished(() => {
+      iter.return?.();
+    });
+
+    const checkpoint1 = await getCheckpointLines(iter);
+    expect((checkpoint1[0] as StreamingSyncCheckpoint).checkpoint?.buckets?.map((b) => b.bucket)).toEqual([
+      'by_user["user1"]'
+    ]);
+    expect(checkpoint1).toMatchSnapshot();
+
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.save({
+        sourceTable: listsTable,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 'list1',
+          user_id: 'user1',
+          name: 'User 1'
+        },
+        afterReplicaId: 'list1'
+      });
+
+      await batch.commit('0/1');
+    });
+
+    const checkpoint2 = await getCheckpointLines(iter);
+    expect(
+      (checkpoint2[0] as StreamingSyncCheckpointDiff).checkpoint_diff?.updated_buckets?.map((b) => b.bucket)
+    ).toEqual(['by_user["user1"]']);
+    expect(checkpoint2).toMatchSnapshot();
+  });
+
+  test('sync updates to parameter query + data', async (context) => {
+    await using f = await factory();
+
+    const syncRules = await f.updateSyncRules({
+      content: `bucket_definitions:
+  by_user:
+    parameters: select users.id as user_id from users where users.id = request.user_id()
+    data:
+      - select * from lists where user_id = bucket.user_id
+`
+    });
+
+    const usersTable = test_utils.makeTestTable('users', ['id']);
+    const listsTable = test_utils.makeTestTable('lists', ['id']);
+
+    const bucketStorage = await f.getInstance(syncRules);
+    await bucketStorage.autoActivate();
+
+    const stream = sync.streamResponse({
+      syncContext,
+      bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+      params: {
+        buckets: [],
+        include_checksum: true,
+        raw_data: true
+      },
+      tracker,
+      syncParams: new RequestParameters({ sub: 'user1' }, {}),
+      token: { exp: Date.now() / 1000 + 100 } as any
+    });
+    const iter = stream[Symbol.asyncIterator]();
+    context.onTestFinished(() => {
+      iter.return?.();
+    });
+
+    // Initial empty checkpoint
+    expect(await getCheckpointLines(iter)).toMatchSnapshot();
+
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.save({
+        sourceTable: listsTable,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 'list1',
+          user_id: 'user1',
+          name: 'User 1'
+        },
+        afterReplicaId: 'list1'
+      });
 
-
+      await batch.save({
+        sourceTable: usersTable,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 'user1',
+          name: 'User 1'
+        },
+        afterReplicaId: 'user1'
+      });
+
+      await batch.commit('0/1');
+    });
+
+    const checkpoint2 = await getCheckpointLines(iter);
+    expect(
+      (checkpoint2[0] as StreamingSyncCheckpointDiff).checkpoint_diff?.updated_buckets?.map((b) => b.bucket)
+    ).toEqual(['by_user["user1"]']);
+    expect(checkpoint2).toMatchSnapshot();
   });
 
-  test('expiring token', async () => {
+  test('expiring token', async (context) => {
     await using f = await factory();
 
     const syncRules = await f.updateSyncRules({
@@ -486,6 +706,9 @@ bucket_definitions:
       token: { exp: exp } as any
     });
     const iter = stream[Symbol.asyncIterator]();
+    context.onTestFinished(() => {
+      iter.return?.();
+    });
 
     const checkpoint = await getCheckpointLines(iter);
     expect(checkpoint).toMatchSnapshot();
@@ -494,7 +717,7 @@ bucket_definitions:
     expect(expLines).toMatchSnapshot();
   });
 
-  test('compacting data - invalidate checkpoint', async () => {
+  test('compacting data - invalidate checkpoint', async (context) => {
     // This tests a case of a compact operation invalidating a checkpoint in the
     // middle of syncing data.
     // This is expected to be rare in practice, but it is important to handle
@@ -548,6 +771,9 @@ bucket_definitions:
     });
 
     const iter = stream[Symbol.asyncIterator]();
+    context.onTestFinished(() => {
+      iter.return?.();
+    });
 
     // Only consume the first "checkpoint" message, and pause before receiving data.
     const lines = await consumeIterator(iter, { consume: false, isDone: (line) => (line as any)?.checkpoint != null });