@powersync/service-core-tests 0.10.4 → 0.12.0
This diff reflects the changes between publicly released package versions as they appear in their public registry, and is provided for informational purposes only.
- package/CHANGELOG.md +57 -0
- package/LICENSE +3 -3
- package/dist/test-utils/general-utils.d.ts +2 -0
- package/dist/test-utils/general-utils.js +17 -1
- package/dist/test-utils/general-utils.js.map +1 -1
- package/dist/tests/register-compacting-tests.d.ts +0 -11
- package/dist/tests/register-compacting-tests.js +119 -34
- package/dist/tests/register-compacting-tests.js.map +1 -1
- package/dist/tests/register-data-storage-tests.js +308 -167
- package/dist/tests/register-data-storage-tests.js.map +1 -1
- package/dist/tests/register-parameter-compacting-tests.d.ts +2 -0
- package/dist/tests/register-parameter-compacting-tests.js +227 -0
- package/dist/tests/register-parameter-compacting-tests.js.map +1 -0
- package/dist/tests/register-sync-tests.js +166 -61
- package/dist/tests/register-sync-tests.js.map +1 -1
- package/dist/tests/tests-index.d.ts +1 -0
- package/dist/tests/tests-index.js +1 -0
- package/dist/tests/tests-index.js.map +1 -1
- package/package.json +5 -5
- package/src/test-utils/general-utils.ts +19 -10
- package/src/tests/register-compacting-tests.ts +118 -39
- package/src/tests/register-data-storage-tests.ts +311 -210
- package/src/tests/register-parameter-compacting-tests.ts +172 -0
- package/src/tests/register-sync-tests.ts +160 -61
- package/src/tests/tests-index.ts +1 -0
- package/tsconfig.tsbuildinfo +1 -1
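
The recurring change across the register-sync-tests hunks below is how the tests drive `sync.streamResponse`: the `bucketStorage.autoActivate()` calls are removed (activation now happens via a keepalive batch), `syncRules` becomes an object wrapping the parsed rules and their version, and explicit `token` and `isEncodingAsBson` options are passed. A minimal sketch of that call shape, assembled from the calls visible in the diff; the harness values (`bucketStorage`, `syncContext`, `tracker`) and the `test_utils` import path are assumptions of this sketch, not part of the package:

```ts
// Sketch only: the streamResponse call shape used by the updated tests.
// `bucketStorage`, `syncContext` and `tracker` come from the test harness and are
// typed loosely here; BATCH_OPTIONS / PARSE_OPTIONS are utilities from this package.
import { sync } from '@powersync/service-core';
import * as test_utils from '@powersync/service-core-tests'; // assumed import path

export async function openSyncStream(bucketStorage: any, syncContext: any, tracker: any) {
  // Activate the sync rules by committing a keepalive in a batch,
  // replacing the removed bucketStorage.autoActivate() calls.
  await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch: any) => {
    await batch.keepalive('0/0');
  });

  return sync.streamResponse({
    syncContext,
    bucketStorage,
    // syncRules now wraps the parsed rules together with their version.
    syncRules: {
      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
      version: bucketStorage.group_id
    },
    params: {
      buckets: [],
      include_checksum: true,
      raw_data: true
    },
    tracker,
    // The tests now pass an explicit token and BSON-encoding flag.
    token: { sub: '', exp: Date.now() / 1000 + 10 },
    isEncodingAsBson: false
  });
}
```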
package/dist/tests/register-sync-tests.js

@@ -52,7 +52,6 @@ var __disposeResources = (this && this.__disposeResources) || (function (Suppres
 });
 import { createCoreAPIMetrics, storage, sync } from '@powersync/service-core';
 import { JSONBig } from '@powersync/service-jsonbig';
-import { RequestParameters } from '@powersync/service-sync-rules';
 import path from 'path';
 import * as timers from 'timers/promises';
 import { fileURLToPath } from 'url';
@@ -93,7 +92,6 @@ export function registerSyncTests(factory) {
   content: BASIC_SYNC_RULES
 });
 const bucketStorage = f.getInstance(syncRules);
-await bucketStorage.autoActivate();
 const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
   await batch.save({
     sourceTable: TEST_TABLE,
@@ -118,15 +116,18 @@ export function registerSyncTests(factory) {
 const stream = sync.streamResponse({
   syncContext,
   bucketStorage: bucketStorage,
-  syncRules:
+  syncRules: {
+    syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+    version: bucketStorage.group_id
+  },
   params: {
     buckets: [],
     include_checksum: true,
     raw_data: true
   },
   tracker,
-
-
+  token: { sub: '', exp: Date.now() / 1000 + 10 },
+  isEncodingAsBson: false
 });
 const lines = await consumeCheckpointLines(stream);
 expect(lines).toMatchSnapshot();
@@ -159,7 +160,6 @@ bucket_definitions:
 `
 });
 const bucketStorage = f.getInstance(syncRules);
-await bucketStorage.autoActivate();
 const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
   await batch.save({
     sourceTable: TEST_TABLE,
@@ -184,15 +184,18 @@ bucket_definitions:
 const stream = sync.streamResponse({
   syncContext,
   bucketStorage,
-  syncRules:
+  syncRules: {
+    syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+    version: bucketStorage.group_id
+  },
   params: {
     buckets: [],
     include_checksum: true,
     raw_data: true
   },
   tracker,
-
-
+  token: { sub: '', exp: Date.now() / 1000 + 10 },
+  isEncodingAsBson: false
 });
 const lines = await consumeCheckpointLines(stream);
 expect(lines).toMatchSnapshot();
@@ -225,7 +228,6 @@ bucket_definitions:
 `
 });
 const bucketStorage = f.getInstance(syncRules);
-await bucketStorage.autoActivate();
 await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
   // Initial data: Add one priority row and 10k low-priority rows.
   await batch.save({
@@ -253,15 +255,18 @@ bucket_definitions:
 const stream = sync.streamResponse({
   syncContext,
   bucketStorage,
-  syncRules:
+  syncRules: {
+    syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+    version: bucketStorage.group_id
+  },
   params: {
     buckets: [],
     include_checksum: true,
     raw_data: true
   },
   tracker,
-
-
+  token: { sub: '', exp: Date.now() / 1000 + 10 },
+  isEncodingAsBson: false
 });
 let sentCheckpoints = 0;
 let sentRows = 0;
@@ -337,7 +342,6 @@ bucket_definitions:
 `
 });
 const bucketStorage = f.getInstance(syncRules);
-await bucketStorage.autoActivate();
 await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
   // Initial data: Add one priority row and 10k low-priority rows.
   await batch.save({
@@ -365,15 +369,18 @@ bucket_definitions:
 const stream = sync.streamResponse({
   syncContext,
   bucketStorage,
-  syncRules:
+  syncRules: {
+    syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+    version: bucketStorage.group_id
+  },
   params: {
     buckets: [],
     include_checksum: true,
     raw_data: true
   },
   tracker,
-
-
+  token: { sub: 'user_one', exp: Date.now() / 1000 + 100000 },
+  isEncodingAsBson: false
 });
 let sentCheckpoints = 0;
 let completedCheckpoints = 0;
@@ -477,7 +484,6 @@ bucket_definitions:
 `
 });
 const bucketStorage = f.getInstance(syncRules);
-await bucketStorage.autoActivate();
 await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
   // Initial data: Add one priority row and 10k low-priority rows.
   await batch.save({
@@ -505,15 +511,18 @@ bucket_definitions:
 const stream = sync.streamResponse({
   syncContext,
   bucketStorage,
-  syncRules:
+  syncRules: {
+    syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+    version: bucketStorage.group_id
+  },
   params: {
     buckets: [],
     include_checksum: true,
     raw_data: true
   },
   tracker,
-
-
+  token: { sub: '', exp: Date.now() / 1000 + 10 },
+  isEncodingAsBson: false
 });
 let sentRows = 0;
 let lines = [];
@@ -606,7 +615,6 @@ bucket_definitions:
   content: BASIC_SYNC_RULES
 });
 const bucketStorage = f.getInstance(syncRules);
-await bucketStorage.autoActivate();
 await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
   await batch.save({
     sourceTable: TEST_TABLE,
@@ -622,15 +630,18 @@ bucket_definitions:
 const stream = sync.streamResponse({
   syncContext,
   bucketStorage,
-  syncRules:
+  syncRules: {
+    syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+    version: bucketStorage.group_id
+  },
   params: {
     buckets: [],
     include_checksum: true,
     raw_data: true
   },
   tracker,
-
-
+  token: { sub: '', exp: Date.now() / 1000 + 100000 },
+  isEncodingAsBson: false
 });
 const lines = [];
 let receivedCompletions = 0;
@@ -673,7 +684,6 @@ bucket_definitions:
   content: BASIC_SYNC_RULES
 });
 const bucketStorage = await f.getInstance(syncRules);
-await bucketStorage.autoActivate();
 const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
   await batch.save({
     sourceTable: TEST_TABLE,
@@ -690,15 +700,18 @@ bucket_definitions:
 const stream = sync.streamResponse({
   syncContext,
   bucketStorage,
-  syncRules:
+  syncRules: {
+    syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+    version: bucketStorage.group_id
+  },
   params: {
     buckets: [],
     include_checksum: true,
     raw_data: false
   },
   tracker,
-
-
+  token: { sub: '', exp: Date.now() / 1000 + 10 },
+  isEncodingAsBson: false
 });
 const lines = await consumeCheckpointLines(stream);
 expect(lines).toMatchSnapshot();
@@ -713,19 +726,21 @@ bucket_definitions:
   content: BASIC_SYNC_RULES
 });
 const bucketStorage = await f.getInstance(syncRules);
-await bucketStorage.autoActivate();
 const stream = sync.streamResponse({
   syncContext,
   bucketStorage,
-  syncRules:
+  syncRules: {
+    syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+    version: bucketStorage.group_id
+  },
   params: {
     buckets: [],
     include_checksum: true,
     raw_data: true
   },
   tracker,
-
-
+  token: { sub: '', exp: 0 },
+  isEncodingAsBson: false
 });
 const lines = await consumeCheckpointLines(stream);
 expect(lines).toMatchSnapshot();
@@ -748,19 +763,25 @@ bucket_definitions:
   content: BASIC_SYNC_RULES
 });
 const bucketStorage = await f.getInstance(syncRules);
-
+// Activate
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+  await batch.keepalive('0/0');
+});
 const stream = sync.streamResponse({
   syncContext,
   bucketStorage,
-  syncRules:
+  syncRules: {
+    syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+    version: bucketStorage.group_id
+  },
   params: {
     buckets: [],
     include_checksum: true,
     raw_data: true
   },
   tracker,
-
-
+  token: { sub: '', exp: Date.now() / 1000 + 10 },
+  isEncodingAsBson: false
 });
 const iter = stream[Symbol.asyncIterator]();
 context.onTestFinished(() => {
@@ -819,19 +840,25 @@ bucket_definitions:
 const usersTable = test_utils.makeTestTable('users', ['id']);
 const listsTable = test_utils.makeTestTable('lists', ['id']);
 const bucketStorage = await f.getInstance(syncRules);
-
+// Activate
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+  await batch.keepalive('0/0');
+});
 const stream = sync.streamResponse({
   syncContext,
   bucketStorage,
-  syncRules:
+  syncRules: {
+    syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+    version: bucketStorage.group_id
+  },
   params: {
     buckets: [],
     include_checksum: true,
     raw_data: true
   },
   tracker,
-
-
+  token: { sub: 'user1', exp: Date.now() / 1000 + 100 },
+  isEncodingAsBson: false
 });
 const iter = stream[Symbol.asyncIterator]();
 context.onTestFinished(() => {
@@ -883,7 +910,6 @@ bucket_definitions:
 const usersTable = test_utils.makeTestTable('users', ['id']);
 const listsTable = test_utils.makeTestTable('lists', ['id']);
 const bucketStorage = await f.getInstance(syncRules);
-await bucketStorage.autoActivate();
 await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
   await batch.save({
     sourceTable: usersTable,
@@ -899,15 +925,18 @@ bucket_definitions:
 const stream = sync.streamResponse({
   syncContext,
   bucketStorage,
-  syncRules:
+  syncRules: {
+    syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+    version: bucketStorage.group_id
+  },
   params: {
     buckets: [],
     include_checksum: true,
     raw_data: true
   },
   tracker,
-
-
+  token: { sub: 'user1', exp: Date.now() / 1000 + 100 },
+  isEncodingAsBson: false
 });
 const iter = stream[Symbol.asyncIterator]();
 context.onTestFinished(() => {
@@ -960,19 +989,25 @@ bucket_definitions:
 const usersTable = test_utils.makeTestTable('users', ['id']);
 const listsTable = test_utils.makeTestTable('lists', ['id']);
 const bucketStorage = await f.getInstance(syncRules);
-
+// Activate
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+  await batch.keepalive('0/0');
+});
 const stream = sync.streamResponse({
   syncContext,
   bucketStorage,
-  syncRules:
+  syncRules: {
+    syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+    version: bucketStorage.group_id
+  },
   params: {
     buckets: [],
     include_checksum: true,
     raw_data: true
   },
   tracker,
-
-
+  token: { sub: 'user1', exp: Date.now() / 1000 + 100 },
+  isEncodingAsBson: false
 });
 const iter = stream[Symbol.asyncIterator]();
 context.onTestFinished(() => {
@@ -1024,20 +1059,26 @@ bucket_definitions:
   content: BASIC_SYNC_RULES
 });
 const bucketStorage = await f.getInstance(syncRules);
-
+// Activate
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+  await batch.keepalive('0/0');
+});
 const exp = Date.now() / 1000 + 0.1;
 const stream = sync.streamResponse({
   syncContext,
   bucketStorage,
-  syncRules:
+  syncRules: {
+    syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+    version: bucketStorage.group_id
+  },
   params: {
     buckets: [],
     include_checksum: true,
     raw_data: true
   },
   tracker,
-
-
+  token: { sub: '', exp: exp },
+  isEncodingAsBson: false
 });
 const iter = stream[Symbol.asyncIterator]();
 context.onTestFinished(() => {
@@ -1070,7 +1111,6 @@ bucket_definitions:
   content: BASIC_SYNC_RULES
 });
 const bucketStorage = await f.getInstance(syncRules);
-await bucketStorage.autoActivate();
 await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
   await batch.save({
     sourceTable: TEST_TABLE,
@@ -1095,15 +1135,18 @@ bucket_definitions:
 const stream = sync.streamResponse({
   syncContext,
   bucketStorage,
-  syncRules:
+  syncRules: {
+    syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+    version: bucketStorage.group_id
+  },
   params: {
     buckets: [],
     include_checksum: true,
     raw_data: true
   },
   tracker,
-
-
+  token: { sub: '', exp: Date.now() / 1000 + 10 },
+  isEncodingAsBson: false
 });
 const iter = stream[Symbol.asyncIterator]();
 context.onTestFinished(() => {
@@ -1202,7 +1245,6 @@ bucket_definitions:
   content: BASIC_SYNC_RULES
 });
 const bucketStorage = f.getInstance(syncRules);
-await bucketStorage.autoActivate();
 await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
   // <= the managed write checkpoint LSN below
   await batch.commit('0/1');
@@ -1214,15 +1256,18 @@ bucket_definitions:
 const params = {
   syncContext,
   bucketStorage,
-  syncRules:
+  syncRules: {
+    syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+    version: bucketStorage.group_id
+  },
   params: {
     buckets: [],
     include_checksum: true,
     raw_data: true
  },
   tracker,
-
-
+  token: { sub: 'test', exp: Date.now() / 1000 + 10 },
+  isEncodingAsBson: false
 };
 const stream1 = sync.streamResponse(params);
 const lines1 = await consumeCheckpointLines(stream1);
@@ -1259,6 +1304,66 @@ bucket_definitions:
     await result_14;
   }
 });
+test('encodes sync rules id in buckes for streams', async () => {
+  const env_15 = { stack: [], error: void 0, hasError: false };
+  try {
+    const f = __addDisposableResource(env_15, await factory(), true);
+    const rules = `
+streams:
+  test:
+    auto_subscribe: true
+    query: SELECT * FROM test;
+
+config:
+  edition: 2
+`;
+    for (let i = 0; i < 2; i++) {
+      const syncRules = await f.updateSyncRules({
+        content: rules
+      });
+      const bucketStorage = f.getInstance(syncRules);
+      await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+        await batch.save({
+          sourceTable: TEST_TABLE,
+          tag: storage.SaveOperationTag.INSERT,
+          after: {
+            id: 't1',
+            description: 'Test 1'
+          },
+          afterReplicaId: 't1'
+        });
+        await batch.commit('0/1');
+      });
+      const stream = sync.streamResponse({
+        syncContext,
+        bucketStorage: bucketStorage,
+        syncRules: {
+          syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+          version: bucketStorage.group_id
+        },
+        params: {
+          buckets: [],
+          include_checksum: true,
+          raw_data: true
+        },
+        tracker,
+        token: { sub: '', exp: Date.now() / 1000 + 10 },
+        isEncodingAsBson: false
+      });
+      const lines = await consumeCheckpointLines(stream);
+      expect(lines).toMatchSnapshot();
+    }
+  }
+  catch (e_15) {
+    env_15.error = e_15;
+    env_15.hasError = true;
+  }
+  finally {
+    const result_15 = __disposeResources(env_15);
+    if (result_15)
+      await result_15;
+  }
+});
 }
 /**
  * Get lines on an iterator until isDone(line) == true.