@powersync/service-core-tests 0.10.4 → 0.12.0
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- package/CHANGELOG.md +57 -0
- package/LICENSE +3 -3
- package/dist/test-utils/general-utils.d.ts +2 -0
- package/dist/test-utils/general-utils.js +17 -1
- package/dist/test-utils/general-utils.js.map +1 -1
- package/dist/tests/register-compacting-tests.d.ts +0 -11
- package/dist/tests/register-compacting-tests.js +119 -34
- package/dist/tests/register-compacting-tests.js.map +1 -1
- package/dist/tests/register-data-storage-tests.js +308 -167
- package/dist/tests/register-data-storage-tests.js.map +1 -1
- package/dist/tests/register-parameter-compacting-tests.d.ts +2 -0
- package/dist/tests/register-parameter-compacting-tests.js +227 -0
- package/dist/tests/register-parameter-compacting-tests.js.map +1 -0
- package/dist/tests/register-sync-tests.js +166 -61
- package/dist/tests/register-sync-tests.js.map +1 -1
- package/dist/tests/tests-index.d.ts +1 -0
- package/dist/tests/tests-index.js +1 -0
- package/dist/tests/tests-index.js.map +1 -1
- package/package.json +5 -5
- package/src/test-utils/general-utils.ts +19 -10
- package/src/tests/register-compacting-tests.ts +118 -39
- package/src/tests/register-data-storage-tests.ts +311 -210
- package/src/tests/register-parameter-compacting-tests.ts +172 -0
- package/src/tests/register-sync-tests.ts +160 -61
- package/src/tests/tests-index.ts +1 -0
- package/tsconfig.tsbuildinfo +1 -1
package/src/tests/register-parameter-compacting-tests.ts
ADDED
@@ -0,0 +1,172 @@
+import { storage } from '@powersync/service-core';
+import { ParameterLookup } from '@powersync/service-sync-rules';
+import { expect, test } from 'vitest';
+import * as test_utils from '../test-utils/test-utils-index.js';
+
+const TEST_TABLE = test_utils.makeTestTable('test', ['id']);
+
+export function registerParameterCompactTests(generateStorageFactory: storage.TestStorageFactory) {
+  test('compacting parameters', async () => {
+    await using factory = await generateStorageFactory();
+    const syncRules = await factory.updateSyncRules({
+      content: `
+bucket_definitions:
+  test:
+    parameters: select id from test where id = request.user_id()
+    data: []
+`
+    });
+    const bucketStorage = factory.getInstance(syncRules);
+
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 't1'
+        },
+        afterReplicaId: 't1'
+      });
+
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 't2'
+        },
+        afterReplicaId: 't2'
+      });
+
+      await batch.commit('1/1');
+    });
+
+    const lookup = ParameterLookup.normalized('test', '1', ['t1']);
+
+    const checkpoint1 = await bucketStorage.getCheckpoint();
+    const parameters1 = await checkpoint1.getParameterSets([lookup]);
+    expect(parameters1).toEqual([{ id: 't1' }]);
+
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.UPDATE,
+        before: {
+          id: 't1'
+        },
+        beforeReplicaId: 't1',
+        after: {
+          id: 't1'
+        },
+        afterReplicaId: 't1'
+      });
+
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.DELETE,
+        before: {
+          id: 't1'
+        },
+        beforeReplicaId: 't1'
+      });
+      await batch.commit('1/2');
+    });
+    const checkpoint2 = await bucketStorage.getCheckpoint();
+    const parameters2 = await checkpoint2.getParameterSets([lookup]);
+    expect(parameters2).toEqual([]);
+
+    const statsBefore = await bucketStorage.factory.getStorageMetrics();
+    await bucketStorage.compact({ compactParameterData: true });
+
+    // Check consistency
+    const parameters1b = await checkpoint1.getParameterSets([lookup]);
+    const parameters2b = await checkpoint2.getParameterSets([lookup]);
+    expect(parameters1b).toEqual([{ id: 't1' }]);
+    expect(parameters2b).toEqual([]);
+
+    // Check storage size
+    const statsAfter = await bucketStorage.factory.getStorageMetrics();
+    expect(statsAfter.parameters_size_bytes).toBeLessThan(statsBefore.parameters_size_bytes);
+  });
+
+  for (let cacheLimit of [1, 10]) {
+    test(`compacting deleted parameters with cache size ${cacheLimit}`, async () => {
+      await using factory = await generateStorageFactory();
+      const syncRules = await factory.updateSyncRules({
+        content: `
+bucket_definitions:
+  test:
+    parameters: select id from test where uid = request.user_id()
+    data: []
+`
+      });
+      const bucketStorage = factory.getInstance(syncRules);
+
+      await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+        await batch.save({
+          sourceTable: TEST_TABLE,
+          tag: storage.SaveOperationTag.INSERT,
+          after: {
+            id: 't1',
+            uid: 'u1'
+          },
+          afterReplicaId: 't1'
+        });
+        // Interleave with another operation, to evict the other cache entry when compacting.
+        await batch.save({
+          sourceTable: TEST_TABLE,
+          tag: storage.SaveOperationTag.INSERT,
+          after: {
+            id: 't2',
+            uid: 'u1'
+          },
+          afterReplicaId: 't2'
+        });
+
+        await batch.commit('1/1');
+      });
+
+      await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+        await batch.save({
+          sourceTable: TEST_TABLE,
+          tag: storage.SaveOperationTag.DELETE,
+          before: {
+            id: 't1',
+            uid: 'u1'
+          },
+          beforeReplicaId: 't1'
+        });
+        await batch.commit('2/1');
+      });
+
+      await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+        await batch.save({
+          sourceTable: TEST_TABLE,
+          tag: storage.SaveOperationTag.UPDATE,
+          after: {
+            id: 't2',
+            uid: 'u2'
+          },
+          afterReplicaId: 't2'
+        });
+        await batch.commit('3/1');
+      });
+
+      const lookup = ParameterLookup.normalized('test', '1', ['u1']);
+
+      const checkpoint1 = await bucketStorage.getCheckpoint();
+      const parameters1 = await checkpoint1.getParameterSets([lookup]);
+      expect(parameters1).toEqual([]);
+
+      const statsBefore = await bucketStorage.factory.getStorageMetrics();
+      await bucketStorage.compact({ compactParameterData: true, compactParameterCacheLimit: cacheLimit });
+
+      // Check consistency
+      const parameters1b = await checkpoint1.getParameterSets([lookup]);
+      expect(parameters1b).toEqual([]);
+
+      // Check storage size
+      const statsAfter = await bucketStorage.factory.getStorageMetrics();
+      expect(statsAfter.parameters_size_bytes).toBeLessThan(statsBefore.parameters_size_bytes);
+    });
+  }
+}
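For storage implementations that consume this package, the new suite registers the same way as the existing ones: pass in the storage.TestStorageFactory for the backend under test. A minimal sketch, assuming the package root re-exports tests-index and using a hypothetical createTestStorageFactory that a concrete backend would provide (neither of those names is part of this diff):

import { describe } from 'vitest';
import { registerParameterCompactTests } from '@powersync/service-core-tests';
// Hypothetical: whatever storage.TestStorageFactory your backend exposes for its tests.
import { createTestStorageFactory } from './test-storage-factory.js';

describe('parameter compacting', () => {
  // Runs 'compacting parameters' plus the cache-size variants against this backend.
  registerParameterCompactTests(createTestStorageFactory);
});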
package/src/tests/register-sync-tests.ts
CHANGED
@@ -7,7 +7,7 @@ import {
   utils
 } from '@powersync/service-core';
 import { JSONBig } from '@powersync/service-jsonbig';
-import { RequestParameters } from '@powersync/service-sync-rules';
+import { BucketSourceType, RequestParameters } from '@powersync/service-sync-rules';
 import path from 'path';
 import * as timers from 'timers/promises';
 import { fileURLToPath } from 'url';
@@ -54,7 +54,6 @@ export function registerSyncTests(factory: storage.TestStorageFactory) {
     });

     const bucketStorage = f.getInstance(syncRules);
-    await bucketStorage.autoActivate();

     const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       await batch.save({
@@ -83,15 +82,18 @@ export function registerSyncTests(factory: storage.TestStorageFactory) {
     const stream = sync.streamResponse({
       syncContext,
       bucketStorage: bucketStorage,
-      syncRules:
+      syncRules: {
+        syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+        version: bucketStorage.group_id
+      },
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
       tracker,
-
-
+      token: { sub: '', exp: Date.now() / 1000 + 10 } as any,
+      isEncodingAsBson: false
     });

     const lines = await consumeCheckpointLines(stream);
@@ -116,7 +118,6 @@ bucket_definitions:
     });

     const bucketStorage = f.getInstance(syncRules);
-    await bucketStorage.autoActivate();

     const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       await batch.save({
@@ -145,15 +146,18 @@ bucket_definitions:
     const stream = sync.streamResponse({
       syncContext,
       bucketStorage,
-      syncRules:
+      syncRules: {
+        syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+        version: bucketStorage.group_id
+      },
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
       tracker,
-
-
+      token: { sub: '', exp: Date.now() / 1000 + 10 } as any,
+      isEncodingAsBson: false
     });

     const lines = await consumeCheckpointLines(stream);
@@ -178,7 +182,6 @@ bucket_definitions:
     });

     const bucketStorage = f.getInstance(syncRules);
-    await bucketStorage.autoActivate();

     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       // Initial data: Add one priority row and 10k low-priority rows.
@@ -209,15 +212,18 @@ bucket_definitions:
     const stream = sync.streamResponse({
       syncContext,
       bucketStorage,
-      syncRules:
+      syncRules: {
+        syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+        version: bucketStorage.group_id
+      },
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
       tracker,
-
-
+      token: { sub: '', exp: Date.now() / 1000 + 10 } as any,
+      isEncodingAsBson: false
     });

     let sentCheckpoints = 0;
@@ -289,7 +295,6 @@ bucket_definitions:
     });

     const bucketStorage = f.getInstance(syncRules);
-    await bucketStorage.autoActivate();

     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       // Initial data: Add one priority row and 10k low-priority rows.
@@ -320,15 +325,18 @@ bucket_definitions:
     const stream = sync.streamResponse({
       syncContext,
       bucketStorage,
-      syncRules:
+      syncRules: {
+        syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+        version: bucketStorage.group_id
+      },
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
       tracker,
-
-
+      token: { sub: 'user_one', exp: Date.now() / 1000 + 100000 } as any,
+      isEncodingAsBson: false
     });

     let sentCheckpoints = 0;
@@ -431,7 +439,6 @@ bucket_definitions:
     });

     const bucketStorage = f.getInstance(syncRules);
-    await bucketStorage.autoActivate();

     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       // Initial data: Add one priority row and 10k low-priority rows.
@@ -462,15 +469,18 @@ bucket_definitions:
     const stream = sync.streamResponse({
       syncContext,
       bucketStorage,
-      syncRules:
+      syncRules: {
+        syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+        version: bucketStorage.group_id
+      },
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
       tracker,
-
-
+      token: { sub: '', exp: Date.now() / 1000 + 10 } as any,
+      isEncodingAsBson: false
     });

     let sentRows = 0;
@@ -561,7 +571,6 @@ bucket_definitions:
       content: BASIC_SYNC_RULES
     });
     const bucketStorage = f.getInstance(syncRules);
-    await bucketStorage.autoActivate();

     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       await batch.save({
@@ -579,15 +588,18 @@ bucket_definitions:
     const stream = sync.streamResponse({
       syncContext,
       bucketStorage,
-      syncRules:
+      syncRules: {
+        syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+        version: bucketStorage.group_id
+      },
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
       tracker,
-
-
+      token: { sub: '', exp: Date.now() / 1000 + 100000 } as any,
+      isEncodingAsBson: false
     });

     const lines: any[] = [];
@@ -626,7 +638,6 @@ bucket_definitions:
     });

     const bucketStorage = await f.getInstance(syncRules);
-    await bucketStorage.autoActivate();

     const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       await batch.save({
@@ -646,15 +657,18 @@ bucket_definitions:
     const stream = sync.streamResponse({
       syncContext,
       bucketStorage,
-      syncRules:
+      syncRules: {
+        syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+        version: bucketStorage.group_id
+      },
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: false
       },
       tracker,
-
-
+      token: { sub: '', exp: Date.now() / 1000 + 10 } as any,
+      isEncodingAsBson: false
     });

     const lines = await consumeCheckpointLines(stream);
@@ -671,20 +685,22 @@ bucket_definitions:
     });

     const bucketStorage = await f.getInstance(syncRules);
-    await bucketStorage.autoActivate();

     const stream = sync.streamResponse({
       syncContext,
       bucketStorage,
-      syncRules:
+      syncRules: {
+        syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+        version: bucketStorage.group_id
+      },
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
       tracker,
-
-
+      token: { sub: '', exp: 0 } as any,
+      isEncodingAsBson: false
     });

     const lines = await consumeCheckpointLines(stream);
@@ -699,20 +715,26 @@ bucket_definitions:
     });

     const bucketStorage = await f.getInstance(syncRules);
-
+    // Activate
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.keepalive('0/0');
+    });

     const stream = sync.streamResponse({
       syncContext,
       bucketStorage,
-      syncRules:
+      syncRules: {
+        syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+        version: bucketStorage.group_id
+      },
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
       tracker,
-
-
+      token: { sub: '', exp: Date.now() / 1000 + 10 } as any,
+      isEncodingAsBson: false
     });
     const iter = stream[Symbol.asyncIterator]();
     context.onTestFinished(() => {
@@ -770,20 +792,26 @@ bucket_definitions:
     const listsTable = test_utils.makeTestTable('lists', ['id']);

     const bucketStorage = await f.getInstance(syncRules);
-
+    // Activate
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.keepalive('0/0');
+    });

     const stream = sync.streamResponse({
       syncContext,
       bucketStorage,
-      syncRules:
+      syncRules: {
+        syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+        version: bucketStorage.group_id
+      },
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
       tracker,
-
-
+      token: { sub: 'user1', exp: Date.now() / 1000 + 100 } as any,
+      isEncodingAsBson: false
     });
     const iter = stream[Symbol.asyncIterator]();
     context.onTestFinished(() => {
@@ -833,7 +861,6 @@ bucket_definitions:
     const listsTable = test_utils.makeTestTable('lists', ['id']);

     const bucketStorage = await f.getInstance(syncRules);
-    await bucketStorage.autoActivate();

     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       await batch.save({
@@ -852,15 +879,18 @@ bucket_definitions:
     const stream = sync.streamResponse({
       syncContext,
       bucketStorage,
-      syncRules:
+      syncRules: {
+        syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+        version: bucketStorage.group_id
+      },
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
       tracker,
-
-
+      token: { sub: 'user1', exp: Date.now() / 1000 + 100 } as any,
+      isEncodingAsBson: false
     });
     const iter = stream[Symbol.asyncIterator]();
     context.onTestFinished(() => {
@@ -911,20 +941,26 @@ bucket_definitions:
     const listsTable = test_utils.makeTestTable('lists', ['id']);

     const bucketStorage = await f.getInstance(syncRules);
-
+    // Activate
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.keepalive('0/0');
+    });

     const stream = sync.streamResponse({
       syncContext,
       bucketStorage,
-      syncRules:
+      syncRules: {
+        syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+        version: bucketStorage.group_id
+      },
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
       tracker,
-
-
+      token: { sub: 'user1', exp: Date.now() / 1000 + 100 } as any,
+      isEncodingAsBson: false
     });
     const iter = stream[Symbol.asyncIterator]();
     context.onTestFinished(() => {
@@ -974,22 +1010,28 @@ bucket_definitions:
     });

     const bucketStorage = await f.getInstance(syncRules);
-
+    // Activate
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.keepalive('0/0');
+    });

     const exp = Date.now() / 1000 + 0.1;

     const stream = sync.streamResponse({
       syncContext,
       bucketStorage,
-      syncRules:
+      syncRules: {
+        syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+        version: bucketStorage.group_id
+      },
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
       tracker,
-
-
+      token: { sub: '', exp: exp } as any,
+      isEncodingAsBson: false
     });
     const iter = stream[Symbol.asyncIterator]();
     context.onTestFinished(() => {
@@ -1016,7 +1058,6 @@ bucket_definitions:
     });

     const bucketStorage = await f.getInstance(syncRules);
-    await bucketStorage.autoActivate();

     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       await batch.save({
@@ -1045,15 +1086,18 @@ bucket_definitions:
     const stream = sync.streamResponse({
       syncContext,
       bucketStorage,
-      syncRules:
+      syncRules: {
+        syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+        version: bucketStorage.group_id
+      },
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
       tracker,
-
-
+      token: { sub: '', exp: Date.now() / 1000 + 10 } as any,
+      isEncodingAsBson: false
     });

     const iter = stream[Symbol.asyncIterator]();
@@ -1157,7 +1201,6 @@ bucket_definitions:
     });

     const bucketStorage = f.getInstance(syncRules);
-    await bucketStorage.autoActivate();

     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       // <= the managed write checkpoint LSN below
@@ -1172,15 +1215,18 @@ bucket_definitions:
     const params: sync.SyncStreamParameters = {
       syncContext,
       bucketStorage,
-      syncRules:
+      syncRules: {
+        syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+        version: bucketStorage.group_id
+      },
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
       tracker,
-
-
+      token: { sub: 'test', exp: Date.now() / 1000 + 10 } as any,
+      isEncodingAsBson: false
     };
     const stream1 = sync.streamResponse(params);
     const lines1 = await consumeCheckpointLines(stream1);
@@ -1210,6 +1256,59 @@ bucket_definitions:
       })
     });
   });
+
+  test('encodes sync rules id in buckes for streams', async () => {
+    await using f = await factory();
+    const rules = `
+streams:
+  test:
+    auto_subscribe: true
+    query: SELECT * FROM test;
+
+config:
+  edition: 2
+`;
+
+    for (let i = 0; i < 2; i++) {
+      const syncRules = await f.updateSyncRules({
+        content: rules
+      });
+      const bucketStorage = f.getInstance(syncRules);
+
+      await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+        await batch.save({
+          sourceTable: TEST_TABLE,
+          tag: storage.SaveOperationTag.INSERT,
+          after: {
+            id: 't1',
+            description: 'Test 1'
+          },
+          afterReplicaId: 't1'
+        });
+        await batch.commit('0/1');
+      });
+
+      const stream = sync.streamResponse({
+        syncContext,
+        bucketStorage: bucketStorage,
+        syncRules: {
+          syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+          version: bucketStorage.group_id
+        },
+        params: {
+          buckets: [],
+          include_checksum: true,
+          raw_data: true
+        },
+        tracker,
+        token: { sub: '', exp: Date.now() / 1000 + 10 } as any,
+        isEncodingAsBson: false
+      });
+
+      const lines = await consumeCheckpointLines(stream);
+      expect(lines).toMatchSnapshot();
+    }
+  });
 }

 /**
package/src/tests/tests-index.ts
CHANGED
@@ -1,5 +1,6 @@
 export * from './register-bucket-validation-tests.js';
 export * from './register-compacting-tests.js';
+export * from './register-parameter-compacting-tests.js';
 export * from './register-data-storage-tests.js';
 export * from './register-migration-tests.js';
 export * from './register-sync-tests.js';