@powersync/service-core-tests 0.10.3 → 0.11.0

This diff represents the content of publicly available package versions as published to their public registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
@@ -0,0 +1,172 @@
+import { storage } from '@powersync/service-core';
+import { ParameterLookup } from '@powersync/service-sync-rules';
+import { expect, test } from 'vitest';
+import * as test_utils from '../test-utils/test-utils-index.js';
+
+const TEST_TABLE = test_utils.makeTestTable('test', ['id']);
+
+export function registerParameterCompactTests(generateStorageFactory: storage.TestStorageFactory) {
+  test('compacting parameters', async () => {
+    await using factory = await generateStorageFactory();
+    const syncRules = await factory.updateSyncRules({
+      content: `
+bucket_definitions:
+  test:
+    parameters: select id from test where id = request.user_id()
+    data: []
+    `
+    });
+    const bucketStorage = factory.getInstance(syncRules);
+
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 't1'
+        },
+        afterReplicaId: 't1'
+      });
+
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 't2'
+        },
+        afterReplicaId: 't2'
+      });
+
+      await batch.commit('1/1');
+    });
+
+    const lookup = ParameterLookup.normalized('test', '1', ['t1']);
+
+    const checkpoint1 = await bucketStorage.getCheckpoint();
+    const parameters1 = await checkpoint1.getParameterSets([lookup]);
+    expect(parameters1).toEqual([{ id: 't1' }]);
+
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.UPDATE,
+        before: {
+          id: 't1'
+        },
+        beforeReplicaId: 't1',
+        after: {
+          id: 't1'
+        },
+        afterReplicaId: 't1'
+      });
+
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.DELETE,
+        before: {
+          id: 't1'
+        },
+        beforeReplicaId: 't1'
+      });
+      await batch.commit('1/2');
+    });
+    const checkpoint2 = await bucketStorage.getCheckpoint();
+    const parameters2 = await checkpoint2.getParameterSets([lookup]);
+    expect(parameters2).toEqual([]);
+
+    const statsBefore = await bucketStorage.factory.getStorageMetrics();
+    await bucketStorage.compact({ compactParameterData: true });
+
+    // Check consistency
+    const parameters1b = await checkpoint1.getParameterSets([lookup]);
+    const parameters2b = await checkpoint2.getParameterSets([lookup]);
+    expect(parameters1b).toEqual([{ id: 't1' }]);
+    expect(parameters2b).toEqual([]);
+
+    // Check storage size
+    const statsAfter = await bucketStorage.factory.getStorageMetrics();
+    expect(statsAfter.parameters_size_bytes).toBeLessThan(statsBefore.parameters_size_bytes);
+  });
+
+  for (let cacheLimit of [1, 10]) {
+    test(`compacting deleted parameters with cache size ${cacheLimit}`, async () => {
+      await using factory = await generateStorageFactory();
+      const syncRules = await factory.updateSyncRules({
+        content: `
+bucket_definitions:
+  test:
+    parameters: select id from test where uid = request.user_id()
+    data: []
+      `
+      });
+      const bucketStorage = factory.getInstance(syncRules);
+
+      await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+        await batch.save({
+          sourceTable: TEST_TABLE,
+          tag: storage.SaveOperationTag.INSERT,
+          after: {
+            id: 't1',
+            uid: 'u1'
+          },
+          afterReplicaId: 't1'
+        });
+        // Interleave with another operation, to evict the other cache entry when compacting.
+        await batch.save({
+          sourceTable: TEST_TABLE,
+          tag: storage.SaveOperationTag.INSERT,
+          after: {
+            id: 't2',
+            uid: 'u1'
+          },
+          afterReplicaId: 't2'
+        });
+
+        await batch.commit('1/1');
+      });
+
+      await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+        await batch.save({
+          sourceTable: TEST_TABLE,
+          tag: storage.SaveOperationTag.DELETE,
+          before: {
+            id: 't1',
+            uid: 'u1'
+          },
+          beforeReplicaId: 't1'
+        });
+        await batch.commit('2/1');
+      });
+
+      await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+        await batch.save({
+          sourceTable: TEST_TABLE,
+          tag: storage.SaveOperationTag.UPDATE,
+          after: {
+            id: 't2',
+            uid: 'u2'
+          },
+          afterReplicaId: 't2'
+        });
+        await batch.commit('3/1');
+      });
+
+      const lookup = ParameterLookup.normalized('test', '1', ['u1']);
+
+      const checkpoint1 = await bucketStorage.getCheckpoint();
+      const parameters1 = await checkpoint1.getParameterSets([lookup]);
+      expect(parameters1).toEqual([]);
+
+      const statsBefore = await bucketStorage.factory.getStorageMetrics();
+      await bucketStorage.compact({ compactParameterData: true, compactParameterCacheLimit: cacheLimit });
+
+      // Check consistency
+      const parameters1b = await checkpoint1.getParameterSets([lookup]);
+      expect(parameters1b).toEqual([]);
+
+      // Check storage size
+      const statsAfter = await bucketStorage.factory.getStorageMetrics();
+      expect(statsAfter.parameters_size_bytes).toBeLessThan(statsBefore.parameters_size_bytes);
+    });
+  }
+}
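
The new file above (presumably the `register-parameter-compacting-tests` module, given the export added to the tests index at the bottom of this diff) provides reusable vitest suites for parameter-data compaction. A minimal sketch of how a downstream storage implementation might wire it up; the `register` namespace import and the `MY_STORAGE_FACTORY` helper are assumptions for illustration, not part of this diff:

import { describe } from 'vitest';
// Assumed consumption pattern: the package's register namespace plus a
// project-specific test storage factory (hypothetical name below).
import { register } from '@powersync/service-core-tests';
import { MY_STORAGE_FACTORY } from './my-storage-factory.js';

describe('parameter compacting', () => {
  register.registerParameterCompactTests(MY_STORAGE_FACTORY);
});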
@@ -54,7 +54,6 @@ export function registerSyncTests(factory: storage.TestStorageFactory) {
     });
 
     const bucketStorage = f.getInstance(syncRules);
-    await bucketStorage.autoActivate();
 
     const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       await batch.save({
@@ -116,7 +115,6 @@ bucket_definitions:
     });
 
     const bucketStorage = f.getInstance(syncRules);
-    await bucketStorage.autoActivate();
 
     const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       await batch.save({
@@ -178,7 +176,6 @@ bucket_definitions:
     });
 
     const bucketStorage = f.getInstance(syncRules);
-    await bucketStorage.autoActivate();
 
     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       // Initial data: Add one priority row and 10k low-priority rows.
@@ -289,7 +286,6 @@ bucket_definitions:
     });
 
     const bucketStorage = f.getInstance(syncRules);
-    await bucketStorage.autoActivate();
 
     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       // Initial data: Add one priority row and 10k low-priority rows.
@@ -431,7 +427,6 @@ bucket_definitions:
     });
 
     const bucketStorage = f.getInstance(syncRules);
-    await bucketStorage.autoActivate();
 
     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       // Initial data: Add one priority row and 10k low-priority rows.
@@ -561,7 +556,6 @@ bucket_definitions:
       content: BASIC_SYNC_RULES
     });
     const bucketStorage = f.getInstance(syncRules);
-    await bucketStorage.autoActivate();
 
     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       await batch.save({
@@ -626,7 +620,6 @@ bucket_definitions:
     });
 
     const bucketStorage = await f.getInstance(syncRules);
-    await bucketStorage.autoActivate();
 
     const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       await batch.save({
@@ -671,7 +664,6 @@ bucket_definitions:
     });
 
     const bucketStorage = await f.getInstance(syncRules);
-    await bucketStorage.autoActivate();
 
     const stream = sync.streamResponse({
       syncContext,
@@ -699,7 +691,10 @@ bucket_definitions:
     });
 
     const bucketStorage = await f.getInstance(syncRules);
-    await bucketStorage.autoActivate();
+    // Activate
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.keepalive('0/0');
+    });
 
     const stream = sync.streamResponse({
       syncContext,
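
In the hunk above (and the similar ones below), the removed `autoActivate()` call is replaced by committing an empty keepalive batch, which activates the sync rules instance before streaming. If these test files wanted to factor out that repetition, a helper along these lines could sit alongside them; a sketch assuming the same `startBatch`/`keepalive` API used in this diff and the `SyncRulesBucketStorage` type from `@powersync/service-core`:

import { storage } from '@powersync/service-core';
import * as test_utils from '../test-utils/test-utils-index.js';

// Hypothetical helper, not part of this package: activates a sync rules
// instance by committing an empty keepalive batch, mirroring the inline
// pattern in the surrounding hunks.
async function activateSyncRules(bucketStorage: storage.SyncRulesBucketStorage): Promise<void> {
  await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
    await batch.keepalive('0/0');
  });
}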
@@ -770,7 +765,10 @@ bucket_definitions:
     const listsTable = test_utils.makeTestTable('lists', ['id']);
 
     const bucketStorage = await f.getInstance(syncRules);
-    await bucketStorage.autoActivate();
+    // Activate
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.keepalive('0/0');
+    });
 
     const stream = sync.streamResponse({
       syncContext,
@@ -833,7 +831,6 @@ bucket_definitions:
     const listsTable = test_utils.makeTestTable('lists', ['id']);
 
     const bucketStorage = await f.getInstance(syncRules);
-    await bucketStorage.autoActivate();
 
     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       await batch.save({
@@ -911,7 +908,10 @@ bucket_definitions:
     const listsTable = test_utils.makeTestTable('lists', ['id']);
 
     const bucketStorage = await f.getInstance(syncRules);
-    await bucketStorage.autoActivate();
+    // Activate
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.keepalive('0/0');
+    });
 
     const stream = sync.streamResponse({
       syncContext,
@@ -974,7 +974,10 @@ bucket_definitions:
     });
 
     const bucketStorage = await f.getInstance(syncRules);
-    await bucketStorage.autoActivate();
+    // Activate
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.keepalive('0/0');
+    });
 
     const exp = Date.now() / 1000 + 0.1;
 
@@ -1016,7 +1019,6 @@ bucket_definitions:
     });
 
     const bucketStorage = await f.getInstance(syncRules);
-    await bucketStorage.autoActivate();
 
     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       await batch.save({
@@ -1157,7 +1159,6 @@ bucket_definitions:
     });
 
     const bucketStorage = f.getInstance(syncRules);
-    await bucketStorage.autoActivate();
 
     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
      // <= the managed write checkpoint LSN below
@@ -1,5 +1,6 @@
 export * from './register-bucket-validation-tests.js';
 export * from './register-compacting-tests.js';
+export * from './register-parameter-compacting-tests.js';
 export * from './register-data-storage-tests.js';
 export * from './register-migration-tests.js';
 export * from './register-sync-tests.js';