@powersync/service-core-tests 0.0.0-dev-20241219091224

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/CHANGELOG.md +15 -0
  2. package/LICENSE +67 -0
  3. package/README.md +5 -0
  4. package/dist/index.d.ts +4 -0
  5. package/dist/index.js +5 -0
  6. package/dist/index.js.map +1 -0
  7. package/dist/test-utils/bucket-validation.d.ts +42 -0
  8. package/dist/test-utils/bucket-validation.js +115 -0
  9. package/dist/test-utils/bucket-validation.js.map +1 -0
  10. package/dist/test-utils/general-utils.d.ts +31 -0
  11. package/dist/test-utils/general-utils.js +81 -0
  12. package/dist/test-utils/general-utils.js.map +1 -0
  13. package/dist/test-utils/stream_utils.d.ts +6 -0
  14. package/dist/test-utils/stream_utils.js +37 -0
  15. package/dist/test-utils/stream_utils.js.map +1 -0
  16. package/dist/test-utils/test-utils-index.d.ts +3 -0
  17. package/dist/test-utils/test-utils-index.js +4 -0
  18. package/dist/test-utils/test-utils-index.js.map +1 -0
  19. package/dist/tests/register-bucket-validation-tests.d.ts +10 -0
  20. package/dist/tests/register-bucket-validation-tests.js +139 -0
  21. package/dist/tests/register-bucket-validation-tests.js.map +1 -0
  22. package/dist/tests/register-compacting-tests.d.ts +14 -0
  23. package/dist/tests/register-compacting-tests.js +343 -0
  24. package/dist/tests/register-compacting-tests.js.map +1 -0
  25. package/dist/tests/register-data-storage-tests.d.ts +14 -0
  26. package/dist/tests/register-data-storage-tests.js +1571 -0
  27. package/dist/tests/register-data-storage-tests.js.map +1 -0
  28. package/dist/tests/register-sync-tests.d.ts +11 -0
  29. package/dist/tests/register-sync-tests.js +538 -0
  30. package/dist/tests/register-sync-tests.js.map +1 -0
  31. package/dist/tests/tests-index.d.ts +4 -0
  32. package/dist/tests/tests-index.js +5 -0
  33. package/dist/tests/tests-index.js.map +1 -0
  34. package/package.json +28 -0
  35. package/src/index.ts +5 -0
  36. package/src/test-utils/bucket-validation.ts +120 -0
  37. package/src/test-utils/general-utils.ts +113 -0
  38. package/src/test-utils/stream_utils.ts +42 -0
  39. package/src/test-utils/test-utils-index.ts +4 -0
  40. package/src/tests/register-bucket-validation-tests.ts +148 -0
  41. package/src/tests/register-compacting-tests.ts +297 -0
  42. package/src/tests/register-data-storage-tests.ts +1552 -0
  43. package/src/tests/register-sync-tests.ts +521 -0
  44. package/src/tests/tests-index.ts +4 -0
  45. package/tsconfig.json +34 -0
  46. package/tsconfig.tsbuildinfo +1 -0
package/src/tests/register-compacting-tests.ts
@@ -0,0 +1,297 @@
+ import { storage } from '@powersync/service-core';
+ import { expect, test } from 'vitest';
+ import * as test_utils from '../test-utils/test-utils-index.js';
+
+ const TEST_TABLE = test_utils.makeTestTable('test', ['id']);
+
+ /**
+  * @example
+  * ```TypeScript
+  * // Test with the default options - large batch sizes
+  * describe('compacting buckets - default options', () => registerCompactTests(() => new MongoStorageFactory(), {}));
+  *
+  * // Also test with the minimum batch sizes, forcing usage of multiple batches internally
+  * describe('compacting buckets - batched', () =>
+  *   registerCompactTests(() => new MongoStorageFactory(), { clearBatchLimit: 2, moveBatchLimit: 1, moveBatchQueryLimit: 1 }));
+  * ```
+  */
+ export function registerCompactTests<CompactOptions extends storage.CompactOptions = storage.CompactOptions>(
+   generateStorageFactory: test_utils.StorageFactory,
+   compactOptions: CompactOptions
+ ) {
+   test('compacting (1)', async () => {
+     const sync_rules = test_utils.testRules(`
+       bucket_definitions:
+         global:
+           data: [select * from test]
+       `);
+
+     using factory = await generateStorageFactory();
+     const bucketStorage = factory.getInstance(sync_rules);
+
+     const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+       await batch.save({
+         sourceTable: TEST_TABLE,
+         tag: storage.SaveOperationTag.INSERT,
+         after: {
+           id: 't1'
+         },
+         afterReplicaId: test_utils.rid('t1')
+       });
+
+       await batch.save({
+         sourceTable: TEST_TABLE,
+         tag: storage.SaveOperationTag.INSERT,
+         after: {
+           id: 't2'
+         },
+         afterReplicaId: test_utils.rid('t2')
+       });
+
+       await batch.save({
+         sourceTable: TEST_TABLE,
+         tag: storage.SaveOperationTag.UPDATE,
+         after: {
+           id: 't2'
+         },
+         afterReplicaId: test_utils.rid('t2')
+       });
+     });
+
+     const checkpoint = result!.flushed_op;
+
+     const batchBefore = await test_utils.oneFromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']])));
+     const dataBefore = batchBefore.batch.data;
+     const checksumBefore = await bucketStorage.getChecksums(checkpoint, ['global[]']);
+
+     expect(dataBefore).toMatchObject([
+       {
+         checksum: 2634521662,
+         object_id: 't1',
+         op: 'PUT',
+         op_id: '1'
+       },
+       {
+         checksum: 4243212114,
+         object_id: 't2',
+         op: 'PUT',
+         op_id: '2'
+       },
+       {
+         checksum: 4243212114,
+         object_id: 't2',
+         op: 'PUT',
+         op_id: '3'
+       }
+     ]);
+
+     await bucketStorage.compact(compactOptions);
+
+     const batchAfter = await test_utils.oneFromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']])));
+     const dataAfter = batchAfter.batch.data;
+     const checksumAfter = await bucketStorage.getChecksums(checkpoint, ['global[]']);
+
+     expect(batchAfter.targetOp).toEqual(3n);
+     expect(dataAfter).toMatchObject([
+       {
+         checksum: 2634521662,
+         object_id: 't1',
+         op: 'PUT',
+         op_id: '1'
+       },
+       {
+         checksum: 4243212114,
+         op: 'MOVE',
+         op_id: '2'
+       },
+       {
+         checksum: 4243212114,
+         object_id: 't2',
+         op: 'PUT',
+         op_id: '3'
+       }
+     ]);
+
+     expect(checksumBefore.get('global[]')).toEqual(checksumAfter.get('global[]'));
+
+     test_utils.validateCompactedBucket(dataBefore, dataAfter);
+   });
+
+   test('compacting (2)', async () => {
+     const sync_rules = test_utils.testRules(`
+       bucket_definitions:
+         global:
+           data: [select * from test]
+       `);
+
+     using factory = await generateStorageFactory();
+     const bucketStorage = factory.getInstance(sync_rules);
+
+     const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+       await batch.save({
+         sourceTable: TEST_TABLE,
+         tag: storage.SaveOperationTag.INSERT,
+         after: {
+           id: 't1'
+         },
+         afterReplicaId: test_utils.rid('t1')
+       });
+
+       await batch.save({
+         sourceTable: TEST_TABLE,
+         tag: storage.SaveOperationTag.INSERT,
+         after: {
+           id: 't2'
+         },
+         afterReplicaId: test_utils.rid('t2')
+       });
+
+       await batch.save({
+         sourceTable: TEST_TABLE,
+         tag: storage.SaveOperationTag.DELETE,
+         before: {
+           id: 't1'
+         },
+         beforeReplicaId: test_utils.rid('t1')
+       });
+
+       await batch.save({
+         sourceTable: TEST_TABLE,
+         tag: storage.SaveOperationTag.UPDATE,
+         after: {
+           id: 't2'
+         },
+         afterReplicaId: test_utils.rid('t2')
+       });
+     });
+
+     const checkpoint = result!.flushed_op;
+
+     const batchBefore = await test_utils.oneFromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']])));
+     const dataBefore = batchBefore.batch.data;
+     const checksumBefore = await bucketStorage.getChecksums(checkpoint, ['global[]']);
+
+     expect(dataBefore).toMatchObject([
+       {
+         checksum: 2634521662,
+         object_id: 't1',
+         op: 'PUT',
+         op_id: '1'
+       },
+       {
+         checksum: 4243212114,
+         object_id: 't2',
+         op: 'PUT',
+         op_id: '2'
+       },
+       {
+         checksum: 4228978084,
+         object_id: 't1',
+         op: 'REMOVE',
+         op_id: '3'
+       },
+       {
+         checksum: 4243212114,
+         object_id: 't2',
+         op: 'PUT',
+         op_id: '4'
+       }
+     ]);
+
+     await bucketStorage.compact(compactOptions);
+
+     const batchAfter = await test_utils.oneFromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', '0']])));
+     const dataAfter = batchAfter.batch.data;
+     const checksumAfter = await bucketStorage.getChecksums(checkpoint, ['global[]']);
+
+     expect(batchAfter.targetOp).toEqual(4n);
+     expect(dataAfter).toMatchObject([
+       {
+         checksum: -1778190028,
+         op: 'CLEAR',
+         op_id: '3'
+       },
+       {
+         checksum: 4243212114,
+         object_id: 't2',
+         op: 'PUT',
+         op_id: '4'
+       }
+     ]);
+     expect(checksumBefore.get('global[]')).toEqual(checksumAfter.get('global[]'));
+
+     test_utils.validateCompactedBucket(dataBefore, dataAfter);
+   });
+
+   test('compacting (3)', async () => {
+     const sync_rules = test_utils.testRules(`
+       bucket_definitions:
+         global:
+           data: [select * from test]
+       `);
+
+     using factory = await generateStorageFactory();
+     const bucketStorage = factory.getInstance(sync_rules);
+
+     const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+       await batch.save({
+         sourceTable: TEST_TABLE,
+         tag: storage.SaveOperationTag.INSERT,
+         after: {
+           id: 't1'
+         },
+         afterReplicaId: 't1'
+       });
+
+       await batch.save({
+         sourceTable: TEST_TABLE,
+         tag: storage.SaveOperationTag.INSERT,
+         after: {
+           id: 't2'
+         },
+         afterReplicaId: 't2'
+       });
+
+       await batch.save({
+         sourceTable: TEST_TABLE,
+         tag: storage.SaveOperationTag.DELETE,
+         before: {
+           id: 't1'
+         },
+         beforeReplicaId: 't1'
+       });
+     });
+
+     const checkpoint1 = result!.flushed_op;
+     const checksumBefore = await bucketStorage.getChecksums(checkpoint1, ['global[]']);
+
+     const result2 = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+       await batch.save({
+         sourceTable: TEST_TABLE,
+         tag: storage.SaveOperationTag.DELETE,
+         before: {
+           id: 't2'
+         },
+         beforeReplicaId: 't2'
+       });
+     });
+     const checkpoint2 = result2!.flushed_op;
+
+     await bucketStorage.compact(compactOptions);
+
+     const batchAfter = await test_utils.oneFromAsync(bucketStorage.getBucketDataBatch(checkpoint2, new Map([['global[]', '0']])));
+     const dataAfter = batchAfter.batch.data;
+     const checksumAfter = await bucketStorage.getChecksums(checkpoint2, ['global[]']);
+
+     expect(batchAfter.targetOp).toEqual(4n);
+     expect(dataAfter).toMatchObject([
+       {
+         checksum: 1874612650,
+         op: 'CLEAR',
+         op_id: '4'
+       }
+     ]);
+     expect(checksumAfter.get('global[]')).toEqual({
+       bucket: 'global[]',
+       count: 1,
+       checksum: 1874612650
+     });
+   });
+ }