@powersync/service-core-tests 0.12.1 → 0.12.3

Files changed (30)
  1. package/CHANGELOG.md +18 -0
  2. package/dist/tests/register-compacting-tests.js +68 -0
  3. package/dist/tests/register-compacting-tests.js.map +1 -1
  4. package/dist/tests/register-data-storage-checkpoint-tests.d.ts +12 -0
  5. package/dist/tests/register-data-storage-checkpoint-tests.js +357 -0
  6. package/dist/tests/register-data-storage-checkpoint-tests.js.map +1 -0
  7. package/dist/tests/register-data-storage-data-tests.d.ts +18 -0
  8. package/dist/tests/{register-data-storage-tests.js → register-data-storage-data-tests.js} +216 -1037
  9. package/dist/tests/register-data-storage-data-tests.js.map +1 -0
  10. package/dist/tests/{register-data-storage-tests.d.ts → register-data-storage-parameter-tests.d.ts} +1 -2
  11. package/dist/tests/register-data-storage-parameter-tests.js +707 -0
  12. package/dist/tests/register-data-storage-parameter-tests.js.map +1 -0
  13. package/dist/tests/register-sync-tests.js +2 -1
  14. package/dist/tests/register-sync-tests.js.map +1 -1
  15. package/dist/tests/tests-index.d.ts +4 -1
  16. package/dist/tests/tests-index.js +4 -1
  17. package/dist/tests/tests-index.js.map +1 -1
  18. package/dist/tests/util.d.ts +1 -0
  19. package/dist/tests/util.js +3 -0
  20. package/dist/tests/util.js.map +1 -0
  21. package/package.json +3 -3
  22. package/src/tests/register-compacting-tests.ts +63 -0
  23. package/src/tests/register-data-storage-checkpoint-tests.ts +277 -0
  24. package/src/tests/{register-data-storage-tests.ts → register-data-storage-data-tests.ts} +93 -865
  25. package/src/tests/register-data-storage-parameter-tests.ts +613 -0
  26. package/src/tests/register-sync-tests.ts +2 -1
  27. package/src/tests/tests-index.ts +4 -1
  28. package/src/tests/util.ts +3 -0
  29. package/tsconfig.tsbuildinfo +1 -1
  30. package/dist/tests/register-data-storage-tests.js.map +0 -1
package/src/tests/register-data-storage-parameter-tests.ts
@@ -0,0 +1,613 @@
+ import { storage } from '@powersync/service-core';
+ import { ParameterLookup, RequestParameters } from '@powersync/service-sync-rules';
+ import { SqlBucketDescriptor } from '@powersync/service-sync-rules/src/SqlBucketDescriptor.js';
+ import { expect, test } from 'vitest';
+ import * as test_utils from '../test-utils/test-utils-index.js';
+ import { TEST_TABLE } from './util.js';
+
+ /**
+  * @example
+  * ```TypeScript
+  *
+  * describe('store - mongodb', function () {
+  *   registerDataStorageParameterTests(MONGO_STORAGE_FACTORY);
+  * });
+  *
+  * ```
+  */
+ export function registerDataStorageParameterTests(generateStorageFactory: storage.TestStorageFactory) {
+   test('save and load parameters', async () => {
+     await using factory = await generateStorageFactory();
+     const syncRules = await factory.updateSyncRules({
+       content: `
+ bucket_definitions:
+   mybucket:
+     parameters:
+       - SELECT group_id FROM test WHERE id1 = token_parameters.user_id OR id2 = token_parameters.user_id
+     data: []
+ `
+     });
+     const bucketStorage = factory.getInstance(syncRules);
+
+     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+       await batch.save({
+         sourceTable: TEST_TABLE,
+         tag: storage.SaveOperationTag.INSERT,
+         after: {
+           id: 't2',
+           id1: 'user3',
+           id2: 'user4',
+           group_id: 'group2a'
+         },
+         afterReplicaId: test_utils.rid('t2')
+       });
+
+       await batch.save({
+         sourceTable: TEST_TABLE,
+         tag: storage.SaveOperationTag.INSERT,
+         after: {
+           id: 't1',
+           id1: 'user1',
+           id2: 'user2',
+           group_id: 'group1a'
+         },
+         afterReplicaId: test_utils.rid('t1')
+       });
+
+       await batch.commit('1/1');
+     });
+
+     const checkpoint = await bucketStorage.getCheckpoint();
+     const parameters = await checkpoint.getParameterSets([ParameterLookup.normalized('mybucket', '1', ['user1'])]);
+     expect(parameters).toEqual([
+       {
+         group_id: 'group1a'
+       }
+     ]);
+   });
+
+   test('it should use the latest version', async () => {
+     await using factory = await generateStorageFactory();
+     const syncRules = await factory.updateSyncRules({
+       content: `
+ bucket_definitions:
+   mybucket:
+     parameters:
+       - SELECT group_id FROM test WHERE id = token_parameters.user_id
+     data: []
+ `
+     });
+     const bucketStorage = factory.getInstance(syncRules);
+
+     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+       await batch.save({
+         sourceTable: TEST_TABLE,
+         tag: storage.SaveOperationTag.INSERT,
+         after: {
+           id: 'user1',
+           group_id: 'group1'
+         },
+         afterReplicaId: test_utils.rid('user1')
+       });
+       await batch.commit('1/1');
+     });
+     const checkpoint1 = await bucketStorage.getCheckpoint();
+     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+       await batch.save({
+         sourceTable: TEST_TABLE,
+         tag: storage.SaveOperationTag.INSERT,
+         after: {
+           id: 'user1',
+           group_id: 'group2'
+         },
+         afterReplicaId: test_utils.rid('user1')
+       });
+       await batch.commit('1/2');
+     });
+     const checkpoint2 = await bucketStorage.getCheckpoint();
+
+     const parameters = await checkpoint2.getParameterSets([ParameterLookup.normalized('mybucket', '1', ['user1'])]);
+     expect(parameters).toEqual([
+       {
+         group_id: 'group2'
+       }
+     ]);
+
+     // Use the checkpoint to get older data if relevant
+     const parameters2 = await checkpoint1.getParameterSets([ParameterLookup.normalized('mybucket', '1', ['user1'])]);
+     expect(parameters2).toEqual([
+       {
+         group_id: 'group1'
+       }
+     ]);
+   });
+
+   test('it should use the latest version after updates', async () => {
+     await using factory = await generateStorageFactory();
+     const syncRules = await factory.updateSyncRules({
+       content: `
+ bucket_definitions:
+   mybucket:
+     parameters:
+       - SELECT id AS todo_id
+           FROM todos
+           WHERE list_id IN token_parameters.list_id
+     data: []
+ `
+     });
+     const bucketStorage = factory.getInstance(syncRules);
+
+     const table = test_utils.makeTestTable('todos', ['id', 'list_id']);
+
+     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+       // Create two todos which initially belong to different lists
+       await batch.save({
+         sourceTable: table,
+         tag: storage.SaveOperationTag.INSERT,
+         after: {
+           id: 'todo1',
+           list_id: 'list1'
+         },
+         afterReplicaId: test_utils.rid('todo1')
+       });
+       await batch.save({
+         sourceTable: table,
+         tag: storage.SaveOperationTag.INSERT,
+         after: {
+           id: 'todo2',
+           list_id: 'list2'
+         },
+         afterReplicaId: test_utils.rid('todo2')
+       });
+
+       await batch.commit('1/1');
+     });
+
+     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+       // Update the second todo item to now belong to list 1
+       await batch.save({
+         sourceTable: table,
+         tag: storage.SaveOperationTag.UPDATE,
+         after: {
+           id: 'todo2',
+           list_id: 'list1'
+         },
+         afterReplicaId: test_utils.rid('todo2')
+       });
+
+       await batch.commit('1/1');
+     });
+
+     // We specifically request the todo_ids for both lists.
+     // The removal operation for the association of `list2`::`todo2` should not interfere with the new
+     // association of `list1`::`todo2`.
+     const checkpoint = await bucketStorage.getCheckpoint();
+     const parameters = await checkpoint.getParameterSets([
+       ParameterLookup.normalized('mybucket', '1', ['list1']),
+       ParameterLookup.normalized('mybucket', '1', ['list2'])
+     ]);
+
+     expect(parameters.sort((a, b) => (a.todo_id as string).localeCompare(b.todo_id as string))).toEqual([
+       {
+         todo_id: 'todo1'
+       },
+       {
+         todo_id: 'todo2'
+       }
+     ]);
+   });
+
+   test('save and load parameters with different number types', async () => {
+     await using factory = await generateStorageFactory();
+     const syncRules = await factory.updateSyncRules({
+       content: `
+ bucket_definitions:
+   mybucket:
+     parameters:
+       - SELECT group_id FROM test WHERE n1 = token_parameters.n1 and f2 = token_parameters.f2 and f3 = token_parameters.f3
+     data: []
+ `
+     });
+     const bucketStorage = factory.getInstance(syncRules);
+
+     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+       await batch.save({
+         sourceTable: TEST_TABLE,
+         tag: storage.SaveOperationTag.INSERT,
+         after: {
+           id: 't1',
+           group_id: 'group1',
+           n1: 314n,
+           f2: 314,
+           f3: 3.14
+         },
+         afterReplicaId: test_utils.rid('t1')
+       });
+
+       await batch.commit('1/1');
+     });
+
+     const TEST_PARAMS = { group_id: 'group1' };
+
+     const checkpoint = await bucketStorage.getCheckpoint();
+
+     const parameters1 = await checkpoint.getParameterSets([
+       ParameterLookup.normalized('mybucket', '1', [314n, 314, 3.14])
+     ]);
+     expect(parameters1).toEqual([TEST_PARAMS]);
+     const parameters2 = await checkpoint.getParameterSets([
+       ParameterLookup.normalized('mybucket', '1', [314, 314n, 3.14])
+     ]);
+     expect(parameters2).toEqual([TEST_PARAMS]);
+     const parameters3 = await checkpoint.getParameterSets([
+       ParameterLookup.normalized('mybucket', '1', [314n, 314, 3])
+     ]);
+     expect(parameters3).toEqual([]);
+   });
+
+   test('save and load parameters with large numbers', async () => {
+     // This ensures serialization / deserialization of "current_data" is done correctly.
+     // The specific case tested here cannot happen with postgres in practice, but we still
+     // test this to ensure correct deserialization.
+
+     await using factory = await generateStorageFactory();
+     const syncRules = await factory.updateSyncRules({
+       content: `
+ bucket_definitions:
+   mybucket:
+     parameters:
+       - SELECT group_id FROM test WHERE n1 = token_parameters.n1
+     data: []
+ `
+     });
+     const bucketStorage = factory.getInstance(syncRules);
+
+     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+       await batch.save({
+         sourceTable: TEST_TABLE,
+         tag: storage.SaveOperationTag.INSERT,
+         after: {
+           id: 't1',
+           group_id: 'group1',
+           n1: 1152921504606846976n // 2^60
+         },
+         afterReplicaId: test_utils.rid('t1')
+       });
+
+       await batch.save({
+         sourceTable: TEST_TABLE,
+         tag: storage.SaveOperationTag.UPDATE,
+         after: {
+           id: 't1',
+           group_id: 'group1',
+           // Simulate a TOAST value, even though it can't happen for values like this
+           // in practice.
+           n1: undefined
+         },
+         afterReplicaId: test_utils.rid('t1')
+       });
+
+       await batch.commit('1/1');
+     });
+
+     const TEST_PARAMS = { group_id: 'group1' };
+
+     const checkpoint = await bucketStorage.getCheckpoint();
+
+     const parameters1 = await checkpoint.getParameterSets([
+       ParameterLookup.normalized('mybucket', '1', [1152921504606846976n])
+     ]);
+     expect(parameters1).toEqual([TEST_PARAMS]);
+   });
+
+   test('save and load parameters with workspaceId', async () => {
+     const WORKSPACE_TABLE = test_utils.makeTestTable('workspace', ['id']);
+
+     await using factory = await generateStorageFactory();
+     const syncRules = await factory.updateSyncRules({
+       content: `
+ bucket_definitions:
+   by_workspace:
+     parameters:
+       - SELECT id as workspace_id FROM workspace WHERE
+           workspace."userId" = token_parameters.user_id
+     data: []
+ `
+     });
+     const sync_rules = syncRules.parsed(test_utils.PARSE_OPTIONS).sync_rules;
+     const bucketStorage = factory.getInstance(syncRules);
+
+     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+       await batch.save({
+         sourceTable: WORKSPACE_TABLE,
+         tag: storage.SaveOperationTag.INSERT,
+         after: {
+           id: 'workspace1',
+           userId: 'u1'
+         },
+         afterReplicaId: test_utils.rid('workspace1')
+       });
+       await batch.commit('1/1');
+     });
+     const checkpoint = await bucketStorage.getCheckpoint();
+
+     const parameters = new RequestParameters({ sub: 'u1' }, {});
+
+     const q1 = (sync_rules.bucketSources[0] as SqlBucketDescriptor).parameterQueries[0];
+
+     const lookups = q1.getLookups(parameters);
+     expect(lookups).toEqual([ParameterLookup.normalized('by_workspace', '1', ['u1'])]);
+
+     const parameter_sets = await checkpoint.getParameterSets(lookups);
+     expect(parameter_sets).toEqual([{ workspace_id: 'workspace1' }]);
+
+     const buckets = await sync_rules
+       .getBucketParameterQuerier(test_utils.querierOptions(parameters))
+       .querier.queryDynamicBucketDescriptions({
+         getParameterSets(lookups) {
+           return checkpoint.getParameterSets(lookups);
+         }
+       });
+     expect(buckets).toEqual([
+       { bucket: 'by_workspace["workspace1"]', priority: 3, definition: 'by_workspace', inclusion_reasons: ['default'] }
+     ]);
+   });
+
+   test('save and load parameters with dynamic global buckets', async () => {
+     const WORKSPACE_TABLE = test_utils.makeTestTable('workspace');
+
+     await using factory = await generateStorageFactory();
+     const syncRules = await factory.updateSyncRules({
+       content: `
+ bucket_definitions:
+   by_public_workspace:
+     parameters:
+       - SELECT id as workspace_id FROM workspace WHERE
+           workspace.visibility = 'public'
+     data: []
+ `
+     });
+     const sync_rules = syncRules.parsed(test_utils.PARSE_OPTIONS).sync_rules;
+     const bucketStorage = factory.getInstance(syncRules);
+
+     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+       await batch.save({
+         sourceTable: WORKSPACE_TABLE,
+         tag: storage.SaveOperationTag.INSERT,
+         after: {
+           id: 'workspace1',
+           visibility: 'public'
+         },
+         afterReplicaId: test_utils.rid('workspace1')
+       });
+
+       await batch.save({
+         sourceTable: WORKSPACE_TABLE,
+         tag: storage.SaveOperationTag.INSERT,
+         after: {
+           id: 'workspace2',
+           visibility: 'private'
+         },
+         afterReplicaId: test_utils.rid('workspace2')
+       });
+
+       await batch.save({
+         sourceTable: WORKSPACE_TABLE,
+         tag: storage.SaveOperationTag.INSERT,
+         after: {
+           id: 'workspace3',
+           visibility: 'public'
+         },
+         afterReplicaId: test_utils.rid('workspace3')
+       });
+
+       await batch.commit('1/1');
+     });
+
+     const checkpoint = await bucketStorage.getCheckpoint();
+
+     const parameters = new RequestParameters({ sub: 'unknown' }, {});
+
+     const q1 = (sync_rules.bucketSources[0] as SqlBucketDescriptor).parameterQueries[0];
+
+     const lookups = q1.getLookups(parameters);
+     expect(lookups).toEqual([ParameterLookup.normalized('by_public_workspace', '1', [])]);
+
+     const parameter_sets = await checkpoint.getParameterSets(lookups);
+     parameter_sets.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
+     expect(parameter_sets).toEqual([{ workspace_id: 'workspace1' }, { workspace_id: 'workspace3' }]);
+
+     const buckets = await sync_rules
+       .getBucketParameterQuerier(test_utils.querierOptions(parameters))
+       .querier.queryDynamicBucketDescriptions({
+         getParameterSets(lookups) {
+           return checkpoint.getParameterSets(lookups);
+         }
+       });
+     buckets.sort((a, b) => a.bucket.localeCompare(b.bucket));
+     expect(buckets).toEqual([
+       {
+         bucket: 'by_public_workspace["workspace1"]',
+         priority: 3,
+         definition: 'by_public_workspace',
+         inclusion_reasons: ['default']
+       },
+       {
+         bucket: 'by_public_workspace["workspace3"]',
+         priority: 3,
+         definition: 'by_public_workspace',
+         inclusion_reasons: ['default']
+       }
+     ]);
+   });
+
+   test('multiple parameter queries', async () => {
+     const WORKSPACE_TABLE = test_utils.makeTestTable('workspace');
+
+     await using factory = await generateStorageFactory();
+     const syncRules = await factory.updateSyncRules({
+       content: `
+ bucket_definitions:
+   by_workspace:
+     parameters:
+       - SELECT id as workspace_id FROM workspace WHERE
+           workspace.visibility = 'public'
+       - SELECT id as workspace_id FROM workspace WHERE
+           workspace.user_id = token_parameters.user_id
+     data: []
+ `
+     });
+     const sync_rules = syncRules.parsed(test_utils.PARSE_OPTIONS).sync_rules;
+     const bucketStorage = factory.getInstance(syncRules);
+
+     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+       await batch.save({
+         sourceTable: WORKSPACE_TABLE,
+         tag: storage.SaveOperationTag.INSERT,
+         after: {
+           id: 'workspace1',
+           visibility: 'public'
+         },
+         afterReplicaId: test_utils.rid('workspace1')
+       });
+
+       await batch.save({
+         sourceTable: WORKSPACE_TABLE,
+         tag: storage.SaveOperationTag.INSERT,
+         after: {
+           id: 'workspace2',
+           visibility: 'private'
+         },
+         afterReplicaId: test_utils.rid('workspace2')
+       });
+
+       await batch.save({
+         sourceTable: WORKSPACE_TABLE,
+         tag: storage.SaveOperationTag.INSERT,
+         after: {
+           id: 'workspace3',
+           user_id: 'u1',
+           visibility: 'private'
+         },
+         afterReplicaId: test_utils.rid('workspace3')
+       });
+
+       await batch.save({
+         sourceTable: WORKSPACE_TABLE,
+         tag: storage.SaveOperationTag.INSERT,
+         after: {
+           id: 'workspace4',
+           user_id: 'u2',
+           visibility: 'private'
+         },
+         afterReplicaId: test_utils.rid('workspace4')
+       });
+
+       await batch.commit('1/1');
+     });
+
+     const checkpoint = await bucketStorage.getCheckpoint();
+
+     const parameters = new RequestParameters({ sub: 'u1' }, {});
+
+     // Test intermediate values - could be moved to sync_rules.test.ts
+     const q1 = (sync_rules.bucketSources[0] as SqlBucketDescriptor).parameterQueries[0];
+     const lookups1 = q1.getLookups(parameters);
+     expect(lookups1).toEqual([ParameterLookup.normalized('by_workspace', '1', [])]);
+
+     const parameter_sets1 = await checkpoint.getParameterSets(lookups1);
+     parameter_sets1.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
+     expect(parameter_sets1).toEqual([{ workspace_id: 'workspace1' }]);
+
+     const q2 = (sync_rules.bucketSources[0] as SqlBucketDescriptor).parameterQueries[1];
+     const lookups2 = q2.getLookups(parameters);
+     expect(lookups2).toEqual([ParameterLookup.normalized('by_workspace', '2', ['u1'])]);
+
+     const parameter_sets2 = await checkpoint.getParameterSets(lookups2);
+     parameter_sets2.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
+     expect(parameter_sets2).toEqual([{ workspace_id: 'workspace3' }]);
+
+     // Test final values - the important part
+     const buckets = (
+       await sync_rules
+         .getBucketParameterQuerier(test_utils.querierOptions(parameters))
+         .querier.queryDynamicBucketDescriptions({
+           getParameterSets(lookups) {
+             return checkpoint.getParameterSets(lookups);
+           }
+         })
+     ).map((e) => e.bucket);
+     buckets.sort();
+     expect(buckets).toEqual(['by_workspace["workspace1"]', 'by_workspace["workspace3"]']);
+   });
+
+   test('truncate parameters', async () => {
+     await using factory = await generateStorageFactory();
+     const syncRules = await factory.updateSyncRules({
+       content: `
+ bucket_definitions:
+   mybucket:
+     parameters:
+       - SELECT group_id FROM test WHERE id1 = token_parameters.user_id OR id2 = token_parameters.user_id
+     data: []
+ `
+     });
+     const bucketStorage = factory.getInstance(syncRules);
+
+     await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+       await batch.save({
+         sourceTable: TEST_TABLE,
+         tag: storage.SaveOperationTag.INSERT,
+         after: {
+           id: 't2',
+           id1: 'user3',
+           id2: 'user4',
+           group_id: 'group2a'
+         },
+         afterReplicaId: test_utils.rid('t2')
+       });
+
+       await batch.truncate([TEST_TABLE]);
+     });
+
+     const checkpoint = await bucketStorage.getCheckpoint();
+
+     const parameters = await checkpoint.getParameterSets([ParameterLookup.normalized('mybucket', '1', ['user1'])]);
+     expect(parameters).toEqual([]);
+   });
+
+   test('invalidate cached parsed sync rules', async () => {
+     await using bucketStorageFactory = await generateStorageFactory();
+     const syncRules = await bucketStorageFactory.updateSyncRules({
+       content: `
+ bucket_definitions:
+   by_workspace:
+     parameters:
+       - SELECT id as workspace_id FROM workspace WHERE
+           workspace."userId" = token_parameters.user_id
+     data: []
+ `
+     });
+     const syncBucketStorage = bucketStorageFactory.getInstance(syncRules);
+
+     const parsedSchema1 = syncBucketStorage.getParsedSyncRules({
+       defaultSchema: 'public'
+     });
+
+     const parsedSchema2 = syncBucketStorage.getParsedSyncRules({
+       defaultSchema: 'public'
+     });
+
+     // These should be cached; this will be the same instance
+     expect(parsedSchema2).equals(parsedSchema1);
+     expect(parsedSchema1.getSourceTables()[0].schema).equals('public');
+
+     const parsedSchema3 = syncBucketStorage.getParsedSyncRules({
+       defaultSchema: 'databasename'
+     });
+
+     // The cache should not be used
+     expect(parsedSchema3).not.equals(parsedSchema2);
+     expect(parsedSchema3.getSourceTables()[0].schema).equals('databasename');
+   });
+ }
package/src/tests/register-sync-tests.ts
@@ -1347,7 +1347,8 @@ async function consumeIterator<T>(
      return lines;
    } catch (e) {
      if (options?.consume) {
-       iter.throw?.(e);
+       // iter.throw here would result in an uncaught error
+       iter.return?.(e);
      }
      throw e;
    }
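
Why this one-line change matters: calling `throw()` on a partially-consumed async generator re-raises the error inside the generator, and if nothing there catches it, the promise returned by `throw()` rejects; left un-awaited (as in the old code), that surfaces as an unhandled rejection. A minimal standalone sketch, not from the package:

```TypeScript
// Standalone sketch: .throw() vs .return() on a partially-consumed
// async generator.
async function* lines(): AsyncGenerator<string> {
  try {
    while (true) {
      yield 'line';
    }
  } finally {
    // Cleanup here runs for both .return() and .throw().
  }
}

async function main() {
  const iter = lines()[Symbol.asyncIterator]();
  await iter.next();

  // iter.throw(e) re-raises `e` inside the generator; nothing there catches
  // it, so the promise returned by .throw() rejects. Left un-awaited, that
  // becomes an unhandled rejection.

  // iter.return(e) instead just ends the generator: the finally block runs
  // and the returned promise resolves with { done: true }, leaving the
  // caller free to re-throw the original error itself.
  await iter.return?.(new Error('consumer failed'));
}

main();
```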
package/src/tests/tests-index.ts
@@ -1,6 +1,9 @@
  export * from './register-bucket-validation-tests.js';
  export * from './register-compacting-tests.js';
  export * from './register-parameter-compacting-tests.js';
- export * from './register-data-storage-tests.js';
+ export * from './register-data-storage-parameter-tests.js';
+ export * from './register-data-storage-data-tests.js';
+ export * from './register-data-storage-checkpoint-tests.js';
  export * from './register-migration-tests.js';
  export * from './register-sync-tests.js';
+ export * from './util.js';
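
The former `register-data-storage-tests` module is now split into parameter, data, and checkpoint suites, plus a shared `util.js`. A hypothetical consumer would register each suite separately; `registerDataStorageDataTests` and `registerDataStorageCheckpointTests` are assumed names mirroring the new file names, since only `registerDataStorageParameterTests` appears in this diff:

```TypeScript
import { describe } from 'vitest';
import * as core_tests from '@powersync/service-core-tests';
// Placeholder for the consumer's own storage.TestStorageFactory:
import { MONGO_STORAGE_FACTORY } from './test-storage-factory.js';

describe('store - mongodb', () => {
  // Previously a single registerDataStorageTests(MONGO_STORAGE_FACTORY) call.
  core_tests.registerDataStorageParameterTests(MONGO_STORAGE_FACTORY);
  core_tests.registerDataStorageDataTests(MONGO_STORAGE_FACTORY); // assumed name
  core_tests.registerDataStorageCheckpointTests(MONGO_STORAGE_FACTORY); // assumed name
});
```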
package/src/tests/util.ts
@@ -0,0 +1,3 @@
+ import { test_utils } from '../index.js';
+
+ export const TEST_TABLE = test_utils.makeTestTable('test', ['id']);