@powersync/service-core-tests 0.14.0 → 0.15.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. package/CHANGELOG.md +40 -0
  2. package/dist/test-utils/general-utils.d.ts +22 -3
  3. package/dist/test-utils/general-utils.js +56 -3
  4. package/dist/test-utils/general-utils.js.map +1 -1
  5. package/dist/test-utils/stream_utils.js +2 -2
  6. package/dist/test-utils/stream_utils.js.map +1 -1
  7. package/dist/tests/register-compacting-tests.d.ts +1 -1
  8. package/dist/tests/register-compacting-tests.js +360 -297
  9. package/dist/tests/register-compacting-tests.js.map +1 -1
  10. package/dist/tests/register-data-storage-checkpoint-tests.d.ts +1 -1
  11. package/dist/tests/register-data-storage-checkpoint-tests.js +59 -48
  12. package/dist/tests/register-data-storage-checkpoint-tests.js.map +1 -1
  13. package/dist/tests/register-data-storage-data-tests.d.ts +2 -2
  14. package/dist/tests/register-data-storage-data-tests.js +1112 -612
  15. package/dist/tests/register-data-storage-data-tests.js.map +1 -1
  16. package/dist/tests/register-data-storage-parameter-tests.d.ts +1 -1
  17. package/dist/tests/register-data-storage-parameter-tests.js +273 -254
  18. package/dist/tests/register-data-storage-parameter-tests.js.map +1 -1
  19. package/dist/tests/register-parameter-compacting-tests.d.ts +1 -1
  20. package/dist/tests/register-parameter-compacting-tests.js +83 -87
  21. package/dist/tests/register-parameter-compacting-tests.js.map +1 -1
  22. package/dist/tests/register-sync-tests.d.ts +2 -1
  23. package/dist/tests/register-sync-tests.js +479 -451
  24. package/dist/tests/register-sync-tests.js.map +1 -1
  25. package/dist/tests/util.d.ts +5 -4
  26. package/dist/tests/util.js +27 -12
  27. package/dist/tests/util.js.map +1 -1
  28. package/package.json +3 -3
  29. package/src/test-utils/general-utils.ts +81 -4
  30. package/src/test-utils/stream_utils.ts +2 -2
  31. package/src/tests/register-compacting-tests.ts +376 -322
  32. package/src/tests/register-data-storage-checkpoint-tests.ts +85 -53
  33. package/src/tests/register-data-storage-data-tests.ts +1050 -559
  34. package/src/tests/register-data-storage-parameter-tests.ts +330 -288
  35. package/src/tests/register-parameter-compacting-tests.ts +87 -90
  36. package/src/tests/register-sync-tests.ts +390 -380
  37. package/src/tests/util.ts +46 -17
  38. package/tsconfig.tsbuildinfo +1 -1
@@ -1,9 +1,9 @@
1
- import { JwtPayload, storage, updateSyncRulesFromYaml } from '@powersync/service-core';
1
+ import { CURRENT_STORAGE_VERSION, JwtPayload, storage, updateSyncRulesFromYaml } from '@powersync/service-core';
2
2
  import { RequestParameters, ScopedParameterLookup, SqliteJsonRow } from '@powersync/service-sync-rules';
3
3
  import { expect, test } from 'vitest';
4
4
  import * as test_utils from '../test-utils/test-utils-index.js';
5
- import { bucketRequest, TEST_TABLE } from './util.js';
6
- import { ParameterLookupScope } from '@powersync/service-sync-rules';
5
+ import { bucketRequest } from '../test-utils/test-utils-index.js';
6
+ import { parameterLookupScope } from './util.js';
7
7
 
8
8
  /**
9
9
  * @example
@@ -15,50 +15,59 @@ import { ParameterLookupScope } from '@powersync/service-sync-rules';
15
15
  *
16
16
  * ```
17
17
  */
18
- export function registerDataStorageParameterTests(generateStorageFactory: storage.TestStorageFactory) {
19
- const MYBUCKET_1: ParameterLookupScope = { lookupName: 'mybucket', queryId: '1' };
18
+ export function registerDataStorageParameterTests(config: storage.TestStorageConfig) {
19
+ const generateStorageFactory = config.factory;
20
+ const storageVersion = config.storageVersion ?? CURRENT_STORAGE_VERSION;
21
+ const MYBUCKET_1 = parameterLookupScope('mybucket', '1');
20
22
 
21
23
  test('save and load parameters', async () => {
22
24
  await using factory = await generateStorageFactory();
23
25
  const syncRules = await factory.updateSyncRules(
24
- updateSyncRulesFromYaml(`
26
+ updateSyncRulesFromYaml(
27
+ `
25
28
  bucket_definitions:
26
29
  mybucket:
27
30
  parameters:
28
31
  - SELECT group_id FROM test WHERE id1 = token_parameters.user_id OR id2 = token_parameters.user_id
29
32
  data: []
30
- `)
33
+ `,
34
+ {
35
+ storageVersion
36
+ }
37
+ )
31
38
  );
32
39
  const bucketStorage = factory.getInstance(syncRules);
33
40
 
34
- await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
35
- await batch.save({
36
- sourceTable: TEST_TABLE,
37
- tag: storage.SaveOperationTag.INSERT,
38
- after: {
39
- id: 't2',
40
- id1: 'user3',
41
- id2: 'user4',
42
- group_id: 'group2a'
43
- },
44
- afterReplicaId: test_utils.rid('t2')
45
- });
46
-
47
- await batch.save({
48
- sourceTable: TEST_TABLE,
49
- tag: storage.SaveOperationTag.INSERT,
50
- after: {
51
- id: 't1',
52
- id1: 'user1',
53
- id2: 'user2',
54
- group_id: 'group1a'
55
- },
56
- afterReplicaId: test_utils.rid('t1')
57
- });
58
-
59
- await batch.commit('1/1');
41
+ await using writer = await bucketStorage.createWriter(test_utils.BATCH_OPTIONS);
42
+ const testTable = await test_utils.resolveTestTable(writer, 'test', ['id'], config);
43
+ await writer.markAllSnapshotDone('1/1');
44
+
45
+ await writer.save({
46
+ sourceTable: testTable,
47
+ tag: storage.SaveOperationTag.INSERT,
48
+ after: {
49
+ id: 't2',
50
+ id1: 'user3',
51
+ id2: 'user4',
52
+ group_id: 'group2a'
53
+ },
54
+ afterReplicaId: test_utils.rid('t2')
60
55
  });
61
56
 
57
+ await writer.save({
58
+ sourceTable: testTable,
59
+ tag: storage.SaveOperationTag.INSERT,
60
+ after: {
61
+ id: 't1',
62
+ id1: 'user1',
63
+ id2: 'user2',
64
+ group_id: 'group1a'
65
+ },
66
+ afterReplicaId: test_utils.rid('t1')
67
+ });
68
+
69
+ await writer.commit('1/1');
70
+
62
71
  const checkpoint = await bucketStorage.getCheckpoint();
63
72
  const parameters = await checkpoint.getParameterSets([ScopedParameterLookup.direct(MYBUCKET_1, ['user1'])]);
64
73
  expect(parameters).toEqual([
@@ -71,41 +80,45 @@ bucket_definitions:
71
80
  test('it should use the latest version', async () => {
72
81
  await using factory = await generateStorageFactory();
73
82
  const syncRules = await factory.updateSyncRules(
74
- updateSyncRulesFromYaml(`
83
+ updateSyncRulesFromYaml(
84
+ `
75
85
  bucket_definitions:
76
86
  mybucket:
77
87
  parameters:
78
88
  - SELECT group_id FROM test WHERE id = token_parameters.user_id
79
89
  data: []
80
- `)
90
+ `,
91
+ {
92
+ storageVersion
93
+ }
94
+ )
81
95
  );
82
96
  const bucketStorage = factory.getInstance(syncRules);
83
97
 
84
- await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
85
- await batch.save({
86
- sourceTable: TEST_TABLE,
87
- tag: storage.SaveOperationTag.INSERT,
88
- after: {
89
- id: 'user1',
90
- group_id: 'group1'
91
- },
92
- afterReplicaId: test_utils.rid('user1')
93
- });
94
- await batch.commit('1/1');
98
+ await using writer = await bucketStorage.createWriter(test_utils.BATCH_OPTIONS);
99
+ const testTable = await test_utils.resolveTestTable(writer, 'test', ['id'], config);
100
+ await writer.markAllSnapshotDone('1/1');
101
+ await writer.save({
102
+ sourceTable: testTable,
103
+ tag: storage.SaveOperationTag.INSERT,
104
+ after: {
105
+ id: 'user1',
106
+ group_id: 'group1'
107
+ },
108
+ afterReplicaId: test_utils.rid('user1')
95
109
  });
110
+ await writer.commit('1/1');
96
111
  const checkpoint1 = await bucketStorage.getCheckpoint();
97
- await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
98
- await batch.save({
99
- sourceTable: TEST_TABLE,
100
- tag: storage.SaveOperationTag.INSERT,
101
- after: {
102
- id: 'user1',
103
- group_id: 'group2'
104
- },
105
- afterReplicaId: test_utils.rid('user1')
106
- });
107
- await batch.commit('1/2');
112
+ await writer.save({
113
+ sourceTable: testTable,
114
+ tag: storage.SaveOperationTag.INSERT,
115
+ after: {
116
+ id: 'user1',
117
+ group_id: 'group2'
118
+ },
119
+ afterReplicaId: test_utils.rid('user1')
108
120
  });
121
+ await writer.commit('1/2');
109
122
  const checkpoint2 = await bucketStorage.getCheckpoint();
110
123
 
111
124
  const parameters = await checkpoint2.getParameterSets([ScopedParameterLookup.direct(MYBUCKET_1, ['user1'])]);
@@ -127,7 +140,8 @@ bucket_definitions:
127
140
  test('it should use the latest version after updates', async () => {
128
141
  await using factory = await generateStorageFactory();
129
142
  const syncRules = await factory.updateSyncRules(
130
- updateSyncRulesFromYaml(`
143
+ updateSyncRulesFromYaml(
144
+ `
131
145
  bucket_definitions:
132
146
  mybucket:
133
147
  parameters:
@@ -135,51 +149,50 @@ bucket_definitions:
135
149
  FROM todos
136
150
  WHERE list_id IN token_parameters.list_id
137
151
  data: []
138
- `)
152
+ `,
153
+ { storageVersion }
154
+ )
139
155
  );
140
156
  const bucketStorage = factory.getInstance(syncRules);
141
157
 
142
- const table = test_utils.makeTestTable('todos', ['id', 'list_id']);
143
-
144
- await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
145
- // Create two todos which initially belong to different lists
146
- await batch.save({
147
- sourceTable: table,
148
- tag: storage.SaveOperationTag.INSERT,
149
- after: {
150
- id: 'todo1',
151
- list_id: 'list1'
152
- },
153
- afterReplicaId: test_utils.rid('todo1')
154
- });
155
- await batch.save({
156
- sourceTable: table,
157
- tag: storage.SaveOperationTag.INSERT,
158
- after: {
159
- id: 'todo2',
160
- list_id: 'list2'
161
- },
162
- afterReplicaId: test_utils.rid('todo2')
163
- });
164
-
165
- await batch.commit('1/1');
158
+ await using writer = await bucketStorage.createWriter(test_utils.BATCH_OPTIONS);
159
+ const table = await test_utils.resolveTestTable(writer, 'todos', ['id', 'list_id'], config);
160
+ await writer.markAllSnapshotDone('1/1');
161
+ // Create two todos which initially belong to different lists
162
+ await writer.save({
163
+ sourceTable: table,
164
+ tag: storage.SaveOperationTag.INSERT,
165
+ after: {
166
+ id: 'todo1',
167
+ list_id: 'list1'
168
+ },
169
+ afterReplicaId: test_utils.rid('todo1')
170
+ });
171
+ await writer.save({
172
+ sourceTable: table,
173
+ tag: storage.SaveOperationTag.INSERT,
174
+ after: {
175
+ id: 'todo2',
176
+ list_id: 'list2'
177
+ },
178
+ afterReplicaId: test_utils.rid('todo2')
166
179
  });
167
180
 
168
- await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
169
- // Update the second todo item to now belong to list 1
170
- await batch.save({
171
- sourceTable: table,
172
- tag: storage.SaveOperationTag.UPDATE,
173
- after: {
174
- id: 'todo2',
175
- list_id: 'list1'
176
- },
177
- afterReplicaId: test_utils.rid('todo2')
178
- });
179
-
180
- await batch.commit('1/1');
181
+ await writer.commit('1/1');
182
+
183
+ // Update the second todo item to now belong to list 1
184
+ await writer.save({
185
+ sourceTable: table,
186
+ tag: storage.SaveOperationTag.UPDATE,
187
+ after: {
188
+ id: 'todo2',
189
+ list_id: 'list1'
190
+ },
191
+ afterReplicaId: test_utils.rid('todo2')
181
192
  });
182
193
 
194
+ await writer.commit('1/1');
195
+
183
196
  // We specifically request the todo_ids for both lists.
184
197
  // There removal operation for the association of `list2`::`todo2` should not interfere with the new
185
198
  // association of `list1`::`todo2`
@@ -202,33 +215,39 @@ bucket_definitions:
202
215
  test('save and load parameters with different number types', async () => {
203
216
  await using factory = await generateStorageFactory();
204
217
  const syncRules = await factory.updateSyncRules(
205
- updateSyncRulesFromYaml(`
218
+ updateSyncRulesFromYaml(
219
+ `
206
220
  bucket_definitions:
207
221
  mybucket:
208
222
  parameters:
209
223
  - SELECT group_id FROM test WHERE n1 = token_parameters.n1 and f2 = token_parameters.f2 and f3 = token_parameters.f3
210
224
  data: []
211
- `)
225
+ `,
226
+ {
227
+ storageVersion
228
+ }
229
+ )
212
230
  );
213
231
  const bucketStorage = factory.getInstance(syncRules);
214
232
 
215
- await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
216
- await batch.save({
217
- sourceTable: TEST_TABLE,
218
- tag: storage.SaveOperationTag.INSERT,
219
- after: {
220
- id: 't1',
221
- group_id: 'group1',
222
- n1: 314n,
223
- f2: 314,
224
- f3: 3.14
225
- },
226
- afterReplicaId: test_utils.rid('t1')
227
- });
228
-
229
- await batch.commit('1/1');
233
+ await using writer = await bucketStorage.createWriter(test_utils.BATCH_OPTIONS);
234
+ const testTable = await test_utils.resolveTestTable(writer, 'test', ['id'], config);
235
+ await writer.markAllSnapshotDone('1/1');
236
+ await writer.save({
237
+ sourceTable: testTable,
238
+ tag: storage.SaveOperationTag.INSERT,
239
+ after: {
240
+ id: 't1',
241
+ group_id: 'group1',
242
+ n1: 314n,
243
+ f2: 314,
244
+ f3: 3.14
245
+ },
246
+ afterReplicaId: test_utils.rid('t1')
230
247
  });
231
248
 
249
+ await writer.commit('1/1');
250
+
232
251
  const TEST_PARAMS = { group_id: 'group1' };
233
252
 
234
253
  const checkpoint = await bucketStorage.getCheckpoint();
@@ -252,44 +271,50 @@ bucket_definitions:
252
271
 
253
272
  await using factory = await generateStorageFactory();
254
273
  const syncRules = await factory.updateSyncRules(
255
- updateSyncRulesFromYaml(`
274
+ updateSyncRulesFromYaml(
275
+ `
256
276
  bucket_definitions:
257
277
  mybucket:
258
278
  parameters:
259
279
  - SELECT group_id FROM test WHERE n1 = token_parameters.n1
260
280
  data: []
261
- `)
281
+ `,
282
+ {
283
+ storageVersion
284
+ }
285
+ )
262
286
  );
263
287
  const bucketStorage = factory.getInstance(syncRules);
264
288
 
265
- await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
266
- await batch.save({
267
- sourceTable: TEST_TABLE,
268
- tag: storage.SaveOperationTag.INSERT,
269
- after: {
270
- id: 't1',
271
- group_id: 'group1',
272
- n1: 1152921504606846976n // 2^60
273
- },
274
- afterReplicaId: test_utils.rid('t1')
275
- });
276
-
277
- await batch.save({
278
- sourceTable: TEST_TABLE,
279
- tag: storage.SaveOperationTag.UPDATE,
280
- after: {
281
- id: 't1',
282
- group_id: 'group1',
283
- // Simulate a TOAST value, even though it can't happen for values like this
284
- // in practice.
285
- n1: undefined
286
- },
287
- afterReplicaId: test_utils.rid('t1')
288
- });
289
-
290
- await batch.commit('1/1');
289
+ await using writer = await bucketStorage.createWriter(test_utils.BATCH_OPTIONS);
290
+ const testTable = await test_utils.resolveTestTable(writer, 'test', ['id'], config);
291
+ await writer.markAllSnapshotDone('1/1');
292
+ await writer.save({
293
+ sourceTable: testTable,
294
+ tag: storage.SaveOperationTag.INSERT,
295
+ after: {
296
+ id: 't1',
297
+ group_id: 'group1',
298
+ n1: 1152921504606846976n // 2^60
299
+ },
300
+ afterReplicaId: test_utils.rid('t1')
301
+ });
302
+
303
+ await writer.save({
304
+ sourceTable: testTable,
305
+ tag: storage.SaveOperationTag.UPDATE,
306
+ after: {
307
+ id: 't1',
308
+ group_id: 'group1',
309
+ // Simulate a TOAST value, even though it can't happen for values like this
310
+ // in practice.
311
+ n1: undefined
312
+ },
313
+ afterReplicaId: test_utils.rid('t1')
291
314
  });
292
315
 
316
+ await writer.commit('1/1');
317
+
293
318
  const TEST_PARAMS = { group_id: 'group1' };
294
319
 
295
320
  const checkpoint = await bucketStorage.getCheckpoint();
@@ -301,34 +326,38 @@ bucket_definitions:
301
326
  });
302
327
 
303
328
  test('save and load parameters with workspaceId', async () => {
304
- const WORKSPACE_TABLE = test_utils.makeTestTable('workspace', ['id']);
305
-
306
329
  await using factory = await generateStorageFactory();
307
330
  const syncRules = await factory.updateSyncRules(
308
- updateSyncRulesFromYaml(`
331
+ updateSyncRulesFromYaml(
332
+ `
309
333
  bucket_definitions:
310
334
  by_workspace:
311
335
  parameters:
312
336
  - SELECT id as workspace_id FROM workspace WHERE
313
337
  workspace."userId" = token_parameters.user_id
314
338
  data: []
315
- `)
339
+ `,
340
+ {
341
+ storageVersion
342
+ }
343
+ )
316
344
  );
317
345
  const sync_rules = syncRules.parsed(test_utils.PARSE_OPTIONS).hydratedSyncRules();
318
346
  const bucketStorage = factory.getInstance(syncRules);
319
347
 
320
- await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
321
- await batch.save({
322
- sourceTable: WORKSPACE_TABLE,
323
- tag: storage.SaveOperationTag.INSERT,
324
- after: {
325
- id: 'workspace1',
326
- userId: 'u1'
327
- },
328
- afterReplicaId: test_utils.rid('workspace1')
329
- });
330
- await batch.commit('1/1');
348
+ await using writer = await bucketStorage.createWriter(test_utils.BATCH_OPTIONS);
349
+ const workspaceTable = await test_utils.resolveTestTable(writer, 'workspace', ['id'], config);
350
+ await writer.markAllSnapshotDone('1/1');
351
+ await writer.save({
352
+ sourceTable: workspaceTable,
353
+ tag: storage.SaveOperationTag.INSERT,
354
+ after: {
355
+ id: 'workspace1',
356
+ userId: 'u1'
357
+ },
358
+ afterReplicaId: test_utils.rid('workspace1')
331
359
  });
360
+ await writer.commit('1/1');
332
361
  const checkpoint = await bucketStorage.getCheckpoint();
333
362
 
334
363
  const parameters = new RequestParameters(new JwtPayload({ sub: 'u1' }), {});
@@ -337,7 +366,7 @@ bucket_definitions:
337
366
 
338
367
  const buckets = await querier.queryDynamicBucketDescriptions({
339
368
  async getParameterSets(lookups) {
340
- expect(lookups).toEqual([ScopedParameterLookup.direct({ lookupName: 'by_workspace', queryId: '1' }, ['u1'])]);
369
+ expect(lookups).toEqual([ScopedParameterLookup.direct(parameterLookupScope('by_workspace', '1'), ['u1'])]);
341
370
 
342
371
  const parameter_sets = await checkpoint.getParameterSets(lookups);
343
372
  expect(parameter_sets).toEqual([{ workspace_id: 'workspace1' }]);
@@ -346,7 +375,7 @@ bucket_definitions:
346
375
  });
347
376
  expect(buckets).toEqual([
348
377
  {
349
- bucket: bucketRequest(syncRules, 'by_workspace["workspace1"]'),
378
+ bucket: bucketRequest(syncRules, 'by_workspace["workspace1"]').bucket,
350
379
  priority: 3,
351
380
  definition: 'by_workspace',
352
381
  inclusion_reasons: ['default']
@@ -355,56 +384,60 @@ bucket_definitions:
355
384
  });
356
385
 
357
386
  test('save and load parameters with dynamic global buckets', async () => {
358
- const WORKSPACE_TABLE = test_utils.makeTestTable('workspace');
359
-
360
387
  await using factory = await generateStorageFactory();
361
388
  const syncRules = await factory.updateSyncRules(
362
- updateSyncRulesFromYaml(`
389
+ updateSyncRulesFromYaml(
390
+ `
363
391
  bucket_definitions:
364
392
  by_public_workspace:
365
393
  parameters:
366
394
  - SELECT id as workspace_id FROM workspace WHERE
367
395
  workspace.visibility = 'public'
368
396
  data: []
369
- `)
397
+ `,
398
+ {
399
+ storageVersion
400
+ }
401
+ )
370
402
  );
371
403
  const sync_rules = syncRules.parsed(test_utils.PARSE_OPTIONS).hydratedSyncRules();
372
404
  const bucketStorage = factory.getInstance(syncRules);
373
405
 
374
- await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
375
- await batch.save({
376
- sourceTable: WORKSPACE_TABLE,
377
- tag: storage.SaveOperationTag.INSERT,
378
- after: {
379
- id: 'workspace1',
380
- visibility: 'public'
381
- },
382
- afterReplicaId: test_utils.rid('workspace1')
383
- });
384
-
385
- await batch.save({
386
- sourceTable: WORKSPACE_TABLE,
387
- tag: storage.SaveOperationTag.INSERT,
388
- after: {
389
- id: 'workspace2',
390
- visibility: 'private'
391
- },
392
- afterReplicaId: test_utils.rid('workspace2')
393
- });
394
-
395
- await batch.save({
396
- sourceTable: WORKSPACE_TABLE,
397
- tag: storage.SaveOperationTag.INSERT,
398
- after: {
399
- id: 'workspace3',
400
- visibility: 'public'
401
- },
402
- afterReplicaId: test_utils.rid('workspace3')
403
- });
404
-
405
- await batch.commit('1/1');
406
+ await using writer = await bucketStorage.createWriter(test_utils.BATCH_OPTIONS);
407
+ const workspaceTable = await test_utils.resolveTestTable(writer, 'workspace', undefined, config);
408
+ await writer.markAllSnapshotDone('1/1');
409
+ await writer.save({
410
+ sourceTable: workspaceTable,
411
+ tag: storage.SaveOperationTag.INSERT,
412
+ after: {
413
+ id: 'workspace1',
414
+ visibility: 'public'
415
+ },
416
+ afterReplicaId: test_utils.rid('workspace1')
417
+ });
418
+
419
+ await writer.save({
420
+ sourceTable: workspaceTable,
421
+ tag: storage.SaveOperationTag.INSERT,
422
+ after: {
423
+ id: 'workspace2',
424
+ visibility: 'private'
425
+ },
426
+ afterReplicaId: test_utils.rid('workspace2')
406
427
  });
407
428
 
429
+ await writer.save({
430
+ sourceTable: workspaceTable,
431
+ tag: storage.SaveOperationTag.INSERT,
432
+ after: {
433
+ id: 'workspace3',
434
+ visibility: 'public'
435
+ },
436
+ afterReplicaId: test_utils.rid('workspace3')
437
+ });
438
+
439
+ await writer.commit('1/1');
440
+
408
441
  const checkpoint = await bucketStorage.getCheckpoint();
409
442
 
410
443
  const parameters = new RequestParameters(new JwtPayload({ sub: 'unknown' }), {});
@@ -413,9 +446,7 @@ bucket_definitions:
413
446
 
414
447
  const buckets = await querier.queryDynamicBucketDescriptions({
415
448
  async getParameterSets(lookups) {
416
- expect(lookups).toEqual([
417
- ScopedParameterLookup.direct({ lookupName: 'by_public_workspace', queryId: '1' }, [])
418
- ]);
449
+ expect(lookups).toEqual([ScopedParameterLookup.direct(parameterLookupScope('by_public_workspace', '1'), [])]);
419
450
 
420
451
  const parameter_sets = await checkpoint.getParameterSets(lookups);
421
452
  parameter_sets.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
@@ -426,13 +457,13 @@ bucket_definitions:
426
457
  buckets.sort((a, b) => a.bucket.localeCompare(b.bucket));
427
458
  expect(buckets).toEqual([
428
459
  {
429
- bucket: bucketRequest(syncRules, 'by_public_workspace["workspace1"]'),
460
+ bucket: bucketRequest(syncRules, 'by_public_workspace["workspace1"]').bucket,
430
461
  priority: 3,
431
462
  definition: 'by_public_workspace',
432
463
  inclusion_reasons: ['default']
433
464
  },
434
465
  {
435
- bucket: bucketRequest(syncRules, 'by_public_workspace["workspace3"]'),
466
+ bucket: bucketRequest(syncRules, 'by_public_workspace["workspace3"]').bucket,
436
467
  priority: 3,
437
468
  definition: 'by_public_workspace',
438
469
  inclusion_reasons: ['default']
@@ -441,11 +472,10 @@ bucket_definitions:
441
472
  });
442
473
 
443
474
  test('multiple parameter queries', async () => {
444
- const WORKSPACE_TABLE = test_utils.makeTestTable('workspace');
445
-
446
475
  await using factory = await generateStorageFactory();
447
476
  const syncRules = await factory.updateSyncRules(
448
- updateSyncRulesFromYaml(`
477
+ updateSyncRulesFromYaml(
478
+ `
449
479
  bucket_definitions:
450
480
  by_workspace:
451
481
  parameters:
@@ -454,57 +484,62 @@ bucket_definitions:
454
484
  - SELECT id as workspace_id FROM workspace WHERE
455
485
  workspace.user_id = token_parameters.user_id
456
486
  data: []
457
- `)
487
+ `,
488
+ {
489
+ storageVersion
490
+ }
491
+ )
458
492
  );
459
493
  const sync_rules = syncRules.parsed(test_utils.PARSE_OPTIONS).hydratedSyncRules();
460
494
  const bucketStorage = factory.getInstance(syncRules);
461
495
 
462
- await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
463
- await batch.save({
464
- sourceTable: WORKSPACE_TABLE,
465
- tag: storage.SaveOperationTag.INSERT,
466
- after: {
467
- id: 'workspace1',
468
- visibility: 'public'
469
- },
470
- afterReplicaId: test_utils.rid('workspace1')
471
- });
472
-
473
- await batch.save({
474
- sourceTable: WORKSPACE_TABLE,
475
- tag: storage.SaveOperationTag.INSERT,
476
- after: {
477
- id: 'workspace2',
478
- visibility: 'private'
479
- },
480
- afterReplicaId: test_utils.rid('workspace2')
481
- });
482
-
483
- await batch.save({
484
- sourceTable: WORKSPACE_TABLE,
485
- tag: storage.SaveOperationTag.INSERT,
486
- after: {
487
- id: 'workspace3',
488
- user_id: 'u1',
489
- visibility: 'private'
490
- },
491
- afterReplicaId: test_utils.rid('workspace3')
492
- });
493
-
494
- await batch.save({
495
- sourceTable: WORKSPACE_TABLE,
496
- tag: storage.SaveOperationTag.INSERT,
497
- after: {
498
- id: 'workspace4',
499
- user_id: 'u2',
500
- visibility: 'private'
501
- },
502
- afterReplicaId: test_utils.rid('workspace4')
503
- });
504
-
505
- await batch.commit('1/1');
496
+ await using writer = await bucketStorage.createWriter(test_utils.BATCH_OPTIONS);
497
+ const workspaceTable = await test_utils.resolveTestTable(writer, 'workspace', undefined, config);
498
+ await writer.markAllSnapshotDone('1/1');
499
+ await writer.save({
500
+ sourceTable: workspaceTable,
501
+ tag: storage.SaveOperationTag.INSERT,
502
+ after: {
503
+ id: 'workspace1',
504
+ visibility: 'public'
505
+ },
506
+ afterReplicaId: test_utils.rid('workspace1')
507
+ });
508
+
509
+ await writer.save({
510
+ sourceTable: workspaceTable,
511
+ tag: storage.SaveOperationTag.INSERT,
512
+ after: {
513
+ id: 'workspace2',
514
+ visibility: 'private'
515
+ },
516
+ afterReplicaId: test_utils.rid('workspace2')
506
517
  });
507
518
 
519
+ await writer.save({
520
+ sourceTable: workspaceTable,
521
+ tag: storage.SaveOperationTag.INSERT,
522
+ after: {
523
+ id: 'workspace3',
524
+ user_id: 'u1',
525
+ visibility: 'private'
526
+ },
527
+ afterReplicaId: test_utils.rid('workspace3')
528
+ });
529
+
530
+ await writer.save({
531
+ sourceTable: workspaceTable,
532
+ tag: storage.SaveOperationTag.INSERT,
533
+ after: {
534
+ id: 'workspace4',
535
+ user_id: 'u2',
536
+ visibility: 'private'
537
+ },
538
+ afterReplicaId: test_utils.rid('workspace4')
539
+ });
540
+
541
+ await writer.commit('1/1');
542
+
508
543
  const checkpoint = await bucketStorage.getCheckpoint();
509
544
 
510
545
  const parameters = new RequestParameters(new JwtPayload({ sub: 'u1' }), {});
@@ -526,48 +561,55 @@ bucket_definitions:
526
561
  })
527
562
  ).map((e) => e.bucket);
528
563
  expect(foundLookups).toEqual([
529
- ScopedParameterLookup.direct({ lookupName: 'by_workspace', queryId: '1' }, []),
530
- ScopedParameterLookup.direct({ lookupName: 'by_workspace', queryId: '2' }, ['u1'])
564
+ ScopedParameterLookup.direct(parameterLookupScope('by_workspace', '1'), []),
565
+ ScopedParameterLookup.direct(parameterLookupScope('by_workspace', '2'), ['u1'])
531
566
  ]);
532
567
  parameter_sets.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
533
568
  expect(parameter_sets).toEqual([{ workspace_id: 'workspace1' }, { workspace_id: 'workspace3' }]);
534
569
 
535
570
  buckets.sort();
536
571
  expect(buckets).toEqual([
537
- bucketRequest(syncRules, 'by_workspace["workspace1"]'),
538
- bucketRequest(syncRules, 'by_workspace["workspace3"]')
572
+ bucketRequest(syncRules, 'by_workspace["workspace1"]').bucket,
573
+ bucketRequest(syncRules, 'by_workspace["workspace3"]').bucket
539
574
  ]);
540
575
  });
541
576
 
542
577
  test('truncate parameters', async () => {
543
578
  await using factory = await generateStorageFactory();
544
579
  const syncRules = await factory.updateSyncRules(
545
- updateSyncRulesFromYaml(`
580
+ updateSyncRulesFromYaml(
581
+ `
546
582
  bucket_definitions:
547
583
  mybucket:
548
584
  parameters:
549
585
  - SELECT group_id FROM test WHERE id1 = token_parameters.user_id OR id2 = token_parameters.user_id
550
586
  data: []
551
- `)
587
+ `,
588
+ {
589
+ storageVersion
590
+ }
591
+ )
552
592
  );
553
593
  const bucketStorage = factory.getInstance(syncRules);
554
594
 
555
- await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
556
- await batch.save({
557
- sourceTable: TEST_TABLE,
558
- tag: storage.SaveOperationTag.INSERT,
559
- after: {
560
- id: 't2',
561
- id1: 'user3',
562
- id2: 'user4',
563
- group_id: 'group2a'
564
- },
565
- afterReplicaId: test_utils.rid('t2')
566
- });
567
-
568
- await batch.truncate([TEST_TABLE]);
595
+ await using writer = await bucketStorage.createWriter(test_utils.BATCH_OPTIONS);
596
+ const testTable = await test_utils.resolveTestTable(writer, 'test', ['id'], config);
597
+ await writer.markAllSnapshotDone('1/1');
598
+ await writer.save({
599
+ sourceTable: testTable,
600
+ tag: storage.SaveOperationTag.INSERT,
601
+ after: {
602
+ id: 't2',
603
+ id1: 'user3',
604
+ id2: 'user4',
605
+ group_id: 'group2a'
606
+ },
607
+ afterReplicaId: test_utils.rid('t2')
569
608
  });
570
609
 
610
+ await writer.truncate([testTable]);
611
+ await writer.flush();
612
+
571
613
  const checkpoint = await bucketStorage.getCheckpoint();
572
614
 
573
615
  const parameters = await checkpoint.getParameterSets([ScopedParameterLookup.direct(MYBUCKET_1, ['user1'])]);
@@ -577,14 +619,19 @@ bucket_definitions:
577
619
  test('invalidate cached parsed sync rules', async () => {
578
620
  await using bucketStorageFactory = await generateStorageFactory();
579
621
  const syncRules = await bucketStorageFactory.updateSyncRules(
580
- updateSyncRulesFromYaml(`
622
+ updateSyncRulesFromYaml(
623
+ `
581
624
  bucket_definitions:
582
625
  by_workspace:
583
626
  parameters:
584
627
  - SELECT id as workspace_id FROM workspace WHERE
585
628
  workspace."userId" = token_parameters.user_id
586
629
  data: []
587
- `)
630
+ `,
631
+ {
632
+ storageVersion
633
+ }
634
+ )
588
635
  );
589
636
  const syncBucketStorage = bucketStorageFactory.getInstance(syncRules);
590
637
 
@@ -625,29 +672,24 @@ streams:
625
672
  );
626
673
  const bucketStorage = factory.getInstance(syncRules);
627
674
 
628
- await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
629
- await batch.save({
630
- sourceTable: TEST_TABLE,
631
- tag: storage.SaveOperationTag.INSERT,
632
- after: {
633
- baz: 'baz',
634
- bar: 'bar'
635
- },
636
- afterReplicaId: test_utils.rid('t1')
637
- });
638
-
639
- await batch.commit('1/1');
675
+ await using writer = await bucketStorage.createWriter(test_utils.BATCH_OPTIONS);
676
+ const testTable = await test_utils.resolveTestTable(writer, 'test', ['id'], config);
677
+ await writer.markAllSnapshotDone('1/1');
678
+ await writer.save({
679
+ sourceTable: testTable,
680
+ tag: storage.SaveOperationTag.INSERT,
681
+ after: {
682
+ baz: 'baz',
683
+ bar: 'bar'
684
+ },
685
+ afterReplicaId: test_utils.rid('t1')
640
686
  });
641
687
 
688
+ await writer.commit('1/1');
689
+
642
690
  const checkpoint = await bucketStorage.getCheckpoint();
643
691
  const parameters = await checkpoint.getParameterSets([
644
- ScopedParameterLookup.direct(
645
- {
646
- lookupName: 'lookup',
647
- queryId: '0'
648
- },
649
- ['baz']
650
- )
692
+ ScopedParameterLookup.direct(parameterLookupScope('lookup', '0'), ['baz'])
651
693
  ]);
652
694
  expect(parameters).toEqual([
653
695
  {