@powersync/service-core-tests 0.10.4 → 0.12.0

This diff shows the published contents of the two package versions as they appear in their public registries. It is provided for informational purposes only.
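Read together, the hunks below trace one API migration through the storage test suite: test sync rules are no longer parsed locally with test_utils.testRules(...) but deployed through the storage factory via updateSyncRules({ content }); replication batches must now be committed at an explicit LSN before their data becomes visible; and parameter lookups move off bucketStorage.getParameterSets(result.flushed_op, ...) onto a checkpoint object returned by bucketStorage.getCheckpoint(). A minimal TypeScript sketch of the new pattern, distilled from the hunks themselves (types are loosened to any, and the exact signatures live in @powersync/service-core, so treat this as illustrative rather than authoritative):

// Sketch only: generateStorageFactory and test_utils.* are the test fixtures
// from this package; signatures are simplified with `any`.
import { storage } from '@powersync/service-core';
import { ParameterLookup } from '@powersync/service-sync-rules';
import * as test_utils from '../test-utils/test-utils-index.js';

async function saveAndQueryParameters(generateStorageFactory: () => Promise<any>) {
  // `await using` compiles to the __addDisposableResource/__disposeResources
  // helpers visible throughout the compiled diff.
  await using factory = await generateStorageFactory();

  // 0.12.x: deploy sync rules through the factory instead of parsing them
  // locally with test_utils.testRules(...).
  const syncRules = await factory.updateSyncRules({
    content: `
bucket_definitions:
  mybucket:
    parameters:
      - SELECT group_id FROM test WHERE id1 = token_parameters.user_id
    data: []
`
  });
  const bucketStorage = factory.getInstance(syncRules);

  await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch: any) => {
    await batch.save({
      sourceTable: test_utils.makeTestTable('test', ['id']),
      tag: storage.SaveOperationTag.INSERT,
      after: { id: 't1', group_id: 'group1' },
      afterReplicaId: test_utils.rid('t1')
    });
    // 0.12.x: data becomes visible only once the batch is committed at an LSN;
    // 0.10.x tests instead read result.flushed_op from startBatch().
    await batch.commit('1/1');
  });

  // 0.10.x: bucketStorage.getParameterSets(result.flushed_op, lookups)
  // 0.12.x: resolve a checkpoint first, then query it directly.
  const checkpoint = await bucketStorage.getCheckpoint();
  return await checkpoint.getParameterSets([
    ParameterLookup.normalized('mybucket', '1', ['user1'])
  ]);
}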
@@ -51,7 +51,7 @@ var __disposeResources = (this && this.__disposeResources) || (function (Suppres
  return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
  });
  import { getUuidReplicaIdentityBson, storage } from '@powersync/service-core';
- import { ParameterLookup, RequestParameters } from '@powersync/service-sync-rules';
+ import { DateTimeValue, ParameterLookup, RequestParameters } from '@powersync/service-sync-rules';
  import { expect, test, describe } from 'vitest';
  import * as test_utils from '../test-utils/test-utils-index.js';
  export const TEST_TABLE = test_utils.makeTestTable('test', ['id']);
@@ -79,16 +79,18 @@ export function registerDataStorageTests(generateStorageFactory) {
  test('save and load parameters', async () => {
  const env_1 = { stack: [], error: void 0, hasError: false };
  try {
- const sync_rules = test_utils.testRules(`
+ const factory = __addDisposableResource(env_1, await generateStorageFactory(), true);
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  mybucket:
  parameters:
  - SELECT group_id FROM test WHERE id1 = token_parameters.user_id OR id2 = token_parameters.user_id
- data: []
- `);
- const factory = __addDisposableResource(env_1, await generateStorageFactory(), true);
- const bucketStorage = factory.getInstance(sync_rules);
- const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ data: []
+ `
+ });
+ const bucketStorage = factory.getInstance(syncRules);
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable: TEST_TABLE,
  tag: storage.SaveOperationTag.INSERT,
@@ -111,10 +113,10 @@ bucket_definitions:
  },
  afterReplicaId: test_utils.rid('t1')
  });
+ await batch.commit('1/1');
  });
- const parameters = await bucketStorage.getParameterSets(result.flushed_op, [
- ParameterLookup.normalized('mybucket', '1', ['user1'])
- ]);
+ const checkpoint = await bucketStorage.getCheckpoint();
+ const parameters = await checkpoint.getParameterSets([ParameterLookup.normalized('mybucket', '1', ['user1'])]);
  expect(parameters).toEqual([
  {
  group_id: 'group1a'
@@ -134,16 +136,18 @@ bucket_definitions:
  test('it should use the latest version', async () => {
  const env_2 = { stack: [], error: void 0, hasError: false };
  try {
- const sync_rules = test_utils.testRules(`
+ const factory = __addDisposableResource(env_2, await generateStorageFactory(), true);
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  mybucket:
  parameters:
  - SELECT group_id FROM test WHERE id = token_parameters.user_id
- data: []
- `);
- const factory = __addDisposableResource(env_2, await generateStorageFactory(), true);
- const bucketStorage = factory.getInstance(sync_rules);
- const result1 = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ data: []
+ `
+ });
+ const bucketStorage = factory.getInstance(syncRules);
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable: TEST_TABLE,
  tag: storage.SaveOperationTag.INSERT,
@@ -153,8 +157,10 @@ bucket_definitions:
  },
  afterReplicaId: test_utils.rid('user1')
  });
+ await batch.commit('1/1');
  });
- const result2 = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ const checkpoint1 = await bucketStorage.getCheckpoint();
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable: TEST_TABLE,
  tag: storage.SaveOperationTag.INSERT,
@@ -164,19 +170,17 @@ bucket_definitions:
  },
  afterReplicaId: test_utils.rid('user1')
  });
+ await batch.commit('1/2');
  });
- const parameters = await bucketStorage.getParameterSets(result2.flushed_op, [
- ParameterLookup.normalized('mybucket', '1', ['user1'])
- ]);
+ const checkpoint2 = await bucketStorage.getCheckpoint();
+ const parameters = await checkpoint2.getParameterSets([ParameterLookup.normalized('mybucket', '1', ['user1'])]);
  expect(parameters).toEqual([
  {
  group_id: 'group2'
  }
  ]);
  // Use the checkpoint to get older data if relevant
- const parameters2 = await bucketStorage.getParameterSets(result1.flushed_op, [
- ParameterLookup.normalized('mybucket', '1', ['user1'])
- ]);
+ const parameters2 = await checkpoint1.getParameterSets([ParameterLookup.normalized('mybucket', '1', ['user1'])]);
  expect(parameters2).toEqual([
  {
  group_id: 'group1'
@@ -196,17 +200,19 @@ bucket_definitions:
  test('it should use the latest version after updates', async () => {
  const env_3 = { stack: [], error: void 0, hasError: false };
  try {
- const sync_rules = test_utils.testRules(`
+ const factory = __addDisposableResource(env_3, await generateStorageFactory(), true);
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  mybucket:
  parameters:
  - SELECT id AS todo_id
  FROM todos
  WHERE list_id IN token_parameters.list_id
- data: []
- `);
- const factory = __addDisposableResource(env_3, await generateStorageFactory(), true);
- const bucketStorage = factory.getInstance(sync_rules);
+ data: []
+ `
+ });
+ const bucketStorage = factory.getInstance(syncRules);
  const table = test_utils.makeTestTable('todos', ['id', 'list_id']);
  await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  // Create two todos which initially belong to different lists
@@ -228,8 +234,9 @@ bucket_definitions:
  },
  afterReplicaId: test_utils.rid('todo2')
  });
+ await batch.commit('1/1');
  });
- const result2 = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  // Update the second todo item to now belong to list 1
  await batch.save({
  sourceTable: table,
@@ -240,11 +247,13 @@ bucket_definitions:
  },
  afterReplicaId: test_utils.rid('todo2')
  });
+ await batch.commit('1/1');
  });
  // We specifically request the todo_ids for both lists.
  // There removal operation for the association of `list2`::`todo2` should not interfere with the new
  // association of `list1`::`todo2`
- const parameters = await bucketStorage.getParameterSets(result2.flushed_op, [
+ const checkpoint = await bucketStorage.getCheckpoint();
+ const parameters = await checkpoint.getParameterSets([
  ParameterLookup.normalized('mybucket', '1', ['list1']),
  ParameterLookup.normalized('mybucket', '1', ['list2'])
  ]);
@@ -270,16 +279,18 @@ bucket_definitions:
  test('save and load parameters with different number types', async () => {
  const env_4 = { stack: [], error: void 0, hasError: false };
  try {
- const sync_rules = test_utils.testRules(`
+ const factory = __addDisposableResource(env_4, await generateStorageFactory(), true);
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  mybucket:
  parameters:
  - SELECT group_id FROM test WHERE n1 = token_parameters.n1 and f2 = token_parameters.f2 and f3 = token_parameters.f3
  data: []
- `);
- const factory = __addDisposableResource(env_4, await generateStorageFactory(), true);
- const bucketStorage = factory.getInstance(sync_rules);
- const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ `
+ });
+ const bucketStorage = factory.getInstance(syncRules);
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable: TEST_TABLE,
  tag: storage.SaveOperationTag.INSERT,
@@ -292,18 +303,19 @@ bucket_definitions:
  },
  afterReplicaId: test_utils.rid('t1')
  });
+ await batch.commit('1/1');
  });
  const TEST_PARAMS = { group_id: 'group1' };
- const checkpoint = result.flushed_op;
- const parameters1 = await bucketStorage.getParameterSets(checkpoint, [
+ const checkpoint = await bucketStorage.getCheckpoint();
+ const parameters1 = await checkpoint.getParameterSets([
  ParameterLookup.normalized('mybucket', '1', [314n, 314, 3.14])
  ]);
  expect(parameters1).toEqual([TEST_PARAMS]);
- const parameters2 = await bucketStorage.getParameterSets(checkpoint, [
+ const parameters2 = await checkpoint.getParameterSets([
  ParameterLookup.normalized('mybucket', '1', [314, 314n, 3.14])
  ]);
  expect(parameters2).toEqual([TEST_PARAMS]);
- const parameters3 = await bucketStorage.getParameterSets(checkpoint, [
+ const parameters3 = await checkpoint.getParameterSets([
  ParameterLookup.normalized('mybucket', '1', [314n, 314, 3])
  ]);
  expect(parameters3).toEqual([]);
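A side note on the number-types hunk just above: the assertions imply that lookup values are normalized before comparison, so a bigint 314n and a double 314 address the same parameter sets, while 3 does not match a stored 3.14. A hedged reading of that behaviour, using only calls that appear in the diff:

// Inferred from the expectations above, not from documentation: an integral
// bigint and an integral double normalize to the same lookup key.
const sameKeyA = ParameterLookup.normalized('mybucket', '1', [314n, 314, 3.14]);
const sameKeyB = ParameterLookup.normalized('mybucket', '1', [314, 314n, 3.14]);
// Both resolve [{ group_id: 'group1' }] against the saved row, whereas
// substituting 3 for 3.14 resolves [] because the values differ numerically.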
@@ -324,16 +336,18 @@ bucket_definitions:
  // This ensures serialization / deserialization of "current_data" is done correctly.
  // This specific case tested here cannot happen with postgres in practice, but we still
  // test this to ensure correct deserialization.
- const sync_rules = test_utils.testRules(`
+ const factory = __addDisposableResource(env_5, await generateStorageFactory(), true);
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  mybucket:
  parameters:
  - SELECT group_id FROM test WHERE n1 = token_parameters.n1
  data: []
- `);
- const factory = __addDisposableResource(env_5, await generateStorageFactory(), true);
- const bucketStorage = factory.getInstance(sync_rules);
- const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ `
+ });
+ const bucketStorage = factory.getInstance(syncRules);
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable: TEST_TABLE,
  tag: storage.SaveOperationTag.INSERT,
@@ -356,10 +370,11 @@ bucket_definitions:
  },
  afterReplicaId: test_utils.rid('t1')
  });
+ await batch.commit('1/1');
  });
  const TEST_PARAMS = { group_id: 'group1' };
- const checkpoint = result.flushed_op;
- const parameters1 = await bucketStorage.getParameterSets(checkpoint, [
+ const checkpoint = await bucketStorage.getCheckpoint();
+ const parameters1 = await checkpoint.getParameterSets([
  ParameterLookup.normalized('mybucket', '1', [1152921504606846976n])
  ]);
  expect(parameters1).toEqual([TEST_PARAMS]);
@@ -377,15 +392,17 @@ bucket_definitions:
  test('removing row', async () => {
  const env_6 = { stack: [], error: void 0, hasError: false };
  try {
- const sync_rules = test_utils.testRules(`
+ const factory = __addDisposableResource(env_6, await generateStorageFactory(), true);
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  global:
  data:
  - SELECT id, description FROM "%"
- `);
- const factory = __addDisposableResource(env_6, await generateStorageFactory(), true);
- const bucketStorage = factory.getInstance(sync_rules);
- const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ `
+ });
+ const bucketStorage = factory.getInstance(syncRules);
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;
  await batch.save({
  sourceTable,
@@ -401,8 +418,9 @@ bucket_definitions:
  tag: storage.SaveOperationTag.DELETE,
  beforeReplicaId: test_utils.rid('test1')
  });
+ await batch.commit('1/1');
  });
- const checkpoint = result.flushed_op;
+ const { checkpoint } = await bucketStorage.getCheckpoint();
  const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]])));
  const data = batch[0].chunkData.data.map((d) => {
  return {
@@ -440,18 +458,20 @@ bucket_definitions:
  const env_7 = { stack: [], error: void 0, hasError: false };
  try {
  const WORKSPACE_TABLE = test_utils.makeTestTable('workspace', ['id']);
- const sync_rules_content = test_utils.testRules(`
+ const factory = __addDisposableResource(env_7, await generateStorageFactory(), true);
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  by_workspace:
  parameters:
  - SELECT id as workspace_id FROM workspace WHERE
  workspace."userId" = token_parameters.user_id
  data: []
- `);
- const sync_rules = sync_rules_content.parsed(test_utils.PARSE_OPTIONS).sync_rules;
- const factory = __addDisposableResource(env_7, await generateStorageFactory(), true);
- const bucketStorage = factory.getInstance(sync_rules_content);
- const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ `
+ });
+ const sync_rules = syncRules.parsed(test_utils.PARSE_OPTIONS).sync_rules;
+ const bucketStorage = factory.getInstance(syncRules);
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable: WORKSPACE_TABLE,
  tag: storage.SaveOperationTag.INSERT,
@@ -461,20 +481,25 @@ bucket_definitions:
  },
  afterReplicaId: test_utils.rid('workspace1')
  });
+ await batch.commit('1/1');
  });
- const checkpoint = result.flushed_op;
+ const checkpoint = await bucketStorage.getCheckpoint();
  const parameters = new RequestParameters({ sub: 'u1' }, {});
- const q1 = sync_rules.bucketDescriptors[0].parameterQueries[0];
+ const q1 = sync_rules.bucketSources[0].parameterQueries[0];
  const lookups = q1.getLookups(parameters);
  expect(lookups).toEqual([ParameterLookup.normalized('by_workspace', '1', ['u1'])]);
- const parameter_sets = await bucketStorage.getParameterSets(checkpoint, lookups);
+ const parameter_sets = await checkpoint.getParameterSets(lookups);
  expect(parameter_sets).toEqual([{ workspace_id: 'workspace1' }]);
- const buckets = await sync_rules.getBucketParameterQuerier(parameters).queryDynamicBucketDescriptions({
+ const buckets = await sync_rules
+ .getBucketParameterQuerier(test_utils.querierOptions(parameters))
+ .querier.queryDynamicBucketDescriptions({
  getParameterSets(lookups) {
- return bucketStorage.getParameterSets(checkpoint, lookups);
+ return checkpoint.getParameterSets(lookups);
  }
  });
- expect(buckets).toEqual([{ bucket: 'by_workspace["workspace1"]', priority: 3 }]);
+ expect(buckets).toEqual([
+ { bucket: 'by_workspace["workspace1"]', priority: 3, definition: 'by_workspace', inclusion_reasons: ['default'] }
+ ]);
  }
  catch (e_7) {
  env_7.error = e_7;
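The workspace hunk above also captures the 0.12.0 querier changes: sync_rules.bucketDescriptors becomes sync_rules.bucketSources, getBucketParameterQuerier now takes an options object (built here with test_utils.querierOptions(parameters)) and returns a wrapper whose querier property carries the query methods, and each resulting bucket description gains definition and inclusion_reasons fields. A loose sketch under those assumptions (sync_rules, checkpoint and parameters stand for the variables in the surrounding test; types are elided):

// Hypothetical helper mirroring the assertions above.
async function resolveDynamicBuckets(sync_rules: any, checkpoint: any, parameters: any) {
  // 0.10.x addressed the first parameter query as
  // sync_rules.bucketDescriptors[0].parameterQueries[0].
  const q1 = sync_rules.bucketSources[0].parameterQueries[0];
  const lookups = q1.getLookups(parameters);
  const parameterSets = await checkpoint.getParameterSets(lookups);

  // The querier is now wrapped: options in, { querier } out.
  const { querier } = sync_rules.getBucketParameterQuerier(test_utils.querierOptions(parameters));
  const buckets = await querier.queryDynamicBucketDescriptions({
    // Parameter data is read from the checkpoint, no longer from bucketStorage.
    getParameterSets: (lookups: any) => checkpoint.getParameterSets(lookups)
  });
  // Each description now looks like:
  // { bucket: 'by_workspace["workspace1"]', priority: 3,
  //   definition: 'by_workspace', inclusion_reasons: ['default'] }
  return { parameterSets, buckets };
}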
@@ -490,18 +515,20 @@ bucket_definitions:
  const env_8 = { stack: [], error: void 0, hasError: false };
  try {
  const WORKSPACE_TABLE = test_utils.makeTestTable('workspace');
- const sync_rules_content = test_utils.testRules(`
+ const factory = __addDisposableResource(env_8, await generateStorageFactory(), true);
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  by_public_workspace:
  parameters:
  - SELECT id as workspace_id FROM workspace WHERE
  workspace.visibility = 'public'
  data: []
- `);
- const sync_rules = sync_rules_content.parsed(test_utils.PARSE_OPTIONS).sync_rules;
- const factory = __addDisposableResource(env_8, await generateStorageFactory(), true);
- const bucketStorage = factory.getInstance(sync_rules_content);
- const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ `
+ });
+ const sync_rules = syncRules.parsed(test_utils.PARSE_OPTIONS).sync_rules;
+ const bucketStorage = factory.getInstance(syncRules);
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable: WORKSPACE_TABLE,
  tag: storage.SaveOperationTag.INSERT,
@@ -529,24 +556,37 @@ bucket_definitions:
  },
  afterReplicaId: test_utils.rid('workspace3')
  });
+ await batch.commit('1/1');
  });
- const checkpoint = result.flushed_op;
+ const checkpoint = await bucketStorage.getCheckpoint();
  const parameters = new RequestParameters({ sub: 'unknown' }, {});
- const q1 = sync_rules.bucketDescriptors[0].parameterQueries[0];
+ const q1 = sync_rules.bucketSources[0].parameterQueries[0];
  const lookups = q1.getLookups(parameters);
  expect(lookups).toEqual([ParameterLookup.normalized('by_public_workspace', '1', [])]);
- const parameter_sets = await bucketStorage.getParameterSets(checkpoint, lookups);
+ const parameter_sets = await checkpoint.getParameterSets(lookups);
  parameter_sets.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
  expect(parameter_sets).toEqual([{ workspace_id: 'workspace1' }, { workspace_id: 'workspace3' }]);
- const buckets = await sync_rules.getBucketParameterQuerier(parameters).queryDynamicBucketDescriptions({
+ const buckets = await sync_rules
+ .getBucketParameterQuerier(test_utils.querierOptions(parameters))
+ .querier.queryDynamicBucketDescriptions({
  getParameterSets(lookups) {
- return bucketStorage.getParameterSets(checkpoint, lookups);
+ return checkpoint.getParameterSets(lookups);
  }
  });
  buckets.sort((a, b) => a.bucket.localeCompare(b.bucket));
  expect(buckets).toEqual([
- { bucket: 'by_public_workspace["workspace1"]', priority: 3 },
- { bucket: 'by_public_workspace["workspace3"]', priority: 3 }
+ {
+ bucket: 'by_public_workspace["workspace1"]',
+ priority: 3,
+ definition: 'by_public_workspace',
+ inclusion_reasons: ['default']
+ },
+ {
+ bucket: 'by_public_workspace["workspace3"]',
+ priority: 3,
+ definition: 'by_public_workspace',
+ inclusion_reasons: ['default']
+ }
  ]);
  }
  catch (e_8) {
@@ -563,7 +603,9 @@ bucket_definitions:
  const env_9 = { stack: [], error: void 0, hasError: false };
  try {
  const WORKSPACE_TABLE = test_utils.makeTestTable('workspace');
- const sync_rules_content = test_utils.testRules(`
+ const factory = __addDisposableResource(env_9, await generateStorageFactory(), true);
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  by_workspace:
  parameters:
@@ -572,11 +614,11 @@ bucket_definitions:
  - SELECT id as workspace_id FROM workspace WHERE
  workspace.user_id = token_parameters.user_id
  data: []
- `);
- const sync_rules = sync_rules_content.parsed(test_utils.PARSE_OPTIONS).sync_rules;
- const factory = __addDisposableResource(env_9, await generateStorageFactory(), true);
- const bucketStorage = factory.getInstance(sync_rules_content);
- const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ `
+ });
+ const sync_rules = syncRules.parsed(test_utils.PARSE_OPTIONS).sync_rules;
+ const bucketStorage = factory.getInstance(syncRules);
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable: WORKSPACE_TABLE,
  tag: storage.SaveOperationTag.INSERT,
@@ -615,26 +657,29 @@ bucket_definitions:
  },
  afterReplicaId: test_utils.rid('workspace4')
  });
+ await batch.commit('1/1');
  });
- const checkpoint = result.flushed_op;
+ const checkpoint = await bucketStorage.getCheckpoint();
  const parameters = new RequestParameters({ sub: 'u1' }, {});
  // Test intermediate values - could be moved to sync_rules.test.ts
- const q1 = sync_rules.bucketDescriptors[0].parameterQueries[0];
+ const q1 = sync_rules.bucketSources[0].parameterQueries[0];
  const lookups1 = q1.getLookups(parameters);
  expect(lookups1).toEqual([ParameterLookup.normalized('by_workspace', '1', [])]);
- const parameter_sets1 = await bucketStorage.getParameterSets(checkpoint, lookups1);
+ const parameter_sets1 = await checkpoint.getParameterSets(lookups1);
  parameter_sets1.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
  expect(parameter_sets1).toEqual([{ workspace_id: 'workspace1' }]);
- const q2 = sync_rules.bucketDescriptors[0].parameterQueries[1];
+ const q2 = sync_rules.bucketSources[0].parameterQueries[1];
  const lookups2 = q2.getLookups(parameters);
  expect(lookups2).toEqual([ParameterLookup.normalized('by_workspace', '2', ['u1'])]);
- const parameter_sets2 = await bucketStorage.getParameterSets(checkpoint, lookups2);
+ const parameter_sets2 = await checkpoint.getParameterSets(lookups2);
  parameter_sets2.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
  expect(parameter_sets2).toEqual([{ workspace_id: 'workspace3' }]);
  // Test final values - the important part
- const buckets = (await sync_rules.getBucketParameterQuerier(parameters).queryDynamicBucketDescriptions({
+ const buckets = (await sync_rules
+ .getBucketParameterQuerier(test_utils.querierOptions(parameters))
+ .querier.queryDynamicBucketDescriptions({
  getParameterSets(lookups) {
- return bucketStorage.getParameterSets(checkpoint, lookups);
+ return checkpoint.getParameterSets(lookups);
  }
  })).map((e) => e.bucket);
  buckets.sort();
@@ -653,16 +698,18 @@ bucket_definitions:
  test('changing client ids', async () => {
  const env_10 = { stack: [], error: void 0, hasError: false };
  try {
- const sync_rules = test_utils.testRules(`
+ const factory = __addDisposableResource(env_10, await generateStorageFactory(), true);
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  global:
  data:
  - SELECT client_id as id, description FROM "%"
- `);
- const factory = __addDisposableResource(env_10, await generateStorageFactory(), true);
- const bucketStorage = factory.getInstance(sync_rules);
+ `
+ });
+ const bucketStorage = factory.getInstance(syncRules);
  const sourceTable = TEST_TABLE;
- const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable,
  tag: storage.SaveOperationTag.INSERT,
@@ -693,8 +740,9 @@ bucket_definitions:
  },
  afterReplicaId: test_utils.rid('test2')
  });
+ await batch.commit('1/1');
  });
- const checkpoint = result.flushed_op;
+ const { checkpoint } = await bucketStorage.getCheckpoint();
  const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]])));
  const data = batch[0].chunkData.data.map((d) => {
  return {
@@ -722,14 +770,16 @@ bucket_definitions:
  test('re-apply delete', async () => {
  const env_11 = { stack: [], error: void 0, hasError: false };
  try {
- const sync_rules = test_utils.testRules(`
+ const factory = __addDisposableResource(env_11, await generateStorageFactory(), true);
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  global:
  data:
  - SELECT id, description FROM "%"
- `);
- const factory = __addDisposableResource(env_11, await generateStorageFactory(), true);
- const bucketStorage = factory.getInstance(sync_rules);
+ `
+ });
+ const bucketStorage = factory.getInstance(syncRules);
  await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;
  await batch.save({
@@ -749,8 +799,9 @@ bucket_definitions:
  tag: storage.SaveOperationTag.DELETE,
  beforeReplicaId: test_utils.rid('test1')
  });
+ await batch.commit('1/1');
  });
- const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;
  await batch.save({
  sourceTable,
@@ -758,7 +809,7 @@ bucket_definitions:
  beforeReplicaId: test_utils.rid('test1')
  });
  });
- const checkpoint = result.flushed_op;
+ const { checkpoint } = await bucketStorage.getCheckpoint();
  const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]])));
  const data = batch[0].chunkData.data.map((d) => {
  return {
@@ -795,14 +846,16 @@ bucket_definitions:
  test('re-apply update + delete', async () => {
  const env_12 = { stack: [], error: void 0, hasError: false };
  try {
- const sync_rules = test_utils.testRules(`
+ const factory = __addDisposableResource(env_12, await generateStorageFactory(), true);
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  global:
  data:
  - SELECT id, description FROM "%"
- `);
- const factory = __addDisposableResource(env_12, await generateStorageFactory(), true);
- const bucketStorage = factory.getInstance(sync_rules);
+ `
+ });
+ const bucketStorage = factory.getInstance(syncRules);
  await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;
  await batch.save({
@@ -840,8 +893,9 @@ bucket_definitions:
  tag: storage.SaveOperationTag.DELETE,
  beforeReplicaId: test_utils.rid('test1')
  });
+ await batch.commit('1/1');
  });
- const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;
  await batch.save({
  sourceTable,
@@ -866,8 +920,9 @@ bucket_definitions:
  tag: storage.SaveOperationTag.DELETE,
  beforeReplicaId: test_utils.rid('test1')
  });
+ await batch.commit('2/1');
  });
- const checkpoint = result.flushed_op;
+ const { checkpoint } = await bucketStorage.getCheckpoint();
  const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]])));
  const data = batch[0].chunkData.data.map((d) => {
  return {
@@ -906,15 +961,17 @@ bucket_definitions:
  test('truncate parameters', async () => {
  const env_13 = { stack: [], error: void 0, hasError: false };
  try {
- const sync_rules = test_utils.testRules(`
+ const factory = __addDisposableResource(env_13, await generateStorageFactory(), true);
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  mybucket:
  parameters:
  - SELECT group_id FROM test WHERE id1 = token_parameters.user_id OR id2 = token_parameters.user_id
  data: []
- `);
- const factory = __addDisposableResource(env_13, await generateStorageFactory(), true);
- const bucketStorage = factory.getInstance(sync_rules);
+ `
+ });
+ const bucketStorage = factory.getInstance(syncRules);
  await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable: TEST_TABLE,
@@ -929,10 +986,8 @@ bucket_definitions:
  });
  await batch.truncate([TEST_TABLE]);
  });
- const { checkpoint } = await bucketStorage.getCheckpoint();
- const parameters = await bucketStorage.getParameterSets(checkpoint, [
- ParameterLookup.normalized('mybucket', '1', ['user1'])
- ]);
+ const checkpoint = await bucketStorage.getCheckpoint();
+ const parameters = await checkpoint.getParameterSets([ParameterLookup.normalized('mybucket', '1', ['user1'])]);
  expect(parameters).toEqual([]);
  }
  catch (e_13) {
@@ -955,14 +1010,16 @@ bucket_definitions:
  // It can break at two places:
  // 1. Not getting the correct "current_data" state for each operation.
  // 2. Output order not being correct.
- const sync_rules = test_utils.testRules(`
+ const factory = __addDisposableResource(env_14, await generateStorageFactory(), true);
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  global:
  data:
  - SELECT id, description FROM "test"
- `);
- const factory = __addDisposableResource(env_14, await generateStorageFactory(), true);
- const bucketStorage = factory.getInstance(sync_rules);
+ `
+ });
+ const bucketStorage = factory.getInstance(syncRules);
  // Pre-setup
  const result1 = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;
@@ -1097,12 +1154,6 @@ bucket_definitions:
  test('changed data with replica identity full', async () => {
  const env_15 = { stack: [], error: void 0, hasError: false };
  try {
- const sync_rules = test_utils.testRules(`
- bucket_definitions:
- global:
- data:
- - SELECT id, description FROM "test"
- `);
  function rid2(id, description) {
  return getUuidReplicaIdentityBson({ id, description }, [
  { name: 'id', type: 'VARCHAR', typeId: 25 },
@@ -1110,7 +1161,15 @@ bucket_definitions:
  ]);
  }
  const factory = __addDisposableResource(env_15, await generateStorageFactory(), true);
- const bucketStorage = factory.getInstance(sync_rules);
+ const syncRules = await factory.updateSyncRules({
+ content: `
+ bucket_definitions:
+ global:
+ data:
+ - SELECT id, description FROM "test"
+ `
+ });
+ const bucketStorage = factory.getInstance(syncRules);
  const sourceTable = test_utils.makeTestTable('test', ['id', 'description']);
  // Pre-setup
  const result1 = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
@@ -1203,12 +1262,6 @@ bucket_definitions:
  test('unchanged data with replica identity full', async () => {
  const env_16 = { stack: [], error: void 0, hasError: false };
  try {
- const sync_rules = test_utils.testRules(`
- bucket_definitions:
- global:
- data:
- - SELECT id, description FROM "test"
- `);
  function rid2(id, description) {
  return getUuidReplicaIdentityBson({ id, description }, [
  { name: 'id', type: 'VARCHAR', typeId: 25 },
@@ -1216,7 +1269,15 @@ bucket_definitions:
  ]);
  }
  const factory = __addDisposableResource(env_16, await generateStorageFactory(), true);
- const bucketStorage = factory.getInstance(sync_rules);
+ const syncRules = await factory.updateSyncRules({
+ content: `
+ bucket_definitions:
+ global:
+ data:
+ - SELECT id, description FROM "test"
+ `
+ });
+ const bucketStorage = factory.getInstance(syncRules);
  const sourceTable = test_utils.makeTestTable('test', ['id', 'description']);
  // Pre-setup
  const result1 = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
@@ -1306,15 +1367,17 @@ bucket_definitions:
  // but large enough in size to be split over multiple returned batches.
  // The specific batch splits is an implementation detail of the storage driver,
  // and the test will have to updated when other implementations are added.
- const sync_rules = test_utils.testRules(`
+ const factory = __addDisposableResource(env_17, await generateStorageFactory(), true);
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  global:
  data:
  - SELECT id, description FROM "%"
- `);
- const factory = __addDisposableResource(env_17, await generateStorageFactory(), true);
- const bucketStorage = factory.getInstance(sync_rules);
- const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ `
+ });
+ const bucketStorage = factory.getInstance(syncRules);
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;
  const largeDescription = '0123456789'.repeat(12_000_00);
  await batch.save({
@@ -1354,8 +1417,9 @@ bucket_definitions:
  },
  afterReplicaId: test_utils.rid('test3')
  });
+ await batch.commit('1/1');
  });
- const checkpoint = result.flushed_op;
+ const { checkpoint } = await bucketStorage.getCheckpoint();
  const options = {
  chunkLimitBytes: 16 * 1024 * 1024
  };
@@ -1397,15 +1461,17 @@ bucket_definitions:
  const env_18 = { stack: [], error: void 0, hasError: false };
  try {
  // Test syncing a batch of data that is limited by count.
- const sync_rules = test_utils.testRules(`
+ const factory = __addDisposableResource(env_18, await generateStorageFactory(), true);
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  global:
  data:
  - SELECT id, description FROM "%"
- `);
- const factory = __addDisposableResource(env_18, await generateStorageFactory(), true);
- const bucketStorage = factory.getInstance(sync_rules);
- const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ `
+ });
+ const bucketStorage = factory.getInstance(syncRules);
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;
  for (let i = 1; i <= 6; i++) {
  await batch.save({
@@ -1418,8 +1484,9 @@ bucket_definitions:
  afterReplicaId: `test${i}`
  });
  }
+ await batch.commit('1/1');
  });
- const checkpoint = result.flushed_op;
+ const { checkpoint } = await bucketStorage.getCheckpoint();
  const batch1 = await test_utils.oneFromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]]), { limit: 4 }));
  expect(test_utils.getBatchData(batch1)).toEqual([
  { op_id: '1', op: 'PUT', object_id: 'test1', checksum: 2871785649 },
@@ -1464,7 +1531,9 @@ bucket_definitions:
  const setup = async (options) => {
  const env_19 = { stack: [], error: void 0, hasError: false };
  try {
- const sync_rules = test_utils.testRules(`
+ const factory = __addDisposableResource(env_19, await generateStorageFactory(), true);
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  global1:
  data:
@@ -1472,10 +1541,10 @@ bucket_definitions:
  global2:
  data:
  - SELECT id, description FROM test WHERE bucket = 'global2'
- `);
- const factory = __addDisposableResource(env_19, await generateStorageFactory(), true);
- const bucketStorage = factory.getInstance(sync_rules);
- const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ `
+ });
+ const bucketStorage = factory.getInstance(syncRules);
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;
  for (let i = 1; i <= 10; i++) {
  await batch.save({
@@ -1489,8 +1558,9 @@ bucket_definitions:
  afterReplicaId: `test${i}`
  });
  }
+ await batch.commit('1/1');
  });
- const checkpoint = result.flushed_op;
+ const { checkpoint } = await bucketStorage.getCheckpoint();
  return await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([
  ['global1[]', 0n],
  ['global2[]', 0n]
@@ -1606,7 +1676,9 @@ bucket_definitions:
  });
  const r = await f.configureSyncRules({ content: 'bucket_definitions: {}', validate: false });
  const storage = f.getInstance(r.persisted_sync_rules);
- await storage.autoActivate();
+ await storage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ await batch.keepalive('1/0');
+ });
  const metrics2 = await f.getStorageMetrics();
  expect(metrics2).toMatchSnapshot();
  }
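This metrics hunk shows the only direct replacement for the removed storage.autoActivate() call: an empty batch that emits a keepalive at an LSN, which, reading the diff, activates the sync-rules instance as a side effect. In the hunks that follow, the remaining autoActivate() calls are dropped without a substitute, suggesting activation is no longer the caller's responsibility there. The assumed pattern, restated with comments:

// Assumed activation pattern, inferred from the hunk above only.
const bucketStorage = f.getInstance(r.persisted_sync_rules);
await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch: any) => {
  // No row operations: the keepalive just records LSN progress, which stands
  // in for the removed autoActivate() call.
  await batch.keepalive('1/0');
});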
@@ -1623,16 +1695,18 @@ bucket_definitions:
  test('invalidate cached parsed sync rules', async () => {
  const env_21 = { stack: [], error: void 0, hasError: false };
  try {
- const sync_rules_content = test_utils.testRules(`
+ const bucketStorageFactory = __addDisposableResource(env_21, await generateStorageFactory(), true);
+ const syncRules = await bucketStorageFactory.updateSyncRules({
+ content: `
  bucket_definitions:
  by_workspace:
  parameters:
  - SELECT id as workspace_id FROM workspace WHERE
  workspace."userId" = token_parameters.user_id
  data: []
- `);
- const bucketStorageFactory = __addDisposableResource(env_21, await generateStorageFactory(), true);
- const syncBucketStorage = bucketStorageFactory.getInstance(sync_rules_content);
+ `
+ });
+ const syncBucketStorage = bucketStorageFactory.getInstance(syncRules);
  const parsedSchema1 = syncBucketStorage.getParsedSyncRules({
  defaultSchema: 'public'
  });
@@ -1667,12 +1741,11 @@ bucket_definitions:
  content: `
  bucket_definitions:
  mybucket:
- data: []
+ data: []
  `,
  validate: false
  });
  const bucketStorage = factory.getInstance(r.persisted_sync_rules);
- await bucketStorage.autoActivate();
  const abortController = new AbortController();
  context.onTestFinished(() => abortController.abort());
  const iter = bucketStorage
@@ -1719,7 +1792,6 @@ bucket_definitions:
  validate: false
  });
  const bucketStorage = factory.getInstance(r.persisted_sync_rules);
- await bucketStorage.autoActivate();
  const abortController = new AbortController();
  context.onTestFinished(() => abortController.abort());
  const iter = bucketStorage
@@ -1787,7 +1859,6 @@ bucket_definitions:
  validate: false
  });
  const bucketStorage = factory.getInstance(r.persisted_sync_rules);
- await bucketStorage.autoActivate();
  bucketStorage.setWriteCheckpointMode(storage.WriteCheckpointMode.CUSTOM);
  const abortController = new AbortController();
  context.onTestFinished(() => abortController.abort());
@@ -1835,7 +1906,6 @@ bucket_definitions:
  validate: false
  });
  const bucketStorage = factory.getInstance(r.persisted_sync_rules);
- await bucketStorage.autoActivate();
  bucketStorage.setWriteCheckpointMode(storage.WriteCheckpointMode.CUSTOM);
  const abortController = new AbortController();
  context.onTestFinished(() => abortController.abort());
@@ -1885,7 +1955,6 @@ bucket_definitions:
  validate: false
  });
  const bucketStorage = factory.getInstance(r.persisted_sync_rules);
- await bucketStorage.autoActivate();
  bucketStorage.setWriteCheckpointMode(storage.WriteCheckpointMode.CUSTOM);
  const abortController = new AbortController();
  context.onTestFinished(() => abortController.abort());
@@ -1959,15 +2028,17 @@ bucket_definitions:
  // Test syncing a batch of data that is small in count,
  // but large enough in size to be split over multiple returned chunks.
  // Similar to the above test, but splits over 1MB chunks.
- const sync_rules = test_utils.testRules(`
+ const factory = __addDisposableResource(env_27, await generateStorageFactory(), true);
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  global:
  data:
  - SELECT id FROM test
  - SELECT id FROM test_ignore WHERE false
- `);
- const factory = __addDisposableResource(env_27, await generateStorageFactory(), true);
- const bucketStorage = factory.getInstance(sync_rules);
+ `
+ });
+ const bucketStorage = factory.getInstance(syncRules);
  const sourceTable = test_utils.makeTestTable('test', ['id']);
  const sourceTableIgnore = test_utils.makeTestTable('test_ignore', ['id']);
  const result1 = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
@@ -2007,5 +2078,75 @@ bucket_definitions:
  await result_27;
  }
  });
+ test('data with custom types', async () => {
+ const env_28 = { stack: [], error: void 0, hasError: false };
+ try {
+ const factory = __addDisposableResource(env_28, await generateStorageFactory(), true);
+ const testValue = {
+ sourceTable: TEST_TABLE,
+ tag: storage.SaveOperationTag.INSERT,
+ after: {
+ id: 't1',
+ description: new DateTimeValue('2025-08-28T11:30:00')
+ },
+ afterReplicaId: test_utils.rid('t1')
+ };
+ {
+ // First, deploy old sync rules and row with date time value
+ const syncRules = await factory.updateSyncRules({
+ content: `
+ bucket_definitions:
+ global:
+ data:
+ - SELECT id, description FROM test
+ `
+ });
+ const bucketStorage = factory.getInstance(syncRules);
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ await batch.save(testValue);
+ await batch.commit('1/1');
+ });
+ const { checkpoint } = await bucketStorage.getCheckpoint();
+ const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]])));
+ expect(batch[0].chunkData.data).toMatchObject([
+ {
+ data: '{"id":"t1","description":"2025-08-28 11:30:00"}'
+ }
+ ]);
+ }
+ const syncRules = await factory.updateSyncRules({
+ content: `
+ bucket_definitions:
+ global:
+ data:
+ - SELECT id, description FROM test
+
+ config:
+ edition: 2
+ `
+ });
+ const bucketStorage = factory.getInstance(syncRules);
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ await batch.save(testValue);
+ await batch.commit('1/2');
+ });
+ const { checkpoint } = await bucketStorage.getCheckpoint();
+ const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['2#global[]', 0n]])));
+ expect(batch[0].chunkData.data).toMatchObject([
+ {
+ data: '{"id":"t1","description":"2025-08-28T11:30:00"}'
+ }
+ ]);
+ }
+ catch (e_28) {
+ env_28.error = e_28;
+ env_28.hasError = true;
+ }
+ finally {
+ const result_28 = __disposeResources(env_28);
+ if (result_28)
+ await result_28;
+ }
+ });
  }
  //# sourceMappingURL=register-data-storage-tests.js.map
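Finally, the new 'data with custom types' test added at the end ties together the DateTimeValue import from the first hunk and a new config: edition: 2 sync-rules option. Reading its assertions: under the default edition, a DateTimeValue('2025-08-28T11:30:00') round-trips through bucket storage as the legacy string '2025-08-28 11:30:00' and the bucket is addressed as 'global[]'; under edition 2, the ISO form '2025-08-28T11:30:00' is preserved and the bucket name is prefixed with the sync-rules version, '2#global[]'. A compact sketch of the read side (the prefix and serialization behaviour are inferred from the expected values, not from documentation):

// Sketch: read back the description under a given bucket name. bucketStorage
// and test_utils are the fixtures used throughout the diff; types are elided.
async function readDescription(bucketStorage: any, bucketName: string): Promise<string> {
  const { checkpoint } = await bucketStorage.getCheckpoint();
  const batch = await test_utils.fromAsync(
    bucketStorage.getBucketDataBatch(checkpoint, new Map([[bucketName, 0n]]))
  );
  // Each entry's `data` field is a JSON string of the synced row.
  return JSON.parse(batch[0].chunkData.data[0].data).description;
}

// edition 1 (default): await readDescription(storageV1, 'global[]')   === '2025-08-28 11:30:00'
// edition 2:           await readDescription(storageV2, '2#global[]') === '2025-08-28T11:30:00'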