@powersync/service-core-tests 0.10.4 → 0.12.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
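Nearly every hunk below applies the same migration: the test suite drops the synchronous `test_utils.testRules(...)` helper in favor of deploying rules through `factory.updateSyncRules({ content })`, makes commits explicit with `batch.commit(lsn)`, and resolves parameter lookups through a checkpoint object instead of passing a `flushed_op` value around; the `storage.autoActivate()` calls are removed outright, with one test switching to an explicit `batch.keepalive('1/0')`. A minimal before/after sketch of the recurring pattern (the sync-rules YAML, `lookups`, and the '1/1' LSN are illustrative placeholders, not part of the diff):

// 0.10.4: rules parsed locally; the checkpoint is the batch result's flushed_op
const sync_rules = test_utils.testRules(`...`);
const bucketStorage = factory.getInstance(sync_rules);
const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  // save rows...
});
const parameters = await bucketStorage.getParameterSets(result!.flushed_op, lookups);

// 0.12.0: rules deployed via the factory; commits are explicit; lookups go through the checkpoint
const syncRules = await factory.updateSyncRules({ content: `...` });
const bucketStorage = factory.getInstance(syncRules);
await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  // save rows...
  await batch.commit('1/1');
});
const checkpoint = await bucketStorage.getCheckpoint();
const parameters = await checkpoint.getParameterSets(lookups);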
@@ -3,11 +3,13 @@ import {
  getUuidReplicaIdentityBson,
  InternalOpId,
  OplogEntry,
+ SaveOptions,
  storage
  } from '@powersync/service-core';
- import { ParameterLookup, RequestParameters } from '@powersync/service-sync-rules';
+ import { DateTimeValue, ParameterLookup, RequestParameters } from '@powersync/service-sync-rules';
  import { expect, test, describe, beforeEach } from 'vitest';
  import * as test_utils from '../test-utils/test-utils-index.js';
+ import { SqlBucketDescriptor } from '@powersync/service-sync-rules/src/SqlBucketDescriptor.js';
 
  export const TEST_TABLE = test_utils.makeTestTable('test', ['id']);
 
@@ -34,18 +36,19 @@ const normalizeOplogData = (data: OplogEntry['data']) => {
  */
  export function registerDataStorageTests(generateStorageFactory: storage.TestStorageFactory) {
  test('save and load parameters', async () => {
- const sync_rules = test_utils.testRules(`
+ await using factory = await generateStorageFactory();
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  mybucket:
  parameters:
  - SELECT group_id FROM test WHERE id1 = token_parameters.user_id OR id2 = token_parameters.user_id
- data: []
- `);
-
- await using factory = await generateStorageFactory();
- const bucketStorage = factory.getInstance(sync_rules);
+ data: []
+ `
+ });
+ const bucketStorage = factory.getInstance(syncRules);
 
- const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable: TEST_TABLE,
  tag: storage.SaveOperationTag.INSERT,
@@ -69,11 +72,12 @@ bucket_definitions:
  },
  afterReplicaId: test_utils.rid('t1')
  });
+
+ await batch.commit('1/1');
  });
 
- const parameters = await bucketStorage.getParameterSets(result!.flushed_op, [
- ParameterLookup.normalized('mybucket', '1', ['user1'])
- ]);
+ const checkpoint = await bucketStorage.getCheckpoint();
+ const parameters = await checkpoint.getParameterSets([ParameterLookup.normalized('mybucket', '1', ['user1'])]);
  expect(parameters).toEqual([
  {
  group_id: 'group1a'
@@ -82,20 +86,19 @@ bucket_definitions:
  });
 
  test('it should use the latest version', async () => {
- const sync_rules = test_utils.testRules(
- `
+ await using factory = await generateStorageFactory();
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  mybucket:
  parameters:
  - SELECT group_id FROM test WHERE id = token_parameters.user_id
- data: []
+ data: []
  `
- );
-
- await using factory = await generateStorageFactory();
- const bucketStorage = factory.getInstance(sync_rules);
+ });
+ const bucketStorage = factory.getInstance(syncRules);
 
- const result1 = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable: TEST_TABLE,
  tag: storage.SaveOperationTag.INSERT,
@@ -105,8 +108,10 @@ bucket_definitions:
  },
  afterReplicaId: test_utils.rid('user1')
  });
+ await batch.commit('1/1');
  });
- const result2 = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ const checkpoint1 = await bucketStorage.getCheckpoint();
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable: TEST_TABLE,
  tag: storage.SaveOperationTag.INSERT,
@@ -116,11 +121,11 @@ bucket_definitions:
  },
  afterReplicaId: test_utils.rid('user1')
  });
+ await batch.commit('1/2');
  });
+ const checkpoint2 = await bucketStorage.getCheckpoint();
 
- const parameters = await bucketStorage.getParameterSets(result2!.flushed_op, [
- ParameterLookup.normalized('mybucket', '1', ['user1'])
- ]);
+ const parameters = await checkpoint2.getParameterSets([ParameterLookup.normalized('mybucket', '1', ['user1'])]);
  expect(parameters).toEqual([
  {
  group_id: 'group2'
@@ -128,9 +133,7 @@ bucket_definitions:
  ]);
 
  // Use the checkpoint to get older data if relevant
- const parameters2 = await bucketStorage.getParameterSets(result1!.flushed_op, [
- ParameterLookup.normalized('mybucket', '1', ['user1'])
- ]);
+ const parameters2 = await checkpoint1.getParameterSets([ParameterLookup.normalized('mybucket', '1', ['user1'])]);
  expect(parameters2).toEqual([
  {
  group_id: 'group1'
@@ -139,20 +142,19 @@ bucket_definitions:
  });
 
  test('it should use the latest version after updates', async () => {
- const sync_rules = test_utils.testRules(
- `
+ await using factory = await generateStorageFactory();
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  mybucket:
  parameters:
  - SELECT id AS todo_id
  FROM todos
  WHERE list_id IN token_parameters.list_id
- data: []
+ data: []
  `
- );
-
- await using factory = await generateStorageFactory();
- const bucketStorage = factory.getInstance(sync_rules);
+ });
+ const bucketStorage = factory.getInstance(syncRules);
 
  const table = test_utils.makeTestTable('todos', ['id', 'list_id']);
 
@@ -176,9 +178,11 @@ bucket_definitions:
  },
  afterReplicaId: test_utils.rid('todo2')
  });
+
+ await batch.commit('1/1');
  });
 
- const result2 = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  // Update the second todo item to now belong to list 1
  await batch.save({
  sourceTable: table,
@@ -189,12 +193,15 @@ bucket_definitions:
  },
  afterReplicaId: test_utils.rid('todo2')
  });
+
+ await batch.commit('1/1');
  });
 
  // We specifically request the todo_ids for both lists.
  // The removal operation for the association of `list2`::`todo2` should not interfere with the new
  // association of `list1`::`todo2`
- const parameters = await bucketStorage.getParameterSets(result2!.flushed_op, [
+ const checkpoint = await bucketStorage.getCheckpoint();
+ const parameters = await checkpoint.getParameterSets([
  ParameterLookup.normalized('mybucket', '1', ['list1']),
  ParameterLookup.normalized('mybucket', '1', ['list2'])
  ]);
@@ -210,20 +217,19 @@ bucket_definitions:
  });
 
  test('save and load parameters with different number types', async () => {
- const sync_rules = test_utils.testRules(
- `
+ await using factory = await generateStorageFactory();
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  mybucket:
  parameters:
  - SELECT group_id FROM test WHERE n1 = token_parameters.n1 and f2 = token_parameters.f2 and f3 = token_parameters.f3
  data: []
  `
- );
-
- await using factory = await generateStorageFactory();
- const bucketStorage = factory.getInstance(sync_rules);
+ });
+ const bucketStorage = factory.getInstance(syncRules);
 
- const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable: TEST_TABLE,
  tag: storage.SaveOperationTag.INSERT,
@@ -236,21 +242,23 @@ bucket_definitions:
  },
  afterReplicaId: test_utils.rid('t1')
  });
+
+ await batch.commit('1/1');
  });
 
  const TEST_PARAMS = { group_id: 'group1' };
 
- const checkpoint = result!.flushed_op;
+ const checkpoint = await bucketStorage.getCheckpoint();
 
- const parameters1 = await bucketStorage.getParameterSets(checkpoint, [
+ const parameters1 = await checkpoint.getParameterSets([
  ParameterLookup.normalized('mybucket', '1', [314n, 314, 3.14])
  ]);
  expect(parameters1).toEqual([TEST_PARAMS]);
- const parameters2 = await bucketStorage.getParameterSets(checkpoint, [
+ const parameters2 = await checkpoint.getParameterSets([
  ParameterLookup.normalized('mybucket', '1', [314, 314n, 3.14])
  ]);
  expect(parameters2).toEqual([TEST_PARAMS]);
- const parameters3 = await bucketStorage.getParameterSets(checkpoint, [
+ const parameters3 = await checkpoint.getParameterSets([
  ParameterLookup.normalized('mybucket', '1', [314n, 314, 3])
  ]);
  expect(parameters3).toEqual([]);
@@ -261,20 +269,19 @@ bucket_definitions:
  // This specific case tested here cannot happen with postgres in practice, but we still
  // test this to ensure correct deserialization.
 
- const sync_rules = test_utils.testRules(
- `
+ await using factory = await generateStorageFactory();
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  mybucket:
  parameters:
  - SELECT group_id FROM test WHERE n1 = token_parameters.n1
  data: []
  `
- );
-
- await using factory = await generateStorageFactory();
- const bucketStorage = factory.getInstance(sync_rules);
+ });
+ const bucketStorage = factory.getInstance(syncRules);
 
- const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable: TEST_TABLE,
  tag: storage.SaveOperationTag.INSERT,
@@ -298,31 +305,33 @@ bucket_definitions:
  },
  afterReplicaId: test_utils.rid('t1')
  });
+
+ await batch.commit('1/1');
  });
 
  const TEST_PARAMS = { group_id: 'group1' };
 
- const checkpoint = result!.flushed_op;
+ const checkpoint = await bucketStorage.getCheckpoint();
 
- const parameters1 = await bucketStorage.getParameterSets(checkpoint, [
+ const parameters1 = await checkpoint.getParameterSets([
  ParameterLookup.normalized('mybucket', '1', [1152921504606846976n])
  ]);
  expect(parameters1).toEqual([TEST_PARAMS]);
  });
 
  test('removing row', async () => {
- const sync_rules = test_utils.testRules(
- `
+ await using factory = await generateStorageFactory();
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  global:
  data:
  - SELECT id, description FROM "%"
  `
- );
- await using factory = await generateStorageFactory();
- const bucketStorage = factory.getInstance(sync_rules);
+ });
+ const bucketStorage = factory.getInstance(syncRules);
 
- const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;
 
  await batch.save({
@@ -339,9 +348,10 @@ bucket_definitions:
  tag: storage.SaveOperationTag.DELETE,
  beforeReplicaId: test_utils.rid('test1')
  });
+ await batch.commit('1/1');
  });
 
- const checkpoint = result!.flushed_op;
+ const { checkpoint } = await bucketStorage.getCheckpoint();
 
  const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]])));
  const data = batch[0].chunkData.data.map((d) => {
@@ -373,8 +383,9 @@ bucket_definitions:
  test('save and load parameters with workspaceId', async () => {
  const WORKSPACE_TABLE = test_utils.makeTestTable('workspace', ['id']);
 
- const sync_rules_content = test_utils.testRules(
- `
+ await using factory = await generateStorageFactory();
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  by_workspace:
  parameters:
@@ -382,13 +393,11 @@ bucket_definitions:
  workspace."userId" = token_parameters.user_id
  data: []
  `
- );
- const sync_rules = sync_rules_content.parsed(test_utils.PARSE_OPTIONS).sync_rules;
-
- await using factory = await generateStorageFactory();
- const bucketStorage = factory.getInstance(sync_rules_content);
+ });
+ const sync_rules = syncRules.parsed(test_utils.PARSE_OPTIONS).sync_rules;
+ const bucketStorage = factory.getInstance(syncRules);
 
- const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable: WORKSPACE_TABLE,
  tag: storage.SaveOperationTag.INSERT,
@@ -398,33 +407,38 @@ bucket_definitions:
  },
  afterReplicaId: test_utils.rid('workspace1')
  });
+ await batch.commit('1/1');
  });
-
- const checkpoint = result!.flushed_op;
+ const checkpoint = await bucketStorage.getCheckpoint();
 
  const parameters = new RequestParameters({ sub: 'u1' }, {});
 
- const q1 = sync_rules.bucketDescriptors[0].parameterQueries[0];
+ const q1 = (sync_rules.bucketSources[0] as SqlBucketDescriptor).parameterQueries[0];
 
  const lookups = q1.getLookups(parameters);
  expect(lookups).toEqual([ParameterLookup.normalized('by_workspace', '1', ['u1'])]);
 
- const parameter_sets = await bucketStorage.getParameterSets(checkpoint, lookups);
+ const parameter_sets = await checkpoint.getParameterSets(lookups);
  expect(parameter_sets).toEqual([{ workspace_id: 'workspace1' }]);
 
- const buckets = await sync_rules.getBucketParameterQuerier(parameters).queryDynamicBucketDescriptions({
- getParameterSets(lookups) {
- return bucketStorage.getParameterSets(checkpoint, lookups);
- }
- });
- expect(buckets).toEqual([{ bucket: 'by_workspace["workspace1"]', priority: 3 }]);
+ const buckets = await sync_rules
+ .getBucketParameterQuerier(test_utils.querierOptions(parameters))
+ .querier.queryDynamicBucketDescriptions({
+ getParameterSets(lookups) {
+ return checkpoint.getParameterSets(lookups);
+ }
+ });
+ expect(buckets).toEqual([
+ { bucket: 'by_workspace["workspace1"]', priority: 3, definition: 'by_workspace', inclusion_reasons: ['default'] }
+ ]);
  });
 
  test('save and load parameters with dynamic global buckets', async () => {
  const WORKSPACE_TABLE = test_utils.makeTestTable('workspace');
 
- const sync_rules_content = test_utils.testRules(
- `
+ await using factory = await generateStorageFactory();
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  by_public_workspace:
  parameters:
@@ -432,13 +446,11 @@ bucket_definitions:
  workspace.visibility = 'public'
  data: []
  `
- );
- const sync_rules = sync_rules_content.parsed(test_utils.PARSE_OPTIONS).sync_rules;
-
- await using factory = await generateStorageFactory();
- const bucketStorage = factory.getInstance(sync_rules_content);
+ });
+ const sync_rules = syncRules.parsed(test_utils.PARSE_OPTIONS).sync_rules;
+ const bucketStorage = factory.getInstance(syncRules);
 
- const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable: WORKSPACE_TABLE,
  tag: storage.SaveOperationTag.INSERT,
@@ -468,38 +480,53 @@ bucket_definitions:
  },
  afterReplicaId: test_utils.rid('workspace3')
  });
+
+ await batch.commit('1/1');
  });
 
- const checkpoint = result!.flushed_op;
+ const checkpoint = await bucketStorage.getCheckpoint();
 
  const parameters = new RequestParameters({ sub: 'unknown' }, {});
 
- const q1 = sync_rules.bucketDescriptors[0].parameterQueries[0];
+ const q1 = (sync_rules.bucketSources[0] as SqlBucketDescriptor).parameterQueries[0];
 
  const lookups = q1.getLookups(parameters);
  expect(lookups).toEqual([ParameterLookup.normalized('by_public_workspace', '1', [])]);
 
- const parameter_sets = await bucketStorage.getParameterSets(checkpoint, lookups);
+ const parameter_sets = await checkpoint.getParameterSets(lookups);
  parameter_sets.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
  expect(parameter_sets).toEqual([{ workspace_id: 'workspace1' }, { workspace_id: 'workspace3' }]);
 
- const buckets = await sync_rules.getBucketParameterQuerier(parameters).queryDynamicBucketDescriptions({
- getParameterSets(lookups) {
- return bucketStorage.getParameterSets(checkpoint, lookups);
- }
- });
+ const buckets = await sync_rules
+ .getBucketParameterQuerier(test_utils.querierOptions(parameters))
+ .querier.queryDynamicBucketDescriptions({
+ getParameterSets(lookups) {
+ return checkpoint.getParameterSets(lookups);
+ }
+ });
  buckets.sort((a, b) => a.bucket.localeCompare(b.bucket));
  expect(buckets).toEqual([
- { bucket: 'by_public_workspace["workspace1"]', priority: 3 },
- { bucket: 'by_public_workspace["workspace3"]', priority: 3 }
+ {
+ bucket: 'by_public_workspace["workspace1"]',
+ priority: 3,
+ definition: 'by_public_workspace',
+ inclusion_reasons: ['default']
+ },
+ {
+ bucket: 'by_public_workspace["workspace3"]',
+ priority: 3,
+ definition: 'by_public_workspace',
+ inclusion_reasons: ['default']
+ }
  ]);
  });
 
  test('multiple parameter queries', async () => {
  const WORKSPACE_TABLE = test_utils.makeTestTable('workspace');
 
- const sync_rules_content = test_utils.testRules(
- `
+ await using factory = await generateStorageFactory();
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  by_workspace:
  parameters:
@@ -509,13 +536,11 @@ bucket_definitions:
  workspace.user_id = token_parameters.user_id
  data: []
  `
- );
- const sync_rules = sync_rules_content.parsed(test_utils.PARSE_OPTIONS).sync_rules;
-
- await using factory = await generateStorageFactory();
- const bucketStorage = factory.getInstance(sync_rules_content);
+ });
+ const sync_rules = syncRules.parsed(test_utils.PARSE_OPTIONS).sync_rules;
+ const bucketStorage = factory.getInstance(syncRules);
 
- const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable: WORKSPACE_TABLE,
  tag: storage.SaveOperationTag.INSERT,
@@ -557,56 +582,59 @@ bucket_definitions:
  },
  afterReplicaId: test_utils.rid('workspace4')
  });
+
+ await batch.commit('1/1');
  });
 
- const checkpoint = result!.flushed_op;
+ const checkpoint = await bucketStorage.getCheckpoint();
 
  const parameters = new RequestParameters({ sub: 'u1' }, {});
 
  // Test intermediate values - could be moved to sync_rules.test.ts
- const q1 = sync_rules.bucketDescriptors[0].parameterQueries[0];
+ const q1 = (sync_rules.bucketSources[0] as SqlBucketDescriptor).parameterQueries[0];
  const lookups1 = q1.getLookups(parameters);
  expect(lookups1).toEqual([ParameterLookup.normalized('by_workspace', '1', [])]);
 
- const parameter_sets1 = await bucketStorage.getParameterSets(checkpoint, lookups1);
+ const parameter_sets1 = await checkpoint.getParameterSets(lookups1);
  parameter_sets1.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
  expect(parameter_sets1).toEqual([{ workspace_id: 'workspace1' }]);
 
- const q2 = sync_rules.bucketDescriptors[0].parameterQueries[1];
+ const q2 = (sync_rules.bucketSources[0] as SqlBucketDescriptor).parameterQueries[1];
  const lookups2 = q2.getLookups(parameters);
  expect(lookups2).toEqual([ParameterLookup.normalized('by_workspace', '2', ['u1'])]);
 
- const parameter_sets2 = await bucketStorage.getParameterSets(checkpoint, lookups2);
+ const parameter_sets2 = await checkpoint.getParameterSets(lookups2);
  parameter_sets2.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
  expect(parameter_sets2).toEqual([{ workspace_id: 'workspace3' }]);
 
  // Test final values - the important part
  const buckets = (
- await sync_rules.getBucketParameterQuerier(parameters).queryDynamicBucketDescriptions({
- getParameterSets(lookups) {
- return bucketStorage.getParameterSets(checkpoint, lookups);
- }
- })
+ await sync_rules
+ .getBucketParameterQuerier(test_utils.querierOptions(parameters))
+ .querier.queryDynamicBucketDescriptions({
+ getParameterSets(lookups) {
+ return checkpoint.getParameterSets(lookups);
+ }
+ })
  ).map((e) => e.bucket);
  buckets.sort();
  expect(buckets).toEqual(['by_workspace["workspace1"]', 'by_workspace["workspace3"]']);
  });
 
  test('changing client ids', async () => {
- const sync_rules = test_utils.testRules(
- `
+ await using factory = await generateStorageFactory();
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  global:
  data:
  - SELECT client_id as id, description FROM "%"
  `
- );
- await using factory = await generateStorageFactory();
-
- const bucketStorage = factory.getInstance(sync_rules);
+ });
+ const bucketStorage = factory.getInstance(syncRules);
 
  const sourceTable = TEST_TABLE;
- const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  await batch.save({
  sourceTable,
  tag: storage.SaveOperationTag.INSERT,
@@ -638,8 +666,10 @@ bucket_definitions:
  },
  afterReplicaId: test_utils.rid('test2')
  });
+
+ await batch.commit('1/1');
  });
- const checkpoint = result!.flushed_op;
+ const { checkpoint } = await bucketStorage.getCheckpoint();
  const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]])));
  const data = batch[0].chunkData.data.map((d) => {
  return {
@@ -657,16 +687,16 @@ bucket_definitions:
  });
 
  test('re-apply delete', async () => {
- const sync_rules = test_utils.testRules(
- `
+ await using factory = await generateStorageFactory();
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  global:
  data:
  - SELECT id, description FROM "%"
  `
- );
- await using factory = await generateStorageFactory();
- const bucketStorage = factory.getInstance(sync_rules);
+ });
+ const bucketStorage = factory.getInstance(syncRules);
 
  await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;
@@ -690,9 +720,11 @@ bucket_definitions:
  tag: storage.SaveOperationTag.DELETE,
  beforeReplicaId: test_utils.rid('test1')
  });
+
+ await batch.commit('1/1');
  });
 
- const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;
 
  await batch.save({
@@ -702,7 +734,7 @@ bucket_definitions:
  });
  });
 
- const checkpoint = result!.flushed_op;
+ const { checkpoint } = await bucketStorage.getCheckpoint();
 
  const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]])));
  const data = batch[0].chunkData.data.map((d) => {
@@ -732,16 +764,16 @@ bucket_definitions:
  });
 
  test('re-apply update + delete', async () => {
- const sync_rules = test_utils.testRules(
- `
+ await using factory = await generateStorageFactory();
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  global:
  data:
  - SELECT id, description FROM "%"
  `
- );
- await using factory = await generateStorageFactory();
- const bucketStorage = factory.getInstance(sync_rules);
+ });
+ const bucketStorage = factory.getInstance(syncRules);
 
  await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;
@@ -785,9 +817,11 @@ bucket_definitions:
  tag: storage.SaveOperationTag.DELETE,
  beforeReplicaId: test_utils.rid('test1')
  });
+
+ await batch.commit('1/1');
  });
 
- const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;
 
  await batch.save({
@@ -815,9 +849,11 @@ bucket_definitions:
  tag: storage.SaveOperationTag.DELETE,
  beforeReplicaId: test_utils.rid('test1')
  });
+
+ await batch.commit('2/1');
  });
 
- const checkpoint = result!.flushed_op;
+ const { checkpoint } = await bucketStorage.getCheckpoint();
 
  const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]])));
 
@@ -850,18 +886,17 @@ bucket_definitions:
  });
 
  test('truncate parameters', async () => {
- const sync_rules = test_utils.testRules(
- `
+ await using factory = await generateStorageFactory();
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  mybucket:
  parameters:
  - SELECT group_id FROM test WHERE id1 = token_parameters.user_id OR id2 = token_parameters.user_id
  data: []
  `
- );
-
- await using factory = await generateStorageFactory();
- const bucketStorage = factory.getInstance(sync_rules);
+ });
+ const bucketStorage = factory.getInstance(syncRules);
 
  await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  await batch.save({
@@ -879,11 +914,9 @@ bucket_definitions:
  await batch.truncate([TEST_TABLE]);
  });
 
- const { checkpoint } = await bucketStorage.getCheckpoint();
+ const checkpoint = await bucketStorage.getCheckpoint();
 
- const parameters = await bucketStorage.getParameterSets(checkpoint, [
- ParameterLookup.normalized('mybucket', '1', ['user1'])
- ]);
+ const parameters = await checkpoint.getParameterSets([ParameterLookup.normalized('mybucket', '1', ['user1'])]);
  expect(parameters).toEqual([]);
  });
 
@@ -896,16 +929,16 @@ bucket_definitions:
  // 1. Not getting the correct "current_data" state for each operation.
  // 2. Output order not being correct.
 
- const sync_rules = test_utils.testRules(
- `
+ await using factory = await generateStorageFactory();
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  global:
  data:
  - SELECT id, description FROM "test"
  `
- );
- await using factory = await generateStorageFactory();
- const bucketStorage = factory.getInstance(sync_rules);
+ });
+ const bucketStorage = factory.getInstance(syncRules);
 
  // Pre-setup
  const result1 = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
@@ -1048,14 +1081,6 @@ bucket_definitions:
  });
 
  test('changed data with replica identity full', async () => {
- const sync_rules = test_utils.testRules(
- `
- bucket_definitions:
- global:
- data:
- - SELECT id, description FROM "test"
- `
- );
  function rid2(id: string, description: string) {
  return getUuidReplicaIdentityBson({ id, description }, [
  { name: 'id', type: 'VARCHAR', typeId: 25 },
@@ -1063,7 +1088,15 @@ bucket_definitions:
  ]);
  }
  await using factory = await generateStorageFactory();
- const bucketStorage = factory.getInstance(sync_rules);
+ const syncRules = await factory.updateSyncRules({
+ content: `
+ bucket_definitions:
+ global:
+ data:
+ - SELECT id, description FROM "test"
+ `
+ });
+ const bucketStorage = factory.getInstance(syncRules);
 
  const sourceTable = test_utils.makeTestTable('test', ['id', 'description']);
 
@@ -1155,14 +1188,6 @@ bucket_definitions:
  });
 
  test('unchanged data with replica identity full', async () => {
- const sync_rules = test_utils.testRules(
- `
- bucket_definitions:
- global:
- data:
- - SELECT id, description FROM "test"
- `
- );
  function rid2(id: string, description: string) {
  return getUuidReplicaIdentityBson({ id, description }, [
  { name: 'id', type: 'VARCHAR', typeId: 25 },
@@ -1171,7 +1196,15 @@ bucket_definitions:
  }
 
  await using factory = await generateStorageFactory();
- const bucketStorage = factory.getInstance(sync_rules);
+ const syncRules = await factory.updateSyncRules({
+ content: `
+ bucket_definitions:
+ global:
+ data:
+ - SELECT id, description FROM "test"
+ `
+ });
+ const bucketStorage = factory.getInstance(syncRules);
 
  const sourceTable = test_utils.makeTestTable('test', ['id', 'description']);
 
@@ -1260,18 +1293,18 @@ bucket_definitions:
  // but large enough in size to be split over multiple returned batches.
  // The specific batch splits are an implementation detail of the storage driver,
  // and the test will have to be updated when other implementations are added.
- const sync_rules = test_utils.testRules(
- `
+ await using factory = await generateStorageFactory();
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  global:
  data:
  - SELECT id, description FROM "%"
  `
- );
- await using factory = await generateStorageFactory();
- const bucketStorage = factory.getInstance(sync_rules);
+ });
+ const bucketStorage = factory.getInstance(syncRules);
 
- const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;
 
  const largeDescription = '0123456789'.repeat(12_000_00);
@@ -1316,9 +1349,11 @@ bucket_definitions:
  },
  afterReplicaId: test_utils.rid('test3')
  });
+
+ await batch.commit('1/1');
  });
 
- const checkpoint = result!.flushed_op;
+ const { checkpoint } = await bucketStorage.getCheckpoint();
 
  const options: storage.BucketDataBatchOptions = {
  chunkLimitBytes: 16 * 1024 * 1024
@@ -1367,18 +1402,18 @@ bucket_definitions:
 
  test('long batch', async () => {
  // Test syncing a batch of data that is limited by count.
- const sync_rules = test_utils.testRules(
- `
+ await using factory = await generateStorageFactory();
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  global:
  data:
  - SELECT id, description FROM "%"
  `
- );
- await using factory = await generateStorageFactory();
- const bucketStorage = factory.getInstance(sync_rules);
+ });
+ const bucketStorage = factory.getInstance(syncRules);
 
- const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;
 
  for (let i = 1; i <= 6; i++) {
@@ -1392,9 +1427,11 @@ bucket_definitions:
  afterReplicaId: `test${i}`
  });
  }
+
+ await batch.commit('1/1');
  });
 
- const checkpoint = result!.flushed_op;
+ const { checkpoint } = await bucketStorage.getCheckpoint();
 
  const batch1 = await test_utils.oneFromAsync(
  bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]]), { limit: 4 })
@@ -1441,8 +1478,9 @@ bucket_definitions:
 
  describe('batch has_more', () => {
  const setup = async (options: BucketDataBatchOptions) => {
- const sync_rules = test_utils.testRules(
- `
+ await using factory = await generateStorageFactory();
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  global1:
  data:
@@ -1451,11 +1489,10 @@ bucket_definitions:
  data:
  - SELECT id, description FROM test WHERE bucket = 'global2'
  `
- );
- await using factory = await generateStorageFactory();
- const bucketStorage = factory.getInstance(sync_rules);
+ });
+ const bucketStorage = factory.getInstance(syncRules);
 
- const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  const sourceTable = TEST_TABLE;
 
  for (let i = 1; i <= 10; i++) {
@@ -1470,9 +1507,11 @@ bucket_definitions:
  afterReplicaId: `test${i}`
  });
  }
+
+ await batch.commit('1/1');
  });
 
- const checkpoint = result!.flushed_op;
+ const { checkpoint } = await bucketStorage.getCheckpoint();
  return await test_utils.fromAsync(
  bucketStorage.getBucketDataBatch(
  checkpoint,
@@ -1603,15 +1642,18 @@ bucket_definitions:
 
  const r = await f.configureSyncRules({ content: 'bucket_definitions: {}', validate: false });
  const storage = f.getInstance(r.persisted_sync_rules!);
- await storage.autoActivate();
+ await storage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ await batch.keepalive('1/0');
+ });
 
  const metrics2 = await f.getStorageMetrics();
  expect(metrics2).toMatchSnapshot();
  });
 
  test('invalidate cached parsed sync rules', async () => {
- const sync_rules_content = test_utils.testRules(
- `
+ await using bucketStorageFactory = await generateStorageFactory();
+ const syncRules = await bucketStorageFactory.updateSyncRules({
+ content: `
  bucket_definitions:
  by_workspace:
  parameters:
@@ -1619,10 +1661,8 @@ bucket_definitions:
  workspace."userId" = token_parameters.user_id
  data: []
  `
- );
-
- await using bucketStorageFactory = await generateStorageFactory();
- const syncBucketStorage = bucketStorageFactory.getInstance(sync_rules_content);
+ });
+ const syncBucketStorage = bucketStorageFactory.getInstance(syncRules);
 
  const parsedSchema1 = syncBucketStorage.getParsedSyncRules({
  defaultSchema: 'public'
@@ -1651,12 +1691,11 @@ bucket_definitions:
  content: `
  bucket_definitions:
  mybucket:
- data: []
+ data: []
  `,
  validate: false
  });
  const bucketStorage = factory.getInstance(r.persisted_sync_rules!);
- await bucketStorage.autoActivate();
 
  const abortController = new AbortController();
  context.onTestFinished(() => abortController.abort());
@@ -1697,7 +1736,6 @@ bucket_definitions:
  validate: false
  });
  const bucketStorage = factory.getInstance(r.persisted_sync_rules!);
- await bucketStorage.autoActivate();
 
  const abortController = new AbortController();
  context.onTestFinished(() => abortController.abort());
@@ -1760,7 +1798,6 @@ bucket_definitions:
  validate: false
  });
  const bucketStorage = factory.getInstance(r.persisted_sync_rules!);
- await bucketStorage.autoActivate();
  bucketStorage.setWriteCheckpointMode(storage.WriteCheckpointMode.CUSTOM);
 
  const abortController = new AbortController();
@@ -1801,7 +1838,6 @@ bucket_definitions:
  validate: false
  });
  const bucketStorage = factory.getInstance(r.persisted_sync_rules!);
- await bucketStorage.autoActivate();
  bucketStorage.setWriteCheckpointMode(storage.WriteCheckpointMode.CUSTOM);
 
  const abortController = new AbortController();
@@ -1845,7 +1881,6 @@ bucket_definitions:
  validate: false
  });
  const bucketStorage = factory.getInstance(r.persisted_sync_rules!);
- await bucketStorage.autoActivate();
  bucketStorage.setWriteCheckpointMode(storage.WriteCheckpointMode.CUSTOM);
 
  const abortController = new AbortController();
@@ -1916,17 +1951,17 @@ bucket_definitions:
  // Test syncing a batch of data that is small in count,
  // but large enough in size to be split over multiple returned chunks.
  // Similar to the above test, but splits over 1MB chunks.
- const sync_rules = test_utils.testRules(
- `
+ await using factory = await generateStorageFactory();
+ const syncRules = await factory.updateSyncRules({
+ content: `
  bucket_definitions:
  global:
  data:
  - SELECT id FROM test
  - SELECT id FROM test_ignore WHERE false
  `
- );
- await using factory = await generateStorageFactory();
- const bucketStorage = factory.getInstance(sync_rules);
+ });
+ const bucketStorage = factory.getInstance(syncRules);
 
  const sourceTable = test_utils.makeTestTable('test', ['id']);
  const sourceTableIgnore = test_utils.makeTestTable('test_ignore', ['id']);
@@ -1961,4 +1996,70 @@ bucket_definitions:
  // we expect 0n and 1n, or 1n and 2n.
  expect(checkpoint2).toBeGreaterThan(checkpoint1);
  });
+
+ test('data with custom types', async () => {
+ await using factory = await generateStorageFactory();
+ const testValue = {
+ sourceTable: TEST_TABLE,
+ tag: storage.SaveOperationTag.INSERT,
+ after: {
+ id: 't1',
+ description: new DateTimeValue('2025-08-28T11:30:00')
+ },
+ afterReplicaId: test_utils.rid('t1')
+ } satisfies SaveOptions;
+
+ {
+ // First, deploy the old sync rules and save a row with a date-time value
+ const syncRules = await factory.updateSyncRules({
+ content: `
+ bucket_definitions:
+ global:
+ data:
+ - SELECT id, description FROM test
+ `
+ });
+ const bucketStorage = factory.getInstance(syncRules);
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ await batch.save(testValue);
+ await batch.commit('1/1');
+ });
+
+ const { checkpoint } = await bucketStorage.getCheckpoint();
+ const batch = await test_utils.fromAsync(
+ bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]]))
+ );
+ expect(batch[0].chunkData.data).toMatchObject([
+ {
+ data: '{"id":"t1","description":"2025-08-28 11:30:00"}'
+ }
+ ]);
+ }
+
+ const syncRules = await factory.updateSyncRules({
+ content: `
+ bucket_definitions:
+ global:
+ data:
+ - SELECT id, description FROM test
+
+ config:
+ edition: 2
+ `
+ });
+ const bucketStorage = factory.getInstance(syncRules);
+ await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+ await batch.save(testValue);
+ await batch.commit('1/2');
+ });
+ const { checkpoint } = await bucketStorage.getCheckpoint();
+ const batch = await test_utils.fromAsync(
+ bucketStorage.getBucketDataBatch(checkpoint, new Map([['2#global[]', 0n]]))
+ );
+ expect(batch[0].chunkData.data).toMatchObject([
+ {
+ data: '{"id":"t1","description":"2025-08-28T11:30:00"}'
+ }
+ ]);
+ });
  }