@powersync/service-core-tests 0.11.0 → 0.12.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +27 -0
- package/LICENSE +3 -3
- package/dist/test-utils/general-utils.d.ts +2 -0
- package/dist/test-utils/general-utils.js +8 -0
- package/dist/test-utils/general-utils.js.map +1 -1
- package/dist/tests/register-compacting-tests.js +119 -23
- package/dist/tests/register-compacting-tests.js.map +1 -1
- package/dist/tests/register-data-storage-tests.js +99 -11
- package/dist/tests/register-data-storage-tests.js.map +1 -1
- package/dist/tests/register-sync-tests.js +150 -46
- package/dist/tests/register-sync-tests.js.map +1 -1
- package/package.json +5 -5
- package/src/test-utils/general-utils.ts +10 -1
- package/src/tests/register-compacting-tests.ts +118 -28
- package/src/tests/register-data-storage-tests.ts +109 -23
- package/src/tests/register-sync-tests.ts +144 -46
- package/tsconfig.tsbuildinfo +1 -1
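Both test files follow the same migration: sync rules are no longer built in-memory with `test_utils.testRules(...)` but are deployed through the storage factory, and batches now end with an explicit commit. A minimal sketch of the new setup pattern, assembled only from calls that appear in the hunks below (the YAML content is illustrative):

```ts
// Inside a test body; generateStorageFactory is the storage.TestStorageFactory
// passed to the register* functions.
await using factory = await generateStorageFactory();

// Deploy the sync rules through the factory instead of test_utils.testRules(...).
const syncRules = await factory.updateSyncRules({
  content: `
bucket_definitions:
  global:
    data: [select * from test]
`
});

// The bucket storage instance is now derived from the deployed rules.
const bucketStorage = factory.getInstance(syncRules);
```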
package/src/tests/register-compacting-tests.ts

@@ -6,14 +6,15 @@ const TEST_TABLE = test_utils.makeTestTable('test', ['id']);
 
 export function registerCompactTests(generateStorageFactory: storage.TestStorageFactory) {
   test('compacting (1)', async () => {
-    const sync_rules = test_utils.testRules(`
+    await using factory = await generateStorageFactory();
+    const syncRules = await factory.updateSyncRules({
+      content: `
 bucket_definitions:
   global:
     data: [select * from test]
-`);
-
-    await using factory = await generateStorageFactory();
-    const bucketStorage = factory.getInstance(sync_rules);
+`
+    });
+    const bucketStorage = factory.getInstance(syncRules);
 
     const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       await batch.save({
@@ -42,6 +43,8 @@ bucket_definitions:
         },
         afterReplicaId: test_utils.rid('t2')
       });
+
+      await batch.commit('1/1');
     });
 
     const checkpoint = result!.flushed_op;
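The recurring change inside `startBatch` callbacks is an explicit `batch.commit(...)`; the argument looks like a Postgres-style LSN string, with later batches in the same test committing at a higher position (`'1/1'`, then `'2/1'`). A sketch of the resulting batch shape, assuming the commit is what makes the saved ops part of the flushed checkpoint:

```ts
const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  await batch.save({
    sourceTable: TEST_TABLE,
    tag: storage.SaveOperationTag.INSERT,
    after: { id: 't1' },
    afterReplicaId: test_utils.rid('t1')
  });

  // Explicit commit at a replication position, as added throughout this diff.
  await batch.commit('1/1');
});

const checkpoint = result!.flushed_op;
```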
@@ -72,6 +75,7 @@ bucket_definitions:
         op_id: '3'
       }
     ]);
+    expect(batchBefore.targetOp).toEqual(null);
 
     await bucketStorage.compact({
       clearBatchLimit: 2,
@@ -84,6 +88,8 @@ bucket_definitions:
     );
     const dataAfter = batchAfter.chunkData.data;
     const checksumAfter = await bucketStorage.getChecksums(checkpoint, ['global[]']);
+    bucketStorage.clearChecksumCache();
+    const checksumAfter2 = await bucketStorage.getChecksums(checkpoint, ['global[]']);
 
     expect(batchAfter.targetOp).toEqual(3n);
     expect(dataAfter).toMatchObject([
@@ -106,20 +112,22 @@ bucket_definitions:
       }
     ]);
 
-    expect(
+    expect(checksumAfter.get('global[]')).toEqual(checksumBefore.get('global[]'));
+    expect(checksumAfter2.get('global[]')).toEqual(checksumBefore.get('global[]'));
 
     test_utils.validateCompactedBucket(dataBefore, dataAfter);
   });
 
   test('compacting (2)', async () => {
-    const sync_rules = test_utils.testRules(`
+    await using factory = await generateStorageFactory();
+    const syncRules = await factory.updateSyncRules({
+      content: `
 bucket_definitions:
   global:
     data: [select * from test]
-`);
-
-    await using factory = await generateStorageFactory();
-    const bucketStorage = factory.getInstance(sync_rules);
+`
+    });
+    const bucketStorage = factory.getInstance(syncRules);
 
     const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       await batch.save({
@@ -157,6 +165,8 @@ bucket_definitions:
         },
         afterReplicaId: test_utils.rid('t2')
       });
+
+      await batch.commit('1/1');
     });
 
     const checkpoint = result!.flushed_op;
@@ -204,6 +214,7 @@ bucket_definitions:
       bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]]))
     );
     const dataAfter = batchAfter.chunkData.data;
+    bucketStorage.clearChecksumCache();
     const checksumAfter = await bucketStorage.getChecksums(checkpoint, ['global[]']);
 
     expect(batchAfter.targetOp).toEqual(4n);
@@ -220,20 +231,24 @@ bucket_definitions:
       op_id: '4'
     }
   ]);
-    expect(
+    expect(checksumAfter.get('global[]')).toEqual({
+      ...checksumBefore.get('global[]'),
+      count: 2
+    });
 
     test_utils.validateCompactedBucket(dataBefore, dataAfter);
   });
 
   test('compacting (3)', async () => {
-    const sync_rules = test_utils.testRules(`
+    await using factory = await generateStorageFactory();
+    const syncRules = await factory.updateSyncRules({
+      content: `
 bucket_definitions:
   global:
     data: [select * from test]
-`);
-
-    await using factory = await generateStorageFactory();
-    const bucketStorage = factory.getInstance(sync_rules);
+`
+    });
+    const bucketStorage = factory.getInstance(syncRules);
 
     const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       await batch.save({
@@ -262,6 +277,8 @@ bucket_definitions:
         },
         beforeReplicaId: 't1'
       });
+
+      await batch.commit('1/1');
     });
 
     const checkpoint1 = result!.flushed_op;
@@ -276,6 +293,7 @@ bucket_definitions:
         },
         beforeReplicaId: 't2'
       });
+      await batch.commit('2/1');
     });
     const checkpoint2 = result2!.flushed_op;
 
@@ -289,6 +307,7 @@ bucket_definitions:
       bucketStorage.getBucketDataBatch(checkpoint2, new Map([['global[]', 0n]]))
     );
     const dataAfter = batchAfter.chunkData.data;
+    await bucketStorage.clearChecksumCache();
     const checksumAfter = await bucketStorage.getChecksums(checkpoint2, ['global[]']);
 
     expect(batchAfter.targetOp).toEqual(4n);
@@ -307,18 +326,18 @@ bucket_definitions:
   });
 
   test('compacting (4)', async () => {
-    const sync_rules = test_utils.testRules(/* yaml */
-    ` bucket_definitions:
-    grouped:
-      # The parameter query here is not important
-      # We specifically don't want to create bucket_parameter records here
-      # since the op_ids for bucket_data could vary between storage implementations.
-      parameters: select 'b' as b
-      data:
-        - select * from test where b = bucket.b`);
-
     await using factory = await generateStorageFactory();
-    const bucketStorage = factory.getInstance(sync_rules);
+    const syncRules = await factory.updateSyncRules({
+      /* yaml */ content: ` bucket_definitions:
+    grouped:
+      # The parameter query here is not important
+      # We specifically don't want to create bucket_parameter records here
+      # since the op_ids for bucket_data could vary between storage implementations.
+      parameters: select 'b' as b
+      data:
+        - select * from test where b = bucket.b`
+    });
+    const bucketStorage = factory.getInstance(syncRules);
 
     const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
       /**
@@ -383,6 +402,8 @@ bucket_definitions:
         },
         afterReplicaId: test_utils.rid('t2')
       });
+
+      await batch.commit('1/1');
     }
   });
 
@@ -431,4 +452,73 @@ bucket_definitions:
       ])
     );
   });
+
+  test('partial checksums after compacting', async () => {
+    await using factory = await generateStorageFactory();
+    const syncRules = await factory.updateSyncRules({
+      content: `
+bucket_definitions:
+  global:
+    data: [select * from test]
+`
+    });
+    const bucketStorage = factory.getInstance(syncRules);
+
+    const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 't1'
+        },
+        afterReplicaId: 't1'
+      });
+
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 't2'
+        },
+        afterReplicaId: 't2'
+      });
+
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.DELETE,
+        before: {
+          id: 't1'
+        },
+        beforeReplicaId: 't1'
+      });
+
+      await batch.commit('1/1');
+    });
+
+    await bucketStorage.compact({
+      clearBatchLimit: 2,
+      moveBatchLimit: 1,
+      moveBatchQueryLimit: 1
+    });
+
+    const result2 = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.DELETE,
+        before: {
+          id: 't2'
+        },
+        beforeReplicaId: 't2'
+      });
+      await batch.commit('2/1');
+    });
+    const checkpoint2 = result2!.flushed_op;
+    await bucketStorage.clearChecksumCache();
+    const checksumAfter = await bucketStorage.getChecksums(checkpoint2, ['global[]']);
+    expect(checksumAfter.get('global[]')).toEqual({
+      bucket: 'global[]',
+      count: 4,
+      checksum: 1874612650
+    });
+  });
 }
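Together with the `clearChecksumCache()` calls added above, the new `partial checksums after compacting` test pins down an invariant: compaction rewrites ops, but checksums computed for an existing checkpoint must stay stable whether they are served from the cache or recomputed from scratch. Condensed from the added lines (all names as in the diff):

```ts
const checksumBefore = await bucketStorage.getChecksums(checkpoint, ['global[]']);

await bucketStorage.compact({
  clearBatchLimit: 2,
  moveBatchLimit: 1,
  moveBatchQueryLimit: 1
});

// Cached checksums should be unchanged by compaction...
const checksumAfter = await bucketStorage.getChecksums(checkpoint, ['global[]']);
// ...and so should checksums recomputed after dropping the cache.
await bucketStorage.clearChecksumCache();
const checksumAfter2 = await bucketStorage.getChecksums(checkpoint, ['global[]']);

expect(checksumAfter.get('global[]')).toEqual(checksumBefore.get('global[]'));
expect(checksumAfter2.get('global[]')).toEqual(checksumBefore.get('global[]'));
```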
package/src/tests/register-data-storage-tests.ts

@@ -3,11 +3,13 @@ import {
   getUuidReplicaIdentityBson,
   InternalOpId,
   OplogEntry,
+  SaveOptions,
   storage
 } from '@powersync/service-core';
-import { ParameterLookup, RequestParameters } from '@powersync/service-sync-rules';
+import { DateTimeValue, ParameterLookup, RequestParameters } from '@powersync/service-sync-rules';
 import { expect, test, describe, beforeEach } from 'vitest';
 import * as test_utils from '../test-utils/test-utils-index.js';
+import { SqlBucketDescriptor } from '@powersync/service-sync-rules/src/SqlBucketDescriptor.js';
 
 export const TEST_TABLE = test_utils.makeTestTable('test', ['id']);
 
@@ -411,7 +413,7 @@ bucket_definitions:
 
     const parameters = new RequestParameters({ sub: 'u1' }, {});
 
-    const q1 = sync_rules.
+    const q1 = (sync_rules.bucketSources[0] as SqlBucketDescriptor).parameterQueries[0];
 
     const lookups = q1.getLookups(parameters);
     expect(lookups).toEqual([ParameterLookup.normalized('by_workspace', '1', ['u1'])]);
@@ -419,12 +421,16 @@ bucket_definitions:
     const parameter_sets = await checkpoint.getParameterSets(lookups);
     expect(parameter_sets).toEqual([{ workspace_id: 'workspace1' }]);
 
-    const buckets = await sync_rules
-
-
-
-
-
+    const buckets = await sync_rules
+      .getBucketParameterQuerier(test_utils.querierOptions(parameters))
+      .querier.queryDynamicBucketDescriptions({
+        getParameterSets(lookups) {
+          return checkpoint.getParameterSets(lookups);
+        }
+      });
+    expect(buckets).toEqual([
+      { bucket: 'by_workspace["workspace1"]', priority: 3, definition: 'by_workspace', inclusion_reasons: ['default'] }
+    ]);
   });
 
   test('save and load parameters with dynamic global buckets', async () => {
@@ -482,7 +488,7 @@ bucket_definitions:
 
     const parameters = new RequestParameters({ sub: 'unknown' }, {});
 
-    const q1 = sync_rules.
+    const q1 = (sync_rules.bucketSources[0] as SqlBucketDescriptor).parameterQueries[0];
 
     const lookups = q1.getLookups(parameters);
     expect(lookups).toEqual([ParameterLookup.normalized('by_public_workspace', '1', [])]);
@@ -491,15 +497,27 @@ bucket_definitions:
     parameter_sets.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
     expect(parameter_sets).toEqual([{ workspace_id: 'workspace1' }, { workspace_id: 'workspace3' }]);
 
-    const buckets = await sync_rules
-
-
-
-
+    const buckets = await sync_rules
+      .getBucketParameterQuerier(test_utils.querierOptions(parameters))
+      .querier.queryDynamicBucketDescriptions({
+        getParameterSets(lookups) {
+          return checkpoint.getParameterSets(lookups);
+        }
+      });
     buckets.sort((a, b) => a.bucket.localeCompare(b.bucket));
     expect(buckets).toEqual([
-      {
-
+      {
+        bucket: 'by_public_workspace["workspace1"]',
+        priority: 3,
+        definition: 'by_public_workspace',
+        inclusion_reasons: ['default']
+      },
+      {
+        bucket: 'by_public_workspace["workspace3"]',
+        priority: 3,
+        definition: 'by_public_workspace',
+        inclusion_reasons: ['default']
+      }
     ]);
   });
 
@@ -573,7 +591,7 @@ bucket_definitions:
     const parameters = new RequestParameters({ sub: 'u1' }, {});
 
     // Test intermediate values - could be moved to sync_rules.test.ts
-    const q1 = sync_rules.
+    const q1 = (sync_rules.bucketSources[0] as SqlBucketDescriptor).parameterQueries[0];
     const lookups1 = q1.getLookups(parameters);
     expect(lookups1).toEqual([ParameterLookup.normalized('by_workspace', '1', [])]);
 
@@ -581,7 +599,7 @@ bucket_definitions:
     parameter_sets1.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
     expect(parameter_sets1).toEqual([{ workspace_id: 'workspace1' }]);
 
-    const q2 = sync_rules.
+    const q2 = (sync_rules.bucketSources[0] as SqlBucketDescriptor).parameterQueries[1];
     const lookups2 = q2.getLookups(parameters);
     expect(lookups2).toEqual([ParameterLookup.normalized('by_workspace', '2', ['u1'])]);
 
@@ -591,11 +609,13 @@ bucket_definitions:
 
     // Test final values - the important part
     const buckets = (
-      await sync_rules
-
-
-
-
+      await sync_rules
+        .getBucketParameterQuerier(test_utils.querierOptions(parameters))
+        .querier.queryDynamicBucketDescriptions({
+          getParameterSets(lookups) {
+            return checkpoint.getParameterSets(lookups);
+          }
+        })
     ).map((e) => e.bucket);
     buckets.sort();
     expect(buckets).toEqual(['by_workspace["workspace1"]', 'by_workspace["workspace3"]']);
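The three hunks above replace the old `sync_rules` call (only its first fragment survives in this diff) with the querier API: `getBucketParameterQuerier(...)` wraps the request parameters, and its `querier.queryDynamicBucketDescriptions(...)` resolves dynamic buckets through caller-supplied parameter lookups. Condensed from the added lines (`test_utils.querierOptions` is this package's helper):

```ts
const parameters = new RequestParameters({ sub: 'u1' }, {});

const buckets = await sync_rules
  .getBucketParameterQuerier(test_utils.querierOptions(parameters))
  .querier.queryDynamicBucketDescriptions({
    // The querier delegates lookup resolution to the caller,
    // here backed by the storage checkpoint.
    getParameterSets(lookups) {
      return checkpoint.getParameterSets(lookups);
    }
  });

// Descriptions now carry priority, definition and inclusion_reasons.
expect(buckets).toEqual([
  { bucket: 'by_workspace["workspace1"]', priority: 3, definition: 'by_workspace', inclusion_reasons: ['default'] }
]);
```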
@@ -1976,4 +1996,70 @@ bucket_definitions:
     // we expect 0n and 1n, or 1n and 2n.
     expect(checkpoint2).toBeGreaterThan(checkpoint1);
   });
+
+  test('data with custom types', async () => {
+    await using factory = await generateStorageFactory();
+    const testValue = {
+      sourceTable: TEST_TABLE,
+      tag: storage.SaveOperationTag.INSERT,
+      after: {
+        id: 't1',
+        description: new DateTimeValue('2025-08-28T11:30:00')
+      },
+      afterReplicaId: test_utils.rid('t1')
+    } satisfies SaveOptions;
+
+    {
+      // First, deploy old sync rules and row with date time value
+      const syncRules = await factory.updateSyncRules({
+        content: `
+bucket_definitions:
+  global:
+    data:
+      - SELECT id, description FROM test
+`
+      });
+      const bucketStorage = factory.getInstance(syncRules);
+      await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+        await batch.save(testValue);
+        await batch.commit('1/1');
+      });
+
+      const { checkpoint } = await bucketStorage.getCheckpoint();
+      const batch = await test_utils.fromAsync(
+        bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]]))
+      );
+      expect(batch[0].chunkData.data).toMatchObject([
+        {
+          data: '{"id":"t1","description":"2025-08-28 11:30:00"}'
+        }
+      ]);
+    }
+
+    const syncRules = await factory.updateSyncRules({
+      content: `
+bucket_definitions:
+  global:
+    data:
+      - SELECT id, description FROM test
+
+config:
+  edition: 2
+`
+    });
+    const bucketStorage = factory.getInstance(syncRules);
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.save(testValue);
+      await batch.commit('1/2');
+    });
+    const { checkpoint } = await bucketStorage.getCheckpoint();
+    const batch = await test_utils.fromAsync(
+      bucketStorage.getBucketDataBatch(checkpoint, new Map([['2#global[]', 0n]]))
+    );
+    expect(batch[0].chunkData.data).toMatchObject([
+      {
+        data: '{"id":"t1","description":"2025-08-28T11:30:00"}'
+      }
+    ]);
+  });
 }
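The new `data with custom types` test captures a behavioural difference between sync-rules editions: the same `DateTimeValue` is serialized with a space separator under the default edition, and in ISO-8601 `T` form under `config: edition: 2`, where the bucket is also addressed as `2#global[]` instead of `global[]`. The two expectations side by side (`batch` and `batch2` here stand for the two reads performed in the test):

```ts
// Default edition: bucket 'global[]', SQLite-style datetime text.
expect(batch[0].chunkData.data).toMatchObject([
  { data: '{"id":"t1","description":"2025-08-28 11:30:00"}' }
]);

// Edition 2: bucket '2#global[]', ISO-8601 text with the 'T' separator.
expect(batch2[0].chunkData.data).toMatchObject([
  { data: '{"id":"t1","description":"2025-08-28T11:30:00"}' }
]);
```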