@powersync/service-core-tests 0.10.3 → 0.11.0
This diff shows the contents of publicly released package versions as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
- package/CHANGELOG.md +41 -0
- package/dist/test-utils/general-utils.js +9 -1
- package/dist/test-utils/general-utils.js.map +1 -1
- package/dist/tests/register-compacting-tests.d.ts +0 -11
- package/dist/tests/register-compacting-tests.js +0 -11
- package/dist/tests/register-compacting-tests.js.map +1 -1
- package/dist/tests/register-data-storage-tests.js +209 -156
- package/dist/tests/register-data-storage-tests.js.map +1 -1
- package/dist/tests/register-parameter-compacting-tests.d.ts +2 -0
- package/dist/tests/register-parameter-compacting-tests.js +227 -0
- package/dist/tests/register-parameter-compacting-tests.js.map +1 -0
- package/dist/tests/register-sync-tests.js +16 -15
- package/dist/tests/register-sync-tests.js.map +1 -1
- package/dist/tests/tests-index.d.ts +1 -0
- package/dist/tests/tests-index.js +1 -0
- package/dist/tests/tests-index.js.map +1 -1
- package/package.json +3 -3
- package/src/test-utils/general-utils.ts +9 -9
- package/src/tests/register-compacting-tests.ts +0 -11
- package/src/tests/register-data-storage-tests.ts +205 -190
- package/src/tests/register-parameter-compacting-tests.ts +172 -0
- package/src/tests/register-sync-tests.ts +16 -15
- package/src/tests/tests-index.ts +1 -0
- package/tsconfig.tsbuildinfo +1 -1
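
Most of the churn in register-data-storage-tests comes from one refactor applied to every test: sync rules are now persisted through `factory.updateSyncRules({ content })` and passed to `factory.getInstance(syncRules)` (replacing the old `test_utils.testRules(...)` setup), replication batches end with an explicit `batch.commit(lsn)`, and parameter lookups go through the checkpoint object returned by `bucketStorage.getCheckpoint()`. Below is a minimal sketch of that flow pieced together from the fragments visible in this diff; the import paths, the `after` row payload, and the LSN value are illustrative assumptions, not an excerpt from the package (resource disposal via `await using` is also omitted for brevity).

```js
// Sketch of the 0.11.0 test-setup flow, assembled from this diff.
// Import paths and the row payload are assumptions for illustration only.
import { storage } from '@powersync/service-core';
import { ParameterLookup } from '@powersync/service-sync-rules';
import * as test_utils from '@powersync/service-core-tests';

async function exampleParameterTest(generateStorageFactory) {
  const factory = await generateStorageFactory();

  // Sync rules are persisted through the factory instead of test_utils.testRules(...).
  const syncRules = await factory.updateSyncRules({
    content: `
bucket_definitions:
  mybucket:
    parameters:
      - SELECT group_id FROM test WHERE id1 = token_parameters.user_id
    data: []
`
  });
  const bucketStorage = factory.getInstance(syncRules);

  // Batches are now committed explicitly with an LSN before a checkpoint is read.
  await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
    await batch.save({
      sourceTable: test_utils.makeTestTable('test', ['id']),
      tag: storage.SaveOperationTag.INSERT,
      after: { id: 't1', group_id: 'group1a', id1: 'user1' },
      afterReplicaId: test_utils.rid('t1')
    });
    await batch.commit('1/1');
  });

  // Parameter sets are read from a checkpoint object rather than from the storage itself.
  const checkpoint = await bucketStorage.getCheckpoint();
  return checkpoint.getParameterSets([
    ParameterLookup.normalized('mybucket', '1', ['user1'])
  ]);
}
```

Committing explicitly inside `startBatch` is what gives `getCheckpoint()` a well-defined state to read, which is presumably why every batch in the updated tests now ends with a `commit` (or, in the storage-metrics test, a `keepalive`).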
@@ -79,16 +79,18 @@ export function registerDataStorageTests(generateStorageFactory) {
 test('save and load parameters', async () => {
 const env_1 = { stack: [], error: void 0, hasError: false };
 try {
-const
+const factory = __addDisposableResource(env_1, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 mybucket:
 parameters:
 - SELECT group_id FROM test WHERE id1 = token_parameters.user_id OR id2 = token_parameters.user_id
-data: []
-`
-
-const bucketStorage = factory.getInstance(
-
+data: []
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 await batch.save({
 sourceTable: TEST_TABLE,
 tag: storage.SaveOperationTag.INSERT,
@@ -111,10 +113,10 @@ bucket_definitions:
 },
 afterReplicaId: test_utils.rid('t1')
 });
+await batch.commit('1/1');
 });
-const
-
-]);
+const checkpoint = await bucketStorage.getCheckpoint();
+const parameters = await checkpoint.getParameterSets([ParameterLookup.normalized('mybucket', '1', ['user1'])]);
 expect(parameters).toEqual([
 {
 group_id: 'group1a'
@@ -134,16 +136,18 @@ bucket_definitions:
 test('it should use the latest version', async () => {
 const env_2 = { stack: [], error: void 0, hasError: false };
 try {
-const
+const factory = __addDisposableResource(env_2, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 mybucket:
 parameters:
 - SELECT group_id FROM test WHERE id = token_parameters.user_id
-data: []
-`
-
-const bucketStorage = factory.getInstance(
-
+data: []
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 await batch.save({
 sourceTable: TEST_TABLE,
 tag: storage.SaveOperationTag.INSERT,
@@ -153,8 +157,10 @@ bucket_definitions:
 },
 afterReplicaId: test_utils.rid('user1')
 });
+await batch.commit('1/1');
 });
-const
+const checkpoint1 = await bucketStorage.getCheckpoint();
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 await batch.save({
 sourceTable: TEST_TABLE,
 tag: storage.SaveOperationTag.INSERT,
@@ -164,19 +170,17 @@ bucket_definitions:
 },
 afterReplicaId: test_utils.rid('user1')
 });
+await batch.commit('1/2');
 });
-const
-
-]);
+const checkpoint2 = await bucketStorage.getCheckpoint();
+const parameters = await checkpoint2.getParameterSets([ParameterLookup.normalized('mybucket', '1', ['user1'])]);
 expect(parameters).toEqual([
 {
 group_id: 'group2'
 }
 ]);
 // Use the checkpoint to get older data if relevant
-const parameters2 = await
-ParameterLookup.normalized('mybucket', '1', ['user1'])
-]);
+const parameters2 = await checkpoint1.getParameterSets([ParameterLookup.normalized('mybucket', '1', ['user1'])]);
 expect(parameters2).toEqual([
 {
 group_id: 'group1'
@@ -196,17 +200,19 @@ bucket_definitions:
 test('it should use the latest version after updates', async () => {
 const env_3 = { stack: [], error: void 0, hasError: false };
 try {
-const
+const factory = __addDisposableResource(env_3, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 mybucket:
 parameters:
 - SELECT id AS todo_id
 FROM todos
 WHERE list_id IN token_parameters.list_id
-data: []
-`
-
-const bucketStorage = factory.getInstance(
+data: []
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
 const table = test_utils.makeTestTable('todos', ['id', 'list_id']);
 await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 // Create two todos which initially belong to different lists
@@ -228,8 +234,9 @@ bucket_definitions:
 },
 afterReplicaId: test_utils.rid('todo2')
 });
+await batch.commit('1/1');
 });
-
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 // Update the second todo item to now belong to list 1
 await batch.save({
 sourceTable: table,
@@ -240,11 +247,13 @@ bucket_definitions:
 },
 afterReplicaId: test_utils.rid('todo2')
 });
+await batch.commit('1/1');
 });
 // We specifically request the todo_ids for both lists.
 // There removal operation for the association of `list2`::`todo2` should not interfere with the new
 // association of `list1`::`todo2`
-const
+const checkpoint = await bucketStorage.getCheckpoint();
+const parameters = await checkpoint.getParameterSets([
 ParameterLookup.normalized('mybucket', '1', ['list1']),
 ParameterLookup.normalized('mybucket', '1', ['list2'])
 ]);
@@ -270,16 +279,18 @@ bucket_definitions:
 test('save and load parameters with different number types', async () => {
 const env_4 = { stack: [], error: void 0, hasError: false };
 try {
-const
+const factory = __addDisposableResource(env_4, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 mybucket:
 parameters:
 - SELECT group_id FROM test WHERE n1 = token_parameters.n1 and f2 = token_parameters.f2 and f3 = token_parameters.f3
 data: []
-`
-
-const bucketStorage = factory.getInstance(
-
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 await batch.save({
 sourceTable: TEST_TABLE,
 tag: storage.SaveOperationTag.INSERT,
@@ -292,18 +303,19 @@ bucket_definitions:
 },
 afterReplicaId: test_utils.rid('t1')
 });
+await batch.commit('1/1');
 });
 const TEST_PARAMS = { group_id: 'group1' };
-const checkpoint =
-const parameters1 = await
+const checkpoint = await bucketStorage.getCheckpoint();
+const parameters1 = await checkpoint.getParameterSets([
 ParameterLookup.normalized('mybucket', '1', [314n, 314, 3.14])
 ]);
 expect(parameters1).toEqual([TEST_PARAMS]);
-const parameters2 = await
+const parameters2 = await checkpoint.getParameterSets([
 ParameterLookup.normalized('mybucket', '1', [314, 314n, 3.14])
 ]);
 expect(parameters2).toEqual([TEST_PARAMS]);
-const parameters3 = await
+const parameters3 = await checkpoint.getParameterSets([
 ParameterLookup.normalized('mybucket', '1', [314n, 314, 3])
 ]);
 expect(parameters3).toEqual([]);
@@ -324,16 +336,18 @@ bucket_definitions:
 // This ensures serialization / deserialization of "current_data" is done correctly.
 // This specific case tested here cannot happen with postgres in practice, but we still
 // test this to ensure correct deserialization.
-const
+const factory = __addDisposableResource(env_5, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 mybucket:
 parameters:
 - SELECT group_id FROM test WHERE n1 = token_parameters.n1
 data: []
-`
-
-const bucketStorage = factory.getInstance(
-
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 await batch.save({
 sourceTable: TEST_TABLE,
 tag: storage.SaveOperationTag.INSERT,
@@ -356,10 +370,11 @@ bucket_definitions:
 },
 afterReplicaId: test_utils.rid('t1')
 });
+await batch.commit('1/1');
 });
 const TEST_PARAMS = { group_id: 'group1' };
-const checkpoint =
-const parameters1 = await
+const checkpoint = await bucketStorage.getCheckpoint();
+const parameters1 = await checkpoint.getParameterSets([
 ParameterLookup.normalized('mybucket', '1', [1152921504606846976n])
 ]);
 expect(parameters1).toEqual([TEST_PARAMS]);
@@ -377,15 +392,17 @@ bucket_definitions:
 test('removing row', async () => {
 const env_6 = { stack: [], error: void 0, hasError: false };
 try {
-const
+const factory = __addDisposableResource(env_6, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 global:
 data:
 - SELECT id, description FROM "%"
-`
-
-const bucketStorage = factory.getInstance(
-
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 const sourceTable = TEST_TABLE;
 await batch.save({
 sourceTable,
@@ -401,8 +418,9 @@ bucket_definitions:
 tag: storage.SaveOperationTag.DELETE,
 beforeReplicaId: test_utils.rid('test1')
 });
+await batch.commit('1/1');
 });
-const checkpoint =
+const { checkpoint } = await bucketStorage.getCheckpoint();
 const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]])));
 const data = batch[0].chunkData.data.map((d) => {
 return {
@@ -440,18 +458,20 @@ bucket_definitions:
 const env_7 = { stack: [], error: void 0, hasError: false };
 try {
 const WORKSPACE_TABLE = test_utils.makeTestTable('workspace', ['id']);
-const
+const factory = __addDisposableResource(env_7, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 by_workspace:
 parameters:
 - SELECT id as workspace_id FROM workspace WHERE
 workspace."userId" = token_parameters.user_id
 data: []
-`
-
-const
-const bucketStorage = factory.getInstance(
-
+`
+});
+const sync_rules = syncRules.parsed(test_utils.PARSE_OPTIONS).sync_rules;
+const bucketStorage = factory.getInstance(syncRules);
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 await batch.save({
 sourceTable: WORKSPACE_TABLE,
 tag: storage.SaveOperationTag.INSERT,
@@ -461,17 +481,18 @@ bucket_definitions:
 },
 afterReplicaId: test_utils.rid('workspace1')
 });
+await batch.commit('1/1');
 });
-const checkpoint =
+const checkpoint = await bucketStorage.getCheckpoint();
 const parameters = new RequestParameters({ sub: 'u1' }, {});
 const q1 = sync_rules.bucketDescriptors[0].parameterQueries[0];
 const lookups = q1.getLookups(parameters);
 expect(lookups).toEqual([ParameterLookup.normalized('by_workspace', '1', ['u1'])]);
-const parameter_sets = await
+const parameter_sets = await checkpoint.getParameterSets(lookups);
 expect(parameter_sets).toEqual([{ workspace_id: 'workspace1' }]);
 const buckets = await sync_rules.getBucketParameterQuerier(parameters).queryDynamicBucketDescriptions({
 getParameterSets(lookups) {
-return
+return checkpoint.getParameterSets(lookups);
 }
 });
 expect(buckets).toEqual([{ bucket: 'by_workspace["workspace1"]', priority: 3 }]);
@@ -490,18 +511,20 @@ bucket_definitions:
 const env_8 = { stack: [], error: void 0, hasError: false };
 try {
 const WORKSPACE_TABLE = test_utils.makeTestTable('workspace');
-const
+const factory = __addDisposableResource(env_8, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 by_public_workspace:
 parameters:
 - SELECT id as workspace_id FROM workspace WHERE
 workspace.visibility = 'public'
 data: []
-`
-
-const
-const bucketStorage = factory.getInstance(
-
+`
+});
+const sync_rules = syncRules.parsed(test_utils.PARSE_OPTIONS).sync_rules;
+const bucketStorage = factory.getInstance(syncRules);
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 await batch.save({
 sourceTable: WORKSPACE_TABLE,
 tag: storage.SaveOperationTag.INSERT,
@@ -529,18 +552,19 @@ bucket_definitions:
 },
 afterReplicaId: test_utils.rid('workspace3')
 });
+await batch.commit('1/1');
 });
-const checkpoint =
+const checkpoint = await bucketStorage.getCheckpoint();
 const parameters = new RequestParameters({ sub: 'unknown' }, {});
 const q1 = sync_rules.bucketDescriptors[0].parameterQueries[0];
 const lookups = q1.getLookups(parameters);
 expect(lookups).toEqual([ParameterLookup.normalized('by_public_workspace', '1', [])]);
-const parameter_sets = await
+const parameter_sets = await checkpoint.getParameterSets(lookups);
 parameter_sets.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
 expect(parameter_sets).toEqual([{ workspace_id: 'workspace1' }, { workspace_id: 'workspace3' }]);
 const buckets = await sync_rules.getBucketParameterQuerier(parameters).queryDynamicBucketDescriptions({
 getParameterSets(lookups) {
-return
+return checkpoint.getParameterSets(lookups);
 }
 });
 buckets.sort((a, b) => a.bucket.localeCompare(b.bucket));
@@ -563,7 +587,9 @@ bucket_definitions:
 const env_9 = { stack: [], error: void 0, hasError: false };
 try {
 const WORKSPACE_TABLE = test_utils.makeTestTable('workspace');
-const
+const factory = __addDisposableResource(env_9, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 by_workspace:
 parameters:
@@ -572,11 +598,11 @@ bucket_definitions:
 - SELECT id as workspace_id FROM workspace WHERE
 workspace.user_id = token_parameters.user_id
 data: []
-`
-
-const
-const bucketStorage = factory.getInstance(
-
+`
+});
+const sync_rules = syncRules.parsed(test_utils.PARSE_OPTIONS).sync_rules;
+const bucketStorage = factory.getInstance(syncRules);
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 await batch.save({
 sourceTable: WORKSPACE_TABLE,
 tag: storage.SaveOperationTag.INSERT,
@@ -615,26 +641,27 @@ bucket_definitions:
 },
 afterReplicaId: test_utils.rid('workspace4')
 });
+await batch.commit('1/1');
 });
-const checkpoint =
+const checkpoint = await bucketStorage.getCheckpoint();
 const parameters = new RequestParameters({ sub: 'u1' }, {});
 // Test intermediate values - could be moved to sync_rules.test.ts
 const q1 = sync_rules.bucketDescriptors[0].parameterQueries[0];
 const lookups1 = q1.getLookups(parameters);
 expect(lookups1).toEqual([ParameterLookup.normalized('by_workspace', '1', [])]);
-const parameter_sets1 = await
+const parameter_sets1 = await checkpoint.getParameterSets(lookups1);
 parameter_sets1.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
 expect(parameter_sets1).toEqual([{ workspace_id: 'workspace1' }]);
 const q2 = sync_rules.bucketDescriptors[0].parameterQueries[1];
 const lookups2 = q2.getLookups(parameters);
 expect(lookups2).toEqual([ParameterLookup.normalized('by_workspace', '2', ['u1'])]);
-const parameter_sets2 = await
+const parameter_sets2 = await checkpoint.getParameterSets(lookups2);
 parameter_sets2.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
 expect(parameter_sets2).toEqual([{ workspace_id: 'workspace3' }]);
 // Test final values - the important part
 const buckets = (await sync_rules.getBucketParameterQuerier(parameters).queryDynamicBucketDescriptions({
 getParameterSets(lookups) {
-return
+return checkpoint.getParameterSets(lookups);
 }
 })).map((e) => e.bucket);
 buckets.sort();
@@ -653,16 +680,18 @@ bucket_definitions:
 test('changing client ids', async () => {
 const env_10 = { stack: [], error: void 0, hasError: false };
 try {
-const
+const factory = __addDisposableResource(env_10, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 global:
 data:
 - SELECT client_id as id, description FROM "%"
-`
-
-const bucketStorage = factory.getInstance(
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
 const sourceTable = TEST_TABLE;
-
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 await batch.save({
 sourceTable,
 tag: storage.SaveOperationTag.INSERT,
@@ -693,8 +722,9 @@ bucket_definitions:
 },
 afterReplicaId: test_utils.rid('test2')
 });
+await batch.commit('1/1');
 });
-const checkpoint =
+const { checkpoint } = await bucketStorage.getCheckpoint();
 const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]])));
 const data = batch[0].chunkData.data.map((d) => {
 return {
@@ -722,14 +752,16 @@ bucket_definitions:
 test('re-apply delete', async () => {
 const env_11 = { stack: [], error: void 0, hasError: false };
 try {
-const
+const factory = __addDisposableResource(env_11, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 global:
 data:
 - SELECT id, description FROM "%"
-`
-
-const bucketStorage = factory.getInstance(
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
 await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 const sourceTable = TEST_TABLE;
 await batch.save({
@@ -749,8 +781,9 @@ bucket_definitions:
 tag: storage.SaveOperationTag.DELETE,
 beforeReplicaId: test_utils.rid('test1')
 });
+await batch.commit('1/1');
 });
-
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 const sourceTable = TEST_TABLE;
 await batch.save({
 sourceTable,
@@ -758,7 +791,7 @@ bucket_definitions:
 beforeReplicaId: test_utils.rid('test1')
 });
 });
-const checkpoint =
+const { checkpoint } = await bucketStorage.getCheckpoint();
 const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]])));
 const data = batch[0].chunkData.data.map((d) => {
 return {
@@ -795,14 +828,16 @@ bucket_definitions:
 test('re-apply update + delete', async () => {
 const env_12 = { stack: [], error: void 0, hasError: false };
 try {
-const
+const factory = __addDisposableResource(env_12, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 global:
 data:
 - SELECT id, description FROM "%"
-`
-
-const bucketStorage = factory.getInstance(
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
 await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 const sourceTable = TEST_TABLE;
 await batch.save({
@@ -840,8 +875,9 @@ bucket_definitions:
 tag: storage.SaveOperationTag.DELETE,
 beforeReplicaId: test_utils.rid('test1')
 });
+await batch.commit('1/1');
 });
-
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 const sourceTable = TEST_TABLE;
 await batch.save({
 sourceTable,
@@ -866,8 +902,9 @@ bucket_definitions:
 tag: storage.SaveOperationTag.DELETE,
 beforeReplicaId: test_utils.rid('test1')
 });
+await batch.commit('2/1');
 });
-const checkpoint =
+const { checkpoint } = await bucketStorage.getCheckpoint();
 const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]])));
 const data = batch[0].chunkData.data.map((d) => {
 return {
@@ -906,15 +943,17 @@ bucket_definitions:
 test('truncate parameters', async () => {
 const env_13 = { stack: [], error: void 0, hasError: false };
 try {
-const
+const factory = __addDisposableResource(env_13, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 mybucket:
 parameters:
 - SELECT group_id FROM test WHERE id1 = token_parameters.user_id OR id2 = token_parameters.user_id
 data: []
-`
-
-const bucketStorage = factory.getInstance(
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
 await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 await batch.save({
 sourceTable: TEST_TABLE,
@@ -929,10 +968,8 @@ bucket_definitions:
 });
 await batch.truncate([TEST_TABLE]);
 });
-const
-const parameters = await
-ParameterLookup.normalized('mybucket', '1', ['user1'])
-]);
+const checkpoint = await bucketStorage.getCheckpoint();
+const parameters = await checkpoint.getParameterSets([ParameterLookup.normalized('mybucket', '1', ['user1'])]);
 expect(parameters).toEqual([]);
 }
 catch (e_13) {
@@ -955,14 +992,16 @@ bucket_definitions:
 // It can break at two places:
 // 1. Not getting the correct "current_data" state for each operation.
 // 2. Output order not being correct.
-const
+const factory = __addDisposableResource(env_14, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 global:
 data:
 - SELECT id, description FROM "test"
-`
-
-const bucketStorage = factory.getInstance(
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
 // Pre-setup
 const result1 = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 const sourceTable = TEST_TABLE;
@@ -1097,12 +1136,6 @@ bucket_definitions:
 test('changed data with replica identity full', async () => {
 const env_15 = { stack: [], error: void 0, hasError: false };
 try {
-const sync_rules = test_utils.testRules(`
-bucket_definitions:
-global:
-data:
-- SELECT id, description FROM "test"
-`);
 function rid2(id, description) {
 return getUuidReplicaIdentityBson({ id, description }, [
 { name: 'id', type: 'VARCHAR', typeId: 25 },
@@ -1110,7 +1143,15 @@ bucket_definitions:
 ]);
 }
 const factory = __addDisposableResource(env_15, await generateStorageFactory(), true);
-const
+const syncRules = await factory.updateSyncRules({
+content: `
+bucket_definitions:
+global:
+data:
+- SELECT id, description FROM "test"
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
 const sourceTable = test_utils.makeTestTable('test', ['id', 'description']);
 // Pre-setup
 const result1 = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
@@ -1203,12 +1244,6 @@ bucket_definitions:
 test('unchanged data with replica identity full', async () => {
 const env_16 = { stack: [], error: void 0, hasError: false };
 try {
-const sync_rules = test_utils.testRules(`
-bucket_definitions:
-global:
-data:
-- SELECT id, description FROM "test"
-`);
 function rid2(id, description) {
 return getUuidReplicaIdentityBson({ id, description }, [
 { name: 'id', type: 'VARCHAR', typeId: 25 },
@@ -1216,7 +1251,15 @@ bucket_definitions:
 ]);
 }
 const factory = __addDisposableResource(env_16, await generateStorageFactory(), true);
-const
+const syncRules = await factory.updateSyncRules({
+content: `
+bucket_definitions:
+global:
+data:
+- SELECT id, description FROM "test"
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
 const sourceTable = test_utils.makeTestTable('test', ['id', 'description']);
 // Pre-setup
 const result1 = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
@@ -1306,15 +1349,17 @@ bucket_definitions:
 // but large enough in size to be split over multiple returned batches.
 // The specific batch splits is an implementation detail of the storage driver,
 // and the test will have to updated when other implementations are added.
-const
+const factory = __addDisposableResource(env_17, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 global:
 data:
 - SELECT id, description FROM "%"
-`
-
-const bucketStorage = factory.getInstance(
-
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 const sourceTable = TEST_TABLE;
 const largeDescription = '0123456789'.repeat(12_000_00);
 await batch.save({
@@ -1354,8 +1399,9 @@ bucket_definitions:
 },
 afterReplicaId: test_utils.rid('test3')
 });
+await batch.commit('1/1');
 });
-const checkpoint =
+const { checkpoint } = await bucketStorage.getCheckpoint();
 const options = {
 chunkLimitBytes: 16 * 1024 * 1024
 };
@@ -1397,15 +1443,17 @@ bucket_definitions:
 const env_18 = { stack: [], error: void 0, hasError: false };
 try {
 // Test syncing a batch of data that is limited by count.
-const
+const factory = __addDisposableResource(env_18, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 global:
 data:
 - SELECT id, description FROM "%"
-`
-
-const bucketStorage = factory.getInstance(
-
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 const sourceTable = TEST_TABLE;
 for (let i = 1; i <= 6; i++) {
 await batch.save({
@@ -1418,8 +1466,9 @@ bucket_definitions:
 afterReplicaId: `test${i}`
 });
 }
+await batch.commit('1/1');
 });
-const checkpoint =
+const { checkpoint } = await bucketStorage.getCheckpoint();
 const batch1 = await test_utils.oneFromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]]), { limit: 4 }));
 expect(test_utils.getBatchData(batch1)).toEqual([
 { op_id: '1', op: 'PUT', object_id: 'test1', checksum: 2871785649 },
@@ -1464,7 +1513,9 @@ bucket_definitions:
 const setup = async (options) => {
 const env_19 = { stack: [], error: void 0, hasError: false };
 try {
-const
+const factory = __addDisposableResource(env_19, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 global1:
 data:
@@ -1472,10 +1523,10 @@ bucket_definitions:
 global2:
 data:
 - SELECT id, description FROM test WHERE bucket = 'global2'
-`
-
-const bucketStorage = factory.getInstance(
-
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 const sourceTable = TEST_TABLE;
 for (let i = 1; i <= 10; i++) {
 await batch.save({
@@ -1489,8 +1540,9 @@ bucket_definitions:
 afterReplicaId: `test${i}`
 });
 }
+await batch.commit('1/1');
 });
-const checkpoint =
+const { checkpoint } = await bucketStorage.getCheckpoint();
 return await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([
 ['global1[]', 0n],
 ['global2[]', 0n]
@@ -1606,7 +1658,9 @@ bucket_definitions:
 });
 const r = await f.configureSyncRules({ content: 'bucket_definitions: {}', validate: false });
 const storage = f.getInstance(r.persisted_sync_rules);
-await storage.
+await storage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+await batch.keepalive('1/0');
+});
 const metrics2 = await f.getStorageMetrics();
 expect(metrics2).toMatchSnapshot();
 }
@@ -1623,16 +1677,18 @@ bucket_definitions:
 test('invalidate cached parsed sync rules', async () => {
 const env_21 = { stack: [], error: void 0, hasError: false };
 try {
-const
+const bucketStorageFactory = __addDisposableResource(env_21, await generateStorageFactory(), true);
+const syncRules = await bucketStorageFactory.updateSyncRules({
+content: `
 bucket_definitions:
 by_workspace:
 parameters:
 - SELECT id as workspace_id FROM workspace WHERE
 workspace."userId" = token_parameters.user_id
 data: []
-`
-
-const syncBucketStorage = bucketStorageFactory.getInstance(
+`
+});
+const syncBucketStorage = bucketStorageFactory.getInstance(syncRules);
 const parsedSchema1 = syncBucketStorage.getParsedSyncRules({
 defaultSchema: 'public'
 });
@@ -1667,12 +1723,11 @@ bucket_definitions:
 content: `
 bucket_definitions:
 mybucket:
-data: []
+data: []
 `,
 validate: false
 });
 const bucketStorage = factory.getInstance(r.persisted_sync_rules);
-await bucketStorage.autoActivate();
 const abortController = new AbortController();
 context.onTestFinished(() => abortController.abort());
 const iter = bucketStorage
@@ -1719,7 +1774,6 @@ bucket_definitions:
 validate: false
 });
 const bucketStorage = factory.getInstance(r.persisted_sync_rules);
-await bucketStorage.autoActivate();
 const abortController = new AbortController();
 context.onTestFinished(() => abortController.abort());
 const iter = bucketStorage
@@ -1787,7 +1841,6 @@ bucket_definitions:
 validate: false
 });
 const bucketStorage = factory.getInstance(r.persisted_sync_rules);
-await bucketStorage.autoActivate();
 bucketStorage.setWriteCheckpointMode(storage.WriteCheckpointMode.CUSTOM);
 const abortController = new AbortController();
 context.onTestFinished(() => abortController.abort());
@@ -1835,7 +1888,6 @@ bucket_definitions:
 validate: false
 });
 const bucketStorage = factory.getInstance(r.persisted_sync_rules);
-await bucketStorage.autoActivate();
 bucketStorage.setWriteCheckpointMode(storage.WriteCheckpointMode.CUSTOM);
 const abortController = new AbortController();
 context.onTestFinished(() => abortController.abort());
@@ -1885,7 +1937,6 @@ bucket_definitions:
 validate: false
 });
 const bucketStorage = factory.getInstance(r.persisted_sync_rules);
-await bucketStorage.autoActivate();
 bucketStorage.setWriteCheckpointMode(storage.WriteCheckpointMode.CUSTOM);
 const abortController = new AbortController();
 context.onTestFinished(() => abortController.abort());
@@ -1959,15 +2010,17 @@ bucket_definitions:
 // Test syncing a batch of data that is small in count,
 // but large enough in size to be split over multiple returned chunks.
 // Similar to the above test, but splits over 1MB chunks.
-const
+const factory = __addDisposableResource(env_27, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 global:
 data:
 - SELECT id FROM test
 - SELECT id FROM test_ignore WHERE false
-`
-
-const bucketStorage = factory.getInstance(
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
 const sourceTable = test_utils.makeTestTable('test', ['id']);
 const sourceTableIgnore = test_utils.makeTestTable('test_ignore', ['id']);
 const result1 = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
|