@powersync/service-core-tests 0.10.4 → 0.12.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +57 -0
- package/LICENSE +3 -3
- package/dist/test-utils/general-utils.d.ts +2 -0
- package/dist/test-utils/general-utils.js +17 -1
- package/dist/test-utils/general-utils.js.map +1 -1
- package/dist/tests/register-compacting-tests.d.ts +0 -11
- package/dist/tests/register-compacting-tests.js +119 -34
- package/dist/tests/register-compacting-tests.js.map +1 -1
- package/dist/tests/register-data-storage-tests.js +308 -167
- package/dist/tests/register-data-storage-tests.js.map +1 -1
- package/dist/tests/register-parameter-compacting-tests.d.ts +2 -0
- package/dist/tests/register-parameter-compacting-tests.js +227 -0
- package/dist/tests/register-parameter-compacting-tests.js.map +1 -0
- package/dist/tests/register-sync-tests.js +166 -61
- package/dist/tests/register-sync-tests.js.map +1 -1
- package/dist/tests/tests-index.d.ts +1 -0
- package/dist/tests/tests-index.js +1 -0
- package/dist/tests/tests-index.js.map +1 -1
- package/package.json +5 -5
- package/src/test-utils/general-utils.ts +19 -10
- package/src/tests/register-compacting-tests.ts +118 -39
- package/src/tests/register-data-storage-tests.ts +311 -210
- package/src/tests/register-parameter-compacting-tests.ts +172 -0
- package/src/tests/register-sync-tests.ts +160 -61
- package/src/tests/tests-index.ts +1 -0
- package/tsconfig.tsbuildinfo +1 -1
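All hunks shown below come from package/dist/tests/register-data-storage-tests.js. The change repeated throughout the file: tests now deploy sync rules with factory.updateSyncRules({ content }) and hand the result to factory.getInstance(...) (instead of building a pre-parsed sync_rules object via test_utils.testRules), every startBatch ends with an explicit batch.commit(lsn), and results are read back through bucketStorage.getCheckpoint(). A minimal sketch of that pattern, assembled from the hunks below (generateStorageFactory, TEST_TABLE and test_utils are this package's own test harness; this is a sketch, not a standalone program):

import { storage } from '@powersync/service-core';
import { ParameterLookup } from '@powersync/service-sync-rules';
import * as test_utils from '../test-utils/test-utils-index.js';

// Deploy sync rules and get the storage instance bound to them.
const factory = await generateStorageFactory();
const syncRules = await factory.updateSyncRules({
  content: `
bucket_definitions:
  mybucket:
    parameters:
      - SELECT group_id FROM test WHERE id = token_parameters.user_id
    data: []
`
});
const bucketStorage = factory.getInstance(syncRules);

// Write a row; commits are now explicit per batch.
await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
  await batch.save({
    sourceTable: TEST_TABLE,
    tag: storage.SaveOperationTag.INSERT,
    after: { id: 'user1', group_id: 'group1' },
    afterReplicaId: test_utils.rid('user1')
  });
  await batch.commit('1/1');
});

// Read parameter sets back from the committed checkpoint.
const checkpoint = await bucketStorage.getCheckpoint();
const parameters = await checkpoint.getParameterSets([
  ParameterLookup.normalized('mybucket', '1', ['user1'])
]);
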
@@ -51,7 +51,7 @@ var __disposeResources = (this && this.__disposeResources) || (function (SuppressedError) {
     return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
 });
 import { getUuidReplicaIdentityBson, storage } from '@powersync/service-core';
-import { ParameterLookup, RequestParameters } from '@powersync/service-sync-rules';
+import { DateTimeValue, ParameterLookup, RequestParameters } from '@powersync/service-sync-rules';
 import { expect, test, describe } from 'vitest';
 import * as test_utils from '../test-utils/test-utils-index.js';
 export const TEST_TABLE = test_utils.makeTestTable('test', ['id']);
@@ -79,16 +79,18 @@ export function registerDataStorageTests(generateStorageFactory) {
 test('save and load parameters', async () => {
 const env_1 = { stack: [], error: void 0, hasError: false };
 try {
-const
+const factory = __addDisposableResource(env_1, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 mybucket:
 parameters:
 - SELECT group_id FROM test WHERE id1 = token_parameters.user_id OR id2 = token_parameters.user_id
-data: []
-`
-
-const bucketStorage = factory.getInstance(
-
+data: []
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 await batch.save({
 sourceTable: TEST_TABLE,
 tag: storage.SaveOperationTag.INSERT,
@@ -111,10 +113,10 @@ bucket_definitions:
 },
 afterReplicaId: test_utils.rid('t1')
 });
+await batch.commit('1/1');
 });
-const
-
-]);
+const checkpoint = await bucketStorage.getCheckpoint();
+const parameters = await checkpoint.getParameterSets([ParameterLookup.normalized('mybucket', '1', ['user1'])]);
 expect(parameters).toEqual([
 {
 group_id: 'group1a'
@@ -134,16 +136,18 @@ bucket_definitions:
 test('it should use the latest version', async () => {
 const env_2 = { stack: [], error: void 0, hasError: false };
 try {
-const
+const factory = __addDisposableResource(env_2, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 mybucket:
 parameters:
 - SELECT group_id FROM test WHERE id = token_parameters.user_id
-data: []
-`
-
-const bucketStorage = factory.getInstance(
-
+data: []
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 await batch.save({
 sourceTable: TEST_TABLE,
 tag: storage.SaveOperationTag.INSERT,
@@ -153,8 +157,10 @@ bucket_definitions:
 },
 afterReplicaId: test_utils.rid('user1')
 });
+await batch.commit('1/1');
 });
-const
+const checkpoint1 = await bucketStorage.getCheckpoint();
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 await batch.save({
 sourceTable: TEST_TABLE,
 tag: storage.SaveOperationTag.INSERT,
@@ -164,19 +170,17 @@ bucket_definitions:
 },
 afterReplicaId: test_utils.rid('user1')
 });
+await batch.commit('1/2');
 });
-const
-
-]);
+const checkpoint2 = await bucketStorage.getCheckpoint();
+const parameters = await checkpoint2.getParameterSets([ParameterLookup.normalized('mybucket', '1', ['user1'])]);
 expect(parameters).toEqual([
 {
 group_id: 'group2'
 }
 ]);
 // Use the checkpoint to get older data if relevant
-const parameters2 = await
-ParameterLookup.normalized('mybucket', '1', ['user1'])
-]);
+const parameters2 = await checkpoint1.getParameterSets([ParameterLookup.normalized('mybucket', '1', ['user1'])]);
 expect(parameters2).toEqual([
 {
 group_id: 'group1'
@@ -196,17 +200,19 @@ bucket_definitions:
 test('it should use the latest version after updates', async () => {
 const env_3 = { stack: [], error: void 0, hasError: false };
 try {
-const
+const factory = __addDisposableResource(env_3, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 mybucket:
 parameters:
 - SELECT id AS todo_id
 FROM todos
 WHERE list_id IN token_parameters.list_id
-data: []
-`
-
-const bucketStorage = factory.getInstance(
+data: []
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
 const table = test_utils.makeTestTable('todos', ['id', 'list_id']);
 await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 // Create two todos which initially belong to different lists
@@ -228,8 +234,9 @@ bucket_definitions:
 },
 afterReplicaId: test_utils.rid('todo2')
 });
+await batch.commit('1/1');
 });
-
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 // Update the second todo item to now belong to list 1
 await batch.save({
 sourceTable: table,
@@ -240,11 +247,13 @@ bucket_definitions:
 },
 afterReplicaId: test_utils.rid('todo2')
 });
+await batch.commit('1/1');
 });
 // We specifically request the todo_ids for both lists.
 // There removal operation for the association of `list2`::`todo2` should not interfere with the new
 // association of `list1`::`todo2`
-const
+const checkpoint = await bucketStorage.getCheckpoint();
+const parameters = await checkpoint.getParameterSets([
 ParameterLookup.normalized('mybucket', '1', ['list1']),
 ParameterLookup.normalized('mybucket', '1', ['list2'])
 ]);
@@ -270,16 +279,18 @@ bucket_definitions:
 test('save and load parameters with different number types', async () => {
 const env_4 = { stack: [], error: void 0, hasError: false };
 try {
-const
+const factory = __addDisposableResource(env_4, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 mybucket:
 parameters:
 - SELECT group_id FROM test WHERE n1 = token_parameters.n1 and f2 = token_parameters.f2 and f3 = token_parameters.f3
 data: []
-`
-
-const bucketStorage = factory.getInstance(
-
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 await batch.save({
 sourceTable: TEST_TABLE,
 tag: storage.SaveOperationTag.INSERT,
@@ -292,18 +303,19 @@ bucket_definitions:
 },
 afterReplicaId: test_utils.rid('t1')
 });
+await batch.commit('1/1');
 });
 const TEST_PARAMS = { group_id: 'group1' };
-const checkpoint =
-const parameters1 = await
+const checkpoint = await bucketStorage.getCheckpoint();
+const parameters1 = await checkpoint.getParameterSets([
 ParameterLookup.normalized('mybucket', '1', [314n, 314, 3.14])
 ]);
 expect(parameters1).toEqual([TEST_PARAMS]);
-const parameters2 = await
+const parameters2 = await checkpoint.getParameterSets([
 ParameterLookup.normalized('mybucket', '1', [314, 314n, 3.14])
 ]);
 expect(parameters2).toEqual([TEST_PARAMS]);
-const parameters3 = await
+const parameters3 = await checkpoint.getParameterSets([
 ParameterLookup.normalized('mybucket', '1', [314n, 314, 3])
 ]);
 expect(parameters3).toEqual([]);
@@ -324,16 +336,18 @@ bucket_definitions:
 // This ensures serialization / deserialization of "current_data" is done correctly.
 // This specific case tested here cannot happen with postgres in practice, but we still
 // test this to ensure correct deserialization.
-const
+const factory = __addDisposableResource(env_5, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 mybucket:
 parameters:
 - SELECT group_id FROM test WHERE n1 = token_parameters.n1
 data: []
-`
-
-const bucketStorage = factory.getInstance(
-
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 await batch.save({
 sourceTable: TEST_TABLE,
 tag: storage.SaveOperationTag.INSERT,
@@ -356,10 +370,11 @@ bucket_definitions:
 },
 afterReplicaId: test_utils.rid('t1')
 });
+await batch.commit('1/1');
 });
 const TEST_PARAMS = { group_id: 'group1' };
-const checkpoint =
-const parameters1 = await
+const checkpoint = await bucketStorage.getCheckpoint();
+const parameters1 = await checkpoint.getParameterSets([
 ParameterLookup.normalized('mybucket', '1', [1152921504606846976n])
 ]);
 expect(parameters1).toEqual([TEST_PARAMS]);
@@ -377,15 +392,17 @@ bucket_definitions:
 test('removing row', async () => {
 const env_6 = { stack: [], error: void 0, hasError: false };
 try {
-const
+const factory = __addDisposableResource(env_6, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 global:
 data:
 - SELECT id, description FROM "%"
-`
-
-const bucketStorage = factory.getInstance(
-
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 const sourceTable = TEST_TABLE;
 await batch.save({
 sourceTable,
@@ -401,8 +418,9 @@ bucket_definitions:
 tag: storage.SaveOperationTag.DELETE,
 beforeReplicaId: test_utils.rid('test1')
 });
+await batch.commit('1/1');
 });
-const checkpoint =
+const { checkpoint } = await bucketStorage.getCheckpoint();
 const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]])));
 const data = batch[0].chunkData.data.map((d) => {
 return {
@@ -440,18 +458,20 @@ bucket_definitions:
 const env_7 = { stack: [], error: void 0, hasError: false };
 try {
 const WORKSPACE_TABLE = test_utils.makeTestTable('workspace', ['id']);
-const
+const factory = __addDisposableResource(env_7, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 by_workspace:
 parameters:
 - SELECT id as workspace_id FROM workspace WHERE
 workspace."userId" = token_parameters.user_id
 data: []
-`
-
-const
-const bucketStorage = factory.getInstance(
-
+`
+});
+const sync_rules = syncRules.parsed(test_utils.PARSE_OPTIONS).sync_rules;
+const bucketStorage = factory.getInstance(syncRules);
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 await batch.save({
 sourceTable: WORKSPACE_TABLE,
 tag: storage.SaveOperationTag.INSERT,
@@ -461,20 +481,25 @@ bucket_definitions:
 },
 afterReplicaId: test_utils.rid('workspace1')
 });
+await batch.commit('1/1');
 });
-const checkpoint =
+const checkpoint = await bucketStorage.getCheckpoint();
 const parameters = new RequestParameters({ sub: 'u1' }, {});
-const q1 = sync_rules.
+const q1 = sync_rules.bucketSources[0].parameterQueries[0];
 const lookups = q1.getLookups(parameters);
 expect(lookups).toEqual([ParameterLookup.normalized('by_workspace', '1', ['u1'])]);
-const parameter_sets = await
+const parameter_sets = await checkpoint.getParameterSets(lookups);
 expect(parameter_sets).toEqual([{ workspace_id: 'workspace1' }]);
-const buckets = await sync_rules
+const buckets = await sync_rules
+.getBucketParameterQuerier(test_utils.querierOptions(parameters))
+.querier.queryDynamicBucketDescriptions({
 getParameterSets(lookups) {
-return
+return checkpoint.getParameterSets(lookups);
 }
 });
-expect(buckets).toEqual([
+expect(buckets).toEqual([
+{ bucket: 'by_workspace["workspace1"]', priority: 3, definition: 'by_workspace', inclusion_reasons: ['default'] }
+]);
 }
 catch (e_7) {
 env_7.error = e_7;
@@ -490,18 +515,20 @@ bucket_definitions:
 const env_8 = { stack: [], error: void 0, hasError: false };
 try {
 const WORKSPACE_TABLE = test_utils.makeTestTable('workspace');
-const
+const factory = __addDisposableResource(env_8, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 by_public_workspace:
 parameters:
 - SELECT id as workspace_id FROM workspace WHERE
 workspace.visibility = 'public'
 data: []
-`
-
-const
-const bucketStorage = factory.getInstance(
-
+`
+});
+const sync_rules = syncRules.parsed(test_utils.PARSE_OPTIONS).sync_rules;
+const bucketStorage = factory.getInstance(syncRules);
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 await batch.save({
 sourceTable: WORKSPACE_TABLE,
 tag: storage.SaveOperationTag.INSERT,
@@ -529,24 +556,37 @@ bucket_definitions:
 },
 afterReplicaId: test_utils.rid('workspace3')
 });
+await batch.commit('1/1');
 });
-const checkpoint =
+const checkpoint = await bucketStorage.getCheckpoint();
 const parameters = new RequestParameters({ sub: 'unknown' }, {});
-const q1 = sync_rules.
+const q1 = sync_rules.bucketSources[0].parameterQueries[0];
 const lookups = q1.getLookups(parameters);
 expect(lookups).toEqual([ParameterLookup.normalized('by_public_workspace', '1', [])]);
-const parameter_sets = await
+const parameter_sets = await checkpoint.getParameterSets(lookups);
 parameter_sets.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
 expect(parameter_sets).toEqual([{ workspace_id: 'workspace1' }, { workspace_id: 'workspace3' }]);
-const buckets = await sync_rules
+const buckets = await sync_rules
+.getBucketParameterQuerier(test_utils.querierOptions(parameters))
+.querier.queryDynamicBucketDescriptions({
 getParameterSets(lookups) {
-return
+return checkpoint.getParameterSets(lookups);
 }
 });
 buckets.sort((a, b) => a.bucket.localeCompare(b.bucket));
 expect(buckets).toEqual([
-{
-
+{
+bucket: 'by_public_workspace["workspace1"]',
+priority: 3,
+definition: 'by_public_workspace',
+inclusion_reasons: ['default']
+},
+{
+bucket: 'by_public_workspace["workspace3"]',
+priority: 3,
+definition: 'by_public_workspace',
+inclusion_reasons: ['default']
+}
 ]);
 }
 catch (e_8) {
@@ -563,7 +603,9 @@ bucket_definitions:
 const env_9 = { stack: [], error: void 0, hasError: false };
 try {
 const WORKSPACE_TABLE = test_utils.makeTestTable('workspace');
-const
+const factory = __addDisposableResource(env_9, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 by_workspace:
 parameters:
@@ -572,11 +614,11 @@ bucket_definitions:
 - SELECT id as workspace_id FROM workspace WHERE
 workspace.user_id = token_parameters.user_id
 data: []
-`
-
-const
-const bucketStorage = factory.getInstance(
-
+`
+});
+const sync_rules = syncRules.parsed(test_utils.PARSE_OPTIONS).sync_rules;
+const bucketStorage = factory.getInstance(syncRules);
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 await batch.save({
 sourceTable: WORKSPACE_TABLE,
 tag: storage.SaveOperationTag.INSERT,
@@ -615,26 +657,29 @@ bucket_definitions:
 },
 afterReplicaId: test_utils.rid('workspace4')
 });
+await batch.commit('1/1');
 });
-const checkpoint =
+const checkpoint = await bucketStorage.getCheckpoint();
 const parameters = new RequestParameters({ sub: 'u1' }, {});
 // Test intermediate values - could be moved to sync_rules.test.ts
-const q1 = sync_rules.
+const q1 = sync_rules.bucketSources[0].parameterQueries[0];
 const lookups1 = q1.getLookups(parameters);
 expect(lookups1).toEqual([ParameterLookup.normalized('by_workspace', '1', [])]);
-const parameter_sets1 = await
+const parameter_sets1 = await checkpoint.getParameterSets(lookups1);
 parameter_sets1.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
 expect(parameter_sets1).toEqual([{ workspace_id: 'workspace1' }]);
-const q2 = sync_rules.
+const q2 = sync_rules.bucketSources[0].parameterQueries[1];
 const lookups2 = q2.getLookups(parameters);
 expect(lookups2).toEqual([ParameterLookup.normalized('by_workspace', '2', ['u1'])]);
-const parameter_sets2 = await
+const parameter_sets2 = await checkpoint.getParameterSets(lookups2);
 parameter_sets2.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
 expect(parameter_sets2).toEqual([{ workspace_id: 'workspace3' }]);
 // Test final values - the important part
-const buckets = (await sync_rules
+const buckets = (await sync_rules
+.getBucketParameterQuerier(test_utils.querierOptions(parameters))
+.querier.queryDynamicBucketDescriptions({
 getParameterSets(lookups) {
-return
+return checkpoint.getParameterSets(lookups);
 }
 })).map((e) => e.bucket);
 buckets.sort();
@@ -653,16 +698,18 @@ bucket_definitions:
 test('changing client ids', async () => {
 const env_10 = { stack: [], error: void 0, hasError: false };
 try {
-const
+const factory = __addDisposableResource(env_10, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 global:
 data:
 - SELECT client_id as id, description FROM "%"
-`
-
-const bucketStorage = factory.getInstance(
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
 const sourceTable = TEST_TABLE;
-
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 await batch.save({
 sourceTable,
 tag: storage.SaveOperationTag.INSERT,
@@ -693,8 +740,9 @@ bucket_definitions:
 },
 afterReplicaId: test_utils.rid('test2')
 });
+await batch.commit('1/1');
 });
-const checkpoint =
+const { checkpoint } = await bucketStorage.getCheckpoint();
 const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]])));
 const data = batch[0].chunkData.data.map((d) => {
 return {
@@ -722,14 +770,16 @@ bucket_definitions:
 test('re-apply delete', async () => {
 const env_11 = { stack: [], error: void 0, hasError: false };
 try {
-const
+const factory = __addDisposableResource(env_11, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 global:
 data:
 - SELECT id, description FROM "%"
-`
-
-const bucketStorage = factory.getInstance(
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
 await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 const sourceTable = TEST_TABLE;
 await batch.save({
@@ -749,8 +799,9 @@ bucket_definitions:
 tag: storage.SaveOperationTag.DELETE,
 beforeReplicaId: test_utils.rid('test1')
 });
+await batch.commit('1/1');
 });
-
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 const sourceTable = TEST_TABLE;
 await batch.save({
 sourceTable,
@@ -758,7 +809,7 @@ bucket_definitions:
 beforeReplicaId: test_utils.rid('test1')
 });
 });
-const checkpoint =
+const { checkpoint } = await bucketStorage.getCheckpoint();
 const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]])));
 const data = batch[0].chunkData.data.map((d) => {
 return {
@@ -795,14 +846,16 @@ bucket_definitions:
 test('re-apply update + delete', async () => {
 const env_12 = { stack: [], error: void 0, hasError: false };
 try {
-const
+const factory = __addDisposableResource(env_12, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 global:
 data:
 - SELECT id, description FROM "%"
-`
-
-const bucketStorage = factory.getInstance(
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
 await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 const sourceTable = TEST_TABLE;
 await batch.save({
@@ -840,8 +893,9 @@ bucket_definitions:
 tag: storage.SaveOperationTag.DELETE,
 beforeReplicaId: test_utils.rid('test1')
 });
+await batch.commit('1/1');
 });
-
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 const sourceTable = TEST_TABLE;
 await batch.save({
 sourceTable,
@@ -866,8 +920,9 @@ bucket_definitions:
 tag: storage.SaveOperationTag.DELETE,
 beforeReplicaId: test_utils.rid('test1')
 });
+await batch.commit('2/1');
 });
-const checkpoint =
+const { checkpoint } = await bucketStorage.getCheckpoint();
 const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]])));
 const data = batch[0].chunkData.data.map((d) => {
 return {
@@ -906,15 +961,17 @@ bucket_definitions:
 test('truncate parameters', async () => {
 const env_13 = { stack: [], error: void 0, hasError: false };
 try {
-const
+const factory = __addDisposableResource(env_13, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 mybucket:
 parameters:
 - SELECT group_id FROM test WHERE id1 = token_parameters.user_id OR id2 = token_parameters.user_id
 data: []
-`
-
-const bucketStorage = factory.getInstance(
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
 await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 await batch.save({
 sourceTable: TEST_TABLE,
@@ -929,10 +986,8 @@ bucket_definitions:
 });
 await batch.truncate([TEST_TABLE]);
 });
-const
-const parameters = await
-ParameterLookup.normalized('mybucket', '1', ['user1'])
-]);
+const checkpoint = await bucketStorage.getCheckpoint();
+const parameters = await checkpoint.getParameterSets([ParameterLookup.normalized('mybucket', '1', ['user1'])]);
 expect(parameters).toEqual([]);
 }
 catch (e_13) {
@@ -955,14 +1010,16 @@ bucket_definitions:
 // It can break at two places:
 // 1. Not getting the correct "current_data" state for each operation.
 // 2. Output order not being correct.
-const
+const factory = __addDisposableResource(env_14, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 global:
 data:
 - SELECT id, description FROM "test"
-`
-
-const bucketStorage = factory.getInstance(
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
 // Pre-setup
 const result1 = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 const sourceTable = TEST_TABLE;
@@ -1097,12 +1154,6 @@ bucket_definitions:
 test('changed data with replica identity full', async () => {
 const env_15 = { stack: [], error: void 0, hasError: false };
 try {
-const sync_rules = test_utils.testRules(`
-bucket_definitions:
-global:
-data:
-- SELECT id, description FROM "test"
-`);
 function rid2(id, description) {
 return getUuidReplicaIdentityBson({ id, description }, [
 { name: 'id', type: 'VARCHAR', typeId: 25 },
@@ -1110,7 +1161,15 @@ bucket_definitions:
 ]);
 }
 const factory = __addDisposableResource(env_15, await generateStorageFactory(), true);
-const
+const syncRules = await factory.updateSyncRules({
+content: `
+bucket_definitions:
+global:
+data:
+- SELECT id, description FROM "test"
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
 const sourceTable = test_utils.makeTestTable('test', ['id', 'description']);
 // Pre-setup
 const result1 = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
@@ -1203,12 +1262,6 @@ bucket_definitions:
 test('unchanged data with replica identity full', async () => {
 const env_16 = { stack: [], error: void 0, hasError: false };
 try {
-const sync_rules = test_utils.testRules(`
-bucket_definitions:
-global:
-data:
-- SELECT id, description FROM "test"
-`);
 function rid2(id, description) {
 return getUuidReplicaIdentityBson({ id, description }, [
 { name: 'id', type: 'VARCHAR', typeId: 25 },
@@ -1216,7 +1269,15 @@ bucket_definitions:
 ]);
 }
 const factory = __addDisposableResource(env_16, await generateStorageFactory(), true);
-const
+const syncRules = await factory.updateSyncRules({
+content: `
+bucket_definitions:
+global:
+data:
+- SELECT id, description FROM "test"
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
 const sourceTable = test_utils.makeTestTable('test', ['id', 'description']);
 // Pre-setup
 const result1 = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
@@ -1306,15 +1367,17 @@ bucket_definitions:
 // but large enough in size to be split over multiple returned batches.
 // The specific batch splits is an implementation detail of the storage driver,
 // and the test will have to updated when other implementations are added.
-const
+const factory = __addDisposableResource(env_17, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 global:
 data:
 - SELECT id, description FROM "%"
-`
-
-const bucketStorage = factory.getInstance(
-
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 const sourceTable = TEST_TABLE;
 const largeDescription = '0123456789'.repeat(12_000_00);
 await batch.save({
@@ -1354,8 +1417,9 @@ bucket_definitions:
 },
 afterReplicaId: test_utils.rid('test3')
 });
+await batch.commit('1/1');
 });
-const checkpoint =
+const { checkpoint } = await bucketStorage.getCheckpoint();
 const options = {
 chunkLimitBytes: 16 * 1024 * 1024
 };
@@ -1397,15 +1461,17 @@ bucket_definitions:
 const env_18 = { stack: [], error: void 0, hasError: false };
 try {
 // Test syncing a batch of data that is limited by count.
-const
+const factory = __addDisposableResource(env_18, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 global:
 data:
 - SELECT id, description FROM "%"
-`
-
-const bucketStorage = factory.getInstance(
-
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 const sourceTable = TEST_TABLE;
 for (let i = 1; i <= 6; i++) {
 await batch.save({
@@ -1418,8 +1484,9 @@ bucket_definitions:
 afterReplicaId: `test${i}`
 });
 }
+await batch.commit('1/1');
 });
-const checkpoint =
+const { checkpoint } = await bucketStorage.getCheckpoint();
 const batch1 = await test_utils.oneFromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]]), { limit: 4 }));
 expect(test_utils.getBatchData(batch1)).toEqual([
 { op_id: '1', op: 'PUT', object_id: 'test1', checksum: 2871785649 },
@@ -1464,7 +1531,9 @@ bucket_definitions:
 const setup = async (options) => {
 const env_19 = { stack: [], error: void 0, hasError: false };
 try {
-const
+const factory = __addDisposableResource(env_19, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 global1:
 data:
@@ -1472,10 +1541,10 @@ bucket_definitions:
 global2:
 data:
 - SELECT id, description FROM test WHERE bucket = 'global2'
-`
-
-const bucketStorage = factory.getInstance(
-
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
 const sourceTable = TEST_TABLE;
 for (let i = 1; i <= 10; i++) {
 await batch.save({
@@ -1489,8 +1558,9 @@ bucket_definitions:
 afterReplicaId: `test${i}`
 });
 }
+await batch.commit('1/1');
 });
-const checkpoint =
+const { checkpoint } = await bucketStorage.getCheckpoint();
 return await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([
 ['global1[]', 0n],
 ['global2[]', 0n]
@@ -1606,7 +1676,9 @@ bucket_definitions:
 });
 const r = await f.configureSyncRules({ content: 'bucket_definitions: {}', validate: false });
 const storage = f.getInstance(r.persisted_sync_rules);
-await storage.
+await storage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+await batch.keepalive('1/0');
+});
 const metrics2 = await f.getStorageMetrics();
 expect(metrics2).toMatchSnapshot();
 }
@@ -1623,16 +1695,18 @@ bucket_definitions:
 test('invalidate cached parsed sync rules', async () => {
 const env_21 = { stack: [], error: void 0, hasError: false };
 try {
-const
+const bucketStorageFactory = __addDisposableResource(env_21, await generateStorageFactory(), true);
+const syncRules = await bucketStorageFactory.updateSyncRules({
+content: `
 bucket_definitions:
 by_workspace:
 parameters:
 - SELECT id as workspace_id FROM workspace WHERE
 workspace."userId" = token_parameters.user_id
 data: []
-`
-
-const syncBucketStorage = bucketStorageFactory.getInstance(
+`
+});
+const syncBucketStorage = bucketStorageFactory.getInstance(syncRules);
 const parsedSchema1 = syncBucketStorage.getParsedSyncRules({
 defaultSchema: 'public'
 });
@@ -1667,12 +1741,11 @@ bucket_definitions:
 content: `
 bucket_definitions:
 mybucket:
-data: []
+data: []
 `,
 validate: false
 });
 const bucketStorage = factory.getInstance(r.persisted_sync_rules);
-await bucketStorage.autoActivate();
 const abortController = new AbortController();
 context.onTestFinished(() => abortController.abort());
 const iter = bucketStorage
@@ -1719,7 +1792,6 @@ bucket_definitions:
 validate: false
 });
 const bucketStorage = factory.getInstance(r.persisted_sync_rules);
-await bucketStorage.autoActivate();
 const abortController = new AbortController();
 context.onTestFinished(() => abortController.abort());
 const iter = bucketStorage
@@ -1787,7 +1859,6 @@ bucket_definitions:
 validate: false
 });
 const bucketStorage = factory.getInstance(r.persisted_sync_rules);
-await bucketStorage.autoActivate();
 bucketStorage.setWriteCheckpointMode(storage.WriteCheckpointMode.CUSTOM);
 const abortController = new AbortController();
 context.onTestFinished(() => abortController.abort());
@@ -1835,7 +1906,6 @@ bucket_definitions:
 validate: false
 });
 const bucketStorage = factory.getInstance(r.persisted_sync_rules);
-await bucketStorage.autoActivate();
 bucketStorage.setWriteCheckpointMode(storage.WriteCheckpointMode.CUSTOM);
 const abortController = new AbortController();
 context.onTestFinished(() => abortController.abort());
@@ -1885,7 +1955,6 @@ bucket_definitions:
 validate: false
 });
 const bucketStorage = factory.getInstance(r.persisted_sync_rules);
-await bucketStorage.autoActivate();
 bucketStorage.setWriteCheckpointMode(storage.WriteCheckpointMode.CUSTOM);
 const abortController = new AbortController();
 context.onTestFinished(() => abortController.abort());
@@ -1959,15 +2028,17 @@ bucket_definitions:
 // Test syncing a batch of data that is small in count,
 // but large enough in size to be split over multiple returned chunks.
 // Similar to the above test, but splits over 1MB chunks.
-const
+const factory = __addDisposableResource(env_27, await generateStorageFactory(), true);
+const syncRules = await factory.updateSyncRules({
+content: `
 bucket_definitions:
 global:
 data:
 - SELECT id FROM test
 - SELECT id FROM test_ignore WHERE false
-`
-
-const bucketStorage = factory.getInstance(
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
 const sourceTable = test_utils.makeTestTable('test', ['id']);
 const sourceTableIgnore = test_utils.makeTestTable('test_ignore', ['id']);
 const result1 = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
@@ -2007,5 +2078,75 @@ bucket_definitions:
 await result_27;
 }
 });
+test('data with custom types', async () => {
+const env_28 = { stack: [], error: void 0, hasError: false };
+try {
+const factory = __addDisposableResource(env_28, await generateStorageFactory(), true);
+const testValue = {
+sourceTable: TEST_TABLE,
+tag: storage.SaveOperationTag.INSERT,
+after: {
+id: 't1',
+description: new DateTimeValue('2025-08-28T11:30:00')
+},
+afterReplicaId: test_utils.rid('t1')
+};
+{
+// First, deploy old sync rules and row with date time value
+const syncRules = await factory.updateSyncRules({
+content: `
+bucket_definitions:
+global:
+data:
+- SELECT id, description FROM test
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+await batch.save(testValue);
+await batch.commit('1/1');
+});
+const { checkpoint } = await bucketStorage.getCheckpoint();
+const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['global[]', 0n]])));
+expect(batch[0].chunkData.data).toMatchObject([
+{
+data: '{"id":"t1","description":"2025-08-28 11:30:00"}'
+}
+]);
+}
+const syncRules = await factory.updateSyncRules({
+content: `
+bucket_definitions:
+global:
+data:
+- SELECT id, description FROM test
+
+config:
+edition: 2
+`
+});
+const bucketStorage = factory.getInstance(syncRules);
+await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+await batch.save(testValue);
+await batch.commit('1/2');
+});
+const { checkpoint } = await bucketStorage.getCheckpoint();
+const batch = await test_utils.fromAsync(bucketStorage.getBucketDataBatch(checkpoint, new Map([['2#global[]', 0n]])));
+expect(batch[0].chunkData.data).toMatchObject([
+{
+data: '{"id":"t1","description":"2025-08-28T11:30:00"}'
+}
+]);
+}
+catch (e_28) {
+env_28.error = e_28;
+env_28.hasError = true;
+}
+finally {
+const result_28 = __disposeResources(env_28);
+if (result_28)
+await result_28;
+}
+});
 }
 //# sourceMappingURL=register-data-storage-tests.js.map