@powersync/service-core-tests 0.4.0 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +22 -0
- package/README.md +1 -1
- package/dist/tests/register-data-storage-tests.js +30 -126
- package/dist/tests/register-data-storage-tests.js.map +1 -1
- package/dist/tests/register-sync-tests.js +233 -58
- package/dist/tests/register-sync-tests.js.map +1 -1
- package/package.json +5 -5
- package/src/tests/register-data-storage-tests.ts +18 -94
- package/src/tests/register-sync-tests.ts +188 -18
- package/tsconfig.tsbuildinfo +1 -1
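
The most visible changes in this version are in how the test suites call the sync APIs: `getBucketParameterQuerier(parameters).queryDynamicBucketDescriptions(...)` now resolves `{ bucket, priority }` descriptions instead of plain bucket-name strings, and `sync.streamResponse(...)` now receives `bucketStorage` plus a pre-parsed `syncRules` instead of a `parseOptions` field. As a rough sketch only (not part of the package; it assumes the test-harness identifiers `sync`, `test_utils`, `tracker`, `RequestParameters`, and `bucketStorage` that the tests in this diff set up), the updated call shape looks like:

```ts
// Sketch of the 0.5.0 streamResponse options, mirroring the tests in this diff.
const stream = sync.streamResponse({
  bucketStorage: bucketStorage,
  // Sync rules are parsed up front; a separate parseOptions field is no longer passed.
  syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
  params: {
    buckets: [],
    include_checksum: true,
    raw_data: true
  },
  tracker,
  syncParams: new RequestParameters({ sub: '' }, {}),
  token: { exp: Date.now() / 1000 + 10 } as any
});
```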
--- package/src/tests/register-data-storage-tests.ts (0.4.0)
+++ package/src/tests/register-data-storage-tests.ts (0.5.0)
@@ -140,7 +140,7 @@ bucket_definitions:
     );

     await using factory = await generateStorageFactory();
-
+    const bucketStorage = factory.getInstance(sync_rules);

     const table = test_utils.makeTestTable('todos', ['id', 'list_id']);

@@ -394,13 +394,12 @@ bucket_definitions:
     const parameter_sets = await bucketStorage.getParameterSets(checkpoint, lookups);
     expect(parameter_sets).toEqual([{ workspace_id: 'workspace1' }]);

-    const buckets = await sync_rules.
+    const buckets = await sync_rules.getBucketParameterQuerier(parameters).queryDynamicBucketDescriptions({
       getParameterSets(lookups) {
         return bucketStorage.getParameterSets(checkpoint, lookups);
-      }
-      parameters
+      }
     });
-    expect(buckets).toEqual(['by_workspace["workspace1"]']);
+    expect(buckets).toEqual([{ bucket: 'by_workspace["workspace1"]', priority: 3 }]);
   });

   test('save and load parameters with dynamic global buckets', async () => {
@@ -466,14 +465,16 @@ bucket_definitions:
     parameter_sets.sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
     expect(parameter_sets).toEqual([{ workspace_id: 'workspace1' }, { workspace_id: 'workspace3' }]);

-    const buckets = await sync_rules.
+    const buckets = await sync_rules.getBucketParameterQuerier(parameters).queryDynamicBucketDescriptions({
       getParameterSets(lookups) {
         return bucketStorage.getParameterSets(checkpoint, lookups);
-      }
-      parameters
+      }
     });
-    buckets.sort();
-    expect(buckets).toEqual([
+    buckets.sort((a, b) => a.bucket.localeCompare(b.bucket));
+    expect(buckets).toEqual([
+      { bucket: 'by_public_workspace["workspace1"]', priority: 3 },
+      { bucket: 'by_public_workspace["workspace3"]', priority: 3 }
+    ]);
   });

   test('multiple parameter queries', async () => {
@@ -562,12 +563,13 @@ bucket_definitions:
     expect(parameter_sets2).toEqual([{ workspace_id: 'workspace3' }]);

     // Test final values - the important part
-    const buckets =
-
-
-
-
-
+    const buckets = (
+      await sync_rules.getBucketParameterQuerier(parameters).queryDynamicBucketDescriptions({
+        getParameterSets(lookups) {
+          return bucketStorage.getParameterSets(checkpoint, lookups);
+        }
+      })
+    ).map((e) => e.bucket);
     buckets.sort();
     expect(buckets).toEqual(['by_workspace["workspace1"]', 'by_workspace["workspace3"]']);
   });
@@ -1415,84 +1417,6 @@ bucket_definitions:
     expect(test_utils.getBatchMeta(batch3)).toEqual(null);
   });

-  test('batch should be disposed automatically', async () => {
-    const sync_rules = test_utils.testRules(`
-bucket_definitions:
-  global:
-    data: []
-`);
-
-    await using factory = await generateStorageFactory();
-    const bucketStorage = factory.getInstance(sync_rules);
-
-    let isDisposed = false;
-    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
-      batch.registerListener({
-        disposed: () => {
-          isDisposed = true;
-        }
-      });
-    });
-    expect(isDisposed).true;
-
-    isDisposed = false;
-    let errorCaught = false;
-    try {
-      await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
-        batch.registerListener({
-          disposed: () => {
-            isDisposed = true;
-          }
-        });
-        throw new Error(`Testing exceptions`);
-      });
-    } catch (ex) {
-      errorCaught = true;
-      expect(ex.message.includes('Testing')).true;
-    }
-    expect(errorCaught).true;
-    expect(isDisposed).true;
-  });
-
-  test('batch should be disposed automatically', async () => {
-    const sync_rules = test_utils.testRules(`
-bucket_definitions:
-  global:
-    data: []
-`);
-
-    await using factory = await generateStorageFactory();
-    const bucketStorage = factory.getInstance(sync_rules);
-
-    let isDisposed = false;
-    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
-      batch.registerListener({
-        disposed: () => {
-          isDisposed = true;
-        }
-      });
-    });
-    expect(isDisposed).true;
-
-    isDisposed = false;
-    let errorCaught = false;
-    try {
-      await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
-        batch.registerListener({
-          disposed: () => {
-            isDisposed = true;
-          }
-        });
-        throw new Error(`Testing exceptions`);
-      });
-    } catch (ex) {
-      errorCaught = true;
-      expect(ex.message.includes('Testing')).true;
-    }
-    expect(errorCaught).true;
-    expect(isDisposed).true;
-  });
-
   test('empty storage metrics', async () => {
     await using f = await generateStorageFactory({ dropAll: true });
     const metrics = await f.getStorageMetrics();

--- package/src/tests/register-sync-tests.ts (0.4.0)
+++ package/src/tests/register-sync-tests.ts (0.5.0)
@@ -67,13 +67,13 @@ export function registerSyncTests(factory: storage.TestStorageFactory) {
     });

     const stream = sync.streamResponse({
-
+      bucketStorage: bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
-      parseOptions: test_utils.PARSE_OPTIONS,
       tracker,
       syncParams: new RequestParameters({ sub: '' }, {}),
       token: { exp: Date.now() / 1000 + 10 } as any
@@ -83,6 +83,176 @@ export function registerSyncTests(factory: storage.TestStorageFactory) {
     expect(lines).toMatchSnapshot();
   });

+  test('sync buckets in order', async () => {
+    await using f = await factory();
+
+    const syncRules = await f.updateSyncRules({
+      content: `
+bucket_definitions:
+  b0:
+    priority: 2
+    data:
+      - SELECT * FROM test WHERE LENGTH(id) <= 2;
+  b1:
+    priority: 1
+    data:
+      - SELECT * FROM test WHERE LENGTH(id) > 2;
+`
+    });
+
+    const bucketStorage = f.getInstance(syncRules);
+    await bucketStorage.autoActivate();
+
+    const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 't1',
+          description: 'Test 1'
+        },
+        afterReplicaId: 't1'
+      });
+
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 'earlier',
+          description: 'Test 2'
+        },
+        afterReplicaId: 'earlier'
+      });
+
+      await batch.commit('0/1');
+    });
+
+    const stream = sync.streamResponse({
+      bucketStorage: bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+      params: {
+        buckets: [],
+        include_checksum: true,
+        raw_data: true
+      },
+      tracker,
+      syncParams: new RequestParameters({ sub: '' }, {}),
+      token: { exp: Date.now() / 1000 + 10 } as any
+    });
+
+    const lines = await consumeCheckpointLines(stream);
+    expect(lines).toMatchSnapshot();
+  });
+
+  test('sync interrupts low-priority buckets on new checkpoints', async () => {
+    await using f = await factory();
+
+    const syncRules = await f.updateSyncRules({
+      content: `
+bucket_definitions:
+  b0:
+    priority: 2
+    data:
+      - SELECT * FROM test WHERE LENGTH(id) <= 5;
+  b1:
+    priority: 1
+    data:
+      - SELECT * FROM test WHERE LENGTH(id) > 5;
+`
+    });
+
+    const bucketStorage = f.getInstance(syncRules);
+    await bucketStorage.autoActivate();
+
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      // Initial data: Add one priority row and 10k low-priority rows.
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 'highprio',
+          description: 'High priority row'
+        },
+        afterReplicaId: 'highprio'
+      });
+      for (let i = 0; i < 10_000; i++) {
+        await batch.save({
+          sourceTable: TEST_TABLE,
+          tag: storage.SaveOperationTag.INSERT,
+          after: {
+            id: `${i}`,
+            description: 'low prio'
+          },
+          afterReplicaId: `${i}`
+        });
+      }
+
+      await batch.commit('0/1');
+    });
+
+    const stream = sync.streamResponse({
+      bucketStorage: bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+      params: {
+        buckets: [],
+        include_checksum: true,
+        raw_data: true
+      },
+      tracker,
+      syncParams: new RequestParameters({ sub: '' }, {}),
+      token: { exp: Date.now() / 1000 + 10 } as any
+    });
+
+    let sentCheckpoints = 0;
+    let sentRows = 0;
+
+    for await (let next of stream) {
+      if (typeof next == 'string') {
+        next = JSON.parse(next);
+      }
+      if (typeof next === 'object' && next !== null) {
+        if ('partial_checkpoint_complete' in next) {
+          if (sentCheckpoints == 1) {
+            // Save new data to interrupt the low-priority sync.
+
+            await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+              // Add another high-priority row. This should interrupt the long-running low-priority sync.
+              await batch.save({
+                sourceTable: TEST_TABLE,
+                tag: storage.SaveOperationTag.INSERT,
+                after: {
+                  id: 'highprio2',
+                  description: 'Another high-priority row'
+                },
+                afterReplicaId: 'highprio2'
+              });
+
+              await batch.commit('0/2');
+            });
+          } else {
+            // Low-priority sync from the first checkpoint was interrupted. This should not happen before
+            // 1000 low-priority items were synchronized.
+            expect(sentCheckpoints).toBe(2);
+            expect(sentRows).toBeGreaterThan(1000);
+          }
+        }
+        if ('checkpoint' in next || 'checkpoint_diff' in next) {
+          sentCheckpoints += 1;
+        }
+
+        if ('data' in next) {
+          sentRows += next.data.data.length;
+        }
+        if ('checkpoint_complete' in next) {
+          break;
+        }
+      }
+    }
+
+    expect(sentCheckpoints).toBe(2);
+    expect(sentRows).toBe(10002);
+  });
+
   test('sync legacy non-raw data', async () => {
     const f = await factory();

@@ -109,13 +279,13 @@ export function registerSyncTests(factory: storage.TestStorageFactory) {
     });

     const stream = sync.streamResponse({
-
+      bucketStorage: bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: false
       },
-      parseOptions: test_utils.PARSE_OPTIONS,
       tracker,
       syncParams: new RequestParameters({ sub: '' }, {}),
       token: { exp: Date.now() / 1000 + 10 } as any
@@ -134,17 +304,17 @@ export function registerSyncTests(factory: storage.TestStorageFactory) {
       content: BASIC_SYNC_RULES
     });

-    const
-    await
+    const bucketStorage = await f.getInstance(syncRules);
+    await bucketStorage.autoActivate();

     const stream = sync.streamResponse({
-
+      bucketStorage: bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
-      parseOptions: test_utils.PARSE_OPTIONS,
       tracker,
       syncParams: new RequestParameters({ sub: '' }, {}),
       token: { exp: 0 } as any
@@ -165,13 +335,13 @@ export function registerSyncTests(factory: storage.TestStorageFactory) {
     await bucketStorage.autoActivate();

     const stream = sync.streamResponse({
-
+      bucketStorage: bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
-      parseOptions: test_utils.PARSE_OPTIONS,
       tracker,
       syncParams: new RequestParameters({ sub: '' }, {}),
       token: { exp: Date.now() / 1000 + 10 } as any
@@ -222,19 +392,19 @@ export function registerSyncTests(factory: storage.TestStorageFactory) {
       content: BASIC_SYNC_RULES
     });

-    const
-    await
+    const bucketStorage = await f.getInstance(syncRules);
+    await bucketStorage.autoActivate();

     const exp = Date.now() / 1000 + 0.1;

     const stream = sync.streamResponse({
-
+      bucketStorage: bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
-      parseOptions: test_utils.PARSE_OPTIONS,
       tracker,
       syncParams: new RequestParameters({ sub: '' }, {}),
       token: { exp: exp } as any
@@ -288,13 +458,13 @@ export function registerSyncTests(factory: storage.TestStorageFactory) {
     });

     const stream = sync.streamResponse({
-
+      bucketStorage: bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
-      parseOptions: test_utils.PARSE_OPTIONS,
       tracker,
       syncParams: new RequestParameters({ sub: '' }, {}),
       token: { exp: Date.now() / 1000 + 10 } as any
@@ -411,13 +581,13 @@ export function registerSyncTests(factory: storage.TestStorageFactory) {
     });

     const params: sync.SyncStreamParameters = {
-
+      bucketStorage: bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
-      parseOptions: test_utils.PARSE_OPTIONS,
       tracker,
       syncParams: new RequestParameters({ sub: 'test' }, {}),
       token: { sub: 'test', exp: Date.now() / 1000 + 10 } as any