@powersync/service-core 0.1.3 → 0.2.1
This diff shows the changes between publicly released versions of the package, as published to their respective public registries. It is provided for informational purposes only.
- package/CHANGELOG.md +14 -0
- package/dist/routes/socket-route.js +1 -1
- package/dist/routes/socket-route.js.map +1 -1
- package/dist/routes/sync-stream.js +1 -1
- package/dist/routes/sync-stream.js.map +1 -1
- package/dist/storage/BucketStorage.d.ts +6 -1
- package/dist/storage/BucketStorage.js.map +1 -1
- package/dist/storage/ChecksumCache.d.ts +50 -0
- package/dist/storage/ChecksumCache.js +234 -0
- package/dist/storage/ChecksumCache.js.map +1 -0
- package/dist/storage/mongo/MongoSyncBucketStorage.d.ts +3 -1
- package/dist/storage/mongo/MongoSyncBucketStorage.js +26 -14
- package/dist/storage/mongo/MongoSyncBucketStorage.js.map +1 -1
- package/dist/sync/sync.js +32 -21
- package/dist/sync/sync.js.map +1 -1
- package/dist/util/protocol-types.d.ts +4 -0
- package/dist/util/protocol-types.js +5 -1
- package/dist/util/protocol-types.js.map +1 -1
- package/dist/util/utils.d.ts +6 -3
- package/dist/util/utils.js +32 -15
- package/dist/util/utils.js.map +1 -1
- package/package.json +6 -5
- package/src/routes/socket-route.ts +5 -1
- package/src/routes/sync-stream.ts +4 -1
- package/src/storage/BucketStorage.ts +6 -1
- package/src/storage/ChecksumCache.ts +294 -0
- package/src/storage/mongo/MongoSyncBucketStorage.ts +31 -15
- package/src/sync/sync.ts +44 -37
- package/src/util/protocol-types.ts +6 -1
- package/src/util/utils.ts +36 -16
- package/test/src/__snapshots__/sync.test.ts.snap +14 -2
- package/test/src/checksum_cache.test.ts +436 -0
- package/test/src/data_storage.test.ts +3 -3
- package/test/src/large_batch.test.ts +4 -4
- package/test/src/sync_rules.test.ts +11 -9
- package/tsconfig.tsbuildinfo +1 -1
package/test/src/checksum_cache.test.ts
@@ -0,0 +1,436 @@
+import { describe, expect, it } from 'vitest';
+import { BucketChecksum, OpId } from '@/util/protocol-types.js';
+import * as crypto from 'node:crypto';
+import { addBucketChecksums } from '@/util/util-index.js';
+import { ChecksumCache, FetchChecksums, FetchPartialBucketChecksum } from '@/storage/ChecksumCache.js';
+
+/**
+ * Create a deterministic BucketChecksum based on the bucket name and checkpoint for testing purposes.
+ */
+function testHash(bucket: string, checkpoint: OpId) {
+  const key = `${checkpoint}/${bucket}`;
+  const hash = crypto.createHash('sha256').update(key).digest().readInt32LE(0);
+  return hash;
+}
+
+function testPartialHash(request: FetchPartialBucketChecksum): BucketChecksum {
+  if (request.start) {
+    const a = testHash(request.bucket, request.start);
+    const b = testHash(request.bucket, request.end);
+    return addBucketChecksums(
+      {
+        bucket: request.bucket,
+        checksum: b,
+        count: Number(request.end)
+      },
+      {
+        // Subtract a
+        bucket: request.bucket,
+        checksum: -a,
+        count: -Number(request.start)
+      }
+    );
+  } else {
+    return {
+      bucket: request.bucket,
+      checksum: testHash(request.bucket, request.end),
+      count: Number(request.end)
+    };
+  }
+}
+
+const TEST_123 = {
+  bucket: 'test',
+  count: 123,
+  checksum: 1104081737
+};
+
+const TEST_1234 = {
+  bucket: 'test',
+  count: 1234,
+  checksum: -1593864957
+};
+
+const TEST2_123 = {
+  bucket: 'test2',
+  count: 123,
+  checksum: 1741377449
+};
+
+const TEST3_123 = {
+  bucket: 'test3',
+  count: 123,
+  checksum: -2085080402
+};
+
+function fetchTestChecksums(batch: FetchPartialBucketChecksum[]) {
+  return new Map(
+    batch.map((v) => {
+      return [v.bucket, testPartialHash(v)];
+    })
+  );
+}
+
+describe('checksum cache', function () {
+  const factory = (fetch: FetchChecksums) => {
+    return new ChecksumCache({ fetchChecksums: fetch });
+  };
+
+  it('should handle a sequential lookups (a)', async function () {
+    let lookups: FetchPartialBucketChecksum[][] = [];
+    const cache = factory(async (batch) => {
+      lookups.push(batch);
+      return fetchTestChecksums(batch);
+    });
+
+    expect(await cache.getChecksums('123', ['test'])).toEqual([TEST_123]);
+
+    expect(await cache.getChecksums('1234', ['test'])).toEqual([TEST_1234]);
+
+    expect(await cache.getChecksums('123', ['test2'])).toEqual([TEST2_123]);
+
+    expect(lookups).toEqual([
+      [{ bucket: 'test', end: '123' }],
+      // This should use the previous lookup
+      [{ bucket: 'test', start: '123', end: '1234' }],
+      [{ bucket: 'test2', end: '123' }]
+    ]);
+  });
+
+  it('should handle a sequential lookups (b)', async function () {
+    // Reverse order of the above
+    let lookups: FetchPartialBucketChecksum[][] = [];
+    const cache = factory(async (batch) => {
+      lookups.push(batch);
+      return fetchTestChecksums(batch);
+    });
+
+    expect(await cache.getChecksums('123', ['test2'])).toEqual([TEST2_123]);
+
+    expect(await cache.getChecksums('1234', ['test'])).toEqual([TEST_1234]);
+
+    expect(await cache.getChecksums('123', ['test'])).toEqual([TEST_123]);
+
+    expect(lookups).toEqual([
+      // With this order, there is no option for a partial lookup
+      [{ bucket: 'test2', end: '123' }],
+      [{ bucket: 'test', end: '1234' }],
+      [{ bucket: 'test', end: '123' }]
+    ]);
+  });
+
+  it('should handle a concurrent lookups (a)', async function () {
+    let lookups: FetchPartialBucketChecksum[][] = [];
+    const cache = factory(async (batch) => {
+      lookups.push(batch);
+      return fetchTestChecksums(batch);
+    });
+
+    const p1 = cache.getChecksums('123', ['test']);
+    const p2 = cache.getChecksums('1234', ['test']);
+    const p3 = cache.getChecksums('123', ['test2']);
+
+    expect(await p1).toEqual([TEST_123]);
+    expect(await p2).toEqual([TEST_1234]);
+    expect(await p3).toEqual([TEST2_123]);
+
+    // Concurrent requests, so we can't do a partial lookup for 123 -> 1234
+    expect(lookups).toEqual([
+      [{ bucket: 'test', end: '123' }],
+      [{ bucket: 'test', end: '1234' }],
+      [{ bucket: 'test2', end: '123' }]
+    ]);
+  });
+
+  it('should handle a concurrent lookups (b)', async function () {
+    let lookups: FetchPartialBucketChecksum[][] = [];
+    const cache = factory(async (batch) => {
+      lookups.push(batch);
+      return fetchTestChecksums(batch);
+    });
+
+    const p1 = cache.getChecksums('123', ['test']);
+    const p2 = cache.getChecksums('123', ['test']);
+
+    expect(await p1).toEqual([TEST_123]);
+
+    expect(await p2).toEqual([TEST_123]);
+
+    // The lookup should be deduplicated, even though it's in progress
+    expect(lookups).toEqual([[{ bucket: 'test', end: '123' }]]);
+  });
+
+  it('should handle serial + concurrent lookups', async function () {
+    let lookups: FetchPartialBucketChecksum[][] = [];
+    const cache = factory(async (batch) => {
+      lookups.push(batch);
+      return fetchTestChecksums(batch);
+    });
+
+    expect(await cache.getChecksums('123', ['test'])).toEqual([TEST_123]);
+
+    const p2 = cache.getChecksums('1234', ['test']);
+    const p3 = cache.getChecksums('1234', ['test']);
+
+    expect(await p2).toEqual([TEST_1234]);
+    expect(await p3).toEqual([TEST_1234]);
+
+    expect(lookups).toEqual([
+      [{ bucket: 'test', end: '123' }],
+      // This lookup is deduplicated
+      [{ bucket: 'test', start: '123', end: '1234' }]
+    ]);
+  });
+
+  it('should handle multiple buckets', async function () {
+    let lookups: FetchPartialBucketChecksum[][] = [];
+    const cache = factory(async (batch) => {
+      lookups.push(batch);
+      return fetchTestChecksums(batch);
+    });
+
+    expect(await cache.getChecksums('123', ['test', 'test2'])).toEqual([TEST_123, TEST2_123]);
+
+    expect(lookups).toEqual([
+      [
+        // Both lookups in the same request
+        { bucket: 'test', end: '123' },
+        { bucket: 'test2', end: '123' }
+      ]
+    ]);
+  });
+
+  it('should handle multiple buckets with partial caching (a)', async function () {
+    let lookups: FetchPartialBucketChecksum[][] = [];
+    const cache = factory(async (batch) => {
+      lookups.push(batch);
+      return fetchTestChecksums(batch);
+    });
+
+    expect(await cache.getChecksums('123', ['test'])).toEqual([TEST_123]);
+    expect(await cache.getChecksums('123', ['test', 'test2'])).toEqual([TEST_123, TEST2_123]);
+
+    expect(lookups).toEqual([
+      // Request 1
+      [{ bucket: 'test', end: '123' }],
+      // Request 2
+      [{ bucket: 'test2', end: '123' }]
+    ]);
+  });
+
+  it('should handle multiple buckets with partial caching (b)', async function () {
+    let lookups: FetchPartialBucketChecksum[][] = [];
+    const cache = factory(async (batch) => {
+      lookups.push(batch);
+      return fetchTestChecksums(batch);
+    });
+
+    const a = cache.getChecksums('123', ['test', 'test2']);
+    const b = cache.getChecksums('123', ['test2', 'test3']);
+
+    expect(await a).toEqual([TEST_123, TEST2_123]);
+    expect(await b).toEqual([TEST2_123, TEST3_123]);
+
+    expect(lookups).toEqual([
+      // Request A
+      [
+        { bucket: 'test', end: '123' },
+        { bucket: 'test2', end: '123' }
+      ],
+      // Request B (re-uses the checksum for test2 from request a)
+      [{ bucket: 'test3', end: '123' }]
+    ]);
+  });
+
+  it('should handle out-of-order requests', async function () {
+    let lookups: FetchPartialBucketChecksum[][] = [];
+    const cache = factory(async (batch) => {
+      lookups.push(batch);
+      return fetchTestChecksums(batch);
+    });
+
+    expect(await cache.getChecksums('123', ['test'])).toEqual([TEST_123]);
+
+    expect(await cache.getChecksums('125', ['test'])).toEqual([
+      {
+        bucket: 'test',
+        checksum: -1865121912,
+        count: 125
+      }
+    ]);
+
+    expect(await cache.getChecksums('124', ['test'])).toEqual([
+      {
+        bucket: 'test',
+        checksum: 1887460431,
+        count: 124
+      }
+    ]);
+    expect(lookups).toEqual([
+      [{ bucket: 'test', end: '123' }],
+      [{ bucket: 'test', start: '123', end: '125' }],
+      [{ bucket: 'test', start: '123', end: '124' }]
+    ]);
+  });
+
+  it('should handle errors', async function () {
+    let lookups: FetchPartialBucketChecksum[][] = [];
+    const TEST_ERROR = new Error('Simulated error');
+    const cache = factory(async (batch) => {
+      lookups.push(batch);
+      if (lookups.length == 1) {
+        throw new Error('Simulated error');
+      }
+      return fetchTestChecksums(batch);
+    });
+
+    const a = cache.getChecksums('123', ['test', 'test2']);
+    const b = cache.getChecksums('123', ['test2', 'test3']);
+
+    await expect(a).rejects.toEqual(TEST_ERROR);
+    await expect(b).rejects.toEqual(TEST_ERROR);
+
+    const a2 = cache.getChecksums('123', ['test', 'test2']);
+    const b2 = cache.getChecksums('123', ['test2', 'test3']);
+
+    expect(await a2).toEqual([TEST_123, TEST2_123]);
+    expect(await b2).toEqual([TEST2_123, TEST3_123]);
+
+    expect(lookups).toEqual([
+      // Request A (fails)
+      [
+        { bucket: 'test', end: '123' },
+        { bucket: 'test2', end: '123' }
+      ],
+      // Request B (re-uses the checksum for test2 from request a)
+      // Even thought the full request fails, this batch succeeds
+      [{ bucket: 'test3', end: '123' }],
+      // Retry request A
+      [
+        { bucket: 'test', end: '123' },
+        { bucket: 'test2', end: '123' }
+      ]
+    ]);
+  });
+
+  it('should handle missing checksums (a)', async function () {
+    let lookups: FetchPartialBucketChecksum[][] = [];
+    const cache = factory(async (batch) => {
+      lookups.push(batch);
+      return fetchTestChecksums(batch.filter((b) => b.bucket != 'test'));
+    });
+
+    expect(await cache.getChecksums('123', ['test'])).toEqual([{ bucket: 'test', checksum: 0, count: 0 }]);
+    expect(await cache.getChecksums('123', ['test', 'test2'])).toEqual([
+      { bucket: 'test', checksum: 0, count: 0 },
+      TEST2_123
+    ]);
+  });
+
+  it('should handle missing checksums (b)', async function () {
+    let lookups: FetchPartialBucketChecksum[][] = [];
+    const cache = factory(async (batch) => {
+      lookups.push(batch);
+      return fetchTestChecksums(batch.filter((b) => b.bucket != 'test' || b.end != '123'));
+    });
+
+    expect(await cache.getChecksums('123', ['test'])).toEqual([{ bucket: 'test', checksum: 0, count: 0 }]);
+    expect(await cache.getChecksums('1234', ['test'])).toEqual([
+      {
+        bucket: 'test',
+        checksum: 1597020602,
+        count: 1111
+      }
+    ]);
+
+    expect(lookups).toEqual([[{ bucket: 'test', end: '123' }], [{ bucket: 'test', start: '123', end: '1234' }]]);
+  });
+
+  it('should use maxSize', async function () {
+    let lookups: FetchPartialBucketChecksum[][] = [];
+    const cache = new ChecksumCache({
+      fetchChecksums: async (batch) => {
+        lookups.push(batch);
+        return fetchTestChecksums(batch);
+      },
+      maxSize: 2
+    });
+
+    expect(await cache.getChecksums('123', ['test'])).toEqual([TEST_123]);
+    expect(await cache.getChecksums('124', ['test'])).toEqual([
+      {
+        bucket: 'test',
+        checksum: 1887460431,
+        count: 124
+      }
+    ]);
+
+    expect(await cache.getChecksums('125', ['test'])).toEqual([
+      {
+        bucket: 'test',
+        checksum: -1865121912,
+        count: 125
+      }
+    ]);
+    expect(await cache.getChecksums('126', ['test'])).toEqual([
+      {
+        bucket: 'test',
+        checksum: -1720007310,
+        count: 126
+      }
+    ]);
+    expect(await cache.getChecksums('124', ['test'])).toEqual([
+      {
+        bucket: 'test',
+        checksum: 1887460431,
+        count: 124
+      }
+    ]);
+    expect(await cache.getChecksums('123', ['test'])).toEqual([TEST_123]);
+
+    expect(lookups).toEqual([
+      [{ bucket: 'test', end: '123' }],
+      [{ bucket: 'test', start: '123', end: '124' }],
+      [{ bucket: 'test', start: '124', end: '125' }],
+      [{ bucket: 'test', start: '125', end: '126' }],
+      [{ bucket: 'test', end: '124' }],
+      [{ bucket: 'test', end: '123' }]
+    ]);
+  });
+
+  it('should handle concurrent requests greater than cache size', async function () {
+    // This will not be cached efficiently, but we test that we don't get errors at least.
+    let lookups: FetchPartialBucketChecksum[][] = [];
+    const cache = new ChecksumCache({
+      fetchChecksums: async (batch) => {
+        lookups.push(batch);
+        return fetchTestChecksums(batch);
+      },
+      maxSize: 2
+    });
+
+    const p3 = cache.getChecksums('123', ['test3']);
+    const p4 = cache.getChecksums('123', ['test4']);
+    const p1 = cache.getChecksums('123', ['test']);
+    const p2 = cache.getChecksums('123', ['test2']);
+
+    expect(await p1).toEqual([TEST_123]);
+    expect(await p2).toEqual([TEST2_123]);
+    expect(await p3).toEqual([TEST3_123]);
+    expect(await p4).toEqual([
+      {
+        bucket: 'test4',
+        checksum: 1004797863,
+        count: 123
+      }
+    ]);
+
+    // The lookup should be deduplicated, even though it's in progress
+    expect(lookups).toEqual([
+      [{ bucket: 'test3', end: '123' }],
+      [{ bucket: 'test4', end: '123' }],
+      [{ bucket: 'test', end: '123' }],
+      [{ bucket: 'test2', end: '123' }]
+    ]);
+  });
+});
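The tests above pin down the contract of the new ChecksumCache. A minimal usage sketch, inferred from those tests; `queryChecksum` is a hypothetical stand-in for a real storage lookup, not part of this package:

import { ChecksumCache, FetchPartialBucketChecksum } from '@/storage/ChecksumCache.js';
import { BucketChecksum } from '@/util/protocol-types.js';

// Hypothetical storage lookup, for illustration only; any function of this
// shape can back the cache.
declare function queryChecksum(request: FetchPartialBucketChecksum): Promise<BucketChecksum>;

const cache = new ChecksumCache({
  // Called with a batch of bucket ranges. `start` is only set when the cache
  // already holds a checksum at an earlier checkpoint, so the fetch can be
  // incremental; counts and checksums of adjacent ranges add together, as
  // testPartialHash above demonstrates by subtracting the checksum at `start`.
  fetchChecksums: async (batch: FetchPartialBucketChecksum[]) => {
    const result = new Map<string, BucketChecksum>();
    for (const request of batch) {
      result.set(request.bucket, await queryChecksum(request));
    }
    return result;
  },
  maxSize: 10_000 // optional cache bound, as exercised by the maxSize tests
});

// Resolves to one BucketChecksum per requested bucket at the given checkpoint;
// per the tests, identical in-flight lookups are deduplicated.
const checksums = await cache.getChecksums('1234', ['global[]']);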
package/test/src/data_storage.test.ts
@@ -252,7 +252,7 @@ bucket_definitions:
       { op: 'REMOVE', object_id: 'test1', checksum: c2 }
     ]);
 
-    const checksums = await storage.getChecksums(checkpoint, ['global[]']);
+    const checksums = [...(await storage.getChecksums(checkpoint, ['global[]'])).values()];
     expect(checksums).toEqual([
       {
         bucket: 'global[]',
@@ -599,7 +599,7 @@ bucket_definitions:
       { op: 'REMOVE', object_id: 'test1', checksum: c2 }
     ]);
 
-    const checksums = await storage.getChecksums(checkpoint, ['global[]']);
+    const checksums = [...(await storage.getChecksums(checkpoint, ['global[]'])).values()];
     expect(checksums).toEqual([
       {
         bucket: 'global[]',
@@ -713,7 +713,7 @@ bucket_definitions:
       { op: 'REMOVE', object_id: 'test1', checksum: c2 }
     ]);
 
-    const checksums = await storage.getChecksums(checkpoint, ['global[]']);
+    const checksums = [...(await storage.getChecksums(checkpoint, ['global[]'])).values()];
     expect(checksums).toEqual([
       {
         bucket: 'global[]',
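These hunks reflect the updated storage API: getChecksums now resolves to a Map keyed by bucket id rather than an array. A short sketch of the two access patterns the updated tests use; the declared shape of `storage` is an assumption matching the calls shown in the hunks:

import { BucketChecksum } from '@/util/protocol-types.js';

// Assumed shape, matching the call sites in the updated tests.
declare const storage: {
  getChecksums(checkpoint: string, buckets: string[]): Promise<Map<string, BucketChecksum>>;
};
declare const checkpoint: string;

const checksumMap = await storage.getChecksums(checkpoint, ['global[]']);

// Direct lookup by bucket id...
const count = checksumMap.get('global[]')!.count;

// ...or spread back into an array where ordered output is expected.
const asArray = [...checksumMap.values()];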
package/test/src/large_batch.test.ts
@@ -50,7 +50,7 @@ function defineBatchTests(factory: StorageFactory) {
     const duration = Date.now() - start;
     const used = Math.round(process.memoryUsage().heapUsed / 1024 / 1024);
     const checksum = await context.storage!.getChecksums(checkpoint, ['global[]']);
-    expect(checksum[
+    expect(checksum.get('global[]')!.count).toEqual(operation_count);
     const perSecond = Math.round((operation_count / duration) * 1000);
     console.log(`${operation_count} ops in ${duration}ms ${perSecond} ops/s. ${used}MB heap`);
   }),
@@ -101,7 +101,7 @@ function defineBatchTests(factory: StorageFactory) {
     const checkpoint = await context.getCheckpoint({ timeout: 100_000 });
     const duration = Date.now() - start;
     const checksum = await context.storage!.getChecksums(checkpoint, ['global[]']);
-    expect(checksum[
+    expect(checksum.get('global[]')!.count).toEqual(operation_count);
     const perSecond = Math.round((operation_count / duration) * 1000);
     console.log(`${operation_count} ops in ${duration}ms ${perSecond} ops/s.`);
     printMemoryUsage();
@@ -157,7 +157,7 @@ function defineBatchTests(factory: StorageFactory) {
     const duration = Date.now() - start;
     const used = Math.round(process.memoryUsage().heapUsed / 1024 / 1024);
     const checksum = await context.storage!.getChecksums(checkpoint, ['global[]']);
-    expect(checksum[
+    expect(checksum.get('global[]')!.count).toEqual(operationCount);
     const perSecond = Math.round((operationCount / duration) * 1000);
     // This number depends on the test machine, so we keep the test significantly
     // lower than expected numbers.
@@ -174,7 +174,7 @@ function defineBatchTests(factory: StorageFactory) {
     const truncateDuration = Date.now() - truncateStart;
 
     const checksum2 = await context.storage!.getChecksums(checkpoint2, ['global[]']);
-    const truncateCount = checksum2[
+    const truncateCount = checksum2.get('global[]')!.count - checksum.get('global[]')!.count;
     expect(truncateCount).toEqual(numTransactions * perTransaction);
     const truncatePerSecond = Math.round((truncateCount / truncateDuration) * 1000);
     console.log(`Truncated ${truncateCount} ops in ${truncateDuration}ms ${truncatePerSecond} ops/s. ${used}MB heap`);
package/test/src/sync_rules.test.ts
@@ -100,27 +100,29 @@ bucket_definitions:
   const rules = SqlSyncRules.fromYaml(`
 bucket_definitions:
   mybucket:
-    parameters: SELECT token_parameters.user_id
+    parameters: SELECT token_parameters.user_id, user_parameters.device_id
     data:
-      - SELECT id, description FROM assets WHERE assets.user_id = bucket.user_id AND NOT assets.archived
+      - SELECT id, description FROM assets WHERE assets.user_id = bucket.user_id AND assets.device_id = bucket.device_id AND NOT assets.archived
 `);
   const bucket = rules.bucket_descriptors[0];
-  expect(bucket.bucket_parameters).toEqual(['user_id']);
+  expect(bucket.bucket_parameters).toEqual(['user_id', 'device_id']);
   const param_query = bucket.global_parameter_queries[0];
-  expect(param_query.bucket_parameters).toEqual(['user_id']);
-  expect(rules.getStaticBucketIds(normalizeTokenParameters({ user_id: 'user1' }))).toEqual([
+  expect(param_query.bucket_parameters).toEqual(['user_id', 'device_id']);
+  expect(rules.getStaticBucketIds(normalizeTokenParameters({ user_id: 'user1' }, { device_id: 'device1' }))).toEqual([
+    'mybucket["user1","device1"]'
+  ]);
 
   const data_query = bucket.data_queries[0];
-  expect(data_query.bucket_parameters).toEqual(['user_id']);
+  expect(data_query.bucket_parameters).toEqual(['user_id', 'device_id']);
   expect(
     rules.evaluateRow({
       sourceTable: ASSETS,
-      record: { id: 'asset1', description: 'test', user_id: 'user1' }
+      record: { id: 'asset1', description: 'test', user_id: 'user1', device_id: 'device1' }
     })
   ).toEqual([
     {
       ruleId: '1',
-      bucket: 'mybucket["user1"]',
+      bucket: 'mybucket["user1","device1"]',
       id: 'asset1',
       data: {
         id: 'asset1',
@@ -132,7 +134,7 @@ bucket_definitions:
   expect(
     rules.evaluateRow({
       sourceTable: ASSETS,
-      record: { id: 'asset1', description: 'test', user_id: 'user1', archived: 1 }
+      record: { id: 'asset1', description: 'test', user_id: 'user1', archived: 1, device_id: 'device1' }
     })
   ).toEqual([]);
 });