@powersync/service-core-tests 0.0.0-dev-20241219091224
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +15 -0
- package/LICENSE +67 -0
- package/README.md +5 -0
- package/dist/index.d.ts +4 -0
- package/dist/index.js +5 -0
- package/dist/index.js.map +1 -0
- package/dist/test-utils/bucket-validation.d.ts +42 -0
- package/dist/test-utils/bucket-validation.js +115 -0
- package/dist/test-utils/bucket-validation.js.map +1 -0
- package/dist/test-utils/general-utils.d.ts +31 -0
- package/dist/test-utils/general-utils.js +81 -0
- package/dist/test-utils/general-utils.js.map +1 -0
- package/dist/test-utils/stream_utils.d.ts +6 -0
- package/dist/test-utils/stream_utils.js +37 -0
- package/dist/test-utils/stream_utils.js.map +1 -0
- package/dist/test-utils/test-utils-index.d.ts +3 -0
- package/dist/test-utils/test-utils-index.js +4 -0
- package/dist/test-utils/test-utils-index.js.map +1 -0
- package/dist/tests/register-bucket-validation-tests.d.ts +10 -0
- package/dist/tests/register-bucket-validation-tests.js +139 -0
- package/dist/tests/register-bucket-validation-tests.js.map +1 -0
- package/dist/tests/register-compacting-tests.d.ts +14 -0
- package/dist/tests/register-compacting-tests.js +343 -0
- package/dist/tests/register-compacting-tests.js.map +1 -0
- package/dist/tests/register-data-storage-tests.d.ts +14 -0
- package/dist/tests/register-data-storage-tests.js +1571 -0
- package/dist/tests/register-data-storage-tests.js.map +1 -0
- package/dist/tests/register-sync-tests.d.ts +11 -0
- package/dist/tests/register-sync-tests.js +538 -0
- package/dist/tests/register-sync-tests.js.map +1 -0
- package/dist/tests/tests-index.d.ts +4 -0
- package/dist/tests/tests-index.js +5 -0
- package/dist/tests/tests-index.js.map +1 -0
- package/package.json +28 -0
- package/src/index.ts +5 -0
- package/src/test-utils/bucket-validation.ts +120 -0
- package/src/test-utils/general-utils.ts +113 -0
- package/src/test-utils/stream_utils.ts +42 -0
- package/src/test-utils/test-utils-index.ts +4 -0
- package/src/tests/register-bucket-validation-tests.ts +148 -0
- package/src/tests/register-compacting-tests.ts +297 -0
- package/src/tests/register-data-storage-tests.ts +1552 -0
- package/src/tests/register-sync-tests.ts +521 -0
- package/src/tests/tests-index.ts +4 -0
- package/tsconfig.json +34 -0
- package/tsconfig.tsbuildinfo +1 -0
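The package's main exports are test-suite registration functions (`registerSyncTests`, `registerCompactingTests`, `registerDataStorageTests`, `registerBucketValidationTests`) plus shared helpers under `test-utils/`. The sketch below shows how a storage implementation might consume them in its own vitest suite, following the `@example` JSDoc in `register-sync-tests.ts`; the root re-export path and the `MONGO_STORAGE_FACTORY` helper are assumptions for illustration, not part of this diff.

```TypeScript
// Hypothetical consumer test file (e.g. a storage module's sync.test.ts)
import { describe } from 'vitest';
// Assumption: the registration function is re-exported from the package root.
import { registerSyncTests } from '@powersync/service-core-tests';
// Assumption: MONGO_STORAGE_FACTORY is the consumer's own StorageFactory
// (an async function returning the bucket storage factory under test).
import { MONGO_STORAGE_FACTORY } from './util.js';

describe('sync - mongodb', function () {
  // Registers the shared sync test cases against this storage factory;
  // each call to test() happens inside registerSyncTests.
  registerSyncTests(MONGO_STORAGE_FACTORY);
});
```
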
package/src/tests/register-sync-tests.ts
ADDED
@@ -0,0 +1,521 @@
+import { storage, sync, utils } from '@powersync/service-core';
+import { JSONBig } from '@powersync/service-jsonbig';
+import { RequestParameters } from '@powersync/service-sync-rules';
+import path from 'path';
+import * as timers from 'timers/promises';
+import { fileURLToPath } from 'url';
+import { expect, test } from 'vitest';
+import * as test_utils from '../test-utils/test-utils-index.js';
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+
+const TEST_TABLE = test_utils.makeTestTable('test', ['id']);
+
+const BASIC_SYNC_RULES = `
+bucket_definitions:
+  mybucket:
+    data:
+      - SELECT * FROM test
+`;
+
+export const SYNC_SNAPSHOT_PATH = path.resolve(__dirname, '../__snapshots/sync.test.js.snap');
+
+/**
+ * @example
+ * ```TypeScript
+ * describe('sync - mongodb', function () {
+ *   registerSyncTests(MONGO_STORAGE_FACTORY);
+ * });
+ * ```
+ */
+export function registerSyncTests(factory: test_utils.StorageFactory) {
+  const tracker = new sync.RequestTracker();
+
+  test('sync global data', async () => {
+    using f = await factory();
+
+    const syncRules = await f.updateSyncRules({
+      content: BASIC_SYNC_RULES
+    });
+
+    const bucketStorage = f.getInstance(syncRules);
+    await bucketStorage.autoActivate();
+
+    const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 't1',
+          description: 'Test 1'
+        },
+        afterReplicaId: 't1'
+      });
+
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 't2',
+          description: 'Test 2'
+        },
+        afterReplicaId: 't2'
+      });
+
+      await batch.commit('0/1');
+    });
+
+    const stream = sync.streamResponse({
+      storage: f,
+      params: {
+        buckets: [],
+        include_checksum: true,
+        raw_data: true
+      },
+      parseOptions: test_utils.PARSE_OPTIONS,
+      tracker,
+      syncParams: new RequestParameters({ sub: '' }, {}),
+      token: { exp: Date.now() / 1000 + 10 } as any
+    });
+
+    const lines = await consumeCheckpointLines(stream);
+    expect(lines).toMatchSnapshot();
+  });
+
+  test('sync legacy non-raw data', async () => {
+    const f = await factory();
+
+    const syncRules = await f.updateSyncRules({
+      content: BASIC_SYNC_RULES
+    });
+
+    const bucketStorage = await f.getInstance(syncRules);
+    await bucketStorage.autoActivate();
+
+    const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 't1',
+          description: 'Test\n"string"',
+          large_num: 12345678901234567890n
+        },
+        afterReplicaId: 't1'
+      });
+
+      await batch.commit('0/1');
+    });
+
+    const stream = sync.streamResponse({
+      storage: f,
+      params: {
+        buckets: [],
+        include_checksum: true,
+        raw_data: false
+      },
+      parseOptions: test_utils.PARSE_OPTIONS,
+      tracker,
+      syncParams: new RequestParameters({ sub: '' }, {}),
+      token: { exp: Date.now() / 1000 + 10 } as any
+    });
+
+    const lines = await consumeCheckpointLines(stream);
+    expect(lines).toMatchSnapshot();
+    // Specifically check the number - this is the important part of the test
+    expect(lines[1].data.data[0].data.large_num).toEqual(12345678901234567890n);
+  });
+
+  test('expired token', async () => {
+    const f = await factory();
+
+    const syncRules = await f.updateSyncRules({
+      content: BASIC_SYNC_RULES
+    });
+
+    const storage = await f.getInstance(syncRules);
+    await storage.autoActivate();
+
+    const stream = sync.streamResponse({
+      storage: f,
+      params: {
+        buckets: [],
+        include_checksum: true,
+        raw_data: true
+      },
+      parseOptions: test_utils.PARSE_OPTIONS,
+      tracker,
+      syncParams: new RequestParameters({ sub: '' }, {}),
+      token: { exp: 0 } as any
+    });
+
+    const lines = await consumeCheckpointLines(stream);
+    expect(lines).toMatchSnapshot();
+  });
+
+  test('sync updates to global data', async () => {
+    using f = await factory();
+
+    const syncRules = await f.updateSyncRules({
+      content: BASIC_SYNC_RULES
+    });
+
+    const bucketStorage = await f.getInstance(syncRules);
+    await bucketStorage.autoActivate();
+
+    const stream = sync.streamResponse({
+      storage: f,
+      params: {
+        buckets: [],
+        include_checksum: true,
+        raw_data: true
+      },
+      parseOptions: test_utils.PARSE_OPTIONS,
+      tracker,
+      syncParams: new RequestParameters({ sub: '' }, {}),
+      token: { exp: Date.now() / 1000 + 10 } as any
+    });
+    const iter = stream[Symbol.asyncIterator]();
+
+    expect(await getCheckpointLines(iter)).toMatchSnapshot();
+
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 't1',
+          description: 'Test 1'
+        },
+        afterReplicaId: 't1'
+      });
+
+      await batch.commit('0/1');
+    });
+
+    expect(await getCheckpointLines(iter)).toMatchSnapshot();
+
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 't2',
+          description: 'Test 2'
+        },
+        afterReplicaId: 't2'
+      });
+
+      await batch.commit('0/2');
+    });
+
+    expect(await getCheckpointLines(iter)).toMatchSnapshot();
+
+    iter.return?.();
+  });
+
+  test('expiring token', async () => {
+    using f = await factory();
+
+    const syncRules = await f.updateSyncRules({
+      content: BASIC_SYNC_RULES
+    });
+
+    const storage = await f.getInstance(syncRules);
+    await storage.autoActivate();
+
+    const exp = Date.now() / 1000 + 0.1;
+
+    const stream = sync.streamResponse({
+      storage: f,
+      params: {
+        buckets: [],
+        include_checksum: true,
+        raw_data: true
+      },
+      parseOptions: test_utils.PARSE_OPTIONS,
+      tracker,
+      syncParams: new RequestParameters({ sub: '' }, {}),
+      token: { exp: exp } as any
+    });
+    const iter = stream[Symbol.asyncIterator]();
+
+    const checkpoint = await getCheckpointLines(iter);
+    expect(checkpoint).toMatchSnapshot();
+
+    const expLines = await getCheckpointLines(iter);
+    expect(expLines).toMatchSnapshot();
+  });
+
+  test('compacting data - invalidate checkpoint', async () => {
+    // This tests a case of a compact operation invalidating a checkpoint in the
+    // middle of syncing data.
+    // This is expected to be rare in practice, but it is important to handle
+    // this case correctly to maintain consistency on the client.
+
+    using f = await factory();
+
+    const syncRules = await f.updateSyncRules({
+      content: BASIC_SYNC_RULES
+    });
+
+    const bucketStorage = await f.getInstance(syncRules);
+    await bucketStorage.autoActivate();
+
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 't1',
+          description: 'Test 1'
+        },
+        afterReplicaId: 't1'
+      });
+
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 't2',
+          description: 'Test 2'
+        },
+        afterReplicaId: 't2'
+      });
+
+      await batch.commit('0/1');
+    });
+
+    const stream = sync.streamResponse({
+      storage: f,
+      params: {
+        buckets: [],
+        include_checksum: true,
+        raw_data: true
+      },
+      parseOptions: test_utils.PARSE_OPTIONS,
+      tracker,
+      syncParams: new RequestParameters({ sub: '' }, {}),
+      token: { exp: Date.now() / 1000 + 10 } as any
+    });
+
+    const iter = stream[Symbol.asyncIterator]();
+
+    // Only consume the first "checkpoint" message, and pause before receiving data.
+    const lines = await consumeIterator(iter, { consume: false, isDone: (line) => (line as any)?.checkpoint != null });
+    expect(lines).toMatchSnapshot();
+    expect(lines[0]).toEqual({
+      checkpoint: expect.objectContaining({
+        last_op_id: '2'
+      })
+    });
+
+    // Now we save additional data AND compact before continuing.
+    // This invalidates the checkpoint we've received above.
+
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.UPDATE,
+        after: {
+          id: 't1',
+          description: 'Test 1b'
+        },
+        afterReplicaId: 't1'
+      });
+
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.UPDATE,
+        after: {
+          id: 't2',
+          description: 'Test 2b'
+        },
+        afterReplicaId: 't2'
+      });
+
+      await batch.commit('0/2');
+    });
+
+    await bucketStorage.compact();
+
+    const lines2 = await getCheckpointLines(iter, { consume: true });
+
+    // Snapshot test checks for changes in general.
+    // The tests after that documents the specific things we're looking for
+    // in this test.
+    expect(lines2).toMatchSnapshot();
+
+    expect(lines2[0]).toEqual({
+      data: expect.objectContaining({
+        has_more: false,
+        data: [
+          // The first two ops have been replaced by a single CLEAR op
+          expect.objectContaining({
+            op: 'CLEAR'
+          })
+        ]
+      })
+    });
+
+    // Note: No checkpoint_complete here, since the checkpoint has been
+    // invalidated by the CLEAR op.
+
+    expect(lines2[1]).toEqual({
+      checkpoint_diff: expect.objectContaining({
+        last_op_id: '4'
+      })
+    });
+
+    expect(lines2[2]).toEqual({
+      data: expect.objectContaining({
+        has_more: false,
+        data: [
+          expect.objectContaining({
+            op: 'PUT'
+          }),
+          expect.objectContaining({
+            op: 'PUT'
+          })
+        ]
+      })
+    });
+
+    // Now we get a checkpoint_complete
+    expect(lines2[3]).toEqual({
+      checkpoint_complete: expect.objectContaining({
+        last_op_id: '4'
+      })
+    });
+  });
+
+  test('write checkpoint', async () => {
+    using f = await factory();
+
+    const syncRules = await f.updateSyncRules({
+      content: BASIC_SYNC_RULES
+    });
+
+    const bucketStorage = f.getInstance(syncRules);
+    await bucketStorage.autoActivate();
+
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      // <= the managed write checkpoint LSN below
+      await batch.commit('0/1');
+    });
+
+    const checkpoint = await bucketStorage.createManagedWriteCheckpoint({
+      user_id: 'test',
+      heads: { '1': '1/0' }
+    });
+
+    const params: sync.SyncStreamParameters = {
+      storage: f,
+      params: {
+        buckets: [],
+        include_checksum: true,
+        raw_data: true
+      },
+      parseOptions: test_utils.PARSE_OPTIONS,
+      tracker,
+      syncParams: new RequestParameters({ sub: 'test' }, {}),
+      token: { sub: 'test', exp: Date.now() / 1000 + 10 } as any
+    };
+    const stream1 = sync.streamResponse(params);
+    const lines1 = await consumeCheckpointLines(stream1);
+
+    // If write checkpoints are not correctly filtered, this may already
+    // contain the write checkpoint.
+    expect(lines1[0]).toMatchObject({
+      checkpoint: expect.objectContaining({
+        last_op_id: '0',
+        write_checkpoint: undefined
+      })
+    });
+
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      // must be >= the managed write checkpoint LSN
+      await batch.commit('1/0');
+    });
+
+    // At this point the LSN has advanced, so the write checkpoint should be
+    // included in the next checkpoint message.
+    const stream2 = sync.streamResponse(params);
+    const lines2 = await consumeCheckpointLines(stream2);
+    expect(lines2[0]).toMatchObject({
+      checkpoint: expect.objectContaining({
+        last_op_id: '0',
+        write_checkpoint: `${checkpoint}`
+      })
+    });
+  });
+}
+
+/**
+ * Get lines on an iterator until isDone(line) == true.
+ *
+ * Does not stop the iterator unless options.consume is true.
+ */
+async function consumeIterator<T>(
+  iter: AsyncIterator<T>,
+  options: { isDone: (line: T) => boolean; consume?: boolean }
+) {
+  let lines: T[] = [];
+  try {
+    const controller = new AbortController();
+    const timeout = timers.setTimeout(1500, { value: null, done: 'timeout' }, { signal: controller.signal });
+    while (true) {
+      let { value, done } = await Promise.race([timeout, iter.next()]);
+      if (done == 'timeout') {
+        throw new Error('Timeout');
+      }
+      if (typeof value == 'string') {
+        value = JSONBig.parse(value);
+      }
+      if (value) {
+        lines.push(value);
+      }
+      if (done || options.isDone(value)) {
+        break;
+      }
+    }
+    controller.abort();
+
+    if (options?.consume) {
+      iter.return?.();
+    }
+    return lines;
+  } catch (e) {
+    if (options?.consume) {
+      iter.throw?.(e);
+    }
+    throw e;
+  }
+}
+
+/**
+ * Get lines on an iterator until the next checkpoint_complete.
+ *
+ * Does not stop the iterator unless options.consume is true.
+ */
+async function getCheckpointLines(
+  iter: AsyncIterator<utils.StreamingSyncLine | string | null>,
+  options?: { consume?: boolean }
+) {
+  return consumeIterator(iter, {
+    consume: options?.consume,
+    isDone: (line) => (line as any)?.checkpoint_complete
+  });
+}
+
+/**
+ * Get lines on an iterator until the next checkpoint_complete.
+ *
+ * Stops the iterator afterwards.
+ */
+async function consumeCheckpointLines(
+  iterable: AsyncIterable<utils.StreamingSyncLine | string | null>
+): Promise<any[]> {
+  return getCheckpointLines(iterable[Symbol.asyncIterator](), { consume: true });
+}
package/tsconfig.json
ADDED
@@ -0,0 +1,34 @@
+{
+  "extends": "../../tsconfig.base.json",
+  "compilerOptions": {
+    "rootDir": "src",
+    "outDir": "dist",
+    "esModuleInterop": true,
+    "skipLibCheck": true,
+    "sourceMap": true
+  },
+  "include": ["src"],
+  "references": [
+    {
+      "path": "../types"
+    },
+    {
+      "path": "../rsocket-router"
+    },
+    {
+      "path": "../jsonbig"
+    },
+    {
+      "path": "../jpgwire"
+    },
+    {
+      "path": "../sync-rules"
+    },
+    {
+      "path": "../service-core"
+    },
+    {
+      "path": "../../libs/lib-services"
+    }
+  ]
+}