@powersync/service-core-tests 0.0.0-dev-20250214100224 → 0.0.0-dev-20250227082606

This diff shows the changes between publicly available versions of the package as published to a supported registry. It is provided for informational purposes only.
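
The recurring change in this diff is the call shape of `sync.streamResponse`: the old `storage: f` / `parseOptions: test_utils.PARSE_OPTIONS` options are replaced by a shared `sync.SyncContext` plus an explicit `bucketStorage` instance and caller-parsed `syncRules`. The sketch below only restates that migration using names taken from the diff itself; it is not a verified public API reference, and it assumes it runs inside the existing test module where `sync`, `storage`, `test_utils`, `RequestParameters`, `tracker`, `bucketStorage`, and `f` are already imported and set up exactly as the tests show.

// Sketch only: old vs. new streamResponse options, as used by these tests.
// All identifiers are assumed to be in scope from the surrounding test module.

// Old shape (removed in this version):
// const stream = sync.streamResponse({
//   storage: f,                             // storage factory instance
//   params: { buckets: [], include_checksum: true, raw_data: true },
//   parseOptions: test_utils.PARSE_OPTIONS, // sync rules parsed internally
//   tracker,
//   syncParams: new RequestParameters({ sub: '' }, {}),
//   token: { exp: Date.now() / 1000 + 10 } as any
// });

// New shape: limits live in a shared SyncContext, and the caller passes the
// bucket storage instance plus sync rules it parsed itself.
const syncContext = new sync.SyncContext({
  maxBuckets: 10,
  maxParameterQueryResults: 10,
  maxDataFetchConcurrency: 2
});

const stream = sync.streamResponse({
  syncContext,
  bucketStorage,
  syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
  params: { buckets: [], include_checksum: true, raw_data: true },
  tracker,
  syncParams: new RequestParameters({ sub: '' }, {}),
  token: { exp: Date.now() / 1000 + 10 } as any
});

The same substitution repeats in every `streamResponse` call below, and the `sync.SyncStreamParameters` object in the final hunk takes the identical shape.
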
@@ -31,6 +31,11 @@ export const SYNC_SNAPSHOT_PATH = path.resolve(__dirname, '../__snapshots/sync.t
  */
 export function registerSyncTests(factory: storage.TestStorageFactory) {
   const tracker = new sync.RequestTracker();
+  const syncContext = new sync.SyncContext({
+    maxBuckets: 10,
+    maxParameterQueryResults: 10,
+    maxDataFetchConcurrency: 2
+  });
 
   test('sync global data', async () => {
     await using f = await factory();
@@ -67,13 +72,76 @@ export function registerSyncTests(factory: storage.TestStorageFactory) {
     });
 
     const stream = sync.streamResponse({
-      storage: f,
+      syncContext,
+      bucketStorage: bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+      params: {
+        buckets: [],
+        include_checksum: true,
+        raw_data: true
+      },
+      tracker,
+      syncParams: new RequestParameters({ sub: '' }, {}),
+      token: { exp: Date.now() / 1000 + 10 } as any
+    });
+
+    const lines = await consumeCheckpointLines(stream);
+    expect(lines).toMatchSnapshot();
+  });
+
+  test('sync buckets in order', async () => {
+    await using f = await factory();
+
+    const syncRules = await f.updateSyncRules({
+      content: `
+bucket_definitions:
+  b0:
+    priority: 2
+    data:
+      - SELECT * FROM test WHERE LENGTH(id) <= 2;
+  b1:
+    priority: 1
+    data:
+      - SELECT * FROM test WHERE LENGTH(id) > 2;
+`
+    });
+
+    const bucketStorage = f.getInstance(syncRules);
+    await bucketStorage.autoActivate();
+
+    const result = await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 't1',
+          description: 'Test 1'
+        },
+        afterReplicaId: 't1'
+      });
+
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 'earlier',
+          description: 'Test 2'
+        },
+        afterReplicaId: 'earlier'
+      });
+
+      await batch.commit('0/1');
+    });
+
+    const stream = sync.streamResponse({
+      syncContext,
+      bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
-      parseOptions: test_utils.PARSE_OPTIONS,
       tracker,
       syncParams: new RequestParameters({ sub: '' }, {}),
       token: { exp: Date.now() / 1000 + 10 } as any
@@ -83,6 +151,180 @@ export function registerSyncTests(factory: storage.TestStorageFactory) {
     expect(lines).toMatchSnapshot();
   });
 
+  test('sync interrupts low-priority buckets on new checkpoints', async () => {
+    await using f = await factory();
+
+    const syncRules = await f.updateSyncRules({
+      content: `
+bucket_definitions:
+  b0:
+    priority: 2
+    data:
+      - SELECT * FROM test WHERE LENGTH(id) <= 5;
+  b1:
+    priority: 1
+    data:
+      - SELECT * FROM test WHERE LENGTH(id) > 5;
+`
+    });
+
+    const bucketStorage = f.getInstance(syncRules);
+    await bucketStorage.autoActivate();
+
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      // Initial data: Add one priority row and 10k low-priority rows.
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 'highprio',
+          description: 'High priority row'
+        },
+        afterReplicaId: 'highprio'
+      });
+      for (let i = 0; i < 10_000; i++) {
+        await batch.save({
+          sourceTable: TEST_TABLE,
+          tag: storage.SaveOperationTag.INSERT,
+          after: {
+            id: `${i}`,
+            description: 'low prio'
+          },
+          afterReplicaId: `${i}`
+        });
+      }
+
+      await batch.commit('0/1');
+    });
+
+    const stream = sync.streamResponse({
+      syncContext,
+      bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+      params: {
+        buckets: [],
+        include_checksum: true,
+        raw_data: true
+      },
+      tracker,
+      syncParams: new RequestParameters({ sub: '' }, {}),
+      token: { exp: Date.now() / 1000 + 10 } as any
+    });
+
+    let sentCheckpoints = 0;
+    let sentRows = 0;
+
+    for await (let next of stream) {
+      if (typeof next == 'string') {
+        next = JSON.parse(next);
+      }
+      if (typeof next === 'object' && next !== null) {
+        if ('partial_checkpoint_complete' in next) {
+          if (sentCheckpoints == 1) {
+            // Save new data to interrupt the low-priority sync.
+
+            await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+              // Add another high-priority row. This should interrupt the long-running low-priority sync.
+              await batch.save({
+                sourceTable: TEST_TABLE,
+                tag: storage.SaveOperationTag.INSERT,
+                after: {
+                  id: 'highprio2',
+                  description: 'Another high-priority row'
+                },
+                afterReplicaId: 'highprio2'
+              });
+
+              await batch.commit('0/2');
+            });
+          } else {
+            // Low-priority sync from the first checkpoint was interrupted. This should not happen before
+            // 1000 low-priority items were synchronized.
+            expect(sentCheckpoints).toBe(2);
+            expect(sentRows).toBeGreaterThan(1000);
+          }
+        }
+        if ('checkpoint' in next || 'checkpoint_diff' in next) {
+          sentCheckpoints += 1;
+        }
+
+        if ('data' in next) {
+          sentRows += next.data.data.length;
+        }
+        if ('checkpoint_complete' in next) {
+          break;
+        }
+      }
+    }
+
+    expect(sentCheckpoints).toBe(2);
+    expect(sentRows).toBe(10002);
+  });
+
+  test('sends checkpoint complete line for empty checkpoint', async () => {
+    await using f = await factory();
+
+    const syncRules = await f.updateSyncRules({
+      content: BASIC_SYNC_RULES
+    });
+    const bucketStorage = f.getInstance(syncRules);
+    await bucketStorage.autoActivate();
+
+    await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+      await batch.save({
+        sourceTable: TEST_TABLE,
+        tag: storage.SaveOperationTag.INSERT,
+        after: {
+          id: 't1',
+          description: 'sync'
+        },
+        afterReplicaId: 't1'
+      });
+      await batch.commit('0/1');
+    });
+
+    const stream = sync.streamResponse({
+      syncContext,
+      bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
+      params: {
+        buckets: [],
+        include_checksum: true,
+        raw_data: true
+      },
+      tracker,
+      syncParams: new RequestParameters({ sub: '' }, {}),
+      token: { exp: Date.now() / 1000 + 100000 } as any
+    });
+
+    const lines: any[] = [];
+    let receivedCompletions = 0;
+
+    for await (let next of stream) {
+      if (typeof next == 'string') {
+        next = JSON.parse(next);
+      }
+      lines.push(next);
+
+      if (typeof next === 'object' && next !== null) {
+        if ('checkpoint_complete' in next) {
+          receivedCompletions++;
+          if (receivedCompletions == 1) {
+            // Trigger an empty bucket update.
+            await bucketStorage.createManagedWriteCheckpoint({ user_id: '', heads: { '1': '1/0' } });
+            await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+              await batch.commit('1/0');
+            });
+          } else {
+            break;
+          }
+        }
+      }
+    }
+
+    expect(lines).toMatchSnapshot();
+  });
+
   test('sync legacy non-raw data', async () => {
     const f = await factory();
 
@@ -109,13 +351,14 @@ export function registerSyncTests(factory: storage.TestStorageFactory) {
     });
 
     const stream = sync.streamResponse({
-      storage: f,
+      syncContext,
+      bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: false
       },
-      parseOptions: test_utils.PARSE_OPTIONS,
       tracker,
       syncParams: new RequestParameters({ sub: '' }, {}),
       token: { exp: Date.now() / 1000 + 10 } as any
@@ -134,17 +377,18 @@ export function registerSyncTests(factory: storage.TestStorageFactory) {
       content: BASIC_SYNC_RULES
     });
 
-    const storage = await f.getInstance(syncRules);
-    await storage.autoActivate();
+    const bucketStorage = await f.getInstance(syncRules);
+    await bucketStorage.autoActivate();
 
     const stream = sync.streamResponse({
-      storage: f,
+      syncContext,
+      bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
-      parseOptions: test_utils.PARSE_OPTIONS,
       tracker,
       syncParams: new RequestParameters({ sub: '' }, {}),
       token: { exp: 0 } as any
@@ -165,13 +409,14 @@ export function registerSyncTests(factory: storage.TestStorageFactory) {
     await bucketStorage.autoActivate();
 
     const stream = sync.streamResponse({
-      storage: f,
+      syncContext,
+      bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
-      parseOptions: test_utils.PARSE_OPTIONS,
       tracker,
       syncParams: new RequestParameters({ sub: '' }, {}),
       token: { exp: Date.now() / 1000 + 10 } as any
@@ -222,19 +467,20 @@ export function registerSyncTests(factory: storage.TestStorageFactory) {
       content: BASIC_SYNC_RULES
     });
 
-    const storage = await f.getInstance(syncRules);
-    await storage.autoActivate();
+    const bucketStorage = await f.getInstance(syncRules);
+    await bucketStorage.autoActivate();
 
     const exp = Date.now() / 1000 + 0.1;
 
     const stream = sync.streamResponse({
-      storage: f,
+      syncContext,
+      bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
-      parseOptions: test_utils.PARSE_OPTIONS,
       tracker,
       syncParams: new RequestParameters({ sub: '' }, {}),
       token: { exp: exp } as any
@@ -288,13 +534,14 @@ export function registerSyncTests(factory: storage.TestStorageFactory) {
     });
 
     const stream = sync.streamResponse({
-      storage: f,
+      syncContext,
+      bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
-      parseOptions: test_utils.PARSE_OPTIONS,
       tracker,
       syncParams: new RequestParameters({ sub: '' }, {}),
       token: { exp: Date.now() / 1000 + 10 } as any
@@ -411,13 +658,14 @@ export function registerSyncTests(factory: storage.TestStorageFactory) {
     });
 
     const params: sync.SyncStreamParameters = {
-      storage: f,
+      syncContext,
+      bucketStorage,
+      syncRules: bucketStorage.getParsedSyncRules(test_utils.PARSE_OPTIONS),
       params: {
         buckets: [],
         include_checksum: true,
         raw_data: true
       },
-      parseOptions: test_utils.PARSE_OPTIONS,
       tracker,
       syncParams: new RequestParameters({ sub: 'test' }, {}),
       token: { sub: 'test', exp: Date.now() / 1000 + 10 } as any