@powersync/service-core-tests 0.0.0-dev-20250611110033 → 0.0.0-dev-20250618131818

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1769,14 +1769,12 @@ bucket_definitions:
     .watchCheckpointChanges({ user_id: 'user1', signal: abortController.signal })
     [Symbol.asyncIterator]();
 
-  await bucketStorage.batchCreateCustomWriteCheckpoints([
-    {
+  await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+    await batch.addCustomWriteCheckpoint({
       checkpoint: 5n,
       user_id: 'user1'
-    }
-  ]);
-
-  await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+    });
+    await batch.flush();
     await batch.keepalive('5/0');
   });
 
@@ -1785,7 +1783,6 @@ bucket_definitions:
     done: false,
     value: {
       base: {
-        checkpoint: 0n,
         lsn: '5/0'
       },
       writeCheckpoint: 5n
@@ -1822,23 +1819,18 @@ bucket_definitions:
     done: false,
     value: {
       base: {
-        checkpoint: 0n,
         lsn: '5/0'
       },
       writeCheckpoint: null
     }
   });
 
-  await bucketStorage.batchCreateCustomWriteCheckpoints([
-    {
+  await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+    batch.addCustomWriteCheckpoint({
       checkpoint: 6n,
       user_id: 'user1'
-    }
-  ]);
-  // We have to trigger a new keepalive after the checkpoint, at least to cover postgres storage.
-  // This is what is effetively triggered with RouteAPI.createReplicationHead().
-  // MongoDB storage doesn't explicitly need this anymore.
-  await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+    });
+    await batch.flush();
     await batch.keepalive('6/0');
   });
 
@@ -1847,7 +1839,6 @@ bucket_definitions:
     done: false,
     value: {
       base: {
-        checkpoint: 0n
         // can be 5/0 or 6/0 - actual value not relevant for custom write checkpoints
         // lsn: '6/0'
       },
@@ -1855,13 +1846,12 @@ bucket_definitions:
     }
   });
 
-  await bucketStorage.batchCreateCustomWriteCheckpoints([
-    {
+  await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+    batch.addCustomWriteCheckpoint({
       checkpoint: 7n,
       user_id: 'user1'
-    }
-  ]);
-  await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
+    });
+    await batch.flush();
     await batch.keepalive('7/0');
   });
 
@@ -1870,7 +1860,6 @@ bucket_definitions:
     done: false,
     value: {
       base: {
-        checkpoint: 0n
         // can be 5/0, 6/0 or 7/0 - actual value not relevant for custom write checkpoints
         // lsn: '7/0'
       },
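Taken together, the hunks above replace the standalone bucketStorage.batchCreateCustomWriteCheckpoints([...]) call with custom write checkpoints registered inside a startBatch callback, flushed, and followed by a keepalive. A minimal sketch of the new pattern as it appears in the updated tests (assuming a bucketStorage instance and test_utils.BATCH_OPTIONS configured as elsewhere in this test suite; that setup is not part of this diff):

  // Sketch only: register a custom write checkpoint as part of a storage batch,
  // flush it, then advance the replication head with a keepalive.
  await bucketStorage.startBatch(test_utils.BATCH_OPTIONS, async (batch) => {
    await batch.addCustomWriteCheckpoint({
      checkpoint: 5n, // BigInt checkpoint value, matching the tests above
      user_id: 'user1'
    });
    await batch.flush();
    await batch.keepalive('5/0');
  });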
@@ -519,12 +519,12 @@ bucket_definitions:
 
       await batch.commit('0/2');
     });
+  }
 
-    if (sentRows >= 1000 && sentRows <= 2001) {
-      // pause for a bit to give the stream time to process interruptions.
-      // This covers the data batch above and the next one.
-      await timers.setTimeout(50);
-    }
+  if (sentRows >= 1000 && sentRows <= 2001) {
+    // pause for a bit to give the stream time to process interruptions.
+    // This covers the data batch above and the next one.
+    await timers.setTimeout(50);
   }
 }
 if ('checkpoint_complete' in next) {
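For context, the relocated block above throttles the test's row generation: once between roughly 1000 and 2001 rows have been sent, the test pauses briefly so the stream under test has time to process interruptions. A minimal sketch of that pacing pattern, assuming a hypothetical surrounding loop with a sentRows counter (the real loop is not part of this diff):

  import * as timers from 'node:timers/promises';

  // Hypothetical driver loop; only the sentRows range check and the pause
  // mirror the diff above. Batch sizes and the total row count are assumptions.
  let sentRows = 0;
  while (sentRows < 3000) {
    sentRows += 1000;
    if (sentRows >= 1000 && sentRows <= 2001) {
      // Pause briefly so the stream being tested can process interruptions.
      await timers.setTimeout(50);
    }
  }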