react-server-dom-webpack 19.2.0-canary-72135096-20250421 → 19.2.0-canary-197d6a04-20250424

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2693,10 +2693,10 @@
  return hook.checkDCE ? !0 : !1;
  })({
  bundleType: 1,
- version: "19.2.0-canary-72135096-20250421",
+ version: "19.2.0-canary-197d6a04-20250424",
  rendererPackageName: "react-server-dom-webpack",
  currentDispatcherRef: ReactSharedInternals,
- reconcilerVersion: "19.2.0-canary-72135096-20250421",
+ reconcilerVersion: "19.2.0-canary-197d6a04-20250424",
  getCurrentComponentInfo: function () {
  return currentOwnerInDEV;
  }
@@ -818,10 +818,11 @@
  function progress(entry) {
  if (!aborted)
  if (entry.done)
- request.abortListeners.delete(abortStream),
- (entry = streamTask.id.toString(16) + ":C\n"),
+ (entry = streamTask.id.toString(16) + ":C\n"),
  request.completedRegularChunks.push(stringToChunk(entry)),
  enqueueFlush(request),
+ request.abortListeners.delete(abortStream),
+ callOnAllReadyIfReady(request),
  (aborted = !0);
  else
  try {
@@ -882,7 +883,6 @@
  function progress(entry) {
  if (!aborted)
  if (entry.done) {
- request.abortListeners.delete(abortIterable);
  if (void 0 === entry.value)
  var endStreamRow = streamTask.id.toString(16) + ":C\n";
  else
@@ -899,6 +899,8 @@
  }
  request.completedRegularChunks.push(stringToChunk(endStreamRow));
  enqueueFlush(request);
+ request.abortListeners.delete(abortIterable);
+ callOnAllReadyIfReady(request);
  aborted = !0;
  } else
  try {
@@ -2420,10 +2422,11 @@
  emitModelChunk(request, task.id, value));
  }
  function erroredTask(request, task, error) {
- request.abortableTasks.delete(task);
  task.status = ERRORED$1;
  var digest = logRecoverableError(request, error, task);
  emitErrorChunk(request, task.id, digest, error);
+ request.abortableTasks.delete(task);
+ callOnAllReadyIfReady(request);
  }
  function retryTask(request, task) {
  if (task.status === PENDING$1) {
@@ -2457,8 +2460,9 @@
  var json = stringify(resolvedModel);
  emitModelChunk(request, task.id, json);
  }
- request.abortableTasks.delete(task);
  task.status = COMPLETED;
+ request.abortableTasks.delete(task);
+ callOnAllReadyIfReady(request);
  } catch (thrownValue) {
  if (request.status === ABORTING) {
  request.abortableTasks.delete(task);
@@ -2500,7 +2504,6 @@
  ReactSharedInternalsServer.H = HooksDispatcher;
  var prevRequest = currentRequest;
  currentRequest$1 = currentRequest = request;
- var hadAbortableTasks = 0 < request.abortableTasks.size;
  try {
  var pingedTasks = request.pingedTasks;
  request.pingedTasks = [];
@@ -2508,10 +2511,6 @@
  retryTask(request, pingedTasks[i]);
  null !== request.destination &&
  flushCompletedChunks(request, request.destination);
- if (hadAbortableTasks && 0 === request.abortableTasks.size) {
- var onAllReady = request.onAllReady;
- onAllReady();
- }
  } catch (error) {
  logRecoverableError(request, error, null), fatalError(request, error);
  } finally {
@@ -2603,6 +2602,13 @@
  destination && flushCompletedChunks(request, destination);
  }));
  }
+ function callOnAllReadyIfReady(request) {
+ if (
+ 0 === request.abortableTasks.size &&
+ 0 === request.abortListeners.size
+ )
+ request.onAllReady();
+ }
  function startFlowing(request, destination) {
  if (request.status === CLOSING)
  (request.status = CLOSED),
@@ -2647,8 +2653,7 @@
  }
  });
  abortableTasks.clear();
- var onAllReady = request.onAllReady;
- onAllReady();
+ callOnAllReadyIfReady(request);
  }
  var abortListeners = request.abortListeners;
  if (0 < abortListeners.size) {
@@ -2664,6 +2669,7 @@
  return callback(_error);
  });
  abortListeners.clear();
+ callOnAllReadyIfReady(request);
  }
  null !== request.destination &&
  flushCompletedChunks(request, request.destination);
@@ -821,10 +821,11 @@ function serializeReadableStream(request, task, stream) {
  function progress(entry) {
  if (!aborted)
  if (entry.done)
- request.abortListeners.delete(abortStream),
- (entry = streamTask.id.toString(16) + ":C\n"),
+ (entry = streamTask.id.toString(16) + ":C\n"),
  request.completedRegularChunks.push(stringToChunk(entry)),
  enqueueFlush(request),
+ request.abortListeners.delete(abortStream),
+ callOnAllReadyIfReady(request),
  (aborted = !0);
  else
  try {
@@ -881,7 +882,6 @@ function serializeAsyncIterable(request, task, iterable, iterator) {
  function progress(entry) {
  if (!aborted)
  if (entry.done) {
- request.abortListeners.delete(abortIterable);
  if (void 0 === entry.value)
  var endStreamRow = streamTask.id.toString(16) + ":C\n";
  else
@@ -898,6 +898,8 @@ function serializeAsyncIterable(request, task, iterable, iterator) {
  }
  request.completedRegularChunks.push(stringToChunk(endStreamRow));
  enqueueFlush(request);
+ request.abortListeners.delete(abortIterable);
+ callOnAllReadyIfReady(request);
  aborted = !0;
  } else
  try {
@@ -1704,10 +1706,11 @@ function emitChunk(request, task, value) {
  emitModelChunk(request, task.id, value));
  }
  function erroredTask(request, task, error) {
- request.abortableTasks.delete(task);
  task.status = 4;
  error = logRecoverableError(request, error, task);
  emitErrorChunk(request, task.id, error);
+ request.abortableTasks.delete(task);
+ callOnAllReadyIfReady(request);
  }
  var emptyRoot = {};
  function retryTask(request, task) {
@@ -1732,8 +1735,9 @@ function retryTask(request, task) {
  var json = stringify(resolvedModel);
  emitModelChunk(request, task.id, json);
  }
- request.abortableTasks.delete(task);
  task.status = 1;
+ request.abortableTasks.delete(task);
+ callOnAllReadyIfReady(request);
  } catch (thrownValue) {
  if (12 === request.status) {
  request.abortableTasks.delete(task);
@@ -1765,7 +1769,6 @@ function performWork(request) {
  ReactSharedInternalsServer.H = HooksDispatcher;
  var prevRequest = currentRequest;
  currentRequest$1 = currentRequest = request;
- var hadAbortableTasks = 0 < request.abortableTasks.size;
  try {
  var pingedTasks = request.pingedTasks;
  request.pingedTasks = [];
@@ -1773,10 +1776,6 @@ function performWork(request) {
  retryTask(request, pingedTasks[i]);
  null !== request.destination &&
  flushCompletedChunks(request, request.destination);
- if (hadAbortableTasks && 0 === request.abortableTasks.size) {
- var onAllReady = request.onAllReady;
- onAllReady();
- }
  } catch (error) {
  logRecoverableError(request, error, null), fatalError(request, error);
  } finally {
@@ -1843,6 +1842,10 @@ function enqueueFlush(request) {
  destination && flushCompletedChunks(request, destination);
  }));
  }
+ function callOnAllReadyIfReady(request) {
+ if (0 === request.abortableTasks.size && 0 === request.abortListeners.size)
+ request.onAllReady();
+ }
  function startFlowing(request, destination) {
  if (13 === request.status)
  (request.status = 14), closeWithError(destination, request.fatalError);
@@ -1882,8 +1885,7 @@ function abort(request, reason) {
  }
  });
  abortableTasks.clear();
- var onAllReady = request.onAllReady;
- onAllReady();
+ callOnAllReadyIfReady(request);
  }
  var abortListeners = request.abortListeners;
  if (0 < abortListeners.size) {
@@ -1899,6 +1901,7 @@ function abort(request, reason) {
  return callback(error$22);
  });
  abortListeners.clear();
+ callOnAllReadyIfReady(request);
  }
  null !== request.destination &&
  flushCompletedChunks(request, request.destination);
@@ -833,10 +833,11 @@
  function progress(entry) {
  if (!aborted)
  if (entry.done)
- request.abortListeners.delete(abortStream),
- (entry = streamTask.id.toString(16) + ":C\n"),
+ (entry = streamTask.id.toString(16) + ":C\n"),
  request.completedRegularChunks.push(stringToChunk(entry)),
  enqueueFlush(request),
+ request.abortListeners.delete(abortStream),
+ callOnAllReadyIfReady(request),
  (aborted = !0);
  else
  try {
@@ -897,7 +898,6 @@
  function progress(entry) {
  if (!aborted)
  if (entry.done) {
- request.abortListeners.delete(abortIterable);
  if (void 0 === entry.value)
  var endStreamRow = streamTask.id.toString(16) + ":C\n";
  else
@@ -914,6 +914,8 @@
  }
  request.completedRegularChunks.push(stringToChunk(endStreamRow));
  enqueueFlush(request);
+ request.abortListeners.delete(abortIterable);
+ callOnAllReadyIfReady(request);
  aborted = !0;
  } else
  try {
@@ -2494,10 +2496,11 @@
  emitModelChunk(request, task.id, value));
  }
  function erroredTask(request, task, error) {
- request.abortableTasks.delete(task);
  task.status = ERRORED$1;
  var digest = logRecoverableError(request, error, task);
  emitErrorChunk(request, task.id, digest, error);
+ request.abortableTasks.delete(task);
+ callOnAllReadyIfReady(request);
  }
  function retryTask(request, task) {
  if (task.status === PENDING$1) {
@@ -2531,8 +2534,9 @@
  var json = stringify(resolvedModel);
  emitModelChunk(request, task.id, json);
  }
- request.abortableTasks.delete(task);
  task.status = COMPLETED;
+ request.abortableTasks.delete(task);
+ callOnAllReadyIfReady(request);
  } catch (thrownValue) {
  if (request.status === ABORTING) {
  request.abortableTasks.delete(task);
@@ -2574,7 +2578,6 @@
  ReactSharedInternalsServer.H = HooksDispatcher;
  var prevRequest = currentRequest;
  currentRequest$1 = currentRequest = request;
- var hadAbortableTasks = 0 < request.abortableTasks.size;
  try {
  var pingedTasks = request.pingedTasks;
  request.pingedTasks = [];
@@ -2582,10 +2585,6 @@
  retryTask(request, pingedTasks[i]);
  null !== request.destination &&
  flushCompletedChunks(request, request.destination);
- if (hadAbortableTasks && 0 === request.abortableTasks.size) {
- var onAllReady = request.onAllReady;
- onAllReady();
- }
  } catch (error) {
  logRecoverableError(request, error, null), fatalError(request, error);
  } finally {
@@ -2681,6 +2680,13 @@
  destination && flushCompletedChunks(request, destination);
  }, 0));
  }
+ function callOnAllReadyIfReady(request) {
+ if (
+ 0 === request.abortableTasks.size &&
+ 0 === request.abortListeners.size
+ )
+ request.onAllReady();
+ }
  function startFlowing(request, destination) {
  if (request.status === CLOSING)
  (request.status = CLOSED),
@@ -2725,8 +2731,7 @@
  }
  });
  abortableTasks.clear();
- var onAllReady = request.onAllReady;
- onAllReady();
+ callOnAllReadyIfReady(request);
  }
  var abortListeners = request.abortListeners;
  if (0 < abortListeners.size) {
@@ -2742,6 +2747,7 @@
  return callback(_error);
  });
  abortListeners.clear();
+ callOnAllReadyIfReady(request);
  }
  null !== request.destination &&
  flushCompletedChunks(request, request.destination);
@@ -4147,9 +4153,6 @@
  var stream = new ReadableStream(
  {
  type: "bytes",
- start: function () {
- startWork(request);
- },
  pull: function (controller) {
  startFlowing(request, controller);
  },
@@ -826,10 +826,11 @@ function serializeReadableStream(request, task, stream) {
  function progress(entry) {
  if (!aborted)
  if (entry.done)
- request.abortListeners.delete(abortStream),
- (entry = streamTask.id.toString(16) + ":C\n"),
+ (entry = streamTask.id.toString(16) + ":C\n"),
  request.completedRegularChunks.push(stringToChunk(entry)),
  enqueueFlush(request),
+ request.abortListeners.delete(abortStream),
+ callOnAllReadyIfReady(request),
  (aborted = !0);
  else
  try {
@@ -886,7 +887,6 @@ function serializeAsyncIterable(request, task, iterable, iterator) {
  function progress(entry) {
  if (!aborted)
  if (entry.done) {
- request.abortListeners.delete(abortIterable);
  if (void 0 === entry.value)
  var endStreamRow = streamTask.id.toString(16) + ":C\n";
  else
@@ -903,6 +903,8 @@ function serializeAsyncIterable(request, task, iterable, iterator) {
  }
  request.completedRegularChunks.push(stringToChunk(endStreamRow));
  enqueueFlush(request);
+ request.abortListeners.delete(abortIterable);
+ callOnAllReadyIfReady(request);
  aborted = !0;
  } else
  try {
@@ -1711,10 +1713,11 @@ function emitChunk(request, task, value) {
  emitModelChunk(request, task.id, value));
  }
  function erroredTask(request, task, error) {
- request.abortableTasks.delete(task);
  task.status = 4;
  error = logRecoverableError(request, error, task);
  emitErrorChunk(request, task.id, error);
+ request.abortableTasks.delete(task);
+ callOnAllReadyIfReady(request);
  }
  var emptyRoot = {};
  function retryTask(request, task) {
@@ -1739,8 +1742,9 @@ function retryTask(request, task) {
  var json = stringify(resolvedModel);
  emitModelChunk(request, task.id, json);
  }
- request.abortableTasks.delete(task);
  task.status = 1;
+ request.abortableTasks.delete(task);
+ callOnAllReadyIfReady(request);
  } catch (thrownValue) {
  if (12 === request.status) {
  request.abortableTasks.delete(task);
@@ -1772,7 +1776,6 @@ function performWork(request) {
  ReactSharedInternalsServer.H = HooksDispatcher;
  var prevRequest = currentRequest;
  currentRequest$1 = currentRequest = request;
- var hadAbortableTasks = 0 < request.abortableTasks.size;
  try {
  var pingedTasks = request.pingedTasks;
  request.pingedTasks = [];
@@ -1780,10 +1783,6 @@ function performWork(request) {
  retryTask(request, pingedTasks[i]);
  null !== request.destination &&
  flushCompletedChunks(request, request.destination);
- if (hadAbortableTasks && 0 === request.abortableTasks.size) {
- var onAllReady = request.onAllReady;
- onAllReady();
- }
  } catch (error) {
  logRecoverableError(request, error, null), fatalError(request, error);
  } finally {
@@ -1854,6 +1853,10 @@ function enqueueFlush(request) {
  destination && flushCompletedChunks(request, destination);
  }, 0));
  }
+ function callOnAllReadyIfReady(request) {
+ if (0 === request.abortableTasks.size && 0 === request.abortListeners.size)
+ request.onAllReady();
+ }
  function startFlowing(request, destination) {
  if (13 === request.status)
  (request.status = 14), closeWithError(destination, request.fatalError);
@@ -1893,8 +1896,7 @@ function abort(request, reason) {
  }
  });
  abortableTasks.clear();
- var onAllReady = request.onAllReady;
- onAllReady();
+ callOnAllReadyIfReady(request);
  }
  var abortListeners = request.abortListeners;
  if (0 < abortListeners.size) {
@@ -1910,6 +1912,7 @@ function abort(request, reason) {
  return callback(error$22);
  });
  abortListeners.clear();
+ callOnAllReadyIfReady(request);
  }
  null !== request.destination &&
  flushCompletedChunks(request, request.destination);
@@ -2812,9 +2815,6 @@ exports.unstable_prerender = function (model, webpackMap, options) {
  var stream = new ReadableStream(
  {
  type: "bytes",
- start: function () {
- startWork(request);
- },
  pull: function (controller) {
  startFlowing(request, controller);
  },
@@ -854,10 +854,11 @@
  function progress(entry) {
  if (!aborted)
  if (entry.done)
- request.abortListeners.delete(abortStream),
- (entry = streamTask.id.toString(16) + ":C\n"),
+ (entry = streamTask.id.toString(16) + ":C\n"),
  request.completedRegularChunks.push(entry),
  enqueueFlush(request),
+ request.abortListeners.delete(abortStream),
+ callOnAllReadyIfReady(request),
  (aborted = !0);
  else
  try {
@@ -918,7 +919,6 @@
  function progress(entry) {
  if (!aborted)
  if (entry.done) {
- request.abortListeners.delete(abortIterable);
  if (void 0 === entry.value)
  var endStreamRow = streamTask.id.toString(16) + ":C\n";
  else
@@ -935,6 +935,8 @@
  }
  request.completedRegularChunks.push(endStreamRow);
  enqueueFlush(request);
+ request.abortListeners.delete(abortIterable);
+ callOnAllReadyIfReady(request);
  aborted = !0;
  } else
  try {
@@ -2476,10 +2478,11 @@
  emitModelChunk(request, task.id, value));
  }
  function erroredTask(request, task, error) {
- request.abortableTasks.delete(task);
  task.status = ERRORED$1;
  var digest = logRecoverableError(request, error, task);
  emitErrorChunk(request, task.id, digest, error);
+ request.abortableTasks.delete(task);
+ callOnAllReadyIfReady(request);
  }
  function retryTask(request, task) {
  if (task.status === PENDING$1) {
@@ -2513,8 +2516,9 @@
  var json = stringify(resolvedModel);
  emitModelChunk(request, task.id, json);
  }
- request.abortableTasks.delete(task);
  task.status = COMPLETED;
+ request.abortableTasks.delete(task);
+ callOnAllReadyIfReady(request);
  } catch (thrownValue) {
  if (request.status === ABORTING) {
  request.abortableTasks.delete(task);
@@ -2556,7 +2560,6 @@
  ReactSharedInternalsServer.H = HooksDispatcher;
  var prevRequest = currentRequest;
  currentRequest$1 = currentRequest = request;
- var hadAbortableTasks = 0 < request.abortableTasks.size;
  try {
  var pingedTasks = request.pingedTasks;
  request.pingedTasks = [];
@@ -2564,10 +2567,6 @@
  retryTask(request, pingedTasks[i]);
  null !== request.destination &&
  flushCompletedChunks(request, request.destination);
- if (hadAbortableTasks && 0 === request.abortableTasks.size) {
- var onAllReady = request.onAllReady;
- onAllReady();
- }
  } catch (error) {
  logRecoverableError(request, error, null), fatalError(request, error);
  } finally {
@@ -2660,6 +2659,13 @@
  destination && flushCompletedChunks(request, destination);
  }));
  }
+ function callOnAllReadyIfReady(request) {
+ if (
+ 0 === request.abortableTasks.size &&
+ 0 === request.abortListeners.size
+ )
+ request.onAllReady();
+ }
  function startFlowing(request, destination) {
  if (request.status === CLOSING)
  (request.status = CLOSED), destination.destroy(request.fatalError);
@@ -2703,8 +2709,7 @@
  }
  });
  abortableTasks.clear();
- var onAllReady = request.onAllReady;
- onAllReady();
+ callOnAllReadyIfReady(request);
  }
  var abortListeners = request.abortListeners;
  if (0 < abortListeners.size) {
@@ -2720,6 +2725,7 @@
  return callback(_error);
  });
  abortListeners.clear();
+ callOnAllReadyIfReady(request);
  }
  null !== request.destination &&
  flushCompletedChunks(request, request.destination);
@@ -843,10 +843,11 @@ function serializeReadableStream(request, task, stream) {
  function progress(entry) {
  if (!aborted)
  if (entry.done)
- request.abortListeners.delete(abortStream),
- (entry = streamTask.id.toString(16) + ":C\n"),
+ (entry = streamTask.id.toString(16) + ":C\n"),
  request.completedRegularChunks.push(entry),
  enqueueFlush(request),
+ request.abortListeners.delete(abortStream),
+ callOnAllReadyIfReady(request),
  (aborted = !0);
  else
  try {
@@ -903,7 +904,6 @@ function serializeAsyncIterable(request, task, iterable, iterator) {
  function progress(entry) {
  if (!aborted)
  if (entry.done) {
- request.abortListeners.delete(abortIterable);
  if (void 0 === entry.value)
  var endStreamRow = streamTask.id.toString(16) + ":C\n";
  else
@@ -920,6 +920,8 @@ function serializeAsyncIterable(request, task, iterable, iterator) {
  }
  request.completedRegularChunks.push(endStreamRow);
  enqueueFlush(request);
+ request.abortListeners.delete(abortIterable);
+ callOnAllReadyIfReady(request);
  aborted = !0;
  } else
  try {
@@ -1716,10 +1718,11 @@ function emitChunk(request, task, value) {
  emitModelChunk(request, task.id, value));
  }
  function erroredTask(request, task, error) {
- request.abortableTasks.delete(task);
  task.status = 4;
  error = logRecoverableError(request, error, task);
  emitErrorChunk(request, task.id, error);
+ request.abortableTasks.delete(task);
+ callOnAllReadyIfReady(request);
  }
  var emptyRoot = {};
  function retryTask(request, task) {
@@ -1744,8 +1747,9 @@ function retryTask(request, task) {
  var json = stringify(resolvedModel);
  emitModelChunk(request, task.id, json);
  }
- request.abortableTasks.delete(task);
  task.status = 1;
+ request.abortableTasks.delete(task);
+ callOnAllReadyIfReady(request);
  } catch (thrownValue) {
  if (12 === request.status) {
  request.abortableTasks.delete(task);
@@ -1777,7 +1781,6 @@ function performWork(request) {
  ReactSharedInternalsServer.H = HooksDispatcher;
  var prevRequest = currentRequest;
  currentRequest$1 = currentRequest = request;
- var hadAbortableTasks = 0 < request.abortableTasks.size;
  try {
  var pingedTasks = request.pingedTasks;
  request.pingedTasks = [];
@@ -1785,10 +1788,6 @@ function performWork(request) {
  retryTask(request, pingedTasks[i]);
  null !== request.destination &&
  flushCompletedChunks(request, request.destination);
- if (hadAbortableTasks && 0 === request.abortableTasks.size) {
- var onAllReady = request.onAllReady;
- onAllReady();
- }
  } catch (error) {
  logRecoverableError(request, error, null), fatalError(request, error);
  } finally {
@@ -1879,6 +1878,10 @@ function enqueueFlush(request) {
  destination && flushCompletedChunks(request, destination);
  }));
  }
+ function callOnAllReadyIfReady(request) {
+ if (0 === request.abortableTasks.size && 0 === request.abortListeners.size)
+ request.onAllReady();
+ }
  function startFlowing(request, destination) {
  if (13 === request.status)
  (request.status = 14), destination.destroy(request.fatalError);
@@ -1918,8 +1921,7 @@ function abort(request, reason) {
  }
  });
  abortableTasks.clear();
- var onAllReady = request.onAllReady;
- onAllReady();
+ callOnAllReadyIfReady(request);
  }
  var abortListeners = request.abortListeners;
  if (0 < abortListeners.size) {
@@ -1935,6 +1937,7 @@ function abort(request, reason) {
  return callback(error$22);
  });
  abortListeners.clear();
+ callOnAllReadyIfReady(request);
  }
  null !== request.destination &&
  flushCompletedChunks(request, request.destination);
@@ -854,10 +854,11 @@
  function progress(entry) {
  if (!aborted)
  if (entry.done)
- request.abortListeners.delete(abortStream),
- (entry = streamTask.id.toString(16) + ":C\n"),
+ (entry = streamTask.id.toString(16) + ":C\n"),
  request.completedRegularChunks.push(entry),
  enqueueFlush(request),
+ request.abortListeners.delete(abortStream),
+ callOnAllReadyIfReady(request),
  (aborted = !0);
  else
  try {
@@ -918,7 +919,6 @@
  function progress(entry) {
  if (!aborted)
  if (entry.done) {
- request.abortListeners.delete(abortIterable);
  if (void 0 === entry.value)
  var endStreamRow = streamTask.id.toString(16) + ":C\n";
  else
@@ -935,6 +935,8 @@
  }
  request.completedRegularChunks.push(endStreamRow);
  enqueueFlush(request);
+ request.abortListeners.delete(abortIterable);
+ callOnAllReadyIfReady(request);
  aborted = !0;
  } else
  try {
@@ -2476,10 +2478,11 @@
  emitModelChunk(request, task.id, value));
  }
  function erroredTask(request, task, error) {
- request.abortableTasks.delete(task);
  task.status = ERRORED$1;
  var digest = logRecoverableError(request, error, task);
  emitErrorChunk(request, task.id, digest, error);
+ request.abortableTasks.delete(task);
+ callOnAllReadyIfReady(request);
  }
  function retryTask(request, task) {
  if (task.status === PENDING$1) {
@@ -2513,8 +2516,9 @@
  var json = stringify(resolvedModel);
  emitModelChunk(request, task.id, json);
  }
- request.abortableTasks.delete(task);
  task.status = COMPLETED;
+ request.abortableTasks.delete(task);
+ callOnAllReadyIfReady(request);
  } catch (thrownValue) {
  if (request.status === ABORTING) {
  request.abortableTasks.delete(task);
@@ -2556,7 +2560,6 @@
  ReactSharedInternalsServer.H = HooksDispatcher;
  var prevRequest = currentRequest;
  currentRequest$1 = currentRequest = request;
- var hadAbortableTasks = 0 < request.abortableTasks.size;
  try {
  var pingedTasks = request.pingedTasks;
  request.pingedTasks = [];
@@ -2564,10 +2567,6 @@
  retryTask(request, pingedTasks[i]);
  null !== request.destination &&
  flushCompletedChunks(request, request.destination);
- if (hadAbortableTasks && 0 === request.abortableTasks.size) {
- var onAllReady = request.onAllReady;
- onAllReady();
- }
  } catch (error) {
  logRecoverableError(request, error, null), fatalError(request, error);
  } finally {
@@ -2660,6 +2659,13 @@
  destination && flushCompletedChunks(request, destination);
  }));
  }
+ function callOnAllReadyIfReady(request) {
+ if (
+ 0 === request.abortableTasks.size &&
+ 0 === request.abortListeners.size
+ )
+ request.onAllReady();
+ }
  function startFlowing(request, destination) {
  if (request.status === CLOSING)
  (request.status = CLOSED), destination.destroy(request.fatalError);
@@ -2703,8 +2709,7 @@
  }
  });
  abortableTasks.clear();
- var onAllReady = request.onAllReady;
- onAllReady();
+ callOnAllReadyIfReady(request);
  }
  var abortListeners = request.abortListeners;
  if (0 < abortListeners.size) {
@@ -2720,6 +2725,7 @@
  return callback(_error);
  });
  abortListeners.clear();
+ callOnAllReadyIfReady(request);
  }
  null !== request.destination &&
  flushCompletedChunks(request, request.destination);
@@ -843,10 +843,11 @@ function serializeReadableStream(request, task, stream) {
  function progress(entry) {
  if (!aborted)
  if (entry.done)
- request.abortListeners.delete(abortStream),
- (entry = streamTask.id.toString(16) + ":C\n"),
+ (entry = streamTask.id.toString(16) + ":C\n"),
  request.completedRegularChunks.push(entry),
  enqueueFlush(request),
+ request.abortListeners.delete(abortStream),
+ callOnAllReadyIfReady(request),
  (aborted = !0);
  else
  try {
@@ -903,7 +904,6 @@ function serializeAsyncIterable(request, task, iterable, iterator) {
  function progress(entry) {
  if (!aborted)
  if (entry.done) {
- request.abortListeners.delete(abortIterable);
  if (void 0 === entry.value)
  var endStreamRow = streamTask.id.toString(16) + ":C\n";
  else
@@ -920,6 +920,8 @@ function serializeAsyncIterable(request, task, iterable, iterator) {
  }
  request.completedRegularChunks.push(endStreamRow);
  enqueueFlush(request);
+ request.abortListeners.delete(abortIterable);
+ callOnAllReadyIfReady(request);
  aborted = !0;
  } else
  try {
@@ -1716,10 +1718,11 @@ function emitChunk(request, task, value) {
  emitModelChunk(request, task.id, value));
  }
  function erroredTask(request, task, error) {
- request.abortableTasks.delete(task);
  task.status = 4;
  error = logRecoverableError(request, error, task);
  emitErrorChunk(request, task.id, error);
+ request.abortableTasks.delete(task);
+ callOnAllReadyIfReady(request);
  }
  var emptyRoot = {};
  function retryTask(request, task) {
@@ -1744,8 +1747,9 @@ function retryTask(request, task) {
  var json = stringify(resolvedModel);
  emitModelChunk(request, task.id, json);
  }
- request.abortableTasks.delete(task);
  task.status = 1;
+ request.abortableTasks.delete(task);
+ callOnAllReadyIfReady(request);
  } catch (thrownValue) {
  if (12 === request.status) {
  request.abortableTasks.delete(task);
@@ -1777,7 +1781,6 @@ function performWork(request) {
  ReactSharedInternalsServer.H = HooksDispatcher;
  var prevRequest = currentRequest;
  currentRequest$1 = currentRequest = request;
- var hadAbortableTasks = 0 < request.abortableTasks.size;
  try {
  var pingedTasks = request.pingedTasks;
  request.pingedTasks = [];
@@ -1785,10 +1788,6 @@ function performWork(request) {
  retryTask(request, pingedTasks[i]);
  null !== request.destination &&
  flushCompletedChunks(request, request.destination);
- if (hadAbortableTasks && 0 === request.abortableTasks.size) {
- var onAllReady = request.onAllReady;
- onAllReady();
- }
  } catch (error) {
  logRecoverableError(request, error, null), fatalError(request, error);
  } finally {
@@ -1879,6 +1878,10 @@ function enqueueFlush(request) {
  destination && flushCompletedChunks(request, destination);
  }));
  }
+ function callOnAllReadyIfReady(request) {
+ if (0 === request.abortableTasks.size && 0 === request.abortListeners.size)
+ request.onAllReady();
+ }
  function startFlowing(request, destination) {
  if (13 === request.status)
  (request.status = 14), destination.destroy(request.fatalError);
@@ -1918,8 +1921,7 @@ function abort(request, reason) {
  }
  });
  abortableTasks.clear();
- var onAllReady = request.onAllReady;
- onAllReady();
+ callOnAllReadyIfReady(request);
  }
  var abortListeners = request.abortListeners;
  if (0 < abortListeners.size) {
@@ -1935,6 +1937,7 @@ function abort(request, reason) {
  return callback(error$22);
  });
  abortListeners.clear();
+ callOnAllReadyIfReady(request);
  }
  null !== request.destination &&
  flushCompletedChunks(request, request.destination);
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "react-server-dom-webpack",
  "description": "React Server Components bindings for DOM using Webpack. This is intended to be integrated into meta-frameworks. It is not intended to be imported directly.",
- "version": "19.2.0-canary-72135096-20250421",
+ "version": "19.2.0-canary-197d6a04-20250424",
  "keywords": [
  "react"
  ],
@@ -99,8 +99,8 @@
  "node": ">=0.10.0"
  },
  "peerDependencies": {
- "react": "19.2.0-canary-72135096-20250421",
- "react-dom": "19.2.0-canary-72135096-20250421",
+ "react": "19.2.0-canary-197d6a04-20250424",
+ "react-dom": "19.2.0-canary-197d6a04-20250424",
  "webpack": "^5.59.0"
  },
  "dependencies": {