@remotion/media 4.0.375 → 4.0.376

@@ -91,8 +91,6 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
  }
  queuedAudioNodes.length = 0;
  };
- let lastReturnedBuffer = null;
- let iteratorEnded = false;
  const getNextOrNullIfNotAvailable = async (allowWait) => {
  const next = iterator.next();
  const result = allowWait ? await allowWaitRoutine(next, allowWait) : await Promise.race([
@@ -106,50 +104,16 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
  type: "need-to-wait-for-it",
  waitPromise: async () => {
  const res = await next;
- if (res.value) {
- lastReturnedBuffer = res.value;
- } else {
- iteratorEnded = true;
- }
  return res.value;
  }
  };
  }
- if (result.value) {
- lastReturnedBuffer = result.value;
- } else {
- iteratorEnded = true;
- }
  return {
  type: "got-buffer-or-end",
  buffer: result.value ?? null
  };
  };
  const tryToSatisfySeek = async (time, allowWait, onBufferScheduled) => {
- if (lastReturnedBuffer) {
- const bufferTimestamp = roundTo4Digits(lastReturnedBuffer.timestamp);
- const bufferEndTimestamp = roundTo4Digits(lastReturnedBuffer.timestamp + lastReturnedBuffer.duration);
- if (roundTo4Digits(time) < bufferTimestamp) {
- return {
- type: "not-satisfied",
- reason: `iterator is too far, most recently returned ${bufferTimestamp}-${bufferEndTimestamp}, requested ${time}`
- };
- }
- if (roundTo4Digits(time) <= bufferEndTimestamp) {
- onBufferScheduled(lastReturnedBuffer);
- return {
- type: "satisfied"
- };
- }
- }
- if (iteratorEnded) {
- if (lastReturnedBuffer) {
- onBufferScheduled(lastReturnedBuffer);
- }
- return {
- type: "satisfied"
- };
- }
  while (true) {
  const buffer = await getNextOrNullIfNotAvailable(allowWait);
  if (buffer.type === "need-to-wait-for-it") {
@@ -160,17 +124,19 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
  }
  if (buffer.type === "got-buffer-or-end") {
  if (buffer.buffer === null) {
- iteratorEnded = true;
- if (lastReturnedBuffer) {
- onBufferScheduled(lastReturnedBuffer);
- }
  return {
- type: "satisfied"
+ type: "ended"
  };
  }
  const bufferTimestamp = roundTo4Digits(buffer.buffer.timestamp);
  const bufferEndTimestamp = roundTo4Digits(buffer.buffer.timestamp + buffer.buffer.duration);
  const timestamp = roundTo4Digits(time);
+ if (roundTo4Digits(time) < bufferTimestamp) {
+ return {
+ type: "not-satisfied",
+ reason: `iterator is too far, most recently returned ${bufferTimestamp}-${bufferEndTimestamp}, requested ${time}`
+ };
+ }
  if (bufferTimestamp <= timestamp && bufferEndTimestamp > timestamp) {
  onBufferScheduled(buffer.buffer);
  return {
@@ -214,11 +180,6 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
  },
  getNext: async () => {
  const next = await iterator.next();
- if (next.value) {
- lastReturnedBuffer = next.value;
- } else {
- iteratorEnded = true;
- }
  return next;
  },
  isDestroyed: () => {
@@ -408,6 +369,9 @@ var audioIteratorManager = ({
  if (nonce.isStale()) {
  return;
  }
+ if (audioSatisfyResult.type === "ended") {
+ return;
+ }
  if (audioSatisfyResult.type === "not-satisfied") {
  await startAudioIterator({
  nonce,
@@ -443,6 +407,9 @@ var audioIteratorManager = ({
  if (nonce.isStale()) {
  return;
  }
+ if (audioSatisfyResult.type === "ended") {
+ return;
+ }
  if (audioSatisfyResult.type === "not-satisfied") {
  await startAudioIterator({
  nonce,
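
Taken together, the changes above simplify the audio iterator: the lastReturnedBuffer/iteratorEnded bookkeeping is deleted, tryToSatisfySeek gains a distinct "ended" result for an exhausted stream (previously it reported "satisfied" and re-scheduled the last buffer), and both call sites in audioIteratorManager now return early on "ended" instead of restarting the iterator. A minimal sketch of the resulting contract (the result union is inferred from the diff; the caller signature is hypothetical):

    // Sketch only; shapes inferred from the diff, not the actual source types.
    type SatisfyResult =
      | {type: 'satisfied'}
      | {type: 'ended'}
      | {type: 'not-satisfied'; reason: string};

    // Hypothetical caller mirroring the two audioIteratorManager hunks:
    const afterSeekAttempt = async (
      result: SatisfyResult,
      restartIterator: () => Promise<void>,
    ): Promise<void> => {
      if (result.type === 'ended') {
        return; // the source has no more audio; nothing to schedule or restart
      }
      if (result.type === 'not-satisfied') {
        await restartIterator(); // seek target unreachable from here; start fresh
      }
    };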
@@ -1696,130 +1663,8 @@ import {
  useRemotionEnvironment
  } from "remotion";

- // src/convert-audiodata/apply-volume.ts
- var applyVolume = (array, volume) => {
- if (volume === 1) {
- return;
- }
- for (let i = 0;i < array.length; i++) {
- const newValue = array[i] * volume;
- if (newValue < -32768) {
- array[i] = -32768;
- } else if (newValue > 32767) {
- array[i] = 32767;
- } else {
- array[i] = newValue;
- }
- }
- };
-
- // src/convert-audiodata/resample-audiodata.ts
- var TARGET_NUMBER_OF_CHANNELS = 2;
- var TARGET_SAMPLE_RATE = 48000;
- var fixFloatingPoint = (value) => {
- if (value % 1 < 0.0000001) {
- return Math.floor(value);
- }
- if (value % 1 > 0.9999999) {
- return Math.ceil(value);
- }
- return value;
- };
- var resampleAudioData = ({
- srcNumberOfChannels,
- sourceChannels,
- destination,
- targetFrames,
- chunkSize
- }) => {
- const getSourceValues = (startUnfixed, endUnfixed, channelIndex) => {
- const start = fixFloatingPoint(startUnfixed);
- const end = fixFloatingPoint(endUnfixed);
- const startFloor = Math.floor(start);
- const startCeil = Math.ceil(start);
- const startFraction = start - startFloor;
- const endFraction = end - Math.floor(end);
- const endFloor = Math.floor(end);
- let weightedSum = 0;
- let totalWeight = 0;
- if (startFraction > 0) {
- const firstSample = sourceChannels[startFloor * srcNumberOfChannels + channelIndex];
- weightedSum += firstSample * (1 - startFraction);
- totalWeight += 1 - startFraction;
- }
- for (let k = startCeil;k < endFloor; k++) {
- const num = sourceChannels[k * srcNumberOfChannels + channelIndex];
- weightedSum += num;
- totalWeight += 1;
- }
- if (endFraction > 0) {
- const lastSample = sourceChannels[endFloor * srcNumberOfChannels + channelIndex];
- weightedSum += lastSample * endFraction;
- totalWeight += endFraction;
- }
- const average = weightedSum / totalWeight;
- return average;
- };
- for (let newFrameIndex = 0;newFrameIndex < targetFrames; newFrameIndex++) {
- const start = newFrameIndex * chunkSize;
- const end = start + chunkSize;
- if (TARGET_NUMBER_OF_CHANNELS === srcNumberOfChannels) {
- for (let i = 0;i < srcNumberOfChannels; i++) {
- destination[newFrameIndex * srcNumberOfChannels + i] = getSourceValues(start, end, i);
- }
- }
- if (srcNumberOfChannels === 1) {
- const m = getSourceValues(start, end, 0);
- destination[newFrameIndex * 2 + 0] = m;
- destination[newFrameIndex * 2 + 1] = m;
- } else if (srcNumberOfChannels === 4) {
- const l = getSourceValues(start, end, 0);
- const r = getSourceValues(start, end, 1);
- const sl = getSourceValues(start, end, 2);
- const sr = getSourceValues(start, end, 3);
- const l2 = 0.5 * (l + sl);
- const r2 = 0.5 * (r + sr);
- destination[newFrameIndex * 2 + 0] = l2;
- destination[newFrameIndex * 2 + 1] = r2;
- } else if (srcNumberOfChannels === 6) {
- const l = getSourceValues(start, end, 0);
- const r = getSourceValues(start, end, 1);
- const c = getSourceValues(start, end, 2);
- const sl = getSourceValues(start, end, 3);
- const sr = getSourceValues(start, end, 4);
- const sq = Math.sqrt(1 / 2);
- const l2 = l + sq * (c + sl);
- const r2 = r + sq * (c + sr);
- destination[newFrameIndex * 2 + 0] = l2;
- destination[newFrameIndex * 2 + 1] = r2;
- } else {
- for (let i = 0;i < srcNumberOfChannels; i++) {
- destination[newFrameIndex * TARGET_NUMBER_OF_CHANNELS + i] = getSourceValues(start, end, i);
- }
- }
- }
- };
-
- // src/looped-frame.ts
- var frameForVolumeProp = ({
- behavior,
- loop,
- assetDurationInSeconds,
- fps,
- frame,
- startsAt
- }) => {
- if (!loop) {
- return frame + startsAt;
- }
- if (behavior === "extend") {
- return frame + startsAt;
- }
- const assetDurationInFrames = Math.floor(assetDurationInSeconds * fps) - startsAt;
- return frame % assetDurationInFrames + startsAt;
- };
-
  // src/caches.ts
+ import React2 from "react";
  import { cancelRender, Internals as Internals11 } from "remotion";

  // src/audio-extraction/audio-manager.ts
@@ -2080,9 +1925,9 @@ var makeAudioManager = () => {
  audioSampleSink,
  isMatroska,
  actualMatroskaTimestamps,
- logLevel
+ logLevel,
+ maxCacheSize
  }) => {
- const maxCacheSize = getMaxVideoCacheSize(logLevel);
  while ((await getTotalCacheStats()).totalSize > maxCacheSize) {
  deleteOldestIterator();
  }
@@ -2131,7 +1976,8 @@ var makeAudioManager = () => {
  audioSampleSink,
  isMatroska,
  actualMatroskaTimestamps,
- logLevel
+ logLevel,
+ maxCacheSize
  }) => {
  queue = queue.then(() => getIterator({
  src,
@@ -2139,7 +1985,8 @@ var makeAudioManager = () => {
  audioSampleSink,
  isMatroska,
  actualMatroskaTimestamps,
- logLevel
+ logLevel,
+ maxCacheSize
  }));
  return queue;
  },
@@ -2265,15 +2112,9 @@ var makeKeyframeBank = ({
  };
  const getFrameFromTimestamp = async (timestampInSeconds) => {
  lastUsed = Date.now();
- const maxClampToleranceInSeconds = 0.1;
  let adjustedTimestamp = timestampInSeconds;
  if (roundTo4Digits(timestampInSeconds) < roundTo4Digits(startTimestampInSeconds)) {
- const differenceInSeconds = startTimestampInSeconds - timestampInSeconds;
- if (differenceInSeconds <= maxClampToleranceInSeconds) {
- adjustedTimestamp = startTimestampInSeconds;
- } else {
- return Promise.reject(new Error(`Timestamp is before start timestamp (requested: ${timestampInSeconds}sec, start: ${startTimestampInSeconds}sec, difference: ${differenceInSeconds.toFixed(3)}sec exceeds tolerance of ${maxClampToleranceInSeconds}sec)`));
- }
+ adjustedTimestamp = startTimestampInSeconds;
  }
  if (roundTo4Digits(adjustedTimestamp) > roundTo4Digits(endTimestampInSeconds)) {
  return Promise.reject(new Error(`Timestamp is after end timestamp (requested: ${timestampInSeconds}sec, end: ${endTimestampInSeconds}sec)`));
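
The getFrameFromTimestamp hunk above removes the 0.1-second maxClampToleranceInSeconds window: a requested timestamp earlier than the keyframe bank's start is now always clamped to startTimestampInSeconds instead of rejecting when it is more than 0.1s early (requests past the end still reject). A one-line sketch of the new rule, using the names from the diff (the roundTo4Digits comparison is omitted here):

    // Too-early timestamps are clamped to the bank start; no tolerance check anymore.
    const adjustTimestamp = (timestampInSeconds: number, startTimestampInSeconds: number): number =>
      timestampInSeconds < startTimestampInSeconds ? startTimestampInSeconds : timestampInSeconds;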
@@ -2543,9 +2384,8 @@ var makeKeyframeManager = () => {
  }
  return { finish: false };
  };
- const ensureToStayUnderMaxCacheSize = async (logLevel) => {
+ const ensureToStayUnderMaxCacheSize = async (logLevel, maxCacheSize) => {
  let cacheStats = await getTotalCacheStats();
- const maxCacheSize = getMaxVideoCacheSize(logLevel);
  while (cacheStats.totalSize > maxCacheSize) {
  const { finish } = await deleteOldestKeyframeBank(logLevel);
  if (finish) {
@@ -2632,9 +2472,10 @@ var makeKeyframeManager = () => {
  timestamp,
  videoSampleSink,
  src,
- logLevel
+ logLevel,
+ maxCacheSize
  }) => {
- await ensureToStayUnderMaxCacheSize(logLevel);
+ await ensureToStayUnderMaxCacheSize(logLevel, maxCacheSize);
  await clearKeyframeBanksBeforeTime({
  timestampInSeconds: timestamp,
  src,
@@ -2667,14 +2508,16 @@ var makeKeyframeManager = () => {
  timestamp,
  videoSampleSink,
  src,
- logLevel
+ logLevel,
+ maxCacheSize
  }) => {
  queue = queue.then(() => requestKeyframeBank({
  packetSink,
  timestamp,
  videoSampleSink,
  src,
- logLevel
+ logLevel,
+ maxCacheSize
  }));
  return queue;
  },
@@ -2729,6 +2572,136 @@ var getMaxVideoCacheSize = (logLevel) => {
  cachedMaxCacheSize = getUncachedMaxCacheSize(logLevel);
  return cachedMaxCacheSize;
  };
+ var useMaxMediaCacheSize = (logLevel) => {
+ const context = React2.useContext(Internals11.MaxMediaCacheSizeContext);
+ if (context === null) {
+ return getMaxVideoCacheSize(logLevel);
+ }
+ return context;
+ };
+
+ // src/convert-audiodata/apply-volume.ts
+ var applyVolume = (array, volume) => {
+ if (volume === 1) {
+ return;
+ }
+ for (let i = 0;i < array.length; i++) {
+ const newValue = array[i] * volume;
+ if (newValue < -32768) {
+ array[i] = -32768;
+ } else if (newValue > 32767) {
+ array[i] = 32767;
+ } else {
+ array[i] = newValue;
+ }
+ }
+ };
+
+ // src/convert-audiodata/resample-audiodata.ts
+ var TARGET_NUMBER_OF_CHANNELS = 2;
+ var TARGET_SAMPLE_RATE = 48000;
+ var fixFloatingPoint = (value) => {
+ if (value % 1 < 0.0000001) {
+ return Math.floor(value);
+ }
+ if (value % 1 > 0.9999999) {
+ return Math.ceil(value);
+ }
+ return value;
+ };
+ var resampleAudioData = ({
+ srcNumberOfChannels,
+ sourceChannels,
+ destination,
+ targetFrames,
+ chunkSize
+ }) => {
+ const getSourceValues = (startUnfixed, endUnfixed, channelIndex) => {
+ const start = fixFloatingPoint(startUnfixed);
+ const end = fixFloatingPoint(endUnfixed);
+ const startFloor = Math.floor(start);
+ const startCeil = Math.ceil(start);
+ const startFraction = start - startFloor;
+ const endFraction = end - Math.floor(end);
+ const endFloor = Math.floor(end);
+ let weightedSum = 0;
+ let totalWeight = 0;
+ if (startFraction > 0) {
+ const firstSample = sourceChannels[startFloor * srcNumberOfChannels + channelIndex];
+ weightedSum += firstSample * (1 - startFraction);
+ totalWeight += 1 - startFraction;
+ }
+ for (let k = startCeil;k < endFloor; k++) {
+ const num = sourceChannels[k * srcNumberOfChannels + channelIndex];
+ weightedSum += num;
+ totalWeight += 1;
+ }
+ if (endFraction > 0) {
+ const lastSample = sourceChannels[endFloor * srcNumberOfChannels + channelIndex];
+ weightedSum += lastSample * endFraction;
+ totalWeight += endFraction;
+ }
+ const average = weightedSum / totalWeight;
+ return average;
+ };
+ for (let newFrameIndex = 0;newFrameIndex < targetFrames; newFrameIndex++) {
+ const start = newFrameIndex * chunkSize;
+ const end = start + chunkSize;
+ if (TARGET_NUMBER_OF_CHANNELS === srcNumberOfChannels) {
+ for (let i = 0;i < srcNumberOfChannels; i++) {
+ destination[newFrameIndex * srcNumberOfChannels + i] = getSourceValues(start, end, i);
+ }
+ }
+ if (srcNumberOfChannels === 1) {
+ const m = getSourceValues(start, end, 0);
+ destination[newFrameIndex * 2 + 0] = m;
+ destination[newFrameIndex * 2 + 1] = m;
+ } else if (srcNumberOfChannels === 4) {
+ const l = getSourceValues(start, end, 0);
+ const r = getSourceValues(start, end, 1);
+ const sl = getSourceValues(start, end, 2);
+ const sr = getSourceValues(start, end, 3);
+ const l2 = 0.5 * (l + sl);
+ const r2 = 0.5 * (r + sr);
+ destination[newFrameIndex * 2 + 0] = l2;
+ destination[newFrameIndex * 2 + 1] = r2;
+ } else if (srcNumberOfChannels === 6) {
+ const l = getSourceValues(start, end, 0);
+ const r = getSourceValues(start, end, 1);
+ const c = getSourceValues(start, end, 2);
+ const sl = getSourceValues(start, end, 3);
+ const sr = getSourceValues(start, end, 4);
+ const sq = Math.sqrt(1 / 2);
+ const l2 = l + sq * (c + sl);
+ const r2 = r + sq * (c + sr);
+ destination[newFrameIndex * 2 + 0] = l2;
+ destination[newFrameIndex * 2 + 1] = r2;
+ } else {
+ for (let i = 0;i < srcNumberOfChannels; i++) {
+ destination[newFrameIndex * TARGET_NUMBER_OF_CHANNELS + i] = getSourceValues(start, end, i);
+ }
+ }
+ }
+ };
+
+ // src/looped-frame.ts
+ var frameForVolumeProp = ({
+ behavior,
+ loop,
+ assetDurationInSeconds,
+ fps,
+ frame,
+ startsAt
+ }) => {
+ if (!loop) {
+ return frame + startsAt;
+ }
+ if (behavior === "extend") {
+ return frame + startsAt;
+ }
+ const assetDurationInFrames = Math.floor(assetDurationInSeconds * fps) - startsAt;
+ return frame % assetDurationInFrames + startsAt;
+ };

  // src/convert-audiodata/convert-audiodata.ts
  var FORMAT = "s16";
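
The new useMaxMediaCacheSize hook is the consumer side of the maxCacheSize parameter threaded throughout this diff: it prefers a value supplied through Internals11.MaxMediaCacheSizeContext and falls back to the computed getMaxVideoCacheSize(logLevel) when no provider is mounted. A generic sketch of this context-with-fallback pattern (the context object and default below are hypothetical stand-ins; only the fallback behavior is taken from the diff):

    import React from 'react';

    // Stand-ins for Internals11.MaxMediaCacheSizeContext and getMaxVideoCacheSize():
    const MaxMediaCacheSizeContext = React.createContext<number | null>(null);
    const getDefaultMaxCacheSize = (): number => 1024 * 1024 * 1024; // assumed 1 GiB default

    const useMaxMediaCacheSize = (): number => {
      const fromContext = React.useContext(MaxMediaCacheSizeContext);
      // null signals "no provider mounted", so fall back to the computed default
      return fromContext === null ? getDefaultMaxCacheSize() : fromContext;
    };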
@@ -2851,7 +2824,8 @@ var extractAudioInternal = async ({
  audioStreamIndex,
  trimBefore,
  trimAfter,
- fps
+ fps,
+ maxCacheSize
  }) => {
  const { getAudio, actualMatroskaTimestamps, isMatroska, getDuration } = await getSink(src, logLevel);
  let mediaDurationInSeconds = null;
@@ -2888,7 +2862,8 @@ var extractAudioInternal = async ({
  audioSampleSink: audio.sampleSink,
  isMatroska,
  actualMatroskaTimestamps,
- logLevel
+ logLevel,
+ maxCacheSize
  });
  const durationInSeconds = durationNotYetApplyingPlaybackRate * playbackRate;
  const samples = await sampleIterator.getSamples(timeInSeconds, durationInSeconds);
@@ -2962,7 +2937,8 @@ var extractFrameInternal = async ({
  trimAfter,
  trimBefore,
  playbackRate,
- fps
+ fps,
+ maxCacheSize
  }) => {
  const sink = await getSink(src, logLevel);
  const video = await sink.getVideo();
@@ -3002,7 +2978,8 @@ var extractFrameInternal = async ({
  videoSampleSink: video.sampleSink,
  timestamp: timeInSeconds,
  src,
- logLevel
+ logLevel,
+ maxCacheSize
  });
  if (keyframeBank === "has-alpha") {
  return {
@@ -3072,7 +3049,8 @@ var extractFrameAndAudio = async ({
  audioStreamIndex,
  trimAfter,
  trimBefore,
- fps
+ fps,
+ maxCacheSize
  }) => {
  try {
  const [frame, audio] = await Promise.all([
@@ -3084,7 +3062,8 @@ var extractFrameAndAudio = async ({
  trimAfter,
  playbackRate,
  trimBefore,
- fps
+ fps,
+ maxCacheSize
  }) : null,
  includeAudio ? extractAudio({
  src,
@@ -3096,7 +3075,8 @@ var extractFrameAndAudio = async ({
  audioStreamIndex,
  trimAfter,
  fps,
- trimBefore
+ trimBefore,
+ maxCacheSize
  }) : null
  ]);
  if (frame?.type === "cannot-decode") {
@@ -3173,7 +3153,8 @@ if (typeof window !== "undefined" && window.remotion_broadcastChannel && window.
  audioStreamIndex: data.audioStreamIndex,
  trimAfter: data.trimAfter,
  trimBefore: data.trimBefore,
- fps: data.fps
+ fps: data.fps,
+ maxCacheSize: data.maxCacheSize
  });
  if (result.type === "cannot-decode") {
  const cannotDecodeResponse = {
@@ -3248,7 +3229,8 @@ var extractFrameViaBroadcastChannel = ({
  audioStreamIndex,
  trimAfter,
  trimBefore,
- fps
+ fps,
+ maxCacheSize
  }) => {
  if (isClientSideRendering || window.remotion_isMainTab) {
  return extractFrameAndAudio({
@@ -3263,7 +3245,8 @@ var extractFrameViaBroadcastChannel = ({
  audioStreamIndex,
  trimAfter,
  trimBefore,
- fps
+ fps,
+ maxCacheSize
  });
  }
  const requestId = crypto.randomUUID();
@@ -3335,7 +3318,8 @@ var extractFrameViaBroadcastChannel = ({
  audioStreamIndex,
  trimAfter,
  trimBefore,
- fps
+ fps,
+ maxCacheSize
  };
  window.remotion_broadcastChannel.postMessage(request);
  let timeoutId;
@@ -3396,6 +3380,7 @@ var AudioForRendering = ({
  sequenceContext?.relativeFrom,
  sequenceContext?.durationInFrames
  ]);
+ const maxCacheSize = useMaxMediaCacheSize(logLevel ?? window.remotion_logLevel);
  useLayoutEffect2(() => {
  const timestamp = frame / fps;
  const durationInSeconds = 1 / fps;
@@ -3428,7 +3413,8 @@ var AudioForRendering = ({
  audioStreamIndex: audioStreamIndex ?? 0,
  trimAfter,
  trimBefore,
- fps
+ fps,
+ maxCacheSize
  }).then((result) => {
  if (result.type === "unknown-container-format") {
  if (disallowFallbackToHtml5Audio) {
@@ -3526,7 +3512,8 @@ var AudioForRendering = ({
  toneFrequency,
  trimAfter,
  trimBefore,
- replaceWithHtml5Audio
+ replaceWithHtml5Audio,
+ maxCacheSize
  ]);
  if (replaceWithHtml5Audio) {
  return /* @__PURE__ */ jsx2(Html5Audio, {
@@ -3967,7 +3954,6 @@ import {
  useState as useState5
  } from "react";
  import {
- cancelRender as cancelRender3,
  Internals as Internals16,
  Loop,
  random as random2,
@@ -4015,11 +4001,16 @@ var VideoForRendering = ({
  sequenceContext?.durationInFrames
  ]);
  const environment = useRemotionEnvironment3();
- const { delayRender, continueRender } = useDelayRender2();
+ const { delayRender, continueRender, cancelRender: cancelRender3 } = useDelayRender2();
  const canvasRef = useRef3(null);
  const [replaceWithOffthreadVideo, setReplaceWithOffthreadVideo] = useState5(false);
  const audioEnabled = Internals16.useAudioEnabled();
  const videoEnabled = Internals16.useVideoEnabled();
+ const maxCacheSize = useMaxMediaCacheSize(logLevel);
+ const [error, setError] = useState5(null);
+ if (error) {
+ throw error;
+ }
  useLayoutEffect4(() => {
  if (!canvasRef.current) {
  return;
@@ -4027,6 +4018,9 @@ var VideoForRendering = ({
  if (replaceWithOffthreadVideo) {
  return;
  }
+ if (!canvasRef.current?.getContext) {
+ return setError(new Error("Canvas does not have .getContext() method available. This could be because <Video> was mounted inside an <svg> tag."));
+ }
  const timestamp = frame / fps;
  const durationInSeconds = 1 / fps;
  const newHandle = delayRender(`Extracting frame at time ${timestamp}`, {
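
Two related additions to VideoForRendering appear above: an error raised inside an effect is stored in state and re-thrown during render (so it reaches React error boundaries and Remotion's render pipeline), and the layout effect now bails out when the canvas element has no .getContext method, which happens if the component is mounted inside an <svg> tag, where a <canvas> tag resolves to an SVG-namespaced element. A minimal sketch of the rethrow-from-effect pattern (the component name is illustrative):

    import React, {useLayoutEffect, useRef, useState} from 'react';

    const CanvasProbe: React.FC = () => {
      const canvasRef = useRef<HTMLCanvasElement>(null);
      const [error, setError] = useState<Error | null>(null);
      if (error) {
        throw error; // re-thrown during render so an error boundary can catch it
      }
      useLayoutEffect(() => {
        // Inside <svg>, a <canvas> tag yields an element without getContext()
        if (typeof canvasRef.current?.getContext !== 'function') {
          setError(new Error('Canvas has no .getContext(); mounted inside <svg>?'));
        }
      }, []);
      return <canvas ref={canvasRef} />;
    };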
@@ -4055,7 +4049,8 @@ var VideoForRendering = ({
  audioStreamIndex,
  trimAfter: trimAfterValue,
  trimBefore: trimBeforeValue,
- fps
+ fps,
+ maxCacheSize
  }).then((result) => {
  if (result.type === "unknown-container-format") {
  if (disallowFallbackToOffthreadVideo) {
@@ -4154,8 +4149,8 @@ var VideoForRendering = ({
  });
  }
  continueRender(newHandle);
- }).catch((error) => {
- cancelRender3(error);
+ }).catch((err) => {
+ cancelRender3(err);
  });
  return () => {
  continueRender(newHandle);
@@ -4189,7 +4184,9 @@ var VideoForRendering = ({
  trimAfterValue,
  trimBeforeValue,
  audioEnabled,
- videoEnabled
+ videoEnabled,
+ maxCacheSize,
+ cancelRender3
  ]);
  const classNameValue = useMemo5(() => {
  return [Internals16.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals16.truthy).join(" ");
@@ -4231,7 +4228,9 @@ var VideoForRendering = ({
  });
  if (loop) {
  if (!replaceWithOffthreadVideo.durationInSeconds) {
- cancelRender3(new Error(`Cannot render video ${src}: @remotion/media was unable to render, and fell back to <OffthreadVideo>. Also, "loop" was set, but <OffthreadVideo> does not support looping and @remotion/media could also not determine the duration of the video.`));
+ const err = new Error(`Cannot render video ${src}: @remotion/media was unable to render, and fell back to <OffthreadVideo>. Also, "loop" was set, but <OffthreadVideo> does not support looping and @remotion/media could also not determine the duration of the video.`);
+ cancelRender3(err);
+ throw err;
  }
  return /* @__PURE__ */ jsx5(Loop, {
  layout: "none",
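
Above, VideoForRendering also stops importing the module-level cancelRender from remotion and instead takes cancelRender (aliased cancelRender3) from the useDelayRender2() hook, adding it to the effect dependency list. In the loop fallback branch, the error is now constructed once, passed to cancelRender3, and then explicitly thrown, presumably so local execution stops even if the hook-scoped cancel function does not itself throw. A sketch of that cancel-then-throw shape, with hypothetical names:

    // Hypothetical helper; the names are not from the source.
    const failRender = (cancel: (err: Error) => void, message: string): never => {
      const err = new Error(message);
      cancel(err); // tell the renderer to abort this render
      throw err; // and stop local execution regardless of what cancel() does
    };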
@@ -1,6 +1,6 @@
  import type { LogLevel } from 'remotion';
  import type { ExtractFrameViaBroadcastChannelResult } from './video-extraction/extract-frame-via-broadcast-channel';
- export declare const extractFrameAndAudio: ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, loop, audioStreamIndex, trimAfter, trimBefore, fps, }: {
+ export declare const extractFrameAndAudio: ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, loop, audioStreamIndex, trimAfter, trimBefore, fps, maxCacheSize, }: {
  src: string;
  timeInSeconds: number;
  logLevel: LogLevel;
@@ -13,4 +13,5 @@ export declare const extractFrameAndAudio: ({ src, timeInSeconds, logLevel, dura
  trimAfter: number | undefined;
  trimBefore: number | undefined;
  fps: number;
+ maxCacheSize: number;
  }) => Promise<ExtractFrameViaBroadcastChannelResult>;
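
The updated declaration makes maxCacheSize: number a required field of extractFrameAndAudio's options. A hypothetical call shaped after the destructured parameter list in the declaration; the types of the fields elided from this diff (durationInSeconds, playbackRate, includeAudio, includeVideo, loop, audioStreamIndex) are assumed, the values are illustrative, and the import path for the function is not shown in the diff:

    const demo = async () => {
      const result = await extractFrameAndAudio({
        src: 'https://example.com/video.mp4',
        timeInSeconds: 1.5,
        logLevel: 'info',
        durationInSeconds: 1 / 30,
        playbackRate: 1,
        includeAudio: true,
        includeVideo: true,
        loop: false,
        audioStreamIndex: 0,
        trimAfter: undefined,
        trimBefore: undefined,
        fps: 30,
        maxCacheSize: 512 * 1024 * 1024, // cache budget, assumed to be in bytes
      });
      return result;
    };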