@remotion/media 4.0.374 → 4.0.376

This diff shows the contents of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as published.
@@ -91,8 +91,6 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
     }
     queuedAudioNodes.length = 0;
   };
-  let lastReturnedBuffer = null;
-  let iteratorEnded = false;
   const getNextOrNullIfNotAvailable = async (allowWait) => {
     const next = iterator.next();
     const result = allowWait ? await allowWaitRoutine(next, allowWait) : await Promise.race([
@@ -106,50 +104,16 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
         type: "need-to-wait-for-it",
         waitPromise: async () => {
           const res = await next;
-          if (res.value) {
-            lastReturnedBuffer = res.value;
-          } else {
-            iteratorEnded = true;
-          }
           return res.value;
         }
       };
     }
-    if (result.value) {
-      lastReturnedBuffer = result.value;
-    } else {
-      iteratorEnded = true;
-    }
     return {
       type: "got-buffer-or-end",
       buffer: result.value ?? null
     };
   };
   const tryToSatisfySeek = async (time, allowWait, onBufferScheduled) => {
-    if (lastReturnedBuffer) {
-      const bufferTimestamp = roundTo4Digits(lastReturnedBuffer.timestamp);
-      const bufferEndTimestamp = roundTo4Digits(lastReturnedBuffer.timestamp + lastReturnedBuffer.duration);
-      if (roundTo4Digits(time) < bufferTimestamp) {
-        return {
-          type: "not-satisfied",
-          reason: `iterator is too far, most recently returned ${bufferTimestamp}-${bufferEndTimestamp}, requested ${time}`
-        };
-      }
-      if (roundTo4Digits(time) <= bufferEndTimestamp) {
-        onBufferScheduled(lastReturnedBuffer);
-        return {
-          type: "satisfied"
-        };
-      }
-    }
-    if (iteratorEnded) {
-      if (lastReturnedBuffer) {
-        onBufferScheduled(lastReturnedBuffer);
-      }
-      return {
-        type: "satisfied"
-      };
-    }
     while (true) {
       const buffer = await getNextOrNullIfNotAvailable(allowWait);
       if (buffer.type === "need-to-wait-for-it") {
@@ -160,17 +124,19 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
       }
       if (buffer.type === "got-buffer-or-end") {
         if (buffer.buffer === null) {
-          iteratorEnded = true;
-          if (lastReturnedBuffer) {
-            onBufferScheduled(lastReturnedBuffer);
-          }
           return {
-            type: "satisfied"
+            type: "ended"
           };
         }
         const bufferTimestamp = roundTo4Digits(buffer.buffer.timestamp);
         const bufferEndTimestamp = roundTo4Digits(buffer.buffer.timestamp + buffer.buffer.duration);
         const timestamp = roundTo4Digits(time);
+        if (roundTo4Digits(time) < bufferTimestamp) {
+          return {
+            type: "not-satisfied",
+            reason: `iterator is too far, most recently returned ${bufferTimestamp}-${bufferEndTimestamp}, requested ${time}`
+          };
+        }
         if (bufferTimestamp <= timestamp && bufferEndTimestamp > timestamp) {
           onBufferScheduled(buffer.buffer);
           return {
@@ -214,11 +180,6 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
     },
     getNext: async () => {
       const next = await iterator.next();
-      if (next.value) {
-        lastReturnedBuffer = next.value;
-      } else {
-        iteratorEnded = true;
-      }
       return next;
     },
     isDestroyed: () => {
@@ -408,6 +369,9 @@ var audioIteratorManager = ({
     if (nonce.isStale()) {
       return;
     }
+    if (audioSatisfyResult.type === "ended") {
+      return;
+    }
     if (audioSatisfyResult.type === "not-satisfied") {
       await startAudioIterator({
         nonce,
@@ -443,6 +407,9 @@ var audioIteratorManager = ({
     if (nonce.isStale()) {
       return;
     }
+    if (audioSatisfyResult.type === "ended") {
+      return;
+    }
     if (audioSatisfyResult.type === "not-satisfied") {
       await startAudioIterator({
         nonce,
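
Note on the hunks above: `tryToSatisfySeek` now distinguishes a third outcome, "ended", from "satisfied" and "not-satisfied", and both call sites in `audioIteratorManager` return early on it instead of falling through to the "not-satisfied" branch that restarts the iterator. A minimal sketch of the consumer pattern, with `iterator`, `scheduleBuffer` and `restartIterator` as illustrative names rather than identifiers from the package:

    // Sketch: branching on the three satisfy-result variants.
    const seekTo = async (time) => {
      const result = await iterator.tryToSatisfySeek(time, null, (buffer) => {
        scheduleBuffer(buffer); // schedule the matched buffer for playback
      });
      if (result.type === "ended") {
        return; // stream is exhausted; restarting would loop forever
      }
      if (result.type === "not-satisfied") {
        await restartIterator(time); // recreate the iterator at the requested time
      }
    };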
@@ -1696,130 +1663,8 @@ import {
   useRemotionEnvironment
 } from "remotion";
 
-// src/convert-audiodata/apply-volume.ts
-var applyVolume = (array, volume) => {
-  if (volume === 1) {
-    return;
-  }
-  for (let i = 0;i < array.length; i++) {
-    const newValue = array[i] * volume;
-    if (newValue < -32768) {
-      array[i] = -32768;
-    } else if (newValue > 32767) {
-      array[i] = 32767;
-    } else {
-      array[i] = newValue;
-    }
-  }
-};
-
-// src/convert-audiodata/resample-audiodata.ts
-var TARGET_NUMBER_OF_CHANNELS = 2;
-var TARGET_SAMPLE_RATE = 48000;
-var fixFloatingPoint = (value) => {
-  if (value % 1 < 0.0000001) {
-    return Math.floor(value);
-  }
-  if (value % 1 > 0.9999999) {
-    return Math.ceil(value);
-  }
-  return value;
-};
-var resampleAudioData = ({
-  srcNumberOfChannels,
-  sourceChannels,
-  destination,
-  targetFrames,
-  chunkSize
-}) => {
-  const getSourceValues = (startUnfixed, endUnfixed, channelIndex) => {
-    const start = fixFloatingPoint(startUnfixed);
-    const end = fixFloatingPoint(endUnfixed);
-    const startFloor = Math.floor(start);
-    const startCeil = Math.ceil(start);
-    const startFraction = start - startFloor;
-    const endFraction = end - Math.floor(end);
-    const endFloor = Math.floor(end);
-    let weightedSum = 0;
-    let totalWeight = 0;
-    if (startFraction > 0) {
-      const firstSample = sourceChannels[startFloor * srcNumberOfChannels + channelIndex];
-      weightedSum += firstSample * (1 - startFraction);
-      totalWeight += 1 - startFraction;
-    }
-    for (let k = startCeil;k < endFloor; k++) {
-      const num = sourceChannels[k * srcNumberOfChannels + channelIndex];
-      weightedSum += num;
-      totalWeight += 1;
-    }
-    if (endFraction > 0) {
-      const lastSample = sourceChannels[endFloor * srcNumberOfChannels + channelIndex];
-      weightedSum += lastSample * endFraction;
-      totalWeight += endFraction;
-    }
-    const average = weightedSum / totalWeight;
-    return average;
-  };
-  for (let newFrameIndex = 0;newFrameIndex < targetFrames; newFrameIndex++) {
-    const start = newFrameIndex * chunkSize;
-    const end = start + chunkSize;
-    if (TARGET_NUMBER_OF_CHANNELS === srcNumberOfChannels) {
-      for (let i = 0;i < srcNumberOfChannels; i++) {
-        destination[newFrameIndex * srcNumberOfChannels + i] = getSourceValues(start, end, i);
-      }
-    }
-    if (srcNumberOfChannels === 1) {
-      const m = getSourceValues(start, end, 0);
-      destination[newFrameIndex * 2 + 0] = m;
-      destination[newFrameIndex * 2 + 1] = m;
-    } else if (srcNumberOfChannels === 4) {
-      const l = getSourceValues(start, end, 0);
-      const r = getSourceValues(start, end, 1);
-      const sl = getSourceValues(start, end, 2);
-      const sr = getSourceValues(start, end, 3);
-      const l2 = 0.5 * (l + sl);
-      const r2 = 0.5 * (r + sr);
-      destination[newFrameIndex * 2 + 0] = l2;
-      destination[newFrameIndex * 2 + 1] = r2;
-    } else if (srcNumberOfChannels === 6) {
-      const l = getSourceValues(start, end, 0);
-      const r = getSourceValues(start, end, 1);
-      const c = getSourceValues(start, end, 2);
-      const sl = getSourceValues(start, end, 3);
-      const sr = getSourceValues(start, end, 4);
-      const sq = Math.sqrt(1 / 2);
-      const l2 = l + sq * (c + sl);
-      const r2 = r + sq * (c + sr);
-      destination[newFrameIndex * 2 + 0] = l2;
-      destination[newFrameIndex * 2 + 1] = r2;
-    } else {
-      for (let i = 0;i < srcNumberOfChannels; i++) {
-        destination[newFrameIndex * TARGET_NUMBER_OF_CHANNELS + i] = getSourceValues(start, end, i);
-      }
-    }
-  }
-};
-
-// src/looped-frame.ts
-var frameForVolumeProp = ({
-  behavior,
-  loop,
-  assetDurationInSeconds,
-  fps,
-  frame,
-  startsAt
-}) => {
-  if (!loop) {
-    return frame + startsAt;
-  }
-  if (behavior === "extend") {
-    return frame + startsAt;
-  }
-  const assetDurationInFrames = Math.floor(assetDurationInSeconds * fps) - startsAt;
-  return frame % assetDurationInFrames + startsAt;
-};
-
 // src/caches.ts
+import React2 from "react";
 import { cancelRender, Internals as Internals11 } from "remotion";
 
 // src/audio-extraction/audio-manager.ts
@@ -2080,9 +1925,9 @@ var makeAudioManager = () => {
     audioSampleSink,
     isMatroska,
     actualMatroskaTimestamps,
-    logLevel
+    logLevel,
+    maxCacheSize
   }) => {
-    const maxCacheSize = getMaxVideoCacheSize(logLevel);
     while ((await getTotalCacheStats()).totalSize > maxCacheSize) {
       deleteOldestIterator();
     }
@@ -2131,7 +1976,8 @@ var makeAudioManager = () => {
     audioSampleSink,
     isMatroska,
     actualMatroskaTimestamps,
-    logLevel
+    logLevel,
+    maxCacheSize
   }) => {
     queue = queue.then(() => getIterator({
       src,
@@ -2139,7 +1985,8 @@ var makeAudioManager = () => {
       audioSampleSink,
       isMatroska,
       actualMatroskaTimestamps,
-      logLevel
+      logLevel,
+      maxCacheSize
     }));
     return queue;
   },
@@ -2265,15 +2112,9 @@ var makeKeyframeBank = ({
   };
   const getFrameFromTimestamp = async (timestampInSeconds) => {
     lastUsed = Date.now();
-    const maxClampToleranceInSeconds = 0.1;
     let adjustedTimestamp = timestampInSeconds;
     if (roundTo4Digits(timestampInSeconds) < roundTo4Digits(startTimestampInSeconds)) {
-      const differenceInSeconds = startTimestampInSeconds - timestampInSeconds;
-      if (differenceInSeconds <= maxClampToleranceInSeconds) {
-        adjustedTimestamp = startTimestampInSeconds;
-      } else {
-        return Promise.reject(new Error(`Timestamp is before start timestamp (requested: ${timestampInSeconds}sec, start: ${startTimestampInSeconds}sec, difference: ${differenceInSeconds.toFixed(3)}sec exceeds tolerance of ${maxClampToleranceInSeconds}sec)`));
-      }
+      adjustedTimestamp = startTimestampInSeconds;
     }
     if (roundTo4Digits(adjustedTimestamp) > roundTo4Digits(endTimestampInSeconds)) {
       return Promise.reject(new Error(`Timestamp is after end timestamp (requested: ${timestampInSeconds}sec, end: ${endTimestampInSeconds}sec)`));
@@ -2543,9 +2384,8 @@ var makeKeyframeManager = () => {
     }
     return { finish: false };
   };
-  const ensureToStayUnderMaxCacheSize = async (logLevel) => {
+  const ensureToStayUnderMaxCacheSize = async (logLevel, maxCacheSize) => {
     let cacheStats = await getTotalCacheStats();
-    const maxCacheSize = getMaxVideoCacheSize(logLevel);
     while (cacheStats.totalSize > maxCacheSize) {
       const { finish } = await deleteOldestKeyframeBank(logLevel);
       if (finish) {
@@ -2632,9 +2472,10 @@ var makeKeyframeManager = () => {
     timestamp,
     videoSampleSink,
     src,
-    logLevel
+    logLevel,
+    maxCacheSize
   }) => {
-    await ensureToStayUnderMaxCacheSize(logLevel);
+    await ensureToStayUnderMaxCacheSize(logLevel, maxCacheSize);
     await clearKeyframeBanksBeforeTime({
       timestampInSeconds: timestamp,
       src,
@@ -2667,14 +2508,16 @@ var makeKeyframeManager = () => {
     timestamp,
     videoSampleSink,
     src,
-    logLevel
+    logLevel,
+    maxCacheSize
   }) => {
     queue = queue.then(() => requestKeyframeBank({
       packetSink,
       timestamp,
       videoSampleSink,
       src,
-      logLevel
+      logLevel,
+      maxCacheSize
     }));
     return queue;
   },
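
The hunks above all make the same change: the cache budget is no longer looked up inside the cache modules via `getMaxVideoCacheSize`, but accepted as a `maxCacheSize` argument and threaded through `getIterator`, `ensureToStayUnderMaxCacheSize` and `requestKeyframeBank`. The eviction loops themselves are unchanged; a condensed sketch of the shape they now take, assuming `getTotalCacheStats` and `deleteOldestIterator` as they appear in this diff:

    // Sketch: the caller supplies the budget; the module only evicts against it.
    const evictUntilUnderBudget = async (maxCacheSize) => {
      while ((await getTotalCacheStats()).totalSize > maxCacheSize) {
        deleteOldestIterator(); // drop the least recently used entry
      }
    };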
@@ -2729,10 +2572,35 @@ var getMaxVideoCacheSize = (logLevel) => {
   cachedMaxCacheSize = getUncachedMaxCacheSize(logLevel);
   return cachedMaxCacheSize;
 };
+var useMaxMediaCacheSize = (logLevel) => {
+  const context = React2.useContext(Internals11.MaxMediaCacheSizeContext);
+  if (context === null) {
+    return getMaxVideoCacheSize(logLevel);
+  }
+  return context;
+};
 
-// src/convert-audiodata/convert-audiodata.ts
-var FORMAT = "s16";
-var fixFloatingPoint2 = (value) => {
+// src/convert-audiodata/apply-volume.ts
+var applyVolume = (array, volume) => {
+  if (volume === 1) {
+    return;
+  }
+  for (let i = 0;i < array.length; i++) {
+    const newValue = array[i] * volume;
+    if (newValue < -32768) {
+      array[i] = -32768;
+    } else if (newValue > 32767) {
+      array[i] = 32767;
+    } else {
+      array[i] = newValue;
+    }
+  }
+};
+
+// src/convert-audiodata/resample-audiodata.ts
+var TARGET_NUMBER_OF_CHANNELS = 2;
+var TARGET_SAMPLE_RATE = 48000;
+var fixFloatingPoint = (value) => {
   if (value % 1 < 0.0000001) {
     return Math.floor(value);
   }
@@ -2741,6 +2609,112 @@ var fixFloatingPoint2 = (value) => {
   }
   return value;
 };
+var resampleAudioData = ({
+  srcNumberOfChannels,
+  sourceChannels,
+  destination,
+  targetFrames,
+  chunkSize
+}) => {
+  const getSourceValues = (startUnfixed, endUnfixed, channelIndex) => {
+    const start = fixFloatingPoint(startUnfixed);
+    const end = fixFloatingPoint(endUnfixed);
+    const startFloor = Math.floor(start);
+    const startCeil = Math.ceil(start);
+    const startFraction = start - startFloor;
+    const endFraction = end - Math.floor(end);
+    const endFloor = Math.floor(end);
+    let weightedSum = 0;
+    let totalWeight = 0;
+    if (startFraction > 0) {
+      const firstSample = sourceChannels[startFloor * srcNumberOfChannels + channelIndex];
+      weightedSum += firstSample * (1 - startFraction);
+      totalWeight += 1 - startFraction;
+    }
+    for (let k = startCeil;k < endFloor; k++) {
+      const num = sourceChannels[k * srcNumberOfChannels + channelIndex];
+      weightedSum += num;
+      totalWeight += 1;
+    }
+    if (endFraction > 0) {
+      const lastSample = sourceChannels[endFloor * srcNumberOfChannels + channelIndex];
+      weightedSum += lastSample * endFraction;
+      totalWeight += endFraction;
+    }
+    const average = weightedSum / totalWeight;
+    return average;
+  };
+  for (let newFrameIndex = 0;newFrameIndex < targetFrames; newFrameIndex++) {
+    const start = newFrameIndex * chunkSize;
+    const end = start + chunkSize;
+    if (TARGET_NUMBER_OF_CHANNELS === srcNumberOfChannels) {
+      for (let i = 0;i < srcNumberOfChannels; i++) {
+        destination[newFrameIndex * srcNumberOfChannels + i] = getSourceValues(start, end, i);
+      }
+    }
+    if (srcNumberOfChannels === 1) {
+      const m = getSourceValues(start, end, 0);
+      destination[newFrameIndex * 2 + 0] = m;
+      destination[newFrameIndex * 2 + 1] = m;
+    } else if (srcNumberOfChannels === 4) {
+      const l = getSourceValues(start, end, 0);
+      const r = getSourceValues(start, end, 1);
+      const sl = getSourceValues(start, end, 2);
+      const sr = getSourceValues(start, end, 3);
+      const l2 = 0.5 * (l + sl);
+      const r2 = 0.5 * (r + sr);
+      destination[newFrameIndex * 2 + 0] = l2;
+      destination[newFrameIndex * 2 + 1] = r2;
+    } else if (srcNumberOfChannels === 6) {
+      const l = getSourceValues(start, end, 0);
+      const r = getSourceValues(start, end, 1);
+      const c = getSourceValues(start, end, 2);
+      const sl = getSourceValues(start, end, 3);
+      const sr = getSourceValues(start, end, 4);
+      const sq = Math.sqrt(1 / 2);
+      const l2 = l + sq * (c + sl);
+      const r2 = r + sq * (c + sr);
+      destination[newFrameIndex * 2 + 0] = l2;
+      destination[newFrameIndex * 2 + 1] = r2;
+    } else {
+      for (let i = 0;i < srcNumberOfChannels; i++) {
+        destination[newFrameIndex * TARGET_NUMBER_OF_CHANNELS + i] = getSourceValues(start, end, i);
+      }
+    }
+  }
+};
+
+// src/looped-frame.ts
+var frameForVolumeProp = ({
+  behavior,
+  loop,
+  assetDurationInSeconds,
+  fps,
+  frame,
+  startsAt
+}) => {
+  if (!loop) {
+    return frame + startsAt;
+  }
+  if (behavior === "extend") {
+    return frame + startsAt;
+  }
+  const assetDurationInFrames = Math.floor(assetDurationInSeconds * fps) - startsAt;
+  return frame % assetDurationInFrames + startsAt;
+};
+
+// src/convert-audiodata/convert-audiodata.ts
+var FORMAT = "s16";
+var fixFloatingPoint2 = (value) => {
+  const decimal = Math.abs(value % 1);
+  if (decimal < 0.0000001) {
+    return value < 0 ? Math.ceil(value) : Math.floor(value);
+  }
+  if (decimal > 0.9999999) {
+    return value < 0 ? Math.floor(value) : Math.ceil(value);
+  }
+  return value;
+};
 var ceilButNotIfFloatingPointIssue = (value) => {
   const fixed = fixFloatingPoint2(value);
   return Math.ceil(fixed);
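
The rewritten `fixFloatingPoint2` above fixes a sign bug: in JavaScript the result of `%` takes the sign of the dividend, so for negative inputs `value % 1` is negative and the old check `value % 1 < 0.0000001` passed for every negative value, after which `Math.floor` snapped in the wrong direction. Comparing `Math.abs(value % 1)` and choosing `Math.ceil`/`Math.floor` by sign restores symmetric snapping:

    // The sign of % follows the dividend:
    -2.0000000001 % 1;                // ≈ -0.0000000001
    // Old: (-0.0000000001 < 0.0000001) is true, and Math.floor(-2.0000000001) === -3 (wrong)
    // New:
    fixFloatingPoint2(-2.0000000001); // -2 (Math.ceil, since value < 0)
    fixFloatingPoint2(-2.9999999999); // -3 (Math.floor, since the fraction is ≈ 1)
    fixFloatingPoint2(2.0000000001);  // 2  (positive behavior unchanged)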
@@ -2850,7 +2824,8 @@ var extractAudioInternal = async ({
   audioStreamIndex,
   trimBefore,
   trimAfter,
-  fps
+  fps,
+  maxCacheSize
 }) => {
   const { getAudio, actualMatroskaTimestamps, isMatroska, getDuration } = await getSink(src, logLevel);
   let mediaDurationInSeconds = null;
@@ -2887,12 +2862,12 @@ var extractAudioInternal = async ({
     audioSampleSink: audio.sampleSink,
     isMatroska,
     actualMatroskaTimestamps,
-    logLevel
+    logLevel,
+    maxCacheSize
   });
   const durationInSeconds = durationNotYetApplyingPlaybackRate * playbackRate;
   const samples = await sampleIterator.getSamples(timeInSeconds, durationInSeconds);
   audioManager.logOpenFrames();
-  const trimStartToleranceInSeconds = 0.002;
   const audioDataArray = [];
   for (let i = 0;i < samples.length; i++) {
     const sample = samples[i];
@@ -2907,13 +2882,18 @@ var extractAudioInternal = async ({
     const audioDataRaw = sample.toAudioData();
     let trimStartInSeconds = 0;
     let trimEndInSeconds = 0;
+    let leadingSilence = null;
     if (isFirstSample) {
-      trimStartInSeconds = timeInSeconds - sample.timestamp;
-      if (trimStartInSeconds < 0 && trimStartInSeconds > -trimStartToleranceInSeconds) {
-        trimStartInSeconds = 0;
-      }
+      trimStartInSeconds = fixFloatingPoint2(timeInSeconds - sample.timestamp);
       if (trimStartInSeconds < 0) {
-        throw new Error(`trimStartInSeconds is negative: ${trimStartInSeconds}. ${JSON.stringify({ timeInSeconds, ts: sample.timestamp, d: sample.duration, isFirstSample, isLastSample, durationInSeconds, i, st: samples.map((s) => s.timestamp) })}`);
+        const silenceFrames = Math.ceil(fixFloatingPoint2(-trimStartInSeconds * TARGET_SAMPLE_RATE));
+        leadingSilence = {
+          data: new Int16Array(silenceFrames * TARGET_NUMBER_OF_CHANNELS),
+          numberOfFrames: silenceFrames,
+          timestamp: timeInSeconds * 1e6,
+          durationInMicroSeconds: silenceFrames / TARGET_SAMPLE_RATE * 1e6
+        };
+        trimStartInSeconds = 0;
       }
     }
     if (isLastSample) {
@@ -2931,6 +2911,9 @@ var extractAudioInternal = async ({
     if (audioData.numberOfFrames === 0) {
       continue;
     }
+    if (leadingSilence) {
+      audioDataArray.push(leadingSilence);
+    }
     audioDataArray.push(audioData);
   }
   if (audioDataArray.length === 0) {
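
With the hunks above, a first sample that starts after the requested read position no longer throws once the old 2 ms tolerance is exceeded; the gap is filled with silence instead. The silent chunk is sized in frames at the 48000 Hz stereo target declared in resample-audiodata.ts, and a zero-filled Int16Array is silence in the "s16" sample format. Back-of-the-envelope math with an illustrative 10.5 ms gap:

    const gapInSeconds = 0.0105;                              // illustrative value
    const silenceFrames = Math.ceil(gapInSeconds * 48000);    // 504 frames
    const data = new Int16Array(silenceFrames * 2);           // stereo, all zeroes
    const durationInMicroSeconds = (silenceFrames / 48000) * 1e6; // 10500 µs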
@@ -2954,7 +2937,8 @@ var extractFrameInternal = async ({
   trimAfter,
   trimBefore,
   playbackRate,
-  fps
+  fps,
+  maxCacheSize
 }) => {
   const sink = await getSink(src, logLevel);
   const video = await sink.getVideo();
@@ -2994,7 +2978,8 @@ var extractFrameInternal = async ({
     videoSampleSink: video.sampleSink,
     timestamp: timeInSeconds,
     src,
-    logLevel
+    logLevel,
+    maxCacheSize
   });
   if (keyframeBank === "has-alpha") {
     return {
@@ -3018,6 +3003,39 @@ var extractFrame = (params) => {
   return queue2;
 };
 
+// src/video-extraction/rotate-frame.ts
+var rotateFrame = async ({
+  frame,
+  rotation
+}) => {
+  if (rotation === 0) {
+    const directBitmap = await createImageBitmap(frame);
+    frame.close();
+    return directBitmap;
+  }
+  const width = rotation === 90 || rotation === 270 ? frame.displayHeight : frame.displayWidth;
+  const height = rotation === 90 || rotation === 270 ? frame.displayWidth : frame.displayHeight;
+  const canvas = new OffscreenCanvas(width, height);
+  const ctx = canvas.getContext("2d");
+  if (!ctx) {
+    throw new Error("Could not get 2d context");
+  }
+  canvas.width = width;
+  canvas.height = height;
+  if (rotation === 90) {
+    ctx.translate(width, 0);
+  } else if (rotation === 180) {
+    ctx.translate(width, height);
+  } else if (rotation === 270) {
+    ctx.translate(0, height);
+  }
+  ctx.rotate(rotation * (Math.PI / 180));
+  ctx.drawImage(frame, 0, 0);
+  const bitmap = await createImageBitmap(canvas);
+  frame.close();
+  return bitmap;
+};
+
 // src/extract-frame-and-audio.ts
 var extractFrameAndAudio = async ({
   src,
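
The new `rotateFrame` above bakes a container's rotation metadata into the pixels. For 90° and 270° the canvas is allocated with swapped dimensions, and the translate/rotate pair repositions the rotated content back into view. The 90° case can be checked by hand: canvas `rotate(θ)` maps a point (x, y) to (x·cosθ − y·sinθ, x·sinθ + y·cosθ), so a w0×h0 frame drawn at the origin after `rotate(90°)` lands at x ∈ [−h0, 0], y ∈ [0, w0], entirely left of the canvas; `translate(width, 0)` with width = h0 shifts it into [0, h0]. The 180° and 270° translations follow the same reasoning. A reduced sketch of just that case (`frame`, `w0`, `h0` are illustrative):

    const canvas = new OffscreenCanvas(h0, w0); // swapped dimensions for 90°
    const ctx = canvas.getContext("2d");
    ctx.translate(h0, 0);            // shift the rotated content back on-canvas
    ctx.rotate(Math.PI / 2);         // 90° clockwise
    ctx.drawImage(frame, 0, 0);      // frame is w0 wide, h0 tall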
@@ -3031,7 +3049,8 @@ var extractFrameAndAudio = async ({
   audioStreamIndex,
   trimAfter,
   trimBefore,
-  fps
+  fps,
+  maxCacheSize
 }) => {
   try {
     const [frame, audio] = await Promise.all([
@@ -3043,7 +3062,8 @@ var extractFrameAndAudio = async ({
         trimAfter,
         playbackRate,
         trimBefore,
-        fps
+        fps,
+        maxCacheSize
       }) : null,
       includeAudio ? extractAudio({
         src,
@@ -3055,7 +3075,8 @@ var extractFrameAndAudio = async ({
         audioStreamIndex,
         trimAfter,
         fps,
-        trimBefore
+        trimBefore,
+        maxCacheSize
       }) : null
     ]);
     if (frame?.type === "cannot-decode") {
@@ -3088,9 +3109,20 @@ var extractFrameAndAudio = async ({
         durationInSeconds: frame?.type === "success" ? frame.durationInSeconds : null
       };
     }
+    if (!frame?.frame) {
+      return {
+        type: "success",
+        frame: null,
+        audio: audio?.data ?? null,
+        durationInSeconds: audio?.durationInSeconds ?? null
+      };
+    }
     return {
       type: "success",
-      frame: frame?.frame?.toVideoFrame() ?? null,
+      frame: await rotateFrame({
+        frame: frame.frame.toVideoFrame(),
+        rotation: frame.frame.rotation
+      }),
       audio: audio?.data ?? null,
       durationInSeconds: audio?.durationInSeconds ?? null
     };
@@ -3121,7 +3153,8 @@ if (typeof window !== "undefined" && window.remotion_broadcastChannel && window.
       audioStreamIndex: data.audioStreamIndex,
       trimAfter: data.trimAfter,
       trimBefore: data.trimBefore,
-      fps: data.fps
+      fps: data.fps,
+      maxCacheSize: data.maxCacheSize
     });
     if (result.type === "cannot-decode") {
       const cannotDecodeResponse = {
@@ -3158,10 +3191,9 @@ if (typeof window !== "undefined" && window.remotion_broadcastChannel && window.
       return;
     }
     const { frame, audio, durationInSeconds } = result;
-    const videoFrame = frame;
-    const imageBitmap = videoFrame ? await createImageBitmap(videoFrame) : null;
-    if (videoFrame) {
-      videoFrame.close();
+    const imageBitmap = frame ? await createImageBitmap(frame) : null;
+    if (frame) {
+      frame.close();
     }
     const response = {
       type: "response-success",
@@ -3171,7 +3203,6 @@ if (typeof window !== "undefined" && window.remotion_broadcastChannel && window.
       durationInSeconds: durationInSeconds ?? null
     };
     window.remotion_broadcastChannel.postMessage(response);
-    videoFrame?.close();
   } catch (error) {
     const response = {
       type: "response-error",
@@ -3198,7 +3229,8 @@ var extractFrameViaBroadcastChannel = ({
   audioStreamIndex,
   trimAfter,
   trimBefore,
-  fps
+  fps,
+  maxCacheSize
 }) => {
   if (isClientSideRendering || window.remotion_isMainTab) {
     return extractFrameAndAudio({
@@ -3213,7 +3245,8 @@ var extractFrameViaBroadcastChannel = ({
       audioStreamIndex,
       trimAfter,
       trimBefore,
-      fps
+      fps,
+      maxCacheSize
     });
   }
   const requestId = crypto.randomUUID();
@@ -3285,7 +3318,8 @@ var extractFrameViaBroadcastChannel = ({
     audioStreamIndex,
     trimAfter,
     trimBefore,
-    fps
+    fps,
+    maxCacheSize
   };
   window.remotion_broadcastChannel.postMessage(request);
   let timeoutId;
@@ -3346,6 +3380,7 @@ var AudioForRendering = ({
     sequenceContext?.relativeFrom,
     sequenceContext?.durationInFrames
   ]);
+  const maxCacheSize = useMaxMediaCacheSize(logLevel ?? window.remotion_logLevel);
   useLayoutEffect2(() => {
     const timestamp = frame / fps;
     const durationInSeconds = 1 / fps;
@@ -3378,7 +3413,8 @@ var AudioForRendering = ({
       audioStreamIndex: audioStreamIndex ?? 0,
       trimAfter,
       trimBefore,
-      fps
+      fps,
+      maxCacheSize
     }).then((result) => {
       if (result.type === "unknown-container-format") {
         if (disallowFallbackToHtml5Audio) {
@@ -3476,7 +3512,8 @@ var AudioForRendering = ({
     toneFrequency,
     trimAfter,
     trimBefore,
-    replaceWithHtml5Audio
+    replaceWithHtml5Audio,
+    maxCacheSize
   ]);
   if (replaceWithHtml5Audio) {
     return /* @__PURE__ */ jsx2(Html5Audio, {
@@ -3917,7 +3954,6 @@ import {
   useState as useState5
 } from "react";
 import {
-  cancelRender as cancelRender3,
   Internals as Internals16,
   Loop,
   random as random2,
@@ -3965,11 +4001,16 @@ var VideoForRendering = ({
     sequenceContext?.durationInFrames
   ]);
   const environment = useRemotionEnvironment3();
-  const { delayRender, continueRender } = useDelayRender2();
+  const { delayRender, continueRender, cancelRender: cancelRender3 } = useDelayRender2();
   const canvasRef = useRef3(null);
   const [replaceWithOffthreadVideo, setReplaceWithOffthreadVideo] = useState5(false);
   const audioEnabled = Internals16.useAudioEnabled();
   const videoEnabled = Internals16.useVideoEnabled();
+  const maxCacheSize = useMaxMediaCacheSize(logLevel);
+  const [error, setError] = useState5(null);
+  if (error) {
+    throw error;
+  }
   useLayoutEffect4(() => {
     if (!canvasRef.current) {
       return;
@@ -3977,6 +4018,9 @@ var VideoForRendering = ({
     if (replaceWithOffthreadVideo) {
       return;
     }
+    if (!canvasRef.current?.getContext) {
+      return setError(new Error("Canvas does not have .getContext() method available. This could be because <Video> was mounted inside an <svg> tag."));
+    }
     const timestamp = frame / fps;
     const durationInSeconds = 1 / fps;
     const newHandle = delayRender(`Extracting frame at time ${timestamp}`, {
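
`VideoForRendering` now keeps failures in a state variable and re-throws them during render, which is the standard way to forward an error from an effect or callback into a React error boundary; the new `getContext` guard uses it for canvases mounted where the method does not exist, such as inside an `<svg>` element. A minimal sketch of the pattern, with `useThrowToBoundary` and `doAsyncWork` as hypothetical names:

    import { useEffect, useState } from "react";

    const useThrowToBoundary = () => {
      const [error, setError] = useState(null);
      if (error) {
        throw error; // re-thrown during render; caught by the nearest error boundary
      }
      return setError;
    };

    // const throwToBoundary = useThrowToBoundary();
    // useEffect(() => { doAsyncWork().catch(throwToBoundary); }, []);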
@@ -4005,7 +4049,8 @@ var VideoForRendering = ({
       audioStreamIndex,
       trimAfter: trimAfterValue,
       trimBefore: trimBeforeValue,
-      fps
+      fps,
+      maxCacheSize
     }).then((result) => {
       if (result.type === "unknown-container-format") {
         if (disallowFallbackToOffthreadVideo) {
@@ -4064,8 +4109,8 @@ var VideoForRendering = ({
       if (!context) {
         return;
       }
-      context.canvas.width = imageBitmap instanceof ImageBitmap ? imageBitmap.width : imageBitmap.displayWidth;
-      context.canvas.height = imageBitmap instanceof ImageBitmap ? imageBitmap.height : imageBitmap.displayHeight;
+      context.canvas.width = imageBitmap.width;
+      context.canvas.height = imageBitmap.height;
       context.canvas.style.aspectRatio = `${context.canvas.width} / ${context.canvas.height}`;
       context.drawImage(imageBitmap, 0, 0);
       imageBitmap.close();
@@ -4104,8 +4149,8 @@ var VideoForRendering = ({
         });
       }
       continueRender(newHandle);
-    }).catch((error) => {
-      cancelRender3(error);
+    }).catch((err) => {
+      cancelRender3(err);
     });
     return () => {
       continueRender(newHandle);
@@ -4139,7 +4184,9 @@ var VideoForRendering = ({
     trimAfterValue,
     trimBeforeValue,
     audioEnabled,
-    videoEnabled
+    videoEnabled,
+    maxCacheSize,
+    cancelRender3
   ]);
   const classNameValue = useMemo5(() => {
     return [Internals16.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals16.truthy).join(" ");
@@ -4181,7 +4228,9 @@ var VideoForRendering = ({
   });
   if (loop) {
     if (!replaceWithOffthreadVideo.durationInSeconds) {
-      cancelRender3(new Error(`Cannot render video ${src}: @remotion/media was unable to render, and fell back to <OffthreadVideo>. Also, "loop" was set, but <OffthreadVideo> does not support looping and @remotion/media could also not determine the duration of the video.`));
+      const err = new Error(`Cannot render video ${src}: @remotion/media was unable to render, and fell back to <OffthreadVideo>. Also, "loop" was set, but <OffthreadVideo> does not support looping and @remotion/media could also not determine the duration of the video.`);
+      cancelRender3(err);
+      throw err;
     }
     return /* @__PURE__ */ jsx5(Loop, {
       layout: "none",
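
A closing note on the loop error path above: the error is now constructed once, reported via `cancelRender3` (obtained from `useDelayRender2` rather than imported at module level), and then thrown. The added `throw` makes the halt explicit instead of relying on `cancelRender3` to interrupt execution, which also keeps the code after the guard unreachable when the duration is unknown:

    if (!replaceWithOffthreadVideo.durationInSeconds) {
      const err = new Error("cannot loop without a known duration"); // message shortened here
      cancelRender3(err); // report the failure to the render pipeline
      throw err;          // stop this render path; nothing below may run
    }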