@remotion/media 4.0.362 → 4.0.363

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -5,7 +5,7 @@ import { applyVolume } from '../convert-audiodata/apply-volume';
  import { TARGET_SAMPLE_RATE } from '../convert-audiodata/resample-audiodata';
  import { frameForVolumeProp } from '../looped-frame';
  import { extractFrameViaBroadcastChannel } from '../video-extraction/extract-frame-via-broadcast-channel';
- export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted, loopVolumeCurveBehavior, delayRenderRetries, delayRenderTimeoutInMilliseconds, logLevel = window.remotion_logLevel, loop, fallbackHtml5AudioProps, audioStreamIndex, showInTimeline, style, name, disallowFallbackToHtml5Audio, toneFrequency, trimAfter, trimBefore, }) => {
+ export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted, loopVolumeCurveBehavior, delayRenderRetries, delayRenderTimeoutInMilliseconds, logLevel, loop, fallbackHtml5AudioProps, audioStreamIndex, showInTimeline, style, name, disallowFallbackToHtml5Audio, toneFrequency, trimAfter, trimBefore, }) => {
  const frame = useCurrentFrame();
  const absoluteFrame = Internals.useTimelinePosition();
  const videoConfig = Internals.useUnsafeVideoConfig();
@@ -54,7 +54,7 @@ export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted
  timeInSeconds: timestamp,
  durationInSeconds,
  playbackRate: playbackRate ?? 1,
- logLevel,
+ logLevel: logLevel ?? window.remotion_logLevel,
  includeAudio: shouldRenderAudio,
  includeVideo: false,
  isClientSideRendering: environment.isClientSideRendering,
@@ -69,7 +69,10 @@ export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted
  if (disallowFallbackToHtml5Audio) {
  cancelRender(new Error(`Unknown container format ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
  }
- Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Html5Audio>`);
+ Internals.Log.warn({
+ logLevel: logLevel ?? window.remotion_logLevel,
+ tag: '@remotion/media',
+ }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Html5Audio>`);
  setReplaceWithHtml5Audio(true);
  return;
  }
@@ -77,7 +80,10 @@ export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted
  if (disallowFallbackToHtml5Audio) {
  cancelRender(new Error(`Cannot decode ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
  }
- Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Cannot decode ${src}, falling back to <Html5Audio>`);
+ Internals.Log.warn({
+ logLevel: logLevel ?? window.remotion_logLevel,
+ tag: '@remotion/media',
+ }, `Cannot decode ${src}, falling back to <Html5Audio>`);
  setReplaceWithHtml5Audio(true);
  return;
  }
@@ -88,7 +94,10 @@ export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted
  if (disallowFallbackToHtml5Audio) {
  cancelRender(new Error(`Cannot decode ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
  }
- Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Network error fetching ${src}, falling back to <Html5Audio>`);
+ Internals.Log.warn({
+ logLevel: logLevel ?? window.remotion_logLevel,
+ tag: '@remotion/media',
+ }, `Network error fetching ${src}, falling back to <Html5Audio>`);
  setReplaceWithHtml5Audio(true);
  return;
  }
@@ -17,5 +17,4 @@ export const Audio = (props) => {
  }
  return _jsx(AudioForPreview, { name: name, ...otherProps, stack: stack ?? null });
  };
- // TODO: Doesn't work
  Internals.addSequenceStackTraces(Audio);
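
Note on the logLevel change above: the default moves out of the destructured props (`logLevel = window.remotion_logLevel`, which is evaluated every time the component function runs) and the prop is instead resolved lazily at each use site via `logLevel ?? window.remotion_logLevel`. Together with the `typeof window` guards further down in this diff, this avoids touching `window` unless a value is actually needed. A minimal sketch of the pattern, assuming a hypothetical `resolveLogLevel` helper that is not part of the package:

    // Sketch only; illustrates deferring a window-backed default, not the package's actual API.
    type LogLevel = 'verbose' | 'info' | 'warn' | 'error'; // illustrative union

    // Only reads `window` when a value is needed, and tolerates window-less environments.
    const resolveLogLevel = (logLevel?: LogLevel): LogLevel =>
      logLevel ??
      (typeof window !== 'undefined'
        ? ((window as {remotion_logLevel?: LogLevel}).remotion_logLevel ?? 'info')
        : 'info');

    // Before (roughly): ({logLevel = window.remotion_logLevel}) => { ... }
    // After: keep the prop optional and resolve it where it is consumed.
    const logSomething = ({logLevel}: {logLevel?: LogLevel}) => {
      console.log(resolveLogLevel(logLevel), 'rendering audio');
    };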
package/dist/caches.js CHANGED
@@ -14,7 +14,8 @@ export const getTotalCacheStats = async () => {
  };
  };
  const getUncachedMaxCacheSize = (logLevel) => {
- if (window.remotion_mediaCacheSizeInBytes !== undefined &&
+ if (typeof window !== 'undefined' &&
+ window.remotion_mediaCacheSizeInBytes !== undefined &&
  window.remotion_mediaCacheSizeInBytes !== null) {
  if (window.remotion_mediaCacheSizeInBytes < 240 * 1024 * 1024) {
  cancelRender(new Error(`The minimum value for the "mediaCacheSizeInBytes" prop is 240MB (${240 * 1024 * 1024}), got: ${window.remotion_mediaCacheSizeInBytes}`));
@@ -25,7 +26,8 @@ const getUncachedMaxCacheSize = (logLevel) => {
  Internals.Log.verbose({ logLevel, tag: '@remotion/media' }, `Using cache size set using "mediaCacheSizeInBytes": ${(window.remotion_mediaCacheSizeInBytes / 1024 / 1024).toFixed(1)} MB`);
  return window.remotion_mediaCacheSizeInBytes;
  }
- if (window.remotion_initialMemoryAvailable !== undefined &&
+ if (typeof window !== 'undefined' &&
+ window.remotion_initialMemoryAvailable !== undefined &&
  window.remotion_initialMemoryAvailable !== null) {
  const value = window.remotion_initialMemoryAvailable / 2;
  if (value < 500 * 1024 * 1024) {
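
Both branches above now check `typeof window !== 'undefined'` before reading the `window.remotion_*` globals, so evaluating the cache-size logic outside a browser (for example in a worker or during server-side rendering) no longer throws a ReferenceError. A small sketch of the guard, using a hypothetical `readWindowNumber` helper rather than the package's real code:

    // Sketch: read an optional browser-only global without assuming `window` exists.
    const readWindowNumber = (key: string): number | undefined => {
      if (typeof window === 'undefined') {
        return undefined; // behave as if the flag was never set
      }
      const value = (window as unknown as Record<string, unknown>)[key];
      return typeof value === 'number' ? value : undefined;
    };

    // Mirrors the precedence in getUncachedMaxCacheSize(): an explicit setting wins,
    // otherwise derive a budget from available memory (the final fallback value is illustrative).
    const explicit = readWindowNumber('remotion_mediaCacheSizeInBytes');
    const available = readWindowNumber('remotion_initialMemoryAvailable');
    const maxCacheSize = explicit ?? (available !== undefined ? available / 2 : 500 * 1024 * 1024);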
@@ -1591,13 +1591,39 @@ var makeKeyframeBank = ({
  startTimestampInSeconds,
  endTimestampInSeconds,
  sampleIterator,
- logLevel: parentLogLevel
+ logLevel: parentLogLevel,
+ src
  }) => {
  Internals8.Log.verbose({ logLevel: parentLogLevel, tag: "@remotion/media" }, `Creating keyframe bank from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
  const frames = {};
  const frameTimestamps = [];
  let lastUsed = Date.now();
  let allocationSize = 0;
+ const deleteFramesBeforeTimestamp = ({
+ logLevel,
+ timestampInSeconds
+ }) => {
+ const deletedTimestamps = [];
+ for (const frameTimestamp of frameTimestamps.slice()) {
+ const isLast = frameTimestamp === frameTimestamps[frameTimestamps.length - 1];
+ if (isLast) {
+ continue;
+ }
+ if (frameTimestamp < timestampInSeconds) {
+ if (!frames[frameTimestamp]) {
+ continue;
+ }
+ allocationSize -= frames[frameTimestamp].allocationSize();
+ frameTimestamps.splice(frameTimestamps.indexOf(frameTimestamp), 1);
+ frames[frameTimestamp].close();
+ delete frames[frameTimestamp];
+ deletedTimestamps.push(frameTimestamp);
+ }
+ }
+ if (deletedTimestamps.length > 0) {
+ Internals8.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted ${deletedTimestamps.length} frame${deletedTimestamps.length === 1 ? "" : "s"} ${renderTimestampRange(deletedTimestamps)} for src ${src} because it is lower than ${timestampInSeconds}. Remaining: ${renderTimestampRange(frameTimestamps)}`);
+ }
+ };
  const hasDecodedEnoughForTimestamp = (timestamp) => {
  const lastFrameTimestamp = frameTimestamps[frameTimestamps.length - 1];
  if (!lastFrameTimestamp) {
@@ -1615,8 +1641,8 @@ var makeKeyframeBank = ({
  allocationSize += frame.allocationSize();
  lastUsed = Date.now();
  };
- const ensureEnoughFramesForTimestamp = async (timestamp) => {
- while (!hasDecodedEnoughForTimestamp(timestamp)) {
+ const ensureEnoughFramesForTimestamp = async (timestampInSeconds) => {
+ while (!hasDecodedEnoughForTimestamp(timestampInSeconds)) {
  const sample = await sampleIterator.next();
  if (sample.value) {
  addFrame(sample.value);
@@ -1624,6 +1650,10 @@ var makeKeyframeBank = ({
  if (sample.done) {
  break;
  }
+ deleteFramesBeforeTimestamp({
+ logLevel: parentLogLevel,
+ timestampInSeconds: timestampInSeconds - SAFE_BACK_WINDOW_IN_SECONDS
+ });
  }
  lastUsed = Date.now();
  };
@@ -1658,6 +1688,7 @@ var makeKeyframeBank = ({
  }
  return null;
  });
+ let framesDeleted = 0;
  for (const frameTimestamp of frameTimestamps) {
  if (!frames[frameTimestamp]) {
  continue;
@@ -1665,34 +1696,10 @@ var makeKeyframeBank = ({
  allocationSize -= frames[frameTimestamp].allocationSize();
  frames[frameTimestamp].close();
  delete frames[frameTimestamp];
+ framesDeleted++;
  }
  frameTimestamps.length = 0;
- };
- const deleteFramesBeforeTimestamp = ({
- logLevel,
- src,
- timestampInSeconds
- }) => {
- const deletedTimestamps = [];
- for (const frameTimestamp of frameTimestamps.slice()) {
- const isLast = frameTimestamp === frameTimestamps[frameTimestamps.length - 1];
- if (isLast) {
- continue;
- }
- if (frameTimestamp < timestampInSeconds) {
- if (!frames[frameTimestamp]) {
- continue;
- }
- allocationSize -= frames[frameTimestamp].allocationSize();
- frameTimestamps.splice(frameTimestamps.indexOf(frameTimestamp), 1);
- frames[frameTimestamp].close();
- delete frames[frameTimestamp];
- deletedTimestamps.push(frameTimestamp);
- }
- }
- if (deletedTimestamps.length > 0) {
- Internals8.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted ${deletedTimestamps.length} frame${deletedTimestamps.length === 1 ? "" : "s"} ${renderTimestampRange(deletedTimestamps)} for src ${src} because it is lower than ${timestampInSeconds}. Remaining: ${renderTimestampRange(frameTimestamps)}`);
- }
+ return { framesDeleted };
  };
  const getOpenFrameCount = () => {
  return {
@@ -1711,13 +1718,11 @@ var makeKeyframeBank = ({
  queue = queue.then(() => getFrameFromTimestamp(timestamp));
  return queue;
  },
- prepareForDeletion: (logLevel) => {
- queue = queue.then(() => prepareForDeletion(logLevel));
- return queue;
- },
+ prepareForDeletion,
  hasTimestampInSecond,
  addFrame,
  deleteFramesBeforeTimestamp,
+ src,
  getOpenFrameCount,
  getLastUsed
  };
@@ -1829,7 +1834,8 @@ var getFramesSinceKeyframe = async ({
  packetSink,
  videoSampleSink,
  startPacket,
- logLevel
+ logLevel,
+ src
  }) => {
  const nextKeyPacket = await packetSink.getNextKeyPacket(startPacket, {
  verifyKeyPackets: true
@@ -1839,7 +1845,8 @@ var getFramesSinceKeyframe = async ({
  startTimestampInSeconds: startPacket.timestamp,
  endTimestampInSeconds: nextKeyPacket ? nextKeyPacket.timestamp : Infinity,
  sampleIterator,
- logLevel
+ logLevel,
+ src
  });
  return keyframeBank;
  };
@@ -1891,6 +1898,7 @@ var makeKeyframeManager = () => {
  const getTheKeyframeBankMostInThePast = async () => {
  let mostInThePast = null;
  let mostInThePastBank = null;
+ let numberOfBanks = 0;
  for (const src in sources) {
  for (const b in sources[src]) {
  const bank = await sources[src][b];
@@ -1899,26 +1907,38 @@ var makeKeyframeManager = () => {
  mostInThePast = lastUsed;
  mostInThePastBank = { src, bank };
  }
+ numberOfBanks++;
  }
  }
  if (!mostInThePastBank) {
  throw new Error("No keyframe bank found");
  }
- return mostInThePastBank;
+ return { mostInThePastBank, numberOfBanks };
  };
  const deleteOldestKeyframeBank = async (logLevel) => {
- const { bank: mostInThePastBank, src: mostInThePastSrc } = await getTheKeyframeBankMostInThePast();
+ const {
+ mostInThePastBank: { bank: mostInThePastBank, src: mostInThePastSrc },
+ numberOfBanks
+ } = await getTheKeyframeBankMostInThePast();
+ if (numberOfBanks < 2) {
+ return { finish: true };
+ }
  if (mostInThePastBank) {
- await mostInThePastBank.prepareForDeletion(logLevel);
+ const { framesDeleted } = mostInThePastBank.prepareForDeletion(logLevel);
  delete sources[mostInThePastSrc][mostInThePastBank.startTimestampInSeconds];
- Internals9.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted frames for src ${mostInThePastSrc} from ${mostInThePastBank.startTimestampInSeconds}sec to ${mostInThePastBank.endTimestampInSeconds}sec to free up memory.`);
+ Internals9.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted ${framesDeleted} frames for src ${mostInThePastSrc} from ${mostInThePastBank.startTimestampInSeconds}sec to ${mostInThePastBank.endTimestampInSeconds}sec to free up memory.`);
  }
+ return { finish: false };
  };
  const ensureToStayUnderMaxCacheSize = async (logLevel) => {
  let cacheStats = await getTotalCacheStats();
  const maxCacheSize = getMaxVideoCacheSize(logLevel);
  while (cacheStats.totalSize > maxCacheSize) {
- await deleteOldestKeyframeBank(logLevel);
+ const { finish } = await deleteOldestKeyframeBank(logLevel);
+ if (finish) {
+ break;
+ }
+ Internals9.Log.verbose({ logLevel, tag: "@remotion/media" }, "Deleted oldest keyframe bank to stay under max cache size", (cacheStats.totalSize / 1024 / 1024).toFixed(1), "out of", (maxCacheSize / 1024 / 1024).toFixed(1));
  cacheStats = await getTotalCacheStats();
  }
  };
@@ -1936,14 +1956,13 @@ var makeKeyframeManager = () => {
  const bank = await sources[src][startTimeInSeconds];
  const { endTimestampInSeconds, startTimestampInSeconds } = bank;
  if (endTimestampInSeconds < threshold) {
- await bank.prepareForDeletion(logLevel);
+ bank.prepareForDeletion(logLevel);
  Internals9.Log.verbose({ logLevel, tag: "@remotion/media" }, `[Video] Cleared frames for src ${src} from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
  delete sources[src][startTimeInSeconds];
  } else {
  bank.deleteFramesBeforeTimestamp({
  timestampInSeconds: threshold,
- logLevel,
- src
+ logLevel
  });
  }
  }
@@ -1973,7 +1992,8 @@ var makeKeyframeManager = () => {
  packetSink,
  videoSampleSink,
  startPacket,
- logLevel
+ logLevel,
+ src
  });
  addKeyframeBank({ src, bank: newKeyframeBank, startTimestampInSeconds });
  return newKeyframeBank;
@@ -1988,7 +2008,8 @@ var makeKeyframeManager = () => {
  packetSink,
  videoSampleSink,
  startPacket,
- logLevel
+ logLevel,
+ src
  });
  addKeyframeBank({ src, bank: replacementKeybank, startTimestampInSeconds });
  return replacementKeybank;
@@ -2062,7 +2083,7 @@ var getTotalCacheStats = async () => {
  };
  };
  var getUncachedMaxCacheSize = (logLevel) => {
- if (window.remotion_mediaCacheSizeInBytes !== undefined && window.remotion_mediaCacheSizeInBytes !== null) {
+ if (typeof window !== "undefined" && window.remotion_mediaCacheSizeInBytes !== undefined && window.remotion_mediaCacheSizeInBytes !== null) {
  if (window.remotion_mediaCacheSizeInBytes < 240 * 1024 * 1024) {
  cancelRender(new Error(`The minimum value for the "mediaCacheSizeInBytes" prop is 240MB (${240 * 1024 * 1024}), got: ${window.remotion_mediaCacheSizeInBytes}`));
  }
@@ -2072,7 +2093,7 @@ var getUncachedMaxCacheSize = (logLevel) => {
  Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set using "mediaCacheSizeInBytes": ${(window.remotion_mediaCacheSizeInBytes / 1024 / 1024).toFixed(1)} MB`);
  return window.remotion_mediaCacheSizeInBytes;
  }
- if (window.remotion_initialMemoryAvailable !== undefined && window.remotion_initialMemoryAvailable !== null) {
+ if (typeof window !== "undefined" && window.remotion_initialMemoryAvailable !== undefined && window.remotion_initialMemoryAvailable !== null) {
  const value = window.remotion_initialMemoryAvailable / 2;
  if (value < 500 * 1024 * 1024) {
  Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on minimum value of 500MB (which is more than half of the available system memory!)`);
@@ -2446,7 +2467,7 @@ var extractFrameAndAudio = async ({
  };

  // src/video-extraction/extract-frame-via-broadcast-channel.ts
- if (window.remotion_broadcastChannel && window.remotion_isMainTab) {
+ if (typeof window !== "undefined" && window.remotion_broadcastChannel && window.remotion_isMainTab) {
  window.remotion_broadcastChannel.addEventListener("message", async (event) => {
  const data = event.data;
  if (data.type === "request") {
@@ -2654,7 +2675,7 @@ var AudioForRendering = ({
  loopVolumeCurveBehavior,
  delayRenderRetries,
  delayRenderTimeoutInMilliseconds,
- logLevel = window.remotion_logLevel,
+ logLevel,
  loop,
  fallbackHtml5AudioProps,
  audioStreamIndex,
@@ -2712,7 +2733,7 @@ var AudioForRendering = ({
  timeInSeconds: timestamp,
  durationInSeconds,
  playbackRate: playbackRate ?? 1,
- logLevel,
+ logLevel: logLevel ?? window.remotion_logLevel,
  includeAudio: shouldRenderAudio,
  includeVideo: false,
  isClientSideRendering: environment.isClientSideRendering,
@@ -2726,7 +2747,10 @@ var AudioForRendering = ({
  if (disallowFallbackToHtml5Audio) {
  cancelRender2(new Error(`Unknown container format ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
  }
- Internals12.Log.warn({ logLevel, tag: "@remotion/media" }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Html5Audio>`);
+ Internals12.Log.warn({
+ logLevel: logLevel ?? window.remotion_logLevel,
+ tag: "@remotion/media"
+ }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Html5Audio>`);
  setReplaceWithHtml5Audio(true);
  return;
  }
@@ -2734,7 +2758,10 @@ var AudioForRendering = ({
  if (disallowFallbackToHtml5Audio) {
  cancelRender2(new Error(`Cannot decode ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
  }
- Internals12.Log.warn({ logLevel, tag: "@remotion/media" }, `Cannot decode ${src}, falling back to <Html5Audio>`);
+ Internals12.Log.warn({
+ logLevel: logLevel ?? window.remotion_logLevel,
+ tag: "@remotion/media"
+ }, `Cannot decode ${src}, falling back to <Html5Audio>`);
  setReplaceWithHtml5Audio(true);
  return;
  }
@@ -2745,7 +2772,10 @@ var AudioForRendering = ({
  if (disallowFallbackToHtml5Audio) {
  cancelRender2(new Error(`Cannot decode ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
  }
- Internals12.Log.warn({ logLevel, tag: "@remotion/media" }, `Network error fetching ${src}, falling back to <Html5Audio>`);
+ Internals12.Log.warn({
+ logLevel: logLevel ?? window.remotion_logLevel,
+ tag: "@remotion/media"
+ }, `Network error fetching ${src}, falling back to <Html5Audio>`);
  setReplaceWithHtml5Audio(true);
  return;
  }
@@ -3183,26 +3213,6 @@ import {
  useRemotionEnvironment as useRemotionEnvironment3,
  useVideoConfig as useVideoConfig2
  } from "remotion";
-
- // ../core/src/calculate-media-duration.ts
- var calculateMediaDuration = ({
- trimAfter,
- mediaDurationInFrames,
- playbackRate,
- trimBefore
- }) => {
- let duration = mediaDurationInFrames;
- if (typeof trimAfter !== "undefined") {
- duration = trimAfter;
- }
- if (typeof trimBefore !== "undefined") {
- duration -= trimBefore;
- }
- const actualDuration = duration / playbackRate;
- return Math.floor(actualDuration);
- };
-
- // src/video/video-for-rendering.tsx
  import { jsx as jsx5 } from "react/jsx-runtime";
  var VideoForRendering = ({
  volume: volumeProp,
@@ -3245,6 +3255,8 @@ var VideoForRendering = ({
  const { delayRender, continueRender } = useDelayRender2();
  const canvasRef = useRef3(null);
  const [replaceWithOffthreadVideo, setReplaceWithOffthreadVideo] = useState5(false);
+ const audioEnabled = Internals15.useAudioEnabled();
+ const videoEnabled = Internals15.useVideoEnabled();
  useLayoutEffect2(() => {
  if (!canvasRef.current) {
  return;
@@ -3259,7 +3271,7 @@ var VideoForRendering = ({
  timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined
  });
  const shouldRenderAudio = (() => {
- if (!window.remotion_audioEnabled) {
+ if (!audioEnabled) {
  return false;
  }
  if (muted) {
@@ -3274,7 +3286,7 @@ var VideoForRendering = ({
  playbackRate,
  logLevel,
  includeAudio: shouldRenderAudio,
- includeVideo: window.remotion_videoEnabled,
+ includeVideo: videoEnabled,
  isClientSideRendering: environment.isClientSideRendering,
  loop,
  audioStreamIndex,
@@ -3344,7 +3356,7 @@ var VideoForRendering = ({
  context.canvas.style.aspectRatio = `${context.canvas.width} / ${context.canvas.height}`;
  context.drawImage(imageBitmap, 0, 0);
  imageBitmap.close();
- } else if (window.remotion_videoEnabled) {
+ } else if (videoEnabled) {
  const context = canvasRef.current?.getContext("2d", {
  alpha: true
  });
@@ -3412,7 +3424,9 @@ var VideoForRendering = ({
  disallowFallbackToOffthreadVideo,
  toneFrequency,
  trimAfterValue,
- trimBeforeValue
+ trimBeforeValue,
+ audioEnabled,
+ videoEnabled
  ]);
  const classNameValue = useMemo5(() => {
  return [Internals15.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals15.truthy).join(" ");
@@ -3458,7 +3472,7 @@ var VideoForRendering = ({
  }
  return /* @__PURE__ */ jsx5(Loop, {
  layout: "none",
- durationInFrames: calculateMediaDuration({
+ durationInFrames: Internals15.calculateMediaDuration({
  trimAfter: trimAfterValue,
  mediaDurationInFrames: replaceWithOffthreadVideo.durationInSeconds * fps,
  playbackRate,
@@ -3594,7 +3608,7 @@ var Video = ({
  delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? null,
  disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo ?? false,
  fallbackOffthreadVideoProps: fallbackOffthreadVideoProps ?? {},
- logLevel: logLevel ?? window.remotion_logLevel,
+ logLevel: logLevel ?? (typeof window !== "undefined" ? window.remotion_logLevel : "info"),
  loop: loop ?? false,
  loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? "repeat",
  muted: muted ?? false,
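
The bundle above no longer inlines `calculateMediaDuration` from `../core/src/calculate-media-duration`; the same logic is now reached through `Internals15.calculateMediaDuration` here and `Internals.calculateMediaDuration` in the compiled `dist` output further down. For reference, the deleted lines compute the looped duration like this (a typed restatement of the removed code, not new behavior):

    // Restates the deleted helper; the package now imports it via Internals.
    const calculateMediaDuration = ({
      trimAfter,
      mediaDurationInFrames,
      playbackRate,
      trimBefore,
    }: {
      trimAfter?: number;
      mediaDurationInFrames: number;
      playbackRate: number;
      trimBefore?: number;
    }): number => {
      // Start from the full media length, cap it at trimAfter, subtract trimBefore...
      let duration = mediaDurationInFrames;
      if (typeof trimAfter !== 'undefined') {
        duration = trimAfter;
      }
      if (typeof trimBefore !== 'undefined') {
        duration -= trimBefore;
      }
      // ...then scale by the playback rate and round down to whole frames.
      return Math.floor(duration / playbackRate);
    };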
@@ -1,7 +1,6 @@
  import { jsx as _jsx } from "react/jsx-runtime";
  import { useContext, useLayoutEffect, useMemo, useRef, useState, } from 'react';
  import { cancelRender, Internals, Loop, random, useCurrentFrame, useDelayRender, useRemotionEnvironment, useVideoConfig, } from 'remotion';
- import { calculateMediaDuration } from '../../../core/src/calculate-media-duration';
  import { applyVolume } from '../convert-audiodata/apply-volume';
  import { TARGET_SAMPLE_RATE } from '../convert-audiodata/resample-audiodata';
  import { frameForVolumeProp } from '../looped-frame';
@@ -28,6 +27,8 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
  const { delayRender, continueRender } = useDelayRender();
  const canvasRef = useRef(null);
  const [replaceWithOffthreadVideo, setReplaceWithOffthreadVideo] = useState(false);
+ const audioEnabled = Internals.useAudioEnabled();
+ const videoEnabled = Internals.useVideoEnabled();
  useLayoutEffect(() => {
  if (!canvasRef.current) {
  return;
@@ -42,7 +43,7 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
  timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined,
  });
  const shouldRenderAudio = (() => {
- if (!window.remotion_audioEnabled) {
+ if (!audioEnabled) {
  return false;
  }
  if (muted) {
@@ -57,7 +58,7 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
  playbackRate,
  logLevel,
  includeAudio: shouldRenderAudio,
- includeVideo: window.remotion_videoEnabled,
+ includeVideo: videoEnabled,
  isClientSideRendering: environment.isClientSideRendering,
  loop,
  audioStreamIndex,
@@ -131,8 +132,7 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
  context.drawImage(imageBitmap, 0, 0);
  imageBitmap.close();
  }
- else if (window.remotion_videoEnabled) {
- // In the case of https://discord.com/channels/809501355504959528/809501355504959531/1424400511070765086
+ else if (videoEnabled) {
  // A video that only starts at time 0.033sec
  // we shall not crash here but clear the canvas
  const context = canvasRef.current?.getContext('2d', {
@@ -204,6 +204,8 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
  toneFrequency,
  trimAfterValue,
  trimBeforeValue,
+ audioEnabled,
+ videoEnabled,
  ]);
  const classNameValue = useMemo(() => {
  return [Internals.OBJECTFIT_CONTAIN_CLASS_NAME, className]
@@ -218,7 +220,7 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
  if (!replaceWithOffthreadVideo.durationInSeconds) {
  cancelRender(new Error(`Cannot render video ${src}: @remotion/media was unable to render, and fell back to <OffthreadVideo>. Also, "loop" was set, but <OffthreadVideo> does not support looping and @remotion/media could also not determine the duration of the video.`));
  }
- return (_jsx(Loop, { layout: "none", durationInFrames: calculateMediaDuration({
+ return (_jsx(Loop, { layout: "none", durationInFrames: Internals.calculateMediaDuration({
  trimAfter: trimAfterValue,
  mediaDurationInFrames: replaceWithOffthreadVideo.durationInSeconds * fps,
  playbackRate,
@@ -27,6 +27,7 @@ const InnerVideo = ({ src, audioStreamIndex, className, delayRenderRetries, dela
  return (_jsx(VideoForPreview, { audioStreamIndex: audioStreamIndex ?? 0, className: className, name: name, logLevel: logLevel, loop: loop, loopVolumeCurveBehavior: loopVolumeCurveBehavior, muted: muted, onVideoFrame: onVideoFrame, playbackRate: playbackRate, src: src, style: style, volume: volume, showInTimeline: showInTimeline, trimAfter: trimAfterValue, trimBefore: trimBeforeValue, stack: stack ?? null, disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo, fallbackOffthreadVideoProps: fallbackOffthreadVideoProps }));
  };
  export const Video = ({ src, audioStreamIndex, className, delayRenderRetries, delayRenderTimeoutInMilliseconds, disallowFallbackToOffthreadVideo, fallbackOffthreadVideoProps, logLevel, loop, loopVolumeCurveBehavior, muted, name, onVideoFrame, playbackRate, showInTimeline, style, trimAfter, trimBefore, volume, stack, toneFrequency, }) => {
- return (_jsx(InnerVideo, { audioStreamIndex: audioStreamIndex ?? 0, className: className, delayRenderRetries: delayRenderRetries ?? null, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? null, disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo ?? false, fallbackOffthreadVideoProps: fallbackOffthreadVideoProps ?? {}, logLevel: logLevel ?? window.remotion_logLevel, loop: loop ?? false, loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? 'repeat', muted: muted ?? false, name: name, onVideoFrame: onVideoFrame, playbackRate: playbackRate ?? 1, showInTimeline: showInTimeline ?? true, src: src, style: style ?? {}, trimAfter: trimAfter, trimBefore: trimBefore, volume: volume ?? 1, toneFrequency: toneFrequency ?? 1, stack: stack }));
+ return (_jsx(InnerVideo, { audioStreamIndex: audioStreamIndex ?? 0, className: className, delayRenderRetries: delayRenderRetries ?? null, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? null, disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo ?? false, fallbackOffthreadVideoProps: fallbackOffthreadVideoProps ?? {}, logLevel: logLevel ??
+ (typeof window !== 'undefined' ? window.remotion_logLevel : 'info'), loop: loop ?? false, loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? 'repeat', muted: muted ?? false, name: name, onVideoFrame: onVideoFrame, playbackRate: playbackRate ?? 1, showInTimeline: showInTimeline ?? true, src: src, style: style ?? {}, trimAfter: trimAfter, trimBefore: trimBefore, volume: volume ?? 1, toneFrequency: toneFrequency ?? 1, stack: stack }));
  };
  Internals.addSequenceStackTraces(Video);
@@ -1,6 +1,8 @@
  import { extractFrameAndAudio } from '../extract-frame-and-audio';
  // Doesn't exist in studio
- if (window.remotion_broadcastChannel && window.remotion_isMainTab) {
+ if (typeof window !== 'undefined' &&
+ window.remotion_broadcastChannel &&
+ window.remotion_isMainTab) {
  window.remotion_broadcastChannel.addEventListener('message', async (event) => {
  const data = event.data;
  if (data.type === 'request') {
@@ -21,10 +21,11 @@ export declare const getSinks: (src: string) => Promise<{
  getDuration: () => Promise<number>;
  }>;
  export type GetSink = Awaited<ReturnType<typeof getSinks>>;
- export declare const getFramesSinceKeyframe: ({ packetSink, videoSampleSink, startPacket, logLevel, }: {
+ export declare const getFramesSinceKeyframe: ({ packetSink, videoSampleSink, startPacket, logLevel, src, }: {
  packetSink: EncodedPacketSink;
  videoSampleSink: VideoSampleSink;
  startPacket: EncodedPacket;
  logLevel: LogLevel;
+ src: string;
  }) => Promise<import("./keyframe-bank").KeyframeBank>;
  export {};
@@ -82,7 +82,7 @@ export const getSinks = async (src) => {
  },
  };
  };
- export const getFramesSinceKeyframe = async ({ packetSink, videoSampleSink, startPacket, logLevel, }) => {
+ export const getFramesSinceKeyframe = async ({ packetSink, videoSampleSink, startPacket, logLevel, src, }) => {
  const nextKeyPacket = await packetSink.getNextKeyPacket(startPacket, {
  verifyKeyPackets: true,
  });
@@ -92,6 +92,7 @@ export const getFramesSinceKeyframe = async ({ packetSink, videoSampleSink, star
  endTimestampInSeconds: nextKeyPacket ? nextKeyPacket.timestamp : Infinity,
  sampleIterator,
  logLevel,
+ src,
  });
  return keyframeBank;
  };
@@ -1,14 +1,16 @@
  import type { VideoSample } from 'mediabunny';
  import { type LogLevel } from 'remotion';
  export type KeyframeBank = {
+ src: string;
  startTimestampInSeconds: number;
  endTimestampInSeconds: number;
  getFrameFromTimestamp: (timestamp: number) => Promise<VideoSample | null>;
- prepareForDeletion: (logLevel: LogLevel) => void;
- deleteFramesBeforeTimestamp: ({ logLevel, src, timestampInSeconds, }: {
+ prepareForDeletion: (logLevel: LogLevel) => {
+ framesDeleted: number;
+ };
+ deleteFramesBeforeTimestamp: ({ logLevel, timestampInSeconds, }: {
  timestampInSeconds: number;
  logLevel: LogLevel;
- src: string;
  }) => void;
  hasTimestampInSecond: (timestamp: number) => Promise<boolean>;
  addFrame: (frame: VideoSample) => void;
@@ -18,9 +20,10 @@ export type KeyframeBank = {
  };
  getLastUsed: () => number;
  };
- export declare const makeKeyframeBank: ({ startTimestampInSeconds, endTimestampInSeconds, sampleIterator, logLevel: parentLogLevel, }: {
+ export declare const makeKeyframeBank: ({ startTimestampInSeconds, endTimestampInSeconds, sampleIterator, logLevel: parentLogLevel, src, }: {
  startTimestampInSeconds: number;
  endTimestampInSeconds: number;
  sampleIterator: AsyncGenerator<VideoSample, void, unknown>;
  logLevel: LogLevel;
+ src: string;
  }) => KeyframeBank;
@@ -1,15 +1,39 @@
  import { Internals } from 'remotion';
+ import { SAFE_BACK_WINDOW_IN_SECONDS } from '../caches';
  import { renderTimestampRange } from '../render-timestamp-range';
  // Round to only 4 digits, because WebM has a timescale of 1_000, e.g. framer.webm
  const roundTo4Digits = (timestamp) => {
  return Math.round(timestamp * 1000) / 1000;
  };
- export const makeKeyframeBank = ({ startTimestampInSeconds, endTimestampInSeconds, sampleIterator, logLevel: parentLogLevel, }) => {
+ export const makeKeyframeBank = ({ startTimestampInSeconds, endTimestampInSeconds, sampleIterator, logLevel: parentLogLevel, src, }) => {
  Internals.Log.verbose({ logLevel: parentLogLevel, tag: '@remotion/media' }, `Creating keyframe bank from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
  const frames = {};
  const frameTimestamps = [];
  let lastUsed = Date.now();
  let allocationSize = 0;
+ const deleteFramesBeforeTimestamp = ({ logLevel, timestampInSeconds, }) => {
+ const deletedTimestamps = [];
+ for (const frameTimestamp of frameTimestamps.slice()) {
+ const isLast = frameTimestamp === frameTimestamps[frameTimestamps.length - 1];
+ // Don't delete the last frame, since it may be the last one in the video!
+ if (isLast) {
+ continue;
+ }
+ if (frameTimestamp < timestampInSeconds) {
+ if (!frames[frameTimestamp]) {
+ continue;
+ }
+ allocationSize -= frames[frameTimestamp].allocationSize();
+ frameTimestamps.splice(frameTimestamps.indexOf(frameTimestamp), 1);
+ frames[frameTimestamp].close();
+ delete frames[frameTimestamp];
+ deletedTimestamps.push(frameTimestamp);
+ }
+ }
+ if (deletedTimestamps.length > 0) {
+ Internals.Log.verbose({ logLevel, tag: '@remotion/media' }, `Deleted ${deletedTimestamps.length} frame${deletedTimestamps.length === 1 ? '' : 's'} ${renderTimestampRange(deletedTimestamps)} for src ${src} because it is lower than ${timestampInSeconds}. Remaining: ${renderTimestampRange(frameTimestamps)}`);
+ }
+ };
  const hasDecodedEnoughForTimestamp = (timestamp) => {
  const lastFrameTimestamp = frameTimestamps[frameTimestamps.length - 1];
  if (!lastFrameTimestamp) {
@@ -29,8 +53,8 @@ export const makeKeyframeBank = ({ startTimestampInSeconds, endTimestampInSecond
  allocationSize += frame.allocationSize();
  lastUsed = Date.now();
  };
- const ensureEnoughFramesForTimestamp = async (timestamp) => {
- while (!hasDecodedEnoughForTimestamp(timestamp)) {
+ const ensureEnoughFramesForTimestamp = async (timestampInSeconds) => {
+ while (!hasDecodedEnoughForTimestamp(timestampInSeconds)) {
  const sample = await sampleIterator.next();
  if (sample.value) {
  addFrame(sample.value);
@@ -38,6 +62,10 @@ export const makeKeyframeBank = ({ startTimestampInSeconds, endTimestampInSecond
  if (sample.done) {
  break;
  }
+ deleteFramesBeforeTimestamp({
+ logLevel: parentLogLevel,
+ timestampInSeconds: timestampInSeconds - SAFE_BACK_WINDOW_IN_SECONDS,
+ });
  }
  lastUsed = Date.now();
  };
@@ -77,6 +105,7 @@ export const makeKeyframeBank = ({ startTimestampInSeconds, endTimestampInSecond
  }
  return null;
  });
+ let framesDeleted = 0;
  for (const frameTimestamp of frameTimestamps) {
  if (!frames[frameTimestamp]) {
  continue;
@@ -84,31 +113,10 @@ export const makeKeyframeBank = ({ startTimestampInSeconds, endTimestampInSecond
  allocationSize -= frames[frameTimestamp].allocationSize();
  frames[frameTimestamp].close();
  delete frames[frameTimestamp];
+ framesDeleted++;
  }
  frameTimestamps.length = 0;
- };
- const deleteFramesBeforeTimestamp = ({ logLevel, src, timestampInSeconds, }) => {
- const deletedTimestamps = [];
- for (const frameTimestamp of frameTimestamps.slice()) {
- const isLast = frameTimestamp === frameTimestamps[frameTimestamps.length - 1];
- // Don't delete the last frame, since it may be the last one in the video!
- if (isLast) {
- continue;
- }
- if (frameTimestamp < timestampInSeconds) {
- if (!frames[frameTimestamp]) {
- continue;
- }
- allocationSize -= frames[frameTimestamp].allocationSize();
- frameTimestamps.splice(frameTimestamps.indexOf(frameTimestamp), 1);
- frames[frameTimestamp].close();
- delete frames[frameTimestamp];
- deletedTimestamps.push(frameTimestamp);
- }
- }
- if (deletedTimestamps.length > 0) {
- Internals.Log.verbose({ logLevel, tag: '@remotion/media' }, `Deleted ${deletedTimestamps.length} frame${deletedTimestamps.length === 1 ? '' : 's'} ${renderTimestampRange(deletedTimestamps)} for src ${src} because it is lower than ${timestampInSeconds}. Remaining: ${renderTimestampRange(frameTimestamps)}`);
- }
+ return { framesDeleted };
  };
  const getOpenFrameCount = () => {
  return {
@@ -127,13 +135,11 @@ export const makeKeyframeBank = ({ startTimestampInSeconds, endTimestampInSecond
  queue = queue.then(() => getFrameFromTimestamp(timestamp));
  return queue;
  },
- prepareForDeletion: (logLevel) => {
- queue = queue.then(() => prepareForDeletion(logLevel));
- return queue;
- },
+ prepareForDeletion,
  hasTimestampInSecond,
  addFrame,
  deleteFramesBeforeTimestamp,
+ src,
  getOpenFrameCount,
  getLastUsed,
  };
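
In keyframe-bank.js above, `deleteFramesBeforeTimestamp` was hoisted above `ensureEnoughFramesForTimestamp` and is now also called while decoding forward: frames that fall more than `SAFE_BACK_WINDOW_IN_SECONDS` behind the requested timestamp are released as soon as new samples arrive, instead of only when the whole bank is evicted. A condensed sketch of that sliding-window idea (simplified types; `Frame`, `decodeNext` and the window value are placeholders, not the package's API):

    // Sliding-window sketch: decode forward, evict frames that trail too far behind.
    type Frame = {timestamp: number; close: () => void};

    const SAFE_BACK_WINDOW_IN_SECONDS = 5; // illustrative value; the real constant comes from ../caches

    const makeBank = (decodeNext: () => Promise<Frame | null>) => {
      const frames: Frame[] = [];

      const evictBefore = (cutoff: number) => {
        // Keep at least the newest frame; it may be the last one in the video.
        while (frames.length > 1 && frames[0].timestamp < cutoff) {
          frames.shift()?.close();
        }
      };

      const ensureEnoughFramesFor = async (timestampInSeconds: number) => {
        while (
          frames.length === 0 ||
          frames[frames.length - 1].timestamp < timestampInSeconds
        ) {
          const next = await decodeNext();
          if (!next) {
            break;
          }
          frames.push(next);
          evictBefore(timestampInSeconds - SAFE_BACK_WINDOW_IN_SECONDS);
        }
      };

      return {ensureEnoughFramesFor};
    };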
@@ -46,6 +46,7 @@ export const makeKeyframeManager = () => {
  const getTheKeyframeBankMostInThePast = async () => {
  let mostInThePast = null;
  let mostInThePastBank = null;
+ let numberOfBanks = 0;
  for (const src in sources) {
  for (const b in sources[src]) {
  const bank = await sources[src][b];
@@ -54,26 +55,35 @@ export const makeKeyframeManager = () => {
  mostInThePast = lastUsed;
  mostInThePastBank = { src, bank };
  }
+ numberOfBanks++;
  }
  }
  if (!mostInThePastBank) {
  throw new Error('No keyframe bank found');
  }
- return mostInThePastBank;
+ return { mostInThePastBank, numberOfBanks };
  };
  const deleteOldestKeyframeBank = async (logLevel) => {
- const { bank: mostInThePastBank, src: mostInThePastSrc } = await getTheKeyframeBankMostInThePast();
+ const { mostInThePastBank: { bank: mostInThePastBank, src: mostInThePastSrc }, numberOfBanks, } = await getTheKeyframeBankMostInThePast();
+ if (numberOfBanks < 2) {
+ return { finish: true };
+ }
  if (mostInThePastBank) {
- await mostInThePastBank.prepareForDeletion(logLevel);
+ const { framesDeleted } = mostInThePastBank.prepareForDeletion(logLevel);
  delete sources[mostInThePastSrc][mostInThePastBank.startTimestampInSeconds];
- Internals.Log.verbose({ logLevel, tag: '@remotion/media' }, `Deleted frames for src ${mostInThePastSrc} from ${mostInThePastBank.startTimestampInSeconds}sec to ${mostInThePastBank.endTimestampInSeconds}sec to free up memory.`);
+ Internals.Log.verbose({ logLevel, tag: '@remotion/media' }, `Deleted ${framesDeleted} frames for src ${mostInThePastSrc} from ${mostInThePastBank.startTimestampInSeconds}sec to ${mostInThePastBank.endTimestampInSeconds}sec to free up memory.`);
  }
+ return { finish: false };
  };
  const ensureToStayUnderMaxCacheSize = async (logLevel) => {
  let cacheStats = await getTotalCacheStats();
  const maxCacheSize = getMaxVideoCacheSize(logLevel);
  while (cacheStats.totalSize > maxCacheSize) {
- await deleteOldestKeyframeBank(logLevel);
+ const { finish } = await deleteOldestKeyframeBank(logLevel);
+ if (finish) {
+ break;
+ }
+ Internals.Log.verbose({ logLevel, tag: '@remotion/media' }, 'Deleted oldest keyframe bank to stay under max cache size', (cacheStats.totalSize / 1024 / 1024).toFixed(1), 'out of', (maxCacheSize / 1024 / 1024).toFixed(1));
  cacheStats = await getTotalCacheStats();
  }
  };
@@ -87,7 +97,7 @@ export const makeKeyframeManager = () => {
  const bank = await sources[src][startTimeInSeconds];
  const { endTimestampInSeconds, startTimestampInSeconds } = bank;
  if (endTimestampInSeconds < threshold) {
- await bank.prepareForDeletion(logLevel);
+ bank.prepareForDeletion(logLevel);
  Internals.Log.verbose({ logLevel, tag: '@remotion/media' }, `[Video] Cleared frames for src ${src} from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
  delete sources[src][startTimeInSeconds];
  }
@@ -95,7 +105,6 @@ export const makeKeyframeManager = () => {
  bank.deleteFramesBeforeTimestamp({
  timestampInSeconds: threshold,
  logLevel,
- src,
  });
  }
  }
@@ -124,6 +133,7 @@ export const makeKeyframeManager = () => {
  videoSampleSink,
  startPacket,
  logLevel,
+ src,
  });
  addKeyframeBank({ src, bank: newKeyframeBank, startTimestampInSeconds });
  return newKeyframeBank;
@@ -143,6 +153,7 @@ export const makeKeyframeManager = () => {
  videoSampleSink,
  startPacket,
  logLevel,
+ src,
  });
  addKeyframeBank({ src, bank: replacementKeybank, startTimestampInSeconds });
  return replacementKeybank;
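
`deleteOldestKeyframeBank` above now reports `{finish: true}` when fewer than two banks exist, and `ensureToStayUnderMaxCacheSize` breaks out of its loop in that case; without the sentinel, a single remaining bank that still exceeds the size limit would keep the `while` loop spinning with nothing left to evict. A reduced, self-contained sketch of that guard (the `Bank` shape and the sizes are made up for illustration):

    // Eviction loop with a "finish" sentinel so it always terminates.
    type Bank = {lastUsed: number; sizeInBytes: number};

    const banks: Bank[] = [
      {lastUsed: 1, sizeInBytes: 300 * 1024 * 1024},
      {lastUsed: 2, sizeInBytes: 200 * 1024 * 1024},
    ];
    const maxCacheSize = 240 * 1024 * 1024;

    const deleteOldestKeyframeBank = (): {finish: boolean} => {
      if (banks.length < 2) {
        // Presumably the last bank is still in use; evicting it cannot help,
        // so tell the caller to stop instead of looping forever.
        return {finish: true};
      }
      banks.sort((a, b) => a.lastUsed - b.lastUsed);
      banks.shift(); // evict the least recently used bank
      return {finish: false};
    };

    const ensureToStayUnderMaxCacheSize = () => {
      let totalSize = banks.reduce((sum, b) => sum + b.sizeInBytes, 0);
      while (totalSize > maxCacheSize) {
        const {finish} = deleteOldestKeyframeBank();
        if (finish) {
          break;
        }
        totalSize = banks.reduce((sum, b) => sum + b.sizeInBytes, 0);
      }
    };

    ensureToStayUnderMaxCacheSize();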
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@remotion/media",
- "version": "4.0.362",
+ "version": "4.0.363",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "module": "dist/esm/index.mjs",
@@ -22,7 +22,7 @@
  },
  "dependencies": {
  "mediabunny": "1.23.0",
- "remotion": "4.0.362",
+ "remotion": "4.0.363",
  "webdriverio": "9.19.2"
  },
  "peerDependencies": {
@@ -30,7 +30,7 @@
  "react-dom": ">=16.8.0"
  },
  "devDependencies": {
- "@remotion/eslint-config-internal": "4.0.362",
+ "@remotion/eslint-config-internal": "4.0.363",
  "@vitest/browser": "^3.2.4",
  "eslint": "9.19.0",
  "react": "19.0.0",