@remotion/media 4.0.375 → 4.0.376

@@ -2,7 +2,7 @@ import { extractAudio } from './audio-extraction/extract-audio';
  import { isNetworkError } from './is-network-error';
  import { extractFrame } from './video-extraction/extract-frame';
  import { rotateFrame } from './video-extraction/rotate-frame';
- export const extractFrameAndAudio = async ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, loop, audioStreamIndex, trimAfter, trimBefore, fps, }) => {
+ export const extractFrameAndAudio = async ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, loop, audioStreamIndex, trimAfter, trimBefore, fps, maxCacheSize, }) => {
  try {
  const [frame, audio] = await Promise.all([
  includeVideo
@@ -15,6 +15,7 @@ export const extractFrameAndAudio = async ({ src, timeInSeconds, logLevel, durat
  playbackRate,
  trimBefore,
  fps,
+ maxCacheSize,
  })
  : null,
  includeAudio
@@ -29,6 +30,7 @@ export const extractFrameAndAudio = async ({ src, timeInSeconds, logLevel, durat
  trimAfter,
  fps,
  trimBefore,
+ maxCacheSize,
  })
  : null,
  ]);
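This first group of hunks threads a new `maxCacheSize` option through `extractFrameAndAudio` into both the frame and audio extraction paths. A minimal sketch of what a call with the new option might look like — the import path and concrete values are assumptions, and `maxCacheSize` appears to be a byte budget, given the `cacheStats.totalSize > maxCacheSize` comparison in the keyframe manager further down:

```ts
// Hypothetical call site; extractFrameAndAudio is an internal helper of
// @remotion/media, so the import path below is an assumption.
import {extractFrameAndAudio} from './extract-frame-and-audio';

const example = async () => {
  const result = await extractFrameAndAudio({
    src: 'https://example.com/video.mp4', // hypothetical source
    timeInSeconds: 1.5,
    logLevel: 'info',
    durationInSeconds: 1 / 30,
    playbackRate: 1,
    includeAudio: true,
    includeVideo: true,
    loop: false,
    audioStreamIndex: 0,
    trimAfter: undefined,
    trimBefore: undefined,
    fps: 30,
    maxCacheSize: 500 * 1024 * 1024, // new in 4.0.376: caller-supplied cache budget (assumed bytes)
  });
  return result;
};
```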
@@ -1,6 +1,7 @@
  import { jsx as _jsx } from "react/jsx-runtime";
  import { useContext, useLayoutEffect, useMemo, useRef, useState, } from 'react';
- import { cancelRender, Internals, Loop, random, useCurrentFrame, useDelayRender, useRemotionEnvironment, useVideoConfig, } from 'remotion';
+ import { Internals, Loop, random, useCurrentFrame, useDelayRender, useRemotionEnvironment, useVideoConfig, } from 'remotion';
+ import { useMaxMediaCacheSize } from '../caches';
  import { applyVolume } from '../convert-audiodata/apply-volume';
  import { TARGET_SAMPLE_RATE } from '../convert-audiodata/resample-audiodata';
  import { frameForVolumeProp } from '../looped-frame';
@@ -24,11 +25,16 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
  sequenceContext?.durationInFrames,
  ]);
  const environment = useRemotionEnvironment();
- const { delayRender, continueRender } = useDelayRender();
+ const { delayRender, continueRender, cancelRender } = useDelayRender();
  const canvasRef = useRef(null);
  const [replaceWithOffthreadVideo, setReplaceWithOffthreadVideo] = useState(false);
  const audioEnabled = Internals.useAudioEnabled();
  const videoEnabled = Internals.useVideoEnabled();
+ const maxCacheSize = useMaxMediaCacheSize(logLevel);
+ const [error, setError] = useState(null);
+ if (error) {
+ throw error;
+ }
  useLayoutEffect(() => {
  if (!canvasRef.current) {
  return;
@@ -36,6 +42,9 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
  if (replaceWithOffthreadVideo) {
  return;
  }
+ if (!canvasRef.current?.getContext) {
+ return setError(new Error('Canvas does not have .getContext() method available. This could be because <Video> was mounted inside an <svg> tag.'));
+ }
  const timestamp = frame / fps;
  const durationInSeconds = 1 / fps;
  const newHandle = delayRender(`Extracting frame at time ${timestamp}`, {
@@ -65,6 +74,7 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
  trimAfter: trimAfterValue,
  trimBefore: trimBeforeValue,
  fps,
+ maxCacheSize,
  })
  .then((result) => {
  if (result.type === 'unknown-container-format') {
@@ -164,8 +174,8 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
  }
  continueRender(newHandle);
  })
- .catch((error) => {
- cancelRender(error);
+ .catch((err) => {
+ cancelRender(err);
  });
  return () => {
  continueRender(newHandle);
@@ -200,6 +210,8 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
  trimBeforeValue,
  audioEnabled,
  videoEnabled,
+ maxCacheSize,
+ cancelRender,
  ]);
  const classNameValue = useMemo(() => {
  return [Internals.OBJECTFIT_CONTAIN_CLASS_NAME, className]
@@ -212,7 +224,9 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
  showInTimeline: false, crossOrigin: undefined, onAutoPlayError: () => undefined, pauseWhenBuffering: false, trimAfter: trimAfterValue, trimBefore: trimBeforeValue, useWebAudioApi: false, startFrom: undefined, endAt: undefined, stack: stack, _remotionInternalNativeLoopPassed: false }));
  if (loop) {
  if (!replaceWithOffthreadVideo.durationInSeconds) {
- cancelRender(new Error(`Cannot render video ${src}: @remotion/media was unable to render, and fell back to <OffthreadVideo>. Also, "loop" was set, but <OffthreadVideo> does not support looping and @remotion/media could also not determine the duration of the video.`));
+ const err = new Error(`Cannot render video ${src}: @remotion/media was unable to render, and fell back to <OffthreadVideo>. Also, "loop" was set, but <OffthreadVideo> does not support looping and @remotion/media could also not determine the duration of the video.`);
+ cancelRender(err);
+ throw err;
  }
  return (_jsx(Loop, { layout: "none", durationInFrames: Internals.calculateMediaDuration({
  trimAfter: trimAfterValue,
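In `VideoForRendering`, `cancelRender` now comes from the `useDelayRender()` hook instead of the top-level `remotion` import, and failures detected inside the layout effect (for example mounting the component inside an `<svg>`, where the ref is not a real `<canvas>`) are stored in state and re-thrown on the next render. A generic sketch of that state-then-throw pattern, not the actual component:

```tsx
// An error found inside useLayoutEffect cannot be thrown from the effect
// itself, so it is stored in state and thrown during render, where the
// render pipeline (or an error boundary) can catch it.
import {useLayoutEffect, useRef, useState} from 'react';

const CanvasProbe = () => {
  const canvasRef = useRef<HTMLCanvasElement>(null);
  const [error, setError] = useState<Error | null>(null);
  if (error) {
    throw error; // surfaces the effect-time failure to the renderer
  }

  useLayoutEffect(() => {
    if (!canvasRef.current?.getContext) {
      // e.g. mounted inside an <svg>, so the ref is an SVGElement, not a canvas
      setError(new Error('Canvas does not have .getContext() available'));
    }
  }, []);

  return <canvas ref={canvasRef} />;
};

export default CanvasProbe;
```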
@@ -16,7 +16,7 @@ export type ExtractFrameViaBroadcastChannelResult = {
  } | {
  type: 'unknown-container-format';
  };
- export declare const extractFrameViaBroadcastChannel: ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, isClientSideRendering, loop, audioStreamIndex, trimAfter, trimBefore, fps, }: {
+ export declare const extractFrameViaBroadcastChannel: ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, isClientSideRendering, loop, audioStreamIndex, trimAfter, trimBefore, fps, maxCacheSize, }: {
  src: string;
  timeInSeconds: number;
  durationInSeconds: number;
@@ -30,4 +30,5 @@ export declare const extractFrameViaBroadcastChannel: ({ src, timeInSeconds, log
  trimAfter: number | undefined;
  trimBefore: number | undefined;
  fps: number;
+ maxCacheSize: number;
  }) => Promise<ExtractFrameViaBroadcastChannelResult>;
@@ -20,6 +20,7 @@ if (typeof window !== 'undefined' &&
  trimAfter: data.trimAfter,
  trimBefore: data.trimBefore,
  fps: data.fps,
+ maxCacheSize: data.maxCacheSize,
  });
  if (result.type === 'cannot-decode') {
  const cannotDecodeResponse = {
@@ -83,7 +84,7 @@ if (typeof window !== 'undefined' &&
  }
  });
  }
- export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, isClientSideRendering, loop, audioStreamIndex, trimAfter, trimBefore, fps, }) => {
+ export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, isClientSideRendering, loop, audioStreamIndex, trimAfter, trimBefore, fps, maxCacheSize, }) => {
  if (isClientSideRendering || window.remotion_isMainTab) {
  return extractFrameAndAudio({
  logLevel,
@@ -98,6 +99,7 @@ export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel,
  trimAfter,
  trimBefore,
  fps,
+ maxCacheSize,
  });
  }
  const requestId = crypto.randomUUID();
@@ -172,6 +174,7 @@ export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel,
  trimAfter,
  trimBefore,
  fps,
+ maxCacheSize,
  };
  window.remotion_broadcastChannel.postMessage(request);
  let timeoutId;
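The broadcast-channel variant forwards `maxCacheSize` both when it can extract directly in the main tab and when it has to relay the request over the channel. For context, a simplified sketch of the request/response pattern this file uses — a non-main tab posts a request tagged with `crypto.randomUUID()` and waits for the matching reply or a timeout. The channel name and message shapes below are assumptions for illustration only:

```ts
// Simplified request/response over a BroadcastChannel with a timeout.
const channel = new BroadcastChannel('remotion-media'); // hypothetical channel name

type ChannelRequest = {type: 'request'; requestId: string; payload: unknown};
type ChannelResponse = {type: 'response'; requestId: string; payload: unknown};

const requestOverChannel = (payload: unknown, timeoutMs: number) => {
  const requestId = crypto.randomUUID();

  return new Promise<unknown>((resolve, reject) => {
    let timeoutId: ReturnType<typeof setTimeout> | undefined;

    const onMessage = (event: MessageEvent) => {
      const data = event.data as ChannelResponse;
      // Only resolve on the response that matches our request id.
      if (data.type === 'response' && data.requestId === requestId) {
        clearTimeout(timeoutId);
        channel.removeEventListener('message', onMessage);
        resolve(data.payload);
      }
    };

    channel.addEventListener('message', onMessage);
    channel.postMessage({type: 'request', requestId, payload} satisfies ChannelRequest);

    // Give up if no tab answers in time.
    timeoutId = setTimeout(() => {
      channel.removeEventListener('message', onMessage);
      reject(new Error(`No response after ${timeoutMs}ms`));
    }, timeoutMs);
  });
};
```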
@@ -22,8 +22,9 @@ type ExtractFrameParams = {
  trimBefore: number | undefined;
  playbackRate: number;
  fps: number;
+ maxCacheSize: number;
  };
- declare const extractFrameInternal: ({ src, timeInSeconds: unloopedTimeInSeconds, logLevel, loop, trimAfter, trimBefore, playbackRate, fps, }: ExtractFrameParams) => Promise<ExtractFrameResult>;
+ declare const extractFrameInternal: ({ src, timeInSeconds: unloopedTimeInSeconds, logLevel, loop, trimAfter, trimBefore, playbackRate, fps, maxCacheSize, }: ExtractFrameParams) => Promise<ExtractFrameResult>;
  type ExtractFrameReturnType = Awaited<ReturnType<typeof extractFrameInternal>>;
  export declare const extractFrame: (params: ExtractFrameParams) => Promise<ExtractFrameReturnType>;
  export {};
@@ -1,7 +1,7 @@
  import { keyframeManager } from '../caches';
  import { getSink } from '../get-sink';
  import { getTimeInSeconds } from '../get-time-in-seconds';
- const extractFrameInternal = async ({ src, timeInSeconds: unloopedTimeInSeconds, logLevel, loop, trimAfter, trimBefore, playbackRate, fps, }) => {
+ const extractFrameInternal = async ({ src, timeInSeconds: unloopedTimeInSeconds, logLevel, loop, trimAfter, trimBefore, playbackRate, fps, maxCacheSize, }) => {
  const sink = await getSink(src, logLevel);
  const video = await sink.getVideo();
  if (video === 'no-video-track') {
@@ -41,6 +41,7 @@ const extractFrameInternal = async ({ src, timeInSeconds: unloopedTimeInSeconds,
  timestamp: timeInSeconds,
  src,
  logLevel,
+ maxCacheSize,
  });
  if (keyframeBank === 'has-alpha') {
  return {
@@ -68,21 +68,15 @@ export const makeKeyframeBank = ({ startTimestampInSeconds, endTimestampInSecond
  };
  const getFrameFromTimestamp = async (timestampInSeconds) => {
  lastUsed = Date.now();
- // Videos may start slightly after timestamp 0 due to encoding, but if the requested timestamp is too far before the bank start, something is likely wrong.
- const maxClampToleranceInSeconds = 0.1;
- // If the requested timestamp is before the start of this bank, clamp it to the start if within tolerance. This handles videos that don't start at timestamp 0.
- // For example, requesting frame at 0sec when video starts at 0.04sec should return the frame at 0.04sec.
+ // If the requested timestamp is before the start of this bank, clamp it to the start.
+ // This matches Chrome's behavior: render the first available frame rather than showing black.
+ // Videos don't always start at timestamp 0 due to encoding artifacts, container format quirks,
+ // and keyframe positioning. Users have no control over this, so we clamp to the first frame.
  // Test case: https://github.com/remotion-dev/remotion/issues/5915
  let adjustedTimestamp = timestampInSeconds;
  if (roundTo4Digits(timestampInSeconds) <
  roundTo4Digits(startTimestampInSeconds)) {
- const differenceInSeconds = startTimestampInSeconds - timestampInSeconds;
- if (differenceInSeconds <= maxClampToleranceInSeconds) {
- adjustedTimestamp = startTimestampInSeconds;
- }
- else {
- return Promise.reject(new Error(`Timestamp is before start timestamp (requested: ${timestampInSeconds}sec, start: ${startTimestampInSeconds}sec, difference: ${differenceInSeconds.toFixed(3)}sec exceeds tolerance of ${maxClampToleranceInSeconds}sec)`));
- }
+ adjustedTimestamp = startTimestampInSeconds;
  }
  if (roundTo4Digits(adjustedTimestamp) > roundTo4Digits(endTimestampInSeconds)) {
  return Promise.reject(new Error(`Timestamp is after end timestamp (requested: ${timestampInSeconds}sec, end: ${endTimestampInSeconds}sec)`));
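The keyframe-bank change drops the 0.1-second tolerance: any timestamp requested before the bank's first sample is now clamped to that first sample, mirroring Chrome's behavior of showing the first available frame instead of black. A standalone sketch of the new rule (the real code lives inside `makeKeyframeBank` and also validates against the end timestamp; `roundTo4Digits` is assumed to round to four decimal places, matching its use above):

```ts
const roundTo4Digits = (n: number) => Math.round(n * 10_000) / 10_000;

const clampToBankStart = (
  requestedTimestampInSeconds: number,
  startTimestampInSeconds: number,
): number => {
  // Before 4.0.376, only requests within 0.1s of the start were clamped and
  // anything earlier was rejected; now every earlier request snaps to the
  // first available frame.
  if (
    roundTo4Digits(requestedTimestampInSeconds) <
    roundTo4Digits(startTimestampInSeconds)
  ) {
    return startTimestampInSeconds;
  }
  return requestedTimestampInSeconds;
};

// A video whose first sample sits at 0.04s:
clampToBankStart(0, 0.04); // -> 0.04 (clamped to the first frame)
clampToBankStart(0.5, 0.04); // -> 0.5 (unchanged)
```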
@@ -2,12 +2,13 @@ import type { EncodedPacketSink, VideoSampleSink } from 'mediabunny';
  import { type LogLevel } from 'remotion';
  import { type KeyframeBank } from './keyframe-bank';
  export declare const makeKeyframeManager: () => {
- requestKeyframeBank: ({ packetSink, timestamp, videoSampleSink, src, logLevel, }: {
+ requestKeyframeBank: ({ packetSink, timestamp, videoSampleSink, src, logLevel, maxCacheSize, }: {
  packetSink: EncodedPacketSink;
  timestamp: number;
  videoSampleSink: VideoSampleSink;
  src: string;
  logLevel: LogLevel;
+ maxCacheSize: number;
  }) => Promise<KeyframeBank | "has-alpha" | null>;
  getCacheStats: () => Promise<{
  count: number;
@@ -1,6 +1,6 @@
  import { Internals } from 'remotion';
  import { canBrowserUseWebGl2 } from '../browser-can-use-webgl2';
- import { getMaxVideoCacheSize, getTotalCacheStats, SAFE_BACK_WINDOW_IN_SECONDS, } from '../caches';
+ import { getTotalCacheStats, SAFE_BACK_WINDOW_IN_SECONDS } from '../caches';
  import { renderTimestampRange } from '../render-timestamp-range';
  import { getFramesSinceKeyframe } from './get-frames-since-keyframe';
  export const makeKeyframeManager = () => {
@@ -75,9 +75,8 @@ export const makeKeyframeManager = () => {
  }
  return { finish: false };
  };
- const ensureToStayUnderMaxCacheSize = async (logLevel) => {
+ const ensureToStayUnderMaxCacheSize = async (logLevel, maxCacheSize) => {
  let cacheStats = await getTotalCacheStats();
- const maxCacheSize = getMaxVideoCacheSize(logLevel);
  while (cacheStats.totalSize > maxCacheSize) {
  const { finish } = await deleteOldestKeyframeBank(logLevel);
  if (finish) {
@@ -161,8 +160,8 @@ export const makeKeyframeManager = () => {
  addKeyframeBank({ src, bank: replacementKeybank, startTimestampInSeconds });
  return replacementKeybank;
  };
- const requestKeyframeBank = async ({ packetSink, timestamp, videoSampleSink, src, logLevel, }) => {
- await ensureToStayUnderMaxCacheSize(logLevel);
+ const requestKeyframeBank = async ({ packetSink, timestamp, videoSampleSink, src, logLevel, maxCacheSize, }) => {
+ await ensureToStayUnderMaxCacheSize(logLevel, maxCacheSize);
  await clearKeyframeBanksBeforeTime({
  timestampInSeconds: timestamp,
  src,
@@ -190,13 +189,14 @@ export const makeKeyframeManager = () => {
  };
  let queue = Promise.resolve(undefined);
  return {
- requestKeyframeBank: ({ packetSink, timestamp, videoSampleSink, src, logLevel, }) => {
+ requestKeyframeBank: ({ packetSink, timestamp, videoSampleSink, src, logLevel, maxCacheSize, }) => {
  queue = queue.then(() => requestKeyframeBank({
  packetSink,
  timestamp,
  videoSampleSink,
  src,
  logLevel,
+ maxCacheSize,
  }));
  return queue;
  },
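In the keyframe manager, `ensureToStayUnderMaxCacheSize` no longer looks the limit up via `getMaxVideoCacheSize(logLevel)`; the budget is passed down from `requestKeyframeBank`, which receives it from the component via `useMaxMediaCacheSize`. The enforcement itself is an evict-oldest loop. A generic sketch of that pattern, under assumed cache and entry shapes (the actual implementation tracks keyframe banks per source):

```ts
// Keep deleting the least-recently-used entry until the total size fits the
// caller-provided budget.
type CacheEntry = {sizeInBytes: number; lastUsed: number};

const evictUntilUnderBudget = (
  cache: Map<string, CacheEntry>,
  maxCacheSize: number, // assumed byte budget, as in the diff above
) => {
  const totalSize = () =>
    [...cache.values()].reduce((sum, entry) => sum + entry.sizeInBytes, 0);

  while (totalSize() > maxCacheSize && cache.size > 0) {
    // Find the entry that was used longest ago and drop it.
    let oldestKey: string | null = null;
    let oldestUsed = Infinity;
    for (const [key, entry] of cache) {
      if (entry.lastUsed < oldestUsed) {
        oldestUsed = entry.lastUsed;
        oldestKey = key;
      }
    }
    if (oldestKey === null) {
      break; // nothing left to evict
    }
    cache.delete(oldestKey);
  }
};
```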
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@remotion/media",
- "version": "4.0.375",
+ "version": "4.0.376",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "module": "dist/esm/index.mjs",
@@ -22,19 +22,19 @@
  },
  "dependencies": {
  "mediabunny": "1.24.5",
- "remotion": "4.0.375"
+ "remotion": "4.0.376"
  },
  "peerDependencies": {
  "react": ">=16.8.0",
  "react-dom": ">=16.8.0"
  },
  "devDependencies": {
- "@remotion/eslint-config-internal": "4.0.375",
+ "@remotion/eslint-config-internal": "4.0.376",
  "@vitest/browser-webdriverio": "4.0.7",
  "eslint": "9.19.0",
  "react": "19.0.0",
  "react-dom": "19.0.0",
- "vitest": "4.0.7",
+ "vitest": "4.0.9",
  "webdriverio": "9.19.2"
  },
  "keywords": [],
@@ -1,14 +0,0 @@
- import type { VideoSample } from 'mediabunny';
- /**
- * Once we convert a VideoSample to a VideoFrame, we lose the rotation
- * https://github.com/Vanilagy/mediabunny/pull/212
- * This will be fixed in Mediabunny v2, but for now, we need to manually fix it.
- *
- * I'm actually wondering if your PR is actually a breaking change
- I would say it kinda is actually
- Because, previously only the VideoSample had rotation but the video frame you got from .toVideoFrame() was unrotated. Now, the resulting VideoFrame will be rotated, so drawing it to a canvas will behave differently. To me, this is a breaking change
- People's old code that manually handled the rotation will break here
- So I think this is actually a PR for v2
- And for Remotion, you can do a temporary workaround fix by cloning the VideoFrame and overriding rotation that way, then closing the old frame, then transferring the cloned frame
- */
- export declare const toVideoFrameFixedRotation: (videoSample: VideoSample) => VideoFrame;
@@ -1,41 +0,0 @@
- /**
- * Once we convert a VideoSample to a VideoFrame, we lose the rotation
- * https://github.com/Vanilagy/mediabunny/pull/212
- * This will be fixed in Mediabunny v2, but for now, we need to manually fix it.
- *
- * I'm actually wondering if your PR is actually a breaking change
- I would say it kinda is actually
- Because, previously only the VideoSample had rotation but the video frame you got from .toVideoFrame() was unrotated. Now, the resulting VideoFrame will be rotated, so drawing it to a canvas will behave differently. To me, this is a breaking change
- People's old code that manually handled the rotation will break here
- So I think this is actually a PR for v2
- And for Remotion, you can do a temporary workaround fix by cloning the VideoFrame and overriding rotation that way, then closing the old frame, then transferring the cloned frame
- */
- export const toVideoFrameFixedRotation = (videoSample) => {
- const frame = videoSample.toVideoFrame();
- if (videoSample.rotation === 0) {
- return frame;
- }
- const canvas = new OffscreenCanvas(width, height);
- const ctx = canvas.getContext('2d');
- if (!ctx) {
- throw new Error('Could not get 2d context');
- }
- canvas.width = width;
- canvas.height = height;
- if (canvasRotationToApply === 90) {
- ctx.translate(width, 0);
- }
- else if (canvasRotationToApply === 180) {
- ctx.translate(width, height);
- }
- else if (canvasRotationToApply === 270) {
- ctx.translate(0, height);
- }
- console.log('sample rotation', videoSample.rotation);
- // @ts-expect-error - rotation is not a known property of VideoFrameInit
- const fixedFrame = new VideoFrame(frame, { rotation: videoSample.rotation });
- frame.close();
- // @ts-expect-error - rotation is not a known property of VideoFrameInit
- console.log('fixed frame rotation', fixedFrame.rotation);
- return fixedFrame;
- };