@remotion/media 4.0.374 → 4.0.376

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  import type { LogLevel } from 'remotion';
  import type { ExtractFrameViaBroadcastChannelResult } from './video-extraction/extract-frame-via-broadcast-channel';
- export declare const extractFrameAndAudio: ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, loop, audioStreamIndex, trimAfter, trimBefore, fps, }: {
+ export declare const extractFrameAndAudio: ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, loop, audioStreamIndex, trimAfter, trimBefore, fps, maxCacheSize, }: {
  src: string;
  timeInSeconds: number;
  logLevel: LogLevel;
@@ -13,4 +13,5 @@ export declare const extractFrameAndAudio: ({ src, timeInSeconds, logLevel, dura
  trimAfter: number | undefined;
  trimBefore: number | undefined;
  fps: number;
+ maxCacheSize: number;
  }) => Promise<ExtractFrameViaBroadcastChannelResult>;
@@ -1,7 +1,8 @@
  import { extractAudio } from './audio-extraction/extract-audio';
  import { isNetworkError } from './is-network-error';
  import { extractFrame } from './video-extraction/extract-frame';
- export const extractFrameAndAudio = async ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, loop, audioStreamIndex, trimAfter, trimBefore, fps, }) => {
+ import { rotateFrame } from './video-extraction/rotate-frame';
+ export const extractFrameAndAudio = async ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, loop, audioStreamIndex, trimAfter, trimBefore, fps, maxCacheSize, }) => {
  try {
  const [frame, audio] = await Promise.all([
  includeVideo
@@ -14,6 +15,7 @@ export const extractFrameAndAudio = async ({ src, timeInSeconds, logLevel, durat
  playbackRate,
  trimBefore,
  fps,
+ maxCacheSize,
  })
  : null,
  includeAudio
@@ -28,6 +30,7 @@ export const extractFrameAndAudio = async ({ src, timeInSeconds, logLevel, durat
  trimAfter,
  fps,
  trimBefore,
+ maxCacheSize,
  })
  : null,
  ]);
@@ -61,9 +64,20 @@ export const extractFrameAndAudio = async ({ src, timeInSeconds, logLevel, durat
  durationInSeconds: frame?.type === 'success' ? frame.durationInSeconds : null,
  };
  }
+ if (!frame?.frame) {
+ return {
+ type: 'success',
+ frame: null,
+ audio: audio?.data ?? null,
+ durationInSeconds: audio?.durationInSeconds ?? null,
+ };
+ }
  return {
  type: 'success',
- frame: frame?.frame?.toVideoFrame() ?? null,
+ frame: await rotateFrame({
+ frame: frame.frame.toVideoFrame(),
+ rotation: frame.frame.rotation,
+ }),
  audio: audio?.data ?? null,
  durationInSeconds: audio?.durationInSeconds ?? null,
  };
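
The new './video-extraction/rotate-frame' module referenced above is not included in this diff, so its actual implementation is unknown here. As a hedged illustration only, a rotation step called like this could look roughly like the following TypeScript sketch: it assumes the helper takes the unrotated VideoFrame plus the rotation read from the mediabunny VideoSample, draws it onto an OffscreenCanvas with the rotation applied, and returns an ImageBitmap (return type inferred from the updated ExtractFrameViaBroadcastChannelResult; the name rotateFrameSketch and the whole body are assumptions, not the package's code):

// Hypothetical sketch -- not the actual './video-extraction/rotate-frame'.
export const rotateFrameSketch = async ({
  frame,
  rotation,
}: {
  frame: VideoFrame;
  rotation: number; // 0, 90, 180 or 270, as read from the VideoSample
}): Promise<ImageBitmap> => {
  const width = frame.displayWidth;
  const height = frame.displayHeight;
  if (rotation === 0) {
    const bitmap = await createImageBitmap(frame);
    frame.close();
    return bitmap;
  }
  // 90° and 270° rotations swap the output dimensions.
  const swapped = rotation === 90 || rotation === 270;
  const canvas = new OffscreenCanvas(swapped ? height : width, swapped ? width : height);
  const ctx = canvas.getContext('2d');
  if (!ctx) {
    throw new Error('Could not get 2d context');
  }
  // Rotate around the canvas center, then draw the frame centered.
  ctx.translate(canvas.width / 2, canvas.height / 2);
  ctx.rotate((rotation * Math.PI) / 180);
  ctx.drawImage(frame, -width / 2, -height / 2);
  frame.close();
  return canvas.transferToImageBitmap();
};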
@@ -1,6 +1,7 @@
  import { jsx as _jsx } from "react/jsx-runtime";
  import { useContext, useLayoutEffect, useMemo, useRef, useState, } from 'react';
- import { cancelRender, Internals, Loop, random, useCurrentFrame, useDelayRender, useRemotionEnvironment, useVideoConfig, } from 'remotion';
+ import { Internals, Loop, random, useCurrentFrame, useDelayRender, useRemotionEnvironment, useVideoConfig, } from 'remotion';
+ import { useMaxMediaCacheSize } from '../caches';
  import { applyVolume } from '../convert-audiodata/apply-volume';
  import { TARGET_SAMPLE_RATE } from '../convert-audiodata/resample-audiodata';
  import { frameForVolumeProp } from '../looped-frame';
@@ -24,11 +25,16 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
  sequenceContext?.durationInFrames,
  ]);
  const environment = useRemotionEnvironment();
- const { delayRender, continueRender } = useDelayRender();
+ const { delayRender, continueRender, cancelRender } = useDelayRender();
  const canvasRef = useRef(null);
  const [replaceWithOffthreadVideo, setReplaceWithOffthreadVideo] = useState(false);
  const audioEnabled = Internals.useAudioEnabled();
  const videoEnabled = Internals.useVideoEnabled();
+ const maxCacheSize = useMaxMediaCacheSize(logLevel);
+ const [error, setError] = useState(null);
+ if (error) {
+ throw error;
+ }
  useLayoutEffect(() => {
  if (!canvasRef.current) {
  return;
@@ -36,6 +42,9 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
  if (replaceWithOffthreadVideo) {
  return;
  }
+ if (!canvasRef.current?.getContext) {
+ return setError(new Error('Canvas does not have .getContext() method available. This could be because <Video> was mounted inside an <svg> tag.'));
+ }
  const timestamp = frame / fps;
  const durationInSeconds = 1 / fps;
  const newHandle = delayRender(`Extracting frame at time ${timestamp}`, {
@@ -65,6 +74,7 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
  trimAfter: trimAfterValue,
  trimBefore: trimBeforeValue,
  fps,
+ maxCacheSize,
  })
  .then((result) => {
  if (result.type === 'unknown-container-format') {
@@ -120,14 +130,8 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
  if (!context) {
  return;
  }
- context.canvas.width =
- imageBitmap instanceof ImageBitmap
- ? imageBitmap.width
- : imageBitmap.displayWidth;
- context.canvas.height =
- imageBitmap instanceof ImageBitmap
- ? imageBitmap.height
- : imageBitmap.displayHeight;
+ context.canvas.width = imageBitmap.width;
+ context.canvas.height = imageBitmap.height;
  context.canvas.style.aspectRatio = `${context.canvas.width} / ${context.canvas.height}`;
  context.drawImage(imageBitmap, 0, 0);
  imageBitmap.close();
@@ -170,8 +174,8 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
  }
  continueRender(newHandle);
  })
- .catch((error) => {
- cancelRender(error);
+ .catch((err) => {
+ cancelRender(err);
  });
  return () => {
  continueRender(newHandle);
@@ -206,6 +210,8 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
  trimBeforeValue,
  audioEnabled,
  videoEnabled,
+ maxCacheSize,
+ cancelRender,
  ]);
  const classNameValue = useMemo(() => {
  return [Internals.OBJECTFIT_CONTAIN_CLASS_NAME, className]
@@ -218,7 +224,9 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
  showInTimeline: false, crossOrigin: undefined, onAutoPlayError: () => undefined, pauseWhenBuffering: false, trimAfter: trimAfterValue, trimBefore: trimBeforeValue, useWebAudioApi: false, startFrom: undefined, endAt: undefined, stack: stack, _remotionInternalNativeLoopPassed: false }));
  if (loop) {
  if (!replaceWithOffthreadVideo.durationInSeconds) {
- cancelRender(new Error(`Cannot render video ${src}: @remotion/media was unable to render, and fell back to <OffthreadVideo>. Also, "loop" was set, but <OffthreadVideo> does not support looping and @remotion/media could also not determine the duration of the video.`));
+ const err = new Error(`Cannot render video ${src}: @remotion/media was unable to render, and fell back to <OffthreadVideo>. Also, "loop" was set, but <OffthreadVideo> does not support looping and @remotion/media could also not determine the duration of the video.`);
+ cancelRender(err);
+ throw err;
  }
  return (_jsx(Loop, { layout: "none", durationInFrames: Internals.calculateMediaDuration({
  trimAfter: trimAfterValue,
@@ -2,7 +2,7 @@ import { type LogLevel } from 'remotion';
  import type { PcmS16AudioData } from '../convert-audiodata/convert-audiodata';
  export type ExtractFrameViaBroadcastChannelResult = {
  type: 'success';
- frame: ImageBitmap | VideoFrame | null;
+ frame: ImageBitmap | null;
  audio: PcmS16AudioData | null;
  durationInSeconds: number | null;
  } | {
@@ -16,7 +16,7 @@ export type ExtractFrameViaBroadcastChannelResult = {
  } | {
  type: 'unknown-container-format';
  };
- export declare const extractFrameViaBroadcastChannel: ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, isClientSideRendering, loop, audioStreamIndex, trimAfter, trimBefore, fps, }: {
+ export declare const extractFrameViaBroadcastChannel: ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, isClientSideRendering, loop, audioStreamIndex, trimAfter, trimBefore, fps, maxCacheSize, }: {
  src: string;
  timeInSeconds: number;
  durationInSeconds: number;
@@ -30,4 +30,5 @@ export declare const extractFrameViaBroadcastChannel: ({ src, timeInSeconds, log
  trimAfter: number | undefined;
  trimBefore: number | undefined;
  fps: number;
+ maxCacheSize: number;
  }) => Promise<ExtractFrameViaBroadcastChannelResult>;
@@ -20,6 +20,7 @@ if (typeof window !== 'undefined' &&
  trimAfter: data.trimAfter,
  trimBefore: data.trimBefore,
  fps: data.fps,
+ maxCacheSize: data.maxCacheSize,
  });
  if (result.type === 'cannot-decode') {
  const cannotDecodeResponse = {
@@ -56,12 +57,9 @@ if (typeof window !== 'undefined' &&
  return;
  }
  const { frame, audio, durationInSeconds } = result;
- const videoFrame = frame;
- const imageBitmap = videoFrame
- ? await createImageBitmap(videoFrame)
- : null;
- if (videoFrame) {
- videoFrame.close();
+ const imageBitmap = frame ? await createImageBitmap(frame) : null;
+ if (frame) {
+ frame.close();
  }
  const response = {
  type: 'response-success',
@@ -71,7 +69,6 @@ if (typeof window !== 'undefined' &&
  durationInSeconds: durationInSeconds ?? null,
  };
  window.remotion_broadcastChannel.postMessage(response);
- videoFrame?.close();
  }
  catch (error) {
  const response = {
@@ -87,7 +84,7 @@ if (typeof window !== 'undefined' &&
  }
  });
  }
- export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, isClientSideRendering, loop, audioStreamIndex, trimAfter, trimBefore, fps, }) => {
+ export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, isClientSideRendering, loop, audioStreamIndex, trimAfter, trimBefore, fps, maxCacheSize, }) => {
  if (isClientSideRendering || window.remotion_isMainTab) {
  return extractFrameAndAudio({
  logLevel,
@@ -102,6 +99,7 @@ export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel,
  trimAfter,
  trimBefore,
  fps,
+ maxCacheSize,
  });
  }
  const requestId = crypto.randomUUID();
@@ -176,6 +174,7 @@ export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel,
  trimAfter,
  trimBefore,
  fps,
+ maxCacheSize,
  };
  window.remotion_broadcastChannel.postMessage(request);
  let timeoutId;
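
For context on the branch above that does not run in the main tab: the request is tagged with crypto.randomUUID(), posted on window.remotion_broadcastChannel, and the caller waits for the reply carrying the same id (with a timeout, as the `let timeoutId;` line suggests). A minimal, generic sketch of that correlation pattern follows; the requestId field name, the payload shape, and the 10-second timeout are illustrative rather than the package's actual message types:

// Generic request/response correlation over a BroadcastChannel (illustrative only).
const requestOverChannel = <T>(
  channel: BroadcastChannel,
  payload: Record<string, unknown>,
): Promise<T> => {
  const requestId = crypto.randomUUID();
  return new Promise<T>((resolve, reject) => {
    let timeoutId: ReturnType<typeof setTimeout>;
    const onMessage = (event: MessageEvent) => {
      // Ignore responses that belong to other pending requests.
      if ((event.data as {requestId?: string} | null)?.requestId !== requestId) {
        return;
      }
      channel.removeEventListener('message', onMessage);
      clearTimeout(timeoutId);
      resolve(event.data as T);
    };
    channel.addEventListener('message', onMessage);
    channel.postMessage({...payload, requestId});
    timeoutId = setTimeout(() => {
      channel.removeEventListener('message', onMessage);
      reject(new Error('Timed out waiting for a broadcast channel response'));
    }, 10_000);
  });
};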
@@ -22,8 +22,9 @@ type ExtractFrameParams = {
  trimBefore: number | undefined;
  playbackRate: number;
  fps: number;
+ maxCacheSize: number;
  };
- declare const extractFrameInternal: ({ src, timeInSeconds: unloopedTimeInSeconds, logLevel, loop, trimAfter, trimBefore, playbackRate, fps, }: ExtractFrameParams) => Promise<ExtractFrameResult>;
+ declare const extractFrameInternal: ({ src, timeInSeconds: unloopedTimeInSeconds, logLevel, loop, trimAfter, trimBefore, playbackRate, fps, maxCacheSize, }: ExtractFrameParams) => Promise<ExtractFrameResult>;
  type ExtractFrameReturnType = Awaited<ReturnType<typeof extractFrameInternal>>;
  export declare const extractFrame: (params: ExtractFrameParams) => Promise<ExtractFrameReturnType>;
  export {};
@@ -1,7 +1,7 @@
  import { keyframeManager } from '../caches';
  import { getSink } from '../get-sink';
  import { getTimeInSeconds } from '../get-time-in-seconds';
- const extractFrameInternal = async ({ src, timeInSeconds: unloopedTimeInSeconds, logLevel, loop, trimAfter, trimBefore, playbackRate, fps, }) => {
+ const extractFrameInternal = async ({ src, timeInSeconds: unloopedTimeInSeconds, logLevel, loop, trimAfter, trimBefore, playbackRate, fps, maxCacheSize, }) => {
  const sink = await getSink(src, logLevel);
  const video = await sink.getVideo();
  if (video === 'no-video-track') {
@@ -41,6 +41,7 @@ const extractFrameInternal = async ({ src, timeInSeconds: unloopedTimeInSeconds,
  timestamp: timeInSeconds,
  src,
  logLevel,
+ maxCacheSize,
  });
  if (keyframeBank === 'has-alpha') {
  return {
@@ -68,21 +68,15 @@ export const makeKeyframeBank = ({ startTimestampInSeconds, endTimestampInSecond
  };
  const getFrameFromTimestamp = async (timestampInSeconds) => {
  lastUsed = Date.now();
- // Videos may start slightly after timestamp 0 due to encoding, but if the requested timestamp is too far before the bank start, something is likely wrong.
- const maxClampToleranceInSeconds = 0.1;
- // If the requested timestamp is before the start of this bank, clamp it to the start if within tolerance. This handles videos that don't start at timestamp 0.
- // For example, requesting frame at 0sec when video starts at 0.04sec should return the frame at 0.04sec.
+ // If the requested timestamp is before the start of this bank, clamp it to the start.
+ // This matches Chrome's behavior: render the first available frame rather than showing black.
+ // Videos don't always start at timestamp 0 due to encoding artifacts, container format quirks,
+ // and keyframe positioning. Users have no control over this, so we clamp to the first frame.
  // Test case: https://github.com/remotion-dev/remotion/issues/5915
  let adjustedTimestamp = timestampInSeconds;
  if (roundTo4Digits(timestampInSeconds) <
  roundTo4Digits(startTimestampInSeconds)) {
- const differenceInSeconds = startTimestampInSeconds - timestampInSeconds;
- if (differenceInSeconds <= maxClampToleranceInSeconds) {
- adjustedTimestamp = startTimestampInSeconds;
- }
- else {
- return Promise.reject(new Error(`Timestamp is before start timestamp (requested: ${timestampInSeconds}sec, start: ${startTimestampInSeconds}sec, difference: ${differenceInSeconds.toFixed(3)}sec exceeds tolerance of ${maxClampToleranceInSeconds}sec)`));
- }
+ adjustedTimestamp = startTimestampInSeconds;
  }
  if (roundTo4Digits(adjustedTimestamp) > roundTo4Digits(endTimestampInSeconds)) {
  return Promise.reject(new Error(`Timestamp is after end timestamp (requested: ${timestampInSeconds}sec, end: ${endTimestampInSeconds}sec)`));
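
The behavioral change in this hunk: previously a requested timestamp earlier than the bank's first frame was clamped only within a 0.1-second tolerance and rejected beyond it; now any earlier timestamp is clamped to the first frame. A small self-contained sketch of the new logic, where roundTo4Digits is a stand-in for the helper already used above (its exact definition here is an assumption):

// Sketch of the clamping now performed in getFrameFromTimestamp.
const roundTo4Digits = (value: number) => Math.round(value * 10000) / 10000;

const clampToBankStart = (requestedInSeconds: number, startTimestampInSeconds: number) => {
  if (roundTo4Digits(requestedInSeconds) < roundTo4Digits(startTimestampInSeconds)) {
    // Render the first available frame instead of failing or showing black.
    return startTimestampInSeconds;
  }
  return requestedInSeconds;
};

// A video whose first frame sits at 0.04sec: requesting 0sec now yields the 0.04sec frame.
// Previously, only differences up to 0.1sec were clamped; larger ones were rejected.
clampToBankStart(0, 0.04); // -> 0.04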
@@ -2,12 +2,13 @@ import type { EncodedPacketSink, VideoSampleSink } from 'mediabunny';
  import { type LogLevel } from 'remotion';
  import { type KeyframeBank } from './keyframe-bank';
  export declare const makeKeyframeManager: () => {
- requestKeyframeBank: ({ packetSink, timestamp, videoSampleSink, src, logLevel, }: {
+ requestKeyframeBank: ({ packetSink, timestamp, videoSampleSink, src, logLevel, maxCacheSize, }: {
  packetSink: EncodedPacketSink;
  timestamp: number;
  videoSampleSink: VideoSampleSink;
  src: string;
  logLevel: LogLevel;
+ maxCacheSize: number;
  }) => Promise<KeyframeBank | "has-alpha" | null>;
  getCacheStats: () => Promise<{
  count: number;
@@ -1,6 +1,6 @@
  import { Internals } from 'remotion';
  import { canBrowserUseWebGl2 } from '../browser-can-use-webgl2';
- import { getMaxVideoCacheSize, getTotalCacheStats, SAFE_BACK_WINDOW_IN_SECONDS, } from '../caches';
+ import { getTotalCacheStats, SAFE_BACK_WINDOW_IN_SECONDS } from '../caches';
  import { renderTimestampRange } from '../render-timestamp-range';
  import { getFramesSinceKeyframe } from './get-frames-since-keyframe';
  export const makeKeyframeManager = () => {
@@ -75,9 +75,8 @@ export const makeKeyframeManager = () => {
  }
  return { finish: false };
  };
- const ensureToStayUnderMaxCacheSize = async (logLevel) => {
+ const ensureToStayUnderMaxCacheSize = async (logLevel, maxCacheSize) => {
  let cacheStats = await getTotalCacheStats();
- const maxCacheSize = getMaxVideoCacheSize(logLevel);
  while (cacheStats.totalSize > maxCacheSize) {
  const { finish } = await deleteOldestKeyframeBank(logLevel);
  if (finish) {
@@ -161,8 +160,8 @@ export const makeKeyframeManager = () => {
  addKeyframeBank({ src, bank: replacementKeybank, startTimestampInSeconds });
  return replacementKeybank;
  };
- const requestKeyframeBank = async ({ packetSink, timestamp, videoSampleSink, src, logLevel, }) => {
- await ensureToStayUnderMaxCacheSize(logLevel);
+ const requestKeyframeBank = async ({ packetSink, timestamp, videoSampleSink, src, logLevel, maxCacheSize, }) => {
+ await ensureToStayUnderMaxCacheSize(logLevel, maxCacheSize);
  await clearKeyframeBanksBeforeTime({
  timestampInSeconds: timestamp,
  src,
@@ -190,13 +189,14 @@ export const makeKeyframeManager = () => {
  };
  let queue = Promise.resolve(undefined);
  return {
- requestKeyframeBank: ({ packetSink, timestamp, videoSampleSink, src, logLevel, }) => {
+ requestKeyframeBank: ({ packetSink, timestamp, videoSampleSink, src, logLevel, maxCacheSize, }) => {
  queue = queue.then(() => requestKeyframeBank({
  packetSink,
  timestamp,
  videoSampleSink,
  src,
  logLevel,
+ maxCacheSize,
  }));
  return queue;
  },
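
ensureToStayUnderMaxCacheSize now receives the limit from the caller (threaded down from the new maxCacheSize parameter) instead of reading it via getMaxVideoCacheSize. The eviction itself is unchanged: delete the least recently used keyframe bank until the total cache size fits. A self-contained sketch of that pattern, using hypothetical stand-ins for getTotalCacheStats() and deleteOldestKeyframeBank():

// Illustrative eviction loop: not the package's code, but the same shape as
// ensureToStayUnderMaxCacheSize -> deleteOldestKeyframeBank.
type CacheEntry = {sizeInBytes: number; lastUsed: number};

const evictUntilUnderLimit = (entries: Map<string, CacheEntry>, maxCacheSize: number) => {
  const totalSize = () =>
    [...entries.values()].reduce((sum, entry) => sum + entry.sizeInBytes, 0);
  while (entries.size > 0 && totalSize() > maxCacheSize) {
    // Drop the entry that was used least recently.
    const [oldestKey] = [...entries.entries()].sort(
      (a, b) => a[1].lastUsed - b[1].lastUsed,
    )[0];
    entries.delete(oldestKey);
  }
};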
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@remotion/media",
- "version": "4.0.374",
+ "version": "4.0.376",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "module": "dist/esm/index.mjs",
@@ -21,20 +21,20 @@
  "make": "tsc -d && bun --env-file=../.env.bundle bundle.ts"
  },
  "dependencies": {
- "mediabunny": "1.24.3",
- "remotion": "4.0.374"
+ "mediabunny": "1.24.5",
+ "remotion": "4.0.376"
  },
  "peerDependencies": {
  "react": ">=16.8.0",
  "react-dom": ">=16.8.0"
  },
  "devDependencies": {
- "@remotion/eslint-config-internal": "4.0.374",
+ "@remotion/eslint-config-internal": "4.0.376",
  "@vitest/browser-webdriverio": "4.0.7",
  "eslint": "9.19.0",
  "react": "19.0.0",
  "react-dom": "19.0.0",
- "vitest": "4.0.7",
+ "vitest": "4.0.9",
  "webdriverio": "9.19.2"
  },
  "keywords": [],
@@ -1,14 +0,0 @@
1
- import type { VideoSample } from 'mediabunny';
2
- /**
3
- * Once we convert a VideoSample to a VideoFrame, we lose the rotation
4
- * https://github.com/Vanilagy/mediabunny/pull/212
5
- * This will be fixed in Mediabunny v2, but for now, we need to manually fix it.
6
- *
7
- * I'm actually wondering if your PR is actually a breaking change
8
- I would say it kinda is actually
9
- Because, previously only the VideoSample had rotation but the video frame you got from .toVideoFrame() was unrotated. Now, the resulting VideoFrame will be rotated, so drawing it to a canvas will behave differently. To me, this is a breaking change
10
- People's old code that manually handled the rotation will break here
11
- So I think this is actually a PR for v2
12
- And for Remotion, you can do a temporary workaround fix by cloning the VideoFrame and overriding rotation that way, then closing the old frame, then transferring the cloned frame
13
- */
14
- export declare const toVideoFrameFixedRotation: (videoSample: VideoSample) => VideoFrame;
@@ -1,41 +0,0 @@
1
- /**
2
- * Once we convert a VideoSample to a VideoFrame, we lose the rotation
3
- * https://github.com/Vanilagy/mediabunny/pull/212
4
- * This will be fixed in Mediabunny v2, but for now, we need to manually fix it.
5
- *
6
- * I'm actually wondering if your PR is actually a breaking change
7
- I would say it kinda is actually
8
- Because, previously only the VideoSample had rotation but the video frame you got from .toVideoFrame() was unrotated. Now, the resulting VideoFrame will be rotated, so drawing it to a canvas will behave differently. To me, this is a breaking change
9
- People's old code that manually handled the rotation will break here
10
- So I think this is actually a PR for v2
11
- And for Remotion, you can do a temporary workaround fix by cloning the VideoFrame and overriding rotation that way, then closing the old frame, then transferring the cloned frame
12
- */
13
- export const toVideoFrameFixedRotation = (videoSample) => {
14
- const frame = videoSample.toVideoFrame();
15
- if (videoSample.rotation === 0) {
16
- return frame;
17
- }
18
- const canvas = new OffscreenCanvas(width, height);
19
- const ctx = canvas.getContext('2d');
20
- if (!ctx) {
21
- throw new Error('Could not get 2d context');
22
- }
23
- canvas.width = width;
24
- canvas.height = height;
25
- if (canvasRotationToApply === 90) {
26
- ctx.translate(width, 0);
27
- }
28
- else if (canvasRotationToApply === 180) {
29
- ctx.translate(width, height);
30
- }
31
- else if (canvasRotationToApply === 270) {
32
- ctx.translate(0, height);
33
- }
34
- console.log('sample rotation', videoSample.rotation);
35
- // @ts-expect-error - rotation is not a known property of VideoFrameInit
36
- const fixedFrame = new VideoFrame(frame, { rotation: videoSample.rotation });
37
- frame.close();
38
- // @ts-expect-error - rotation is not a known property of VideoFrameInit
39
- console.log('fixed frame rotation', fixedFrame.rotation);
40
- return fixedFrame;
41
- };
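
The two deleted files above removed the toVideoFrameFixedRotation workaround, superseded by the new rotate-frame module. Note that the deleted implementation referenced undefined width, height, and canvasRotationToApply variables, so it could not have run as written. For clarity, here is a corrected, self-contained sketch of just the clone-and-override idea its comment describes (clone the VideoFrame with the rotation from the VideoSample, then close the original); whether an engine honors the non-standard rotation field in VideoFrameInit is an assumption, as it was in the original:

import type { VideoSample } from 'mediabunny';

// Sketch of the described workaround, not part of the package.
export const toVideoFrameWithRotationSketch = (videoSample: VideoSample): VideoFrame => {
  const frame = videoSample.toVideoFrame();
  if (videoSample.rotation === 0) {
    return frame;
  }
  // @ts-expect-error - rotation is not a known property of VideoFrameInit
  const fixedFrame = new VideoFrame(frame, { rotation: videoSample.rotation });
  frame.close();
  return fixedFrame;
};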