@remotion/media 4.0.428 → 4.0.430
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +7 -7
- package/dist/audio/allow-wait.js +15 -0
- package/dist/audio/audio-for-preview.d.ts +0 -1
- package/dist/audio/audio-for-preview.js +304 -0
- package/dist/audio/audio-for-rendering.js +194 -0
- package/dist/audio/audio-preview-iterator.d.ts +4 -2
- package/dist/audio/audio-preview-iterator.js +176 -0
- package/dist/audio/audio.js +20 -0
- package/dist/audio/props.js +1 -0
- package/dist/audio-extraction/audio-cache.js +66 -0
- package/dist/audio-extraction/audio-iterator.js +132 -0
- package/dist/audio-extraction/audio-manager.js +113 -0
- package/dist/audio-extraction/extract-audio.js +132 -0
- package/dist/audio-iterator-manager.d.ts +10 -9
- package/dist/audio-iterator-manager.js +228 -0
- package/dist/browser-can-use-webgl2.js +13 -0
- package/dist/caches.js +61 -0
- package/dist/calculate-playbacktime.js +4 -0
- package/dist/convert-audiodata/apply-volume.js +17 -0
- package/dist/convert-audiodata/combine-audiodata.js +23 -0
- package/dist/convert-audiodata/convert-audiodata.js +73 -0
- package/dist/convert-audiodata/resample-audiodata.js +94 -0
- package/dist/debug-overlay/preview-overlay.d.ts +9 -7
- package/dist/debug-overlay/preview-overlay.js +42 -0
- package/dist/esm/index.mjs +246 -103
- package/dist/extract-frame-and-audio.js +101 -0
- package/dist/get-sink.js +15 -0
- package/dist/get-time-in-seconds.js +40 -0
- package/dist/helpers/round-to-4-digits.js +4 -0
- package/dist/index.js +12 -0
- package/dist/is-type-of-error.js +20 -0
- package/dist/looped-frame.js +10 -0
- package/dist/media-player.d.ts +9 -5
- package/dist/media-player.js +431 -0
- package/dist/nonce-manager.js +13 -0
- package/dist/prewarm-iterator-for-looping.js +56 -0
- package/dist/render-timestamp-range.js +9 -0
- package/dist/show-in-timeline.js +31 -0
- package/dist/use-media-in-timeline.d.ts +3 -2
- package/dist/use-media-in-timeline.js +103 -0
- package/dist/video/props.js +1 -0
- package/dist/video/video-for-preview.js +331 -0
- package/dist/video/video-for-rendering.js +263 -0
- package/dist/video/video-preview-iterator.js +122 -0
- package/dist/video/video.js +35 -0
- package/dist/video-extraction/add-broadcast-channel-listener.js +125 -0
- package/dist/video-extraction/extract-frame-via-broadcast-channel.js +113 -0
- package/dist/video-extraction/extract-frame.js +85 -0
- package/dist/video-extraction/get-allocation-size.js +6 -0
- package/dist/video-extraction/get-frames-since-keyframe.js +108 -0
- package/dist/video-extraction/keyframe-bank.js +159 -0
- package/dist/video-extraction/keyframe-manager.js +206 -0
- package/dist/video-extraction/remember-actual-matroska-timestamps.js +19 -0
- package/dist/video-extraction/rotate-frame.js +34 -0
- package/dist/video-iterator-manager.js +109 -0
- package/package.json +7 -5
|
@@ -0,0 +1,331 @@
|
|
|
1
|
+
import { jsx as _jsx } from "react/jsx-runtime";
|
|
2
|
+
import { useContext, useEffect, useLayoutEffect, useMemo, useRef, useState, } from 'react';
|
|
3
|
+
import { Html5Video, Internals, useBufferState, useCurrentFrame, useVideoConfig, } from 'remotion';
|
|
4
|
+
import { getTimeInSeconds } from '../get-time-in-seconds';
|
|
5
|
+
import { MediaPlayer } from '../media-player';
|
|
6
|
+
import { useLoopDisplay } from '../show-in-timeline';
|
|
7
|
+
import { useMediaInTimeline } from '../use-media-in-timeline';
|
|
8
|
+
const { useUnsafeVideoConfig, Timeline, SharedAudioContext, useMediaMutedState, useMediaVolumeState, useFrameForVolumeProp, evaluateVolume, warnAboutTooHighVolume, usePreload, SequenceContext, SequenceVisibilityToggleContext, } = Internals;
|
|
9
|
+
// Preview implementation of <Video>: renders decoded frames into a <canvas>
// via a MediaPlayer instance (see ../media-player). If the source cannot be
// handled (unknown container, network error, cannot decode, no tracks), it
// falls back to <Html5Video> — unless `disallowFallbackToOffthreadVideo` is set,
// in which case an Error is thrown instead.
// This component is only mounted while the media should actually be showing;
// the gating happens in VideoForPreview below.
const VideoForPreviewAssertedShowing = ({ src: unpreloadedSrc, style, playbackRate, logLevel, className, muted, volume, loopVolumeCurveBehavior, onVideoFrame, showInTimeline, loop, name, trimAfter, trimBefore, stack, disallowFallbackToOffthreadVideo, fallbackOffthreadVideoProps, audioStreamIndex, debugOverlay, headless, }) => {
    const src = usePreload(unpreloadedSrc);
    const canvasRef = useRef(null);
    const videoConfig = useUnsafeVideoConfig();
    const frame = useCurrentFrame();
    // Holds the MediaPlayer instance for the lifetime of the mount effect below.
    const mediaPlayerRef = useRef(null);
    // "initial*" refs capture the mount-time values so the MediaPlayer creation
    // effect does not need these props in its dependency array (which would
    // re-create the player); later changes are forwarded through the dedicated
    // setter effects further down.
    const initialTrimBeforeRef = useRef(trimBefore);
    const initialTrimAfterRef = useRef(trimAfter);
    const initialOnVideoFrameRef = useRef(onVideoFrame);
    const [mediaPlayerReady, setMediaPlayerReady] = useState(false);
    const [shouldFallbackToNativeVideo, setShouldFallbackToNativeVideo] = useState(false);
    const [playing] = Timeline.usePlayingState();
    const timelineContext = useContext(Internals.TimelineContext);
    const globalPlaybackRate = timelineContext.playbackRate;
    const sharedAudioContext = useContext(SharedAudioContext);
    const buffer = useBufferState();
    const [mediaMuted] = useMediaMutedState();
    const [mediaVolume] = useMediaVolumeState();
    // Filled in once MediaPlayer.initialize() succeeds; used for the loop
    // display in the timeline.
    const [mediaDurationInSeconds, setMediaDurationInSeconds] = useState(null);
    const { hidden } = useContext(SequenceVisibilityToggleContext);
    const volumePropFrame = useFrameForVolumeProp(loopVolumeCurveBehavior);
    // Resolve the `volume` prop (static number or per-frame callback) for the
    // current frame, combined with the global media volume.
    const userPreferredVolume = evaluateVolume({
        frame: volumePropFrame,
        volume,
        mediaVolume,
    });
    warnAboutTooHighVolume(userPreferredVolume);
    const parentSequence = useContext(SequenceContext);
    const isPremounting = Boolean(parentSequence?.premounting);
    const isPostmounting = Boolean(parentSequence?.postmounting);
    const loopDisplay = useLoopDisplay({
        loop,
        mediaDurationInSeconds,
        playbackRate,
        trimAfter,
        trimBefore,
    });
    // Registers this media in the Studio timeline; `timelineId` keys the
    // per-sequence visibility toggle below.
    const { id: timelineId } = useMediaInTimeline({
        volume,
        mediaType: 'video',
        src,
        playbackRate,
        displayName: name ?? null,
        stack,
        showInTimeline,
        premountDisplay: parentSequence?.premountDisplay ?? null,
        postmountDisplay: parentSequence?.postmountDisplay ?? null,
        loopDisplay,
        mediaVolume,
        trimAfter,
        trimBefore,
    });
    const isSequenceHidden = hidden[timelineId] ?? false;
    if (!videoConfig) {
        throw new Error('No video config found');
    }
    const currentTime = frame / videoConfig.fps;
    // Mirror of `currentTime` readable from inside effects/callbacks without
    // adding it to their dependency arrays.
    const currentTimeRef = useRef(currentTime);
    currentTimeRef.current = currentTime;
    // NOTE(review): `src` was already run through usePreload() above — this is
    // a second pass; presumably idempotent, but worth confirming.
    const preloadedSrc = usePreload(src);
    const buffering = useContext(Internals.BufferingContextReact);
    if (!buffering) {
        throw new Error('useMediaPlayback must be used inside a <BufferingContext>');
    }
    const isPlayerBuffering = Internals.useIsPlayerBuffering(buffering);
    // More mount-time snapshots for the player-creation effect (see note above).
    const initialPlaying = useRef(playing && !isPlayerBuffering);
    const initialIsPremounting = useRef(isPremounting);
    const initialIsPostmounting = useRef(isPostmounting);
    const initialGlobalPlaybackRate = useRef(globalPlaybackRate);
    const initialPlaybackRate = useRef(playbackRate);
    // Create (and on cleanup, dispose) the MediaPlayer. Re-runs when any of the
    // listed deps change, which intentionally tears down and rebuilds playback.
    useEffect(() => {
        if (!sharedAudioContext)
            return;
        if (!sharedAudioContext.audioContext)
            return;
        try {
            const player = new MediaPlayer({
                canvas: canvasRef.current,
                src: preloadedSrc,
                logLevel,
                sharedAudioContext: sharedAudioContext.audioContext,
                loop,
                trimAfter: initialTrimAfterRef.current,
                trimBefore: initialTrimBeforeRef.current,
                fps: videoConfig.fps,
                playbackRate: initialPlaybackRate.current,
                audioStreamIndex,
                debugOverlay,
                bufferState: buffer,
                isPremounting: initialIsPremounting.current,
                isPostmounting: initialIsPostmounting.current,
                globalPlaybackRate: initialGlobalPlaybackRate.current,
                onVideoFrameCallback: initialOnVideoFrameRef.current ?? null,
                playing: initialPlaying.current,
            });
            mediaPlayerRef.current = player;
            player
                .initialize(currentTimeRef.current)
                .then((result) => {
                // Player was disposed while initializing — nothing to do.
                if (result.type === 'disposed') {
                    return;
                }
                // The failure cases below either throw (when falling back is
                // disallowed) or log and switch to the <Html5Video> fallback.
                if (result.type === 'unknown-container-format') {
                    if (disallowFallbackToOffthreadVideo) {
                        throw new Error(`Unknown container format ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
                    }
                    Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Unknown container format for ${preloadedSrc} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <OffthreadVideo>`);
                    setShouldFallbackToNativeVideo(true);
                    return;
                }
                if (result.type === 'network-error') {
                    if (disallowFallbackToOffthreadVideo) {
                        throw new Error(`Network error fetching ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
                    }
                    Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Network error fetching ${preloadedSrc}, falling back to <OffthreadVideo>`);
                    setShouldFallbackToNativeVideo(true);
                    return;
                }
                if (result.type === 'cannot-decode') {
                    if (disallowFallbackToOffthreadVideo) {
                        throw new Error(`Cannot decode ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
                    }
                    Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Cannot decode ${preloadedSrc}, falling back to <OffthreadVideo>`);
                    setShouldFallbackToNativeVideo(true);
                    return;
                }
                if (result.type === 'no-tracks') {
                    if (disallowFallbackToOffthreadVideo) {
                        throw new Error(`No video or audio tracks found for ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
                    }
                    Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `No video or audio tracks found for ${preloadedSrc}, falling back to <OffthreadVideo>`);
                    setShouldFallbackToNativeVideo(true);
                    return;
                }
                if (result.type === 'success') {
                    setMediaPlayerReady(true);
                    setMediaDurationInSeconds(result.durationInSeconds);
                }
            })
                .catch((error) => {
                // Includes the throws above when fallback is disallowed.
                Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[VideoForPreview] Failed to initialize MediaPlayer', error);
                setShouldFallbackToNativeVideo(true);
            });
        }
        catch (error) {
            // Synchronous constructor failure.
            Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[VideoForPreview] MediaPlayer initialization failed', error);
            setShouldFallbackToNativeVideo(true);
        }
        return () => {
            if (mediaPlayerRef.current) {
                Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[VideoForPreview] Disposing MediaPlayer`);
                mediaPlayerRef.current.dispose();
                mediaPlayerRef.current = null;
            }
            setMediaPlayerReady(false);
            setShouldFallbackToNativeVideo(false);
        };
    }, [
        audioStreamIndex,
        buffer,
        debugOverlay,
        disallowFallbackToOffthreadVideo,
        logLevel,
        loop,
        preloadedSrc,
        sharedAudioContext,
        videoConfig.fps,
    ]);
    const classNameValue = useMemo(() => {
        return [Internals.OBJECTFIT_CONTAIN_CLASS_NAME, className]
            .filter(Internals.truthy)
            .join(' ');
    }, [className]);
    // Keep play/pause state of the player in sync with the timeline.
    useEffect(() => {
        const mediaPlayer = mediaPlayerRef.current;
        if (!mediaPlayer)
            return;
        if (playing && !isPlayerBuffering) {
            mediaPlayer.play(currentTimeRef.current);
        }
        else {
            mediaPlayer.pause();
        }
    }, [isPlayerBuffering, playing, logLevel, mediaPlayerReady]);
    // The following effects forward prop changes to the existing player
    // instance (the creation effect only read the mount-time values).
    useEffect(() => {
        const mediaPlayer = mediaPlayerRef.current;
        if (!mediaPlayer || !mediaPlayerReady) {
            return;
        }
        mediaPlayer.setTrimBefore(trimBefore, currentTimeRef.current);
    }, [trimBefore, mediaPlayerReady]);
    useEffect(() => {
        const mediaPlayer = mediaPlayerRef.current;
        if (!mediaPlayer || !mediaPlayerReady) {
            return;
        }
        mediaPlayer.setTrimAfter(trimAfter, currentTimeRef.current);
    }, [trimAfter, mediaPlayerReady]);
    // Muted if explicitly requested, globally muted, hidden in the timeline,
    // or the evaluated volume is zero or negative.
    const effectiveMuted = isSequenceHidden || muted || mediaMuted || userPreferredVolume <= 0;
    useLayoutEffect(() => {
        const mediaPlayer = mediaPlayerRef.current;
        if (!mediaPlayer || !mediaPlayerReady)
            return;
        mediaPlayer.setMuted(effectiveMuted);
    }, [effectiveMuted, mediaPlayerReady]);
    useLayoutEffect(() => {
        const mediaPlayer = mediaPlayerRef.current;
        if (!mediaPlayer || !mediaPlayerReady) {
            return;
        }
        mediaPlayer.setVolume(userPreferredVolume);
    }, [userPreferredVolume, mediaPlayerReady]);
    useLayoutEffect(() => {
        const mediaPlayer = mediaPlayerRef.current;
        if (!mediaPlayer || !mediaPlayerReady) {
            return;
        }
        mediaPlayer.setDebugOverlay(debugOverlay);
    }, [debugOverlay, mediaPlayerReady]);
    useLayoutEffect(() => {
        const mediaPlayer = mediaPlayerRef.current;
        if (!mediaPlayer || !mediaPlayerReady) {
            return;
        }
        mediaPlayer.setPlaybackRate(playbackRate);
    }, [playbackRate, mediaPlayerReady]);
    useLayoutEffect(() => {
        const mediaPlayer = mediaPlayerRef.current;
        if (!mediaPlayer || !mediaPlayerReady) {
            return;
        }
        mediaPlayer.setGlobalPlaybackRate(globalPlaybackRate);
    }, [globalPlaybackRate, mediaPlayerReady]);
    useLayoutEffect(() => {
        const mediaPlayer = mediaPlayerRef.current;
        if (!mediaPlayer || !mediaPlayerReady) {
            return;
        }
        mediaPlayer.setLoop(loop);
    }, [loop, mediaPlayerReady]);
    useLayoutEffect(() => {
        const mediaPlayer = mediaPlayerRef.current;
        if (!mediaPlayer || !mediaPlayerReady) {
            return;
        }
        mediaPlayer.setIsPremounting(isPremounting);
    }, [isPremounting, mediaPlayerReady]);
    useLayoutEffect(() => {
        const mediaPlayer = mediaPlayerRef.current;
        if (!mediaPlayer || !mediaPlayerReady) {
            return;
        }
        mediaPlayer.setIsPostmounting(isPostmounting);
    }, [isPostmounting, mediaPlayerReady]);
    useLayoutEffect(() => {
        const mediaPlayer = mediaPlayerRef.current;
        if (!mediaPlayer || !mediaPlayerReady) {
            return;
        }
        mediaPlayer.setFps(videoConfig.fps);
    }, [videoConfig.fps, mediaPlayerReady]);
    useLayoutEffect(() => {
        const mediaPlayer = mediaPlayerRef.current;
        if (!mediaPlayer || !mediaPlayerReady) {
            return;
        }
        mediaPlayer.setVideoFrameCallback(onVideoFrame ?? null);
    }, [onVideoFrame, mediaPlayerReady]);
    // Seek the player whenever the timeline position changes.
    useLayoutEffect(() => {
        const mediaPlayer = mediaPlayerRef.current;
        if (!mediaPlayer || !mediaPlayerReady)
            return;
        mediaPlayer.seekTo(currentTime).catch(() => {
            // Might be disposed
        });
        Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[VideoForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
    }, [currentTime, logLevel, mediaPlayerReady]);
    // Hide via opacity (instead of unmounting) when toggled off in the timeline.
    const actualStyle = useMemo(() => {
        return {
            ...style,
            opacity: isSequenceHidden ? 0 : (style?.opacity ?? 1),
        };
    }, [isSequenceHidden, style]);
    if (shouldFallbackToNativeVideo && !disallowFallbackToOffthreadVideo) {
        // <Video> will fallback to <VideoForPreview> anyway
        // not using <OffthreadVideo> because it does not support looping
        return (_jsx(Html5Video, { src: src, style: actualStyle, className: className, muted: muted, volume: volume, trimAfter: trimAfter, trimBefore: trimBefore, playbackRate: playbackRate, loopVolumeCurveBehavior: loopVolumeCurveBehavior, name: name, loop: loop, showInTimeline: showInTimeline, stack: stack ?? undefined, ...fallbackOffthreadVideoProps }));
    }
    // Headless mode: run all playback logic (e.g. audio) without a canvas.
    if (headless) {
        return null;
    }
    return (_jsx("canvas", { ref: canvasRef, width: videoConfig.width, height: videoConfig.height, style: actualStyle, className: classNameValue }));
};
|
|
302
|
+
// Gatekeeper around VideoForPreviewAssertedShowing: mounts the inner
// component only while the current composition time resolves to a valid
// time inside the (trimmed, possibly looped) media; otherwise renders null.
export const VideoForPreview = (props) => {
    const frame = useCurrentFrame();
    const videoConfig = useVideoConfig();
    const currentTime = frame / videoConfig.fps;
    // getTimeInSeconds() returns null when the current time falls outside the
    // playable range — that is the signal to not show the media at all.
    const showShow = useMemo(() => {
        const resolvedTime = getTimeInSeconds({
            unloopedTimeInSeconds: currentTime,
            playbackRate: props.playbackRate,
            loop: props.loop,
            trimBefore: props.trimBefore,
            trimAfter: props.trimAfter,
            mediaDurationInSeconds: Infinity,
            fps: videoConfig.fps,
            ifNoMediaDuration: 'infinity',
            src: props.src,
        });
        return resolvedTime !== null;
    }, [
        currentTime,
        props.loop,
        props.playbackRate,
        props.src,
        props.trimAfter,
        props.trimBefore,
        videoConfig.fps,
    ]);
    if (showShow) {
        return _jsx(VideoForPreviewAssertedShowing, { ...props });
    }
    return null;
};
|
|
@@ -0,0 +1,263 @@
|
|
|
1
|
+
import { jsx as _jsx } from "react/jsx-runtime";
|
|
2
|
+
import { useContext, useLayoutEffect, useMemo, useRef, useState, } from 'react';
|
|
3
|
+
import { Internals, Loop, random, useCurrentFrame, useDelayRender, useRemotionEnvironment, useVideoConfig, } from 'remotion';
|
|
4
|
+
import { useMaxMediaCacheSize } from '../caches';
|
|
5
|
+
import { applyVolume } from '../convert-audiodata/apply-volume';
|
|
6
|
+
import { TARGET_SAMPLE_RATE } from '../convert-audiodata/resample-audiodata';
|
|
7
|
+
import { frameForVolumeProp } from '../looped-frame';
|
|
8
|
+
import { extractFrameViaBroadcastChannel } from '../video-extraction/extract-frame-via-broadcast-channel';
|
|
9
|
+
// Render-time implementation of <Video>: for every frame, extracts the exact
// video frame (and the audio slice covering this frame) via a
// BroadcastChannel-coordinated extractor, paints the frame onto a <canvas>,
// and registers the audio as an inline render asset. On extraction failures
// it either fails the render (client-side rendering or
// `disallowFallbackToOffthreadVideo`) or swaps itself for <OffthreadVideo>.
export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted, loopVolumeCurveBehavior, delayRenderRetries, delayRenderTimeoutInMilliseconds, onVideoFrame, logLevel, loop, style, className, fallbackOffthreadVideoProps, audioStreamIndex, name, disallowFallbackToOffthreadVideo, stack, toneFrequency, trimAfterValue, trimBeforeValue, headless, }) => {
    if (!src) {
        throw new TypeError('No `src` was passed to <Video>.');
    }
    const frame = useCurrentFrame();
    const absoluteFrame = Internals.useTimelinePosition();
    const { fps } = useVideoConfig();
    const { registerRenderAsset, unregisterRenderAsset } = useContext(Internals.RenderAssetManager);
    const startsAt = Internals.useMediaStartsAt();
    const sequenceContext = useContext(Internals.SequenceContext);
    // Generate a string that's as unique as possible for this asset
    // but at the same time the same on all threads
    const id = useMemo(() => `media-video-${random(src)}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [
        src,
        sequenceContext?.cumulatedFrom,
        sequenceContext?.relativeFrom,
        sequenceContext?.durationInFrames,
    ]);
    const environment = useRemotionEnvironment();
    const { delayRender, continueRender, cancelRender } = useDelayRender();
    const canvasRef = useRef(null);
    // Starts as `false`; on fallback it is set to an object of shape
    // `{durationInSeconds: number | null}` — the code below relies on the
    // object being truthy and reads `.durationInSeconds` for looping.
    const [replaceWithOffthreadVideo, setReplaceWithOffthreadVideo] = useState(false);
    const audioEnabled = Internals.useAudioEnabled();
    const videoEnabled = Internals.useVideoEnabled();
    const maxCacheSize = useMaxMediaCacheSize(logLevel);
    // Errors from inside the effect are stored and rethrown during render so
    // React error boundaries / the renderer can see them.
    const [error, setError] = useState(null);
    if (error) {
        throw error;
    }
    // Per-frame extraction. useLayoutEffect so delayRender() is registered
    // before the renderer considers the frame ready.
    useLayoutEffect(() => {
        if (!canvasRef.current && !headless) {
            return;
        }
        if (replaceWithOffthreadVideo) {
            return;
        }
        if (!canvasRef.current?.getContext && !headless) {
            return setError(new Error('Canvas does not have .getContext() method available. This could be because <Video> was mounted inside an <svg> tag.'));
        }
        const timestamp = frame / fps;
        const durationInSeconds = 1 / fps;
        // Block the render until the frame (and audio) has been extracted.
        const newHandle = delayRender(`Extracting frame at time ${timestamp}`, {
            retries: delayRenderRetries ?? undefined,
            timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined,
        });
        const shouldRenderAudio = (() => {
            if (!audioEnabled) {
                return false;
            }
            if (muted) {
                return false;
            }
            return true;
        })();
        extractFrameViaBroadcastChannel({
            src,
            timeInSeconds: timestamp,
            durationInSeconds,
            playbackRate,
            logLevel,
            includeAudio: shouldRenderAudio,
            includeVideo: videoEnabled,
            isClientSideRendering: environment.isClientSideRendering,
            loop,
            audioStreamIndex,
            trimAfter: trimAfterValue,
            trimBefore: trimBeforeValue,
            fps,
            maxCacheSize,
        })
            .then((result) => {
            // Failure cases: client-side rendering always fails hard;
            // otherwise fail hard only if fallback is disallowed, else log
            // (from the main tab only, to avoid duplicate logs across
            // render workers) and switch to <OffthreadVideo>.
            if (result.type === 'unknown-container-format') {
                if (environment.isClientSideRendering) {
                    cancelRender(new Error(`Cannot render video "${src}": Unknown container format. See supported formats: https://www.remotion.dev/docs/mediabunny/formats`));
                    return;
                }
                if (disallowFallbackToOffthreadVideo) {
                    cancelRender(new Error(`Unknown container format ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
                }
                if (window.remotion_isMainTab) {
                    Internals.Log.info({ logLevel, tag: '@remotion/media' }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <OffthreadVideo>`);
                }
                setReplaceWithOffthreadVideo({ durationInSeconds: null });
                return;
            }
            if (result.type === 'cannot-decode') {
                if (environment.isClientSideRendering) {
                    cancelRender(new Error(`Cannot render video "${src}": The video could not be decoded by the browser.`));
                    return;
                }
                if (disallowFallbackToOffthreadVideo) {
                    cancelRender(new Error(`Cannot decode ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
                }
                if (window.remotion_isMainTab) {
                    Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Cannot decode ${src}, falling back to <OffthreadVideo>`);
                }
                setReplaceWithOffthreadVideo({
                    durationInSeconds: result.durationInSeconds,
                });
                return;
            }
            if (result.type === 'cannot-decode-alpha') {
                if (environment.isClientSideRendering) {
                    cancelRender(new Error(`Cannot render video "${src}": The alpha channel could not be decoded by the browser.`));
                    return;
                }
                if (disallowFallbackToOffthreadVideo) {
                    cancelRender(new Error(`Cannot decode alpha component for ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
                }
                if (window.remotion_isMainTab) {
                    Internals.Log.info({ logLevel, tag: '@remotion/media' }, `Cannot decode alpha component for ${src}, falling back to <OffthreadVideo>`);
                }
                setReplaceWithOffthreadVideo({
                    durationInSeconds: result.durationInSeconds,
                });
                return;
            }
            if (result.type === 'network-error') {
                if (environment.isClientSideRendering) {
                    cancelRender(new Error(`Cannot render video "${src}": Network error while fetching the video (possibly CORS).`));
                    return;
                }
                if (disallowFallbackToOffthreadVideo) {
                    cancelRender(new Error(`Cannot decode ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
                }
                if (window.remotion_isMainTab) {
                    Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Network error fetching ${src} (no CORS?), falling back to <OffthreadVideo>`);
                }
                setReplaceWithOffthreadVideo({ durationInSeconds: null });
                return;
            }
            // Success: result carries an optional frame (ImageBitmap-like),
            // an optional audio slice and the media duration.
            const { frame: imageBitmap, audio, durationInSeconds: assetDurationInSeconds, } = result;
            if (imageBitmap) {
                onVideoFrame?.(imageBitmap);
                const context = canvasRef.current?.getContext('2d', {
                    alpha: true,
                });
                // Could be in headless mode
                if (context) {
                    context.canvas.width = imageBitmap.width;
                    context.canvas.height = imageBitmap.height;
                    context.canvas.style.aspectRatio = `${context.canvas.width} / ${context.canvas.height}`;
                    context.drawImage(imageBitmap, 0, 0);
                }
                // Release the bitmap's memory immediately after painting.
                imageBitmap.close();
            }
            else if (videoEnabled) {
                // A video that only starts at time 0.033sec
                // we shall not crash here but clear the canvas
                const context = canvasRef.current?.getContext('2d', {
                    alpha: true,
                });
                if (context) {
                    context.clearRect(0, 0, context.canvas.width, context.canvas.height);
                }
            }
            const volumePropsFrame = frameForVolumeProp({
                behavior: loopVolumeCurveBehavior,
                loop,
                assetDurationInSeconds: assetDurationInSeconds ?? 0,
                fps,
                frame,
                startsAt,
            });
            const volume = Internals.evaluateVolume({
                volume: volumeProp,
                frame: volumePropsFrame,
                mediaVolume: 1,
            });
            Internals.warnAboutTooHighVolume(volume);
            // Hand the (volume-scaled) audio slice to the renderer.
            if (audio && volume > 0) {
                applyVolume(audio.data, volume);
                registerRenderAsset({
                    type: 'inline-audio',
                    id,
                    audio: environment.isClientSideRendering
                        ? audio.data
                        : Array.from(audio.data),
                    frame: absoluteFrame,
                    timestamp: audio.timestamp,
                    // Duration in microseconds.
                    duration: (audio.numberOfFrames / TARGET_SAMPLE_RATE) * 1000000,
                    toneFrequency,
                });
            }
            continueRender(newHandle);
        })
            .catch((err) => {
            cancelRender(err);
        });
        return () => {
            // NOTE(review): continueRender also runs here so the handle is
            // cleared on re-run/unmount — presumably safe to call twice.
            continueRender(newHandle);
            unregisterRenderAsset(id);
        };
    }, [
        absoluteFrame,
        continueRender,
        delayRender,
        delayRenderRetries,
        delayRenderTimeoutInMilliseconds,
        environment.isClientSideRendering,
        fps,
        frame,
        id,
        logLevel,
        loop,
        loopVolumeCurveBehavior,
        muted,
        onVideoFrame,
        playbackRate,
        registerRenderAsset,
        src,
        startsAt,
        unregisterRenderAsset,
        volumeProp,
        replaceWithOffthreadVideo,
        audioStreamIndex,
        disallowFallbackToOffthreadVideo,
        toneFrequency,
        trimAfterValue,
        trimBeforeValue,
        audioEnabled,
        videoEnabled,
        maxCacheSize,
        cancelRender,
        headless,
    ]);
    const classNameValue = useMemo(() => {
        return [Internals.OBJECTFIT_CONTAIN_CLASS_NAME, className]
            .filter(Internals.truthy)
            .join(' ');
    }, [className]);
    // Fallback path: render via <OffthreadVideo>, wrapped in <Loop> when
    // `loop` is requested and the media duration is known.
    if (replaceWithOffthreadVideo) {
        const fallback = (_jsx(Internals.InnerOffthreadVideo, { src: src, playbackRate: playbackRate ?? 1, muted: muted ?? false, acceptableTimeShiftInSeconds: fallbackOffthreadVideoProps?.acceptableTimeShiftInSeconds, loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? 'repeat', delayRenderRetries: delayRenderRetries ?? undefined, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined, style: style, allowAmplificationDuringRender: true, transparent: fallbackOffthreadVideoProps?.transparent ?? true, toneMapped: fallbackOffthreadVideoProps?.toneMapped ?? true, audioStreamIndex: audioStreamIndex ?? 0, name: name, className: className, onVideoFrame: onVideoFrame, volume: volumeProp, id: id, onError: fallbackOffthreadVideoProps?.onError, toneFrequency: toneFrequency,
            // these shouldn't matter during rendering / should not appear at all
            showInTimeline: false, crossOrigin: undefined, onAutoPlayError: () => undefined, pauseWhenBuffering: false, trimAfter: trimAfterValue, trimBefore: trimBeforeValue, useWebAudioApi: false, startFrom: undefined, endAt: undefined, stack: stack, _remotionInternalNativeLoopPassed: false }));
        if (loop) {
            // <OffthreadVideo> cannot loop by itself; without a known duration
            // we cannot emulate looping either — fail the render.
            if (!replaceWithOffthreadVideo.durationInSeconds) {
                const err = new Error(`Cannot render video ${src}: @remotion/media was unable to render, and fell back to <OffthreadVideo>. Also, "loop" was set, but <OffthreadVideo> does not support looping and @remotion/media could also not determine the duration of the video.`);
                cancelRender(err);
                throw err;
            }
            return (_jsx(Loop, { layout: "none", durationInFrames: Internals.calculateMediaDuration({
                    trimAfter: trimAfterValue,
                    mediaDurationInFrames: replaceWithOffthreadVideo.durationInSeconds * fps,
                    playbackRate,
                    trimBefore: trimBeforeValue,
                }), children: fallback }));
        }
        return fallback;
    }
    // Headless mode: extract audio/assets without painting to a canvas.
    if (headless) {
        return null;
    }
    return _jsx("canvas", { ref: canvasRef, style: style, className: classNameValue });
};
|