@remotion/media 4.0.354 → 4.0.356
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/audio/audio-for-rendering.js +37 -3
- package/dist/audio/audio.js +1 -1
- package/dist/audio/props.d.ts +15 -0
- package/dist/audio-extraction/audio-iterator.d.ts +3 -2
- package/dist/audio-extraction/audio-iterator.js +13 -2
- package/dist/audio-extraction/audio-manager.d.ts +6 -5
- package/dist/audio-extraction/audio-manager.js +5 -3
- package/dist/audio-extraction/extract-audio.d.ts +3 -2
- package/dist/audio-extraction/extract-audio.js +12 -9
- package/dist/caches.d.ts +6 -5
- package/dist/convert-audiodata/apply-tonefrequency.d.ts +2 -0
- package/dist/convert-audiodata/apply-tonefrequency.js +44 -0
- package/dist/convert-audiodata/wsola.d.ts +13 -0
- package/dist/convert-audiodata/wsola.js +197 -0
- package/dist/esm/index.mjs +1519 -13269
- package/dist/extract-frame-and-audio.d.ts +3 -2
- package/dist/extract-frame-and-audio.js +60 -26
- package/dist/get-sink-weak.d.ts +13 -0
- package/dist/get-sink-weak.js +23 -0
- package/dist/index.d.ts +12 -3
- package/dist/index.js +11 -2
- package/dist/video/media-player.d.ts +8 -0
- package/dist/video/media-player.js +77 -19
- package/dist/video/props.d.ts +36 -18
- package/dist/video/video-for-preview.d.ts +13 -7
- package/dist/video/video-for-preview.js +115 -10
- package/dist/video/video-for-rendering.d.ts +23 -2
- package/dist/video/video-for-rendering.js +47 -4
- package/dist/video/video.js +13 -14
- package/dist/video-extraction/extract-frame-via-broadcast-channel.d.ts +3 -2
- package/dist/video-extraction/extract-frame-via-broadcast-channel.js +53 -4
- package/dist/video-extraction/extract-frame.d.ts +2 -3
- package/dist/video-extraction/extract-frame.js +11 -8
- package/dist/video-extraction/get-frames-since-keyframe.d.ts +14 -9
- package/dist/video-extraction/get-frames-since-keyframe.js +72 -19
- package/package.json +5 -5
- package/dist/audio-for-rendering.d.ts +0 -3
- package/dist/audio-for-rendering.js +0 -94
- package/dist/audio.d.ts +0 -3
- package/dist/audio.js +0 -60
- package/dist/audiodata-to-array.d.ts +0 -0
- package/dist/audiodata-to-array.js +0 -1
- package/dist/convert-audiodata/data-types.d.ts +0 -1
- package/dist/convert-audiodata/data-types.js +0 -22
- package/dist/convert-audiodata/is-planar-format.d.ts +0 -1
- package/dist/convert-audiodata/is-planar-format.js +0 -3
- package/dist/convert-audiodata/log-audiodata.d.ts +0 -1
- package/dist/convert-audiodata/log-audiodata.js +0 -8
- package/dist/convert-audiodata/trim-audiodata.d.ts +0 -0
- package/dist/convert-audiodata/trim-audiodata.js +0 -1
- package/dist/deserialized-audiodata.d.ts +0 -15
- package/dist/deserialized-audiodata.js +0 -26
- package/dist/extract-audio.d.ts +0 -7
- package/dist/extract-audio.js +0 -98
- package/dist/extract-frame-via-broadcast-channel.d.ts +0 -15
- package/dist/extract-frame-via-broadcast-channel.js +0 -104
- package/dist/extract-frame.d.ts +0 -27
- package/dist/extract-frame.js +0 -21
- package/dist/extrct-audio.d.ts +0 -7
- package/dist/extrct-audio.js +0 -94
- package/dist/get-frames-since-keyframe.d.ts +0 -22
- package/dist/get-frames-since-keyframe.js +0 -41
- package/dist/keyframe-bank.d.ts +0 -25
- package/dist/keyframe-bank.js +0 -120
- package/dist/keyframe-manager.d.ts +0 -23
- package/dist/keyframe-manager.js +0 -170
- package/dist/new-video-for-rendering.d.ts +0 -3
- package/dist/new-video-for-rendering.js +0 -108
- package/dist/new-video.d.ts +0 -3
- package/dist/new-video.js +0 -37
- package/dist/props.d.ts +0 -29
- package/dist/props.js +0 -1
- package/dist/remember-actual-matroska-timestamps.d.ts +0 -4
- package/dist/remember-actual-matroska-timestamps.js +0 -19
- package/dist/serialize-videoframe.d.ts +0 -0
- package/dist/serialize-videoframe.js +0 -1
- package/dist/video/new-video-for-preview.d.ts +0 -10
- package/dist/video/new-video-for-preview.js +0 -108
- package/dist/video-extraction/media-player.d.ts +0 -64
- package/dist/video-extraction/media-player.js +0 -501
- package/dist/video-extraction/new-video-for-preview.d.ts +0 -10
- package/dist/video-extraction/new-video-for-preview.js +0 -114
- package/dist/video-for-rendering.d.ts +0 -3
- package/dist/video-for-rendering.js +0 -108
- package/dist/video.d.ts +0 -3
- package/dist/video.js +0 -37
package/dist/video/video-for-preview.js
CHANGED
@@ -1,18 +1,41 @@
 import { jsx as _jsx } from "react/jsx-runtime";
+import { ALL_FORMATS, Input, UrlSource } from 'mediabunny';
 import { useContext, useEffect, useMemo, useRef, useState } from 'react';
-import { Internals, useBufferState, useCurrentFrame } from 'remotion';
+import { Internals, Loop, useBufferState, useCurrentFrame, useVideoConfig, } from 'remotion';
 import { MediaPlayer } from './media-player';
-const { useUnsafeVideoConfig, Timeline, SharedAudioContext } = Internals;
-
+const { useUnsafeVideoConfig, Timeline, SharedAudioContext, useMediaMutedState, useMediaVolumeState, useFrameForVolumeProp, evaluateVolume, warnAboutTooHighVolume, usePreload, } = Internals;
+const calculateLoopDuration = ({ endAt, mediaDuration, playbackRate, startFrom, }) => {
+let duration = mediaDuration;
+if (typeof endAt !== 'undefined') {
+duration = endAt;
+}
+if (typeof startFrom !== 'undefined') {
+duration -= startFrom;
+}
+const actualDuration = duration / playbackRate;
+return Math.floor(actualDuration);
+};
+const NewVideoForPreview = ({ src, style, playbackRate, logLevel, className, muted, volume, loopVolumeCurveBehavior, onVideoFrame, }) => {
 const canvasRef = useRef(null);
 const videoConfig = useUnsafeVideoConfig();
 const frame = useCurrentFrame();
 const mediaPlayerRef = useRef(null);
 const [mediaPlayerReady, setMediaPlayerReady] = useState(false);
 const [playing] = Timeline.usePlayingState();
+const timelineContext = useContext(Timeline.TimelineContext);
+const globalPlaybackRate = timelineContext.playbackRate;
 const sharedAudioContext = useContext(SharedAudioContext);
 const buffer = useBufferState();
 const delayHandleRef = useRef(null);
+const [mediaMuted] = useMediaMutedState();
+const [mediaVolume] = useMediaVolumeState();
+const volumePropFrame = useFrameForVolumeProp(loopVolumeCurveBehavior ?? 'repeat');
+const userPreferredVolume = evaluateVolume({
+frame: volumePropFrame,
+volume,
+mediaVolume,
+});
+warnAboutTooHighVolume(userPreferredVolume);
 if (!videoConfig) {
 throw new Error('No video config found');
 }
@@ -22,6 +45,7 @@ export const NewVideoForPreview = ({ src, style, playbackRate = 1, logLevel = 'i
 const actualFps = videoConfig.fps / playbackRate;
 const currentTime = frame / actualFps;
 const [initialTimestamp] = useState(currentTime);
+const preloadedSrc = usePreload(src);
 useEffect(() => {
 if (!canvasRef.current)
 return;
@@ -32,7 +56,7 @@ export const NewVideoForPreview = ({ src, style, playbackRate = 1, logLevel = 'i
 try {
 const player = new MediaPlayer({
 canvas: canvasRef.current,
-src,
+src: preloadedSrc,
 logLevel,
 sharedAudioContext: sharedAudioContext.audioContext,
 });
@@ -62,13 +86,12 @@ export const NewVideoForPreview = ({ src, style, playbackRate = 1, logLevel = 'i
 }
 setMediaPlayerReady(false);
 };
-}, [
+}, [preloadedSrc, logLevel, sharedAudioContext, initialTimestamp]);
 const classNameValue = useMemo(() => {
 return [Internals.OBJECTFIT_CONTAIN_CLASS_NAME, className]
 .filter(Internals.truthy)
 .join(' ');
 }, [className]);
-// sync play/pause state with Remotion timeline (like old VideoForPreview video does)
 useEffect(() => {
 const mediaPlayer = mediaPlayerRef.current;
 if (!mediaPlayer)
@@ -82,7 +105,6 @@ export const NewVideoForPreview = ({ src, style, playbackRate = 1, logLevel = 'i
 mediaPlayer.pause();
 }
 }, [playing, logLevel, mediaPlayerReady]);
-// sync target time with MediaPlayer
 useEffect(() => {
 const mediaPlayer = mediaPlayerRef.current;
 if (!mediaPlayer || !mediaPlayerReady)
@@ -90,24 +112,107 @@ export const NewVideoForPreview = ({ src, style, playbackRate = 1, logLevel = 'i
 mediaPlayer.seekTo(currentTime);
 Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[NewVideoForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
 }, [currentTime, logLevel, mediaPlayerReady]);
-// sync MediaPlayer buffering with Remotion buffering
 useEffect(() => {
 const mediaPlayer = mediaPlayerRef.current;
 if (!mediaPlayer || !mediaPlayerReady)
 return;
 mediaPlayer.onBufferingChange((newBufferingState) => {
 if (newBufferingState && !delayHandleRef.current) {
-// Start blocking Remotion playback
 delayHandleRef.current = buffer.delayPlayback();
 Internals.Log.trace({ logLevel, tag: '@remotion/media' }, '[NewVideoForPreview] MediaPlayer buffering - blocking Remotion playback');
 }
 else if (!newBufferingState && delayHandleRef.current) {
-// Unblock Remotion playback
 delayHandleRef.current.unblock();
 delayHandleRef.current = null;
 Internals.Log.trace({ logLevel, tag: '@remotion/media' }, '[NewVideoForPreview] MediaPlayer unbuffering - unblocking Remotion playback');
 }
 });
 }, [mediaPlayerReady, buffer, logLevel]);
+const effectiveMuted = muted || mediaMuted || userPreferredVolume <= 0;
+useEffect(() => {
+const mediaPlayer = mediaPlayerRef.current;
+if (!mediaPlayer || !mediaPlayerReady)
+return;
+mediaPlayer.setMuted(effectiveMuted);
+}, [effectiveMuted, mediaPlayerReady]);
+useEffect(() => {
+const mediaPlayer = mediaPlayerRef.current;
+if (!mediaPlayer || !mediaPlayerReady) {
+return;
+}
+mediaPlayer.setVolume(userPreferredVolume);
+}, [userPreferredVolume, mediaPlayerReady, logLevel]);
+const effectivePlaybackRate = useMemo(() => playbackRate * globalPlaybackRate, [playbackRate, globalPlaybackRate]);
+useEffect(() => {
+const mediaPlayer = mediaPlayerRef.current;
+if (!mediaPlayer || !mediaPlayerReady) {
+return;
+}
+mediaPlayer.setPlaybackRate(effectivePlaybackRate).catch((error) => {
+Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[NewVideoForPreview] Failed to set playback rate', error);
+});
+}, [effectivePlaybackRate, mediaPlayerReady, logLevel]);
+useEffect(() => {
+const mediaPlayer = mediaPlayerRef.current;
+if (!mediaPlayer || !mediaPlayerReady) {
+return;
+}
+if (onVideoFrame) {
+mediaPlayer.onVideoFrame(onVideoFrame);
+}
+}, [onVideoFrame, mediaPlayerReady]);
 return (_jsx("canvas", { ref: canvasRef, width: videoConfig.width, height: videoConfig.height, style: style, className: classNameValue }));
 };
+const VideoForPreviewWithDuration = ({ className, durationInSeconds, logLevel, loopVolumeCurveBehavior, muted, onVideoFrame, playbackRate, src, style, volume, loop, name, trimAfter, trimBefore, }) => {
+const { fps } = useVideoConfig();
+if (loop) {
+if (!Number.isFinite(durationInSeconds) || durationInSeconds === null) {
+return (_jsx(VideoForPreviewWithDuration, { loop: false, className: className, durationInSeconds: durationInSeconds, logLevel: logLevel, loopVolumeCurveBehavior: loopVolumeCurveBehavior, muted: muted, onVideoFrame: onVideoFrame, playbackRate: playbackRate, src: src, style: style, volume: volume, name: name, trimAfter: trimAfter, trimBefore: trimBefore }));
+}
+const mediaDuration = durationInSeconds * fps;
+return (_jsx(Loop, { durationInFrames: calculateLoopDuration({
+endAt: trimAfter,
+mediaDuration,
+playbackRate: playbackRate ?? 1,
+startFrom: trimBefore,
+}), layout: "none", name: name, children: _jsx(VideoForPreviewWithDuration, { loop: false, className: className, durationInSeconds: durationInSeconds, logLevel: logLevel, loopVolumeCurveBehavior: loopVolumeCurveBehavior, muted: muted, onVideoFrame: onVideoFrame, playbackRate: playbackRate, src: src, style: style, volume: volume, name: name, trimAfter: trimAfter, trimBefore: trimBefore }) }));
+}
+return (_jsx(NewVideoForPreview, { src: src, style: style, playbackRate: playbackRate, logLevel: logLevel, muted: muted, volume: volume, loopVolumeCurveBehavior: loopVolumeCurveBehavior, onVideoFrame: onVideoFrame, className: className }));
+};
+export const VideoForPreview = ({ className, loop, src, logLevel, muted, name, volume, loopVolumeCurveBehavior, onVideoFrame, playbackRate, style, }) => {
+const preloadedSrc = usePreload(src);
+const [durationInSeconds, setDurationInSeconds] = useState(null);
+useEffect(() => {
+if (!loop) {
+return;
+}
+let cancelled = false;
+const computeDuration = async () => {
+const urlSource = new UrlSource(preloadedSrc);
+const input = new Input({
+source: urlSource,
+formats: ALL_FORMATS,
+});
+try {
+const duration = await input.computeDuration();
+if (!cancelled) {
+setDurationInSeconds(duration);
+}
+}
+catch (error) {
+Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[VideoForPreview] Failed to compute duration', error);
+}
+finally {
+input.dispose();
+}
+};
+computeDuration();
+return () => {
+cancelled = true;
+};
+}, [loop, preloadedSrc, logLevel]);
+if (loop && durationInSeconds === null) {
+return null;
+}
+return (_jsx(VideoForPreviewWithDuration, { durationInSeconds: durationInSeconds, className: className, logLevel: logLevel, muted: muted, onVideoFrame: onVideoFrame, playbackRate: playbackRate, src: src, style: style, volume: volume, name: name, trimAfter: undefined, trimBefore: undefined, loop: loop, loopVolumeCurveBehavior: loopVolumeCurveBehavior }));
+};
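The calculateLoopDuration helper added above turns the media duration (already converted to frames) into the durationInFrames that <Loop> receives. A minimal TypeScript sketch of the same arithmetic, with made-up numbers for illustration:

// Mirrors the arithmetic of the calculateLoopDuration helper above;
// the concrete numbers further down are illustrative, not taken from the package.
function loopDurationInFrames({
  endAt,
  mediaDuration,
  playbackRate,
  startFrom,
}: {
  endAt?: number; // trimAfter, in frames
  mediaDuration: number; // durationInSeconds * fps, in frames
  playbackRate: number;
  startFrom?: number; // trimBefore, in frames
}): number {
  let duration = mediaDuration;
  if (typeof endAt !== 'undefined') {
    duration = endAt;
  }
  if (typeof startFrom !== 'undefined') {
    duration -= startFrom;
  }
  return Math.floor(duration / playbackRate);
}

// A 10-second source at 30 fps (300 frames), trimmed by 15 frames and played at 2x:
// floor((300 - 15) / 2) = 142 frames per <Loop> iteration.
const framesPerIteration = loopDurationInFrames({
  mediaDuration: 10 * 30,
  playbackRate: 2,
  startFrom: 15,
});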
package/dist/video/video-for-rendering.d.ts
CHANGED
@@ -1,3 +1,24 @@
 import React from 'react';
-import type {
-
+import type { LogLevel, LoopVolumeCurveBehavior, OnVideoFrame, VolumeProp } from 'remotion';
+import type { FallbackOffthreadVideoProps } from './props';
+type InnerVideoProps = {
+readonly className: string | undefined;
+readonly loop: boolean;
+readonly src: string;
+readonly logLevel: LogLevel;
+readonly muted: boolean;
+readonly name: string | undefined;
+readonly volume: VolumeProp;
+readonly loopVolumeCurveBehavior: LoopVolumeCurveBehavior;
+readonly onVideoFrame: OnVideoFrame | undefined;
+readonly playbackRate: number;
+readonly style: React.CSSProperties;
+readonly delayRenderRetries: number | null;
+readonly delayRenderTimeoutInMilliseconds: number | null;
+readonly fallbackOffthreadVideoProps: FallbackOffthreadVideoProps;
+readonly audioStreamIndex: number;
+readonly disallowFallbackToOffthreadVideo: boolean;
+readonly stack: string | undefined;
+};
+export declare const VideoForRendering: React.FC<InnerVideoProps>;
+export {};
package/dist/video/video-for-rendering.js
CHANGED
@@ -4,9 +4,7 @@ import { cancelRender, Internals, useCurrentFrame, useDelayRender, useRemotionEn
 import { applyVolume } from '../convert-audiodata/apply-volume';
 import { frameForVolumeProp } from '../looped-frame';
 import { extractFrameViaBroadcastChannel } from '../video-extraction/extract-frame-via-broadcast-channel';
-export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted, loopVolumeCurveBehavior, delayRenderRetries, delayRenderTimeoutInMilliseconds,
-// call when a frame of the video, i.e. frame drawn on canvas
-onVideoFrame, logLevel = window.remotion_logLevel, loop, style, className, }) => {
+export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted, loopVolumeCurveBehavior, delayRenderRetries, delayRenderTimeoutInMilliseconds, onVideoFrame, logLevel, loop, style, className, fallbackOffthreadVideoProps, audioStreamIndex, name, disallowFallbackToOffthreadVideo, stack, }) => {
 if (!src) {
 throw new TypeError('No `src` was passed to <Video>.');
 }
@@ -19,10 +17,14 @@ onVideoFrame, logLevel = window.remotion_logLevel, loop, style, className, }) =>
 const environment = useRemotionEnvironment();
 const { delayRender, continueRender } = useDelayRender();
 const canvasRef = useRef(null);
+const [replaceWithOffthreadVideo, setReplaceWithOffthreadVideo] = useState(false);
 useLayoutEffect(() => {
 if (!canvasRef.current) {
 return;
 }
+if (replaceWithOffthreadVideo) {
+return;
+}
 const actualFps = playbackRate ? fps / playbackRate : fps;
 const timestamp = frame / actualFps;
 const durationInSeconds = 1 / actualFps;
@@ -49,8 +51,40 @@ onVideoFrame, logLevel = window.remotion_logLevel, loop, style, className, }) =>
 includeVideo: window.remotion_videoEnabled,
 isClientSideRendering: environment.isClientSideRendering,
 loop: loop ?? false,
+audioStreamIndex: audioStreamIndex ?? 0,
 })
-.then((
+.then((result) => {
+if (result === 'unknown-container-format') {
+if (disallowFallbackToOffthreadVideo) {
+cancelRender(new Error(`Unknown container format ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
+}
+if (window.remotion_isMainTab) {
+Internals.Log.info({ logLevel, tag: '@remotion/media' }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <OffthreadVideo>`);
+}
+setReplaceWithOffthreadVideo(true);
+return;
+}
+if (result === 'cannot-decode') {
+if (disallowFallbackToOffthreadVideo) {
+cancelRender(new Error(`Cannot decode ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
+}
+if (window.remotion_isMainTab) {
+Internals.Log.info({ logLevel, tag: '@remotion/media' }, `Cannot decode ${src}, falling back to <OffthreadVideo>`);
+}
+setReplaceWithOffthreadVideo(true);
+return;
+}
+if (result === 'network-error') {
+if (disallowFallbackToOffthreadVideo) {
+cancelRender(new Error(`Cannot decode ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
+}
+if (window.remotion_isMainTab) {
+Internals.Log.info({ logLevel, tag: '@remotion/media' }, `Network error fetching ${src}, falling back to <OffthreadVideo>`);
+}
+setReplaceWithOffthreadVideo(true);
+return;
+}
+const { frame: imageBitmap, audio, durationInSeconds: assetDurationInSeconds, } = result;
 if (imageBitmap) {
 onVideoFrame?.(imageBitmap);
 const context = canvasRef.current?.getContext('2d');
@@ -129,11 +163,20 @@ onVideoFrame, logLevel = window.remotion_logLevel, loop, style, className, }) =>
 startsAt,
 unregisterRenderAsset,
 volumeProp,
+replaceWithOffthreadVideo,
+audioStreamIndex,
+disallowFallbackToOffthreadVideo,
 ]);
 const classNameValue = useMemo(() => {
 return [Internals.OBJECTFIT_CONTAIN_CLASS_NAME, className]
 .filter(Internals.truthy)
 .join(' ');
 }, [className]);
+if (replaceWithOffthreadVideo) {
+// TODO: Loop and other props
+return (_jsx(Internals.InnerOffthreadVideo, { src: src, playbackRate: playbackRate ?? 1, muted: muted ?? false, acceptableTimeShiftInSeconds: fallbackOffthreadVideoProps?.acceptableTimeShiftInSeconds, loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? 'repeat', delayRenderRetries: delayRenderRetries ?? undefined, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined, style: style, allowAmplificationDuringRender: true, transparent: fallbackOffthreadVideoProps?.transparent ?? false, toneMapped: fallbackOffthreadVideoProps?.toneMapped ?? true, audioStreamIndex: audioStreamIndex ?? 0, name: name, className: className, onVideoFrame: onVideoFrame, volume: volumeProp, id: id, onError: fallbackOffthreadVideoProps?.onError, toneFrequency: fallbackOffthreadVideoProps?.toneFrequency ?? 1,
+// these shouldn't matter during rendering / should not appear at all
+showInTimeline: false, crossOrigin: undefined, onAutoPlayError: () => undefined, pauseWhenBuffering: false, trimAfter: undefined, trimBefore: undefined, useWebAudioApi: false, startFrom: undefined, endAt: undefined, stack: stack, _remotionInternalNativeLoopPassed: false }));
+}
 return _jsx("canvas", { ref: canvasRef, style: style, className: classNameValue });
 };
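Taken together, the new .then(result) branches implement a fallback policy: when extraction reports 'unknown-container-format', 'cannot-decode' or 'network-error', VideoForRendering either fails the render (if disallowFallbackToOffthreadVideo is set) or swaps itself for <OffthreadVideo>. A condensed TypeScript sketch of that policy, with illustrative names rather than the package's exact code:

// Condensed sketch of the fallback policy applied above; names are illustrative.
type ExtractionFailure =
  | 'unknown-container-format'
  | 'cannot-decode'
  | 'network-error';

function handleExtractionFailure({
  failure,
  src,
  disallowFallbackToOffthreadVideo,
  fallBackToOffthreadVideo, // e.g. () => setReplaceWithOffthreadVideo(true)
}: {
  failure: ExtractionFailure;
  src: string;
  disallowFallbackToOffthreadVideo: boolean;
  fallBackToOffthreadVideo: () => void;
}): void {
  if (disallowFallbackToOffthreadVideo) {
    // Opting out of the fallback turns every extraction failure into a failed render.
    throw new Error(`Cannot render ${src} with <Video>: ${failure}`);
  }
  // Otherwise the component renders <OffthreadVideo> instead.
  fallBackToOffthreadVideo();
}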
package/dist/video/video.js
CHANGED
@@ -1,16 +1,12 @@
 import { jsx as _jsx } from "react/jsx-runtime";
-import { useCallback } from 'react';
 import { Internals, Sequence, useRemotionEnvironment } from 'remotion';
+import { VideoForPreview } from './video-for-preview';
 import { VideoForRendering } from './video-for-rendering';
-const { validateMediaTrimProps, resolveTrimProps, validateMediaProps
-
-// Should only destruct `trimBefore` and `trimAfter` from props,
-// rest gets drilled down
-const { trimBefore, trimAfter, name, pauseWhenBuffering, stack, showInTimeline, ...otherProps } = props;
+const { validateMediaTrimProps, resolveTrimProps, validateMediaProps } = Internals;
+const InnerVideo = ({ src, audioStreamIndex, className, delayRenderRetries, delayRenderTimeoutInMilliseconds, disallowFallbackToOffthreadVideo, fallbackOffthreadVideoProps, logLevel, loop, loopVolumeCurveBehavior, muted, name, onVideoFrame, playbackRate, style, trimAfter, trimBefore, volume, showInTimeline, stack, }) => {
 const environment = useRemotionEnvironment();
-
-
-throw new TypeError(`The \`<Video>\` tag requires a string for \`src\`, but got ${JSON.stringify(props.src)} instead.`);
+if (typeof src !== 'string') {
+throw new TypeError(`The \`<Video>\` tag requires a string for \`src\`, but got ${JSON.stringify(src)} instead.`);
 }
 validateMediaTrimProps({
 startFrom: undefined,
@@ -26,12 +22,15 @@ export const Video = (props) => {
 });
 if (typeof trimBeforeValue !== 'undefined' ||
 typeof trimAfterValue !== 'undefined') {
-return (_jsx(Sequence, { layout: "none", from: 0 - (trimBeforeValue ?? 0), showInTimeline: false, durationInFrames: trimAfterValue, name: name, children: _jsx(
+return (_jsx(Sequence, { layout: "none", from: 0 - (trimBeforeValue ?? 0), showInTimeline: false, durationInFrames: trimAfterValue, name: name, children: _jsx(InnerVideo, { audioStreamIndex: audioStreamIndex, className: className, delayRenderRetries: delayRenderRetries, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds, disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo, name: name, fallbackOffthreadVideoProps: fallbackOffthreadVideoProps, logLevel: logLevel, loop: loop, loopVolumeCurveBehavior: loopVolumeCurveBehavior, muted: muted, onVideoFrame: onVideoFrame, playbackRate: playbackRate, src: src, stack: stack, style: style, volume: volume, trimAfter: undefined, trimBefore: undefined, showInTimeline: showInTimeline }) }));
 }
-validateMediaProps(
+validateMediaProps({ playbackRate, volume }, 'Video');
 if (environment.isRendering) {
-return _jsx(VideoForRendering, {
+return (_jsx(VideoForRendering, { audioStreamIndex: audioStreamIndex ?? 0, className: className, delayRenderRetries: delayRenderRetries ?? null, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? null, disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo ?? false, name: name, fallbackOffthreadVideoProps: fallbackOffthreadVideoProps, logLevel: logLevel, loop: loop, loopVolumeCurveBehavior: loopVolumeCurveBehavior, muted: muted, onVideoFrame: onVideoFrame, playbackRate: playbackRate, src: src, stack: stack, style: style, volume: volume }));
 }
-
-return (_jsx(VideoForPreview, { _remotionInternalStack: stack ?? null, _remotionInternalNativeLoopPassed: false, onDuration: onDuration, onlyWarnForMediaSeekingError: true, pauseWhenBuffering: pauseWhenBuffering ?? false, showInTimeline: showInTimeline ?? true, onVideoFrame: onVideoFrame ?? null, ...propsForPreview }));
+return (_jsx(VideoForPreview, { className: className, name: name, logLevel: logLevel, loop: loop, loopVolumeCurveBehavior: loopVolumeCurveBehavior, muted: muted, onVideoFrame: onVideoFrame, playbackRate: playbackRate, src: src, style: style, volume: volume }));
 };
+export const Video = ({ src, audioStreamIndex, className, delayRenderRetries, delayRenderTimeoutInMilliseconds, disallowFallbackToOffthreadVideo, fallbackOffthreadVideoProps, logLevel, loop, loopVolumeCurveBehavior, muted, name, onVideoFrame, playbackRate, showInTimeline, style, trimAfter, trimBefore, volume, stack, }) => {
+return (_jsx(InnerVideo, { audioStreamIndex: audioStreamIndex ?? 0, className: className, delayRenderRetries: delayRenderRetries ?? null, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? null, disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo ?? false, fallbackOffthreadVideoProps: fallbackOffthreadVideoProps ?? {}, logLevel: logLevel ?? 'info', loop: loop ?? false, loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? 'repeat', muted: muted ?? false, name: name, onVideoFrame: onVideoFrame, playbackRate: playbackRate ?? 1, showInTimeline: showInTimeline ?? true, src: src, style: style ?? {}, trimAfter: trimAfter, trimBefore: trimBefore, volume: volume ?? 1, stack: stack }));
+};
+Internals.addSequenceStackTraces(Video);
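Assuming Video is re-exported from the package root (the index.js / index.d.ts entries in the file list suggest it is), usage with the props introduced here would look roughly like the hypothetical composition below; the src URL and prop values are placeholders:

import React from 'react';
import {Video} from '@remotion/media';

// Hypothetical composition demonstrating the props surfaced in this diff.
export const MyScene: React.FC = () => {
  return (
    <Video
      src="https://example.com/clip.mp4"
      loop
      playbackRate={1.5}
      audioStreamIndex={0}
      // Props forwarded to <OffthreadVideo> if the new pipeline cannot handle the file:
      fallbackOffthreadVideoProps={{toneMapped: true}}
    />
  );
};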
package/dist/video-extraction/extract-frame-via-broadcast-channel.d.ts
CHANGED
@@ -1,6 +1,6 @@
 import { type LogLevel } from 'remotion';
 import type { PcmS16AudioData } from '../convert-audiodata/convert-audiodata';
-export declare const extractFrameViaBroadcastChannel: ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, isClientSideRendering, loop, }: {
+export declare const extractFrameViaBroadcastChannel: ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, isClientSideRendering, loop, audioStreamIndex, }: {
 src: string;
 timeInSeconds: number;
 durationInSeconds: number;
@@ -10,8 +10,9 @@ export declare const extractFrameViaBroadcastChannel: ({ src, timeInSeconds, log
 includeVideo: boolean;
 isClientSideRendering: boolean;
 loop: boolean;
+audioStreamIndex: number;
 }) => Promise<{
 frame: ImageBitmap | VideoFrame | null;
 audio: PcmS16AudioData | null;
 durationInSeconds: number | null;
-}>;
+} | "cannot-decode" | "network-error" | "unknown-container-format">;
package/dist/video-extraction/extract-frame-via-broadcast-channel.js
CHANGED
@@ -5,7 +5,7 @@ if (window.remotion_broadcastChannel && window.remotion_isMainTab) {
 const data = event.data;
 if (data.type === 'request') {
 try {
-const
+const result = await extractFrameAndAudio({
 src: data.src,
 timeInSeconds: data.timeInSeconds,
 logLevel: data.logLevel,
@@ -14,7 +14,33 @@ if (window.remotion_broadcastChannel && window.remotion_isMainTab) {
 includeAudio: data.includeAudio,
 includeVideo: data.includeVideo,
 loop: data.loop,
+audioStreamIndex: data.audioStreamIndex,
 });
+if (result === 'cannot-decode') {
+const cannotDecodeResponse = {
+type: 'response-cannot-decode',
+id: data.id,
+};
+window.remotion_broadcastChannel.postMessage(cannotDecodeResponse);
+return;
+}
+if (result === 'network-error') {
+const networkErrorResponse = {
+type: 'response-network-error',
+id: data.id,
+};
+window.remotion_broadcastChannel.postMessage(networkErrorResponse);
+return;
+}
+if (result === 'unknown-container-format') {
+const unknownContainerFormatResponse = {
+type: 'response-unknown-container-format',
+id: data.id,
+};
+window.remotion_broadcastChannel.postMessage(unknownContainerFormatResponse);
+return;
+}
+const { frame, audio, durationInSeconds } = result;
 const videoFrame = frame;
 const imageBitmap = videoFrame
 ? await createImageBitmap(videoFrame)
@@ -46,7 +72,7 @@ if (window.remotion_broadcastChannel && window.remotion_isMainTab) {
 }
 });
 }
-export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, isClientSideRendering, loop, }) => {
+export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, isClientSideRendering, loop, audioStreamIndex, }) => {
 if (isClientSideRendering || window.remotion_isMainTab) {
 return extractFrameAndAudio({
 logLevel,
@@ -57,6 +83,7 @@ export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel,
 includeAudio,
 includeVideo,
 loop,
+audioStreamIndex,
 });
 }
 const requestId = crypto.randomUUID();
@@ -66,7 +93,10 @@ export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel,
 if (!data) {
 return;
 }
-if (data.
+if (data.id !== requestId) {
+return;
+}
+if (data.type === 'response-success') {
 resolve({
 frame: data.frame ? data.frame : null,
 audio: data.audio ? data.audio : null,
@@ -75,11 +105,29 @@ export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel,
 : null,
 });
 window.remotion_broadcastChannel.removeEventListener('message', onMessage);
+return;
 }
-
+if (data.type === 'response-error') {
 reject(data.errorStack);
 window.remotion_broadcastChannel.removeEventListener('message', onMessage);
+return;
+}
+if (data.type === 'response-cannot-decode') {
+resolve('cannot-decode');
+window.remotion_broadcastChannel.removeEventListener('message', onMessage);
+return;
+}
+if (data.type === 'response-network-error') {
+resolve('network-error');
+window.remotion_broadcastChannel.removeEventListener('message', onMessage);
+return;
+}
+if (data.type === 'response-unknown-container-format') {
+resolve('unknown-container-format');
+window.remotion_broadcastChannel.removeEventListener('message', onMessage);
+return;
 }
+throw new Error(`Invalid message: ${JSON.stringify(data)}`);
 };
 window.remotion_broadcastChannel.addEventListener('message', onMessage);
 });
@@ -94,6 +142,7 @@ export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel,
 includeAudio,
 includeVideo,
 loop,
+audioStreamIndex,
 };
 window.remotion_broadcastChannel.postMessage(request);
 let timeoutId;
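The main-tab handshake above is a request/response exchange over a BroadcastChannel, correlated by a random request id so that concurrent extractions do not consume each other's responses. A stripped-down TypeScript sketch of that pattern, with a generic channel name and message shapes that are assumptions rather than the package's actual protocol:

// Generic request/response correlation over a BroadcastChannel.
// The channel name and message shapes below are illustrative assumptions.
const channel = new BroadcastChannel('example-extraction');

function sendRequest<T>(payload: unknown): Promise<T> {
  const id = crypto.randomUUID();
  return new Promise<T>((resolve, reject) => {
    const onMessage = (event: MessageEvent) => {
      const data = event.data as
        | {id: string; type: 'response-success'; result: T}
        | {id: string; type: 'response-error'; errorStack: string};
      if (data.id !== id) {
        return; // response belongs to another in-flight request
      }
      channel.removeEventListener('message', onMessage);
      if (data.type === 'response-success') {
        resolve(data.result);
      } else {
        reject(new Error(data.errorStack));
      }
    };
    channel.addEventListener('message', onMessage);
    channel.postMessage({id, type: 'request', payload});
  });
}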
package/dist/video-extraction/extract-frame.d.ts
CHANGED
@@ -1,9 +1,8 @@
+import type { VideoSample } from 'mediabunny';
 import { type LogLevel } from 'remotion';
-import { type GetSink } from './get-frames-since-keyframe';
-export declare const sinkPromises: Record<string, Promise<GetSink>>;
 export declare const extractFrame: ({ src, timeInSeconds: unloopedTimeinSeconds, logLevel, loop, }: {
 src: string;
 timeInSeconds: number;
 logLevel: LogLevel;
 loop: boolean;
-}) => Promise<
+}) => Promise<VideoSample | "cannot-decode" | "unknown-container-format" | null>;
package/dist/video-extraction/extract-frame.js
CHANGED
@@ -1,16 +1,19 @@
 import { keyframeManager } from '../caches';
-import {
-export const sinkPromises = {};
+import { getSinkWeak } from '../get-sink-weak';
 export const extractFrame = async ({ src, timeInSeconds: unloopedTimeinSeconds, logLevel, loop, }) => {
-
-
-
-const { video, getDuration } = await sinkPromises[src];
-if (video === null) {
+const sink = await getSinkWeak(src, logLevel);
+const video = await sink.getVideo();
+if (video === 'no-video-track') {
 throw new Error(`No video track found for ${src}`);
 }
+if (video === 'cannot-decode') {
+return 'cannot-decode';
+}
+if (video === 'unknown-container-format') {
+return 'unknown-container-format';
+}
 const timeInSeconds = loop
-? unloopedTimeinSeconds % (await getDuration())
+? unloopedTimeinSeconds % (await sink.getDuration())
 : unloopedTimeinSeconds;
 const keyframeBank = await keyframeManager.requestKeyframeBank({
 packetSink: video.packetSink,
package/dist/video-extraction/get-frames-since-keyframe.d.ts
CHANGED
@@ -1,23 +1,28 @@
 import type { EncodedPacket } from 'mediabunny';
 import { AudioSampleSink, EncodedPacketSink, VideoSampleSink } from 'mediabunny';
-
-
-
-
-
-
-
-
+type VideoSinks = {
+sampleSink: VideoSampleSink;
+packetSink: EncodedPacketSink;
+};
+type AudioSinks = {
+sampleSink: AudioSampleSink;
+};
+export type AudioSinkResult = AudioSinks | 'no-audio-track' | 'cannot-decode-audio' | 'unknown-container-format';
+export type VideoSinkResult = VideoSinks | 'no-video-track' | 'cannot-decode' | 'unknown-container-format';
+export declare const getSinks: (src: string) => Promise<WeakRef<{
+getVideo: () => Promise<VideoSinkResult>;
+getAudio: (index: number) => Promise<AudioSinkResult>;
 actualMatroskaTimestamps: {
 observeTimestamp: (startTime: number) => void;
 getRealTimestamp: (observedTimestamp: number) => number | null;
 };
 isMatroska: boolean;
 getDuration: () => Promise<number>;
-}
+}>>;
 export type GetSink = Awaited<ReturnType<typeof getSinks>>;
 export declare const getFramesSinceKeyframe: ({ packetSink, videoSampleSink, startPacket, }: {
 packetSink: EncodedPacketSink;
 videoSampleSink: VideoSampleSink;
 startPacket: EncodedPacket;
 }) => Promise<import("./keyframe-bank").KeyframeBank>;
+export {};
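getSinks now hands out its sinks behind a WeakRef, which pairs with the new get-sink-weak module in the file list: cached sinks can be garbage-collected while idle and rebuilt on the next request. A minimal TypeScript sketch of that caching pattern; the cache shape and helper name are assumptions, since the helper's body is not shown in this diff:

// Minimal WeakRef-based cache sketch; createSinks stands in for the real
// getSinks(src). The cache shape is an assumption, not the package's code.
const sinkCache = new Map<string, WeakRef<object>>();

async function getOrCreateSinks<T extends object>(
  src: string,
  createSinks: (src: string) => Promise<T>,
): Promise<T> {
  const alive = sinkCache.get(src)?.deref() as T | undefined;
  if (alive) {
    return alive; // the previously created sinks have not been collected yet
  }
  const fresh = await createSinks(src); // rebuild after garbage collection
  sinkCache.set(src, new WeakRef(fresh));
  return fresh;
}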