@remotion/media 4.0.356 → 4.0.358
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/audio/audio-for-preview.d.ts +30 -0
- package/dist/audio/audio-for-preview.js +229 -0
- package/dist/audio/audio-for-rendering.js +35 -19
- package/dist/audio/audio.js +7 -49
- package/dist/audio/props.d.ts +8 -14
- package/dist/audio-extraction/audio-cache.d.ts +1 -1
- package/dist/audio-extraction/audio-cache.js +5 -1
- package/dist/audio-extraction/audio-iterator.d.ts +4 -1
- package/dist/audio-extraction/audio-iterator.js +22 -10
- package/dist/audio-extraction/audio-manager.d.ts +8 -37
- package/dist/audio-extraction/audio-manager.js +35 -8
- package/dist/audio-extraction/extract-audio.d.ts +9 -2
- package/dist/audio-extraction/extract-audio.js +29 -15
- package/dist/caches.d.ts +9 -44
- package/dist/convert-audiodata/combine-audiodata.js +2 -23
- package/dist/convert-audiodata/convert-audiodata.d.ts +1 -5
- package/dist/convert-audiodata/convert-audiodata.js +16 -24
- package/dist/esm/index.mjs +2864 -2173
- package/dist/extract-frame-and-audio.d.ts +6 -7
- package/dist/extract-frame-and-audio.js +28 -19
- package/dist/{get-sink-weak.d.ts → get-sink.d.ts} +1 -1
- package/dist/get-sink.js +15 -0
- package/dist/get-time-in-seconds.d.ts +11 -0
- package/dist/get-time-in-seconds.js +25 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +1 -0
- package/dist/is-network-error.d.ts +6 -0
- package/dist/is-network-error.js +17 -0
- package/dist/render-timestamp-range.d.ts +1 -0
- package/dist/render-timestamp-range.js +9 -0
- package/dist/show-in-timeline.d.ts +8 -0
- package/dist/show-in-timeline.js +31 -0
- package/dist/use-media-in-timeline.d.ts +19 -0
- package/dist/use-media-in-timeline.js +103 -0
- package/dist/video/media-player.d.ts +34 -7
- package/dist/video/media-player.js +164 -63
- package/dist/video/props.d.ts +1 -0
- package/dist/video/video-for-preview.d.ts +17 -9
- package/dist/video/video-for-preview.js +138 -92
- package/dist/video/video-for-rendering.d.ts +3 -0
- package/dist/video/video-for-rendering.js +58 -25
- package/dist/video/video.js +6 -10
- package/dist/video-extraction/extract-frame-via-broadcast-channel.d.ts +18 -6
- package/dist/video-extraction/extract-frame-via-broadcast-channel.js +21 -7
- package/dist/video-extraction/extract-frame.d.ts +20 -2
- package/dist/video-extraction/extract-frame.js +41 -9
- package/dist/video-extraction/get-frames-since-keyframe.d.ts +5 -3
- package/dist/video-extraction/get-frames-since-keyframe.js +7 -4
- package/dist/video-extraction/keyframe-bank.d.ts +3 -2
- package/dist/video-extraction/keyframe-bank.js +32 -12
- package/dist/video-extraction/keyframe-manager.d.ts +3 -8
- package/dist/video-extraction/keyframe-manager.js +25 -10
- package/package.json +54 -54
- package/LICENSE.md +0 -49
- package/dist/convert-audiodata/apply-tonefrequency.d.ts +0 -2
- package/dist/convert-audiodata/apply-tonefrequency.js +0 -44
- package/dist/convert-audiodata/wsola.d.ts +0 -13
- package/dist/convert-audiodata/wsola.js +0 -197
- package/dist/get-sink-weak.js +0 -23
- package/dist/log.d.ts +0 -10
- package/dist/log.js +0 -33
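
The bulk of this release sits in the `<Video>` preview and rendering paths shown in the diffs below: instead of failing outright, both paths now detect unknown container formats, undecodable streams, missing tracks, and network errors, and fall back to the existing `<OffthreadVideo>`/`<Html5Video>` components unless `disallowFallbackToOffthreadVideo` is set. A minimal usage sketch, assuming `<Video>` is the component this package exports and using a placeholder URL; the prop names are taken from the `package/dist/video/video.js` diff below:

```tsx
import React from 'react';
import {Video} from '@remotion/media'; // assumed export of this package

export const MyComposition: React.FC = () => {
  return (
    <Video
      src="https://example.com/video.mp4" // placeholder URL
      loop
      // Set to true to fail hard instead of silently switching to the
      // <OffthreadVideo>/<Html5Video> fallback on container/decode/network errors:
      disallowFallbackToOffthreadVideo={false}
      // Forwarded to the fallback player when a fallback does happen:
      fallbackOffthreadVideoProps={{toneMapped: true}}
      toneFrequency={1} // 1 = default pitch; now forwarded to the render path
    />
  );
};
```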
package/dist/video/video-for-preview.js
CHANGED

@@ -1,50 +1,67 @@
 import { jsx as _jsx } from "react/jsx-runtime";
-import { ALL_FORMATS, Input, UrlSource } from 'mediabunny';
 import { useContext, useEffect, useMemo, useRef, useState } from 'react';
-import {
+import { Html5Video, Internals, useBufferState, useCurrentFrame } from 'remotion';
+import { useLoopDisplay } from '../show-in-timeline';
+import { useMediaInTimeline } from '../use-media-in-timeline';
 import { MediaPlayer } from './media-player';
-const { useUnsafeVideoConfig, Timeline, SharedAudioContext, useMediaMutedState, useMediaVolumeState, useFrameForVolumeProp, evaluateVolume, warnAboutTooHighVolume, usePreload, } = Internals;
-const
-
-if (typeof endAt !== 'undefined') {
-duration = endAt;
-}
-if (typeof startFrom !== 'undefined') {
-duration -= startFrom;
-}
-const actualDuration = duration / playbackRate;
-return Math.floor(actualDuration);
-};
-const NewVideoForPreview = ({ src, style, playbackRate, logLevel, className, muted, volume, loopVolumeCurveBehavior, onVideoFrame, }) => {
+const { useUnsafeVideoConfig, Timeline, SharedAudioContext, useMediaMutedState, useMediaVolumeState, useFrameForVolumeProp, evaluateVolume, warnAboutTooHighVolume, usePreload, SequenceContext, SequenceVisibilityToggleContext, } = Internals;
+export const VideoForPreview = ({ src: unpreloadedSrc, style, playbackRate, logLevel, className, muted, volume, loopVolumeCurveBehavior, onVideoFrame, showInTimeline, loop, name, trimAfter, trimBefore, stack, disallowFallbackToOffthreadVideo, fallbackOffthreadVideoProps, audioStreamIndex, }) => {
+const src = usePreload(unpreloadedSrc);
 const canvasRef = useRef(null);
 const videoConfig = useUnsafeVideoConfig();
 const frame = useCurrentFrame();
 const mediaPlayerRef = useRef(null);
 const [mediaPlayerReady, setMediaPlayerReady] = useState(false);
+const [shouldFallbackToNativeVideo, setShouldFallbackToNativeVideo] = useState(false);
 const [playing] = Timeline.usePlayingState();
 const timelineContext = useContext(Timeline.TimelineContext);
 const globalPlaybackRate = timelineContext.playbackRate;
 const sharedAudioContext = useContext(SharedAudioContext);
 const buffer = useBufferState();
-const delayHandleRef = useRef(null);
 const [mediaMuted] = useMediaMutedState();
 const [mediaVolume] = useMediaVolumeState();
-const
+const [mediaDurationInSeconds, setMediaDurationInSeconds] = useState(null);
+const { hidden } = useContext(SequenceVisibilityToggleContext);
+const volumePropFrame = useFrameForVolumeProp(loopVolumeCurveBehavior);
 const userPreferredVolume = evaluateVolume({
 frame: volumePropFrame,
 volume,
 mediaVolume,
 });
 warnAboutTooHighVolume(userPreferredVolume);
+const parentSequence = useContext(SequenceContext);
+const loopDisplay = useLoopDisplay({
+loop,
+mediaDurationInSeconds,
+playbackRate,
+trimAfter,
+trimBefore,
+});
+const { id: timelineId } = useMediaInTimeline({
+volume,
+mediaType: 'video',
+src,
+playbackRate,
+displayName: name ?? null,
+stack,
+showInTimeline,
+premountDisplay: parentSequence?.premountDisplay ?? null,
+postmountDisplay: parentSequence?.postmountDisplay ?? null,
+loopDisplay,
+mediaVolume,
+trimAfter,
+trimBefore,
+});
+const isSequenceHidden = hidden[timelineId] ?? false;
 if (!videoConfig) {
 throw new Error('No video config found');
 }
 if (!src) {
 throw new TypeError('No `src` was passed to <NewVideoForPreview>.');
 }
-const
-const
-
+const currentTime = frame / videoConfig.fps;
+const currentTimeRef = useRef(currentTime);
+currentTimeRef.current = currentTime;
 const preloadedSrc = usePreload(src);
 useEffect(() => {
 if (!canvasRef.current)

@@ -59,34 +76,84 @@ const NewVideoForPreview = ({ src, style, playbackRate, logLevel, className, mut
 src: preloadedSrc,
 logLevel,
 sharedAudioContext: sharedAudioContext.audioContext,
+loop,
+trimAfter,
+trimBefore,
+fps: videoConfig.fps,
+playbackRate,
+audioStreamIndex,
 });
 mediaPlayerRef.current = player;
 player
-.initialize(
-.then(() => {
-
-
+.initialize(currentTimeRef.current)
+.then((result) => {
+if (result.type === 'unknown-container-format') {
+if (disallowFallbackToOffthreadVideo) {
+throw new Error(`Unknown container format ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
+}
+Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Unknown container format for ${preloadedSrc} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <OffthreadVideo>`);
+setShouldFallbackToNativeVideo(true);
+return;
+}
+if (result.type === 'network-error') {
+if (disallowFallbackToOffthreadVideo) {
+throw new Error(`Network error fetching ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
+}
+Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Network error fetching ${preloadedSrc}, falling back to <OffthreadVideo>`);
+setShouldFallbackToNativeVideo(true);
+return;
+}
+if (result.type === 'cannot-decode') {
+if (disallowFallbackToOffthreadVideo) {
+throw new Error(`Cannot decode ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
+}
+Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Cannot decode ${preloadedSrc}, falling back to <OffthreadVideo>`);
+setShouldFallbackToNativeVideo(true);
+return;
+}
+if (result.type === 'no-tracks') {
+if (disallowFallbackToOffthreadVideo) {
+throw new Error(`No video or audio tracks found for ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
+}
+Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `No video or audio tracks found for ${preloadedSrc}, falling back to <OffthreadVideo>`);
+setShouldFallbackToNativeVideo(true);
+return;
+}
+if (result.type === 'success') {
+setMediaPlayerReady(true);
+setMediaDurationInSeconds(result.durationInSeconds);
+}
 })
 .catch((error) => {
 Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[NewVideoForPreview] Failed to initialize MediaPlayer', error);
+setShouldFallbackToNativeVideo(true);
 });
 }
 catch (error) {
 Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[NewVideoForPreview] MediaPlayer initialization failed', error);
+setShouldFallbackToNativeVideo(true);
 }
 return () => {
-if (delayHandleRef.current) {
-delayHandleRef.current.unblock();
-delayHandleRef.current = null;
-}
 if (mediaPlayerRef.current) {
 Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[NewVideoForPreview] Disposing MediaPlayer`);
 mediaPlayerRef.current.dispose();
 mediaPlayerRef.current = null;
 }
 setMediaPlayerReady(false);
+setShouldFallbackToNativeVideo(false);
 };
-}, [
+}, [
+preloadedSrc,
+logLevel,
+sharedAudioContext,
+loop,
+trimAfter,
+trimBefore,
+videoConfig.fps,
+playbackRate,
+disallowFallbackToOffthreadVideo,
+audioStreamIndex,
+]);
 const classNameValue = useMemo(() => {
 return [Internals.OBJECTFIT_CONTAIN_CLASS_NAME, className]
 .filter(Internals.truthy)

@@ -116,19 +183,27 @@ const NewVideoForPreview = ({ src, style, playbackRate, logLevel, className, mut
 const mediaPlayer = mediaPlayerRef.current;
 if (!mediaPlayer || !mediaPlayerReady)
 return;
-
-
-
+let currentBlock = null;
+const unsubscribe = mediaPlayer.onBufferingChange((newBufferingState) => {
+if (newBufferingState && !currentBlock) {
+currentBlock = buffer.delayPlayback();
 Internals.Log.trace({ logLevel, tag: '@remotion/media' }, '[NewVideoForPreview] MediaPlayer buffering - blocking Remotion playback');
 }
-else if (!newBufferingState &&
-
-
+else if (!newBufferingState && currentBlock) {
+currentBlock.unblock();
+currentBlock = null;
 Internals.Log.trace({ logLevel, tag: '@remotion/media' }, '[NewVideoForPreview] MediaPlayer unbuffering - unblocking Remotion playback');
 }
 });
+return () => {
+unsubscribe();
+if (currentBlock) {
+currentBlock.unblock();
+currentBlock = null;
+}
+};
 }, [mediaPlayerReady, buffer, logLevel]);
-const effectiveMuted = muted || mediaMuted || userPreferredVolume <= 0;
+const effectiveMuted = isSequenceHidden || muted || mediaMuted || userPreferredVolume <= 0;
 useEffect(() => {
 const mediaPlayer = mediaPlayerRef.current;
 if (!mediaPlayer || !mediaPlayerReady)

@@ -141,78 +216,49 @@ const NewVideoForPreview = ({ src, style, playbackRate, logLevel, className, mut
 return;
 }
 mediaPlayer.setVolume(userPreferredVolume);
-}, [userPreferredVolume, mediaPlayerReady
+}, [userPreferredVolume, mediaPlayerReady]);
 const effectivePlaybackRate = useMemo(() => playbackRate * globalPlaybackRate, [playbackRate, globalPlaybackRate]);
 useEffect(() => {
 const mediaPlayer = mediaPlayerRef.current;
 if (!mediaPlayer || !mediaPlayerReady) {
 return;
 }
-mediaPlayer.setPlaybackRate(effectivePlaybackRate)
-
-});
-}, [effectivePlaybackRate, mediaPlayerReady, logLevel]);
+mediaPlayer.setPlaybackRate(effectivePlaybackRate);
+}, [effectivePlaybackRate, mediaPlayerReady]);
 useEffect(() => {
 const mediaPlayer = mediaPlayerRef.current;
 if (!mediaPlayer || !mediaPlayerReady) {
 return;
 }
-
-
-
-
-
-
-const VideoForPreviewWithDuration = ({ className, durationInSeconds, logLevel, loopVolumeCurveBehavior, muted, onVideoFrame, playbackRate, src, style, volume, loop, name, trimAfter, trimBefore, }) => {
-const { fps } = useVideoConfig();
-if (loop) {
-if (!Number.isFinite(durationInSeconds) || durationInSeconds === null) {
-return (_jsx(VideoForPreviewWithDuration, { loop: false, className: className, durationInSeconds: durationInSeconds, logLevel: logLevel, loopVolumeCurveBehavior: loopVolumeCurveBehavior, muted: muted, onVideoFrame: onVideoFrame, playbackRate: playbackRate, src: src, style: style, volume: volume, name: name, trimAfter: trimAfter, trimBefore: trimBefore }));
+mediaPlayer.setLoop(loop);
+}, [loop, mediaPlayerReady]);
+useEffect(() => {
+const mediaPlayer = mediaPlayerRef.current;
+if (!mediaPlayer || !mediaPlayerReady) {
+return;
 }
-
-
-endAt: trimAfter,
-mediaDuration,
-playbackRate: playbackRate ?? 1,
-startFrom: trimBefore,
-}), layout: "none", name: name, children: _jsx(VideoForPreviewWithDuration, { loop: false, className: className, durationInSeconds: durationInSeconds, logLevel: logLevel, loopVolumeCurveBehavior: loopVolumeCurveBehavior, muted: muted, onVideoFrame: onVideoFrame, playbackRate: playbackRate, src: src, style: style, volume: volume, name: name, trimAfter: trimAfter, trimBefore: trimBefore }) }));
-}
-return (_jsx(NewVideoForPreview, { src: src, style: style, playbackRate: playbackRate, logLevel: logLevel, muted: muted, volume: volume, loopVolumeCurveBehavior: loopVolumeCurveBehavior, onVideoFrame: onVideoFrame, className: className }));
-};
-export const VideoForPreview = ({ className, loop, src, logLevel, muted, name, volume, loopVolumeCurveBehavior, onVideoFrame, playbackRate, style, }) => {
-const preloadedSrc = usePreload(src);
-const [durationInSeconds, setDurationInSeconds] = useState(null);
+mediaPlayer.setFps(videoConfig.fps);
+}, [videoConfig.fps, mediaPlayerReady]);
 useEffect(() => {
-
+const mediaPlayer = mediaPlayerRef.current;
+if (!mediaPlayer || !mediaPlayerReady || !onVideoFrame) {
 return;
 }
-
-const computeDuration = async () => {
-const urlSource = new UrlSource(preloadedSrc);
-const input = new Input({
-source: urlSource,
-formats: ALL_FORMATS,
-});
-try {
-const duration = await input.computeDuration();
-if (!cancelled) {
-setDurationInSeconds(duration);
-}
-}
-catch (error) {
-Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[VideoForPreview] Failed to compute duration', error);
-}
-finally {
-input.dispose();
-}
-};
-computeDuration();
+const unsubscribe = mediaPlayer.onVideoFrame(onVideoFrame);
 return () => {
-
+unsubscribe();
+};
+}, [onVideoFrame, mediaPlayerReady]);
+const actualStyle = useMemo(() => {
+return {
+...style,
+opacity: isSequenceHidden ? 0 : (style?.opacity ?? 1),
 };
-}, [
-if (
-
+}, [isSequenceHidden, style]);
+if (shouldFallbackToNativeVideo && !disallowFallbackToOffthreadVideo) {
+// <Video> will fallback to <VideoForPreview> anyway
+// not using <OffthreadVideo> because it does not support looping
+return (_jsx(Html5Video, { src: src, style: actualStyle, className: className, muted: muted, volume: volume, trimAfter: trimAfter, trimBefore: trimBefore, playbackRate: playbackRate, loopVolumeCurveBehavior: loopVolumeCurveBehavior, name: name, loop: loop, showInTimeline: showInTimeline, stack: stack ?? undefined, ...fallbackOffthreadVideoProps }));
 }
-return (_jsx(
+return (_jsx("canvas", { ref: canvasRef, width: videoConfig.width, height: videoConfig.height, style: actualStyle, className: classNameValue }));
 };

package/dist/video/video-for-rendering.d.ts
CHANGED

@@ -19,6 +19,9 @@ type InnerVideoProps = {
 readonly audioStreamIndex: number;
 readonly disallowFallbackToOffthreadVideo: boolean;
 readonly stack: string | undefined;
+readonly toneFrequency: number;
+readonly trimBeforeValue: number | undefined;
+readonly trimAfterValue: number | undefined;
 };
 export declare const VideoForRendering: React.FC<InnerVideoProps>;
 export {};

package/dist/video/video-for-rendering.js
CHANGED

@@ -1,10 +1,12 @@
 import { jsx as _jsx } from "react/jsx-runtime";
 import { useContext, useLayoutEffect, useMemo, useRef, useState, } from 'react';
-import { cancelRender, Internals, useCurrentFrame, useDelayRender, useRemotionEnvironment, useVideoConfig, } from 'remotion';
+import { cancelRender, Internals, Loop, random, useCurrentFrame, useDelayRender, useRemotionEnvironment, useVideoConfig, } from 'remotion';
+import { calculateMediaDuration } from '../../../core/src/calculate-media-duration';
 import { applyVolume } from '../convert-audiodata/apply-volume';
+import { TARGET_SAMPLE_RATE } from '../convert-audiodata/resample-audiodata';
 import { frameForVolumeProp } from '../looped-frame';
 import { extractFrameViaBroadcastChannel } from '../video-extraction/extract-frame-via-broadcast-channel';
-export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted, loopVolumeCurveBehavior, delayRenderRetries, delayRenderTimeoutInMilliseconds, onVideoFrame, logLevel, loop, style, className, fallbackOffthreadVideoProps, audioStreamIndex, name, disallowFallbackToOffthreadVideo, stack, }) => {
+export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted, loopVolumeCurveBehavior, delayRenderRetries, delayRenderTimeoutInMilliseconds, onVideoFrame, logLevel, loop, style, className, fallbackOffthreadVideoProps, audioStreamIndex, name, disallowFallbackToOffthreadVideo, stack, toneFrequency, trimAfterValue, trimBeforeValue, }) => {
 if (!src) {
 throw new TypeError('No `src` was passed to <Video>.');
 }

@@ -13,7 +15,15 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
 const { fps } = useVideoConfig();
 const { registerRenderAsset, unregisterRenderAsset } = useContext(Internals.RenderAssetManager);
 const startsAt = Internals.useMediaStartsAt();
-const
+const sequenceContext = useContext(Internals.SequenceContext);
+// Generate a string that's as unique as possible for this asset
+// but at the same time the same on all threads
+const id = useMemo(() => `media-video-${random(src)}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [
+src,
+sequenceContext?.cumulatedFrom,
+sequenceContext?.relativeFrom,
+sequenceContext?.durationInFrames,
+]);
 const environment = useRemotionEnvironment();
 const { delayRender, continueRender } = useDelayRender();
 const canvasRef = useRef(null);

@@ -25,10 +35,9 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
 if (replaceWithOffthreadVideo) {
 return;
 }
-const
-const
-const
-const newHandle = delayRender(`Extracting frame number ${frame}`, {
+const timestamp = frame / fps;
+const durationInSeconds = 1 / fps;
+const newHandle = delayRender(`Extracting frame at time ${timestamp}`, {
 retries: delayRenderRetries ?? undefined,
 timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined,
 });

@@ -45,43 +54,48 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
 src,
 timeInSeconds: timestamp,
 durationInSeconds,
-playbackRate
-logLevel
+playbackRate,
+logLevel,
 includeAudio: shouldRenderAudio,
 includeVideo: window.remotion_videoEnabled,
 isClientSideRendering: environment.isClientSideRendering,
-loop
-audioStreamIndex
+loop,
+audioStreamIndex,
+trimAfter: trimAfterValue,
+trimBefore: trimBeforeValue,
+fps,
 })
 .then((result) => {
-if (result === 'unknown-container-format') {
+if (result.type === 'unknown-container-format') {
 if (disallowFallbackToOffthreadVideo) {
 cancelRender(new Error(`Unknown container format ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
 }
 if (window.remotion_isMainTab) {
 Internals.Log.info({ logLevel, tag: '@remotion/media' }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <OffthreadVideo>`);
 }
-setReplaceWithOffthreadVideo(
+setReplaceWithOffthreadVideo({ durationInSeconds: null });
 return;
 }
-if (result === 'cannot-decode') {
+if (result.type === 'cannot-decode') {
 if (disallowFallbackToOffthreadVideo) {
 cancelRender(new Error(`Cannot decode ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
 }
 if (window.remotion_isMainTab) {
 Internals.Log.info({ logLevel, tag: '@remotion/media' }, `Cannot decode ${src}, falling back to <OffthreadVideo>`);
 }
-setReplaceWithOffthreadVideo(
+setReplaceWithOffthreadVideo({
+durationInSeconds: result.durationInSeconds,
+});
 return;
 }
-if (result === 'network-error') {
+if (result.type === 'network-error') {
 if (disallowFallbackToOffthreadVideo) {
 cancelRender(new Error(`Cannot decode ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
 }
 if (window.remotion_isMainTab) {
 Internals.Log.info({ logLevel, tag: '@remotion/media' }, `Network error fetching ${src}, falling back to <OffthreadVideo>`);
 }
-setReplaceWithOffthreadVideo(
+setReplaceWithOffthreadVideo({ durationInSeconds: null });
 return;
 }
 const { frame: imageBitmap, audio, durationInSeconds: assetDurationInSeconds, } = result;

@@ -104,11 +118,17 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
 imageBitmap.close();
 }
 else if (window.remotion_videoEnabled) {
-
+// In the case of https://discord.com/channels/809501355504959528/809501355504959531/1424400511070765086
+// A video that only starts at time 0.033sec
+// we shall not crash here but clear the canvas
+const context = canvasRef.current?.getContext('2d');
+if (context) {
+context.clearRect(0, 0, context.canvas.width, context.canvas.height);
+}
 }
 const volumePropsFrame = frameForVolumeProp({
-behavior: loopVolumeCurveBehavior
-loop
+behavior: loopVolumeCurveBehavior,
+loop,
 assetDurationInSeconds: assetDurationInSeconds ?? 0,
 fps,
 frame,

@@ -126,11 +146,10 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
 type: 'inline-audio',
 id,
 audio: Array.from(audio.data),
-sampleRate: audio.sampleRate,
-numberOfChannels: audio.numberOfChannels,
 frame: absoluteFrame,
 timestamp: audio.timestamp,
-duration: (audio.numberOfFrames /
+duration: (audio.numberOfFrames / TARGET_SAMPLE_RATE) * 1000000,
+toneFrequency,
 });
 }
 continueRender(newHandle);

@@ -166,6 +185,9 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
 replaceWithOffthreadVideo,
 audioStreamIndex,
 disallowFallbackToOffthreadVideo,
+toneFrequency,
+trimAfterValue,
+trimBeforeValue,
 ]);
 const classNameValue = useMemo(() => {
 return [Internals.OBJECTFIT_CONTAIN_CLASS_NAME, className]

@@ -173,10 +195,21 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
 .join(' ');
 }, [className]);
 if (replaceWithOffthreadVideo) {
-
-return (_jsx(Internals.InnerOffthreadVideo, { src: src, playbackRate: playbackRate ?? 1, muted: muted ?? false, acceptableTimeShiftInSeconds: fallbackOffthreadVideoProps?.acceptableTimeShiftInSeconds, loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? 'repeat', delayRenderRetries: delayRenderRetries ?? undefined, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined, style: style, allowAmplificationDuringRender: true, transparent: fallbackOffthreadVideoProps?.transparent ?? false, toneMapped: fallbackOffthreadVideoProps?.toneMapped ?? true, audioStreamIndex: audioStreamIndex ?? 0, name: name, className: className, onVideoFrame: onVideoFrame, volume: volumeProp, id: id, onError: fallbackOffthreadVideoProps?.onError, toneFrequency: fallbackOffthreadVideoProps?.toneFrequency ?? 1,
+const fallback = (_jsx(Internals.InnerOffthreadVideo, { src: src, playbackRate: playbackRate ?? 1, muted: muted ?? false, acceptableTimeShiftInSeconds: fallbackOffthreadVideoProps?.acceptableTimeShiftInSeconds, loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? 'repeat', delayRenderRetries: delayRenderRetries ?? undefined, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined, style: style, allowAmplificationDuringRender: true, transparent: fallbackOffthreadVideoProps?.transparent ?? false, toneMapped: fallbackOffthreadVideoProps?.toneMapped ?? true, audioStreamIndex: audioStreamIndex ?? 0, name: name, className: className, onVideoFrame: onVideoFrame, volume: volumeProp, id: id, onError: fallbackOffthreadVideoProps?.onError, toneFrequency: fallbackOffthreadVideoProps?.toneFrequency ?? 1,
 // these shouldn't matter during rendering / should not appear at all
 showInTimeline: false, crossOrigin: undefined, onAutoPlayError: () => undefined, pauseWhenBuffering: false, trimAfter: undefined, trimBefore: undefined, useWebAudioApi: false, startFrom: undefined, endAt: undefined, stack: stack, _remotionInternalNativeLoopPassed: false }));
+if (loop) {
+if (!replaceWithOffthreadVideo.durationInSeconds) {
+cancelRender(new Error(`Cannot render video ${src}: @remotion/media was unable to render, and fell back to <OffthreadVideo>. Also, "loop" was set, but <OffthreadVideo> does not support looping and @remotion/media could also not determine the duration of the video.`));
+}
+return (_jsx(Loop, { layout: "none", durationInFrames: calculateMediaDuration({
+trimAfter: trimAfterValue,
+mediaDurationInFrames: replaceWithOffthreadVideo.durationInSeconds * fps,
+playbackRate,
+trimBefore: trimBeforeValue,
+}), children: fallback }));
+}
+return fallback;
 }
 return _jsx("canvas", { ref: canvasRef, style: style, className: classNameValue });
 };
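
When the render path falls back to `<OffthreadVideo>` with `loop` set, it now wraps the fallback in a `<Loop>` whose `durationInFrames` comes from `calculateMediaDuration`, imported from Remotion core; that helper's implementation is not part of this diff. The inline helper that was removed from the top of the old `video-for-preview.js` shows the arithmetic this kind of computation performs; a sketch, under the assumption that `calculateMediaDuration` follows the same trim/playback-rate logic:

```ts
// Sketch only: mirrors the trim/playback-rate arithmetic of the helper that
// was removed from video-for-preview.js. The real calculateMediaDuration in
// Remotion core may differ in details.
const mediaDurationInFramesAfterTrim = ({
  mediaDurationInFrames,
  trimBefore, // formerly `startFrom`, in frames
  trimAfter, // formerly `endAt`, in frames
  playbackRate,
}: {
  mediaDurationInFrames: number;
  trimBefore: number | undefined;
  trimAfter: number | undefined;
  playbackRate: number;
}): number => {
  let duration = mediaDurationInFrames;
  if (typeof trimAfter !== 'undefined') {
    duration = trimAfter;
  }
  if (typeof trimBefore !== 'undefined') {
    duration -= trimBefore;
  }
  // Playing faster than 1x means the clip occupies fewer timeline frames.
  return Math.floor(duration / playbackRate);
};

// e.g. a 300-frame clip trimmed to frames [30, 270) at 2x playback
// loops every Math.floor((270 - 30) / 2) = 120 frames.
```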
package/dist/video/video.js
CHANGED

@@ -1,9 +1,9 @@
 import { jsx as _jsx } from "react/jsx-runtime";
-import { Internals,
+import { Internals, useRemotionEnvironment } from 'remotion';
 import { VideoForPreview } from './video-for-preview';
 import { VideoForRendering } from './video-for-rendering';
 const { validateMediaTrimProps, resolveTrimProps, validateMediaProps } = Internals;
-const InnerVideo = ({ src, audioStreamIndex, className, delayRenderRetries, delayRenderTimeoutInMilliseconds, disallowFallbackToOffthreadVideo, fallbackOffthreadVideoProps, logLevel, loop, loopVolumeCurveBehavior, muted, name, onVideoFrame, playbackRate, style, trimAfter, trimBefore, volume,
+const InnerVideo = ({ src, audioStreamIndex, className, delayRenderRetries, delayRenderTimeoutInMilliseconds, disallowFallbackToOffthreadVideo, fallbackOffthreadVideoProps, logLevel, loop, loopVolumeCurveBehavior, muted, name, onVideoFrame, playbackRate, style, trimAfter, trimBefore, volume, stack, toneFrequency, showInTimeline, }) => {
 const environment = useRemotionEnvironment();
 if (typeof src !== 'string') {
 throw new TypeError(`The \`<Video>\` tag requires a string for \`src\`, but got ${JSON.stringify(src)} instead.`);

@@ -20,17 +20,13 @@ const InnerVideo = ({ src, audioStreamIndex, className, delayRenderRetries, dela
 trimBefore,
 trimAfter,
 });
-if (typeof trimBeforeValue !== 'undefined' ||
-typeof trimAfterValue !== 'undefined') {
-return (_jsx(Sequence, { layout: "none", from: 0 - (trimBeforeValue ?? 0), showInTimeline: false, durationInFrames: trimAfterValue, name: name, children: _jsx(InnerVideo, { audioStreamIndex: audioStreamIndex, className: className, delayRenderRetries: delayRenderRetries, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds, disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo, name: name, fallbackOffthreadVideoProps: fallbackOffthreadVideoProps, logLevel: logLevel, loop: loop, loopVolumeCurveBehavior: loopVolumeCurveBehavior, muted: muted, onVideoFrame: onVideoFrame, playbackRate: playbackRate, src: src, stack: stack, style: style, volume: volume, trimAfter: undefined, trimBefore: undefined, showInTimeline: showInTimeline }) }));
-}
 validateMediaProps({ playbackRate, volume }, 'Video');
 if (environment.isRendering) {
-return (_jsx(VideoForRendering, { audioStreamIndex: audioStreamIndex ?? 0, className: className, delayRenderRetries: delayRenderRetries ?? null, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? null, disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo ?? false, name: name, fallbackOffthreadVideoProps: fallbackOffthreadVideoProps, logLevel: logLevel, loop: loop, loopVolumeCurveBehavior: loopVolumeCurveBehavior, muted: muted, onVideoFrame: onVideoFrame, playbackRate: playbackRate, src: src, stack: stack, style: style, volume: volume }));
+return (_jsx(VideoForRendering, { audioStreamIndex: audioStreamIndex ?? 0, className: className, delayRenderRetries: delayRenderRetries ?? null, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? null, disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo ?? false, name: name, fallbackOffthreadVideoProps: fallbackOffthreadVideoProps, logLevel: logLevel, loop: loop, loopVolumeCurveBehavior: loopVolumeCurveBehavior, muted: muted, onVideoFrame: onVideoFrame, playbackRate: playbackRate, src: src, stack: stack, style: style, volume: volume, toneFrequency: toneFrequency, trimAfterValue: trimAfterValue, trimBeforeValue: trimBeforeValue }));
 }
-return (_jsx(VideoForPreview, { className: className, name: name, logLevel: logLevel, loop: loop, loopVolumeCurveBehavior: loopVolumeCurveBehavior, muted: muted, onVideoFrame: onVideoFrame, playbackRate: playbackRate, src: src, style: style, volume: volume }));
+return (_jsx(VideoForPreview, { audioStreamIndex: audioStreamIndex ?? 0, className: className, name: name, logLevel: logLevel, loop: loop, loopVolumeCurveBehavior: loopVolumeCurveBehavior, muted: muted, onVideoFrame: onVideoFrame, playbackRate: playbackRate, src: src, style: style, volume: volume, showInTimeline: showInTimeline, trimAfter: trimAfterValue, trimBefore: trimBeforeValue, stack: stack ?? null, disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo, fallbackOffthreadVideoProps: fallbackOffthreadVideoProps }));
 };
-export const Video = ({ src, audioStreamIndex, className, delayRenderRetries, delayRenderTimeoutInMilliseconds, disallowFallbackToOffthreadVideo, fallbackOffthreadVideoProps, logLevel, loop, loopVolumeCurveBehavior, muted, name, onVideoFrame, playbackRate, showInTimeline, style, trimAfter, trimBefore, volume, stack, }) => {
-return (_jsx(InnerVideo, { audioStreamIndex: audioStreamIndex ?? 0, className: className, delayRenderRetries: delayRenderRetries ?? null, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? null, disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo ?? false, fallbackOffthreadVideoProps: fallbackOffthreadVideoProps ?? {}, logLevel: logLevel ??
+export const Video = ({ src, audioStreamIndex, className, delayRenderRetries, delayRenderTimeoutInMilliseconds, disallowFallbackToOffthreadVideo, fallbackOffthreadVideoProps, logLevel, loop, loopVolumeCurveBehavior, muted, name, onVideoFrame, playbackRate, showInTimeline, style, trimAfter, trimBefore, volume, stack, toneFrequency, }) => {
+return (_jsx(InnerVideo, { audioStreamIndex: audioStreamIndex ?? 0, className: className, delayRenderRetries: delayRenderRetries ?? null, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? null, disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo ?? false, fallbackOffthreadVideoProps: fallbackOffthreadVideoProps ?? {}, logLevel: logLevel ?? window.remotion_logLevel, loop: loop ?? false, loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? 'repeat', muted: muted ?? false, name: name, onVideoFrame: onVideoFrame, playbackRate: playbackRate ?? 1, showInTimeline: showInTimeline ?? true, src: src, style: style ?? {}, trimAfter: trimAfter, trimBefore: trimBefore, volume: volume ?? 1, toneFrequency: toneFrequency ?? 1, stack: stack }));
 };
 Internals.addSequenceStackTraces(Video);

package/dist/video-extraction/extract-frame-via-broadcast-channel.d.ts
CHANGED

@@ -1,6 +1,19 @@
 import { type LogLevel } from 'remotion';
 import type { PcmS16AudioData } from '../convert-audiodata/convert-audiodata';
-export
+export type ExtractFrameViaBroadcastChannelResult = {
+type: 'success';
+frame: ImageBitmap | VideoFrame | null;
+audio: PcmS16AudioData | null;
+durationInSeconds: number | null;
+} | {
+type: 'cannot-decode';
+durationInSeconds: number | null;
+} | {
+type: 'network-error';
+} | {
+type: 'unknown-container-format';
+};
+export declare const extractFrameViaBroadcastChannel: ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, isClientSideRendering, loop, audioStreamIndex, trimAfter, trimBefore, fps, }: {
 src: string;
 timeInSeconds: number;
 durationInSeconds: number;

@@ -11,8 +24,7 @@ export declare const extractFrameViaBroadcastChannel: ({ src, timeInSeconds, log
 isClientSideRendering: boolean;
 loop: boolean;
 audioStreamIndex: number;
-
-
-
-
-} | "cannot-decode" | "network-error" | "unknown-container-format">;
+trimAfter: number | undefined;
+trimBefore: number | undefined;
+fps: number;
+}) => Promise<ExtractFrameViaBroadcastChannelResult>;