@remotion/media 4.0.355 → 4.0.357

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. package/dist/audio/audio-for-preview.d.ts +30 -0
  2. package/dist/audio/audio-for-preview.js +213 -0
  3. package/dist/audio/audio-for-rendering.js +63 -12
  4. package/dist/audio/audio.js +8 -50
  5. package/dist/audio/props.d.ts +12 -3
  6. package/dist/audio-extraction/audio-cache.d.ts +1 -1
  7. package/dist/audio-extraction/audio-cache.js +5 -1
  8. package/dist/audio-extraction/audio-iterator.d.ts +7 -3
  9. package/dist/audio-extraction/audio-iterator.js +35 -12
  10. package/dist/audio-extraction/audio-manager.d.ts +10 -38
  11. package/dist/audio-extraction/audio-manager.js +40 -11
  12. package/dist/audio-extraction/extract-audio.d.ts +11 -3
  13. package/dist/audio-extraction/extract-audio.js +37 -17
  14. package/dist/caches.d.ts +11 -45
  15. package/dist/convert-audiodata/apply-tonefrequency.d.ts +2 -0
  16. package/dist/convert-audiodata/apply-tonefrequency.js +43 -0
  17. package/dist/convert-audiodata/combine-audiodata.js +2 -23
  18. package/dist/convert-audiodata/convert-audiodata.d.ts +1 -5
  19. package/dist/convert-audiodata/convert-audiodata.js +16 -24
  20. package/dist/convert-audiodata/wsola.d.ts +13 -0
  21. package/dist/convert-audiodata/wsola.js +197 -0
  22. package/dist/esm/index.mjs +2265 -589
  23. package/dist/extract-frame-and-audio.d.ts +7 -7
  24. package/dist/extract-frame-and-audio.js +69 -26
  25. package/dist/get-sink-weak.d.ts +3 -8
  26. package/dist/get-sink-weak.js +3 -11
  27. package/dist/get-sink.d.ts +13 -0
  28. package/dist/get-sink.js +15 -0
  29. package/dist/get-time-in-seconds.d.ts +10 -0
  30. package/dist/get-time-in-seconds.js +25 -0
  31. package/dist/index.d.ts +13 -3
  32. package/dist/index.js +12 -2
  33. package/dist/is-network-error.d.ts +6 -0
  34. package/dist/is-network-error.js +17 -0
  35. package/dist/render-timestamp-range.d.ts +1 -0
  36. package/dist/render-timestamp-range.js +9 -0
  37. package/dist/video/media-player.d.ts +91 -0
  38. package/dist/video/media-player.js +484 -0
  39. package/dist/video/props.d.ts +37 -18
  40. package/dist/video/resolve-playback-time.d.ts +8 -0
  41. package/dist/video/resolve-playback-time.js +22 -0
  42. package/dist/video/timeout-utils.d.ts +2 -0
  43. package/dist/video/timeout-utils.js +18 -0
  44. package/dist/video/video-for-preview.d.ts +25 -0
  45. package/dist/video/video-for-preview.js +241 -0
  46. package/dist/video/video-for-rendering.d.ts +26 -2
  47. package/dist/video/video-for-rendering.js +95 -19
  48. package/dist/video/video.js +13 -18
  49. package/dist/video-extraction/extract-frame-via-broadcast-channel.d.ts +19 -6
  50. package/dist/video-extraction/extract-frame-via-broadcast-channel.js +67 -4
  51. package/dist/video-extraction/extract-frame.d.ts +21 -2
  52. package/dist/video-extraction/extract-frame.js +46 -9
  53. package/dist/video-extraction/get-frames-since-keyframe.d.ts +17 -10
  54. package/dist/video-extraction/get-frames-since-keyframe.js +77 -21
  55. package/dist/video-extraction/keyframe-bank.d.ts +3 -2
  56. package/dist/video-extraction/keyframe-bank.js +32 -12
  57. package/dist/video-extraction/keyframe-manager.d.ts +3 -8
  58. package/dist/video-extraction/keyframe-manager.js +25 -10
  59. package/package.json +4 -4
package/dist/video/video-for-preview.js
@@ -0,0 +1,241 @@
+ import { jsx as _jsx } from "react/jsx-runtime";
+ import { useContext, useEffect, useMemo, useRef, useState } from 'react';
+ import { Internals, useBufferState, useCurrentFrame, Video } from 'remotion';
+ import { MediaPlayer } from './media-player';
+ const { useUnsafeVideoConfig, Timeline, SharedAudioContext, useMediaMutedState, useMediaVolumeState, useFrameForVolumeProp, evaluateVolume, warnAboutTooHighVolume, usePreload, useMediaInTimeline, SequenceContext, } = Internals;
+ const NewVideoForPreview = ({ src, style, playbackRate, logLevel, className, muted, volume, loopVolumeCurveBehavior, onVideoFrame, showInTimeline, loop, name, trimAfter, trimBefore, stack, disallowFallbackToOffthreadVideo, fallbackOffthreadVideoProps, audioStreamIndex, }) => {
+ const canvasRef = useRef(null);
+ const videoConfig = useUnsafeVideoConfig();
+ const frame = useCurrentFrame();
+ const mediaPlayerRef = useRef(null);
+ const [mediaPlayerReady, setMediaPlayerReady] = useState(false);
+ const [shouldFallbackToNativeVideo, setShouldFallbackToNativeVideo] = useState(false);
+ const [playing] = Timeline.usePlayingState();
+ const timelineContext = useContext(Timeline.TimelineContext);
+ const globalPlaybackRate = timelineContext.playbackRate;
+ const sharedAudioContext = useContext(SharedAudioContext);
+ const buffer = useBufferState();
+ const [mediaMuted] = useMediaMutedState();
+ const [mediaVolume] = useMediaVolumeState();
+ const volumePropFrame = useFrameForVolumeProp(loopVolumeCurveBehavior);
+ const userPreferredVolume = evaluateVolume({
+ frame: volumePropFrame,
+ volume,
+ mediaVolume,
+ });
+ warnAboutTooHighVolume(userPreferredVolume);
+ const [timelineId] = useState(() => String(Math.random()));
+ const parentSequence = useContext(SequenceContext);
+ useMediaInTimeline({
+ volume,
+ mediaVolume,
+ mediaType: 'video',
+ src,
+ playbackRate,
+ displayName: name ?? null,
+ id: timelineId,
+ stack,
+ showInTimeline,
+ premountDisplay: parentSequence?.premountDisplay ?? null,
+ postmountDisplay: parentSequence?.postmountDisplay ?? null,
+ });
+ if (!videoConfig) {
+ throw new Error('No video config found');
+ }
+ if (!src) {
+ throw new TypeError('No `src` was passed to <NewVideoForPreview>.');
+ }
+ const currentTime = frame / videoConfig.fps;
+ const currentTimeRef = useRef(currentTime);
+ currentTimeRef.current = currentTime;
+ const preloadedSrc = usePreload(src);
+ useEffect(() => {
+ if (!canvasRef.current)
+ return;
+ if (!sharedAudioContext)
+ return;
+ if (!sharedAudioContext.audioContext)
+ return;
+ try {
+ const player = new MediaPlayer({
+ canvas: canvasRef.current,
+ src: preloadedSrc,
+ logLevel,
+ sharedAudioContext: sharedAudioContext.audioContext,
+ loop,
+ trimAfterSeconds: trimAfter ? trimAfter / videoConfig.fps : undefined,
+ trimBeforeSeconds: trimBefore
+ ? trimBefore / videoConfig.fps
+ : undefined,
+ playbackRate,
+ audioStreamIndex,
+ });
+ mediaPlayerRef.current = player;
+ player
+ .initialize(currentTimeRef.current)
+ .then((result) => {
+ if (result.type === 'unknown-container-format') {
+ if (disallowFallbackToOffthreadVideo) {
+ throw new Error(`Unknown container format ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
+ }
+ Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Unknown container format for ${preloadedSrc} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <OffthreadVideo>`);
+ setShouldFallbackToNativeVideo(true);
+ return;
+ }
+ if (result.type === 'network-error') {
+ if (disallowFallbackToOffthreadVideo) {
+ throw new Error(`Network error fetching ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
+ }
+ Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Network error fetching ${preloadedSrc}, falling back to <OffthreadVideo>`);
+ setShouldFallbackToNativeVideo(true);
+ return;
+ }
+ if (result.type === 'cannot-decode') {
+ if (disallowFallbackToOffthreadVideo) {
+ throw new Error(`Cannot decode ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
+ }
+ Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Cannot decode ${preloadedSrc}, falling back to <OffthreadVideo>`);
+ setShouldFallbackToNativeVideo(true);
+ return;
+ }
+ if (result.type === 'no-tracks') {
+ if (disallowFallbackToOffthreadVideo) {
+ throw new Error(`No video or audio tracks found for ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
+ }
+ Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `No video or audio tracks found for ${preloadedSrc}, falling back to <OffthreadVideo>`);
+ setShouldFallbackToNativeVideo(true);
+ return;
+ }
+ if (result.type === 'success') {
+ setMediaPlayerReady(true);
+ }
+ })
+ .catch((error) => {
+ Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[NewVideoForPreview] Failed to initialize MediaPlayer', error);
+ setShouldFallbackToNativeVideo(true);
+ });
+ }
+ catch (error) {
+ Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[NewVideoForPreview] MediaPlayer initialization failed', error);
+ setShouldFallbackToNativeVideo(true);
+ }
+ return () => {
+ if (mediaPlayerRef.current) {
+ Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[NewVideoForPreview] Disposing MediaPlayer`);
+ mediaPlayerRef.current.dispose();
+ mediaPlayerRef.current = null;
+ }
+ setMediaPlayerReady(false);
+ setShouldFallbackToNativeVideo(false);
+ };
+ }, [
+ preloadedSrc,
+ logLevel,
+ sharedAudioContext,
+ loop,
+ trimAfter,
+ trimBefore,
+ videoConfig.fps,
+ playbackRate,
+ disallowFallbackToOffthreadVideo,
+ audioStreamIndex,
+ ]);
+ const classNameValue = useMemo(() => {
+ return [Internals.OBJECTFIT_CONTAIN_CLASS_NAME, className]
+ .filter(Internals.truthy)
+ .join(' ');
+ }, [className]);
+ useEffect(() => {
+ const mediaPlayer = mediaPlayerRef.current;
+ if (!mediaPlayer)
+ return;
+ if (playing) {
+ mediaPlayer.play().catch((error) => {
+ Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[NewVideoForPreview] Failed to play', error);
+ });
+ }
+ else {
+ mediaPlayer.pause();
+ }
+ }, [playing, logLevel, mediaPlayerReady]);
+ useEffect(() => {
+ const mediaPlayer = mediaPlayerRef.current;
+ if (!mediaPlayer || !mediaPlayerReady)
+ return;
+ mediaPlayer.seekTo(currentTime);
+ Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[NewVideoForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
+ }, [currentTime, logLevel, mediaPlayerReady]);
+ useEffect(() => {
+ const mediaPlayer = mediaPlayerRef.current;
+ if (!mediaPlayer || !mediaPlayerReady)
+ return;
+ let currentBlock = null;
+ const unsubscribe = mediaPlayer.onBufferingChange((newBufferingState) => {
+ if (newBufferingState && !currentBlock) {
+ currentBlock = buffer.delayPlayback();
+ Internals.Log.trace({ logLevel, tag: '@remotion/media' }, '[NewVideoForPreview] MediaPlayer buffering - blocking Remotion playback');
+ }
+ else if (!newBufferingState && currentBlock) {
+ currentBlock.unblock();
+ currentBlock = null;
+ Internals.Log.trace({ logLevel, tag: '@remotion/media' }, '[NewVideoForPreview] MediaPlayer unbuffering - unblocking Remotion playback');
+ }
+ });
+ return () => {
+ unsubscribe();
+ if (currentBlock) {
+ currentBlock.unblock();
+ currentBlock = null;
+ }
+ };
+ }, [mediaPlayerReady, buffer, logLevel]);
+ const effectiveMuted = muted || mediaMuted || userPreferredVolume <= 0;
+ useEffect(() => {
+ const mediaPlayer = mediaPlayerRef.current;
+ if (!mediaPlayer || !mediaPlayerReady)
+ return;
+ mediaPlayer.setMuted(effectiveMuted);
+ }, [effectiveMuted, mediaPlayerReady]);
+ useEffect(() => {
+ const mediaPlayer = mediaPlayerRef.current;
+ if (!mediaPlayer || !mediaPlayerReady) {
+ return;
+ }
+ mediaPlayer.setVolume(userPreferredVolume);
+ }, [userPreferredVolume, mediaPlayerReady, logLevel]);
+ const effectivePlaybackRate = useMemo(() => playbackRate * globalPlaybackRate, [playbackRate, globalPlaybackRate]);
+ useEffect(() => {
+ const mediaPlayer = mediaPlayerRef.current;
+ if (!mediaPlayer || !mediaPlayerReady) {
+ return;
+ }
+ mediaPlayer.setPlaybackRate(effectivePlaybackRate);
+ }, [effectivePlaybackRate, mediaPlayerReady, logLevel]);
+ useEffect(() => {
+ const mediaPlayer = mediaPlayerRef.current;
+ if (!mediaPlayer || !mediaPlayerReady) {
+ return;
+ }
+ mediaPlayer.setLoop(loop);
+ }, [loop, mediaPlayerReady]);
+ useEffect(() => {
+ const mediaPlayer = mediaPlayerRef.current;
+ if (!mediaPlayer || !mediaPlayerReady || !onVideoFrame) {
+ return;
+ }
+ const unsubscribe = mediaPlayer.onVideoFrame(onVideoFrame);
+ return () => {
+ unsubscribe();
+ };
+ }, [onVideoFrame, mediaPlayerReady]);
+ if (shouldFallbackToNativeVideo && !disallowFallbackToOffthreadVideo) {
+ // <Video> will fallback to <VideoForPreview> anyway
+ // not using <OffthreadVideo> because it does not support looping
+ return (_jsx(Video, { src: src, style: style, className: className, muted: muted, volume: volume, trimAfter: trimAfter, trimBefore: trimBefore, playbackRate: playbackRate, loopVolumeCurveBehavior: loopVolumeCurveBehavior, name: name, loop: loop, showInTimeline: showInTimeline, stack: stack ?? undefined, ...fallbackOffthreadVideoProps }));
+ }
+ return (_jsx("canvas", { ref: canvasRef, width: videoConfig.width, height: videoConfig.height, style: style, className: classNameValue }));
+ };
+ export const VideoForPreview = ({ className, loop, src, logLevel, muted, name, volume, loopVolumeCurveBehavior, onVideoFrame, playbackRate, style, showInTimeline, trimAfter, trimBefore, stack, disallowFallbackToOffthreadVideo, fallbackOffthreadVideoProps, audioStreamIndex, }) => {
+ const preloadedSrc = usePreload(src);
+ return (_jsx(NewVideoForPreview, { className: className, logLevel: logLevel, muted: muted, onVideoFrame: onVideoFrame, playbackRate: playbackRate, src: preloadedSrc, style: style, volume: volume, name: name, trimAfter: trimAfter, trimBefore: trimBefore, loop: loop, loopVolumeCurveBehavior: loopVolumeCurveBehavior, showInTimeline: showInTimeline, stack: stack, disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo, fallbackOffthreadVideoProps: fallbackOffthreadVideoProps, audioStreamIndex: audioStreamIndex }));
+ };
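
For orientation, a minimal usage sketch of the preview path added above, assuming the `Video` component defined later in this diff (package/dist/video/video.js) is re-exported from the package entry point. Prop names are taken from the diff; the src URL and values are illustrative only.

import React from 'react';
import {Video} from '@remotion/media';

// Hypothetical composition; prop names come from the new VideoForPreview above.
export const MyComposition: React.FC = () => {
  return (
    <Video
      src="https://example.com/bigbuckbunny.mp4" // illustrative URL
      loop
      playbackRate={1}
      audioStreamIndex={0}
      // If true, decode/container/network problems throw instead of falling back to <OffthreadVideo>.
      disallowFallbackToOffthreadVideo={false}
      // Props forwarded to <OffthreadVideo> when the fallback is taken.
      fallbackOffthreadVideoProps={{toneMapped: true, transparent: false}}
    />
  );
};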

package/dist/video/video-for-rendering.d.ts
@@ -1,3 +1,27 @@
  import React from 'react';
- import type { VideoProps } from './props';
- export declare const VideoForRendering: React.FC<VideoProps>;
+ import type { LogLevel, LoopVolumeCurveBehavior, OnVideoFrame, VolumeProp } from 'remotion';
+ import type { FallbackOffthreadVideoProps } from './props';
+ type InnerVideoProps = {
+ readonly className: string | undefined;
+ readonly loop: boolean;
+ readonly src: string;
+ readonly logLevel: LogLevel;
+ readonly muted: boolean;
+ readonly name: string | undefined;
+ readonly volume: VolumeProp;
+ readonly loopVolumeCurveBehavior: LoopVolumeCurveBehavior;
+ readonly onVideoFrame: OnVideoFrame | undefined;
+ readonly playbackRate: number;
+ readonly style: React.CSSProperties;
+ readonly delayRenderRetries: number | null;
+ readonly delayRenderTimeoutInMilliseconds: number | null;
+ readonly fallbackOffthreadVideoProps: FallbackOffthreadVideoProps;
+ readonly audioStreamIndex: number;
+ readonly disallowFallbackToOffthreadVideo: boolean;
+ readonly stack: string | undefined;
+ readonly toneFrequency: number;
+ readonly trimBeforeValue: number | undefined;
+ readonly trimAfterValue: number | undefined;
+ };
+ export declare const VideoForRendering: React.FC<InnerVideoProps>;
+ export {};

package/dist/video/video-for-rendering.js
@@ -1,12 +1,12 @@
  import { jsx as _jsx } from "react/jsx-runtime";
  import { useContext, useLayoutEffect, useMemo, useRef, useState, } from 'react';
- import { cancelRender, Internals, useCurrentFrame, useDelayRender, useRemotionEnvironment, useVideoConfig, } from 'remotion';
+ import { cancelRender, Internals, Loop, random, useCurrentFrame, useDelayRender, useRemotionEnvironment, useVideoConfig, } from 'remotion';
+ import { calculateLoopDuration } from '../../../core/src/calculate-loop';
  import { applyVolume } from '../convert-audiodata/apply-volume';
+ import { TARGET_SAMPLE_RATE } from '../convert-audiodata/resample-audiodata';
  import { frameForVolumeProp } from '../looped-frame';
  import { extractFrameViaBroadcastChannel } from '../video-extraction/extract-frame-via-broadcast-channel';
- export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted, loopVolumeCurveBehavior, delayRenderRetries, delayRenderTimeoutInMilliseconds,
- // call when a frame of the video, i.e. frame drawn on canvas
- onVideoFrame, logLevel = window.remotion_logLevel, loop, style, className, }) => {
+ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted, loopVolumeCurveBehavior, delayRenderRetries, delayRenderTimeoutInMilliseconds, onVideoFrame, logLevel, loop, style, className, fallbackOffthreadVideoProps, audioStreamIndex, name, disallowFallbackToOffthreadVideo, stack, toneFrequency, trimAfterValue, trimBeforeValue, }) => {
  if (!src) {
  throw new TypeError('No `src` was passed to <Video>.');
  }
@@ -15,18 +15,29 @@ onVideoFrame, logLevel = window.remotion_logLevel, loop, style, className, }) =>
  const { fps } = useVideoConfig();
  const { registerRenderAsset, unregisterRenderAsset } = useContext(Internals.RenderAssetManager);
  const startsAt = Internals.useMediaStartsAt();
- const [id] = useState(() => `${Math.random()}`.replace('0.', ''));
+ const sequenceContext = useContext(Internals.SequenceContext);
+ // Generate a string that's as unique as possible for this asset
+ // but at the same time the same on all threads
+ const id = useMemo(() => `media-video-${random(src)}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [
+ src,
+ sequenceContext?.cumulatedFrom,
+ sequenceContext?.relativeFrom,
+ sequenceContext?.durationInFrames,
+ ]);
  const environment = useRemotionEnvironment();
  const { delayRender, continueRender } = useDelayRender();
  const canvasRef = useRef(null);
+ const [replaceWithOffthreadVideo, setReplaceWithOffthreadVideo] = useState(false);
  useLayoutEffect(() => {
  if (!canvasRef.current) {
  return;
  }
- const actualFps = playbackRate ? fps / playbackRate : fps;
- const timestamp = frame / actualFps;
- const durationInSeconds = 1 / actualFps;
- const newHandle = delayRender(`Extracting frame number ${frame}`, {
+ if (replaceWithOffthreadVideo) {
+ return;
+ }
+ const timestamp = frame / fps;
+ const durationInSeconds = 1 / fps;
+ const newHandle = delayRender(`Extracting frame at time ${timestamp}`, {
  retries: delayRenderRetries ?? undefined,
  timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined,
  });
@@ -43,14 +54,51 @@ onVideoFrame, logLevel = window.remotion_logLevel, loop, style, className, }) =>
  src,
  timeInSeconds: timestamp,
  durationInSeconds,
- playbackRate: playbackRate ?? 1,
- logLevel: logLevel ?? 'info',
+ playbackRate,
+ logLevel,
  includeAudio: shouldRenderAudio,
  includeVideo: window.remotion_videoEnabled,
  isClientSideRendering: environment.isClientSideRendering,
- loop: loop ?? false,
+ loop,
+ audioStreamIndex,
+ trimAfter: trimAfterValue,
+ trimBefore: trimBeforeValue,
+ fps,
  })
- .then(({ frame: imageBitmap, audio, durationInSeconds: assetDurationInSeconds, }) => {
+ .then((result) => {
+ if (result.type === 'unknown-container-format') {
+ if (disallowFallbackToOffthreadVideo) {
+ cancelRender(new Error(`Unknown container format ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
+ }
+ if (window.remotion_isMainTab) {
+ Internals.Log.info({ logLevel, tag: '@remotion/media' }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <OffthreadVideo>`);
+ }
+ setReplaceWithOffthreadVideo({ durationInSeconds: null });
+ return;
+ }
+ if (result.type === 'cannot-decode') {
+ if (disallowFallbackToOffthreadVideo) {
+ cancelRender(new Error(`Cannot decode ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
+ }
+ if (window.remotion_isMainTab) {
+ Internals.Log.info({ logLevel, tag: '@remotion/media' }, `Cannot decode ${src}, falling back to <OffthreadVideo>`);
+ }
+ setReplaceWithOffthreadVideo({
+ durationInSeconds: result.durationInSeconds,
+ });
+ return;
+ }
+ if (result.type === 'network-error') {
+ if (disallowFallbackToOffthreadVideo) {
+ cancelRender(new Error(`Cannot decode ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
+ }
+ if (window.remotion_isMainTab) {
+ Internals.Log.info({ logLevel, tag: '@remotion/media' }, `Network error fetching ${src}, falling back to <OffthreadVideo>`);
+ }
+ setReplaceWithOffthreadVideo({ durationInSeconds: null });
+ return;
+ }
+ const { frame: imageBitmap, audio, durationInSeconds: assetDurationInSeconds, } = result;
  if (imageBitmap) {
  onVideoFrame?.(imageBitmap);
  const context = canvasRef.current?.getContext('2d');
@@ -70,11 +118,17 @@ onVideoFrame, logLevel = window.remotion_logLevel, loop, style, className, }) =>
  imageBitmap.close();
  }
  else if (window.remotion_videoEnabled) {
- cancelRender(new Error('No video frame found'));
+ // In the case of https://discord.com/channels/809501355504959528/809501355504959531/1424400511070765086
+ // A video that only starts at time 0.033sec
+ // we shall not crash here but clear the canvas
+ const context = canvasRef.current?.getContext('2d');
+ if (context) {
+ context.clearRect(0, 0, context.canvas.width, context.canvas.height);
+ }
  }
  const volumePropsFrame = frameForVolumeProp({
- behavior: loopVolumeCurveBehavior ?? 'repeat',
- loop: loop ?? false,
+ behavior: loopVolumeCurveBehavior,
+ loop,
  assetDurationInSeconds: assetDurationInSeconds ?? 0,
  fps,
  frame,
@@ -92,11 +146,10 @@ onVideoFrame, logLevel = window.remotion_logLevel, loop, style, className, }) =>
  type: 'inline-audio',
  id,
  audio: Array.from(audio.data),
- sampleRate: audio.sampleRate,
- numberOfChannels: audio.numberOfChannels,
  frame: absoluteFrame,
  timestamp: audio.timestamp,
- duration: (audio.numberOfFrames / audio.sampleRate) * 1000000,
+ duration: (audio.numberOfFrames / TARGET_SAMPLE_RATE) * 1000000,
+ toneFrequency,
  });
  }
  continueRender(newHandle);
@@ -129,11 +182,34 @@ onVideoFrame, logLevel = window.remotion_logLevel, loop, style, className, }) =>
  startsAt,
  unregisterRenderAsset,
  volumeProp,
+ replaceWithOffthreadVideo,
+ audioStreamIndex,
+ disallowFallbackToOffthreadVideo,
+ toneFrequency,
+ trimAfterValue,
+ trimBeforeValue,
  ]);
  const classNameValue = useMemo(() => {
  return [Internals.OBJECTFIT_CONTAIN_CLASS_NAME, className]
  .filter(Internals.truthy)
  .join(' ');
  }, [className]);
+ if (replaceWithOffthreadVideo) {
+ const fallback = (_jsx(Internals.InnerOffthreadVideo, { src: src, playbackRate: playbackRate ?? 1, muted: muted ?? false, acceptableTimeShiftInSeconds: fallbackOffthreadVideoProps?.acceptableTimeShiftInSeconds, loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? 'repeat', delayRenderRetries: delayRenderRetries ?? undefined, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined, style: style, allowAmplificationDuringRender: true, transparent: fallbackOffthreadVideoProps?.transparent ?? false, toneMapped: fallbackOffthreadVideoProps?.toneMapped ?? true, audioStreamIndex: audioStreamIndex ?? 0, name: name, className: className, onVideoFrame: onVideoFrame, volume: volumeProp, id: id, onError: fallbackOffthreadVideoProps?.onError, toneFrequency: fallbackOffthreadVideoProps?.toneFrequency ?? 1,
+ // these shouldn't matter during rendering / should not appear at all
+ showInTimeline: false, crossOrigin: undefined, onAutoPlayError: () => undefined, pauseWhenBuffering: false, trimAfter: undefined, trimBefore: undefined, useWebAudioApi: false, startFrom: undefined, endAt: undefined, stack: stack, _remotionInternalNativeLoopPassed: false }));
+ if (loop) {
+ if (!replaceWithOffthreadVideo.durationInSeconds) {
+ cancelRender(new Error(`Cannot render video ${src}: @remotion/media was unable to render, and fell back to <OffthreadVideo>. Also, "loop" was set, but <OffthreadVideo> does not support looping and @remotion/media could also not determine the duration of the video.`));
+ }
+ return (_jsx(Loop, { layout: "none", durationInFrames: calculateLoopDuration({
+ trimAfter: trimAfterValue,
+ mediaDurationInFrames: replaceWithOffthreadVideo.durationInSeconds * fps,
+ playbackRate,
+ trimBefore: trimBeforeValue,
+ }), children: fallback }));
+ }
+ return fallback;
+ }
  return _jsx("canvas", { ref: canvasRef, style: style, className: classNameValue });
  };
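
Worth noting in the hunk above: the asset `id` is no longer derived from `Math.random()` but from remotion's deterministic `random()` seeded with the source and the sequence position, so that (per the in-code comment) every render thread computes the same id for the same `<Video>` instance. A rough sketch of the idea; the sequence values below are illustrative stand-ins for cumulatedFrom / relativeFrom / durationInFrames:

import {random} from 'remotion';

// remotion's random() is a deterministic PRNG seeded by its argument, so every
// worker derives the same value for the same src.
const src = 'https://example.com/bigbuckbunny.mp4'; // illustrative
const id = `media-video-${random(src)}-0-0-120`;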

package/dist/video/video.js
@@ -1,16 +1,12 @@
  import { jsx as _jsx } from "react/jsx-runtime";
- import { useCallback } from 'react';
- import { Internals, Sequence, useRemotionEnvironment } from 'remotion';
+ import { Internals, useRemotionEnvironment } from 'remotion';
+ import { VideoForPreview } from './video-for-preview';
  import { VideoForRendering } from './video-for-rendering';
- const { validateMediaTrimProps, resolveTrimProps, validateMediaProps, VideoForPreview, } = Internals;
- export const Video = (props) => {
- // Should only destruct `trimBefore` and `trimAfter` from props,
- // rest gets drilled down
- const { trimBefore, trimAfter, name, pauseWhenBuffering, stack, showInTimeline, ...otherProps } = props;
+ const { validateMediaTrimProps, resolveTrimProps, validateMediaProps } = Internals;
+ const InnerVideo = ({ src, audioStreamIndex, className, delayRenderRetries, delayRenderTimeoutInMilliseconds, disallowFallbackToOffthreadVideo, fallbackOffthreadVideoProps, logLevel, loop, loopVolumeCurveBehavior, muted, name, onVideoFrame, playbackRate, style, trimAfter, trimBefore, volume, stack, toneFrequency, showInTimeline, }) => {
  const environment = useRemotionEnvironment();
- const onDuration = useCallback(() => undefined, []);
- if (typeof props.src !== 'string') {
- throw new TypeError(`The \`<Video>\` tag requires a string for \`src\`, but got ${JSON.stringify(props.src)} instead.`);
+ if (typeof src !== 'string') {
+ throw new TypeError(`The \`<Video>\` tag requires a string for \`src\`, but got ${JSON.stringify(src)} instead.`);
  }
  validateMediaTrimProps({
  startFrom: undefined,
@@ -24,14 +20,13 @@ export const Video = (props) => {
  trimBefore,
  trimAfter,
  });
- if (typeof trimBeforeValue !== 'undefined' ||
- typeof trimAfterValue !== 'undefined') {
- return (_jsx(Sequence, { layout: "none", from: 0 - (trimBeforeValue ?? 0), showInTimeline: false, durationInFrames: trimAfterValue, name: name, children: _jsx(Video, { pauseWhenBuffering: pauseWhenBuffering ?? false, ...otherProps }) }));
- }
- validateMediaProps(props, 'Video');
+ validateMediaProps({ playbackRate, volume }, 'Video');
  if (environment.isRendering) {
- return _jsx(VideoForRendering, { ...otherProps });
+ return (_jsx(VideoForRendering, { audioStreamIndex: audioStreamIndex ?? 0, className: className, delayRenderRetries: delayRenderRetries ?? null, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? null, disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo ?? false, name: name, fallbackOffthreadVideoProps: fallbackOffthreadVideoProps, logLevel: logLevel, loop: loop, loopVolumeCurveBehavior: loopVolumeCurveBehavior, muted: muted, onVideoFrame: onVideoFrame, playbackRate: playbackRate, src: src, stack: stack, style: style, volume: volume, toneFrequency: toneFrequency, trimAfterValue: trimAfterValue, trimBeforeValue: trimBeforeValue }));
  }
- const { onVideoFrame, delayRenderRetries, delayRenderTimeoutInMilliseconds, ...propsForPreview } = otherProps;
- return (_jsx(VideoForPreview, { _remotionInternalStack: stack ?? null, _remotionInternalNativeLoopPassed: false, onDuration: onDuration, onlyWarnForMediaSeekingError: true, pauseWhenBuffering: pauseWhenBuffering ?? false, showInTimeline: showInTimeline ?? true, onVideoFrame: onVideoFrame ?? null, ...propsForPreview }));
+ return (_jsx(VideoForPreview, { audioStreamIndex: audioStreamIndex ?? 0, className: className, name: name, logLevel: logLevel, loop: loop, loopVolumeCurveBehavior: loopVolumeCurveBehavior, muted: muted, onVideoFrame: onVideoFrame, playbackRate: playbackRate, src: src, style: style, volume: volume, showInTimeline: showInTimeline, trimAfter: trimAfterValue, trimBefore: trimBeforeValue, stack: stack ?? null, disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo, fallbackOffthreadVideoProps: fallbackOffthreadVideoProps }));
+ };
+ export const Video = ({ src, audioStreamIndex, className, delayRenderRetries, delayRenderTimeoutInMilliseconds, disallowFallbackToOffthreadVideo, fallbackOffthreadVideoProps, logLevel, loop, loopVolumeCurveBehavior, muted, name, onVideoFrame, playbackRate, showInTimeline, style, trimAfter, trimBefore, volume, stack, toneFrequency, }) => {
+ return (_jsx(InnerVideo, { audioStreamIndex: audioStreamIndex ?? 0, className: className, delayRenderRetries: delayRenderRetries ?? null, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? null, disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo ?? false, fallbackOffthreadVideoProps: fallbackOffthreadVideoProps ?? {}, logLevel: logLevel ?? window.remotion_logLevel, loop: loop ?? false, loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? 'repeat', muted: muted ?? false, name: name, onVideoFrame: onVideoFrame, playbackRate: playbackRate ?? 1, showInTimeline: showInTimeline ?? true, src: src, style: style ?? {}, trimAfter: trimAfter, trimBefore: trimBefore, volume: volume ?? 1, toneFrequency: toneFrequency ?? 1, stack: stack }));
  };
+ Internals.addSequenceStackTraces(Video);
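
The public `Video` wrapper above now resolves every optional prop to a concrete default before handing off to `InnerVideo`. As a worked example of the trim props (values illustrative), trim values are expressed in frames and converted to seconds with the composition fps, matching the `trimAfter / videoConfig.fps` conversion in video-for-preview.js earlier in this diff:

// Illustrative only: frames-to-seconds conversion for trim props.
const fps = 30;
const trimBefore = 30;  // skip the first second of the media
const trimAfter = 120;  // stop using the media after the 4 second mark
const trimBeforeSeconds = trimBefore / fps; // 1
const trimAfterSeconds = trimAfter / fps;   // 4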

package/dist/video-extraction/extract-frame-via-broadcast-channel.d.ts
@@ -1,6 +1,19 @@
  import { type LogLevel } from 'remotion';
  import type { PcmS16AudioData } from '../convert-audiodata/convert-audiodata';
- export declare const extractFrameViaBroadcastChannel: ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, isClientSideRendering, loop, }: {
+ export type ExtractFrameViaBroadcastChannelResult = {
+ type: 'success';
+ frame: ImageBitmap | VideoFrame | null;
+ audio: PcmS16AudioData | null;
+ durationInSeconds: number | null;
+ } | {
+ type: 'cannot-decode';
+ durationInSeconds: number | null;
+ } | {
+ type: 'network-error';
+ } | {
+ type: 'unknown-container-format';
+ };
+ export declare const extractFrameViaBroadcastChannel: ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, isClientSideRendering, loop, audioStreamIndex, trimAfter, trimBefore, fps, }: {
  src: string;
  timeInSeconds: number;
  durationInSeconds: number;
@@ -10,8 +23,8 @@ export declare const extractFrameViaBroadcastChannel: ({ src, timeInSeconds, log
  includeVideo: boolean;
  isClientSideRendering: boolean;
  loop: boolean;
- }) => Promise<{
- frame: ImageBitmap | VideoFrame | null;
- audio: PcmS16AudioData | null;
- durationInSeconds: number | null;
- }>;
+ audioStreamIndex: number;
+ trimAfter: number | undefined;
+ trimBefore: number | undefined;
+ fps: number;
+ }) => Promise<ExtractFrameViaBroadcastChannelResult>;
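
A sketch of how a caller might narrow the new `ExtractFrameViaBroadcastChannelResult` union; this mirrors the handling in video-for-rendering.js above, and the error handling shown here is illustrative rather than the library's own:

// Illustrative consumer of the new result union; the real caller is VideoForRendering.
const handleResult = (result: ExtractFrameViaBroadcastChannelResult) => {
  switch (result.type) {
    case 'success':
      // frame, audio and durationInSeconds are available (each possibly null)
      return result;
    case 'cannot-decode':
      // durationInSeconds may still be known, enabling a looped <OffthreadVideo> fallback
      return result.durationInSeconds;
    case 'network-error':
    case 'unknown-container-format':
      // no extra payload; fall back or fail depending on disallowFallbackToOffthreadVideo
      return null;
  }
};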

package/dist/video-extraction/extract-frame-via-broadcast-channel.js
@@ -5,7 +5,7 @@ if (window.remotion_broadcastChannel && window.remotion_isMainTab) {
  const data = event.data;
  if (data.type === 'request') {
  try {
- const { frame, audio, durationInSeconds } = await extractFrameAndAudio({
+ const result = await extractFrameAndAudio({
  src: data.src,
  timeInSeconds: data.timeInSeconds,
  logLevel: data.logLevel,
@@ -14,7 +14,37 @@ if (window.remotion_broadcastChannel && window.remotion_isMainTab) {
  includeAudio: data.includeAudio,
  includeVideo: data.includeVideo,
  loop: data.loop,
+ audioStreamIndex: data.audioStreamIndex,
+ trimAfter: data.trimAfter,
+ trimBefore: data.trimBefore,
+ fps: data.fps,
  });
+ if (result.type === 'cannot-decode') {
+ const cannotDecodeResponse = {
+ type: 'response-cannot-decode',
+ id: data.id,
+ durationInSeconds: result.durationInSeconds,
+ };
+ window.remotion_broadcastChannel.postMessage(cannotDecodeResponse);
+ return;
+ }
+ if (result.type === 'network-error') {
+ const networkErrorResponse = {
+ type: 'response-network-error',
+ id: data.id,
+ };
+ window.remotion_broadcastChannel.postMessage(networkErrorResponse);
+ return;
+ }
+ if (result.type === 'unknown-container-format') {
+ const unknownContainerFormatResponse = {
+ type: 'response-unknown-container-format',
+ id: data.id,
+ };
+ window.remotion_broadcastChannel.postMessage(unknownContainerFormatResponse);
+ return;
+ }
+ const { frame, audio, durationInSeconds } = result;
  const videoFrame = frame;
  const imageBitmap = videoFrame
  ? await createImageBitmap(videoFrame)
@@ -46,7 +76,7 @@ if (window.remotion_broadcastChannel && window.remotion_isMainTab) {
  }
  });
  }
- export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, isClientSideRendering, loop, }) => {
+ export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, isClientSideRendering, loop, audioStreamIndex, trimAfter, trimBefore, fps, }) => {
  if (isClientSideRendering || window.remotion_isMainTab) {
  return extractFrameAndAudio({
  logLevel,
@@ -57,6 +87,10 @@ export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel,
  includeAudio,
  includeVideo,
  loop,
+ audioStreamIndex,
+ trimAfter,
+ trimBefore,
+ fps,
  });
  }
  const requestId = crypto.randomUUID();
@@ -66,8 +100,12 @@ export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel,
  if (!data) {
  return;
  }
- if (data.type === 'response-success' && data.id === requestId) {
+ if (data.id !== requestId) {
+ return;
+ }
+ if (data.type === 'response-success') {
  resolve({
+ type: 'success',
  frame: data.frame ? data.frame : null,
  audio: data.audio ? data.audio : null,
  durationInSeconds: data.durationInSeconds
@@ -75,11 +113,32 @@ export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel,
  : null,
  });
  window.remotion_broadcastChannel.removeEventListener('message', onMessage);
+ return;
  }
- else if (data.type === 'response-error' && data.id === requestId) {
+ if (data.type === 'response-error') {
  reject(data.errorStack);
  window.remotion_broadcastChannel.removeEventListener('message', onMessage);
+ return;
+ }
+ if (data.type === 'response-cannot-decode') {
+ resolve({
+ type: 'cannot-decode',
+ durationInSeconds: data.durationInSeconds,
+ });
+ window.remotion_broadcastChannel.removeEventListener('message', onMessage);
+ return;
+ }
+ if (data.type === 'response-network-error') {
+ resolve({ type: 'network-error' });
+ window.remotion_broadcastChannel.removeEventListener('message', onMessage);
+ return;
+ }
+ if (data.type === 'response-unknown-container-format') {
+ resolve({ type: 'unknown-container-format' });
+ window.remotion_broadcastChannel.removeEventListener('message', onMessage);
+ return;
  }
+ throw new Error(`Invalid message: ${JSON.stringify(data)}`);
  };
  window.remotion_broadcastChannel.addEventListener('message', onMessage);
  });
@@ -94,6 +153,10 @@ export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel,
  includeAudio,
  includeVideo,
  loop,
+ audioStreamIndex,
+ trimAfter,
+ trimBefore,
+ fps,
  };
  window.remotion_broadcastChannel.postMessage(request);
  let timeoutId;