@remotion/media 4.0.413 → 4.0.415

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. package/dist/audio/allow-wait.js +15 -0
  2. package/dist/audio/audio-for-preview.js +304 -0
  3. package/dist/audio/audio-for-rendering.js +194 -0
  4. package/dist/audio/audio-preview-iterator.js +176 -0
  5. package/dist/audio/audio.js +20 -0
  6. package/dist/audio/props.js +1 -0
  7. package/dist/audio-extraction/audio-cache.js +66 -0
  8. package/dist/audio-extraction/audio-iterator.js +132 -0
  9. package/dist/audio-extraction/audio-manager.js +113 -0
  10. package/dist/audio-extraction/extract-audio.js +132 -0
  11. package/dist/audio-iterator-manager.js +228 -0
  12. package/dist/browser-can-use-webgl2.js +13 -0
  13. package/dist/caches.d.ts +3 -2
  14. package/dist/caches.js +61 -0
  15. package/dist/calculate-playbacktime.js +4 -0
  16. package/dist/convert-audiodata/apply-volume.js +17 -0
  17. package/dist/convert-audiodata/combine-audiodata.js +23 -0
  18. package/dist/convert-audiodata/convert-audiodata.js +73 -0
  19. package/dist/convert-audiodata/resample-audiodata.js +94 -0
  20. package/dist/debug-overlay/preview-overlay.js +42 -0
  21. package/dist/esm/index.mjs +40 -30
  22. package/dist/extract-frame-and-audio.js +101 -0
  23. package/dist/get-sink.js +15 -0
  24. package/dist/get-time-in-seconds.js +40 -0
  25. package/dist/helpers/round-to-4-digits.js +4 -0
  26. package/dist/index.js +12 -0
  27. package/dist/is-network-error.d.ts +6 -0
  28. package/dist/is-network-error.js +17 -0
  29. package/dist/is-type-of-error.js +20 -0
  30. package/dist/looped-frame.js +10 -0
  31. package/dist/media-player.js +431 -0
  32. package/dist/nonce-manager.js +13 -0
  33. package/dist/prewarm-iterator-for-looping.js +56 -0
  34. package/dist/render-timestamp-range.js +9 -0
  35. package/dist/show-in-timeline.js +31 -0
  36. package/dist/use-media-in-timeline.js +103 -0
  37. package/dist/video/props.js +1 -0
  38. package/dist/video/video-for-preview.js +331 -0
  39. package/dist/video/video-for-rendering.js +263 -0
  40. package/dist/video/video-preview-iterator.js +122 -0
  41. package/dist/video/video.js +35 -0
  42. package/dist/video-extraction/add-broadcast-channel-listener.js +125 -0
  43. package/dist/video-extraction/extract-frame-via-broadcast-channel.js +113 -0
  44. package/dist/video-extraction/extract-frame.js +85 -0
  45. package/dist/video-extraction/get-allocation-size.js +6 -0
  46. package/dist/video-extraction/get-frames-since-keyframe.js +108 -0
  47. package/dist/video-extraction/keyframe-bank.d.ts +2 -2
  48. package/dist/video-extraction/keyframe-bank.js +159 -0
  49. package/dist/video-extraction/keyframe-manager.d.ts +2 -1
  50. package/dist/video-extraction/keyframe-manager.js +206 -0
  51. package/dist/video-extraction/remember-actual-matroska-timestamps.js +19 -0
  52. package/dist/video-extraction/rotate-frame.js +34 -0
  53. package/dist/video-iterator-manager.js +109 -0
  54. package/package.json +3 -3
@@ -0,0 +1,15 @@
1
+ export const allowWaitRoutine = async (next, waitFn) => {
2
+ const result = await Promise.race([
3
+ next,
4
+ new Promise((resolve) => {
5
+ Promise.resolve().then(() => resolve());
6
+ }),
7
+ ]);
8
+ if (!result) {
9
+ const unblock = waitFn.waitCallback();
10
+ const newRes = await next;
11
+ unblock();
12
+ return newRes;
13
+ }
14
+ return result;
15
+ };
@@ -0,0 +1,304 @@
1
+ import { jsx as _jsx } from "react/jsx-runtime";
2
+ import { useContext, useEffect, useLayoutEffect, useMemo, useRef, useState, } from 'react';
3
+ import { Internals, Audio as RemotionAudio, useBufferState, useCurrentFrame, useVideoConfig, } from 'remotion';
4
+ import { getTimeInSeconds } from '../get-time-in-seconds';
5
+ import { MediaPlayer } from '../media-player';
6
+ import { useLoopDisplay } from '../show-in-timeline';
7
+ import { useMediaInTimeline } from '../use-media-in-timeline';
8
+ const { useUnsafeVideoConfig, Timeline, SharedAudioContext, useMediaMutedState, useMediaVolumeState, useFrameForVolumeProp, evaluateVolume, warnAboutTooHighVolume, usePreload, SequenceContext, } = Internals;
9
+ const AudioForPreviewAssertedShowing = ({ src, playbackRate, logLevel, muted, volume, loopVolumeCurveBehavior, loop, trimAfter, trimBefore, name, showInTimeline, stack, disallowFallbackToHtml5Audio, toneFrequency, audioStreamIndex, fallbackHtml5AudioProps, }) => {
10
+ const videoConfig = useUnsafeVideoConfig();
11
+ const frame = useCurrentFrame();
12
+ const mediaPlayerRef = useRef(null);
13
+ const initialTrimBeforeRef = useRef(trimBefore);
14
+ const initialTrimAfterRef = useRef(trimAfter);
15
+ const [mediaPlayerReady, setMediaPlayerReady] = useState(false);
16
+ const [shouldFallbackToNativeAudio, setShouldFallbackToNativeAudio] = useState(false);
17
+ const [playing] = Timeline.usePlayingState();
18
+ const timelineContext = useContext(Internals.TimelineContext);
19
+ const globalPlaybackRate = timelineContext.playbackRate;
20
+ const sharedAudioContext = useContext(SharedAudioContext);
21
+ const buffer = useBufferState();
22
+ const [mediaMuted] = useMediaMutedState();
23
+ const [mediaVolume] = useMediaVolumeState();
24
+ const [mediaDurationInSeconds, setMediaDurationInSeconds] = useState(null);
25
+ const volumePropFrame = useFrameForVolumeProp(loopVolumeCurveBehavior ?? 'repeat');
26
+ const userPreferredVolume = evaluateVolume({
27
+ frame: volumePropFrame,
28
+ volume,
29
+ mediaVolume,
30
+ });
31
+ warnAboutTooHighVolume(userPreferredVolume);
32
+ if (!videoConfig) {
33
+ throw new Error('No video config found');
34
+ }
35
+ if (!src) {
36
+ throw new TypeError('No `src` was passed to <NewAudioForPreview>.');
37
+ }
38
+ const currentTime = frame / videoConfig.fps;
39
+ const currentTimeRef = useRef(currentTime);
40
+ currentTimeRef.current = currentTime;
41
+ const preloadedSrc = usePreload(src);
42
+ const parentSequence = useContext(SequenceContext);
43
+ const isPremounting = Boolean(parentSequence?.premounting);
44
+ const isPostmounting = Boolean(parentSequence?.postmounting);
45
+ const loopDisplay = useLoopDisplay({
46
+ loop,
47
+ mediaDurationInSeconds,
48
+ playbackRate,
49
+ trimAfter,
50
+ trimBefore,
51
+ });
52
+ useMediaInTimeline({
53
+ volume,
54
+ mediaVolume,
55
+ mediaType: 'audio',
56
+ src,
57
+ playbackRate,
58
+ displayName: name ?? null,
59
+ stack,
60
+ showInTimeline,
61
+ premountDisplay: parentSequence?.premountDisplay ?? null,
62
+ postmountDisplay: parentSequence?.postmountDisplay ?? null,
63
+ loopDisplay,
64
+ trimAfter,
65
+ trimBefore,
66
+ });
67
+ const bufferingContext = useContext(Internals.BufferingContextReact);
68
+ if (!bufferingContext) {
69
+ throw new Error('useMediaPlayback must be used inside a <BufferingContext>');
70
+ }
71
+ const isPlayerBuffering = Internals.useIsPlayerBuffering(bufferingContext);
72
+ const initialPlaying = useRef(playing && !isPlayerBuffering);
73
+ const initialIsPremounting = useRef(isPremounting);
74
+ const initialIsPostmounting = useRef(isPostmounting);
75
+ const initialGlobalPlaybackRate = useRef(globalPlaybackRate);
76
+ const initialPlaybackRate = useRef(playbackRate);
77
+ useEffect(() => {
78
+ if (!sharedAudioContext)
79
+ return;
80
+ if (!sharedAudioContext.audioContext)
81
+ return;
82
+ try {
83
+ const player = new MediaPlayer({
84
+ src: preloadedSrc,
85
+ logLevel,
86
+ sharedAudioContext: sharedAudioContext.audioContext,
87
+ loop,
88
+ trimAfter: initialTrimAfterRef.current,
89
+ trimBefore: initialTrimBeforeRef.current,
90
+ fps: videoConfig.fps,
91
+ canvas: null,
92
+ playbackRate: initialPlaybackRate.current,
93
+ audioStreamIndex: audioStreamIndex ?? 0,
94
+ debugOverlay: false,
95
+ bufferState: buffer,
96
+ isPostmounting: initialIsPostmounting.current,
97
+ isPremounting: initialIsPremounting.current,
98
+ globalPlaybackRate: initialGlobalPlaybackRate.current,
99
+ onVideoFrameCallback: null,
100
+ playing: initialPlaying.current,
101
+ });
102
+ mediaPlayerRef.current = player;
103
+ player
104
+ .initialize(currentTimeRef.current)
105
+ .then((result) => {
106
+ if (result.type === 'disposed') {
107
+ return;
108
+ }
109
+ if (result.type === 'unknown-container-format') {
110
+ if (disallowFallbackToHtml5Audio) {
111
+ throw new Error(`Unknown container format ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
112
+ }
113
+ Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Unknown container format for ${preloadedSrc} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Html5Audio>`);
114
+ setShouldFallbackToNativeAudio(true);
115
+ return;
116
+ }
117
+ if (result.type === 'network-error') {
118
+ if (disallowFallbackToHtml5Audio) {
119
+ throw new Error(`Network error fetching ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
120
+ }
121
+ Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Network error fetching ${preloadedSrc}, falling back to <Html5Audio>`);
122
+ setShouldFallbackToNativeAudio(true);
123
+ return;
124
+ }
125
+ if (result.type === 'cannot-decode') {
126
+ if (disallowFallbackToHtml5Audio) {
127
+ throw new Error(`Cannot decode ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
128
+ }
129
+ Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Cannot decode ${preloadedSrc}, falling back to <Html5Audio>`);
130
+ setShouldFallbackToNativeAudio(true);
131
+ return;
132
+ }
133
+ if (result.type === 'no-tracks') {
134
+ if (disallowFallbackToHtml5Audio) {
135
+ throw new Error(`No video or audio tracks found for ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
136
+ }
137
+ Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `No video or audio tracks found for ${preloadedSrc}, falling back to <Html5Audio>`);
138
+ setShouldFallbackToNativeAudio(true);
139
+ return;
140
+ }
141
+ if (result.type === 'success') {
142
+ setMediaPlayerReady(true);
143
+ setMediaDurationInSeconds(result.durationInSeconds);
144
+ Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[AudioForPreview] MediaPlayer initialized successfully`);
145
+ }
146
+ })
147
+ .catch((error) => {
148
+ Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[AudioForPreview] Failed to initialize MediaPlayer', error);
149
+ setShouldFallbackToNativeAudio(true);
150
+ });
151
+ }
152
+ catch (error) {
153
+ Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[AudioForPreview] MediaPlayer initialization failed', error);
154
+ setShouldFallbackToNativeAudio(true);
155
+ }
156
+ return () => {
157
+ if (mediaPlayerRef.current) {
158
+ Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[AudioForPreview] Disposing MediaPlayer`);
159
+ mediaPlayerRef.current.dispose();
160
+ mediaPlayerRef.current = null;
161
+ }
162
+ setMediaPlayerReady(false);
163
+ setShouldFallbackToNativeAudio(false);
164
+ };
165
+ }, [
166
+ preloadedSrc,
167
+ logLevel,
168
+ sharedAudioContext,
169
+ currentTimeRef,
170
+ loop,
171
+ videoConfig.fps,
172
+ audioStreamIndex,
173
+ disallowFallbackToHtml5Audio,
174
+ buffer,
175
+ ]);
176
+ useLayoutEffect(() => {
177
+ const audioPlayer = mediaPlayerRef.current;
178
+ if (!audioPlayer)
179
+ return;
180
+ if (playing && !isPlayerBuffering) {
181
+ audioPlayer.play(currentTimeRef.current);
182
+ }
183
+ else {
184
+ audioPlayer.pause();
185
+ }
186
+ }, [isPlayerBuffering, logLevel, playing]);
187
+ useLayoutEffect(() => {
188
+ const mediaPlayer = mediaPlayerRef.current;
189
+ if (!mediaPlayer || !mediaPlayerReady) {
190
+ return;
191
+ }
192
+ mediaPlayer.setTrimBefore(trimBefore, currentTimeRef.current);
193
+ }, [trimBefore, mediaPlayerReady]);
194
+ useLayoutEffect(() => {
195
+ const mediaPlayer = mediaPlayerRef.current;
196
+ if (!mediaPlayer || !mediaPlayerReady) {
197
+ return;
198
+ }
199
+ mediaPlayer.setTrimAfter(trimAfter, currentTimeRef.current);
200
+ }, [trimAfter, mediaPlayerReady]);
201
+ const effectiveMuted = muted || mediaMuted || userPreferredVolume <= 0;
202
+ useLayoutEffect(() => {
203
+ const audioPlayer = mediaPlayerRef.current;
204
+ if (!audioPlayer || !mediaPlayerReady)
205
+ return;
206
+ audioPlayer.setMuted(effectiveMuted);
207
+ }, [effectiveMuted, mediaPlayerReady]);
208
+ useEffect(() => {
209
+ const audioPlayer = mediaPlayerRef.current;
210
+ if (!audioPlayer || !mediaPlayerReady) {
211
+ return;
212
+ }
213
+ audioPlayer.setVolume(userPreferredVolume);
214
+ }, [userPreferredVolume, mediaPlayerReady]);
215
+ useEffect(() => {
216
+ const audioPlayer = mediaPlayerRef.current;
217
+ if (!audioPlayer || !mediaPlayerReady) {
218
+ return;
219
+ }
220
+ audioPlayer.setPlaybackRate(playbackRate);
221
+ }, [playbackRate, mediaPlayerReady]);
222
+ useLayoutEffect(() => {
223
+ const audioPlayer = mediaPlayerRef.current;
224
+ if (!audioPlayer || !mediaPlayerReady) {
225
+ return;
226
+ }
227
+ audioPlayer.setGlobalPlaybackRate(globalPlaybackRate);
228
+ }, [globalPlaybackRate, mediaPlayerReady]);
229
+ useLayoutEffect(() => {
230
+ const audioPlayer = mediaPlayerRef.current;
231
+ if (!audioPlayer || !mediaPlayerReady) {
232
+ return;
233
+ }
234
+ audioPlayer.setFps(videoConfig.fps);
235
+ }, [videoConfig.fps, mediaPlayerReady]);
236
+ useLayoutEffect(() => {
237
+ const mediaPlayer = mediaPlayerRef.current;
238
+ if (!mediaPlayer || !mediaPlayerReady) {
239
+ return;
240
+ }
241
+ mediaPlayer.setLoop(loop);
242
+ }, [loop, mediaPlayerReady]);
243
+ useLayoutEffect(() => {
244
+ const mediaPlayer = mediaPlayerRef.current;
245
+ if (!mediaPlayer || !mediaPlayerReady) {
246
+ return;
247
+ }
248
+ mediaPlayer.setIsPremounting(isPremounting);
249
+ }, [isPremounting, mediaPlayerReady]);
250
+ useLayoutEffect(() => {
251
+ const mediaPlayer = mediaPlayerRef.current;
252
+ if (!mediaPlayer || !mediaPlayerReady) {
253
+ return;
254
+ }
255
+ mediaPlayer.setIsPostmounting(isPostmounting);
256
+ }, [isPostmounting, mediaPlayerReady]);
257
+ useLayoutEffect(() => {
258
+ const audioPlayer = mediaPlayerRef.current;
259
+ if (!audioPlayer || !mediaPlayerReady)
260
+ return;
261
+ audioPlayer.seekTo(currentTime).catch(() => {
262
+ // Might be disposed
263
+ });
264
+ Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[AudioForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
265
+ }, [currentTime, logLevel, mediaPlayerReady]);
266
+ if (shouldFallbackToNativeAudio && !disallowFallbackToHtml5Audio) {
267
+ return (_jsx(RemotionAudio, { src: src, muted: muted, volume: volume, startFrom: trimBefore, endAt: trimAfter, playbackRate: playbackRate, loopVolumeCurveBehavior: loopVolumeCurveBehavior, name: name, loop: loop, showInTimeline: showInTimeline, stack: stack ?? undefined, toneFrequency: toneFrequency, audioStreamIndex: audioStreamIndex, pauseWhenBuffering: fallbackHtml5AudioProps?.pauseWhenBuffering, crossOrigin: fallbackHtml5AudioProps?.crossOrigin, ...fallbackHtml5AudioProps }));
268
+ }
269
+ return null;
270
+ };
271
+ export const AudioForPreview = ({ loop, src, logLevel, muted, name, volume, loopVolumeCurveBehavior, playbackRate, trimAfter, trimBefore, showInTimeline, stack, disallowFallbackToHtml5Audio, toneFrequency, audioStreamIndex, fallbackHtml5AudioProps, }) => {
272
+ const preloadedSrc = usePreload(src);
273
+ const frame = useCurrentFrame();
274
+ const videoConfig = useVideoConfig();
275
+ const currentTime = frame / videoConfig.fps;
276
+ const showShow = useMemo(() => {
277
+ return (getTimeInSeconds({
278
+ unloopedTimeInSeconds: currentTime,
279
+ playbackRate: playbackRate ?? 1,
280
+ loop: loop ?? false,
281
+ trimBefore,
282
+ trimAfter,
283
+ mediaDurationInSeconds: Infinity,
284
+ fps: videoConfig.fps,
285
+ ifNoMediaDuration: 'infinity',
286
+ src,
287
+ }) !== null);
288
+ }, [
289
+ currentTime,
290
+ loop,
291
+ playbackRate,
292
+ src,
293
+ trimAfter,
294
+ trimBefore,
295
+ videoConfig.fps,
296
+ ]);
297
+ if (!showShow) {
298
+ return null;
299
+ }
300
+ return (_jsx(AudioForPreviewAssertedShowing, { audioStreamIndex: audioStreamIndex ?? 0, src: preloadedSrc, playbackRate: playbackRate ?? 1, logLevel: logLevel ??
301
+ (typeof window !== 'undefined'
302
+ ? (window.remotion_logLevel ?? 'info')
303
+ : 'info'), muted: muted ?? false, volume: volume ?? 1, loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? 'repeat', loop: loop ?? false, trimAfter: trimAfter, trimBefore: trimBefore, name: name, showInTimeline: showInTimeline ?? true, stack: stack, disallowFallbackToHtml5Audio: disallowFallbackToHtml5Audio ?? false, toneFrequency: toneFrequency, fallbackHtml5AudioProps: fallbackHtml5AudioProps }));
304
+ };
@@ -0,0 +1,194 @@
1
+ import { jsx as _jsx } from "react/jsx-runtime";
2
+ import { useContext, useLayoutEffect, useMemo, useState } from 'react';
3
+ import { cancelRender, Html5Audio, Internals, random, useCurrentFrame, useDelayRender, useRemotionEnvironment, } from 'remotion';
4
+ import { useMaxMediaCacheSize } from '../caches';
5
+ import { applyVolume } from '../convert-audiodata/apply-volume';
6
+ import { TARGET_SAMPLE_RATE } from '../convert-audiodata/resample-audiodata';
7
+ import { frameForVolumeProp } from '../looped-frame';
8
+ import { extractFrameViaBroadcastChannel } from '../video-extraction/extract-frame-via-broadcast-channel';
9
+ export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted, loopVolumeCurveBehavior, delayRenderRetries, delayRenderTimeoutInMilliseconds, logLevel = window.remotion_logLevel ?? 'info', loop, fallbackHtml5AudioProps, audioStreamIndex, showInTimeline, style, name, disallowFallbackToHtml5Audio, toneFrequency, trimAfter, trimBefore, }) => {
10
+ const frame = useCurrentFrame();
11
+ const absoluteFrame = Internals.useTimelinePosition();
12
+ const videoConfig = Internals.useUnsafeVideoConfig();
13
+ const { registerRenderAsset, unregisterRenderAsset } = useContext(Internals.RenderAssetManager);
14
+ const startsAt = Internals.useMediaStartsAt();
15
+ const environment = useRemotionEnvironment();
16
+ if (!videoConfig) {
17
+ throw new Error('No video config found');
18
+ }
19
+ if (!src) {
20
+ throw new TypeError('No `src` was passed to <Audio>.');
21
+ }
22
+ const { fps } = videoConfig;
23
+ const { delayRender, continueRender } = useDelayRender();
24
+ const [replaceWithHtml5Audio, setReplaceWithHtml5Audio] = useState(false);
25
+ const sequenceContext = useContext(Internals.SequenceContext);
26
+ // Generate a string that's as unique as possible for this asset
27
+ // but at the same time the same on all threads
28
+ const id = useMemo(() => `media-audio-${random(src)}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [
29
+ src,
30
+ sequenceContext?.cumulatedFrom,
31
+ sequenceContext?.relativeFrom,
32
+ sequenceContext?.durationInFrames,
33
+ ]);
34
+ const maxCacheSize = useMaxMediaCacheSize(logLevel ?? window.remotion_logLevel);
35
+ const audioEnabled = Internals.useAudioEnabled();
36
+ useLayoutEffect(() => {
37
+ const timestamp = frame / fps;
38
+ const durationInSeconds = 1 / fps;
39
+ const shouldRenderAudio = (() => {
40
+ if (!audioEnabled) {
41
+ return false;
42
+ }
43
+ if (muted) {
44
+ return false;
45
+ }
46
+ return true;
47
+ })();
48
+ if (!shouldRenderAudio) {
49
+ return;
50
+ }
51
+ if (replaceWithHtml5Audio) {
52
+ return;
53
+ }
54
+ const newHandle = delayRender(`Extracting audio for frame ${frame}`, {
55
+ retries: delayRenderRetries ?? undefined,
56
+ timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined,
57
+ });
58
+ extractFrameViaBroadcastChannel({
59
+ src,
60
+ timeInSeconds: timestamp,
61
+ durationInSeconds,
62
+ playbackRate: playbackRate ?? 1,
63
+ logLevel: logLevel ?? window.remotion_logLevel,
64
+ includeAudio: shouldRenderAudio,
65
+ includeVideo: false,
66
+ isClientSideRendering: environment.isClientSideRendering,
67
+ loop: loop ?? false,
68
+ audioStreamIndex: audioStreamIndex ?? 0,
69
+ trimAfter,
70
+ trimBefore,
71
+ fps,
72
+ maxCacheSize,
73
+ })
74
+ .then((result) => {
75
+ if (result.type === 'unknown-container-format') {
76
+ if (environment.isClientSideRendering) {
77
+ cancelRender(new Error(`Cannot render audio "${src}": Unknown container format. See supported formats: https://www.remotion.dev/docs/mediabunny/formats`));
78
+ return;
79
+ }
80
+ if (disallowFallbackToHtml5Audio) {
81
+ cancelRender(new Error(`Unknown container format ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
82
+ }
83
+ Internals.Log.warn({
84
+ logLevel: logLevel ?? window.remotion_logLevel,
85
+ tag: '@remotion/media',
86
+ }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Html5Audio>`);
87
+ setReplaceWithHtml5Audio(true);
88
+ return;
89
+ }
90
+ if (result.type === 'cannot-decode') {
91
+ if (environment.isClientSideRendering) {
92
+ cancelRender(new Error(`Cannot render audio "${src}": The audio could not be decoded by the browser.`));
93
+ return;
94
+ }
95
+ if (disallowFallbackToHtml5Audio) {
96
+ cancelRender(new Error(`Cannot decode ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
97
+ }
98
+ Internals.Log.warn({
99
+ logLevel: logLevel ?? window.remotion_logLevel,
100
+ tag: '@remotion/media',
101
+ }, `Cannot decode ${src}, falling back to <Html5Audio>`);
102
+ setReplaceWithHtml5Audio(true);
103
+ return;
104
+ }
105
+ if (result.type === 'cannot-decode-alpha') {
106
+ throw new Error(`Cannot decode alpha component for ${src}, and 'disallowFallbackToHtml5Audio' was set. But this should never happen, since you used the <Audio> tag. Please report this as a bug.`);
107
+ }
108
+ if (result.type === 'network-error') {
109
+ if (environment.isClientSideRendering) {
110
+ cancelRender(new Error(`Cannot render audio "${src}": Network error while fetching the audio (possibly CORS).`));
111
+ return;
112
+ }
113
+ if (disallowFallbackToHtml5Audio) {
114
+ cancelRender(new Error(`Cannot decode ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
115
+ }
116
+ Internals.Log.warn({
117
+ logLevel: logLevel ?? window.remotion_logLevel,
118
+ tag: '@remotion/media',
119
+ }, `Network error fetching ${src}, falling back to <Html5Audio>`);
120
+ setReplaceWithHtml5Audio(true);
121
+ return;
122
+ }
123
+ const { audio, durationInSeconds: assetDurationInSeconds } = result;
124
+ const volumePropsFrame = frameForVolumeProp({
125
+ behavior: loopVolumeCurveBehavior ?? 'repeat',
126
+ loop: loop ?? false,
127
+ assetDurationInSeconds: assetDurationInSeconds ?? 0,
128
+ fps,
129
+ frame,
130
+ startsAt,
131
+ });
132
+ const volume = Internals.evaluateVolume({
133
+ volume: volumeProp,
134
+ frame: volumePropsFrame,
135
+ mediaVolume: 1,
136
+ });
137
+ Internals.warnAboutTooHighVolume(volume);
138
+ if (audio && volume > 0) {
139
+ applyVolume(audio.data, volume);
140
+ registerRenderAsset({
141
+ type: 'inline-audio',
142
+ id,
143
+ audio: environment.isClientSideRendering
144
+ ? audio.data
145
+ : Array.from(audio.data),
146
+ frame: absoluteFrame,
147
+ timestamp: audio.timestamp,
148
+ duration: (audio.numberOfFrames / TARGET_SAMPLE_RATE) * 1000000,
149
+ toneFrequency: toneFrequency ?? 1,
150
+ });
151
+ }
152
+ continueRender(newHandle);
153
+ })
154
+ .catch((error) => {
155
+ cancelRender(error);
156
+ });
157
+ return () => {
158
+ continueRender(newHandle);
159
+ unregisterRenderAsset(id);
160
+ };
161
+ }, [
162
+ absoluteFrame,
163
+ continueRender,
164
+ delayRender,
165
+ delayRenderRetries,
166
+ delayRenderTimeoutInMilliseconds,
167
+ disallowFallbackToHtml5Audio,
168
+ environment.isClientSideRendering,
169
+ fps,
170
+ frame,
171
+ id,
172
+ logLevel,
173
+ loop,
174
+ loopVolumeCurveBehavior,
175
+ muted,
176
+ playbackRate,
177
+ registerRenderAsset,
178
+ src,
179
+ startsAt,
180
+ unregisterRenderAsset,
181
+ volumeProp,
182
+ audioStreamIndex,
183
+ toneFrequency,
184
+ trimAfter,
185
+ trimBefore,
186
+ replaceWithHtml5Audio,
187
+ maxCacheSize,
188
+ audioEnabled,
189
+ ]);
190
+ if (replaceWithHtml5Audio) {
191
+ return (_jsx(Html5Audio, { src: src, playbackRate: playbackRate, muted: muted, loop: loop, volume: volumeProp, delayRenderRetries: delayRenderRetries, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds, style: style, loopVolumeCurveBehavior: loopVolumeCurveBehavior, audioStreamIndex: audioStreamIndex, useWebAudioApi: fallbackHtml5AudioProps?.useWebAudioApi, onError: fallbackHtml5AudioProps?.onError, toneFrequency: toneFrequency, acceptableTimeShiftInSeconds: fallbackHtml5AudioProps?.acceptableTimeShiftInSeconds, name: name, showInTimeline: showInTimeline }));
192
+ }
193
+ return null;
194
+ };
@@ -0,0 +1,176 @@
1
+ import { roundTo4Digits } from '../helpers/round-to-4-digits';
2
+ import { allowWaitRoutine } from './allow-wait';
3
+ export const HEALTHY_BUFFER_THRESHOLD_SECONDS = 1;
4
+ export const makeAudioIterator = (startFromSecond, cache) => {
5
+ let destroyed = false;
6
+ const iterator = cache.makeIteratorOrUsePrewarmed(startFromSecond);
7
+ const queuedAudioNodes = [];
8
+ const audioChunksForAfterResuming = [];
9
+ let mostRecentTimestamp = -Infinity;
10
+ const cleanupAudioQueue = () => {
11
+ for (const node of queuedAudioNodes) {
12
+ node.node.stop();
13
+ }
14
+ queuedAudioNodes.length = 0;
15
+ };
16
+ const getNextOrNullIfNotAvailable = async (allowWait) => {
17
+ const next = iterator.next();
18
+ const result = allowWait
19
+ ? await allowWaitRoutine(next, allowWait)
20
+ : await Promise.race([
21
+ next,
22
+ new Promise((resolve) => {
23
+ Promise.resolve().then(() => resolve());
24
+ }),
25
+ ]);
26
+ if (!result) {
27
+ return {
28
+ type: 'need-to-wait-for-it',
29
+ waitPromise: async () => {
30
+ const res = await next;
31
+ return res.value;
32
+ },
33
+ };
34
+ }
35
+ if (result.value) {
36
+ mostRecentTimestamp = Math.max(mostRecentTimestamp, result.value.timestamp + result.value.duration);
37
+ return {
38
+ type: 'got-buffer',
39
+ buffer: result.value,
40
+ };
41
+ }
42
+ return {
43
+ type: 'got-end',
44
+ mostRecentTimestamp,
45
+ };
46
+ };
47
+ const tryToSatisfySeek = async (time, allowWait, onBufferScheduled) => {
48
+ if (time < startFromSecond) {
49
+ return {
50
+ type: 'not-satisfied',
51
+ reason: `time requested is before the start of the iterator`,
52
+ };
53
+ }
54
+ while (true) {
55
+ const buffer = await getNextOrNullIfNotAvailable(allowWait);
56
+ if (buffer.type === 'need-to-wait-for-it') {
57
+ return {
58
+ type: 'not-satisfied',
59
+ reason: 'iterator did not have buffer ready',
60
+ };
61
+ }
62
+ if (buffer.type === 'got-end') {
63
+ if (time >= mostRecentTimestamp) {
64
+ return {
65
+ type: 'ended',
66
+ };
67
+ }
68
+ return {
69
+ type: 'not-satisfied',
70
+ reason: `iterator ended before the requested time`,
71
+ };
72
+ }
73
+ if (buffer.type === 'got-buffer') {
74
+ const bufferTimestamp = roundTo4Digits(buffer.buffer.timestamp);
75
+ const bufferEndTimestamp = roundTo4Digits(buffer.buffer.timestamp + buffer.buffer.duration);
76
+ const timestamp = roundTo4Digits(time);
77
+ if (roundTo4Digits(time) < bufferTimestamp) {
78
+ return {
79
+ type: 'not-satisfied',
80
+ reason: `iterator is too far, most recently returned ${bufferTimestamp}-${bufferEndTimestamp}, requested ${time}`,
81
+ };
82
+ }
83
+ if (bufferTimestamp <= timestamp && bufferEndTimestamp > timestamp) {
84
+ onBufferScheduled(buffer.buffer);
85
+ return {
86
+ type: 'satisfied',
87
+ };
88
+ }
89
+ onBufferScheduled(buffer.buffer);
90
+ continue;
91
+ }
92
+ throw new Error('Unreachable');
93
+ }
94
+ };
95
+ const removeAndReturnAllQueuedAudioNodes = () => {
96
+ const nodes = queuedAudioNodes.slice();
97
+ for (const node of nodes) {
98
+ node.node.stop();
99
+ }
100
+ queuedAudioNodes.length = 0;
101
+ return nodes;
102
+ };
103
+ const addChunkForAfterResuming = (buffer, timestamp) => {
104
+ audioChunksForAfterResuming.push({ buffer, timestamp });
105
+ };
106
+ const moveQueuedChunksToPauseQueue = () => {
107
+ const toQueue = removeAndReturnAllQueuedAudioNodes();
108
+ for (const chunk of toQueue) {
109
+ addChunkForAfterResuming(chunk.buffer, chunk.timestamp);
110
+ }
111
+ };
112
+ const getNumberOfChunksAfterResuming = () => {
113
+ return audioChunksForAfterResuming.length;
114
+ };
115
+ return {
116
+ destroy: () => {
117
+ cleanupAudioQueue();
118
+ destroyed = true;
119
+ iterator.return().catch(() => undefined);
120
+ audioChunksForAfterResuming.length = 0;
121
+ },
122
+ getNext: async () => {
123
+ const next = await iterator.next();
124
+ if (next.value) {
125
+ mostRecentTimestamp = Math.max(mostRecentTimestamp, next.value.timestamp + next.value.duration);
126
+ }
127
+ return next;
128
+ },
129
+ isDestroyed: () => {
130
+ return destroyed;
131
+ },
132
+ addQueuedAudioNode: (node, timestamp, buffer) => {
133
+ queuedAudioNodes.push({ node, timestamp, buffer });
134
+ },
135
+ removeQueuedAudioNode: (node) => {
136
+ const index = queuedAudioNodes.findIndex((n) => n.node === node);
137
+ if (index !== -1) {
138
+ queuedAudioNodes.splice(index, 1);
139
+ }
140
+ },
141
+ getAndClearAudioChunksForAfterResuming: () => {
142
+ const chunks = audioChunksForAfterResuming.slice();
143
+ audioChunksForAfterResuming.length = 0;
144
+ return chunks;
145
+ },
146
+ getQueuedPeriod: () => {
147
+ let until = -Infinity;
148
+ let from = Infinity;
149
+ for (const node of queuedAudioNodes) {
150
+ until = Math.max(until, node.timestamp + node.buffer.duration);
151
+ from = Math.min(from, node.timestamp);
152
+ }
153
+ for (const chunk of audioChunksForAfterResuming) {
154
+ until = Math.max(until, chunk.timestamp + chunk.buffer.duration);
155
+ from = Math.min(from, chunk.timestamp);
156
+ }
157
+ if (!Number.isFinite(from) || !Number.isFinite(until)) {
158
+ return null;
159
+ }
160
+ return {
161
+ from,
162
+ until,
163
+ };
164
+ },
165
+ tryToSatisfySeek,
166
+ addChunkForAfterResuming,
167
+ moveQueuedChunksToPauseQueue,
168
+ getNumberOfChunksAfterResuming,
169
+ };
170
+ };
171
+ export const isAlreadyQueued = (time, queuedPeriod) => {
172
+ if (!queuedPeriod) {
173
+ return false;
174
+ }
175
+ return time >= queuedPeriod.from && time < queuedPeriod.until;
176
+ };