@remotion/media 4.0.355 → 4.0.357

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59):
  1. package/dist/audio/audio-for-preview.d.ts +30 -0
  2. package/dist/audio/audio-for-preview.js +213 -0
  3. package/dist/audio/audio-for-rendering.js +63 -12
  4. package/dist/audio/audio.js +8 -50
  5. package/dist/audio/props.d.ts +12 -3
  6. package/dist/audio-extraction/audio-cache.d.ts +1 -1
  7. package/dist/audio-extraction/audio-cache.js +5 -1
  8. package/dist/audio-extraction/audio-iterator.d.ts +7 -3
  9. package/dist/audio-extraction/audio-iterator.js +35 -12
  10. package/dist/audio-extraction/audio-manager.d.ts +10 -38
  11. package/dist/audio-extraction/audio-manager.js +40 -11
  12. package/dist/audio-extraction/extract-audio.d.ts +11 -3
  13. package/dist/audio-extraction/extract-audio.js +37 -17
  14. package/dist/caches.d.ts +11 -45
  15. package/dist/convert-audiodata/apply-tonefrequency.d.ts +2 -0
  16. package/dist/convert-audiodata/apply-tonefrequency.js +43 -0
  17. package/dist/convert-audiodata/combine-audiodata.js +2 -23
  18. package/dist/convert-audiodata/convert-audiodata.d.ts +1 -5
  19. package/dist/convert-audiodata/convert-audiodata.js +16 -24
  20. package/dist/convert-audiodata/wsola.d.ts +13 -0
  21. package/dist/convert-audiodata/wsola.js +197 -0
  22. package/dist/esm/index.mjs +2265 -589
  23. package/dist/extract-frame-and-audio.d.ts +7 -7
  24. package/dist/extract-frame-and-audio.js +69 -26
  25. package/dist/get-sink-weak.d.ts +3 -8
  26. package/dist/get-sink-weak.js +3 -11
  27. package/dist/get-sink.d.ts +13 -0
  28. package/dist/get-sink.js +15 -0
  29. package/dist/get-time-in-seconds.d.ts +10 -0
  30. package/dist/get-time-in-seconds.js +25 -0
  31. package/dist/index.d.ts +13 -3
  32. package/dist/index.js +12 -2
  33. package/dist/is-network-error.d.ts +6 -0
  34. package/dist/is-network-error.js +17 -0
  35. package/dist/render-timestamp-range.d.ts +1 -0
  36. package/dist/render-timestamp-range.js +9 -0
  37. package/dist/video/media-player.d.ts +91 -0
  38. package/dist/video/media-player.js +484 -0
  39. package/dist/video/props.d.ts +37 -18
  40. package/dist/video/resolve-playback-time.d.ts +8 -0
  41. package/dist/video/resolve-playback-time.js +22 -0
  42. package/dist/video/timeout-utils.d.ts +2 -0
  43. package/dist/video/timeout-utils.js +18 -0
  44. package/dist/video/video-for-preview.d.ts +25 -0
  45. package/dist/video/video-for-preview.js +241 -0
  46. package/dist/video/video-for-rendering.d.ts +26 -2
  47. package/dist/video/video-for-rendering.js +95 -19
  48. package/dist/video/video.js +13 -18
  49. package/dist/video-extraction/extract-frame-via-broadcast-channel.d.ts +19 -6
  50. package/dist/video-extraction/extract-frame-via-broadcast-channel.js +67 -4
  51. package/dist/video-extraction/extract-frame.d.ts +21 -2
  52. package/dist/video-extraction/extract-frame.js +46 -9
  53. package/dist/video-extraction/get-frames-since-keyframe.d.ts +17 -10
  54. package/dist/video-extraction/get-frames-since-keyframe.js +77 -21
  55. package/dist/video-extraction/keyframe-bank.d.ts +3 -2
  56. package/dist/video-extraction/keyframe-bank.js +32 -12
  57. package/dist/video-extraction/keyframe-manager.d.ts +3 -8
  58. package/dist/video-extraction/keyframe-manager.js +25 -10
  59. package/package.json +4 -4
@@ -0,0 +1,30 @@
1
+ import React from 'react';
2
+ import type { LogLevel, LoopVolumeCurveBehavior, VolumeProp } from 'remotion';
3
+ import type { FallbackHtml5AudioProps } from './props';
4
+ type InnerAudioProps = {
5
+ readonly loop?: boolean;
6
+ readonly src: string;
7
+ readonly logLevel?: LogLevel;
8
+ readonly muted?: boolean;
9
+ readonly name?: string | undefined;
10
+ readonly volume?: VolumeProp;
11
+ readonly loopVolumeCurveBehavior?: LoopVolumeCurveBehavior;
12
+ readonly playbackRate?: number;
13
+ readonly _remotionInternalNativeLoopPassed?: boolean;
14
+ readonly _remotionInternalStack?: string | null;
15
+ readonly shouldPreMountAudioTags?: boolean;
16
+ readonly onNativeError?: React.ReactEventHandler<HTMLAudioElement>;
17
+ readonly onDuration?: (src: string, durationInSeconds: number) => void;
18
+ readonly pauseWhenBuffering?: boolean;
19
+ readonly _remotionInternalNeedsDurationCalculation?: boolean;
20
+ readonly showInTimeline?: boolean;
21
+ readonly trimAfter?: number | undefined;
22
+ readonly trimBefore?: number | undefined;
23
+ readonly stack: string | null;
24
+ readonly disallowFallbackToHtml5Audio?: boolean;
25
+ readonly toneFrequency?: number;
26
+ readonly audioStreamIndex?: number;
27
+ readonly fallbackHtml5AudioProps?: FallbackHtml5AudioProps;
28
+ };
29
+ export declare const AudioForPreview: React.FC<InnerAudioProps>;
30
+ export {};
@@ -0,0 +1,213 @@
1
+ import { jsx as _jsx } from "react/jsx-runtime";
2
+ import { useContext, useEffect, useMemo, useRef, useState } from 'react';
3
+ import { Internals, Audio as RemotionAudio, useBufferState, useCurrentFrame, } from 'remotion';
4
+ import { MediaPlayer } from '../video/media-player';
5
+ const { useUnsafeVideoConfig, Timeline, SharedAudioContext, useMediaMutedState, useMediaVolumeState, useFrameForVolumeProp, evaluateVolume, warnAboutTooHighVolume, usePreload, useMediaInTimeline, SequenceContext, } = Internals;
6
+ const NewAudioForPreview = ({ src, playbackRate, logLevel, muted, volume, loopVolumeCurveBehavior, loop, trimAfter, trimBefore, name, showInTimeline, stack, disallowFallbackToHtml5Audio, toneFrequency, audioStreamIndex, fallbackHtml5AudioProps, }) => {
7
+ const videoConfig = useUnsafeVideoConfig();
8
+ const frame = useCurrentFrame();
9
+ const mediaPlayerRef = useRef(null);
10
+ const [mediaPlayerReady, setMediaPlayerReady] = useState(false);
11
+ const [shouldFallbackToNativeAudio, setShouldFallbackToNativeAudio] = useState(false);
12
+ const [playing] = Timeline.usePlayingState();
13
+ const timelineContext = useContext(Timeline.TimelineContext);
14
+ const globalPlaybackRate = timelineContext.playbackRate;
15
+ const sharedAudioContext = useContext(SharedAudioContext);
16
+ const buffer = useBufferState();
17
+ const delayHandleRef = useRef(null);
18
+ const [mediaMuted] = useMediaMutedState();
19
+ const [mediaVolume] = useMediaVolumeState();
20
+ const volumePropFrame = useFrameForVolumeProp(loopVolumeCurveBehavior ?? 'repeat');
21
+ const userPreferredVolume = evaluateVolume({
22
+ frame: volumePropFrame,
23
+ volume,
24
+ mediaVolume,
25
+ });
26
+ warnAboutTooHighVolume(userPreferredVolume);
27
+ if (!videoConfig) {
28
+ throw new Error('No video config found');
29
+ }
30
+ if (!src) {
31
+ throw new TypeError('No `src` was passed to <NewAudioForPreview>.');
32
+ }
33
+ const currentTime = frame / videoConfig.fps;
34
+ const currentTimeRef = useRef(currentTime);
35
+ currentTimeRef.current = currentTime;
36
+ const preloadedSrc = usePreload(src);
37
+ const [timelineId] = useState(() => String(Math.random()));
38
+ const parentSequence = useContext(SequenceContext);
39
+ useMediaInTimeline({
40
+ volume,
41
+ mediaVolume,
42
+ mediaType: 'audio',
43
+ src,
44
+ playbackRate,
45
+ displayName: name ?? null,
46
+ id: timelineId,
47
+ stack,
48
+ showInTimeline,
49
+ premountDisplay: parentSequence?.premountDisplay ?? null,
50
+ postmountDisplay: parentSequence?.postmountDisplay ?? null,
51
+ });
52
+ useEffect(() => {
53
+ if (!sharedAudioContext)
54
+ return;
55
+ if (!sharedAudioContext.audioContext)
56
+ return;
57
+ try {
58
+ const player = new MediaPlayer({
59
+ src: preloadedSrc,
60
+ logLevel,
61
+ sharedAudioContext: sharedAudioContext.audioContext,
62
+ loop,
63
+ trimAfterSeconds: trimAfter ? trimAfter / videoConfig.fps : undefined,
64
+ trimBeforeSeconds: trimBefore
65
+ ? trimBefore / videoConfig.fps
66
+ : undefined,
67
+ canvas: null,
68
+ playbackRate,
69
+ audioStreamIndex: audioStreamIndex ?? 0,
70
+ });
71
+ mediaPlayerRef.current = player;
72
+ player
73
+ .initialize(currentTimeRef.current)
74
+ .then((result) => {
75
+ if (result.type === 'unknown-container-format') {
76
+ if (disallowFallbackToHtml5Audio) {
77
+ throw new Error(`Unknown container format ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
78
+ }
79
+ Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Unknown container format for ${preloadedSrc} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Audio>`);
80
+ setShouldFallbackToNativeAudio(true);
81
+ return;
82
+ }
83
+ if (result.type === 'network-error') {
84
+ if (disallowFallbackToHtml5Audio) {
85
+ throw new Error(`Network error fetching ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
86
+ }
87
+ Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Network error fetching ${preloadedSrc}, falling back to <Audio>`);
88
+ setShouldFallbackToNativeAudio(true);
89
+ return;
90
+ }
91
+ if (result.type === 'cannot-decode') {
92
+ if (disallowFallbackToHtml5Audio) {
93
+ throw new Error(`Cannot decode ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
94
+ }
95
+ Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Cannot decode ${preloadedSrc}, falling back to <Audio>`);
96
+ setShouldFallbackToNativeAudio(true);
97
+ return;
98
+ }
99
+ if (result.type === 'no-tracks') {
100
+ if (disallowFallbackToHtml5Audio) {
101
+ throw new Error(`No video or audio tracks found for ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
102
+ }
103
+ Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `No video or audio tracks found for ${preloadedSrc}, falling back to <Audio>`);
104
+ setShouldFallbackToNativeAudio(true);
105
+ return;
106
+ }
107
+ if (result.type === 'success') {
108
+ setMediaPlayerReady(true);
109
+ Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[NewAudioForPreview] MediaPlayer initialized successfully`);
110
+ }
111
+ })
112
+ .catch((error) => {
113
+ Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[NewAudioForPreview] Failed to initialize MediaPlayer', error);
114
+ setShouldFallbackToNativeAudio(true);
115
+ });
116
+ }
117
+ catch (error) {
118
+ Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[NewAudioForPreview] MediaPlayer initialization failed', error);
119
+ setShouldFallbackToNativeAudio(true);
120
+ }
121
+ return () => {
122
+ if (delayHandleRef.current) {
123
+ delayHandleRef.current.unblock();
124
+ delayHandleRef.current = null;
125
+ }
126
+ if (mediaPlayerRef.current) {
127
+ Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[NewAudioForPreview] Disposing MediaPlayer`);
128
+ mediaPlayerRef.current.dispose();
129
+ mediaPlayerRef.current = null;
130
+ }
131
+ setMediaPlayerReady(false);
132
+ setShouldFallbackToNativeAudio(false);
133
+ };
134
+ }, [
135
+ preloadedSrc,
136
+ logLevel,
137
+ sharedAudioContext,
138
+ currentTimeRef,
139
+ loop,
140
+ trimAfter,
141
+ trimBefore,
142
+ playbackRate,
143
+ videoConfig.fps,
144
+ audioStreamIndex,
145
+ disallowFallbackToHtml5Audio,
146
+ ]);
147
+ useEffect(() => {
148
+ const audioPlayer = mediaPlayerRef.current;
149
+ if (!audioPlayer)
150
+ return;
151
+ if (playing) {
152
+ audioPlayer.play().catch((error) => {
153
+ Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[NewAudioForPreview] Failed to play', error);
154
+ });
155
+ }
156
+ else {
157
+ audioPlayer.pause();
158
+ }
159
+ }, [playing, logLevel, mediaPlayerReady]);
160
+ useEffect(() => {
161
+ const audioPlayer = mediaPlayerRef.current;
162
+ if (!audioPlayer || !mediaPlayerReady)
163
+ return;
164
+ audioPlayer.seekTo(currentTime);
165
+ Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[NewAudioForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
166
+ }, [currentTime, logLevel, mediaPlayerReady]);
167
+ useEffect(() => {
168
+ const audioPlayer = mediaPlayerRef.current;
169
+ if (!audioPlayer || !mediaPlayerReady)
170
+ return;
171
+ audioPlayer.onBufferingChange((newBufferingState) => {
172
+ if (newBufferingState && !delayHandleRef.current) {
173
+ delayHandleRef.current = buffer.delayPlayback();
174
+ Internals.Log.trace({ logLevel, tag: '@remotion/media' }, '[NewAudioForPreview] MediaPlayer buffering - blocking Remotion playback');
175
+ }
176
+ else if (!newBufferingState && delayHandleRef.current) {
177
+ delayHandleRef.current.unblock();
178
+ delayHandleRef.current = null;
179
+ Internals.Log.trace({ logLevel, tag: '@remotion/media' }, '[NewAudioForPreview] MediaPlayer unbuffering - unblocking Remotion playback');
180
+ }
181
+ });
182
+ }, [mediaPlayerReady, buffer, logLevel]);
183
+ const effectiveMuted = muted || mediaMuted || userPreferredVolume <= 0;
184
+ useEffect(() => {
185
+ const audioPlayer = mediaPlayerRef.current;
186
+ if (!audioPlayer || !mediaPlayerReady)
187
+ return;
188
+ audioPlayer.setMuted(effectiveMuted);
189
+ }, [effectiveMuted, mediaPlayerReady]);
190
+ useEffect(() => {
191
+ const audioPlayer = mediaPlayerRef.current;
192
+ if (!audioPlayer || !mediaPlayerReady) {
193
+ return;
194
+ }
195
+ audioPlayer.setVolume(userPreferredVolume);
196
+ }, [userPreferredVolume, mediaPlayerReady, logLevel]);
197
+ const effectivePlaybackRate = useMemo(() => playbackRate * globalPlaybackRate, [playbackRate, globalPlaybackRate]);
198
+ useEffect(() => {
199
+ const audioPlayer = mediaPlayerRef.current;
200
+ if (!audioPlayer || !mediaPlayerReady) {
201
+ return;
202
+ }
203
+ audioPlayer.setPlaybackRate(effectivePlaybackRate);
204
+ }, [effectivePlaybackRate, mediaPlayerReady, logLevel]);
205
+ if (shouldFallbackToNativeAudio && !disallowFallbackToHtml5Audio) {
206
+ return (_jsx(RemotionAudio, { src: src, muted: muted, volume: volume, startFrom: trimBefore, endAt: trimAfter, playbackRate: playbackRate, loopVolumeCurveBehavior: loopVolumeCurveBehavior, name: name, loop: loop, showInTimeline: showInTimeline, stack: stack ?? undefined, toneFrequency: toneFrequency, audioStreamIndex: audioStreamIndex, pauseWhenBuffering: fallbackHtml5AudioProps?.pauseWhenBuffering, ...fallbackHtml5AudioProps }));
207
+ }
208
+ return null;
209
+ };
210
+ export const AudioForPreview = ({ loop, src, logLevel, muted, name, volume, loopVolumeCurveBehavior, playbackRate, trimAfter, trimBefore, showInTimeline, stack, disallowFallbackToHtml5Audio, toneFrequency, audioStreamIndex, fallbackHtml5AudioProps, }) => {
211
+ const preloadedSrc = usePreload(src);
212
+ return (_jsx(NewAudioForPreview, { audioStreamIndex: audioStreamIndex ?? 0, src: preloadedSrc, playbackRate: playbackRate ?? 1, logLevel: logLevel ?? window.remotion_logLevel, muted: muted ?? false, volume: volume ?? 1, loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? 'repeat', loop: loop ?? false, trimAfter: trimAfter, trimBefore: trimBefore, name: name, showInTimeline: showInTimeline ?? true, stack: stack, disallowFallbackToHtml5Audio: disallowFallbackToHtml5Audio ?? false, toneFrequency: toneFrequency, fallbackHtml5AudioProps: fallbackHtml5AudioProps }));
213
+ };
@@ -1,16 +1,17 @@
1
- import { useContext, useLayoutEffect, useState } from 'react';
2
- import { cancelRender, Internals, useCurrentFrame, useDelayRender, useRemotionEnvironment, } from 'remotion';
1
+ import { jsx as _jsx } from "react/jsx-runtime";
2
+ import { useContext, useLayoutEffect, useMemo, useState } from 'react';
3
+ import { Audio, cancelRender, Internals, random, useCurrentFrame, useDelayRender, useRemotionEnvironment, } from 'remotion';
3
4
  import { applyVolume } from '../convert-audiodata/apply-volume';
5
+ import { TARGET_SAMPLE_RATE } from '../convert-audiodata/resample-audiodata';
4
6
  import { frameForVolumeProp } from '../looped-frame';
5
7
  import { extractFrameViaBroadcastChannel } from '../video-extraction/extract-frame-via-broadcast-channel';
6
- export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted, loopVolumeCurveBehavior, delayRenderRetries, delayRenderTimeoutInMilliseconds, logLevel = window.remotion_logLevel, loop, }) => {
8
+ export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted, loopVolumeCurveBehavior, delayRenderRetries, delayRenderTimeoutInMilliseconds, logLevel = window.remotion_logLevel, loop, fallbackHtml5AudioProps, audioStreamIndex, showInTimeline, style, name, disallowFallbackToHtml5Audio, toneFrequency, trimAfter, trimBefore, }) => {
7
9
  const frame = useCurrentFrame();
8
10
  const absoluteFrame = Internals.useTimelinePosition();
9
11
  const videoConfig = Internals.useUnsafeVideoConfig();
10
12
  const { registerRenderAsset, unregisterRenderAsset } = useContext(Internals.RenderAssetManager);
11
13
  const startsAt = Internals.useMediaStartsAt();
12
14
  const environment = useRemotionEnvironment();
13
- const [id] = useState(() => `${Math.random()}`.replace('0.', ''));
14
15
  if (!videoConfig) {
15
16
  throw new Error('No video config found');
16
17
  }
@@ -19,10 +20,22 @@ export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted
19
20
  }
20
21
  const { fps } = videoConfig;
21
22
  const { delayRender, continueRender } = useDelayRender();
23
+ const [replaceWithHtml5Audio, setReplaceWithHtml5Audio] = useState(false);
24
+ const sequenceContext = useContext(Internals.SequenceContext);
25
+ // Generate a string that's as unique as possible for this asset
26
+ // but at the same time the same on all threads
27
+ const id = useMemo(() => `media-video-${random(src)}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [
28
+ src,
29
+ sequenceContext?.cumulatedFrom,
30
+ sequenceContext?.relativeFrom,
31
+ sequenceContext?.durationInFrames,
32
+ ]);
22
33
  useLayoutEffect(() => {
23
- const actualFps = playbackRate ? fps / playbackRate : fps;
24
- const timestamp = frame / actualFps;
25
- const durationInSeconds = 1 / actualFps;
34
+ const timestamp = frame / fps;
35
+ const durationInSeconds = 1 / fps;
36
+ if (replaceWithHtml5Audio) {
37
+ return;
38
+ }
26
39
  const newHandle = delayRender(`Extracting audio for frame ${frame}`, {
27
40
  retries: delayRenderRetries ?? undefined,
28
41
  timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined,
@@ -41,13 +54,42 @@ export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted
41
54
  timeInSeconds: timestamp,
42
55
  durationInSeconds,
43
56
  playbackRate: playbackRate ?? 1,
44
- logLevel: logLevel ?? 'info',
57
+ logLevel,
45
58
  includeAudio: shouldRenderAudio,
46
59
  includeVideo: false,
47
60
  isClientSideRendering: environment.isClientSideRendering,
48
61
  loop: loop ?? false,
62
+ audioStreamIndex: audioStreamIndex ?? 0,
63
+ trimAfter,
64
+ trimBefore,
65
+ fps,
49
66
  })
50
- .then(({ audio, durationInSeconds: assetDurationInSeconds }) => {
67
+ .then((result) => {
68
+ if (result.type === 'unknown-container-format') {
69
+ if (disallowFallbackToHtml5Audio) {
70
+ cancelRender(new Error(`Unknown container format ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
71
+ }
72
+ Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Audio>`);
73
+ setReplaceWithHtml5Audio(true);
74
+ return;
75
+ }
76
+ if (result.type === 'cannot-decode') {
77
+ if (disallowFallbackToHtml5Audio) {
78
+ cancelRender(new Error(`Cannot decode ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
79
+ }
80
+ Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Cannot decode ${src}, falling back to <Audio>`);
81
+ setReplaceWithHtml5Audio(true);
82
+ return;
83
+ }
84
+ if (result.type === 'network-error') {
85
+ if (disallowFallbackToHtml5Audio) {
86
+ cancelRender(new Error(`Cannot decode ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
87
+ }
88
+ Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Network error fetching ${src}, falling back to <Audio>`);
89
+ setReplaceWithHtml5Audio(true);
90
+ return;
91
+ }
92
+ const { audio, durationInSeconds: assetDurationInSeconds } = result;
51
93
  const volumePropsFrame = frameForVolumeProp({
52
94
  behavior: loopVolumeCurveBehavior ?? 'repeat',
53
95
  loop: loop ?? false,
@@ -68,11 +110,10 @@ export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted
68
110
  type: 'inline-audio',
69
111
  id,
70
112
  audio: Array.from(audio.data),
71
- sampleRate: audio.sampleRate,
72
- numberOfChannels: audio.numberOfChannels,
73
113
  frame: absoluteFrame,
74
114
  timestamp: audio.timestamp,
75
- duration: (audio.numberOfFrames / audio.sampleRate) * 1000000,
115
+ duration: (audio.numberOfFrames / TARGET_SAMPLE_RATE) * 1000000,
116
+ toneFrequency: toneFrequency ?? 1,
76
117
  });
77
118
  }
78
119
  continueRender(newHandle);
@@ -90,6 +131,7 @@ export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted
90
131
  delayRender,
91
132
  delayRenderRetries,
92
133
  delayRenderTimeoutInMilliseconds,
134
+ disallowFallbackToHtml5Audio,
93
135
  environment.isClientSideRendering,
94
136
  fps,
95
137
  frame,
@@ -104,6 +146,15 @@ export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted
104
146
  startsAt,
105
147
  unregisterRenderAsset,
106
148
  volumeProp,
149
+ audioStreamIndex,
150
+ toneFrequency,
151
+ trimAfter,
152
+ trimBefore,
153
+ replaceWithHtml5Audio,
107
154
  ]);
155
+ if (replaceWithHtml5Audio) {
156
+ // TODO: Loop and other props
157
+ return (_jsx(Audio, { src: src, playbackRate: playbackRate, muted: muted, loop: loop, volume: volumeProp, delayRenderRetries: delayRenderRetries, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds, style: style, loopVolumeCurveBehavior: loopVolumeCurveBehavior, audioStreamIndex: audioStreamIndex, useWebAudioApi: fallbackHtml5AudioProps?.useWebAudioApi, onError: fallbackHtml5AudioProps?.onError, toneFrequency: toneFrequency, acceptableTimeShiftInSeconds: fallbackHtml5AudioProps?.acceptableTimeShiftInSeconds, name: name, showInTimeline: showInTimeline }));
158
+ }
108
159
  return null;
109
160
  };
@@ -1,63 +1,21 @@
1
1
  import { jsx as _jsx } from "react/jsx-runtime";
2
- import { useCallback, useContext } from 'react';
3
- import { cancelRender, Internals, Sequence, useRemotionEnvironment, } from 'remotion';
4
- import { SharedAudioContext } from '../../../core/src/audio/shared-audio-tags';
2
+ import { Internals, useRemotionEnvironment } from 'remotion';
3
+ import { AudioForPreview } from './audio-for-preview';
5
4
  import { AudioForRendering } from './audio-for-rendering';
6
- const { validateMediaTrimProps, resolveTrimProps, validateMediaProps, AudioForPreview, } = Internals;
7
- // dummy function for now because onError is not supported
8
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
9
- const onRemotionError = (_e) => { };
5
+ const { validateMediaProps } = Internals;
10
6
  export const Audio = (props) => {
11
- const audioContext = useContext(SharedAudioContext);
12
7
  // Should only destruct `trimBefore` and `trimAfter` from props,
13
8
  // rest gets drilled down
14
- const { trimBefore, trimAfter, name, pauseWhenBuffering, stack, showInTimeline, loop, ...otherProps } = props;
9
+ const { name, stack, showInTimeline, ...otherProps } = props;
15
10
  const environment = useRemotionEnvironment();
16
- const onDuration = useCallback(() => undefined, []);
17
11
  if (typeof props.src !== 'string') {
18
12
  throw new TypeError(`The \`<Audio>\` tag requires a string for \`src\`, but got ${JSON.stringify(props.src)} instead.`);
19
13
  }
20
- validateMediaTrimProps({
21
- startFrom: undefined,
22
- endAt: undefined,
23
- trimBefore,
24
- trimAfter,
25
- });
26
- const { trimBeforeValue, trimAfterValue } = resolveTrimProps({
27
- startFrom: undefined,
28
- endAt: undefined,
29
- trimBefore,
30
- trimAfter,
31
- });
32
- const onError = useCallback((e) => {
33
- // eslint-disable-next-line no-console
34
- console.log(e.currentTarget.error);
35
- // If there is no `loop` property, we don't need to get the duration
36
- // and this does not need to be a fatal error
37
- const errMessage = `Could not play audio: ${e.currentTarget.error}. See https://remotion.dev/docs/media-playback-error for help.`;
38
- if (loop) {
39
- if (onRemotionError) {
40
- onRemotionError(new Error(errMessage));
41
- return;
42
- }
43
- cancelRender(new Error(errMessage));
44
- }
45
- else {
46
- onRemotionError?.(new Error(errMessage));
47
- // eslint-disable-next-line no-console
48
- console.warn(errMessage);
49
- }
50
- }, [loop]);
51
- if (typeof trimBeforeValue !== 'undefined' ||
52
- typeof trimAfterValue !== 'undefined') {
53
- return (_jsx(Sequence, { layout: "none", from: 0 - (trimBeforeValue ?? 0), showInTimeline: false, durationInFrames: trimAfterValue, name: name, children: _jsx(Audio, { pauseWhenBuffering: pauseWhenBuffering ?? false, ...otherProps }) }));
54
- }
55
- validateMediaProps(props, 'Video');
14
+ validateMediaProps({ playbackRate: props.playbackRate, volume: props.volume }, 'Audio');
56
15
  if (environment.isRendering) {
57
16
  return _jsx(AudioForRendering, { ...otherProps });
58
17
  }
59
- const { delayRenderRetries, delayRenderTimeoutInMilliseconds, ...propsForPreview } = otherProps;
60
- return (_jsx(AudioForPreview, { _remotionInternalNativeLoopPassed: props._remotionInternalNativeLoopPassed ?? false, _remotionInternalStack: stack ?? null, shouldPreMountAudioTags: audioContext !== null && audioContext.numberOfAudioTags > 0, ...propsForPreview, onNativeError: onError, onDuration: onDuration,
61
- // Proposal: Make this default to true in v5
62
- pauseWhenBuffering: pauseWhenBuffering ?? false, _remotionInternalNeedsDurationCalculation: Boolean(loop), showInTimeline: showInTimeline ?? true }));
18
+ return _jsx(AudioForPreview, { name: name, ...otherProps, stack: stack ?? null });
63
19
  };
20
+ // TODO: Doesn't work
21
+ Internals.addSequenceStackTraces(Audio);
@@ -1,4 +1,10 @@
1
1
  import type { LogLevel, LoopVolumeCurveBehavior, VolumeProp } from 'remotion';
2
+ export type FallbackHtml5AudioProps = {
3
+ onError?: (err: Error) => void;
4
+ useWebAudioApi?: boolean;
5
+ acceptableTimeShiftInSeconds?: number;
6
+ pauseWhenBuffering?: boolean;
7
+ };
2
8
  export type AudioProps = {
3
9
  src: string;
4
10
  trimBefore?: number;
@@ -6,12 +12,9 @@ export type AudioProps = {
6
12
  volume?: VolumeProp;
7
13
  loopVolumeCurveBehavior?: LoopVolumeCurveBehavior;
8
14
  name?: string;
9
- pauseWhenBuffering?: boolean;
10
15
  showInTimeline?: boolean;
11
16
  playbackRate?: number;
12
17
  muted?: boolean;
13
- delayRenderRetries?: number;
14
- delayRenderTimeoutInMilliseconds?: number;
15
18
  style?: React.CSSProperties;
16
19
  /**
17
20
  * @deprecated For internal use only
@@ -19,5 +22,11 @@ export type AudioProps = {
19
22
  stack?: string;
20
23
  logLevel?: LogLevel;
21
24
  loop?: boolean;
25
+ audioStreamIndex?: number;
22
26
  _remotionInternalNativeLoopPassed?: boolean;
27
+ fallbackHtml5AudioProps?: FallbackHtml5AudioProps;
28
+ disallowFallbackToHtml5Audio?: boolean;
29
+ toneFrequency?: number;
30
+ delayRenderRetries?: number;
31
+ delayRenderTimeoutInMilliseconds?: number;
23
32
  };
@@ -5,7 +5,7 @@ export declare const makeAudioCache: () => {
5
5
  deleteAll: () => void;
6
6
  getSamples: (timestamp: number, durationInSeconds: number) => AudioSample[];
7
7
  getOldestTimestamp: () => number;
8
- getNewestTimestamp: () => number;
8
+ getNewestTimestamp: () => number | null;
9
9
  getOpenTimestamps: () => number[];
10
10
  };
11
11
  export type AudioCache = ReturnType<typeof makeAudioCache>;
@@ -6,7 +6,7 @@ export const makeAudioCache = () => {
6
6
  samples[sample.timestamp] = sample;
7
7
  };
8
8
  const clearBeforeThreshold = (threshold) => {
9
- for (const timestamp of timestamps) {
9
+ for (const timestamp of timestamps.slice()) {
10
10
  const endTimestamp = timestamp + samples[timestamp].duration;
11
11
  if (endTimestamp < threshold) {
12
12
  const isLast = timestamp === timestamps[timestamps.length - 1];
@@ -21,6 +21,7 @@ export const makeAudioCache = () => {
21
21
  };
22
22
  const deleteAll = () => {
23
23
  for (const timestamp of timestamps) {
24
+ samples[timestamp].close();
24
25
  delete samples[timestamp];
25
26
  }
26
27
  timestamps.length = 0;
@@ -47,6 +48,9 @@ export const makeAudioCache = () => {
47
48
  return timestamps[0];
48
49
  };
49
50
  const getNewestTimestamp = () => {
51
+ if (timestamps.length === 0) {
52
+ return null;
53
+ }
50
54
  const sample = samples[timestamps[timestamps.length - 1]];
51
55
  return sample.timestamp + sample.duration;
52
56
  };
@@ -1,24 +1,28 @@
1
1
  import type { AudioSample, AudioSampleSink } from 'mediabunny';
2
2
  import { type LogLevel } from 'remotion';
3
3
  import type { RememberActualMatroskaTimestamps } from '../video-extraction/remember-actual-matroska-timestamps';
4
- export declare const makeAudioIterator: ({ audioSampleSink, isMatroska, startTimestamp, src, actualMatroskaTimestamps, }: {
4
+ export declare const makeAudioIterator: ({ audioSampleSink, isMatroska, startTimestamp, src, actualMatroskaTimestamps, logLevel, }: {
5
5
  audioSampleSink: AudioSampleSink;
6
6
  isMatroska: boolean;
7
7
  startTimestamp: number;
8
8
  src: string;
9
9
  actualMatroskaTimestamps: RememberActualMatroskaTimestamps;
10
+ logLevel: LogLevel;
10
11
  }) => {
11
12
  src: string;
12
13
  getSamples: (ts: number, dur: number) => Promise<AudioSample[]>;
13
14
  waitForCompletion: () => Promise<boolean>;
14
15
  canSatisfyRequestedTime: (timestamp: number) => boolean;
15
- logOpenFrames: (logLevel: LogLevel) => void;
16
+ logOpenFrames: () => void;
16
17
  getCacheStats: () => {
17
18
  count: number;
18
19
  size: number;
19
20
  };
20
21
  getLastUsed: () => number;
21
- prepareForDeletion: () => Promise<void>;
22
+ prepareForDeletion: () => void;
22
23
  startTimestamp: number;
24
+ clearBeforeThreshold: (threshold: number) => void;
25
+ getOldestTimestamp: () => number;
26
+ getNewestTimestamp: () => number | null;
23
27
  };
24
28
  export type AudioSampleIterator = ReturnType<typeof makeAudioIterator>;
@@ -7,11 +7,22 @@ import { makeAudioCache } from './audio-cache';
7
7
  // The worst case seems to be FLAC files with a 65'535 sample window, which would be 1486.0ms at 44.1Khz.
8
8
  // So let's set a threshold of 1.5 seconds.
9
9
  const extraThreshold = 1.5;
10
- export const makeAudioIterator = ({ audioSampleSink, isMatroska, startTimestamp, src, actualMatroskaTimestamps, }) => {
10
+ const warned = {};
11
+ const warnAboutMatroskaOnce = (src, logLevel) => {
12
+ if (warned[src]) {
13
+ return;
14
+ }
15
+ warned[src] = true;
16
+ Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Audio from ${src} will need to be read from the beginning. https://www.remotion.dev/docs/media/support#matroska-limitation`);
17
+ };
18
+ export const makeAudioIterator = ({ audioSampleSink, isMatroska, startTimestamp, src, actualMatroskaTimestamps, logLevel, }) => {
11
19
  // Matroska timestamps are not accurate unless we start from the beginning
12
20
  // So for matroska, we need to decode all samples :(
13
21
  // https://github.com/Vanilagy/mediabunny/issues/105
14
22
  const sampleIterator = audioSampleSink.samples(isMatroska ? 0 : Math.max(0, startTimestamp - extraThreshold));
23
+ if (isMatroska) {
24
+ warnAboutMatroskaOnce(src, logLevel);
25
+ }
15
26
  let fullDuration = null;
16
27
  const cache = makeAudioCache();
17
28
  let lastUsed = Date.now();
@@ -19,7 +30,7 @@ export const makeAudioIterator = ({ audioSampleSink, isMatroska, startTimestamp,
19
30
  lastUsed = Date.now();
20
31
  const { value: sample, done } = await sampleIterator.next();
21
32
  if (done) {
22
- fullDuration = cache.getNewestTimestamp() ?? null;
33
+ fullDuration = cache.getNewestTimestamp();
23
34
  return null;
24
35
  }
25
36
  const realTimestamp = actualMatroskaTimestamps.getRealTimestamp(sample.timestamp);
@@ -40,6 +51,12 @@ export const makeAudioIterator = ({ audioSampleSink, isMatroska, startTimestamp,
40
51
  return [];
41
52
  }
42
53
  const samples = cache.getSamples(timestamp, durationInSeconds);
54
+ const newestTimestamp = cache.getNewestTimestamp();
55
+ if (newestTimestamp !== null) {
56
+ if (newestTimestamp >= timestamp + durationInSeconds - 0.0000000001) {
57
+ return samples;
58
+ }
59
+ }
43
60
  while (true) {
44
61
  const sample = await getNextSample();
45
62
  // Clear all samples before the timestamp
@@ -61,11 +78,13 @@ export const makeAudioIterator = ({ audioSampleSink, isMatroska, startTimestamp,
61
78
  }
62
79
  return samples;
63
80
  };
64
- const logOpenFrames = (logLevel) => {
65
- Internals.Log.verbose({ logLevel, tag: '@remotion/media' }, 'Open audio samples for src', src, cache
66
- .getOpenTimestamps()
67
- .map((t) => t.toFixed(3))
68
- .join(', '));
81
+ const logOpenFrames = () => {
82
+ const openTimestamps = cache.getOpenTimestamps();
83
+ if (openTimestamps.length > 0) {
84
+ const first = openTimestamps[0];
85
+ const last = openTimestamps[openTimestamps.length - 1];
86
+ Internals.Log.verbose({ logLevel, tag: '@remotion/media' }, 'Open audio samples for src', src, `${first.toFixed(3)}...${last.toFixed(3)}`);
87
+ }
69
88
  };
70
89
  const getCacheStats = () => {
71
90
  return {
@@ -80,12 +99,13 @@ export const makeAudioIterator = ({ audioSampleSink, isMatroska, startTimestamp,
80
99
  }
81
100
  return (oldestTimestamp < timestamp && Math.abs(oldestTimestamp - timestamp) < 10);
82
101
  };
83
- const prepareForDeletion = async () => {
102
+ const prepareForDeletion = () => {
84
103
  cache.deleteAll();
85
- const { value } = await sampleIterator.return();
86
- if (value) {
87
- value.close();
88
- }
104
+ sampleIterator.return().then((value) => {
105
+ if (value.value) {
106
+ value.value.close();
107
+ }
108
+ });
89
109
  fullDuration = null;
90
110
  };
91
111
  let op = Promise.resolve([]);
@@ -105,5 +125,8 @@ export const makeAudioIterator = ({ audioSampleSink, isMatroska, startTimestamp,
105
125
  getLastUsed: () => lastUsed,
106
126
  prepareForDeletion,
107
127
  startTimestamp,
128
+ clearBeforeThreshold: cache.clearBeforeThreshold,
129
+ getOldestTimestamp: cache.getOldestTimestamp,
130
+ getNewestTimestamp: cache.getNewestTimestamp,
108
131
  };
109
132
  };