@remotion/media 4.0.356 → 4.0.358

This diff compares publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package contents as they appear in the public registry.
Files changed (61)
  1. package/dist/audio/audio-for-preview.d.ts +30 -0
  2. package/dist/audio/audio-for-preview.js +229 -0
  3. package/dist/audio/audio-for-rendering.js +35 -19
  4. package/dist/audio/audio.js +7 -49
  5. package/dist/audio/props.d.ts +8 -14
  6. package/dist/audio-extraction/audio-cache.d.ts +1 -1
  7. package/dist/audio-extraction/audio-cache.js +5 -1
  8. package/dist/audio-extraction/audio-iterator.d.ts +4 -1
  9. package/dist/audio-extraction/audio-iterator.js +22 -10
  10. package/dist/audio-extraction/audio-manager.d.ts +8 -37
  11. package/dist/audio-extraction/audio-manager.js +35 -8
  12. package/dist/audio-extraction/extract-audio.d.ts +9 -2
  13. package/dist/audio-extraction/extract-audio.js +29 -15
  14. package/dist/caches.d.ts +9 -44
  15. package/dist/convert-audiodata/combine-audiodata.js +2 -23
  16. package/dist/convert-audiodata/convert-audiodata.d.ts +1 -5
  17. package/dist/convert-audiodata/convert-audiodata.js +16 -24
  18. package/dist/esm/index.mjs +2864 -2173
  19. package/dist/extract-frame-and-audio.d.ts +6 -7
  20. package/dist/extract-frame-and-audio.js +28 -19
  21. package/dist/{get-sink-weak.d.ts → get-sink.d.ts} +1 -1
  22. package/dist/get-sink.js +15 -0
  23. package/dist/get-time-in-seconds.d.ts +11 -0
  24. package/dist/get-time-in-seconds.js +25 -0
  25. package/dist/index.d.ts +1 -0
  26. package/dist/index.js +1 -0
  27. package/dist/is-network-error.d.ts +6 -0
  28. package/dist/is-network-error.js +17 -0
  29. package/dist/render-timestamp-range.d.ts +1 -0
  30. package/dist/render-timestamp-range.js +9 -0
  31. package/dist/show-in-timeline.d.ts +8 -0
  32. package/dist/show-in-timeline.js +31 -0
  33. package/dist/use-media-in-timeline.d.ts +19 -0
  34. package/dist/use-media-in-timeline.js +103 -0
  35. package/dist/video/media-player.d.ts +34 -7
  36. package/dist/video/media-player.js +164 -63
  37. package/dist/video/props.d.ts +1 -0
  38. package/dist/video/video-for-preview.d.ts +17 -9
  39. package/dist/video/video-for-preview.js +138 -92
  40. package/dist/video/video-for-rendering.d.ts +3 -0
  41. package/dist/video/video-for-rendering.js +58 -25
  42. package/dist/video/video.js +6 -10
  43. package/dist/video-extraction/extract-frame-via-broadcast-channel.d.ts +18 -6
  44. package/dist/video-extraction/extract-frame-via-broadcast-channel.js +21 -7
  45. package/dist/video-extraction/extract-frame.d.ts +20 -2
  46. package/dist/video-extraction/extract-frame.js +41 -9
  47. package/dist/video-extraction/get-frames-since-keyframe.d.ts +5 -3
  48. package/dist/video-extraction/get-frames-since-keyframe.js +7 -4
  49. package/dist/video-extraction/keyframe-bank.d.ts +3 -2
  50. package/dist/video-extraction/keyframe-bank.js +32 -12
  51. package/dist/video-extraction/keyframe-manager.d.ts +3 -8
  52. package/dist/video-extraction/keyframe-manager.js +25 -10
  53. package/package.json +54 -54
  54. package/LICENSE.md +0 -49
  55. package/dist/convert-audiodata/apply-tonefrequency.d.ts +0 -2
  56. package/dist/convert-audiodata/apply-tonefrequency.js +0 -44
  57. package/dist/convert-audiodata/wsola.d.ts +0 -13
  58. package/dist/convert-audiodata/wsola.js +0 -197
  59. package/dist/get-sink-weak.js +0 -23
  60. package/dist/log.d.ts +0 -10
  61. package/dist/log.js +0 -33

package/dist/audio/audio-for-preview.d.ts
@@ -0,0 +1,30 @@
+ import React from 'react';
+ import type { LogLevel, LoopVolumeCurveBehavior, VolumeProp } from 'remotion';
+ import type { FallbackHtml5AudioProps } from './props';
+ type InnerAudioProps = {
+ readonly loop?: boolean;
+ readonly src: string;
+ readonly logLevel?: LogLevel;
+ readonly muted?: boolean;
+ readonly name?: string | undefined;
+ readonly volume?: VolumeProp;
+ readonly loopVolumeCurveBehavior?: LoopVolumeCurveBehavior;
+ readonly playbackRate?: number;
+ readonly _remotionInternalNativeLoopPassed?: boolean;
+ readonly _remotionInternalStack?: string | null;
+ readonly shouldPreMountAudioTags?: boolean;
+ readonly onNativeError?: React.ReactEventHandler<HTMLAudioElement>;
+ readonly onDuration?: (src: string, durationInSeconds: number) => void;
+ readonly pauseWhenBuffering?: boolean;
+ readonly _remotionInternalNeedsDurationCalculation?: boolean;
+ readonly showInTimeline?: boolean;
+ readonly trimAfter?: number | undefined;
+ readonly trimBefore?: number | undefined;
+ readonly stack: string | null;
+ readonly disallowFallbackToHtml5Audio?: boolean;
+ readonly toneFrequency?: number;
+ readonly audioStreamIndex?: number;
+ readonly fallbackHtml5AudioProps?: FallbackHtml5AudioProps;
+ };
+ export declare const AudioForPreview: React.FC<InnerAudioProps>;
+ export {};

package/dist/audio/audio-for-preview.js
@@ -0,0 +1,229 @@
+ import { jsx as _jsx } from "react/jsx-runtime";
+ import { useContext, useEffect, useMemo, useRef, useState } from 'react';
+ import { Internals, Audio as RemotionAudio, useBufferState, useCurrentFrame, } from 'remotion';
+ import { useLoopDisplay } from '../show-in-timeline';
+ import { useMediaInTimeline } from '../use-media-in-timeline';
+ import { MediaPlayer } from '../video/media-player';
+ const { useUnsafeVideoConfig, Timeline, SharedAudioContext, useMediaMutedState, useMediaVolumeState, useFrameForVolumeProp, evaluateVolume, warnAboutTooHighVolume, usePreload, SequenceContext, } = Internals;
+ const NewAudioForPreview = ({ src, playbackRate, logLevel, muted, volume, loopVolumeCurveBehavior, loop, trimAfter, trimBefore, name, showInTimeline, stack, disallowFallbackToHtml5Audio, toneFrequency, audioStreamIndex, fallbackHtml5AudioProps, }) => {
+ const videoConfig = useUnsafeVideoConfig();
+ const frame = useCurrentFrame();
+ const mediaPlayerRef = useRef(null);
+ const [mediaPlayerReady, setMediaPlayerReady] = useState(false);
+ const [shouldFallbackToNativeAudio, setShouldFallbackToNativeAudio] = useState(false);
+ const [playing] = Timeline.usePlayingState();
+ const timelineContext = useContext(Timeline.TimelineContext);
+ const globalPlaybackRate = timelineContext.playbackRate;
+ const sharedAudioContext = useContext(SharedAudioContext);
+ const buffer = useBufferState();
+ const delayHandleRef = useRef(null);
+ const [mediaMuted] = useMediaMutedState();
+ const [mediaVolume] = useMediaVolumeState();
+ const volumePropFrame = useFrameForVolumeProp(loopVolumeCurveBehavior ?? 'repeat');
+ const userPreferredVolume = evaluateVolume({
+ frame: volumePropFrame,
+ volume,
+ mediaVolume,
+ });
+ warnAboutTooHighVolume(userPreferredVolume);
+ if (!videoConfig) {
+ throw new Error('No video config found');
+ }
+ if (!src) {
+ throw new TypeError('No `src` was passed to <NewAudioForPreview>.');
+ }
+ const currentTime = frame / videoConfig.fps;
+ const currentTimeRef = useRef(currentTime);
+ currentTimeRef.current = currentTime;
+ const preloadedSrc = usePreload(src);
+ const parentSequence = useContext(SequenceContext);
+ const loopDisplay = useLoopDisplay({
+ loop,
+ mediaDurationInSeconds: videoConfig.durationInFrames,
+ playbackRate,
+ trimAfter,
+ trimBefore,
+ });
+ useMediaInTimeline({
+ volume,
+ mediaVolume,
+ mediaType: 'audio',
+ src,
+ playbackRate,
+ displayName: name ?? null,
+ stack,
+ showInTimeline,
+ premountDisplay: parentSequence?.premountDisplay ?? null,
+ postmountDisplay: parentSequence?.postmountDisplay ?? null,
+ loopDisplay,
+ trimAfter,
+ trimBefore,
+ });
+ useEffect(() => {
+ if (!sharedAudioContext)
+ return;
+ if (!sharedAudioContext.audioContext)
+ return;
+ try {
+ const player = new MediaPlayer({
+ src: preloadedSrc,
+ logLevel,
+ sharedAudioContext: sharedAudioContext.audioContext,
+ loop,
+ trimAfter,
+ trimBefore,
+ fps: videoConfig.fps,
+ canvas: null,
+ playbackRate,
+ audioStreamIndex: audioStreamIndex ?? 0,
+ });
+ mediaPlayerRef.current = player;
+ player
+ .initialize(currentTimeRef.current)
+ .then((result) => {
+ if (result.type === 'unknown-container-format') {
+ if (disallowFallbackToHtml5Audio) {
+ throw new Error(`Unknown container format ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
+ }
+ Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Unknown container format for ${preloadedSrc} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Html5Audio>`);
+ setShouldFallbackToNativeAudio(true);
+ return;
+ }
+ if (result.type === 'network-error') {
+ if (disallowFallbackToHtml5Audio) {
+ throw new Error(`Network error fetching ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
+ }
+ Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Network error fetching ${preloadedSrc}, falling back to <Html5Audio>`);
+ setShouldFallbackToNativeAudio(true);
+ return;
+ }
+ if (result.type === 'cannot-decode') {
+ if (disallowFallbackToHtml5Audio) {
+ throw new Error(`Cannot decode ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
+ }
+ Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Cannot decode ${preloadedSrc}, falling back to <Html5Audio>`);
+ setShouldFallbackToNativeAudio(true);
+ return;
+ }
+ if (result.type === 'no-tracks') {
+ if (disallowFallbackToHtml5Audio) {
+ throw new Error(`No video or audio tracks found for ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
+ }
+ Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `No video or audio tracks found for ${preloadedSrc}, falling back to <Html5Audio>`);
+ setShouldFallbackToNativeAudio(true);
+ return;
+ }
+ if (result.type === 'success') {
+ setMediaPlayerReady(true);
+ Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[NewAudioForPreview] MediaPlayer initialized successfully`);
+ }
+ })
+ .catch((error) => {
+ Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[NewAudioForPreview] Failed to initialize MediaPlayer', error);
+ setShouldFallbackToNativeAudio(true);
+ });
+ }
+ catch (error) {
+ Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[NewAudioForPreview] MediaPlayer initialization failed', error);
+ setShouldFallbackToNativeAudio(true);
+ }
+ return () => {
+ if (delayHandleRef.current) {
+ delayHandleRef.current.unblock();
+ delayHandleRef.current = null;
+ }
+ if (mediaPlayerRef.current) {
+ Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[NewAudioForPreview] Disposing MediaPlayer`);
+ mediaPlayerRef.current.dispose();
+ mediaPlayerRef.current = null;
+ }
+ setMediaPlayerReady(false);
+ setShouldFallbackToNativeAudio(false);
+ };
+ }, [
+ preloadedSrc,
+ logLevel,
+ sharedAudioContext,
+ currentTimeRef,
+ loop,
+ trimAfter,
+ trimBefore,
+ playbackRate,
+ videoConfig.fps,
+ audioStreamIndex,
+ disallowFallbackToHtml5Audio,
+ ]);
+ useEffect(() => {
+ const audioPlayer = mediaPlayerRef.current;
+ if (!audioPlayer)
+ return;
+ if (playing) {
+ audioPlayer.play().catch((error) => {
+ Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[NewAudioForPreview] Failed to play', error);
+ });
+ }
+ else {
+ audioPlayer.pause();
+ }
+ }, [playing, logLevel, mediaPlayerReady]);
+ useEffect(() => {
+ const audioPlayer = mediaPlayerRef.current;
+ if (!audioPlayer || !mediaPlayerReady)
+ return;
+ audioPlayer.seekTo(currentTime);
+ Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[NewAudioForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
+ }, [currentTime, logLevel, mediaPlayerReady]);
+ useEffect(() => {
+ const audioPlayer = mediaPlayerRef.current;
+ if (!audioPlayer || !mediaPlayerReady)
+ return;
+ audioPlayer.onBufferingChange((newBufferingState) => {
+ if (newBufferingState && !delayHandleRef.current) {
+ delayHandleRef.current = buffer.delayPlayback();
+ Internals.Log.trace({ logLevel, tag: '@remotion/media' }, '[NewAudioForPreview] MediaPlayer buffering - blocking Remotion playback');
+ }
+ else if (!newBufferingState && delayHandleRef.current) {
+ delayHandleRef.current.unblock();
+ delayHandleRef.current = null;
+ Internals.Log.trace({ logLevel, tag: '@remotion/media' }, '[NewAudioForPreview] MediaPlayer unbuffering - unblocking Remotion playback');
+ }
+ });
+ }, [mediaPlayerReady, buffer, logLevel]);
+ const effectiveMuted = muted || mediaMuted || userPreferredVolume <= 0;
+ useEffect(() => {
+ const audioPlayer = mediaPlayerRef.current;
+ if (!audioPlayer || !mediaPlayerReady)
+ return;
+ audioPlayer.setMuted(effectiveMuted);
+ }, [effectiveMuted, mediaPlayerReady]);
+ useEffect(() => {
+ const audioPlayer = mediaPlayerRef.current;
+ if (!audioPlayer || !mediaPlayerReady) {
+ return;
+ }
+ audioPlayer.setVolume(userPreferredVolume);
+ }, [userPreferredVolume, mediaPlayerReady]);
+ const effectivePlaybackRate = useMemo(() => playbackRate * globalPlaybackRate, [playbackRate, globalPlaybackRate]);
+ useEffect(() => {
+ const audioPlayer = mediaPlayerRef.current;
+ if (!audioPlayer || !mediaPlayerReady) {
+ return;
+ }
+ audioPlayer.setPlaybackRate(effectivePlaybackRate);
+ }, [effectivePlaybackRate, mediaPlayerReady]);
+ useEffect(() => {
+ const audioPlayer = mediaPlayerRef.current;
+ if (!audioPlayer || !mediaPlayerReady) {
+ return;
+ }
+ audioPlayer.setFps(videoConfig.fps);
+ }, [videoConfig.fps, mediaPlayerReady]);
+ if (shouldFallbackToNativeAudio && !disallowFallbackToHtml5Audio) {
+ return (_jsx(RemotionAudio, { src: src, muted: muted, volume: volume, startFrom: trimBefore, endAt: trimAfter, playbackRate: playbackRate, loopVolumeCurveBehavior: loopVolumeCurveBehavior, name: name, loop: loop, showInTimeline: showInTimeline, stack: stack ?? undefined, toneFrequency: toneFrequency, audioStreamIndex: audioStreamIndex, pauseWhenBuffering: fallbackHtml5AudioProps?.pauseWhenBuffering, ...fallbackHtml5AudioProps }));
+ }
+ return null;
+ };
+ export const AudioForPreview = ({ loop, src, logLevel, muted, name, volume, loopVolumeCurveBehavior, playbackRate, trimAfter, trimBefore, showInTimeline, stack, disallowFallbackToHtml5Audio, toneFrequency, audioStreamIndex, fallbackHtml5AudioProps, }) => {
+ const preloadedSrc = usePreload(src);
+ return (_jsx(NewAudioForPreview, { audioStreamIndex: audioStreamIndex ?? 0, src: preloadedSrc, playbackRate: playbackRate ?? 1, logLevel: logLevel ?? window.remotion_logLevel, muted: muted ?? false, volume: volume ?? 1, loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? 'repeat', loop: loop ?? false, trimAfter: trimAfter, trimBefore: trimBefore, name: name, showInTimeline: showInTimeline ?? true, stack: stack, disallowFallbackToHtml5Audio: disallowFallbackToHtml5Audio ?? false, toneFrequency: toneFrequency, fallbackHtml5AudioProps: fallbackHtml5AudioProps }));
+ };
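
For orientation, a usage sketch of this reworked preview path, assumed from the props visible in this diff (see package/dist/audio/props.d.ts below) rather than taken from official docs: the new component tries the MediaPlayer pipeline first and only mounts <Html5Audio> when the source cannot be handled, controlled by disallowFallbackToHtml5Audio and fallbackHtml5AudioProps. The composition, URL and values are hypothetical.

import React from 'react';
import {Audio} from '@remotion/media'; // assumes the package re-exports Audio as shown in index.d.ts/index.js above

// Hypothetical composition; prop names come from package/dist/audio/props.d.ts in this diff.
export const MyTrack: React.FC = () => {
  return (
    <Audio
      src="https://example.com/music.mp3" // placeholder URL
      volume={0.8}
      trimBefore={30}
      trimAfter={300}
      toneFrequency={1} // now a top-level prop instead of fallbackHtml5AudioProps.toneFrequency
      disallowFallbackToHtml5Audio={false}
      fallbackHtml5AudioProps={{
        // only applied if playback falls back to <Html5Audio>
        pauseWhenBuffering: true,
        useWebAudioApi: false,
        acceptableTimeShiftInSeconds: 0.45,
        onError: (err) => console.warn(err),
      }}
    />
  );
};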

package/dist/audio/audio-for-rendering.js
@@ -1,17 +1,17 @@
  import { jsx as _jsx } from "react/jsx-runtime";
- import { useContext, useLayoutEffect, useState } from 'react';
- import { Audio, cancelRender, Internals, useCurrentFrame, useDelayRender, useRemotionEnvironment, } from 'remotion';
+ import { useContext, useLayoutEffect, useMemo, useState } from 'react';
+ import { cancelRender, Html5Audio, Internals, random, useCurrentFrame, useDelayRender, useRemotionEnvironment, } from 'remotion';
  import { applyVolume } from '../convert-audiodata/apply-volume';
+ import { TARGET_SAMPLE_RATE } from '../convert-audiodata/resample-audiodata';
  import { frameForVolumeProp } from '../looped-frame';
  import { extractFrameViaBroadcastChannel } from '../video-extraction/extract-frame-via-broadcast-channel';
- export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted, loopVolumeCurveBehavior, delayRenderRetries, delayRenderTimeoutInMilliseconds, logLevel = window.remotion_logLevel, loop, fallbackHtml5AudioProps, audioStreamIndex, showInTimeline, style, name, disallowFallbackToHtml5Audio, }) => {
+ export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted, loopVolumeCurveBehavior, delayRenderRetries, delayRenderTimeoutInMilliseconds, logLevel = window.remotion_logLevel, loop, fallbackHtml5AudioProps, audioStreamIndex, showInTimeline, style, name, disallowFallbackToHtml5Audio, toneFrequency, trimAfter, trimBefore, }) => {
  const frame = useCurrentFrame();
  const absoluteFrame = Internals.useTimelinePosition();
  const videoConfig = Internals.useUnsafeVideoConfig();
  const { registerRenderAsset, unregisterRenderAsset } = useContext(Internals.RenderAssetManager);
  const startsAt = Internals.useMediaStartsAt();
  const environment = useRemotionEnvironment();
- const [id] = useState(() => `${Math.random()}`.replace('0.', ''));
  if (!videoConfig) {
  throw new Error('No video config found');
  }
@@ -21,10 +21,21 @@ export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted
  const { fps } = videoConfig;
  const { delayRender, continueRender } = useDelayRender();
  const [replaceWithHtml5Audio, setReplaceWithHtml5Audio] = useState(false);
+ const sequenceContext = useContext(Internals.SequenceContext);
+ // Generate a string that's as unique as possible for this asset
+ // but at the same time the same on all threads
+ const id = useMemo(() => `media-video-${random(src)}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [
+ src,
+ sequenceContext?.cumulatedFrom,
+ sequenceContext?.relativeFrom,
+ sequenceContext?.durationInFrames,
+ ]);
  useLayoutEffect(() => {
- const actualFps = playbackRate ? fps / playbackRate : fps;
- const timestamp = frame / actualFps;
- const durationInSeconds = 1 / actualFps;
+ const timestamp = frame / fps;
+ const durationInSeconds = 1 / fps;
+ if (replaceWithHtml5Audio) {
+ return;
+ }
  const newHandle = delayRender(`Extracting audio for frame ${frame}`, {
  retries: delayRenderRetries ?? undefined,
  timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined,
@@ -43,35 +54,38 @@ export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted
  timeInSeconds: timestamp,
  durationInSeconds,
  playbackRate: playbackRate ?? 1,
- logLevel: logLevel ?? 'info',
+ logLevel,
  includeAudio: shouldRenderAudio,
  includeVideo: false,
  isClientSideRendering: environment.isClientSideRendering,
  loop: loop ?? false,
  audioStreamIndex: audioStreamIndex ?? 0,
+ trimAfter,
+ trimBefore,
+ fps,
  })
  .then((result) => {
- if (result === 'unknown-container-format') {
+ if (result.type === 'unknown-container-format') {
  if (disallowFallbackToHtml5Audio) {
  cancelRender(new Error(`Unknown container format ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
  }
- Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Audio>`);
+ Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Html5Audio>`);
  setReplaceWithHtml5Audio(true);
  return;
  }
- if (result === 'cannot-decode') {
+ if (result.type === 'cannot-decode') {
  if (disallowFallbackToHtml5Audio) {
  cancelRender(new Error(`Cannot decode ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
  }
- Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Cannot decode ${src}, falling back to <Audio>`);
+ Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Cannot decode ${src}, falling back to <Html5Audio>`);
  setReplaceWithHtml5Audio(true);
  return;
  }
- if (result === 'network-error') {
+ if (result.type === 'network-error') {
  if (disallowFallbackToHtml5Audio) {
  cancelRender(new Error(`Cannot decode ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
  }
- Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Network error fetching ${src}, falling back to <Audio>`);
+ Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Network error fetching ${src}, falling back to <Html5Audio>`);
  setReplaceWithHtml5Audio(true);
  return;
  }
@@ -96,11 +110,10 @@ export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted
  type: 'inline-audio',
  id,
  audio: Array.from(audio.data),
- sampleRate: audio.sampleRate,
- numberOfChannels: audio.numberOfChannels,
  frame: absoluteFrame,
  timestamp: audio.timestamp,
- duration: (audio.numberOfFrames / audio.sampleRate) * 1000000,
+ duration: (audio.numberOfFrames / TARGET_SAMPLE_RATE) * 1000000,
+ toneFrequency: toneFrequency ?? 1,
  });
  }
  continueRender(newHandle);
@@ -134,10 +147,13 @@ export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted
  unregisterRenderAsset,
  volumeProp,
  audioStreamIndex,
+ toneFrequency,
+ trimAfter,
+ trimBefore,
+ replaceWithHtml5Audio,
  ]);
  if (replaceWithHtml5Audio) {
- // TODO: Loop and other props
- return (_jsx(Audio, { src: src, playbackRate: playbackRate, muted: muted, loop: loop, volume: volumeProp, delayRenderRetries: delayRenderRetries, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds, style: style, loopVolumeCurveBehavior: loopVolumeCurveBehavior, audioStreamIndex: audioStreamIndex, useWebAudioApi: fallbackHtml5AudioProps?.useWebAudioApi, onError: fallbackHtml5AudioProps?.onError, toneFrequency: fallbackHtml5AudioProps?.toneFrequency, acceptableTimeShiftInSeconds: fallbackHtml5AudioProps?.acceptableTimeShiftInSeconds, name: name, showInTimeline: showInTimeline }));
+ return (_jsx(Html5Audio, { src: src, playbackRate: playbackRate, muted: muted, loop: loop, volume: volumeProp, delayRenderRetries: delayRenderRetries, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds, style: style, loopVolumeCurveBehavior: loopVolumeCurveBehavior, audioStreamIndex: audioStreamIndex, useWebAudioApi: fallbackHtml5AudioProps?.useWebAudioApi, onError: fallbackHtml5AudioProps?.onError, toneFrequency: toneFrequency, acceptableTimeShiftInSeconds: fallbackHtml5AudioProps?.acceptableTimeShiftInSeconds, name: name, showInTimeline: showInTimeline }));
  }
  return null;
  };
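
The asset-id change above replaces the per-thread Math.random() id with Remotion's seeded random() plus the sequence context, so every render worker derives the same id for the same mounted audio. A minimal sketch of that idea; the helper name and values are made up for illustration:

import {random} from 'remotion';

// Illustrative helper, not package code: random(seed) is deterministic for a given seed,
// so two workers given the same src and sequence position compute the same id.
const makeAssetId = (
  src: string,
  cumulatedFrom: number | undefined,
  relativeFrom: number | undefined,
  durationInFrames: number | undefined,
) => `media-video-${random(src)}-${cumulatedFrom}-${relativeFrom}-${durationInFrames}`;

console.log(makeAssetId('music.mp3', 0, 0, 300) === makeAssetId('music.mp3', 0, 0, 300)); // true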

package/dist/audio/audio.js
@@ -1,63 +1,21 @@
  import { jsx as _jsx } from "react/jsx-runtime";
- import { useCallback, useContext } from 'react';
- import { cancelRender, Internals, Sequence, useRemotionEnvironment, } from 'remotion';
- import { SharedAudioContext } from '../../../core/src/audio/shared-audio-tags';
+ import { Internals, useRemotionEnvironment } from 'remotion';
+ import { AudioForPreview } from './audio-for-preview';
  import { AudioForRendering } from './audio-for-rendering';
- const { validateMediaTrimProps, resolveTrimProps, validateMediaProps, AudioForPreview, } = Internals;
- // dummy function for now because onError is not supported
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
- const onRemotionError = (_e) => { };
+ const { validateMediaProps } = Internals;
  export const Audio = (props) => {
- const audioContext = useContext(SharedAudioContext);
  // Should only destruct `trimBefore` and `trimAfter` from props,
  // rest gets drilled down
- const { trimBefore, trimAfter, name, pauseWhenBuffering, stack, showInTimeline, loop, ...otherProps } = props;
+ const { name, stack, showInTimeline, ...otherProps } = props;
  const environment = useRemotionEnvironment();
- const onDuration = useCallback(() => undefined, []);
  if (typeof props.src !== 'string') {
  throw new TypeError(`The \`<Audio>\` tag requires a string for \`src\`, but got ${JSON.stringify(props.src)} instead.`);
  }
- validateMediaTrimProps({
- startFrom: undefined,
- endAt: undefined,
- trimBefore,
- trimAfter,
- });
- const { trimBeforeValue, trimAfterValue } = resolveTrimProps({
- startFrom: undefined,
- endAt: undefined,
- trimBefore,
- trimAfter,
- });
- const onError = useCallback((e) => {
- // eslint-disable-next-line no-console
- console.log(e.currentTarget.error);
- // If there is no `loop` property, we don't need to get the duration
- // and this does not need to be a fatal error
- const errMessage = `Could not play audio: ${e.currentTarget.error}. See https://remotion.dev/docs/media-playback-error for help.`;
- if (loop) {
- if (onRemotionError) {
- onRemotionError(new Error(errMessage));
- return;
- }
- cancelRender(new Error(errMessage));
- }
- else {
- onRemotionError?.(new Error(errMessage));
- // eslint-disable-next-line no-console
- console.warn(errMessage);
- }
- }, [loop]);
- if (typeof trimBeforeValue !== 'undefined' ||
- typeof trimAfterValue !== 'undefined') {
- return (_jsx(Sequence, { layout: "none", from: 0 - (trimBeforeValue ?? 0), showInTimeline: false, durationInFrames: trimAfterValue, name: name, children: _jsx(Audio, { pauseWhenBuffering: pauseWhenBuffering ?? false, ...otherProps }) }));
- }
  validateMediaProps({ playbackRate: props.playbackRate, volume: props.volume }, 'Audio');
  if (environment.isRendering) {
  return _jsx(AudioForRendering, { ...otherProps });
  }
- const { delayRenderRetries, delayRenderTimeoutInMilliseconds, ...propsForPreview } = otherProps;
- return (_jsx(AudioForPreview, { _remotionInternalNativeLoopPassed: props._remotionInternalNativeLoopPassed ?? false, _remotionInternalStack: stack ?? null, shouldPreMountAudioTags: audioContext !== null && audioContext.numberOfAudioTags > 0, ...propsForPreview, onNativeError: onError, onDuration: onDuration,
- // Proposal: Make this default to true in v5
- pauseWhenBuffering: pauseWhenBuffering ?? false, _remotionInternalNeedsDurationCalculation: Boolean(loop), showInTimeline: showInTimeline ?? true }));
+ return _jsx(AudioForPreview, { name: name, ...otherProps, stack: stack ?? null });
  };
+ // TODO: Doesn't work
+ Internals.addSequenceStackTraces(Audio);

package/dist/audio/props.d.ts
@@ -1,10 +1,9 @@
  import type { LogLevel, LoopVolumeCurveBehavior, VolumeProp } from 'remotion';
  export type FallbackHtml5AudioProps = {
- offthreadAudioProps: {
- playbackRate?: number;
- muted?: boolean;
- loop?: boolean;
- };
+ onError?: (err: Error) => void;
+ useWebAudioApi?: boolean;
+ acceptableTimeShiftInSeconds?: number;
+ pauseWhenBuffering?: boolean;
  };
  export type AudioProps = {
  src: string;
@@ -13,12 +12,9 @@ export type AudioProps = {
  volume?: VolumeProp;
  loopVolumeCurveBehavior?: LoopVolumeCurveBehavior;
  name?: string;
- pauseWhenBuffering?: boolean;
  showInTimeline?: boolean;
  playbackRate?: number;
  muted?: boolean;
- delayRenderRetries?: number;
- delayRenderTimeoutInMilliseconds?: number;
  style?: React.CSSProperties;
  /**
  * @deprecated For internal use only
@@ -28,11 +24,9 @@ export type AudioProps = {
  loop?: boolean;
  audioStreamIndex?: number;
  _remotionInternalNativeLoopPassed?: boolean;
- fallbackHtml5AudioProps?: {
- onError?: (err: Error) => void;
- useWebAudioApi?: boolean;
- toneFrequency?: number;
- acceptableTimeShiftInSeconds?: number;
- };
+ fallbackHtml5AudioProps?: FallbackHtml5AudioProps;
  disallowFallbackToHtml5Audio?: boolean;
+ toneFrequency?: number;
+ delayRenderRetries?: number;
+ delayRenderTimeoutInMilliseconds?: number;
  };

package/dist/audio-extraction/audio-cache.d.ts
@@ -5,7 +5,7 @@ export declare const makeAudioCache: () => {
  deleteAll: () => void;
  getSamples: (timestamp: number, durationInSeconds: number) => AudioSample[];
  getOldestTimestamp: () => number;
- getNewestTimestamp: () => number;
+ getNewestTimestamp: () => number | null;
  getOpenTimestamps: () => number[];
  };
  export type AudioCache = ReturnType<typeof makeAudioCache>;

package/dist/audio-extraction/audio-cache.js
@@ -6,7 +6,7 @@ export const makeAudioCache = () => {
  samples[sample.timestamp] = sample;
  };
  const clearBeforeThreshold = (threshold) => {
- for (const timestamp of timestamps) {
+ for (const timestamp of timestamps.slice()) {
  const endTimestamp = timestamp + samples[timestamp].duration;
  if (endTimestamp < threshold) {
  const isLast = timestamp === timestamps[timestamps.length - 1];
@@ -21,6 +21,7 @@ export const makeAudioCache = () => {
  };
  const deleteAll = () => {
  for (const timestamp of timestamps) {
+ samples[timestamp].close();
  delete samples[timestamp];
  }
  timestamps.length = 0;
@@ -47,6 +48,9 @@ export const makeAudioCache = () => {
  return timestamps[0];
  };
  const getNewestTimestamp = () => {
+ if (timestamps.length === 0) {
+ return null;
+ }
  const sample = samples[timestamps[timestamps.length - 1]];
  return sample.timestamp + sample.duration;
  };
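
The timestamps.slice() change in clearBeforeThreshold works around a classic pitfall: removing entries from an array while iterating it with for...of skips the element that slides into the freed slot. A standalone sketch of the hazard (not package code; the real cache removes entries differently):

// Removing items from the array being iterated skips neighbours:
const buggyClear = (timestamps: number[]) => {
  for (const t of timestamps) {
    if (t < 10) {
      timestamps.splice(timestamps.indexOf(t), 1); // mutates mid-iteration
    }
  }
  return timestamps;
};

// Iterating over a snapshot visits every original element:
const fixedClear = (timestamps: number[]) => {
  for (const t of timestamps.slice()) {
    if (t < 10) {
      timestamps.splice(timestamps.indexOf(t), 1);
    }
  }
  return timestamps;
};

console.log(buggyClear([1, 2, 3, 20])); // [2, 20] - the 2 was skipped
console.log(fixedClear([1, 2, 3, 20])); // [20]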

package/dist/audio-extraction/audio-iterator.d.ts
@@ -19,7 +19,10 @@ export declare const makeAudioIterator: ({ audioSampleSink, isMatroska, startTim
  size: number;
  };
  getLastUsed: () => number;
- prepareForDeletion: () => Promise<void>;
+ prepareForDeletion: () => void;
  startTimestamp: number;
+ clearBeforeThreshold: (threshold: number) => void;
+ getOldestTimestamp: () => number;
+ getNewestTimestamp: () => number | null;
  };
  export type AudioSampleIterator = ReturnType<typeof makeAudioIterator>;

package/dist/audio-extraction/audio-iterator.js
@@ -30,7 +30,7 @@ export const makeAudioIterator = ({ audioSampleSink, isMatroska, startTimestamp,
  lastUsed = Date.now();
  const { value: sample, done } = await sampleIterator.next();
  if (done) {
- fullDuration = cache.getNewestTimestamp() ?? null;
+ fullDuration = cache.getNewestTimestamp();
  return null;
  }
  const realTimestamp = actualMatroskaTimestamps.getRealTimestamp(sample.timestamp);
@@ -51,6 +51,12 @@ export const makeAudioIterator = ({ audioSampleSink, isMatroska, startTimestamp,
  return [];
  }
  const samples = cache.getSamples(timestamp, durationInSeconds);
+ const newestTimestamp = cache.getNewestTimestamp();
+ if (newestTimestamp !== null) {
+ if (newestTimestamp >= timestamp + durationInSeconds - 0.0000000001) {
+ return samples;
+ }
+ }
  while (true) {
  const sample = await getNextSample();
  // Clear all samples before the timestamp
@@ -73,10 +79,12 @@ export const makeAudioIterator = ({ audioSampleSink, isMatroska, startTimestamp,
  return samples;
  };
  const logOpenFrames = () => {
- Internals.Log.verbose({ logLevel, tag: '@remotion/media' }, 'Open audio samples for src', src, cache
- .getOpenTimestamps()
- .map((t) => t.toFixed(3))
- .join(', '));
+ const openTimestamps = cache.getOpenTimestamps();
+ if (openTimestamps.length > 0) {
+ const first = openTimestamps[0];
+ const last = openTimestamps[openTimestamps.length - 1];
+ Internals.Log.verbose({ logLevel, tag: '@remotion/media' }, 'Open audio samples for src', src, `${first.toFixed(3)}...${last.toFixed(3)}`);
+ }
  };
  const getCacheStats = () => {
  return {
@@ -91,12 +99,13 @@ export const makeAudioIterator = ({ audioSampleSink, isMatroska, startTimestamp,
  }
  return (oldestTimestamp < timestamp && Math.abs(oldestTimestamp - timestamp) < 10);
  };
- const prepareForDeletion = async () => {
+ const prepareForDeletion = () => {
  cache.deleteAll();
- const { value } = await sampleIterator.return();
- if (value) {
- value.close();
- }
+ sampleIterator.return().then((value) => {
+ if (value.value) {
+ value.value.close();
+ }
+ });
  fullDuration = null;
  };
  let op = Promise.resolve([]);
@@ -116,5 +125,8 @@ export const makeAudioIterator = ({ audioSampleSink, isMatroska, startTimestamp,
  getLastUsed: () => lastUsed,
  prepareForDeletion,
  startTimestamp,
+ clearBeforeThreshold: cache.clearBeforeThreshold,
+ getOldestTimestamp: cache.getOldestTimestamp,
+ getNewestTimestamp: cache.getNewestTimestamp,
  };
  };
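
The new early return added to getSamples is a cache fast path: when the newest cached timestamp already reaches the end of the requested window, the cached samples are returned without pulling more data from the iterator, and the tiny subtraction is a floating-point tolerance. An illustrative sketch of that check; the constant and function names are made up and not part of the package:

// Illustrative only - mirrors the coverage check added to getSamples above.
const EPSILON = 0.0000000001;

const cacheCoversWindow = (
  newestCachedTimestamp: number | null,
  timestamp: number,
  durationInSeconds: number,
): boolean => {
  if (newestCachedTimestamp === null) {
    return false; // nothing cached yet
  }
  // Without the epsilon, accumulated float error (e.g. 0.1 + 0.2 > 0.3) would make
  // an exactly-covered window look uncovered and trigger unnecessary decoding.
  return newestCachedTimestamp >= timestamp + durationInSeconds - EPSILON;
};

console.log(cacheCoversWindow(0.3, 0.1, 0.2)); // true
console.log(cacheCoversWindow(0.3, 0.2, 0.2)); // false - the window ends after the cache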