@remotion/media 4.0.423 → 4.0.424

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/dist/esm/index.mjs +21 -3
  2. package/dist/video-extraction/get-allocation-size.d.ts +2 -2
  3. package/dist/video-extraction/keyframe-bank.d.ts +2 -1
  4. package/package.json +6 -7
  5. package/dist/audio/allow-wait.js +0 -15
  6. package/dist/audio/audio-for-preview.js +0 -304
  7. package/dist/audio/audio-for-rendering.js +0 -194
  8. package/dist/audio/audio-preview-iterator.js +0 -176
  9. package/dist/audio/audio.js +0 -20
  10. package/dist/audio/props.js +0 -1
  11. package/dist/audio-extraction/audio-cache.js +0 -66
  12. package/dist/audio-extraction/audio-iterator.js +0 -132
  13. package/dist/audio-extraction/audio-manager.js +0 -113
  14. package/dist/audio-extraction/extract-audio.js +0 -132
  15. package/dist/audio-iterator-manager.js +0 -228
  16. package/dist/browser-can-use-webgl2.js +0 -13
  17. package/dist/caches.js +0 -61
  18. package/dist/calculate-playbacktime.js +0 -4
  19. package/dist/convert-audiodata/apply-volume.js +0 -17
  20. package/dist/convert-audiodata/combine-audiodata.js +0 -23
  21. package/dist/convert-audiodata/convert-audiodata.js +0 -73
  22. package/dist/convert-audiodata/resample-audiodata.js +0 -94
  23. package/dist/debug-overlay/preview-overlay.js +0 -42
  24. package/dist/extract-frame-and-audio.js +0 -101
  25. package/dist/get-sink.js +0 -15
  26. package/dist/get-time-in-seconds.js +0 -40
  27. package/dist/helpers/round-to-4-digits.js +0 -4
  28. package/dist/index.js +0 -12
  29. package/dist/is-type-of-error.js +0 -20
  30. package/dist/looped-frame.js +0 -10
  31. package/dist/media-player.js +0 -431
  32. package/dist/nonce-manager.js +0 -13
  33. package/dist/prewarm-iterator-for-looping.js +0 -56
  34. package/dist/render-timestamp-range.js +0 -9
  35. package/dist/show-in-timeline.js +0 -31
  36. package/dist/use-media-in-timeline.js +0 -103
  37. package/dist/video/props.js +0 -1
  38. package/dist/video/video-for-preview.js +0 -331
  39. package/dist/video/video-for-rendering.js +0 -263
  40. package/dist/video/video-preview-iterator.js +0 -122
  41. package/dist/video/video.js +0 -35
  42. package/dist/video-extraction/add-broadcast-channel-listener.js +0 -125
  43. package/dist/video-extraction/extract-frame-via-broadcast-channel.js +0 -113
  44. package/dist/video-extraction/extract-frame.js +0 -85
  45. package/dist/video-extraction/get-allocation-size.js +0 -6
  46. package/dist/video-extraction/get-frames-since-keyframe.js +0 -108
  47. package/dist/video-extraction/keyframe-bank.js +0 -159
  48. package/dist/video-extraction/keyframe-manager.js +0 -206
  49. package/dist/video-extraction/remember-actual-matroska-timestamps.js +0 -19
  50. package/dist/video-extraction/rotate-frame.js +0 -34
  51. package/dist/video-iterator-manager.js +0 -109
@@ -36,6 +36,9 @@ var __callDispose = (stack, error, hasError) => {
36
36
  return next();
37
37
  };
38
38
 
39
+ // src/index.ts
40
+ import { registerAc3Decoder } from "@mediabunny/ac3";
41
+
39
42
  // src/audio/audio.tsx
40
43
  import { Internals as Internals15, useRemotionEnvironment as useRemotionEnvironment2 } from "remotion";
41
44
 
@@ -2409,6 +2412,17 @@ var makeKeyframeBank = async ({
2409
2412
  let hasReachedEndOfVideo = false;
2410
2413
  let lastUsed = Date.now();
2411
2414
  let allocationSize = 0;
2415
+ const getDurationOfFrame = (timestamp) => {
2416
+ const index = frameTimestamps.indexOf(timestamp);
2417
+ if (index === -1) {
2418
+ throw new Error(`Frame ${timestamp} not found`);
2419
+ }
2420
+ const nextTimestamp = frameTimestamps[index + 1];
2421
+ if (!nextTimestamp) {
2422
+ return null;
2423
+ }
2424
+ return nextTimestamp - timestamp;
2425
+ };
2412
2426
  const deleteFrameAtTimestamp = (timestamp) => {
2413
2427
  allocationSize -= getAllocationSize(frames[timestamp]);
2414
2428
  frameTimestamps.splice(frameTimestamps.indexOf(timestamp), 1);
@@ -2430,7 +2444,7 @@ var makeKeyframeBank = async ({
2430
2444
  if (!frames[frameTimestamp]) {
2431
2445
  continue;
2432
2446
  }
2433
- const { duration } = frames[frameTimestamp];
2447
+ const duration = getDurationOfFrame(frameTimestamp) ?? frames[frameTimestamp].duration;
2434
2448
  if (frameTimestamp + duration < timestampInSeconds) {
2435
2449
  deleteFrameAtTimestamp(frameTimestamp);
2436
2450
  deletedTimestamps.push(frameTimestamp);
@@ -2449,7 +2463,8 @@ var makeKeyframeBank = async ({
2449
2463
  if (!lastFrame) {
2450
2464
  return true;
2451
2465
  }
2452
- return roundTo4Digits(lastFrame.timestamp + lastFrame.duration) > roundTo4Digits(timestamp);
2466
+ const duration = getDurationOfFrame(lastFrameTimestamp) ?? lastFrame.duration;
2467
+ return roundTo4Digits(lastFrameTimestamp + duration) > roundTo4Digits(timestamp);
2453
2468
  };
2454
2469
  const addFrame = (frame, logLevel) => {
2455
2470
  if (frames[frame.timestamp]) {
@@ -2523,9 +2538,10 @@ var makeKeyframeBank = async ({
2523
2538
  const firstTimestamp = frameTimestamps[0];
2524
2539
  const lastTimestamp = frameTimestamps[frameTimestamps.length - 1];
2525
2540
  const lastFrame = frames[lastTimestamp];
2541
+ const lastFrameDuration = getDurationOfFrame(lastTimestamp) ?? lastFrame.duration ?? 0;
2526
2542
  return {
2527
2543
  firstTimestamp,
2528
- lastTimestamp: lastTimestamp + lastFrame.duration
2544
+ lastTimestamp: lastTimestamp + lastFrameDuration
2529
2545
  };
2530
2546
  };
2531
2547
  const prepareForDeletion = (logLevel, reason) => {
@@ -4944,9 +4960,11 @@ var Video = ({
4944
4960
  });
4945
4961
  };
4946
4962
  Internals18.addSequenceStackTraces(Video);
4963
+
4947
4964
  // src/index.ts
4948
4965
  var experimental_Audio = Audio;
4949
4966
  var experimental_Video = Video;
4967
+ registerAc3Decoder();
4950
4968
  export {
4951
4969
  experimental_Video,
4952
4970
  experimental_Audio,
@@ -1,2 +1,2 @@
1
- import type { VideoSample } from 'mediabunny';
2
- export declare const getAllocationSize: (sample: VideoSample) => number;
1
+ import type { VideoSampleWithoutDuration } from './keyframe-bank';
2
+ export declare const getAllocationSize: (sample: VideoSampleWithoutDuration) => number;
@@ -1,8 +1,9 @@
1
1
  import type { VideoSample, VideoSampleSink } from 'mediabunny';
2
2
  import { type LogLevel } from 'remotion';
3
+ export type VideoSampleWithoutDuration = Omit<VideoSample, 'duration'>;
3
4
  export type KeyframeBank = {
4
5
  src: string;
5
- getFrameFromTimestamp: (timestamp: number, fps: number) => Promise<VideoSample | null>;
6
+ getFrameFromTimestamp: (timestamp: number, fps: number) => Promise<VideoSampleWithoutDuration | null>;
6
7
  prepareForDeletion: (logLevel: LogLevel, reason: string) => {
7
8
  framesDeleted: number;
8
9
  };
package/package.json CHANGED
@@ -1,13 +1,12 @@
1
1
  {
2
2
  "name": "@remotion/media",
3
- "version": "4.0.423",
3
+ "version": "4.0.424",
4
4
  "main": "dist/index.js",
5
5
  "types": "dist/index.d.ts",
6
6
  "module": "dist/esm/index.mjs",
7
7
  "repository": {
8
8
  "url": "https://github.com/remotion-dev/remotion/tree/main/packages/media"
9
9
  },
10
- "sideEffects": false,
11
10
  "author": "Jonny Burger <jonny@remotion.dev>, Hunain Ahmed <junaidhunain6@gmail.com>",
12
11
  "bugs": {
13
12
  "url": "https://github.com/remotion-dev/remotion/issues"
@@ -22,23 +21,23 @@
22
21
  "make": "tsgo && bun --env-file=../.env.bundle bundle.ts"
23
22
  },
24
23
  "dependencies": {
25
- "mediabunny": "1.34.2",
26
- "@mediabunny/ac3": "1.34.2",
27
- "remotion": "4.0.423"
24
+ "mediabunny": "1.34.4",
25
+ "@mediabunny/ac3": "1.34.4",
26
+ "remotion": "4.0.424"
28
27
  },
29
28
  "peerDependencies": {
30
29
  "react": ">=16.8.0",
31
30
  "react-dom": ">=16.8.0"
32
31
  },
33
32
  "devDependencies": {
34
- "@remotion/eslint-config-internal": "4.0.423",
33
+ "@remotion/eslint-config-internal": "4.0.424",
35
34
  "@vitest/browser-webdriverio": "4.0.9",
36
35
  "eslint": "9.19.0",
37
36
  "react": "19.2.3",
38
37
  "react-dom": "19.2.3",
39
38
  "vitest": "4.0.9",
40
39
  "webdriverio": "9.19.2",
41
- "@typescript/native-preview": "7.0.0-dev.20260105.1"
40
+ "@typescript/native-preview": "7.0.0-dev.20260217.1"
42
41
  },
43
42
  "keywords": [],
44
43
  "publishConfig": {
@@ -1,15 +0,0 @@
1
- export const allowWaitRoutine = async (next, waitFn) => {
2
- const result = await Promise.race([
3
- next,
4
- new Promise((resolve) => {
5
- Promise.resolve().then(() => resolve());
6
- }),
7
- ]);
8
- if (!result) {
9
- const unblock = waitFn.waitCallback();
10
- const newRes = await next;
11
- unblock();
12
- return newRes;
13
- }
14
- return result;
15
- };
@@ -1,304 +0,0 @@
1
- import { jsx as _jsx } from "react/jsx-runtime";
2
- import { useContext, useEffect, useLayoutEffect, useMemo, useRef, useState, } from 'react';
3
- import { Internals, Audio as RemotionAudio, useBufferState, useCurrentFrame, useVideoConfig, } from 'remotion';
4
- import { getTimeInSeconds } from '../get-time-in-seconds';
5
- import { MediaPlayer } from '../media-player';
6
- import { useLoopDisplay } from '../show-in-timeline';
7
- import { useMediaInTimeline } from '../use-media-in-timeline';
8
- const { useUnsafeVideoConfig, Timeline, SharedAudioContext, useMediaMutedState, useMediaVolumeState, useFrameForVolumeProp, evaluateVolume, warnAboutTooHighVolume, usePreload, SequenceContext, } = Internals;
9
- const AudioForPreviewAssertedShowing = ({ src, playbackRate, logLevel, muted, volume, loopVolumeCurveBehavior, loop, trimAfter, trimBefore, name, showInTimeline, stack, disallowFallbackToHtml5Audio, toneFrequency, audioStreamIndex, fallbackHtml5AudioProps, }) => {
10
- const videoConfig = useUnsafeVideoConfig();
11
- const frame = useCurrentFrame();
12
- const mediaPlayerRef = useRef(null);
13
- const initialTrimBeforeRef = useRef(trimBefore);
14
- const initialTrimAfterRef = useRef(trimAfter);
15
- const [mediaPlayerReady, setMediaPlayerReady] = useState(false);
16
- const [shouldFallbackToNativeAudio, setShouldFallbackToNativeAudio] = useState(false);
17
- const [playing] = Timeline.usePlayingState();
18
- const timelineContext = useContext(Internals.TimelineContext);
19
- const globalPlaybackRate = timelineContext.playbackRate;
20
- const sharedAudioContext = useContext(SharedAudioContext);
21
- const buffer = useBufferState();
22
- const [mediaMuted] = useMediaMutedState();
23
- const [mediaVolume] = useMediaVolumeState();
24
- const [mediaDurationInSeconds, setMediaDurationInSeconds] = useState(null);
25
- const volumePropFrame = useFrameForVolumeProp(loopVolumeCurveBehavior ?? 'repeat');
26
- const userPreferredVolume = evaluateVolume({
27
- frame: volumePropFrame,
28
- volume,
29
- mediaVolume,
30
- });
31
- warnAboutTooHighVolume(userPreferredVolume);
32
- if (!videoConfig) {
33
- throw new Error('No video config found');
34
- }
35
- if (!src) {
36
- throw new TypeError('No `src` was passed to <NewAudioForPreview>.');
37
- }
38
- const currentTime = frame / videoConfig.fps;
39
- const currentTimeRef = useRef(currentTime);
40
- currentTimeRef.current = currentTime;
41
- const preloadedSrc = usePreload(src);
42
- const parentSequence = useContext(SequenceContext);
43
- const isPremounting = Boolean(parentSequence?.premounting);
44
- const isPostmounting = Boolean(parentSequence?.postmounting);
45
- const loopDisplay = useLoopDisplay({
46
- loop,
47
- mediaDurationInSeconds,
48
- playbackRate,
49
- trimAfter,
50
- trimBefore,
51
- });
52
- useMediaInTimeline({
53
- volume,
54
- mediaVolume,
55
- mediaType: 'audio',
56
- src,
57
- playbackRate,
58
- displayName: name ?? null,
59
- stack,
60
- showInTimeline,
61
- premountDisplay: parentSequence?.premountDisplay ?? null,
62
- postmountDisplay: parentSequence?.postmountDisplay ?? null,
63
- loopDisplay,
64
- trimAfter,
65
- trimBefore,
66
- });
67
- const bufferingContext = useContext(Internals.BufferingContextReact);
68
- if (!bufferingContext) {
69
- throw new Error('useMediaPlayback must be used inside a <BufferingContext>');
70
- }
71
- const isPlayerBuffering = Internals.useIsPlayerBuffering(bufferingContext);
72
- const initialPlaying = useRef(playing && !isPlayerBuffering);
73
- const initialIsPremounting = useRef(isPremounting);
74
- const initialIsPostmounting = useRef(isPostmounting);
75
- const initialGlobalPlaybackRate = useRef(globalPlaybackRate);
76
- const initialPlaybackRate = useRef(playbackRate);
77
- useEffect(() => {
78
- if (!sharedAudioContext)
79
- return;
80
- if (!sharedAudioContext.audioContext)
81
- return;
82
- try {
83
- const player = new MediaPlayer({
84
- src: preloadedSrc,
85
- logLevel,
86
- sharedAudioContext: sharedAudioContext.audioContext,
87
- loop,
88
- trimAfter: initialTrimAfterRef.current,
89
- trimBefore: initialTrimBeforeRef.current,
90
- fps: videoConfig.fps,
91
- canvas: null,
92
- playbackRate: initialPlaybackRate.current,
93
- audioStreamIndex: audioStreamIndex ?? 0,
94
- debugOverlay: false,
95
- bufferState: buffer,
96
- isPostmounting: initialIsPostmounting.current,
97
- isPremounting: initialIsPremounting.current,
98
- globalPlaybackRate: initialGlobalPlaybackRate.current,
99
- onVideoFrameCallback: null,
100
- playing: initialPlaying.current,
101
- });
102
- mediaPlayerRef.current = player;
103
- player
104
- .initialize(currentTimeRef.current)
105
- .then((result) => {
106
- if (result.type === 'disposed') {
107
- return;
108
- }
109
- if (result.type === 'unknown-container-format') {
110
- if (disallowFallbackToHtml5Audio) {
111
- throw new Error(`Unknown container format ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
112
- }
113
- Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Unknown container format for ${preloadedSrc} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Html5Audio>`);
114
- setShouldFallbackToNativeAudio(true);
115
- return;
116
- }
117
- if (result.type === 'network-error') {
118
- if (disallowFallbackToHtml5Audio) {
119
- throw new Error(`Network error fetching ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
120
- }
121
- Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Network error fetching ${preloadedSrc}, falling back to <Html5Audio>`);
122
- setShouldFallbackToNativeAudio(true);
123
- return;
124
- }
125
- if (result.type === 'cannot-decode') {
126
- if (disallowFallbackToHtml5Audio) {
127
- throw new Error(`Cannot decode ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
128
- }
129
- Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `Cannot decode ${preloadedSrc}, falling back to <Html5Audio>`);
130
- setShouldFallbackToNativeAudio(true);
131
- return;
132
- }
133
- if (result.type === 'no-tracks') {
134
- if (disallowFallbackToHtml5Audio) {
135
- throw new Error(`No video or audio tracks found for ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
136
- }
137
- Internals.Log.warn({ logLevel, tag: '@remotion/media' }, `No video or audio tracks found for ${preloadedSrc}, falling back to <Html5Audio>`);
138
- setShouldFallbackToNativeAudio(true);
139
- return;
140
- }
141
- if (result.type === 'success') {
142
- setMediaPlayerReady(true);
143
- setMediaDurationInSeconds(result.durationInSeconds);
144
- Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[AudioForPreview] MediaPlayer initialized successfully`);
145
- }
146
- })
147
- .catch((error) => {
148
- Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[AudioForPreview] Failed to initialize MediaPlayer', error);
149
- setShouldFallbackToNativeAudio(true);
150
- });
151
- }
152
- catch (error) {
153
- Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[AudioForPreview] MediaPlayer initialization failed', error);
154
- setShouldFallbackToNativeAudio(true);
155
- }
156
- return () => {
157
- if (mediaPlayerRef.current) {
158
- Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[AudioForPreview] Disposing MediaPlayer`);
159
- mediaPlayerRef.current.dispose();
160
- mediaPlayerRef.current = null;
161
- }
162
- setMediaPlayerReady(false);
163
- setShouldFallbackToNativeAudio(false);
164
- };
165
- }, [
166
- preloadedSrc,
167
- logLevel,
168
- sharedAudioContext,
169
- currentTimeRef,
170
- loop,
171
- videoConfig.fps,
172
- audioStreamIndex,
173
- disallowFallbackToHtml5Audio,
174
- buffer,
175
- ]);
176
- useLayoutEffect(() => {
177
- const audioPlayer = mediaPlayerRef.current;
178
- if (!audioPlayer)
179
- return;
180
- if (playing && !isPlayerBuffering) {
181
- audioPlayer.play(currentTimeRef.current);
182
- }
183
- else {
184
- audioPlayer.pause();
185
- }
186
- }, [isPlayerBuffering, logLevel, playing]);
187
- useLayoutEffect(() => {
188
- const mediaPlayer = mediaPlayerRef.current;
189
- if (!mediaPlayer || !mediaPlayerReady) {
190
- return;
191
- }
192
- mediaPlayer.setTrimBefore(trimBefore, currentTimeRef.current);
193
- }, [trimBefore, mediaPlayerReady]);
194
- useLayoutEffect(() => {
195
- const mediaPlayer = mediaPlayerRef.current;
196
- if (!mediaPlayer || !mediaPlayerReady) {
197
- return;
198
- }
199
- mediaPlayer.setTrimAfter(trimAfter, currentTimeRef.current);
200
- }, [trimAfter, mediaPlayerReady]);
201
- const effectiveMuted = muted || mediaMuted || userPreferredVolume <= 0;
202
- useLayoutEffect(() => {
203
- const audioPlayer = mediaPlayerRef.current;
204
- if (!audioPlayer || !mediaPlayerReady)
205
- return;
206
- audioPlayer.setMuted(effectiveMuted);
207
- }, [effectiveMuted, mediaPlayerReady]);
208
- useEffect(() => {
209
- const audioPlayer = mediaPlayerRef.current;
210
- if (!audioPlayer || !mediaPlayerReady) {
211
- return;
212
- }
213
- audioPlayer.setVolume(userPreferredVolume);
214
- }, [userPreferredVolume, mediaPlayerReady]);
215
- useEffect(() => {
216
- const audioPlayer = mediaPlayerRef.current;
217
- if (!audioPlayer || !mediaPlayerReady) {
218
- return;
219
- }
220
- audioPlayer.setPlaybackRate(playbackRate);
221
- }, [playbackRate, mediaPlayerReady]);
222
- useLayoutEffect(() => {
223
- const audioPlayer = mediaPlayerRef.current;
224
- if (!audioPlayer || !mediaPlayerReady) {
225
- return;
226
- }
227
- audioPlayer.setGlobalPlaybackRate(globalPlaybackRate);
228
- }, [globalPlaybackRate, mediaPlayerReady]);
229
- useLayoutEffect(() => {
230
- const audioPlayer = mediaPlayerRef.current;
231
- if (!audioPlayer || !mediaPlayerReady) {
232
- return;
233
- }
234
- audioPlayer.setFps(videoConfig.fps);
235
- }, [videoConfig.fps, mediaPlayerReady]);
236
- useLayoutEffect(() => {
237
- const mediaPlayer = mediaPlayerRef.current;
238
- if (!mediaPlayer || !mediaPlayerReady) {
239
- return;
240
- }
241
- mediaPlayer.setLoop(loop);
242
- }, [loop, mediaPlayerReady]);
243
- useLayoutEffect(() => {
244
- const mediaPlayer = mediaPlayerRef.current;
245
- if (!mediaPlayer || !mediaPlayerReady) {
246
- return;
247
- }
248
- mediaPlayer.setIsPremounting(isPremounting);
249
- }, [isPremounting, mediaPlayerReady]);
250
- useLayoutEffect(() => {
251
- const mediaPlayer = mediaPlayerRef.current;
252
- if (!mediaPlayer || !mediaPlayerReady) {
253
- return;
254
- }
255
- mediaPlayer.setIsPostmounting(isPostmounting);
256
- }, [isPostmounting, mediaPlayerReady]);
257
- useLayoutEffect(() => {
258
- const audioPlayer = mediaPlayerRef.current;
259
- if (!audioPlayer || !mediaPlayerReady)
260
- return;
261
- audioPlayer.seekTo(currentTime).catch(() => {
262
- // Might be disposed
263
- });
264
- Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[AudioForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
265
- }, [currentTime, logLevel, mediaPlayerReady]);
266
- if (shouldFallbackToNativeAudio && !disallowFallbackToHtml5Audio) {
267
- return (_jsx(RemotionAudio, { src: src, muted: muted, volume: volume, startFrom: trimBefore, endAt: trimAfter, playbackRate: playbackRate, loopVolumeCurveBehavior: loopVolumeCurveBehavior, name: name, loop: loop, showInTimeline: showInTimeline, stack: stack ?? undefined, toneFrequency: toneFrequency, audioStreamIndex: audioStreamIndex, pauseWhenBuffering: fallbackHtml5AudioProps?.pauseWhenBuffering, crossOrigin: fallbackHtml5AudioProps?.crossOrigin, ...fallbackHtml5AudioProps }));
268
- }
269
- return null;
270
- };
271
- export const AudioForPreview = ({ loop, src, logLevel, muted, name, volume, loopVolumeCurveBehavior, playbackRate, trimAfter, trimBefore, showInTimeline, stack, disallowFallbackToHtml5Audio, toneFrequency, audioStreamIndex, fallbackHtml5AudioProps, }) => {
272
- const preloadedSrc = usePreload(src);
273
- const frame = useCurrentFrame();
274
- const videoConfig = useVideoConfig();
275
- const currentTime = frame / videoConfig.fps;
276
- const showShow = useMemo(() => {
277
- return (getTimeInSeconds({
278
- unloopedTimeInSeconds: currentTime,
279
- playbackRate: playbackRate ?? 1,
280
- loop: loop ?? false,
281
- trimBefore,
282
- trimAfter,
283
- mediaDurationInSeconds: Infinity,
284
- fps: videoConfig.fps,
285
- ifNoMediaDuration: 'infinity',
286
- src,
287
- }) !== null);
288
- }, [
289
- currentTime,
290
- loop,
291
- playbackRate,
292
- src,
293
- trimAfter,
294
- trimBefore,
295
- videoConfig.fps,
296
- ]);
297
- if (!showShow) {
298
- return null;
299
- }
300
- return (_jsx(AudioForPreviewAssertedShowing, { audioStreamIndex: audioStreamIndex ?? 0, src: preloadedSrc, playbackRate: playbackRate ?? 1, logLevel: logLevel ??
301
- (typeof window !== 'undefined'
302
- ? (window.remotion_logLevel ?? 'info')
303
- : 'info'), muted: muted ?? false, volume: volume ?? 1, loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? 'repeat', loop: loop ?? false, trimAfter: trimAfter, trimBefore: trimBefore, name: name, showInTimeline: showInTimeline ?? true, stack: stack, disallowFallbackToHtml5Audio: disallowFallbackToHtml5Audio ?? false, toneFrequency: toneFrequency, fallbackHtml5AudioProps: fallbackHtml5AudioProps }));
304
- };
@@ -1,194 +0,0 @@
1
- import { jsx as _jsx } from "react/jsx-runtime";
2
- import { useContext, useLayoutEffect, useMemo, useState } from 'react';
3
- import { cancelRender, Html5Audio, Internals, random, useCurrentFrame, useDelayRender, useRemotionEnvironment, } from 'remotion';
4
- import { useMaxMediaCacheSize } from '../caches';
5
- import { applyVolume } from '../convert-audiodata/apply-volume';
6
- import { TARGET_SAMPLE_RATE } from '../convert-audiodata/resample-audiodata';
7
- import { frameForVolumeProp } from '../looped-frame';
8
- import { extractFrameViaBroadcastChannel } from '../video-extraction/extract-frame-via-broadcast-channel';
9
- export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted, loopVolumeCurveBehavior, delayRenderRetries, delayRenderTimeoutInMilliseconds, logLevel = window.remotion_logLevel ?? 'info', loop, fallbackHtml5AudioProps, audioStreamIndex, showInTimeline, style, name, disallowFallbackToHtml5Audio, toneFrequency, trimAfter, trimBefore, }) => {
10
- const frame = useCurrentFrame();
11
- const absoluteFrame = Internals.useTimelinePosition();
12
- const videoConfig = Internals.useUnsafeVideoConfig();
13
- const { registerRenderAsset, unregisterRenderAsset } = useContext(Internals.RenderAssetManager);
14
- const startsAt = Internals.useMediaStartsAt();
15
- const environment = useRemotionEnvironment();
16
- if (!videoConfig) {
17
- throw new Error('No video config found');
18
- }
19
- if (!src) {
20
- throw new TypeError('No `src` was passed to <Audio>.');
21
- }
22
- const { fps } = videoConfig;
23
- const { delayRender, continueRender } = useDelayRender();
24
- const [replaceWithHtml5Audio, setReplaceWithHtml5Audio] = useState(false);
25
- const sequenceContext = useContext(Internals.SequenceContext);
26
- // Generate a string that's as unique as possible for this asset
27
- // but at the same time the same on all threads
28
- const id = useMemo(() => `media-audio-${random(src)}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [
29
- src,
30
- sequenceContext?.cumulatedFrom,
31
- sequenceContext?.relativeFrom,
32
- sequenceContext?.durationInFrames,
33
- ]);
34
- const maxCacheSize = useMaxMediaCacheSize(logLevel ?? window.remotion_logLevel);
35
- const audioEnabled = Internals.useAudioEnabled();
36
- useLayoutEffect(() => {
37
- const timestamp = frame / fps;
38
- const durationInSeconds = 1 / fps;
39
- const shouldRenderAudio = (() => {
40
- if (!audioEnabled) {
41
- return false;
42
- }
43
- if (muted) {
44
- return false;
45
- }
46
- return true;
47
- })();
48
- if (!shouldRenderAudio) {
49
- return;
50
- }
51
- if (replaceWithHtml5Audio) {
52
- return;
53
- }
54
- const newHandle = delayRender(`Extracting audio for frame ${frame}`, {
55
- retries: delayRenderRetries ?? undefined,
56
- timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined,
57
- });
58
- extractFrameViaBroadcastChannel({
59
- src,
60
- timeInSeconds: timestamp,
61
- durationInSeconds,
62
- playbackRate: playbackRate ?? 1,
63
- logLevel: logLevel ?? window.remotion_logLevel,
64
- includeAudio: shouldRenderAudio,
65
- includeVideo: false,
66
- isClientSideRendering: environment.isClientSideRendering,
67
- loop: loop ?? false,
68
- audioStreamIndex: audioStreamIndex ?? 0,
69
- trimAfter,
70
- trimBefore,
71
- fps,
72
- maxCacheSize,
73
- })
74
- .then((result) => {
75
- if (result.type === 'unknown-container-format') {
76
- if (environment.isClientSideRendering) {
77
- cancelRender(new Error(`Cannot render audio "${src}": Unknown container format. See supported formats: https://www.remotion.dev/docs/mediabunny/formats`));
78
- return;
79
- }
80
- if (disallowFallbackToHtml5Audio) {
81
- cancelRender(new Error(`Unknown container format ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
82
- }
83
- Internals.Log.warn({
84
- logLevel: logLevel ?? window.remotion_logLevel,
85
- tag: '@remotion/media',
86
- }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Html5Audio>`);
87
- setReplaceWithHtml5Audio(true);
88
- return;
89
- }
90
- if (result.type === 'cannot-decode') {
91
- if (environment.isClientSideRendering) {
92
- cancelRender(new Error(`Cannot render audio "${src}": The audio could not be decoded by the browser.`));
93
- return;
94
- }
95
- if (disallowFallbackToHtml5Audio) {
96
- cancelRender(new Error(`Cannot decode ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
97
- }
98
- Internals.Log.warn({
99
- logLevel: logLevel ?? window.remotion_logLevel,
100
- tag: '@remotion/media',
101
- }, `Cannot decode ${src}, falling back to <Html5Audio>`);
102
- setReplaceWithHtml5Audio(true);
103
- return;
104
- }
105
- if (result.type === 'cannot-decode-alpha') {
106
- throw new Error(`Cannot decode alpha component for ${src}, and 'disallowFallbackToHtml5Audio' was set. But this should never happen, since you used the <Audio> tag. Please report this as a bug.`);
107
- }
108
- if (result.type === 'network-error') {
109
- if (environment.isClientSideRendering) {
110
- cancelRender(new Error(`Cannot render audio "${src}": Network error while fetching the audio (possibly CORS).`));
111
- return;
112
- }
113
- if (disallowFallbackToHtml5Audio) {
114
- cancelRender(new Error(`Cannot decode ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
115
- }
116
- Internals.Log.warn({
117
- logLevel: logLevel ?? window.remotion_logLevel,
118
- tag: '@remotion/media',
119
- }, `Network error fetching ${src}, falling back to <Html5Audio>`);
120
- setReplaceWithHtml5Audio(true);
121
- return;
122
- }
123
- const { audio, durationInSeconds: assetDurationInSeconds } = result;
124
- const volumePropsFrame = frameForVolumeProp({
125
- behavior: loopVolumeCurveBehavior ?? 'repeat',
126
- loop: loop ?? false,
127
- assetDurationInSeconds: assetDurationInSeconds ?? 0,
128
- fps,
129
- frame,
130
- startsAt,
131
- });
132
- const volume = Internals.evaluateVolume({
133
- volume: volumeProp,
134
- frame: volumePropsFrame,
135
- mediaVolume: 1,
136
- });
137
- Internals.warnAboutTooHighVolume(volume);
138
- if (audio && volume > 0) {
139
- applyVolume(audio.data, volume);
140
- registerRenderAsset({
141
- type: 'inline-audio',
142
- id,
143
- audio: environment.isClientSideRendering
144
- ? audio.data
145
- : Array.from(audio.data),
146
- frame: absoluteFrame,
147
- timestamp: audio.timestamp,
148
- duration: (audio.numberOfFrames / TARGET_SAMPLE_RATE) * 1000000,
149
- toneFrequency: toneFrequency ?? 1,
150
- });
151
- }
152
- continueRender(newHandle);
153
- })
154
- .catch((error) => {
155
- cancelRender(error);
156
- });
157
- return () => {
158
- continueRender(newHandle);
159
- unregisterRenderAsset(id);
160
- };
161
- }, [
162
- absoluteFrame,
163
- continueRender,
164
- delayRender,
165
- delayRenderRetries,
166
- delayRenderTimeoutInMilliseconds,
167
- disallowFallbackToHtml5Audio,
168
- environment.isClientSideRendering,
169
- fps,
170
- frame,
171
- id,
172
- logLevel,
173
- loop,
174
- loopVolumeCurveBehavior,
175
- muted,
176
- playbackRate,
177
- registerRenderAsset,
178
- src,
179
- startsAt,
180
- unregisterRenderAsset,
181
- volumeProp,
182
- audioStreamIndex,
183
- toneFrequency,
184
- trimAfter,
185
- trimBefore,
186
- replaceWithHtml5Audio,
187
- maxCacheSize,
188
- audioEnabled,
189
- ]);
190
- if (replaceWithHtml5Audio) {
191
- return (_jsx(Html5Audio, { src: src, playbackRate: playbackRate, muted: muted, loop: loop, volume: volumeProp, delayRenderRetries: delayRenderRetries, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds, style: style, loopVolumeCurveBehavior: loopVolumeCurveBehavior, audioStreamIndex: audioStreamIndex, useWebAudioApi: fallbackHtml5AudioProps?.useWebAudioApi, onError: fallbackHtml5AudioProps?.onError, toneFrequency: toneFrequency, acceptableTimeShiftInSeconds: fallbackHtml5AudioProps?.acceptableTimeShiftInSeconds, name: name, showInTimeline: showInTimeline }));
192
- }
193
- return null;
194
- };