@remotion/media 4.0.353 → 4.0.354

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (81)
  1. package/dist/audio/audio-for-rendering.js +37 -27
  2. package/dist/audio/audio.js +6 -3
  3. package/dist/audio/props.d.ts +0 -5
  4. package/dist/audio-extraction/extract-audio.d.ts +6 -3
  5. package/dist/audio-extraction/extract-audio.js +16 -7
  6. package/dist/audio-for-rendering.d.ts +3 -0
  7. package/dist/audio-for-rendering.js +94 -0
  8. package/dist/audio.d.ts +3 -0
  9. package/dist/audio.js +60 -0
  10. package/dist/audiodata-to-array.d.ts +0 -0
  11. package/dist/audiodata-to-array.js +1 -0
  12. package/dist/convert-audiodata/apply-volume.d.ts +1 -0
  13. package/dist/convert-audiodata/apply-volume.js +17 -0
  14. package/dist/convert-audiodata/convert-audiodata.d.ts +2 -2
  15. package/dist/convert-audiodata/convert-audiodata.js +13 -7
  16. package/dist/convert-audiodata/data-types.d.ts +1 -0
  17. package/dist/convert-audiodata/data-types.js +22 -0
  18. package/dist/convert-audiodata/is-planar-format.d.ts +1 -0
  19. package/dist/convert-audiodata/is-planar-format.js +3 -0
  20. package/dist/convert-audiodata/log-audiodata.d.ts +1 -0
  21. package/dist/convert-audiodata/log-audiodata.js +8 -0
  22. package/dist/convert-audiodata/resample-audiodata.d.ts +1 -2
  23. package/dist/convert-audiodata/resample-audiodata.js +39 -18
  24. package/dist/convert-audiodata/trim-audiodata.d.ts +0 -0
  25. package/dist/convert-audiodata/trim-audiodata.js +1 -0
  26. package/dist/deserialized-audiodata.d.ts +15 -0
  27. package/dist/deserialized-audiodata.js +26 -0
  28. package/dist/esm/index.mjs +206 -120
  29. package/dist/extract-audio.d.ts +7 -0
  30. package/dist/extract-audio.js +98 -0
  31. package/dist/extract-frame-and-audio.d.ts +3 -2
  32. package/dist/extract-frame-and-audio.js +4 -3
  33. package/dist/extract-frame-via-broadcast-channel.d.ts +15 -0
  34. package/dist/extract-frame-via-broadcast-channel.js +104 -0
  35. package/dist/extract-frame.d.ts +27 -0
  36. package/dist/extract-frame.js +21 -0
  37. package/dist/extrct-audio.d.ts +7 -0
  38. package/dist/extrct-audio.js +94 -0
  39. package/dist/get-frames-since-keyframe.d.ts +22 -0
  40. package/dist/get-frames-since-keyframe.js +41 -0
  41. package/dist/keyframe-bank.d.ts +25 -0
  42. package/dist/keyframe-bank.js +120 -0
  43. package/dist/keyframe-manager.d.ts +23 -0
  44. package/dist/keyframe-manager.js +170 -0
  45. package/dist/looped-frame.d.ts +9 -0
  46. package/dist/looped-frame.js +10 -0
  47. package/dist/new-video-for-rendering.d.ts +3 -0
  48. package/dist/new-video-for-rendering.js +108 -0
  49. package/dist/new-video.d.ts +3 -0
  50. package/dist/new-video.js +37 -0
  51. package/dist/props.d.ts +29 -0
  52. package/dist/props.js +1 -0
  53. package/dist/remember-actual-matroska-timestamps.d.ts +4 -0
  54. package/dist/remember-actual-matroska-timestamps.js +19 -0
  55. package/dist/serialize-videoframe.d.ts +0 -0
  56. package/dist/serialize-videoframe.js +1 -0
  57. package/dist/video/media-player.d.ts +62 -0
  58. package/dist/video/media-player.js +361 -0
  59. package/dist/video/new-video-for-preview.d.ts +10 -0
  60. package/dist/video/new-video-for-preview.js +108 -0
  61. package/dist/video/props.d.ts +0 -5
  62. package/dist/video/timeout-utils.d.ts +2 -0
  63. package/dist/video/timeout-utils.js +18 -0
  64. package/dist/video/video-for-preview.d.ts +11 -0
  65. package/dist/video/video-for-preview.js +113 -0
  66. package/dist/video/video-for-rendering.js +41 -31
  67. package/dist/video/video.js +2 -2
  68. package/dist/video-extraction/extract-frame-via-broadcast-channel.d.ts +3 -2
  69. package/dist/video-extraction/extract-frame-via-broadcast-channel.js +9 -5
  70. package/dist/video-extraction/extract-frame.js +3 -0
  71. package/dist/video-extraction/get-frames-since-keyframe.d.ts +1 -1
  72. package/dist/video-extraction/get-frames-since-keyframe.js +6 -7
  73. package/dist/video-extraction/media-player.d.ts +64 -0
  74. package/dist/video-extraction/media-player.js +501 -0
  75. package/dist/video-extraction/new-video-for-preview.d.ts +10 -0
  76. package/dist/video-extraction/new-video-for-preview.js +114 -0
  77. package/dist/video-for-rendering.d.ts +3 -0
  78. package/dist/video-for-rendering.js +108 -0
  79. package/dist/video.d.ts +3 -0
  80. package/dist/video.js +37 -0
  81. package/package.json +3 -3

package/dist/audio/audio-for-rendering.js
@@ -1,12 +1,14 @@
- import { useContext, useLayoutEffect, useMemo, useState } from 'react';
+ import { useContext, useLayoutEffect, useState } from 'react';
  import { cancelRender, Internals, useCurrentFrame, useDelayRender, useRemotionEnvironment, } from 'remotion';
+ import { applyVolume } from '../convert-audiodata/apply-volume';
+ import { frameForVolumeProp } from '../looped-frame';
  import { extractFrameViaBroadcastChannel } from '../video-extraction/extract-frame-via-broadcast-channel';
  export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted, loopVolumeCurveBehavior, delayRenderRetries, delayRenderTimeoutInMilliseconds, logLevel = window.remotion_logLevel, loop, }) => {
+ const frame = useCurrentFrame();
  const absoluteFrame = Internals.useTimelinePosition();
  const videoConfig = Internals.useUnsafeVideoConfig();
  const { registerRenderAsset, unregisterRenderAsset } = useContext(Internals.RenderAssetManager);
- const frame = useCurrentFrame();
- const volumePropsFrame = Internals.useFrameForVolumeProp(loopVolumeCurveBehavior ?? 'repeat');
+ const startsAt = Internals.useMediaStartsAt();
  const environment = useRemotionEnvironment();
  const [id] = useState(() => `${Math.random()}`.replace('0.', ''));
  if (!videoConfig) {
@@ -15,24 +17,6 @@ export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted
  if (!src) {
  throw new TypeError('No `src` was passed to <Audio>.');
  }
- const volume = Internals.evaluateVolume({
- volume: volumeProp,
- frame: volumePropsFrame,
- mediaVolume: 1,
- });
- Internals.warnAboutTooHighVolume(volume);
- const shouldRenderAudio = useMemo(() => {
- if (!window.remotion_audioEnabled) {
- return false;
- }
- if (muted) {
- return false;
- }
- if (volume <= 0) {
- return false;
- }
- return true;
- }, [muted, volume]);
  const { fps } = videoConfig;
  const { delayRender, continueRender } = useDelayRender();
  useLayoutEffect(() => {
@@ -43,19 +27,43 @@ export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted
  retries: delayRenderRetries ?? undefined,
  timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined,
  });
+ const shouldRenderAudio = (() => {
+ if (!window.remotion_audioEnabled) {
+ return false;
+ }
+ if (muted) {
+ return false;
+ }
+ return true;
+ })();
  extractFrameViaBroadcastChannel({
  src,
  timeInSeconds: timestamp,
  durationInSeconds,
+ playbackRate: playbackRate ?? 1,
  logLevel: logLevel ?? 'info',
  includeAudio: shouldRenderAudio,
  includeVideo: false,
  isClientSideRendering: environment.isClientSideRendering,
- volume,
  loop: loop ?? false,
  })
- .then(({ audio }) => {
- if (audio) {
+ .then(({ audio, durationInSeconds: assetDurationInSeconds }) => {
+ const volumePropsFrame = frameForVolumeProp({
+ behavior: loopVolumeCurveBehavior ?? 'repeat',
+ loop: loop ?? false,
+ assetDurationInSeconds: assetDurationInSeconds ?? 0,
+ fps,
+ frame,
+ startsAt,
+ });
+ const volume = Internals.evaluateVolume({
+ volume: volumeProp,
+ frame: volumePropsFrame,
+ mediaVolume: 1,
+ });
+ Internals.warnAboutTooHighVolume(volume);
+ if (audio && volume > 0) {
+ applyVolume(audio.data, volume);
  registerRenderAsset({
  type: 'inline-audio',
  id,
@@ -87,13 +95,15 @@ export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted
  frame,
  id,
  logLevel,
+ loop,
+ loopVolumeCurveBehavior,
+ muted,
  playbackRate,
  registerRenderAsset,
- shouldRenderAudio,
  src,
+ startsAt,
  unregisterRenderAsset,
- volume,
- loop,
+ volumeProp,
  ]);
  return null;
  };
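
In plain terms, the volume is no longer evaluated as hook state before extraction; it is computed inside the extraction callback from the asset duration reported over the broadcast channel, then baked into the extracted PCM. A condensed sketch of that flow, not the package's code, using the imports introduced at the top of this file (the result and option types are assumptions based on the diff):

import {Internals} from 'remotion';
import {applyVolume} from '../convert-audiodata/apply-volume';
import {frameForVolumeProp} from '../looped-frame';

// Assumed shapes, matching PcmS16AudioData and the extractAudio return type further below.
type ExtractedAudio = {data: Int16Array} | null;

const applyRenderTimeVolume = (params: {
  audio: ExtractedAudio;
  assetDurationInSeconds: number | null;
  volumeProp: number | ((frame: number) => number);
  frame: number;
  fps: number;
  startsAt: number;
  loop: boolean;
}) => {
  // The volume-curve frame now accounts for looping, based on the reported asset duration.
  const volumePropsFrame = frameForVolumeProp({
    behavior: 'repeat',
    loop: params.loop,
    assetDurationInSeconds: params.assetDurationInSeconds ?? 0,
    fps: params.fps,
    frame: params.frame,
    startsAt: params.startsAt,
  });
  const volume = Internals.evaluateVolume({
    volume: params.volumeProp,
    frame: volumePropsFrame,
    mediaVolume: 1,
  });
  if (params.audio && volume > 0) {
    // Gain is applied to the PCM in place before the render asset is registered.
    applyVolume(params.audio.data, volume);
  }
};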

package/dist/audio/audio.js
@@ -4,11 +4,14 @@ import { cancelRender, Internals, Sequence, useRemotionEnvironment, } from 'remo
  import { SharedAudioContext } from '../../../core/src/audio/shared-audio-tags';
  import { AudioForRendering } from './audio-for-rendering';
  const { validateMediaTrimProps, resolveTrimProps, validateMediaProps, AudioForPreview, } = Internals;
+ // dummy function for now because onError is not supported
+ // eslint-disable-next-line @typescript-eslint/no-unused-vars
+ const onRemotionError = (_e) => { };
  export const Audio = (props) => {
  const audioContext = useContext(SharedAudioContext);
  // Should only destruct `trimBefore` and `trimAfter` from props,
  // rest gets drilled down
- const { trimBefore, trimAfter, name, pauseWhenBuffering, stack, showInTimeline, onError: onRemotionError, loop, ...otherProps } = props;
+ const { trimBefore, trimAfter, name, pauseWhenBuffering, stack, showInTimeline, loop, ...otherProps } = props;
  const environment = useRemotionEnvironment();
  const onDuration = useCallback(() => undefined, []);
  if (typeof props.src !== 'string') {
@@ -44,7 +47,7 @@ export const Audio = (props) => {
  // eslint-disable-next-line no-console
  console.warn(errMessage);
  }
- }, [onRemotionError, loop]);
+ }, [loop]);
  if (typeof trimBeforeValue !== 'undefined' ||
  typeof trimAfterValue !== 'undefined') {
  return (_jsx(Sequence, { layout: "none", from: 0 - (trimBeforeValue ?? 0), showInTimeline: false, durationInFrames: trimAfterValue, name: name, children: _jsx(Audio, { pauseWhenBuffering: pauseWhenBuffering ?? false, ...otherProps }) }));
@@ -53,7 +56,7 @@ export const Audio = (props) => {
  if (environment.isRendering) {
  return _jsx(AudioForRendering, { ...otherProps });
  }
- const { onAutoPlayError, crossOrigin, delayRenderRetries, delayRenderTimeoutInMilliseconds, ...propsForPreview } = otherProps;
+ const { delayRenderRetries, delayRenderTimeoutInMilliseconds, ...propsForPreview } = otherProps;
  return (_jsx(AudioForPreview, { _remotionInternalNativeLoopPassed: props._remotionInternalNativeLoopPassed ?? false, _remotionInternalStack: stack ?? null, shouldPreMountAudioTags: audioContext !== null && audioContext.numberOfAudioTags > 0, ...propsForPreview, onNativeError: onError, onDuration: onDuration,
  // Proposal: Make this default to true in v5
  pauseWhenBuffering: pauseWhenBuffering ?? false, _remotionInternalNeedsDurationCalculation: Boolean(loop), showInTimeline: showInTimeline ?? true }));

package/dist/audio/props.d.ts
@@ -8,16 +8,11 @@ export type AudioProps = {
  name?: string;
  pauseWhenBuffering?: boolean;
  showInTimeline?: boolean;
- onAutoPlayError?: null | (() => void);
  playbackRate?: number;
  muted?: boolean;
  delayRenderRetries?: number;
  delayRenderTimeoutInMilliseconds?: number;
- crossOrigin?: '' | 'anonymous' | 'use-credentials';
  style?: React.CSSProperties;
- onError?: (err: Error) => void;
- useWebAudioApi?: boolean;
- acceptableTimeShiftInSeconds?: number;
  /**
  * @deprecated For internal use only
  */

package/dist/audio-extraction/extract-audio.d.ts
@@ -1,10 +1,13 @@
  import { type LogLevel } from 'remotion';
  import type { PcmS16AudioData } from '../convert-audiodata/convert-audiodata';
- export declare const extractAudio: ({ src, timeInSeconds: unloopedTimeInSeconds, durationInSeconds, volume, logLevel, loop, }: {
+ export declare const extractAudio: ({ src, timeInSeconds: unloopedTimeInSeconds, durationInSeconds, logLevel, loop, playbackRate, }: {
  src: string;
  timeInSeconds: number;
  durationInSeconds: number;
- volume: number;
  logLevel: LogLevel;
  loop: boolean;
- }) => Promise<PcmS16AudioData | null>;
+ playbackRate: number;
+ }) => Promise<{
+ data: PcmS16AudioData | null;
+ durationInSeconds: number | null;
+ }>;

package/dist/audio-extraction/extract-audio.js
@@ -4,16 +4,20 @@ import { convertAudioData } from '../convert-audiodata/convert-audiodata';
  import { TARGET_NUMBER_OF_CHANNELS, TARGET_SAMPLE_RATE, } from '../convert-audiodata/resample-audiodata';
  import { sinkPromises } from '../video-extraction/extract-frame';
  import { getSinks } from '../video-extraction/get-frames-since-keyframe';
- export const extractAudio = async ({ src, timeInSeconds: unloopedTimeInSeconds, durationInSeconds, volume, logLevel, loop, }) => {
+ export const extractAudio = async ({ src, timeInSeconds: unloopedTimeInSeconds, durationInSeconds, logLevel, loop, playbackRate, }) => {
  if (!sinkPromises[src]) {
  sinkPromises[src] = getSinks(src);
  }
  const { audio, actualMatroskaTimestamps, isMatroska, getDuration } = await sinkPromises[src];
+ let duration = null;
+ if (loop) {
+ duration = await getDuration();
+ }
  if (audio === null) {
- return null;
+ return { data: null, durationInSeconds: null };
  }
  const timeInSeconds = loop
- ? unloopedTimeInSeconds % (await getDuration())
+ ? unloopedTimeInSeconds % duration
  : unloopedTimeInSeconds;
  const sampleIterator = await audioManager.getIterator({
  src,
@@ -44,10 +48,15 @@ export const extractAudio = async ({ src, timeInSeconds: unloopedTimeInSeconds,
  // amount of samples to shave from start and end
  let trimStartInSeconds = 0;
  let trimEndInSeconds = 0;
- // TODO: Apply playback rate
  // TODO: Apply tone frequency
  if (isFirstSample) {
  trimStartInSeconds = timeInSeconds - sample.timestamp;
+ if (trimStartInSeconds < 0 && trimStartInSeconds > -1e-10) {
+ trimStartInSeconds = 0;
+ }
+ if (trimStartInSeconds < 0) {
+ throw new Error(`trimStartInSeconds is negative: ${trimStartInSeconds}`);
+ }
  }
  if (isLastSample) {
  trimEndInSeconds =
@@ -62,7 +71,7 @@ export const extractAudio = async ({ src, timeInSeconds: unloopedTimeInSeconds,
  trimStartInSeconds,
  trimEndInSeconds,
  targetNumberOfChannels: TARGET_NUMBER_OF_CHANNELS,
- volume,
+ playbackRate,
  });
  audioDataRaw.close();
  if (audioData.numberOfFrames === 0) {
@@ -71,8 +80,8 @@ export const extractAudio = async ({ src, timeInSeconds: unloopedTimeInSeconds,
  audioDataArray.push(audioData);
  }
  if (audioDataArray.length === 0) {
- return null;
+ return { data: null, durationInSeconds: duration };
  }
  const combined = combineAudioDataAndClosePrevious(audioDataArray);
- return combined;
+ return { data: combined, durationInSeconds: duration };
  };
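
For callers, the visible change is the return shape: extractAudio now resolves to an object instead of a bare PcmS16AudioData, and the media duration is only probed when loop is set. A hedged usage sketch, with made-up src and timing values:

// Illustrative call site for extractAudio as declared in extract-audio.d.ts above.
const run = async () => {
  const {data, durationInSeconds} = await extractAudio({
    src: 'https://example.com/audio.mp3', // placeholder URL
    timeInSeconds: 65,         // with loop: true and a 60s asset this wraps to 65 % 60 = 5
    durationInSeconds: 1 / 30, // one video frame's worth of audio
    logLevel: 'info',
    loop: true,
    playbackRate: 1,
  });
  if (data === null) {
    // Either the source has no audio track (durationInSeconds is then null too),
    // or the requested window produced no samples.
  }
};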

package/dist/audio-for-rendering.d.ts ADDED
@@ -0,0 +1,3 @@
+ import type React from 'react';
+ import type { AudioProps } from './props';
+ export declare const AudioForRendering: React.FC<AudioProps>;

package/dist/audio-for-rendering.js ADDED
@@ -0,0 +1,94 @@
+ import { useContext, useLayoutEffect, useMemo, useState } from 'react';
+ import { cancelRender, Internals, useCurrentFrame, useDelayRender, useRemotionEnvironment, } from 'remotion';
+ import { extractFrameViaBroadcastChannel } from './extract-frame-via-broadcast-channel';
+ export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted, loopVolumeCurveBehavior, delayRenderRetries, delayRenderTimeoutInMilliseconds, logLevel = window.remotion_logLevel, }) => {
+ const absoluteFrame = Internals.useTimelinePosition();
+ const videoConfig = Internals.useUnsafeVideoConfig();
+ const { registerRenderAsset, unregisterRenderAsset } = useContext(Internals.RenderAssetManager);
+ const frame = useCurrentFrame();
+ const volumePropsFrame = Internals.useFrameForVolumeProp(loopVolumeCurveBehavior ?? 'repeat');
+ const environment = useRemotionEnvironment();
+ const [id] = useState(() => `${Math.random()}`.replace('0.', ''));
+ if (!videoConfig) {
+ throw new Error('No video config found');
+ }
+ if (!src) {
+ throw new TypeError('No `src` was passed to <Video>.');
+ }
+ const volume = Internals.evaluateVolume({
+ volume: volumeProp,
+ frame: volumePropsFrame,
+ mediaVolume: 1,
+ });
+ Internals.warnAboutTooHighVolume(volume);
+ const shouldRenderAudio = useMemo(() => {
+ if (!window.remotion_audioEnabled) {
+ return false;
+ }
+ if (muted) {
+ return false;
+ }
+ if (volume <= 0) {
+ return false;
+ }
+ return true;
+ }, [muted, volume]);
+ const { fps } = videoConfig;
+ const { delayRender, continueRender } = useDelayRender();
+ useLayoutEffect(() => {
+ const actualFps = playbackRate ? fps / playbackRate : fps;
+ const timestamp = frame / actualFps;
+ const durationInSeconds = 1 / actualFps;
+ const newHandle = delayRender(`Extracting frame number ${frame}`, {
+ retries: delayRenderRetries ?? undefined,
+ timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined,
+ });
+ extractFrameViaBroadcastChannel({
+ src,
+ timeInSeconds: timestamp,
+ durationInSeconds,
+ logLevel: logLevel ?? 'info',
+ shouldRenderAudio,
+ isClientSideRendering: environment.isClientSideRendering,
+ })
+ .then(({ audio }) => {
+ if (audio) {
+ registerRenderAsset({
+ type: 'inline-audio',
+ id,
+ audio: Array.from(audio.data),
+ sampleRate: audio.sampleRate,
+ numberOfChannels: audio.numberOfChannels,
+ frame: absoluteFrame,
+ timestamp: audio.timestamp,
+ duration: (audio.numberOfFrames / audio.sampleRate) * 1000000,
+ });
+ }
+ continueRender(newHandle);
+ })
+ .catch((error) => {
+ cancelRender(error);
+ });
+ return () => {
+ continueRender(newHandle);
+ unregisterRenderAsset(id);
+ };
+ }, [
+ absoluteFrame,
+ continueRender,
+ delayRender,
+ delayRenderRetries,
+ delayRenderTimeoutInMilliseconds,
+ environment.isClientSideRendering,
+ fps,
+ frame,
+ id,
+ logLevel,
+ playbackRate,
+ registerRenderAsset,
+ shouldRenderAudio,
+ src,
+ unregisterRenderAsset,
+ ]);
+ return null;
+ };

package/dist/audio.d.ts ADDED
@@ -0,0 +1,3 @@
+ import React from 'react';
+ import type { AudioProps } from './props';
+ export declare const Audio: React.FC<AudioProps>;

package/dist/audio.js ADDED
@@ -0,0 +1,60 @@
+ import { jsx as _jsx } from "react/jsx-runtime";
+ import { useCallback, useContext } from 'react';
+ import { cancelRender, Internals, Sequence, useRemotionEnvironment, } from 'remotion';
+ import { SharedAudioContext } from '../../core/src/audio/shared-audio-tags';
+ import { AudioForRendering } from './audio-for-rendering';
+ const { validateMediaTrimProps, resolveTrimProps, validateMediaProps, AudioForPreview, } = Internals;
+ export const Audio = (props) => {
+ const audioContext = useContext(SharedAudioContext);
+ // Should only destruct `trimBefore` and `trimAfter` from props,
+ // rest gets drilled down
+ const { trimBefore, trimAfter, name, pauseWhenBuffering, stack, showInTimeline, onError: onRemotionError, loop, ...otherProps } = props;
+ const environment = useRemotionEnvironment();
+ const onDuration = useCallback(() => undefined, []);
+ if (typeof props.src !== 'string') {
+ throw new TypeError(`The \`<Audio>\` tag requires a string for \`src\`, but got ${JSON.stringify(props.src)} instead.`);
+ }
+ validateMediaTrimProps({
+ startFrom: undefined,
+ endAt: undefined,
+ trimBefore,
+ trimAfter,
+ });
+ const { trimBeforeValue, trimAfterValue } = resolveTrimProps({
+ startFrom: undefined,
+ endAt: undefined,
+ trimBefore,
+ trimAfter,
+ });
+ const onError = useCallback((e) => {
+ // eslint-disable-next-line no-console
+ console.log(e.currentTarget.error);
+ // If there is no `loop` property, we don't need to get the duration
+ // and this does not need to be a fatal error
+ const errMessage = `Could not play audio: ${e.currentTarget.error}. See https://remotion.dev/docs/media-playback-error for help.`;
+ if (loop) {
+ if (onRemotionError) {
+ onRemotionError(new Error(errMessage));
+ return;
+ }
+ cancelRender(new Error(errMessage));
+ }
+ else {
+ onRemotionError?.(new Error(errMessage));
+ // eslint-disable-next-line no-console
+ console.warn(errMessage);
+ }
+ }, [onRemotionError, loop]);
+ if (typeof trimBeforeValue !== 'undefined' ||
+ typeof trimAfterValue !== 'undefined') {
+ return (_jsx(Sequence, { layout: "none", from: 0 - (trimBeforeValue ?? 0), showInTimeline: false, durationInFrames: trimAfterValue, name: name, children: _jsx(Audio, { pauseWhenBuffering: pauseWhenBuffering ?? false, ...otherProps }) }));
+ }
+ validateMediaProps(props, 'Video');
+ if (environment.isRendering) {
+ return _jsx(AudioForRendering, { ...otherProps });
+ }
+ const { onAutoPlayError, crossOrigin, delayRenderRetries, delayRenderTimeoutInMilliseconds, ...propsForPreview } = otherProps;
+ return (_jsx(AudioForPreview, { _remotionInternalNativeLoopPassed: props._remotionInternalNativeLoopPassed ?? false, _remotionInternalStack: stack ?? null, shouldPreMountAudioTags: audioContext !== null && audioContext.numberOfAudioTags > 0, ...propsForPreview, onNativeError: onError, onDuration: onDuration,
+ // Proposal: Make this default to true in v5
+ pauseWhenBuffering: pauseWhenBuffering ?? false, _remotionInternalNeedsDurationCalculation: Boolean(loop), showInTimeline: showInTimeline ?? true }));
+ };

package/dist/audiodata-to-array.d.ts
File without changes

package/dist/audiodata-to-array.js ADDED
@@ -0,0 +1 @@
+ "use strict";

package/dist/convert-audiodata/apply-volume.d.ts ADDED
@@ -0,0 +1 @@
+ export declare const applyVolume: (array: Int16Array, volume: number) => void;

package/dist/convert-audiodata/apply-volume.js ADDED
@@ -0,0 +1,17 @@
+ export const applyVolume = (array, volume) => {
+ if (volume === 1) {
+ return;
+ }
+ for (let i = 0; i < array.length; i++) {
+ const newValue = array[i] * volume;
+ if (newValue < -32768) {
+ array[i] = -32768;
+ }
+ else if (newValue > 32767) {
+ array[i] = 32767;
+ }
+ else {
+ array[i] = newValue;
+ }
+ }
+ };
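
applyVolume scales signed 16-bit PCM in place and clamps to the Int16 range; a volume of 1 is a no-op. A quick illustration using the function defined above:

const samples = new Int16Array([1000, -2000, 30000]);
applyVolume(samples, 2);
// samples is now [2000, -4000, 32767] — the last value is clamped at the Int16 maximum.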

package/dist/convert-audiodata/convert-audiodata.d.ts
@@ -4,7 +4,7 @@ export type ConvertAudioDataOptions = {
  trimStartInSeconds: number;
  trimEndInSeconds: number;
  targetNumberOfChannels: number;
- volume: number;
+ playbackRate: number;
  };
  export type PcmS16AudioData = {
  data: Int16Array;
@@ -13,4 +13,4 @@ export type PcmS16AudioData = {
  numberOfFrames: number;
  timestamp: number;
  };
- export declare const convertAudioData: ({ audioData, newSampleRate, trimStartInSeconds, trimEndInSeconds, targetNumberOfChannels, volume, }: ConvertAudioDataOptions) => PcmS16AudioData;
+ export declare const convertAudioData: ({ audioData, newSampleRate, trimStartInSeconds, trimEndInSeconds, targetNumberOfChannels, playbackRate, }: ConvertAudioDataOptions) => PcmS16AudioData;

package/dist/convert-audiodata/convert-audiodata.js
@@ -1,12 +1,19 @@
  import { resampleAudioData } from './resample-audiodata';
  const FORMAT = 's16';
- export const convertAudioData = ({ audioData, newSampleRate, trimStartInSeconds, trimEndInSeconds, targetNumberOfChannels, volume, }) => {
+ const roundButRoundDownZeroPointFive = (value) => {
+ if (value % 1 <= 0.5) {
+ return Math.floor(value);
+ }
+ return Math.ceil(value);
+ };
+ export const convertAudioData = ({ audioData, newSampleRate, trimStartInSeconds, trimEndInSeconds, targetNumberOfChannels, playbackRate, }) => {
  const { numberOfChannels: srcNumberOfChannels, sampleRate: currentSampleRate, numberOfFrames, } = audioData;
  const ratio = currentSampleRate / newSampleRate;
- const frameOffset = Math.round(trimStartInSeconds * audioData.sampleRate);
- const frameCount = numberOfFrames -
- Math.round((trimEndInSeconds + trimStartInSeconds) * audioData.sampleRate);
- const newNumberOfFrames = Math.round(frameCount / ratio);
+ const frameOffset = roundButRoundDownZeroPointFive(trimStartInSeconds * audioData.sampleRate);
+ const unroundedFrameCount = numberOfFrames -
+ (trimEndInSeconds + trimStartInSeconds) * audioData.sampleRate;
+ const frameCount = Math.round(unroundedFrameCount);
+ const newNumberOfFrames = Math.round(unroundedFrameCount / ratio / playbackRate);
  if (newNumberOfFrames === 0) {
  throw new Error('Cannot resample - the given sample rate would result in less than 1 sample');
  }
@@ -24,7 +31,7 @@ export const convertAudioData = ({ audioData, newSampleRate, trimStartInSeconds,
  const chunkSize = frameCount / newNumberOfFrames;
  if (newNumberOfFrames === frameCount &&
  targetNumberOfChannels === srcNumberOfChannels &&
- volume === 1) {
+ playbackRate === 1) {
  return {
  data: srcChannels,
  numberOfChannels: targetNumberOfChannels,
@@ -39,7 +46,6 @@ export const convertAudioData = ({ audioData, newSampleRate, trimStartInSeconds,
  destination: data,
  targetFrames: newNumberOfFrames,
  chunkSize,
- volume,
  });
  const newAudioData = {
  data,
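
The new frame-count math is newNumberOfFrames = round(unroundedFrameCount / ratio / playbackRate), with ratio = currentSampleRate / newSampleRate. A worked example with illustrative numbers (not taken from the package):

// 0.1s of 44.1 kHz source, no trimming, resampled to the 48 kHz target at 2× speed.
const currentSampleRate = 44100;
const newSampleRate = 48000;     // TARGET_SAMPLE_RATE
const numberOfFrames = 4410;     // 0.1s of source audio
const playbackRate = 2;
const ratio = currentSampleRate / newSampleRate;      // 0.91875
const unroundedFrameCount = numberOfFrames;           // nothing trimmed in this example
const newNumberOfFrames = Math.round(unroundedFrameCount / ratio / playbackRate); // 2400
// 2400 frames at 48 kHz = 0.05s of output, i.e. 0.1s of source played at double speed.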

package/dist/convert-audiodata/data-types.d.ts ADDED
@@ -0,0 +1 @@
+ export declare const getDataTypeForAudioFormat: (format: AudioSampleFormat) => Float32ArrayConstructor | Int16ArrayConstructor | Uint8ArrayConstructor | Int32ArrayConstructor;

package/dist/convert-audiodata/data-types.js ADDED
@@ -0,0 +1,22 @@
+ export const getDataTypeForAudioFormat = (format) => {
+ switch (format) {
+ case 'f32':
+ return Float32Array;
+ case 'f32-planar':
+ return Float32Array;
+ case 's16':
+ return Int16Array;
+ case 's16-planar':
+ return Int16Array;
+ case 'u8':
+ return Uint8Array;
+ case 'u8-planar':
+ return Uint8Array;
+ case 's32':
+ return Int32Array;
+ case 's32-planar':
+ return Int32Array;
+ default:
+ throw new Error(`Unsupported audio format: ${format}`);
+ }
+ };
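
getDataTypeForAudioFormat maps a WebCodecs AudioSampleFormat to the typed-array constructor needed to allocate a destination buffer for AudioData.copyTo. A small sketch; the audioData value is assumed to exist and the format is chosen for illustration:

declare const audioData: AudioData; // a WebCodecs AudioData instance, assumed for this example

// 'f32-planar' → Float32Array, 's16' → Int16Array, and so on.
const Ctor = getDataTypeForAudioFormat('f32-planar');
// Planar formats store one channel per plane, so one plane holds numberOfFrames samples.
const plane = new Ctor(audioData.numberOfFrames);
audioData.copyTo(plane, {planeIndex: 0, format: 'f32-planar'});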

package/dist/convert-audiodata/is-planar-format.d.ts ADDED
@@ -0,0 +1 @@
+ export declare const isPlanarFormat: (format: AudioSampleFormat) => boolean;

package/dist/convert-audiodata/is-planar-format.js ADDED
@@ -0,0 +1,3 @@
+ export const isPlanarFormat = (format) => {
+ return format.includes('-planar');
+ };

package/dist/convert-audiodata/log-audiodata.d.ts ADDED
@@ -0,0 +1 @@
+ export declare const logAudioData: (audioData: AudioData) => string;

package/dist/convert-audiodata/log-audiodata.js ADDED
@@ -0,0 +1,8 @@
+ export const logAudioData = (audioData) => {
+ const srcChannels = new Int16Array(audioData.numberOfFrames * audioData.numberOfChannels);
+ audioData.copyTo(srcChannels, {
+ planeIndex: 0,
+ format: 's16',
+ });
+ return srcChannels.slice(0, 10).join(',');
+ };

package/dist/convert-audiodata/resample-audiodata.d.ts
@@ -1,10 +1,9 @@
  export declare const TARGET_NUMBER_OF_CHANNELS = 2;
  export declare const TARGET_SAMPLE_RATE = 48000;
- export declare const resampleAudioData: ({ srcNumberOfChannels, sourceChannels, destination, targetFrames, chunkSize, volume, }: {
+ export declare const resampleAudioData: ({ srcNumberOfChannels, sourceChannels, destination, targetFrames, chunkSize, }: {
  srcNumberOfChannels: number;
  sourceChannels: Int16Array;
  destination: Int16Array;
  targetFrames: number;
  chunkSize: number;
- volume: number;
  }) => void;

package/dist/convert-audiodata/resample-audiodata.js
@@ -2,29 +2,50 @@
  export const TARGET_NUMBER_OF_CHANNELS = 2;
  // Remotion exports all videos with 48kHz sample rate.
  export const TARGET_SAMPLE_RATE = 48000;
- export const resampleAudioData = ({ srcNumberOfChannels, sourceChannels, destination, targetFrames, chunkSize, volume, }) => {
- const getSourceValues = (start, end, channelIndex) => {
- const sampleCountAvg = end - start;
- let itemSum = 0;
- let itemCount = 0;
- for (let k = 0; k < sampleCountAvg; k++) {
- const num = sourceChannels[(start + k) * srcNumberOfChannels + channelIndex];
- itemSum += num;
- itemCount++;
+ const fixFloatingPoint = (value) => {
+ if (value % 1 < 0.0000001) {
+ return Math.floor(value);
+ }
+ if (value % 1 > 0.9999999) {
+ return Math.ceil(value);
+ }
+ return value;
+ };
+ export const resampleAudioData = ({ srcNumberOfChannels, sourceChannels, destination, targetFrames, chunkSize, }) => {
+ const getSourceValues = (startUnfixed, endUnfixed, channelIndex) => {
+ const start = fixFloatingPoint(startUnfixed);
+ const end = fixFloatingPoint(endUnfixed);
+ const startFloor = Math.floor(start);
+ const startCeil = Math.ceil(start);
+ const startFraction = start - startFloor;
+ const endFraction = end - Math.floor(end);
+ const endFloor = Math.floor(end);
+ let weightedSum = 0;
+ let totalWeight = 0;
+ // Handle first fractional sample
+ if (startFraction > 0) {
+ const firstSample = sourceChannels[startFloor * srcNumberOfChannels + channelIndex];
+ weightedSum += firstSample * (1 - startFraction);
+ totalWeight += 1 - startFraction;
  }
- const average = itemSum / itemCount;
- const averageVolume = average * volume;
- if (averageVolume < -32768) {
- return -32768;
+ // Handle full samples
+ for (let k = startCeil; k < endFloor; k++) {
+ const num = sourceChannels[k * srcNumberOfChannels + channelIndex];
+ weightedSum += num;
+ totalWeight += 1;
  }
- if (averageVolume > 32767) {
- return 32767;
+ // Handle last fractional sample
+ if (endFraction > 0) {
+ const lastSample = sourceChannels[endFloor * srcNumberOfChannels + channelIndex];
+ weightedSum += lastSample * endFraction;
+ totalWeight += endFraction;
  }
- return averageVolume;
+ const average = weightedSum / totalWeight;
+ return average;
  };
  for (let newFrameIndex = 0; newFrameIndex < targetFrames; newFrameIndex++) {
- const start = Math.floor(newFrameIndex * chunkSize);
- const end = Math.max(Math.floor(start + chunkSize), start + 1);
+ const start = newFrameIndex * chunkSize;
+ const end = start + chunkSize;
  if (TARGET_NUMBER_OF_CHANNELS === srcNumberOfChannels) {
  for (let i = 0; i < srcNumberOfChannels; i++) {
  destination[newFrameIndex * srcNumberOfChannels + i] = getSourceValues(start, end, i);
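
The resampler now averages each output chunk over fractional sample boundaries, weighting the first and last source samples by how much of them falls inside the chunk, instead of flooring the boundaries as before. A standalone mono re-statement of that weighting for one chunk (names and values are mine, for illustration only):

// Weighted average over the half-open range [start, end) with fractional endpoints.
const weightedAverage = (samples: Int16Array, start: number, end: number): number => {
  const startFloor = Math.floor(start);
  const startFraction = start - startFloor;
  const endFloor = Math.floor(end);
  const endFraction = end - endFloor;
  let weightedSum = 0;
  let totalWeight = 0;
  if (startFraction > 0) {
    // Partial coverage of the first sample
    weightedSum += samples[startFloor] * (1 - startFraction);
    totalWeight += 1 - startFraction;
  }
  for (let k = Math.ceil(start); k < endFloor; k++) {
    // Fully covered samples
    weightedSum += samples[k];
    totalWeight += 1;
  }
  if (endFraction > 0) {
    // Partial coverage of the last sample
    weightedSum += samples[endFloor] * endFraction;
    totalWeight += endFraction;
  }
  return weightedSum / totalWeight;
};

// Resampling the chunk [0.5, 2.5) of [0, 10, 20, 30]:
// 0.5·samples[0] + 1·samples[1] + 0.5·samples[2] = 0 + 10 + 10 = 20, total weight 2 → average 10.
weightedAverage(new Int16Array([0, 10, 20, 30]), 0.5, 2.5); // 10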

package/dist/convert-audiodata/trim-audiodata.d.ts
File without changes

package/dist/convert-audiodata/trim-audiodata.js ADDED
@@ -0,0 +1 @@
+ "use strict";

package/dist/deserialized-audiodata.d.ts ADDED
@@ -0,0 +1,15 @@
+ import type { DataType } from './convert-audiodata/data-types';
+ export type SerializableAudioData = {
+ data: DataType[];
+ format: AudioSampleFormat;
+ numberOfChannels: number;
+ numberOfFrames: number;
+ sampleRate: number;
+ };
+ export declare const turnAudioDataIntoSerializableData: (audioData: AudioData) => {
+ data: (Float32Array<ArrayBuffer> | Int32Array<ArrayBuffer> | Int16Array<ArrayBuffer> | Uint8Array<ArrayBuffer>)[];
+ format: AudioSampleFormat;
+ numberOfChannels: number;
+ numberOfFrames: number;
+ sampleRate: number;
+ };