@remotion/media 4.0.379 → 4.0.380

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. package/dist/audio/audio-for-preview.js +1 -1
  2. package/dist/audio/audio-preview-iterator.js +27 -4
  3. package/dist/audio/props.d.ts +1 -0
  4. package/dist/audio-for-rendering.d.ts +3 -0
  5. package/dist/audio-for-rendering.js +94 -0
  6. package/dist/audio.d.ts +3 -0
  7. package/dist/audio.js +60 -0
  8. package/dist/audiodata-to-array.d.ts +0 -0
  9. package/dist/audiodata-to-array.js +1 -0
  10. package/dist/convert-audiodata/data-types.d.ts +1 -0
  11. package/dist/convert-audiodata/data-types.js +22 -0
  12. package/dist/convert-audiodata/is-planar-format.d.ts +1 -0
  13. package/dist/convert-audiodata/is-planar-format.js +3 -0
  14. package/dist/convert-audiodata/log-audiodata.d.ts +1 -0
  15. package/dist/convert-audiodata/log-audiodata.js +8 -0
  16. package/dist/convert-audiodata/trim-audiodata.d.ts +0 -0
  17. package/dist/convert-audiodata/trim-audiodata.js +1 -0
  18. package/dist/deserialized-audiodata.d.ts +15 -0
  19. package/dist/deserialized-audiodata.js +26 -0
  20. package/dist/esm/index.mjs +28 -4
  21. package/dist/extract-audio.d.ts +7 -0
  22. package/dist/extract-audio.js +98 -0
  23. package/dist/extract-frame-via-broadcast-channel.d.ts +15 -0
  24. package/dist/extract-frame-via-broadcast-channel.js +104 -0
  25. package/dist/extract-frame.d.ts +27 -0
  26. package/dist/extract-frame.js +21 -0
  27. package/dist/extrct-audio.d.ts +7 -0
  28. package/dist/extrct-audio.js +94 -0
  29. package/dist/get-frames-since-keyframe.d.ts +22 -0
  30. package/dist/get-frames-since-keyframe.js +41 -0
  31. package/dist/keyframe-bank.d.ts +25 -0
  32. package/dist/keyframe-bank.js +120 -0
  33. package/dist/keyframe-manager.d.ts +23 -0
  34. package/dist/keyframe-manager.js +170 -0
  35. package/dist/log.d.ts +10 -0
  36. package/dist/log.js +33 -0
  37. package/dist/new-video-for-rendering.d.ts +3 -0
  38. package/dist/new-video-for-rendering.js +108 -0
  39. package/dist/new-video.d.ts +3 -0
  40. package/dist/new-video.js +37 -0
  41. package/dist/props.d.ts +29 -0
  42. package/dist/props.js +1 -0
  43. package/dist/remember-actual-matroska-timestamps.d.ts +4 -0
  44. package/dist/remember-actual-matroska-timestamps.js +19 -0
  45. package/dist/serialize-videoframe.d.ts +0 -0
  46. package/dist/serialize-videoframe.js +1 -0
  47. package/dist/video/media-player.d.ts +62 -0
  48. package/dist/video/media-player.js +361 -0
  49. package/dist/video/new-video-for-preview.d.ts +10 -0
  50. package/dist/video/new-video-for-preview.js +108 -0
  51. package/dist/video/props.d.ts +1 -0
  52. package/dist/video/timeout-utils.d.ts +2 -0
  53. package/dist/video/timeout-utils.js +18 -0
  54. package/dist/video-extraction/media-player.d.ts +64 -0
  55. package/dist/video-extraction/media-player.js +501 -0
  56. package/dist/video-extraction/new-video-for-preview.d.ts +10 -0
  57. package/dist/video-extraction/new-video-for-preview.js +114 -0
  58. package/dist/video-for-rendering.d.ts +3 -0
  59. package/dist/video-for-rendering.js +108 -0
  60. package/dist/video.d.ts +3 -0
  61. package/dist/video.js +37 -0
  62. package/package.json +3 -3
package/dist/audio/audio-for-preview.js
@@ -261,7 +261,7 @@ const AudioForPreviewAssertedShowing = ({ src, playbackRate, logLevel, muted, vo
         mediaPlayer.setIsPostmounting(isPostmounting);
     }, [isPostmounting, mediaPlayerReady]);
     if (shouldFallbackToNativeAudio && !disallowFallbackToHtml5Audio) {
-        return (_jsx(RemotionAudio, { src: src, muted: muted, volume: volume, startFrom: trimBefore, endAt: trimAfter, playbackRate: playbackRate, loopVolumeCurveBehavior: loopVolumeCurveBehavior, name: name, loop: loop, showInTimeline: showInTimeline, stack: stack ?? undefined, toneFrequency: toneFrequency, audioStreamIndex: audioStreamIndex, pauseWhenBuffering: fallbackHtml5AudioProps?.pauseWhenBuffering, ...fallbackHtml5AudioProps }));
+        return (_jsx(RemotionAudio, { src: src, muted: muted, volume: volume, startFrom: trimBefore, endAt: trimAfter, playbackRate: playbackRate, loopVolumeCurveBehavior: loopVolumeCurveBehavior, name: name, loop: loop, showInTimeline: showInTimeline, stack: stack ?? undefined, toneFrequency: toneFrequency, audioStreamIndex: audioStreamIndex, pauseWhenBuffering: fallbackHtml5AudioProps?.pauseWhenBuffering, crossOrigin: fallbackHtml5AudioProps?.crossOrigin, ...fallbackHtml5AudioProps }));
     }
     return null;
 };
package/dist/audio/audio-preview-iterator.js
@@ -6,6 +6,7 @@ export const makeAudioIterator = (audioSink, startFromSecond) => {
     const iterator = audioSink.buffers(startFromSecond);
     const queuedAudioNodes = [];
     const audioChunksForAfterResuming = [];
+    let mostRecentTimestamp = -Infinity;
     const cleanupAudioQueue = () => {
         for (const node of queuedAudioNodes) {
             node.node.stop();
@@ -31,12 +32,25 @@ export const makeAudioIterator = (audioSink, startFromSecond) => {
                 },
             };
         }
+        if (result.value) {
+            mostRecentTimestamp = Math.max(mostRecentTimestamp, result.value.timestamp + result.value.duration);
+            return {
+                type: 'got-buffer',
+                buffer: result.value,
+            };
+        }
         return {
-            type: 'got-buffer-or-end',
-            buffer: result.value ?? null,
+            type: 'got-end',
+            mostRecentTimestamp,
         };
     };
     const tryToSatisfySeek = async (time, allowWait, onBufferScheduled) => {
+        if (time < startFromSecond) {
+            return {
+                type: 'not-satisfied',
+                reason: `time requested is before the start of the iterator`,
+            };
+        }
         while (true) {
             const buffer = await getNextOrNullIfNotAvailable(allowWait);
             if (buffer.type === 'need-to-wait-for-it') {
@@ -45,12 +59,18 @@ export const makeAudioIterator = (audioSink, startFromSecond) => {
                     reason: 'iterator did not have buffer ready',
                 };
             }
-            if (buffer.type === 'got-buffer-or-end') {
-                if (buffer.buffer === null) {
+            if (buffer.type === 'got-end') {
+                if (time >= mostRecentTimestamp) {
                     return {
                         type: 'ended',
                     };
                 }
+                return {
+                    type: 'not-satisfied',
+                    reason: `iterator ended before the requested time`,
+                };
+            }
+            if (buffer.type === 'got-buffer') {
                 const bufferTimestamp = roundTo4Digits(buffer.buffer.timestamp);
                 const bufferEndTimestamp = roundTo4Digits(buffer.buffer.timestamp + buffer.buffer.duration);
                 const timestamp = roundTo4Digits(time);
@@ -101,6 +121,9 @@ export const makeAudioIterator = (audioSink, startFromSecond) => {
         },
         getNext: async () => {
             const next = await iterator.next();
+            if (next.value) {
+                mostRecentTimestamp = Math.max(mostRecentTimestamp, next.value.timestamp + next.value.duration);
+            }
             return next;
         },
         isDestroyed: () => {
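
Note on the iterator change above: the single `got-buffer-or-end` result is split into explicit `got-buffer` and `got-end` variants, the iterator now tracks the end timestamp of the last buffer it has seen, and `tryToSatisfySeek` rejects seeks before `startFromSecond` up front. A minimal TypeScript sketch of the new result shapes — the type names and the `TimedBuffer` shape are assumptions inferred from the compiled JS; the package exports no types for this internal:

// Assumed shape of items yielded by audioSink.buffers() (inferred, not exported):
type TimedBuffer = {buffer: AudioBuffer; timestamp: number; duration: number};

// Previously: {type: 'got-buffer-or-end'; buffer: TimedBuffer | null}.
// Now the end variant reports how far decoded audio actually reached, so a
// seek past the end ('ended') can be distinguished from a seek the iterator
// could not satisfy ('not-satisfied').
type GetNextResult =
  | {type: 'need-to-wait-for-it'}
  | {type: 'got-buffer'; buffer: TimedBuffer}
  | {type: 'got-end'; mostRecentTimestamp: number};

// Seek outcomes visible in this diff (success variants not shown here):
type SeekOutcome =
  | {type: 'ended'}
  | {type: 'not-satisfied'; reason: string};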
package/dist/audio/props.d.ts
@@ -1,5 +1,6 @@
 import type { LogLevel, LoopVolumeCurveBehavior, VolumeProp } from 'remotion';
 export type FallbackHtml5AudioProps = {
+    crossOrigin?: '' | 'anonymous' | 'use-credentials' | undefined;
     onError?: (err: Error) => void;
     useWebAudioApi?: boolean;
     acceptableTimeShiftInSeconds?: number;
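
For illustration, a hedged usage sketch of the new `crossOrigin` option — assuming `fallbackHtml5AudioProps` is accepted on the public `<Audio>` component and forwarded to the HTML5 fallback, as the preview code above shows:

import React from 'react';
import {Audio} from '@remotion/media';

// Sketch: when playback falls back to a native <audio> tag, the tag now
// receives crossOrigin, which matters for files served cross-origin with
// CORS headers (e.g. when the audio is captured into a Web Audio graph).
export const MyComposition: React.FC = () => (
  <Audio
    src="https://example.com/music.mp3"
    fallbackHtml5AudioProps={{crossOrigin: 'anonymous'}}
  />
);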
package/dist/audio-for-rendering.d.ts ADDED
@@ -0,0 +1,3 @@
+import type React from 'react';
+import type { AudioProps } from './props';
+export declare const AudioForRendering: React.FC<AudioProps>;
package/dist/audio-for-rendering.js ADDED
@@ -0,0 +1,94 @@
+import { useContext, useLayoutEffect, useMemo, useState } from 'react';
+import { cancelRender, Internals, useCurrentFrame, useDelayRender, useRemotionEnvironment, } from 'remotion';
+import { extractFrameViaBroadcastChannel } from './extract-frame-via-broadcast-channel';
+export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted, loopVolumeCurveBehavior, delayRenderRetries, delayRenderTimeoutInMilliseconds, logLevel = window.remotion_logLevel, }) => {
+    const absoluteFrame = Internals.useTimelinePosition();
+    const videoConfig = Internals.useUnsafeVideoConfig();
+    const { registerRenderAsset, unregisterRenderAsset } = useContext(Internals.RenderAssetManager);
+    const frame = useCurrentFrame();
+    const volumePropsFrame = Internals.useFrameForVolumeProp(loopVolumeCurveBehavior ?? 'repeat');
+    const environment = useRemotionEnvironment();
+    const [id] = useState(() => `${Math.random()}`.replace('0.', ''));
+    if (!videoConfig) {
+        throw new Error('No video config found');
+    }
+    if (!src) {
+        throw new TypeError('No `src` was passed to <Video>.');
+    }
+    const volume = Internals.evaluateVolume({
+        volume: volumeProp,
+        frame: volumePropsFrame,
+        mediaVolume: 1,
+    });
+    Internals.warnAboutTooHighVolume(volume);
+    const shouldRenderAudio = useMemo(() => {
+        if (!window.remotion_audioEnabled) {
+            return false;
+        }
+        if (muted) {
+            return false;
+        }
+        if (volume <= 0) {
+            return false;
+        }
+        return true;
+    }, [muted, volume]);
+    const { fps } = videoConfig;
+    const { delayRender, continueRender } = useDelayRender();
+    useLayoutEffect(() => {
+        const actualFps = playbackRate ? fps / playbackRate : fps;
+        const timestamp = frame / actualFps;
+        const durationInSeconds = 1 / actualFps;
+        const newHandle = delayRender(`Extracting frame number ${frame}`, {
+            retries: delayRenderRetries ?? undefined,
+            timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined,
+        });
+        extractFrameViaBroadcastChannel({
+            src,
+            timeInSeconds: timestamp,
+            durationInSeconds,
+            logLevel: logLevel ?? 'info',
+            shouldRenderAudio,
+            isClientSideRendering: environment.isClientSideRendering,
+        })
+            .then(({ audio }) => {
+            if (audio) {
+                registerRenderAsset({
+                    type: 'inline-audio',
+                    id,
+                    audio: Array.from(audio.data),
+                    sampleRate: audio.sampleRate,
+                    numberOfChannels: audio.numberOfChannels,
+                    frame: absoluteFrame,
+                    timestamp: audio.timestamp,
+                    duration: (audio.numberOfFrames / audio.sampleRate) * 1000000,
+                });
+            }
+            continueRender(newHandle);
+        })
+            .catch((error) => {
+            cancelRender(error);
+        });
+        return () => {
+            continueRender(newHandle);
+            unregisterRenderAsset(id);
+        };
+    }, [
+        absoluteFrame,
+        continueRender,
+        delayRender,
+        delayRenderRetries,
+        delayRenderTimeoutInMilliseconds,
+        environment.isClientSideRendering,
+        fps,
+        frame,
+        id,
+        logLevel,
+        playbackRate,
+        registerRenderAsset,
+        shouldRenderAudio,
+        src,
+        unregisterRenderAsset,
+    ]);
+    return null;
+};
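
The `useLayoutEffect` above maps the current frame to a media timestamp before requesting audio extraction; a worked sketch of that arithmetic with illustrative values:

// With fps = 30 and playbackRate = 2, one composition frame covers 2/30 s
// of media time, so the effective sampling rate is fps / playbackRate = 15.
const fps = 30;
const playbackRate = 2;
const frame = 45;

const actualFps = playbackRate ? fps / playbackRate : fps; // 15
const timeInSeconds = frame / actualFps; // 45 / 15 = 3 s into the media
const durationInSeconds = 1 / actualFps; // 1/15 s of audio per frame

// The registered inline-audio asset expresses its duration in microseconds:
// (numberOfFrames / sampleRate) * 1_000_000.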
package/dist/audio.d.ts ADDED
@@ -0,0 +1,3 @@
+import React from 'react';
+import type { AudioProps } from './props';
+export declare const Audio: React.FC<AudioProps>;
package/dist/audio.js ADDED
@@ -0,0 +1,60 @@
+import { jsx as _jsx } from "react/jsx-runtime";
+import { useCallback, useContext } from 'react';
+import { cancelRender, Internals, Sequence, useRemotionEnvironment, } from 'remotion';
+import { SharedAudioContext } from '../../core/src/audio/shared-audio-tags';
+import { AudioForRendering } from './audio-for-rendering';
+const { validateMediaTrimProps, resolveTrimProps, validateMediaProps, AudioForPreview, } = Internals;
+export const Audio = (props) => {
+    const audioContext = useContext(SharedAudioContext);
+    // Should only destruct `trimBefore` and `trimAfter` from props,
+    // rest gets drilled down
+    const { trimBefore, trimAfter, name, pauseWhenBuffering, stack, showInTimeline, onError: onRemotionError, loop, ...otherProps } = props;
+    const environment = useRemotionEnvironment();
+    const onDuration = useCallback(() => undefined, []);
+    if (typeof props.src !== 'string') {
+        throw new TypeError(`The \`<Audio>\` tag requires a string for \`src\`, but got ${JSON.stringify(props.src)} instead.`);
+    }
+    validateMediaTrimProps({
+        startFrom: undefined,
+        endAt: undefined,
+        trimBefore,
+        trimAfter,
+    });
+    const { trimBeforeValue, trimAfterValue } = resolveTrimProps({
+        startFrom: undefined,
+        endAt: undefined,
+        trimBefore,
+        trimAfter,
+    });
+    const onError = useCallback((e) => {
+        // eslint-disable-next-line no-console
+        console.log(e.currentTarget.error);
+        // If there is no `loop` property, we don't need to get the duration
+        // and this does not need to be a fatal error
+        const errMessage = `Could not play audio: ${e.currentTarget.error}. See https://remotion.dev/docs/media-playback-error for help.`;
+        if (loop) {
+            if (onRemotionError) {
+                onRemotionError(new Error(errMessage));
+                return;
+            }
+            cancelRender(new Error(errMessage));
+        }
+        else {
+            onRemotionError?.(new Error(errMessage));
+            // eslint-disable-next-line no-console
+            console.warn(errMessage);
+        }
+    }, [onRemotionError, loop]);
+    if (typeof trimBeforeValue !== 'undefined' ||
+        typeof trimAfterValue !== 'undefined') {
+        return (_jsx(Sequence, { layout: "none", from: 0 - (trimBeforeValue ?? 0), showInTimeline: false, durationInFrames: trimAfterValue, name: name, children: _jsx(Audio, { pauseWhenBuffering: pauseWhenBuffering ?? false, ...otherProps }) }));
+    }
+    validateMediaProps(props, 'Video');
+    if (environment.isRendering) {
+        return _jsx(AudioForRendering, { ...otherProps });
+    }
+    const { onAutoPlayError, crossOrigin, delayRenderRetries, delayRenderTimeoutInMilliseconds, ...propsForPreview } = otherProps;
+    return (_jsx(AudioForPreview, { _remotionInternalNativeLoopPassed: props._remotionInternalNativeLoopPassed ?? false, _remotionInternalStack: stack ?? null, shouldPreMountAudioTags: audioContext !== null && audioContext.numberOfAudioTags > 0, ...propsForPreview, onNativeError: onError, onDuration: onDuration,
+        // Proposal: Make this default to true in v5
+        pauseWhenBuffering: pauseWhenBuffering ?? false, _remotionInternalNeedsDurationCalculation: Boolean(loop), showInTimeline: showInTimeline ?? true }));
+};
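
The trim branch above implements `trimBefore`/`trimAfter` by wrapping the media in a `<Sequence>` with a negative `from`, then recursing into `<Audio>` with the trim props stripped. A worked sketch of the offset arithmetic (illustrative values):

// With trimBefore = 60 and trimAfter = 150 at 30 fps:
const trimBeforeValue = 60;
const trimAfterValue = 150;

const from = 0 - (trimBeforeValue ?? 0); // -60: the media starts 60 frames "early"
const durationInFrames = trimAfterValue; // nothing plays past media frame 150

// At composition frame 0 the inner <Audio> is already at its own frame 60,
// so the first two seconds of the file are skipped.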
package/dist/audiodata-to-array.d.ts (file without changes)
package/dist/audiodata-to-array.js ADDED
@@ -0,0 +1 @@
+"use strict";
package/dist/convert-audiodata/data-types.d.ts ADDED
@@ -0,0 +1 @@
+export declare const getDataTypeForAudioFormat: (format: AudioSampleFormat) => Float32ArrayConstructor | Int16ArrayConstructor | Uint8ArrayConstructor | Int32ArrayConstructor;
package/dist/convert-audiodata/data-types.js ADDED
@@ -0,0 +1,22 @@
+export const getDataTypeForAudioFormat = (format) => {
+    switch (format) {
+        case 'f32':
+            return Float32Array;
+        case 'f32-planar':
+            return Float32Array;
+        case 's16':
+            return Int16Array;
+        case 's16-planar':
+            return Int16Array;
+        case 'u8':
+            return Uint8Array;
+        case 'u8-planar':
+            return Uint8Array;
+        case 's32':
+            return Int32Array;
+        case 's32-planar':
+            return Int32Array;
+        default:
+            throw new Error(`Unsupported audio format: ${format}`);
+    }
+};
package/dist/convert-audiodata/is-planar-format.d.ts ADDED
@@ -0,0 +1 @@
+export declare const isPlanarFormat: (format: AudioSampleFormat) => boolean;
package/dist/convert-audiodata/is-planar-format.js ADDED
@@ -0,0 +1,3 @@
+export const isPlanarFormat = (format) => {
+    return format.includes('-planar');
+};
package/dist/convert-audiodata/log-audiodata.d.ts ADDED
@@ -0,0 +1 @@
+export declare const logAudioData: (audioData: AudioData) => string;
package/dist/convert-audiodata/log-audiodata.js ADDED
@@ -0,0 +1,8 @@
+export const logAudioData = (audioData) => {
+    const srcChannels = new Int16Array(audioData.numberOfFrames * audioData.numberOfChannels);
+    audioData.copyTo(srcChannels, {
+        planeIndex: 0,
+        format: 's16',
+    });
+    return srcChannels.slice(0, 10).join(',');
+};
package/dist/convert-audiodata/trim-audiodata.d.ts (file without changes)
package/dist/convert-audiodata/trim-audiodata.js ADDED
@@ -0,0 +1 @@
+"use strict";
package/dist/deserialized-audiodata.d.ts ADDED
@@ -0,0 +1,15 @@
+import type { DataType } from './convert-audiodata/data-types';
+export type SerializableAudioData = {
+    data: DataType[];
+    format: AudioSampleFormat;
+    numberOfChannels: number;
+    numberOfFrames: number;
+    sampleRate: number;
+};
+export declare const turnAudioDataIntoSerializableData: (audioData: AudioData) => {
+    data: (Float32Array<ArrayBuffer> | Int32Array<ArrayBuffer> | Int16Array<ArrayBuffer> | Uint8Array<ArrayBuffer>)[];
+    format: AudioSampleFormat;
+    numberOfChannels: number;
+    numberOfFrames: number;
+    sampleRate: number;
+};
package/dist/deserialized-audiodata.js ADDED
@@ -0,0 +1,26 @@
+import { getDataTypeForAudioFormat } from './convert-audiodata/data-types';
+import { isPlanarFormat } from './convert-audiodata/is-planar-format';
+export const turnAudioDataIntoSerializableData = (audioData) => {
+    if (!audioData.format) {
+        throw new Error('AudioData format is not set');
+    }
+    const DataType = getDataTypeForAudioFormat(audioData.format);
+    const isPlanar = isPlanarFormat(audioData.format);
+    const planes = isPlanar ? audioData.numberOfChannels : 1;
+    const srcChannels = new Array(planes)
+        .fill(true)
+        .map(() => new DataType((isPlanar ? 1 : audioData.numberOfChannels) *
+        audioData.numberOfFrames));
+    for (let i = 0; i < planes; i++) {
+        audioData.copyTo(srcChannels[i], {
+            planeIndex: i,
+        });
+    }
+    return {
+        data: srcChannels,
+        format: audioData.format,
+        numberOfChannels: audioData.numberOfChannels,
+        numberOfFrames: audioData.numberOfFrames,
+        sampleRate: audioData.sampleRate,
+    };
+};
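
`turnAudioDataIntoSerializableData` copies each plane of a WebCodecs `AudioData` into plain typed arrays so the result can cross a `BroadcastChannel` via structured clone. A sketch of the reverse direction — not part of the package — using the standard `AudioData` constructor; note the serialized form carries no `timestamp`, so the caller must supply one:

// Sketch: rebuild an AudioData from SerializableAudioData. For planar
// formats each entry of `data` is one channel plane; the constructor
// expects all planes concatenated into a single buffer.
const rebuildAudioData = (s: {
  data: (Float32Array | Int32Array | Int16Array | Uint8Array)[];
  format: AudioSampleFormat;
  numberOfChannels: number;
  numberOfFrames: number;
  sampleRate: number;
}): AudioData => {
  const bytesPerPlane = s.data[0].byteLength;
  const joined = new Uint8Array(bytesPerPlane * s.data.length);
  s.data.forEach((plane, i) => {
    joined.set(new Uint8Array(plane.buffer, plane.byteOffset, plane.byteLength), i * bytesPerPlane);
  });
  return new AudioData({
    format: s.format,
    sampleRate: s.sampleRate,
    numberOfFrames: s.numberOfFrames,
    numberOfChannels: s.numberOfChannels,
    timestamp: 0, // not serialized - assumed to be supplied by the caller
    data: joined,
  });
};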
package/dist/esm/index.mjs
@@ -85,6 +85,7 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
   const iterator = audioSink.buffers(startFromSecond);
   const queuedAudioNodes = [];
   const audioChunksForAfterResuming = [];
+  let mostRecentTimestamp = -Infinity;
   const cleanupAudioQueue = () => {
     for (const node of queuedAudioNodes) {
       node.node.stop();
@@ -108,12 +109,25 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
         }
       };
     }
+    if (result.value) {
+      mostRecentTimestamp = Math.max(mostRecentTimestamp, result.value.timestamp + result.value.duration);
+      return {
+        type: "got-buffer",
+        buffer: result.value
+      };
+    }
     return {
-      type: "got-buffer-or-end",
-      buffer: result.value ?? null
+      type: "got-end",
+      mostRecentTimestamp
    };
  };
  const tryToSatisfySeek = async (time, allowWait, onBufferScheduled) => {
+    if (time < startFromSecond) {
+      return {
+        type: "not-satisfied",
+        reason: `time requested is before the start of the iterator`
+      };
+    }
    while (true) {
      const buffer = await getNextOrNullIfNotAvailable(allowWait);
      if (buffer.type === "need-to-wait-for-it") {
@@ -122,12 +136,18 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
          reason: "iterator did not have buffer ready"
        };
      }
-      if (buffer.type === "got-buffer-or-end") {
-        if (buffer.buffer === null) {
+      if (buffer.type === "got-end") {
+        if (time >= mostRecentTimestamp) {
          return {
            type: "ended"
          };
        }
+        return {
+          type: "not-satisfied",
+          reason: `iterator ended before the requested time`
+        };
+      }
+      if (buffer.type === "got-buffer") {
        const bufferTimestamp = roundTo4Digits(buffer.buffer.timestamp);
        const bufferEndTimestamp = roundTo4Digits(buffer.buffer.timestamp + buffer.buffer.duration);
        const timestamp = roundTo4Digits(time);
@@ -180,6 +200,9 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
    },
    getNext: async () => {
      const next = await iterator.next();
+      if (next.value) {
+        mostRecentTimestamp = Math.max(mostRecentTimestamp, next.value.timestamp + next.value.duration);
+      }
      return next;
    },
    isDestroyed: () => {
@@ -1580,6 +1603,7 @@ var AudioForPreviewAssertedShowing = ({
      toneFrequency,
      audioStreamIndex,
      pauseWhenBuffering: fallbackHtml5AudioProps?.pauseWhenBuffering,
+      crossOrigin: fallbackHtml5AudioProps?.crossOrigin,
      ...fallbackHtml5AudioProps
    });
  }
package/dist/extract-audio.d.ts ADDED
@@ -0,0 +1,7 @@
+import type { PcmS16AudioData } from './convert-audiodata/convert-audiodata';
+export declare const extractAudio: ({ src, timeInSeconds, durationInSeconds, volume, }: {
+    src: string;
+    timeInSeconds: number;
+    durationInSeconds: number;
+    volume: number;
+}) => Promise<PcmS16AudioData | null>;
package/dist/extract-audio.js ADDED
@@ -0,0 +1,98 @@
+import { combineAudioDataAndClosePrevious } from './convert-audiodata/combine-audiodata';
+import { convertAudioData } from './convert-audiodata/convert-audiodata';
+import { TARGET_NUMBER_OF_CHANNELS, TARGET_SAMPLE_RATE, } from './convert-audiodata/resample-audiodata';
+import { sinkPromises } from './extract-frame';
+import { getSinks } from './video-extraction/get-frames-since-keyframe';
+export const extractAudio = async ({ src, timeInSeconds, durationInSeconds, volume, }) => {
+    console.time('extractAudio');
+    if (!sinkPromises[src]) {
+        sinkPromises[src] = getSinks(src);
+    }
+    const { audio, actualMatroskaTimestamps, isMatroska } = await sinkPromises[src];
+    if (audio === null) {
+        console.timeEnd('extractAudio');
+        return null;
+    }
+    // https://discord.com/channels/@me/1409810025844838481/1415028953093111870
+    // Audio frames might have dependencies on previous and next frames so we need to decode a bit more
+    // and then discard it.
+    // The worst case seems to be FLAC files with a 65'535 sample window, which would be 1486.0ms at 44.1Khz.
+    // So let's set a threshold of 1.5 seconds.
+    const extraThreshold = 1.5;
+    // Matroska timestamps are not accurate unless we start from the beginning
+    // So for matroska, we need to decode all samples :(
+    // https://github.com/Vanilagy/mediabunny/issues/105
+    const sampleIterator = audio.sampleSink.samples(isMatroska ? 0 : Math.max(0, timeInSeconds - extraThreshold), timeInSeconds + durationInSeconds);
+    const samples = [];
+    for await (const sample of sampleIterator) {
+        const realTimestamp = actualMatroskaTimestamps.getRealTimestamp(sample.timestamp);
+        if (realTimestamp !== null && realTimestamp !== sample.timestamp) {
+            sample.setTimestamp(realTimestamp);
+        }
+        actualMatroskaTimestamps.observeTimestamp(sample.timestamp);
+        actualMatroskaTimestamps.observeTimestamp(sample.timestamp + sample.duration);
+        if (sample.timestamp + sample.duration - 0.0000000001 <= timeInSeconds) {
+            continue;
+        }
+        if (sample.timestamp >= timeInSeconds + durationInSeconds - 0.0000000001) {
+            continue;
+        }
+        samples.push(sample);
+    }
+    const audioDataArray = [];
+    for (let i = 0; i < samples.length; i++) {
+        const sample = samples[i];
+        // Less than 1 sample would be included - we did not need it after all!
+        if (Math.abs(sample.timestamp - (timeInSeconds + durationInSeconds)) *
+            sample.sampleRate <
+            1) {
+            sample.close();
+            continue;
+        }
+        // Less than 1 sample would be included - we did not need it after all!
+        if (sample.timestamp + sample.duration <= timeInSeconds) {
+            sample.close();
+            continue;
+        }
+        const isFirstSample = i === 0;
+        const isLastSample = i === samples.length - 1;
+        const audioDataRaw = sample.toAudioData();
+        // amount of samples to shave from start and end
+        let trimStartInSeconds = 0;
+        let trimEndInSeconds = 0;
+        // TODO: Apply playback rate
+        // TODO: Apply tone frequency
+        if (isFirstSample) {
+            trimStartInSeconds = timeInSeconds - sample.timestamp;
+        }
+        if (isLastSample) {
+            trimEndInSeconds =
+                // clamp to 0 in case the audio ends early
+                Math.max(0, sample.timestamp +
+                    sample.duration -
+                    (timeInSeconds + durationInSeconds));
+        }
+        const audioData = convertAudioData({
+            audioData: audioDataRaw,
+            newSampleRate: TARGET_SAMPLE_RATE,
+            trimStartInSeconds,
+            trimEndInSeconds,
+            targetNumberOfChannels: TARGET_NUMBER_OF_CHANNELS,
+            volume,
+        });
+        audioDataRaw.close();
+        if (audioData.numberOfFrames === 0) {
+            sample.close();
+            continue;
+        }
+        audioDataArray.push(audioData);
+        sample.close();
+    }
+    if (audioDataArray.length === 0) {
+        console.timeEnd('extractAudio');
+        return null;
+    }
+    const combined = combineAudioDataAndClosePrevious(audioDataArray);
+    console.timeEnd('extractAudio');
+    return combined;
+};
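
A quick check of the 1.5-second `extraThreshold` cited in the comment above — the worst case named there is a FLAC frame window of 65,535 samples:

// 65,535 samples at a 44.1 kHz sample rate:
const worstCaseSamples = 65535;
const sampleRate = 44100;
const windowSeconds = worstCaseSamples / sampleRate; // ~1.486 s (the "1486.0ms")
// Hence decoding starts extraThreshold = 1.5 s before the requested time,
// and samples that end before timeInSeconds are discarded after decoding.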
package/dist/extract-frame-via-broadcast-channel.d.ts ADDED
@@ -0,0 +1,15 @@
+import type { PcmS16AudioData } from './convert-audiodata/convert-audiodata';
+import type { LogLevel } from './log';
+export declare const extractFrameViaBroadcastChannel: ({ src, timeInSeconds, logLevel, durationInSeconds, includeAudio, includeVideo, isClientSideRendering, volume, }: {
+    src: string;
+    timeInSeconds: number;
+    durationInSeconds: number;
+    logLevel: LogLevel;
+    includeAudio: boolean;
+    includeVideo: boolean;
+    isClientSideRendering: boolean;
+    volume: number;
+}) => Promise<{
+    frame: ImageBitmap | VideoFrame | null;
+    audio: PcmS16AudioData | null;
+}>;
package/dist/extract-frame-via-broadcast-channel.js ADDED
@@ -0,0 +1,104 @@
+import { extractFrameAndAudio } from './extract-frame-and-audio';
+// Doesn't exist in studio
+if (window.remotion_broadcastChannel && window.remotion_isMainTab) {
+    window.remotion_broadcastChannel.addEventListener('message', async (event) => {
+        const data = event.data;
+        if (data.type === 'request') {
+            try {
+                const { frame, audio } = await extractFrameAndAudio({
+                    src: data.src,
+                    timeInSeconds: data.timeInSeconds,
+                    logLevel: data.logLevel,
+                    durationInSeconds: data.durationInSeconds,
+                    includeAudio: data.includeAudio,
+                    includeVideo: data.includeVideo,
+                    volume: data.volume,
+                });
+                const videoFrame = frame;
+                const imageBitmap = videoFrame
+                    ? await createImageBitmap(videoFrame)
+                    : null;
+                if (videoFrame) {
+                    videoFrame.close();
+                }
+                const response = {
+                    type: 'response-success',
+                    id: data.id,
+                    frame: imageBitmap,
+                    audio,
+                };
+                window.remotion_broadcastChannel.postMessage(response);
+                videoFrame?.close();
+            }
+            catch (error) {
+                const response = {
+                    type: 'response-error',
+                    id: data.id,
+                    errorStack: error.stack ?? 'No stack trace',
+                };
+                window.remotion_broadcastChannel.postMessage(response);
+            }
+        }
+        else {
+            throw new Error('Invalid message: ' + JSON.stringify(data));
+        }
+    });
+}
+export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel, durationInSeconds, includeAudio, includeVideo, isClientSideRendering, volume, }) => {
+    if (isClientSideRendering || window.remotion_isMainTab) {
+        return extractFrameAndAudio({
+            logLevel,
+            src,
+            timeInSeconds,
+            durationInSeconds,
+            includeAudio,
+            includeVideo,
+            volume,
+        });
+    }
+    const requestId = crypto.randomUUID();
+    const resolvePromise = new Promise((resolve, reject) => {
+        const onMessage = (event) => {
+            const data = event.data;
+            if (!data) {
+                return;
+            }
+            if (data.type === 'response-success' && data.id === requestId) {
+                resolve({
+                    frame: data.frame ? data.frame : null,
+                    audio: data.audio ? data.audio : null,
+                });
+                window.remotion_broadcastChannel.removeEventListener('message', onMessage);
+            }
+            else if (data.type === 'response-error' && data.id === requestId) {
+                reject(data.errorStack);
+                window.remotion_broadcastChannel.removeEventListener('message', onMessage);
+            }
+        };
+        window.remotion_broadcastChannel.addEventListener('message', onMessage);
+    });
+    const request = {
+        type: 'request',
+        src,
+        timeInSeconds,
+        id: requestId,
+        logLevel,
+        durationInSeconds,
+        includeAudio,
+        includeVideo,
+        volume,
+    };
+    window.remotion_broadcastChannel.postMessage(request);
+    let timeoutId;
+    return Promise.race([
+        resolvePromise.then((res) => {
+            clearTimeout(timeoutId);
+            return res;
+        }),
+        new Promise((_, reject) => {
+            timeoutId = setTimeout(() => {
+                reject(new Error(`Timeout while extracting frame at time ${timeInSeconds}sec from ${src}`));
+            }, Math.max(3000, window.remotion_puppeteerTimeout - 5000));
+        }),
+    ]);
+};
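
The `Promise.race` above bounds the cross-tab request with a timeout derived from the page-level Puppeteer timeout; a worked sketch of that arithmetic (the timeout value is an assumed example):

const remotionPuppeteerTimeout = 30000; // assumed example value of window.remotion_puppeteerTimeout
const timeoutMs = Math.max(3000, remotionPuppeteerTimeout - 5000); // 25000 ms, floored at 3000 ms
// Failing 5 s before the Puppeteer deadline leaves room to surface a
// descriptive "Timeout while extracting frame" error instead of a generic
// delayRender() timeout.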