@remotion/media 4.0.363 → 4.0.365

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,9 +1,9 @@
1
1
  import { jsx as _jsx } from "react/jsx-runtime";
2
2
  import { useContext, useEffect, useMemo, useRef, useState } from 'react';
3
3
  import { Internals, Audio as RemotionAudio, useBufferState, useCurrentFrame, } from 'remotion';
4
+ import { MediaPlayer } from '../media-player';
4
5
  import { useLoopDisplay } from '../show-in-timeline';
5
6
  import { useMediaInTimeline } from '../use-media-in-timeline';
6
- import { MediaPlayer } from '../video/media-player';
7
7
  const { useUnsafeVideoConfig, Timeline, SharedAudioContext, useMediaMutedState, useMediaVolumeState, useFrameForVolumeProp, evaluateVolume, warnAboutTooHighVolume, usePreload, SequenceContext, } = Internals;
8
8
  const NewAudioForPreview = ({ src, playbackRate, logLevel, muted, volume, loopVolumeCurveBehavior, loop, trimAfter, trimBefore, name, showInTimeline, stack, disallowFallbackToHtml5Audio, toneFrequency, audioStreamIndex, fallbackHtml5AudioProps, }) => {
9
9
  const videoConfig = useUnsafeVideoConfig();
@@ -59,6 +59,11 @@ const NewAudioForPreview = ({ src, playbackRate, logLevel, muted, volume, loopVo
59
59
  trimAfter,
60
60
  trimBefore,
61
61
  });
62
+ const buffering = useContext(Internals.BufferingContextReact);
63
+ if (!buffering) {
64
+ throw new Error('useMediaPlayback must be used inside a <BufferingContext>');
65
+ }
66
+ const isPlayerBuffering = Internals.useIsPlayerBuffering(buffering);
62
67
  useEffect(() => {
63
68
  if (!sharedAudioContext)
64
69
  return;
@@ -76,6 +81,8 @@ const NewAudioForPreview = ({ src, playbackRate, logLevel, muted, volume, loopVo
76
81
  canvas: null,
77
82
  playbackRate,
78
83
  audioStreamIndex: audioStreamIndex ?? 0,
84
+ debugOverlay: false,
85
+ bufferState: buffer,
79
86
  });
80
87
  mediaPlayerRef.current = player;
81
88
  player
@@ -152,20 +159,19 @@ const NewAudioForPreview = ({ src, playbackRate, logLevel, muted, volume, loopVo
152
159
  videoConfig.fps,
153
160
  audioStreamIndex,
154
161
  disallowFallbackToHtml5Audio,
162
+ buffer,
155
163
  ]);
156
164
  useEffect(() => {
157
165
  const audioPlayer = mediaPlayerRef.current;
158
166
  if (!audioPlayer)
159
167
  return;
160
- if (playing) {
161
- audioPlayer.play().catch((error) => {
162
- Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[NewAudioForPreview] Failed to play', error);
163
- });
168
+ if (playing && !isPlayerBuffering) {
169
+ audioPlayer.play(currentTimeRef.current);
164
170
  }
165
171
  else {
166
172
  audioPlayer.pause();
167
173
  }
168
- }, [playing, logLevel, mediaPlayerReady]);
174
+ }, [isPlayerBuffering, logLevel, playing]);
169
175
  useEffect(() => {
170
176
  const audioPlayer = mediaPlayerRef.current;
171
177
  if (!audioPlayer || !mediaPlayerReady)
@@ -173,22 +179,6 @@ const NewAudioForPreview = ({ src, playbackRate, logLevel, muted, volume, loopVo
173
179
  audioPlayer.seekTo(currentTime);
174
180
  Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[NewAudioForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
175
181
  }, [currentTime, logLevel, mediaPlayerReady]);
176
- useEffect(() => {
177
- const audioPlayer = mediaPlayerRef.current;
178
- if (!audioPlayer || !mediaPlayerReady)
179
- return;
180
- audioPlayer.onBufferingChange((newBufferingState) => {
181
- if (newBufferingState && !delayHandleRef.current) {
182
- delayHandleRef.current = buffer.delayPlayback();
183
- Internals.Log.trace({ logLevel, tag: '@remotion/media' }, '[NewAudioForPreview] MediaPlayer buffering - blocking Remotion playback');
184
- }
185
- else if (!newBufferingState && delayHandleRef.current) {
186
- delayHandleRef.current.unblock();
187
- delayHandleRef.current = null;
188
- Internals.Log.trace({ logLevel, tag: '@remotion/media' }, '[NewAudioForPreview] MediaPlayer unbuffering - unblocking Remotion playback');
189
- }
190
- });
191
- }, [mediaPlayerReady, buffer, logLevel]);
192
182
  const effectiveMuted = muted || mediaMuted || userPreferredVolume <= 0;
193
183
  useEffect(() => {
194
184
  const audioPlayer = mediaPlayerRef.current;
@@ -0,0 +1,11 @@
1
import type { AudioBufferSink } from 'mediabunny';
/**
 * Seconds of decoded audio that count as a healthy look-ahead before playback.
 */
export declare const HEALTHY_BUFFER_THRESHOLD_SECONDS = 1;
/**
 * Creates a stateful wrapper around an `AudioBufferSink` buffer iterator that
 * starts decoding at `startFromSecond`.
 *
 * The returned handle exposes:
 * - `iterator`: the underlying async generator of wrapped audio buffers.
 * - `destroy()`: disposes of the iterator.
 * - `isReadyToPlay()`: true once iteration has been marked as started and the
 *   reported buffer health is positive.
 * - `setAudioIteratorStarted()` / `setAudioBufferHealth()`: update that state.
 * - `getNext()`: pulls the next decoded buffer from the iterator.
 */
export declare const makeAudioIterator: (audioSink: AudioBufferSink, startFromSecond: number) => {
    iterator: AsyncGenerator<import("mediabunny").WrappedAudioBuffer, void, unknown>;
    destroy: () => void;
    isReadyToPlay: () => boolean;
    setAudioIteratorStarted: (started: boolean) => void;
    getNext: () => Promise<IteratorResult<import("mediabunny").WrappedAudioBuffer, void>>;
    setAudioBufferHealth: (health: number) => void;
};
/** The handle type produced by {@link makeAudioIterator}. */
export type AudioIterator = ReturnType<typeof makeAudioIterator>;
@@ -0,0 +1,24 @@
1
+ export const HEALTHY_BUFFER_THRESHOLD_SECONDS = 1;
2
+ export const makeAudioIterator = (audioSink, startFromSecond) => {
3
+ const iterator = audioSink.buffers(startFromSecond);
4
+ let audioIteratorStarted = false;
5
+ let audioBufferHealth = 0;
6
+ return {
7
+ iterator,
8
+ destroy: () => {
9
+ iterator.return().catch(() => undefined);
10
+ },
11
+ isReadyToPlay: () => {
12
+ return audioIteratorStarted && audioBufferHealth > 0;
13
+ },
14
+ setAudioIteratorStarted: (started) => {
15
+ audioIteratorStarted = started;
16
+ },
17
+ getNext: () => {
18
+ return iterator.next();
19
+ },
20
+ setAudioBufferHealth: (health) => {
21
+ audioBufferHealth = health;
22
+ },
23
+ };
24
+ };
@@ -0,0 +1,31 @@
1
import type { AudioBufferSink, WrappedAudioBuffer } from 'mediabunny';
/**
 * Seconds of decoded audio that count as a healthy look-ahead before playback.
 */
export declare const HEALTHY_BUFFER_THRESHOLD_SECONDS = 1;
/**
 * An `AudioBufferSourceNode` that has been scheduled for playback, together
 * with the media timestamp it plays at and the decoded buffer it carries.
 */
export type QueuedNode = {
    node: AudioBufferSourceNode;
    timestamp: number;
    buffer: AudioBuffer;
};
/**
 * Creates an iterator over decoded audio buffers beginning at
 * `startFromSecond`, plus bookkeeping for the Web Audio nodes queued from it.
 *
 * - `destroy()` stops all queued nodes and disposes of the iterator.
 * - `getNext()` pulls the next decoded buffer.
 * - `addQueuedAudioNode` / `removeQueuedAudioNode` /
 *   `removeAndReturnAllQueuedAudioNodes` maintain the set of scheduled nodes.
 * - `getQueuedPeriod()` reports the time span currently covered by queued
 *   nodes, or `null` when nothing is queued.
 * - `tryToSatisfySeek(time)` attempts to return the buffers covering `time`
 *   from already-available data, or explains why it cannot.
 */
export declare const makeAudioIterator: (audioSink: AudioBufferSink, startFromSecond: number) => {
    destroy: () => void;
    getNext: () => Promise<IteratorResult<WrappedAudioBuffer, void>>;
    isDestroyed: () => boolean;
    addQueuedAudioNode: (node: AudioBufferSourceNode, timestamp: number, buffer: AudioBuffer) => void;
    removeQueuedAudioNode: (node: AudioBufferSourceNode) => void;
    removeAndReturnAllQueuedAudioNodes: () => QueuedNode[];
    getQueuedPeriod: () => {
        from: number;
        until: number;
    } | null;
    tryToSatisfySeek: (time: number) => Promise<{
        type: "not-satisfied";
        reason: string;
    } | {
        type: "satisfied";
        buffers: WrappedAudioBuffer[];
    }>;
};
/** The handle type produced by {@link makeAudioIterator}. */
export type AudioIterator = ReturnType<typeof makeAudioIterator>;
/**
 * Whether `time` falls inside the half-open interval `[from, until)` that is
 * already covered by queued audio. A missing period means nothing is queued.
 */
export declare const isAlreadyQueued: (time: number, queuedPeriod: {
    from: number;
    until: number;
} | undefined | null) => boolean;
@@ -0,0 +1,168 @@
1
import { roundTo4Digits } from '../helpers/round-to-4-digits';
// Seconds of decoded audio that count as a healthy look-ahead.
// NOTE(review): exported but not referenced in this file — presumably read by
// the media player; confirm against callers.
export const HEALTHY_BUFFER_THRESHOLD_SECONDS = 1;
/**
 * Wraps an `AudioBufferSink` iterator starting at `startFromSecond` and keeps
 * track of the `AudioBufferSourceNode`s that were queued from it, so playback
 * can be stopped, inspected (`getQueuedPeriod`), and seeks can be satisfied
 * from already-decoded buffers (`tryToSatisfySeek`) without re-creating the
 * sink.
 */
export const makeAudioIterator = (audioSink, startFromSecond) => {
    let destroyed = false;
    // Async iterator over decoded audio buffers beginning at `startFromSecond`.
    const iterator = audioSink.buffers(startFromSecond);
    // Nodes currently scheduled on the audio graph: {node, timestamp, buffer}.
    const queuedAudioNodes = [];
    // Stop and forget every scheduled node.
    const cleanupAudioQueue = () => {
        for (const node of queuedAudioNodes) {
            node.node.stop();
        }
        queuedAudioNodes.length = 0;
    };
    // Most recent buffer observed through getNextOrNullIfNotAvailable(); used
    // by tryToSatisfySeek() to answer seeks without pulling new buffers.
    let lastReturnedBuffer = null;
    let iteratorEnded = false;
    // Polls the iterator without blocking on decode: races `next()` against a
    // promise that resolves on the next microtask. If no buffer is ready
    // within that microtask, hands back a `waitPromise` so the caller may
    // choose to await the still-pending `next()` later.
    const getNextOrNullIfNotAvailable = async () => {
        const next = iterator.next();
        const result = await Promise.race([
            next,
            new Promise((resolve) => {
                Promise.resolve().then(() => resolve());
            }),
        ]);
        if (!result) {
            // The microtask won the race — the buffer was not immediately
            // available.
            return {
                type: 'need-to-wait-for-it',
                waitPromise: async () => {
                    const res = await next;
                    if (res.value) {
                        lastReturnedBuffer = res.value;
                    }
                    else {
                        iteratorEnded = true;
                    }
                    return res.value;
                },
            };
        }
        if (result.value) {
            lastReturnedBuffer = result.value;
        }
        else {
            // `next()` resolved with no value: the stream is exhausted.
            iteratorEnded = true;
        }
        return {
            type: 'got-buffer-or-end',
            buffer: result.value ?? null,
        };
    };
    // Tries to produce the buffer(s) covering `time` from (a) the most
    // recently returned buffer, (b) the ended-iterator fallback, or (c)
    // buffers the iterator can hand over without waiting. Timestamps are
    // compared after rounding to 4 digits to absorb float noise. Never blocks
    // on decoding: if the next buffer is not ready within a microtask, this
    // reports 'not-satisfied' instead.
    const tryToSatisfySeek = async (time) => {
        if (lastReturnedBuffer) {
            const bufferTimestamp = roundTo4Digits(lastReturnedBuffer.timestamp);
            const bufferEndTimestamp = roundTo4Digits(lastReturnedBuffer.timestamp + lastReturnedBuffer.duration);
            if (roundTo4Digits(time) < bufferTimestamp) {
                // The iterator has already advanced past the requested time;
                // it cannot go backwards.
                return {
                    type: 'not-satisfied',
                    reason: `iterator is too far, most recently returned ${bufferTimestamp}-${bufferEndTimestamp}, requested ${time}`,
                };
            }
            if (roundTo4Digits(time) <= bufferEndTimestamp) {
                // The requested time lies inside the last returned buffer.
                return {
                    type: 'satisfied',
                    buffers: [lastReturnedBuffer],
                };
            }
            // fall through
        }
        if (iteratorEnded) {
            // Past the end of the stream: the last buffer (if any) is the
            // best available answer.
            if (lastReturnedBuffer) {
                return {
                    type: 'satisfied',
                    buffers: [lastReturnedBuffer],
                };
            }
            return {
                type: 'not-satisfied',
                reason: 'iterator ended',
            };
        }
        // Pull immediately-available buffers forward until one covers `time`,
        // accumulating the ones skipped over so the caller receives them too.
        const toBeReturned = [];
        while (true) {
            const buffer = await getNextOrNullIfNotAvailable();
            if (buffer.type === 'need-to-wait-for-it') {
                return {
                    type: 'not-satisfied',
                    reason: 'iterator did not have buffer ready',
                };
            }
            if (buffer.type === 'got-buffer-or-end') {
                if (buffer.buffer === null) {
                    iteratorEnded = true;
                    if (lastReturnedBuffer) {
                        return {
                            type: 'satisfied',
                            buffers: [lastReturnedBuffer],
                        };
                    }
                    return {
                        type: 'not-satisfied',
                        reason: 'iterator ended and did not have buffer ready',
                    };
                }
                const bufferTimestamp = roundTo4Digits(buffer.buffer.timestamp);
                const bufferEndTimestamp = roundTo4Digits(buffer.buffer.timestamp + buffer.buffer.duration);
                const timestamp = roundTo4Digits(time);
                // Half-open interval check: [timestamp, end).
                if (bufferTimestamp <= timestamp && bufferEndTimestamp > timestamp) {
                    return {
                        type: 'satisfied',
                        buffers: [...toBeReturned, buffer.buffer],
                    };
                }
                toBeReturned.push(buffer.buffer);
                continue;
            }
            throw new Error('Unreachable');
        }
    };
    return {
        // Stops every queued node, marks the handle destroyed and disposes of
        // the iterator (rejections from `return()` are swallowed).
        destroy: () => {
            cleanupAudioQueue();
            destroyed = true;
            iterator.return().catch(() => undefined);
        },
        // NOTE(review): unlike getNextOrNullIfNotAvailable(), this does not
        // update lastReturnedBuffer/iteratorEnded, so buffers consumed here
        // are invisible to tryToSatisfySeek() — confirm this is intended.
        getNext: () => {
            return iterator.next();
        },
        isDestroyed: () => {
            return destroyed;
        },
        addQueuedAudioNode: (node, timestamp, buffer) => {
            queuedAudioNodes.push({ node, timestamp, buffer });
        },
        removeQueuedAudioNode: (node) => {
            const index = queuedAudioNodes.findIndex((n) => n.node === node);
            if (index !== -1) {
                queuedAudioNodes.splice(index, 1);
            }
        },
        // Stops all queued nodes, clears the queue and returns a snapshot of
        // what was queued (e.g. to re-schedule after a resume).
        removeAndReturnAllQueuedAudioNodes: () => {
            const nodes = queuedAudioNodes.slice();
            for (const node of nodes) {
                node.node.stop();
            }
            queuedAudioNodes.length = 0;
            return nodes;
        },
        // Time span [from, until] covered by the queued nodes, assuming the
        // queue is kept in chronological order; null when empty.
        getQueuedPeriod: () => {
            const lastNode = queuedAudioNodes[queuedAudioNodes.length - 1];
            if (!lastNode) {
                return null;
            }
            const firstNode = queuedAudioNodes[0];
            if (!firstNode) {
                return null;
            }
            return {
                from: firstNode.timestamp,
                until: lastNode.timestamp + lastNode.buffer.duration,
            };
        },
        tryToSatisfySeek,
    };
};
163
+ export const isAlreadyQueued = (time, queuedPeriod) => {
164
+ if (!queuedPeriod) {
165
+ return false;
166
+ }
167
+ return time >= queuedPeriod.from && time < queuedPeriod.until;
168
+ };
@@ -0,0 +1,19 @@
1
import type { AudioIterator } from '../audio/audio-preview-iterator';
/** Counters shown in the debug overlay. */
export type DebugStats = {
    videoIteratorsCreated: number;
    audioIteratorsCreated: number;
    framesRendered: number;
};
/**
 * Draws a debug information box (iterator/frame counters, audio context
 * state, audio clock and buffering info) onto a 2D canvas context.
 *
 * The audio clock is reported relative to `audioSyncAnchor`; the
 * iterator-specific rows (queued period / resume chunks / playing flag) are
 * only drawn when `audioIterator` is non-null.
 */
export declare const drawPreviewOverlay: ({ context, stats, audioTime, audioContextState, audioIterator, audioSyncAnchor, audioChunksForAfterResuming, playing, }: {
    context: CanvasRenderingContext2D;
    stats: DebugStats;
    audioTime: number;
    audioContextState: AudioContextState;
    audioSyncAnchor: number;
    audioIterator: AudioIterator | null;
    audioChunksForAfterResuming: {
        buffer: AudioBuffer;
        timestamp: number;
    }[];
    playing: boolean;
}) => void;
@@ -0,0 +1,37 @@
1
+ export const drawPreviewOverlay = ({ context, stats, audioTime, audioContextState, audioIterator, audioSyncAnchor, audioChunksForAfterResuming, playing, }) => {
2
+ // Collect all lines to be rendered
3
+ const lines = [
4
+ 'Debug overlay',
5
+ `Video iterators created: ${stats.videoIteratorsCreated}`,
6
+ `Audio iterators created: ${stats.audioIteratorsCreated}`,
7
+ `Frames rendered: ${stats.framesRendered}`,
8
+ `Audio context state: ${audioContextState}`,
9
+ `Audio time: ${(audioTime - audioSyncAnchor).toFixed(3)}s`,
10
+ ];
11
+ if (audioIterator) {
12
+ const queuedPeriod = audioIterator.getQueuedPeriod();
13
+ if (queuedPeriod) {
14
+ lines.push(`Audio queued until: ${(queuedPeriod.until - (audioTime - audioSyncAnchor)).toFixed(3)}s`);
15
+ }
16
+ else if (audioChunksForAfterResuming.length > 0) {
17
+ lines.push(`Audio chunks for after resuming: ${audioChunksForAfterResuming.length}`);
18
+ }
19
+ lines.push(`Playing: ${playing}`);
20
+ }
21
+ const lineHeight = 30; // px, should match or exceed font size
22
+ const boxPaddingX = 10;
23
+ const boxPaddingY = 10;
24
+ const boxLeft = 20;
25
+ const boxTop = 20;
26
+ const boxWidth = 600;
27
+ const boxHeight = lines.length * lineHeight + 2 * boxPaddingY;
28
+ // Draw background for text legibility
29
+ context.fillStyle = 'rgba(0, 0, 0, 1)';
30
+ context.fillRect(boxLeft, boxTop, boxWidth, boxHeight);
31
+ context.fillStyle = 'white';
32
+ context.font = '24px sans-serif';
33
+ context.textBaseline = 'top';
34
+ for (let i = 0; i < lines.length; i++) {
35
+ context.fillText(lines[i], boxLeft + boxPaddingX, boxTop + boxPaddingY + i * lineHeight);
36
+ }
37
+ };