@remotion/media 4.0.365 → 4.0.366

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,11 +1,12 @@
  import { jsx as _jsx } from "react/jsx-runtime";
  import { useContext, useEffect, useMemo, useRef, useState } from 'react';
- import { Internals, Audio as RemotionAudio, useBufferState, useCurrentFrame, } from 'remotion';
+ import { Internals, Audio as RemotionAudio, useBufferState, useCurrentFrame, useVideoConfig, } from 'remotion';
+ import { getTimeInSeconds } from '../get-time-in-seconds';
  import { MediaPlayer } from '../media-player';
  import { useLoopDisplay } from '../show-in-timeline';
  import { useMediaInTimeline } from '../use-media-in-timeline';
  const { useUnsafeVideoConfig, Timeline, SharedAudioContext, useMediaMutedState, useMediaVolumeState, useFrameForVolumeProp, evaluateVolume, warnAboutTooHighVolume, usePreload, SequenceContext, } = Internals;
- const NewAudioForPreview = ({ src, playbackRate, logLevel, muted, volume, loopVolumeCurveBehavior, loop, trimAfter, trimBefore, name, showInTimeline, stack, disallowFallbackToHtml5Audio, toneFrequency, audioStreamIndex, fallbackHtml5AudioProps, }) => {
+ const AudioForPreviewAssertedShowing = ({ src, playbackRate, logLevel, muted, volume, loopVolumeCurveBehavior, loop, trimAfter, trimBefore, name, showInTimeline, stack, disallowFallbackToHtml5Audio, toneFrequency, audioStreamIndex, fallbackHtml5AudioProps, }) => {
  const videoConfig = useUnsafeVideoConfig();
  const frame = useCurrentFrame();
  const mediaPlayerRef = useRef(null);
@@ -37,6 +38,8 @@ const NewAudioForPreview = ({ src, playbackRate, logLevel, muted, volume, loopVo
  currentTimeRef.current = currentTime;
  const preloadedSrc = usePreload(src);
  const parentSequence = useContext(SequenceContext);
+ const isPremounting = Boolean(parentSequence?.premounting);
+ const isPostmounting = Boolean(parentSequence?.postmounting);
  const loopDisplay = useLoopDisplay({
  loop,
  mediaDurationInSeconds: videoConfig.durationInFrames,
@@ -83,6 +86,9 @@ const NewAudioForPreview = ({ src, playbackRate, logLevel, muted, volume, loopVo
  audioStreamIndex: audioStreamIndex ?? 0,
  debugOverlay: false,
  bufferState: buffer,
+ isPostmounting,
+ isPremounting,
+ globalPlaybackRate,
  });
  mediaPlayerRef.current = player;
  player
@@ -122,16 +128,16 @@ const NewAudioForPreview = ({ src, playbackRate, logLevel, muted, volume, loopVo
  }
  if (result.type === 'success') {
  setMediaPlayerReady(true);
- Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[NewAudioForPreview] MediaPlayer initialized successfully`);
+ Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[AudioForPreview] MediaPlayer initialized successfully`);
  }
  })
  .catch((error) => {
- Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[NewAudioForPreview] Failed to initialize MediaPlayer', error);
+ Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[AudioForPreview] Failed to initialize MediaPlayer', error);
  setShouldFallbackToNativeAudio(true);
  });
  }
  catch (error) {
- Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[NewAudioForPreview] MediaPlayer initialization failed', error);
+ Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[AudioForPreview] MediaPlayer initialization failed', error);
  setShouldFallbackToNativeAudio(true);
  }
  return () => {
@@ -140,7 +146,7 @@ const NewAudioForPreview = ({ src, playbackRate, logLevel, muted, volume, loopVo
  delayHandleRef.current = null;
  }
  if (mediaPlayerRef.current) {
- Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[NewAudioForPreview] Disposing MediaPlayer`);
+ Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[AudioForPreview] Disposing MediaPlayer`);
  mediaPlayerRef.current.dispose();
  mediaPlayerRef.current = null;
  }
@@ -160,6 +166,9 @@ const NewAudioForPreview = ({ src, playbackRate, logLevel, muted, volume, loopVo
  audioStreamIndex,
  disallowFallbackToHtml5Audio,
  buffer,
+ isPremounting,
+ isPostmounting,
+ globalPlaybackRate,
  ]);
  useEffect(() => {
  const audioPlayer = mediaPlayerRef.current;
@@ -176,8 +185,10 @@ const NewAudioForPreview = ({ src, playbackRate, logLevel, muted, volume, loopVo
  const audioPlayer = mediaPlayerRef.current;
  if (!audioPlayer || !mediaPlayerReady)
  return;
- audioPlayer.seekTo(currentTime);
- Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[NewAudioForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
+ audioPlayer.seekTo(currentTime).catch(() => {
+ // Might be disposed
+ });
+ Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[AudioForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
  }, [currentTime, logLevel, mediaPlayerReady]);
  const effectiveMuted = muted || mediaMuted || userPreferredVolume <= 0;
  useEffect(() => {
@@ -193,14 +204,20 @@ const NewAudioForPreview = ({ src, playbackRate, logLevel, muted, volume, loopVo
  }
  audioPlayer.setVolume(userPreferredVolume);
  }, [userPreferredVolume, mediaPlayerReady]);
- const effectivePlaybackRate = useMemo(() => playbackRate * globalPlaybackRate, [playbackRate, globalPlaybackRate]);
  useEffect(() => {
  const audioPlayer = mediaPlayerRef.current;
  if (!audioPlayer || !mediaPlayerReady) {
  return;
  }
- audioPlayer.setPlaybackRate(effectivePlaybackRate);
- }, [effectivePlaybackRate, mediaPlayerReady]);
+ audioPlayer.setPlaybackRate(playbackRate);
+ }, [playbackRate, mediaPlayerReady]);
+ useEffect(() => {
+ const audioPlayer = mediaPlayerRef.current;
+ if (!audioPlayer || !mediaPlayerReady) {
+ return;
+ }
+ audioPlayer.setGlobalPlaybackRate(globalPlaybackRate);
+ }, [globalPlaybackRate, mediaPlayerReady]);
  useEffect(() => {
  const audioPlayer = mediaPlayerRef.current;
  if (!audioPlayer || !mediaPlayerReady) {
@@ -208,6 +225,41 @@ const NewAudioForPreview = ({ src, playbackRate, logLevel, muted, volume, loopVo
  }
  audioPlayer.setFps(videoConfig.fps);
  }, [videoConfig.fps, mediaPlayerReady]);
+ useEffect(() => {
+ const mediaPlayer = mediaPlayerRef.current;
+ if (!mediaPlayer || !mediaPlayerReady) {
+ return;
+ }
+ mediaPlayer.setTrimBefore(trimBefore);
+ }, [trimBefore, mediaPlayerReady]);
+ useEffect(() => {
+ const mediaPlayer = mediaPlayerRef.current;
+ if (!mediaPlayer || !mediaPlayerReady) {
+ return;
+ }
+ mediaPlayer.setTrimAfter(trimAfter);
+ }, [trimAfter, mediaPlayerReady]);
+ useEffect(() => {
+ const mediaPlayer = mediaPlayerRef.current;
+ if (!mediaPlayer || !mediaPlayerReady) {
+ return;
+ }
+ mediaPlayer.setLoop(loop);
+ }, [loop, mediaPlayerReady]);
+ useEffect(() => {
+ const mediaPlayer = mediaPlayerRef.current;
+ if (!mediaPlayer || !mediaPlayerReady) {
+ return;
+ }
+ mediaPlayer.setIsPremounting(isPremounting);
+ }, [isPremounting, mediaPlayerReady]);
+ useEffect(() => {
+ const mediaPlayer = mediaPlayerRef.current;
+ if (!mediaPlayer || !mediaPlayerReady) {
+ return;
+ }
+ mediaPlayer.setIsPostmounting(isPostmounting);
+ }, [isPostmounting, mediaPlayerReady]);
  if (shouldFallbackToNativeAudio && !disallowFallbackToHtml5Audio) {
  return (_jsx(RemotionAudio, { src: src, muted: muted, volume: volume, startFrom: trimBefore, endAt: trimAfter, playbackRate: playbackRate, loopVolumeCurveBehavior: loopVolumeCurveBehavior, name: name, loop: loop, showInTimeline: showInTimeline, stack: stack ?? undefined, toneFrequency: toneFrequency, audioStreamIndex: audioStreamIndex, pauseWhenBuffering: fallbackHtml5AudioProps?.pauseWhenBuffering, ...fallbackHtml5AudioProps }));
  }
@@ -215,5 +267,32 @@ const NewAudioForPreview = ({ src, playbackRate, logLevel, muted, volume, loopVo
  };
  export const AudioForPreview = ({ loop, src, logLevel, muted, name, volume, loopVolumeCurveBehavior, playbackRate, trimAfter, trimBefore, showInTimeline, stack, disallowFallbackToHtml5Audio, toneFrequency, audioStreamIndex, fallbackHtml5AudioProps, }) => {
  const preloadedSrc = usePreload(src);
- return (_jsx(NewAudioForPreview, { audioStreamIndex: audioStreamIndex ?? 0, src: preloadedSrc, playbackRate: playbackRate ?? 1, logLevel: logLevel ?? window.remotion_logLevel, muted: muted ?? false, volume: volume ?? 1, loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? 'repeat', loop: loop ?? false, trimAfter: trimAfter, trimBefore: trimBefore, name: name, showInTimeline: showInTimeline ?? true, stack: stack, disallowFallbackToHtml5Audio: disallowFallbackToHtml5Audio ?? false, toneFrequency: toneFrequency, fallbackHtml5AudioProps: fallbackHtml5AudioProps }));
+ const frame = useCurrentFrame();
+ const videoConfig = useVideoConfig();
+ const currentTime = frame / videoConfig.fps;
+ const showShow = useMemo(() => {
+ return (getTimeInSeconds({
+ unloopedTimeInSeconds: currentTime,
+ playbackRate: playbackRate ?? 1,
+ loop: loop ?? false,
+ trimBefore,
+ trimAfter,
+ mediaDurationInSeconds: Infinity,
+ fps: videoConfig.fps,
+ ifNoMediaDuration: 'infinity',
+ src,
+ }) !== null);
+ }, [
+ currentTime,
+ loop,
+ playbackRate,
+ src,
+ trimAfter,
+ trimBefore,
+ videoConfig.fps,
+ ]);
+ if (!showShow) {
+ return null;
+ }
+ return (_jsx(AudioForPreviewAssertedShowing, { audioStreamIndex: audioStreamIndex ?? 0, src: preloadedSrc, playbackRate: playbackRate ?? 1, logLevel: logLevel ?? window.remotion_logLevel, muted: muted ?? false, volume: volume ?? 1, loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? 'repeat', loop: loop ?? false, trimAfter: trimAfter, trimBefore: trimBefore, name: name, showInTimeline: showInTimeline ?? true, stack: stack, disallowFallbackToHtml5Audio: disallowFallbackToHtml5Audio ?? false, toneFrequency: toneFrequency, fallbackHtml5AudioProps: fallbackHtml5AudioProps }));
  };
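
In the hunk above, the component is renamed to `AudioForPreviewAssertedShowing` and wrapped by an outer `AudioForPreview` that renders `null` whenever `getTimeInSeconds` resolves to no media time for the current frame. A minimal sketch of that gate, using only the call shape shown in the diff (the import path, trim value, and `src` are hypothetical):

```ts
// Sketch of the render gate added in AudioForPreview; assumptions marked.
import { getTimeInSeconds } from './get-time-in-seconds'; // assumed path

const shouldMountAudio = (frame: number, fps: number): boolean => {
  const time = getTimeInSeconds({
    unloopedTimeInSeconds: frame / fps,
    playbackRate: 1,
    loop: false,
    trimBefore: 60, // hypothetical: first 60 frames trimmed away
    trimAfter: undefined,
    mediaDurationInSeconds: Infinity,
    fps,
    ifNoMediaDuration: 'infinity',
    src: 'https://example.com/audio.mp3', // hypothetical src
  });
  // null => this frame maps to no media time, so nothing should mount
  return time !== null;
};
```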
@@ -11,18 +11,24 @@ export declare const makeAudioIterator: (audioSink: AudioBufferSink, startFromSe
  isDestroyed: () => boolean;
  addQueuedAudioNode: (node: AudioBufferSourceNode, timestamp: number, buffer: AudioBuffer) => void;
  removeQueuedAudioNode: (node: AudioBufferSourceNode) => void;
- removeAndReturnAllQueuedAudioNodes: () => QueuedNode[];
- getQueuedPeriod: () => {
+ getAndClearAudioChunksForAfterResuming: () => {
+ buffer: AudioBuffer;
+ timestamp: number;
+ }[];
+ getQueuedPeriod: (pendingBuffers: WrappedAudioBuffer[]) => {
  from: number;
  until: number;
  } | null;
- tryToSatisfySeek: (time: number) => Promise<{
+ tryToSatisfySeek: (time: number, allowWait: boolean) => Promise<{
  type: "not-satisfied";
  reason: string;
  } | {
  type: "satisfied";
  buffers: WrappedAudioBuffer[];
  }>;
+ addChunkForAfterResuming: (buffer: AudioBuffer, timestamp: number) => void;
+ moveQueuedChunksToPauseQueue: () => void;
+ getNumberOfChunksAfterResuming: () => number;
  };
  export type AudioIterator = ReturnType<typeof makeAudioIterator>;
  export declare const isAlreadyQueued: (time: number, queuedPeriod: {
@@ -4,6 +4,7 @@ export const makeAudioIterator = (audioSink, startFromSecond) => {
  let destroyed = false;
  const iterator = audioSink.buffers(startFromSecond);
  const queuedAudioNodes = [];
+ const audioChunksForAfterResuming = [];
  const cleanupAudioQueue = () => {
  for (const node of queuedAudioNodes) {
  node.node.stop();
@@ -12,14 +13,16 @@ export const makeAudioIterator = (audioSink, startFromSecond) => {
  };
  let lastReturnedBuffer = null;
  let iteratorEnded = false;
- const getNextOrNullIfNotAvailable = async () => {
+ const getNextOrNullIfNotAvailable = async (allowWait) => {
  const next = iterator.next();
- const result = await Promise.race([
- next,
- new Promise((resolve) => {
- Promise.resolve().then(() => resolve());
- }),
- ]);
+ const result = allowWait
+ ? await next
+ : await Promise.race([
+ next,
+ new Promise((resolve) => {
+ Promise.resolve().then(() => resolve());
+ }),
+ ]);
  if (!result) {
  return {
  type: 'need-to-wait-for-it',
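
The new `allowWait` flag shown above makes the microtask probe optional: when `allowWait` is false, `iterator.next()` is raced against a promise that settles after a microtask, so a buffer is returned only if it is already available. A self-contained sketch of the same pattern, assuming an arbitrary pending promise:

```ts
// Sketch: resolve with the value if `pending` is already settled,
// otherwise resolve with null instead of waiting.
const peekOrNull = <T>(pending: Promise<T>): Promise<T | null> =>
  Promise.race([
    pending,
    new Promise<null>((resolve) => {
      // Defer by one extra microtask so an already-settled
      // `pending` wins the race.
      Promise.resolve().then(() => resolve(null));
    }),
  ]);
```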
@@ -46,7 +49,7 @@ export const makeAudioIterator = (audioSink, startFromSecond) => {
  buffer: result.value ?? null,
  };
  };
- const tryToSatisfySeek = async (time) => {
+ const tryToSatisfySeek = async (time, allowWait) => {
  if (lastReturnedBuffer) {
  const bufferTimestamp = roundTo4Digits(lastReturnedBuffer.timestamp);
  const bufferEndTimestamp = roundTo4Digits(lastReturnedBuffer.timestamp + lastReturnedBuffer.duration);
@@ -65,20 +68,14 @@ export const makeAudioIterator = (audioSink, startFromSecond) => {
  // fall through
  }
  if (iteratorEnded) {
- if (lastReturnedBuffer) {
- return {
- type: 'satisfied',
- buffers: [lastReturnedBuffer],
- };
- }
  return {
- type: 'not-satisfied',
- reason: 'iterator ended',
+ type: 'satisfied',
+ buffers: lastReturnedBuffer ? [lastReturnedBuffer] : [],
  };
  }
  const toBeReturned = [];
  while (true) {
- const buffer = await getNextOrNullIfNotAvailable();
+ const buffer = await getNextOrNullIfNotAvailable(allowWait);
  if (buffer.type === 'need-to-wait-for-it') {
  return {
  type: 'not-satisfied',
@@ -88,15 +85,9 @@ export const makeAudioIterator = (audioSink, startFromSecond) => {
  if (buffer.type === 'got-buffer-or-end') {
  if (buffer.buffer === null) {
  iteratorEnded = true;
- if (lastReturnedBuffer) {
- return {
- type: 'satisfied',
- buffers: [lastReturnedBuffer],
- };
- }
  return {
- type: 'not-satisfied',
- reason: 'iterator ended and did not have buffer ready',
+ type: 'satisfied',
+ buffers: lastReturnedBuffer ? [lastReturnedBuffer] : [],
  };
  }
  const bufferTimestamp = roundTo4Digits(buffer.buffer.timestamp);
@@ -114,14 +105,42 @@ export const makeAudioIterator = (audioSink, startFromSecond) => {
  throw new Error('Unreachable');
  }
  };
+ const removeAndReturnAllQueuedAudioNodes = () => {
+ const nodes = queuedAudioNodes.slice();
+ for (const node of nodes) {
+ node.node.stop();
+ }
+ queuedAudioNodes.length = 0;
+ return nodes;
+ };
+ const addChunkForAfterResuming = (buffer, timestamp) => {
+ audioChunksForAfterResuming.push({ buffer, timestamp });
+ };
+ const moveQueuedChunksToPauseQueue = () => {
+ const toQueue = removeAndReturnAllQueuedAudioNodes();
+ for (const chunk of toQueue) {
+ addChunkForAfterResuming(chunk.buffer, chunk.timestamp);
+ }
+ };
+ const getNumberOfChunksAfterResuming = () => {
+ return audioChunksForAfterResuming.length;
+ };
  return {
  destroy: () => {
  cleanupAudioQueue();
  destroyed = true;
  iterator.return().catch(() => undefined);
+ audioChunksForAfterResuming.length = 0;
  },
- getNext: () => {
- return iterator.next();
+ getNext: async () => {
+ const next = await iterator.next();
+ if (next.value) {
+ lastReturnedBuffer = next.value;
+ }
+ else {
+ iteratorEnded = true;
+ }
+ return next;
  },
  isDestroyed: () => {
  return destroyed;
@@ -135,29 +154,38 @@ export const makeAudioIterator = (audioSink, startFromSecond) => {
  queuedAudioNodes.splice(index, 1);
  }
  },
- removeAndReturnAllQueuedAudioNodes: () => {
- const nodes = queuedAudioNodes.slice();
- for (const node of nodes) {
- node.node.stop();
- }
- queuedAudioNodes.length = 0;
- return nodes;
+ getAndClearAudioChunksForAfterResuming: () => {
+ const chunks = audioChunksForAfterResuming.slice();
+ audioChunksForAfterResuming.length = 0;
+ return chunks;
  },
- getQueuedPeriod: () => {
- const lastNode = queuedAudioNodes[queuedAudioNodes.length - 1];
- if (!lastNode) {
- return null;
+ getQueuedPeriod: (pendingBuffers) => {
+ let until = -Infinity;
+ let from = Infinity;
+ for (const buffer of pendingBuffers) {
+ until = Math.max(until, buffer.timestamp + buffer.duration);
+ from = Math.min(from, buffer.timestamp);
+ }
+ for (const node of queuedAudioNodes) {
+ until = Math.max(until, node.timestamp + node.buffer.duration);
+ from = Math.min(from, node.timestamp);
+ }
+ for (const chunk of audioChunksForAfterResuming) {
+ until = Math.max(until, chunk.timestamp + chunk.buffer.duration);
+ from = Math.min(from, chunk.timestamp);
  }
- const firstNode = queuedAudioNodes[0];
- if (!firstNode) {
+ if (!Number.isFinite(from) || !Number.isFinite(until)) {
  return null;
  }
  return {
- from: firstNode.timestamp,
- until: lastNode.timestamp + lastNode.buffer.duration,
+ from,
+ until,
  };
  },
  tryToSatisfySeek,
+ addChunkForAfterResuming,
+ moveQueuedChunksToPauseQueue,
+ getNumberOfChunksAfterResuming,
  };
  };
  export const isAlreadyQueued = (time, queuedPeriod) => {
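
The iterator rework above removes `removeAndReturnAllQueuedAudioNodes` from the public surface in favor of a pause queue: `moveQueuedChunksToPauseQueue()` stops the scheduled `AudioBufferSourceNode`s but parks their decoded buffers, and `getAndClearAudioChunksForAfterResuming()` hands them back for rescheduling. A hedged sketch of that round trip against the `AudioIterator` type from the .d.ts hunk (the import path and the `reschedule` callback are hypothetical):

```ts
import type { AudioIterator } from './audio-preview-iterator'; // assumed path

// Pause: stop live nodes but keep their buffers + timestamps around.
const pause = (iterator: AudioIterator): void => {
  iterator.moveQueuedChunksToPauseQueue();
};

// Resume: drain the pause queue and hand each chunk back to a
// caller-provided scheduler (hypothetical callback).
const resume = (
  iterator: AudioIterator,
  reschedule: (buffer: AudioBuffer, timestamp: number) => void,
): void => {
  for (const chunk of iterator.getAndClearAudioChunksForAfterResuming()) {
    reschedule(chunk.buffer, chunk.timestamp);
  }
};
```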
@@ -0,0 +1,66 @@
+ import type { InputAudioTrack, WrappedAudioBuffer } from 'mediabunny';
+ import type { Nonce } from './nonce-manager';
+ export declare const audioIteratorManager: ({ audioTrack, delayPlaybackHandleIfNotPremounting, sharedAudioContext, }: {
+ audioTrack: InputAudioTrack;
+ delayPlaybackHandleIfNotPremounting: () => {
+ unblock: () => void;
+ };
+ sharedAudioContext: AudioContext;
+ }) => {
+ startAudioIterator: ({ nonce, playbackRate, startFromSecond, getIsPlaying, scheduleAudioNode, }: {
+ startFromSecond: number;
+ nonce: Nonce;
+ playbackRate: number;
+ getIsPlaying: () => boolean;
+ scheduleAudioNode: (node: AudioBufferSourceNode, mediaTimestamp: number) => void;
+ }) => Promise<void>;
+ resumeScheduledAudioChunks: ({ playbackRate, scheduleAudioNode, }: {
+ playbackRate: number;
+ scheduleAudioNode: (node: AudioBufferSourceNode, mediaTimestamp: number) => void;
+ }) => void;
+ pausePlayback: () => void;
+ getAudioBufferIterator: () => {
+ destroy: () => void;
+ getNext: () => Promise<IteratorResult<WrappedAudioBuffer, void>>;
+ isDestroyed: () => boolean;
+ addQueuedAudioNode: (node: AudioBufferSourceNode, timestamp: number, buffer: AudioBuffer) => void;
+ removeQueuedAudioNode: (node: AudioBufferSourceNode) => void;
+ getAndClearAudioChunksForAfterResuming: () => {
+ buffer: AudioBuffer;
+ timestamp: number;
+ }[];
+ getQueuedPeriod: (pendingBuffers: WrappedAudioBuffer[]) => {
+ from: number;
+ until: number;
+ } | null;
+ tryToSatisfySeek: (time: number, allowWait: boolean) => Promise<{
+ type: "not-satisfied";
+ reason: string;
+ } | {
+ type: "satisfied";
+ buffers: WrappedAudioBuffer[];
+ }>;
+ addChunkForAfterResuming: (buffer: AudioBuffer, timestamp: number) => void;
+ moveQueuedChunksToPauseQueue: () => void;
+ getNumberOfChunksAfterResuming: () => number;
+ } | null;
+ destroy: () => void;
+ seek: ({ newTime, nonce, fps, playbackRate, getIsPlaying, scheduleAudioNode, }: {
+ newTime: number;
+ nonce: Nonce;
+ fps: number;
+ playbackRate: number;
+ getIsPlaying: () => boolean;
+ scheduleAudioNode: (node: AudioBufferSourceNode, mediaTimestamp: number) => void;
+ }) => Promise<void>;
+ getAudioIteratorsCreated: () => number;
+ setMuted: (newMuted: boolean) => void;
+ setVolume: (volume: number) => void;
+ scheduleAudioChunk: ({ buffer, mediaTimestamp, playbackRate, scheduleAudioNode, }: {
+ buffer: AudioBuffer;
+ mediaTimestamp: number;
+ playbackRate: number;
+ scheduleAudioNode: (node: AudioBufferSourceNode, mediaTimestamp: number) => void;
+ }) => void;
+ };
+ export type AudioIteratorManager = ReturnType<typeof audioIteratorManager>;
@@ -0,0 +1,181 @@
+ import { AudioBufferSink } from 'mediabunny';
+ import { isAlreadyQueued, makeAudioIterator, } from './audio/audio-preview-iterator';
+ export const audioIteratorManager = ({ audioTrack, delayPlaybackHandleIfNotPremounting, sharedAudioContext, }) => {
+ let muted = false;
+ let currentVolume = 1;
+ const gainNode = sharedAudioContext.createGain();
+ gainNode.connect(sharedAudioContext.destination);
+ const audioSink = new AudioBufferSink(audioTrack);
+ let audioBufferIterator = null;
+ let audioIteratorsCreated = 0;
+ const scheduleAudioChunk = ({ buffer, mediaTimestamp, playbackRate, scheduleAudioNode, }) => {
+ if (!audioBufferIterator) {
+ throw new Error('Audio buffer iterator not found');
+ }
+ const node = sharedAudioContext.createBufferSource();
+ node.buffer = buffer;
+ node.playbackRate.value = playbackRate;
+ node.connect(gainNode);
+ scheduleAudioNode(node, mediaTimestamp);
+ const iterator = audioBufferIterator;
+ iterator.addQueuedAudioNode(node, mediaTimestamp, buffer);
+ node.onended = () => {
+ // Some leniancy is needed as we find that sometimes onended is fired a bit too early
+ setTimeout(() => {
+ iterator.removeQueuedAudioNode(node);
+ }, 30);
+ };
+ };
+ const onAudioChunk = ({ getIsPlaying, buffer, playbackRate, scheduleAudioNode, }) => {
+ if (getIsPlaying()) {
+ scheduleAudioChunk({
+ buffer: buffer.buffer,
+ mediaTimestamp: buffer.timestamp,
+ playbackRate,
+ scheduleAudioNode,
+ });
+ }
+ else {
+ if (!audioBufferIterator) {
+ throw new Error('Audio buffer iterator not found');
+ }
+ audioBufferIterator.addChunkForAfterResuming(buffer.buffer, buffer.timestamp);
+ }
+ };
+ const startAudioIterator = async ({ nonce, playbackRate, startFromSecond, getIsPlaying, scheduleAudioNode, }) => {
+ audioBufferIterator?.destroy();
+ const delayHandle = delayPlaybackHandleIfNotPremounting();
+ const iterator = makeAudioIterator(audioSink, startFromSecond);
+ audioIteratorsCreated++;
+ audioBufferIterator = iterator;
+ // Schedule up to 3 buffers ahead of the current time
+ for (let i = 0; i < 3; i++) {
+ const result = await iterator.getNext();
+ if (iterator.isDestroyed()) {
+ delayHandle.unblock();
+ return;
+ }
+ if (nonce.isStale()) {
+ delayHandle.unblock();
+ return;
+ }
+ if (!result.value) {
+ // media ended
+ delayHandle.unblock();
+ return;
+ }
+ onAudioChunk({
+ getIsPlaying,
+ buffer: result.value,
+ playbackRate,
+ scheduleAudioNode,
+ });
+ }
+ delayHandle.unblock();
+ };
+ const pausePlayback = () => {
+ if (!audioBufferIterator) {
+ return;
+ }
+ audioBufferIterator.moveQueuedChunksToPauseQueue();
+ };
+ const seek = async ({ newTime, nonce, fps, playbackRate, getIsPlaying, scheduleAudioNode, }) => {
+ if (!audioBufferIterator) {
+ await startAudioIterator({
+ nonce,
+ playbackRate,
+ startFromSecond: newTime,
+ getIsPlaying,
+ scheduleAudioNode,
+ });
+ return;
+ }
+ const currentTimeIsAlreadyQueued = isAlreadyQueued(newTime, audioBufferIterator.getQueuedPeriod([]));
+ const toBeScheduled = [];
+ if (!currentTimeIsAlreadyQueued) {
+ const audioSatisfyResult = await audioBufferIterator.tryToSatisfySeek(newTime, false);
+ if (nonce.isStale()) {
+ return;
+ }
+ if (audioSatisfyResult.type === 'not-satisfied') {
+ await startAudioIterator({
+ nonce,
+ playbackRate,
+ startFromSecond: newTime,
+ getIsPlaying,
+ scheduleAudioNode,
+ });
+ return;
+ }
+ toBeScheduled.push(...audioSatisfyResult.buffers);
+ }
+ const nextTime = newTime +
+ // start of next frame
+ (1 / fps) * playbackRate +
+ // need the full duration of the next frame to be queued
+ (1 / fps) * playbackRate;
+ const nextIsAlreadyQueued = isAlreadyQueued(nextTime, audioBufferIterator.getQueuedPeriod(toBeScheduled));
+ if (!nextIsAlreadyQueued) {
+ // here we allow waiting for the next buffer to be loaded
+ // it's better than to create a new iterator
+ // because we already know we are in the right spot
+ const audioSatisfyResult = await audioBufferIterator.tryToSatisfySeek(nextTime, true);
+ if (nonce.isStale()) {
+ return;
+ }
+ if (audioSatisfyResult.type === 'not-satisfied') {
+ await startAudioIterator({
+ nonce,
+ playbackRate,
+ startFromSecond: newTime,
+ getIsPlaying,
+ scheduleAudioNode,
+ });
+ return;
+ }
+ toBeScheduled.push(...audioSatisfyResult.buffers);
+ }
+ for (const buffer of toBeScheduled) {
+ onAudioChunk({
+ getIsPlaying,
+ buffer,
+ playbackRate,
+ scheduleAudioNode,
+ });
+ }
+ };
+ const resumeScheduledAudioChunks = ({ playbackRate, scheduleAudioNode, }) => {
+ if (!audioBufferIterator) {
+ return;
+ }
+ for (const chunk of audioBufferIterator.getAndClearAudioChunksForAfterResuming()) {
+ scheduleAudioChunk({
+ buffer: chunk.buffer,
+ mediaTimestamp: chunk.timestamp,
+ playbackRate,
+ scheduleAudioNode,
+ });
+ }
+ };
+ return {
+ startAudioIterator,
+ resumeScheduledAudioChunks,
+ pausePlayback,
+ getAudioBufferIterator: () => audioBufferIterator,
+ destroy: () => {
+ audioBufferIterator?.destroy();
+ audioBufferIterator = null;
+ },
+ seek,
+ getAudioIteratorsCreated: () => audioIteratorsCreated,
+ setMuted: (newMuted) => {
+ muted = newMuted;
+ gainNode.gain.value = muted ? 0 : currentVolume;
+ },
+ setVolume: (volume) => {
+ currentVolume = Math.max(0, volume);
+ gainNode.gain.value = muted ? 0 : currentVolume;
+ },
+ scheduleAudioChunk,
+ };
+ };
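
The new `audioIteratorManager` ties the pieces together: `pausePlayback()` parks queued chunks via the iterator, `resumeScheduledAudioChunks()` turns them back into `AudioBufferSourceNode`s through `scheduleAudioChunk`, and `seek()` first tries `getQueuedPeriod`/`tryToSatisfySeek` before falling back to a fresh iterator. A rough wiring sketch under stated assumptions (the import path, `track`, the scheduling callback, and the delay-handle stub are hypothetical stand-ins):

```ts
import type { InputAudioTrack } from 'mediabunny';
import { audioIteratorManager } from './audio-iterator-manager'; // assumed path

declare const track: InputAudioTrack; // hypothetical: audio track opened via mediabunny
declare const scheduleNode: (
  node: AudioBufferSourceNode,
  mediaTimestamp: number,
) => void; // hypothetical: starts the node at the matching context time

const manager = audioIteratorManager({
  audioTrack: track,
  sharedAudioContext: new AudioContext(),
  // Hypothetical stub; the real handle presumably delays Remotion
  // playback while buffers load, unless the sequence is premounting.
  delayPlaybackHandleIfNotPremounting: () => ({ unblock: () => {} }),
});

manager.setVolume(0.8);
manager.pausePlayback(); // park queued chunks instead of discarding them
manager.resumeScheduledAudioChunks({
  playbackRate: 1,
  scheduleAudioNode: scheduleNode, // parked chunks are rescheduled here
});
```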
@@ -0,0 +1,5 @@
+ export declare const calculatePlaybackTime: ({ audioSyncAnchor, currentTime, playbackRate, }: {
+ audioSyncAnchor: number;
+ currentTime: number;
+ playbackRate: number;
+ }) => number;
@@ -0,0 +1,4 @@
+ export const calculatePlaybackTime = ({ audioSyncAnchor, currentTime, playbackRate, }) => {
+ const timeSinceAnchor = currentTime - audioSyncAnchor;
+ return timeSinceAnchor * playbackRate;
+ };
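
`calculatePlaybackTime` converts a clock reading into media time: the time elapsed since `audioSyncAnchor` (the clock value at which playback was last anchored) is scaled by `playbackRate`. A worked example (import path assumed):

```ts
import { calculatePlaybackTime } from './calculate-playback-time'; // assumed path

// Anchored at clock time 10s; 2s of wall-clock time later,
// at 1.5x speed, 3s of media have elapsed: (12 - 10) * 1.5 = 3.
const mediaTime = calculatePlaybackTime({
  audioSyncAnchor: 10,
  currentTime: 12,
  playbackRate: 1.5,
});
console.log(mediaTime); // 3
```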