@remotion/media 4.0.355 → 4.0.356

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. package/dist/audio/audio-for-rendering.js +37 -3
  2. package/dist/audio/audio.js +1 -1
  3. package/dist/audio/props.d.ts +15 -0
  4. package/dist/audio-extraction/audio-iterator.d.ts +3 -2
  5. package/dist/audio-extraction/audio-iterator.js +13 -2
  6. package/dist/audio-extraction/audio-manager.d.ts +6 -5
  7. package/dist/audio-extraction/audio-manager.js +5 -3
  8. package/dist/audio-extraction/extract-audio.d.ts +3 -2
  9. package/dist/audio-extraction/extract-audio.js +11 -4
  10. package/dist/caches.d.ts +6 -5
  11. package/dist/convert-audiodata/apply-tonefrequency.d.ts +2 -0
  12. package/dist/convert-audiodata/apply-tonefrequency.js +44 -0
  13. package/dist/convert-audiodata/wsola.d.ts +13 -0
  14. package/dist/convert-audiodata/wsola.js +197 -0
  15. package/dist/esm/index.mjs +1297 -140
  16. package/dist/extract-frame-and-audio.d.ts +3 -2
  17. package/dist/extract-frame-and-audio.js +60 -26
  18. package/dist/get-sink-weak.d.ts +2 -7
  19. package/dist/index.d.ts +12 -3
  20. package/dist/index.js +11 -2
  21. package/dist/video/media-player.d.ts +70 -0
  22. package/dist/video/media-player.js +419 -0
  23. package/dist/video/props.d.ts +36 -18
  24. package/dist/video/timeout-utils.d.ts +2 -0
  25. package/dist/video/timeout-utils.js +18 -0
  26. package/dist/video/video-for-preview.d.ts +17 -0
  27. package/dist/video/video-for-preview.js +218 -0
  28. package/dist/video/video-for-rendering.d.ts +23 -2
  29. package/dist/video/video-for-rendering.js +47 -4
  30. package/dist/video/video.js +13 -14
  31. package/dist/video-extraction/extract-frame-via-broadcast-channel.d.ts +3 -2
  32. package/dist/video-extraction/extract-frame-via-broadcast-channel.js +53 -4
  33. package/dist/video-extraction/extract-frame.d.ts +2 -1
  34. package/dist/video-extraction/extract-frame.js +9 -3
  35. package/dist/video-extraction/get-frames-since-keyframe.d.ts +12 -7
  36. package/dist/video-extraction/get-frames-since-keyframe.js +70 -17
  37. package/package.json +3 -3
@@ -1,6 +1,6 @@
1
1
  import type { LogLevel } from 'remotion';
2
2
  import type { PcmS16AudioData } from './convert-audiodata/convert-audiodata';
3
- export declare const extractFrameAndAudio: ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, loop, }: {
3
+ export declare const extractFrameAndAudio: ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, loop, audioStreamIndex, }: {
4
4
  src: string;
5
5
  timeInSeconds: number;
6
6
  logLevel: LogLevel;
@@ -9,8 +9,9 @@ export declare const extractFrameAndAudio: ({ src, timeInSeconds, logLevel, dura
9
9
  includeAudio: boolean;
10
10
  includeVideo: boolean;
11
11
  loop: boolean;
12
+ audioStreamIndex: number;
12
13
  }) => Promise<{
13
14
  frame: VideoFrame | null;
14
15
  audio: PcmS16AudioData | null;
15
16
  durationInSeconds: number | null;
16
- }>;
17
+ } | "cannot-decode" | "unknown-container-format" | "network-error">;
@@ -1,29 +1,63 @@
1
1
  import { extractAudio } from './audio-extraction/extract-audio';
2
2
  import { extractFrame } from './video-extraction/extract-frame';
3
- export const extractFrameAndAudio = async ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, loop, }) => {
4
- const [frame, audio] = await Promise.all([
5
- includeVideo
6
- ? extractFrame({
7
- src,
8
- timeInSeconds,
9
- logLevel,
10
- loop,
11
- })
12
- : null,
13
- includeAudio
14
- ? extractAudio({
15
- src,
16
- timeInSeconds,
17
- durationInSeconds,
18
- logLevel,
19
- loop,
20
- playbackRate,
21
- })
22
- : null,
23
- ]);
24
- return {
25
- frame: frame?.toVideoFrame() ?? null,
26
- audio: audio?.data ?? null,
27
- durationInSeconds: audio?.durationInSeconds ?? null,
28
- };
3
+ export const extractFrameAndAudio = async ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, loop, audioStreamIndex, }) => {
4
+ try {
5
+ const [frame, audio] = await Promise.all([
6
+ includeVideo
7
+ ? extractFrame({
8
+ src,
9
+ timeInSeconds,
10
+ logLevel,
11
+ loop,
12
+ })
13
+ : null,
14
+ includeAudio
15
+ ? extractAudio({
16
+ src,
17
+ timeInSeconds,
18
+ durationInSeconds,
19
+ logLevel,
20
+ loop,
21
+ playbackRate,
22
+ audioStreamIndex,
23
+ })
24
+ : null,
25
+ ]);
26
+ if (frame === 'cannot-decode') {
27
+ return 'cannot-decode';
28
+ }
29
+ if (frame === 'unknown-container-format') {
30
+ return 'unknown-container-format';
31
+ }
32
+ if (audio === 'unknown-container-format') {
33
+ if (frame !== null) {
34
+ frame?.close();
35
+ }
36
+ return 'unknown-container-format';
37
+ }
38
+ if (audio === 'cannot-decode') {
39
+ if (frame !== null) {
40
+ frame?.close();
41
+ }
42
+ return 'cannot-decode';
43
+ }
44
+ return {
45
+ frame: frame?.toVideoFrame() ?? null,
46
+ audio: audio?.data ?? null,
47
+ durationInSeconds: audio?.durationInSeconds ?? null,
48
+ };
49
+ }
50
+ catch (err) {
51
+ const error = err;
52
+ if (
53
+ // Chrome
54
+ error.message.includes('Failed to fetch') ||
55
+ // Safari
56
+ error.message.includes('Load failed') ||
57
+ // Firefox
58
+ error.message.includes('NetworkError when attempting to fetch resource')) {
59
+ return 'network-error';
60
+ }
61
+ throw err;
62
+ }
29
63
  };
@@ -2,13 +2,8 @@ import type { LogLevel } from 'remotion';
2
2
  import type { GetSink } from './video-extraction/get-frames-since-keyframe';
3
3
  export declare const sinkPromises: Record<string, Promise<GetSink>>;
4
4
  export declare const getSinkWeak: (src: string, logLevel: LogLevel) => Promise<{
5
- video: {
6
- sampleSink: import("mediabunny").VideoSampleSink;
7
- packetSink: import("mediabunny").EncodedPacketSink;
8
- } | null;
9
- audio: {
10
- sampleSink: import("mediabunny").AudioSampleSink;
11
- } | null;
5
+ getVideo: () => Promise<import("./video-extraction/get-frames-since-keyframe").VideoSinkResult>;
6
+ getAudio: (index: number) => Promise<import("./video-extraction/get-frames-since-keyframe").AudioSinkResult>;
12
7
  actualMatroskaTimestamps: {
13
8
  observeTimestamp: (startTime: number) => void;
14
9
  getRealTimestamp: (observedTimestamp: number) => number | null;
package/dist/index.d.ts CHANGED
@@ -1,4 +1,13 @@
1
- export { Audio as experimental_Audio } from './audio/audio';
2
- export { AudioProps } from './audio/props';
1
+ import { Audio } from './audio/audio';
2
+ import { Video } from './video/video';
3
+ /**
4
+ * @deprecated Now just `Audio`
5
+ */
6
+ export declare const experimental_Audio: import("react").FC<import(".").AudioProps>;
7
+ /**
8
+ * @deprecated Now just `Video`
9
+ */
10
+ export declare const experimental_Video: import("react").FC<import(".").VideoProps>;
11
+ export { AudioProps, FallbackHtml5AudioProps } from './audio/props';
3
12
  export { VideoProps } from './video/props';
4
- export { Video as experimental_Video } from './video/video';
13
+ export { Audio, Video };
package/dist/index.js CHANGED
@@ -1,2 +1,11 @@
1
- export { Audio as experimental_Audio } from './audio/audio';
2
- export { Video as experimental_Video } from './video/video';
1
+ import { Audio } from './audio/audio';
2
+ import { Video } from './video/video';
3
+ /**
4
+ * @deprecated Now just `Audio`
5
+ */
6
+ export const experimental_Audio = Audio;
7
+ /**
8
+ * @deprecated Now just `Video`
9
+ */
10
+ export const experimental_Video = Video;
11
+ export { Audio, Video };
@@ -0,0 +1,70 @@
1
+ import type { LogLevel } from 'remotion';
2
+ export declare const SEEK_THRESHOLD = 0.05;
3
+ export declare class MediaPlayer {
4
+ private canvas;
5
+ private context;
6
+ private src;
7
+ private logLevel;
8
+ private playbackRate;
9
+ private canvasSink;
10
+ private videoFrameIterator;
11
+ private nextFrame;
12
+ private audioSink;
13
+ private audioBufferIterator;
14
+ private queuedAudioNodes;
15
+ private gainNode;
16
+ private sharedAudioContext;
17
+ private audioSyncAnchor;
18
+ private playing;
19
+ private muted;
20
+ private animationFrameId;
21
+ private videoAsyncId;
22
+ private initialized;
23
+ private totalDuration;
24
+ private isBuffering;
25
+ private onBufferingChangeCallback?;
26
+ private audioBufferHealth;
27
+ private audioIteratorStarted;
28
+ private readonly HEALTHY_BUFER_THRESHOLD_SECONDS;
29
+ private onVideoFrameCallback?;
30
+ constructor({ canvas, src, logLevel, sharedAudioContext, }: {
31
+ canvas: HTMLCanvasElement;
32
+ src: string;
33
+ logLevel: LogLevel;
34
+ sharedAudioContext: AudioContext;
35
+ });
36
+ private input;
37
+ private isReady;
38
+ private hasAudio;
39
+ private isCurrentlyBuffering;
40
+ initialize(startTime?: number): Promise<void>;
41
+ private cleanupAudioQueue;
42
+ private cleanAudioIteratorAndNodes;
43
+ seekTo(time: number): Promise<void>;
44
+ play(): Promise<void>;
45
+ pause(): void;
46
+ setMuted(muted: boolean): void;
47
+ setVolume(volume: number): void;
48
+ setPlaybackRate(rate: number): Promise<void>;
49
+ dispose(): void;
50
+ private getPlaybackTime;
51
+ private getAdjustedTimestamp;
52
+ private scheduleAudioChunk;
53
+ onBufferingChange(callback: (isBuffering: boolean) => void): void;
54
+ onVideoFrame(callback: (frame: CanvasImageSource) => void): void;
55
+ private canRenderVideo;
56
+ private startRenderLoop;
57
+ private stopRenderLoop;
58
+ private render;
59
+ private shouldRenderFrame;
60
+ private drawCurrentFrame;
61
+ private startAudioIterator;
62
+ private startVideoIterator;
63
+ private updateNextFrame;
64
+ private bufferingStartedAtMs;
65
+ private minBufferingTimeoutMs;
66
+ private setBufferingState;
67
+ private maybeResumeFromBuffering;
68
+ private maybeForceResumeFromBuffering;
69
+ private runAudioIterator;
70
+ }
@@ -0,0 +1,419 @@
1
+ import { ALL_FORMATS, AudioBufferSink, CanvasSink, Input, UrlSource, } from 'mediabunny';
2
+ import { Internals } from 'remotion';
3
+ import { sleep, withTimeout } from './timeout-utils';
4
+ export const SEEK_THRESHOLD = 0.05;
5
+ const AUDIO_BUFFER_TOLERANCE_THRESHOLD = 0.1;
6
+ export class MediaPlayer {
7
+ constructor({ canvas, src, logLevel, sharedAudioContext, }) {
8
+ this.canvasSink = null;
9
+ this.videoFrameIterator = null;
10
+ this.nextFrame = null;
11
+ this.audioSink = null;
12
+ this.audioBufferIterator = null;
13
+ this.queuedAudioNodes = new Set();
14
+ this.gainNode = null;
15
+ // audioDelay = mediaTimestamp + audioSyncAnchor - sharedAudioContext.currentTime
16
+ this.audioSyncAnchor = 0;
17
+ this.playing = false;
18
+ this.muted = false;
19
+ this.animationFrameId = null;
20
+ this.videoAsyncId = 0;
21
+ this.initialized = false;
22
+ this.totalDuration = 0;
23
+ // for remotion buffer state
24
+ this.isBuffering = false;
25
+ this.audioBufferHealth = 0;
26
+ this.audioIteratorStarted = false;
27
+ this.HEALTHY_BUFER_THRESHOLD_SECONDS = 1;
28
+ this.input = null;
29
+ this.render = () => {
30
+ if (this.isBuffering) {
31
+ this.maybeForceResumeFromBuffering();
32
+ }
33
+ if (this.shouldRenderFrame()) {
34
+ this.drawCurrentFrame();
35
+ }
36
+ if (this.playing) {
37
+ this.animationFrameId = requestAnimationFrame(this.render);
38
+ }
39
+ else {
40
+ this.animationFrameId = null;
41
+ }
42
+ };
43
+ this.startAudioIterator = async (startFromSecond) => {
44
+ if (!this.hasAudio())
45
+ return;
46
+ // Clean up existing audio iterator
47
+ await this.audioBufferIterator?.return();
48
+ this.audioIteratorStarted = false;
49
+ this.audioBufferHealth = 0;
50
+ try {
51
+ this.audioBufferIterator = this.audioSink.buffers(startFromSecond);
52
+ this.runAudioIterator(startFromSecond);
53
+ }
54
+ catch (error) {
55
+ Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, '[MediaPlayer] Failed to start audio iterator', error);
56
+ }
57
+ };
58
+ this.startVideoIterator = async (timeToSeek) => {
59
+ if (!this.canvasSink) {
60
+ return;
61
+ }
62
+ this.videoAsyncId++;
63
+ const currentAsyncId = this.videoAsyncId;
64
+ await this.videoFrameIterator?.return();
65
+ this.videoFrameIterator = this.canvasSink.canvases(timeToSeek);
66
+ try {
67
+ const firstFrame = (await this.videoFrameIterator.next()).value ?? null;
68
+ const secondFrame = (await this.videoFrameIterator.next()).value ?? null;
69
+ if (currentAsyncId !== this.videoAsyncId) {
70
+ return;
71
+ }
72
+ if (firstFrame) {
73
+ Internals.Log.trace({ logLevel: this.logLevel, tag: '@remotion/media' }, `[MediaPlayer] Drew initial frame ${firstFrame.timestamp.toFixed(3)}s`);
74
+ this.context.drawImage(firstFrame.canvas, 0, 0);
75
+ if (this.onVideoFrameCallback) {
76
+ this.onVideoFrameCallback(this.canvas);
77
+ }
78
+ }
79
+ this.nextFrame = secondFrame ?? null;
80
+ if (secondFrame) {
81
+ Internals.Log.trace({ logLevel: this.logLevel, tag: '@remotion/media' }, `[MediaPlayer] Buffered next frame ${secondFrame.timestamp.toFixed(3)}s`);
82
+ }
83
+ }
84
+ catch (error) {
85
+ Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, '[MediaPlayer] Failed to start video iterator', error);
86
+ }
87
+ };
88
+ this.updateNextFrame = async () => {
89
+ if (!this.videoFrameIterator) {
90
+ return;
91
+ }
92
+ try {
93
+ while (true) {
94
+ const newNextFrame = (await this.videoFrameIterator.next()).value ?? null;
95
+ if (!newNextFrame) {
96
+ break;
97
+ }
98
+ if (this.getAdjustedTimestamp(newNextFrame.timestamp) <=
99
+ this.getPlaybackTime()) {
100
+ continue;
101
+ }
102
+ else {
103
+ this.nextFrame = newNextFrame;
104
+ Internals.Log.trace({ logLevel: this.logLevel, tag: '@remotion/media' }, `[MediaPlayer] Buffered next frame ${newNextFrame.timestamp.toFixed(3)}s`);
105
+ break;
106
+ }
107
+ }
108
+ }
109
+ catch (error) {
110
+ Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, '[MediaPlayer] Failed to update next frame', error);
111
+ }
112
+ };
113
+ this.bufferingStartedAtMs = null;
114
+ this.minBufferingTimeoutMs = 500;
115
+ this.runAudioIterator = async (startFromSecond) => {
116
+ if (!this.hasAudio() || !this.audioBufferIterator)
117
+ return;
118
+ try {
119
+ let totalBufferDuration = 0;
120
+ let isFirstBuffer = true;
121
+ this.audioIteratorStarted = true;
122
+ while (true) {
123
+ const BUFFERING_TIMEOUT_MS = 50;
124
+ let result;
125
+ try {
126
+ result = await withTimeout(this.audioBufferIterator.next(), BUFFERING_TIMEOUT_MS, 'Iterator timeout');
127
+ }
128
+ catch {
129
+ this.setBufferingState(true);
130
+ await sleep(10);
131
+ continue;
132
+ }
133
+ if (result.done || !result.value) {
134
+ break;
135
+ }
136
+ const { buffer, timestamp, duration } = result.value;
137
+ totalBufferDuration += duration;
138
+ this.audioBufferHealth = Math.max(0, totalBufferDuration / this.playbackRate);
139
+ this.maybeResumeFromBuffering(totalBufferDuration / this.playbackRate);
140
+ if (this.playing && !this.muted) {
141
+ if (isFirstBuffer) {
142
+ this.audioSyncAnchor =
143
+ this.sharedAudioContext.currentTime -
144
+ this.getAdjustedTimestamp(timestamp);
145
+ isFirstBuffer = false;
146
+ }
147
+ // if timestamp is less than timeToSeek, skip
148
+ // context: for some reason, mediabunny returns buffer at 9.984s, when requested at 10s
149
+ if (timestamp < startFromSecond - AUDIO_BUFFER_TOLERANCE_THRESHOLD) {
150
+ continue;
151
+ }
152
+ this.scheduleAudioChunk(buffer, timestamp);
153
+ }
154
+ if (this.getAdjustedTimestamp(timestamp) - this.getPlaybackTime() >=
155
+ 1) {
156
+ await new Promise((resolve) => {
157
+ const check = () => {
158
+ if (this.getAdjustedTimestamp(timestamp) - this.getPlaybackTime() <
159
+ 1) {
160
+ resolve();
161
+ }
162
+ else {
163
+ requestAnimationFrame(check);
164
+ }
165
+ };
166
+ check();
167
+ });
168
+ }
169
+ }
170
+ }
171
+ catch (error) {
172
+ Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, '[MediaPlayer] Failed to run audio iterator', error);
173
+ }
174
+ };
175
+ this.canvas = canvas;
176
+ this.src = src;
177
+ this.logLevel = logLevel ?? 'info';
178
+ this.sharedAudioContext = sharedAudioContext;
179
+ this.playbackRate = 1;
180
+ const context = canvas.getContext('2d', {
181
+ alpha: false,
182
+ desynchronized: true,
183
+ });
184
+ if (!context) {
185
+ throw new Error('Could not get 2D context from canvas');
186
+ }
187
+ this.context = context;
188
+ }
189
+ isReady() {
190
+ return this.initialized && Boolean(this.sharedAudioContext);
191
+ }
192
+ hasAudio() {
193
+ return Boolean(this.audioSink && this.sharedAudioContext && this.gainNode);
194
+ }
195
+ isCurrentlyBuffering() {
196
+ return this.isBuffering && Boolean(this.bufferingStartedAtMs);
197
+ }
198
+ async initialize(startTime = 0) {
199
+ try {
200
+ const urlSource = new UrlSource(this.src);
201
+ const input = new Input({
202
+ source: urlSource,
203
+ formats: ALL_FORMATS,
204
+ });
205
+ this.input = input;
206
+ this.totalDuration = await input.computeDuration();
207
+ const videoTrack = await input.getPrimaryVideoTrack();
208
+ const audioTrack = await input.getPrimaryAudioTrack();
209
+ if (!videoTrack && !audioTrack) {
210
+ throw new Error(`No video or audio track found for ${this.src}`);
211
+ }
212
+ if (videoTrack) {
213
+ this.canvasSink = new CanvasSink(videoTrack, {
214
+ poolSize: 2,
215
+ fit: 'contain',
216
+ });
217
+ this.canvas.width = videoTrack.displayWidth;
218
+ this.canvas.height = videoTrack.displayHeight;
219
+ }
220
+ if (audioTrack && this.sharedAudioContext) {
221
+ this.audioSink = new AudioBufferSink(audioTrack);
222
+ this.gainNode = this.sharedAudioContext.createGain();
223
+ this.gainNode.connect(this.sharedAudioContext.destination);
224
+ }
225
+ if (this.sharedAudioContext) {
226
+ this.audioSyncAnchor = this.sharedAudioContext.currentTime - startTime;
227
+ }
228
+ this.initialized = true;
229
+ const mediaTime = startTime * this.playbackRate;
230
+ await this.startAudioIterator(mediaTime);
231
+ await this.startVideoIterator(mediaTime);
232
+ this.startRenderLoop();
233
+ }
234
+ catch (error) {
235
+ Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, '[MediaPlayer] Failed to initialize', error);
236
+ throw error;
237
+ }
238
+ }
239
+ cleanupAudioQueue() {
240
+ for (const node of this.queuedAudioNodes) {
241
+ node.stop();
242
+ }
243
+ this.queuedAudioNodes.clear();
244
+ }
245
+ async cleanAudioIteratorAndNodes() {
246
+ await this.audioBufferIterator?.return();
247
+ this.audioBufferIterator = null;
248
+ this.audioIteratorStarted = false;
249
+ this.audioBufferHealth = 0;
250
+ this.cleanupAudioQueue();
251
+ }
252
+ async seekTo(time) {
253
+ if (!this.isReady())
254
+ return;
255
+ const newTime = Math.max(0, Math.min(time, this.totalDuration));
256
+ const currentPlaybackTime = this.getPlaybackTime();
257
+ const isSignificantSeek = Math.abs(newTime - currentPlaybackTime) > SEEK_THRESHOLD;
258
+ if (isSignificantSeek) {
259
+ this.nextFrame = null;
260
+ this.audioSyncAnchor = this.sharedAudioContext.currentTime - newTime;
261
+ if (this.audioSink) {
262
+ await this.cleanAudioIteratorAndNodes();
263
+ }
264
+ const mediaTime = newTime * this.playbackRate;
265
+ await this.startAudioIterator(mediaTime);
266
+ await this.startVideoIterator(mediaTime);
267
+ }
268
+ if (!this.playing) {
269
+ this.render();
270
+ }
271
+ }
272
+ async play() {
273
+ if (!this.isReady())
274
+ return;
275
+ if (!this.playing) {
276
+ if (this.sharedAudioContext.state === 'suspended') {
277
+ await this.sharedAudioContext.resume();
278
+ }
279
+ this.playing = true;
280
+ this.startRenderLoop();
281
+ }
282
+ }
283
+ pause() {
284
+ this.playing = false;
285
+ this.cleanupAudioQueue();
286
+ this.stopRenderLoop();
287
+ }
288
+ setMuted(muted) {
289
+ this.muted = muted;
290
+ if (muted) {
291
+ this.cleanupAudioQueue();
292
+ }
293
+ }
294
+ setVolume(volume) {
295
+ if (!this.gainNode) {
296
+ return;
297
+ }
298
+ const appliedVolume = Math.max(0, volume);
299
+ this.gainNode.gain.value = appliedVolume;
300
+ }
301
+ async setPlaybackRate(rate) {
302
+ if (this.playbackRate === rate)
303
+ return;
304
+ this.playbackRate = rate;
305
+ if (this.hasAudio() && this.playing) {
306
+ const currentPlaybackTime = this.getPlaybackTime();
307
+ const mediaTime = currentPlaybackTime * rate;
308
+ await this.cleanAudioIteratorAndNodes();
309
+ await this.startAudioIterator(mediaTime);
310
+ }
311
+ }
312
+ dispose() {
313
+ this.input?.dispose();
314
+ this.stopRenderLoop();
315
+ this.videoFrameIterator?.return();
316
+ this.cleanAudioIteratorAndNodes();
317
+ this.videoAsyncId++;
318
+ }
319
+ getPlaybackTime() {
320
+ return this.sharedAudioContext.currentTime - this.audioSyncAnchor;
321
+ }
322
+ getAdjustedTimestamp(mediaTimestamp) {
323
+ return mediaTimestamp / this.playbackRate;
324
+ }
325
+ scheduleAudioChunk(buffer, mediaTimestamp) {
326
+ const adjustedTimestamp = this.getAdjustedTimestamp(mediaTimestamp);
327
+ const targetTime = adjustedTimestamp + this.audioSyncAnchor;
328
+ const delay = targetTime - this.sharedAudioContext.currentTime;
329
+ const node = this.sharedAudioContext.createBufferSource();
330
+ node.buffer = buffer;
331
+ node.playbackRate.value = this.playbackRate;
332
+ node.connect(this.gainNode);
333
+ if (delay >= 0) {
334
+ node.start(targetTime);
335
+ }
336
+ else {
337
+ node.start(this.sharedAudioContext.currentTime, -delay);
338
+ }
339
+ this.queuedAudioNodes.add(node);
340
+ node.onended = () => this.queuedAudioNodes.delete(node);
341
+ }
342
+ onBufferingChange(callback) {
343
+ this.onBufferingChangeCallback = callback;
344
+ }
345
+ onVideoFrame(callback) {
346
+ this.onVideoFrameCallback = callback;
347
+ if (this.initialized && callback) {
348
+ callback(this.canvas);
349
+ }
350
+ }
351
+ canRenderVideo() {
352
+ return (!this.hasAudio() ||
353
+ (this.audioIteratorStarted &&
354
+ this.audioBufferHealth >= this.HEALTHY_BUFER_THRESHOLD_SECONDS));
355
+ }
356
+ startRenderLoop() {
357
+ if (this.animationFrameId !== null) {
358
+ return;
359
+ }
360
+ this.render();
361
+ }
362
+ stopRenderLoop() {
363
+ if (this.animationFrameId !== null) {
364
+ cancelAnimationFrame(this.animationFrameId);
365
+ this.animationFrameId = null;
366
+ }
367
+ }
368
+ shouldRenderFrame() {
369
+ return (!this.isBuffering &&
370
+ this.canRenderVideo() &&
371
+ this.nextFrame !== null &&
372
+ this.getAdjustedTimestamp(this.nextFrame.timestamp) <=
373
+ this.getPlaybackTime());
374
+ }
375
+ drawCurrentFrame() {
376
+ this.context.drawImage(this.nextFrame.canvas, 0, 0);
377
+ if (this.onVideoFrameCallback) {
378
+ this.onVideoFrameCallback(this.canvas);
379
+ }
380
+ this.nextFrame = null;
381
+ this.updateNextFrame();
382
+ }
383
+ setBufferingState(isBuffering) {
384
+ if (this.isBuffering !== isBuffering) {
385
+ this.isBuffering = isBuffering;
386
+ if (isBuffering) {
387
+ this.bufferingStartedAtMs = performance.now();
388
+ this.onBufferingChangeCallback?.(true);
389
+ }
390
+ else {
391
+ this.bufferingStartedAtMs = null;
392
+ this.onBufferingChangeCallback?.(false);
393
+ }
394
+ }
395
+ }
396
+ maybeResumeFromBuffering(currentBufferDuration) {
397
+ if (!this.isCurrentlyBuffering())
398
+ return;
399
+ const now = performance.now();
400
+ const bufferingDuration = now - this.bufferingStartedAtMs;
401
+ const minTimeElapsed = bufferingDuration >= this.minBufferingTimeoutMs;
402
+ const bufferHealthy = currentBufferDuration >= this.HEALTHY_BUFER_THRESHOLD_SECONDS;
403
+ if (minTimeElapsed && bufferHealthy) {
404
+ Internals.Log.trace({ logLevel: this.logLevel, tag: '@remotion/media' }, `[MediaPlayer] Resuming from buffering after ${bufferingDuration}ms - buffer recovered`);
405
+ this.setBufferingState(false);
406
+ }
407
+ }
408
+ maybeForceResumeFromBuffering() {
409
+ if (!this.isCurrentlyBuffering())
410
+ return;
411
+ const now = performance.now();
412
+ const bufferingDuration = now - this.bufferingStartedAtMs;
413
+ const forceTimeout = bufferingDuration > this.minBufferingTimeoutMs * 10;
414
+ if (forceTimeout) {
415
+ Internals.Log.trace({ logLevel: this.logLevel, tag: '@remotion/media' }, `[MediaPlayer] Force resuming from buffering after ${bufferingDuration}ms`);
416
+ this.setBufferingState(false);
417
+ }
418
+ }
419
+ }