@remotion/media 4.0.364 → 4.0.366

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,116 +1,505 @@
  // src/audio/audio.tsx
- import { Internals as Internals13, useRemotionEnvironment as useRemotionEnvironment2 } from "remotion";
+ import { Internals as Internals14, useRemotionEnvironment as useRemotionEnvironment2 } from "remotion";

  // src/audio/audio-for-preview.tsx
  import { useContext as useContext2, useEffect as useEffect2, useMemo as useMemo2, useRef, useState as useState2 } from "react";
  import {
- Internals as Internals5,
+ Internals as Internals6,
  Audio as RemotionAudio,
  useBufferState,
- useCurrentFrame as useCurrentFrame2
+ useCurrentFrame as useCurrentFrame2,
+ useVideoConfig as useVideoConfig2
  } from "remotion";

+ // src/get-time-in-seconds.ts
+ import { Internals } from "remotion";
+ var getTimeInSeconds = ({
+ loop,
+ mediaDurationInSeconds,
+ unloopedTimeInSeconds,
+ src,
+ trimAfter,
+ trimBefore,
+ fps,
+ playbackRate,
+ ifNoMediaDuration
+ }) => {
+ if (mediaDurationInSeconds === null && loop && ifNoMediaDuration === "fail") {
+ throw new Error(`Could not determine duration of ${src}, but "loop" was set.`);
+ }
+ const loopDuration = loop ? Internals.calculateMediaDuration({
+ trimAfter,
+ mediaDurationInFrames: mediaDurationInSeconds ? mediaDurationInSeconds * fps : Infinity,
+ playbackRate: 1,
+ trimBefore
+ }) / fps : Infinity;
+ const timeInSeconds = unloopedTimeInSeconds * playbackRate % loopDuration;
+ if ((trimAfter ?? null) !== null && !loop) {
+ const time = (trimAfter - (trimBefore ?? 0)) / fps;
+ if (timeInSeconds >= time) {
+ return null;
+ }
+ }
+ return timeInSeconds + (trimBefore ?? 0) / fps;
+ };
+
  // src/media-player.ts
- import {
- ALL_FORMATS,
- AudioBufferSink,
- CanvasSink,
- Input,
- UrlSource
- } from "mediabunny";
- import { Internals as Internals2 } from "remotion";
+ import { ALL_FORMATS, Input, UrlSource } from "mediabunny";
+ import { Internals as Internals3 } from "remotion";
+
+ // src/audio-iterator-manager.ts
+ import { AudioBufferSink } from "mediabunny";
+
+ // src/helpers/round-to-4-digits.ts
+ var roundTo4Digits = (timestamp) => {
+ return Math.round(timestamp * 1000) / 1000;
+ };

  // src/audio/audio-preview-iterator.ts
- var HEALTHY_BUFFER_THRESHOLD_SECONDS = 1;
  var makeAudioIterator = (audioSink, startFromSecond) => {
  let destroyed = false;
  const iterator = audioSink.buffers(startFromSecond);
- let audioIteratorStarted = false;
- let audioBufferHealth = 0;
- const queuedAudioNodes = new Set;
+ const queuedAudioNodes = [];
+ const audioChunksForAfterResuming = [];
  const cleanupAudioQueue = () => {
  for (const node of queuedAudioNodes) {
- node.stop();
+ node.node.stop();
+ }
+ queuedAudioNodes.length = 0;
+ };
+ let lastReturnedBuffer = null;
+ let iteratorEnded = false;
+ const getNextOrNullIfNotAvailable = async (allowWait) => {
+ const next = iterator.next();
+ const result = allowWait ? await next : await Promise.race([
+ next,
+ new Promise((resolve) => {
+ Promise.resolve().then(() => resolve());
+ })
+ ]);
+ if (!result) {
+ return {
+ type: "need-to-wait-for-it",
+ waitPromise: async () => {
+ const res = await next;
+ if (res.value) {
+ lastReturnedBuffer = res.value;
+ } else {
+ iteratorEnded = true;
+ }
+ return res.value;
+ }
+ };
+ }
+ if (result.value) {
+ lastReturnedBuffer = result.value;
+ } else {
+ iteratorEnded = true;
  }
- queuedAudioNodes.clear();
+ return {
+ type: "got-buffer-or-end",
+ buffer: result.value ?? null
+ };
+ };
+ const tryToSatisfySeek = async (time, allowWait) => {
+ if (lastReturnedBuffer) {
+ const bufferTimestamp = roundTo4Digits(lastReturnedBuffer.timestamp);
+ const bufferEndTimestamp = roundTo4Digits(lastReturnedBuffer.timestamp + lastReturnedBuffer.duration);
+ if (roundTo4Digits(time) < bufferTimestamp) {
+ return {
+ type: "not-satisfied",
+ reason: `iterator is too far, most recently returned ${bufferTimestamp}-${bufferEndTimestamp}, requested ${time}`
+ };
+ }
+ if (roundTo4Digits(time) <= bufferEndTimestamp) {
+ return {
+ type: "satisfied",
+ buffers: [lastReturnedBuffer]
+ };
+ }
+ }
+ if (iteratorEnded) {
+ return {
+ type: "satisfied",
+ buffers: lastReturnedBuffer ? [lastReturnedBuffer] : []
+ };
+ }
+ const toBeReturned = [];
+ while (true) {
+ const buffer = await getNextOrNullIfNotAvailable(allowWait);
+ if (buffer.type === "need-to-wait-for-it") {
+ return {
+ type: "not-satisfied",
+ reason: "iterator did not have buffer ready"
+ };
+ }
+ if (buffer.type === "got-buffer-or-end") {
+ if (buffer.buffer === null) {
+ iteratorEnded = true;
+ return {
+ type: "satisfied",
+ buffers: lastReturnedBuffer ? [lastReturnedBuffer] : []
+ };
+ }
+ const bufferTimestamp = roundTo4Digits(buffer.buffer.timestamp);
+ const bufferEndTimestamp = roundTo4Digits(buffer.buffer.timestamp + buffer.buffer.duration);
+ const timestamp = roundTo4Digits(time);
+ if (bufferTimestamp <= timestamp && bufferEndTimestamp > timestamp) {
+ return {
+ type: "satisfied",
+ buffers: [...toBeReturned, buffer.buffer]
+ };
+ }
+ toBeReturned.push(buffer.buffer);
+ continue;
+ }
+ throw new Error("Unreachable");
+ }
+ };
+ const removeAndReturnAllQueuedAudioNodes = () => {
+ const nodes = queuedAudioNodes.slice();
+ for (const node of nodes) {
+ node.node.stop();
+ }
+ queuedAudioNodes.length = 0;
+ return nodes;
+ };
+ const addChunkForAfterResuming = (buffer, timestamp) => {
+ audioChunksForAfterResuming.push({ buffer, timestamp });
+ };
+ const moveQueuedChunksToPauseQueue = () => {
+ const toQueue = removeAndReturnAllQueuedAudioNodes();
+ for (const chunk of toQueue) {
+ addChunkForAfterResuming(chunk.buffer, chunk.timestamp);
+ }
+ };
+ const getNumberOfChunksAfterResuming = () => {
+ return audioChunksForAfterResuming.length;
  };
  return {
- cleanupAudioQueue,
  destroy: () => {
  cleanupAudioQueue();
  destroyed = true;
  iterator.return().catch(() => {
  return;
  });
+ audioChunksForAfterResuming.length = 0;
  },
- isReadyToPlay: () => {
- return audioIteratorStarted && audioBufferHealth > 0;
- },
- setAudioIteratorStarted: (started) => {
- audioIteratorStarted = started;
- },
- getNext: () => {
- return iterator.next();
- },
- setAudioBufferHealth: (health) => {
- audioBufferHealth = health;
+ getNext: async () => {
+ const next = await iterator.next();
+ if (next.value) {
+ lastReturnedBuffer = next.value;
+ } else {
+ iteratorEnded = true;
+ }
+ return next;
  },
  isDestroyed: () => {
  return destroyed;
  },
- addQueuedAudioNode: (node) => {
- queuedAudioNodes.add(node);
+ addQueuedAudioNode: (node, timestamp, buffer) => {
+ queuedAudioNodes.push({ node, timestamp, buffer });
  },
  removeQueuedAudioNode: (node) => {
- queuedAudioNodes.delete(node);
+ const index = queuedAudioNodes.findIndex((n) => n.node === node);
+ if (index !== -1) {
+ queuedAudioNodes.splice(index, 1);
+ }
+ },
+ getAndClearAudioChunksForAfterResuming: () => {
+ const chunks = audioChunksForAfterResuming.slice();
+ audioChunksForAfterResuming.length = 0;
+ return chunks;
+ },
+ getQueuedPeriod: (pendingBuffers) => {
+ let until = -Infinity;
+ let from = Infinity;
+ for (const buffer of pendingBuffers) {
+ until = Math.max(until, buffer.timestamp + buffer.duration);
+ from = Math.min(from, buffer.timestamp);
+ }
+ for (const node of queuedAudioNodes) {
+ until = Math.max(until, node.timestamp + node.buffer.duration);
+ from = Math.min(from, node.timestamp);
+ }
+ for (const chunk of audioChunksForAfterResuming) {
+ until = Math.max(until, chunk.timestamp + chunk.buffer.duration);
+ from = Math.min(from, chunk.timestamp);
+ }
+ if (!Number.isFinite(from) || !Number.isFinite(until)) {
+ return null;
+ }
+ return {
+ from,
+ until
+ };
+ },
+ tryToSatisfySeek,
+ addChunkForAfterResuming,
+ moveQueuedChunksToPauseQueue,
+ getNumberOfChunksAfterResuming
+ };
+ };
+ var isAlreadyQueued = (time, queuedPeriod) => {
+ if (!queuedPeriod) {
+ return false;
+ }
+ return time >= queuedPeriod.from && time < queuedPeriod.until;
+ };
+
+ // src/audio-iterator-manager.ts
+ var audioIteratorManager = ({
+ audioTrack,
+ delayPlaybackHandleIfNotPremounting,
+ sharedAudioContext
+ }) => {
+ let muted = false;
+ let currentVolume = 1;
+ const gainNode = sharedAudioContext.createGain();
+ gainNode.connect(sharedAudioContext.destination);
+ const audioSink = new AudioBufferSink(audioTrack);
+ let audioBufferIterator = null;
+ let audioIteratorsCreated = 0;
+ const scheduleAudioChunk = ({
+ buffer,
+ mediaTimestamp,
+ playbackRate,
+ scheduleAudioNode
+ }) => {
+ if (!audioBufferIterator) {
+ throw new Error("Audio buffer iterator not found");
+ }
+ const node = sharedAudioContext.createBufferSource();
+ node.buffer = buffer;
+ node.playbackRate.value = playbackRate;
+ node.connect(gainNode);
+ scheduleAudioNode(node, mediaTimestamp);
+ const iterator = audioBufferIterator;
+ iterator.addQueuedAudioNode(node, mediaTimestamp, buffer);
+ node.onended = () => {
+ setTimeout(() => {
+ iterator.removeQueuedAudioNode(node);
+ }, 30);
+ };
+ };
+ const onAudioChunk = ({
+ getIsPlaying,
+ buffer,
+ playbackRate,
+ scheduleAudioNode
+ }) => {
+ if (getIsPlaying()) {
+ scheduleAudioChunk({
+ buffer: buffer.buffer,
+ mediaTimestamp: buffer.timestamp,
+ playbackRate,
+ scheduleAudioNode
+ });
+ } else {
+ if (!audioBufferIterator) {
+ throw new Error("Audio buffer iterator not found");
+ }
+ audioBufferIterator.addChunkForAfterResuming(buffer.buffer, buffer.timestamp);
  }
  };
+ const startAudioIterator = async ({
+ nonce,
+ playbackRate,
+ startFromSecond,
+ getIsPlaying,
+ scheduleAudioNode
+ }) => {
+ audioBufferIterator?.destroy();
+ const delayHandle = delayPlaybackHandleIfNotPremounting();
+ const iterator = makeAudioIterator(audioSink, startFromSecond);
+ audioIteratorsCreated++;
+ audioBufferIterator = iterator;
+ for (let i = 0;i < 3; i++) {
+ const result = await iterator.getNext();
+ if (iterator.isDestroyed()) {
+ delayHandle.unblock();
+ return;
+ }
+ if (nonce.isStale()) {
+ delayHandle.unblock();
+ return;
+ }
+ if (!result.value) {
+ delayHandle.unblock();
+ return;
+ }
+ onAudioChunk({
+ getIsPlaying,
+ buffer: result.value,
+ playbackRate,
+ scheduleAudioNode
+ });
+ }
+ delayHandle.unblock();
+ };
+ const pausePlayback = () => {
+ if (!audioBufferIterator) {
+ return;
+ }
+ audioBufferIterator.moveQueuedChunksToPauseQueue();
+ };
+ const seek = async ({
+ newTime,
+ nonce,
+ fps,
+ playbackRate,
+ getIsPlaying,
+ scheduleAudioNode
+ }) => {
+ if (!audioBufferIterator) {
+ await startAudioIterator({
+ nonce,
+ playbackRate,
+ startFromSecond: newTime,
+ getIsPlaying,
+ scheduleAudioNode
+ });
+ return;
+ }
+ const currentTimeIsAlreadyQueued = isAlreadyQueued(newTime, audioBufferIterator.getQueuedPeriod([]));
+ const toBeScheduled = [];
+ if (!currentTimeIsAlreadyQueued) {
+ const audioSatisfyResult = await audioBufferIterator.tryToSatisfySeek(newTime, false);
+ if (nonce.isStale()) {
+ return;
+ }
+ if (audioSatisfyResult.type === "not-satisfied") {
+ await startAudioIterator({
+ nonce,
+ playbackRate,
+ startFromSecond: newTime,
+ getIsPlaying,
+ scheduleAudioNode
+ });
+ return;
+ }
+ toBeScheduled.push(...audioSatisfyResult.buffers);
+ }
+ const nextTime = newTime + 1 / fps * playbackRate + 1 / fps * playbackRate;
+ const nextIsAlreadyQueued = isAlreadyQueued(nextTime, audioBufferIterator.getQueuedPeriod(toBeScheduled));
+ if (!nextIsAlreadyQueued) {
+ const audioSatisfyResult = await audioBufferIterator.tryToSatisfySeek(nextTime, true);
+ if (nonce.isStale()) {
+ return;
+ }
+ if (audioSatisfyResult.type === "not-satisfied") {
+ await startAudioIterator({
+ nonce,
+ playbackRate,
+ startFromSecond: newTime,
+ getIsPlaying,
+ scheduleAudioNode
+ });
+ return;
+ }
+ toBeScheduled.push(...audioSatisfyResult.buffers);
+ }
+ for (const buffer of toBeScheduled) {
+ onAudioChunk({
+ getIsPlaying,
+ buffer,
+ playbackRate,
+ scheduleAudioNode
+ });
+ }
+ };
+ const resumeScheduledAudioChunks = ({
+ playbackRate,
+ scheduleAudioNode
+ }) => {
+ if (!audioBufferIterator) {
+ return;
+ }
+ for (const chunk of audioBufferIterator.getAndClearAudioChunksForAfterResuming()) {
+ scheduleAudioChunk({
+ buffer: chunk.buffer,
+ mediaTimestamp: chunk.timestamp,
+ playbackRate,
+ scheduleAudioNode
+ });
+ }
+ };
+ return {
+ startAudioIterator,
+ resumeScheduledAudioChunks,
+ pausePlayback,
+ getAudioBufferIterator: () => audioBufferIterator,
+ destroy: () => {
+ audioBufferIterator?.destroy();
+ audioBufferIterator = null;
+ },
+ seek,
+ getAudioIteratorsCreated: () => audioIteratorsCreated,
+ setMuted: (newMuted) => {
+ muted = newMuted;
+ gainNode.gain.value = muted ? 0 : currentVolume;
+ },
+ setVolume: (volume) => {
+ currentVolume = Math.max(0, volume);
+ gainNode.gain.value = muted ? 0 : currentVolume;
+ },
+ scheduleAudioChunk
+ };
+ };
+
+ // src/calculate-playbacktime.ts
+ var calculatePlaybackTime = ({
+ audioSyncAnchor,
+ currentTime,
+ playbackRate
+ }) => {
+ const timeSinceAnchor = currentTime - audioSyncAnchor;
+ return timeSinceAnchor * playbackRate;
  };

  // src/debug-overlay/preview-overlay.ts
- var drawPreviewOverlay = (context, stats, audioContextState, audioSyncAnchor) => {
+ var drawPreviewOverlay = ({
+ context,
+ audioTime,
+ audioContextState,
+ audioSyncAnchor,
+ playing,
+ audioIteratorManager: audioIteratorManager2,
+ videoIteratorManager
+ }) => {
+ const lines = [
+ "Debug overlay",
+ `Video iterators created: ${videoIteratorManager?.getVideoIteratorsCreated()}`,
+ `Audio iterators created: ${audioIteratorManager2?.getAudioIteratorsCreated()}`,
+ `Frames rendered: ${videoIteratorManager?.getFramesRendered()}`,
+ `Audio context state: ${audioContextState}`,
+ `Audio time: ${(audioTime - audioSyncAnchor).toFixed(3)}s`
+ ];
+ if (audioIteratorManager2) {
+ const queuedPeriod = audioIteratorManager2.getAudioBufferIterator()?.getQueuedPeriod([]);
+ const numberOfChunksAfterResuming = audioIteratorManager2?.getAudioBufferIterator()?.getNumberOfChunksAfterResuming();
+ if (queuedPeriod) {
+ lines.push(`Audio queued until: ${(queuedPeriod.until - (audioTime - audioSyncAnchor)).toFixed(3)}s`);
+ } else if (numberOfChunksAfterResuming) {
+ lines.push(`Audio chunks for after resuming: ${numberOfChunksAfterResuming}`);
+ }
+ lines.push(`Playing: ${playing}`);
+ }
+ const lineHeight = 30;
+ const boxPaddingX = 10;
+ const boxPaddingY = 10;
+ const boxLeft = 20;
+ const boxTop = 20;
+ const boxWidth = 600;
+ const boxHeight = lines.length * lineHeight + 2 * boxPaddingY;
  context.fillStyle = "rgba(0, 0, 0, 1)";
- context.fillRect(20, 20, 600, 180);
+ context.fillRect(boxLeft, boxTop, boxWidth, boxHeight);
  context.fillStyle = "white";
  context.font = "24px sans-serif";
  context.textBaseline = "top";
- context.fillText(`Debug overlay`, 30, 30);
- context.fillText(`Video iterators created: ${stats.videoIteratorsCreated}`, 30, 60);
- context.fillText(`Frames rendered: ${stats.framesRendered}`, 30, 90);
- context.fillText(`Audio context state: ${audioContextState}`, 30, 120);
- context.fillText(`Audio time: ${audioSyncAnchor.toFixed(3)}s`, 30, 150);
- };
-
- // src/get-time-in-seconds.ts
- import { Internals } from "remotion";
- var getTimeInSeconds = ({
- loop,
- mediaDurationInSeconds,
- unloopedTimeInSeconds,
- src,
- trimAfter,
- trimBefore,
- fps,
- playbackRate,
- ifNoMediaDuration
- }) => {
- if (mediaDurationInSeconds === null && loop && ifNoMediaDuration === "fail") {
- throw new Error(`Could not determine duration of ${src}, but "loop" was set.`);
- }
- const loopDuration = loop ? Internals.calculateMediaDuration({
- trimAfter,
- mediaDurationInFrames: mediaDurationInSeconds ? mediaDurationInSeconds * fps : Infinity,
- playbackRate: 1,
- trimBefore
- }) / fps : Infinity;
- const timeInSeconds = unloopedTimeInSeconds * playbackRate % loopDuration;
- if ((trimAfter ?? null) !== null && !loop) {
- const time = (trimAfter - (trimBefore ?? 0)) / fps;
- if (timeInSeconds >= time) {
- return null;
- }
+ for (let i = 0;i < lines.length; i++) {
+ context.fillText(lines[i], boxLeft + boxPaddingX, boxTop + boxPaddingY + i * lineHeight);
  }
- return timeInSeconds + (trimBefore ?? 0) / fps;
  };

  // src/is-network-error.ts
@@ -121,37 +510,25 @@ function isNetworkError(error) {
  return false;
  }

- // src/video/timeout-utils.ts
- var sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
-
- class TimeoutError extends Error {
- constructor(message = "Operation timed out") {
- super(message);
- this.name = "TimeoutError";
- }
- }
- function withTimeout(promise, timeoutMs, errorMessage = "Operation timed out") {
- let timeoutId = null;
- const timeoutPromise = new Promise((_, reject) => {
- timeoutId = window.setTimeout(() => {
- reject(new TimeoutError(errorMessage));
- }, timeoutMs);
- });
- return Promise.race([
- promise.finally(() => {
- if (timeoutId) {
- clearTimeout(timeoutId);
- }
- }),
- timeoutPromise
- ]);
- }
-
- // src/helpers/round-to-4-digits.ts
- var roundTo4Digits = (timestamp) => {
- return Math.round(timestamp * 1000) / 1000;
+ // src/nonce-manager.ts
+ var makeNonceManager = () => {
+ let nonce = 0;
+ const createAsyncOperation = () => {
+ nonce++;
+ const currentNonce = nonce;
+ return {
+ isStale: () => nonce !== currentNonce
+ };
+ };
+ return {
+ createAsyncOperation
+ };
  };

+ // src/video-iterator-manager.ts
+ import { CanvasSink } from "mediabunny";
+ import { Internals as Internals2 } from "remotion";
+
  // src/video/video-preview-iterator.ts
  var createVideoIterator = (timeToSeek, videoSink) => {
  let destroyed = false;
@@ -275,43 +652,115 @@ var createVideoIterator = (timeToSeek, videoSink) => {
  };
  };

- // src/media-player.ts
- var AUDIO_BUFFER_TOLERANCE_THRESHOLD = 0.1;
+ // src/video-iterator-manager.ts
+ var videoIteratorManager = ({
+ delayPlaybackHandleIfNotPremounting,
+ canvas,
+ context,
+ drawDebugOverlay,
+ logLevel,
+ getOnVideoFrameCallback,
+ videoTrack
+ }) => {
+ let videoIteratorsCreated = 0;
+ let videoFrameIterator = null;
+ let framesRendered = 0;
+ canvas.width = videoTrack.displayWidth;
+ canvas.height = videoTrack.displayHeight;
+ const canvasSink = new CanvasSink(videoTrack, {
+ poolSize: 2,
+ fit: "contain",
+ alpha: true
+ });
+ const drawFrame = (frame) => {
+ context.clearRect(0, 0, canvas.width, canvas.height);
+ context.drawImage(frame.canvas, 0, 0);
+ framesRendered++;
+ drawDebugOverlay();
+ const callback = getOnVideoFrameCallback();
+ if (callback) {
+ callback(canvas);
+ }
+ Internals2.Log.trace({ logLevel, tag: "@remotion/media" }, `[MediaPlayer] Drew frame ${frame.timestamp.toFixed(3)}s`);
+ };
+ const startVideoIterator = async (timeToSeek, nonce) => {
+ videoFrameIterator?.destroy();
+ const iterator = createVideoIterator(timeToSeek, canvasSink);
+ videoIteratorsCreated++;
+ videoFrameIterator = iterator;
+ const delayHandle = delayPlaybackHandleIfNotPremounting();
+ const frameResult = await iterator.getNext();
+ delayHandle.unblock();
+ if (iterator.isDestroyed()) {
+ return;
+ }
+ if (nonce.isStale()) {
+ return;
+ }
+ if (videoFrameIterator.isDestroyed()) {
+ return;
+ }
+ if (!frameResult.value) {
+ return;
+ }
+ drawFrame(frameResult.value);
+ };
+ const seek = async ({ newTime, nonce }) => {
+ if (!videoFrameIterator) {
+ return;
+ }
+ const videoSatisfyResult = await videoFrameIterator.tryToSatisfySeek(newTime);
+ if (videoSatisfyResult.type === "satisfied") {
+ drawFrame(videoSatisfyResult.frame);
+ return;
+ }
+ if (nonce.isStale()) {
+ return;
+ }
+ startVideoIterator(newTime, nonce).catch(() => {});
+ };
+ return {
+ startVideoIterator,
+ getVideoIteratorsCreated: () => videoIteratorsCreated,
+ seek,
+ destroy: () => {
+ videoFrameIterator?.destroy();
+ context.clearRect(0, 0, canvas.width, canvas.height);
+ videoFrameIterator = null;
+ },
+ getVideoFrameIterator: () => videoFrameIterator,
+ drawFrame,
+ getFramesRendered: () => framesRendered
+ };
+ };

+ // src/media-player.ts
  class MediaPlayer {
  canvas;
  context;
  src;
  logLevel;
  playbackRate;
+ globalPlaybackRate;
  audioStreamIndex;
- canvasSink = null;
- videoFrameIterator = null;
- debugStats = {
- videoIteratorsCreated: 0,
- framesRendered: 0
- };
- audioSink = null;
- audioBufferIterator = null;
- gainNode = null;
- currentVolume = 1;
  sharedAudioContext;
+ audioIteratorManager = null;
+ videoIteratorManager = null;
  audioSyncAnchor = 0;
  playing = false;
- muted = false;
  loop = false;
  fps;
  trimBefore;
  trimAfter;
- initialized = false;
  totalDuration;
- isBuffering = false;
- onBufferingChangeCallback;
- mediaEnded = false;
  debugOverlay = false;
- onVideoFrameCallback;
+ nonceManager;
+ onVideoFrameCallback = null;
  initializationPromise = null;
  bufferState;
+ isPremounting;
+ isPostmounting;
+ seekPromiseChain = Promise.resolve();
  constructor({
  canvas,
  src,
@@ -321,16 +770,20 @@ class MediaPlayer {
  trimBefore,
  trimAfter,
  playbackRate,
+ globalPlaybackRate,
  audioStreamIndex,
  fps,
  debugOverlay,
- bufferState
+ bufferState,
+ isPremounting,
+ isPostmounting
  }) {
  this.canvas = canvas ?? null;
  this.src = src;
  this.logLevel = logLevel ?? window.remotion_logLevel;
  this.sharedAudioContext = sharedAudioContext;
  this.playbackRate = playbackRate;
+ this.globalPlaybackRate = globalPlaybackRate;
  this.loop = loop;
  this.trimBefore = trimBefore;
  this.trimAfter = trimAfter;
@@ -338,6 +791,13 @@ class MediaPlayer {
  this.fps = fps;
  this.debugOverlay = debugOverlay;
  this.bufferState = bufferState;
+ this.isPremounting = isPremounting;
+ this.isPostmounting = isPostmounting;
+ this.nonceManager = makeNonceManager();
+ this.input = new Input({
+ source: new UrlSource(this.src),
+ formats: ALL_FORMATS
+ });
  if (canvas) {
  const context = canvas.getContext("2d", {
  alpha: true,
@@ -351,18 +811,9 @@ class MediaPlayer {
  this.context = null;
  }
  }
- input = null;
- isReady() {
- return this.initialized && Boolean(this.sharedAudioContext) && !this.input?.disposed;
- }
- hasAudio() {
- return Boolean(this.audioSink && this.sharedAudioContext && this.gainNode);
- }
- isCurrentlyBuffering() {
- return this.isBuffering && Boolean(this.bufferingStartedAtMs);
- }
+ input;
  isDisposalError() {
- return this.input?.disposed === true;
+ return this.input.disposed === true;
  }
  initialize(startTimeUnresolved) {
  const promise = this._initialize(startTimeUnresolved);
@@ -371,17 +822,11 @@ class MediaPlayer {
  }
  async _initialize(startTimeUnresolved) {
  try {
- const urlSource = new UrlSource(this.src);
- const input = new Input({
- source: urlSource,
- formats: ALL_FORMATS
- });
- this.input = input;
- if (input.disposed) {
+ if (this.input.disposed) {
  return { type: "disposed" };
  }
  try {
- await input.getFormat();
+ await this.input.getFormat();
  } catch (error) {
  if (this.isDisposalError()) {
  return { type: "disposed" };
@@ -390,14 +835,17 @@ class MediaPlayer {
  if (isNetworkError(err)) {
  throw error;
  }
- Internals2.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Failed to recognize format for ${this.src}`, error);
+ Internals3.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Failed to recognize format for ${this.src}`, error);
  return { type: "unknown-container-format" };
  }
  const [durationInSeconds, videoTrack, audioTracks] = await Promise.all([
- input.computeDuration(),
- input.getPrimaryVideoTrack(),
- input.getAudioTracks()
+ this.input.computeDuration(),
+ this.input.getPrimaryVideoTrack(),
+ this.input.getAudioTracks()
  ]);
+ if (this.input.disposed) {
+ return { type: "disposed" };
+ }
  this.totalDuration = durationInSeconds;
  const audioTrack = audioTracks[this.audioStreamIndex] ?? null;
  if (!videoTrack && !audioTrack) {
@@ -408,18 +856,18 @@ class MediaPlayer {
  if (!canDecode) {
  return { type: "cannot-decode" };
  }
- this.canvasSink = new CanvasSink(videoTrack, {
- poolSize: 2,
- fit: "contain",
- alpha: true
+ if (this.input.disposed) {
+ return { type: "disposed" };
+ }
+ this.videoIteratorManager = videoIteratorManager({
+ videoTrack,
+ delayPlaybackHandleIfNotPremounting: this.delayPlaybackHandleIfNotPremounting,
+ context: this.context,
+ canvas: this.canvas,
+ getOnVideoFrameCallback: () => this.onVideoFrameCallback,
+ logLevel: this.logLevel,
+ drawDebugOverlay: this.drawDebugOverlay
  });
- this.canvas.width = videoTrack.displayWidth;
- this.canvas.height = videoTrack.displayHeight;
- }
- if (audioTrack && this.sharedAudioContext) {
- this.audioSink = new AudioBufferSink(audioTrack);
- this.gainNode = this.sharedAudioContext.createGain();
- this.gainNode.connect(this.sharedAudioContext.destination);
  }
  const startTime = getTimeInSeconds({
  unloopedTimeInSeconds: startTimeUnresolved,
@@ -433,53 +881,46 @@ class MediaPlayer {
  src: this.src
  });
  if (startTime === null) {
- this.clearCanvas();
- return { type: "success", durationInSeconds: this.totalDuration };
+ throw new Error(`should have asserted that the time is not null`);
  }
- if (this.sharedAudioContext) {
- this.audioSyncAnchor = this.sharedAudioContext.currentTime - startTime;
+ this.setPlaybackTime(startTime, this.playbackRate * this.globalPlaybackRate);
+ if (audioTrack) {
+ this.audioIteratorManager = audioIteratorManager({
+ audioTrack,
+ delayPlaybackHandleIfNotPremounting: this.delayPlaybackHandleIfNotPremounting,
+ sharedAudioContext: this.sharedAudioContext
+ });
  }
- this.initialized = true;
+ const nonce = this.nonceManager.createAsyncOperation();
  try {
- this.startAudioIterator(startTime);
- await this.startVideoIterator(startTime, this.currentSeekNonce);
+ if (this.audioIteratorManager) {
+ this.audioIteratorManager.startAudioIterator({
+ nonce,
+ playbackRate: this.playbackRate * this.globalPlaybackRate,
+ startFromSecond: startTime,
+ getIsPlaying: () => this.playing,
+ scheduleAudioNode: this.scheduleAudioNode
+ });
+ }
+ await this.videoIteratorManager?.startVideoIterator(startTime, nonce);
  } catch (error) {
  if (this.isDisposalError()) {
  return { type: "disposed" };
  }
- Internals2.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to start audio and video iterators", error);
+ Internals3.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to start audio and video iterators", error);
  }
  return { type: "success", durationInSeconds };
  } catch (error) {
  const err = error;
  if (isNetworkError(err)) {
- Internals2.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Network/CORS error for ${this.src}`, err);
+ Internals3.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Network/CORS error for ${this.src}`, err);
  return { type: "network-error" };
  }
- Internals2.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to initialize", error);
+ Internals3.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to initialize", error);
  throw error;
  }
  }
- clearCanvas() {
- if (this.context && this.canvas) {
- this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
- }
- }
- currentSeekNonce = 0;
- seekPromiseChain = Promise.resolve();
  async seekTo(time) {
- this.currentSeekNonce++;
- const nonce = this.currentSeekNonce;
- await this.seekPromiseChain;
- this.seekPromiseChain = this.seekToDoNotCallDirectly(time, nonce);
- await this.seekPromiseChain;
- }
- async seekToDoNotCallDirectly(time, nonce) {
- if (nonce !== this.currentSeekNonce) {
- return;
- }
- if (!this.isReady())
- return;
  const newTime = getTimeInSeconds({
  unloopedTimeInSeconds: time,
  playbackRate: this.playbackRate,
@@ -492,278 +933,189 @@ class MediaPlayer {
  src: this.src
  });
  if (newTime === null) {
- this.videoFrameIterator?.destroy();
- this.videoFrameIterator = null;
- this.clearCanvas();
- this.audioBufferIterator?.destroy();
- this.audioBufferIterator = null;
+ throw new Error(`should have asserted that the time is not null`);
+ }
+ const nonce = this.nonceManager.createAsyncOperation();
+ await this.seekPromiseChain;
+ this.seekPromiseChain = this.seekToDoNotCallDirectly(newTime, nonce);
+ await this.seekPromiseChain;
+ }
+ async seekToDoNotCallDirectly(newTime, nonce) {
+ if (nonce.isStale()) {
  return;
  }
  const currentPlaybackTime = this.getPlaybackTime();
  if (currentPlaybackTime === newTime) {
  return;
  }
- const satisfyResult = await this.videoFrameIterator?.tryToSatisfySeek(newTime);
- if (satisfyResult?.type === "satisfied") {
- this.drawFrame(satisfyResult.frame);
- return;
- }
- if (this.currentSeekNonce !== nonce) {
- return;
+ const newAudioSyncAnchor = this.sharedAudioContext.currentTime - newTime / (this.playbackRate * this.globalPlaybackRate);
+ const diff = Math.abs(newAudioSyncAnchor - this.audioSyncAnchor);
+ if (diff > 0.04) {
+ this.setPlaybackTime(newTime, this.playbackRate * this.globalPlaybackRate);
  }
- this.mediaEnded = false;
- this.audioSyncAnchor = this.sharedAudioContext.currentTime - newTime;
- this.startAudioIterator(newTime);
- this.startVideoIterator(newTime, nonce);
+ await this.videoIteratorManager?.seek({
+ newTime,
+ nonce
+ });
+ await this.audioIteratorManager?.seek({
+ newTime,
+ nonce,
+ fps: this.fps,
+ playbackRate: this.playbackRate * this.globalPlaybackRate,
+ getIsPlaying: () => this.playing,
+ scheduleAudioNode: this.scheduleAudioNode
+ });
  }
- async play() {
- if (!this.isReady())
- return;
- if (!this.playing) {
- if (this.sharedAudioContext.state === "suspended") {
- await this.sharedAudioContext.resume();
- }
- this.playing = true;
+ async play(time) {
+ const newTime = getTimeInSeconds({
+ unloopedTimeInSeconds: time,
+ playbackRate: this.playbackRate,
+ loop: this.loop,
+ trimBefore: this.trimBefore,
+ trimAfter: this.trimAfter,
+ mediaDurationInSeconds: this.totalDuration ?? null,
+ fps: this.fps,
+ ifNoMediaDuration: "infinity",
+ src: this.src
+ });
+ if (newTime === null) {
+ throw new Error(`should have asserted that the time is not null`);
+ }
+ this.setPlaybackTime(newTime, this.playbackRate * this.globalPlaybackRate);
+ this.playing = true;
+ if (this.audioIteratorManager) {
+ this.audioIteratorManager.resumeScheduledAudioChunks({
+ playbackRate: this.playbackRate * this.globalPlaybackRate,
+ scheduleAudioNode: this.scheduleAudioNode
+ });
+ }
+ if (this.sharedAudioContext.state === "suspended") {
+ await this.sharedAudioContext.resume();
  }
+ this.drawDebugOverlay();
  }
+ delayPlaybackHandleIfNotPremounting = () => {
+ if (this.isPremounting || this.isPostmounting) {
+ return {
+ unblock: () => {}
+ };
+ }
+ return this.bufferState.delayPlayback();
+ };
  pause() {
  this.playing = false;
- this.audioBufferIterator?.cleanupAudioQueue();
+ this.audioIteratorManager?.pausePlayback();
+ this.drawDebugOverlay();
  }
  setMuted(muted) {
- this.muted = muted;
- if (this.gainNode) {
- this.gainNode.gain.value = muted ? 0 : this.currentVolume;
- }
+ this.audioIteratorManager?.setMuted(muted);
  }
  setVolume(volume) {
- if (!this.gainNode) {
+ if (!this.audioIteratorManager) {
  return;
  }
- const appliedVolume = Math.max(0, volume);
- this.currentVolume = appliedVolume;
- if (!this.muted) {
- this.gainNode.gain.value = appliedVolume;
- }
+ this.audioIteratorManager.setVolume(volume);
+ }
+ setTrimBefore(trimBefore) {
+ this.trimBefore = trimBefore;
+ }
+ setTrimAfter(trimAfter) {
+ this.trimAfter = trimAfter;
  }
  setDebugOverlay(debugOverlay) {
  this.debugOverlay = debugOverlay;
  }
+ updateAfterPlaybackRateChange() {
+ if (!this.audioIteratorManager) {
+ return;
+ }
+ this.setPlaybackTime(this.getPlaybackTime(), this.playbackRate * this.globalPlaybackRate);
+ const iterator = this.audioIteratorManager.getAudioBufferIterator();
+ if (!iterator) {
+ return;
+ }
+ iterator.moveQueuedChunksToPauseQueue();
+ if (this.playing) {
+ this.audioIteratorManager.resumeScheduledAudioChunks({
+ playbackRate: this.playbackRate * this.globalPlaybackRate,
+ scheduleAudioNode: this.scheduleAudioNode
+ });
+ }
+ }
  setPlaybackRate(rate) {
  this.playbackRate = rate;
+ this.updateAfterPlaybackRateChange();
+ }
+ setGlobalPlaybackRate(rate) {
+ this.globalPlaybackRate = rate;
+ this.updateAfterPlaybackRateChange();
  }
  setFps(fps) {
  this.fps = fps;
  }
+ setIsPremounting(isPremounting) {
+ this.isPremounting = isPremounting;
+ }
+ setIsPostmounting(isPostmounting) {
+ this.isPostmounting = isPostmounting;
+ }
  setLoop(loop) {
  this.loop = loop;
  }
  async dispose() {
- this.initialized = false;
  if (this.initializationPromise) {
  try {
  await this.initializationPromise;
  } catch {}
  }
- this.input?.dispose();
- this.videoFrameIterator?.destroy();
- this.videoFrameIterator = null;
- this.audioBufferIterator?.destroy();
- this.audioBufferIterator = null;
+ this.nonceManager.createAsyncOperation();
+ this.videoIteratorManager?.destroy();
+ this.audioIteratorManager?.destroy();
+ this.input.dispose();
  }
- getPlaybackTime() {
- return this.sharedAudioContext.currentTime - this.audioSyncAnchor;
- }
- scheduleAudioChunk(buffer, mediaTimestamp) {
- const targetTime = mediaTimestamp + this.audioSyncAnchor;
- const delay = targetTime - this.sharedAudioContext.currentTime;
- const node = this.sharedAudioContext.createBufferSource();
- node.buffer = buffer;
- node.playbackRate.value = this.playbackRate;
- node.connect(this.gainNode);
+ scheduleAudioNode = (node, mediaTimestamp) => {
+ const currentTime = this.getPlaybackTime();
+ const delayWithoutPlaybackRate = mediaTimestamp - currentTime;
+ const delay = delayWithoutPlaybackRate / (this.playbackRate * this.globalPlaybackRate);
  if (delay >= 0) {
- node.start(targetTime);
+ node.start(this.sharedAudioContext.currentTime + delay);
  } else {
  node.start(this.sharedAudioContext.currentTime, -delay);
  }
- this.audioBufferIterator?.addQueuedAudioNode(node);
- node.onended = () => this.audioBufferIterator?.removeQueuedAudioNode(node);
+ };
+ getPlaybackTime() {
+ return calculatePlaybackTime({
+ audioSyncAnchor: this.audioSyncAnchor,
+ currentTime: this.sharedAudioContext.currentTime,
+ playbackRate: this.playbackRate * this.globalPlaybackRate
+ });
  }
- onBufferingChange(callback) {
- this.onBufferingChangeCallback = callback;
- return () => {
- if (this.onBufferingChangeCallback === callback) {
- this.onBufferingChangeCallback = undefined;
- }
- };
+ setPlaybackTime(time, playbackRate) {
+ this.audioSyncAnchor = this.sharedAudioContext.currentTime - time / playbackRate;
  }
- onVideoFrame(callback) {
+ setVideoFrameCallback(callback) {
  this.onVideoFrameCallback = callback;
- if (this.initialized && callback && this.canvas) {
- callback(this.canvas);
- }
- return () => {
- if (this.onVideoFrameCallback === callback) {
- this.onVideoFrameCallback = undefined;
- }
- };
  }
- drawFrame = (frame) => {
- if (!this.context) {
- throw new Error("Context not initialized");
- }
- this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
- this.context.drawImage(frame.canvas, 0, 0);
- this.debugStats.framesRendered++;
- this.drawDebugOverlay();
- if (this.onVideoFrameCallback && this.canvas) {
- this.onVideoFrameCallback(this.canvas);
- }
- Internals2.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Drew frame ${frame.timestamp.toFixed(3)}s`);
- };
- startAudioIterator = (startFromSecond) => {
- if (!this.hasAudio())
- return;
- this.audioBufferIterator?.destroy();
- try {
- const iterator = makeAudioIterator(this.audioSink, startFromSecond);
- this.audioBufferIterator = iterator;
- this.runAudioIterator(startFromSecond, iterator);
- } catch (error) {
- if (this.isDisposalError()) {
- return;
- }
- Internals2.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to start audio iterator", error);
- }
- };
- drawDebugOverlay() {
+ drawDebugOverlay = () => {
  if (!this.debugOverlay)
  return;
  if (this.context && this.canvas) {
- drawPreviewOverlay(this.context, this.debugStats, this.sharedAudioContext.state, this.sharedAudioContext.currentTime);
- }
- }
- startVideoIterator = async (timeToSeek, nonce) => {
- if (!this.canvasSink) {
- return;
- }
- this.videoFrameIterator?.destroy();
- const iterator = createVideoIterator(timeToSeek, this.canvasSink);
- this.debugStats.videoIteratorsCreated++;
- this.videoFrameIterator = iterator;
- const delayHandle = this.bufferState?.delayPlayback();
- const frameResult = await iterator.getNext();
- delayHandle?.unblock();
- if (iterator.isDestroyed()) {
- return;
- }
- if (nonce !== this.currentSeekNonce) {
- return;
- }
- if (this.videoFrameIterator.isDestroyed()) {
- return;
- }
- if (frameResult.value) {
- this.audioSyncAnchor = this.sharedAudioContext.currentTime - frameResult.value.timestamp;
- this.drawFrame(frameResult.value);
- } else {}
- };
- bufferingStartedAtMs = null;
- minBufferingTimeoutMs = 500;
- setBufferingState(isBuffering) {
- if (this.isBuffering !== isBuffering) {
- this.isBuffering = isBuffering;
- if (isBuffering) {
- this.bufferingStartedAtMs = performance.now();
- this.onBufferingChangeCallback?.(true);
- } else {
- this.bufferingStartedAtMs = null;
- this.onBufferingChangeCallback?.(false);
- }
- }
- }
- maybeResumeFromBuffering(currentBufferDuration) {
- if (!this.isCurrentlyBuffering())
- return;
- const now = performance.now();
- const bufferingDuration = now - this.bufferingStartedAtMs;
- const minTimeElapsed = bufferingDuration >= this.minBufferingTimeoutMs;
- const bufferHealthy = currentBufferDuration >= HEALTHY_BUFFER_THRESHOLD_SECONDS;
- if (minTimeElapsed && bufferHealthy) {
- Internals2.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Resuming from buffering after ${bufferingDuration}ms - buffer recovered`);
- this.setBufferingState(false);
- }
- }
- runAudioIterator = async (startFromSecond, audioIterator) => {
- if (!this.hasAudio())
- return;
- try {
- let totalBufferDuration = 0;
- let isFirstBuffer = true;
- audioIterator.setAudioIteratorStarted(true);
- while (true) {
- if (audioIterator.isDestroyed()) {
- return;
- }
- const BUFFERING_TIMEOUT_MS = 50;
- let result;
- try {
- result = await withTimeout(audioIterator.getNext(), BUFFERING_TIMEOUT_MS, "Iterator timeout");
- } catch (error) {
- if (error instanceof TimeoutError && !this.mediaEnded) {
- this.setBufferingState(true);
- }
- await sleep(10);
- continue;
- }
- if (result.done || !result.value) {
- this.mediaEnded = true;
- break;
- }
- const { buffer, timestamp, duration } = result.value;
- totalBufferDuration += duration;
- audioIterator.setAudioBufferHealth(Math.max(0, totalBufferDuration / this.playbackRate));
- this.maybeResumeFromBuffering(totalBufferDuration / this.playbackRate);
- if (this.playing) {
- if (isFirstBuffer) {
- this.audioSyncAnchor = this.sharedAudioContext.currentTime - timestamp;
- isFirstBuffer = false;
- }
- if (timestamp < startFromSecond - AUDIO_BUFFER_TOLERANCE_THRESHOLD) {
- continue;
- }
- this.scheduleAudioChunk(buffer, timestamp);
- }
- const playbackTime = this.getPlaybackTime();
- if (playbackTime === null) {
- continue;
- }
- if (timestamp - playbackTime >= 1) {
- await new Promise((resolve) => {
- const check = () => {
- const currentPlaybackTime = this.getPlaybackTime();
- if (currentPlaybackTime !== null && timestamp - currentPlaybackTime < 1) {
- resolve();
- } else {
- requestAnimationFrame(check);
- }
- };
- check();
- });
- }
- }
- } catch (error) {
- if (this.isDisposalError()) {
- return;
- }
- Internals2.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to run audio iterator", error);
+ drawPreviewOverlay({
+ context: this.context,
+ audioTime: this.sharedAudioContext.currentTime,
+ audioContextState: this.sharedAudioContext.state,
+ audioSyncAnchor: this.audioSyncAnchor,
+ audioIteratorManager: this.audioIteratorManager,
+ playing: this.playing,
+ videoIteratorManager: this.videoIteratorManager
+ });
  }
  };
  }

  // src/show-in-timeline.ts
  import { useMemo } from "react";
- import { Internals as Internals3, useVideoConfig } from "remotion";
+ import { Internals as Internals4, useVideoConfig } from "remotion";
  var useLoopDisplay = ({
  loop,
  mediaDurationInSeconds,
@@ -776,7 +1128,7 @@ var useLoopDisplay = ({
  if (!loop || !mediaDurationInSeconds) {
  return;
  }
- const durationInFrames = Internals3.calculateMediaDuration({
+ const durationInFrames = Internals4.calculateMediaDuration({
  mediaDurationInFrames: mediaDurationInSeconds * fps,
  playbackRate,
  trimAfter,
@@ -802,7 +1154,7 @@ var useLoopDisplay = ({

  // src/use-media-in-timeline.ts
  import { useContext, useEffect, useState } from "react";
- import { Internals as Internals4, useCurrentFrame } from "remotion";
+ import { Internals as Internals5, useCurrentFrame } from "remotion";
  var useMediaInTimeline = ({
  volume,
  mediaVolume,
@@ -818,9 +1170,9 @@ var useMediaInTimeline = ({
  trimBefore,
  trimAfter
  }) => {
- const parentSequence = useContext(Internals4.SequenceContext);
- const startsAt = Internals4.useMediaStartsAt();
- const { registerSequence, unregisterSequence } = useContext(Internals4.SequenceManager);
+ const parentSequence = useContext(Internals5.SequenceContext);
+ const startsAt = Internals5.useMediaStartsAt();
+ const { registerSequence, unregisterSequence } = useContext(Internals5.SequenceManager);
  const [sequenceId] = useState(() => String(Math.random()));
  const [mediaId] = useState(() => String(Math.random()));
  const frame = useCurrentFrame();
@@ -832,7 +1184,7 @@ var useMediaInTimeline = ({
  rootId,
  isStudio,
  finalDisplayName
- } = Internals4.useBasicMediaInTimeline({
+ } = Internals5.useBasicMediaInTimeline({
  volume,
  mediaVolume,
  mediaType,
@@ -938,8 +1290,8 @@ var {
  warnAboutTooHighVolume,
  usePreload,
  SequenceContext
- } = Internals5;
- var NewAudioForPreview = ({
+ } = Internals6;
+ var AudioForPreviewAssertedShowing = ({
  src,
  playbackRate,
  logLevel,
@@ -988,6 +1340,8 @@ var NewAudioForPreview = ({
  currentTimeRef.current = currentTime;
  const preloadedSrc = usePreload(src);
  const parentSequence = useContext2(SequenceContext);
+ const isPremounting = Boolean(parentSequence?.premounting);
+ const isPostmounting = Boolean(parentSequence?.postmounting);
  const loopDisplay = useLoopDisplay({
  loop,
  mediaDurationInSeconds: videoConfig.durationInFrames,
@@ -1010,6 +1364,11 @@ var NewAudioForPreview = ({
  trimAfter,
  trimBefore
  });
+ const buffering = useContext2(Internals6.BufferingContextReact);
+ if (!buffering) {
+ throw new Error("useMediaPlayback must be used inside a <BufferingContext>");
+ }
+ const isPlayerBuffering = Internals6.useIsPlayerBuffering(buffering);
  useEffect2(() => {
  if (!sharedAudioContext)
  return;
@@ -1028,7 +1387,10 @@ var NewAudioForPreview = ({
  playbackRate,
  audioStreamIndex: audioStreamIndex ?? 0,
  debugOverlay: false,
- bufferState: buffer
+ bufferState: buffer,
+ isPostmounting,
+ isPremounting,
+ globalPlaybackRate
  });
  mediaPlayerRef.current = player;
  player.initialize(currentTimeRef.current).then((result) => {
@@ -1036,7 +1398,7 @@ var NewAudioForPreview = ({
  if (disallowFallbackToHtml5Audio) {
  throw new Error(`Unknown container format ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
  }
- Internals5.Log.warn({ logLevel, tag: "@remotion/media" }, `Unknown container format for ${preloadedSrc} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Html5Audio>`);
+ Internals6.Log.warn({ logLevel, tag: "@remotion/media" }, `Unknown container format for ${preloadedSrc} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Html5Audio>`);
  setShouldFallbackToNativeAudio(true);
  return;
  }
@@ -1044,7 +1406,7 @@ var NewAudioForPreview = ({
  if (disallowFallbackToHtml5Audio) {
  throw new Error(`Network error fetching ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
  }
- Internals5.Log.warn({ logLevel, tag: "@remotion/media" }, `Network error fetching ${preloadedSrc}, falling back to <Html5Audio>`);
+ Internals6.Log.warn({ logLevel, tag: "@remotion/media" }, `Network error fetching ${preloadedSrc}, falling back to <Html5Audio>`);
  setShouldFallbackToNativeAudio(true);
  return;
  }
@@ -1052,7 +1414,7 @@ var NewAudioForPreview = ({
  if (disallowFallbackToHtml5Audio) {
  throw new Error(`Cannot decode ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
  }
- Internals5.Log.warn({ logLevel, tag: "@remotion/media" }, `Cannot decode ${preloadedSrc}, falling back to <Html5Audio>`);
+ Internals6.Log.warn({ logLevel, tag: "@remotion/media" }, `Cannot decode ${preloadedSrc}, falling back to <Html5Audio>`);
  setShouldFallbackToNativeAudio(true);
  return;
  }
@@ -1060,20 +1422,20 @@ var NewAudioForPreview = ({
  if (disallowFallbackToHtml5Audio) {
  throw new Error(`No video or audio tracks found for ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
  }
- Internals5.Log.warn({ logLevel, tag: "@remotion/media" }, `No video or audio tracks found for ${preloadedSrc}, falling back to <Html5Audio>`);
+ Internals6.Log.warn({ logLevel, tag: "@remotion/media" }, `No video or audio tracks found for ${preloadedSrc}, falling back to <Html5Audio>`);
  setShouldFallbackToNativeAudio(true);
  return;
  }
  if (result.type === "success") {
  setMediaPlayerReady(true);
- Internals5.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewAudioForPreview] MediaPlayer initialized successfully`);
+ Internals6.Log.trace({ logLevel, tag: "@remotion/media" }, `[AudioForPreview] MediaPlayer initialized successfully`);
  }
  }).catch((error) => {
- Internals5.Log.error({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] Failed to initialize MediaPlayer", error);
+ Internals6.Log.error({ logLevel, tag: "@remotion/media" }, "[AudioForPreview] Failed to initialize MediaPlayer", error);
  setShouldFallbackToNativeAudio(true);
  });
  } catch (error) {
- Internals5.Log.error({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] MediaPlayer initialization failed", error);
+ Internals6.Log.error({ logLevel, tag: "@remotion/media" }, "[AudioForPreview] MediaPlayer initialization failed", error);
  setShouldFallbackToNativeAudio(true);
  }
  return () => {
@@ -1082,7 +1444,7 @@ var NewAudioForPreview = ({
  delayHandleRef.current = null;
  }
  if (mediaPlayerRef.current) {
- Internals5.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewAudioForPreview] Disposing MediaPlayer`);
+ Internals6.Log.trace({ logLevel, tag: "@remotion/media" }, `[AudioForPreview] Disposing MediaPlayer`);
  mediaPlayerRef.current.dispose();
  mediaPlayerRef.current = null;
  }
@@ -1101,42 +1463,28 @@ var NewAudioForPreview = ({
  videoConfig.fps,
  audioStreamIndex,
  disallowFallbackToHtml5Audio,
- buffer
+ buffer,
+ isPremounting,
+ isPostmounting,
+ globalPlaybackRate
  ]);
  useEffect2(() => {
  const audioPlayer = mediaPlayerRef.current;
  if (!audioPlayer)
  return;
- if (playing) {
- audioPlayer.play().catch((error) => {
- Internals5.Log.error({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] Failed to play", error);
- });
+ if (playing && !isPlayerBuffering) {
+ audioPlayer.play(currentTimeRef.current);
  } else {
  audioPlayer.pause();
  }
- }, [playing, logLevel, mediaPlayerReady]);
+ }, [isPlayerBuffering, logLevel, playing]);
  useEffect2(() => {
  const audioPlayer = mediaPlayerRef.current;
  if (!audioPlayer || !mediaPlayerReady)
  return;
- audioPlayer.seekTo(currentTime);
- Internals5.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewAudioForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
+ audioPlayer.seekTo(currentTime).catch(() => {});
+ Internals6.Log.trace({ logLevel, tag: "@remotion/media" }, `[AudioForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
  }, [currentTime, logLevel, mediaPlayerReady]);
- useEffect2(() => {
- const audioPlayer = mediaPlayerRef.current;
- if (!audioPlayer || !mediaPlayerReady)
- return;
- audioPlayer.onBufferingChange((newBufferingState) => {
- if (newBufferingState && !delayHandleRef.current) {
- delayHandleRef.current = buffer.delayPlayback();
- Internals5.Log.trace({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] MediaPlayer buffering - blocking Remotion playback");
- } else if (!newBufferingState && delayHandleRef.current) {
- delayHandleRef.current.unblock();
- delayHandleRef.current = null;
- Internals5.Log.trace({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] MediaPlayer unbuffering - unblocking Remotion playback");
- }
- });
- }, [mediaPlayerReady, buffer, logLevel]);
  const effectiveMuted = muted || mediaMuted || userPreferredVolume <= 0;
  useEffect2(() => {
  const audioPlayer = mediaPlayerRef.current;
@@ -1151,14 +1499,20 @@ var NewAudioForPreview = ({
  }
  audioPlayer.setVolume(userPreferredVolume);
  }, [userPreferredVolume, mediaPlayerReady]);
- const effectivePlaybackRate = useMemo2(() => playbackRate * globalPlaybackRate, [playbackRate, globalPlaybackRate]);
  useEffect2(() => {
  const audioPlayer = mediaPlayerRef.current;
  if (!audioPlayer || !mediaPlayerReady) {
  return;
  }
- audioPlayer.setPlaybackRate(effectivePlaybackRate);
- }, [effectivePlaybackRate, mediaPlayerReady]);
+ audioPlayer.setPlaybackRate(playbackRate);
+ }, [playbackRate, mediaPlayerReady]);
+ useEffect2(() => {
+ const audioPlayer = mediaPlayerRef.current;
+ if (!audioPlayer || !mediaPlayerReady) {
+ return;
+ }
+ audioPlayer.setGlobalPlaybackRate(globalPlaybackRate);
+ }, [globalPlaybackRate, mediaPlayerReady]);
  useEffect2(() => {
  const audioPlayer = mediaPlayerRef.current;
  if (!audioPlayer || !mediaPlayerReady) {
@@ -1166,6 +1520,41 @@ var NewAudioForPreview = ({
1166
1520
  }
1167
1521
  audioPlayer.setFps(videoConfig.fps);
1168
1522
  }, [videoConfig.fps, mediaPlayerReady]);
1523
+ useEffect2(() => {
1524
+ const mediaPlayer = mediaPlayerRef.current;
1525
+ if (!mediaPlayer || !mediaPlayerReady) {
1526
+ return;
1527
+ }
1528
+ mediaPlayer.setTrimBefore(trimBefore);
1529
+ }, [trimBefore, mediaPlayerReady]);
1530
+ useEffect2(() => {
1531
+ const mediaPlayer = mediaPlayerRef.current;
1532
+ if (!mediaPlayer || !mediaPlayerReady) {
1533
+ return;
1534
+ }
1535
+ mediaPlayer.setTrimAfter(trimAfter);
1536
+ }, [trimAfter, mediaPlayerReady]);
1537
+ useEffect2(() => {
1538
+ const mediaPlayer = mediaPlayerRef.current;
1539
+ if (!mediaPlayer || !mediaPlayerReady) {
1540
+ return;
1541
+ }
1542
+ mediaPlayer.setLoop(loop);
1543
+ }, [loop, mediaPlayerReady]);
1544
+ useEffect2(() => {
1545
+ const mediaPlayer = mediaPlayerRef.current;
1546
+ if (!mediaPlayer || !mediaPlayerReady) {
1547
+ return;
1548
+ }
1549
+ mediaPlayer.setIsPremounting(isPremounting);
1550
+ }, [isPremounting, mediaPlayerReady]);
1551
+ useEffect2(() => {
1552
+ const mediaPlayer = mediaPlayerRef.current;
1553
+ if (!mediaPlayer || !mediaPlayerReady) {
1554
+ return;
1555
+ }
1556
+ mediaPlayer.setIsPostmounting(isPostmounting);
1557
+ }, [isPostmounting, mediaPlayerReady]);
1169
1558
  if (shouldFallbackToNativeAudio && !disallowFallbackToHtml5Audio) {
1170
1559
  return /* @__PURE__ */ jsx(RemotionAudio, {
1171
1560
  src,
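
Each of the new props is pushed to the player through its own effect gated on `mediaPlayerReady`, so a change to e.g. `trimBefore` only re-runs the setTrimBefore call and nothing else. A condensed sketch of that one-effect-per-setting pattern; the setter names mirror the calls visible above, while the player type is an assumption:

// Illustrative sketch only - not part of @remotion/media.
import {useEffect} from 'react';

type SettableMediaPlayer = {
  setTrimBefore: (v: number | undefined) => void;
  setTrimAfter: (v: number | undefined) => void;
  setLoop: (v: boolean) => void;
};

const useSyncPlayerSetting = <T,>(
  player: SettableMediaPlayer | null,
  ready: boolean,
  value: T,
  apply: (player: SettableMediaPlayer, value: T) => void,
) => {
  useEffect(() => {
    if (!player || !ready) {
      return;
    }
    apply(player, value);
    // Re-runs only when this particular value (or readiness) changes.
  }, [player, ready, value, apply]);
};
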
@@ -1206,7 +1595,34 @@ var AudioForPreview = ({
1206
1595
  fallbackHtml5AudioProps
1207
1596
  }) => {
1208
1597
  const preloadedSrc = usePreload(src);
1209
- return /* @__PURE__ */ jsx(NewAudioForPreview, {
1598
+ const frame = useCurrentFrame2();
1599
+ const videoConfig = useVideoConfig2();
1600
+ const currentTime = frame / videoConfig.fps;
1601
+ const showShow = useMemo2(() => {
1602
+ return getTimeInSeconds({
1603
+ unloopedTimeInSeconds: currentTime,
1604
+ playbackRate: playbackRate ?? 1,
1605
+ loop: loop ?? false,
1606
+ trimBefore,
1607
+ trimAfter,
1608
+ mediaDurationInSeconds: Infinity,
1609
+ fps: videoConfig.fps,
1610
+ ifNoMediaDuration: "infinity",
1611
+ src
1612
+ }) !== null;
1613
+ }, [
1614
+ currentTime,
1615
+ loop,
1616
+ playbackRate,
1617
+ src,
1618
+ trimAfter,
1619
+ trimBefore,
1620
+ videoConfig.fps
1621
+ ]);
1622
+ if (!showShow) {
1623
+ return null;
1624
+ }
1625
+ return /* @__PURE__ */ jsx(AudioForPreviewAssertedShowing, {
1210
1626
  audioStreamIndex: audioStreamIndex ?? 0,
1211
1627
  src: preloadedSrc,
1212
1628
  playbackRate: playbackRate ?? 1,
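
The new wrapper above computes the media time for the current frame via getTimeInSeconds and renders nothing when it resolves to null, so the inner player only mounts while there is something to play. A sketch of that gate, using a stand-in `timeOrNull` helper (assumed semantics: null once the time runs past the trim point while looping is off) in place of the real getTimeInSeconds:

// Illustrative sketch only - not part of @remotion/media.
import React, {useMemo} from 'react';

// Stand-in helper: null once the time passes the trim point and looping is off.
const timeOrNull = (timeInSeconds: number, trimAfterSeconds: number | null, loop: boolean) => {
  if (!loop && trimAfterSeconds !== null && timeInSeconds >= trimAfterSeconds) {
    return null;
  }
  return timeInSeconds;
};

const GatedMedia: React.FC<{
  frame: number;
  fps: number;
  trimAfterSeconds: number | null;
  loop: boolean;
  children: React.ReactNode;
}> = ({frame, fps, trimAfterSeconds, loop, children}) => {
  const show = useMemo(
    () => timeOrNull(frame / fps, trimAfterSeconds, loop) !== null,
    [frame, fps, trimAfterSeconds, loop],
  );
  if (!show) {
    return null; // Skip mounting the player entirely when out of range.
  }
  return <>{children}</>;
};
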
@@ -1231,7 +1647,7 @@ import { useContext as useContext3, useLayoutEffect, useMemo as useMemo3, useSta
1231
1647
  import {
1232
1648
  cancelRender as cancelRender2,
1233
1649
  Html5Audio,
1234
- Internals as Internals12,
1650
+ Internals as Internals13,
1235
1651
  random,
1236
1652
  useCurrentFrame as useCurrentFrame3,
1237
1653
  useDelayRender,
@@ -1362,13 +1778,13 @@ var frameForVolumeProp = ({
1362
1778
  };
1363
1779
 
1364
1780
  // src/caches.ts
1365
- import { cancelRender, Internals as Internals10 } from "remotion";
1781
+ import { cancelRender, Internals as Internals11 } from "remotion";
1366
1782
 
1367
1783
  // src/audio-extraction/audio-manager.ts
1368
- import { Internals as Internals7 } from "remotion";
1784
+ import { Internals as Internals8 } from "remotion";
1369
1785
 
1370
1786
  // src/audio-extraction/audio-iterator.ts
1371
- import { Internals as Internals6 } from "remotion";
1787
+ import { Internals as Internals7 } from "remotion";
1372
1788
 
1373
1789
  // src/audio-extraction/audio-cache.ts
1374
1790
  var makeAudioCache = () => {
@@ -1446,7 +1862,7 @@ var warnAboutMatroskaOnce = (src, logLevel) => {
1446
1862
  return;
1447
1863
  }
1448
1864
  warned[src] = true;
1449
- Internals6.Log.warn({ logLevel, tag: "@remotion/media" }, `Audio from ${src} will need to be read from the beginning. https://www.remotion.dev/docs/media/support#matroska-limitation`);
1865
+ Internals7.Log.warn({ logLevel, tag: "@remotion/media" }, `Audio from ${src} will need to be read from the beginning. https://www.remotion.dev/docs/media/support#matroska-limitation`);
1450
1866
  };
1451
1867
  var makeAudioIterator2 = ({
1452
1868
  audioSampleSink,
@@ -1514,7 +1930,7 @@ var makeAudioIterator2 = ({
1514
1930
  if (openTimestamps.length > 0) {
1515
1931
  const first = openTimestamps[0];
1516
1932
  const last = openTimestamps[openTimestamps.length - 1];
1517
- Internals6.Log.verbose({ logLevel, tag: "@remotion/media" }, "Open audio samples for src", src, `${first.toFixed(3)}...${last.toFixed(3)}`);
1933
+ Internals7.Log.verbose({ logLevel, tag: "@remotion/media" }, "Open audio samples for src", src, `${first.toFixed(3)}...${last.toFixed(3)}`);
1518
1934
  }
1519
1935
  };
1520
1936
  const getCacheStats = () => {
@@ -1611,7 +2027,7 @@ var makeAudioManager = () => {
1611
2027
  if (seenKeys.has(key)) {
1612
2028
  iterator.prepareForDeletion();
1613
2029
  iterators.splice(iterators.indexOf(iterator), 1);
1614
- Internals7.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted duplicate iterator for ${iterator.src}`);
2030
+ Internals8.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted duplicate iterator for ${iterator.src}`);
1615
2031
  }
1616
2032
  seenKeys.add(key);
1617
2033
  }
@@ -1693,7 +2109,7 @@ var makeAudioManager = () => {
1693
2109
  };
1694
2110
 
1695
2111
  // src/video-extraction/keyframe-manager.ts
1696
- import { Internals as Internals9 } from "remotion";
2112
+ import { Internals as Internals10 } from "remotion";
1697
2113
 
1698
2114
  // src/browser-can-use-webgl2.ts
1699
2115
  var browserCanUseWebGl2 = null;
@@ -1734,7 +2150,7 @@ import {
1734
2150
  } from "mediabunny";
1735
2151
 
1736
2152
  // src/video-extraction/keyframe-bank.ts
1737
- import { Internals as Internals8 } from "remotion";
2153
+ import { Internals as Internals9 } from "remotion";
1738
2154
  var makeKeyframeBank = ({
1739
2155
  startTimestampInSeconds,
1740
2156
  endTimestampInSeconds,
@@ -1742,7 +2158,7 @@ var makeKeyframeBank = ({
1742
2158
  logLevel: parentLogLevel,
1743
2159
  src
1744
2160
  }) => {
1745
- Internals8.Log.verbose({ logLevel: parentLogLevel, tag: "@remotion/media" }, `Creating keyframe bank from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
2161
+ Internals9.Log.verbose({ logLevel: parentLogLevel, tag: "@remotion/media" }, `Creating keyframe bank from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
1746
2162
  const frames = {};
1747
2163
  const frameTimestamps = [];
1748
2164
  let lastUsed = Date.now();
@@ -1769,7 +2185,7 @@ var makeKeyframeBank = ({
1769
2185
  }
1770
2186
  }
1771
2187
  if (deletedTimestamps.length > 0) {
1772
- Internals8.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted ${deletedTimestamps.length} frame${deletedTimestamps.length === 1 ? "" : "s"} ${renderTimestampRange(deletedTimestamps)} for src ${src} because it is lower than ${timestampInSeconds}. Remaining: ${renderTimestampRange(frameTimestamps)}`);
2188
+ Internals9.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted ${deletedTimestamps.length} frame${deletedTimestamps.length === 1 ? "" : "s"} ${renderTimestampRange(deletedTimestamps)} for src ${src} because it is lower than ${timestampInSeconds}. Remaining: ${renderTimestampRange(frameTimestamps)}`);
1773
2189
  }
1774
2190
  };
1775
2191
  const hasDecodedEnoughForTimestamp = (timestamp) => {
@@ -1829,7 +2245,7 @@ var makeKeyframeBank = ({
1829
2245
  return await getFrameFromTimestamp(timestamp) !== null;
1830
2246
  };
1831
2247
  const prepareForDeletion = (logLevel) => {
1832
- Internals8.Log.verbose({ logLevel, tag: "@remotion/media" }, `Preparing for deletion of keyframe bank from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
2248
+ Internals9.Log.verbose({ logLevel, tag: "@remotion/media" }, `Preparing for deletion of keyframe bank from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
1833
2249
  sampleIterator.return().then((result) => {
1834
2250
  if (result.value) {
1835
2251
  result.value.close();
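
When a keyframe bank is prepared for deletion above, its sample iterator is shut down with return() and any frame delivered with the final result is closed. A small sketch of that async-iterator teardown pattern, with a hypothetical `Closeable` sample type:

// Illustrative sketch only - not part of @remotion/media.
type Closeable = {close: () => void};

// return() ends the iteration early; a value still in flight is closed so it does not leak.
const stopIterator = async (iterator: AsyncIterator<Closeable, Closeable | undefined>) => {
  const result = await iterator.return?.(undefined);
  if (result?.value) {
    result.value.close();
  }
};
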
@@ -2022,10 +2438,10 @@ var makeKeyframeManager = () => {
2022
2438
  if (size === 0) {
2023
2439
  continue;
2024
2440
  }
2025
- Internals9.Log.verbose({ logLevel, tag: "@remotion/media" }, `Open frames for src ${src}: ${renderTimestampRange(timestamps)}`);
2441
+ Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `Open frames for src ${src}: ${renderTimestampRange(timestamps)}`);
2026
2442
  }
2027
2443
  }
2028
- Internals9.Log.verbose({ logLevel, tag: "@remotion/media" }, `Video cache stats: ${count} open frames, ${totalSize} bytes`);
2444
+ Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `Video cache stats: ${count} open frames, ${totalSize} bytes`);
2029
2445
  };
2030
2446
  const getCacheStats = async () => {
2031
2447
  let count = 0;
@@ -2074,7 +2490,7 @@ var makeKeyframeManager = () => {
2074
2490
  if (mostInThePastBank) {
2075
2491
  const { framesDeleted } = mostInThePastBank.prepareForDeletion(logLevel);
2076
2492
  delete sources[mostInThePastSrc][mostInThePastBank.startTimestampInSeconds];
2077
- Internals9.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted ${framesDeleted} frames for src ${mostInThePastSrc} from ${mostInThePastBank.startTimestampInSeconds}sec to ${mostInThePastBank.endTimestampInSeconds}sec to free up memory.`);
2493
+ Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted ${framesDeleted} frames for src ${mostInThePastSrc} from ${mostInThePastBank.startTimestampInSeconds}sec to ${mostInThePastBank.endTimestampInSeconds}sec to free up memory.`);
2078
2494
  }
2079
2495
  return { finish: false };
2080
2496
  };
@@ -2086,7 +2502,7 @@ var makeKeyframeManager = () => {
2086
2502
  if (finish) {
2087
2503
  break;
2088
2504
  }
2089
- Internals9.Log.verbose({ logLevel, tag: "@remotion/media" }, "Deleted oldest keyframe bank to stay under max cache size", (cacheStats.totalSize / 1024 / 1024).toFixed(1), "out of", (maxCacheSize / 1024 / 1024).toFixed(1));
2505
+ Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, "Deleted oldest keyframe bank to stay under max cache size", (cacheStats.totalSize / 1024 / 1024).toFixed(1), "out of", (maxCacheSize / 1024 / 1024).toFixed(1));
2090
2506
  cacheStats = await getTotalCacheStats();
2091
2507
  }
2092
2508
  };
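
The surrounding hunks keep the eviction strategy: while the total cache size is over the limit, the bank that was used the longest ago is prepared for deletion and the stats are recomputed. A sketch of that least-recently-used eviction loop, with a hypothetical `CacheEntry` shape:

// Illustrative sketch only - not part of @remotion/media.
type CacheEntry = {lastUsed: number; size: number; free: () => void};

const evictUntilUnderLimit = (entries: CacheEntry[], maxBytes: number) => {
  let total = entries.reduce((sum, e) => sum + e.size, 0);
  while (total > maxBytes && entries.length > 0) {
    // Pick the entry that was used the longest ago.
    const oldestIndex = entries.reduce(
      (best, e, i) => (e.lastUsed < entries[best].lastUsed ? i : best),
      0,
    );
    const [oldest] = entries.splice(oldestIndex, 1);
    oldest.free();
    total -= oldest.size;
  }
};
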
@@ -2105,7 +2521,7 @@ var makeKeyframeManager = () => {
2105
2521
  const { endTimestampInSeconds, startTimestampInSeconds } = bank;
2106
2522
  if (endTimestampInSeconds < threshold) {
2107
2523
  bank.prepareForDeletion(logLevel);
2108
- Internals9.Log.verbose({ logLevel, tag: "@remotion/media" }, `[Video] Cleared frames for src ${src} from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
2524
+ Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `[Video] Cleared frames for src ${src} from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
2109
2525
  delete sources[src][startTimeInSeconds];
2110
2526
  } else {
2111
2527
  bank.deleteFramesBeforeTimestamp({
@@ -2149,7 +2565,7 @@ var makeKeyframeManager = () => {
2149
2565
  if (await (await existingBank).hasTimestampInSecond(timestamp)) {
2150
2566
  return existingBank;
2151
2567
  }
2152
- Internals9.Log.verbose({ logLevel, tag: "@remotion/media" }, `Keyframe bank exists but frame at time ${timestamp} does not exist anymore.`);
2568
+ Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `Keyframe bank exists but frame at time ${timestamp} does not exist anymore.`);
2153
2569
  await (await existingBank).prepareForDeletion(logLevel);
2154
2570
  delete sources[src][startTimestampInSeconds];
2155
2571
  const replacementKeybank = getFramesSinceKeyframe({
@@ -2238,20 +2654,20 @@ var getUncachedMaxCacheSize = (logLevel) => {
2238
2654
  if (window.remotion_mediaCacheSizeInBytes > 20000 * 1024 * 1024) {
2239
2655
  cancelRender(new Error(`The maximum value for the "mediaCacheSizeInBytes" prop is 20GB (${20000 * 1024 * 1024}), got: ${window.remotion_mediaCacheSizeInBytes}`));
2240
2656
  }
2241
- Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set using "mediaCacheSizeInBytes": ${(window.remotion_mediaCacheSizeInBytes / 1024 / 1024).toFixed(1)} MB`);
2657
+ Internals11.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set using "mediaCacheSizeInBytes": ${(window.remotion_mediaCacheSizeInBytes / 1024 / 1024).toFixed(1)} MB`);
2242
2658
  return window.remotion_mediaCacheSizeInBytes;
2243
2659
  }
2244
2660
  if (typeof window !== "undefined" && window.remotion_initialMemoryAvailable !== undefined && window.remotion_initialMemoryAvailable !== null) {
2245
2661
  const value = window.remotion_initialMemoryAvailable / 2;
2246
2662
  if (value < 500 * 1024 * 1024) {
2247
- Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on minimum value of 500MB (which is more than half of the available system memory!)`);
2663
+ Internals11.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on minimum value of 500MB (which is more than half of the available system memory!)`);
2248
2664
  return 500 * 1024 * 1024;
2249
2665
  }
2250
2666
  if (value > 20000 * 1024 * 1024) {
2251
- Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on maximum value of 20GB (which is less than half of the available system memory)`);
2667
+ Internals11.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on maximum value of 20GB (which is less than half of the available system memory)`);
2252
2668
  return 20000 * 1024 * 1024;
2253
2669
  }
2254
- Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on available memory (50% of available memory): ${(value / 1024 / 1024).toFixed(1)} MB`);
2670
+ Internals11.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on available memory (50% of available memory): ${(value / 1024 / 1024).toFixed(1)} MB`);
2255
2671
  return value;
2256
2672
  }
2257
2673
  return 1000 * 1000 * 1000;
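
The cache-size resolution above is: an explicit mediaCacheSizeInBytes wins (but may not exceed 20GB), otherwise half of the reported available memory is used, clamped to the 500MB-20GB range, and 1GB is the fallback when neither value is known. As a worked example, 1536MB of available memory yields a 768MB cache, while 600MB of available memory is clamped up to the 500MB floor. A sketch of that policy, assuming plain byte inputs and throwing where the original cancels the render:

// Illustrative sketch only - not part of @remotion/media.
const MB = 1024 * 1024;
const MIN_CACHE = 500 * MB;
const MAX_CACHE = 20000 * MB;

const resolveCacheSize = (
  explicitBytes: number | null,
  availableMemoryBytes: number | null,
): number => {
  if (explicitBytes !== null) {
    if (explicitBytes > MAX_CACHE) {
      throw new Error('The maximum value for "mediaCacheSizeInBytes" is 20GB');
    }
    return explicitBytes;
  }
  if (availableMemoryBytes !== null) {
    // Use half of the available memory, clamped to [500MB, 20GB].
    const half = availableMemoryBytes / 2;
    return Math.min(Math.max(half, MIN_CACHE), MAX_CACHE);
  }
  return 1000 * 1000 * 1000; // 1GB default when nothing is known.
};
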
@@ -2338,12 +2754,12 @@ var convertAudioData = ({
2338
2754
  };
2339
2755
 
2340
2756
  // src/get-sink.ts
2341
- import { Internals as Internals11 } from "remotion";
2757
+ import { Internals as Internals12 } from "remotion";
2342
2758
  var sinkPromises = {};
2343
2759
  var getSink = (src, logLevel) => {
2344
2760
  let promise = sinkPromises[src];
2345
2761
  if (!promise) {
2346
- Internals11.Log.verbose({
2762
+ Internals12.Log.verbose({
2347
2763
  logLevel,
2348
2764
  tag: "@remotion/media"
2349
2765
  }, `Sink for ${src} was not found, creating new sink`);
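
getSink above memoizes one sink promise per src in a module-level map, so repeated callers for the same file reuse the existing sink instead of creating a new one. A minimal sketch of that promise-per-key cache; the types are generic rather than the package's actual sink type:

// Illustrative sketch only - not part of @remotion/media.
const promiseCache: Record<string, Promise<unknown>> = {};

const getOnce = <T,>(key: string, create: () => Promise<T>): Promise<T> => {
  let existing = promiseCache[key] as Promise<T> | undefined;
  if (!existing) {
    // First caller kicks off the work; everyone else reuses the same promise.
    existing = create();
    promiseCache[key] = existing;
  }
  return existing;
};

// Usage (openSink is a hypothetical loader): const sink = await getOnce(src, () => openSink(src));
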
@@ -2836,10 +3252,10 @@ var AudioForRendering = ({
2836
3252
  trimBefore
2837
3253
  }) => {
2838
3254
  const frame = useCurrentFrame3();
2839
- const absoluteFrame = Internals12.useTimelinePosition();
2840
- const videoConfig = Internals12.useUnsafeVideoConfig();
2841
- const { registerRenderAsset, unregisterRenderAsset } = useContext3(Internals12.RenderAssetManager);
2842
- const startsAt = Internals12.useMediaStartsAt();
3255
+ const absoluteFrame = Internals13.useTimelinePosition();
3256
+ const videoConfig = Internals13.useUnsafeVideoConfig();
3257
+ const { registerRenderAsset, unregisterRenderAsset } = useContext3(Internals13.RenderAssetManager);
3258
+ const startsAt = Internals13.useMediaStartsAt();
2843
3259
  const environment = useRemotionEnvironment();
2844
3260
  if (!videoConfig) {
2845
3261
  throw new Error("No video config found");
@@ -2850,7 +3266,7 @@ var AudioForRendering = ({
2850
3266
  const { fps } = videoConfig;
2851
3267
  const { delayRender, continueRender } = useDelayRender();
2852
3268
  const [replaceWithHtml5Audio, setReplaceWithHtml5Audio] = useState3(false);
2853
- const sequenceContext = useContext3(Internals12.SequenceContext);
3269
+ const sequenceContext = useContext3(Internals13.SequenceContext);
2854
3270
  const id = useMemo3(() => `media-video-${random(src)}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [
2855
3271
  src,
2856
3272
  sequenceContext?.cumulatedFrom,
@@ -2895,7 +3311,7 @@ var AudioForRendering = ({
2895
3311
  if (disallowFallbackToHtml5Audio) {
2896
3312
  cancelRender2(new Error(`Unknown container format ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
2897
3313
  }
2898
- Internals12.Log.warn({
3314
+ Internals13.Log.warn({
2899
3315
  logLevel: logLevel ?? window.remotion_logLevel,
2900
3316
  tag: "@remotion/media"
2901
3317
  }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Html5Audio>`);
@@ -2906,7 +3322,7 @@ var AudioForRendering = ({
2906
3322
  if (disallowFallbackToHtml5Audio) {
2907
3323
  cancelRender2(new Error(`Cannot decode ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
2908
3324
  }
2909
- Internals12.Log.warn({
3325
+ Internals13.Log.warn({
2910
3326
  logLevel: logLevel ?? window.remotion_logLevel,
2911
3327
  tag: "@remotion/media"
2912
3328
  }, `Cannot decode ${src}, falling back to <Html5Audio>`);
@@ -2920,7 +3336,7 @@ var AudioForRendering = ({
2920
3336
  if (disallowFallbackToHtml5Audio) {
2921
3337
  cancelRender2(new Error(`Cannot decode ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
2922
3338
  }
2923
- Internals12.Log.warn({
3339
+ Internals13.Log.warn({
2924
3340
  logLevel: logLevel ?? window.remotion_logLevel,
2925
3341
  tag: "@remotion/media"
2926
3342
  }, `Network error fetching ${src}, falling back to <Html5Audio>`);
@@ -2936,12 +3352,12 @@ var AudioForRendering = ({
2936
3352
  frame,
2937
3353
  startsAt
2938
3354
  });
2939
- const volume = Internals12.evaluateVolume({
3355
+ const volume = Internals13.evaluateVolume({
2940
3356
  volume: volumeProp,
2941
3357
  frame: volumePropsFrame,
2942
3358
  mediaVolume: 1
2943
3359
  });
2944
- Internals12.warnAboutTooHighVolume(volume);
3360
+ Internals13.warnAboutTooHighVolume(volume);
2945
3361
  if (audio && volume > 0) {
2946
3362
  applyVolume(audio.data, volume);
2947
3363
  registerRenderAsset({
@@ -3014,7 +3430,7 @@ var AudioForRendering = ({
3014
3430
 
3015
3431
  // src/audio/audio.tsx
3016
3432
  import { jsx as jsx3 } from "react/jsx-runtime";
3017
- var { validateMediaProps } = Internals13;
3433
+ var { validateMediaProps } = Internals14;
3018
3434
  var Audio = (props) => {
3019
3435
  const { name, stack, showInTimeline, ...otherProps } = props;
3020
3436
  const environment = useRemotionEnvironment2();
@@ -3033,10 +3449,10 @@ var Audio = (props) => {
3033
3449
  stack: stack ?? null
3034
3450
  });
3035
3451
  };
3036
- Internals13.addSequenceStackTraces(Audio);
3452
+ Internals14.addSequenceStackTraces(Audio);
3037
3453
 
3038
3454
  // src/video/video.tsx
3039
- import { Internals as Internals16, useRemotionEnvironment as useRemotionEnvironment4 } from "remotion";
3455
+ import { Internals as Internals17, useRemotionEnvironment as useRemotionEnvironment4 } from "remotion";
3040
3456
 
3041
3457
  // src/video/video-for-preview.tsx
3042
3458
  import {
@@ -3047,7 +3463,13 @@ import {
3047
3463
  useRef as useRef2,
3048
3464
  useState as useState4
3049
3465
  } from "react";
3050
- import { Html5Video, Internals as Internals14, useBufferState as useBufferState2, useCurrentFrame as useCurrentFrame4 } from "remotion";
3466
+ import {
3467
+ Html5Video,
3468
+ Internals as Internals15,
3469
+ useBufferState as useBufferState2,
3470
+ useCurrentFrame as useCurrentFrame4,
3471
+ useVideoConfig as useVideoConfig3
3472
+ } from "remotion";
3051
3473
  import { jsx as jsx4 } from "react/jsx-runtime";
3052
3474
  var {
3053
3475
  useUnsafeVideoConfig: useUnsafeVideoConfig2,
@@ -3061,8 +3483,8 @@ var {
3061
3483
  usePreload: usePreload2,
3062
3484
  SequenceContext: SequenceContext2,
3063
3485
  SequenceVisibilityToggleContext
3064
- } = Internals14;
3065
- var VideoForPreview = ({
3486
+ } = Internals15;
3487
+ var VideoForPreviewAssertedShowing = ({
3066
3488
  src: unpreloadedSrc,
3067
3489
  style,
3068
3490
  playbackRate,
@@ -3107,6 +3529,8 @@ var VideoForPreview = ({
3107
3529
  });
3108
3530
  warnAboutTooHighVolume2(userPreferredVolume);
3109
3531
  const parentSequence = useContext4(SequenceContext2);
3532
+ const isPremounting = Boolean(parentSequence?.premounting);
3533
+ const isPostmounting = Boolean(parentSequence?.postmounting);
3110
3534
  const loopDisplay = useLoopDisplay({
3111
3535
  loop,
3112
3536
  mediaDurationInSeconds,
@@ -3137,6 +3561,11 @@ var VideoForPreview = ({
3137
3561
  const currentTimeRef = useRef2(currentTime);
3138
3562
  currentTimeRef.current = currentTime;
3139
3563
  const preloadedSrc = usePreload2(src);
3564
+ const buffering = useContext4(Internals15.BufferingContextReact);
3565
+ if (!buffering) {
3566
+ throw new Error("useMediaPlayback must be used inside a <BufferingContext>");
3567
+ }
3568
+ const isPlayerBuffering = Internals15.useIsPlayerBuffering(buffering);
3140
3569
  useEffect3(() => {
3141
3570
  if (!canvasRef.current)
3142
3571
  return;
@@ -3157,7 +3586,10 @@ var VideoForPreview = ({
3157
3586
  playbackRate,
3158
3587
  audioStreamIndex,
3159
3588
  debugOverlay,
3160
- bufferState: buffer
3589
+ bufferState: buffer,
3590
+ isPremounting,
3591
+ isPostmounting,
3592
+ globalPlaybackRate
3161
3593
  });
3162
3594
  mediaPlayerRef.current = player;
3163
3595
  player.initialize(currentTimeRef.current).then((result) => {
@@ -3168,7 +3600,7 @@ var VideoForPreview = ({
3168
3600
  if (disallowFallbackToOffthreadVideo) {
3169
3601
  throw new Error(`Unknown container format ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
3170
3602
  }
3171
- Internals14.Log.warn({ logLevel, tag: "@remotion/media" }, `Unknown container format for ${preloadedSrc} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <OffthreadVideo>`);
3603
+ Internals15.Log.warn({ logLevel, tag: "@remotion/media" }, `Unknown container format for ${preloadedSrc} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <OffthreadVideo>`);
3172
3604
  setShouldFallbackToNativeVideo(true);
3173
3605
  return;
3174
3606
  }
@@ -3176,7 +3608,7 @@ var VideoForPreview = ({
3176
3608
  if (disallowFallbackToOffthreadVideo) {
3177
3609
  throw new Error(`Network error fetching ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
3178
3610
  }
3179
- Internals14.Log.warn({ logLevel, tag: "@remotion/media" }, `Network error fetching ${preloadedSrc}, falling back to <OffthreadVideo>`);
3611
+ Internals15.Log.warn({ logLevel, tag: "@remotion/media" }, `Network error fetching ${preloadedSrc}, falling back to <OffthreadVideo>`);
3180
3612
  setShouldFallbackToNativeVideo(true);
3181
3613
  return;
3182
3614
  }
@@ -3184,7 +3616,7 @@ var VideoForPreview = ({
3184
3616
  if (disallowFallbackToOffthreadVideo) {
3185
3617
  throw new Error(`Cannot decode ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
3186
3618
  }
3187
- Internals14.Log.warn({ logLevel, tag: "@remotion/media" }, `Cannot decode ${preloadedSrc}, falling back to <OffthreadVideo>`);
3619
+ Internals15.Log.warn({ logLevel, tag: "@remotion/media" }, `Cannot decode ${preloadedSrc}, falling back to <OffthreadVideo>`);
3188
3620
  setShouldFallbackToNativeVideo(true);
3189
3621
  return;
3190
3622
  }
@@ -3192,7 +3624,7 @@ var VideoForPreview = ({
3192
3624
  if (disallowFallbackToOffthreadVideo) {
3193
3625
  throw new Error(`No video or audio tracks found for ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
3194
3626
  }
3195
- Internals14.Log.warn({ logLevel, tag: "@remotion/media" }, `No video or audio tracks found for ${preloadedSrc}, falling back to <OffthreadVideo>`);
3627
+ Internals15.Log.warn({ logLevel, tag: "@remotion/media" }, `No video or audio tracks found for ${preloadedSrc}, falling back to <OffthreadVideo>`);
3196
3628
  setShouldFallbackToNativeVideo(true);
3197
3629
  return;
3198
3630
  }
@@ -3201,16 +3633,16 @@ var VideoForPreview = ({
3201
3633
  setMediaDurationInSeconds(result.durationInSeconds);
3202
3634
  }
3203
3635
  }).catch((error) => {
3204
- Internals14.Log.error({ logLevel, tag: "@remotion/media" }, "[VideoForPreview] Failed to initialize MediaPlayer", error);
3636
+ Internals15.Log.error({ logLevel, tag: "@remotion/media" }, "[VideoForPreview] Failed to initialize MediaPlayer", error);
3205
3637
  setShouldFallbackToNativeVideo(true);
3206
3638
  });
3207
3639
  } catch (error) {
3208
- Internals14.Log.error({ logLevel, tag: "@remotion/media" }, "[VideoForPreview] MediaPlayer initialization failed", error);
3640
+ Internals15.Log.error({ logLevel, tag: "@remotion/media" }, "[VideoForPreview] MediaPlayer initialization failed", error);
3209
3641
  setShouldFallbackToNativeVideo(true);
3210
3642
  }
3211
3643
  return () => {
3212
3644
  if (mediaPlayerRef.current) {
3213
- Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, `[VideoForPreview] Disposing MediaPlayer`);
3645
+ Internals15.Log.trace({ logLevel, tag: "@remotion/media" }, `[VideoForPreview] Disposing MediaPlayer`);
3214
3646
  mediaPlayerRef.current.dispose();
3215
3647
  mediaPlayerRef.current = null;
3216
3648
  }
@@ -3229,53 +3661,31 @@ var VideoForPreview = ({
3229
3661
  disallowFallbackToOffthreadVideo,
3230
3662
  audioStreamIndex,
3231
3663
  debugOverlay,
3232
- buffer
3664
+ buffer,
3665
+ isPremounting,
3666
+ isPostmounting,
3667
+ globalPlaybackRate
3233
3668
  ]);
3234
3669
  const classNameValue = useMemo4(() => {
3235
- return [Internals14.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals14.truthy).join(" ");
3670
+ return [Internals15.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals15.truthy).join(" ");
3236
3671
  }, [className]);
3237
3672
  useEffect3(() => {
3238
3673
  const mediaPlayer = mediaPlayerRef.current;
3239
3674
  if (!mediaPlayer)
3240
3675
  return;
3241
- if (playing) {
3242
- mediaPlayer.play().catch((error) => {
3243
- Internals14.Log.error({ logLevel, tag: "@remotion/media" }, "[VideoForPreview] Failed to play", error);
3244
- });
3676
+ if (playing && !isPlayerBuffering) {
3677
+ mediaPlayer.play(currentTimeRef.current);
3245
3678
  } else {
3246
3679
  mediaPlayer.pause();
3247
3680
  }
3248
- }, [playing, logLevel, mediaPlayerReady]);
3681
+ }, [isPlayerBuffering, playing, logLevel, mediaPlayerReady]);
3249
3682
  useLayoutEffect2(() => {
3250
3683
  const mediaPlayer = mediaPlayerRef.current;
3251
3684
  if (!mediaPlayer || !mediaPlayerReady)
3252
3685
  return;
3253
- mediaPlayer.seekTo(currentTime);
3254
- Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, `[VideoForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
3686
+ mediaPlayer.seekTo(currentTime).catch(() => {});
3687
+ Internals15.Log.trace({ logLevel, tag: "@remotion/media" }, `[VideoForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
3255
3688
  }, [currentTime, logLevel, mediaPlayerReady]);
3256
- useEffect3(() => {
3257
- const mediaPlayer = mediaPlayerRef.current;
3258
- if (!mediaPlayer || !mediaPlayerReady)
3259
- return;
3260
- let currentBlock = null;
3261
- const unsubscribe = mediaPlayer.onBufferingChange((newBufferingState) => {
3262
- if (newBufferingState && !currentBlock) {
3263
- currentBlock = buffer.delayPlayback();
3264
- Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, "[VideoForPreview] MediaPlayer buffering - blocking Remotion playback");
3265
- } else if (!newBufferingState && currentBlock) {
3266
- currentBlock.unblock();
3267
- currentBlock = null;
3268
- Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, "[VideoForPreview] MediaPlayer unbuffering - unblocking Remotion playback");
3269
- }
3270
- });
3271
- return () => {
3272
- unsubscribe();
3273
- if (currentBlock) {
3274
- currentBlock.unblock();
3275
- currentBlock = null;
3276
- }
3277
- };
3278
- }, [mediaPlayerReady, buffer, logLevel]);
3279
3689
  const effectiveMuted = isSequenceHidden || muted || mediaMuted || userPreferredVolume <= 0;
3280
3690
  useEffect3(() => {
3281
3691
  const mediaPlayer = mediaPlayerRef.current;
@@ -3297,14 +3707,20 @@ var VideoForPreview = ({
3297
3707
  }
3298
3708
  mediaPlayer.setDebugOverlay(debugOverlay);
3299
3709
  }, [debugOverlay, mediaPlayerReady]);
3300
- const effectivePlaybackRate = useMemo4(() => playbackRate * globalPlaybackRate, [playbackRate, globalPlaybackRate]);
3301
3710
  useEffect3(() => {
3302
3711
  const mediaPlayer = mediaPlayerRef.current;
3303
3712
  if (!mediaPlayer || !mediaPlayerReady) {
3304
3713
  return;
3305
3714
  }
3306
- mediaPlayer.setPlaybackRate(effectivePlaybackRate);
3307
- }, [effectivePlaybackRate, mediaPlayerReady]);
3715
+ mediaPlayer.setPlaybackRate(playbackRate);
3716
+ }, [playbackRate, mediaPlayerReady]);
3717
+ useEffect3(() => {
3718
+ const mediaPlayer = mediaPlayerRef.current;
3719
+ if (!mediaPlayer || !mediaPlayerReady) {
3720
+ return;
3721
+ }
3722
+ mediaPlayer.setGlobalPlaybackRate(globalPlaybackRate);
3723
+ }, [globalPlaybackRate, mediaPlayerReady]);
3308
3724
  useEffect3(() => {
3309
3725
  const mediaPlayer = mediaPlayerRef.current;
3310
3726
  if (!mediaPlayer || !mediaPlayerReady) {
@@ -3312,6 +3728,20 @@ var VideoForPreview = ({
3312
3728
  }
3313
3729
  mediaPlayer.setLoop(loop);
3314
3730
  }, [loop, mediaPlayerReady]);
3731
+ useEffect3(() => {
3732
+ const mediaPlayer = mediaPlayerRef.current;
3733
+ if (!mediaPlayer || !mediaPlayerReady) {
3734
+ return;
3735
+ }
3736
+ mediaPlayer.setIsPremounting(isPremounting);
3737
+ }, [isPremounting, mediaPlayerReady]);
3738
+ useEffect3(() => {
3739
+ const mediaPlayer = mediaPlayerRef.current;
3740
+ if (!mediaPlayer || !mediaPlayerReady) {
3741
+ return;
3742
+ }
3743
+ mediaPlayer.setIsPostmounting(isPostmounting);
3744
+ }, [isPostmounting, mediaPlayerReady]);
3315
3745
  useEffect3(() => {
3316
3746
  const mediaPlayer = mediaPlayerRef.current;
3317
3747
  if (!mediaPlayer || !mediaPlayerReady) {
@@ -3321,14 +3751,25 @@ var VideoForPreview = ({
3321
3751
  }, [videoConfig.fps, mediaPlayerReady]);
3322
3752
  useEffect3(() => {
3323
3753
  const mediaPlayer = mediaPlayerRef.current;
3324
- if (!mediaPlayer || !mediaPlayerReady || !onVideoFrame) {
3754
+ if (!mediaPlayer || !mediaPlayerReady) {
3325
3755
  return;
3326
3756
  }
3327
- const unsubscribe = mediaPlayer.onVideoFrame(onVideoFrame);
3328
- return () => {
3329
- unsubscribe();
3330
- };
3757
+ mediaPlayer.setVideoFrameCallback(onVideoFrame ?? null);
3331
3758
  }, [onVideoFrame, mediaPlayerReady]);
3759
+ useEffect3(() => {
3760
+ const mediaPlayer = mediaPlayerRef.current;
3761
+ if (!mediaPlayer || !mediaPlayerReady) {
3762
+ return;
3763
+ }
3764
+ mediaPlayer.setTrimBefore(trimBefore);
3765
+ }, [trimBefore, mediaPlayerReady]);
3766
+ useEffect3(() => {
3767
+ const mediaPlayer = mediaPlayerRef.current;
3768
+ if (!mediaPlayer || !mediaPlayerReady) {
3769
+ return;
3770
+ }
3771
+ mediaPlayer.setTrimAfter(trimAfter);
3772
+ }, [trimAfter, mediaPlayerReady]);
3332
3773
  const actualStyle = useMemo4(() => {
3333
3774
  return {
3334
3775
  ...style,
@@ -3361,6 +3802,38 @@ var VideoForPreview = ({
3361
3802
  className: classNameValue
3362
3803
  });
3363
3804
  };
3805
+ var VideoForPreview = (props) => {
3806
+ const frame = useCurrentFrame4();
3807
+ const videoConfig = useVideoConfig3();
3808
+ const currentTime = frame / videoConfig.fps;
3809
+ const showShow = useMemo4(() => {
3810
+ return getTimeInSeconds({
3811
+ unloopedTimeInSeconds: currentTime,
3812
+ playbackRate: props.playbackRate,
3813
+ loop: props.loop,
3814
+ trimBefore: props.trimBefore,
3815
+ trimAfter: props.trimAfter,
3816
+ mediaDurationInSeconds: Infinity,
3817
+ fps: videoConfig.fps,
3818
+ ifNoMediaDuration: "infinity",
3819
+ src: props.src
3820
+ }) !== null;
3821
+ }, [
3822
+ currentTime,
3823
+ props.loop,
3824
+ props.playbackRate,
3825
+ props.src,
3826
+ props.trimAfter,
3827
+ props.trimBefore,
3828
+ videoConfig.fps
3829
+ ]);
3830
+ if (!showShow) {
3831
+ return null;
3832
+ }
3833
+ return /* @__PURE__ */ jsx4(VideoForPreviewAssertedShowing, {
3834
+ ...props
3835
+ });
3836
+ };
3364
3837
 
3365
3838
  // src/video/video-for-rendering.tsx
3366
3839
  import {
@@ -3372,13 +3845,13 @@ import {
3372
3845
  } from "react";
3373
3846
  import {
3374
3847
  cancelRender as cancelRender3,
3375
- Internals as Internals15,
3848
+ Internals as Internals16,
3376
3849
  Loop,
3377
3850
  random as random2,
3378
3851
  useCurrentFrame as useCurrentFrame5,
3379
3852
  useDelayRender as useDelayRender2,
3380
3853
  useRemotionEnvironment as useRemotionEnvironment3,
3381
- useVideoConfig as useVideoConfig2
3854
+ useVideoConfig as useVideoConfig4
3382
3855
  } from "remotion";
3383
3856
  import { jsx as jsx5 } from "react/jsx-runtime";
3384
3857
  var VideoForRendering = ({
@@ -3407,11 +3880,11 @@ var VideoForRendering = ({
3407
3880
  throw new TypeError("No `src` was passed to <Video>.");
3408
3881
  }
3409
3882
  const frame = useCurrentFrame5();
3410
- const absoluteFrame = Internals15.useTimelinePosition();
3411
- const { fps } = useVideoConfig2();
3412
- const { registerRenderAsset, unregisterRenderAsset } = useContext5(Internals15.RenderAssetManager);
3413
- const startsAt = Internals15.useMediaStartsAt();
3414
- const sequenceContext = useContext5(Internals15.SequenceContext);
3883
+ const absoluteFrame = Internals16.useTimelinePosition();
3884
+ const { fps } = useVideoConfig4();
3885
+ const { registerRenderAsset, unregisterRenderAsset } = useContext5(Internals16.RenderAssetManager);
3886
+ const startsAt = Internals16.useMediaStartsAt();
3887
+ const sequenceContext = useContext5(Internals16.SequenceContext);
3415
3888
  const id = useMemo5(() => `media-video-${random2(src)}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [
3416
3889
  src,
3417
3890
  sequenceContext?.cumulatedFrom,
@@ -3422,8 +3895,8 @@ var VideoForRendering = ({
3422
3895
  const { delayRender, continueRender } = useDelayRender2();
3423
3896
  const canvasRef = useRef3(null);
3424
3897
  const [replaceWithOffthreadVideo, setReplaceWithOffthreadVideo] = useState5(false);
3425
- const audioEnabled = Internals15.useAudioEnabled();
3426
- const videoEnabled = Internals15.useVideoEnabled();
3898
+ const audioEnabled = Internals16.useAudioEnabled();
3899
+ const videoEnabled = Internals16.useVideoEnabled();
3427
3900
  useLayoutEffect3(() => {
3428
3901
  if (!canvasRef.current) {
3429
3902
  return;
@@ -3466,7 +3939,7 @@ var VideoForRendering = ({
3466
3939
  cancelRender3(new Error(`Unknown container format ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
3467
3940
  }
3468
3941
  if (window.remotion_isMainTab) {
3469
- Internals15.Log.info({ logLevel, tag: "@remotion/media" }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <OffthreadVideo>`);
3942
+ Internals16.Log.info({ logLevel, tag: "@remotion/media" }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <OffthreadVideo>`);
3470
3943
  }
3471
3944
  setReplaceWithOffthreadVideo({ durationInSeconds: null });
3472
3945
  return;
@@ -3476,7 +3949,7 @@ var VideoForRendering = ({
3476
3949
  cancelRender3(new Error(`Cannot decode ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
3477
3950
  }
3478
3951
  if (window.remotion_isMainTab) {
3479
- Internals15.Log.info({ logLevel, tag: "@remotion/media" }, `Cannot decode ${src}, falling back to <OffthreadVideo>`);
3952
+ Internals16.Log.info({ logLevel, tag: "@remotion/media" }, `Cannot decode ${src}, falling back to <OffthreadVideo>`);
3480
3953
  }
3481
3954
  setReplaceWithOffthreadVideo({
3482
3955
  durationInSeconds: result.durationInSeconds
@@ -3488,7 +3961,7 @@ var VideoForRendering = ({
3488
3961
  cancelRender3(new Error(`Cannot decode alpha component for ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
3489
3962
  }
3490
3963
  if (window.remotion_isMainTab) {
3491
- Internals15.Log.info({ logLevel, tag: "@remotion/media" }, `Cannot decode alpha component for ${src}, falling back to <OffthreadVideo>`);
3964
+ Internals16.Log.info({ logLevel, tag: "@remotion/media" }, `Cannot decode alpha component for ${src}, falling back to <OffthreadVideo>`);
3492
3965
  }
3493
3966
  setReplaceWithOffthreadVideo({
3494
3967
  durationInSeconds: result.durationInSeconds
@@ -3500,7 +3973,7 @@ var VideoForRendering = ({
3500
3973
  cancelRender3(new Error(`Cannot decode ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
3501
3974
  }
3502
3975
  if (window.remotion_isMainTab) {
3503
- Internals15.Log.info({ logLevel, tag: "@remotion/media" }, `Network error fetching ${src}, falling back to <OffthreadVideo>`);
3976
+ Internals16.Log.info({ logLevel, tag: "@remotion/media" }, `Network error fetching ${src}, falling back to <OffthreadVideo>`);
3504
3977
  }
3505
3978
  setReplaceWithOffthreadVideo({ durationInSeconds: null });
3506
3979
  return;
@@ -3539,12 +4012,12 @@ var VideoForRendering = ({
3539
4012
  frame,
3540
4013
  startsAt
3541
4014
  });
3542
- const volume = Internals15.evaluateVolume({
4015
+ const volume = Internals16.evaluateVolume({
3543
4016
  volume: volumeProp,
3544
4017
  frame: volumePropsFrame,
3545
4018
  mediaVolume: 1
3546
4019
  });
3547
- Internals15.warnAboutTooHighVolume(volume);
4020
+ Internals16.warnAboutTooHighVolume(volume);
3548
4021
  if (audio && volume > 0) {
3549
4022
  applyVolume(audio.data, volume);
3550
4023
  registerRenderAsset({
@@ -3596,10 +4069,10 @@ var VideoForRendering = ({
3596
4069
  videoEnabled
3597
4070
  ]);
3598
4071
  const classNameValue = useMemo5(() => {
3599
- return [Internals15.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals15.truthy).join(" ");
4072
+ return [Internals16.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals16.truthy).join(" ");
3600
4073
  }, [className]);
3601
4074
  if (replaceWithOffthreadVideo) {
3602
- const fallback = /* @__PURE__ */ jsx5(Internals15.InnerOffthreadVideo, {
4075
+ const fallback = /* @__PURE__ */ jsx5(Internals16.InnerOffthreadVideo, {
3603
4076
  src,
3604
4077
  playbackRate: playbackRate ?? 1,
3605
4078
  muted: muted ?? false,
@@ -3639,7 +4112,7 @@ var VideoForRendering = ({
3639
4112
  }
3640
4113
  return /* @__PURE__ */ jsx5(Loop, {
3641
4114
  layout: "none",
3642
- durationInFrames: Internals15.calculateMediaDuration({
4115
+ durationInFrames: Internals16.calculateMediaDuration({
3643
4116
  trimAfter: trimAfterValue,
3644
4117
  mediaDurationInFrames: replaceWithOffthreadVideo.durationInSeconds * fps,
3645
4118
  playbackRate,
@@ -3659,7 +4132,7 @@ var VideoForRendering = ({
3659
4132
 
3660
4133
  // src/video/video.tsx
3661
4134
  import { jsx as jsx6 } from "react/jsx-runtime";
3662
- var { validateMediaTrimProps, resolveTrimProps, validateMediaProps: validateMediaProps2 } = Internals16;
4135
+ var { validateMediaTrimProps, resolveTrimProps, validateMediaProps: validateMediaProps2 } = Internals17;
3663
4136
  var InnerVideo = ({
3664
4137
  src,
3665
4138
  audioStreamIndex,
@@ -3796,7 +4269,7 @@ var Video = ({
3796
4269
  debugOverlay: debugOverlay ?? false
3797
4270
  });
3798
4271
  };
3799
- Internals16.addSequenceStackTraces(Video);
4272
+ Internals17.addSequenceStackTraces(Video);
3800
4273
  // src/index.ts
3801
4274
  var experimental_Audio = Audio;
3802
4275
  var experimental_Video = Video;
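
src/index.ts aliases the components as experimental_Audio and experimental_Video. A hedged usage sketch, assuming those aliases are the package's named exports and that the props appearing throughout this diff (trimBefore/trimAfter, loop, playbackRate, logLevel) keep their current meaning:

// Illustrative usage sketch - prop meanings are assumptions drawn from this diff.
import React from 'react';
import {experimental_Video as Video} from '@remotion/media';

export const MyScene: React.FC = () => {
  return (
    <Video
      src="https://example.com/video.mp4"
      trimBefore={30} // trim values appear to be in frames, given the fps conversions above
      trimAfter={330}
      playbackRate={1}
      loop
      logLevel="verbose"
    />
  );
};
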