@remotion/media 4.0.365 → 4.0.367

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,24 +1,54 @@
1
1
  // src/audio/audio.tsx
2
- import { Internals as Internals13, useRemotionEnvironment as useRemotionEnvironment2 } from "remotion";
2
+ import { Internals as Internals14, useRemotionEnvironment as useRemotionEnvironment2 } from "remotion";
3
3
 
4
4
  // src/audio/audio-for-preview.tsx
5
5
  import { useContext as useContext2, useEffect as useEffect2, useMemo as useMemo2, useRef, useState as useState2 } from "react";
6
6
  import {
7
- Internals as Internals5,
7
+ Internals as Internals6,
8
8
  Audio as RemotionAudio,
9
9
  useBufferState,
10
- useCurrentFrame as useCurrentFrame2
10
+ useCurrentFrame as useCurrentFrame2,
11
+ useVideoConfig as useVideoConfig2
11
12
  } from "remotion";
12
13
 
14
+ // src/get-time-in-seconds.ts
15
+ import { Internals } from "remotion";
16
+ var getTimeInSeconds = ({
17
+ loop,
18
+ mediaDurationInSeconds,
19
+ unloopedTimeInSeconds,
20
+ src,
21
+ trimAfter,
22
+ trimBefore,
23
+ fps,
24
+ playbackRate,
25
+ ifNoMediaDuration
26
+ }) => {
27
+ if (mediaDurationInSeconds === null && loop && ifNoMediaDuration === "fail") {
28
+ throw new Error(`Could not determine duration of ${src}, but "loop" was set.`);
29
+ }
30
+ const loopDuration = loop ? Internals.calculateMediaDuration({
31
+ trimAfter,
32
+ mediaDurationInFrames: mediaDurationInSeconds ? mediaDurationInSeconds * fps : Infinity,
33
+ playbackRate: 1,
34
+ trimBefore
35
+ }) / fps : Infinity;
36
+ const timeInSeconds = unloopedTimeInSeconds * playbackRate % loopDuration;
37
+ if ((trimAfter ?? null) !== null && !loop) {
38
+ const time = (trimAfter - (trimBefore ?? 0)) / fps;
39
+ if (timeInSeconds >= time) {
40
+ return null;
41
+ }
42
+ }
43
+ return timeInSeconds + (trimBefore ?? 0) / fps;
44
+ };
45
+
13
46
  // src/media-player.ts
14
- import {
15
- ALL_FORMATS,
16
- AudioBufferSink,
17
- CanvasSink,
18
- Input,
19
- UrlSource
20
- } from "mediabunny";
21
- import { Internals as Internals2 } from "remotion";
47
+ import { ALL_FORMATS, Input, UrlSource } from "mediabunny";
48
+ import { Internals as Internals3 } from "remotion";
49
+
50
+ // src/audio-iterator-manager.ts
51
+ import { AudioBufferSink } from "mediabunny";
22
52
 
23
53
  // src/helpers/round-to-4-digits.ts
24
54
  var roundTo4Digits = (timestamp) => {
@@ -30,6 +60,7 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
30
60
  let destroyed = false;
31
61
  const iterator = audioSink.buffers(startFromSecond);
32
62
  const queuedAudioNodes = [];
63
+ const audioChunksForAfterResuming = [];
33
64
  const cleanupAudioQueue = () => {
34
65
  for (const node of queuedAudioNodes) {
35
66
  node.node.stop();
@@ -38,9 +69,9 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
38
69
  };
39
70
  let lastReturnedBuffer = null;
40
71
  let iteratorEnded = false;
41
- const getNextOrNullIfNotAvailable = async () => {
72
+ const getNextOrNullIfNotAvailable = async (allowWait) => {
42
73
  const next = iterator.next();
43
- const result = await Promise.race([
74
+ const result = allowWait ? await next : await Promise.race([
44
75
  next,
45
76
  new Promise((resolve) => {
46
77
  Promise.resolve().then(() => resolve());
@@ -70,7 +101,7 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
70
101
  buffer: result.value ?? null
71
102
  };
72
103
  };
73
- const tryToSatisfySeek = async (time) => {
104
+ const tryToSatisfySeek = async (time, allowWait) => {
74
105
  if (lastReturnedBuffer) {
75
106
  const bufferTimestamp = roundTo4Digits(lastReturnedBuffer.timestamp);
76
107
  const bufferEndTimestamp = roundTo4Digits(lastReturnedBuffer.timestamp + lastReturnedBuffer.duration);
@@ -88,20 +119,14 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
88
119
  }
89
120
  }
90
121
  if (iteratorEnded) {
91
- if (lastReturnedBuffer) {
92
- return {
93
- type: "satisfied",
94
- buffers: [lastReturnedBuffer]
95
- };
96
- }
97
122
  return {
98
- type: "not-satisfied",
99
- reason: "iterator ended"
123
+ type: "satisfied",
124
+ buffers: lastReturnedBuffer ? [lastReturnedBuffer] : []
100
125
  };
101
126
  }
102
127
  const toBeReturned = [];
103
128
  while (true) {
104
- const buffer = await getNextOrNullIfNotAvailable();
129
+ const buffer = await getNextOrNullIfNotAvailable(allowWait);
105
130
  if (buffer.type === "need-to-wait-for-it") {
106
131
  return {
107
132
  type: "not-satisfied",
@@ -111,15 +136,9 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
111
136
  if (buffer.type === "got-buffer-or-end") {
112
137
  if (buffer.buffer === null) {
113
138
  iteratorEnded = true;
114
- if (lastReturnedBuffer) {
115
- return {
116
- type: "satisfied",
117
- buffers: [lastReturnedBuffer]
118
- };
119
- }
120
139
  return {
121
- type: "not-satisfied",
122
- reason: "iterator ended and did not have buffer ready"
140
+ type: "satisfied",
141
+ buffers: lastReturnedBuffer ? [lastReturnedBuffer] : []
123
142
  };
124
143
  }
125
144
  const bufferTimestamp = roundTo4Digits(buffer.buffer.timestamp);
@@ -137,6 +156,26 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
137
156
  throw new Error("Unreachable");
138
157
  }
139
158
  };
159
+ const removeAndReturnAllQueuedAudioNodes = () => {
160
+ const nodes = queuedAudioNodes.slice();
161
+ for (const node of nodes) {
162
+ node.node.stop();
163
+ }
164
+ queuedAudioNodes.length = 0;
165
+ return nodes;
166
+ };
167
+ const addChunkForAfterResuming = (buffer, timestamp) => {
168
+ audioChunksForAfterResuming.push({ buffer, timestamp });
169
+ };
170
+ const moveQueuedChunksToPauseQueue = () => {
171
+ const toQueue = removeAndReturnAllQueuedAudioNodes();
172
+ for (const chunk of toQueue) {
173
+ addChunkForAfterResuming(chunk.buffer, chunk.timestamp);
174
+ }
175
+ };
176
+ const getNumberOfChunksAfterResuming = () => {
177
+ return audioChunksForAfterResuming.length;
178
+ };
140
179
  return {
141
180
  destroy: () => {
142
181
  cleanupAudioQueue();
@@ -144,9 +183,16 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
144
183
  iterator.return().catch(() => {
145
184
  return;
146
185
  });
186
+ audioChunksForAfterResuming.length = 0;
147
187
  },
148
- getNext: () => {
149
- return iterator.next();
188
+ getNext: async () => {
189
+ const next = await iterator.next();
190
+ if (next.value) {
191
+ lastReturnedBuffer = next.value;
192
+ } else {
193
+ iteratorEnded = true;
194
+ }
195
+ return next;
150
196
  },
151
197
  isDestroyed: () => {
152
198
  return destroyed;
@@ -160,29 +206,38 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
160
206
  queuedAudioNodes.splice(index, 1);
161
207
  }
162
208
  },
163
- removeAndReturnAllQueuedAudioNodes: () => {
164
- const nodes = queuedAudioNodes.slice();
165
- for (const node of nodes) {
166
- node.node.stop();
167
- }
168
- queuedAudioNodes.length = 0;
169
- return nodes;
209
+ getAndClearAudioChunksForAfterResuming: () => {
210
+ const chunks = audioChunksForAfterResuming.slice();
211
+ audioChunksForAfterResuming.length = 0;
212
+ return chunks;
170
213
  },
171
- getQueuedPeriod: () => {
172
- const lastNode = queuedAudioNodes[queuedAudioNodes.length - 1];
173
- if (!lastNode) {
174
- return null;
214
+ getQueuedPeriod: (pendingBuffers) => {
215
+ let until = -Infinity;
216
+ let from = Infinity;
217
+ for (const buffer of pendingBuffers) {
218
+ until = Math.max(until, buffer.timestamp + buffer.duration);
219
+ from = Math.min(from, buffer.timestamp);
220
+ }
221
+ for (const node of queuedAudioNodes) {
222
+ until = Math.max(until, node.timestamp + node.buffer.duration);
223
+ from = Math.min(from, node.timestamp);
175
224
  }
176
- const firstNode = queuedAudioNodes[0];
177
- if (!firstNode) {
225
+ for (const chunk of audioChunksForAfterResuming) {
226
+ until = Math.max(until, chunk.timestamp + chunk.buffer.duration);
227
+ from = Math.min(from, chunk.timestamp);
228
+ }
229
+ if (!Number.isFinite(from) || !Number.isFinite(until)) {
178
230
  return null;
179
231
  }
180
232
  return {
181
- from: firstNode.timestamp,
182
- until: lastNode.timestamp + lastNode.buffer.duration
233
+ from,
234
+ until
183
235
  };
184
236
  },
185
- tryToSatisfySeek
237
+ tryToSatisfySeek,
238
+ addChunkForAfterResuming,
239
+ moveQueuedChunksToPauseQueue,
240
+ getNumberOfChunksAfterResuming
186
241
  };
187
242
  };
188
243
  var isAlreadyQueued = (time, queuedPeriod) => {
@@ -192,31 +247,241 @@ var isAlreadyQueued = (time, queuedPeriod) => {
192
247
  return time >= queuedPeriod.from && time < queuedPeriod.until;
193
248
  };
194
249
 
250
+ // src/audio-iterator-manager.ts
251
+ var audioIteratorManager = ({
252
+ audioTrack,
253
+ delayPlaybackHandleIfNotPremounting,
254
+ sharedAudioContext
255
+ }) => {
256
+ let muted = false;
257
+ let currentVolume = 1;
258
+ const gainNode = sharedAudioContext.createGain();
259
+ gainNode.connect(sharedAudioContext.destination);
260
+ const audioSink = new AudioBufferSink(audioTrack);
261
+ let audioBufferIterator = null;
262
+ let audioIteratorsCreated = 0;
263
+ const scheduleAudioChunk = ({
264
+ buffer,
265
+ mediaTimestamp,
266
+ playbackRate,
267
+ scheduleAudioNode
268
+ }) => {
269
+ if (!audioBufferIterator) {
270
+ throw new Error("Audio buffer iterator not found");
271
+ }
272
+ const node = sharedAudioContext.createBufferSource();
273
+ node.buffer = buffer;
274
+ node.playbackRate.value = playbackRate;
275
+ node.connect(gainNode);
276
+ scheduleAudioNode(node, mediaTimestamp);
277
+ const iterator = audioBufferIterator;
278
+ iterator.addQueuedAudioNode(node, mediaTimestamp, buffer);
279
+ node.onended = () => {
280
+ setTimeout(() => {
281
+ iterator.removeQueuedAudioNode(node);
282
+ }, 30);
283
+ };
284
+ };
285
+ const onAudioChunk = ({
286
+ getIsPlaying,
287
+ buffer,
288
+ playbackRate,
289
+ scheduleAudioNode
290
+ }) => {
291
+ if (getIsPlaying()) {
292
+ scheduleAudioChunk({
293
+ buffer: buffer.buffer,
294
+ mediaTimestamp: buffer.timestamp,
295
+ playbackRate,
296
+ scheduleAudioNode
297
+ });
298
+ } else {
299
+ if (!audioBufferIterator) {
300
+ throw new Error("Audio buffer iterator not found");
301
+ }
302
+ audioBufferIterator.addChunkForAfterResuming(buffer.buffer, buffer.timestamp);
303
+ }
304
+ };
305
+ const startAudioIterator = async ({
306
+ nonce,
307
+ playbackRate,
308
+ startFromSecond,
309
+ getIsPlaying,
310
+ scheduleAudioNode
311
+ }) => {
312
+ audioBufferIterator?.destroy();
313
+ const delayHandle = delayPlaybackHandleIfNotPremounting();
314
+ const iterator = makeAudioIterator(audioSink, startFromSecond);
315
+ audioIteratorsCreated++;
316
+ audioBufferIterator = iterator;
317
+ for (let i = 0;i < 3; i++) {
318
+ const result = await iterator.getNext();
319
+ if (iterator.isDestroyed()) {
320
+ delayHandle.unblock();
321
+ return;
322
+ }
323
+ if (nonce.isStale()) {
324
+ delayHandle.unblock();
325
+ return;
326
+ }
327
+ if (!result.value) {
328
+ delayHandle.unblock();
329
+ return;
330
+ }
331
+ onAudioChunk({
332
+ getIsPlaying,
333
+ buffer: result.value,
334
+ playbackRate,
335
+ scheduleAudioNode
336
+ });
337
+ }
338
+ delayHandle.unblock();
339
+ };
340
+ const pausePlayback = () => {
341
+ if (!audioBufferIterator) {
342
+ return;
343
+ }
344
+ audioBufferIterator.moveQueuedChunksToPauseQueue();
345
+ };
346
+ const seek = async ({
347
+ newTime,
348
+ nonce,
349
+ fps,
350
+ playbackRate,
351
+ getIsPlaying,
352
+ scheduleAudioNode
353
+ }) => {
354
+ if (!audioBufferIterator) {
355
+ await startAudioIterator({
356
+ nonce,
357
+ playbackRate,
358
+ startFromSecond: newTime,
359
+ getIsPlaying,
360
+ scheduleAudioNode
361
+ });
362
+ return;
363
+ }
364
+ const currentTimeIsAlreadyQueued = isAlreadyQueued(newTime, audioBufferIterator.getQueuedPeriod([]));
365
+ const toBeScheduled = [];
366
+ if (!currentTimeIsAlreadyQueued) {
367
+ const audioSatisfyResult = await audioBufferIterator.tryToSatisfySeek(newTime, false);
368
+ if (nonce.isStale()) {
369
+ return;
370
+ }
371
+ if (audioSatisfyResult.type === "not-satisfied") {
372
+ await startAudioIterator({
373
+ nonce,
374
+ playbackRate,
375
+ startFromSecond: newTime,
376
+ getIsPlaying,
377
+ scheduleAudioNode
378
+ });
379
+ return;
380
+ }
381
+ toBeScheduled.push(...audioSatisfyResult.buffers);
382
+ }
383
+ const nextTime = newTime + 1 / fps * playbackRate + 1 / fps * playbackRate;
384
+ const nextIsAlreadyQueued = isAlreadyQueued(nextTime, audioBufferIterator.getQueuedPeriod(toBeScheduled));
385
+ if (!nextIsAlreadyQueued) {
386
+ const audioSatisfyResult = await audioBufferIterator.tryToSatisfySeek(nextTime, true);
387
+ if (nonce.isStale()) {
388
+ return;
389
+ }
390
+ if (audioSatisfyResult.type === "not-satisfied") {
391
+ await startAudioIterator({
392
+ nonce,
393
+ playbackRate,
394
+ startFromSecond: newTime,
395
+ getIsPlaying,
396
+ scheduleAudioNode
397
+ });
398
+ return;
399
+ }
400
+ toBeScheduled.push(...audioSatisfyResult.buffers);
401
+ }
402
+ for (const buffer of toBeScheduled) {
403
+ onAudioChunk({
404
+ getIsPlaying,
405
+ buffer,
406
+ playbackRate,
407
+ scheduleAudioNode
408
+ });
409
+ }
410
+ };
411
+ const resumeScheduledAudioChunks = ({
412
+ playbackRate,
413
+ scheduleAudioNode
414
+ }) => {
415
+ if (!audioBufferIterator) {
416
+ return;
417
+ }
418
+ for (const chunk of audioBufferIterator.getAndClearAudioChunksForAfterResuming()) {
419
+ scheduleAudioChunk({
420
+ buffer: chunk.buffer,
421
+ mediaTimestamp: chunk.timestamp,
422
+ playbackRate,
423
+ scheduleAudioNode
424
+ });
425
+ }
426
+ };
427
+ return {
428
+ startAudioIterator,
429
+ resumeScheduledAudioChunks,
430
+ pausePlayback,
431
+ getAudioBufferIterator: () => audioBufferIterator,
432
+ destroy: () => {
433
+ audioBufferIterator?.destroy();
434
+ audioBufferIterator = null;
435
+ },
436
+ seek,
437
+ getAudioIteratorsCreated: () => audioIteratorsCreated,
438
+ setMuted: (newMuted) => {
439
+ muted = newMuted;
440
+ gainNode.gain.value = muted ? 0 : currentVolume;
441
+ },
442
+ setVolume: (volume) => {
443
+ currentVolume = Math.max(0, volume);
444
+ gainNode.gain.value = muted ? 0 : currentVolume;
445
+ },
446
+ scheduleAudioChunk
447
+ };
448
+ };
449
+
450
+ // src/calculate-playbacktime.ts
451
+ var calculatePlaybackTime = ({
452
+ audioSyncAnchor,
453
+ currentTime,
454
+ playbackRate
455
+ }) => {
456
+ const timeSinceAnchor = currentTime - audioSyncAnchor;
457
+ return timeSinceAnchor * playbackRate;
458
+ };
459
+
195
460
  // src/debug-overlay/preview-overlay.ts
196
461
  var drawPreviewOverlay = ({
197
462
  context,
198
- stats,
199
463
  audioTime,
200
464
  audioContextState,
201
- audioIterator,
202
465
  audioSyncAnchor,
203
- audioChunksForAfterResuming,
204
- playing
466
+ playing,
467
+ audioIteratorManager: audioIteratorManager2,
468
+ videoIteratorManager
205
469
  }) => {
206
470
  const lines = [
207
471
  "Debug overlay",
208
- `Video iterators created: ${stats.videoIteratorsCreated}`,
209
- `Audio iterators created: ${stats.audioIteratorsCreated}`,
210
- `Frames rendered: ${stats.framesRendered}`,
472
+ `Video iterators created: ${videoIteratorManager?.getVideoIteratorsCreated()}`,
473
+ `Audio iterators created: ${audioIteratorManager2?.getAudioIteratorsCreated()}`,
474
+ `Frames rendered: ${videoIteratorManager?.getFramesRendered()}`,
211
475
  `Audio context state: ${audioContextState}`,
212
476
  `Audio time: ${(audioTime - audioSyncAnchor).toFixed(3)}s`
213
477
  ];
214
- if (audioIterator) {
215
- const queuedPeriod = audioIterator.getQueuedPeriod();
478
+ if (audioIteratorManager2) {
479
+ const queuedPeriod = audioIteratorManager2.getAudioBufferIterator()?.getQueuedPeriod([]);
480
+ const numberOfChunksAfterResuming = audioIteratorManager2?.getAudioBufferIterator()?.getNumberOfChunksAfterResuming();
216
481
  if (queuedPeriod) {
217
482
  lines.push(`Audio queued until: ${(queuedPeriod.until - (audioTime - audioSyncAnchor)).toFixed(3)}s`);
218
- } else if (audioChunksForAfterResuming.length > 0) {
219
- lines.push(`Audio chunks for after resuming: ${audioChunksForAfterResuming.length}`);
483
+ } else if (numberOfChunksAfterResuming) {
484
+ lines.push(`Audio chunks for after resuming: ${numberOfChunksAfterResuming}`);
220
485
  }
221
486
  lines.push(`Playing: ${playing}`);
222
487
  }
@@ -237,38 +502,6 @@ var drawPreviewOverlay = ({
237
502
  }
238
503
  };
239
504
 
240
- // src/get-time-in-seconds.ts
241
- import { Internals } from "remotion";
242
- var getTimeInSeconds = ({
243
- loop,
244
- mediaDurationInSeconds,
245
- unloopedTimeInSeconds,
246
- src,
247
- trimAfter,
248
- trimBefore,
249
- fps,
250
- playbackRate,
251
- ifNoMediaDuration
252
- }) => {
253
- if (mediaDurationInSeconds === null && loop && ifNoMediaDuration === "fail") {
254
- throw new Error(`Could not determine duration of ${src}, but "loop" was set.`);
255
- }
256
- const loopDuration = loop ? Internals.calculateMediaDuration({
257
- trimAfter,
258
- mediaDurationInFrames: mediaDurationInSeconds ? mediaDurationInSeconds * fps : Infinity,
259
- playbackRate: 1,
260
- trimBefore
261
- }) / fps : Infinity;
262
- const timeInSeconds = unloopedTimeInSeconds * playbackRate % loopDuration;
263
- if ((trimAfter ?? null) !== null && !loop) {
264
- const time = (trimAfter - (trimBefore ?? 0)) / fps;
265
- if (timeInSeconds >= time) {
266
- return null;
267
- }
268
- }
269
- return timeInSeconds + (trimBefore ?? 0) / fps;
270
- };
271
-
272
505
  // src/is-network-error.ts
273
506
  function isNetworkError(error) {
274
507
  if (error.message.includes("Failed to fetch") || error.message.includes("Load failed") || error.message.includes("NetworkError when attempting to fetch resource")) {
@@ -277,6 +510,25 @@ function isNetworkError(error) {
277
510
  return false;
278
511
  }
279
512
 
513
+ // src/nonce-manager.ts
514
+ var makeNonceManager = () => {
515
+ let nonce = 0;
516
+ const createAsyncOperation = () => {
517
+ nonce++;
518
+ const currentNonce = nonce;
519
+ return {
520
+ isStale: () => nonce !== currentNonce
521
+ };
522
+ };
523
+ return {
524
+ createAsyncOperation
525
+ };
526
+ };
527
+
528
+ // src/video-iterator-manager.ts
529
+ import { CanvasSink } from "mediabunny";
530
+ import { Internals as Internals2 } from "remotion";
531
+
280
532
  // src/video/video-preview-iterator.ts
281
533
  var createVideoIterator = (timeToSeek, videoSink) => {
282
534
  let destroyed = false;
@@ -400,6 +652,88 @@ var createVideoIterator = (timeToSeek, videoSink) => {
400
652
  };
401
653
  };
402
654
 
655
+ // src/video-iterator-manager.ts
656
+ var videoIteratorManager = ({
657
+ delayPlaybackHandleIfNotPremounting,
658
+ canvas,
659
+ context,
660
+ drawDebugOverlay,
661
+ logLevel,
662
+ getOnVideoFrameCallback,
663
+ videoTrack
664
+ }) => {
665
+ let videoIteratorsCreated = 0;
666
+ let videoFrameIterator = null;
667
+ let framesRendered = 0;
668
+ canvas.width = videoTrack.displayWidth;
669
+ canvas.height = videoTrack.displayHeight;
670
+ const canvasSink = new CanvasSink(videoTrack, {
671
+ poolSize: 2,
672
+ fit: "contain",
673
+ alpha: true
674
+ });
675
+ const drawFrame = (frame) => {
676
+ context.clearRect(0, 0, canvas.width, canvas.height);
677
+ context.drawImage(frame.canvas, 0, 0);
678
+ framesRendered++;
679
+ drawDebugOverlay();
680
+ const callback = getOnVideoFrameCallback();
681
+ if (callback) {
682
+ callback(canvas);
683
+ }
684
+ Internals2.Log.trace({ logLevel, tag: "@remotion/media" }, `[MediaPlayer] Drew frame ${frame.timestamp.toFixed(3)}s`);
685
+ };
686
+ const startVideoIterator = async (timeToSeek, nonce) => {
687
+ videoFrameIterator?.destroy();
688
+ const iterator = createVideoIterator(timeToSeek, canvasSink);
689
+ videoIteratorsCreated++;
690
+ videoFrameIterator = iterator;
691
+ const delayHandle = delayPlaybackHandleIfNotPremounting();
692
+ const frameResult = await iterator.getNext();
693
+ delayHandle.unblock();
694
+ if (iterator.isDestroyed()) {
695
+ return;
696
+ }
697
+ if (nonce.isStale()) {
698
+ return;
699
+ }
700
+ if (videoFrameIterator.isDestroyed()) {
701
+ return;
702
+ }
703
+ if (!frameResult.value) {
704
+ return;
705
+ }
706
+ drawFrame(frameResult.value);
707
+ };
708
+ const seek = async ({ newTime, nonce }) => {
709
+ if (!videoFrameIterator) {
710
+ return;
711
+ }
712
+ const videoSatisfyResult = await videoFrameIterator.tryToSatisfySeek(newTime);
713
+ if (videoSatisfyResult.type === "satisfied") {
714
+ drawFrame(videoSatisfyResult.frame);
715
+ return;
716
+ }
717
+ if (nonce.isStale()) {
718
+ return;
719
+ }
720
+ startVideoIterator(newTime, nonce).catch(() => {});
721
+ };
722
+ return {
723
+ startVideoIterator,
724
+ getVideoIteratorsCreated: () => videoIteratorsCreated,
725
+ seek,
726
+ destroy: () => {
727
+ videoFrameIterator?.destroy();
728
+ context.clearRect(0, 0, canvas.width, canvas.height);
729
+ videoFrameIterator = null;
730
+ },
731
+ getVideoFrameIterator: () => videoFrameIterator,
732
+ drawFrame,
733
+ getFramesRendered: () => framesRendered
734
+ };
735
+ };
736
+
403
737
  // src/media-player.ts
404
738
  class MediaPlayer {
405
739
  canvas;
@@ -407,32 +741,26 @@ class MediaPlayer {
407
741
  src;
408
742
  logLevel;
409
743
  playbackRate;
744
+ globalPlaybackRate;
410
745
  audioStreamIndex;
411
- canvasSink = null;
412
- videoFrameIterator = null;
413
- debugStats = {
414
- videoIteratorsCreated: 0,
415
- audioIteratorsCreated: 0,
416
- framesRendered: 0
417
- };
418
- audioSink = null;
419
- audioBufferIterator = null;
420
- gainNode = null;
421
- currentVolume = 1;
422
746
  sharedAudioContext;
747
+ audioIteratorManager = null;
748
+ videoIteratorManager = null;
423
749
  audioSyncAnchor = 0;
424
750
  playing = false;
425
- muted = false;
426
751
  loop = false;
427
752
  fps;
428
753
  trimBefore;
429
754
  trimAfter;
430
- initialized = false;
431
755
  totalDuration;
432
756
  debugOverlay = false;
433
- onVideoFrameCallback;
757
+ nonceManager;
758
+ onVideoFrameCallback = null;
434
759
  initializationPromise = null;
435
760
  bufferState;
761
+ isPremounting;
762
+ isPostmounting;
763
+ seekPromiseChain = Promise.resolve();
436
764
  constructor({
437
765
  canvas,
438
766
  src,
@@ -442,16 +770,20 @@ class MediaPlayer {
442
770
  trimBefore,
443
771
  trimAfter,
444
772
  playbackRate,
773
+ globalPlaybackRate,
445
774
  audioStreamIndex,
446
775
  fps,
447
776
  debugOverlay,
448
- bufferState
777
+ bufferState,
778
+ isPremounting,
779
+ isPostmounting
449
780
  }) {
450
781
  this.canvas = canvas ?? null;
451
782
  this.src = src;
452
783
  this.logLevel = logLevel ?? window.remotion_logLevel;
453
784
  this.sharedAudioContext = sharedAudioContext;
454
785
  this.playbackRate = playbackRate;
786
+ this.globalPlaybackRate = globalPlaybackRate;
455
787
  this.loop = loop;
456
788
  this.trimBefore = trimBefore;
457
789
  this.trimAfter = trimAfter;
@@ -459,6 +791,13 @@ class MediaPlayer {
459
791
  this.fps = fps;
460
792
  this.debugOverlay = debugOverlay;
461
793
  this.bufferState = bufferState;
794
+ this.isPremounting = isPremounting;
795
+ this.isPostmounting = isPostmounting;
796
+ this.nonceManager = makeNonceManager();
797
+ this.input = new Input({
798
+ source: new UrlSource(this.src),
799
+ formats: ALL_FORMATS
800
+ });
462
801
  if (canvas) {
463
802
  const context = canvas.getContext("2d", {
464
803
  alpha: true,
@@ -472,15 +811,9 @@ class MediaPlayer {
472
811
  this.context = null;
473
812
  }
474
813
  }
475
- input = null;
476
- isReady() {
477
- return this.initialized && Boolean(this.sharedAudioContext) && !this.input?.disposed;
478
- }
479
- hasAudio() {
480
- return Boolean(this.audioSink && this.sharedAudioContext && this.gainNode);
481
- }
814
+ input;
482
815
  isDisposalError() {
483
- return this.input?.disposed === true;
816
+ return this.input.disposed === true;
484
817
  }
485
818
  initialize(startTimeUnresolved) {
486
819
  const promise = this._initialize(startTimeUnresolved);
@@ -489,17 +822,11 @@ class MediaPlayer {
489
822
  }
490
823
  async _initialize(startTimeUnresolved) {
491
824
  try {
492
- const urlSource = new UrlSource(this.src);
493
- const input = new Input({
494
- source: urlSource,
495
- formats: ALL_FORMATS
496
- });
497
- this.input = input;
498
- if (input.disposed) {
825
+ if (this.input.disposed) {
499
826
  return { type: "disposed" };
500
827
  }
501
828
  try {
502
- await input.getFormat();
829
+ await this.input.getFormat();
503
830
  } catch (error) {
504
831
  if (this.isDisposalError()) {
505
832
  return { type: "disposed" };
@@ -508,14 +835,17 @@ class MediaPlayer {
508
835
  if (isNetworkError(err)) {
509
836
  throw error;
510
837
  }
511
- Internals2.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Failed to recognize format for ${this.src}`, error);
838
+ Internals3.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Failed to recognize format for ${this.src}`, error);
512
839
  return { type: "unknown-container-format" };
513
840
  }
514
841
  const [durationInSeconds, videoTrack, audioTracks] = await Promise.all([
515
- input.computeDuration(),
516
- input.getPrimaryVideoTrack(),
517
- input.getAudioTracks()
842
+ this.input.computeDuration(),
843
+ this.input.getPrimaryVideoTrack(),
844
+ this.input.getAudioTracks()
518
845
  ]);
846
+ if (this.input.disposed) {
847
+ return { type: "disposed" };
848
+ }
519
849
  this.totalDuration = durationInSeconds;
520
850
  const audioTrack = audioTracks[this.audioStreamIndex] ?? null;
521
851
  if (!videoTrack && !audioTrack) {
@@ -526,18 +856,18 @@ class MediaPlayer {
526
856
  if (!canDecode) {
527
857
  return { type: "cannot-decode" };
528
858
  }
529
- this.canvasSink = new CanvasSink(videoTrack, {
530
- poolSize: 2,
531
- fit: "contain",
532
- alpha: true
859
+ if (this.input.disposed) {
860
+ return { type: "disposed" };
861
+ }
862
+ this.videoIteratorManager = videoIteratorManager({
863
+ videoTrack,
864
+ delayPlaybackHandleIfNotPremounting: this.delayPlaybackHandleIfNotPremounting,
865
+ context: this.context,
866
+ canvas: this.canvas,
867
+ getOnVideoFrameCallback: () => this.onVideoFrameCallback,
868
+ logLevel: this.logLevel,
869
+ drawDebugOverlay: this.drawDebugOverlay
533
870
  });
534
- this.canvas.width = videoTrack.displayWidth;
535
- this.canvas.height = videoTrack.displayHeight;
536
- }
537
- if (audioTrack && this.sharedAudioContext) {
538
- this.audioSink = new AudioBufferSink(audioTrack);
539
- this.gainNode = this.sharedAudioContext.createGain();
540
- this.gainNode.connect(this.sharedAudioContext.destination);
541
871
  }
542
872
  const startTime = getTimeInSeconds({
543
873
  unloopedTimeInSeconds: startTimeUnresolved,
@@ -551,53 +881,46 @@ class MediaPlayer {
551
881
  src: this.src
552
882
  });
553
883
  if (startTime === null) {
554
- this.clearCanvas();
555
- return { type: "success", durationInSeconds: this.totalDuration };
884
+ throw new Error(`should have asserted that the time is not null`);
556
885
  }
557
- if (this.sharedAudioContext) {
558
- this.setPlaybackTime(startTime);
886
+ this.setPlaybackTime(startTime, this.playbackRate * this.globalPlaybackRate);
887
+ if (audioTrack) {
888
+ this.audioIteratorManager = audioIteratorManager({
889
+ audioTrack,
890
+ delayPlaybackHandleIfNotPremounting: this.delayPlaybackHandleIfNotPremounting,
891
+ sharedAudioContext: this.sharedAudioContext
892
+ });
559
893
  }
560
- this.initialized = true;
894
+ const nonce = this.nonceManager.createAsyncOperation();
561
895
  try {
562
- this.startAudioIterator(startTime, this.currentSeekNonce);
563
- await this.startVideoIterator(startTime, this.currentSeekNonce);
896
+ if (this.audioIteratorManager) {
897
+ this.audioIteratorManager.startAudioIterator({
898
+ nonce,
899
+ playbackRate: this.playbackRate * this.globalPlaybackRate,
900
+ startFromSecond: startTime,
901
+ getIsPlaying: () => this.playing,
902
+ scheduleAudioNode: this.scheduleAudioNode
903
+ });
904
+ }
905
+ await this.videoIteratorManager?.startVideoIterator(startTime, nonce);
564
906
  } catch (error) {
565
907
  if (this.isDisposalError()) {
566
908
  return { type: "disposed" };
567
909
  }
568
- Internals2.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to start audio and video iterators", error);
910
+ Internals3.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to start audio and video iterators", error);
569
911
  }
570
912
  return { type: "success", durationInSeconds };
571
913
  } catch (error) {
572
914
  const err = error;
573
915
  if (isNetworkError(err)) {
574
- Internals2.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Network/CORS error for ${this.src}`, err);
916
+ Internals3.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Network/CORS error for ${this.src}`, err);
575
917
  return { type: "network-error" };
576
918
  }
577
- Internals2.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to initialize", error);
919
+ Internals3.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to initialize", error);
578
920
  throw error;
579
921
  }
580
922
  }
581
- clearCanvas() {
582
- if (this.context && this.canvas) {
583
- this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
584
- }
585
- }
586
- currentSeekNonce = 0;
587
- seekPromiseChain = Promise.resolve();
588
923
  async seekTo(time) {
589
- this.currentSeekNonce++;
590
- const nonce = this.currentSeekNonce;
591
- await this.seekPromiseChain;
592
- this.seekPromiseChain = this.seekToDoNotCallDirectly(time, nonce);
593
- await this.seekPromiseChain;
594
- }
595
- async seekToDoNotCallDirectly(time, nonce) {
596
- if (nonce !== this.currentSeekNonce) {
597
- return;
598
- }
599
- if (!this.isReady())
600
- return;
601
924
  const newTime = getTimeInSeconds({
602
925
  unloopedTimeInSeconds: time,
603
926
  playbackRate: this.playbackRate,
@@ -610,265 +933,189 @@ class MediaPlayer {
610
933
  src: this.src
611
934
  });
612
935
  if (newTime === null) {
613
- this.videoFrameIterator?.destroy();
614
- this.videoFrameIterator = null;
615
- this.clearCanvas();
616
- this.audioBufferIterator?.destroy();
617
- this.audioBufferIterator = null;
936
+ throw new Error(`should have asserted that the time is not null`);
937
+ }
938
+ const nonce = this.nonceManager.createAsyncOperation();
939
+ await this.seekPromiseChain;
940
+ this.seekPromiseChain = this.seekToDoNotCallDirectly(newTime, nonce);
941
+ await this.seekPromiseChain;
942
+ }
943
+ async seekToDoNotCallDirectly(newTime, nonce) {
944
+ if (nonce.isStale()) {
618
945
  return;
619
946
  }
620
947
  const currentPlaybackTime = this.getPlaybackTime();
621
948
  if (currentPlaybackTime === newTime) {
622
949
  return;
623
950
  }
624
- const newAudioSyncAnchor = this.sharedAudioContext.currentTime - newTime;
951
+ const newAudioSyncAnchor = this.sharedAudioContext.currentTime - newTime / (this.playbackRate * this.globalPlaybackRate);
625
952
  const diff = Math.abs(newAudioSyncAnchor - this.audioSyncAnchor);
626
- if (diff > 0.1) {
627
- this.setPlaybackTime(newTime);
628
- }
629
- const videoSatisfyResult = await this.videoFrameIterator?.tryToSatisfySeek(newTime);
630
- if (videoSatisfyResult?.type === "satisfied") {
631
- this.drawFrame(videoSatisfyResult.frame);
632
- } else if (videoSatisfyResult && this.currentSeekNonce === nonce) {
633
- this.startVideoIterator(newTime, nonce);
634
- }
635
- const queuedPeriod = this.audioBufferIterator?.getQueuedPeriod();
636
- const currentTimeIsAlreadyQueued = isAlreadyQueued(newTime, queuedPeriod);
637
- const toBeScheduled = [];
638
- if (!currentTimeIsAlreadyQueued) {
639
- const audioSatisfyResult = await this.audioBufferIterator?.tryToSatisfySeek(newTime);
640
- if (this.currentSeekNonce !== nonce) {
641
- return;
642
- }
643
- if (!audioSatisfyResult) {
644
- return;
645
- }
646
- if (audioSatisfyResult.type === "not-satisfied") {
647
- await this.startAudioIterator(newTime, nonce);
648
- return;
649
- }
650
- toBeScheduled.push(...audioSatisfyResult.buffers);
651
- }
652
- const nextTime = newTime + 1 / this.fps * this.playbackRate + 1 / this.fps * this.playbackRate;
653
- const nextIsAlreadyQueued = isAlreadyQueued(nextTime, queuedPeriod);
654
- if (!nextIsAlreadyQueued) {
655
- const audioSatisfyResult = await this.audioBufferIterator?.tryToSatisfySeek(nextTime);
656
- if (this.currentSeekNonce !== nonce) {
657
- return;
658
- }
659
- if (!audioSatisfyResult) {
660
- return;
661
- }
662
- if (audioSatisfyResult.type === "not-satisfied") {
663
- await this.startAudioIterator(nextTime, nonce);
664
- return;
665
- }
666
- toBeScheduled.push(...audioSatisfyResult.buffers);
667
- }
668
- for (const buffer of toBeScheduled) {
669
- if (this.playing) {
670
- this.scheduleAudioChunk(buffer.buffer, buffer.timestamp);
671
- } else {
672
- this.audioChunksForAfterResuming.push({
673
- buffer: buffer.buffer,
674
- timestamp: buffer.timestamp
675
- });
676
- }
953
+ if (diff > 0.04) {
954
+ this.setPlaybackTime(newTime, this.playbackRate * this.globalPlaybackRate);
677
955
  }
956
+ await this.videoIteratorManager?.seek({
957
+ newTime,
958
+ nonce
959
+ });
960
+ await this.audioIteratorManager?.seek({
961
+ newTime,
962
+ nonce,
963
+ fps: this.fps,
964
+ playbackRate: this.playbackRate * this.globalPlaybackRate,
965
+ getIsPlaying: () => this.playing,
966
+ scheduleAudioNode: this.scheduleAudioNode
967
+ });
678
968
  }
679
969
  async play(time) {
680
- if (!this.isReady())
681
- return;
682
- this.setPlaybackTime(time);
970
+ const newTime = getTimeInSeconds({
971
+ unloopedTimeInSeconds: time,
972
+ playbackRate: this.playbackRate,
973
+ loop: this.loop,
974
+ trimBefore: this.trimBefore,
975
+ trimAfter: this.trimAfter,
976
+ mediaDurationInSeconds: this.totalDuration ?? null,
977
+ fps: this.fps,
978
+ ifNoMediaDuration: "infinity",
979
+ src: this.src
980
+ });
981
+ if (newTime === null) {
982
+ throw new Error(`should have asserted that the time is not null`);
983
+ }
984
+ this.setPlaybackTime(newTime, this.playbackRate * this.globalPlaybackRate);
683
985
  this.playing = true;
684
- for (const chunk of this.audioChunksForAfterResuming) {
685
- this.scheduleAudioChunk(chunk.buffer, chunk.timestamp);
986
+ if (this.audioIteratorManager) {
987
+ this.audioIteratorManager.resumeScheduledAudioChunks({
988
+ playbackRate: this.playbackRate * this.globalPlaybackRate,
989
+ scheduleAudioNode: this.scheduleAudioNode
990
+ });
686
991
  }
687
992
  if (this.sharedAudioContext.state === "suspended") {
688
993
  await this.sharedAudioContext.resume();
689
994
  }
690
- this.audioChunksForAfterResuming.length = 0;
691
995
  this.drawDebugOverlay();
692
996
  }
997
+ delayPlaybackHandleIfNotPremounting = () => {
998
+ if (this.isPremounting || this.isPostmounting) {
999
+ return {
1000
+ unblock: () => {}
1001
+ };
1002
+ }
1003
+ return this.bufferState.delayPlayback();
1004
+ };
693
1005
  pause() {
694
1006
  this.playing = false;
695
- const toQueue = this.audioBufferIterator?.removeAndReturnAllQueuedAudioNodes();
696
- if (toQueue) {
697
- for (const chunk of toQueue) {
698
- this.audioChunksForAfterResuming.push({
699
- buffer: chunk.buffer,
700
- timestamp: chunk.timestamp
701
- });
702
- }
703
- }
1007
+ this.audioIteratorManager?.pausePlayback();
704
1008
  this.drawDebugOverlay();
705
1009
  }
706
1010
  setMuted(muted) {
707
- this.muted = muted;
708
- if (this.gainNode) {
709
- this.gainNode.gain.value = muted ? 0 : this.currentVolume;
710
- }
1011
+ this.audioIteratorManager?.setMuted(muted);
711
1012
  }
712
1013
  setVolume(volume) {
713
- if (!this.gainNode) {
1014
+ if (!this.audioIteratorManager) {
714
1015
  return;
715
1016
  }
716
- const appliedVolume = Math.max(0, volume);
717
- this.currentVolume = appliedVolume;
718
- if (!this.muted) {
719
- this.gainNode.gain.value = appliedVolume;
720
- }
1017
+ this.audioIteratorManager.setVolume(volume);
1018
+ }
1019
+ setTrimBefore(trimBefore) {
1020
+ this.trimBefore = trimBefore;
1021
+ }
1022
+ setTrimAfter(trimAfter) {
1023
+ this.trimAfter = trimAfter;
721
1024
  }
722
1025
  setDebugOverlay(debugOverlay) {
723
1026
  this.debugOverlay = debugOverlay;
724
1027
  }
1028
+ updateAfterPlaybackRateChange() {
1029
+ if (!this.audioIteratorManager) {
1030
+ return;
1031
+ }
1032
+ this.setPlaybackTime(this.getPlaybackTime(), this.playbackRate * this.globalPlaybackRate);
1033
+ const iterator = this.audioIteratorManager.getAudioBufferIterator();
1034
+ if (!iterator) {
1035
+ return;
1036
+ }
1037
+ iterator.moveQueuedChunksToPauseQueue();
1038
+ if (this.playing) {
1039
+ this.audioIteratorManager.resumeScheduledAudioChunks({
1040
+ playbackRate: this.playbackRate * this.globalPlaybackRate,
1041
+ scheduleAudioNode: this.scheduleAudioNode
1042
+ });
1043
+ }
1044
+ }
725
1045
  setPlaybackRate(rate) {
726
1046
  this.playbackRate = rate;
1047
+ this.updateAfterPlaybackRateChange();
1048
+ }
1049
+ setGlobalPlaybackRate(rate) {
1050
+ this.globalPlaybackRate = rate;
1051
+ this.updateAfterPlaybackRateChange();
727
1052
  }
728
1053
  setFps(fps) {
729
1054
  this.fps = fps;
730
1055
  }
1056
+ setIsPremounting(isPremounting) {
1057
+ this.isPremounting = isPremounting;
1058
+ }
1059
+ setIsPostmounting(isPostmounting) {
1060
+ this.isPostmounting = isPostmounting;
1061
+ }
731
1062
  setLoop(loop) {
732
1063
  this.loop = loop;
733
1064
  }
734
1065
  async dispose() {
735
- this.initialized = false;
736
1066
  if (this.initializationPromise) {
737
1067
  try {
738
1068
  await this.initializationPromise;
739
1069
  } catch {}
740
1070
  }
741
- this.input?.dispose();
742
- this.videoFrameIterator?.destroy();
743
- this.videoFrameIterator = null;
744
- this.audioBufferIterator?.destroy();
745
- this.audioBufferIterator = null;
1071
+ this.nonceManager.createAsyncOperation();
1072
+ this.videoIteratorManager?.destroy();
1073
+ this.audioIteratorManager?.destroy();
1074
+ this.input.dispose();
746
1075
  }
747
- getPlaybackTime() {
748
- return this.sharedAudioContext.currentTime - this.audioSyncAnchor;
749
- }
750
- setPlaybackTime(time) {
751
- this.audioSyncAnchor = this.sharedAudioContext.currentTime - time;
752
- }
753
- audioChunksForAfterResuming = [];
754
- scheduleAudioChunk(buffer, mediaTimestamp) {
755
- const targetTime = (mediaTimestamp - (this.trimBefore ?? 0) / this.fps) / this.playbackRate;
756
- const delay = targetTime + this.audioSyncAnchor - this.sharedAudioContext.currentTime;
757
- const node = this.sharedAudioContext.createBufferSource();
758
- node.buffer = buffer;
759
- node.playbackRate.value = this.playbackRate;
760
- node.connect(this.gainNode);
1076
+ scheduleAudioNode = (node, mediaTimestamp) => {
1077
+ const currentTime = this.getPlaybackTime();
1078
+ const delayWithoutPlaybackRate = mediaTimestamp - currentTime;
1079
+ const delay = delayWithoutPlaybackRate / (this.playbackRate * this.globalPlaybackRate);
761
1080
  if (delay >= 0) {
762
- node.start(targetTime + this.audioSyncAnchor);
1081
+ node.start(this.sharedAudioContext.currentTime + delay);
763
1082
  } else {
764
1083
  node.start(this.sharedAudioContext.currentTime, -delay);
765
1084
  }
766
- this.audioBufferIterator.addQueuedAudioNode(node, mediaTimestamp, buffer);
767
- node.onended = () => {
768
- return this.audioBufferIterator.removeQueuedAudioNode(node);
769
- };
1085
+ };
1086
+ getPlaybackTime() {
1087
+ return calculatePlaybackTime({
1088
+ audioSyncAnchor: this.audioSyncAnchor,
1089
+ currentTime: this.sharedAudioContext.currentTime,
1090
+ playbackRate: this.playbackRate * this.globalPlaybackRate
1091
+ });
1092
+ }
1093
+ setPlaybackTime(time, playbackRate) {
1094
+ this.audioSyncAnchor = this.sharedAudioContext.currentTime - time / playbackRate;
770
1095
  }
771
- onVideoFrame(callback) {
1096
+ setVideoFrameCallback(callback) {
772
1097
  this.onVideoFrameCallback = callback;
773
- if (this.initialized && callback && this.canvas) {
774
- callback(this.canvas);
775
- }
776
- return () => {
777
- if (this.onVideoFrameCallback === callback) {
778
- this.onVideoFrameCallback = undefined;
779
- }
780
- };
781
1098
  }
782
- drawFrame = (frame) => {
783
- if (!this.context) {
784
- throw new Error("Context not initialized");
785
- }
786
- this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
787
- this.context.drawImage(frame.canvas, 0, 0);
788
- this.debugStats.framesRendered++;
789
- this.drawDebugOverlay();
790
- if (this.onVideoFrameCallback && this.canvas) {
791
- this.onVideoFrameCallback(this.canvas);
792
- }
793
- Internals2.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Drew frame ${frame.timestamp.toFixed(3)}s`);
794
- };
795
- startAudioIterator = async (startFromSecond, nonce) => {
796
- if (!this.hasAudio())
797
- return;
798
- this.audioBufferIterator?.destroy();
799
- this.audioChunksForAfterResuming = [];
800
- const delayHandle = this.bufferState.delayPlayback();
801
- const iterator = makeAudioIterator(this.audioSink, startFromSecond);
802
- this.debugStats.audioIteratorsCreated++;
803
- this.audioBufferIterator = iterator;
804
- for (let i = 0;i < 3; i++) {
805
- const result = await iterator.getNext();
806
- if (iterator.isDestroyed()) {
807
- delayHandle.unblock();
808
- return;
809
- }
810
- if (nonce !== this.currentSeekNonce) {
811
- delayHandle.unblock();
812
- return;
813
- }
814
- if (!result.value) {
815
- delayHandle.unblock();
816
- return;
817
- }
818
- const { buffer, timestamp } = result.value;
819
- this.audioChunksForAfterResuming.push({
820
- buffer,
821
- timestamp
822
- });
823
- }
824
- delayHandle.unblock();
825
- };
826
- drawDebugOverlay() {
1099
+ drawDebugOverlay = () => {
827
1100
  if (!this.debugOverlay)
828
1101
  return;
829
1102
  if (this.context && this.canvas) {
830
1103
  drawPreviewOverlay({
831
1104
  context: this.context,
832
- stats: this.debugStats,
833
1105
  audioTime: this.sharedAudioContext.currentTime,
834
1106
  audioContextState: this.sharedAudioContext.state,
835
1107
  audioSyncAnchor: this.audioSyncAnchor,
836
- audioIterator: this.audioBufferIterator,
837
- audioChunksForAfterResuming: this.audioChunksForAfterResuming,
838
- playing: this.playing
1108
+ audioIteratorManager: this.audioIteratorManager,
1109
+ playing: this.playing,
1110
+ videoIteratorManager: this.videoIteratorManager
839
1111
  });
840
1112
  }
841
- }
842
- startVideoIterator = async (timeToSeek, nonce) => {
843
- if (!this.canvasSink) {
844
- return;
845
- }
846
- this.videoFrameIterator?.destroy();
847
- const iterator = createVideoIterator(timeToSeek, this.canvasSink);
848
- this.debugStats.videoIteratorsCreated++;
849
- this.videoFrameIterator = iterator;
850
- const delayHandle = this.bufferState.delayPlayback();
851
- const frameResult = await iterator.getNext();
852
- delayHandle.unblock();
853
- if (iterator.isDestroyed()) {
854
- return;
855
- }
856
- if (nonce !== this.currentSeekNonce) {
857
- return;
858
- }
859
- if (this.videoFrameIterator.isDestroyed()) {
860
- return;
861
- }
862
- if (!frameResult.value) {
863
- return;
864
- }
865
- this.drawFrame(frameResult.value);
866
1113
  };
867
1114
  }
868
1115
 
869
1116
  // src/show-in-timeline.ts
870
1117
  import { useMemo } from "react";
871
- import { Internals as Internals3, useVideoConfig } from "remotion";
1118
+ import { Internals as Internals4, useVideoConfig } from "remotion";
872
1119
  var useLoopDisplay = ({
873
1120
  loop,
874
1121
  mediaDurationInSeconds,
@@ -881,7 +1128,7 @@ var useLoopDisplay = ({
881
1128
  if (!loop || !mediaDurationInSeconds) {
882
1129
  return;
883
1130
  }
884
- const durationInFrames = Internals3.calculateMediaDuration({
1131
+ const durationInFrames = Internals4.calculateMediaDuration({
885
1132
  mediaDurationInFrames: mediaDurationInSeconds * fps,
886
1133
  playbackRate,
887
1134
  trimAfter,
@@ -907,7 +1154,7 @@ var useLoopDisplay = ({
907
1154
 
908
1155
  // src/use-media-in-timeline.ts
909
1156
  import { useContext, useEffect, useState } from "react";
910
- import { Internals as Internals4, useCurrentFrame } from "remotion";
1157
+ import { Internals as Internals5, useCurrentFrame } from "remotion";
911
1158
  var useMediaInTimeline = ({
912
1159
  volume,
913
1160
  mediaVolume,
@@ -923,9 +1170,9 @@ var useMediaInTimeline = ({
923
1170
  trimBefore,
924
1171
  trimAfter
925
1172
  }) => {
926
- const parentSequence = useContext(Internals4.SequenceContext);
927
- const startsAt = Internals4.useMediaStartsAt();
928
- const { registerSequence, unregisterSequence } = useContext(Internals4.SequenceManager);
1173
+ const parentSequence = useContext(Internals5.SequenceContext);
1174
+ const startsAt = Internals5.useMediaStartsAt();
1175
+ const { registerSequence, unregisterSequence } = useContext(Internals5.SequenceManager);
929
1176
  const [sequenceId] = useState(() => String(Math.random()));
930
1177
  const [mediaId] = useState(() => String(Math.random()));
931
1178
  const frame = useCurrentFrame();
@@ -937,7 +1184,7 @@ var useMediaInTimeline = ({
937
1184
  rootId,
938
1185
  isStudio,
939
1186
  finalDisplayName
940
- } = Internals4.useBasicMediaInTimeline({
1187
+ } = Internals5.useBasicMediaInTimeline({
941
1188
  volume,
942
1189
  mediaVolume,
943
1190
  mediaType,
@@ -1043,8 +1290,8 @@ var {
1043
1290
  warnAboutTooHighVolume,
1044
1291
  usePreload,
1045
1292
  SequenceContext
1046
- } = Internals5;
1047
- var NewAudioForPreview = ({
1293
+ } = Internals6;
1294
+ var AudioForPreviewAssertedShowing = ({
1048
1295
  src,
1049
1296
  playbackRate,
1050
1297
  logLevel,
@@ -1072,9 +1319,9 @@ var NewAudioForPreview = ({
1072
1319
  const globalPlaybackRate = timelineContext.playbackRate;
1073
1320
  const sharedAudioContext = useContext2(SharedAudioContext);
1074
1321
  const buffer = useBufferState();
1075
- const delayHandleRef = useRef(null);
1076
1322
  const [mediaMuted] = useMediaMutedState();
1077
1323
  const [mediaVolume] = useMediaVolumeState();
1324
+ const [mediaDurationInSeconds, setMediaDurationInSeconds] = useState2(null);
1078
1325
  const volumePropFrame = useFrameForVolumeProp(loopVolumeCurveBehavior ?? "repeat");
1079
1326
  const userPreferredVolume = evaluateVolume({
1080
1327
  frame: volumePropFrame,
@@ -1093,9 +1340,11 @@ var NewAudioForPreview = ({
1093
1340
  currentTimeRef.current = currentTime;
1094
1341
  const preloadedSrc = usePreload(src);
1095
1342
  const parentSequence = useContext2(SequenceContext);
1343
+ const isPremounting = Boolean(parentSequence?.premounting);
1344
+ const isPostmounting = Boolean(parentSequence?.postmounting);
1096
1345
  const loopDisplay = useLoopDisplay({
1097
1346
  loop,
1098
- mediaDurationInSeconds: videoConfig.durationInFrames,
1347
+ mediaDurationInSeconds,
1099
1348
  playbackRate,
1100
1349
  trimAfter,
1101
1350
  trimBefore
@@ -1115,11 +1364,11 @@ var NewAudioForPreview = ({
1115
1364
  trimAfter,
1116
1365
  trimBefore
1117
1366
  });
1118
- const buffering = useContext2(Internals5.BufferingContextReact);
1367
+ const buffering = useContext2(Internals6.BufferingContextReact);
1119
1368
  if (!buffering) {
1120
1369
  throw new Error("useMediaPlayback must be used inside a <BufferingContext>");
1121
1370
  }
1122
- const isPlayerBuffering = Internals5.useIsPlayerBuffering(buffering);
1371
+ const isPlayerBuffering = Internals6.useIsPlayerBuffering(buffering);
1123
1372
  useEffect2(() => {
1124
1373
  if (!sharedAudioContext)
1125
1374
  return;
@@ -1138,15 +1387,21 @@ var NewAudioForPreview = ({
1138
1387
  playbackRate,
1139
1388
  audioStreamIndex: audioStreamIndex ?? 0,
1140
1389
  debugOverlay: false,
1141
- bufferState: buffer
1390
+ bufferState: buffer,
1391
+ isPostmounting,
1392
+ isPremounting,
1393
+ globalPlaybackRate
1142
1394
  });
1143
1395
  mediaPlayerRef.current = player;
1144
1396
  player.initialize(currentTimeRef.current).then((result) => {
1397
+ if (result.type === "disposed") {
1398
+ return;
1399
+ }
1145
1400
  if (result.type === "unknown-container-format") {
1146
1401
  if (disallowFallbackToHtml5Audio) {
1147
1402
  throw new Error(`Unknown container format ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
1148
1403
  }
1149
- Internals5.Log.warn({ logLevel, tag: "@remotion/media" }, `Unknown container format for ${preloadedSrc} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Html5Audio>`);
1404
+ Internals6.Log.warn({ logLevel, tag: "@remotion/media" }, `Unknown container format for ${preloadedSrc} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Html5Audio>`);
1150
1405
  setShouldFallbackToNativeAudio(true);
1151
1406
  return;
1152
1407
  }
@@ -1154,7 +1409,7 @@ var NewAudioForPreview = ({
1154
1409
  if (disallowFallbackToHtml5Audio) {
1155
1410
  throw new Error(`Network error fetching ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
1156
1411
  }
1157
- Internals5.Log.warn({ logLevel, tag: "@remotion/media" }, `Network error fetching ${preloadedSrc}, falling back to <Html5Audio>`);
1412
+ Internals6.Log.warn({ logLevel, tag: "@remotion/media" }, `Network error fetching ${preloadedSrc}, falling back to <Html5Audio>`);
1158
1413
  setShouldFallbackToNativeAudio(true);
1159
1414
  return;
1160
1415
  }
@@ -1162,7 +1417,7 @@ var NewAudioForPreview = ({
1162
1417
  if (disallowFallbackToHtml5Audio) {
1163
1418
  throw new Error(`Cannot decode ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
1164
1419
  }
1165
- Internals5.Log.warn({ logLevel, tag: "@remotion/media" }, `Cannot decode ${preloadedSrc}, falling back to <Html5Audio>`);
1420
+ Internals6.Log.warn({ logLevel, tag: "@remotion/media" }, `Cannot decode ${preloadedSrc}, falling back to <Html5Audio>`);
1166
1421
  setShouldFallbackToNativeAudio(true);
1167
1422
  return;
1168
1423
  }
@@ -1170,29 +1425,26 @@ var NewAudioForPreview = ({
1170
1425
  if (disallowFallbackToHtml5Audio) {
1171
1426
  throw new Error(`No video or audio tracks found for ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
1172
1427
  }
1173
- Internals5.Log.warn({ logLevel, tag: "@remotion/media" }, `No video or audio tracks found for ${preloadedSrc}, falling back to <Html5Audio>`);
1428
+ Internals6.Log.warn({ logLevel, tag: "@remotion/media" }, `No video or audio tracks found for ${preloadedSrc}, falling back to <Html5Audio>`);
1174
1429
  setShouldFallbackToNativeAudio(true);
1175
1430
  return;
1176
1431
  }
1177
1432
  if (result.type === "success") {
1178
1433
  setMediaPlayerReady(true);
1179
- Internals5.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewAudioForPreview] MediaPlayer initialized successfully`);
1434
+ setMediaDurationInSeconds(result.durationInSeconds);
1435
+ Internals6.Log.trace({ logLevel, tag: "@remotion/media" }, `[AudioForPreview] MediaPlayer initialized successfully`);
1180
1436
  }
1181
1437
  }).catch((error) => {
1182
- Internals5.Log.error({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] Failed to initialize MediaPlayer", error);
1438
+ Internals6.Log.error({ logLevel, tag: "@remotion/media" }, "[AudioForPreview] Failed to initialize MediaPlayer", error);
1183
1439
  setShouldFallbackToNativeAudio(true);
1184
1440
  });
1185
1441
  } catch (error) {
1186
- Internals5.Log.error({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] MediaPlayer initialization failed", error);
1442
+ Internals6.Log.error({ logLevel, tag: "@remotion/media" }, "[AudioForPreview] MediaPlayer initialization failed", error);
1187
1443
  setShouldFallbackToNativeAudio(true);
1188
1444
  }
1189
1445
  return () => {
1190
- if (delayHandleRef.current) {
1191
- delayHandleRef.current.unblock();
1192
- delayHandleRef.current = null;
1193
- }
1194
1446
  if (mediaPlayerRef.current) {
1195
- Internals5.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewAudioForPreview] Disposing MediaPlayer`);
1447
+ Internals6.Log.trace({ logLevel, tag: "@remotion/media" }, `[AudioForPreview] Disposing MediaPlayer`);
1196
1448
  mediaPlayerRef.current.dispose();
1197
1449
  mediaPlayerRef.current = null;
1198
1450
  }
@@ -1211,7 +1463,10 @@ var NewAudioForPreview = ({
1211
1463
  videoConfig.fps,
1212
1464
  audioStreamIndex,
1213
1465
  disallowFallbackToHtml5Audio,
1214
- buffer
1466
+ buffer,
1467
+ isPremounting,
1468
+ isPostmounting,
1469
+ globalPlaybackRate
1215
1470
  ]);
1216
1471
  useEffect2(() => {
1217
1472
  const audioPlayer = mediaPlayerRef.current;
@@ -1227,8 +1482,8 @@ var NewAudioForPreview = ({
1227
1482
  const audioPlayer = mediaPlayerRef.current;
1228
1483
  if (!audioPlayer || !mediaPlayerReady)
1229
1484
  return;
1230
- audioPlayer.seekTo(currentTime);
1231
- Internals5.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewAudioForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
1485
+ audioPlayer.seekTo(currentTime).catch(() => {});
1486
+ Internals6.Log.trace({ logLevel, tag: "@remotion/media" }, `[AudioForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
1232
1487
  }, [currentTime, logLevel, mediaPlayerReady]);
1233
1488
  const effectiveMuted = muted || mediaMuted || userPreferredVolume <= 0;
1234
1489
  useEffect2(() => {
@@ -1244,14 +1499,20 @@ var NewAudioForPreview = ({
1244
1499
  }
1245
1500
  audioPlayer.setVolume(userPreferredVolume);
1246
1501
  }, [userPreferredVolume, mediaPlayerReady]);
1247
- const effectivePlaybackRate = useMemo2(() => playbackRate * globalPlaybackRate, [playbackRate, globalPlaybackRate]);
1248
1502
  useEffect2(() => {
1249
1503
  const audioPlayer = mediaPlayerRef.current;
1250
1504
  if (!audioPlayer || !mediaPlayerReady) {
1251
1505
  return;
1252
1506
  }
1253
- audioPlayer.setPlaybackRate(effectivePlaybackRate);
1254
- }, [effectivePlaybackRate, mediaPlayerReady]);
1507
+ audioPlayer.setPlaybackRate(playbackRate);
1508
+ }, [playbackRate, mediaPlayerReady]);
1509
+ useEffect2(() => {
1510
+ const audioPlayer = mediaPlayerRef.current;
1511
+ if (!audioPlayer || !mediaPlayerReady) {
1512
+ return;
1513
+ }
1514
+ audioPlayer.setGlobalPlaybackRate(globalPlaybackRate);
1515
+ }, [globalPlaybackRate, mediaPlayerReady]);
1255
1516
  useEffect2(() => {
1256
1517
  const audioPlayer = mediaPlayerRef.current;
1257
1518
  if (!audioPlayer || !mediaPlayerReady) {
@@ -1259,6 +1520,41 @@ var NewAudioForPreview = ({
1259
1520
  }
1260
1521
  audioPlayer.setFps(videoConfig.fps);
1261
1522
  }, [videoConfig.fps, mediaPlayerReady]);
1523
+ useEffect2(() => {
1524
+ const mediaPlayer = mediaPlayerRef.current;
1525
+ if (!mediaPlayer || !mediaPlayerReady) {
1526
+ return;
1527
+ }
1528
+ mediaPlayer.setTrimBefore(trimBefore);
1529
+ }, [trimBefore, mediaPlayerReady]);
1530
+ useEffect2(() => {
1531
+ const mediaPlayer = mediaPlayerRef.current;
1532
+ if (!mediaPlayer || !mediaPlayerReady) {
1533
+ return;
1534
+ }
1535
+ mediaPlayer.setTrimAfter(trimAfter);
1536
+ }, [trimAfter, mediaPlayerReady]);
1537
+ useEffect2(() => {
1538
+ const mediaPlayer = mediaPlayerRef.current;
1539
+ if (!mediaPlayer || !mediaPlayerReady) {
1540
+ return;
1541
+ }
1542
+ mediaPlayer.setLoop(loop);
1543
+ }, [loop, mediaPlayerReady]);
1544
+ useEffect2(() => {
1545
+ const mediaPlayer = mediaPlayerRef.current;
1546
+ if (!mediaPlayer || !mediaPlayerReady) {
1547
+ return;
1548
+ }
1549
+ mediaPlayer.setIsPremounting(isPremounting);
1550
+ }, [isPremounting, mediaPlayerReady]);
1551
+ useEffect2(() => {
1552
+ const mediaPlayer = mediaPlayerRef.current;
1553
+ if (!mediaPlayer || !mediaPlayerReady) {
1554
+ return;
1555
+ }
1556
+ mediaPlayer.setIsPostmounting(isPostmounting);
1557
+ }, [isPostmounting, mediaPlayerReady]);
1262
1558
  if (shouldFallbackToNativeAudio && !disallowFallbackToHtml5Audio) {
1263
1559
  return /* @__PURE__ */ jsx(RemotionAudio, {
1264
1560
  src,
@@ -1299,11 +1595,38 @@ var AudioForPreview = ({
1299
1595
  fallbackHtml5AudioProps
1300
1596
  }) => {
1301
1597
  const preloadedSrc = usePreload(src);
1302
- return /* @__PURE__ */ jsx(NewAudioForPreview, {
1598
+ const frame = useCurrentFrame2();
1599
+ const videoConfig = useVideoConfig2();
1600
+ const currentTime = frame / videoConfig.fps;
1601
+ const showShow = useMemo2(() => {
1602
+ return getTimeInSeconds({
1603
+ unloopedTimeInSeconds: currentTime,
1604
+ playbackRate: playbackRate ?? 1,
1605
+ loop: loop ?? false,
1606
+ trimBefore,
1607
+ trimAfter,
1608
+ mediaDurationInSeconds: Infinity,
1609
+ fps: videoConfig.fps,
1610
+ ifNoMediaDuration: "infinity",
1611
+ src
1612
+ }) !== null;
1613
+ }, [
1614
+ currentTime,
1615
+ loop,
1616
+ playbackRate,
1617
+ src,
1618
+ trimAfter,
1619
+ trimBefore,
1620
+ videoConfig.fps
1621
+ ]);
1622
+ if (!showShow) {
1623
+ return null;
1624
+ }
1625
+ return /* @__PURE__ */ jsx(AudioForPreviewAssertedShowing, {
1303
1626
  audioStreamIndex: audioStreamIndex ?? 0,
1304
1627
  src: preloadedSrc,
1305
1628
  playbackRate: playbackRate ?? 1,
1306
- logLevel: logLevel ?? window.remotion_logLevel,
1629
+ logLevel: logLevel ?? (typeof window !== "undefined" ? window.remotion_logLevel ?? "info" : "info"),
1307
1630
  muted: muted ?? false,
1308
1631
  volume: volume ?? 1,
1309
1632
  loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? "repeat",
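AudioForPreview in the hunk above is now a thin gate: it converts the current frame to seconds, asks getTimeInSeconds whether that maps to a playable time, and renders null otherwise, so AudioForPreviewAssertedShowing only mounts while there is something to play. A reduced, hypothetical sketch of the gating pattern (component and prop names are illustrative):

import React from 'react';

const Inner: React.FC<{timeInSeconds: number}> = ({timeInSeconds}) => (
  <span>{timeInSeconds.toFixed(3)}</span>
);

export const GatedPreview: React.FC<{
  frame: number;
  fps: number;
  // stand-in for getTimeInSeconds(); null means outside the trimmed/looped range
  mapTime: (unloopedTimeInSeconds: number) => number | null;
}> = ({frame, fps, mapTime}) => {
  const timeInSeconds = mapTime(frame / fps);
  if (timeInSeconds === null) {
    return null; // nothing to play at this frame, skip mounting the inner component
  }
  return <Inner timeInSeconds={timeInSeconds} />;
};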
@@ -1324,7 +1647,7 @@ import { useContext as useContext3, useLayoutEffect, useMemo as useMemo3, useSta
1324
1647
  import {
1325
1648
  cancelRender as cancelRender2,
1326
1649
  Html5Audio,
1327
- Internals as Internals12,
1650
+ Internals as Internals13,
1328
1651
  random,
1329
1652
  useCurrentFrame as useCurrentFrame3,
1330
1653
  useDelayRender,
@@ -1455,13 +1778,13 @@ var frameForVolumeProp = ({
1455
1778
  };
1456
1779
 
1457
1780
  // src/caches.ts
1458
- import { cancelRender, Internals as Internals10 } from "remotion";
1781
+ import { cancelRender, Internals as Internals11 } from "remotion";
1459
1782
 
1460
1783
  // src/audio-extraction/audio-manager.ts
1461
- import { Internals as Internals7 } from "remotion";
1784
+ import { Internals as Internals8 } from "remotion";
1462
1785
 
1463
1786
  // src/audio-extraction/audio-iterator.ts
1464
- import { Internals as Internals6 } from "remotion";
1787
+ import { Internals as Internals7 } from "remotion";
1465
1788
 
1466
1789
  // src/audio-extraction/audio-cache.ts
1467
1790
  var makeAudioCache = () => {
@@ -1539,7 +1862,7 @@ var warnAboutMatroskaOnce = (src, logLevel) => {
1539
1862
  return;
1540
1863
  }
1541
1864
  warned[src] = true;
1542
- Internals6.Log.warn({ logLevel, tag: "@remotion/media" }, `Audio from ${src} will need to be read from the beginning. https://www.remotion.dev/docs/media/support#matroska-limitation`);
1865
+ Internals7.Log.warn({ logLevel, tag: "@remotion/media" }, `Audio from ${src} will need to be read from the beginning. https://www.remotion.dev/docs/media/support#matroska-limitation`);
1543
1866
  };
1544
1867
  var makeAudioIterator2 = ({
1545
1868
  audioSampleSink,
@@ -1607,7 +1930,7 @@ var makeAudioIterator2 = ({
1607
1930
  if (openTimestamps.length > 0) {
1608
1931
  const first = openTimestamps[0];
1609
1932
  const last = openTimestamps[openTimestamps.length - 1];
1610
- Internals6.Log.verbose({ logLevel, tag: "@remotion/media" }, "Open audio samples for src", src, `${first.toFixed(3)}...${last.toFixed(3)}`);
1933
+ Internals7.Log.verbose({ logLevel, tag: "@remotion/media" }, "Open audio samples for src", src, `${first.toFixed(3)}...${last.toFixed(3)}`);
1611
1934
  }
1612
1935
  };
1613
1936
  const getCacheStats = () => {
@@ -1704,7 +2027,7 @@ var makeAudioManager = () => {
1704
2027
  if (seenKeys.has(key)) {
1705
2028
  iterator.prepareForDeletion();
1706
2029
  iterators.splice(iterators.indexOf(iterator), 1);
1707
- Internals7.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted duplicate iterator for ${iterator.src}`);
2030
+ Internals8.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted duplicate iterator for ${iterator.src}`);
1708
2031
  }
1709
2032
  seenKeys.add(key);
1710
2033
  }
@@ -1786,7 +2109,7 @@ var makeAudioManager = () => {
1786
2109
  };
1787
2110
 
1788
2111
  // src/video-extraction/keyframe-manager.ts
1789
- import { Internals as Internals9 } from "remotion";
2112
+ import { Internals as Internals10 } from "remotion";
1790
2113
 
1791
2114
  // src/browser-can-use-webgl2.ts
1792
2115
  var browserCanUseWebGl2 = null;
@@ -1827,7 +2150,7 @@ import {
1827
2150
  } from "mediabunny";
1828
2151
 
1829
2152
  // src/video-extraction/keyframe-bank.ts
1830
- import { Internals as Internals8 } from "remotion";
2153
+ import { Internals as Internals9 } from "remotion";
1831
2154
  var makeKeyframeBank = ({
1832
2155
  startTimestampInSeconds,
1833
2156
  endTimestampInSeconds,
@@ -1835,7 +2158,7 @@ var makeKeyframeBank = ({
1835
2158
  logLevel: parentLogLevel,
1836
2159
  src
1837
2160
  }) => {
1838
- Internals8.Log.verbose({ logLevel: parentLogLevel, tag: "@remotion/media" }, `Creating keyframe bank from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
2161
+ Internals9.Log.verbose({ logLevel: parentLogLevel, tag: "@remotion/media" }, `Creating keyframe bank from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
1839
2162
  const frames = {};
1840
2163
  const frameTimestamps = [];
1841
2164
  let lastUsed = Date.now();
@@ -1862,7 +2185,7 @@ var makeKeyframeBank = ({
1862
2185
  }
1863
2186
  }
1864
2187
  if (deletedTimestamps.length > 0) {
1865
- Internals8.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted ${deletedTimestamps.length} frame${deletedTimestamps.length === 1 ? "" : "s"} ${renderTimestampRange(deletedTimestamps)} for src ${src} because it is lower than ${timestampInSeconds}. Remaining: ${renderTimestampRange(frameTimestamps)}`);
2188
+ Internals9.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted ${deletedTimestamps.length} frame${deletedTimestamps.length === 1 ? "" : "s"} ${renderTimestampRange(deletedTimestamps)} for src ${src} because it is lower than ${timestampInSeconds}. Remaining: ${renderTimestampRange(frameTimestamps)}`);
1866
2189
  }
1867
2190
  };
1868
2191
  const hasDecodedEnoughForTimestamp = (timestamp) => {
@@ -1922,7 +2245,7 @@ var makeKeyframeBank = ({
1922
2245
  return await getFrameFromTimestamp(timestamp) !== null;
1923
2246
  };
1924
2247
  const prepareForDeletion = (logLevel) => {
1925
- Internals8.Log.verbose({ logLevel, tag: "@remotion/media" }, `Preparing for deletion of keyframe bank from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
2248
+ Internals9.Log.verbose({ logLevel, tag: "@remotion/media" }, `Preparing for deletion of keyframe bank from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
1926
2249
  sampleIterator.return().then((result) => {
1927
2250
  if (result.value) {
1928
2251
  result.value.close();
@@ -2115,10 +2438,10 @@ var makeKeyframeManager = () => {
2115
2438
  if (size === 0) {
2116
2439
  continue;
2117
2440
  }
2118
- Internals9.Log.verbose({ logLevel, tag: "@remotion/media" }, `Open frames for src ${src}: ${renderTimestampRange(timestamps)}`);
2441
+ Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `Open frames for src ${src}: ${renderTimestampRange(timestamps)}`);
2119
2442
  }
2120
2443
  }
2121
- Internals9.Log.verbose({ logLevel, tag: "@remotion/media" }, `Video cache stats: ${count} open frames, ${totalSize} bytes`);
2444
+ Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `Video cache stats: ${count} open frames, ${totalSize} bytes`);
2122
2445
  };
2123
2446
  const getCacheStats = async () => {
2124
2447
  let count = 0;
@@ -2167,7 +2490,7 @@ var makeKeyframeManager = () => {
2167
2490
  if (mostInThePastBank) {
2168
2491
  const { framesDeleted } = mostInThePastBank.prepareForDeletion(logLevel);
2169
2492
  delete sources[mostInThePastSrc][mostInThePastBank.startTimestampInSeconds];
2170
- Internals9.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted ${framesDeleted} frames for src ${mostInThePastSrc} from ${mostInThePastBank.startTimestampInSeconds}sec to ${mostInThePastBank.endTimestampInSeconds}sec to free up memory.`);
2493
+ Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted ${framesDeleted} frames for src ${mostInThePastSrc} from ${mostInThePastBank.startTimestampInSeconds}sec to ${mostInThePastBank.endTimestampInSeconds}sec to free up memory.`);
2171
2494
  }
2172
2495
  return { finish: false };
2173
2496
  };
@@ -2179,7 +2502,7 @@ var makeKeyframeManager = () => {
2179
2502
  if (finish) {
2180
2503
  break;
2181
2504
  }
2182
- Internals9.Log.verbose({ logLevel, tag: "@remotion/media" }, "Deleted oldest keyframe bank to stay under max cache size", (cacheStats.totalSize / 1024 / 1024).toFixed(1), "out of", (maxCacheSize / 1024 / 1024).toFixed(1));
2505
+ Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, "Deleted oldest keyframe bank to stay under max cache size", (cacheStats.totalSize / 1024 / 1024).toFixed(1), "out of", (maxCacheSize / 1024 / 1024).toFixed(1));
2183
2506
  cacheStats = await getTotalCacheStats();
2184
2507
  }
2185
2508
  };
@@ -2198,7 +2521,7 @@ var makeKeyframeManager = () => {
2198
2521
  const { endTimestampInSeconds, startTimestampInSeconds } = bank;
2199
2522
  if (endTimestampInSeconds < threshold) {
2200
2523
  bank.prepareForDeletion(logLevel);
2201
- Internals9.Log.verbose({ logLevel, tag: "@remotion/media" }, `[Video] Cleared frames for src ${src} from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
2524
+ Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `[Video] Cleared frames for src ${src} from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
2202
2525
  delete sources[src][startTimeInSeconds];
2203
2526
  } else {
2204
2527
  bank.deleteFramesBeforeTimestamp({
@@ -2242,7 +2565,7 @@ var makeKeyframeManager = () => {
2242
2565
  if (await (await existingBank).hasTimestampInSecond(timestamp)) {
2243
2566
  return existingBank;
2244
2567
  }
2245
- Internals9.Log.verbose({ logLevel, tag: "@remotion/media" }, `Keyframe bank exists but frame at time ${timestamp} does not exist anymore.`);
2568
+ Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `Keyframe bank exists but frame at time ${timestamp} does not exist anymore.`);
2246
2569
  await (await existingBank).prepareForDeletion(logLevel);
2247
2570
  delete sources[src][startTimestampInSeconds];
2248
2571
  const replacementKeybank = getFramesSinceKeyframe({
@@ -2331,20 +2654,20 @@ var getUncachedMaxCacheSize = (logLevel) => {
2331
2654
  if (window.remotion_mediaCacheSizeInBytes > 20000 * 1024 * 1024) {
2332
2655
  cancelRender(new Error(`The maximum value for the "mediaCacheSizeInBytes" prop is 20GB (${20000 * 1024 * 1024}), got: ${window.remotion_mediaCacheSizeInBytes}`));
2333
2656
  }
2334
- Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set using "mediaCacheSizeInBytes": ${(window.remotion_mediaCacheSizeInBytes / 1024 / 1024).toFixed(1)} MB`);
2657
+ Internals11.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set using "mediaCacheSizeInBytes": ${(window.remotion_mediaCacheSizeInBytes / 1024 / 1024).toFixed(1)} MB`);
2335
2658
  return window.remotion_mediaCacheSizeInBytes;
2336
2659
  }
2337
2660
  if (typeof window !== "undefined" && window.remotion_initialMemoryAvailable !== undefined && window.remotion_initialMemoryAvailable !== null) {
2338
2661
  const value = window.remotion_initialMemoryAvailable / 2;
2339
2662
  if (value < 500 * 1024 * 1024) {
2340
- Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on minimum value of 500MB (which is more than half of the available system memory!)`);
2663
+ Internals11.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on minimum value of 500MB (which is more than half of the available system memory!)`);
2341
2664
  return 500 * 1024 * 1024;
2342
2665
  }
2343
2666
  if (value > 20000 * 1024 * 1024) {
2344
- Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on maximum value of 20GB (which is less than half of the available system memory)`);
2667
+ Internals11.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on maximum value of 20GB (which is less than half of the available system memory)`);
2345
2668
  return 20000 * 1024 * 1024;
2346
2669
  }
2347
- Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on available memory (50% of available memory): ${(value / 1024 / 1024).toFixed(1)} MB`);
2670
+ Internals11.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on available memory (50% of available memory): ${(value / 1024 / 1024).toFixed(1)} MB`);
2348
2671
  return value;
2349
2672
  }
2350
2673
  return 1000 * 1000 * 1000;
@@ -2431,12 +2754,12 @@ var convertAudioData = ({
2431
2754
  };
2432
2755
 
2433
2756
  // src/get-sink.ts
2434
- import { Internals as Internals11 } from "remotion";
2757
+ import { Internals as Internals12 } from "remotion";
2435
2758
  var sinkPromises = {};
2436
2759
  var getSink = (src, logLevel) => {
2437
2760
  let promise = sinkPromises[src];
2438
2761
  if (!promise) {
2439
- Internals11.Log.verbose({
2762
+ Internals12.Log.verbose({
2440
2763
  logLevel,
2441
2764
  tag: "@remotion/media"
2442
2765
  }, `Sink for ${src} was not found, creating new sink`);
@@ -2929,10 +3252,10 @@ var AudioForRendering = ({
2929
3252
  trimBefore
2930
3253
  }) => {
2931
3254
  const frame = useCurrentFrame3();
2932
- const absoluteFrame = Internals12.useTimelinePosition();
2933
- const videoConfig = Internals12.useUnsafeVideoConfig();
2934
- const { registerRenderAsset, unregisterRenderAsset } = useContext3(Internals12.RenderAssetManager);
2935
- const startsAt = Internals12.useMediaStartsAt();
3255
+ const absoluteFrame = Internals13.useTimelinePosition();
3256
+ const videoConfig = Internals13.useUnsafeVideoConfig();
3257
+ const { registerRenderAsset, unregisterRenderAsset } = useContext3(Internals13.RenderAssetManager);
3258
+ const startsAt = Internals13.useMediaStartsAt();
2936
3259
  const environment = useRemotionEnvironment();
2937
3260
  if (!videoConfig) {
2938
3261
  throw new Error("No video config found");
@@ -2943,7 +3266,7 @@ var AudioForRendering = ({
2943
3266
  const { fps } = videoConfig;
2944
3267
  const { delayRender, continueRender } = useDelayRender();
2945
3268
  const [replaceWithHtml5Audio, setReplaceWithHtml5Audio] = useState3(false);
2946
- const sequenceContext = useContext3(Internals12.SequenceContext);
3269
+ const sequenceContext = useContext3(Internals13.SequenceContext);
2947
3270
  const id = useMemo3(() => `media-video-${random(src)}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [
2948
3271
  src,
2949
3272
  sequenceContext?.cumulatedFrom,
@@ -2988,7 +3311,7 @@ var AudioForRendering = ({
2988
3311
  if (disallowFallbackToHtml5Audio) {
2989
3312
  cancelRender2(new Error(`Unknown container format ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
2990
3313
  }
2991
- Internals12.Log.warn({
3314
+ Internals13.Log.warn({
2992
3315
  logLevel: logLevel ?? window.remotion_logLevel,
2993
3316
  tag: "@remotion/media"
2994
3317
  }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Html5Audio>`);
@@ -2999,7 +3322,7 @@ var AudioForRendering = ({
2999
3322
  if (disallowFallbackToHtml5Audio) {
3000
3323
  cancelRender2(new Error(`Cannot decode ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
3001
3324
  }
3002
- Internals12.Log.warn({
3325
+ Internals13.Log.warn({
3003
3326
  logLevel: logLevel ?? window.remotion_logLevel,
3004
3327
  tag: "@remotion/media"
3005
3328
  }, `Cannot decode ${src}, falling back to <Html5Audio>`);
@@ -3013,7 +3336,7 @@ var AudioForRendering = ({
3013
3336
  if (disallowFallbackToHtml5Audio) {
3014
3337
  cancelRender2(new Error(`Cannot decode ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
3015
3338
  }
3016
- Internals12.Log.warn({
3339
+ Internals13.Log.warn({
3017
3340
  logLevel: logLevel ?? window.remotion_logLevel,
3018
3341
  tag: "@remotion/media"
3019
3342
  }, `Network error fetching ${src}, falling back to <Html5Audio>`);
@@ -3029,12 +3352,12 @@ var AudioForRendering = ({
3029
3352
  frame,
3030
3353
  startsAt
3031
3354
  });
3032
- const volume = Internals12.evaluateVolume({
3355
+ const volume = Internals13.evaluateVolume({
3033
3356
  volume: volumeProp,
3034
3357
  frame: volumePropsFrame,
3035
3358
  mediaVolume: 1
3036
3359
  });
3037
- Internals12.warnAboutTooHighVolume(volume);
3360
+ Internals13.warnAboutTooHighVolume(volume);
3038
3361
  if (audio && volume > 0) {
3039
3362
  applyVolume(audio.data, volume);
3040
3363
  registerRenderAsset({
@@ -3107,7 +3430,7 @@ var AudioForRendering = ({
3107
3430
 
3108
3431
  // src/audio/audio.tsx
3109
3432
  import { jsx as jsx3 } from "react/jsx-runtime";
3110
- var { validateMediaProps } = Internals13;
3433
+ var { validateMediaProps } = Internals14;
3111
3434
  var Audio = (props) => {
3112
3435
  const { name, stack, showInTimeline, ...otherProps } = props;
3113
3436
  const environment = useRemotionEnvironment2();
@@ -3126,10 +3449,10 @@ var Audio = (props) => {
3126
3449
  stack: stack ?? null
3127
3450
  });
3128
3451
  };
3129
- Internals13.addSequenceStackTraces(Audio);
3452
+ Internals14.addSequenceStackTraces(Audio);
3130
3453
 
3131
3454
  // src/video/video.tsx
3132
- import { Internals as Internals16, useRemotionEnvironment as useRemotionEnvironment4 } from "remotion";
3455
+ import { Internals as Internals17, useRemotionEnvironment as useRemotionEnvironment4 } from "remotion";
3133
3456
 
3134
3457
  // src/video/video-for-preview.tsx
3135
3458
  import {
@@ -3140,7 +3463,13 @@ import {
3140
3463
  useRef as useRef2,
3141
3464
  useState as useState4
3142
3465
  } from "react";
3143
- import { Html5Video, Internals as Internals14, useBufferState as useBufferState2, useCurrentFrame as useCurrentFrame4 } from "remotion";
3466
+ import {
3467
+ Html5Video,
3468
+ Internals as Internals15,
3469
+ useBufferState as useBufferState2,
3470
+ useCurrentFrame as useCurrentFrame4,
3471
+ useVideoConfig as useVideoConfig3
3472
+ } from "remotion";
3144
3473
  import { jsx as jsx4 } from "react/jsx-runtime";
3145
3474
  var {
3146
3475
  useUnsafeVideoConfig: useUnsafeVideoConfig2,
@@ -3154,8 +3483,8 @@ var {
3154
3483
  usePreload: usePreload2,
3155
3484
  SequenceContext: SequenceContext2,
3156
3485
  SequenceVisibilityToggleContext
3157
- } = Internals14;
3158
- var VideoForPreview = ({
3486
+ } = Internals15;
3487
+ var VideoForPreviewAssertedShowing = ({
3159
3488
  src: unpreloadedSrc,
3160
3489
  style,
3161
3490
  playbackRate,
@@ -3200,6 +3529,8 @@ var VideoForPreview = ({
3200
3529
  });
3201
3530
  warnAboutTooHighVolume2(userPreferredVolume);
3202
3531
  const parentSequence = useContext4(SequenceContext2);
3532
+ const isPremounting = Boolean(parentSequence?.premounting);
3533
+ const isPostmounting = Boolean(parentSequence?.postmounting);
3203
3534
  const loopDisplay = useLoopDisplay({
3204
3535
  loop,
3205
3536
  mediaDurationInSeconds,
@@ -3230,11 +3561,11 @@ var VideoForPreview = ({
3230
3561
  const currentTimeRef = useRef2(currentTime);
3231
3562
  currentTimeRef.current = currentTime;
3232
3563
  const preloadedSrc = usePreload2(src);
3233
- const buffering = useContext4(Internals14.BufferingContextReact);
3564
+ const buffering = useContext4(Internals15.BufferingContextReact);
3234
3565
  if (!buffering) {
3235
3566
  throw new Error("useMediaPlayback must be used inside a <BufferingContext>");
3236
3567
  }
3237
- const isPlayerBuffering = Internals14.useIsPlayerBuffering(buffering);
3568
+ const isPlayerBuffering = Internals15.useIsPlayerBuffering(buffering);
3238
3569
  useEffect3(() => {
3239
3570
  if (!canvasRef.current)
3240
3571
  return;
@@ -3255,7 +3586,10 @@ var VideoForPreview = ({
3255
3586
  playbackRate,
3256
3587
  audioStreamIndex,
3257
3588
  debugOverlay,
3258
- bufferState: buffer
3589
+ bufferState: buffer,
3590
+ isPremounting,
3591
+ isPostmounting,
3592
+ globalPlaybackRate
3259
3593
  });
3260
3594
  mediaPlayerRef.current = player;
3261
3595
  player.initialize(currentTimeRef.current).then((result) => {
@@ -3266,7 +3600,7 @@ var VideoForPreview = ({
3266
3600
  if (disallowFallbackToOffthreadVideo) {
3267
3601
  throw new Error(`Unknown container format ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
3268
3602
  }
3269
- Internals14.Log.warn({ logLevel, tag: "@remotion/media" }, `Unknown container format for ${preloadedSrc} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <OffthreadVideo>`);
3603
+ Internals15.Log.warn({ logLevel, tag: "@remotion/media" }, `Unknown container format for ${preloadedSrc} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <OffthreadVideo>`);
3270
3604
  setShouldFallbackToNativeVideo(true);
3271
3605
  return;
3272
3606
  }
@@ -3274,7 +3608,7 @@ var VideoForPreview = ({
3274
3608
  if (disallowFallbackToOffthreadVideo) {
3275
3609
  throw new Error(`Network error fetching ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
3276
3610
  }
3277
- Internals14.Log.warn({ logLevel, tag: "@remotion/media" }, `Network error fetching ${preloadedSrc}, falling back to <OffthreadVideo>`);
3611
+ Internals15.Log.warn({ logLevel, tag: "@remotion/media" }, `Network error fetching ${preloadedSrc}, falling back to <OffthreadVideo>`);
3278
3612
  setShouldFallbackToNativeVideo(true);
3279
3613
  return;
3280
3614
  }
@@ -3282,7 +3616,7 @@ var VideoForPreview = ({
3282
3616
  if (disallowFallbackToOffthreadVideo) {
3283
3617
  throw new Error(`Cannot decode ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
3284
3618
  }
3285
- Internals14.Log.warn({ logLevel, tag: "@remotion/media" }, `Cannot decode ${preloadedSrc}, falling back to <OffthreadVideo>`);
3619
+ Internals15.Log.warn({ logLevel, tag: "@remotion/media" }, `Cannot decode ${preloadedSrc}, falling back to <OffthreadVideo>`);
3286
3620
  setShouldFallbackToNativeVideo(true);
3287
3621
  return;
3288
3622
  }
@@ -3290,7 +3624,7 @@ var VideoForPreview = ({
3290
3624
  if (disallowFallbackToOffthreadVideo) {
3291
3625
  throw new Error(`No video or audio tracks found for ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
3292
3626
  }
3293
- Internals14.Log.warn({ logLevel, tag: "@remotion/media" }, `No video or audio tracks found for ${preloadedSrc}, falling back to <OffthreadVideo>`);
3627
+ Internals15.Log.warn({ logLevel, tag: "@remotion/media" }, `No video or audio tracks found for ${preloadedSrc}, falling back to <OffthreadVideo>`);
3294
3628
  setShouldFallbackToNativeVideo(true);
3295
3629
  return;
3296
3630
  }
@@ -3299,16 +3633,16 @@ var VideoForPreview = ({
3299
3633
  setMediaDurationInSeconds(result.durationInSeconds);
3300
3634
  }
3301
3635
  }).catch((error) => {
3302
- Internals14.Log.error({ logLevel, tag: "@remotion/media" }, "[VideoForPreview] Failed to initialize MediaPlayer", error);
3636
+ Internals15.Log.error({ logLevel, tag: "@remotion/media" }, "[VideoForPreview] Failed to initialize MediaPlayer", error);
3303
3637
  setShouldFallbackToNativeVideo(true);
3304
3638
  });
3305
3639
  } catch (error) {
3306
- Internals14.Log.error({ logLevel, tag: "@remotion/media" }, "[VideoForPreview] MediaPlayer initialization failed", error);
3640
+ Internals15.Log.error({ logLevel, tag: "@remotion/media" }, "[VideoForPreview] MediaPlayer initialization failed", error);
3307
3641
  setShouldFallbackToNativeVideo(true);
3308
3642
  }
3309
3643
  return () => {
3310
3644
  if (mediaPlayerRef.current) {
3311
- Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, `[VideoForPreview] Disposing MediaPlayer`);
3645
+ Internals15.Log.trace({ logLevel, tag: "@remotion/media" }, `[VideoForPreview] Disposing MediaPlayer`);
3312
3646
  mediaPlayerRef.current.dispose();
3313
3647
  mediaPlayerRef.current = null;
3314
3648
  }
@@ -3327,10 +3661,13 @@ var VideoForPreview = ({
3327
3661
  disallowFallbackToOffthreadVideo,
3328
3662
  audioStreamIndex,
3329
3663
  debugOverlay,
3330
- buffer
3664
+ buffer,
3665
+ isPremounting,
3666
+ isPostmounting,
3667
+ globalPlaybackRate
3331
3668
  ]);
3332
3669
  const classNameValue = useMemo4(() => {
3333
- return [Internals14.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals14.truthy).join(" ");
3670
+ return [Internals15.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals15.truthy).join(" ");
3334
3671
  }, [className]);
3335
3672
  useEffect3(() => {
3336
3673
  const mediaPlayer = mediaPlayerRef.current;
@@ -3346,8 +3683,8 @@ var VideoForPreview = ({
3346
3683
  const mediaPlayer = mediaPlayerRef.current;
3347
3684
  if (!mediaPlayer || !mediaPlayerReady)
3348
3685
  return;
3349
- mediaPlayer.seekTo(currentTime);
3350
- Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, `[VideoForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
3686
+ mediaPlayer.seekTo(currentTime).catch(() => {});
3687
+ Internals15.Log.trace({ logLevel, tag: "@remotion/media" }, `[VideoForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
3351
3688
  }, [currentTime, logLevel, mediaPlayerReady]);
3352
3689
  const effectiveMuted = isSequenceHidden || muted || mediaMuted || userPreferredVolume <= 0;
3353
3690
  useEffect3(() => {
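In the hunk above, the promise returned by seekTo is now terminated with .catch(() => {}), presumably so that a seek that fails or is superseded does not surface as an unhandled promise rejection. A small illustration of the fire-and-forget pattern with a hypothetical async seek:

// Hypothetical async seek, only to illustrate the pattern.
const seekTo = async (timeInSeconds: number): Promise<void> => {
  if (timeInSeconds < 0) {
    throw new Error('Cannot seek to a negative time');
  }
  // ...decoding work would happen here...
};

// The effect does not await the seek, so a rejection must be handled
// (or deliberately ignored) to avoid unhandled-rejection noise.
seekTo(1.5).catch(() => {});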
@@ -3370,14 +3707,20 @@ var VideoForPreview = ({
3370
3707
  }
3371
3708
  mediaPlayer.setDebugOverlay(debugOverlay);
3372
3709
  }, [debugOverlay, mediaPlayerReady]);
3373
- const effectivePlaybackRate = useMemo4(() => playbackRate * globalPlaybackRate, [playbackRate, globalPlaybackRate]);
3374
3710
  useEffect3(() => {
3375
3711
  const mediaPlayer = mediaPlayerRef.current;
3376
3712
  if (!mediaPlayer || !mediaPlayerReady) {
3377
3713
  return;
3378
3714
  }
3379
- mediaPlayer.setPlaybackRate(effectivePlaybackRate);
3380
- }, [effectivePlaybackRate, mediaPlayerReady]);
3715
+ mediaPlayer.setPlaybackRate(playbackRate);
3716
+ }, [playbackRate, mediaPlayerReady]);
3717
+ useEffect3(() => {
3718
+ const mediaPlayer = mediaPlayerRef.current;
3719
+ if (!mediaPlayer || !mediaPlayerReady) {
3720
+ return;
3721
+ }
3722
+ mediaPlayer.setGlobalPlaybackRate(globalPlaybackRate);
3723
+ }, [globalPlaybackRate, mediaPlayerReady]);
3381
3724
  useEffect3(() => {
3382
3725
  const mediaPlayer = mediaPlayerRef.current;
3383
3726
  if (!mediaPlayer || !mediaPlayerReady) {
@@ -3385,6 +3728,20 @@ var VideoForPreview = ({
3385
3728
  }
3386
3729
  mediaPlayer.setLoop(loop);
3387
3730
  }, [loop, mediaPlayerReady]);
3731
+ useEffect3(() => {
3732
+ const mediaPlayer = mediaPlayerRef.current;
3733
+ if (!mediaPlayer || !mediaPlayerReady) {
3734
+ return;
3735
+ }
3736
+ mediaPlayer.setIsPremounting(isPremounting);
3737
+ }, [isPremounting, mediaPlayerReady]);
3738
+ useEffect3(() => {
3739
+ const mediaPlayer = mediaPlayerRef.current;
3740
+ if (!mediaPlayer || !mediaPlayerReady) {
3741
+ return;
3742
+ }
3743
+ mediaPlayer.setIsPostmounting(isPostmounting);
3744
+ }, [isPostmounting, mediaPlayerReady]);
3388
3745
  useEffect3(() => {
3389
3746
  const mediaPlayer = mediaPlayerRef.current;
3390
3747
  if (!mediaPlayer || !mediaPlayerReady) {
@@ -3394,14 +3751,25 @@ var VideoForPreview = ({
3394
3751
  }, [videoConfig.fps, mediaPlayerReady]);
3395
3752
  useEffect3(() => {
3396
3753
  const mediaPlayer = mediaPlayerRef.current;
3397
- if (!mediaPlayer || !mediaPlayerReady || !onVideoFrame) {
3754
+ if (!mediaPlayer || !mediaPlayerReady) {
3398
3755
  return;
3399
3756
  }
3400
- const unsubscribe = mediaPlayer.onVideoFrame(onVideoFrame);
3401
- return () => {
3402
- unsubscribe();
3403
- };
3757
+ mediaPlayer.setVideoFrameCallback(onVideoFrame ?? null);
3404
3758
  }, [onVideoFrame, mediaPlayerReady]);
3759
+ useEffect3(() => {
3760
+ const mediaPlayer = mediaPlayerRef.current;
3761
+ if (!mediaPlayer || !mediaPlayerReady) {
3762
+ return;
3763
+ }
3764
+ mediaPlayer.setTrimBefore(trimBefore);
3765
+ }, [trimBefore, mediaPlayerReady]);
3766
+ useEffect3(() => {
3767
+ const mediaPlayer = mediaPlayerRef.current;
3768
+ if (!mediaPlayer || !mediaPlayerReady) {
3769
+ return;
3770
+ }
3771
+ mediaPlayer.setTrimAfter(trimAfter);
3772
+ }, [trimAfter, mediaPlayerReady]);
3405
3773
  const actualStyle = useMemo4(() => {
3406
3774
  return {
3407
3775
  ...style,
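The onVideoFrame wiring in the hunk above moves from a subscription API (onVideoFrame returning an unsubscribe function) to a single setVideoFrameCallback(onVideoFrame ?? null) setter, where null clears the callback. A hedged sketch of that setter style with hypothetical names:

type VideoFrameCallback = (frame: CanvasImageSource) => void;

// Hypothetical holder mirroring the single-mutable-callback style.
const makeFrameCallbackSlot = () => {
  let callback: VideoFrameCallback | null = null;
  return {
    setVideoFrameCallback: (cb: VideoFrameCallback | null) => {
      callback = cb; // null clears the previous callback, no unsubscribe handle needed
    },
    emit: (frame: CanvasImageSource) => {
      callback?.(frame);
    },
  };
};

const slot = makeFrameCallbackSlot();
slot.setVideoFrameCallback((frame) => console.log('got frame', frame));
slot.setVideoFrameCallback(null); // plays the role of the old unsubscribe()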
@@ -3434,6 +3802,38 @@ var VideoForPreview = ({
3434
3802
  className: classNameValue
3435
3803
  });
3436
3804
  };
3805
+ var VideoForPreview = (props) => {
3806
+ const frame = useCurrentFrame4();
3807
+ const videoConfig = useVideoConfig3();
3808
+ const currentTime = frame / videoConfig.fps;
3809
+ const showShow = useMemo4(() => {
3810
+ return getTimeInSeconds({
3811
+ unloopedTimeInSeconds: currentTime,
3812
+ playbackRate: props.playbackRate,
3813
+ loop: props.loop,
3814
+ trimBefore: props.trimBefore,
3815
+ trimAfter: props.trimAfter,
3816
+ mediaDurationInSeconds: Infinity,
3817
+ fps: videoConfig.fps,
3818
+ ifNoMediaDuration: "infinity",
3819
+ src: props.src
3820
+ }) !== null;
3821
+ }, [
3822
+ currentTime,
3823
+ props.loop,
3824
+ props.playbackRate,
3825
+ props.src,
3826
+ props.trimAfter,
3827
+ props.trimBefore,
3828
+ videoConfig.fps
3829
+ ]);
3830
+ if (!showShow) {
3831
+ return null;
3832
+ }
3833
+ return /* @__PURE__ */ jsx4(VideoForPreviewAssertedShowing, {
3834
+ ...props
3835
+ });
3836
+ };
3437
3837
 
3438
3838
  // src/video/video-for-rendering.tsx
3439
3839
  import {
@@ -3445,13 +3845,13 @@ import {
3445
3845
  } from "react";
3446
3846
  import {
3447
3847
  cancelRender as cancelRender3,
3448
- Internals as Internals15,
3848
+ Internals as Internals16,
3449
3849
  Loop,
3450
3850
  random as random2,
3451
3851
  useCurrentFrame as useCurrentFrame5,
3452
3852
  useDelayRender as useDelayRender2,
3453
3853
  useRemotionEnvironment as useRemotionEnvironment3,
3454
- useVideoConfig as useVideoConfig2
3854
+ useVideoConfig as useVideoConfig4
3455
3855
  } from "remotion";
3456
3856
  import { jsx as jsx5 } from "react/jsx-runtime";
3457
3857
  var VideoForRendering = ({
@@ -3480,11 +3880,11 @@ var VideoForRendering = ({
3480
3880
  throw new TypeError("No `src` was passed to <Video>.");
3481
3881
  }
3482
3882
  const frame = useCurrentFrame5();
3483
- const absoluteFrame = Internals15.useTimelinePosition();
3484
- const { fps } = useVideoConfig2();
3485
- const { registerRenderAsset, unregisterRenderAsset } = useContext5(Internals15.RenderAssetManager);
3486
- const startsAt = Internals15.useMediaStartsAt();
3487
- const sequenceContext = useContext5(Internals15.SequenceContext);
3883
+ const absoluteFrame = Internals16.useTimelinePosition();
3884
+ const { fps } = useVideoConfig4();
3885
+ const { registerRenderAsset, unregisterRenderAsset } = useContext5(Internals16.RenderAssetManager);
3886
+ const startsAt = Internals16.useMediaStartsAt();
3887
+ const sequenceContext = useContext5(Internals16.SequenceContext);
3488
3888
  const id = useMemo5(() => `media-video-${random2(src)}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [
3489
3889
  src,
3490
3890
  sequenceContext?.cumulatedFrom,
@@ -3495,8 +3895,8 @@ var VideoForRendering = ({
3495
3895
  const { delayRender, continueRender } = useDelayRender2();
3496
3896
  const canvasRef = useRef3(null);
3497
3897
  const [replaceWithOffthreadVideo, setReplaceWithOffthreadVideo] = useState5(false);
3498
- const audioEnabled = Internals15.useAudioEnabled();
3499
- const videoEnabled = Internals15.useVideoEnabled();
3898
+ const audioEnabled = Internals16.useAudioEnabled();
3899
+ const videoEnabled = Internals16.useVideoEnabled();
3500
3900
  useLayoutEffect3(() => {
3501
3901
  if (!canvasRef.current) {
3502
3902
  return;
@@ -3539,7 +3939,7 @@ var VideoForRendering = ({
3539
3939
  cancelRender3(new Error(`Unknown container format ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
3540
3940
  }
3541
3941
  if (window.remotion_isMainTab) {
3542
- Internals15.Log.info({ logLevel, tag: "@remotion/media" }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <OffthreadVideo>`);
3942
+ Internals16.Log.info({ logLevel, tag: "@remotion/media" }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <OffthreadVideo>`);
3543
3943
  }
3544
3944
  setReplaceWithOffthreadVideo({ durationInSeconds: null });
3545
3945
  return;
@@ -3549,7 +3949,7 @@ var VideoForRendering = ({
3549
3949
  cancelRender3(new Error(`Cannot decode ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
3550
3950
  }
3551
3951
  if (window.remotion_isMainTab) {
3552
- Internals15.Log.info({ logLevel, tag: "@remotion/media" }, `Cannot decode ${src}, falling back to <OffthreadVideo>`);
3952
+ Internals16.Log.info({ logLevel, tag: "@remotion/media" }, `Cannot decode ${src}, falling back to <OffthreadVideo>`);
3553
3953
  }
3554
3954
  setReplaceWithOffthreadVideo({
3555
3955
  durationInSeconds: result.durationInSeconds
@@ -3561,7 +3961,7 @@ var VideoForRendering = ({
3561
3961
  cancelRender3(new Error(`Cannot decode alpha component for ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
3562
3962
  }
3563
3963
  if (window.remotion_isMainTab) {
3564
- Internals15.Log.info({ logLevel, tag: "@remotion/media" }, `Cannot decode alpha component for ${src}, falling back to <OffthreadVideo>`);
3964
+ Internals16.Log.info({ logLevel, tag: "@remotion/media" }, `Cannot decode alpha component for ${src}, falling back to <OffthreadVideo>`);
3565
3965
  }
3566
3966
  setReplaceWithOffthreadVideo({
3567
3967
  durationInSeconds: result.durationInSeconds
@@ -3573,7 +3973,7 @@ var VideoForRendering = ({
3573
3973
  cancelRender3(new Error(`Cannot decode ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
3574
3974
  }
3575
3975
  if (window.remotion_isMainTab) {
3576
- Internals15.Log.info({ logLevel, tag: "@remotion/media" }, `Network error fetching ${src}, falling back to <OffthreadVideo>`);
3976
+ Internals16.Log.info({ logLevel, tag: "@remotion/media" }, `Network error fetching ${src}, falling back to <OffthreadVideo>`);
3577
3977
  }
3578
3978
  setReplaceWithOffthreadVideo({ durationInSeconds: null });
3579
3979
  return;
@@ -3612,12 +4012,12 @@ var VideoForRendering = ({
3612
4012
  frame,
3613
4013
  startsAt
3614
4014
  });
3615
- const volume = Internals15.evaluateVolume({
4015
+ const volume = Internals16.evaluateVolume({
3616
4016
  volume: volumeProp,
3617
4017
  frame: volumePropsFrame,
3618
4018
  mediaVolume: 1
3619
4019
  });
3620
- Internals15.warnAboutTooHighVolume(volume);
4020
+ Internals16.warnAboutTooHighVolume(volume);
3621
4021
  if (audio && volume > 0) {
3622
4022
  applyVolume(audio.data, volume);
3623
4023
  registerRenderAsset({
@@ -3669,10 +4069,10 @@ var VideoForRendering = ({
3669
4069
  videoEnabled
3670
4070
  ]);
3671
4071
  const classNameValue = useMemo5(() => {
3672
- return [Internals15.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals15.truthy).join(" ");
4072
+ return [Internals16.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals16.truthy).join(" ");
3673
4073
  }, [className]);
3674
4074
  if (replaceWithOffthreadVideo) {
3675
- const fallback = /* @__PURE__ */ jsx5(Internals15.InnerOffthreadVideo, {
4075
+ const fallback = /* @__PURE__ */ jsx5(Internals16.InnerOffthreadVideo, {
3676
4076
  src,
3677
4077
  playbackRate: playbackRate ?? 1,
3678
4078
  muted: muted ?? false,
@@ -3712,7 +4112,7 @@ var VideoForRendering = ({
3712
4112
  }
3713
4113
  return /* @__PURE__ */ jsx5(Loop, {
3714
4114
  layout: "none",
3715
- durationInFrames: Internals15.calculateMediaDuration({
4115
+ durationInFrames: Internals16.calculateMediaDuration({
3716
4116
  trimAfter: trimAfterValue,
3717
4117
  mediaDurationInFrames: replaceWithOffthreadVideo.durationInSeconds * fps,
3718
4118
  playbackRate,
@@ -3732,7 +4132,7 @@ var VideoForRendering = ({
3732
4132
 
3733
4133
  // src/video/video.tsx
3734
4134
  import { jsx as jsx6 } from "react/jsx-runtime";
3735
- var { validateMediaTrimProps, resolveTrimProps, validateMediaProps: validateMediaProps2 } = Internals16;
4135
+ var { validateMediaTrimProps, resolveTrimProps, validateMediaProps: validateMediaProps2 } = Internals17;
3736
4136
  var InnerVideo = ({
3737
4137
  src,
3738
4138
  audioStreamIndex,
@@ -3851,7 +4251,7 @@ var Video = ({
3851
4251
  delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? null,
3852
4252
  disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo ?? false,
3853
4253
  fallbackOffthreadVideoProps: fallbackOffthreadVideoProps ?? {},
3854
- logLevel: logLevel ?? (typeof window !== "undefined" ? window.remotion_logLevel : "info"),
4254
+ logLevel: logLevel ?? (typeof window !== "undefined" ? window.remotion_logLevel ?? "info" : "info"),
3855
4255
  loop: loop ?? false,
3856
4256
  loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? "repeat",
3857
4257
  muted: muted ?? false,
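Both the Audio and Video wrappers in this diff now resolve the default logLevel as logLevel ?? (typeof window !== "undefined" ? window.remotion_logLevel ?? "info" : "info"), falling back to "info" when window is unavailable (e.g. outside the browser) or when window.remotion_logLevel is unset; the old expressions could yield undefined in the latter case. A standalone sketch of that resolution (resolveLogLevel is an illustrative name, not an export of the package):

type LogLevel = 'trace' | 'verbose' | 'info' | 'warn' | 'error';

declare global {
  interface Window {
    remotion_logLevel?: LogLevel;
  }
}

export const resolveLogLevel = (explicit: LogLevel | undefined): LogLevel => {
  if (explicit) {
    return explicit;
  }
  if (typeof window === 'undefined') {
    return 'info'; // no window object available
  }
  return window.remotion_logLevel ?? 'info';
};

// resolveLogLevel(undefined) === 'info' when neither an explicit level
// nor window.remotion_logLevel is set.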
@@ -3869,7 +4269,7 @@ var Video = ({
3869
4269
  debugOverlay: debugOverlay ?? false
3870
4270
  });
3871
4271
  };
3872
- Internals16.addSequenceStackTraces(Video);
4272
+ Internals17.addSequenceStackTraces(Video);
3873
4273
  // src/index.ts
3874
4274
  var experimental_Audio = Audio;
3875
4275
  var experimental_Video = Video;