@remotion/media 4.0.357 → 4.0.358

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,15 +1,179 @@
  // src/audio/audio.tsx
- import { Internals as Internals11, useRemotionEnvironment as useRemotionEnvironment2 } from "remotion";
+ import { Internals as Internals13, useRemotionEnvironment as useRemotionEnvironment2 } from "remotion";

  // src/audio/audio-for-preview.tsx
- import { useContext, useEffect, useMemo, useRef, useState } from "react";
+ import { useContext as useContext2, useEffect as useEffect2, useMemo as useMemo2, useRef, useState as useState2 } from "react";
  import {
- Internals as Internals2,
+ Internals as Internals5,
  Audio as RemotionAudio,
  useBufferState,
- useCurrentFrame
+ useCurrentFrame as useCurrentFrame2
  } from "remotion";

+ // src/show-in-timeline.ts
+ import { useMemo } from "react";
+ import { Internals, useVideoConfig } from "remotion";
+ var useLoopDisplay = ({
+ loop,
+ mediaDurationInSeconds,
+ playbackRate,
+ trimAfter,
+ trimBefore
+ }) => {
+ const { durationInFrames: compDuration, fps } = useVideoConfig();
+ const loopDisplay = useMemo(() => {
+ if (!loop || !mediaDurationInSeconds) {
+ return;
+ }
+ const durationInFrames = Internals.calculateMediaDuration({
+ mediaDurationInFrames: mediaDurationInSeconds * fps,
+ playbackRate,
+ trimAfter,
+ trimBefore
+ });
+ const maxTimes = compDuration / durationInFrames;
+ return {
+ numberOfTimes: maxTimes,
+ startOffset: 0,
+ durationInFrames
+ };
+ }, [
+ compDuration,
+ fps,
+ loop,
+ mediaDurationInSeconds,
+ playbackRate,
+ trimAfter,
+ trimBefore
+ ]);
+ return loopDisplay;
+ };
+
+ // src/use-media-in-timeline.ts
+ import { useContext, useEffect, useState } from "react";
+ import { Internals as Internals2, useCurrentFrame } from "remotion";
+ var useMediaInTimeline = ({
+ volume,
+ mediaVolume,
+ src,
+ mediaType,
+ playbackRate,
+ displayName,
+ stack,
+ showInTimeline,
+ premountDisplay,
+ postmountDisplay,
+ loopDisplay,
+ trimBefore,
+ trimAfter
+ }) => {
+ const parentSequence = useContext(Internals2.SequenceContext);
+ const startsAt = Internals2.useMediaStartsAt();
+ const { registerSequence, unregisterSequence } = useContext(Internals2.SequenceManager);
+ const [sequenceId] = useState(() => String(Math.random()));
+ const [mediaId] = useState(() => String(Math.random()));
+ const frame = useCurrentFrame();
+ const {
+ volumes,
+ duration,
+ doesVolumeChange,
+ nonce,
+ rootId,
+ isStudio,
+ finalDisplayName
+ } = Internals2.useBasicMediaInTimeline({
+ volume,
+ mediaVolume,
+ mediaType,
+ src,
+ displayName,
+ trimBefore,
+ trimAfter,
+ playbackRate
+ });
+ useEffect(() => {
+ if (!src) {
+ throw new Error("No src passed");
+ }
+ if (!isStudio && window.process?.env?.NODE_ENV !== "test") {
+ return;
+ }
+ if (!showInTimeline) {
+ return;
+ }
+ const loopIteration = loopDisplay ? Math.floor(frame / loopDisplay.durationInFrames) : 0;
+ if (loopDisplay) {
+ registerSequence({
+ type: "sequence",
+ premountDisplay,
+ postmountDisplay,
+ parent: parentSequence?.id ?? null,
+ displayName: finalDisplayName,
+ rootId,
+ showInTimeline: true,
+ nonce,
+ loopDisplay,
+ stack,
+ from: 0,
+ duration,
+ id: sequenceId
+ });
+ }
+ registerSequence({
+ type: mediaType,
+ src,
+ id: mediaId,
+ duration: loopDisplay?.durationInFrames ?? duration,
+ from: loopDisplay ? loopIteration * loopDisplay.durationInFrames : 0,
+ parent: loopDisplay ? sequenceId : parentSequence?.id ?? null,
+ displayName: finalDisplayName,
+ rootId,
+ volume: volumes,
+ showInTimeline: true,
+ nonce,
+ startMediaFrom: 0 - startsAt,
+ doesVolumeChange,
+ loopDisplay: undefined,
+ playbackRate,
+ stack,
+ premountDisplay: null,
+ postmountDisplay: null
+ });
+ return () => {
+ if (loopDisplay) {
+ unregisterSequence(sequenceId);
+ }
+ unregisterSequence(mediaId);
+ };
+ }, [
+ doesVolumeChange,
+ duration,
+ finalDisplayName,
+ isStudio,
+ loopDisplay,
+ mediaId,
+ mediaType,
+ nonce,
+ parentSequence?.id,
+ playbackRate,
+ postmountDisplay,
+ premountDisplay,
+ registerSequence,
+ rootId,
+ sequenceId,
+ showInTimeline,
+ src,
+ stack,
+ startsAt,
+ unregisterSequence,
+ volumes,
+ frame
+ ]);
+ return {
+ id: mediaId
+ };
+ };
+
  // src/video/media-player.ts
  import {
  ALL_FORMATS,
@@ -18,7 +182,39 @@ import {
  Input,
  UrlSource
  } from "mediabunny";
- import { Internals } from "remotion";
+ import { Internals as Internals4 } from "remotion";
+
+ // src/get-time-in-seconds.ts
+ import { Internals as Internals3 } from "remotion";
+ var getTimeInSeconds = ({
+ loop,
+ mediaDurationInSeconds,
+ unloopedTimeInSeconds,
+ src,
+ trimAfter,
+ trimBefore,
+ fps,
+ playbackRate,
+ ifNoMediaDuration
+ }) => {
+ if (mediaDurationInSeconds === null && loop && ifNoMediaDuration === "fail") {
+ throw new Error(`Could not determine duration of ${src}, but "loop" was set.`);
+ }
+ const loopDuration = loop ? Internals3.calculateMediaDuration({
+ trimAfter,
+ mediaDurationInFrames: mediaDurationInSeconds ? mediaDurationInSeconds * fps : Infinity,
+ playbackRate: 1,
+ trimBefore
+ }) / fps : Infinity;
+ const timeInSeconds = unloopedTimeInSeconds * playbackRate % loopDuration;
+ if ((trimAfter ?? null) !== null && !loop) {
+ const time = (trimAfter - (trimBefore ?? 0)) / fps;
+ if (timeInSeconds >= time) {
+ return null;
+ }
+ }
+ return timeInSeconds + (trimBefore ?? 0) / fps;
+ };

  // src/is-network-error.ts
  function isNetworkError(error) {
@@ -28,34 +224,6 @@ function isNetworkError(error) {
  return false;
  }

- // src/video/resolve-playback-time.ts
- var resolvePlaybackTime = ({
- absolutePlaybackTimeInSeconds,
- playbackRate,
- loop,
- trimBeforeInSeconds,
- trimAfterInSeconds,
- mediaDurationInSeconds
- }) => {
- const loopAfterPreliminary = loop ? Math.min(trimAfterInSeconds ?? Infinity, mediaDurationInSeconds ?? Infinity) : Infinity;
- const loopAfterConsideringTrimBefore = loopAfterPreliminary - (trimBeforeInSeconds ?? 0);
- const loopAfterConsideringPlaybackRate = loopAfterConsideringTrimBefore / playbackRate;
- const timeConsideringLoop = absolutePlaybackTimeInSeconds % loopAfterConsideringPlaybackRate;
- const time = timeConsideringLoop * playbackRate + (trimBeforeInSeconds ?? 0);
- if (Number.isNaN(time)) {
- console.log({
- absolutePlaybackTimeInSeconds,
- playbackRate,
- loop,
- trimBeforeInSeconds,
- trimAfterInSeconds,
- mediaDurationInSeconds
- });
- throw new Error("Time is NaN");
- }
- return time;
- };
-
  // src/video/timeout-utils.ts
  var sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
  function withTimeout(promise, timeoutMs, errorMessage = "Operation timed out") {
@@ -93,13 +261,15 @@ class MediaPlayer {
  audioBufferIterator = null;
  queuedAudioNodes = new Set;
  gainNode = null;
+ currentVolume = 1;
  sharedAudioContext;
  audioSyncAnchor = 0;
  playing = false;
  muted = false;
  loop = false;
- trimBeforeSeconds;
- trimAfterSeconds;
+ fps;
+ trimBefore;
+ trimAfter;
  animationFrameId = null;
  videoAsyncId = 0;
  audioAsyncId = 0;
@@ -117,10 +287,11 @@ class MediaPlayer {
  logLevel,
  sharedAudioContext,
  loop,
- trimBeforeSeconds,
- trimAfterSeconds,
+ trimBefore,
+ trimAfter,
  playbackRate,
- audioStreamIndex
+ audioStreamIndex,
+ fps
  }) {
  this.canvas = canvas ?? null;
  this.src = src;
@@ -128,12 +299,13 @@ class MediaPlayer {
  this.sharedAudioContext = sharedAudioContext;
  this.playbackRate = playbackRate;
  this.loop = loop;
- this.trimBeforeSeconds = trimBeforeSeconds;
- this.trimAfterSeconds = trimAfterSeconds;
+ this.trimBefore = trimBefore;
+ this.trimAfter = trimAfter;
  this.audioStreamIndex = audioStreamIndex ?? 0;
+ this.fps = fps;
  if (canvas) {
  const context = canvas.getContext("2d", {
- alpha: false,
+ alpha: true,
  desynchronized: true
  });
  if (!context) {
@@ -169,15 +341,15 @@ class MediaPlayer {
  if (isNetworkError(err)) {
  throw error;
  }
- Internals.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Failed to recognize format for ${this.src}`, error);
+ Internals4.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Failed to recognize format for ${this.src}`, error);
  return { type: "unknown-container-format" };
  }
- const [duration, videoTrack, audioTracks] = await Promise.all([
+ const [durationInSeconds, videoTrack, audioTracks] = await Promise.all([
  input.computeDuration(),
  input.getPrimaryVideoTrack(),
  input.getAudioTracks()
  ]);
- this.totalDuration = duration;
+ this.totalDuration = durationInSeconds;
  const audioTrack = audioTracks[this.audioStreamIndex] ?? null;
  if (!videoTrack && !audioTrack) {
  return { type: "no-tracks" };
@@ -199,14 +371,21 @@ class MediaPlayer {
  this.gainNode = this.sharedAudioContext.createGain();
  this.gainNode.connect(this.sharedAudioContext.destination);
  }
- const startTime = resolvePlaybackTime({
- absolutePlaybackTimeInSeconds: startTimeUnresolved,
+ const startTime = getTimeInSeconds({
+ unloopedTimeInSeconds: startTimeUnresolved,
  playbackRate: this.playbackRate,
  loop: this.loop,
- trimBeforeInSeconds: this.trimBeforeSeconds,
- trimAfterInSeconds: this.trimAfterSeconds,
- mediaDurationInSeconds: this.totalDuration
+ trimBefore: this.trimBefore,
+ trimAfter: this.trimAfter,
+ mediaDurationInSeconds: this.totalDuration,
+ fps: this.fps,
+ ifNoMediaDuration: "infinity",
+ src: this.src
  });
+ if (startTime === null) {
+ this.clearCanvas();
+ return { type: "success", durationInSeconds: this.totalDuration };
+ }
  if (this.sharedAudioContext) {
  this.audioSyncAnchor = this.sharedAudioContext.currentTime - startTime;
  }
@@ -216,17 +395,22 @@ class MediaPlayer {
  this.startVideoIterator(startTime)
  ]);
  this.startRenderLoop();
- return { type: "success" };
+ return { type: "success", durationInSeconds };
  } catch (error) {
  const err = error;
  if (isNetworkError(err)) {
- Internals.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Network/CORS error for ${this.src}`, err);
+ Internals4.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Network/CORS error for ${this.src}`, err);
  return { type: "network-error" };
  }
- Internals.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to initialize", error);
+ Internals4.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to initialize", error);
  throw error;
  }
  }
+ clearCanvas() {
+ if (this.context && this.canvas) {
+ this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
+ }
+ }
  cleanupAudioQueue() {
  for (const node of this.queuedAudioNodes) {
  node.stop();
@@ -243,16 +427,24 @@ class MediaPlayer {
  async seekTo(time) {
  if (!this.isReady())
  return;
- const newTime = resolvePlaybackTime({
- absolutePlaybackTimeInSeconds: time,
+ const newTime = getTimeInSeconds({
+ unloopedTimeInSeconds: time,
  playbackRate: this.playbackRate,
  loop: this.loop,
- trimBeforeInSeconds: this.trimBeforeSeconds,
- trimAfterInSeconds: this.trimAfterSeconds,
- mediaDurationInSeconds: this.totalDuration
+ trimBefore: this.trimBefore,
+ trimAfter: this.trimAfter,
+ mediaDurationInSeconds: this.totalDuration ?? null,
+ fps: this.fps,
+ ifNoMediaDuration: "infinity",
+ src: this.src
  });
+ if (newTime === null) {
+ this.clearCanvas();
+ await this.cleanAudioIteratorAndNodes();
+ return;
+ }
  const currentPlaybackTime = this.getPlaybackTime();
- const isSignificantSeek = Math.abs(newTime - currentPlaybackTime) > SEEK_THRESHOLD;
+ const isSignificantSeek = currentPlaybackTime === null || Math.abs(newTime - currentPlaybackTime) > SEEK_THRESHOLD;
  if (isSignificantSeek) {
  this.nextFrame = null;
  this.audioSyncAnchor = this.sharedAudioContext.currentTime - newTime;
@@ -286,8 +478,8 @@ class MediaPlayer {
  }
  setMuted(muted) {
  this.muted = muted;
- if (muted) {
- this.cleanupAudioQueue();
+ if (this.gainNode) {
+ this.gainNode.gain.value = muted ? 0 : this.currentVolume;
  }
  }
  setVolume(volume) {
@@ -295,11 +487,17 @@ class MediaPlayer {
  return;
  }
  const appliedVolume = Math.max(0, volume);
- this.gainNode.gain.value = appliedVolume;
+ this.currentVolume = appliedVolume;
+ if (!this.muted) {
+ this.gainNode.gain.value = appliedVolume;
+ }
  }
  setPlaybackRate(rate) {
  this.playbackRate = rate;
  }
+ setFps(fps) {
+ this.fps = fps;
+ }
  setLoop(loop) {
  this.loop = loop;
  }
@@ -311,15 +509,7 @@ class MediaPlayer {
  this.videoAsyncId++;
  }
  getPlaybackTime() {
- const absoluteTime = this.sharedAudioContext.currentTime - this.audioSyncAnchor;
- return resolvePlaybackTime({
- absolutePlaybackTimeInSeconds: absoluteTime,
- playbackRate: this.playbackRate,
- loop: this.loop,
- trimBeforeInSeconds: this.trimBeforeSeconds,
- trimAfterInSeconds: this.trimAfterSeconds,
- mediaDurationInSeconds: this.totalDuration
- });
+ return this.sharedAudioContext.currentTime - this.audioSyncAnchor;
  }
  scheduleAudioChunk(buffer, mediaTimestamp) {
  const targetTime = mediaTimestamp + this.audioSyncAnchor;
@@ -384,7 +574,11 @@ class MediaPlayer {
  }
  };
  shouldRenderFrame() {
- return !this.isBuffering && this.canRenderVideo() && this.nextFrame !== null && this.nextFrame.timestamp <= this.getPlaybackTime();
+ const playbackTime = this.getPlaybackTime();
+ if (playbackTime === null) {
+ return false;
+ }
+ return !this.isBuffering && this.canRenderVideo() && this.nextFrame !== null && this.nextFrame.timestamp <= playbackTime;
  }
  drawCurrentFrame() {
  if (this.context && this.nextFrame) {
@@ -408,7 +602,7 @@ class MediaPlayer {
  this.audioBufferIterator = this.audioSink.buffers(startFromSecond);
  this.runAudioIterator(startFromSecond, currentAsyncId);
  } catch (error) {
- Internals.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to start audio iterator", error);
+ Internals4.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to start audio iterator", error);
  }
  };
  startVideoIterator = async (timeToSeek) => {
@@ -428,7 +622,7 @@ class MediaPlayer {
  return;
  }
  if (firstFrame && this.context) {
- Internals.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Drew initial frame ${firstFrame.timestamp.toFixed(3)}s`);
+ Internals4.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Drew initial frame ${firstFrame.timestamp.toFixed(3)}s`);
  this.context.drawImage(firstFrame.canvas, 0, 0);
  if (this.onVideoFrameCallback && this.canvas) {
  this.onVideoFrameCallback(this.canvas);
@@ -436,10 +630,10 @@ class MediaPlayer {
  }
  this.nextFrame = secondFrame ?? null;
  if (secondFrame) {
- Internals.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Buffered next frame ${secondFrame.timestamp.toFixed(3)}s`);
+ Internals4.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Buffered next frame ${secondFrame.timestamp.toFixed(3)}s`);
  }
  } catch (error) {
- Internals.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to start video iterator", error);
+ Internals4.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to start video iterator", error);
  }
  };
  updateNextFrame = async () => {
@@ -452,16 +646,20 @@ class MediaPlayer {
  if (!newNextFrame) {
  break;
  }
- if (newNextFrame.timestamp <= this.getPlaybackTime()) {
+ const playbackTime = this.getPlaybackTime();
+ if (playbackTime === null) {
+ continue;
+ }
+ if (newNextFrame.timestamp <= playbackTime) {
  continue;
  } else {
  this.nextFrame = newNextFrame;
- Internals.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Buffered next frame ${newNextFrame.timestamp.toFixed(3)}s`);
+ Internals4.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Buffered next frame ${newNextFrame.timestamp.toFixed(3)}s`);
  break;
  }
  }
  } catch (error) {
- Internals.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to update next frame", error);
+ Internals4.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to update next frame", error);
  }
  };
  bufferingStartedAtMs = null;
@@ -486,7 +684,7 @@ class MediaPlayer {
  const minTimeElapsed = bufferingDuration >= this.minBufferingTimeoutMs;
  const bufferHealthy = currentBufferDuration >= this.HEALTHY_BUFER_THRESHOLD_SECONDS;
  if (minTimeElapsed && bufferHealthy) {
- Internals.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Resuming from buffering after ${bufferingDuration}ms - buffer recovered`);
+ Internals4.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Resuming from buffering after ${bufferingDuration}ms - buffer recovered`);
  this.setBufferingState(false);
  }
  }
@@ -497,7 +695,7 @@ class MediaPlayer {
  const bufferingDuration = now - this.bufferingStartedAtMs;
  const forceTimeout = bufferingDuration > this.minBufferingTimeoutMs * 10;
  if (forceTimeout) {
- Internals.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Force resuming from buffering after ${bufferingDuration}ms`);
+ Internals4.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Force resuming from buffering after ${bufferingDuration}ms`);
  this.setBufferingState(false);
  }
  }
@@ -528,7 +726,7 @@ class MediaPlayer {
  totalBufferDuration += duration;
  this.audioBufferHealth = Math.max(0, totalBufferDuration / this.playbackRate);
  this.maybeResumeFromBuffering(totalBufferDuration / this.playbackRate);
- if (this.playing && !this.muted) {
+ if (this.playing) {
  if (isFirstBuffer) {
  this.audioSyncAnchor = this.sharedAudioContext.currentTime - timestamp;
  isFirstBuffer = false;
@@ -538,10 +736,15 @@ class MediaPlayer {
  }
  this.scheduleAudioChunk(buffer, timestamp);
  }
- if (timestamp - this.getPlaybackTime() >= 1) {
+ const playbackTime = this.getPlaybackTime();
+ if (playbackTime === null) {
+ continue;
+ }
+ if (timestamp - playbackTime >= 1) {
  await new Promise((resolve) => {
  const check = () => {
- if (timestamp - this.getPlaybackTime() < 1) {
+ const currentPlaybackTime = this.getPlaybackTime();
+ if (currentPlaybackTime !== null && timestamp - currentPlaybackTime < 1) {
  resolve();
  } else {
  requestAnimationFrame(check);
@@ -552,7 +755,7 @@ class MediaPlayer {
  }
  }
  } catch (error) {
- Internals.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to run audio iterator", error);
+ Internals4.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to run audio iterator", error);
  }
  };
  }
@@ -569,9 +772,8 @@ var {
  evaluateVolume,
  warnAboutTooHighVolume,
  usePreload,
- useMediaInTimeline,
  SequenceContext
- } = Internals2;
+ } = Internals5;
  var NewAudioForPreview = ({
  src,
  playbackRate,
@@ -591,14 +793,14 @@ var NewAudioForPreview = ({
  fallbackHtml5AudioProps
  }) => {
  const videoConfig = useUnsafeVideoConfig();
- const frame = useCurrentFrame();
+ const frame = useCurrentFrame2();
  const mediaPlayerRef = useRef(null);
- const [mediaPlayerReady, setMediaPlayerReady] = useState(false);
- const [shouldFallbackToNativeAudio, setShouldFallbackToNativeAudio] = useState(false);
+ const [mediaPlayerReady, setMediaPlayerReady] = useState2(false);
+ const [shouldFallbackToNativeAudio, setShouldFallbackToNativeAudio] = useState2(false);
  const [playing] = Timeline.usePlayingState();
- const timelineContext = useContext(Timeline.TimelineContext);
+ const timelineContext = useContext2(Timeline.TimelineContext);
  const globalPlaybackRate = timelineContext.playbackRate;
- const sharedAudioContext = useContext(SharedAudioContext);
+ const sharedAudioContext = useContext2(SharedAudioContext);
  const buffer = useBufferState();
  const delayHandleRef = useRef(null);
  const [mediaMuted] = useMediaMutedState();
@@ -620,8 +822,14 @@ var NewAudioForPreview = ({
  const currentTimeRef = useRef(currentTime);
  currentTimeRef.current = currentTime;
  const preloadedSrc = usePreload(src);
- const [timelineId] = useState(() => String(Math.random()));
- const parentSequence = useContext(SequenceContext);
+ const parentSequence = useContext2(SequenceContext);
+ const loopDisplay = useLoopDisplay({
+ loop,
+ mediaDurationInSeconds: videoConfig.durationInFrames,
+ playbackRate,
+ trimAfter,
+ trimBefore
+ });
  useMediaInTimeline({
  volume,
  mediaVolume,
@@ -629,13 +837,15 @@ var NewAudioForPreview = ({
  src,
  playbackRate,
  displayName: name ?? null,
- id: timelineId,
  stack,
  showInTimeline,
  premountDisplay: parentSequence?.premountDisplay ?? null,
- postmountDisplay: parentSequence?.postmountDisplay ?? null
+ postmountDisplay: parentSequence?.postmountDisplay ?? null,
+ loopDisplay,
+ trimAfter,
+ trimBefore
  });
- useEffect(() => {
+ useEffect2(() => {
  if (!sharedAudioContext)
  return;
  if (!sharedAudioContext.audioContext)
@@ -646,8 +856,9 @@ var NewAudioForPreview = ({
  logLevel,
  sharedAudioContext: sharedAudioContext.audioContext,
  loop,
- trimAfterSeconds: trimAfter ? trimAfter / videoConfig.fps : undefined,
- trimBeforeSeconds: trimBefore ? trimBefore / videoConfig.fps : undefined,
+ trimAfter,
+ trimBefore,
+ fps: videoConfig.fps,
  canvas: null,
  playbackRate,
  audioStreamIndex: audioStreamIndex ?? 0
@@ -658,7 +869,7 @@ var NewAudioForPreview = ({
  if (disallowFallbackToHtml5Audio) {
  throw new Error(`Unknown container format ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
  }
- Internals2.Log.warn({ logLevel, tag: "@remotion/media" }, `Unknown container format for ${preloadedSrc} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Audio>`);
+ Internals5.Log.warn({ logLevel, tag: "@remotion/media" }, `Unknown container format for ${preloadedSrc} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Html5Audio>`);
  setShouldFallbackToNativeAudio(true);
  return;
  }
@@ -666,7 +877,7 @@ var NewAudioForPreview = ({
  if (disallowFallbackToHtml5Audio) {
  throw new Error(`Network error fetching ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
  }
- Internals2.Log.warn({ logLevel, tag: "@remotion/media" }, `Network error fetching ${preloadedSrc}, falling back to <Audio>`);
+ Internals5.Log.warn({ logLevel, tag: "@remotion/media" }, `Network error fetching ${preloadedSrc}, falling back to <Html5Audio>`);
  setShouldFallbackToNativeAudio(true);
  return;
  }
@@ -674,7 +885,7 @@ var NewAudioForPreview = ({
  if (disallowFallbackToHtml5Audio) {
  throw new Error(`Cannot decode ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
  }
- Internals2.Log.warn({ logLevel, tag: "@remotion/media" }, `Cannot decode ${preloadedSrc}, falling back to <Audio>`);
+ Internals5.Log.warn({ logLevel, tag: "@remotion/media" }, `Cannot decode ${preloadedSrc}, falling back to <Html5Audio>`);
  setShouldFallbackToNativeAudio(true);
  return;
  }
@@ -682,20 +893,20 @@ var NewAudioForPreview = ({
  if (disallowFallbackToHtml5Audio) {
  throw new Error(`No video or audio tracks found for ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
  }
- Internals2.Log.warn({ logLevel, tag: "@remotion/media" }, `No video or audio tracks found for ${preloadedSrc}, falling back to <Audio>`);
+ Internals5.Log.warn({ logLevel, tag: "@remotion/media" }, `No video or audio tracks found for ${preloadedSrc}, falling back to <Html5Audio>`);
  setShouldFallbackToNativeAudio(true);
  return;
  }
  if (result.type === "success") {
  setMediaPlayerReady(true);
- Internals2.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewAudioForPreview] MediaPlayer initialized successfully`);
+ Internals5.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewAudioForPreview] MediaPlayer initialized successfully`);
  }
  }).catch((error) => {
- Internals2.Log.error({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] Failed to initialize MediaPlayer", error);
+ Internals5.Log.error({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] Failed to initialize MediaPlayer", error);
  setShouldFallbackToNativeAudio(true);
  });
  } catch (error) {
- Internals2.Log.error({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] MediaPlayer initialization failed", error);
+ Internals5.Log.error({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] MediaPlayer initialization failed", error);
  setShouldFallbackToNativeAudio(true);
  }
  return () => {
@@ -704,7 +915,7 @@ var NewAudioForPreview = ({
  delayHandleRef.current = null;
  }
  if (mediaPlayerRef.current) {
- Internals2.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewAudioForPreview] Disposing MediaPlayer`);
+ Internals5.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewAudioForPreview] Disposing MediaPlayer`);
  mediaPlayerRef.current.dispose();
  mediaPlayerRef.current = null;
  }
@@ -724,62 +935,69 @@ var NewAudioForPreview = ({
  audioStreamIndex,
  disallowFallbackToHtml5Audio
  ]);
- useEffect(() => {
+ useEffect2(() => {
  const audioPlayer = mediaPlayerRef.current;
  if (!audioPlayer)
  return;
  if (playing) {
  audioPlayer.play().catch((error) => {
- Internals2.Log.error({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] Failed to play", error);
+ Internals5.Log.error({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] Failed to play", error);
  });
  } else {
  audioPlayer.pause();
  }
  }, [playing, logLevel, mediaPlayerReady]);
- useEffect(() => {
+ useEffect2(() => {
  const audioPlayer = mediaPlayerRef.current;
  if (!audioPlayer || !mediaPlayerReady)
  return;
  audioPlayer.seekTo(currentTime);
- Internals2.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewAudioForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
+ Internals5.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewAudioForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
  }, [currentTime, logLevel, mediaPlayerReady]);
- useEffect(() => {
+ useEffect2(() => {
  const audioPlayer = mediaPlayerRef.current;
  if (!audioPlayer || !mediaPlayerReady)
  return;
  audioPlayer.onBufferingChange((newBufferingState) => {
  if (newBufferingState && !delayHandleRef.current) {
  delayHandleRef.current = buffer.delayPlayback();
- Internals2.Log.trace({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] MediaPlayer buffering - blocking Remotion playback");
+ Internals5.Log.trace({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] MediaPlayer buffering - blocking Remotion playback");
  } else if (!newBufferingState && delayHandleRef.current) {
  delayHandleRef.current.unblock();
  delayHandleRef.current = null;
- Internals2.Log.trace({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] MediaPlayer unbuffering - unblocking Remotion playback");
+ Internals5.Log.trace({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] MediaPlayer unbuffering - unblocking Remotion playback");
  }
  });
  }, [mediaPlayerReady, buffer, logLevel]);
  const effectiveMuted = muted || mediaMuted || userPreferredVolume <= 0;
- useEffect(() => {
+ useEffect2(() => {
  const audioPlayer = mediaPlayerRef.current;
  if (!audioPlayer || !mediaPlayerReady)
  return;
  audioPlayer.setMuted(effectiveMuted);
  }, [effectiveMuted, mediaPlayerReady]);
- useEffect(() => {
+ useEffect2(() => {
  const audioPlayer = mediaPlayerRef.current;
  if (!audioPlayer || !mediaPlayerReady) {
  return;
  }
  audioPlayer.setVolume(userPreferredVolume);
- }, [userPreferredVolume, mediaPlayerReady, logLevel]);
- const effectivePlaybackRate = useMemo(() => playbackRate * globalPlaybackRate, [playbackRate, globalPlaybackRate]);
- useEffect(() => {
+ }, [userPreferredVolume, mediaPlayerReady]);
+ const effectivePlaybackRate = useMemo2(() => playbackRate * globalPlaybackRate, [playbackRate, globalPlaybackRate]);
+ useEffect2(() => {
  const audioPlayer = mediaPlayerRef.current;
  if (!audioPlayer || !mediaPlayerReady) {
  return;
  }
  audioPlayer.setPlaybackRate(effectivePlaybackRate);
- }, [effectivePlaybackRate, mediaPlayerReady, logLevel]);
+ }, [effectivePlaybackRate, mediaPlayerReady]);
+ useEffect2(() => {
+ const audioPlayer = mediaPlayerRef.current;
+ if (!audioPlayer || !mediaPlayerReady) {
+ return;
+ }
+ audioPlayer.setFps(videoConfig.fps);
+ }, [videoConfig.fps, mediaPlayerReady]);
  if (shouldFallbackToNativeAudio && !disallowFallbackToHtml5Audio) {
  return /* @__PURE__ */ jsx(RemotionAudio, {
  src,
@@ -841,13 +1059,13 @@ var AudioForPreview = ({
  };

  // src/audio/audio-for-rendering.tsx
- import { useContext as useContext2, useLayoutEffect, useMemo as useMemo2, useState as useState2 } from "react";
+ import { useContext as useContext3, useLayoutEffect, useMemo as useMemo3, useState as useState3 } from "react";
  import {
- Audio,
  cancelRender as cancelRender2,
- Internals as Internals10,
+ Html5Audio,
+ Internals as Internals12,
  random,
- useCurrentFrame as useCurrentFrame2,
+ useCurrentFrame as useCurrentFrame3,
  useDelayRender,
  useRemotionEnvironment
  } from "remotion";
@@ -976,13 +1194,13 @@ var frameForVolumeProp = ({
  };

  // src/caches.ts
- import { cancelRender, Internals as Internals7 } from "remotion";
+ import { cancelRender, Internals as Internals10 } from "remotion";

  // src/audio-extraction/audio-manager.ts
- import { Internals as Internals4 } from "remotion";
+ import { Internals as Internals7 } from "remotion";

  // src/audio-extraction/audio-iterator.ts
- import { Internals as Internals3 } from "remotion";
+ import { Internals as Internals6 } from "remotion";

  // src/audio-extraction/audio-cache.ts
  var makeAudioCache = () => {
@@ -1060,7 +1278,7 @@ var warnAboutMatroskaOnce = (src, logLevel) => {
  return;
  }
  warned[src] = true;
- Internals3.Log.warn({ logLevel, tag: "@remotion/media" }, `Audio from ${src} will need to be read from the beginning. https://www.remotion.dev/docs/media/support#matroska-limitation`);
+ Internals6.Log.warn({ logLevel, tag: "@remotion/media" }, `Audio from ${src} will need to be read from the beginning. https://www.remotion.dev/docs/media/support#matroska-limitation`);
  };
  var makeAudioIterator = ({
  audioSampleSink,
@@ -1128,7 +1346,7 @@ var makeAudioIterator = ({
  if (openTimestamps.length > 0) {
  const first = openTimestamps[0];
  const last = openTimestamps[openTimestamps.length - 1];
- Internals3.Log.verbose({ logLevel, tag: "@remotion/media" }, "Open audio samples for src", src, `${first.toFixed(3)}...${last.toFixed(3)}`);
+ Internals6.Log.verbose({ logLevel, tag: "@remotion/media" }, "Open audio samples for src", src, `${first.toFixed(3)}...${last.toFixed(3)}`);
  }
  };
  const getCacheStats = () => {
@@ -1225,7 +1443,7 @@ var makeAudioManager = () => {
  if (seenKeys.has(key)) {
  iterator.prepareForDeletion();
  iterators.splice(iterators.indexOf(iterator), 1);
- Internals4.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted duplicate iterator for ${iterator.src}`);
+ Internals7.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted duplicate iterator for ${iterator.src}`);
  }
  seenKeys.add(key);
  }
@@ -1307,7 +1525,7 @@ var makeAudioManager = () => {
  };

  // src/video-extraction/keyframe-manager.ts
- import { Internals as Internals6 } from "remotion";
+ import { Internals as Internals9 } from "remotion";

  // src/render-timestamp-range.ts
  var renderTimestampRange = (timestamps) => {
@@ -1333,7 +1551,7 @@ import {
  } from "mediabunny";

  // src/video-extraction/keyframe-bank.ts
- import { Internals as Internals5 } from "remotion";
+ import { Internals as Internals8 } from "remotion";
  var roundTo4Digits = (timestamp) => {
  return Math.round(timestamp * 1000) / 1000;
  };
@@ -1343,7 +1561,7 @@ var makeKeyframeBank = ({
  sampleIterator,
  logLevel: parentLogLevel
  }) => {
- Internals5.Log.verbose({ logLevel: parentLogLevel, tag: "@remotion/media" }, `Creating keyframe bank from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
+ Internals8.Log.verbose({ logLevel: parentLogLevel, tag: "@remotion/media" }, `Creating keyframe bank from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
  const frames = {};
  const frameTimestamps = [];
  let lastUsed = Date.now();
@@ -1401,7 +1619,7 @@ var makeKeyframeBank = ({
  return await getFrameFromTimestamp(timestamp) !== null;
  };
  const prepareForDeletion = (logLevel) => {
- Internals5.Log.verbose({ logLevel, tag: "@remotion/media" }, `Preparing for deletion of keyframe bank from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
+ Internals8.Log.verbose({ logLevel, tag: "@remotion/media" }, `Preparing for deletion of keyframe bank from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
  sampleIterator.return().then((result) => {
  if (result.value) {
  result.value.close();
@@ -1441,7 +1659,7 @@ var makeKeyframeBank = ({
  }
  }
  if (deletedTimestamps.length > 0) {
- Internals5.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted ${deletedTimestamps.length} frame${deletedTimestamps.length === 1 ? "" : "s"} ${renderTimestampRange(deletedTimestamps)} for src ${src} because it is lower than ${timestampInSeconds}. Remaining: ${renderTimestampRange(frameTimestamps)}`);
+ Internals8.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted ${deletedTimestamps.length} frame${deletedTimestamps.length === 1 ? "" : "s"} ${renderTimestampRange(deletedTimestamps)} for src ${src} because it is lower than ${timestampInSeconds}. Remaining: ${renderTimestampRange(frameTimestamps)}`);
  }
  };
  const getOpenFrameCount = () => {
@@ -1617,10 +1835,10 @@ var makeKeyframeManager = () => {
  if (size === 0) {
  continue;
  }
- Internals6.Log.verbose({ logLevel, tag: "@remotion/media" }, `Open frames for src ${src}: ${renderTimestampRange(timestamps)}`);
+ Internals9.Log.verbose({ logLevel, tag: "@remotion/media" }, `Open frames for src ${src}: ${renderTimestampRange(timestamps)}`);
  }
  }
- Internals6.Log.verbose({ logLevel, tag: "@remotion/media" }, `Video cache stats: ${count} open frames, ${totalSize} bytes`);
+ Internals9.Log.verbose({ logLevel, tag: "@remotion/media" }, `Video cache stats: ${count} open frames, ${totalSize} bytes`);
  };
  const getCacheStats = async () => {
  let count = 0;
@@ -1661,7 +1879,7 @@ var makeKeyframeManager = () => {
  if (mostInThePastBank) {
  await mostInThePastBank.prepareForDeletion(logLevel);
  delete sources[mostInThePastSrc][mostInThePastBank.startTimestampInSeconds];
- Internals6.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted frames for src ${mostInThePastSrc} from ${mostInThePastBank.startTimestampInSeconds}sec to ${mostInThePastBank.endTimestampInSeconds}sec to free up memory.`);
+ Internals9.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted frames for src ${mostInThePastSrc} from ${mostInThePastBank.startTimestampInSeconds}sec to ${mostInThePastBank.endTimestampInSeconds}sec to free up memory.`);
  }
  };
  const ensureToStayUnderMaxCacheSize = async (logLevel) => {
@@ -1687,7 +1905,7 @@ var makeKeyframeManager = () => {
  const { endTimestampInSeconds, startTimestampInSeconds } = bank;
  if (endTimestampInSeconds < threshold) {
  await bank.prepareForDeletion(logLevel);
- Internals6.Log.verbose({ logLevel, tag: "@remotion/media" }, `[Video] Cleared frames for src ${src} from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
+ Internals9.Log.verbose({ logLevel, tag: "@remotion/media" }, `[Video] Cleared frames for src ${src} from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
  delete sources[src][startTimeInSeconds];
  } else {
  bank.deleteFramesBeforeTimestamp({
@@ -1727,7 +1945,7 @@ var makeKeyframeManager = () => {
  if (await (await existingBank).hasTimestampInSecond(timestamp)) {
  return existingBank;
  }
- Internals6.Log.verbose({ logLevel, tag: "@remotion/media" }, `Keyframe bank exists but frame at time ${timestamp} does not exist anymore.`);
+ Internals9.Log.verbose({ logLevel, tag: "@remotion/media" }, `Keyframe bank exists but frame at time ${timestamp} does not exist anymore.`);
  await (await existingBank).prepareForDeletion(logLevel);
  delete sources[src][startTimestampInSeconds];
  const replacementKeybank = getFramesSinceKeyframe({
@@ -1815,20 +2033,20 @@ var getUncachedMaxCacheSize = (logLevel) => {
  if (window.remotion_mediaCacheSizeInBytes > 20000 * 1024 * 1024) {
  cancelRender(new Error(`The maximum value for the "mediaCacheSizeInBytes" prop is 20GB (${20000 * 1024 * 1024}), got: ${window.remotion_mediaCacheSizeInBytes}`));
  }
- Internals7.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set using "mediaCacheSizeInBytes": ${(window.remotion_mediaCacheSizeInBytes / 1024 / 1024).toFixed(1)} MB`);
+ Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set using "mediaCacheSizeInBytes": ${(window.remotion_mediaCacheSizeInBytes / 1024 / 1024).toFixed(1)} MB`);
  return window.remotion_mediaCacheSizeInBytes;
  }
  if (window.remotion_initialMemoryAvailable !== undefined && window.remotion_initialMemoryAvailable !== null) {
  const value = window.remotion_initialMemoryAvailable / 2;
  if (value < 240 * 1024 * 1024) {
- Internals7.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on minimum value of 240MB (which is more than half of the available system memory!)`);
+ Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on minimum value of 240MB (which is more than half of the available system memory!)`);
  return 240 * 1024 * 1024;
  }
  if (value > 20000 * 1024 * 1024) {
- Internals7.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on maximum value of 20GB (which is less than half of the available system memory)`);
+ Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on maximum value of 20GB (which is less than half of the available system memory)`);
  return 20000 * 1024 * 1024;
  }
- Internals7.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on available memory (50% of available memory): ${(value / 1024 / 1024).toFixed(1)} MB`);
+ Internals10.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on available memory (50% of available memory): ${(value / 1024 / 1024).toFixed(1)} MB`);
  return value;
  }
  return 1000 * 1000 * 1000;
@@ -1915,12 +2133,12 @@ var convertAudioData = ({
  };

  // src/get-sink.ts
- import { Internals as Internals8 } from "remotion";
+ import { Internals as Internals11 } from "remotion";
  var sinkPromises = {};
  var getSink = (src, logLevel) => {
  let promise = sinkPromises[src];
  if (!promise) {
- Internals8.Log.verbose({
+ Internals11.Log.verbose({
  logLevel,
  tag: "@remotion/media"
  }, `Sink for ${src} was not found, creating new sink`);
@@ -1930,39 +2148,6 @@ var getSink = (src, logLevel) => {
  return promise;
  };

- // src/get-time-in-seconds.ts
- import { Internals as Internals9 } from "remotion";
- var getTimeInSeconds = ({
- loop,
- mediaDurationInSeconds,
- unloopedTimeInSeconds,
- src,
- trimAfter,
- trimBefore,
- fps,
- playbackRate
- }) => {
- if (mediaDurationInSeconds === null && loop) {
- throw new Error(`Could not determine duration of ${src}, but "loop" was set.`);
- }
- const loopDuration = loop ? Internals9.calculateLoopDuration({
- trimAfter,
- mediaDurationInFrames: mediaDurationInSeconds * fps,
- playbackRate: 1,
- trimBefore
- }) / fps : Infinity;
- const timeInSeconds = unloopedTimeInSeconds * playbackRate % loopDuration;
- if ((trimAfter ?? null) !== null) {
- if (!loop) {
- const time = (trimAfter - (trimBefore ?? 0)) / fps;
- if (timeInSeconds >= time) {
- return null;
- }
- }
- }
- return timeInSeconds + (trimBefore ?? 0) / fps;
- };
-
  // src/audio-extraction/extract-audio.ts
  var extractAudioInternal = async ({
  src,
@@ -1999,7 +2184,8 @@ var extractAudioInternal = async ({
  trimAfter,
  playbackRate,
  trimBefore,
- fps
+ fps,
+ ifNoMediaDuration: "fail"
  });
  if (timeInSeconds === null) {
  return { data: null, durationInSeconds: mediaDurationInSeconds };
@@ -2099,7 +2285,8 @@ var extractFrameInternal = async ({
  trimAfter,
  playbackRate,
  trimBefore,
- fps
+ fps,
+ ifNoMediaDuration: "fail"
  });
  if (timeInSeconds === null) {
  return {
@@ -2414,11 +2601,11 @@ var AudioForRendering = ({
  trimAfter,
  trimBefore
  }) => {
- const frame = useCurrentFrame2();
- const absoluteFrame = Internals10.useTimelinePosition();
- const videoConfig = Internals10.useUnsafeVideoConfig();
- const { registerRenderAsset, unregisterRenderAsset } = useContext2(Internals10.RenderAssetManager);
- const startsAt = Internals10.useMediaStartsAt();
+ const frame = useCurrentFrame3();
+ const absoluteFrame = Internals12.useTimelinePosition();
+ const videoConfig = Internals12.useUnsafeVideoConfig();
+ const { registerRenderAsset, unregisterRenderAsset } = useContext3(Internals12.RenderAssetManager);
+ const startsAt = Internals12.useMediaStartsAt();
  const environment = useRemotionEnvironment();
  if (!videoConfig) {
  throw new Error("No video config found");
@@ -2428,9 +2615,9 @@ var AudioForRendering = ({
  }
  const { fps } = videoConfig;
  const { delayRender, continueRender } = useDelayRender();
- const [replaceWithHtml5Audio, setReplaceWithHtml5Audio] = useState2(false);
- const sequenceContext = useContext2(Internals10.SequenceContext);
- const id = useMemo2(() => `media-video-${random(src)}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [
+ const [replaceWithHtml5Audio, setReplaceWithHtml5Audio] = useState3(false);
+ const sequenceContext = useContext3(Internals12.SequenceContext);
+ const id = useMemo3(() => `media-video-${random(src)}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [
  src,
  sequenceContext?.cumulatedFrom,
  sequenceContext?.relativeFrom,
@@ -2474,7 +2661,7 @@ var AudioForRendering = ({
  if (disallowFallbackToHtml5Audio) {
  cancelRender2(new Error(`Unknown container format ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
  }
- Internals10.Log.warn({ logLevel, tag: "@remotion/media" }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Audio>`);
+ Internals12.Log.warn({ logLevel, tag: "@remotion/media" }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Html5Audio>`);
  setReplaceWithHtml5Audio(true);
  return;
  }
@@ -2482,7 +2669,7 @@ var AudioForRendering = ({
  if (disallowFallbackToHtml5Audio) {
  cancelRender2(new Error(`Cannot decode ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
  }
- Internals10.Log.warn({ logLevel, tag: "@remotion/media" }, `Cannot decode ${src}, falling back to <Audio>`);
+ Internals12.Log.warn({ logLevel, tag: "@remotion/media" }, `Cannot decode ${src}, falling back to <Html5Audio>`);
  setReplaceWithHtml5Audio(true);
  return;
  }
@@ -2490,7 +2677,7 @@ var AudioForRendering = ({
  if (disallowFallbackToHtml5Audio) {
  cancelRender2(new Error(`Cannot decode ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
  }
- Internals10.Log.warn({ logLevel, tag: "@remotion/media" }, `Network error fetching ${src}, falling back to <Audio>`);
+ Internals12.Log.warn({ logLevel, tag: "@remotion/media" }, `Network error fetching ${src}, falling back to <Html5Audio>`);
  setReplaceWithHtml5Audio(true);
  return;
  }
@@ -2503,12 +2690,12 @@ var AudioForRendering = ({
  frame,
  startsAt
  });
- const volume = Internals10.evaluateVolume({
+ const volume = Internals12.evaluateVolume({
  volume: volumeProp,
  frame: volumePropsFrame,
  mediaVolume: 1
  });
- Internals10.warnAboutTooHighVolume(volume);
+ Internals12.warnAboutTooHighVolume(volume);
  if (audio && volume > 0) {
  applyVolume(audio.data, volume);
  registerRenderAsset({
@@ -2557,7 +2744,7 @@ var AudioForRendering = ({
  replaceWithHtml5Audio
  ]);
  if (replaceWithHtml5Audio) {
- return /* @__PURE__ */ jsx2(Audio, {
+ return /* @__PURE__ */ jsx2(Html5Audio, {
  src,
  playbackRate,
  muted,
@@ -2581,8 +2768,8 @@ var AudioForRendering = ({

  // src/audio/audio.tsx
  import { jsx as jsx3 } from "react/jsx-runtime";
- var { validateMediaProps } = Internals11;
- var Audio2 = (props) => {
+ var { validateMediaProps } = Internals13;
+ var Audio = (props) => {
  const { name, stack, showInTimeline, ...otherProps } = props;
  const environment = useRemotionEnvironment2();
  if (typeof props.src !== "string") {
@@ -2600,14 +2787,14 @@ var Audio2 = (props) => {
  stack: stack ?? null
  });
  };
- Internals11.addSequenceStackTraces(Audio2);
+ Internals13.addSequenceStackTraces(Audio);

  // src/video/video.tsx
- import { Internals as Internals14, useRemotionEnvironment as useRemotionEnvironment4 } from "remotion";
+ import { Internals as Internals16, useRemotionEnvironment as useRemotionEnvironment4 } from "remotion";

  // src/video/video-for-preview.tsx
- import { useContext as useContext3, useEffect as useEffect2, useMemo as useMemo3, useRef as useRef2, useState as useState3 } from "react";
- import { Internals as Internals12, useBufferState as useBufferState2, useCurrentFrame as useCurrentFrame3, Video } from "remotion";
+ import { useContext as useContext4, useEffect as useEffect3, useMemo as useMemo4, useRef as useRef2, useState as useState4 } from "react";
+ import { Html5Video, Internals as Internals14, useBufferState as useBufferState2, useCurrentFrame as useCurrentFrame4 } from "remotion";
  import { jsx as jsx4 } from "react/jsx-runtime";
  var {
  useUnsafeVideoConfig: useUnsafeVideoConfig2,
@@ -2619,11 +2806,11 @@ var {
  evaluateVolume: evaluateVolume2,
  warnAboutTooHighVolume: warnAboutTooHighVolume2,
  usePreload: usePreload2,
- useMediaInTimeline: useMediaInTimeline2,
- SequenceContext: SequenceContext2
- } = Internals12;
- var NewVideoForPreview = ({
- src,
+ SequenceContext: SequenceContext2,
+ SequenceVisibilityToggleContext
+ } = Internals14;
+ var VideoForPreview = ({
+ src: unpreloadedSrc,
  style,
  playbackRate,
  logLevel,
@@ -2642,19 +2829,22 @@ var NewVideoForPreview = ({
  fallbackOffthreadVideoProps,
  audioStreamIndex
  }) => {
+ const src = usePreload2(unpreloadedSrc);
  const canvasRef = useRef2(null);
  const videoConfig = useUnsafeVideoConfig2();
- const frame = useCurrentFrame3();
+ const frame = useCurrentFrame4();
  const mediaPlayerRef = useRef2(null);
- const [mediaPlayerReady, setMediaPlayerReady] = useState3(false);
- const [shouldFallbackToNativeVideo, setShouldFallbackToNativeVideo] = useState3(false);
+ const [mediaPlayerReady, setMediaPlayerReady] = useState4(false);
+ const [shouldFallbackToNativeVideo, setShouldFallbackToNativeVideo] = useState4(false);
  const [playing] = Timeline2.usePlayingState();
- const timelineContext = useContext3(Timeline2.TimelineContext);
+ const timelineContext = useContext4(Timeline2.TimelineContext);
  const globalPlaybackRate = timelineContext.playbackRate;
- const sharedAudioContext = useContext3(SharedAudioContext2);
+ const sharedAudioContext = useContext4(SharedAudioContext2);
  const buffer = useBufferState2();
  const [mediaMuted] = useMediaMutedState2();
  const [mediaVolume] = useMediaVolumeState2();
+ const [mediaDurationInSeconds, setMediaDurationInSeconds] = useState4(null);
+ const { hidden } = useContext4(SequenceVisibilityToggleContext);
  const volumePropFrame = useFrameForVolumeProp2(loopVolumeCurveBehavior);
  const userPreferredVolume = evaluateVolume2({
  frame: volumePropFrame,
@@ -2662,21 +2852,30 @@ var NewVideoForPreview = ({
  mediaVolume
  });
  warnAboutTooHighVolume2(userPreferredVolume);
- const [timelineId] = useState3(() => String(Math.random()));
- const parentSequence = useContext3(SequenceContext2);
- useMediaInTimeline2({
+ const parentSequence = useContext4(SequenceContext2);
+ const loopDisplay = useLoopDisplay({
+ loop,
+ mediaDurationInSeconds,
+ playbackRate,
+ trimAfter,
+ trimBefore
+ });
+ const { id: timelineId } = useMediaInTimeline({
  volume,
- mediaVolume,
  mediaType: "video",
  src,
  playbackRate,
  displayName: name ?? null,
- id: timelineId,
  stack,
  showInTimeline,
  premountDisplay: parentSequence?.premountDisplay ?? null,
- postmountDisplay: parentSequence?.postmountDisplay ?? null
+ postmountDisplay: parentSequence?.postmountDisplay ?? null,
+ loopDisplay,
+ mediaVolume,
+ trimAfter,
+ trimBefore
  });
+ const isSequenceHidden = hidden[timelineId] ?? false;
  if (!videoConfig) {
  throw new Error("No video config found");
  }
@@ -2687,7 +2886,7 @@ var NewVideoForPreview = ({
  const currentTimeRef = useRef2(currentTime);
  currentTimeRef.current = currentTime;
  const preloadedSrc = usePreload2(src);
- useEffect2(() => {
+ useEffect3(() => {
  if (!canvasRef.current)
  return;
  if (!sharedAudioContext)
@@ -2701,8 +2900,9 @@ var NewVideoForPreview = ({
  logLevel,
  sharedAudioContext: sharedAudioContext.audioContext,
  loop,
- trimAfterSeconds: trimAfter ? trimAfter / videoConfig.fps : undefined,
- trimBeforeSeconds: trimBefore ? trimBefore / videoConfig.fps : undefined,
+ trimAfter,
+ trimBefore,
+ fps: videoConfig.fps,
  playbackRate,
  audioStreamIndex
  });
@@ -2712,7 +2912,7 @@ var NewVideoForPreview = ({
  if (disallowFallbackToOffthreadVideo) {
  throw new Error(`Unknown container format ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
  }
- Internals12.Log.warn({ logLevel, tag: "@remotion/media" }, `Unknown container format for ${preloadedSrc} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <OffthreadVideo>`);
+ Internals14.Log.warn({ logLevel, tag: "@remotion/media" }, `Unknown container format for ${preloadedSrc} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <OffthreadVideo>`);
  setShouldFallbackToNativeVideo(true);
  return;
  }
@@ -2720,7 +2920,7 @@ var NewVideoForPreview = ({
  if (disallowFallbackToOffthreadVideo) {
  throw new Error(`Network error fetching ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
  }
- Internals12.Log.warn({ logLevel, tag: "@remotion/media" }, `Network error fetching ${preloadedSrc}, falling back to <OffthreadVideo>`);
+ Internals14.Log.warn({ logLevel, tag: "@remotion/media" }, `Network error fetching ${preloadedSrc}, falling back to <OffthreadVideo>`);
  setShouldFallbackToNativeVideo(true);
  return;
  }
@@ -2728,7 +2928,7 @@ var NewVideoForPreview = ({
  if (disallowFallbackToOffthreadVideo) {
  throw new Error(`Cannot decode ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
  }
- Internals12.Log.warn({ logLevel, tag: "@remotion/media" }, `Cannot decode ${preloadedSrc}, falling back to <OffthreadVideo>`);
+ Internals14.Log.warn({ logLevel, tag: "@remotion/media" }, `Cannot decode ${preloadedSrc}, falling back to <OffthreadVideo>`);
  setShouldFallbackToNativeVideo(true);
  return;
  }
@@ -2736,24 +2936,25 @@ var NewVideoForPreview = ({
2736
2936
  if (disallowFallbackToOffthreadVideo) {
2737
2937
  throw new Error(`No video or audio tracks found for ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
2738
2938
  }
2739
- Internals12.Log.warn({ logLevel, tag: "@remotion/media" }, `No video or audio tracks found for ${preloadedSrc}, falling back to <OffthreadVideo>`);
2939
+ Internals14.Log.warn({ logLevel, tag: "@remotion/media" }, `No video or audio tracks found for ${preloadedSrc}, falling back to <OffthreadVideo>`);
2740
2940
  setShouldFallbackToNativeVideo(true);
2741
2941
  return;
2742
2942
  }
2743
2943
  if (result.type === "success") {
2744
2944
  setMediaPlayerReady(true);
2945
+ setMediaDurationInSeconds(result.durationInSeconds);
2745
2946
  }
2746
2947
  }).catch((error) => {
2747
- Internals12.Log.error({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] Failed to initialize MediaPlayer", error);
2948
+ Internals14.Log.error({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] Failed to initialize MediaPlayer", error);
2748
2949
  setShouldFallbackToNativeVideo(true);
2749
2950
  });
2750
2951
  } catch (error) {
2751
- Internals12.Log.error({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] MediaPlayer initialization failed", error);
2952
+ Internals14.Log.error({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] MediaPlayer initialization failed", error);
2752
2953
  setShouldFallbackToNativeVideo(true);
2753
2954
  }
2754
2955
  return () => {
2755
2956
  if (mediaPlayerRef.current) {
2756
- Internals12.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewVideoForPreview] Disposing MediaPlayer`);
2957
+ Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewVideoForPreview] Disposing MediaPlayer`);
2757
2958
  mediaPlayerRef.current.dispose();
2758
2959
  mediaPlayerRef.current = null;
2759
2960
  }
@@ -2772,29 +2973,29 @@ var NewVideoForPreview = ({
2772
2973
  disallowFallbackToOffthreadVideo,
2773
2974
  audioStreamIndex
2774
2975
  ]);
2775
- const classNameValue = useMemo3(() => {
2776
- return [Internals12.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals12.truthy).join(" ");
2976
+ const classNameValue = useMemo4(() => {
2977
+ return [Internals14.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals14.truthy).join(" ");
2777
2978
  }, [className]);
2778
- useEffect2(() => {
2979
+ useEffect3(() => {
2779
2980
  const mediaPlayer = mediaPlayerRef.current;
2780
2981
  if (!mediaPlayer)
2781
2982
  return;
2782
2983
  if (playing) {
2783
2984
  mediaPlayer.play().catch((error) => {
2784
- Internals12.Log.error({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] Failed to play", error);
2985
+ Internals14.Log.error({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] Failed to play", error);
2785
2986
  });
2786
2987
  } else {
2787
2988
  mediaPlayer.pause();
2788
2989
  }
2789
2990
  }, [playing, logLevel, mediaPlayerReady]);
2790
- useEffect2(() => {
2991
+ useEffect3(() => {
2791
2992
  const mediaPlayer = mediaPlayerRef.current;
2792
2993
  if (!mediaPlayer || !mediaPlayerReady)
2793
2994
  return;
2794
2995
  mediaPlayer.seekTo(currentTime);
2795
- Internals12.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewVideoForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
2996
+ Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewVideoForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
2796
2997
  }, [currentTime, logLevel, mediaPlayerReady]);
2797
- useEffect2(() => {
2998
+ useEffect3(() => {
2798
2999
  const mediaPlayer = mediaPlayerRef.current;
2799
3000
  if (!mediaPlayer || !mediaPlayerReady)
2800
3001
  return;
@@ -2802,11 +3003,11 @@ var NewVideoForPreview = ({
2802
3003
  const unsubscribe = mediaPlayer.onBufferingChange((newBufferingState) => {
2803
3004
  if (newBufferingState && !currentBlock) {
2804
3005
  currentBlock = buffer.delayPlayback();
2805
- Internals12.Log.trace({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] MediaPlayer buffering - blocking Remotion playback");
3006
+ Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] MediaPlayer buffering - blocking Remotion playback");
2806
3007
  } else if (!newBufferingState && currentBlock) {
2807
3008
  currentBlock.unblock();
2808
3009
  currentBlock = null;
2809
- Internals12.Log.trace({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] MediaPlayer unbuffering - unblocking Remotion playback");
3010
+ Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] MediaPlayer unbuffering - unblocking Remotion playback");
2810
3011
  }
2811
3012
  });
2812
3013
  return () => {
@@ -2817,36 +3018,43 @@ var NewVideoForPreview = ({
2817
3018
  }
2818
3019
  };
2819
3020
  }, [mediaPlayerReady, buffer, logLevel]);
2820
- const effectiveMuted = muted || mediaMuted || userPreferredVolume <= 0;
2821
- useEffect2(() => {
3021
+ const effectiveMuted = isSequenceHidden || muted || mediaMuted || userPreferredVolume <= 0;
3022
+ useEffect3(() => {
2822
3023
  const mediaPlayer = mediaPlayerRef.current;
2823
3024
  if (!mediaPlayer || !mediaPlayerReady)
2824
3025
  return;
2825
3026
  mediaPlayer.setMuted(effectiveMuted);
2826
3027
  }, [effectiveMuted, mediaPlayerReady]);
2827
- useEffect2(() => {
3028
+ useEffect3(() => {
2828
3029
  const mediaPlayer = mediaPlayerRef.current;
2829
3030
  if (!mediaPlayer || !mediaPlayerReady) {
2830
3031
  return;
2831
3032
  }
2832
3033
  mediaPlayer.setVolume(userPreferredVolume);
2833
- }, [userPreferredVolume, mediaPlayerReady, logLevel]);
2834
- const effectivePlaybackRate = useMemo3(() => playbackRate * globalPlaybackRate, [playbackRate, globalPlaybackRate]);
2835
- useEffect2(() => {
3034
+ }, [userPreferredVolume, mediaPlayerReady]);
3035
+ const effectivePlaybackRate = useMemo4(() => playbackRate * globalPlaybackRate, [playbackRate, globalPlaybackRate]);
3036
+ useEffect3(() => {
2836
3037
  const mediaPlayer = mediaPlayerRef.current;
2837
3038
  if (!mediaPlayer || !mediaPlayerReady) {
2838
3039
  return;
2839
3040
  }
2840
3041
  mediaPlayer.setPlaybackRate(effectivePlaybackRate);
2841
- }, [effectivePlaybackRate, mediaPlayerReady, logLevel]);
2842
- useEffect2(() => {
3042
+ }, [effectivePlaybackRate, mediaPlayerReady]);
3043
+ useEffect3(() => {
2843
3044
  const mediaPlayer = mediaPlayerRef.current;
2844
3045
  if (!mediaPlayer || !mediaPlayerReady) {
2845
3046
  return;
2846
3047
  }
2847
3048
  mediaPlayer.setLoop(loop);
2848
3049
  }, [loop, mediaPlayerReady]);
2849
- useEffect2(() => {
3050
+ useEffect3(() => {
3051
+ const mediaPlayer = mediaPlayerRef.current;
3052
+ if (!mediaPlayer || !mediaPlayerReady) {
3053
+ return;
3054
+ }
3055
+ mediaPlayer.setFps(videoConfig.fps);
3056
+ }, [videoConfig.fps, mediaPlayerReady]);
3057
+ useEffect3(() => {
2850
3058
  const mediaPlayer = mediaPlayerRef.current;
2851
3059
  if (!mediaPlayer || !mediaPlayerReady || !onVideoFrame) {
2852
3060
  return;
@@ -2856,10 +3064,16 @@ var NewVideoForPreview = ({
2856
3064
  unsubscribe();
2857
3065
  };
2858
3066
  }, [onVideoFrame, mediaPlayerReady]);
3067
+ const actualStyle = useMemo4(() => {
3068
+ return {
3069
+ ...style,
3070
+ opacity: isSequenceHidden ? 0 : style?.opacity ?? 1
3071
+ };
3072
+ }, [isSequenceHidden, style]);
2859
3073
  if (shouldFallbackToNativeVideo && !disallowFallbackToOffthreadVideo) {
2860
- return /* @__PURE__ */ jsx4(Video, {
3074
+ return /* @__PURE__ */ jsx4(Html5Video, {
2861
3075
  src,
2862
- style,
3076
+ style: actualStyle,
2863
3077
  className,
2864
3078
  muted,
2865
3079
  volume,
@@ -2878,74 +3092,32 @@ var NewVideoForPreview = ({
2878
3092
  ref: canvasRef,
2879
3093
  width: videoConfig.width,
2880
3094
  height: videoConfig.height,
2881
- style,
3095
+ style: actualStyle,
2882
3096
  className: classNameValue
2883
3097
  });
2884
3098
  };
2885
- var VideoForPreview = ({
2886
- className,
2887
- loop,
2888
- src,
2889
- logLevel,
2890
- muted,
2891
- name,
2892
- volume,
2893
- loopVolumeCurveBehavior,
2894
- onVideoFrame,
2895
- playbackRate,
2896
- style,
2897
- showInTimeline,
2898
- trimAfter,
2899
- trimBefore,
2900
- stack,
2901
- disallowFallbackToOffthreadVideo,
2902
- fallbackOffthreadVideoProps,
2903
- audioStreamIndex
2904
- }) => {
2905
- const preloadedSrc = usePreload2(src);
2906
- return /* @__PURE__ */ jsx4(NewVideoForPreview, {
2907
- className,
2908
- logLevel,
2909
- muted,
2910
- onVideoFrame,
2911
- playbackRate,
2912
- src: preloadedSrc,
2913
- style,
2914
- volume,
2915
- name,
2916
- trimAfter,
2917
- trimBefore,
2918
- loop,
2919
- loopVolumeCurveBehavior,
2920
- showInTimeline,
2921
- stack,
2922
- disallowFallbackToOffthreadVideo,
2923
- fallbackOffthreadVideoProps,
2924
- audioStreamIndex
2925
- });
2926
- };
2927
3099
 
2928
3100
  // src/video/video-for-rendering.tsx
2929
3101
  import {
2930
- useContext as useContext4,
3102
+ useContext as useContext5,
2931
3103
  useLayoutEffect as useLayoutEffect2,
2932
- useMemo as useMemo4,
3104
+ useMemo as useMemo5,
2933
3105
  useRef as useRef3,
2934
- useState as useState4
3106
+ useState as useState5
2935
3107
  } from "react";
2936
3108
  import {
2937
3109
  cancelRender as cancelRender3,
2938
- Internals as Internals13,
3110
+ Internals as Internals15,
2939
3111
  Loop,
2940
3112
  random as random2,
2941
- useCurrentFrame as useCurrentFrame4,
3113
+ useCurrentFrame as useCurrentFrame5,
2942
3114
  useDelayRender as useDelayRender2,
2943
3115
  useRemotionEnvironment as useRemotionEnvironment3,
2944
- useVideoConfig
3116
+ useVideoConfig as useVideoConfig2
2945
3117
  } from "remotion";
2946
3118
 
2947
- // ../core/src/calculate-loop.ts
2948
- var calculateLoopDuration = ({
3119
+ // ../core/src/calculate-media-duration.ts
3120
+ var calculateMediaDuration = ({
2949
3121
  trimAfter,
2950
3122
  mediaDurationInFrames,
2951
3123
  playbackRate,
@@ -2989,13 +3161,13 @@ var VideoForRendering = ({
2989
3161
  if (!src) {
2990
3162
  throw new TypeError("No `src` was passed to <Video>.");
2991
3163
  }
2992
- const frame = useCurrentFrame4();
2993
- const absoluteFrame = Internals13.useTimelinePosition();
2994
- const { fps } = useVideoConfig();
2995
- const { registerRenderAsset, unregisterRenderAsset } = useContext4(Internals13.RenderAssetManager);
2996
- const startsAt = Internals13.useMediaStartsAt();
2997
- const sequenceContext = useContext4(Internals13.SequenceContext);
2998
- const id = useMemo4(() => `media-video-${random2(src)}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [
3164
+ const frame = useCurrentFrame5();
3165
+ const absoluteFrame = Internals15.useTimelinePosition();
3166
+ const { fps } = useVideoConfig2();
3167
+ const { registerRenderAsset, unregisterRenderAsset } = useContext5(Internals15.RenderAssetManager);
3168
+ const startsAt = Internals15.useMediaStartsAt();
3169
+ const sequenceContext = useContext5(Internals15.SequenceContext);
3170
+ const id = useMemo5(() => `media-video-${random2(src)}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [
2999
3171
  src,
3000
3172
  sequenceContext?.cumulatedFrom,
3001
3173
  sequenceContext?.relativeFrom,
@@ -3004,7 +3176,7 @@ var VideoForRendering = ({
3004
3176
  const environment = useRemotionEnvironment3();
3005
3177
  const { delayRender, continueRender } = useDelayRender2();
3006
3178
  const canvasRef = useRef3(null);
3007
- const [replaceWithOffthreadVideo, setReplaceWithOffthreadVideo] = useState4(false);
3179
+ const [replaceWithOffthreadVideo, setReplaceWithOffthreadVideo] = useState5(false);
3008
3180
  useLayoutEffect2(() => {
3009
3181
  if (!canvasRef.current) {
3010
3182
  return;
@@ -3047,7 +3219,7 @@ var VideoForRendering = ({
3047
3219
  cancelRender3(new Error(`Unknown container format ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
3048
3220
  }
3049
3221
  if (window.remotion_isMainTab) {
3050
- Internals13.Log.info({ logLevel, tag: "@remotion/media" }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <OffthreadVideo>`);
3222
+ Internals15.Log.info({ logLevel, tag: "@remotion/media" }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <OffthreadVideo>`);
3051
3223
  }
3052
3224
  setReplaceWithOffthreadVideo({ durationInSeconds: null });
3053
3225
  return;
@@ -3057,7 +3229,7 @@ var VideoForRendering = ({
3057
3229
  cancelRender3(new Error(`Cannot decode ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
3058
3230
  }
3059
3231
  if (window.remotion_isMainTab) {
3060
- Internals13.Log.info({ logLevel, tag: "@remotion/media" }, `Cannot decode ${src}, falling back to <OffthreadVideo>`);
3232
+ Internals15.Log.info({ logLevel, tag: "@remotion/media" }, `Cannot decode ${src}, falling back to <OffthreadVideo>`);
3061
3233
  }
3062
3234
  setReplaceWithOffthreadVideo({
3063
3235
  durationInSeconds: result.durationInSeconds
@@ -3069,7 +3241,7 @@ var VideoForRendering = ({
3069
3241
  cancelRender3(new Error(`Cannot decode ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
3070
3242
  }
3071
3243
  if (window.remotion_isMainTab) {
3072
- Internals13.Log.info({ logLevel, tag: "@remotion/media" }, `Network error fetching ${src}, falling back to <OffthreadVideo>`);
3244
+ Internals15.Log.info({ logLevel, tag: "@remotion/media" }, `Network error fetching ${src}, falling back to <OffthreadVideo>`);
3073
3245
  }
3074
3246
  setReplaceWithOffthreadVideo({ durationInSeconds: null });
3075
3247
  return;
@@ -3104,12 +3276,12 @@ var VideoForRendering = ({
3104
3276
  frame,
3105
3277
  startsAt
3106
3278
  });
3107
- const volume = Internals13.evaluateVolume({
3279
+ const volume = Internals15.evaluateVolume({
3108
3280
  volume: volumeProp,
3109
3281
  frame: volumePropsFrame,
3110
3282
  mediaVolume: 1
3111
3283
  });
3112
- Internals13.warnAboutTooHighVolume(volume);
3284
+ Internals15.warnAboutTooHighVolume(volume);
3113
3285
  if (audio && volume > 0) {
3114
3286
  applyVolume(audio.data, volume);
3115
3287
  registerRenderAsset({
@@ -3158,11 +3330,11 @@ var VideoForRendering = ({
3158
3330
  trimAfterValue,
3159
3331
  trimBeforeValue
3160
3332
  ]);
3161
- const classNameValue = useMemo4(() => {
3162
- return [Internals13.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals13.truthy).join(" ");
3333
+ const classNameValue = useMemo5(() => {
3334
+ return [Internals15.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals15.truthy).join(" ");
3163
3335
  }, [className]);
3164
3336
  if (replaceWithOffthreadVideo) {
3165
- const fallback = /* @__PURE__ */ jsx5(Internals13.InnerOffthreadVideo, {
3337
+ const fallback = /* @__PURE__ */ jsx5(Internals15.InnerOffthreadVideo, {
3166
3338
  src,
3167
3339
  playbackRate: playbackRate ?? 1,
3168
3340
  muted: muted ?? false,
@@ -3202,7 +3374,7 @@ var VideoForRendering = ({
3202
3374
  }
3203
3375
  return /* @__PURE__ */ jsx5(Loop, {
3204
3376
  layout: "none",
3205
- durationInFrames: calculateLoopDuration({
3377
+ durationInFrames: calculateMediaDuration({
3206
3378
  trimAfter: trimAfterValue,
3207
3379
  mediaDurationInFrames: replaceWithOffthreadVideo.durationInSeconds * fps,
3208
3380
  playbackRate,
@@ -3222,7 +3394,7 @@ var VideoForRendering = ({
3222
3394
 
3223
3395
  // src/video/video.tsx
3224
3396
  import { jsx as jsx6 } from "react/jsx-runtime";
3225
- var { validateMediaTrimProps, resolveTrimProps, validateMediaProps: validateMediaProps2 } = Internals14;
3397
+ var { validateMediaTrimProps, resolveTrimProps, validateMediaProps: validateMediaProps2 } = Internals16;
3226
3398
  var InnerVideo = ({
3227
3399
  src,
3228
3400
  audioStreamIndex,
@@ -3308,7 +3480,7 @@ var InnerVideo = ({
3308
3480
  fallbackOffthreadVideoProps
3309
3481
  });
3310
3482
  };
3311
- var Video2 = ({
3483
+ var Video = ({
3312
3484
  src,
3313
3485
  audioStreamIndex,
3314
3486
  className,
@@ -3355,14 +3527,14 @@ var Video2 = ({
3355
3527
  stack
3356
3528
  });
3357
3529
  };
3358
- Internals14.addSequenceStackTraces(Video2);
3530
+ Internals16.addSequenceStackTraces(Video);
3359
3531
  // src/index.ts
3360
- var experimental_Audio = Audio2;
3361
- var experimental_Video = Video2;
3532
+ var experimental_Audio = Audio;
3533
+ var experimental_Video = Video;
3362
3534
  export {
3363
3535
  experimental_Video,
3364
3536
  experimental_Audio,
3365
- Video2 as Video,
3537
+ Video,
3366
3538
  AudioForPreview,
3367
- Audio2 as Audio
3539
+ Audio
3368
3540
  };