@remotion/media 4.0.363 → 4.0.364

This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
@@ -10,171 +10,7 @@ import {
  useCurrentFrame as useCurrentFrame2
  } from "remotion";

- // src/show-in-timeline.ts
- import { useMemo } from "react";
- import { Internals, useVideoConfig } from "remotion";
- var useLoopDisplay = ({
- loop,
- mediaDurationInSeconds,
- playbackRate,
- trimAfter,
- trimBefore
- }) => {
- const { durationInFrames: compDuration, fps } = useVideoConfig();
- const loopDisplay = useMemo(() => {
- if (!loop || !mediaDurationInSeconds) {
- return;
- }
- const durationInFrames = Internals.calculateMediaDuration({
- mediaDurationInFrames: mediaDurationInSeconds * fps,
- playbackRate,
- trimAfter,
- trimBefore
- });
- const maxTimes = compDuration / durationInFrames;
- return {
- numberOfTimes: maxTimes,
- startOffset: 0,
- durationInFrames
- };
- }, [
- compDuration,
- fps,
- loop,
- mediaDurationInSeconds,
- playbackRate,
- trimAfter,
- trimBefore
- ]);
- return loopDisplay;
- };
-
- // src/use-media-in-timeline.ts
- import { useContext, useEffect, useState } from "react";
- import { Internals as Internals2, useCurrentFrame } from "remotion";
- var useMediaInTimeline = ({
- volume,
- mediaVolume,
- src,
- mediaType,
- playbackRate,
- displayName,
- stack,
- showInTimeline,
- premountDisplay,
- postmountDisplay,
- loopDisplay,
- trimBefore,
- trimAfter
- }) => {
- const parentSequence = useContext(Internals2.SequenceContext);
- const startsAt = Internals2.useMediaStartsAt();
- const { registerSequence, unregisterSequence } = useContext(Internals2.SequenceManager);
- const [sequenceId] = useState(() => String(Math.random()));
- const [mediaId] = useState(() => String(Math.random()));
- const frame = useCurrentFrame();
- const {
- volumes,
- duration,
- doesVolumeChange,
- nonce,
- rootId,
- isStudio,
- finalDisplayName
- } = Internals2.useBasicMediaInTimeline({
- volume,
- mediaVolume,
- mediaType,
- src,
- displayName,
- trimBefore,
- trimAfter,
- playbackRate
- });
- useEffect(() => {
- if (!src) {
- throw new Error("No src passed");
- }
- if (!isStudio && window.process?.env?.NODE_ENV !== "test") {
- return;
- }
- if (!showInTimeline) {
- return;
- }
- const loopIteration = loopDisplay ? Math.floor(frame / loopDisplay.durationInFrames) : 0;
- if (loopDisplay) {
- registerSequence({
- type: "sequence",
- premountDisplay,
- postmountDisplay,
- parent: parentSequence?.id ?? null,
- displayName: finalDisplayName,
- rootId,
- showInTimeline: true,
- nonce,
- loopDisplay,
- stack,
- from: 0,
- duration,
- id: sequenceId
- });
- }
- registerSequence({
- type: mediaType,
- src,
- id: mediaId,
- duration: loopDisplay?.durationInFrames ?? duration,
- from: loopDisplay ? loopIteration * loopDisplay.durationInFrames : 0,
- parent: loopDisplay ? sequenceId : parentSequence?.id ?? null,
- displayName: finalDisplayName,
- rootId,
- volume: volumes,
- showInTimeline: true,
- nonce,
- startMediaFrom: 0 - startsAt,
- doesVolumeChange,
- loopDisplay: undefined,
- playbackRate,
- stack,
- premountDisplay: null,
- postmountDisplay: null
- });
- return () => {
- if (loopDisplay) {
- unregisterSequence(sequenceId);
- }
- unregisterSequence(mediaId);
- };
- }, [
- doesVolumeChange,
- duration,
- finalDisplayName,
- isStudio,
- loopDisplay,
- mediaId,
- mediaType,
- nonce,
- parentSequence?.id,
- playbackRate,
- postmountDisplay,
- premountDisplay,
- registerSequence,
- rootId,
- sequenceId,
- showInTimeline,
- src,
- stack,
- startsAt,
- unregisterSequence,
- volumes,
- frame
- ]);
- return {
- id: mediaId
- };
- };
-
- // src/video/media-player.ts
+ // src/media-player.ts
  import {
  ALL_FORMATS,
  AudioBufferSink,
@@ -182,10 +18,71 @@ import {
  Input,
  UrlSource
  } from "mediabunny";
- import { Internals as Internals4 } from "remotion";
+ import { Internals as Internals2 } from "remotion";
+
+ // src/audio/audio-preview-iterator.ts
+ var HEALTHY_BUFFER_THRESHOLD_SECONDS = 1;
+ var makeAudioIterator = (audioSink, startFromSecond) => {
+ let destroyed = false;
+ const iterator = audioSink.buffers(startFromSecond);
+ let audioIteratorStarted = false;
+ let audioBufferHealth = 0;
+ const queuedAudioNodes = new Set;
+ const cleanupAudioQueue = () => {
+ for (const node of queuedAudioNodes) {
+ node.stop();
+ }
+ queuedAudioNodes.clear();
+ };
+ return {
+ cleanupAudioQueue,
+ destroy: () => {
+ cleanupAudioQueue();
+ destroyed = true;
+ iterator.return().catch(() => {
+ return;
+ });
+ },
+ isReadyToPlay: () => {
+ return audioIteratorStarted && audioBufferHealth > 0;
+ },
+ setAudioIteratorStarted: (started) => {
+ audioIteratorStarted = started;
+ },
+ getNext: () => {
+ return iterator.next();
+ },
+ setAudioBufferHealth: (health) => {
+ audioBufferHealth = health;
+ },
+ isDestroyed: () => {
+ return destroyed;
+ },
+ addQueuedAudioNode: (node) => {
+ queuedAudioNodes.add(node);
+ },
+ removeQueuedAudioNode: (node) => {
+ queuedAudioNodes.delete(node);
+ }
+ };
+ };
+
+ // src/debug-overlay/preview-overlay.ts
+ var drawPreviewOverlay = (context, stats, audioContextState, audioSyncAnchor) => {
+ context.fillStyle = "rgba(0, 0, 0, 1)";
+ context.fillRect(20, 20, 600, 180);
+ context.fillStyle = "white";
+ context.font = "24px sans-serif";
+ context.textBaseline = "top";
+ context.fillText(`Debug overlay`, 30, 30);
+ context.fillText(`Video iterators created: ${stats.videoIteratorsCreated}`, 30, 60);
+ context.fillText(`Frames rendered: ${stats.framesRendered}`, 30, 90);
+ context.fillText(`Audio context state: ${audioContextState}`, 30, 120);
+ context.fillText(`Audio time: ${audioSyncAnchor.toFixed(3)}s`, 30, 150);
+ };

  // src/get-time-in-seconds.ts
- import { Internals as Internals3 } from "remotion";
+ import { Internals } from "remotion";
  var getTimeInSeconds = ({
  loop,
  mediaDurationInSeconds,
@@ -200,7 +97,7 @@ var getTimeInSeconds = ({
  if (mediaDurationInSeconds === null && loop && ifNoMediaDuration === "fail") {
  throw new Error(`Could not determine duration of ${src}, but "loop" was set.`);
  }
- const loopDuration = loop ? Internals3.calculateMediaDuration({
+ const loopDuration = loop ? Internals.calculateMediaDuration({
  trimAfter,
  mediaDurationInFrames: mediaDurationInSeconds ? mediaDurationInSeconds * fps : Infinity,
  playbackRate: 1,
@@ -250,8 +147,135 @@ function withTimeout(promise, timeoutMs, errorMessage = "Operation timed out") {
  ]);
  }

- // src/video/media-player.ts
- var SEEK_THRESHOLD = 0.05;
+ // src/helpers/round-to-4-digits.ts
+ var roundTo4Digits = (timestamp) => {
+ return Math.round(timestamp * 1000) / 1000;
+ };
+
+ // src/video/video-preview-iterator.ts
+ var createVideoIterator = (timeToSeek, videoSink) => {
+ let destroyed = false;
+ const iterator = videoSink.canvases(timeToSeek);
+ let lastReturnedFrame = null;
+ let iteratorEnded = false;
+ const getNextOrNullIfNotAvailable = async () => {
+ const next = iterator.next();
+ const result = await Promise.race([
+ next,
+ new Promise((resolve) => {
+ Promise.resolve().then(() => resolve());
+ })
+ ]);
+ if (!result) {
+ return {
+ type: "need-to-wait-for-it",
+ waitPromise: async () => {
+ const res = await next;
+ if (res.value) {
+ lastReturnedFrame = res.value;
+ } else {
+ iteratorEnded = true;
+ }
+ return res.value;
+ }
+ };
+ }
+ if (result.value) {
+ lastReturnedFrame = result.value;
+ } else {
+ iteratorEnded = true;
+ }
+ return {
+ type: "got-frame-or-end",
+ frame: result.value ?? null
+ };
+ };
+ const destroy = () => {
+ destroyed = true;
+ lastReturnedFrame = null;
+ iterator.return().catch(() => {
+ return;
+ });
+ };
+ const tryToSatisfySeek = async (time) => {
+ if (lastReturnedFrame) {
+ const frameTimestamp = roundTo4Digits(lastReturnedFrame.timestamp);
+ if (roundTo4Digits(time) < frameTimestamp) {
+ return {
+ type: "not-satisfied",
+ reason: `iterator is too far, most recently returned ${frameTimestamp}`
+ };
+ }
+ const frameEndTimestamp = roundTo4Digits(lastReturnedFrame.timestamp + lastReturnedFrame.duration);
+ const timestamp = roundTo4Digits(time);
+ if (frameTimestamp <= timestamp && frameEndTimestamp > timestamp) {
+ return {
+ type: "satisfied",
+ frame: lastReturnedFrame
+ };
+ }
+ }
+ if (iteratorEnded) {
+ if (lastReturnedFrame) {
+ return {
+ type: "satisfied",
+ frame: lastReturnedFrame
+ };
+ }
+ return {
+ type: "not-satisfied",
+ reason: "iterator ended"
+ };
+ }
+ while (true) {
+ const frame = await getNextOrNullIfNotAvailable();
+ if (frame.type === "need-to-wait-for-it") {
+ return {
+ type: "not-satisfied",
+ reason: "iterator did not have frame ready"
+ };
+ }
+ if (frame.type === "got-frame-or-end") {
+ if (frame.frame === null) {
+ iteratorEnded = true;
+ if (lastReturnedFrame) {
+ return {
+ type: "satisfied",
+ frame: lastReturnedFrame
+ };
+ }
+ return {
+ type: "not-satisfied",
+ reason: "iterator ended and did not have frame ready"
+ };
+ }
+ const frameTimestamp = roundTo4Digits(frame.frame.timestamp);
+ const frameEndTimestamp = roundTo4Digits(frame.frame.timestamp + frame.frame.duration);
+ const timestamp = roundTo4Digits(time);
+ if (frameTimestamp <= timestamp && frameEndTimestamp > timestamp) {
+ return {
+ type: "satisfied",
+ frame: frame.frame
+ };
+ }
+ continue;
+ }
+ throw new Error("Unreachable");
+ }
+ };
+ return {
+ destroy,
+ getNext: () => {
+ return iterator.next();
+ },
+ isDestroyed: () => {
+ return destroyed;
+ },
+ tryToSatisfySeek
+ };
+ };
+
+ // src/media-player.ts
  var AUDIO_BUFFER_TOLERANCE_THRESHOLD = 0.1;

  class MediaPlayer {
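
The new src/video/video-preview-iterator.ts above decides whether an already-decoded canvas frame can satisfy a seek by testing the requested time against the frame's [timestamp, timestamp + duration) interval, with timestamps rounded to milliseconds (roundTo4Digits) to absorb floating-point jitter. A minimal standalone sketch of that interval test, using illustrative names that are not @remotion/media exports:

    // Round to milliseconds, mirroring roundTo4Digits above, so float noise
    // does not push a time just outside the frame it belongs to.
    const roundToMs = (t: number): number => Math.round(t * 1000) / 1000;

    type CachedFrame = {timestamp: number; duration: number};

    // True if the frame's display interval covers the requested time.
    const frameCoversTime = (frame: CachedFrame, time: number): boolean => {
      const start = roundToMs(frame.timestamp);
      const end = roundToMs(frame.timestamp + frame.duration);
      const t = roundToMs(time);
      return start <= t && end > t;
    };

    // A frame shown at 0.400s for 1/30s satisfies a seek to 0.416s:
    frameCoversTime({timestamp: 0.4, duration: 1 / 30}, 0.416); // true
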
@@ -263,10 +287,12 @@ class MediaPlayer {
  audioStreamIndex;
  canvasSink = null;
  videoFrameIterator = null;
- nextFrame = null;
+ debugStats = {
+ videoIteratorsCreated: 0,
+ framesRendered: 0
+ };
  audioSink = null;
  audioBufferIterator = null;
- queuedAudioNodes = new Set;
  gainNode = null;
  currentVolume = 1;
  sharedAudioContext;
@@ -277,18 +303,15 @@ class MediaPlayer {
  fps;
  trimBefore;
  trimAfter;
- animationFrameId = null;
- videoAsyncId = 0;
- audioAsyncId = 0;
  initialized = false;
  totalDuration;
  isBuffering = false;
  onBufferingChangeCallback;
- audioBufferHealth = 0;
- audioIteratorStarted = false;
- HEALTHY_BUFER_THRESHOLD_SECONDS = 1;
  mediaEnded = false;
+ debugOverlay = false;
  onVideoFrameCallback;
+ initializationPromise = null;
+ bufferState;
  constructor({
  canvas,
  src,
@@ -299,7 +322,9 @@ class MediaPlayer {
  trimAfter,
  playbackRate,
  audioStreamIndex,
- fps
+ fps,
+ debugOverlay,
+ bufferState
  }) {
  this.canvas = canvas ?? null;
  this.src = src;
@@ -311,6 +336,8 @@ class MediaPlayer {
  this.trimAfter = trimAfter;
  this.audioStreamIndex = audioStreamIndex ?? 0;
  this.fps = fps;
+ this.debugOverlay = debugOverlay;
+ this.bufferState = bufferState;
  if (canvas) {
  const context = canvas.getContext("2d", {
  alpha: true,
@@ -326,7 +353,7 @@ class MediaPlayer {
  }
  input = null;
  isReady() {
- return this.initialized && Boolean(this.sharedAudioContext);
+ return this.initialized && Boolean(this.sharedAudioContext) && !this.input?.disposed;
  }
  hasAudio() {
  return Boolean(this.audioSink && this.sharedAudioContext && this.gainNode);
@@ -334,7 +361,15 @@ class MediaPlayer {
  isCurrentlyBuffering() {
  return this.isBuffering && Boolean(this.bufferingStartedAtMs);
  }
- async initialize(startTimeUnresolved) {
+ isDisposalError() {
+ return this.input?.disposed === true;
+ }
+ initialize(startTimeUnresolved) {
+ const promise = this._initialize(startTimeUnresolved);
+ this.initializationPromise = promise;
+ return promise;
+ }
+ async _initialize(startTimeUnresolved) {
  try {
  const urlSource = new UrlSource(this.src);
  const input = new Input({
@@ -342,14 +377,20 @@ class MediaPlayer {
  formats: ALL_FORMATS
  });
  this.input = input;
+ if (input.disposed) {
+ return { type: "disposed" };
+ }
  try {
- await this.input.getFormat();
+ await input.getFormat();
  } catch (error) {
+ if (this.isDisposalError()) {
+ return { type: "disposed" };
+ }
  const err = error;
  if (isNetworkError(err)) {
  throw error;
  }
- Internals4.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Failed to recognize format for ${this.src}`, error);
+ Internals2.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Failed to recognize format for ${this.src}`, error);
  return { type: "unknown-container-format" };
  }
  const [durationInSeconds, videoTrack, audioTracks] = await Promise.all([
@@ -399,19 +440,23 @@ class MediaPlayer {
  this.audioSyncAnchor = this.sharedAudioContext.currentTime - startTime;
  }
  this.initialized = true;
- await Promise.all([
- this.startAudioIterator(startTime),
- this.startVideoIterator(startTime)
- ]);
- this.startRenderLoop();
+ try {
+ this.startAudioIterator(startTime);
+ await this.startVideoIterator(startTime, this.currentSeekNonce);
+ } catch (error) {
+ if (this.isDisposalError()) {
+ return { type: "disposed" };
+ }
+ Internals2.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to start audio and video iterators", error);
+ }
  return { type: "success", durationInSeconds };
  } catch (error) {
  const err = error;
  if (isNetworkError(err)) {
- Internals4.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Network/CORS error for ${this.src}`, err);
+ Internals2.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Network/CORS error for ${this.src}`, err);
  return { type: "network-error" };
  }
- Internals4.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to initialize", error);
+ Internals2.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to initialize", error);
  throw error;
  }
  }
@@ -420,20 +465,19 @@ class MediaPlayer {
  this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
  }
  }
- cleanupAudioQueue() {
- for (const node of this.queuedAudioNodes) {
- node.stop();
- }
- this.queuedAudioNodes.clear();
- }
- async cleanAudioIteratorAndNodes() {
- await this.audioBufferIterator?.return();
- this.audioBufferIterator = null;
- this.audioIteratorStarted = false;
- this.audioBufferHealth = 0;
- this.cleanupAudioQueue();
- }
+ currentSeekNonce = 0;
+ seekPromiseChain = Promise.resolve();
  async seekTo(time) {
+ this.currentSeekNonce++;
+ const nonce = this.currentSeekNonce;
+ await this.seekPromiseChain;
+ this.seekPromiseChain = this.seekToDoNotCallDirectly(time, nonce);
+ await this.seekPromiseChain;
+ }
+ async seekToDoNotCallDirectly(time, nonce) {
+ if (nonce !== this.currentSeekNonce) {
+ return;
+ }
  if (!this.isReady())
  return;
  const newTime = getTimeInSeconds({
@@ -448,29 +492,29 @@ class MediaPlayer {
  src: this.src
  });
  if (newTime === null) {
- this.videoAsyncId++;
- this.nextFrame = null;
+ this.videoFrameIterator?.destroy();
+ this.videoFrameIterator = null;
  this.clearCanvas();
- await this.cleanAudioIteratorAndNodes();
+ this.audioBufferIterator?.destroy();
+ this.audioBufferIterator = null;
  return;
  }
  const currentPlaybackTime = this.getPlaybackTime();
- const isSignificantSeek = currentPlaybackTime === null || Math.abs(newTime - currentPlaybackTime) > SEEK_THRESHOLD;
- if (isSignificantSeek) {
- this.nextFrame = null;
- this.audioSyncAnchor = this.sharedAudioContext.currentTime - newTime;
- this.mediaEnded = false;
- if (this.audioSink) {
- await this.cleanAudioIteratorAndNodes();
- }
- await Promise.all([
- this.startAudioIterator(newTime),
- this.startVideoIterator(newTime)
- ]);
+ if (currentPlaybackTime === newTime) {
+ return;
  }
- if (!this.playing) {
- this.render();
+ const satisfyResult = await this.videoFrameIterator?.tryToSatisfySeek(newTime);
+ if (satisfyResult?.type === "satisfied") {
+ this.drawFrame(satisfyResult.frame);
+ return;
  }
+ if (this.currentSeekNonce !== nonce) {
+ return;
+ }
+ this.mediaEnded = false;
+ this.audioSyncAnchor = this.sharedAudioContext.currentTime - newTime;
+ this.startAudioIterator(newTime);
+ this.startVideoIterator(newTime, nonce);
  }
  async play() {
  if (!this.isReady())
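
The seekTo change above drops the old SEEK_THRESHOLD-based re-seek and instead serializes seeks through a promise chain guarded by an incrementing nonce, so rapid successive seeks cannot interleave and stale ones bail out early. A rough standalone sketch of that pattern, with hypothetical names that are not the package's API:

    // Each call bumps the nonce and appends to the chain; a seek that has
    // been superseded by a newer one returns without doing any work.
    class SerializedSeeker {
      private nonce = 0;
      private chain: Promise<void> = Promise.resolve();

      seekTo(time: number, doSeek: (t: number) => Promise<void>): Promise<void> {
        this.nonce++;
        const myNonce = this.nonce;
        this.chain = this.chain.then(async () => {
          if (myNonce !== this.nonce) {
            return; // a newer seek arrived while this one was queued
          }
          await doSeek(time);
        });
        return this.chain;
      }
    }

    // Of three rapid seeks, only the last performs the expensive work.
    const seeker = new SerializedSeeker();
    void seeker.seekTo(0.5, async () => {});
    void seeker.seekTo(1.0, async () => {});
    void seeker.seekTo(1.5, async (t) => console.log(`seeking to ${t}s`));
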
@@ -480,13 +524,11 @@ class MediaPlayer {
  await this.sharedAudioContext.resume();
  }
  this.playing = true;
- this.startRenderLoop();
  }
  }
  pause() {
  this.playing = false;
- this.cleanupAudioQueue();
- this.stopRenderLoop();
+ this.audioBufferIterator?.cleanupAudioQueue();
  }
  setMuted(muted) {
  this.muted = muted;
@@ -504,6 +546,9 @@ class MediaPlayer {
  this.gainNode.gain.value = appliedVolume;
  }
  }
+ setDebugOverlay(debugOverlay) {
+ this.debugOverlay = debugOverlay;
+ }
  setPlaybackRate(rate) {
  this.playbackRate = rate;
  }
@@ -513,12 +558,18 @@ class MediaPlayer {
  setLoop(loop) {
  this.loop = loop;
  }
- dispose() {
+ async dispose() {
+ this.initialized = false;
+ if (this.initializationPromise) {
+ try {
+ await this.initializationPromise;
+ } catch {}
+ }
  this.input?.dispose();
- this.stopRenderLoop();
- this.videoFrameIterator?.return();
- this.cleanAudioIteratorAndNodes();
- this.videoAsyncId++;
+ this.videoFrameIterator?.destroy();
+ this.videoFrameIterator = null;
+ this.audioBufferIterator?.destroy();
+ this.audioBufferIterator = null;
  }
  getPlaybackTime() {
  return this.sharedAudioContext.currentTime - this.audioSyncAnchor;
@@ -535,8 +586,8 @@ class MediaPlayer {
  } else {
  node.start(this.sharedAudioContext.currentTime, -delay);
  }
- this.queuedAudioNodes.add(node);
- node.onended = () => this.queuedAudioNodes.delete(node);
+ this.audioBufferIterator?.addQueuedAudioNode(node);
+ node.onended = () => this.audioBufferIterator?.removeQueuedAudioNode(node);
  }
  onBufferingChange(callback) {
  this.onBufferingChangeCallback = callback;
@@ -557,124 +608,65 @@ class MediaPlayer {
  }
  };
  }
- canRenderVideo() {
- return !this.hasAudio() || this.audioIteratorStarted && this.audioBufferHealth >= this.HEALTHY_BUFER_THRESHOLD_SECONDS;
- }
- startRenderLoop() {
- if (this.animationFrameId !== null) {
- return;
- }
- this.render();
- }
- stopRenderLoop() {
- if (this.animationFrameId !== null) {
- cancelAnimationFrame(this.animationFrameId);
- this.animationFrameId = null;
- }
- }
- render = () => {
- if (this.isBuffering) {
- this.maybeForceResumeFromBuffering();
- }
- if (this.shouldRenderFrame()) {
- this.drawCurrentFrame();
- }
- if (this.playing) {
- this.animationFrameId = requestAnimationFrame(this.render);
- } else {
- this.animationFrameId = null;
- }
- };
- shouldRenderFrame() {
- const playbackTime = this.getPlaybackTime();
- if (playbackTime === null) {
- return false;
- }
- return !this.isBuffering && this.canRenderVideo() && this.nextFrame !== null && this.nextFrame.timestamp <= playbackTime;
- }
- drawCurrentFrame() {
- if (this.context && this.nextFrame) {
- this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
- this.context.drawImage(this.nextFrame.canvas, 0, 0);
+ drawFrame = (frame) => {
+ if (!this.context) {
+ throw new Error("Context not initialized");
  }
+ this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
+ this.context.drawImage(frame.canvas, 0, 0);
+ this.debugStats.framesRendered++;
+ this.drawDebugOverlay();
  if (this.onVideoFrameCallback && this.canvas) {
  this.onVideoFrameCallback(this.canvas);
  }
- this.nextFrame = null;
- this.updateNextFrame();
- }
- startAudioIterator = async (startFromSecond) => {
+ Internals2.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Drew frame ${frame.timestamp.toFixed(3)}s`);
+ };
+ startAudioIterator = (startFromSecond) => {
  if (!this.hasAudio())
  return;
- this.audioAsyncId++;
- const currentAsyncId = this.audioAsyncId;
- await this.audioBufferIterator?.return();
- this.audioIteratorStarted = false;
- this.audioBufferHealth = 0;
+ this.audioBufferIterator?.destroy();
  try {
- this.audioBufferIterator = this.audioSink.buffers(startFromSecond);
- this.runAudioIterator(startFromSecond, currentAsyncId);
+ const iterator = makeAudioIterator(this.audioSink, startFromSecond);
+ this.audioBufferIterator = iterator;
+ this.runAudioIterator(startFromSecond, iterator);
  } catch (error) {
- Internals4.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to start audio iterator", error);
+ if (this.isDisposalError()) {
+ return;
+ }
+ Internals2.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to start audio iterator", error);
  }
  };
- startVideoIterator = async (timeToSeek) => {
+ drawDebugOverlay() {
+ if (!this.debugOverlay)
+ return;
+ if (this.context && this.canvas) {
+ drawPreviewOverlay(this.context, this.debugStats, this.sharedAudioContext.state, this.sharedAudioContext.currentTime);
+ }
+ }
+ startVideoIterator = async (timeToSeek, nonce) => {
  if (!this.canvasSink) {
  return;
  }
- this.videoAsyncId++;
- const currentAsyncId = this.videoAsyncId;
- this.videoFrameIterator?.return().catch(() => {
+ this.videoFrameIterator?.destroy();
+ const iterator = createVideoIterator(timeToSeek, this.canvasSink);
+ this.debugStats.videoIteratorsCreated++;
+ this.videoFrameIterator = iterator;
+ const delayHandle = this.bufferState?.delayPlayback();
+ const frameResult = await iterator.getNext();
+ delayHandle?.unblock();
+ if (iterator.isDestroyed()) {
  return;
- });
- this.videoFrameIterator = this.canvasSink.canvases(timeToSeek);
- try {
- const firstFrame = (await this.videoFrameIterator.next()).value ?? null;
- const secondFrame = (await this.videoFrameIterator.next()).value ?? null;
- if (currentAsyncId !== this.videoAsyncId) {
- return;
- }
- if (firstFrame && this.context) {
- Internals4.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Drew initial frame ${firstFrame.timestamp.toFixed(3)}s`);
- this.context.drawImage(firstFrame.canvas, 0, 0);
- if (this.onVideoFrameCallback && this.canvas) {
- this.onVideoFrameCallback(this.canvas);
- }
- }
- this.nextFrame = secondFrame ?? null;
- if (secondFrame) {
- Internals4.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Buffered next frame ${secondFrame.timestamp.toFixed(3)}s`);
- }
- } catch (error) {
- Internals4.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to start video iterator", error);
  }
- };
- updateNextFrame = async () => {
- if (!this.videoFrameIterator) {
+ if (nonce !== this.currentSeekNonce) {
  return;
  }
- try {
- while (true) {
- const newNextFrame = (await this.videoFrameIterator.next()).value ?? null;
- if (!newNextFrame) {
- this.mediaEnded = true;
- break;
- }
- const playbackTime = this.getPlaybackTime();
- if (playbackTime === null) {
- continue;
- }
- if (newNextFrame.timestamp <= playbackTime) {
- continue;
- } else {
- this.nextFrame = newNextFrame;
- Internals4.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Buffered next frame ${newNextFrame.timestamp.toFixed(3)}s`);
- break;
- }
- }
- } catch (error) {
- Internals4.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to update next frame", error);
+ if (this.videoFrameIterator.isDestroyed()) {
+ return;
  }
+ if (frameResult.value) {
+ this.audioSyncAnchor = this.sharedAudioContext.currentTime - frameResult.value.timestamp;
+ this.drawFrame(frameResult.value);
+ } else {}
  };
  bufferingStartedAtMs = null;
  minBufferingTimeoutMs = 500;
@@ -696,38 +688,27 @@ class MediaPlayer {
  const now = performance.now();
  const bufferingDuration = now - this.bufferingStartedAtMs;
  const minTimeElapsed = bufferingDuration >= this.minBufferingTimeoutMs;
- const bufferHealthy = currentBufferDuration >= this.HEALTHY_BUFER_THRESHOLD_SECONDS;
+ const bufferHealthy = currentBufferDuration >= HEALTHY_BUFFER_THRESHOLD_SECONDS;
  if (minTimeElapsed && bufferHealthy) {
- Internals4.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Resuming from buffering after ${bufferingDuration}ms - buffer recovered`);
+ Internals2.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Resuming from buffering after ${bufferingDuration}ms - buffer recovered`);
  this.setBufferingState(false);
  }
  }
- maybeForceResumeFromBuffering() {
- if (!this.isCurrentlyBuffering())
- return;
- const now = performance.now();
- const bufferingDuration = now - this.bufferingStartedAtMs;
- const forceTimeout = bufferingDuration > this.minBufferingTimeoutMs * 10;
- if (forceTimeout) {
- Internals4.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Force resuming from buffering after ${bufferingDuration}ms`);
- this.setBufferingState(false);
- }
- }
- runAudioIterator = async (startFromSecond, audioAsyncId) => {
- if (!this.hasAudio() || !this.audioBufferIterator)
+ runAudioIterator = async (startFromSecond, audioIterator) => {
+ if (!this.hasAudio())
  return;
  try {
  let totalBufferDuration = 0;
  let isFirstBuffer = true;
- this.audioIteratorStarted = true;
+ audioIterator.setAudioIteratorStarted(true);
  while (true) {
- if (audioAsyncId !== this.audioAsyncId) {
+ if (audioIterator.isDestroyed()) {
  return;
  }
  const BUFFERING_TIMEOUT_MS = 50;
  let result;
  try {
- result = await withTimeout(this.audioBufferIterator.next(), BUFFERING_TIMEOUT_MS, "Iterator timeout");
+ result = await withTimeout(audioIterator.getNext(), BUFFERING_TIMEOUT_MS, "Iterator timeout");
  } catch (error) {
  if (error instanceof TimeoutError && !this.mediaEnded) {
  this.setBufferingState(true);
@@ -741,7 +722,7 @@ class MediaPlayer {
  }
  const { buffer, timestamp, duration } = result.value;
  totalBufferDuration += duration;
- this.audioBufferHealth = Math.max(0, totalBufferDuration / this.playbackRate);
+ audioIterator.setAudioBufferHealth(Math.max(0, totalBufferDuration / this.playbackRate));
  this.maybeResumeFromBuffering(totalBufferDuration / this.playbackRate);
  if (this.playing) {
  if (isFirstBuffer) {
@@ -772,11 +753,178 @@ class MediaPlayer {
  }
  }
  } catch (error) {
- Internals4.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to run audio iterator", error);
+ if (this.isDisposalError()) {
+ return;
+ }
+ Internals2.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to run audio iterator", error);
  }
  };
  }

+ // src/show-in-timeline.ts
+ import { useMemo } from "react";
+ import { Internals as Internals3, useVideoConfig } from "remotion";
+ var useLoopDisplay = ({
+ loop,
+ mediaDurationInSeconds,
+ playbackRate,
+ trimAfter,
+ trimBefore
+ }) => {
+ const { durationInFrames: compDuration, fps } = useVideoConfig();
+ const loopDisplay = useMemo(() => {
+ if (!loop || !mediaDurationInSeconds) {
+ return;
+ }
+ const durationInFrames = Internals3.calculateMediaDuration({
+ mediaDurationInFrames: mediaDurationInSeconds * fps,
+ playbackRate,
+ trimAfter,
+ trimBefore
+ });
+ const maxTimes = compDuration / durationInFrames;
+ return {
+ numberOfTimes: maxTimes,
+ startOffset: 0,
+ durationInFrames
+ };
+ }, [
+ compDuration,
+ fps,
+ loop,
+ mediaDurationInSeconds,
+ playbackRate,
+ trimAfter,
+ trimBefore
+ ]);
+ return loopDisplay;
+ };
+
+ // src/use-media-in-timeline.ts
+ import { useContext, useEffect, useState } from "react";
+ import { Internals as Internals4, useCurrentFrame } from "remotion";
+ var useMediaInTimeline = ({
+ volume,
+ mediaVolume,
+ src,
+ mediaType,
+ playbackRate,
+ displayName,
+ stack,
+ showInTimeline,
+ premountDisplay,
+ postmountDisplay,
+ loopDisplay,
+ trimBefore,
+ trimAfter
+ }) => {
+ const parentSequence = useContext(Internals4.SequenceContext);
+ const startsAt = Internals4.useMediaStartsAt();
+ const { registerSequence, unregisterSequence } = useContext(Internals4.SequenceManager);
+ const [sequenceId] = useState(() => String(Math.random()));
+ const [mediaId] = useState(() => String(Math.random()));
+ const frame = useCurrentFrame();
+ const {
+ volumes,
+ duration,
+ doesVolumeChange,
+ nonce,
+ rootId,
+ isStudio,
+ finalDisplayName
+ } = Internals4.useBasicMediaInTimeline({
+ volume,
+ mediaVolume,
+ mediaType,
+ src,
+ displayName,
+ trimBefore,
+ trimAfter,
+ playbackRate
+ });
+ useEffect(() => {
+ if (!src) {
+ throw new Error("No src passed");
+ }
+ if (!isStudio && window.process?.env?.NODE_ENV !== "test") {
+ return;
+ }
+ if (!showInTimeline) {
+ return;
+ }
+ const loopIteration = loopDisplay ? Math.floor(frame / loopDisplay.durationInFrames) : 0;
+ if (loopDisplay) {
+ registerSequence({
+ type: "sequence",
+ premountDisplay,
+ postmountDisplay,
+ parent: parentSequence?.id ?? null,
+ displayName: finalDisplayName,
+ rootId,
+ showInTimeline: true,
+ nonce,
+ loopDisplay,
+ stack,
+ from: 0,
+ duration,
+ id: sequenceId
+ });
+ }
+ registerSequence({
+ type: mediaType,
+ src,
+ id: mediaId,
+ duration: loopDisplay?.durationInFrames ?? duration,
+ from: loopDisplay ? loopIteration * loopDisplay.durationInFrames : 0,
+ parent: loopDisplay ? sequenceId : parentSequence?.id ?? null,
+ displayName: finalDisplayName,
+ rootId,
+ volume: volumes,
+ showInTimeline: true,
+ nonce,
+ startMediaFrom: 0 - startsAt,
+ doesVolumeChange,
+ loopDisplay: undefined,
+ playbackRate,
+ stack,
+ premountDisplay: null,
+ postmountDisplay: null
+ });
+ return () => {
+ if (loopDisplay) {
+ unregisterSequence(sequenceId);
+ }
+ unregisterSequence(mediaId);
+ };
+ }, [
+ doesVolumeChange,
+ duration,
+ finalDisplayName,
+ isStudio,
+ loopDisplay,
+ mediaId,
+ mediaType,
+ nonce,
+ parentSequence?.id,
+ playbackRate,
+ postmountDisplay,
+ premountDisplay,
+ registerSequence,
+ rootId,
+ sequenceId,
+ showInTimeline,
+ src,
+ stack,
+ startsAt,
+ unregisterSequence,
+ volumes,
+ frame
+ ]);
+ return {
+ id: mediaId
+ };
+ };
+
  // src/audio/audio-for-preview.tsx
  import { jsx } from "react/jsx-runtime";
  var {
@@ -878,7 +1026,9 @@ var NewAudioForPreview = ({
  fps: videoConfig.fps,
  canvas: null,
  playbackRate,
- audioStreamIndex: audioStreamIndex ?? 0
+ audioStreamIndex: audioStreamIndex ?? 0,
+ debugOverlay: false,
+ bufferState: buffer
  });
  mediaPlayerRef.current = player;
  player.initialize(currentTimeRef.current).then((result) => {
@@ -950,7 +1100,8 @@ var NewAudioForPreview = ({
  playbackRate,
  videoConfig.fps,
  audioStreamIndex,
- disallowFallbackToHtml5Audio
+ disallowFallbackToHtml5Audio,
+ buffer
  ]);
  useEffect2(() => {
  const audioPlayer = mediaPlayerRef.current;
@@ -1297,7 +1448,7 @@ var warnAboutMatroskaOnce = (src, logLevel) => {
  warned[src] = true;
  Internals6.Log.warn({ logLevel, tag: "@remotion/media" }, `Audio from ${src} will need to be read from the beginning. https://www.remotion.dev/docs/media/support#matroska-limitation`);
  };
- var makeAudioIterator = ({
+ var makeAudioIterator2 = ({
  audioSampleSink,
  isMatroska,
  startTimestamp,
@@ -1422,7 +1573,7 @@ var makeAudioManager = () => {
  actualMatroskaTimestamps,
  logLevel
  }) => {
- const iterator = makeAudioIterator({
+ const iterator = makeAudioIterator2({
  audioSampleSink,
  isMatroska,
  startTimestamp: timeInSeconds,
@@ -1584,9 +1735,6 @@ import {

  // src/video-extraction/keyframe-bank.ts
  import { Internals as Internals8 } from "remotion";
- var roundTo4Digits = (timestamp) => {
- return Math.round(timestamp * 1000) / 1000;
- };
  var makeKeyframeBank = ({
  startTimestampInSeconds,
  endTimestampInSeconds,
@@ -2891,7 +3039,14 @@ Internals13.addSequenceStackTraces(Audio);
  import { Internals as Internals16, useRemotionEnvironment as useRemotionEnvironment4 } from "remotion";

  // src/video/video-for-preview.tsx
- import { useContext as useContext4, useEffect as useEffect3, useMemo as useMemo4, useRef as useRef2, useState as useState4 } from "react";
+ import {
+ useContext as useContext4,
+ useEffect as useEffect3,
+ useLayoutEffect as useLayoutEffect2,
+ useMemo as useMemo4,
+ useRef as useRef2,
+ useState as useState4
+ } from "react";
  import { Html5Video, Internals as Internals14, useBufferState as useBufferState2, useCurrentFrame as useCurrentFrame4 } from "remotion";
  import { jsx as jsx4 } from "react/jsx-runtime";
  var {
@@ -2925,7 +3080,8 @@ var VideoForPreview = ({
  stack,
  disallowFallbackToOffthreadVideo,
  fallbackOffthreadVideoProps,
- audioStreamIndex
+ audioStreamIndex,
+ debugOverlay
  }) => {
  const src = usePreload2(unpreloadedSrc);
  const canvasRef = useRef2(null);
@@ -2977,9 +3133,6 @@ var VideoForPreview = ({
  if (!videoConfig) {
  throw new Error("No video config found");
  }
- if (!src) {
- throw new TypeError("No `src` was passed to <NewVideoForPreview>.");
- }
  const currentTime = frame / videoConfig.fps;
  const currentTimeRef = useRef2(currentTime);
  currentTimeRef.current = currentTime;
@@ -3002,10 +3155,15 @@ var VideoForPreview = ({
  trimBefore,
  fps: videoConfig.fps,
  playbackRate,
- audioStreamIndex
+ audioStreamIndex,
+ debugOverlay,
+ bufferState: buffer
  });
  mediaPlayerRef.current = player;
  player.initialize(currentTimeRef.current).then((result) => {
+ if (result.type === "disposed") {
+ return;
+ }
  if (result.type === "unknown-container-format") {
  if (disallowFallbackToOffthreadVideo) {
  throw new Error(`Unknown container format ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
@@ -3043,16 +3201,16 @@ var VideoForPreview = ({
  setMediaDurationInSeconds(result.durationInSeconds);
  }
  }).catch((error) => {
- Internals14.Log.error({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] Failed to initialize MediaPlayer", error);
+ Internals14.Log.error({ logLevel, tag: "@remotion/media" }, "[VideoForPreview] Failed to initialize MediaPlayer", error);
  setShouldFallbackToNativeVideo(true);
  });
  } catch (error) {
- Internals14.Log.error({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] MediaPlayer initialization failed", error);
+ Internals14.Log.error({ logLevel, tag: "@remotion/media" }, "[VideoForPreview] MediaPlayer initialization failed", error);
  setShouldFallbackToNativeVideo(true);
  }
  return () => {
  if (mediaPlayerRef.current) {
- Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewVideoForPreview] Disposing MediaPlayer`);
+ Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, `[VideoForPreview] Disposing MediaPlayer`);
  mediaPlayerRef.current.dispose();
  mediaPlayerRef.current = null;
  }
@@ -3069,7 +3227,9 @@ var VideoForPreview = ({
  videoConfig.fps,
  playbackRate,
  disallowFallbackToOffthreadVideo,
- audioStreamIndex
+ audioStreamIndex,
+ debugOverlay,
+ buffer
  ]);
  const classNameValue = useMemo4(() => {
  return [Internals14.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals14.truthy).join(" ");
@@ -3080,18 +3240,18 @@ var VideoForPreview = ({
  return;
  if (playing) {
  mediaPlayer.play().catch((error) => {
- Internals14.Log.error({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] Failed to play", error);
+ Internals14.Log.error({ logLevel, tag: "@remotion/media" }, "[VideoForPreview] Failed to play", error);
  });
  } else {
  mediaPlayer.pause();
  }
  }, [playing, logLevel, mediaPlayerReady]);
- useEffect3(() => {
+ useLayoutEffect2(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady)
  return;
  mediaPlayer.seekTo(currentTime);
- Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewVideoForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
+ Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, `[VideoForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
  }, [currentTime, logLevel, mediaPlayerReady]);
  useEffect3(() => {
  const mediaPlayer = mediaPlayerRef.current;
@@ -3101,11 +3261,11 @@ var VideoForPreview = ({
  const unsubscribe = mediaPlayer.onBufferingChange((newBufferingState) => {
  if (newBufferingState && !currentBlock) {
  currentBlock = buffer.delayPlayback();
- Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] MediaPlayer buffering - blocking Remotion playback");
+ Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, "[VideoForPreview] MediaPlayer buffering - blocking Remotion playback");
  } else if (!newBufferingState && currentBlock) {
  currentBlock.unblock();
  currentBlock = null;
- Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] MediaPlayer unbuffering - unblocking Remotion playback");
+ Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, "[VideoForPreview] MediaPlayer unbuffering - unblocking Remotion playback");
  }
  });
  return () => {
@@ -3130,6 +3290,13 @@ var VideoForPreview = ({
  }
  mediaPlayer.setVolume(userPreferredVolume);
  }, [userPreferredVolume, mediaPlayerReady]);
+ useEffect3(() => {
+ const mediaPlayer = mediaPlayerRef.current;
+ if (!mediaPlayer || !mediaPlayerReady) {
+ return;
+ }
+ mediaPlayer.setDebugOverlay(debugOverlay);
+ }, [debugOverlay, mediaPlayerReady]);
  const effectivePlaybackRate = useMemo4(() => playbackRate * globalPlaybackRate, [playbackRate, globalPlaybackRate]);
  useEffect3(() => {
  const mediaPlayer = mediaPlayerRef.current;
@@ -3198,7 +3365,7 @@ var VideoForPreview = ({
  // src/video/video-for-rendering.tsx
  import {
  useContext as useContext5,
- useLayoutEffect as useLayoutEffect2,
+ useLayoutEffect as useLayoutEffect3,
  useMemo as useMemo5,
  useRef as useRef3,
  useState as useState5
@@ -3257,7 +3424,7 @@ var VideoForRendering = ({
  const [replaceWithOffthreadVideo, setReplaceWithOffthreadVideo] = useState5(false);
  const audioEnabled = Internals15.useAudioEnabled();
  const videoEnabled = Internals15.useVideoEnabled();
- useLayoutEffect2(() => {
+ useLayoutEffect3(() => {
  if (!canvasRef.current) {
  return;
  }
@@ -3514,7 +3681,8 @@ var InnerVideo = ({
  volume,
  stack,
  toneFrequency,
- showInTimeline
+ showInTimeline,
+ debugOverlay
  }) => {
  const environment = useRemotionEnvironment4();
  if (typeof src !== "string") {
@@ -3575,7 +3743,8 @@ var InnerVideo = ({
  trimBefore: trimBeforeValue,
  stack: stack ?? null,
  disallowFallbackToOffthreadVideo,
- fallbackOffthreadVideoProps
+ fallbackOffthreadVideoProps,
+ debugOverlay: debugOverlay ?? false
  });
  };
  var Video = ({
@@ -3599,7 +3768,8 @@ var Video = ({
  trimBefore,
  volume,
  stack,
- toneFrequency
+ toneFrequency,
+ debugOverlay
  }) => {
  return /* @__PURE__ */ jsx6(InnerVideo, {
  audioStreamIndex: audioStreamIndex ?? 0,
@@ -3622,7 +3792,8 @@ var Video = ({
  trimBefore,
  volume: volume ?? 1,
  toneFrequency: toneFrequency ?? 1,
- stack
+ stack,
+ debugOverlay: debugOverlay ?? false
  });
  };
  Internals16.addSequenceStackTraces(Video);