@remotion/media 4.0.363 → 4.0.364

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1 @@
1
+ export declare const roundTo4Digits: (timestamp: number) => number;
@@ -0,0 +1,4 @@
1
+ // Round to only 4 digits, because WebM has a timescale of 1_000, e.g. framer.webm
2
+ export const roundTo4Digits = (timestamp) => {
3
+ return Math.round(timestamp * 1000) / 1000;
4
+ };
@@ -0,0 +1,95 @@
1
+ import type { LogLevel, useBufferState } from 'remotion';
2
+ export type MediaPlayerInitResult = {
3
+ type: 'success';
4
+ durationInSeconds: number;
5
+ } | {
6
+ type: 'unknown-container-format';
7
+ } | {
8
+ type: 'cannot-decode';
9
+ } | {
10
+ type: 'network-error';
11
+ } | {
12
+ type: 'no-tracks';
13
+ } | {
14
+ type: 'disposed';
15
+ };
16
+ export declare class MediaPlayer {
17
+ private canvas;
18
+ private context;
19
+ private src;
20
+ private logLevel;
21
+ private playbackRate;
22
+ private audioStreamIndex;
23
+ private canvasSink;
24
+ private videoFrameIterator;
25
+ private debugStats;
26
+ private audioSink;
27
+ private audioBufferIterator;
28
+ private gainNode;
29
+ private currentVolume;
30
+ private sharedAudioContext;
31
+ private audioSyncAnchor;
32
+ private playing;
33
+ private muted;
34
+ private loop;
35
+ private fps;
36
+ private trimBefore;
37
+ private trimAfter;
38
+ private initialized;
39
+ private totalDuration;
40
+ private isBuffering;
41
+ private onBufferingChangeCallback?;
42
+ private mediaEnded;
43
+ private debugOverlay;
44
+ private onVideoFrameCallback?;
45
+ private initializationPromise;
46
+ private bufferState;
47
+ constructor({ canvas, src, logLevel, sharedAudioContext, loop, trimBefore, trimAfter, playbackRate, audioStreamIndex, fps, debugOverlay, bufferState, }: {
48
+ canvas: HTMLCanvasElement | null;
49
+ src: string;
50
+ logLevel: LogLevel;
51
+ sharedAudioContext: AudioContext;
52
+ loop: boolean;
53
+ trimBefore: number | undefined;
54
+ trimAfter: number | undefined;
55
+ playbackRate: number;
56
+ audioStreamIndex: number;
57
+ fps: number;
58
+ debugOverlay: boolean;
59
+ bufferState: ReturnType<typeof useBufferState>;
60
+ });
61
+ private input;
62
+ private isReady;
63
+ private hasAudio;
64
+ private isCurrentlyBuffering;
65
+ private isDisposalError;
66
+ initialize(startTimeUnresolved: number): Promise<MediaPlayerInitResult>;
67
+ private _initialize;
68
+ private clearCanvas;
69
+ private currentSeekNonce;
70
+ private seekPromiseChain;
71
+ seekTo(time: number): Promise<void>;
72
+ seekToDoNotCallDirectly(time: number, nonce: number): Promise<void>;
73
+ play(): Promise<void>;
74
+ pause(): void;
75
+ setMuted(muted: boolean): void;
76
+ setVolume(volume: number): void;
77
+ setDebugOverlay(debugOverlay: boolean): void;
78
+ setPlaybackRate(rate: number): void;
79
+ setFps(fps: number): void;
80
+ setLoop(loop: boolean): void;
81
+ dispose(): Promise<void>;
82
+ private getPlaybackTime;
83
+ private scheduleAudioChunk;
84
+ onBufferingChange(callback: (isBuffering: boolean) => void): () => void;
85
+ onVideoFrame(callback: (frame: CanvasImageSource) => void): () => void;
86
+ private drawFrame;
87
+ private startAudioIterator;
88
+ private drawDebugOverlay;
89
+ private startVideoIterator;
90
+ private bufferingStartedAtMs;
91
+ private minBufferingTimeoutMs;
92
+ private setBufferingState;
93
+ private maybeResumeFromBuffering;
94
+ private runAudioIterator;
95
+ }
@@ -0,0 +1,496 @@
1
+ import { ALL_FORMATS, AudioBufferSink, CanvasSink, Input, UrlSource, } from 'mediabunny';
2
+ import { Internals } from 'remotion';
3
+ import { HEALTHY_BUFFER_THRESHOLD_SECONDS, makeAudioIterator, } from './audio/audio-preview-iterator';
4
+ import { drawPreviewOverlay } from './debug-overlay/preview-overlay';
5
+ import { getTimeInSeconds } from './get-time-in-seconds';
6
+ import { isNetworkError } from './is-network-error';
7
+ import { sleep, TimeoutError, withTimeout } from './video/timeout-utils';
8
+ import { createVideoIterator, } from './video/video-preview-iterator';
9
+ const AUDIO_BUFFER_TOLERANCE_THRESHOLD = 0.1;
10
+ export class MediaPlayer {
11
+ constructor({ canvas, src, logLevel, sharedAudioContext, loop, trimBefore, trimAfter, playbackRate, audioStreamIndex, fps, debugOverlay, bufferState, }) {
12
+ this.canvasSink = null;
13
+ this.videoFrameIterator = null;
14
+ this.debugStats = {
15
+ videoIteratorsCreated: 0,
16
+ framesRendered: 0,
17
+ };
18
+ this.audioSink = null;
19
+ this.audioBufferIterator = null;
20
+ this.gainNode = null;
21
+ this.currentVolume = 1;
22
+ // this is the time difference between Web Audio timeline
23
+ // and media file timeline
24
+ this.audioSyncAnchor = 0;
25
+ this.playing = false;
26
+ this.muted = false;
27
+ this.loop = false;
28
+ this.initialized = false;
29
+ // for remotion buffer state
30
+ this.isBuffering = false;
31
+ this.mediaEnded = false;
32
+ this.debugOverlay = false;
33
+ this.initializationPromise = null;
34
+ this.input = null;
35
+ this.currentSeekNonce = 0;
36
+ this.seekPromiseChain = Promise.resolve();
37
+ this.drawFrame = (frame) => {
38
+ if (!this.context) {
39
+ throw new Error('Context not initialized');
40
+ }
41
+ this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
42
+ this.context.drawImage(frame.canvas, 0, 0);
43
+ this.debugStats.framesRendered++;
44
+ this.drawDebugOverlay();
45
+ if (this.onVideoFrameCallback && this.canvas) {
46
+ this.onVideoFrameCallback(this.canvas);
47
+ }
48
+ Internals.Log.trace({ logLevel: this.logLevel, tag: '@remotion/media' }, `[MediaPlayer] Drew frame ${frame.timestamp.toFixed(3)}s`);
49
+ };
50
+ this.startAudioIterator = (startFromSecond) => {
51
+ if (!this.hasAudio())
52
+ return;
53
+ // Clean up existing audio iterator
54
+ this.audioBufferIterator?.destroy();
55
+ try {
56
+ const iterator = makeAudioIterator(this.audioSink, startFromSecond);
57
+ this.audioBufferIterator = iterator;
58
+ this.runAudioIterator(startFromSecond, iterator);
59
+ }
60
+ catch (error) {
61
+ if (this.isDisposalError()) {
62
+ return;
63
+ }
64
+ Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, '[MediaPlayer] Failed to start audio iterator', error);
65
+ }
66
+ };
67
+ this.startVideoIterator = async (timeToSeek, nonce) => {
68
+ if (!this.canvasSink) {
69
+ return;
70
+ }
71
+ this.videoFrameIterator?.destroy();
72
+ const iterator = createVideoIterator(timeToSeek, this.canvasSink);
73
+ this.debugStats.videoIteratorsCreated++;
74
+ this.videoFrameIterator = iterator;
75
+ const delayHandle = this.bufferState?.delayPlayback();
76
+ const frameResult = await iterator.getNext();
77
+ delayHandle?.unblock();
78
+ if (iterator.isDestroyed()) {
79
+ return;
80
+ }
81
+ if (nonce !== this.currentSeekNonce) {
82
+ return;
83
+ }
84
+ if (this.videoFrameIterator.isDestroyed()) {
85
+ return;
86
+ }
87
+ if (frameResult.value) {
88
+ this.audioSyncAnchor =
89
+ this.sharedAudioContext.currentTime - frameResult.value.timestamp;
90
+ this.drawFrame(frameResult.value);
91
+ }
92
+ else {
93
+ // media ended
94
+ }
95
+ };
96
+ this.bufferingStartedAtMs = null;
97
+ this.minBufferingTimeoutMs = 500;
98
+ this.runAudioIterator = async (startFromSecond, audioIterator) => {
99
+ if (!this.hasAudio())
100
+ return;
101
+ try {
102
+ let totalBufferDuration = 0;
103
+ let isFirstBuffer = true;
104
+ audioIterator.setAudioIteratorStarted(true);
105
+ while (true) {
106
+ if (audioIterator.isDestroyed()) {
107
+ return;
108
+ }
109
+ const BUFFERING_TIMEOUT_MS = 50;
110
+ let result;
111
+ try {
112
+ result = await withTimeout(audioIterator.getNext(), BUFFERING_TIMEOUT_MS, 'Iterator timeout');
113
+ }
114
+ catch (error) {
115
+ if (error instanceof TimeoutError && !this.mediaEnded) {
116
+ this.setBufferingState(true);
117
+ }
118
+ await sleep(10);
119
+ continue;
120
+ }
121
+ // media has ended
122
+ if (result.done || !result.value) {
123
+ this.mediaEnded = true;
124
+ break;
125
+ }
126
+ const { buffer, timestamp, duration } = result.value;
127
+ totalBufferDuration += duration;
128
+ audioIterator.setAudioBufferHealth(Math.max(0, totalBufferDuration / this.playbackRate));
129
+ this.maybeResumeFromBuffering(totalBufferDuration / this.playbackRate);
130
+ if (this.playing) {
131
+ if (isFirstBuffer) {
132
+ this.audioSyncAnchor =
133
+ this.sharedAudioContext.currentTime - timestamp;
134
+ isFirstBuffer = false;
135
+ }
136
+ // if timestamp is less than startFromSecond, skip
137
+ // context: for some reason, mediabunny returns buffer at 9.984s, when requested at 10s
138
+ if (timestamp < startFromSecond - AUDIO_BUFFER_TOLERANCE_THRESHOLD) {
139
+ continue;
140
+ }
141
+ this.scheduleAudioChunk(buffer, timestamp);
142
+ }
143
+ const playbackTime = this.getPlaybackTime();
144
+ if (playbackTime === null) {
145
+ continue;
146
+ }
147
+ if (timestamp - playbackTime >= 1) {
148
+ await new Promise((resolve) => {
149
+ const check = () => {
150
+ const currentPlaybackTime = this.getPlaybackTime();
151
+ if (currentPlaybackTime !== null &&
152
+ timestamp - currentPlaybackTime < 1) {
153
+ resolve();
154
+ }
155
+ else {
156
+ requestAnimationFrame(check);
157
+ }
158
+ };
159
+ check();
160
+ });
161
+ }
162
+ }
163
+ }
164
+ catch (error) {
165
+ if (this.isDisposalError()) {
166
+ return;
167
+ }
168
+ Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, '[MediaPlayer] Failed to run audio iterator', error);
169
+ }
170
+ };
171
+ this.canvas = canvas ?? null;
172
+ this.src = src;
173
+ this.logLevel = logLevel ?? window.remotion_logLevel;
174
+ this.sharedAudioContext = sharedAudioContext;
175
+ this.playbackRate = playbackRate;
176
+ this.loop = loop;
177
+ this.trimBefore = trimBefore;
178
+ this.trimAfter = trimAfter;
179
+ this.audioStreamIndex = audioStreamIndex ?? 0;
180
+ this.fps = fps;
181
+ this.debugOverlay = debugOverlay;
182
+ this.bufferState = bufferState;
183
+ if (canvas) {
184
+ const context = canvas.getContext('2d', {
185
+ alpha: true,
186
+ desynchronized: true,
187
+ });
188
+ if (!context) {
189
+ throw new Error('Could not get 2D context from canvas');
190
+ }
191
+ this.context = context;
192
+ }
193
+ else {
194
+ this.context = null;
195
+ }
196
+ }
197
+ isReady() {
198
+ return (this.initialized &&
199
+ Boolean(this.sharedAudioContext) &&
200
+ !this.input?.disposed);
201
+ }
202
+ hasAudio() {
203
+ return Boolean(this.audioSink && this.sharedAudioContext && this.gainNode);
204
+ }
205
+ isCurrentlyBuffering() {
206
+ return this.isBuffering && Boolean(this.bufferingStartedAtMs);
207
+ }
208
+ isDisposalError() {
209
+ return this.input?.disposed === true;
210
+ }
211
+ initialize(startTimeUnresolved) {
212
+ const promise = this._initialize(startTimeUnresolved);
213
+ this.initializationPromise = promise;
214
+ return promise;
215
+ }
216
+ async _initialize(startTimeUnresolved) {
217
+ try {
218
+ const urlSource = new UrlSource(this.src);
219
+ const input = new Input({
220
+ source: urlSource,
221
+ formats: ALL_FORMATS,
222
+ });
223
+ this.input = input;
224
+ if (input.disposed) {
225
+ return { type: 'disposed' };
226
+ }
227
+ try {
228
+ await input.getFormat();
229
+ }
230
+ catch (error) {
231
+ if (this.isDisposalError()) {
232
+ return { type: 'disposed' };
233
+ }
234
+ const err = error;
235
+ if (isNetworkError(err)) {
236
+ throw error;
237
+ }
238
+ Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, `[MediaPlayer] Failed to recognize format for ${this.src}`, error);
239
+ return { type: 'unknown-container-format' };
240
+ }
241
+ const [durationInSeconds, videoTrack, audioTracks] = await Promise.all([
242
+ input.computeDuration(),
243
+ input.getPrimaryVideoTrack(),
244
+ input.getAudioTracks(),
245
+ ]);
246
+ this.totalDuration = durationInSeconds;
247
+ const audioTrack = audioTracks[this.audioStreamIndex] ?? null;
248
+ if (!videoTrack && !audioTrack) {
249
+ return { type: 'no-tracks' };
250
+ }
251
+ if (videoTrack && this.canvas && this.context) {
252
+ const canDecode = await videoTrack.canDecode();
253
+ if (!canDecode) {
254
+ return { type: 'cannot-decode' };
255
+ }
256
+ this.canvasSink = new CanvasSink(videoTrack, {
257
+ poolSize: 2,
258
+ fit: 'contain',
259
+ alpha: true,
260
+ });
261
+ this.canvas.width = videoTrack.displayWidth;
262
+ this.canvas.height = videoTrack.displayHeight;
263
+ }
264
+ if (audioTrack && this.sharedAudioContext) {
265
+ this.audioSink = new AudioBufferSink(audioTrack);
266
+ this.gainNode = this.sharedAudioContext.createGain();
267
+ this.gainNode.connect(this.sharedAudioContext.destination);
268
+ }
269
+ const startTime = getTimeInSeconds({
270
+ unloopedTimeInSeconds: startTimeUnresolved,
271
+ playbackRate: this.playbackRate,
272
+ loop: this.loop,
273
+ trimBefore: this.trimBefore,
274
+ trimAfter: this.trimAfter,
275
+ mediaDurationInSeconds: this.totalDuration,
276
+ fps: this.fps,
277
+ ifNoMediaDuration: 'infinity',
278
+ src: this.src,
279
+ });
280
+ if (startTime === null) {
281
+ this.clearCanvas();
282
+ return { type: 'success', durationInSeconds: this.totalDuration };
283
+ }
284
+ if (this.sharedAudioContext) {
285
+ this.audioSyncAnchor = this.sharedAudioContext.currentTime - startTime;
286
+ }
287
+ this.initialized = true;
288
+ try {
289
+ this.startAudioIterator(startTime);
290
+ await this.startVideoIterator(startTime, this.currentSeekNonce);
291
+ }
292
+ catch (error) {
293
+ if (this.isDisposalError()) {
294
+ return { type: 'disposed' };
295
+ }
296
+ Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, '[MediaPlayer] Failed to start audio and video iterators', error);
297
+ }
298
+ return { type: 'success', durationInSeconds };
299
+ }
300
+ catch (error) {
301
+ const err = error;
302
+ if (isNetworkError(err)) {
303
+ Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, `[MediaPlayer] Network/CORS error for ${this.src}`, err);
304
+ return { type: 'network-error' };
305
+ }
306
+ Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, '[MediaPlayer] Failed to initialize', error);
307
+ throw error;
308
+ }
309
+ }
310
+ clearCanvas() {
311
+ if (this.context && this.canvas) {
312
+ this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
313
+ }
314
+ }
315
+ async seekTo(time) {
316
+ this.currentSeekNonce++;
317
+ const nonce = this.currentSeekNonce;
318
+ await this.seekPromiseChain;
319
+ this.seekPromiseChain = this.seekToDoNotCallDirectly(time, nonce);
320
+ await this.seekPromiseChain;
321
+ }
322
+ async seekToDoNotCallDirectly(time, nonce) {
323
+ if (nonce !== this.currentSeekNonce) {
324
+ return;
325
+ }
326
+ if (!this.isReady())
327
+ return;
328
+ const newTime = getTimeInSeconds({
329
+ unloopedTimeInSeconds: time,
330
+ playbackRate: this.playbackRate,
331
+ loop: this.loop,
332
+ trimBefore: this.trimBefore,
333
+ trimAfter: this.trimAfter,
334
+ mediaDurationInSeconds: this.totalDuration ?? null,
335
+ fps: this.fps,
336
+ ifNoMediaDuration: 'infinity',
337
+ src: this.src,
338
+ });
339
+ if (newTime === null) {
340
+ // invalidate in-flight video operations
341
+ this.videoFrameIterator?.destroy();
342
+ this.videoFrameIterator = null;
343
+ this.clearCanvas();
344
+ this.audioBufferIterator?.destroy();
345
+ this.audioBufferIterator = null;
346
+ return;
347
+ }
348
+ const currentPlaybackTime = this.getPlaybackTime();
349
+ if (currentPlaybackTime === newTime) {
350
+ return;
351
+ }
352
+ const satisfyResult = await this.videoFrameIterator?.tryToSatisfySeek(newTime);
353
+ if (satisfyResult?.type === 'satisfied') {
354
+ this.drawFrame(satisfyResult.frame);
355
+ return;
356
+ }
357
+ if (this.currentSeekNonce !== nonce) {
358
+ return;
359
+ }
360
+ this.mediaEnded = false;
361
+ this.audioSyncAnchor = this.sharedAudioContext.currentTime - newTime;
362
+ this.startAudioIterator(newTime);
363
+ this.startVideoIterator(newTime, nonce);
364
+ }
365
+ async play() {
366
+ if (!this.isReady())
367
+ return;
368
+ if (!this.playing) {
369
+ if (this.sharedAudioContext.state === 'suspended') {
370
+ await this.sharedAudioContext.resume();
371
+ }
372
+ this.playing = true;
373
+ }
374
+ }
375
+ pause() {
376
+ this.playing = false;
377
+ this.audioBufferIterator?.cleanupAudioQueue();
378
+ }
379
+ setMuted(muted) {
380
+ this.muted = muted;
381
+ if (this.gainNode) {
382
+ this.gainNode.gain.value = muted ? 0 : this.currentVolume;
383
+ }
384
+ }
385
+ setVolume(volume) {
386
+ if (!this.gainNode) {
387
+ return;
388
+ }
389
+ const appliedVolume = Math.max(0, volume);
390
+ this.currentVolume = appliedVolume;
391
+ if (!this.muted) {
392
+ this.gainNode.gain.value = appliedVolume;
393
+ }
394
+ }
395
+ setDebugOverlay(debugOverlay) {
396
+ this.debugOverlay = debugOverlay;
397
+ }
398
+ setPlaybackRate(rate) {
399
+ this.playbackRate = rate;
400
+ }
401
+ setFps(fps) {
402
+ this.fps = fps;
403
+ }
404
+ setLoop(loop) {
405
+ this.loop = loop;
406
+ }
407
+ async dispose() {
408
+ this.initialized = false;
409
+ if (this.initializationPromise) {
410
+ try {
411
+ // wait for the init to finish
412
+ // otherwise we might get errors like:
413
+ // Error: Response stream reader stopped unexpectedly before all requested data was read. from UrlSource
414
+ await this.initializationPromise;
415
+ }
416
+ catch {
417
+ // Ignore initialization errors during disposal
418
+ }
419
+ }
420
+ this.input?.dispose();
421
+ this.videoFrameIterator?.destroy();
422
+ this.videoFrameIterator = null;
423
+ this.audioBufferIterator?.destroy();
424
+ this.audioBufferIterator = null;
425
+ }
426
+ getPlaybackTime() {
427
+ return this.sharedAudioContext.currentTime - this.audioSyncAnchor;
428
+ }
429
+ scheduleAudioChunk(buffer, mediaTimestamp) {
430
+ const targetTime = mediaTimestamp + this.audioSyncAnchor;
431
+ const delay = targetTime - this.sharedAudioContext.currentTime;
432
+ const node = this.sharedAudioContext.createBufferSource();
433
+ node.buffer = buffer;
434
+ node.playbackRate.value = this.playbackRate;
435
+ node.connect(this.gainNode);
436
+ if (delay >= 0) {
437
+ node.start(targetTime);
438
+ }
439
+ else {
440
+ node.start(this.sharedAudioContext.currentTime, -delay);
441
+ }
442
+ this.audioBufferIterator?.addQueuedAudioNode(node);
443
+ node.onended = () => this.audioBufferIterator?.removeQueuedAudioNode(node);
444
+ }
445
+ onBufferingChange(callback) {
446
+ this.onBufferingChangeCallback = callback;
447
+ return () => {
448
+ if (this.onBufferingChangeCallback === callback) {
449
+ this.onBufferingChangeCallback = undefined;
450
+ }
451
+ };
452
+ }
453
+ onVideoFrame(callback) {
454
+ this.onVideoFrameCallback = callback;
455
+ if (this.initialized && callback && this.canvas) {
456
+ callback(this.canvas);
457
+ }
458
+ return () => {
459
+ if (this.onVideoFrameCallback === callback) {
460
+ this.onVideoFrameCallback = undefined;
461
+ }
462
+ };
463
+ }
464
+ drawDebugOverlay() {
465
+ if (!this.debugOverlay)
466
+ return;
467
+ if (this.context && this.canvas) {
468
+ drawPreviewOverlay(this.context, this.debugStats, this.sharedAudioContext.state, this.sharedAudioContext.currentTime);
469
+ }
470
+ }
471
+ setBufferingState(isBuffering) {
472
+ if (this.isBuffering !== isBuffering) {
473
+ this.isBuffering = isBuffering;
474
+ if (isBuffering) {
475
+ this.bufferingStartedAtMs = performance.now();
476
+ this.onBufferingChangeCallback?.(true);
477
+ }
478
+ else {
479
+ this.bufferingStartedAtMs = null;
480
+ this.onBufferingChangeCallback?.(false);
481
+ }
482
+ }
483
+ }
484
+ maybeResumeFromBuffering(currentBufferDuration) {
485
+ if (!this.isCurrentlyBuffering())
486
+ return;
487
+ const now = performance.now();
488
+ const bufferingDuration = now - this.bufferingStartedAtMs;
489
+ const minTimeElapsed = bufferingDuration >= this.minBufferingTimeoutMs;
490
+ const bufferHealthy = currentBufferDuration >= HEALTHY_BUFFER_THRESHOLD_SECONDS;
491
+ if (minTimeElapsed && bufferHealthy) {
492
+ Internals.Log.trace({ logLevel: this.logLevel, tag: '@remotion/media' }, `[MediaPlayer] Resuming from buffering after ${bufferingDuration}ms - buffer recovered`);
493
+ this.setBufferingState(false);
494
+ }
495
+ }
496
+ }
@@ -37,6 +37,7 @@ type OptionalVideoProps = {
37
37
  trimBefore: number | undefined;
38
38
  toneFrequency: number;
39
39
  showInTimeline: boolean;
40
+ debugOverlay: boolean;
40
41
  };
41
42
  export type InnerVideoProps = MandatoryVideoProps & OuterVideoProps & OptionalVideoProps;
42
43
  export type VideoProps = MandatoryVideoProps & Partial<OuterVideoProps> & Partial<OptionalVideoProps>;
@@ -1,7 +1,7 @@
1
1
  import React from 'react';
2
2
  import type { LogLevel, LoopVolumeCurveBehavior, VolumeProp } from 'remotion';
3
3
  import type { FallbackOffthreadVideoProps } from './props';
4
- type NewVideoForPreviewProps = {
4
+ type VideoForPreviewProps = {
5
5
  readonly src: string;
6
6
  readonly style: React.CSSProperties | undefined;
7
7
  readonly playbackRate: number;
@@ -20,6 +20,7 @@ type NewVideoForPreviewProps = {
20
20
  readonly disallowFallbackToOffthreadVideo: boolean;
21
21
  readonly fallbackOffthreadVideoProps: FallbackOffthreadVideoProps;
22
22
  readonly audioStreamIndex: number;
23
+ readonly debugOverlay: boolean;
23
24
  };
24
- export declare const VideoForPreview: React.FC<NewVideoForPreviewProps>;
25
+ export declare const VideoForPreview: React.FC<VideoForPreviewProps>;
25
26
  export {};