@remotion/media 4.0.363 → 4.0.365

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1 @@
1
+ export declare const roundTo4Digits: (timestamp: number) => number;
@@ -0,0 +1,4 @@
1
+ // Round to only 4 digits, because WebM has a timescale of 1_000, e.g. framer.webm
2
+ export const roundTo4Digits = (timestamp) => {
3
+ return Math.round(timestamp * 1000) / 1000;
4
+ };
@@ -0,0 +1,87 @@
1
+ import type { LogLevel, useBufferState } from 'remotion';
2
+ export type MediaPlayerInitResult = {
3
+ type: 'success';
4
+ durationInSeconds: number;
5
+ } | {
6
+ type: 'unknown-container-format';
7
+ } | {
8
+ type: 'cannot-decode';
9
+ } | {
10
+ type: 'network-error';
11
+ } | {
12
+ type: 'no-tracks';
13
+ } | {
14
+ type: 'disposed';
15
+ };
16
+ export declare class MediaPlayer {
17
+ private canvas;
18
+ private context;
19
+ private src;
20
+ private logLevel;
21
+ private playbackRate;
22
+ private audioStreamIndex;
23
+ private canvasSink;
24
+ private videoFrameIterator;
25
+ private debugStats;
26
+ private audioSink;
27
+ private audioBufferIterator;
28
+ private gainNode;
29
+ private currentVolume;
30
+ private sharedAudioContext;
31
+ private audioSyncAnchor;
32
+ private playing;
33
+ private muted;
34
+ private loop;
35
+ private fps;
36
+ private trimBefore;
37
+ private trimAfter;
38
+ private initialized;
39
+ private totalDuration;
40
+ private debugOverlay;
41
+ private onVideoFrameCallback?;
42
+ private initializationPromise;
43
+ private bufferState;
44
+ constructor({ canvas, src, logLevel, sharedAudioContext, loop, trimBefore, trimAfter, playbackRate, audioStreamIndex, fps, debugOverlay, bufferState, }: {
45
+ canvas: HTMLCanvasElement | null;
46
+ src: string;
47
+ logLevel: LogLevel;
48
+ sharedAudioContext: AudioContext;
49
+ loop: boolean;
50
+ trimBefore: number | undefined;
51
+ trimAfter: number | undefined;
52
+ playbackRate: number;
53
+ audioStreamIndex: number;
54
+ fps: number;
55
+ debugOverlay: boolean;
56
+ bufferState: ReturnType<typeof useBufferState>;
57
+ });
58
+ private input;
59
+ private isReady;
60
+ private hasAudio;
61
+ private isDisposalError;
62
+ initialize(startTimeUnresolved: number): Promise<MediaPlayerInitResult>;
63
+ private _initialize;
64
+ private clearCanvas;
65
+ private currentSeekNonce;
66
+ private seekPromiseChain;
67
+ seekTo(time: number): Promise<void>;
68
+ seekToDoNotCallDirectly(time: number, nonce: number): Promise<void>;
69
+ play(time: number): Promise<void>;
70
+ pause(): void;
71
+ setMuted(muted: boolean): void;
72
+ setVolume(volume: number): void;
73
+ setDebugOverlay(debugOverlay: boolean): void;
74
+ setPlaybackRate(rate: number): void;
75
+ setFps(fps: number): void;
76
+ setLoop(loop: boolean): void;
77
+ dispose(): Promise<void>;
78
+ private getPlaybackTime;
79
+ private setPlaybackTime;
80
+ private audioChunksForAfterResuming;
81
+ private scheduleAudioChunk;
82
+ onVideoFrame(callback: (frame: CanvasImageSource) => void): () => void;
83
+ private drawFrame;
84
+ private startAudioIterator;
85
+ private drawDebugOverlay;
86
+ private startVideoIterator;
87
+ }
@@ -0,0 +1,475 @@
1
+ import { ALL_FORMATS, AudioBufferSink, CanvasSink, Input, UrlSource, } from 'mediabunny';
2
+ import { Internals } from 'remotion';
3
+ import { isAlreadyQueued, makeAudioIterator, } from './audio/audio-preview-iterator';
4
+ import { drawPreviewOverlay } from './debug-overlay/preview-overlay';
5
+ import { getTimeInSeconds } from './get-time-in-seconds';
6
+ import { isNetworkError } from './is-network-error';
7
+ import { createVideoIterator, } from './video/video-preview-iterator';
8
+ export class MediaPlayer {
9
+ constructor({ canvas, src, logLevel, sharedAudioContext, loop, trimBefore, trimAfter, playbackRate, audioStreamIndex, fps, debugOverlay, bufferState, }) {
10
+ this.canvasSink = null;
11
+ this.videoFrameIterator = null;
12
+ this.debugStats = {
13
+ videoIteratorsCreated: 0,
14
+ audioIteratorsCreated: 0,
15
+ framesRendered: 0,
16
+ };
17
+ this.audioSink = null;
18
+ this.audioBufferIterator = null;
19
+ this.gainNode = null;
20
+ this.currentVolume = 1;
21
+ // this is the time difference between Web Audio timeline
22
+ // and media file timeline
23
+ this.audioSyncAnchor = 0;
24
+ this.playing = false;
25
+ this.muted = false;
26
+ this.loop = false;
27
+ this.initialized = false;
28
+ this.debugOverlay = false;
29
+ this.initializationPromise = null;
30
+ this.input = null;
31
+ this.currentSeekNonce = 0;
32
+ this.seekPromiseChain = Promise.resolve();
33
+ this.audioChunksForAfterResuming = [];
34
+ this.drawFrame = (frame) => {
35
+ if (!this.context) {
36
+ throw new Error('Context not initialized');
37
+ }
38
+ this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
39
+ this.context.drawImage(frame.canvas, 0, 0);
40
+ this.debugStats.framesRendered++;
41
+ this.drawDebugOverlay();
42
+ if (this.onVideoFrameCallback && this.canvas) {
43
+ this.onVideoFrameCallback(this.canvas);
44
+ }
45
+ Internals.Log.trace({ logLevel: this.logLevel, tag: '@remotion/media' }, `[MediaPlayer] Drew frame ${frame.timestamp.toFixed(3)}s`);
46
+ };
47
+ this.startAudioIterator = async (startFromSecond, nonce) => {
48
+ if (!this.hasAudio())
49
+ return;
50
+ this.audioBufferIterator?.destroy();
51
+ this.audioChunksForAfterResuming = [];
52
+ const delayHandle = this.bufferState.delayPlayback();
53
+ const iterator = makeAudioIterator(this.audioSink, startFromSecond);
54
+ this.debugStats.audioIteratorsCreated++;
55
+ this.audioBufferIterator = iterator;
56
+ // Schedule up to 3 buffers ahead of the current time
57
+ for (let i = 0; i < 3; i++) {
58
+ const result = await iterator.getNext();
59
+ if (iterator.isDestroyed()) {
60
+ delayHandle.unblock();
61
+ return;
62
+ }
63
+ if (nonce !== this.currentSeekNonce) {
64
+ delayHandle.unblock();
65
+ return;
66
+ }
67
+ if (!result.value) {
68
+ // media ended
69
+ delayHandle.unblock();
70
+ return;
71
+ }
72
+ const { buffer, timestamp } = result.value;
73
+ this.audioChunksForAfterResuming.push({
74
+ buffer,
75
+ timestamp,
76
+ });
77
+ }
78
+ delayHandle.unblock();
79
+ };
80
+ this.startVideoIterator = async (timeToSeek, nonce) => {
81
+ if (!this.canvasSink) {
82
+ return;
83
+ }
84
+ this.videoFrameIterator?.destroy();
85
+ const iterator = createVideoIterator(timeToSeek, this.canvasSink);
86
+ this.debugStats.videoIteratorsCreated++;
87
+ this.videoFrameIterator = iterator;
88
+ const delayHandle = this.bufferState.delayPlayback();
89
+ const frameResult = await iterator.getNext();
90
+ delayHandle.unblock();
91
+ if (iterator.isDestroyed()) {
92
+ return;
93
+ }
94
+ if (nonce !== this.currentSeekNonce) {
95
+ return;
96
+ }
97
+ if (this.videoFrameIterator.isDestroyed()) {
98
+ return;
99
+ }
100
+ if (!frameResult.value) {
101
+ // media ended
102
+ return;
103
+ }
104
+ this.drawFrame(frameResult.value);
105
+ };
106
+ this.canvas = canvas ?? null;
107
+ this.src = src;
108
+ this.logLevel = logLevel ?? window.remotion_logLevel;
109
+ this.sharedAudioContext = sharedAudioContext;
110
+ this.playbackRate = playbackRate;
111
+ this.loop = loop;
112
+ this.trimBefore = trimBefore;
113
+ this.trimAfter = trimAfter;
114
+ this.audioStreamIndex = audioStreamIndex ?? 0;
115
+ this.fps = fps;
116
+ this.debugOverlay = debugOverlay;
117
+ this.bufferState = bufferState;
118
+ if (canvas) {
119
+ const context = canvas.getContext('2d', {
120
+ alpha: true,
121
+ desynchronized: true,
122
+ });
123
+ if (!context) {
124
+ throw new Error('Could not get 2D context from canvas');
125
+ }
126
+ this.context = context;
127
+ }
128
+ else {
129
+ this.context = null;
130
+ }
131
+ }
132
+ isReady() {
133
+ return (this.initialized &&
134
+ Boolean(this.sharedAudioContext) &&
135
+ !this.input?.disposed);
136
+ }
137
+ hasAudio() {
138
+ return Boolean(this.audioSink && this.sharedAudioContext && this.gainNode);
139
+ }
140
+ isDisposalError() {
141
+ return this.input?.disposed === true;
142
+ }
143
+ initialize(startTimeUnresolved) {
144
+ const promise = this._initialize(startTimeUnresolved);
145
+ this.initializationPromise = promise;
146
+ return promise;
147
+ }
148
+ async _initialize(startTimeUnresolved) {
149
+ try {
150
+ const urlSource = new UrlSource(this.src);
151
+ const input = new Input({
152
+ source: urlSource,
153
+ formats: ALL_FORMATS,
154
+ });
155
+ this.input = input;
156
+ if (input.disposed) {
157
+ return { type: 'disposed' };
158
+ }
159
+ try {
160
+ await input.getFormat();
161
+ }
162
+ catch (error) {
163
+ if (this.isDisposalError()) {
164
+ return { type: 'disposed' };
165
+ }
166
+ const err = error;
167
+ if (isNetworkError(err)) {
168
+ throw error;
169
+ }
170
+ Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, `[MediaPlayer] Failed to recognize format for ${this.src}`, error);
171
+ return { type: 'unknown-container-format' };
172
+ }
173
+ const [durationInSeconds, videoTrack, audioTracks] = await Promise.all([
174
+ input.computeDuration(),
175
+ input.getPrimaryVideoTrack(),
176
+ input.getAudioTracks(),
177
+ ]);
178
+ this.totalDuration = durationInSeconds;
179
+ const audioTrack = audioTracks[this.audioStreamIndex] ?? null;
180
+ if (!videoTrack && !audioTrack) {
181
+ return { type: 'no-tracks' };
182
+ }
183
+ if (videoTrack && this.canvas && this.context) {
184
+ const canDecode = await videoTrack.canDecode();
185
+ if (!canDecode) {
186
+ return { type: 'cannot-decode' };
187
+ }
188
+ this.canvasSink = new CanvasSink(videoTrack, {
189
+ poolSize: 2,
190
+ fit: 'contain',
191
+ alpha: true,
192
+ });
193
+ this.canvas.width = videoTrack.displayWidth;
194
+ this.canvas.height = videoTrack.displayHeight;
195
+ }
196
+ if (audioTrack && this.sharedAudioContext) {
197
+ this.audioSink = new AudioBufferSink(audioTrack);
198
+ this.gainNode = this.sharedAudioContext.createGain();
199
+ this.gainNode.connect(this.sharedAudioContext.destination);
200
+ }
201
+ const startTime = getTimeInSeconds({
202
+ unloopedTimeInSeconds: startTimeUnresolved,
203
+ playbackRate: this.playbackRate,
204
+ loop: this.loop,
205
+ trimBefore: this.trimBefore,
206
+ trimAfter: this.trimAfter,
207
+ mediaDurationInSeconds: this.totalDuration,
208
+ fps: this.fps,
209
+ ifNoMediaDuration: 'infinity',
210
+ src: this.src,
211
+ });
212
+ if (startTime === null) {
213
+ this.clearCanvas();
214
+ return { type: 'success', durationInSeconds: this.totalDuration };
215
+ }
216
+ if (this.sharedAudioContext) {
217
+ this.setPlaybackTime(startTime);
218
+ }
219
+ this.initialized = true;
220
+ try {
221
+ // intentionally not awaited
222
+ this.startAudioIterator(startTime, this.currentSeekNonce);
223
+ await this.startVideoIterator(startTime, this.currentSeekNonce);
224
+ }
225
+ catch (error) {
226
+ if (this.isDisposalError()) {
227
+ return { type: 'disposed' };
228
+ }
229
+ Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, '[MediaPlayer] Failed to start audio and video iterators', error);
230
+ }
231
+ return { type: 'success', durationInSeconds };
232
+ }
233
+ catch (error) {
234
+ const err = error;
235
+ if (isNetworkError(err)) {
236
+ Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, `[MediaPlayer] Network/CORS error for ${this.src}`, err);
237
+ return { type: 'network-error' };
238
+ }
239
+ Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, '[MediaPlayer] Failed to initialize', error);
240
+ throw error;
241
+ }
242
+ }
243
+ clearCanvas() {
244
+ if (this.context && this.canvas) {
245
+ this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
246
+ }
247
+ }
248
+ async seekTo(time) {
249
+ this.currentSeekNonce++;
250
+ const nonce = this.currentSeekNonce;
251
+ await this.seekPromiseChain;
252
+ this.seekPromiseChain = this.seekToDoNotCallDirectly(time, nonce);
253
+ await this.seekPromiseChain;
254
+ }
255
+ async seekToDoNotCallDirectly(time, nonce) {
256
+ if (nonce !== this.currentSeekNonce) {
257
+ return;
258
+ }
259
+ if (!this.isReady())
260
+ return;
261
+ const newTime = getTimeInSeconds({
262
+ unloopedTimeInSeconds: time,
263
+ playbackRate: this.playbackRate,
264
+ loop: this.loop,
265
+ trimBefore: this.trimBefore,
266
+ trimAfter: this.trimAfter,
267
+ mediaDurationInSeconds: this.totalDuration ?? null,
268
+ fps: this.fps,
269
+ ifNoMediaDuration: 'infinity',
270
+ src: this.src,
271
+ });
272
+ if (newTime === null) {
273
+ // invalidate in-flight video operations
274
+ this.videoFrameIterator?.destroy();
275
+ this.videoFrameIterator = null;
276
+ this.clearCanvas();
277
+ this.audioBufferIterator?.destroy();
278
+ this.audioBufferIterator = null;
279
+ return;
280
+ }
281
+ const currentPlaybackTime = this.getPlaybackTime();
282
+ if (currentPlaybackTime === newTime) {
283
+ return;
284
+ }
285
+ const newAudioSyncAnchor = this.sharedAudioContext.currentTime - newTime;
286
+ const diff = Math.abs(newAudioSyncAnchor - this.audioSyncAnchor);
287
+ if (diff > 0.1) {
288
+ this.setPlaybackTime(newTime);
289
+ }
290
+ // Should return immediately, so it's okay to not use Promise.all here
291
+ const videoSatisfyResult = await this.videoFrameIterator?.tryToSatisfySeek(newTime);
292
+ if (videoSatisfyResult?.type === 'satisfied') {
293
+ this.drawFrame(videoSatisfyResult.frame);
294
+ }
295
+ else if (videoSatisfyResult && this.currentSeekNonce === nonce) {
296
+ this.startVideoIterator(newTime, nonce);
297
+ }
298
+ const queuedPeriod = this.audioBufferIterator?.getQueuedPeriod();
299
+ const currentTimeIsAlreadyQueued = isAlreadyQueued(newTime, queuedPeriod);
300
+ const toBeScheduled = [];
301
+ if (!currentTimeIsAlreadyQueued) {
302
+ const audioSatisfyResult = await this.audioBufferIterator?.tryToSatisfySeek(newTime);
303
+ if (this.currentSeekNonce !== nonce) {
304
+ return;
305
+ }
306
+ if (!audioSatisfyResult) {
307
+ return;
308
+ }
309
+ if (audioSatisfyResult.type === 'not-satisfied') {
310
+ await this.startAudioIterator(newTime, nonce);
311
+ return;
312
+ }
313
+ toBeScheduled.push(...audioSatisfyResult.buffers);
314
+ }
315
+ // TODO: What if this is beyond the end of the video
316
+ const nextTime = newTime +
317
+ // start of next frame
318
+ (1 / this.fps) * this.playbackRate +
319
+ // need the full duration of the next frame to be queued
320
+ (1 / this.fps) * this.playbackRate;
321
+ const nextIsAlreadyQueued = isAlreadyQueued(nextTime, queuedPeriod);
322
+ if (!nextIsAlreadyQueued) {
323
+ const audioSatisfyResult = await this.audioBufferIterator?.tryToSatisfySeek(nextTime);
324
+ if (this.currentSeekNonce !== nonce) {
325
+ return;
326
+ }
327
+ if (!audioSatisfyResult) {
328
+ return;
329
+ }
330
+ if (audioSatisfyResult.type === 'not-satisfied') {
331
+ await this.startAudioIterator(nextTime, nonce);
332
+ return;
333
+ }
334
+ toBeScheduled.push(...audioSatisfyResult.buffers);
335
+ }
336
+ for (const buffer of toBeScheduled) {
337
+ if (this.playing) {
338
+ this.scheduleAudioChunk(buffer.buffer, buffer.timestamp);
339
+ }
340
+ else {
341
+ this.audioChunksForAfterResuming.push({
342
+ buffer: buffer.buffer,
343
+ timestamp: buffer.timestamp,
344
+ });
345
+ }
346
+ }
347
+ }
348
+ async play(time) {
349
+ if (!this.isReady())
350
+ return;
351
+ this.setPlaybackTime(time);
352
+ this.playing = true;
353
+ for (const chunk of this.audioChunksForAfterResuming) {
354
+ this.scheduleAudioChunk(chunk.buffer, chunk.timestamp);
355
+ }
356
+ if (this.sharedAudioContext.state === 'suspended') {
357
+ await this.sharedAudioContext.resume();
358
+ }
359
+ this.audioChunksForAfterResuming.length = 0;
360
+ this.drawDebugOverlay();
361
+ }
362
+ pause() {
363
+ this.playing = false;
364
+ const toQueue = this.audioBufferIterator?.removeAndReturnAllQueuedAudioNodes();
365
+ if (toQueue) {
366
+ for (const chunk of toQueue) {
367
+ this.audioChunksForAfterResuming.push({
368
+ buffer: chunk.buffer,
369
+ timestamp: chunk.timestamp,
370
+ });
371
+ }
372
+ }
373
+ this.drawDebugOverlay();
374
+ }
375
+ setMuted(muted) {
376
+ this.muted = muted;
377
+ if (this.gainNode) {
378
+ this.gainNode.gain.value = muted ? 0 : this.currentVolume;
379
+ }
380
+ }
381
+ setVolume(volume) {
382
+ if (!this.gainNode) {
383
+ return;
384
+ }
385
+ const appliedVolume = Math.max(0, volume);
386
+ this.currentVolume = appliedVolume;
387
+ if (!this.muted) {
388
+ this.gainNode.gain.value = appliedVolume;
389
+ }
390
+ }
391
+ setDebugOverlay(debugOverlay) {
392
+ this.debugOverlay = debugOverlay;
393
+ }
394
+ setPlaybackRate(rate) {
395
+ this.playbackRate = rate;
396
+ }
397
+ setFps(fps) {
398
+ this.fps = fps;
399
+ }
400
+ setLoop(loop) {
401
+ this.loop = loop;
402
+ }
403
+ async dispose() {
404
+ this.initialized = false;
405
+ if (this.initializationPromise) {
406
+ try {
407
+ // wait for the init to finish
408
+ // otherwise we might get errors like:
409
+ // Error: Response stream reader stopped unexpectedly before all requested data was read. from UrlSource
410
+ await this.initializationPromise;
411
+ }
412
+ catch {
413
+ // Ignore initialization errors during disposal
414
+ }
415
+ }
416
+ this.input?.dispose();
417
+ this.videoFrameIterator?.destroy();
418
+ this.videoFrameIterator = null;
419
+ this.audioBufferIterator?.destroy();
420
+ this.audioBufferIterator = null;
421
+ }
422
+ getPlaybackTime() {
423
+ return this.sharedAudioContext.currentTime - this.audioSyncAnchor;
424
+ }
425
+ setPlaybackTime(time) {
426
+ this.audioSyncAnchor = this.sharedAudioContext.currentTime - time;
427
+ }
428
+ scheduleAudioChunk(buffer, mediaTimestamp) {
429
+ // TODO: Might already be scheduled, and then the playback rate changes
430
+ // TODO: Playback rate does not yet work
431
+ const targetTime = (mediaTimestamp - (this.trimBefore ?? 0) / this.fps) / this.playbackRate;
432
+ const delay = targetTime + this.audioSyncAnchor - this.sharedAudioContext.currentTime;
433
+ const node = this.sharedAudioContext.createBufferSource();
434
+ node.buffer = buffer;
435
+ node.playbackRate.value = this.playbackRate;
436
+ node.connect(this.gainNode);
437
+ if (delay >= 0) {
438
+ node.start(targetTime + this.audioSyncAnchor);
439
+ }
440
+ else {
441
+ node.start(this.sharedAudioContext.currentTime, -delay);
442
+ }
443
+ this.audioBufferIterator.addQueuedAudioNode(node, mediaTimestamp, buffer);
444
+ node.onended = () => {
445
+ return this.audioBufferIterator.removeQueuedAudioNode(node);
446
+ };
447
+ }
448
+ onVideoFrame(callback) {
449
+ this.onVideoFrameCallback = callback;
450
+ if (this.initialized && callback && this.canvas) {
451
+ callback(this.canvas);
452
+ }
453
+ return () => {
454
+ if (this.onVideoFrameCallback === callback) {
455
+ this.onVideoFrameCallback = undefined;
456
+ }
457
+ };
458
+ }
459
+ drawDebugOverlay() {
460
+ if (!this.debugOverlay)
461
+ return;
462
+ if (this.context && this.canvas) {
463
+ drawPreviewOverlay({
464
+ context: this.context,
465
+ stats: this.debugStats,
466
+ audioTime: this.sharedAudioContext.currentTime,
467
+ audioContextState: this.sharedAudioContext.state,
468
+ audioSyncAnchor: this.audioSyncAnchor,
469
+ audioIterator: this.audioBufferIterator,
470
+ audioChunksForAfterResuming: this.audioChunksForAfterResuming,
471
+ playing: this.playing,
472
+ });
473
+ }
474
+ }
475
+ }
@@ -37,6 +37,7 @@ type OptionalVideoProps = {
37
37
  trimBefore: number | undefined;
38
38
  toneFrequency: number;
39
39
  showInTimeline: boolean;
40
+ debugOverlay: boolean;
40
41
  };
41
42
  export type InnerVideoProps = MandatoryVideoProps & OuterVideoProps & OptionalVideoProps;
42
43
  export type VideoProps = MandatoryVideoProps & Partial<OuterVideoProps> & Partial<OptionalVideoProps>;
@@ -1,7 +1,7 @@
1
1
  import React from 'react';
2
2
  import type { LogLevel, LoopVolumeCurveBehavior, VolumeProp } from 'remotion';
3
3
  import type { FallbackOffthreadVideoProps } from './props';
4
- type NewVideoForPreviewProps = {
4
+ type VideoForPreviewProps = {
5
5
  readonly src: string;
6
6
  readonly style: React.CSSProperties | undefined;
7
7
  readonly playbackRate: number;
@@ -20,6 +20,7 @@ type NewVideoForPreviewProps = {
20
20
  readonly disallowFallbackToOffthreadVideo: boolean;
21
21
  readonly fallbackOffthreadVideoProps: FallbackOffthreadVideoProps;
22
22
  readonly audioStreamIndex: number;
23
+ readonly debugOverlay: boolean;
23
24
  };
24
- export declare const VideoForPreview: React.FC<NewVideoForPreviewProps>;
25
+ export declare const VideoForPreview: React.FC<VideoForPreviewProps>;
25
26
  export {};