@remotion/media 4.0.356 → 4.0.358

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. package/dist/audio/audio-for-preview.d.ts +30 -0
  2. package/dist/audio/audio-for-preview.js +229 -0
  3. package/dist/audio/audio-for-rendering.js +35 -19
  4. package/dist/audio/audio.js +7 -49
  5. package/dist/audio/props.d.ts +8 -14
  6. package/dist/audio-extraction/audio-cache.d.ts +1 -1
  7. package/dist/audio-extraction/audio-cache.js +5 -1
  8. package/dist/audio-extraction/audio-iterator.d.ts +4 -1
  9. package/dist/audio-extraction/audio-iterator.js +22 -10
  10. package/dist/audio-extraction/audio-manager.d.ts +8 -37
  11. package/dist/audio-extraction/audio-manager.js +35 -8
  12. package/dist/audio-extraction/extract-audio.d.ts +9 -2
  13. package/dist/audio-extraction/extract-audio.js +29 -15
  14. package/dist/caches.d.ts +9 -44
  15. package/dist/convert-audiodata/combine-audiodata.js +2 -23
  16. package/dist/convert-audiodata/convert-audiodata.d.ts +1 -5
  17. package/dist/convert-audiodata/convert-audiodata.js +16 -24
  18. package/dist/esm/index.mjs +2864 -2173
  19. package/dist/extract-frame-and-audio.d.ts +6 -7
  20. package/dist/extract-frame-and-audio.js +28 -19
  21. package/dist/{get-sink-weak.d.ts → get-sink.d.ts} +1 -1
  22. package/dist/get-sink.js +15 -0
  23. package/dist/get-time-in-seconds.d.ts +11 -0
  24. package/dist/get-time-in-seconds.js +25 -0
  25. package/dist/index.d.ts +1 -0
  26. package/dist/index.js +1 -0
  27. package/dist/is-network-error.d.ts +6 -0
  28. package/dist/is-network-error.js +17 -0
  29. package/dist/render-timestamp-range.d.ts +1 -0
  30. package/dist/render-timestamp-range.js +9 -0
  31. package/dist/show-in-timeline.d.ts +8 -0
  32. package/dist/show-in-timeline.js +31 -0
  33. package/dist/use-media-in-timeline.d.ts +19 -0
  34. package/dist/use-media-in-timeline.js +103 -0
  35. package/dist/video/media-player.d.ts +34 -7
  36. package/dist/video/media-player.js +164 -63
  37. package/dist/video/props.d.ts +1 -0
  38. package/dist/video/video-for-preview.d.ts +17 -9
  39. package/dist/video/video-for-preview.js +138 -92
  40. package/dist/video/video-for-rendering.d.ts +3 -0
  41. package/dist/video/video-for-rendering.js +58 -25
  42. package/dist/video/video.js +6 -10
  43. package/dist/video-extraction/extract-frame-via-broadcast-channel.d.ts +18 -6
  44. package/dist/video-extraction/extract-frame-via-broadcast-channel.js +21 -7
  45. package/dist/video-extraction/extract-frame.d.ts +20 -2
  46. package/dist/video-extraction/extract-frame.js +41 -9
  47. package/dist/video-extraction/get-frames-since-keyframe.d.ts +5 -3
  48. package/dist/video-extraction/get-frames-since-keyframe.js +7 -4
  49. package/dist/video-extraction/keyframe-bank.d.ts +3 -2
  50. package/dist/video-extraction/keyframe-bank.js +32 -12
  51. package/dist/video-extraction/keyframe-manager.d.ts +3 -8
  52. package/dist/video-extraction/keyframe-manager.js +25 -10
  53. package/package.json +54 -54
  54. package/LICENSE.md +0 -49
  55. package/dist/convert-audiodata/apply-tonefrequency.d.ts +0 -2
  56. package/dist/convert-audiodata/apply-tonefrequency.js +0 -44
  57. package/dist/convert-audiodata/wsola.d.ts +0 -13
  58. package/dist/convert-audiodata/wsola.js +0 -197
  59. package/dist/get-sink-weak.js +0 -23
  60. package/dist/log.d.ts +0 -10
  61. package/dist/log.js +0 -33
package/dist/video/media-player.js

@@ -1,10 +1,12 @@
  import { ALL_FORMATS, AudioBufferSink, CanvasSink, Input, UrlSource, } from 'mediabunny';
  import { Internals } from 'remotion';
+ import { getTimeInSeconds } from '../get-time-in-seconds';
+ import { isNetworkError } from '../is-network-error';
  import { sleep, withTimeout } from './timeout-utils';
  export const SEEK_THRESHOLD = 0.05;
  const AUDIO_BUFFER_TOLERANCE_THRESHOLD = 0.1;
  export class MediaPlayer {
- constructor({ canvas, src, logLevel, sharedAudioContext, }) {
+ constructor({ canvas, src, logLevel, sharedAudioContext, loop, trimBefore, trimAfter, playbackRate, audioStreamIndex, fps, }) {
  this.canvasSink = null;
  this.videoFrameIterator = null;
  this.nextFrame = null;
@@ -12,14 +14,17 @@ export class MediaPlayer {
  this.audioBufferIterator = null;
  this.queuedAudioNodes = new Set();
  this.gainNode = null;
- // audioDelay = mediaTimestamp + audioSyncAnchor - sharedAudioContext.currentTime
+ this.currentVolume = 1;
+ // this is the time difference between Web Audio timeline
+ // and media file timeline
  this.audioSyncAnchor = 0;
  this.playing = false;
  this.muted = false;
+ this.loop = false;
  this.animationFrameId = null;
  this.videoAsyncId = 0;
+ this.audioAsyncId = 0;
  this.initialized = false;
- this.totalDuration = 0;
  // for remotion buffer state
  this.isBuffering = false;
  this.audioBufferHealth = 0;
@@ -43,13 +48,15 @@
  this.startAudioIterator = async (startFromSecond) => {
  if (!this.hasAudio())
  return;
+ this.audioAsyncId++;
+ const currentAsyncId = this.audioAsyncId;
  // Clean up existing audio iterator
  await this.audioBufferIterator?.return();
  this.audioIteratorStarted = false;
  this.audioBufferHealth = 0;
  try {
  this.audioBufferIterator = this.audioSink.buffers(startFromSecond);
- this.runAudioIterator(startFromSecond);
+ this.runAudioIterator(startFromSecond, currentAsyncId);
  }
  catch (error) {
  Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, '[MediaPlayer] Failed to start audio iterator', error);
@@ -61,7 +68,7 @@
  }
  this.videoAsyncId++;
  const currentAsyncId = this.videoAsyncId;
- await this.videoFrameIterator?.return();
+ this.videoFrameIterator?.return().catch(() => undefined);
  this.videoFrameIterator = this.canvasSink.canvases(timeToSeek);
  try {
  const firstFrame = (await this.videoFrameIterator.next()).value ?? null;
@@ -69,10 +76,10 @@
  if (currentAsyncId !== this.videoAsyncId) {
  return;
  }
- if (firstFrame) {
+ if (firstFrame && this.context) {
  Internals.Log.trace({ logLevel: this.logLevel, tag: '@remotion/media' }, `[MediaPlayer] Drew initial frame ${firstFrame.timestamp.toFixed(3)}s`);
  this.context.drawImage(firstFrame.canvas, 0, 0);
- if (this.onVideoFrameCallback) {
+ if (this.onVideoFrameCallback && this.canvas) {
  this.onVideoFrameCallback(this.canvas);
  }
  }
@@ -95,8 +102,11 @@
  if (!newNextFrame) {
  break;
  }
- if (this.getAdjustedTimestamp(newNextFrame.timestamp) <=
- this.getPlaybackTime()) {
+ const playbackTime = this.getPlaybackTime();
+ if (playbackTime === null) {
+ continue;
+ }
+ if (newNextFrame.timestamp <= playbackTime) {
  continue;
  }
  else {
@@ -112,7 +122,7 @@
  };
  this.bufferingStartedAtMs = null;
  this.minBufferingTimeoutMs = 500;
- this.runAudioIterator = async (startFromSecond) => {
+ this.runAudioIterator = async (startFromSecond, audioAsyncId) => {
  if (!this.hasAudio() || !this.audioBufferIterator)
  return;
  try {
@@ -120,6 +130,9 @@
  let isFirstBuffer = true;
  this.audioIteratorStarted = true;
  while (true) {
+ if (audioAsyncId !== this.audioAsyncId) {
+ return;
+ }
  const BUFFERING_TIMEOUT_MS = 50;
  let result;
  try {
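The `audioAsyncId` introduced in the two hunks above mirrors the existing `videoAsyncId`: every restart of the audio iterator bumps a generation counter, and the long-running loop bails out as soon as the id it captured no longer matches, so a stale loop cannot keep scheduling chunks after a seek. A minimal, self-contained sketch of that cancellation pattern (illustrative names, not package code):

```ts
// Illustrative only: a generation counter that invalidates stale async loops.
class IteratorOwner {
  private generation = 0;

  async restart(source: AsyncIterable<number>): Promise<void> {
    const myGeneration = ++this.generation; // supersede any loop started earlier
    for await (const value of source) {
      if (myGeneration !== this.generation) {
        return; // a newer restart() has taken over; stop consuming
      }
      this.consume(value);
    }
  }

  private consume(value: number): void {
    console.log('consumed', value);
  }
}
```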
@@ -137,11 +150,10 @@
  totalBufferDuration += duration;
  this.audioBufferHealth = Math.max(0, totalBufferDuration / this.playbackRate);
  this.maybeResumeFromBuffering(totalBufferDuration / this.playbackRate);
- if (this.playing && !this.muted) {
+ if (this.playing) {
  if (isFirstBuffer) {
  this.audioSyncAnchor =
- this.sharedAudioContext.currentTime -
- this.getAdjustedTimestamp(timestamp);
+ this.sharedAudioContext.currentTime - timestamp;
  isFirstBuffer = false;
  }
  // if timestamp is less than timeToSeek, skip
@@ -151,12 +163,16 @@
  }
  this.scheduleAudioChunk(buffer, timestamp);
  }
- if (this.getAdjustedTimestamp(timestamp) - this.getPlaybackTime() >=
- 1) {
+ const playbackTime = this.getPlaybackTime();
+ if (playbackTime === null) {
+ continue;
+ }
+ if (timestamp - playbackTime >= 1) {
  await new Promise((resolve) => {
  const check = () => {
- if (this.getAdjustedTimestamp(timestamp) - this.getPlaybackTime() <
- 1) {
+ const currentPlaybackTime = this.getPlaybackTime();
+ if (currentPlaybackTime !== null &&
+ timestamp - currentPlaybackTime < 1) {
  resolve();
  }
  else {
@@ -172,19 +188,29 @@
  Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, '[MediaPlayer] Failed to run audio iterator', error);
  }
  };
- this.canvas = canvas;
+ this.canvas = canvas ?? null;
  this.src = src;
- this.logLevel = logLevel ?? 'info';
+ this.logLevel = logLevel ?? window.remotion_logLevel;
  this.sharedAudioContext = sharedAudioContext;
- this.playbackRate = 1;
- const context = canvas.getContext('2d', {
- alpha: false,
- desynchronized: true,
- });
- if (!context) {
- throw new Error('Could not get 2D context from canvas');
+ this.playbackRate = playbackRate;
+ this.loop = loop;
+ this.trimBefore = trimBefore;
+ this.trimAfter = trimAfter;
+ this.audioStreamIndex = audioStreamIndex ?? 0;
+ this.fps = fps;
+ if (canvas) {
+ const context = canvas.getContext('2d', {
+ alpha: true,
+ desynchronized: true,
+ });
+ if (!context) {
+ throw new Error('Could not get 2D context from canvas');
+ }
+ this.context = context;
+ }
+ else {
+ this.context = null;
  }
- this.context = context;
  }
  isReady() {
  return this.initialized && Boolean(this.sharedAudioContext);
@@ -195,7 +221,7 @@
  isCurrentlyBuffering() {
  return this.isBuffering && Boolean(this.bufferingStartedAtMs);
  }
- async initialize(startTime = 0) {
+ async initialize(startTimeUnresolved) {
  try {
  const urlSource = new UrlSource(this.src);
  const input = new Input({
@@ -203,13 +229,32 @@
  formats: ALL_FORMATS,
  });
  this.input = input;
- this.totalDuration = await input.computeDuration();
- const videoTrack = await input.getPrimaryVideoTrack();
- const audioTrack = await input.getPrimaryAudioTrack();
+ try {
+ await this.input.getFormat();
+ }
+ catch (error) {
+ const err = error;
+ if (isNetworkError(err)) {
+ throw error;
+ }
+ Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, `[MediaPlayer] Failed to recognize format for ${this.src}`, error);
+ return { type: 'unknown-container-format' };
+ }
+ const [durationInSeconds, videoTrack, audioTracks] = await Promise.all([
+ input.computeDuration(),
+ input.getPrimaryVideoTrack(),
+ input.getAudioTracks(),
+ ]);
+ this.totalDuration = durationInSeconds;
+ const audioTrack = audioTracks[this.audioStreamIndex] ?? null;
  if (!videoTrack && !audioTrack) {
- throw new Error(`No video or audio track found for ${this.src}`);
+ return { type: 'no-tracks' };
  }
- if (videoTrack) {
+ if (videoTrack && this.canvas && this.context) {
+ const canDecode = await videoTrack.canDecode();
+ if (!canDecode) {
+ return { type: 'cannot-decode' };
+ }
  this.canvasSink = new CanvasSink(videoTrack, {
  poolSize: 2,
  fit: 'contain',
@@ -222,20 +267,47 @@
  this.gainNode = this.sharedAudioContext.createGain();
  this.gainNode.connect(this.sharedAudioContext.destination);
  }
+ const startTime = getTimeInSeconds({
+ unloopedTimeInSeconds: startTimeUnresolved,
+ playbackRate: this.playbackRate,
+ loop: this.loop,
+ trimBefore: this.trimBefore,
+ trimAfter: this.trimAfter,
+ mediaDurationInSeconds: this.totalDuration,
+ fps: this.fps,
+ ifNoMediaDuration: 'infinity',
+ src: this.src,
+ });
+ if (startTime === null) {
+ this.clearCanvas();
+ return { type: 'success', durationInSeconds: this.totalDuration };
+ }
  if (this.sharedAudioContext) {
  this.audioSyncAnchor = this.sharedAudioContext.currentTime - startTime;
  }
  this.initialized = true;
- const mediaTime = startTime * this.playbackRate;
- await this.startAudioIterator(mediaTime);
- await this.startVideoIterator(mediaTime);
+ await Promise.all([
+ this.startAudioIterator(startTime),
+ this.startVideoIterator(startTime),
+ ]);
  this.startRenderLoop();
+ return { type: 'success', durationInSeconds };
  }
  catch (error) {
+ const err = error;
+ if (isNetworkError(err)) {
+ Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, `[MediaPlayer] Network/CORS error for ${this.src}`, err);
+ return { type: 'network-error' };
+ }
  Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, '[MediaPlayer] Failed to initialize', error);
  throw error;
  }
  }
+ clearCanvas() {
+ if (this.context && this.canvas) {
+ this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
+ }
+ }
  cleanupAudioQueue() {
  for (const node of this.queuedAudioNodes) {
  node.stop();
@@ -252,18 +324,35 @@
  async seekTo(time) {
  if (!this.isReady())
  return;
- const newTime = Math.max(0, Math.min(time, this.totalDuration));
+ const newTime = getTimeInSeconds({
+ unloopedTimeInSeconds: time,
+ playbackRate: this.playbackRate,
+ loop: this.loop,
+ trimBefore: this.trimBefore,
+ trimAfter: this.trimAfter,
+ mediaDurationInSeconds: this.totalDuration ?? null,
+ fps: this.fps,
+ ifNoMediaDuration: 'infinity',
+ src: this.src,
+ });
+ if (newTime === null) {
+ this.clearCanvas();
+ await this.cleanAudioIteratorAndNodes();
+ return;
+ }
  const currentPlaybackTime = this.getPlaybackTime();
- const isSignificantSeek = Math.abs(newTime - currentPlaybackTime) > SEEK_THRESHOLD;
+ const isSignificantSeek = currentPlaybackTime === null ||
+ Math.abs(newTime - currentPlaybackTime) > SEEK_THRESHOLD;
  if (isSignificantSeek) {
  this.nextFrame = null;
  this.audioSyncAnchor = this.sharedAudioContext.currentTime - newTime;
  if (this.audioSink) {
  await this.cleanAudioIteratorAndNodes();
  }
- const mediaTime = newTime * this.playbackRate;
- await this.startAudioIterator(mediaTime);
- await this.startVideoIterator(mediaTime);
+ await Promise.all([
+ this.startAudioIterator(newTime),
+ this.startVideoIterator(newTime),
+ ]);
  }
  if (!this.playing) {
  this.render();
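Both `initialize` and `seekTo` now delegate time resolution to the new `get-time-in-seconds` helper, which maps an unlooped timeline position onto a concrete media timestamp, or `null`, in which case the player clears the canvas and tears down the audio pipeline. The helper's source is not part of this excerpt, so the following is only a rough sketch of what such a resolver could look like, under the assumption that `trimBefore`/`trimAfter` are frame counts and that looping wraps around the trimmed playable window:

```ts
// Assumed shape only — not the actual implementation in get-time-in-seconds.js.
type ResolveTimeOptions = {
  unloopedTimeInSeconds: number;
  playbackRate: number;
  loop: boolean;
  trimBefore: number | undefined; // assumed to be in frames
  trimAfter: number | undefined; // assumed to be in frames
  mediaDurationInSeconds: number | null;
  fps: number;
};

const resolveTimeInSeconds = (options: ResolveTimeOptions): number | null => {
  const trimBeforeSeconds = (options.trimBefore ?? 0) / options.fps;
  const trimAfterSeconds =
    options.trimAfter === undefined ? null : options.trimAfter / options.fps;

  // Scale by playback rate and offset by the trimmed-in point.
  let mediaTime =
    options.unloopedTimeInSeconds * options.playbackRate + trimBeforeSeconds;

  const end = Math.min(
    trimAfterSeconds ?? Infinity,
    options.mediaDurationInSeconds ?? Infinity,
  );

  if (options.loop && Number.isFinite(end)) {
    // Wrap around the playable window [trimBeforeSeconds, end).
    const windowLength = end - trimBeforeSeconds;
    mediaTime =
      trimBeforeSeconds + ((mediaTime - trimBeforeSeconds) % windowLength);
  }

  // Past the end of a non-looping asset: nothing left to show.
  return mediaTime > end ? null : mediaTime;
};
```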
@@ -287,8 +376,8 @@
  }
  setMuted(muted) {
  this.muted = muted;
- if (muted) {
- this.cleanupAudioQueue();
+ if (this.gainNode) {
+ this.gainNode.gain.value = muted ? 0 : this.currentVolume;
  }
  }
  setVolume(volume) {
@@ -296,18 +385,19 @@
  return;
  }
  const appliedVolume = Math.max(0, volume);
- this.gainNode.gain.value = appliedVolume;
+ this.currentVolume = appliedVolume;
+ if (!this.muted) {
+ this.gainNode.gain.value = appliedVolume;
+ }
  }
- async setPlaybackRate(rate) {
- if (this.playbackRate === rate)
- return;
+ setPlaybackRate(rate) {
  this.playbackRate = rate;
- if (this.hasAudio() && this.playing) {
- const currentPlaybackTime = this.getPlaybackTime();
- const mediaTime = currentPlaybackTime * rate;
- await this.cleanAudioIteratorAndNodes();
- await this.startAudioIterator(mediaTime);
- }
+ }
+ setFps(fps) {
+ this.fps = fps;
+ }
+ setLoop(loop) {
+ this.loop = loop;
  }
  dispose() {
  this.input?.dispose();
@@ -319,12 +409,8 @@
  getPlaybackTime() {
  return this.sharedAudioContext.currentTime - this.audioSyncAnchor;
  }
- getAdjustedTimestamp(mediaTimestamp) {
- return mediaTimestamp / this.playbackRate;
- }
  scheduleAudioChunk(buffer, mediaTimestamp) {
- const adjustedTimestamp = this.getAdjustedTimestamp(mediaTimestamp);
- const targetTime = adjustedTimestamp + this.audioSyncAnchor;
+ const targetTime = mediaTimestamp + this.audioSyncAnchor;
  const delay = targetTime - this.sharedAudioContext.currentTime;
  const node = this.sharedAudioContext.createBufferSource();
  node.buffer = buffer;
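With `getAdjustedTimestamp` removed, timestamps stay in media time throughout, and `audioSyncAnchor` alone bridges the two clocks: it is chosen so that `AudioContext.currentTime - audioSyncAnchor` equals the current media time. Restating the relationships visible in the code above as a small worked sketch (the concrete numbers and variable names are illustrative):

```ts
// Illustrative restatement of the anchor arithmetic shown above.
const audioContextNow = 12.0; // AudioContext.currentTime when the first buffer arrives
const firstBufferTimestamp = 3.0; // media time of that buffer, in seconds

// Anchor chosen so that media time 3.0 lines up with context time 12.0:
const audioSyncAnchor = audioContextNow - firstBufferTimestamp; // 9.0

// Current playback position in media time:
const playbackTime = (currentTime: number) => currentTime - audioSyncAnchor;
console.log(playbackTime(12.5)); // 3.5 — half a second of media has played

// Scheduling a chunk whose media timestamp is 4.0:
const targetTime = 4.0 + audioSyncAnchor; // 13.0 on the AudioContext clock
const delay = targetTime - 12.5; // start it 0.5 s from "now"
console.log(targetTime, delay);
```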
@@ -341,12 +427,22 @@
  }
  onBufferingChange(callback) {
  this.onBufferingChangeCallback = callback;
+ return () => {
+ if (this.onBufferingChangeCallback === callback) {
+ this.onBufferingChangeCallback = undefined;
+ }
+ };
  }
  onVideoFrame(callback) {
  this.onVideoFrameCallback = callback;
- if (this.initialized && callback) {
+ if (this.initialized && callback && this.canvas) {
  callback(this.canvas);
  }
+ return () => {
+ if (this.onVideoFrameCallback === callback) {
+ this.onVideoFrameCallback = undefined;
+ }
+ };
  }
  canRenderVideo() {
  return (!this.hasAudio() ||
@@ -366,15 +462,20 @@
  }
  }
  shouldRenderFrame() {
+ const playbackTime = this.getPlaybackTime();
+ if (playbackTime === null) {
+ return false;
+ }
  return (!this.isBuffering &&
  this.canRenderVideo() &&
  this.nextFrame !== null &&
- this.getAdjustedTimestamp(this.nextFrame.timestamp) <=
- this.getPlaybackTime());
+ this.nextFrame.timestamp <= playbackTime);
  }
  drawCurrentFrame() {
- this.context.drawImage(this.nextFrame.canvas, 0, 0);
- if (this.onVideoFrameCallback) {
+ if (this.context && this.nextFrame) {
+ this.context.drawImage(this.nextFrame.canvas, 0, 0);
+ }
+ if (this.onVideoFrameCallback && this.canvas) {
  this.onVideoFrameCallback(this.canvas);
  }
  this.nextFrame = null;
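Taken together, the hunks above reshape the `MediaPlayer` surface: the constructor accepts `loop`, `trimBefore`, `trimAfter`, `playbackRate`, `audioStreamIndex` and `fps` (with `canvas` now optional), `initialize()` returns a discriminated result instead of throwing on unsupported inputs, and the `onVideoFrame`/`onBufferingChange` subscriptions return cleanup functions. A hedged usage sketch based only on the compiled output shown here — this class is an internal of `@remotion/media`, it is not a documented export, and the real TypeScript types (for example the buffering callback's argument) may differ:

```ts
// Sketch only: assumes the constructor/initialize() shapes visible in this diff.
// MediaPlayer here refers to the class compiled into package/dist/video/media-player.js.
const canvas = document.createElement('canvas');
const sharedAudioContext = new AudioContext();

const player = new MediaPlayer({
  canvas,
  src: 'https://example.com/video.mp4',
  logLevel: 'info',
  sharedAudioContext,
  loop: false,
  trimBefore: undefined,
  trimAfter: undefined,
  playbackRate: 1,
  audioStreamIndex: 0,
  fps: 30,
});

const result = await player.initialize(0);
if (result.type !== 'success') {
  // 'unknown-container-format' | 'no-tracks' | 'cannot-decode' | 'network-error'
  console.warn('MediaPlayer could not start, falling back:', result.type);
} else {
  console.log('duration', result.durationInSeconds);

  const unsubscribeFrames = player.onVideoFrame((frame) => {
    console.log('frame drawn onto', frame);
  });
  const unsubscribeBuffering = player.onBufferingChange((buffering) => {
    console.log('buffering?', buffering); // assumed boolean payload
  });

  player.setVolume(0.8);
  await player.seekTo(2.5);

  // Later, tear everything down:
  unsubscribeFrames();
  unsubscribeBuffering();
  player.dispose();
}
```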
package/dist/video/props.d.ts

@@ -35,6 +35,7 @@ type OptionalVideoProps = {
  fallbackOffthreadVideoProps: FallbackOffthreadVideoProps;
  trimAfter: number | undefined;
  trimBefore: number | undefined;
+ toneFrequency: number;
  showInTimeline: boolean;
  };
  export type InnerVideoProps = MandatoryVideoProps & OuterVideoProps & OptionalVideoProps;
package/dist/video/video-for-preview.d.ts

@@ -1,17 +1,25 @@
  import React from 'react';
- import type { LogLevel, LoopVolumeCurveBehavior, OnVideoFrame, VolumeProp } from 'remotion';
- type InnerVideoProps = {
- readonly className: string | undefined;
- readonly loop: boolean;
+ import type { LogLevel, LoopVolumeCurveBehavior, VolumeProp } from 'remotion';
+ import type { FallbackOffthreadVideoProps } from './props';
+ type NewVideoForPreviewProps = {
  readonly src: string;
+ readonly style: React.CSSProperties | undefined;
+ readonly playbackRate: number;
  readonly logLevel: LogLevel;
+ readonly className: string | undefined;
  readonly muted: boolean;
- readonly name: string | undefined;
  readonly volume: VolumeProp;
  readonly loopVolumeCurveBehavior: LoopVolumeCurveBehavior;
- readonly onVideoFrame: OnVideoFrame | undefined;
- readonly playbackRate: number;
- readonly style: React.CSSProperties;
+ readonly onVideoFrame: undefined | ((frame: CanvasImageSource) => void);
+ readonly showInTimeline: boolean;
+ readonly loop: boolean;
+ readonly name: string | undefined;
+ readonly trimAfter: number | undefined;
+ readonly trimBefore: number | undefined;
+ readonly stack: string | null;
+ readonly disallowFallbackToOffthreadVideo: boolean;
+ readonly fallbackOffthreadVideoProps: FallbackOffthreadVideoProps;
+ readonly audioStreamIndex: number;
  };
- export declare const VideoForPreview: React.FC<InnerVideoProps>;
+ export declare const VideoForPreview: React.FC<NewVideoForPreviewProps>;
  export {};