@remotion/media 4.0.356 → 4.0.357

Files changed (55)
  1. package/dist/audio/audio-for-preview.d.ts +30 -0
  2. package/dist/audio/audio-for-preview.js +213 -0
  3. package/dist/audio/audio-for-rendering.js +32 -15
  4. package/dist/audio/audio.js +7 -49
  5. package/dist/audio/props.d.ts +8 -14
  6. package/dist/audio-extraction/audio-cache.d.ts +1 -1
  7. package/dist/audio-extraction/audio-cache.js +5 -1
  8. package/dist/audio-extraction/audio-iterator.d.ts +4 -1
  9. package/dist/audio-extraction/audio-iterator.js +22 -10
  10. package/dist/audio-extraction/audio-manager.d.ts +8 -37
  11. package/dist/audio-extraction/audio-manager.js +35 -8
  12. package/dist/audio-extraction/extract-audio.d.ts +9 -2
  13. package/dist/audio-extraction/extract-audio.js +28 -15
  14. package/dist/caches.d.ts +9 -44
  15. package/dist/convert-audiodata/apply-tonefrequency.js +0 -1
  16. package/dist/convert-audiodata/combine-audiodata.js +2 -23
  17. package/dist/convert-audiodata/convert-audiodata.d.ts +1 -5
  18. package/dist/convert-audiodata/convert-audiodata.js +16 -24
  19. package/dist/convert-audiodata/wsola.js +1 -1
  20. package/dist/esm/index.mjs +2681 -2162
  21. package/dist/extract-frame-and-audio.d.ts +6 -7
  22. package/dist/extract-frame-and-audio.js +28 -19
  23. package/dist/get-sink-weak.d.ts +1 -1
  24. package/dist/get-sink-weak.js +3 -11
  25. package/dist/get-sink.d.ts +13 -0
  26. package/dist/get-sink.js +15 -0
  27. package/dist/get-time-in-seconds.d.ts +10 -0
  28. package/dist/get-time-in-seconds.js +25 -0
  29. package/dist/index.d.ts +1 -0
  30. package/dist/index.js +1 -0
  31. package/dist/is-network-error.d.ts +6 -0
  32. package/dist/is-network-error.js +17 -0
  33. package/dist/render-timestamp-range.d.ts +1 -0
  34. package/dist/render-timestamp-range.js +9 -0
  35. package/dist/video/media-player.d.ts +28 -7
  36. package/dist/video/media-player.js +123 -58
  37. package/dist/video/props.d.ts +1 -0
  38. package/dist/video/resolve-playback-time.d.ts +8 -0
  39. package/dist/video/resolve-playback-time.js +22 -0
  40. package/dist/video/video-for-preview.d.ts +8 -0
  41. package/dist/video/video-for-preview.js +113 -90
  42. package/dist/video/video-for-rendering.d.ts +3 -0
  43. package/dist/video/video-for-rendering.js +58 -25
  44. package/dist/video/video.js +6 -10
  45. package/dist/video-extraction/extract-frame-via-broadcast-channel.d.ts +18 -6
  46. package/dist/video-extraction/extract-frame-via-broadcast-channel.js +21 -7
  47. package/dist/video-extraction/extract-frame.d.ts +20 -2
  48. package/dist/video-extraction/extract-frame.js +40 -9
  49. package/dist/video-extraction/get-frames-since-keyframe.d.ts +5 -3
  50. package/dist/video-extraction/get-frames-since-keyframe.js +7 -4
  51. package/dist/video-extraction/keyframe-bank.d.ts +3 -2
  52. package/dist/video-extraction/keyframe-bank.js +32 -12
  53. package/dist/video-extraction/keyframe-manager.d.ts +3 -8
  54. package/dist/video-extraction/keyframe-manager.js +25 -10
  55. package/package.json +4 -4

package/dist/video/media-player.js
@@ -1,10 +1,12 @@
  import { ALL_FORMATS, AudioBufferSink, CanvasSink, Input, UrlSource, } from 'mediabunny';
  import { Internals } from 'remotion';
+ import { isNetworkError } from '../is-network-error';
+ import { resolvePlaybackTime } from './resolve-playback-time';
  import { sleep, withTimeout } from './timeout-utils';
  export const SEEK_THRESHOLD = 0.05;
  const AUDIO_BUFFER_TOLERANCE_THRESHOLD = 0.1;
  export class MediaPlayer {
- constructor({ canvas, src, logLevel, sharedAudioContext, }) {
+ constructor({ canvas, src, logLevel, sharedAudioContext, loop, trimBeforeSeconds, trimAfterSeconds, playbackRate, audioStreamIndex, }) {
  this.canvasSink = null;
  this.videoFrameIterator = null;
  this.nextFrame = null;
@@ -16,10 +18,11 @@ export class MediaPlayer {
  this.audioSyncAnchor = 0;
  this.playing = false;
  this.muted = false;
+ this.loop = false;
  this.animationFrameId = null;
  this.videoAsyncId = 0;
+ this.audioAsyncId = 0;
  this.initialized = false;
- this.totalDuration = 0;
  // for remotion buffer state
  this.isBuffering = false;
  this.audioBufferHealth = 0;
@@ -43,13 +46,15 @@
  this.startAudioIterator = async (startFromSecond) => {
  if (!this.hasAudio())
  return;
+ this.audioAsyncId++;
+ const currentAsyncId = this.audioAsyncId;
  // Clean up existing audio iterator
  await this.audioBufferIterator?.return();
  this.audioIteratorStarted = false;
  this.audioBufferHealth = 0;
  try {
  this.audioBufferIterator = this.audioSink.buffers(startFromSecond);
- this.runAudioIterator(startFromSecond);
+ this.runAudioIterator(startFromSecond, currentAsyncId);
  }
  catch (error) {
  Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, '[MediaPlayer] Failed to start audio iterator', error);
@@ -61,7 +66,7 @@
  }
  this.videoAsyncId++;
  const currentAsyncId = this.videoAsyncId;
- await this.videoFrameIterator?.return();
+ this.videoFrameIterator?.return().catch(() => undefined);
  this.videoFrameIterator = this.canvasSink.canvases(timeToSeek);
  try {
  const firstFrame = (await this.videoFrameIterator.next()).value ?? null;
@@ -69,10 +74,10 @@
  if (currentAsyncId !== this.videoAsyncId) {
  return;
  }
- if (firstFrame) {
+ if (firstFrame && this.context) {
  Internals.Log.trace({ logLevel: this.logLevel, tag: '@remotion/media' }, `[MediaPlayer] Drew initial frame ${firstFrame.timestamp.toFixed(3)}s`);
  this.context.drawImage(firstFrame.canvas, 0, 0);
- if (this.onVideoFrameCallback) {
+ if (this.onVideoFrameCallback && this.canvas) {
  this.onVideoFrameCallback(this.canvas);
  }
  }
@@ -95,8 +100,7 @@
  if (!newNextFrame) {
  break;
  }
- if (this.getAdjustedTimestamp(newNextFrame.timestamp) <=
- this.getPlaybackTime()) {
+ if (newNextFrame.timestamp <= this.getPlaybackTime()) {
  continue;
  }
  else {
@@ -112,7 +116,7 @@
  };
  this.bufferingStartedAtMs = null;
  this.minBufferingTimeoutMs = 500;
- this.runAudioIterator = async (startFromSecond) => {
+ this.runAudioIterator = async (startFromSecond, audioAsyncId) => {
  if (!this.hasAudio() || !this.audioBufferIterator)
  return;
  try {
@@ -120,6 +124,9 @@
  let isFirstBuffer = true;
  this.audioIteratorStarted = true;
  while (true) {
+ if (audioAsyncId !== this.audioAsyncId) {
+ return;
+ }
  const BUFFERING_TIMEOUT_MS = 50;
  let result;
  try {
@@ -140,8 +147,7 @@
  if (this.playing && !this.muted) {
  if (isFirstBuffer) {
  this.audioSyncAnchor =
- this.sharedAudioContext.currentTime -
- this.getAdjustedTimestamp(timestamp);
+ this.sharedAudioContext.currentTime - timestamp;
  isFirstBuffer = false;
  }
  // if timestamp is less than timeToSeek, skip
@@ -151,12 +157,10 @@
  }
  this.scheduleAudioChunk(buffer, timestamp);
  }
- if (this.getAdjustedTimestamp(timestamp) - this.getPlaybackTime() >=
- 1) {
+ if (timestamp - this.getPlaybackTime() >= 1) {
  await new Promise((resolve) => {
  const check = () => {
- if (this.getAdjustedTimestamp(timestamp) - this.getPlaybackTime() <
- 1) {
+ if (timestamp - this.getPlaybackTime() < 1) {
  resolve();
  }
  else {
@@ -172,19 +176,28 @@
  Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, '[MediaPlayer] Failed to run audio iterator', error);
  }
  };
- this.canvas = canvas;
+ this.canvas = canvas ?? null;
  this.src = src;
- this.logLevel = logLevel ?? 'info';
+ this.logLevel = logLevel ?? window.remotion_logLevel;
  this.sharedAudioContext = sharedAudioContext;
- this.playbackRate = 1;
- const context = canvas.getContext('2d', {
- alpha: false,
- desynchronized: true,
- });
- if (!context) {
- throw new Error('Could not get 2D context from canvas');
+ this.playbackRate = playbackRate;
+ this.loop = loop;
+ this.trimBeforeSeconds = trimBeforeSeconds;
+ this.trimAfterSeconds = trimAfterSeconds;
+ this.audioStreamIndex = audioStreamIndex ?? 0;
+ if (canvas) {
+ const context = canvas.getContext('2d', {
+ alpha: false,
+ desynchronized: true,
+ });
+ if (!context) {
+ throw new Error('Could not get 2D context from canvas');
+ }
+ this.context = context;
+ }
+ else {
+ this.context = null;
  }
- this.context = context;
  }
  isReady() {
  return this.initialized && Boolean(this.sharedAudioContext);
@@ -195,7 +208,7 @@
  isCurrentlyBuffering() {
  return this.isBuffering && Boolean(this.bufferingStartedAtMs);
  }
- async initialize(startTime = 0) {
+ async initialize(startTimeUnresolved) {
  try {
  const urlSource = new UrlSource(this.src);
  const input = new Input({
@@ -203,13 +216,32 @@
  formats: ALL_FORMATS,
  });
  this.input = input;
- this.totalDuration = await input.computeDuration();
- const videoTrack = await input.getPrimaryVideoTrack();
- const audioTrack = await input.getPrimaryAudioTrack();
+ try {
+ await this.input.getFormat();
+ }
+ catch (error) {
+ const err = error;
+ if (isNetworkError(err)) {
+ throw error;
+ }
+ Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, `[MediaPlayer] Failed to recognize format for ${this.src}`, error);
+ return { type: 'unknown-container-format' };
+ }
+ const [duration, videoTrack, audioTracks] = await Promise.all([
+ input.computeDuration(),
+ input.getPrimaryVideoTrack(),
+ input.getAudioTracks(),
+ ]);
+ this.totalDuration = duration;
+ const audioTrack = audioTracks[this.audioStreamIndex] ?? null;
  if (!videoTrack && !audioTrack) {
- throw new Error(`No video or audio track found for ${this.src}`);
+ return { type: 'no-tracks' };
  }
- if (videoTrack) {
+ if (videoTrack && this.canvas && this.context) {
+ const canDecode = await videoTrack.canDecode();
+ if (!canDecode) {
+ return { type: 'cannot-decode' };
+ }
  this.canvasSink = new CanvasSink(videoTrack, {
  poolSize: 2,
  fit: 'contain',
@@ -222,16 +254,31 @@
  this.gainNode = this.sharedAudioContext.createGain();
  this.gainNode.connect(this.sharedAudioContext.destination);
  }
+ const startTime = resolvePlaybackTime({
+ absolutePlaybackTimeInSeconds: startTimeUnresolved,
+ playbackRate: this.playbackRate,
+ loop: this.loop,
+ trimBeforeInSeconds: this.trimBeforeSeconds,
+ trimAfterInSeconds: this.trimAfterSeconds,
+ mediaDurationInSeconds: this.totalDuration,
+ });
  if (this.sharedAudioContext) {
  this.audioSyncAnchor = this.sharedAudioContext.currentTime - startTime;
  }
  this.initialized = true;
- const mediaTime = startTime * this.playbackRate;
- await this.startAudioIterator(mediaTime);
- await this.startVideoIterator(mediaTime);
+ await Promise.all([
+ this.startAudioIterator(startTime),
+ this.startVideoIterator(startTime),
+ ]);
  this.startRenderLoop();
+ return { type: 'success' };
  }
  catch (error) {
+ const err = error;
+ if (isNetworkError(err)) {
+ Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, `[MediaPlayer] Network/CORS error for ${this.src}`, err);
+ return { type: 'network-error' };
+ }
  Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, '[MediaPlayer] Failed to initialize', error);
  throw error;
  }
@@ -252,7 +299,14 @@
  async seekTo(time) {
  if (!this.isReady())
  return;
- const newTime = Math.max(0, Math.min(time, this.totalDuration));
+ const newTime = resolvePlaybackTime({
+ absolutePlaybackTimeInSeconds: time,
+ playbackRate: this.playbackRate,
+ loop: this.loop,
+ trimBeforeInSeconds: this.trimBeforeSeconds,
+ trimAfterInSeconds: this.trimAfterSeconds,
+ mediaDurationInSeconds: this.totalDuration,
+ });
  const currentPlaybackTime = this.getPlaybackTime();
  const isSignificantSeek = Math.abs(newTime - currentPlaybackTime) > SEEK_THRESHOLD;
  if (isSignificantSeek) {
@@ -261,9 +315,10 @@
  if (this.audioSink) {
  await this.cleanAudioIteratorAndNodes();
  }
- const mediaTime = newTime * this.playbackRate;
- await this.startAudioIterator(mediaTime);
- await this.startVideoIterator(mediaTime);
+ await Promise.all([
+ this.startAudioIterator(newTime),
+ this.startVideoIterator(newTime),
+ ]);
  }
  if (!this.playing) {
  this.render();
@@ -298,16 +353,11 @@
  const appliedVolume = Math.max(0, volume);
  this.gainNode.gain.value = appliedVolume;
  }
- async setPlaybackRate(rate) {
- if (this.playbackRate === rate)
- return;
+ setPlaybackRate(rate) {
  this.playbackRate = rate;
- if (this.hasAudio() && this.playing) {
- const currentPlaybackTime = this.getPlaybackTime();
- const mediaTime = currentPlaybackTime * rate;
- await this.cleanAudioIteratorAndNodes();
- await this.startAudioIterator(mediaTime);
- }
+ }
+ setLoop(loop) {
+ this.loop = loop;
  }
  dispose() {
  this.input?.dispose();
@@ -317,14 +367,18 @@
  this.videoAsyncId++;
  }
  getPlaybackTime() {
- return this.sharedAudioContext.currentTime - this.audioSyncAnchor;
- }
- getAdjustedTimestamp(mediaTimestamp) {
- return mediaTimestamp / this.playbackRate;
+ const absoluteTime = this.sharedAudioContext.currentTime - this.audioSyncAnchor;
+ return resolvePlaybackTime({
+ absolutePlaybackTimeInSeconds: absoluteTime,
+ playbackRate: this.playbackRate,
+ loop: this.loop,
+ trimBeforeInSeconds: this.trimBeforeSeconds,
+ trimAfterInSeconds: this.trimAfterSeconds,
+ mediaDurationInSeconds: this.totalDuration,
+ });
  }
  scheduleAudioChunk(buffer, mediaTimestamp) {
- const adjustedTimestamp = this.getAdjustedTimestamp(mediaTimestamp);
- const targetTime = adjustedTimestamp + this.audioSyncAnchor;
+ const targetTime = mediaTimestamp + this.audioSyncAnchor;
  const delay = targetTime - this.sharedAudioContext.currentTime;
  const node = this.sharedAudioContext.createBufferSource();
  node.buffer = buffer;
@@ -341,12 +395,22 @@
  }
  onBufferingChange(callback) {
  this.onBufferingChangeCallback = callback;
+ return () => {
+ if (this.onBufferingChangeCallback === callback) {
+ this.onBufferingChangeCallback = undefined;
+ }
+ };
  }
  onVideoFrame(callback) {
  this.onVideoFrameCallback = callback;
- if (this.initialized && callback) {
+ if (this.initialized && callback && this.canvas) {
  callback(this.canvas);
  }
+ return () => {
+ if (this.onVideoFrameCallback === callback) {
+ this.onVideoFrameCallback = undefined;
+ }
+ };
  }
  canRenderVideo() {
  return (!this.hasAudio() ||
@@ -369,12 +433,13 @@
  return (!this.isBuffering &&
  this.canRenderVideo() &&
  this.nextFrame !== null &&
- this.getAdjustedTimestamp(this.nextFrame.timestamp) <=
- this.getPlaybackTime());
+ this.nextFrame.timestamp <= this.getPlaybackTime());
  }
  drawCurrentFrame() {
- this.context.drawImage(this.nextFrame.canvas, 0, 0);
- if (this.onVideoFrameCallback) {
+ if (this.context && this.nextFrame) {
+ this.context.drawImage(this.nextFrame.canvas, 0, 0);
+ }
+ if (this.onVideoFrameCallback && this.canvas) {
  this.onVideoFrameCallback(this.canvas);
  }
  this.nextFrame = null;
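
The MediaPlayer changes above reshape its public surface: loop, trim, playback rate and audio stream index are now passed to the constructor, initialize() resolves the start time itself and returns a result object ('success', 'unknown-container-format', 'no-tracks', 'cannot-decode', 'network-error') instead of throwing on every failure, setPlaybackRate() is synchronous, and onBufferingChange()/onVideoFrame() return unsubscribe functions. A minimal caller sketch against this surface; the deep import path, canvas element, AudioContext and src are illustrative assumptions, not taken from the diff:

// Sketch only: exercises the MediaPlayer surface as changed in this diff.
// The import path, `canvasElement`, `audioContext` and `src` are assumptions for illustration.
import { MediaPlayer } from '@remotion/media/dist/video/media-player';

const canvasElement = document.querySelector('canvas') as HTMLCanvasElement;
const audioContext = new AudioContext();

const player = new MediaPlayer({
  canvas: canvasElement,
  src: 'https://example.com/video.mp4',
  logLevel: 'info',
  sharedAudioContext: audioContext,
  loop: true,
  trimBeforeSeconds: 1,
  trimAfterSeconds: undefined,
  playbackRate: 2,
  audioStreamIndex: 0,
});

// initialize() now reports failures as a result object instead of always throwing.
const result = await player.initialize(0);
if (result.type !== 'success') {
  // 'unknown-container-format' | 'no-tracks' | 'cannot-decode' | 'network-error'
  console.warn(`MediaPlayer did not start: ${result.type}`);
}

// Subscriptions now hand back an unsubscribe function.
const unsubscribe = player.onVideoFrame((canvas) => {
  console.log('frame drawn onto', canvas);
});
unsubscribe();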

package/dist/video/props.d.ts
@@ -35,6 +35,7 @@ type OptionalVideoProps = {
  fallbackOffthreadVideoProps: FallbackOffthreadVideoProps;
  trimAfter: number | undefined;
  trimBefore: number | undefined;
+ toneFrequency: number;
  showInTimeline: boolean;
  };
  export type InnerVideoProps = MandatoryVideoProps & OuterVideoProps & OptionalVideoProps;

package/dist/video/resolve-playback-time.d.ts
@@ -0,0 +1,8 @@
+ export declare const resolvePlaybackTime: ({ absolutePlaybackTimeInSeconds, playbackRate, loop, trimBeforeInSeconds, trimAfterInSeconds, mediaDurationInSeconds, }: {
+ absolutePlaybackTimeInSeconds: number;
+ playbackRate: number;
+ loop: boolean;
+ trimBeforeInSeconds: number | undefined;
+ trimAfterInSeconds: number | undefined;
+ mediaDurationInSeconds: number | undefined;
+ }) => number;

package/dist/video/resolve-playback-time.js
@@ -0,0 +1,22 @@
+ export const resolvePlaybackTime = ({ absolutePlaybackTimeInSeconds, playbackRate, loop, trimBeforeInSeconds, trimAfterInSeconds, mediaDurationInSeconds, }) => {
+ const loopAfterPreliminary = loop
+ ? Math.min(trimAfterInSeconds ?? Infinity, mediaDurationInSeconds ?? Infinity)
+ : Infinity;
+ const loopAfterConsideringTrimBefore = loopAfterPreliminary - (trimBeforeInSeconds ?? 0);
+ const loopAfterConsideringPlaybackRate = loopAfterConsideringTrimBefore / playbackRate;
+ const timeConsideringLoop = absolutePlaybackTimeInSeconds % loopAfterConsideringPlaybackRate;
+ const time = timeConsideringLoop * playbackRate + (trimBeforeInSeconds ?? 0);
+ if (Number.isNaN(time)) {
+ // eslint-disable-next-line no-console
+ console.log({
+ absolutePlaybackTimeInSeconds,
+ playbackRate,
+ loop,
+ trimBeforeInSeconds,
+ trimAfterInSeconds,
+ mediaDurationInSeconds,
+ });
+ throw new Error('Time is NaN');
+ }
+ return time;
+ };
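
resolvePlaybackTime collapses looping, trimming and playback rate into one mapping from timeline time to media time: the loop period in timeline seconds is (min(trimAfter, duration) - trimBefore) / playbackRate, the input is wrapped with %, and the result is scaled back by playbackRate and offset by trimBefore. A small worked example against the function above; the deep import path and the numbers are illustrative:

// Worked example only; the import path and input values are assumptions for illustration.
import { resolvePlaybackTime } from '@remotion/media/dist/video/resolve-playback-time';

// Loop period on the timeline: (min(trimAfter ?? Infinity, duration ?? Infinity) - trimBefore) / playbackRate
//   = (10 - 1) / 2 = 4.5 timeline seconds per loop.
// 7s on the timeline wraps to 7 % 4.5 = 2.5, then maps back into media time:
//   2.5 * 2 + 1 = 6 seconds into the file.
const mediaTime = resolvePlaybackTime({
  absolutePlaybackTimeInSeconds: 7,
  playbackRate: 2,
  loop: true,
  trimBeforeInSeconds: 1,
  trimAfterInSeconds: undefined,
  mediaDurationInSeconds: 10,
});
console.log(mediaTime); // 6

// With loop: false the period is Infinity, so only rate and trim apply:
// 7 % Infinity = 7, then 7 * 2 + 1 = 15.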

package/dist/video/video-for-preview.d.ts
@@ -1,5 +1,6 @@
  import React from 'react';
  import type { LogLevel, LoopVolumeCurveBehavior, OnVideoFrame, VolumeProp } from 'remotion';
+ import type { FallbackOffthreadVideoProps } from './props';
  type InnerVideoProps = {
  readonly className: string | undefined;
  readonly loop: boolean;
@@ -12,6 +13,13 @@ type InnerVideoProps = {
  readonly onVideoFrame: OnVideoFrame | undefined;
  readonly playbackRate: number;
  readonly style: React.CSSProperties;
+ readonly showInTimeline: boolean;
+ readonly trimAfter: number | undefined;
+ readonly trimBefore: number | undefined;
+ readonly stack: string | null;
+ readonly disallowFallbackToOffthreadVideo: boolean;
+ readonly fallbackOffthreadVideoProps: FallbackOffthreadVideoProps;
+ readonly audioStreamIndex: number;
  };
  export declare const VideoForPreview: React.FC<InnerVideoProps>;
  export {};