@remotion/media 4.0.370 → 4.0.371

@@ -2,7 +2,14 @@
  import { Internals as Internals14, useRemotionEnvironment as useRemotionEnvironment2 } from "remotion";

  // src/audio/audio-for-preview.tsx
- import { useContext as useContext2, useEffect as useEffect2, useMemo as useMemo2, useRef, useState as useState2 } from "react";
+ import {
+   useContext as useContext2,
+   useEffect as useEffect2,
+   useLayoutEffect,
+   useMemo as useMemo2,
+   useRef,
+   useState as useState2
+ } from "react";
  import {
    Internals as Internals6,
    Audio as RemotionAudio,
@@ -55,6 +62,23 @@ var roundTo4Digits = (timestamp) => {
    return Math.round(timestamp * 1000) / 1000;
  };

+ // src/audio/allow-wait.ts
+ var allowWaitRoutine = async (next, waitFn) => {
+   const result = await Promise.race([
+     next,
+     new Promise((resolve) => {
+       Promise.resolve().then(() => resolve());
+     })
+   ]);
+   if (!result) {
+     const unblock = waitFn.waitCallback();
+     const newRes = await next;
+     unblock();
+     return newRes;
+   }
+   return result;
+ };
+
  // src/audio/audio-preview-iterator.ts
  var makeAudioIterator = (audioSink, startFromSecond) => {
    let destroyed = false;
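Note on the new `allowWaitRoutine` above: `iterator.next()` resolves to an `IteratorResult` object, which is always truthy, so racing it against a promise that resolves to `undefined` after one microtask is a cheap probe for "is the next chunk already available?". Only when the probe loses does the routine call `waitFn.waitCallback()` — wired up further down to `bufferState.delayPlayback()` — and it releases the handle once the awaited chunk arrives. A minimal sketch of the same pattern, with illustrative names:

```ts
// Sketch only - not the shipped code. Race a pending promise against one
// microtask; if the microtask wins, the value is not synchronously ready.
const probeOrWait = async <T extends object>(
  pending: Promise<T>,
  onWaitStart: () => () => void, // returns an "unblock" handle
): Promise<T> => {
  const winner = await Promise.race([
    pending,
    Promise.resolve().then(() => undefined),
  ]);
  if (winner) {
    return winner; // chunk was already decoded; nothing to signal
  }
  const unblock = onWaitStart(); // e.g. tell the player it is buffering
  try {
    return await pending; // now genuinely wait for the chunk
  } finally {
    unblock(); // release the buffering state even if `pending` rejects
  }
};
```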
@@ -71,7 +95,7 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
    let iteratorEnded = false;
    const getNextOrNullIfNotAvailable = async (allowWait) => {
      const next = iterator.next();
-     const result = allowWait ? await next : await Promise.race([
+     const result = allowWait ? await allowWaitRoutine(next, allowWait) : await Promise.race([
        next,
        new Promise((resolve) => {
          Promise.resolve().then(() => resolve());
@@ -101,7 +125,7 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
        buffer: result.value ?? null
      };
    };
-   const tryToSatisfySeek = async (time, allowWait) => {
+   const tryToSatisfySeek = async (time, allowWait, onBufferScheduled) => {
      if (lastReturnedBuffer) {
        const bufferTimestamp = roundTo4Digits(lastReturnedBuffer.timestamp);
        const bufferEndTimestamp = roundTo4Digits(lastReturnedBuffer.timestamp + lastReturnedBuffer.duration);
@@ -112,19 +136,20 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
          };
        }
        if (roundTo4Digits(time) <= bufferEndTimestamp) {
+         onBufferScheduled(lastReturnedBuffer);
          return {
-           type: "satisfied",
-           buffers: [lastReturnedBuffer]
+           type: "satisfied"
          };
        }
      }
      if (iteratorEnded) {
+       if (lastReturnedBuffer) {
+         onBufferScheduled(lastReturnedBuffer);
+       }
        return {
-         type: "satisfied",
-         buffers: lastReturnedBuffer ? [lastReturnedBuffer] : []
+         type: "satisfied"
        };
      }
-     const toBeReturned = [];
      while (true) {
        const buffer = await getNextOrNullIfNotAvailable(allowWait);
        if (buffer.type === "need-to-wait-for-it") {
@@ -136,21 +161,23 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
        if (buffer.type === "got-buffer-or-end") {
          if (buffer.buffer === null) {
            iteratorEnded = true;
+           if (lastReturnedBuffer) {
+             onBufferScheduled(lastReturnedBuffer);
+           }
            return {
-             type: "satisfied",
-             buffers: lastReturnedBuffer ? [lastReturnedBuffer] : []
+             type: "satisfied"
            };
          }
          const bufferTimestamp = roundTo4Digits(buffer.buffer.timestamp);
          const bufferEndTimestamp = roundTo4Digits(buffer.buffer.timestamp + buffer.buffer.duration);
          const timestamp = roundTo4Digits(time);
          if (bufferTimestamp <= timestamp && bufferEndTimestamp > timestamp) {
+           onBufferScheduled(buffer.buffer);
            return {
-             type: "satisfied",
-             buffers: [...toBeReturned, buffer.buffer]
+             type: "satisfied"
            };
          }
-         toBeReturned.push(buffer.buffer);
+         onBufferScheduled(buffer.buffer);
          continue;
        }
        throw new Error("Unreachable");
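The refactor running through the three hunks above: `tryToSatisfySeek` no longer accumulates decoded chunks in a `toBeReturned` array and hands them back with the result; each chunk now goes through the new `onBufferScheduled` callback the moment it is available, and `satisfied` results carry no payload. The callers below use this to schedule audio immediately and to drop chunks once a newer seek has made the current one stale. Schematically:

```ts
// Illustrative signatures only - the before/after shape of the API:
type Chunk = { timestamp: number; duration: number };

// Before: chunks are delivered in bulk, only after the whole seek resolves.
declare function tryToSatisfySeekBefore(
  time: number,
  allowWait: boolean,
): Promise<{ type: 'satisfied'; buffers: Chunk[] }>;

// After: chunks stream out as they decode; the result is a bare marker.
declare function tryToSatisfySeekAfter(
  time: number,
  allowWait: { type: 'allow-wait'; waitCallback: () => () => void } | null,
  onBufferScheduled: (chunk: Chunk) => void,
): Promise<{ type: 'satisfied' }>;
```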
@@ -211,13 +238,9 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
        audioChunksForAfterResuming.length = 0;
        return chunks;
      },
-     getQueuedPeriod: (pendingBuffers) => {
+     getQueuedPeriod: () => {
        let until = -Infinity;
        let from = Infinity;
-       for (const buffer of pendingBuffers) {
-         until = Math.max(until, buffer.timestamp + buffer.duration);
-         from = Math.min(from, buffer.timestamp);
-       }
        for (const node of queuedAudioNodes) {
          until = Math.max(until, node.timestamp + node.buffer.duration);
          from = Math.min(from, node.timestamp);
@@ -349,7 +372,8 @@ var audioIteratorManager = ({
      fps,
      playbackRate,
      getIsPlaying,
-     scheduleAudioNode
+     scheduleAudioNode,
+     bufferState
    }) => {
      if (!audioBufferIterator) {
        await startAudioIterator({
@@ -361,10 +385,18 @@ var audioIteratorManager = ({
        });
        return;
      }
-     const currentTimeIsAlreadyQueued = isAlreadyQueued(newTime, audioBufferIterator.getQueuedPeriod([]));
-     const toBeScheduled = [];
+     const currentTimeIsAlreadyQueued = isAlreadyQueued(newTime, audioBufferIterator.getQueuedPeriod());
      if (!currentTimeIsAlreadyQueued) {
-       const audioSatisfyResult = await audioBufferIterator.tryToSatisfySeek(newTime, false);
+       const audioSatisfyResult = await audioBufferIterator.tryToSatisfySeek(newTime, null, (buffer) => {
+         if (!nonce.isStale()) {
+           onAudioChunk({
+             getIsPlaying,
+             buffer,
+             playbackRate,
+             scheduleAudioNode
+           });
+         }
+       });
        if (nonce.isStale()) {
          return;
        }
@@ -378,12 +410,28 @@ var audioIteratorManager = ({
          });
          return;
        }
-       toBeScheduled.push(...audioSatisfyResult.buffers);
      }
-     const nextTime = newTime + 1 / fps * playbackRate + 1 / fps * playbackRate;
-     const nextIsAlreadyQueued = isAlreadyQueued(nextTime, audioBufferIterator.getQueuedPeriod(toBeScheduled));
+     const nextTime = newTime + 1 / fps * Math.max(1, playbackRate) * 3;
+     const nextIsAlreadyQueued = isAlreadyQueued(nextTime, audioBufferIterator.getQueuedPeriod());
      if (!nextIsAlreadyQueued) {
-       const audioSatisfyResult = await audioBufferIterator.tryToSatisfySeek(nextTime, true);
+       const audioSatisfyResult = await audioBufferIterator.tryToSatisfySeek(nextTime, {
+         type: "allow-wait",
+         waitCallback: () => {
+           const handle = bufferState.delayPlayback();
+           return () => {
+             handle.unblock();
+           };
+         }
+       }, (buffer) => {
+         if (!nonce.isStale()) {
+           onAudioChunk({
+             getIsPlaying,
+             buffer,
+             playbackRate,
+             scheduleAudioNode
+           });
+         }
+       });
        if (nonce.isStale()) {
          return;
        }
@@ -395,17 +443,7 @@ var audioIteratorManager = ({
            getIsPlaying,
            scheduleAudioNode
          });
-         return;
        }
-       toBeScheduled.push(...audioSatisfyResult.buffers);
-     }
-     for (const buffer of toBeScheduled) {
-       onAudioChunk({
-         getIsPlaying,
-         buffer,
-         playbackRate,
-         scheduleAudioNode
-       });
      }
    };
    const resumeScheduledAudioChunks = ({
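Two behavior changes sit in the pair of hunks above. The prefetch horizon: previously audio was queued `2 × (1/fps) × playbackRate` ahead, which at slow playback rates shrinks to a single frame or less; the new `1/fps × Math.max(1, playbackRate) × 3` keeps at least three frame-durations queued regardless of rate. And the lookahead seek now passes an `allow-wait` object whose `waitCallback` parks playback via `bufferState.delayPlayback()` while a chunk decodes, replacing the old bare `true`. The horizon arithmetic, taking fps = 30 as an example:

```ts
const fps = 30;
const newHorizon = (playbackRate: number) =>
  (1 / fps) * Math.max(1, playbackRate) * 3;

newHorizon(0.5); // 0.100s ahead (old formula: 2 * (1/30) * 0.5 = 0.033s, one frame)
newHorizon(1);   // 0.100s ahead (old: 0.067s)
newHorizon(2);   // 0.200s ahead (old: 0.133s)
```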
@@ -476,7 +514,7 @@ var drawPreviewOverlay = ({
      `Audio time: ${(audioTime - audioSyncAnchor).toFixed(3)}s`
    ];
    if (audioIteratorManager2) {
-     const queuedPeriod = audioIteratorManager2.getAudioBufferIterator()?.getQueuedPeriod([]);
+     const queuedPeriod = audioIteratorManager2.getAudioBufferIterator()?.getQueuedPeriod();
      const numberOfChunksAfterResuming = audioIteratorManager2?.getAudioBufferIterator()?.getNumberOfChunksAfterResuming();
      if (queuedPeriod) {
        lines.push(`Audio queued until: ${(queuedPeriod.until - (audioTime - audioSyncAnchor)).toFixed(3)}s`);
@@ -963,7 +1001,8 @@ class MediaPlayer {
      fps: this.fps,
      playbackRate: this.playbackRate * this.globalPlaybackRate,
      getIsPlaying: () => this.playing,
-     scheduleAudioNode: this.scheduleAudioNode
+     scheduleAudioNode: this.scheduleAudioNode,
+     bufferState: this.bufferState
    });
  }
  async play(time) {
@@ -1478,7 +1517,7 @@ var AudioForPreviewAssertedShowing = ({
      audioPlayer.pause();
    }
  }, [isPlayerBuffering, logLevel, playing]);
- useEffect2(() => {
+ useLayoutEffect(() => {
    const audioPlayer = mediaPlayerRef.current;
    if (!audioPlayer || !mediaPlayerReady)
      return;
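The effect that forwards the current frame to the player moves from `useEffect2` (an alias of React's `useEffect`) to `useLayoutEffect`. `useEffect` callbacks run after the browser paints, so a seek issued there can lag the frame Remotion just committed by one paint; `useLayoutEffect` runs synchronously after the DOM mutation, before paint. A sketch of the idea — the hook shape is illustrative, `seekTo` matches the `MediaPlayer` declaration later in this diff:

```tsx
import { useLayoutEffect } from 'react';

// Illustrative only: keep the player's clock in lockstep with the frame
// that React just committed, before the browser paints it.
const useSyncedSeek = (
  player: { seekTo: (time: number) => Promise<void> } | null,
  frame: number,
  fps: number,
) => {
  useLayoutEffect(() => {
    player?.seekTo(frame / fps);
  }, [player, frame, fps]);
};
```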
@@ -1643,7 +1682,7 @@ var AudioForPreview = ({
  };

  // src/audio/audio-for-rendering.tsx
- import { useContext as useContext3, useLayoutEffect, useMemo as useMemo3, useState as useState3 } from "react";
+ import { useContext as useContext3, useLayoutEffect as useLayoutEffect2, useMemo as useMemo3, useState as useState3 } from "react";
  import {
    cancelRender as cancelRender2,
    Html5Audio,
@@ -2681,33 +2720,28 @@ var getMaxVideoCacheSize = (logLevel) => {
    return cachedMaxCacheSize;
  };

- // src/convert-audiodata/combine-audiodata.ts
- var combineAudioDataAndClosePrevious = (audioDataArray) => {
-   let numberOfFrames = 0;
-   const { timestamp } = audioDataArray[0];
-   for (const audioData of audioDataArray) {
-     numberOfFrames += audioData.numberOfFrames;
+ // src/convert-audiodata/convert-audiodata.ts
+ var FORMAT = "s16";
+ var fixFloatingPoint2 = (value) => {
+   if (value % 1 < 0.0000001) {
+     return Math.floor(value);
    }
-   const arr = new Int16Array(numberOfFrames * TARGET_NUMBER_OF_CHANNELS);
-   let offset = 0;
-   for (const audioData of audioDataArray) {
-     arr.set(audioData.data, offset);
-     offset += audioData.data.length;
+   if (value % 1 > 0.9999999) {
+     return Math.ceil(value);
    }
-   return {
-     data: arr,
-     numberOfFrames,
-     timestamp
-   };
+   return value;
+ };
+ var ceilButNotIfFloatingPointIssue = (value) => {
+   const fixed = fixFloatingPoint2(value);
+   return Math.ceil(fixed);
  };
-
- // src/convert-audiodata/convert-audiodata.ts
- var FORMAT = "s16";
  var convertAudioData = ({
    audioData,
    trimStartInSeconds,
    trimEndInSeconds,
-   playbackRate
+   playbackRate,
+   audioDataTimestamp,
+   isLast
  }) => {
    const {
      numberOfChannels: srcNumberOfChannels,
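The two helpers introduced above guard the frame-count math that follows: `fixFloatingPoint2` snaps a value sitting within 1e-7 of an integer onto that integer, and `ceilButNotIfFloatingPointIssue` applies the snap before ceiling so IEEE-754 noise cannot inflate a count by one sample. A worked example of the failure mode, with the helpers restated so the snippet runs standalone:

```ts
// Restated from the bundle above for a self-contained example.
const fixFloatingPoint = (value: number): number => {
  if (value % 1 < 0.0000001) return Math.floor(value);
  if (value % 1 > 0.9999999) return Math.ceil(value);
  return value;
};
const ceilButNotIfFloatingPointIssue = (value: number): number =>
  Math.ceil(fixFloatingPoint(value));

// A trim offset accumulated as 0.1 + 0.2 seconds, at 44100 Hz:
const frames = (0.1 + 0.2) * 44100;     // 13230.000000000002, not 13230
Math.ceil(frames);                      // 13231 - one spurious extra frame
ceilButNotIfFloatingPointIssue(frames); // 13230 - snapped to the integer first
```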
@@ -2715,10 +2749,10 @@ var convertAudioData = ({
    numberOfFrames
  } = audioData;
  const ratio = currentSampleRate / TARGET_SAMPLE_RATE;
- const frameOffset = Math.floor(trimStartInSeconds * audioData.sampleRate);
- const unroundedFrameCount = numberOfFrames - (trimEndInSeconds + trimStartInSeconds) * audioData.sampleRate;
- const frameCount = Math.ceil(unroundedFrameCount);
- const newNumberOfFrames = Math.ceil(unroundedFrameCount / ratio / playbackRate);
+ const frameOffset = Math.floor(fixFloatingPoint2(trimStartInSeconds * audioData.sampleRate));
+ const unroundedFrameCount = numberOfFrames - trimEndInSeconds * audioData.sampleRate - frameOffset;
+ const frameCount = isLast ? ceilButNotIfFloatingPointIssue(unroundedFrameCount) : Math.round(unroundedFrameCount);
+ const newNumberOfFrames = isLast ? ceilButNotIfFloatingPointIssue(unroundedFrameCount / ratio / playbackRate) : Math.round(unroundedFrameCount / ratio / playbackRate);
  if (newNumberOfFrames === 0) {
    throw new Error("Cannot resample - the given sample rate would result in less than 1 sample");
  }
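The rounding policy changes alongside: the old code applied `Math.ceil` to every chunk's frame count, a consistently upward bias; the new code uses `Math.round` for intermediate chunks (zero-mean error) and reserves the ceil, in its snap-protected form, for the final chunk (`isLast`) so no trailing samples are lost. With a 44100 Hz source resampled to a 48000 Hz target:

```ts
const ratio = 44100 / 48000;    // 0.91875, i.e. currentSampleRate / TARGET_SAMPLE_RATE
const outFrames = 1024 / ratio; // 1114.5578... output frames per 1024-frame chunk

Math.ceil(outFrames);  // 1115: ~0.44 phantom frames added on EVERY chunk
Math.round(outFrames); // 1115 here too, but unbiased across many chunks
```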
@@ -2731,11 +2765,13 @@ var convertAudioData = ({
  });
  const data = new Int16Array(newNumberOfFrames * TARGET_NUMBER_OF_CHANNELS);
  const chunkSize = frameCount / newNumberOfFrames;
+ const timestampOffsetMicroseconds = frameOffset / audioData.sampleRate * 1e6;
  if (newNumberOfFrames === frameCount && TARGET_NUMBER_OF_CHANNELS === srcNumberOfChannels && playbackRate === 1) {
    return {
      data: srcChannels,
      numberOfFrames: newNumberOfFrames,
-     timestamp: audioData.timestamp + frameOffset / audioData.sampleRate * 1e6
+     timestamp: audioDataTimestamp * 1e6 + fixFloatingPoint2(timestampOffsetMicroseconds),
+     durationInMicroSeconds: fixFloatingPoint2(newNumberOfFrames / TARGET_SAMPLE_RATE * 1e6)
    };
  }
  resampleAudioData({
@@ -2748,11 +2784,35 @@ var convertAudioData = ({
  const newAudioData = {
    data,
    numberOfFrames: newNumberOfFrames,
-   timestamp: audioData.timestamp + frameOffset / audioData.sampleRate * 1e6
+   timestamp: audioDataTimestamp * 1e6 + fixFloatingPoint2(timestampOffsetMicroseconds),
+   durationInMicroSeconds: fixFloatingPoint2(newNumberOfFrames / TARGET_SAMPLE_RATE * 1e6)
  };
  return newAudioData;
};

+ // src/convert-audiodata/combine-audiodata.ts
+ var combineAudioDataAndClosePrevious = (audioDataArray) => {
+   let numberOfFrames = 0;
+   let durationInMicroSeconds = 0;
+   const { timestamp } = audioDataArray[0];
+   for (const audioData of audioDataArray) {
+     numberOfFrames += audioData.numberOfFrames;
+     durationInMicroSeconds += audioData.durationInMicroSeconds;
+   }
+   const arr = new Int16Array(numberOfFrames * TARGET_NUMBER_OF_CHANNELS);
+   let offset = 0;
+   for (const audioData of audioDataArray) {
+     arr.set(audioData.data, offset);
+     offset += audioData.data.length;
+   }
+   return {
+     data: arr,
+     numberOfFrames,
+     timestamp: fixFloatingPoint2(timestamp),
+     durationInMicroSeconds: fixFloatingPoint2(durationInMicroSeconds)
+   };
+ };
+
  // src/get-sink.ts
  import { Internals as Internals12 } from "remotion";
  var sinkPromises = {};
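In the restored `combineAudioDataAndClosePrevious`, note that `offset` advances by `audioData.data.length` — a count of interleaved s16 samples (frames × channels), not frames — which is why the output is sized `numberOfFrames * TARGET_NUMBER_OF_CHANNELS`. A toy run of the same concatenation, assuming two channels:

```ts
const TARGET_NUMBER_OF_CHANNELS = 2; // assumption for the toy example

const a = new Int16Array([1, 1, 2, 2]); // 2 frames, interleaved L/R
const b = new Int16Array([3, 3]);       // 1 frame
const frames = (a.length + b.length) / TARGET_NUMBER_OF_CHANNELS; // 3 frames
const out = new Int16Array(frames * TARGET_NUMBER_OF_CHANNELS);
out.set(a, 0);        // offsets are in samples, not frames
out.set(b, a.length); // -> Int16Array [1, 1, 2, 2, 3, 3]
```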
@@ -2852,7 +2912,9 @@ var extractAudioInternal = async ({
    audioData: audioDataRaw,
    trimStartInSeconds,
    trimEndInSeconds,
-   playbackRate
+   playbackRate,
+   audioDataTimestamp: sample.timestamp,
+   isLast: isLastSample
  });
  audioDataRaw.close();
  if (audioData.numberOfFrames === 0) {
@@ -3273,7 +3335,7 @@ var AudioForRendering = ({
    sequenceContext?.relativeFrom,
    sequenceContext?.durationInFrames
  ]);
- useLayoutEffect(() => {
+ useLayoutEffect2(() => {
    const timestamp = frame / fps;
    const durationInSeconds = 1 / fps;
    if (replaceWithHtml5Audio) {
@@ -3458,7 +3520,7 @@ import { Internals as Internals17, useRemotionEnvironment as useRemotionEnvironm
  import {
    useContext as useContext4,
    useEffect as useEffect3,
-   useLayoutEffect as useLayoutEffect2,
+   useLayoutEffect as useLayoutEffect3,
    useMemo as useMemo4,
    useRef as useRef2,
    useState as useState4
@@ -3679,7 +3741,7 @@ var VideoForPreviewAssertedShowing = ({
      mediaPlayer.pause();
    }
  }, [isPlayerBuffering, playing, logLevel, mediaPlayerReady]);
- useLayoutEffect2(() => {
+ useLayoutEffect3(() => {
    const mediaPlayer = mediaPlayerRef.current;
    if (!mediaPlayer || !mediaPlayerReady)
      return;
@@ -3838,7 +3900,7 @@ var VideoForPreview = (props) => {
  // src/video/video-for-rendering.tsx
  import {
    useContext as useContext5,
-   useLayoutEffect as useLayoutEffect3,
+   useLayoutEffect as useLayoutEffect4,
    useMemo as useMemo5,
    useRef as useRef3,
    useState as useState5
@@ -3897,7 +3959,7 @@ var VideoForRendering = ({
  const [replaceWithOffthreadVideo, setReplaceWithOffthreadVideo] = useState5(false);
  const audioEnabled = Internals16.useAudioEnabled();
  const videoEnabled = Internals16.useVideoEnabled();
- useLayoutEffect3(() => {
+ useLayoutEffect4(() => {
    if (!canvasRef.current) {
      return;
    }
@@ -4091,15 +4153,15 @@ var VideoForRendering = ({
    volume: volumeProp,
    id,
    onError: fallbackOffthreadVideoProps?.onError,
-   toneFrequency: fallbackOffthreadVideoProps?.toneFrequency ?? 1,
+   toneFrequency,
    showInTimeline: false,
    crossOrigin: undefined,
    onAutoPlayError: () => {
      return;
    },
    pauseWhenBuffering: false,
-   trimAfter: undefined,
-   trimBefore: undefined,
+   trimAfter: trimAfterValue,
+   trimBefore: trimBeforeValue,
    useWebAudioApi: false,
    startFrom: undefined,
    endAt: undefined,
@@ -245,6 +245,7 @@ export class MediaPlayer {
      playbackRate: this.playbackRate * this.globalPlaybackRate,
      getIsPlaying: () => this.playing,
      scheduleAudioNode: this.scheduleAudioNode,
+     bufferState: this.bufferState,
    });
  }
  async play(time) {
@@ -1,7 +1,6 @@
  import type { LogLevel, LoopVolumeCurveBehavior, OnVideoFrame, VolumeProp } from 'remotion';
  export type FallbackOffthreadVideoProps = {
    acceptableTimeShiftInSeconds?: number;
-   toneFrequency?: number;
    transparent?: boolean;
    toneMapped?: boolean;
    onError?: (err: Error) => void;
@@ -213,9 +213,9 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
      .join(' ');
  }, [className]);
  if (replaceWithOffthreadVideo) {
-   const fallback = (_jsx(Internals.InnerOffthreadVideo, { src: src, playbackRate: playbackRate ?? 1, muted: muted ?? false, acceptableTimeShiftInSeconds: fallbackOffthreadVideoProps?.acceptableTimeShiftInSeconds, loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? 'repeat', delayRenderRetries: delayRenderRetries ?? undefined, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined, style: style, allowAmplificationDuringRender: true, transparent: fallbackOffthreadVideoProps?.transparent ?? true, toneMapped: fallbackOffthreadVideoProps?.toneMapped ?? true, audioStreamIndex: audioStreamIndex ?? 0, name: name, className: className, onVideoFrame: onVideoFrame, volume: volumeProp, id: id, onError: fallbackOffthreadVideoProps?.onError, toneFrequency: fallbackOffthreadVideoProps?.toneFrequency ?? 1,
+   const fallback = (_jsx(Internals.InnerOffthreadVideo, { src: src, playbackRate: playbackRate ?? 1, muted: muted ?? false, acceptableTimeShiftInSeconds: fallbackOffthreadVideoProps?.acceptableTimeShiftInSeconds, loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? 'repeat', delayRenderRetries: delayRenderRetries ?? undefined, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined, style: style, allowAmplificationDuringRender: true, transparent: fallbackOffthreadVideoProps?.transparent ?? true, toneMapped: fallbackOffthreadVideoProps?.toneMapped ?? true, audioStreamIndex: audioStreamIndex ?? 0, name: name, className: className, onVideoFrame: onVideoFrame, volume: volumeProp, id: id, onError: fallbackOffthreadVideoProps?.onError, toneFrequency: toneFrequency,
    // these shouldn't matter during rendering / should not appear at all
-   showInTimeline: false, crossOrigin: undefined, onAutoPlayError: () => undefined, pauseWhenBuffering: false, trimAfter: undefined, trimBefore: undefined, useWebAudioApi: false, startFrom: undefined, endAt: undefined, stack: stack, _remotionInternalNativeLoopPassed: false }));
+   showInTimeline: false, crossOrigin: undefined, onAutoPlayError: () => undefined, pauseWhenBuffering: false, trimAfter: trimAfterValue, trimBefore: trimBeforeValue, useWebAudioApi: false, startFrom: undefined, endAt: undefined, stack: stack, _remotionInternalNativeLoopPassed: false }));
    if (loop) {
      if (!replaceWithOffthreadVideo.durationInSeconds) {
        cancelRender(new Error(`Cannot render video ${src}: @remotion/media was unable to render, and fell back to <OffthreadVideo>. Also, "loop" was set, but <OffthreadVideo> does not support looping and @remotion/media could also not determine the duration of the video.`));
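API consequence of the three hunks above: `toneFrequency` leaves `FallbackOffthreadVideoProps`, and the `<OffthreadVideo>` fallback now inherits `toneFrequency`, `trimBefore`, and `trimAfter` from the component's own props instead of hardcoded defaults. A hedged usage sketch (prop placement inferred from this diff, not from documentation):

```tsx
import React from 'react';
import { Video } from '@remotion/media';

// Previously, toneFrequency could only reach the fallback via
// fallbackOffthreadVideoProps; now it is passed directly on the component,
// and the trim values survive the fallback path as well.
export const Clip: React.FC = () => (
  <Video src="video.mp4" toneFrequency={0.8} trimBefore={30} trimAfter={120} />
);
```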
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@remotion/media",
-   "version": "4.0.370",
+   "version": "4.0.371",
    "main": "dist/index.js",
    "types": "dist/index.d.ts",
    "module": "dist/esm/index.mjs",
@@ -21,8 +21,8 @@
    "make": "tsc -d && bun --env-file=../.env.bundle bundle.ts"
  },
  "dependencies": {
-   "mediabunny": "1.24.2",
-   "remotion": "4.0.370",
+   "mediabunny": "1.24.3",
+   "remotion": "4.0.371",
    "webdriverio": "9.19.2"
  },
  "peerDependencies": {
@@ -30,7 +30,7 @@
    "react-dom": ">=16.8.0"
  },
  "devDependencies": {
-   "@remotion/eslint-config-internal": "4.0.370",
+   "@remotion/eslint-config-internal": "4.0.371",
    "@vitest/browser": "^3.2.4",
    "eslint": "9.19.0",
    "react": "19.0.0",
@@ -1,11 +0,0 @@
- import type { AudioBufferSink } from 'mediabunny';
- export declare const HEALTHY_BUFFER_THRESHOLD_SECONDS = 1;
- export declare const makeAudioIterator: (audioSink: AudioBufferSink, startFromSecond: number) => {
-   iterator: AsyncGenerator<import("mediabunny").WrappedAudioBuffer, void, unknown>;
-   destroy: () => void;
-   isReadyToPlay: () => boolean;
-   setAudioIteratorStarted: (started: boolean) => void;
-   getNext: () => Promise<IteratorResult<import("mediabunny").WrappedAudioBuffer, void>>;
-   setAudioBufferHealth: (health: number) => void;
- };
- export type AudioIterator = ReturnType<typeof makeAudioIterator>;
@@ -1,24 +0,0 @@
- export const HEALTHY_BUFFER_THRESHOLD_SECONDS = 1;
- export const makeAudioIterator = (audioSink, startFromSecond) => {
-   const iterator = audioSink.buffers(startFromSecond);
-   let audioIteratorStarted = false;
-   let audioBufferHealth = 0;
-   return {
-     iterator,
-     destroy: () => {
-       iterator.return().catch(() => undefined);
-     },
-     isReadyToPlay: () => {
-       return audioIteratorStarted && audioBufferHealth > 0;
-     },
-     setAudioIteratorStarted: (started) => {
-       audioIteratorStarted = started;
-     },
-     getNext: () => {
-       return iterator.next();
-     },
-     setAudioBufferHealth: (health) => {
-       audioBufferHealth = health;
-     },
-   };
- };
@@ -1,98 +0,0 @@
- import type { LogLevel } from 'remotion';
- export declare const SEEK_THRESHOLD = 0.05;
- export type MediaPlayerInitResult = {
-   type: 'success';
-   durationInSeconds: number;
- } | {
-   type: 'unknown-container-format';
- } | {
-   type: 'cannot-decode';
- } | {
-   type: 'network-error';
- } | {
-   type: 'no-tracks';
- };
- export declare class MediaPlayer {
-   private canvas;
-   private context;
-   private src;
-   private logLevel;
-   private playbackRate;
-   private audioStreamIndex;
-   private canvasSink;
-   private videoFrameIterator;
-   private nextFrame;
-   private audioSink;
-   private audioBufferIterator;
-   private queuedAudioNodes;
-   private gainNode;
-   private currentVolume;
-   private sharedAudioContext;
-   private audioSyncAnchor;
-   private playing;
-   private muted;
-   private loop;
-   private fps;
-   private trimBefore;
-   private trimAfter;
-   private animationFrameId;
-   private videoAsyncId;
-   private audioAsyncId;
-   private initialized;
-   private totalDuration;
-   private isBuffering;
-   private onBufferingChangeCallback?;
-   private audioBufferHealth;
-   private audioIteratorStarted;
-   private readonly HEALTHY_BUFER_THRESHOLD_SECONDS;
-   private mediaEnded;
-   private onVideoFrameCallback?;
-   constructor({ canvas, src, logLevel, sharedAudioContext, loop, trimBefore, trimAfter, playbackRate, audioStreamIndex, fps, }: {
-     canvas: HTMLCanvasElement | null;
-     src: string;
-     logLevel: LogLevel;
-     sharedAudioContext: AudioContext;
-     loop: boolean;
-     trimBefore: number | undefined;
-     trimAfter: number | undefined;
-     playbackRate: number;
-     audioStreamIndex: number;
-     fps: number;
-   });
-   private input;
-   private isReady;
-   private hasAudio;
-   private isCurrentlyBuffering;
-   initialize(startTimeUnresolved: number): Promise<MediaPlayerInitResult>;
-   private clearCanvas;
-   private cleanupAudioQueue;
-   private cleanAudioIteratorAndNodes;
-   seekTo(time: number): Promise<void>;
-   play(): Promise<void>;
-   pause(): void;
-   setMuted(muted: boolean): void;
-   setVolume(volume: number): void;
-   setPlaybackRate(rate: number): void;
-   setFps(fps: number): void;
-   setLoop(loop: boolean): void;
-   dispose(): void;
-   private getPlaybackTime;
-   private scheduleAudioChunk;
-   onBufferingChange(callback: (isBuffering: boolean) => void): () => void;
-   onVideoFrame(callback: (frame: CanvasImageSource) => void): () => void;
-   private canRenderVideo;
-   private startRenderLoop;
-   private stopRenderLoop;
-   private render;
-   private shouldRenderFrame;
-   private drawCurrentFrame;
-   private startAudioIterator;
-   private startVideoIterator;
-   private updateNextFrame;
-   private bufferingStartedAtMs;
-   private minBufferingTimeoutMs;
-   private setBufferingState;
-   private maybeResumeFromBuffering;
-   private maybeForceResumeFromBuffering;
-   private runAudioIterator;
- }