@remotion/media 4.0.369 → 4.0.371

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,7 +2,14 @@
 import { Internals as Internals14, useRemotionEnvironment as useRemotionEnvironment2 } from "remotion";

 // src/audio/audio-for-preview.tsx
-import { useContext as useContext2, useEffect as useEffect2, useMemo as useMemo2, useRef, useState as useState2 } from "react";
+import {
+  useContext as useContext2,
+  useEffect as useEffect2,
+  useLayoutEffect,
+  useMemo as useMemo2,
+  useRef,
+  useState as useState2
+} from "react";
 import {
   Internals as Internals6,
   Audio as RemotionAudio,
@@ -48,13 +55,30 @@ import { ALL_FORMATS, Input, UrlSource } from "mediabunny";
 import { Internals as Internals3 } from "remotion";

 // src/audio-iterator-manager.ts
-import { AudioBufferSink, InputDisposedError } from "mediabunny";
+import { AudioBufferSink } from "mediabunny";

 // src/helpers/round-to-4-digits.ts
 var roundTo4Digits = (timestamp) => {
   return Math.round(timestamp * 1000) / 1000;
 };

+// src/audio/allow-wait.ts
+var allowWaitRoutine = async (next, waitFn) => {
+  const result = await Promise.race([
+    next,
+    new Promise((resolve) => {
+      Promise.resolve().then(() => resolve());
+    })
+  ]);
+  if (!result) {
+    const unblock = waitFn.waitCallback();
+    const newRes = await next;
+    unblock();
+    return newRes;
+  }
+  return result;
+};
+
 // src/audio/audio-preview-iterator.ts
 var makeAudioIterator = (audioSink, startFromSecond) => {
   let destroyed = false;
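The new allowWaitRoutine above is a microtask probe: it races the iterator's next() promise against an immediately-scheduled microtask, so result is undefined whenever the next audio buffer is not already available synchronously, and only then does it call waitFn.waitCallback() to block playback while awaiting the real value. A minimal runnable sketch of the pattern; the makeSlowNext helper and its timing are illustrative scaffolding, not part of the package:

// Probe whether a promise settles within one microtask; otherwise yields undefined.
const probeWithinMicrotask = (next) => Promise.race([
  next,
  new Promise((resolve) => {
    Promise.resolve().then(() => resolve(undefined));
  })
]);

// Hypothetical stand-in for iterator.next() that takes 50ms to deliver a buffer.
const makeSlowNext = () => new Promise((resolve) => {
  setTimeout(() => resolve({ value: "audio-buffer", done: false }), 50);
});

const demo = async () => {
  const next = makeSlowNext();
  const probed = await probeWithinMicrotask(next);
  if (!probed) {
    // In @remotion/media, waitFn.waitCallback() would pause playback here,
    // and the unblock() it returns would resume playback once `next` settles.
    console.log("not ready synchronously; waiting...");
    console.log("eventually got:", await next);
  }
};
demo();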
@@ -71,7 +95,7 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
   let iteratorEnded = false;
   const getNextOrNullIfNotAvailable = async (allowWait) => {
     const next = iterator.next();
-    const result = allowWait ? await next : await Promise.race([
+    const result = allowWait ? await allowWaitRoutine(next, allowWait) : await Promise.race([
       next,
       new Promise((resolve) => {
         Promise.resolve().then(() => resolve());
@@ -101,7 +125,7 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
       buffer: result.value ?? null
     };
   };
-  const tryToSatisfySeek = async (time, allowWait) => {
+  const tryToSatisfySeek = async (time, allowWait, onBufferScheduled) => {
     if (lastReturnedBuffer) {
       const bufferTimestamp = roundTo4Digits(lastReturnedBuffer.timestamp);
       const bufferEndTimestamp = roundTo4Digits(lastReturnedBuffer.timestamp + lastReturnedBuffer.duration);
@@ -112,19 +136,20 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
         };
       }
       if (roundTo4Digits(time) <= bufferEndTimestamp) {
+        onBufferScheduled(lastReturnedBuffer);
         return {
-          type: "satisfied",
-          buffers: [lastReturnedBuffer]
+          type: "satisfied"
         };
       }
     }
     if (iteratorEnded) {
+      if (lastReturnedBuffer) {
+        onBufferScheduled(lastReturnedBuffer);
+      }
       return {
-        type: "satisfied",
-        buffers: lastReturnedBuffer ? [lastReturnedBuffer] : []
+        type: "satisfied"
       };
     }
-    const toBeReturned = [];
     while (true) {
       const buffer = await getNextOrNullIfNotAvailable(allowWait);
       if (buffer.type === "need-to-wait-for-it") {
@@ -136,21 +161,23 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
       if (buffer.type === "got-buffer-or-end") {
         if (buffer.buffer === null) {
           iteratorEnded = true;
+          if (lastReturnedBuffer) {
+            onBufferScheduled(lastReturnedBuffer);
+          }
           return {
-            type: "satisfied",
-            buffers: lastReturnedBuffer ? [lastReturnedBuffer] : []
+            type: "satisfied"
           };
         }
         const bufferTimestamp = roundTo4Digits(buffer.buffer.timestamp);
         const bufferEndTimestamp = roundTo4Digits(buffer.buffer.timestamp + buffer.buffer.duration);
         const timestamp = roundTo4Digits(time);
         if (bufferTimestamp <= timestamp && bufferEndTimestamp > timestamp) {
+          onBufferScheduled(buffer.buffer);
           return {
-            type: "satisfied",
-            buffers: [...toBeReturned, buffer.buffer]
+            type: "satisfied"
           };
         }
-        toBeReturned.push(buffer.buffer);
+        onBufferScheduled(buffer.buffer);
         continue;
       }
       throw new Error("Unreachable");
@@ -211,13 +238,9 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
       audioChunksForAfterResuming.length = 0;
       return chunks;
     },
-    getQueuedPeriod: (pendingBuffers) => {
+    getQueuedPeriod: () => {
       let until = -Infinity;
       let from = Infinity;
-      for (const buffer of pendingBuffers) {
-        until = Math.max(until, buffer.timestamp + buffer.duration);
-        from = Math.min(from, buffer.timestamp);
-      }
       for (const node of queuedAudioNodes) {
         until = Math.max(until, node.timestamp + node.buffer.duration);
         from = Math.min(from, node.timestamp);
@@ -315,15 +338,7 @@ var audioIteratorManager = ({
     audioIteratorsCreated++;
     audioBufferIterator = iterator;
     for (let i = 0;i < 3; i++) {
-      const result = await iterator.getNext().catch((err) => {
-        if (iterator.isDestroyed() || err instanceof InputDisposedError) {} else {
-          throw err;
-        }
-      });
-      if (!result) {
-        delayHandle.unblock();
-        return;
-      }
+      const result = await iterator.getNext();
       if (iterator.isDestroyed()) {
         delayHandle.unblock();
         return;
@@ -357,7 +372,8 @@ var audioIteratorManager = ({
     fps,
     playbackRate,
     getIsPlaying,
-    scheduleAudioNode
+    scheduleAudioNode,
+    bufferState
   }) => {
     if (!audioBufferIterator) {
       await startAudioIterator({
@@ -369,10 +385,18 @@ var audioIteratorManager = ({
       });
       return;
     }
-    const currentTimeIsAlreadyQueued = isAlreadyQueued(newTime, audioBufferIterator.getQueuedPeriod([]));
-    const toBeScheduled = [];
+    const currentTimeIsAlreadyQueued = isAlreadyQueued(newTime, audioBufferIterator.getQueuedPeriod());
     if (!currentTimeIsAlreadyQueued) {
-      const audioSatisfyResult = await audioBufferIterator.tryToSatisfySeek(newTime, false);
+      const audioSatisfyResult = await audioBufferIterator.tryToSatisfySeek(newTime, null, (buffer) => {
+        if (!nonce.isStale()) {
+          onAudioChunk({
+            getIsPlaying,
+            buffer,
+            playbackRate,
+            scheduleAudioNode
+          });
+        }
+      });
       if (nonce.isStale()) {
         return;
       }
@@ -386,12 +410,28 @@ var audioIteratorManager = ({
         });
         return;
       }
-      toBeScheduled.push(...audioSatisfyResult.buffers);
     }
-    const nextTime = newTime + 1 / fps * playbackRate + 1 / fps * playbackRate;
-    const nextIsAlreadyQueued = isAlreadyQueued(nextTime, audioBufferIterator.getQueuedPeriod(toBeScheduled));
+    const nextTime = newTime + 1 / fps * Math.max(1, playbackRate) * 3;
+    const nextIsAlreadyQueued = isAlreadyQueued(nextTime, audioBufferIterator.getQueuedPeriod());
     if (!nextIsAlreadyQueued) {
-      const audioSatisfyResult = await audioBufferIterator.tryToSatisfySeek(nextTime, true);
+      const audioSatisfyResult = await audioBufferIterator.tryToSatisfySeek(nextTime, {
+        type: "allow-wait",
+        waitCallback: () => {
+          const handle = bufferState.delayPlayback();
+          return () => {
+            handle.unblock();
+          };
+        }
+      }, (buffer) => {
+        if (!nonce.isStale()) {
+          onAudioChunk({
+            getIsPlaying,
+            buffer,
+            playbackRate,
+            scheduleAudioNode
+          });
+        }
+      });
       if (nonce.isStale()) {
         return;
       }
@@ -403,17 +443,7 @@ var audioIteratorManager = ({
           getIsPlaying,
           scheduleAudioNode
         });
-        return;
       }
-      toBeScheduled.push(...audioSatisfyResult.buffers);
-    }
-    for (const buffer of toBeScheduled) {
-      onAudioChunk({
-        getIsPlaying,
-        buffer,
-        playbackRate,
-        scheduleAudioNode
-      });
     }
   };
   const resumeScheduledAudioChunks = ({
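Two behavioral changes land in the hunks above: buffers are now handed to onAudioChunk incrementally through the new onBufferScheduled callback (replacing the removed toBeScheduled array), and when waiting is allowed, the iterator registers a bufferState.delayPlayback() handle so the player shows buffering instead of stalling silently. The prefetch target nextTime also changes: it now looks three frame-durations ahead and clamps the playback rate at a minimum of 1, so slowed-down playback still buffers full frames. A worked example of the lookahead arithmetic; the fps and time values are illustrative, not from the package:

const fps = 30;
const newTime = 1.0;

// 4.0.369: two frame-durations, scaled down by slow playback rates.
const oldLookahead = (playbackRate) => newTime + 1 / fps * playbackRate + 1 / fps * playbackRate;
// 4.0.371: three frame-durations, never scaled below real-time.
const newLookahead = (playbackRate) => newTime + 1 / fps * Math.max(1, playbackRate) * 3;

console.log(oldLookahead(0.5)); // ~1.033 - only one frame-duration of audio ahead
console.log(newLookahead(0.5)); // 1.1    - a full three frames ahead
console.log(newLookahead(2));   // 1.2    - window widens for fast playback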
@@ -484,7 +514,7 @@ var drawPreviewOverlay = ({
     `Audio time: ${(audioTime - audioSyncAnchor).toFixed(3)}s`
   ];
   if (audioIteratorManager2) {
-    const queuedPeriod = audioIteratorManager2.getAudioBufferIterator()?.getQueuedPeriod([]);
+    const queuedPeriod = audioIteratorManager2.getAudioBufferIterator()?.getQueuedPeriod();
     const numberOfChunksAfterResuming = audioIteratorManager2?.getAudioBufferIterator()?.getNumberOfChunksAfterResuming();
     if (queuedPeriod) {
       lines.push(`Audio queued until: ${(queuedPeriod.until - (audioTime - audioSyncAnchor)).toFixed(3)}s`);
@@ -971,7 +1001,8 @@ class MediaPlayer {
       fps: this.fps,
       playbackRate: this.playbackRate * this.globalPlaybackRate,
       getIsPlaying: () => this.playing,
-      scheduleAudioNode: this.scheduleAudioNode
+      scheduleAudioNode: this.scheduleAudioNode,
+      bufferState: this.bufferState
     });
   }
   async play(time) {
@@ -1486,7 +1517,7 @@ var AudioForPreviewAssertedShowing = ({
       audioPlayer.pause();
     }
   }, [isPlayerBuffering, logLevel, playing]);
-  useEffect2(() => {
+  useLayoutEffect(() => {
    const audioPlayer = mediaPlayerRef.current;
    if (!audioPlayer || !mediaPlayerReady)
      return;
@@ -1651,7 +1682,7 @@ var AudioForPreview = ({
 };

 // src/audio/audio-for-rendering.tsx
-import { useContext as useContext3, useLayoutEffect, useMemo as useMemo3, useState as useState3 } from "react";
+import { useContext as useContext3, useLayoutEffect as useLayoutEffect2, useMemo as useMemo3, useState as useState3 } from "react";
 import {
   cancelRender as cancelRender2,
   Html5Audio,
@@ -2689,33 +2720,28 @@ var getMaxVideoCacheSize = (logLevel) => {
   return cachedMaxCacheSize;
 };

-// src/convert-audiodata/combine-audiodata.ts
-var combineAudioDataAndClosePrevious = (audioDataArray) => {
-  let numberOfFrames = 0;
-  const { timestamp } = audioDataArray[0];
-  for (const audioData of audioDataArray) {
-    numberOfFrames += audioData.numberOfFrames;
+// src/convert-audiodata/convert-audiodata.ts
+var FORMAT = "s16";
+var fixFloatingPoint2 = (value) => {
+  if (value % 1 < 0.0000001) {
+    return Math.floor(value);
   }
-  const arr = new Int16Array(numberOfFrames * TARGET_NUMBER_OF_CHANNELS);
-  let offset = 0;
-  for (const audioData of audioDataArray) {
-    arr.set(audioData.data, offset);
-    offset += audioData.data.length;
+  if (value % 1 > 0.9999999) {
+    return Math.ceil(value);
   }
-  return {
-    data: arr,
-    numberOfFrames,
-    timestamp
-  };
+  return value;
+};
+var ceilButNotIfFloatingPointIssue = (value) => {
+  const fixed = fixFloatingPoint2(value);
+  return Math.ceil(fixed);
 };
-
-// src/convert-audiodata/convert-audiodata.ts
-var FORMAT = "s16";
 var convertAudioData = ({
   audioData,
   trimStartInSeconds,
   trimEndInSeconds,
-  playbackRate
+  playbackRate,
+  audioDataTimestamp,
+  isLast
 }) => {
   const {
     numberOfChannels: srcNumberOfChannels,
@@ -2723,10 +2749,10 @@ var convertAudioData = ({
     numberOfFrames
   } = audioData;
   const ratio = currentSampleRate / TARGET_SAMPLE_RATE;
-  const frameOffset = Math.floor(trimStartInSeconds * audioData.sampleRate);
-  const unroundedFrameCount = numberOfFrames - (trimEndInSeconds + trimStartInSeconds) * audioData.sampleRate;
-  const frameCount = Math.ceil(unroundedFrameCount);
-  const newNumberOfFrames = Math.ceil(unroundedFrameCount / ratio / playbackRate);
+  const frameOffset = Math.floor(fixFloatingPoint2(trimStartInSeconds * audioData.sampleRate));
+  const unroundedFrameCount = numberOfFrames - trimEndInSeconds * audioData.sampleRate - frameOffset;
+  const frameCount = isLast ? ceilButNotIfFloatingPointIssue(unroundedFrameCount) : Math.round(unroundedFrameCount);
+  const newNumberOfFrames = isLast ? ceilButNotIfFloatingPointIssue(unroundedFrameCount / ratio / playbackRate) : Math.round(unroundedFrameCount / ratio / playbackRate);
   if (newNumberOfFrames === 0) {
     throw new Error("Cannot resample - the given sample rate would result in less than 1 sample");
   }
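The rounding helpers introduced above guard against IEEE 754 noise: fixFloatingPoint2 snaps values within 1e-7 of an integer to that integer, and ceilButNotIfFloatingPointIssue then ceils, so a frame count that is mathematically whole is no longer bumped up by one. Interior chunks are Math.round-ed and only the last chunk (isLast) is ceiled, avoiding cumulative off-by-one-frame drift. A self-contained sketch of the effect; the sample value is illustrative, not from the package:

const fixFloatingPoint = (value) => {
  if (value % 1 < 0.0000001) {
    return Math.floor(value);
  }
  if (value % 1 > 0.9999999) {
    return Math.ceil(value);
  }
  return value;
};
const ceilButNotIfFloatingPointIssue = (value) => Math.ceil(fixFloatingPoint(value));

// (0.1 + 0.2) * 10 evaluates to 3.0000000000000004 in IEEE 754 doubles.
const frameCount = (0.1 + 0.2) * 10;
console.log(Math.ceil(frameCount));                      // 4 - a phantom extra frame
console.log(ceilButNotIfFloatingPointIssue(frameCount)); // 3 - float noise snapped away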
@@ -2739,11 +2765,13 @@
   });
   const data = new Int16Array(newNumberOfFrames * TARGET_NUMBER_OF_CHANNELS);
   const chunkSize = frameCount / newNumberOfFrames;
+  const timestampOffsetMicroseconds = frameOffset / audioData.sampleRate * 1e6;
   if (newNumberOfFrames === frameCount && TARGET_NUMBER_OF_CHANNELS === srcNumberOfChannels && playbackRate === 1) {
     return {
       data: srcChannels,
       numberOfFrames: newNumberOfFrames,
-      timestamp: audioData.timestamp + frameOffset / audioData.sampleRate * 1e6
+      timestamp: audioDataTimestamp * 1e6 + fixFloatingPoint2(timestampOffsetMicroseconds),
+      durationInMicroSeconds: fixFloatingPoint2(newNumberOfFrames / TARGET_SAMPLE_RATE * 1e6)
     };
   }
   resampleAudioData({
@@ -2756,11 +2784,35 @@
   const newAudioData = {
     data,
     numberOfFrames: newNumberOfFrames,
-    timestamp: audioData.timestamp + frameOffset / audioData.sampleRate * 1e6
+    timestamp: audioDataTimestamp * 1e6 + fixFloatingPoint2(timestampOffsetMicroseconds),
+    durationInMicroSeconds: fixFloatingPoint2(newNumberOfFrames / TARGET_SAMPLE_RATE * 1e6)
   };
   return newAudioData;
 };

+// src/convert-audiodata/combine-audiodata.ts
+var combineAudioDataAndClosePrevious = (audioDataArray) => {
+  let numberOfFrames = 0;
+  let durationInMicroSeconds = 0;
+  const { timestamp } = audioDataArray[0];
+  for (const audioData of audioDataArray) {
+    numberOfFrames += audioData.numberOfFrames;
+    durationInMicroSeconds += audioData.durationInMicroSeconds;
+  }
+  const arr = new Int16Array(numberOfFrames * TARGET_NUMBER_OF_CHANNELS);
+  let offset = 0;
+  for (const audioData of audioDataArray) {
+    arr.set(audioData.data, offset);
+    offset += audioData.data.length;
+  }
+  return {
+    data: arr,
+    numberOfFrames,
+    timestamp: fixFloatingPoint2(timestamp),
+    durationInMicroSeconds: fixFloatingPoint2(durationInMicroSeconds)
+  };
+};
+
 // src/get-sink.ts
 import { Internals as Internals12 } from "remotion";
 var sinkPromises = {};
@@ -2860,7 +2912,9 @@ var extractAudioInternal = async ({
     audioData: audioDataRaw,
     trimStartInSeconds,
     trimEndInSeconds,
-    playbackRate
+    playbackRate,
+    audioDataTimestamp: sample.timestamp,
+    isLast: isLastSample
   });
   audioDataRaw.close();
   if (audioData.numberOfFrames === 0) {
@@ -3281,7 +3335,7 @@ var AudioForRendering = ({
     sequenceContext?.relativeFrom,
     sequenceContext?.durationInFrames
   ]);
-  useLayoutEffect(() => {
+  useLayoutEffect2(() => {
    const timestamp = frame / fps;
    const durationInSeconds = 1 / fps;
    if (replaceWithHtml5Audio) {
@@ -3466,7 +3520,7 @@ import { Internals as Internals17, useRemotionEnvironment as useRemotionEnvironm
 import {
   useContext as useContext4,
   useEffect as useEffect3,
-  useLayoutEffect as useLayoutEffect2,
+  useLayoutEffect as useLayoutEffect3,
   useMemo as useMemo4,
   useRef as useRef2,
   useState as useState4
@@ -3687,7 +3741,7 @@ var VideoForPreviewAssertedShowing = ({
       mediaPlayer.pause();
     }
   }, [isPlayerBuffering, playing, logLevel, mediaPlayerReady]);
-  useLayoutEffect2(() => {
+  useLayoutEffect3(() => {
    const mediaPlayer = mediaPlayerRef.current;
    if (!mediaPlayer || !mediaPlayerReady)
      return;
@@ -3846,7 +3900,7 @@ var VideoForPreview = (props) => {
 // src/video/video-for-rendering.tsx
 import {
   useContext as useContext5,
-  useLayoutEffect as useLayoutEffect3,
+  useLayoutEffect as useLayoutEffect4,
   useMemo as useMemo5,
   useRef as useRef3,
   useState as useState5
@@ -3905,7 +3959,7 @@ var VideoForRendering = ({
   const [replaceWithOffthreadVideo, setReplaceWithOffthreadVideo] = useState5(false);
   const audioEnabled = Internals16.useAudioEnabled();
   const videoEnabled = Internals16.useVideoEnabled();
-  useLayoutEffect3(() => {
+  useLayoutEffect4(() => {
    if (!canvasRef.current) {
      return;
    }
@@ -4099,15 +4153,15 @@ var VideoForRendering = ({
       volume: volumeProp,
       id,
       onError: fallbackOffthreadVideoProps?.onError,
-      toneFrequency: fallbackOffthreadVideoProps?.toneFrequency ?? 1,
+      toneFrequency,
       showInTimeline: false,
       crossOrigin: undefined,
       onAutoPlayError: () => {
         return;
       },
       pauseWhenBuffering: false,
-      trimAfter: undefined,
-      trimBefore: undefined,
+      trimAfter: trimAfterValue,
+      trimBefore: trimBeforeValue,
       useWebAudioApi: false,
       startFrom: undefined,
       endAt: undefined,
@@ -245,6 +245,7 @@ export class MediaPlayer {
       playbackRate: this.playbackRate * this.globalPlaybackRate,
       getIsPlaying: () => this.playing,
       scheduleAudioNode: this.scheduleAudioNode,
+      bufferState: this.bufferState,
     });
   }
   async play(time) {
@@ -1,7 +1,6 @@
 import type { LogLevel, LoopVolumeCurveBehavior, OnVideoFrame, VolumeProp } from 'remotion';
 export type FallbackOffthreadVideoProps = {
     acceptableTimeShiftInSeconds?: number;
-    toneFrequency?: number;
     transparent?: boolean;
     toneMapped?: boolean;
     onError?: (err: Error) => void;
@@ -213,9 +213,9 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
         .join(' ');
     }, [className]);
     if (replaceWithOffthreadVideo) {
-        const fallback = (_jsx(Internals.InnerOffthreadVideo, { src: src, playbackRate: playbackRate ?? 1, muted: muted ?? false, acceptableTimeShiftInSeconds: fallbackOffthreadVideoProps?.acceptableTimeShiftInSeconds, loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? 'repeat', delayRenderRetries: delayRenderRetries ?? undefined, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined, style: style, allowAmplificationDuringRender: true, transparent: fallbackOffthreadVideoProps?.transparent ?? true, toneMapped: fallbackOffthreadVideoProps?.toneMapped ?? true, audioStreamIndex: audioStreamIndex ?? 0, name: name, className: className, onVideoFrame: onVideoFrame, volume: volumeProp, id: id, onError: fallbackOffthreadVideoProps?.onError, toneFrequency: fallbackOffthreadVideoProps?.toneFrequency ?? 1,
+        const fallback = (_jsx(Internals.InnerOffthreadVideo, { src: src, playbackRate: playbackRate ?? 1, muted: muted ?? false, acceptableTimeShiftInSeconds: fallbackOffthreadVideoProps?.acceptableTimeShiftInSeconds, loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? 'repeat', delayRenderRetries: delayRenderRetries ?? undefined, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined, style: style, allowAmplificationDuringRender: true, transparent: fallbackOffthreadVideoProps?.transparent ?? true, toneMapped: fallbackOffthreadVideoProps?.toneMapped ?? true, audioStreamIndex: audioStreamIndex ?? 0, name: name, className: className, onVideoFrame: onVideoFrame, volume: volumeProp, id: id, onError: fallbackOffthreadVideoProps?.onError, toneFrequency: toneFrequency,
         // these shouldn't matter during rendering / should not appear at all
-        showInTimeline: false, crossOrigin: undefined, onAutoPlayError: () => undefined, pauseWhenBuffering: false, trimAfter: undefined, trimBefore: undefined, useWebAudioApi: false, startFrom: undefined, endAt: undefined, stack: stack, _remotionInternalNativeLoopPassed: false }));
+        showInTimeline: false, crossOrigin: undefined, onAutoPlayError: () => undefined, pauseWhenBuffering: false, trimAfter: trimAfterValue, trimBefore: trimBeforeValue, useWebAudioApi: false, startFrom: undefined, endAt: undefined, stack: stack, _remotionInternalNativeLoopPassed: false }));
         if (loop) {
             if (!replaceWithOffthreadVideo.durationInSeconds) {
                 cancelRender(new Error(`Cannot render video ${src}: @remotion/media was unable to render, and fell back to <OffthreadVideo>. Also, "loop" was set, but <OffthreadVideo> does not support looping and @remotion/media could also not determine the duration of the video.`));
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@remotion/media",
-  "version": "4.0.369",
+  "version": "4.0.371",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
   "module": "dist/esm/index.mjs",
@@ -21,8 +21,8 @@
     "make": "tsc -d && bun --env-file=../.env.bundle bundle.ts"
   },
   "dependencies": {
-    "mediabunny": "1.24.2",
-    "remotion": "4.0.369",
+    "mediabunny": "1.24.3",
+    "remotion": "4.0.371",
     "webdriverio": "9.19.2"
   },
   "peerDependencies": {
@@ -30,7 +30,7 @@
     "react-dom": ">=16.8.0"
   },
   "devDependencies": {
-    "@remotion/eslint-config-internal": "4.0.369",
+    "@remotion/eslint-config-internal": "4.0.371",
     "@vitest/browser": "^3.2.4",
     "eslint": "9.19.0",
     "react": "19.0.0",
@@ -1,11 +0,0 @@
-import type { AudioBufferSink } from 'mediabunny';
-export declare const HEALTHY_BUFFER_THRESHOLD_SECONDS = 1;
-export declare const makeAudioIterator: (audioSink: AudioBufferSink, startFromSecond: number) => {
-    iterator: AsyncGenerator<import("mediabunny").WrappedAudioBuffer, void, unknown>;
-    destroy: () => void;
-    isReadyToPlay: () => boolean;
-    setAudioIteratorStarted: (started: boolean) => void;
-    getNext: () => Promise<IteratorResult<import("mediabunny").WrappedAudioBuffer, void>>;
-    setAudioBufferHealth: (health: number) => void;
-};
-export type AudioIterator = ReturnType<typeof makeAudioIterator>;
@@ -1,24 +0,0 @@
-export const HEALTHY_BUFFER_THRESHOLD_SECONDS = 1;
-export const makeAudioIterator = (audioSink, startFromSecond) => {
-    const iterator = audioSink.buffers(startFromSecond);
-    let audioIteratorStarted = false;
-    let audioBufferHealth = 0;
-    return {
-        iterator,
-        destroy: () => {
-            iterator.return().catch(() => undefined);
-        },
-        isReadyToPlay: () => {
-            return audioIteratorStarted && audioBufferHealth > 0;
-        },
-        setAudioIteratorStarted: (started) => {
-            audioIteratorStarted = started;
-        },
-        getNext: () => {
-            return iterator.next();
-        },
-        setAudioBufferHealth: (health) => {
-            audioBufferHealth = health;
-        },
-    };
-};
@@ -1,98 +0,0 @@
-import type { LogLevel } from 'remotion';
-export declare const SEEK_THRESHOLD = 0.05;
-export type MediaPlayerInitResult = {
-    type: 'success';
-    durationInSeconds: number;
-} | {
-    type: 'unknown-container-format';
-} | {
-    type: 'cannot-decode';
-} | {
-    type: 'network-error';
-} | {
-    type: 'no-tracks';
-};
-export declare class MediaPlayer {
-    private canvas;
-    private context;
-    private src;
-    private logLevel;
-    private playbackRate;
-    private audioStreamIndex;
-    private canvasSink;
-    private videoFrameIterator;
-    private nextFrame;
-    private audioSink;
-    private audioBufferIterator;
-    private queuedAudioNodes;
-    private gainNode;
-    private currentVolume;
-    private sharedAudioContext;
-    private audioSyncAnchor;
-    private playing;
-    private muted;
-    private loop;
-    private fps;
-    private trimBefore;
-    private trimAfter;
-    private animationFrameId;
-    private videoAsyncId;
-    private audioAsyncId;
-    private initialized;
-    private totalDuration;
-    private isBuffering;
-    private onBufferingChangeCallback?;
-    private audioBufferHealth;
-    private audioIteratorStarted;
-    private readonly HEALTHY_BUFER_THRESHOLD_SECONDS;
-    private mediaEnded;
-    private onVideoFrameCallback?;
-    constructor({ canvas, src, logLevel, sharedAudioContext, loop, trimBefore, trimAfter, playbackRate, audioStreamIndex, fps, }: {
-        canvas: HTMLCanvasElement | null;
-        src: string;
-        logLevel: LogLevel;
-        sharedAudioContext: AudioContext;
-        loop: boolean;
-        trimBefore: number | undefined;
-        trimAfter: number | undefined;
-        playbackRate: number;
-        audioStreamIndex: number;
-        fps: number;
-    });
-    private input;
-    private isReady;
-    private hasAudio;
-    private isCurrentlyBuffering;
-    initialize(startTimeUnresolved: number): Promise<MediaPlayerInitResult>;
-    private clearCanvas;
-    private cleanupAudioQueue;
-    private cleanAudioIteratorAndNodes;
-    seekTo(time: number): Promise<void>;
-    play(): Promise<void>;
-    pause(): void;
-    setMuted(muted: boolean): void;
-    setVolume(volume: number): void;
-    setPlaybackRate(rate: number): void;
-    setFps(fps: number): void;
-    setLoop(loop: boolean): void;
-    dispose(): void;
-    private getPlaybackTime;
-    private scheduleAudioChunk;
-    onBufferingChange(callback: (isBuffering: boolean) => void): () => void;
-    onVideoFrame(callback: (frame: CanvasImageSource) => void): () => void;
-    private canRenderVideo;
-    private startRenderLoop;
-    private stopRenderLoop;
-    private render;
-    private shouldRenderFrame;
-    private drawCurrentFrame;
-    private startAudioIterator;
-    private startVideoIterator;
-    private updateNextFrame;
-    private bufferingStartedAtMs;
-    private minBufferingTimeoutMs;
-    private setBufferingState;
-    private maybeResumeFromBuffering;
-    private maybeForceResumeFromBuffering;
-    private runAudioIterator;
-}