@remotion/media 4.0.383 → 4.0.385

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -10,6 +10,8 @@ const AudioForPreviewAssertedShowing = ({ src, playbackRate, logLevel, muted, vo
  const videoConfig = useUnsafeVideoConfig();
  const frame = useCurrentFrame();
  const mediaPlayerRef = useRef(null);
+ const initialTrimBeforeRef = useRef(trimBefore);
+ const initialTrimAfterRef = useRef(trimAfter);
  const [mediaPlayerReady, setMediaPlayerReady] = useState(false);
  const [shouldFallbackToNativeAudio, setShouldFallbackToNativeAudio] = useState(false);
  const [playing] = Timeline.usePlayingState();
@@ -62,11 +64,11 @@ const AudioForPreviewAssertedShowing = ({ src, playbackRate, logLevel, muted, vo
  trimAfter,
  trimBefore,
  });
- const buffering = useContext(Internals.BufferingContextReact);
- if (!buffering) {
+ const bufferingContext = useContext(Internals.BufferingContextReact);
+ if (!bufferingContext) {
  throw new Error('useMediaPlayback must be used inside a <BufferingContext>');
  }
- const isPlayerBuffering = Internals.useIsPlayerBuffering(buffering);
+ const isPlayerBuffering = Internals.useIsPlayerBuffering(bufferingContext);
  useEffect(() => {
  if (!sharedAudioContext)
  return;
@@ -78,8 +80,8 @@ const AudioForPreviewAssertedShowing = ({ src, playbackRate, logLevel, muted, vo
  logLevel,
  sharedAudioContext: sharedAudioContext.audioContext,
  loop,
- trimAfter,
- trimBefore,
+ trimAfter: initialTrimAfterRef.current,
+ trimBefore: initialTrimBeforeRef.current,
  fps: videoConfig.fps,
  canvas: null,
  playbackRate,
@@ -159,8 +161,6 @@ const AudioForPreviewAssertedShowing = ({ src, playbackRate, logLevel, muted, vo
  sharedAudioContext,
  currentTimeRef,
  loop,
- trimAfter,
- trimBefore,
  playbackRate,
  videoConfig.fps,
  audioStreamIndex,
@@ -170,7 +170,7 @@ const AudioForPreviewAssertedShowing = ({ src, playbackRate, logLevel, muted, vo
  isPostmounting,
  globalPlaybackRate,
  ]);
- useEffect(() => {
+ useLayoutEffect(() => {
  const audioPlayer = mediaPlayerRef.current;
  if (!audioPlayer)
  return;
@@ -182,16 +182,21 @@ const AudioForPreviewAssertedShowing = ({ src, playbackRate, logLevel, muted, vo
  }
  }, [isPlayerBuffering, logLevel, playing]);
  useLayoutEffect(() => {
- const audioPlayer = mediaPlayerRef.current;
- if (!audioPlayer || !mediaPlayerReady)
+ const mediaPlayer = mediaPlayerRef.current;
+ if (!mediaPlayer || !mediaPlayerReady) {
  return;
- audioPlayer.seekTo(currentTime).catch(() => {
- // Might be disposed
- });
- Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[AudioForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
- }, [currentTime, logLevel, mediaPlayerReady]);
+ }
+ mediaPlayer.setTrimBefore(trimBefore, currentTimeRef.current);
+ }, [trimBefore, mediaPlayerReady]);
+ useLayoutEffect(() => {
+ const mediaPlayer = mediaPlayerRef.current;
+ if (!mediaPlayer || !mediaPlayerReady) {
+ return;
+ }
+ mediaPlayer.setTrimAfter(trimAfter, currentTimeRef.current);
+ }, [trimAfter, mediaPlayerReady]);
  const effectiveMuted = muted || mediaMuted || userPreferredVolume <= 0;
- useEffect(() => {
+ useLayoutEffect(() => {
  const audioPlayer = mediaPlayerRef.current;
  if (!audioPlayer || !mediaPlayerReady)
  return;
@@ -211,55 +216,50 @@ const AudioForPreviewAssertedShowing = ({ src, playbackRate, logLevel, muted, vo
  }
  audioPlayer.setPlaybackRate(playbackRate);
  }, [playbackRate, mediaPlayerReady]);
- useEffect(() => {
+ useLayoutEffect(() => {
  const audioPlayer = mediaPlayerRef.current;
  if (!audioPlayer || !mediaPlayerReady) {
  return;
  }
  audioPlayer.setGlobalPlaybackRate(globalPlaybackRate);
  }, [globalPlaybackRate, mediaPlayerReady]);
- useEffect(() => {
+ useLayoutEffect(() => {
  const audioPlayer = mediaPlayerRef.current;
  if (!audioPlayer || !mediaPlayerReady) {
  return;
  }
  audioPlayer.setFps(videoConfig.fps);
  }, [videoConfig.fps, mediaPlayerReady]);
- useEffect(() => {
- const mediaPlayer = mediaPlayerRef.current;
- if (!mediaPlayer || !mediaPlayerReady) {
- return;
- }
- mediaPlayer.setTrimBefore(trimBefore);
- }, [trimBefore, mediaPlayerReady]);
- useEffect(() => {
- const mediaPlayer = mediaPlayerRef.current;
- if (!mediaPlayer || !mediaPlayerReady) {
- return;
- }
- mediaPlayer.setTrimAfter(trimAfter);
- }, [trimAfter, mediaPlayerReady]);
- useEffect(() => {
+ useLayoutEffect(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
  mediaPlayer.setLoop(loop);
  }, [loop, mediaPlayerReady]);
- useEffect(() => {
+ useLayoutEffect(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
  mediaPlayer.setIsPremounting(isPremounting);
  }, [isPremounting, mediaPlayerReady]);
- useEffect(() => {
+ useLayoutEffect(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
  mediaPlayer.setIsPostmounting(isPostmounting);
  }, [isPostmounting, mediaPlayerReady]);
+ useLayoutEffect(() => {
+ const audioPlayer = mediaPlayerRef.current;
+ if (!audioPlayer || !mediaPlayerReady)
+ return;
+ audioPlayer.seekTo(currentTime).catch(() => {
+ // Might be disposed
+ });
+ Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[AudioForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
+ }, [currentTime, logLevel, mediaPlayerReady]);
  if (shouldFallbackToNativeAudio && !disallowFallbackToHtml5Audio) {
  return (_jsx(RemotionAudio, { src: src, muted: muted, volume: volume, startFrom: trimBefore, endAt: trimAfter, playbackRate: playbackRate, loopVolumeCurveBehavior: loopVolumeCurveBehavior, name: name, loop: loop, showInTimeline: showInTimeline, stack: stack ?? undefined, toneFrequency: toneFrequency, audioStreamIndex: audioStreamIndex, pauseWhenBuffering: fallbackHtml5AudioProps?.pauseWhenBuffering, crossOrigin: fallbackHtml5AudioProps?.crossOrigin, ...fallbackHtml5AudioProps }));
  }
@@ -6,7 +6,7 @@ import { applyVolume } from '../convert-audiodata/apply-volume';
  import { TARGET_SAMPLE_RATE } from '../convert-audiodata/resample-audiodata';
  import { frameForVolumeProp } from '../looped-frame';
  import { extractFrameViaBroadcastChannel } from '../video-extraction/extract-frame-via-broadcast-channel';
- export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted, loopVolumeCurveBehavior, delayRenderRetries, delayRenderTimeoutInMilliseconds, logLevel, loop, fallbackHtml5AudioProps, audioStreamIndex, showInTimeline, style, name, disallowFallbackToHtml5Audio, toneFrequency, trimAfter, trimBefore, }) => {
+ export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted, loopVolumeCurveBehavior, delayRenderRetries, delayRenderTimeoutInMilliseconds, logLevel = window.remotion_logLevel ?? 'info', loop, fallbackHtml5AudioProps, audioStreamIndex, showInTimeline, style, name, disallowFallbackToHtml5Audio, toneFrequency, trimAfter, trimBefore, }) => {
  const frame = useCurrentFrame();
  const absoluteFrame = Internals.useTimelinePosition();
  const videoConfig = Internals.useUnsafeVideoConfig();
@@ -25,13 +25,14 @@ export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted
  const sequenceContext = useContext(Internals.SequenceContext);
  // Generate a string that's as unique as possible for this asset
  // but at the same time the same on all threads
- const id = useMemo(() => `media-video-${random(src)}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [
+ const id = useMemo(() => `media-audio-${random(src)}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [
  src,
  sequenceContext?.cumulatedFrom,
  sequenceContext?.relativeFrom,
  sequenceContext?.durationInFrames,
  ]);
  const maxCacheSize = useMaxMediaCacheSize(logLevel ?? window.remotion_logLevel);
+ const audioEnabled = Internals.useAudioEnabled();
  useLayoutEffect(() => {
  const timestamp = frame / fps;
  const durationInSeconds = 1 / fps;
@@ -43,7 +44,7 @@ export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted
  timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined,
  });
  const shouldRenderAudio = (() => {
- if (!window.remotion_audioEnabled) {
+ if (!audioEnabled) {
  return false;
  }
  if (muted) {
@@ -124,7 +125,9 @@ export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted
  registerRenderAsset({
  type: 'inline-audio',
  id,
- audio: Array.from(audio.data),
+ audio: environment.isClientSideRendering
+ ? audio.data
+ : Array.from(audio.data),
  frame: absoluteFrame,
  timestamp: audio.timestamp,
  duration: (audio.numberOfFrames / TARGET_SAMPLE_RATE) * 1000000,
@@ -167,6 +170,7 @@ export const AudioForRendering = ({ volume: volumeProp, playbackRate, src, muted
  trimBefore,
  replaceWithHtml5Audio,
  maxCacheSize,
+ audioEnabled,
  ]);
  if (replaceWithHtml5Audio) {
  return (_jsx(Html5Audio, { src: src, playbackRate: playbackRate, muted: muted, loop: loop, volume: volumeProp, delayRenderRetries: delayRenderRetries, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds, style: style, loopVolumeCurveBehavior: loopVolumeCurveBehavior, audioStreamIndex: audioStreamIndex, useWebAudioApi: fallbackHtml5AudioProps?.useWebAudioApi, onError: fallbackHtml5AudioProps?.onError, toneFrequency: toneFrequency, acceptableTimeShiftInSeconds: fallbackHtml5AudioProps?.acceptableTimeShiftInSeconds, name: name, showInTimeline: showInTimeline }));
@@ -46,7 +46,7 @@ export declare const audioIteratorManager: ({ audioTrack, delayPlaybackHandleIfN
  moveQueuedChunksToPauseQueue: () => void;
  getNumberOfChunksAfterResuming: () => number;
  } | null;
- destroy: () => void;
+ destroyIterator: () => void;
  seek: ({ newTime, nonce, fps, playbackRate, getIsPlaying, scheduleAudioNode, bufferState, }: {
  newTime: number;
  nonce: Nonce;
@@ -102,7 +102,8 @@ export const audioIteratorManager = ({ audioTrack, delayPlaybackHandleIfNotPremo
  });
  return;
  }
- const currentTimeIsAlreadyQueued = isAlreadyQueued(newTime, audioBufferIterator.getQueuedPeriod());
+ const queuedPeriod = audioBufferIterator.getQueuedPeriod();
+ const currentTimeIsAlreadyQueued = isAlreadyQueued(newTime, queuedPeriod);
  if (!currentTimeIsAlreadyQueued) {
  const audioSatisfyResult = await audioBufferIterator.tryToSatisfySeek(newTime, null, (buffer) => {
  if (!nonce.isStale()) {
@@ -192,7 +193,7 @@ export const audioIteratorManager = ({ audioTrack, delayPlaybackHandleIfNotPremo
  resumeScheduledAudioChunks,
  pausePlayback,
  getAudioBufferIterator: () => audioBufferIterator,
- destroy: () => {
+ destroyIterator: () => {
  audioBufferIterator?.destroy();
  audioBufferIterator = null;
  },
@@ -377,7 +377,8 @@ var audioIteratorManager = ({
  });
  return;
  }
- const currentTimeIsAlreadyQueued = isAlreadyQueued(newTime, audioBufferIterator.getQueuedPeriod());
+ const queuedPeriod = audioBufferIterator.getQueuedPeriod();
+ const currentTimeIsAlreadyQueued = isAlreadyQueued(newTime, queuedPeriod);
  if (!currentTimeIsAlreadyQueued) {
  const audioSatisfyResult = await audioBufferIterator.tryToSatisfySeek(newTime, null, (buffer) => {
  if (!nonce.isStale()) {
@@ -465,7 +466,7 @@ var audioIteratorManager = ({
  resumeScheduledAudioChunks,
  pausePlayback,
  getAudioBufferIterator: () => audioBufferIterator,
- destroy: () => {
+ destroyIterator: () => {
  audioBufferIterator?.destroy();
  audioBufferIterator = null;
  },
@@ -1051,11 +1052,33 @@ class MediaPlayer {
  }
  this.audioIteratorManager.setVolume(volume);
  }
- setTrimBefore(trimBefore) {
+ updateAfterTrimChange(unloopedTimeInSeconds) {
+ if (!this.audioIteratorManager && !this.videoIteratorManager) {
+ return;
+ }
+ const newMediaTime = getTimeInSeconds({
+ unloopedTimeInSeconds,
+ playbackRate: this.playbackRate,
+ loop: this.loop,
+ trimBefore: this.trimBefore,
+ trimAfter: this.trimAfter,
+ mediaDurationInSeconds: this.totalDuration ?? null,
+ fps: this.fps,
+ ifNoMediaDuration: "infinity",
+ src: this.src
+ });
+ if (newMediaTime !== null) {
+ this.setPlaybackTime(newMediaTime, this.playbackRate * this.globalPlaybackRate);
+ }
+ this.audioIteratorManager?.destroyIterator();
+ }
+ setTrimBefore(trimBefore, unloopedTimeInSeconds) {
  this.trimBefore = trimBefore;
+ this.updateAfterTrimChange(unloopedTimeInSeconds);
  }
- setTrimAfter(trimAfter) {
+ setTrimAfter(trimAfter, unloopedTimeInSeconds) {
  this.trimAfter = trimAfter;
+ this.updateAfterTrimChange(unloopedTimeInSeconds);
  }
  setDebugOverlay(debugOverlay) {
  this.debugOverlay = debugOverlay;
@@ -1105,7 +1128,7 @@ class MediaPlayer {
  }
  this.nonceManager.createAsyncOperation();
  this.videoIteratorManager?.destroy();
- this.audioIteratorManager?.destroy();
+ this.audioIteratorManager?.destroyIterator();
  this.input.dispose();
  }
  scheduleAudioNode = (node, mediaTimestamp) => {
@@ -1347,6 +1370,8 @@ var AudioForPreviewAssertedShowing = ({
  const videoConfig = useUnsafeVideoConfig();
  const frame = useCurrentFrame2();
  const mediaPlayerRef = useRef(null);
+ const initialTrimBeforeRef = useRef(trimBefore);
+ const initialTrimAfterRef = useRef(trimAfter);
  const [mediaPlayerReady, setMediaPlayerReady] = useState2(false);
  const [shouldFallbackToNativeAudio, setShouldFallbackToNativeAudio] = useState2(false);
  const [playing] = Timeline.usePlayingState();
@@ -1399,11 +1424,11 @@ var AudioForPreviewAssertedShowing = ({
  trimAfter,
  trimBefore
  });
- const buffering = useContext2(Internals6.BufferingContextReact);
- if (!buffering) {
+ const bufferingContext = useContext2(Internals6.BufferingContextReact);
+ if (!bufferingContext) {
  throw new Error("useMediaPlayback must be used inside a <BufferingContext>");
  }
- const isPlayerBuffering = Internals6.useIsPlayerBuffering(buffering);
+ const isPlayerBuffering = Internals6.useIsPlayerBuffering(bufferingContext);
  useEffect2(() => {
  if (!sharedAudioContext)
  return;
@@ -1415,8 +1440,8 @@ var AudioForPreviewAssertedShowing = ({
  logLevel,
  sharedAudioContext: sharedAudioContext.audioContext,
  loop,
- trimAfter,
- trimBefore,
+ trimAfter: initialTrimAfterRef.current,
+ trimBefore: initialTrimBeforeRef.current,
  fps: videoConfig.fps,
  canvas: null,
  playbackRate,
@@ -1492,8 +1517,6 @@ var AudioForPreviewAssertedShowing = ({
  sharedAudioContext,
  currentTimeRef,
  loop,
- trimAfter,
- trimBefore,
  playbackRate,
  videoConfig.fps,
  audioStreamIndex,
@@ -1503,7 +1526,7 @@ var AudioForPreviewAssertedShowing = ({
  isPostmounting,
  globalPlaybackRate
  ]);
- useEffect2(() => {
+ useLayoutEffect(() => {
  const audioPlayer = mediaPlayerRef.current;
  if (!audioPlayer)
  return;
@@ -1514,14 +1537,21 @@ var AudioForPreviewAssertedShowing = ({
  }
  }, [isPlayerBuffering, logLevel, playing]);
  useLayoutEffect(() => {
- const audioPlayer = mediaPlayerRef.current;
- if (!audioPlayer || !mediaPlayerReady)
+ const mediaPlayer = mediaPlayerRef.current;
+ if (!mediaPlayer || !mediaPlayerReady) {
  return;
- audioPlayer.seekTo(currentTime).catch(() => {});
- Internals6.Log.trace({ logLevel, tag: "@remotion/media" }, `[AudioForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
- }, [currentTime, logLevel, mediaPlayerReady]);
+ }
+ mediaPlayer.setTrimBefore(trimBefore, currentTimeRef.current);
+ }, [trimBefore, mediaPlayerReady]);
+ useLayoutEffect(() => {
+ const mediaPlayer = mediaPlayerRef.current;
+ if (!mediaPlayer || !mediaPlayerReady) {
+ return;
+ }
+ mediaPlayer.setTrimAfter(trimAfter, currentTimeRef.current);
+ }, [trimAfter, mediaPlayerReady]);
  const effectiveMuted = muted || mediaMuted || userPreferredVolume <= 0;
- useEffect2(() => {
+ useLayoutEffect(() => {
  const audioPlayer = mediaPlayerRef.current;
  if (!audioPlayer || !mediaPlayerReady)
  return;
@@ -1541,55 +1571,48 @@ var AudioForPreviewAssertedShowing = ({
  }
  audioPlayer.setPlaybackRate(playbackRate);
  }, [playbackRate, mediaPlayerReady]);
- useEffect2(() => {
+ useLayoutEffect(() => {
  const audioPlayer = mediaPlayerRef.current;
  if (!audioPlayer || !mediaPlayerReady) {
  return;
  }
  audioPlayer.setGlobalPlaybackRate(globalPlaybackRate);
  }, [globalPlaybackRate, mediaPlayerReady]);
- useEffect2(() => {
+ useLayoutEffect(() => {
  const audioPlayer = mediaPlayerRef.current;
  if (!audioPlayer || !mediaPlayerReady) {
  return;
  }
  audioPlayer.setFps(videoConfig.fps);
  }, [videoConfig.fps, mediaPlayerReady]);
- useEffect2(() => {
- const mediaPlayer = mediaPlayerRef.current;
- if (!mediaPlayer || !mediaPlayerReady) {
- return;
- }
- mediaPlayer.setTrimBefore(trimBefore);
- }, [trimBefore, mediaPlayerReady]);
- useEffect2(() => {
- const mediaPlayer = mediaPlayerRef.current;
- if (!mediaPlayer || !mediaPlayerReady) {
- return;
- }
- mediaPlayer.setTrimAfter(trimAfter);
- }, [trimAfter, mediaPlayerReady]);
- useEffect2(() => {
+ useLayoutEffect(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
  mediaPlayer.setLoop(loop);
  }, [loop, mediaPlayerReady]);
- useEffect2(() => {
+ useLayoutEffect(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
  mediaPlayer.setIsPremounting(isPremounting);
  }, [isPremounting, mediaPlayerReady]);
- useEffect2(() => {
+ useLayoutEffect(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
  mediaPlayer.setIsPostmounting(isPostmounting);
  }, [isPostmounting, mediaPlayerReady]);
+ useLayoutEffect(() => {
+ const audioPlayer = mediaPlayerRef.current;
+ if (!audioPlayer || !mediaPlayerReady)
+ return;
+ audioPlayer.seekTo(currentTime).catch(() => {});
+ Internals6.Log.trace({ logLevel, tag: "@remotion/media" }, `[AudioForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
+ }, [currentTime, logLevel, mediaPlayerReady]);
  if (shouldFallbackToNativeAudio && !disallowFallbackToHtml5Audio) {
  return /* @__PURE__ */ jsx(RemotionAudio, {
  src,
@@ -3408,7 +3431,7 @@ var AudioForRendering = ({
  loopVolumeCurveBehavior,
  delayRenderRetries,
  delayRenderTimeoutInMilliseconds,
- logLevel,
+ logLevel = window.remotion_logLevel ?? "info",
  loop,
  fallbackHtml5AudioProps,
  audioStreamIndex,
@@ -3436,13 +3459,14 @@ var AudioForRendering = ({
  const { delayRender, continueRender } = useDelayRender();
  const [replaceWithHtml5Audio, setReplaceWithHtml5Audio] = useState3(false);
  const sequenceContext = useContext3(Internals13.SequenceContext);
- const id = useMemo3(() => `media-video-${random(src)}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [
+ const id = useMemo3(() => `media-audio-${random(src)}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [
  src,
  sequenceContext?.cumulatedFrom,
  sequenceContext?.relativeFrom,
  sequenceContext?.durationInFrames
  ]);
  const maxCacheSize = useMaxMediaCacheSize(logLevel ?? window.remotion_logLevel);
+ const audioEnabled = Internals13.useAudioEnabled();
  useLayoutEffect2(() => {
  const timestamp = frame / fps;
  const durationInSeconds = 1 / fps;
@@ -3454,7 +3478,7 @@ var AudioForRendering = ({
  timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined
  });
  const shouldRenderAudio = (() => {
- if (!window.remotion_audioEnabled) {
+ if (!audioEnabled) {
  return false;
  }
  if (muted) {
@@ -3534,7 +3558,7 @@ var AudioForRendering = ({
  registerRenderAsset({
  type: "inline-audio",
  id,
- audio: Array.from(audio.data),
+ audio: environment.isClientSideRendering ? audio.data : Array.from(audio.data),
  frame: absoluteFrame,
  timestamp: audio.timestamp,
  duration: audio.numberOfFrames / TARGET_SAMPLE_RATE * 1e6,
@@ -3575,7 +3599,8 @@ var AudioForRendering = ({
  trimAfter,
  trimBefore,
  replaceWithHtml5Audio,
- maxCacheSize
+ maxCacheSize,
+ audioEnabled
  ]);
  if (replaceWithHtml5Audio) {
  return /* @__PURE__ */ jsx2(Html5Audio, {
@@ -3682,6 +3707,8 @@ var VideoForPreviewAssertedShowing = ({
  const videoConfig = useUnsafeVideoConfig2();
  const frame = useCurrentFrame4();
  const mediaPlayerRef = useRef2(null);
+ const initialTrimBeforeRef = useRef2(trimBefore);
+ const initialTrimAfterRef = useRef2(trimAfter);
  const [mediaPlayerReady, setMediaPlayerReady] = useState4(false);
  const [shouldFallbackToNativeVideo, setShouldFallbackToNativeVideo] = useState4(false);
  const [playing] = Timeline2.usePlayingState();
@@ -3752,8 +3779,8 @@ var VideoForPreviewAssertedShowing = ({
  logLevel,
  sharedAudioContext: sharedAudioContext.audioContext,
  loop,
- trimAfter,
- trimBefore,
+ trimAfter: initialTrimAfterRef.current,
+ trimBefore: initialTrimBeforeRef.current,
  fps: videoConfig.fps,
  playbackRate,
  audioStreamIndex,
@@ -3826,8 +3853,6 @@ var VideoForPreviewAssertedShowing = ({
  logLevel,
  sharedAudioContext,
  loop,
- trimAfter,
- trimBefore,
  videoConfig.fps,
  playbackRate,
  disallowFallbackToOffthreadVideo,
@@ -3851,97 +3876,97 @@ var VideoForPreviewAssertedShowing = ({
  mediaPlayer.pause();
  }
  }, [isPlayerBuffering, playing, logLevel, mediaPlayerReady]);
- useLayoutEffect3(() => {
+ useEffect3(() => {
  const mediaPlayer = mediaPlayerRef.current;
- if (!mediaPlayer || !mediaPlayerReady)
+ if (!mediaPlayer || !mediaPlayerReady) {
  return;
- mediaPlayer.seekTo(currentTime).catch(() => {});
- Internals15.Log.trace({ logLevel, tag: "@remotion/media" }, `[VideoForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
- }, [currentTime, logLevel, mediaPlayerReady]);
- const effectiveMuted = isSequenceHidden || muted || mediaMuted || userPreferredVolume <= 0;
+ }
+ mediaPlayer.setTrimBefore(trimBefore, currentTimeRef.current);
+ }, [trimBefore, mediaPlayerReady]);
  useEffect3(() => {
+ const mediaPlayer = mediaPlayerRef.current;
+ if (!mediaPlayer || !mediaPlayerReady) {
+ return;
+ }
+ mediaPlayer.setTrimAfter(trimAfter, currentTimeRef.current);
+ }, [trimAfter, mediaPlayerReady]);
+ const effectiveMuted = isSequenceHidden || muted || mediaMuted || userPreferredVolume <= 0;
+ useLayoutEffect3(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady)
  return;
  mediaPlayer.setMuted(effectiveMuted);
  }, [effectiveMuted, mediaPlayerReady]);
- useEffect3(() => {
+ useLayoutEffect3(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
  mediaPlayer.setVolume(userPreferredVolume);
  }, [userPreferredVolume, mediaPlayerReady]);
- useEffect3(() => {
+ useLayoutEffect3(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
  mediaPlayer.setDebugOverlay(debugOverlay);
  }, [debugOverlay, mediaPlayerReady]);
- useEffect3(() => {
+ useLayoutEffect3(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
  mediaPlayer.setPlaybackRate(playbackRate);
  }, [playbackRate, mediaPlayerReady]);
- useEffect3(() => {
+ useLayoutEffect3(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
  mediaPlayer.setGlobalPlaybackRate(globalPlaybackRate);
  }, [globalPlaybackRate, mediaPlayerReady]);
- useEffect3(() => {
+ useLayoutEffect3(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
  mediaPlayer.setLoop(loop);
  }, [loop, mediaPlayerReady]);
- useEffect3(() => {
+ useLayoutEffect3(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
  mediaPlayer.setIsPremounting(isPremounting);
  }, [isPremounting, mediaPlayerReady]);
- useEffect3(() => {
+ useLayoutEffect3(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
  mediaPlayer.setIsPostmounting(isPostmounting);
  }, [isPostmounting, mediaPlayerReady]);
- useEffect3(() => {
+ useLayoutEffect3(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
  mediaPlayer.setFps(videoConfig.fps);
  }, [videoConfig.fps, mediaPlayerReady]);
- useEffect3(() => {
+ useLayoutEffect3(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
  mediaPlayer.setVideoFrameCallback(onVideoFrame ?? null);
  }, [onVideoFrame, mediaPlayerReady]);
- useEffect3(() => {
- const mediaPlayer = mediaPlayerRef.current;
- if (!mediaPlayer || !mediaPlayerReady) {
- return;
- }
- mediaPlayer.setTrimBefore(trimBefore);
- }, [trimBefore, mediaPlayerReady]);
- useEffect3(() => {
+ useLayoutEffect3(() => {
  const mediaPlayer = mediaPlayerRef.current;
- if (!mediaPlayer || !mediaPlayerReady) {
+ if (!mediaPlayer || !mediaPlayerReady)
  return;
- }
- mediaPlayer.setTrimAfter(trimAfter);
- }, [trimAfter, mediaPlayerReady]);
+ mediaPlayer.seekTo(currentTime).catch(() => {});
+ Internals15.Log.trace({ logLevel, tag: "@remotion/media" }, `[VideoForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
+ }, [currentTime, logLevel, mediaPlayerReady]);
  const actualStyle = useMemo4(() => {
  return {
  ...style,
@@ -4203,7 +4228,7 @@ var VideoForRendering = ({
  registerRenderAsset({
  type: "inline-audio",
  id,
- audio: Array.from(audio.data),
+ audio: environment.isClientSideRendering ? audio.data : Array.from(audio.data),
  frame: absoluteFrame,
  timestamp: audio.timestamp,
  duration: audio.numberOfFrames / TARGET_SAMPLE_RATE * 1e6,
@@ -70,8 +70,9 @@ export declare class MediaPlayer {
  pause(): void;
  setMuted(muted: boolean): void;
  setVolume(volume: number): void;
- setTrimBefore(trimBefore: number | undefined): void;
- setTrimAfter(trimAfter: number | undefined): void;
+ private updateAfterTrimChange;
+ setTrimBefore(trimBefore: number | undefined, unloopedTimeInSeconds: number): void;
+ setTrimAfter(trimAfter: number | undefined, unloopedTimeInSeconds: number): void;
  setDebugOverlay(debugOverlay: boolean): void;
  private updateAfterPlaybackRateChange;
  setPlaybackRate(rate: number): void;
@@ -284,11 +284,35 @@ export class MediaPlayer {
  }
  this.audioIteratorManager.setVolume(volume);
  }
- setTrimBefore(trimBefore) {
+ updateAfterTrimChange(unloopedTimeInSeconds) {
+ if (!this.audioIteratorManager && !this.videoIteratorManager) {
+ return;
+ }
+ const newMediaTime = getTimeInSeconds({
+ unloopedTimeInSeconds,
+ playbackRate: this.playbackRate,
+ loop: this.loop,
+ trimBefore: this.trimBefore,
+ trimAfter: this.trimAfter,
+ mediaDurationInSeconds: this.totalDuration ?? null,
+ fps: this.fps,
+ ifNoMediaDuration: 'infinity',
+ src: this.src,
+ });
+ if (newMediaTime !== null) {
+ this.setPlaybackTime(newMediaTime, this.playbackRate * this.globalPlaybackRate);
+ }
+ // audio iterator will be re-created on next play/seek
+ // video iterator doesn't need to be re-created
+ this.audioIteratorManager?.destroyIterator();
+ }
+ setTrimBefore(trimBefore, unloopedTimeInSeconds) {
  this.trimBefore = trimBefore;
+ this.updateAfterTrimChange(unloopedTimeInSeconds);
  }
- setTrimAfter(trimAfter) {
+ setTrimAfter(trimAfter, unloopedTimeInSeconds) {
  this.trimAfter = trimAfter;
+ this.updateAfterTrimChange(unloopedTimeInSeconds);
  }
  setDebugOverlay(debugOverlay) {
  this.debugOverlay = debugOverlay;
@@ -345,7 +369,7 @@ export class MediaPlayer {
  // Mark all async operations as stale
  this.nonceManager.createAsyncOperation();
  this.videoIteratorManager?.destroy();
- this.audioIteratorManager?.destroy();
+ this.audioIteratorManager?.destroyIterator();
  this.input.dispose();
  }
  getPlaybackTime() {
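
The hunk above changes the trim API: setTrimBefore and setTrimAfter now take the current unlooped time in seconds so the player can recompute its playback position and drop the stale audio iterator. A minimal caller-side sketch of the pattern the preview components in this diff use (mediaPlayerRef, mediaPlayerReady, currentTimeRef and trimBefore come from those components; the surrounding component body is assumed):

// Sketch only, not part of the diff: mirrors the useLayoutEffect added above.
// mediaPlayerRef.current holds a MediaPlayer instance once ready;
// currentTimeRef.current is the unlooped time in seconds.
useLayoutEffect(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
    return;
  }
  // Passing the current unlooped time lets updateAfterTrimChange() derive
  // the new media time and destroy the now-stale audio iterator.
  mediaPlayer.setTrimBefore(trimBefore, currentTimeRef.current);
}, [trimBefore, mediaPlayerReady]);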
@@ -12,6 +12,8 @@ const VideoForPreviewAssertedShowing = ({ src: unpreloadedSrc, style, playbackRa
  const videoConfig = useUnsafeVideoConfig();
  const frame = useCurrentFrame();
  const mediaPlayerRef = useRef(null);
+ const initialTrimBeforeRef = useRef(trimBefore);
+ const initialTrimAfterRef = useRef(trimAfter);
  const [mediaPlayerReady, setMediaPlayerReady] = useState(false);
  const [shouldFallbackToNativeVideo, setShouldFallbackToNativeVideo] = useState(false);
  const [playing] = Timeline.usePlayingState();
@@ -82,8 +84,8 @@ const VideoForPreviewAssertedShowing = ({ src: unpreloadedSrc, style, playbackRa
  logLevel,
  sharedAudioContext: sharedAudioContext.audioContext,
  loop,
- trimAfter,
- trimBefore,
+ trimAfter: initialTrimAfterRef.current,
+ trimBefore: initialTrimBeforeRef.current,
  fps: videoConfig.fps,
  playbackRate,
  audioStreamIndex,
@@ -160,8 +162,6 @@ const VideoForPreviewAssertedShowing = ({ src: unpreloadedSrc, style, playbackRa
  logLevel,
  sharedAudioContext,
  loop,
- trimAfter,
- trimBefore,
  videoConfig.fps,
  playbackRate,
  disallowFallbackToOffthreadVideo,
@@ -188,99 +188,99 @@ const VideoForPreviewAssertedShowing = ({ src: unpreloadedSrc, style, playbackRa
  mediaPlayer.pause();
  }
  }, [isPlayerBuffering, playing, logLevel, mediaPlayerReady]);
- useLayoutEffect(() => {
+ useEffect(() => {
  const mediaPlayer = mediaPlayerRef.current;
- if (!mediaPlayer || !mediaPlayerReady)
+ if (!mediaPlayer || !mediaPlayerReady) {
  return;
- mediaPlayer.seekTo(currentTime).catch(() => {
- // Might be disposed
- });
- Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[VideoForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
- }, [currentTime, logLevel, mediaPlayerReady]);
- const effectiveMuted = isSequenceHidden || muted || mediaMuted || userPreferredVolume <= 0;
+ }
+ mediaPlayer.setTrimBefore(trimBefore, currentTimeRef.current);
+ }, [trimBefore, mediaPlayerReady]);
  useEffect(() => {
+ const mediaPlayer = mediaPlayerRef.current;
+ if (!mediaPlayer || !mediaPlayerReady) {
+ return;
+ }
+ mediaPlayer.setTrimAfter(trimAfter, currentTimeRef.current);
+ }, [trimAfter, mediaPlayerReady]);
+ const effectiveMuted = isSequenceHidden || muted || mediaMuted || userPreferredVolume <= 0;
+ useLayoutEffect(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady)
  return;
  mediaPlayer.setMuted(effectiveMuted);
  }, [effectiveMuted, mediaPlayerReady]);
- useEffect(() => {
+ useLayoutEffect(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
  mediaPlayer.setVolume(userPreferredVolume);
  }, [userPreferredVolume, mediaPlayerReady]);
- useEffect(() => {
+ useLayoutEffect(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
  mediaPlayer.setDebugOverlay(debugOverlay);
  }, [debugOverlay, mediaPlayerReady]);
- useEffect(() => {
+ useLayoutEffect(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
  mediaPlayer.setPlaybackRate(playbackRate);
  }, [playbackRate, mediaPlayerReady]);
- useEffect(() => {
+ useLayoutEffect(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
  mediaPlayer.setGlobalPlaybackRate(globalPlaybackRate);
  }, [globalPlaybackRate, mediaPlayerReady]);
- useEffect(() => {
+ useLayoutEffect(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
  mediaPlayer.setLoop(loop);
  }, [loop, mediaPlayerReady]);
- useEffect(() => {
+ useLayoutEffect(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
  mediaPlayer.setIsPremounting(isPremounting);
  }, [isPremounting, mediaPlayerReady]);
- useEffect(() => {
+ useLayoutEffect(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
  mediaPlayer.setIsPostmounting(isPostmounting);
  }, [isPostmounting, mediaPlayerReady]);
- useEffect(() => {
+ useLayoutEffect(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
  mediaPlayer.setFps(videoConfig.fps);
  }, [videoConfig.fps, mediaPlayerReady]);
- useEffect(() => {
+ useLayoutEffect(() => {
  const mediaPlayer = mediaPlayerRef.current;
  if (!mediaPlayer || !mediaPlayerReady) {
  return;
  }
  mediaPlayer.setVideoFrameCallback(onVideoFrame ?? null);
  }, [onVideoFrame, mediaPlayerReady]);
- useEffect(() => {
- const mediaPlayer = mediaPlayerRef.current;
- if (!mediaPlayer || !mediaPlayerReady) {
- return;
- }
- mediaPlayer.setTrimBefore(trimBefore);
- }, [trimBefore, mediaPlayerReady]);
- useEffect(() => {
+ useLayoutEffect(() => {
  const mediaPlayer = mediaPlayerRef.current;
- if (!mediaPlayer || !mediaPlayerReady) {
+ if (!mediaPlayer || !mediaPlayerReady)
  return;
- }
- mediaPlayer.setTrimAfter(trimAfter);
- }, [trimAfter, mediaPlayerReady]);
+ mediaPlayer.seekTo(currentTime).catch(() => {
+ // Might be disposed
+ });
+ Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[VideoForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
+ }, [currentTime, logLevel, mediaPlayerReady]);
  const actualStyle = useMemo(() => {
  return {
  ...style,
@@ -165,7 +165,9 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
  registerRenderAsset({
  type: 'inline-audio',
  id,
- audio: Array.from(audio.data),
+ audio: environment.isClientSideRendering
+ ? audio.data
+ : Array.from(audio.data),
  frame: absoluteFrame,
  timestamp: audio.timestamp,
  duration: (audio.numberOfFrames / TARGET_SAMPLE_RATE) * 1000000,
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@remotion/media",
- "version": "4.0.383",
+ "version": "4.0.385",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "module": "dist/esm/index.mjs",
@@ -21,15 +21,15 @@
  "make": "tsc -d && bun --env-file=../.env.bundle bundle.ts"
  },
  "dependencies": {
- "mediabunny": "1.25.3",
- "remotion": "4.0.383"
+ "mediabunny": "1.25.8",
+ "remotion": "4.0.385"
  },
  "peerDependencies": {
  "react": ">=16.8.0",
  "react-dom": ">=16.8.0"
  },
  "devDependencies": {
- "@remotion/eslint-config-internal": "4.0.383",
+ "@remotion/eslint-config-internal": "4.0.385",
  "@vitest/browser-webdriverio": "4.0.7",
  "eslint": "9.19.0",
  "react": "19.2.1",