@remotion/media 4.0.434 → 4.0.436

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two package versions as they appear in their respective public registries.
@@ -1,4 +1,5 @@
1
1
  import type { WrappedAudioBuffer } from 'mediabunny';
2
+ import type { SharedAudioContextForMediaPlayer } from '../shared-audio-context-for-media-player';
2
3
  export declare const HEALTHY_BUFFER_THRESHOLD_SECONDS = 1;
3
4
  export type QueuedNode = {
4
5
  node: AudioBufferSourceNode;
@@ -6,28 +7,35 @@ export type QueuedNode = {
6
7
  buffer: AudioBuffer;
7
8
  scheduledTime: number;
8
9
  playbackRate: number;
10
+ scheduledAtAnchor: number;
9
11
  };
10
12
  export type QueuedPeriod = {
11
13
  from: number;
12
14
  until: number;
13
15
  };
14
- export declare const makeAudioIterator: (startFromSecond: number, maximumTimestamp: number, cache: {
15
- prewarmIteratorForLooping: ({ timeToSeek, maximumTimestamp, }: {
16
- timeToSeek: number;
17
- maximumTimestamp: number;
18
- }) => void;
19
- makeIteratorOrUsePrewarmed: (timeToSeek: number, maximumTimestamp: number) => AsyncGenerator<WrappedAudioBuffer, void, unknown>;
20
- destroy: () => void;
21
- }, debugAudioScheduling: boolean) => {
22
- destroy: (audioContext: AudioContext) => void;
16
+ export declare const makeAudioIterator: ({ startFromSecond, maximumTimestamp, cache, debugAudioScheduling, }: {
17
+ startFromSecond: number;
18
+ maximumTimestamp: number;
19
+ cache: {
20
+ prewarmIteratorForLooping: ({ timeToSeek, maximumTimestamp, }: {
21
+ timeToSeek: number;
22
+ maximumTimestamp: number;
23
+ }) => void;
24
+ makeIteratorOrUsePrewarmed: (timeToSeek: number, maximumTimestamp: number) => AsyncGenerator<WrappedAudioBuffer, void, unknown>;
25
+ destroy: () => void;
26
+ };
27
+ debugAudioScheduling: boolean;
28
+ }) => {
29
+ destroy: (audioContext: SharedAudioContextForMediaPlayer) => void;
23
30
  getNext: () => Promise<IteratorResult<WrappedAudioBuffer, void>>;
24
31
  isDestroyed: () => boolean;
25
- addQueuedAudioNode: ({ node, timestamp, buffer, scheduledTime, playbackRate, }: {
32
+ addQueuedAudioNode: ({ node, timestamp, buffer, scheduledTime, playbackRate, scheduledAtAnchor, }: {
26
33
  node: AudioBufferSourceNode;
27
34
  timestamp: number;
28
35
  buffer: AudioBuffer;
29
36
  scheduledTime: number;
30
37
  playbackRate: number;
38
+ scheduledAtAnchor: number;
31
39
  }) => void;
32
40
  removeQueuedAudioNode: (node: AudioBufferSourceNode) => void;
33
41
  getAndClearAudioChunksForAfterResuming: () => {
@@ -2,11 +2,12 @@ import type { InputAudioTrack, WrappedAudioBuffer } from 'mediabunny';
2
2
  import { type ScheduleAudioNodeResult } from 'remotion';
3
3
  import type { DelayPlaybackIfNotPremounting } from './delay-playback-if-not-premounting';
4
4
  import type { Nonce } from './nonce-manager';
5
+ import type { SharedAudioContextForMediaPlayer } from './shared-audio-context-for-media-player';
5
6
  type ScheduleAudioNode = (node: AudioBufferSourceNode, mediaTimestamp: number) => ScheduleAudioNodeResult;
6
7
  export declare const audioIteratorManager: ({ audioTrack, delayPlaybackHandleIfNotPremounting, sharedAudioContext, getIsLooping, getEndTime, getStartTime, initialMuted, drawDebugOverlay, }: {
7
8
  audioTrack: InputAudioTrack;
8
9
  delayPlaybackHandleIfNotPremounting: () => DelayPlaybackIfNotPremounting;
9
- sharedAudioContext: AudioContext;
10
+ sharedAudioContext: SharedAudioContextForMediaPlayer;
10
11
  getIsLooping: () => boolean;
11
12
  getEndTime: () => number;
12
13
  getStartTime: () => number;
@@ -28,15 +29,16 @@ export declare const audioIteratorManager: ({ audioTrack, delayPlaybackHandleIfN
28
29
  }) => void;
29
30
  pausePlayback: () => void;
30
31
  getAudioBufferIterator: () => {
31
- destroy: (audioContext: AudioContext) => void;
32
+ destroy: (audioContext: SharedAudioContextForMediaPlayer) => void;
32
33
  getNext: () => Promise<IteratorResult<WrappedAudioBuffer, void>>;
33
34
  isDestroyed: () => boolean;
34
- addQueuedAudioNode: ({ node, timestamp, buffer, scheduledTime, playbackRate, }: {
35
+ addQueuedAudioNode: ({ node, timestamp, buffer, scheduledTime, playbackRate, scheduledAtAnchor, }: {
35
36
  node: AudioBufferSourceNode;
36
37
  timestamp: number;
37
38
  buffer: AudioBuffer;
38
39
  scheduledTime: number;
39
40
  playbackRate: number;
41
+ scheduledAtAnchor: number;
40
42
  }) => void;
41
43
  removeQueuedAudioNode: (node: AudioBufferSourceNode) => void;
42
44
  getAndClearAudioChunksForAfterResuming: () => {
@@ -22,15 +22,16 @@ export declare const drawPreviewOverlay: ({ context, audioTime, audioContextStat
22
22
  }) => void;
23
23
  pausePlayback: () => void;
24
24
  getAudioBufferIterator: () => {
25
- destroy: (audioContext: AudioContext) => void;
25
+ destroy: (audioContext: import("../shared-audio-context-for-media-player").SharedAudioContextForMediaPlayer) => void;
26
26
  getNext: () => Promise<IteratorResult<import("mediabunny").WrappedAudioBuffer, void>>;
27
27
  isDestroyed: () => boolean;
28
- addQueuedAudioNode: ({ node, timestamp, buffer, scheduledTime, playbackRate, }: {
28
+ addQueuedAudioNode: ({ node, timestamp, buffer, scheduledTime, playbackRate, scheduledAtAnchor, }: {
29
29
  node: AudioBufferSourceNode;
30
30
  timestamp: number;
31
31
  buffer: AudioBuffer;
32
32
  scheduledTime: number;
33
33
  playbackRate: number;
34
+ scheduledAtAnchor: number;
34
35
  }) => void;
35
36
  removeQueuedAudioNode: (node: AudioBufferSourceNode) => void;
36
37
  getAndClearAudioChunksForAfterResuming: () => {
@@ -139,7 +139,12 @@ var setGlobalTimeAnchor = ({
139
139
  };
140
140
 
141
141
  // src/audio/audio-preview-iterator.ts
142
- var makeAudioIterator = (startFromSecond, maximumTimestamp, cache, debugAudioScheduling) => {
142
+ var makeAudioIterator = ({
143
+ startFromSecond,
144
+ maximumTimestamp,
145
+ cache,
146
+ debugAudioScheduling
147
+ }) => {
143
148
  let destroyed = false;
144
149
  const iterator = cache.makeIteratorOrUsePrewarmed(startFromSecond, maximumTimestamp);
145
150
  const queuedAudioNodes = [];
@@ -149,15 +154,15 @@ var makeAudioIterator = (startFromSecond, maximumTimestamp, cache, debugAudioSch
149
154
  const cleanupAudioQueue = (audioContext) => {
150
155
  for (const node of queuedAudioNodes) {
151
156
  try {
152
- const currentlyHearing = audioContext.getOutputTimestamp().contextTime;
153
- const nodeEndTime = node.scheduledTime + node.buffer.duration / node.playbackRate;
154
- const isAlreadyPlaying = node.scheduledTime - ALLOWED_GLOBAL_TIME_ANCHOR_SHIFT < audioContext.currentTime;
155
- const shouldKeep = isAlreadyPlaying;
156
- if (shouldKeep) {
157
+ const isAlreadyPlaying = node.scheduledTime - ALLOWED_GLOBAL_TIME_ANCHOR_SHIFT < audioContext.audioContext.currentTime;
158
+ const wasScheduledForThisAnchor = node.scheduledAtAnchor === audioContext.audioSyncAnchor.value;
159
+ if (isAlreadyPlaying && wasScheduledForThisAnchor) {
157
160
  continue;
158
161
  }
159
162
  if (debugAudioScheduling) {
160
- Internals3.Log.info({ logLevel: "trace", tag: "audio-scheduling" }, `Stopping node ${node.timestamp.toFixed(3)}, currently hearing = ${currentlyHearing.toFixed(3)} currentTime = ${audioContext.currentTime.toFixed(3)} nodeEndTime = ${nodeEndTime.toFixed(3)} scheduledTime = ${node.scheduledTime.toFixed(3)}`);
163
+ const currentlyHearing = audioContext.audioContext.getOutputTimestamp().contextTime;
164
+ const nodeEndTime = node.scheduledTime + node.buffer.duration / node.playbackRate;
165
+ Internals3.Log.info({ logLevel: "trace", tag: "audio-scheduling" }, `Stopping node ${node.timestamp.toFixed(3)}, currently hearing = ${currentlyHearing.toFixed(3)} currentTime = ${audioContext.audioContext.currentTime.toFixed(3)} nodeEndTime = ${nodeEndTime.toFixed(3)} scheduledTime = ${node.scheduledTime.toFixed(3)}`);
161
166
  }
162
167
  node.node.stop();
163
168
  } catch {}
@@ -318,14 +323,16 @@ var makeAudioIterator = (startFromSecond, maximumTimestamp, cache, debugAudioSch
318
323
  timestamp,
319
324
  buffer,
320
325
  scheduledTime,
321
- playbackRate
326
+ playbackRate,
327
+ scheduledAtAnchor
322
328
  }) => {
323
329
  queuedAudioNodes.push({
324
330
  node,
325
331
  timestamp,
326
332
  buffer,
327
333
  scheduledTime,
328
- playbackRate
334
+ playbackRate,
335
+ scheduledAtAnchor
329
336
  });
330
337
  },
331
338
  removeQueuedAudioNode: (node) => {
@@ -570,8 +577,8 @@ var audioIteratorManager = ({
570
577
  }) => {
571
578
  let muted = initialMuted;
572
579
  let currentVolume = 1;
573
- const gainNode = sharedAudioContext.createGain();
574
- gainNode.connect(sharedAudioContext.destination);
580
+ const gainNode = sharedAudioContext.audioContext.createGain();
581
+ gainNode.connect(sharedAudioContext.audioContext.destination);
575
582
  const audioSink = new AudioBufferSink(audioTrack);
576
583
  const prewarmedAudioIteratorCache = makePrewarmedAudioIteratorCache(audioSink);
577
584
  let audioBufferIterator = null;
@@ -587,13 +594,13 @@ var audioIteratorManager = ({
587
594
  if (!audioBufferIterator) {
588
595
  throw new Error("Audio buffer iterator not found");
589
596
  }
590
- if (sharedAudioContext.state !== "running") {
597
+ if (sharedAudioContext.audioContext.state !== "running") {
591
598
  throw new Error("Tried to schedule node while audio context is not running");
592
599
  }
593
600
  if (muted) {
594
601
  return;
595
602
  }
596
- const node = sharedAudioContext.createBufferSource();
603
+ const node = sharedAudioContext.audioContext.createBufferSource();
597
604
  node.buffer = buffer;
598
605
  node.playbackRate.value = playbackRate;
599
606
  node.connect(gainNode);
@@ -611,7 +618,8 @@ var audioIteratorManager = ({
611
618
  timestamp: mediaTimestamp,
612
619
  buffer,
613
620
  scheduledTime: started.scheduledTime,
614
- playbackRate
621
+ playbackRate,
622
+ scheduledAtAnchor: sharedAudioContext.audioSyncAnchor.value
615
623
  });
616
624
  node.onended = () => {
617
625
  setTimeout(() => {
@@ -658,7 +666,7 @@ var audioIteratorManager = ({
658
666
  if (buffer.timestamp >= endTime) {
659
667
  return;
660
668
  }
661
- if (getIsPlaying() && sharedAudioContext.state === "running" && (sharedAudioContext.getOutputTimestamp().contextTime ?? 0) > 0) {
669
+ if (getIsPlaying() && sharedAudioContext.audioContext.state === "running" && (sharedAudioContext.audioContext.getOutputTimestamp().contextTime ?? 0) > 0) {
662
670
  resumeScheduledAudioChunks({
663
671
  playbackRate,
664
672
  scheduleAudioNode,
@@ -698,7 +706,12 @@ var audioIteratorManager = ({
698
706
  audioBufferIterator?.destroy(sharedAudioContext);
699
707
  const delayHandle = __using(__stack, delayPlaybackHandleIfNotPremounting(), 0);
700
708
  currentDelayHandle = delayHandle;
701
- const iterator = makeAudioIterator(startFromSecond, getEndTime(), prewarmedAudioIteratorCache, debugAudioScheduling);
709
+ const iterator = makeAudioIterator({
710
+ startFromSecond,
711
+ maximumTimestamp: getEndTime(),
712
+ cache: prewarmedAudioIteratorCache,
713
+ debugAudioScheduling
714
+ });
702
715
  audioIteratorsCreated++;
703
716
  audioBufferIterator = iterator;
704
717
  try {
@@ -782,7 +795,7 @@ var audioIteratorManager = ({
782
795
  }
783
796
  const queuedPeriod = audioBufferIterator.getQueuedPeriod();
784
797
  const queuedPeriodMinusLatency = queuedPeriod ? {
785
- from: queuedPeriod.from - ALLOWED_GLOBAL_TIME_ANCHOR_SHIFT - sharedAudioContext.baseLatency - sharedAudioContext.outputLatency,
798
+ from: queuedPeriod.from - ALLOWED_GLOBAL_TIME_ANCHOR_SHIFT - sharedAudioContext.audioContext.baseLatency - sharedAudioContext.audioContext.outputLatency,
786
799
  until: queuedPeriod.until
787
800
  } : null;
788
801
  const currentTimeIsAlreadyQueued = isAlreadyQueued(newTime, queuedPeriodMinusLatency);
@@ -1360,7 +1373,7 @@ class MediaPlayer {
1360
1373
  this.audioIteratorManager = audioIteratorManager({
1361
1374
  audioTrack,
1362
1375
  delayPlaybackHandleIfNotPremounting: this.delayPlaybackHandleIfNotPremounting,
1363
- sharedAudioContext: this.sharedAudioContext.audioContext,
1376
+ sharedAudioContext: this.sharedAudioContext,
1364
1377
  getIsLooping: () => this.loop,
1365
1378
  getEndTime: () => this.getEndTime(),
1366
1379
  getStartTime: () => this.getStartTime(),
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@remotion/media",
3
- "version": "4.0.434",
3
+ "version": "4.0.436",
4
4
  "main": "dist/index.js",
5
5
  "types": "dist/index.d.ts",
6
6
  "module": "dist/esm/index.mjs",
@@ -23,7 +23,7 @@
23
23
  },
24
24
  "dependencies": {
25
25
  "mediabunny": "1.37.0",
26
- "remotion": "4.0.434",
26
+ "remotion": "4.0.436",
27
27
  "zod": "4.3.6"
28
28
  },
29
29
  "peerDependencies": {
@@ -31,7 +31,7 @@
31
31
  "react-dom": ">=16.8.0"
32
32
  },
33
33
  "devDependencies": {
34
- "@remotion/eslint-config-internal": "4.0.434",
34
+ "@remotion/eslint-config-internal": "4.0.436",
35
35
  "@vitest/browser-webdriverio": "4.0.9",
36
36
  "eslint": "9.19.0",
37
37
  "react": "19.2.3",