@remotion/webcodecs 4.0.252 → 4.0.253

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
@@ -505,16 +505,38 @@ import { MediaParserInternals } from "@remotion/media-parser";
  var { Log } = MediaParserInternals;

  // src/io-manager/make-timeout-promise.ts
- var makeTimeoutPromise = (label, ms) => {
+ var makeTimeoutPromise = ({
+ label,
+ ms,
+ controller
+ }) => {
  const { promise, reject, resolve } = withResolvers();
- const timeout = setTimeout(() => {
- reject(new Error(`${label()} (timed out after ${ms}ms)`));
- }, ms);
+ let timeout = null;
+ const set = () => {
+ timeout = setTimeout(() => {
+ reject(new Error(`${label()} (timed out after ${ms}ms)`));
+ }, ms);
+ };
+ set();
+ const onPause = () => {
+ if (timeout) {
+ clearTimeout(timeout);
+ }
+ };
+ const onResume = () => {
+ set();
+ };
+ controller.addEventListener("pause", onPause);
+ controller.addEventListener("resume", onResume);
  return {
  timeoutPromise: promise,
  clear: () => {
- clearTimeout(timeout);
+ if (timeout) {
+ clearTimeout(timeout);
+ }
  resolve();
+ controller.removeEventListener("pause", onPause);
+ controller.removeEventListener("resume", onResume);
  }
  };
  };
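
The hunk above changes `makeTimeoutPromise` from positional `(label, ms)` arguments to an options object and makes the timeout pause-aware: the pending timer is cleared when the controller emits "pause" and re-armed on "resume". Below is a minimal standalone sketch of the same pattern; it assumes a generic emitter with "pause"/"resume" events, and the names `PauseResumeEmitter` and `makePausableTimeout` are illustrative, not part of the package.

// Illustrative sketch only, not the package's actual API.
// Mirrors the pause/resume-aware timeout from the diff above; note that,
// as in the diff, resuming arms a fresh `ms` timeout rather than the
// remaining time.
type PauseResumeEmitter = {
  addEventListener(type: "pause" | "resume", cb: () => void): void;
  removeEventListener(type: "pause" | "resume", cb: () => void): void;
};

const makePausableTimeout = (
  label: () => string,
  ms: number,
  emitter: PauseResumeEmitter,
) => {
  let resolve!: () => void;
  let reject!: (err: Error) => void;
  const timeoutPromise = new Promise<void>((res, rej) => {
    resolve = res;
    reject = rej;
  });

  let timeout: ReturnType<typeof setTimeout> | null = null;
  const arm = () => {
    timeout = setTimeout(() => {
      reject(new Error(`${label()} (timed out after ${ms}ms)`));
    }, ms);
  };
  arm();

  const onPause = () => {
    // No timeout should fire while the operation is paused.
    if (timeout) {
      clearTimeout(timeout);
    }
  };
  const onResume = () => arm();
  emitter.addEventListener("pause", onPause);
  emitter.addEventListener("resume", onResume);

  return {
    timeoutPromise,
    clear: () => {
      if (timeout) {
        clearTimeout(timeout);
      }
      resolve();
      emitter.removeEventListener("pause", onPause);
      emitter.removeEventListener("resume", onResume);
    },
  };
};
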
@@ -590,19 +612,24 @@ var makeIoSynchronizer = ({
  unprocessed,
  unemitted,
  minimumProgress,
- signal
+ controller
  }) => {
- const { timeoutPromise, clear } = makeTimeoutPromise(() => [
- `Waited too long for ${label} to finish:`,
- `${getUnemittedItems()} unemitted items`,
- `${getUnprocessed()} unprocessed items: ${JSON.stringify(_unprocessed)}`,
- `smallest progress: ${progress.getSmallestProgress()}`,
- `inputs: ${JSON.stringify(inputs)}`,
- `last output: ${lastOutput}`,
- `wanted: ${unemitted} unemitted items, ${unprocessed} unprocessed items, minimum progress ${minimumProgress}`
- ].join(`
- `), 1e4);
- signal.addEventListener("abort", clear);
+ await controller._internals.checkForAbortAndPause();
+ const { timeoutPromise, clear } = makeTimeoutPromise({
+ label: () => [
+ `Waited too long for ${label} to finish:`,
+ `${getUnemittedItems()} unemitted items`,
+ `${getUnprocessed()} unprocessed items: ${JSON.stringify(_unprocessed)}`,
+ `smallest progress: ${progress.getSmallestProgress()}`,
+ `inputs: ${JSON.stringify(inputs)}`,
+ `last output: ${lastOutput}`,
+ `wanted: ${unemitted} unemitted items, ${unprocessed} unprocessed items, minimum progress ${minimumProgress}`
+ ].join(`
+ `),
+ ms: 1e4,
+ controller
+ });
+ controller._internals.signal.addEventListener("abort", clear);
  await Promise.race([
  timeoutPromise,
  Promise.all([
@@ -623,14 +650,14 @@ var makeIoSynchronizer = ({
  })()
  ])
  ]).finally(() => clear());
- signal.removeEventListener("abort", clear);
+ controller._internals.signal.removeEventListener("abort", clear);
  };
- const waitForFinish = async (signal) => {
+ const waitForFinish = async (controller) => {
  await waitFor({
  unprocessed: 0,
  unemitted: 0,
  minimumProgress: null,
- signal
+ controller
  });
  };
  const onProcessed = () => {
@@ -651,13 +678,13 @@ var makeIoSynchronizer = ({
  var createAudioDecoder = ({
  onFrame,
  onError,
- signal,
+ controller,
  config,
  logLevel,
  track,
  progressTracker
  }) => {
- if (signal.aborted) {
+ if (controller._internals.signal.aborted) {
  throw new Error("Not creating audio decoder, already aborted");
  }
  if (config.codec === "pcm-s16") {
@@ -675,15 +702,17 @@ var createAudioDecoder = ({
  const abortHandler = () => {
  frame.close();
  };
- signal.addEventListener("abort", abortHandler, { once: true });
+ controller._internals.signal.addEventListener("abort", abortHandler, {
+ once: true
+ });
  outputQueue = outputQueue.then(() => {
- if (signal.aborted) {
+ if (controller._internals.signal.aborted) {
  return;
  }
  return onFrame(frame);
  }).then(() => {
  ioSynchronizer.onProcessed();
- signal.removeEventListener("abort", abortHandler);
+ controller._internals.signal.removeEventListener("abort", abortHandler);
  return Promise.resolve();
  }).catch((err) => {
  frame.close();
@@ -695,7 +724,7 @@ var createAudioDecoder = ({
  }
  });
  const close = () => {
- signal.removeEventListener("abort", onAbort);
+ controller._internals.signal.removeEventListener("abort", onAbort);
  if (audioDecoder.state === "closed") {
  return;
  }
@@ -704,7 +733,7 @@ var createAudioDecoder = ({
  const onAbort = () => {
  close();
  };
- signal.addEventListener("abort", onAbort);
+ controller._internals.signal.addEventListener("abort", onAbort);
  audioDecoder.configure(config);
  const processSample = async (audioSample) => {
  if (audioDecoder.state === "closed") {
@@ -715,7 +744,7 @@ var createAudioDecoder = ({
  unemitted: 20,
  unprocessed: 20,
  minimumProgress: audioSample.timestamp - 1e7,
- signal
+ controller
  });
  const chunk = new EncodedAudioChunk(audioSample);
  audioDecoder.decode(chunk);
@@ -733,7 +762,7 @@ var createAudioDecoder = ({
  } catch {
  }
  await queue;
- await ioSynchronizer.waitForFinish(signal);
+ await ioSynchronizer.waitForFinish(controller);
  await outputQueue;
  },
  close,
@@ -742,17 +771,20 @@ var createAudioDecoder = ({
  }
  };
  };
+ // src/audio-encoder.ts
+ import { MediaParserAbortError } from "@remotion/media-parser";
+
  // src/wav-audio-encoder.ts
  var getWaveAudioEncoder = ({
  onChunk,
- signal
+ controller
  }) => {
  return {
  close: () => {
  return Promise.resolve();
  },
  encodeFrame: (audioData) => {
- if (signal.aborted) {
+ if (controller._internals.signal.aborted) {
  return Promise.resolve();
  }
  const chunk = {
@@ -774,17 +806,17 @@ var createAudioEncoder = ({
  onChunk,
  onError,
  codec,
- signal,
+ controller,
  config: audioEncoderConfig,
  logLevel,
  onNewAudioSampleRate,
  progressTracker
  }) => {
- if (signal.aborted) {
- throw new Error("Not creating audio encoder, already aborted");
+ if (controller._internals.signal.aborted) {
+ throw new MediaParserAbortError("Not creating audio encoder, already aborted");
  }
  if (codec === "wav") {
- return getWaveAudioEncoder({ onChunk, signal });
+ return getWaveAudioEncoder({ onChunk, controller });
  }
  const ioSynchronizer = makeIoSynchronizer({
  logLevel,
@@ -796,7 +828,7 @@ var createAudioEncoder = ({
  output: (chunk) => {
  ioSynchronizer.onOutput(chunk.timestamp);
  prom = prom.then(() => {
- if (signal.aborted) {
+ if (controller._internals.signal.aborted) {
  return;
  }
  return onChunk(chunk);
@@ -812,7 +844,7 @@ var createAudioEncoder = ({
  }
  });
  const close = () => {
- signal.removeEventListener("abort", onAbort);
+ controller._internals.signal.removeEventListener("abort", onAbort);
  if (encoder.state === "closed") {
  return;
  }
@@ -821,7 +853,7 @@ var createAudioEncoder = ({
  const onAbort = () => {
  close();
  };
- signal.addEventListener("abort", onAbort);
+ controller._internals.signal.addEventListener("abort", onAbort);
  if (codec !== "opus" && codec !== "aac") {
  throw new Error('Only `codec: "opus"` and `codec: "aac"` is supported currently');
  }
@@ -835,7 +867,7 @@ var createAudioEncoder = ({
  unemitted: 20,
  unprocessed: 20,
  minimumProgress: audioData.timestamp - 1e7,
- signal
+ controller
  });
  if (encoder.state === "closed") {
  return;
@@ -862,7 +894,7 @@ var createAudioEncoder = ({
  },
  waitForFinish: async () => {
  await encoder.flush();
- await ioSynchronizer.waitForFinish(signal);
+ await ioSynchronizer.waitForFinish(controller);
  await prom;
  },
  close,
@@ -1387,7 +1419,11 @@ var canReencodeVideoTrack = async ({
  return Boolean(videoDecoderConfig && videoEncoderConfig);
  };
  // src/convert-media.ts
- import { parseMedia } from "@remotion/media-parser";
+ import {
+ MediaParserAbortError as MediaParserAbortError3,
+ MediaParserInternals as MediaParserInternals9
+ } from "@remotion/media-parser";
+ import { fetchReader } from "@remotion/media-parser/fetch";

  // src/auto-select-writer.ts
  var autoSelectWriter = async (writer, logLevel) => {
@@ -1492,9 +1528,6 @@ var makeProgressTracker = () => {
  };
  };

- // src/error-cause.ts
- var error_cause_default = Error;
-
  // src/generate-output-filename.ts
  var generateOutputFilename = (source, container) => {
  const filename = typeof source === "string" ? source : source instanceof File ? source.name : "converted";
@@ -1629,7 +1662,7 @@ var makeAudioTrackHandler = ({
  return null;
  }
  if (audioOperation.type === "fail") {
- throw new error_cause_default(`Audio track with ID ${track.trackId} resolved with {"type": "fail"}. This could mean that this audio track could neither be copied to the output container or re-encoded. You have the option to drop the track instead of failing it: https://remotion.dev/docs/webcodecs/track-transformation`);
+ throw new Error(`Audio track with ID ${track.trackId} resolved with {"type": "fail"}. This could mean that this audio track could neither be copied to the output container or re-encoded. You have the option to drop the track instead of failing it: https://remotion.dev/docs/webcodecs/track-transformation`);
  }
  if (audioOperation.type === "copy") {
  const addedTrack = await state.addTrack({
@@ -1669,11 +1702,11 @@ var makeAudioTrackHandler = ({
  description: track.description
  });
  if (!audioEncoderConfig) {
- abortConversion(new error_cause_default(`Could not configure audio encoder of track ${track.trackId}`));
+ abortConversion(new Error(`Could not configure audio encoder of track ${track.trackId}`));
  return null;
  }
  if (!audioDecoderConfig) {
- abortConversion(new error_cause_default(`Could not configure audio decoder of track ${track.trackId}`));
+ abortConversion(new Error(`Could not configure audio decoder of track ${track.trackId}`));
  return null;
  }
  const codecPrivate = audioOperation.audioCodec === "aac" ? MediaParserInternals3.createAacCodecPrivate({
@@ -1709,12 +1742,12 @@ var makeAudioTrackHandler = ({
  });
  },
  onError: (err) => {
- abortConversion(new error_cause_default(`Audio encoder of ${track.trackId} failed (see .cause of this error)`, {
+ abortConversion(new Error(`Audio encoder of ${track.trackId} failed (see .cause of this error)`, {
  cause: err
  }));
  },
  codec: audioOperation.audioCodec,
- signal: controller.signal,
+ controller,
  config: audioEncoderConfig,
  logLevel,
  progressTracker
@@ -1724,19 +1757,19 @@ var makeAudioTrackHandler = ({
  const newAudioData = onAudioData ? await onAudioData?.({ audioData, track }) : audioData;
  if (newAudioData !== audioData) {
  if (newAudioData.duration !== audioData.duration) {
- throw new error_cause_default(`onAudioData returned a different duration than the input audio data. Original duration: ${audioData.duration}, new duration: ${newAudioData.duration}`);
+ throw new Error(`onAudioData returned a different duration than the input audio data. Original duration: ${audioData.duration}, new duration: ${newAudioData.duration}`);
  }
  if (newAudioData.numberOfChannels !== audioData.numberOfChannels) {
- throw new error_cause_default(`onAudioData returned a different number of channels than the input audio data. Original channels: ${audioData.numberOfChannels}, new channels: ${newAudioData.numberOfChannels}`);
+ throw new Error(`onAudioData returned a different number of channels than the input audio data. Original channels: ${audioData.numberOfChannels}, new channels: ${newAudioData.numberOfChannels}`);
  }
  if (newAudioData.sampleRate !== audioData.sampleRate) {
- throw new error_cause_default(`onAudioData returned a different sample rate than the input audio data. Original sample rate: ${audioData.sampleRate}, new sample rate: ${newAudioData.sampleRate}`);
+ throw new Error(`onAudioData returned a different sample rate than the input audio data. Original sample rate: ${audioData.sampleRate}, new sample rate: ${newAudioData.sampleRate}`);
  }
  if (newAudioData.format !== audioData.format) {
- throw new error_cause_default(`onAudioData returned a different format than the input audio data. Original format: ${audioData.format}, new format: ${newAudioData.format}`);
+ throw new Error(`onAudioData returned a different format than the input audio data. Original format: ${audioData.format}, new format: ${newAudioData.format}`);
  }
  if (newAudioData.timestamp !== audioData.timestamp) {
- throw new error_cause_default(`onAudioData returned a different timestamp than the input audio data. Original timestamp: ${audioData.timestamp}, new timestamp: ${newAudioData.timestamp}`);
+ throw new Error(`onAudioData returned a different timestamp than the input audio data. Original timestamp: ${audioData.timestamp}, new timestamp: ${newAudioData.timestamp}`);
  }
  audioData.close();
  }
@@ -1750,11 +1783,11 @@ var makeAudioTrackHandler = ({
  newAudioData.close();
  },
  onError(error) {
- abortConversion(new error_cause_default(`Audio decoder of track ${track.trackId} failed. Config: ${JSON.stringify(audioDecoderConfig)} (see .cause of this error)`, {
+ abortConversion(new Error(`Audio decoder of track ${track.trackId} failed. Config: ${JSON.stringify(audioDecoderConfig)} (see .cause of this error)`, {
  cause: error
  }));
  },
- signal: controller.signal,
+ controller,
  config: audioDecoderConfig,
  logLevel,
  track,
@@ -1915,7 +1948,7 @@ var onFrame = async ({
  var createVideoDecoder = ({
  onFrame: onFrame2,
  onError,
- signal,
+ controller,
  config,
  logLevel,
  progress
@@ -1932,15 +1965,17 @@ var createVideoDecoder = ({
  const abortHandler = () => {
  inputFrame.close();
  };
- signal.addEventListener("abort", abortHandler, { once: true });
+ controller._internals.signal.addEventListener("abort", abortHandler, {
+ once: true
+ });
  outputQueue = outputQueue.then(() => {
- if (signal.aborted) {
+ if (controller._internals.signal.aborted) {
  return;
  }
  return onFrame2(inputFrame);
  }).then(() => {
  ioSynchronizer.onProcessed();
- signal.removeEventListener("abort", abortHandler);
+ controller._internals.signal.removeEventListener("abort", abortHandler);
  return Promise.resolve();
  }).catch((err) => {
  inputFrame.close();
@@ -1952,7 +1987,7 @@ var createVideoDecoder = ({
  }
  });
  const close = () => {
- signal.removeEventListener("abort", onAbort);
+ controller._internals.signal.removeEventListener("abort", onAbort);
  if (videoDecoder.state === "closed") {
  return;
  }
@@ -1961,7 +1996,7 @@ var createVideoDecoder = ({
  const onAbort = () => {
  close();
  };
- signal.addEventListener("abort", onAbort);
+ controller._internals.signal.addEventListener("abort", onAbort);
  videoDecoder.configure(config);
  const processSample = async (sample) => {
  if (videoDecoder.state === "closed") {
@@ -1975,7 +2010,7 @@ var createVideoDecoder = ({
  unemitted: 20,
  unprocessed: 2,
  minimumProgress: sample.timestamp - 1e7,
- signal
+ controller
  });
  if (sample.type === "key") {
  await videoDecoder.flush();
@@ -1992,7 +2027,7 @@ var createVideoDecoder = ({
  waitForFinish: async () => {
  await videoDecoder.flush();
  Log.verbose(logLevel, "Flushed video decoder");
- await ioSynchronizer.waitForFinish(signal);
+ await ioSynchronizer.waitForFinish(controller);
  Log.verbose(logLevel, "IO synchro finished");
  await outputQueue;
  Log.verbose(logLevel, "Output queue finished");
@@ -2007,17 +2042,18 @@ var createVideoDecoder = ({
  };

  // src/video-encoder.ts
+ import { MediaParserAbortError as MediaParserAbortError2 } from "@remotion/media-parser";
  var createVideoEncoder = ({
  onChunk,
  onError,
- signal,
+ controller,
  config,
  logLevel,
  outputCodec,
  progress
  }) => {
- if (signal.aborted) {
- throw new Error("Not creating video encoder, already aborted");
+ if (controller._internals.signal.aborted) {
+ throw new MediaParserAbortError2("Not creating video encoder, already aborted");
  }
  const ioSynchronizer = makeIoSynchronizer({
  logLevel,
@@ -2033,7 +2069,7 @@ var createVideoEncoder = ({
  const timestamp = chunk.timestamp + (chunk.duration ?? 0);
  ioSynchronizer.onOutput(timestamp);
  outputQueue = outputQueue.then(() => {
- if (signal.aborted) {
+ if (controller._internals.signal.aborted) {
  return;
  }
  return onChunk(chunk, metadata ?? null);
@@ -2046,7 +2082,7 @@ var createVideoEncoder = ({
  }
  });
  const close = () => {
- signal.removeEventListener("abort", onAbort);
+ controller._internals.signal.removeEventListener("abort", onAbort);
  if (encoder.state === "closed") {
  return;
  }
@@ -2055,7 +2091,7 @@ var createVideoEncoder = ({
  const onAbort = () => {
  close();
  };
- signal.addEventListener("abort", onAbort);
+ controller._internals.signal.addEventListener("abort", onAbort);
  Log.verbose(logLevel, "Configuring video encoder", config);
  encoder.configure(config);
  let framesProcessed = 0;
@@ -2068,7 +2104,7 @@ var createVideoEncoder = ({
  unemitted: 10,
  unprocessed: 10,
  minimumProgress: frame.timestamp - 1e7,
- signal
+ controller
  });
  if (encoder.state === "closed") {
  return;
@@ -2092,7 +2128,7 @@ var createVideoEncoder = ({
  waitForFinish: async () => {
  await encoder.flush();
  await outputQueue;
- await ioSynchronizer.waitForFinish(signal);
+ await ioSynchronizer.waitForFinish(controller);
  },
  close,
  flush: async () => {
@@ -2116,8 +2152,8 @@ var makeVideoTrackHandler = ({
  progress,
  resizeOperation
  }) => async ({ track, container: inputContainer }) => {
- if (controller.signal.aborted) {
- throw new error_cause_default("Aborted");
+ if (controller._internals.signal.aborted) {
+ throw new Error("Aborted");
  }
  const canCopyTrack = canCopyVideoTrack({
  inputContainer,
@@ -2140,7 +2176,7 @@ var makeVideoTrackHandler = ({
  return null;
  }
  if (videoOperation.type === "fail") {
- throw new error_cause_default(`Video track with ID ${track.trackId} resolved with {"type": "fail"}. This could mean that this video track could neither be copied to the output container or re-encoded. You have the option to drop the track instead of failing it: https://remotion.dev/docs/webcodecs/track-transformation`);
+ throw new Error(`Video track with ID ${track.trackId} resolved with {"type": "fail"}. This could mean that this video track could neither be copied to the output container or re-encoded. You have the option to drop the track instead of failing it: https://remotion.dev/docs/webcodecs/track-transformation`);
  }
  if (videoOperation.type === "copy") {
  Log.verbose(logLevel, `Copying video track with codec ${track.codec} and timescale ${track.timescale}`);
@@ -2169,7 +2205,7 @@ var makeVideoTrackHandler = ({
  };
  }
  if (videoOperation.type !== "reencode") {
- throw new error_cause_default(`Video track with ID ${track.trackId} could not be resolved with a valid operation. Received ${JSON.stringify(videoOperation)}, but must be either "copy", "reencode", "drop" or "fail"`);
+ throw new Error(`Video track with ID ${track.trackId} could not be resolved with a valid operation. Received ${JSON.stringify(videoOperation)}, but must be either "copy", "reencode", "drop" or "fail"`);
  }
  const rotation = (videoOperation.rotate ?? rotate) - track.rotation;
  const { height: newHeight, width: newWidth } = calculateNewDimensionsFromRotateAndScale({
@@ -2187,11 +2223,11 @@ var makeVideoTrackHandler = ({
  });
  const videoDecoderConfig = await getVideoDecoderConfigWithHardwareAcceleration(track);
  if (videoEncoderConfig === null) {
- abortConversion(new error_cause_default(`Could not configure video encoder of track ${track.trackId}`));
+ abortConversion(new Error(`Could not configure video encoder of track ${track.trackId}`));
  return null;
  }
  if (videoDecoderConfig === null) {
- abortConversion(new error_cause_default(`Could not configure video decoder of track ${track.trackId}`));
+ abortConversion(new Error(`Could not configure video decoder of track ${track.trackId}`));
  return null;
  }
  const { trackNumber } = await state.addTrack({
@@ -2220,11 +2256,11 @@ var makeVideoTrackHandler = ({
  });
  },
  onError: (err) => {
- abortConversion(new error_cause_default(`Video encoder of track ${track.trackId} failed (see .cause of this error)`, {
+ abortConversion(new Error(`Video encoder of track ${track.trackId} failed (see .cause of this error)`, {
  cause: err
  }));
  },
- signal: controller.signal,
+ controller,
  config: videoEncoderConfig,
  logLevel,
  outputCodec: videoOperation.videoCodec,
@@ -2244,11 +2280,11 @@ var makeVideoTrackHandler = ({
  });
  },
  onError: (err) => {
- abortConversion(new error_cause_default(`Video decoder of track ${track.trackId} failed (see .cause of this error)`, {
+ abortConversion(new Error(`Video decoder of track ${track.trackId} failed (see .cause of this error)`, {
  cause: err
  }));
  },
- signal: controller.signal,
+ controller,
  logLevel,
  progress
  });
@@ -4681,7 +4717,7 @@ var throttledStateUpdate = ({
  const onAbort = () => {
  clearInterval(interval);
  };
- signal.addEventListener("abort", onAbort, { once: true });
+ signal?.addEventListener("abort", onAbort, { once: true });
  return {
  get: () => currentState,
  update: (fn) => {
@@ -4689,12 +4725,26 @@ var throttledStateUpdate = ({
  },
  stopAndGetLastProgress: () => {
  clearInterval(interval);
- signal.removeEventListener("abort", onAbort);
+ signal?.removeEventListener("abort", onAbort);
  return currentState;
  }
  };
  };

+ // src/webcodecs-controller.ts
+ import { mediaParserController } from "@remotion/media-parser";
+ var webcodecsController = () => {
+ const controller = mediaParserController();
+ return {
+ abort: controller.abort,
+ pause: controller.pause,
+ resume: controller.resume,
+ addEventListener: controller.addEventListener,
+ removeEventListener: controller.removeEventListener,
+ _internals: controller._internals
+ };
+ };
+
  // src/convert-media.ts
  var convertMedia = async function({
  src,
@@ -4704,7 +4754,7 @@ var convertMedia = async function({
  audioCodec,
  container,
  videoCodec,
- signal: userPassedAbortSignal,
+ controller = webcodecsController(),
  onAudioTrack: userAudioResolver,
  onVideoTrack: userVideoResolver,
  reader,
@@ -4715,34 +4765,60 @@ var convertMedia = async function({
  rotate,
  apiKey,
  resize,
+ onAudioCodec,
+ onContainer,
+ onDimensions,
+ onDurationInSeconds,
+ onFps,
+ onImages,
+ onInternalStats,
+ onIsHdr,
+ onKeyframes,
+ onLocation,
+ onMetadata,
+ onMimeType,
+ onName,
+ onNumberOfAudioChannels,
+ onRotation,
+ onSampleRate,
+ onSize,
+ onSlowAudioBitrate,
+ onSlowDurationInSeconds,
+ onSlowFps,
+ onSlowKeyframes,
+ onSlowNumberOfFrames,
+ onSlowVideoBitrate,
+ onStructure,
+ onTracks,
+ onUnrotatedDimensions,
+ onVideoCodec,
  ...more
  }) {
- if (userPassedAbortSignal?.aborted) {
- return Promise.reject(new error_cause_default("Aborted"));
+ if (controller._internals.signal.aborted) {
+ return Promise.reject(new MediaParserAbortError3("Aborted"));
  }
- if (container !== "webm" && availableContainers.indexOf(container) === -1) {
+ if (availableContainers.indexOf(container) === -1) {
  return Promise.reject(new TypeError(`Only the following values for "container" are supported currently: ${JSON.stringify(availableContainers)}`));
  }
  if (videoCodec && availableVideoCodecs.indexOf(videoCodec) === -1) {
  return Promise.reject(new TypeError(`Only the following values for "videoCodec" are supported currently: ${JSON.stringify(availableVideoCodecs)}`));
  }
  const { resolve, reject, getPromiseToImmediatelyReturn } = withResolversAndWaitForReturn();
- const controller = new AbortController;
  const abortConversion = (errCause) => {
  reject(errCause);
- if (!controller.signal.aborted) {
+ if (!controller._internals.signal.aborted) {
  controller.abort();
  }
  };
  const onUserAbort = () => {
- abortConversion(new error_cause_default("Conversion aborted by user"));
+ abortConversion(new MediaParserAbortError3("Conversion aborted by user"));
  };
- userPassedAbortSignal?.addEventListener("abort", onUserAbort);
+ controller._internals.signal.addEventListener("abort", onUserAbort);
  const creator = selectContainerCreator(container);
  const throttledState = throttledStateUpdate({
  updateFn: onProgressDoNotCallDirectly ?? null,
  everyMilliseconds: progressIntervalInMs ?? 100,
- signal: controller.signal
+ signal: controller._internals.signal
  });
  const progressTracker = makeProgressTracker();
  const state = await creator({
@@ -4800,17 +4876,17 @@ var convertMedia = async function({
  progressTracker,
  onAudioData: onAudioData ?? null
  });
- parseMedia({
+ MediaParserInternals9.internalParseMedia({
  logLevel,
  src,
  onVideoTrack,
  onAudioTrack,
- signal: controller.signal,
+ controller,
  fields: {
  ...fields,
  durationInSeconds: true
  },
- reader,
+ reader: reader ?? fetchReader,
  ...more,
  onDurationInSeconds: (durationInSeconds) => {
  if (durationInSeconds === null) {
@@ -4831,7 +4907,40 @@ var convertMedia = async function({
  })
  };
  });
- }
+ },
+ acknowledgeRemotionLicense: true,
+ mode: "query",
+ onDiscardedData: null,
+ onError: () => ({ action: "fail" }),
+ onParseProgress: null,
+ progressIntervalInMs: null,
+ onAudioCodec: onAudioCodec ?? null,
+ onContainer: onContainer ?? null,
+ onDimensions: onDimensions ?? null,
+ onFps: onFps ?? null,
+ onImages: onImages ?? null,
+ onInternalStats: onInternalStats ?? null,
+ onIsHdr: onIsHdr ?? null,
+ onKeyframes: onKeyframes ?? null,
+ onLocation: onLocation ?? null,
+ onMetadata: onMetadata ?? null,
+ onMimeType: onMimeType ?? null,
+ onName: onName ?? null,
+ onNumberOfAudioChannels: onNumberOfAudioChannels ?? null,
+ onRotation: onRotation ?? null,
+ onSampleRate: onSampleRate ?? null,
+ onSize: onSize ?? null,
+ onSlowAudioBitrate: onSlowAudioBitrate ?? null,
+ onSlowDurationInSeconds: onSlowDurationInSeconds ?? null,
+ onSlowFps: onSlowFps ?? null,
+ onSlowKeyframes: onSlowKeyframes ?? null,
+ onSlowNumberOfFrames: onSlowNumberOfFrames ?? null,
+ onSlowVideoBitrate: onSlowVideoBitrate ?? null,
+ onStructure: onStructure ?? null,
+ onTracks: onTracks ?? null,
+ onUnrotatedDimensions: onUnrotatedDimensions ?? null,
+ onVideoCodec: onVideoCodec ?? null,
+ apiName: "convertMedia()"
  }).then(() => {
  return state.waitForFinish();
  }).then(() => {
@@ -4853,7 +4962,7 @@ var convertMedia = async function({
  throttledState.stopAndGetLastProgress();
  });
  return getPromiseToImmediatelyReturn().finally(() => {
- userPassedAbortSignal?.removeEventListener("abort", onUserAbort);
+ controller._internals.signal.removeEventListener("abort", onUserAbort);
  });
  };
  // src/get-available-audio-codecs.ts
@@ -4879,6 +4988,7 @@ var WebCodecsInternals = {
  };
  setRemotionImported();
  export {
+ webcodecsController,
  getDefaultVideoCodec,
  getDefaultAudioCodec,
  getAvailableVideoCodecs,
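
Taken together, the changes above replace the `signal` option of `convertMedia()` with a `controller` (defaulting to `webcodecsController()`), which exposes `abort()`, `pause()` and `resume()` and is now exported from the package. Below is a hedged usage sketch, based only on the API surface visible in this diff; the source URL is a placeholder and a real call may need further options such as codec settings.

import { convertMedia, webcodecsController } from "@remotion/webcodecs";

const controller = webcodecsController();

// Start a conversion and keep a handle on its promise.
// The option set is abbreviated here.
const conversion = convertMedia({
  src: "https://example.com/input.mp4", // placeholder source
  container: "webm",
  controller,
});

// Pausing clears pending internal timeouts; resuming re-arms them.
controller.pause();
controller.resume();

// Aborting rejects the conversion with a MediaParserAbortError.
controller.abort();

try {
  await conversion;
} catch (err) {
  console.log("Conversion did not finish:", err);
}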