@remotion/webcodecs 4.0.230 → 4.0.232

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. package/README.md +12 -1
  2. package/dist/audio-decoder.js +6 -0
  3. package/dist/audio-encoder-config.d.ts +4 -1
  4. package/dist/audio-encoder-config.js +6 -0
  5. package/dist/audio-encoder.d.ts +4 -3
  6. package/dist/audio-encoder.js +5 -1
  7. package/dist/browser-quirks.d.ts +2 -0
  8. package/dist/browser-quirks.js +11 -0
  9. package/dist/can-copy-audio-track.d.ts +1 -1
  10. package/dist/can-copy-audio-track.js +3 -0
  11. package/dist/can-copy-video-track.d.ts +1 -1
  12. package/dist/can-copy-video-track.js +4 -1
  13. package/dist/can-reencode-audio-track.d.ts +1 -1
  14. package/dist/can-reencode-audio-track.js +3 -0
  15. package/dist/can-reencode-video-track.d.ts +1 -1
  16. package/dist/codec-id.d.ts +2 -2
  17. package/dist/codec-id.js +8 -2
  18. package/dist/convert-media.d.ts +9 -5
  19. package/dist/convert-media.js +55 -42
  20. package/dist/convert-to-correct-videoframe.d.ts +9 -0
  21. package/dist/convert-to-correct-videoframe.js +32 -0
  22. package/dist/default-on-video-track-handler.js +4 -0
  23. package/dist/esm/index.mjs +347 -110
  24. package/dist/generate-output-filename.d.ts +2 -0
  25. package/dist/generate-output-filename.js +14 -0
  26. package/dist/get-available-audio-codecs.d.ts +7 -0
  27. package/dist/get-available-audio-codecs.js +18 -0
  28. package/dist/get-available-containers.d.ts +4 -0
  29. package/dist/get-available-containers.js +8 -0
  30. package/dist/get-available-video-codecs.d.ts +7 -0
  31. package/dist/get-available-video-codecs.js +18 -0
  32. package/dist/get-default-audio-codec.d.ts +2 -1
  33. package/dist/get-default-audio-codec.js +3 -0
  34. package/dist/get-default-video-codec.d.ts +3 -2
  35. package/dist/get-default-video-codec.js +7 -1
  36. package/dist/index.d.ts +8 -6
  37. package/dist/index.js +9 -5
  38. package/dist/io-manager/io-synchronizer.js +12 -6
  39. package/dist/on-audio-track-handler.d.ts +2 -1
  40. package/dist/on-audio-track.d.ts +5 -5
  41. package/dist/on-audio-track.js +22 -8
  42. package/dist/on-frame.d.ts +4 -4
  43. package/dist/on-frame.js +11 -11
  44. package/dist/on-video-track-handler.d.ts +2 -1
  45. package/dist/on-video-track.d.ts +6 -5
  46. package/dist/on-video-track.js +15 -7
  47. package/dist/select-container-creator.d.ts +2 -0
  48. package/dist/select-container-creator.js +17 -0
  49. package/dist/set-remotion-imported.d.ts +6 -0
  50. package/dist/set-remotion-imported.js +25 -0
  51. package/dist/throttled-state-update.d.ts +13 -0
  52. package/dist/throttled-state-update.js +49 -0
  53. package/dist/video-encoder-config.d.ts +1 -1
  54. package/dist/video-encoder-config.js +2 -0
  55. package/dist/video-encoder.d.ts +3 -1
  56. package/dist/video-encoder.js +8 -4
  57. package/dist/wav-audio-encoder.d.ts +2 -0
  58. package/dist/wav-audio-encoder.js +26 -0
  59. package/package.json +4 -3
@@ -1,3 +1,18 @@
+ // src/set-remotion-imported.ts
+ import { VERSION } from "@remotion/media-parser";
+ var setRemotionImported = () => {
+   if (typeof globalThis === "undefined") {
+     return;
+   }
+   if (globalThis.remotion_imported) {
+     return;
+   }
+   globalThis.remotion_imported = VERSION;
+   if (typeof window !== "undefined") {
+     window.remotion_imported = `${VERSION}-webcodecs`;
+   }
+ };
+
  // src/log.ts
  import { MediaParserInternals } from "@remotion/media-parser";
  var { Log } = MediaParserInternals;
@@ -114,12 +129,18 @@ var makeIoSynchronizer = (logLevel, label) => {
      _unprocessed,
      unemitted
    }) => {
-     while (getUnemittedItems() > unemitted) {
-       await waitForOutput();
-     }
-     while (getUnprocessed() > _unprocessed) {
-       await waitForProcessed();
-     }
+     await Promise.all([
+       async () => {
+         while (getUnemittedItems() > unemitted) {
+           await waitForOutput();
+         }
+       },
+       async () => {
+         while (getUnprocessed() > _unprocessed) {
+           await waitForProcessed();
+         }
+       }
+     ]);
    };
    const waitForFinish = async () => {
      await waitFor({ _unprocessed: 0, unemitted: 0 });
@@ -200,11 +221,15 @@ var createAudioDecoder = ({
    let queue = Promise.resolve();
    return {
      processSample: (sample) => {
+       if (sample.data.length === 0) {
+         return queue;
+       }
        queue = queue.then(() => processSample(sample));
        return queue;
      },
      waitForFinish: async () => {
        await audioDecoder.flush();
+       await queue;
        await ioSynchronizer.waitForFinish();
        await outputQueue;
      },
@@ -214,6 +239,33 @@ var createAudioDecoder = ({
      }
    };
  };
+ // src/wav-audio-encoder.ts
+ var getWaveAudioEncoder = ({
+   onChunk,
+   signal
+ }) => {
+   return {
+     close: () => {
+       return Promise.resolve();
+     },
+     encodeFrame: (audioData) => {
+       if (signal.aborted) {
+         return Promise.resolve();
+       }
+       const chunk = {
+         timestamp: audioData.timestamp,
+         duration: audioData.duration,
+         type: "key",
+         copyTo: (destination) => audioData.copyTo(destination, { planeIndex: 0, format: "s16" }),
+         byteLength: audioData.allocationSize({ planeIndex: 0, format: "s16" })
+       };
+       return onChunk(chunk);
+     },
+     flush: () => Promise.resolve(),
+     waitForFinish: () => Promise.resolve()
+   };
+ };
+
  // src/audio-encoder.ts
  var createAudioEncoder = ({
    onChunk,
@@ -227,6 +279,9 @@ var createAudioEncoder = ({
    if (signal.aborted) {
      throw new Error("Not creating audio encoder, already aborted");
    }
+   if (codec === "wav") {
+     return getWaveAudioEncoder({ onChunk, signal });
+   }
    const ioSynchronizer = makeIoSynchronizer(logLevel, "Audio encoder");
    let prom = Promise.resolve();
    const encoder = new AudioEncoder({
@@ -267,7 +322,7 @@ var createAudioEncoder = ({
      if (encoder.state === "closed") {
        return;
      }
-     await ioSynchronizer.waitFor({ unemitted: 2, _unprocessed: 2 });
+     await ioSynchronizer.waitFor({ unemitted: 20, _unprocessed: 20 });
      if (encoder.state === "closed") {
        return;
      }
@@ -313,6 +368,9 @@ var canCopyAudioTrack = ({
    if (container === "mp4") {
      return inputCodec === "aac";
    }
+   if (container === "wav") {
+     return false;
+   }
    throw new Error(`Unhandled codec: ${container}`);
  };
  // src/can-copy-video-track.ts
@@ -324,7 +382,10 @@ var canCopyVideoTrack = ({
      return inputCodec === "vp8" || inputCodec === "vp9";
    }
    if (container === "mp4") {
-     return inputCodec === "h264" || inputCodec === "h265";
+     return inputCodec === "h264";
+   }
+   if (container === "wav") {
+     return false;
    }
    throw new Error(`Unhandled codec: ${container}`);
  };
@@ -350,6 +411,9 @@ var getCodecString = (audioCodec) => {
    if (audioCodec === "aac") {
      return "mp4a.40.02";
    }
+   if (audioCodec === "wav") {
+     return "wav-should-not-to-into-audio-encoder";
+   }
    throw new Error(`Unsupported audio codec: ${audioCodec}`);
  };
  var getAudioEncoderConfig = async (config) => {
@@ -357,6 +421,9 @@ var getAudioEncoderConfig = async (config) => {
      ...config,
      codec: getCodecString(config.codec)
    };
+   if (config.codec === "wav") {
+     return actualConfig;
+   }
    if (typeof AudioEncoder === "undefined") {
      return null;
    }
@@ -373,6 +440,9 @@ var canReencodeAudioTrack = async ({
    bitrate
  }) => {
    const audioDecoderConfig = await getAudioDecoderConfig(track);
+   if (audioCodec === "wav" && audioDecoderConfig) {
+     return true;
+   }
    const audioEncoderConfig = await getAudioEncoderConfig({
      codec: audioCodec,
      numberOfChannels: track.numberOfChannels,
@@ -403,6 +473,14 @@ var getVideoDecoderConfigWithHardwareAcceleration = async (config) => {
    return null;
  };

+ // src/browser-quirks.ts
+ var isFirefox = () => {
+   return navigator.userAgent.toLowerCase().indexOf("firefox") > -1;
+ };
+ var isSafari = () => {
+   return /^((?!chrome|android).)*safari/i.test(navigator.userAgent);
+ };
+
  // src/choose-correct-avc1-profile.ts
  var chooseCorrectAvc1Profile = ({
    width,
@@ -451,7 +529,8 @@ var getVideoEncoderConfig = async ({
    const config = {
      codec: codec === "h264" ? chooseCorrectAvc1Profile({ fps, height, width }) : codec === "vp9" ? "vp09.00.10.08" : codec,
      height,
-     width
+     width,
+     bitrate: isSafari() ? 3000000 : undefined
    };
    const hardware = {
      ...config,
@@ -484,34 +563,8 @@ var canReencodeVideoTrack = async ({
    const videoDecoderConfig = await getVideoDecoderConfigWithHardwareAcceleration(track);
    return Boolean(videoDecoderConfig && videoEncoderConfig);
  };
- // src/codec-id.ts
- var availableContainers = ["webm", "mp4"];
- var getAvailableContainers = () => {
-   return availableContainers;
- };
- var getAvailableVideoCodecs = (container) => {
-   if (container === "mp4") {
-     return ["h264"];
-   }
-   if (container === "webm") {
-     return ["vp8", "vp9"];
-   }
-   throw new Error(`Unsupported container: ${container}`);
- };
- var getAvailableAudioCodecs = (container) => {
-   if (container === "mp4") {
-     return ["aac"];
-   }
-   if (container === "webm") {
-     return ["opus"];
-   }
-   throw new Error(`Unsupported container: ${container}`);
- };
  // src/convert-media.ts
- import {
-   MediaParserInternals as MediaParserInternals4,
-   parseMedia
- } from "@remotion/media-parser";
+ import { parseMedia } from "@remotion/media-parser";

  // src/auto-select-writer.ts
  import { bufferWriter } from "@remotion/media-parser/buffer";
@@ -544,6 +597,14 @@ var calculateProgress = ({
  // src/error-cause.ts
  var error_cause_default = Error;

+ // src/generate-output-filename.ts
+ var generateOutputFilename = (source, container) => {
+   const filename = typeof source === "string" ? source : source instanceof File ? source.name : "converted";
+   const behindSlash = filename.split("/").pop();
+   const withoutExtension = behindSlash.split(".").slice(0, -1).join(".");
+   return `${withoutExtension}.${container}`;
+ };
+
  // src/convert-encoded-chunk.ts
  var convertEncodedChunk = (chunk, trackId) => {
    const arr = new Uint8Array(chunk.byteLength);
@@ -572,6 +633,9 @@ var getDefaultAudioCodec = ({
    if (container === "mp4") {
      return "aac";
    }
+   if (container === "wav") {
+     return "wav";
+   }
    throw new Error(`Unhandled container: ${container}`);
  };

@@ -614,7 +678,6 @@ var defaultOnAudioTrackHandler = async ({
  var makeAudioTrackHandler = ({
    state,
    defaultAudioCodec: audioCodec,
-   convertMediaState,
    controller,
    abortConversion,
    onMediaStateUpdate,
@@ -652,8 +715,12 @@ var makeAudioTrackHandler = ({
            timescale: track.timescale,
            codecPrivate: track.codecPrivate
          });
-         convertMediaState.encodedAudioFrames++;
-         onMediaStateUpdate?.({ ...convertMediaState });
+         onMediaStateUpdate?.((prevState) => {
+           return {
+             ...prevState,
+             encodedAudioFrames: prevState.encodedAudioFrames + 1
+           };
+         });
        };
      }
      const audioEncoderConfig = await getAudioEncoderConfig({
@@ -679,7 +746,7 @@ var makeAudioTrackHandler = ({
      const codecPrivate = audioOperation.audioCodec === "aac" ? new Uint8Array([17, 144]) : null;
      const { trackNumber } = await state.addTrack({
        type: "audio",
-       codec: audioOperation.audioCodec,
+       codec: audioOperation.audioCodec === "wav" ? "pcm-s16" : audioOperation.audioCodec,
        numberOfChannels: track.numberOfChannels,
        sampleRate: track.sampleRate,
        codecPrivate,
@@ -697,8 +764,12 @@ var makeAudioTrackHandler = ({
            timescale: track.timescale,
            codecPrivate
          });
-         convertMediaState.encodedAudioFrames++;
-         onMediaStateUpdate?.({ ...convertMediaState });
+         onMediaStateUpdate?.((prevState) => {
+           return {
+             ...prevState,
+             encodedAudioFrames: prevState.encodedAudioFrames + 1
+           };
+         });
        },
        onError: (err) => {
          abortConversion(new error_cause_default(`Audio encoder of ${track.trackId} failed (see .cause of this error)`, {
@@ -713,8 +784,12 @@ var makeAudioTrackHandler = ({
      const audioDecoder = createAudioDecoder({
        onFrame: async (frame) => {
          await audioEncoder.encodeFrame(frame);
-         convertMediaState.decodedAudioFrames++;
-         onMediaStateUpdate?.(convertMediaState);
+         onMediaStateUpdate?.((prevState) => {
+           return {
+             ...prevState,
+             decodedAudioFrames: prevState.decodedAudioFrames + 1
+           };
+         });
          frame.close();
        },
        onError(error) {
@@ -752,7 +827,13 @@ var getDefaultVideoCodec = ({
    if (container === "webm") {
      return "vp8";
    }
-   throw new Error(`Unhandled container: ${container} satisfies never`);
+   if (container === "mp4") {
+     return "h264";
+   }
+   if (container === "wav") {
+     return null;
+   }
+   throw new Error(`Unhandled container: ${container}`);
  };

  // src/default-on-video-track-handler.ts
@@ -771,6 +852,10 @@ var defaultOnVideoTrackHandler = async ({
      return Promise.resolve({ type: "copy" });
    }
    const videoCodec = defaultVideoCodec ?? getDefaultVideoCodec({ container });
+   if (videoCodec === null) {
+     MediaParserInternals3.Log.verbose(logLevel, `Track ${track.trackId} (video): No default video codec, therefore dropping`);
+     return Promise.resolve({ type: "drop" });
+   }
    const canReencode = await canReencodeVideoTrack({
      videoCodec,
      track
@@ -783,22 +868,48 @@ var defaultOnVideoTrackHandler = async ({
    return Promise.resolve({ type: "fail" });
  };

+ // src/convert-to-correct-videoframe.ts
+ var needsToCorrectVideoFrame = ({
+   videoFrame,
+   outputCodec
+ }) => {
+   if (videoFrame.format === null) {
+     return true;
+   }
+   return isFirefox() && videoFrame.format === "BGRX" && outputCodec === "h264";
+ };
+ var convertToCorrectVideoFrame = ({
+   videoFrame,
+   outputCodec
+ }) => {
+   if (!needsToCorrectVideoFrame({ videoFrame, outputCodec })) {
+     return videoFrame;
+   }
+   const canvas = new OffscreenCanvas(videoFrame.displayWidth, videoFrame.displayHeight);
+   canvas.width = videoFrame.displayWidth;
+   canvas.height = videoFrame.displayHeight;
+   const ctx = canvas.getContext("2d");
+   if (!ctx) {
+     throw new Error("Could not get 2d context");
+   }
+   ctx.drawImage(videoFrame, 0, 0);
+   return new VideoFrame(canvas, {
+     displayHeight: videoFrame.displayHeight,
+     displayWidth: videoFrame.displayWidth,
+     duration: videoFrame.duration,
+     timestamp: videoFrame.timestamp
+   });
+ };
+
  // src/on-frame.ts
  var onFrame = async ({
    frame,
    onVideoFrame,
    videoEncoder,
-   onMediaStateUpdate,
    track,
-   convertMediaState
+   outputCodec
  }) => {
    const newFrame = onVideoFrame ? await onVideoFrame({ frame, track }) : frame;
-   if (newFrame.codedHeight !== frame.codedHeight) {
-     throw new Error(`Returned VideoFrame of track ${track.trackId} has different codedHeight (${newFrame.codedHeight}) than the input frame (${frame.codedHeight})`);
-   }
-   if (newFrame.codedWidth !== frame.codedWidth) {
-     throw new Error(`Returned VideoFrame of track ${track.trackId} has different codedWidth (${newFrame.codedWidth}) than the input frame (${frame.codedWidth})`);
-   }
    if (newFrame.displayWidth !== frame.displayWidth) {
      throw new Error(`Returned VideoFrame of track ${track.trackId} has different displayWidth (${newFrame.displayWidth}) than the input frame (${newFrame.displayHeight})`);
    }
@@ -811,13 +922,18 @@ var onFrame = async ({
    if (newFrame.duration !== frame.duration) {
      throw new Error(`Returned VideoFrame of track ${track.trackId} has different duration (${newFrame.duration}) than the input frame (${newFrame.duration}). When calling new VideoFrame(), pass {duration: frame.duration} as second argument`);
    }
-   await videoEncoder.encodeFrame(newFrame, newFrame.timestamp);
-   convertMediaState.decodedVideoFrames++;
-   onMediaStateUpdate?.({ ...convertMediaState });
-   newFrame.close();
+   const fixedFrame = convertToCorrectVideoFrame({
+     videoFrame: newFrame,
+     outputCodec
+   });
+   await videoEncoder.encodeFrame(fixedFrame, fixedFrame.timestamp);
+   fixedFrame.close();
    if (frame !== newFrame) {
      frame.close();
    }
+   if (fixedFrame !== newFrame) {
+     fixedFrame.close();
+   }
  };

  // src/video-decoder.ts
@@ -910,7 +1026,8 @@ var createVideoEncoder = ({
    onError,
    signal,
    config,
-   logLevel
+   logLevel,
+   outputCodec
  }) => {
    if (signal.aborted) {
      throw new Error("Not creating video encoder, already aborted");
@@ -948,6 +1065,7 @@ var createVideoEncoder = ({
      close();
    };
    signal.addEventListener("abort", onAbort);
+   Log.verbose(logLevel, "Configuring video encoder", config);
    encoder.configure(config);
    let framesProcessed = 0;
    const encodeFrame = async (frame) => {
@@ -955,14 +1073,14 @@ var createVideoEncoder = ({
        return;
      }
      await ioSynchronizer.waitFor({
-       unemitted: 2,
-       _unprocessed: 2
+       unemitted: 10,
+       _unprocessed: 10
      });
      if (encoder.state === "closed") {
        return;
      }
      const keyFrame = framesProcessed % 40 === 0;
-     encoder.encode(frame, {
+     encoder.encode(convertToCorrectVideoFrame({ videoFrame: frame, outputCodec }), {
        keyFrame
      });
      ioSynchronizer.inputItem(frame.timestamp, keyFrame);
@@ -992,7 +1110,6 @@ var makeVideoTrackHandler = ({
    onVideoFrame,
    onMediaStateUpdate,
    abortConversion,
-   convertMediaState,
    controller,
    defaultVideoCodec,
    onVideoTrack,
@@ -1033,8 +1150,12 @@ var makeVideoTrackHandler = ({
            timescale: track.timescale,
            codecPrivate: track.codecPrivate
          });
-         convertMediaState.decodedVideoFrames++;
-         onMediaStateUpdate?.({ ...convertMediaState });
+         onMediaStateUpdate?.((prevState) => {
+           return {
+             ...prevState,
+             decodedVideoFrames: prevState.decodedVideoFrames + 1
+           };
+         });
        };
      }
      const videoEncoderConfig = await getVideoEncoderConfig({
@@ -1071,8 +1192,12 @@ var makeVideoTrackHandler = ({
            timescale: track.timescale,
            codecPrivate: arrayBufferToUint8Array(metadata?.decoderConfig?.description ?? null)
          });
-         convertMediaState.encodedVideoFrames++;
-         onMediaStateUpdate?.({ ...convertMediaState });
+         onMediaStateUpdate?.((prevState) => {
+           return {
+             ...prevState,
+             encodedVideoFrames: prevState.encodedVideoFrames + 1
+           };
+         });
        },
        onError: (err) => {
          abortConversion(new error_cause_default(`Video encoder of track ${track.trackId} failed (see .cause of this error)`, {
@@ -1081,18 +1206,18 @@ var makeVideoTrackHandler = ({
        },
        signal: controller.signal,
        config: videoEncoderConfig,
-       logLevel
+       logLevel,
+       outputCodec: videoOperation.videoCodec
      });
      const videoDecoder = createVideoDecoder({
        config: videoDecoderConfig,
        onFrame: async (frame) => {
          await onFrame({
-           convertMediaState,
            frame,
-           onMediaStateUpdate,
            track,
            videoEncoder,
-           onVideoFrame
+           onVideoFrame,
+           outputCodec: videoOperation.videoCodec
          });
        },
        onError: (err) => {
@@ -1117,11 +1242,78 @@ var makeVideoTrackHandler = ({
    };
  };

+ // src/select-container-creator.ts
+ import { MediaParserInternals as MediaParserInternals4 } from "@remotion/media-parser";
+ var selectContainerCreator = (container) => {
+   if (container === "mp4") {
+     return MediaParserInternals4.createIsoBaseMedia;
+   }
+   if (container === "wav") {
+     return MediaParserInternals4.createWav;
+   }
+   if (container === "webm") {
+     return MediaParserInternals4.createMatroskaMedia;
+   }
+   throw new Error(`Unsupported container: ${container}`);
+ };
+
+ // src/throttled-state-update.ts
+ var throttledStateUpdate = ({
+   updateFn,
+   everyMilliseconds,
+   signal
+ }) => {
+   let currentState = {
+     decodedAudioFrames: 0,
+     decodedVideoFrames: 0,
+     encodedVideoFrames: 0,
+     encodedAudioFrames: 0,
+     bytesWritten: 0,
+     millisecondsWritten: 0,
+     expectedOutputDurationInMs: null,
+     overallProgress: 0
+   };
+   if (!updateFn) {
+     return {
+       get: () => currentState,
+       update: null,
+       stopAndGetLastProgress: () => {
+       }
+     };
+   }
+   let lastUpdated = null;
+   const callUpdateIfChanged = () => {
+     if (currentState === lastUpdated) {
+       return;
+     }
+     updateFn(currentState);
+     lastUpdated = currentState;
+   };
+   const interval = setInterval(() => {
+     callUpdateIfChanged();
+   }, everyMilliseconds);
+   const onAbort = () => {
+     clearInterval(interval);
+   };
+   signal.addEventListener("abort", onAbort, { once: true });
+   return {
+     get: () => currentState,
+     update: (fn) => {
+       currentState = fn(currentState);
+     },
+     stopAndGetLastProgress: () => {
+       clearInterval(interval);
+       signal.removeEventListener("abort", onAbort);
+       return currentState;
+     }
+   };
+ };
+
  // src/convert-media.ts
  var convertMedia = async function({
    src,
    onVideoFrame,
-   onMediaStateUpdate: onMediaStateDoNoCallDirectly,
+   onProgress: onProgressDoNotCallDirectly,
    audioCodec,
    container,
    videoCodec,
@@ -1132,13 +1324,14 @@ var convertMedia = async function({
    fields,
    logLevel = "info",
    writer,
+   progressIntervalInMs,
    ...more
  }) {
    if (userPassedAbortSignal?.aborted) {
      return Promise.reject(new error_cause_default("Aborted"));
    }
-   if (container !== "webm" && container !== "mp4") {
-     return Promise.reject(new TypeError('Only `to: "webm"` and `to: "mp4"` is supported currently'));
+   if (container !== "webm" && container !== "mp4" && container !== "wav") {
+     return Promise.reject(new TypeError('Only `to: "webm"`, `to: "mp4"` and `to: "wav"` is supported currently'));
    }
    if (videoCodec && videoCodec !== "vp8" && videoCodec !== "vp9") {
      return Promise.reject(new TypeError('Only `videoCodec: "vp8"` and `videoCodec: "vp9"` are supported currently'));
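A minimal usage sketch of the reworked conversion options, inferred from the destructuring and validation in this hunk (`src`, `container`, `onProgress`, `progressIntervalInMs`) and from the `throttledStateUpdate` block above; the input URL is a placeholder and the public typings are not part of this diff:

```ts
import {convertMedia} from '@remotion/webcodecs';

// Target the newly supported WAV container and observe throttled progress.
const result = await convertMedia({
  src: 'https://example.com/input.mp4', // placeholder URL
  container: 'wav',
  // Replaces the former `onMediaStateUpdate` callback; invoked at most once
  // per interval with the latest progress snapshot.
  onProgress: (progress) => {
    console.log(progress.bytesWritten, progress.overallProgress);
  },
  progressIntervalInMs: 250, // new option; the bundle falls back to 100ms
});

// The resolved object now also carries the final progress snapshot.
console.log(result.finalState.encodedAudioFrames);
```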
@@ -1155,47 +1348,45 @@ var convertMedia = async function({
      abortConversion(new error_cause_default("Conversion aborted by user"));
    };
    userPassedAbortSignal?.addEventListener("abort", onUserAbort);
-   const convertMediaState = {
-     decodedAudioFrames: 0,
-     decodedVideoFrames: 0,
-     encodedVideoFrames: 0,
-     encodedAudioFrames: 0,
-     bytesWritten: 0,
-     millisecondsWritten: 0,
-     expectedOutputDurationInMs: null,
-     overallProgress: 0
-   };
-   const onMediaStateUpdate = (newState) => {
-     if (controller.signal.aborted) {
-       return;
-     }
-     onMediaStateDoNoCallDirectly?.(newState);
-   };
-   const creator = container === "webm" ? MediaParserInternals4.createMatroskaMedia : MediaParserInternals4.createIsoBaseMedia;
+   const creator = selectContainerCreator(container);
+   const throttledState = throttledStateUpdate({
+     updateFn: onProgressDoNotCallDirectly ?? null,
+     everyMilliseconds: progressIntervalInMs ?? 100,
+     signal: controller.signal
+   });
    const state = await creator({
+     filename: generateOutputFilename(src, container),
      writer: await autoSelectWriter(writer, logLevel),
      onBytesProgress: (bytesWritten) => {
-       convertMediaState.bytesWritten = bytesWritten;
-       onMediaStateUpdate?.(convertMediaState);
+       throttledState.update?.((prevState) => {
+         return {
+           ...prevState,
+           bytesWritten
+         };
+       });
      },
      onMillisecondsProgress: (millisecondsWritten) => {
-       if (millisecondsWritten > convertMediaState.millisecondsWritten) {
-         convertMediaState.millisecondsWritten = millisecondsWritten;
-         convertMediaState.overallProgress = calculateProgress({
-           millisecondsWritten: convertMediaState.millisecondsWritten,
-           expectedOutputDurationInMs: convertMediaState.expectedOutputDurationInMs
-         });
-         onMediaStateUpdate?.(convertMediaState);
-       }
+       throttledState.update?.((prevState) => {
+         if (millisecondsWritten > prevState.millisecondsWritten) {
+           return {
+             ...prevState,
+             millisecondsWritten,
+             overallProgress: calculateProgress({
+               millisecondsWritten: prevState.millisecondsWritten,
+               expectedOutputDurationInMs: prevState.expectedOutputDurationInMs
+             })
+           };
+         }
+         return prevState;
+       });
      },
      logLevel
    });
    const onVideoTrack = makeVideoTrackHandler({
      state,
      onVideoFrame: onVideoFrame ?? null,
-     onMediaStateUpdate: onMediaStateUpdate ?? null,
+     onMediaStateUpdate: throttledState.update ?? null,
      abortConversion,
-     convertMediaState,
      controller,
      defaultVideoCodec: videoCodec ?? null,
      onVideoTrack: userVideoResolver ?? null,
@@ -1206,8 +1397,7 @@ var convertMedia = async function({
      abortConversion,
      defaultAudioCodec: audioCodec ?? null,
      controller,
-     convertMediaState,
-     onMediaStateUpdate: onMediaStateUpdate ?? null,
+     onMediaStateUpdate: throttledState.update ?? null,
      state,
      onAudioTrack: userAudioResolver ?? null,
      logLevel,
@@ -1234,24 +1424,71 @@ var convertMedia = async function({
          casted.onDurationInSeconds(durationInSeconds);
        }
        const expectedOutputDurationInMs = durationInSeconds * 1000;
-       convertMediaState.expectedOutputDurationInMs = expectedOutputDurationInMs;
-       convertMediaState.overallProgress = calculateProgress({
-         millisecondsWritten: convertMediaState.millisecondsWritten,
-         expectedOutputDurationInMs
+       throttledState.update?.((prevState) => {
+         return {
+           ...prevState,
+           expectedOutputDurationInMs,
+           overallProgress: calculateProgress({
+             millisecondsWritten: prevState.millisecondsWritten,
+             expectedOutputDurationInMs
+           })
+         };
        });
-       onMediaStateUpdate(convertMediaState);
      }
    }).then(() => {
      return state.waitForFinish();
    }).then(() => {
-     resolve({ save: state.save, remove: state.remove });
+     resolve({
+       save: state.save,
+       remove: state.remove,
+       finalState: throttledState.get()
+     });
    }).catch((err) => {
      reject(err);
+   }).finally(() => {
+     throttledState.stopAndGetLastProgress();
    });
    return getPromiseToImmediatelyReturn().finally(() => {
      userPassedAbortSignal?.removeEventListener("abort", onUserAbort);
    });
  };
+ // src/get-available-audio-codecs.ts
+ var getAvailableAudioCodecs = ({
+   container
+ }) => {
+   if (container === "mp4") {
+     return ["aac"];
+   }
+   if (container === "webm") {
+     return ["opus"];
+   }
+   if (container === "wav") {
+     return ["wav"];
+   }
+   throw new Error(`Unsupported container: ${container}`);
+ };
+ // src/get-available-containers.ts
+ var availableContainers = ["webm", "mp4", "wav"];
+ var getAvailableContainers = () => {
+   return availableContainers;
+ };
+ // src/get-available-video-codecs.ts
+ var getAvailableVideoCodecs = ({
+   container
+ }) => {
+   if (container === "mp4") {
+     return ["h264"];
+   }
+   if (container === "webm") {
+     return ["vp8", "vp9"];
+   }
+   if (container === "wav") {
+     return [];
+   }
+   throw new Error(`Unsupported container: ${container}`);
+ };
+ // src/index.ts
+ setRemotionImported();
  export {
    getDefaultVideoCodec,
    getDefaultAudioCodec,
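A sketch of the reworked availability helpers, assuming all three remain exported from `@remotion/webcodecs` (the `export` block above is cut off in this diff); note that the codec helpers now take a `{ container }` object instead of the bare container string used by the removed `codec-id.ts`:

```ts
import {
  getAvailableContainers,
  getAvailableAudioCodecs,
  getAvailableVideoCodecs,
} from '@remotion/webcodecs';

// List every container this version can write, with its codec options.
for (const container of getAvailableContainers()) {
  console.log(container, {
    video: getAvailableVideoCodecs({container}),
    audio: getAvailableAudioCodecs({container}),
  });
}
```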