@remotion/webcodecs 4.0.229 → 4.0.231

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. package/README.md +12 -1
  2. package/dist/arraybuffer-to-uint8-array.d.ts +1 -0
  3. package/dist/arraybuffer-to-uint8-array.js +7 -0
  4. package/dist/audio-decoder.d.ts +2 -2
  5. package/dist/audio-encoder-config.js +15 -2
  6. package/dist/audio-encoder.d.ts +2 -1
  7. package/dist/audio-encoder.js +17 -5
  8. package/dist/browser-quirks.d.ts +2 -0
  9. package/dist/browser-quirks.js +11 -0
  10. package/dist/can-copy-audio-track.d.ts +2 -4
  11. package/dist/can-copy-audio-track.js +7 -4
  12. package/dist/can-copy-video-track.d.ts +2 -4
  13. package/dist/can-copy-video-track.js +6 -6
  14. package/dist/can-reencode-audio-track.js +1 -6
  15. package/dist/can-reencode-video-track.js +1 -0
  16. package/dist/choose-correct-avc1-profile.d.ts +5 -0
  17. package/dist/choose-correct-avc1-profile.js +54 -0
  18. package/dist/codec-id.d.ts +7 -4
  19. package/dist/codec-id.js +28 -5
  20. package/dist/convert-encoded-chunk.d.ts +2 -1
  21. package/dist/convert-encoded-chunk.js +25 -2
  22. package/dist/convert-media.d.ts +13 -12
  23. package/dist/convert-media.js +62 -46
  24. package/dist/convert-to-correct-videoframe.d.ts +9 -0
  25. package/dist/convert-to-correct-videoframe.js +28 -0
  26. package/dist/default-on-audio-track-handler.d.ts +2 -0
  27. package/dist/default-on-audio-track-handler.js +36 -0
  28. package/dist/default-on-video-track-handler.d.ts +2 -0
  29. package/dist/default-on-video-track-handler.js +29 -0
  30. package/dist/esm/index.mjs +493 -174
  31. package/dist/generate-output-filename.d.ts +2 -0
  32. package/dist/generate-output-filename.js +14 -0
  33. package/dist/get-default-audio-codec.d.ts +4 -0
  34. package/dist/get-default-audio-codec.js +13 -0
  35. package/dist/get-default-video-codec.d.ts +4 -0
  36. package/dist/get-default-video-codec.js +10 -0
  37. package/dist/index.d.ts +12 -8
  38. package/dist/index.js +12 -1
  39. package/dist/io-manager/io-synchronizer.js +2 -2
  40. package/dist/{resolve-audio-action.d.ts → on-audio-track-handler.d.ts} +5 -5
  41. package/dist/on-audio-track-handler.js +2 -0
  42. package/dist/on-audio-track.d.ts +7 -8
  43. package/dist/on-audio-track.js +55 -16
  44. package/dist/on-frame.d.ts +4 -4
  45. package/dist/on-frame.js +15 -9
  46. package/dist/{resolve-video-action.d.ts → on-video-track-handler.d.ts} +5 -5
  47. package/dist/on-video-track-handler.js +2 -0
  48. package/dist/on-video-track.d.ts +8 -8
  49. package/dist/on-video-track.js +49 -15
  50. package/dist/set-remotion-imported.d.ts +6 -0
  51. package/dist/set-remotion-imported.js +25 -0
  52. package/dist/throttled-state-update.d.ts +13 -0
  53. package/dist/throttled-state-update.js +49 -0
  54. package/dist/video-decoder.d.ts +2 -2
  55. package/dist/video-decoder.js +5 -0
  56. package/dist/video-encoder-config.d.ts +2 -1
  57. package/dist/video-encoder-config.js +9 -2
  58. package/dist/video-encoder.d.ts +4 -2
  59. package/dist/video-encoder.js +12 -10
  60. package/package.json +4 -3
  61. package/dist/can-reencode-audio.d.ts +0 -7
  62. package/dist/can-reencode-audio.js +0 -21
  63. package/dist/can-reencode-video.d.ts +0 -6
  64. package/dist/can-reencode-video.js +0 -15
  65. package/dist/event-emitter.d.ts +0 -25
  66. package/dist/event-emitter.js +0 -23
  67. package/dist/polyfill-encoded-audio-chunk.d.ts +0 -3
  68. package/dist/polyfill-encoded-audio-chunk.js +0 -5
  69. package/dist/resolve-audio-action.js +0 -32
  70. package/dist/resolve-video-action.js +0 -26
  71. package/dist/wait-until-return.d.ts +0 -4
  72. package/dist/wait-until-return.js +0 -14
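The diff below covers the bundled ESM entry point, package/dist/esm/index.mjs. The user-facing changes visible in it: convertMedia() now accepts container: "mp4" in addition to "webm", audioCodec and videoCodec become optional (the default track handlers decide per track whether to copy, re-encode, or fail), the onMediaStateUpdate callback is replaced by a throttled onProgress callback (configurable via progressIntervalInMs), the conversion result gains a finalState field, and new helpers such as getAvailableContainers(), getAvailableVideoCodecs(container), getDefaultAudioCodec({container}), defaultOnVideoTrackHandler and defaultOnAudioTrackHandler are exported. A minimal usage sketch follows; names and signatures are read off the bundled output shown below rather than from the package's published type definitions, and the source URL is hypothetical, so treat it as illustrative only.

// Sketch based on the 4.0.231 bundle below (not an official example).
import {
  convertMedia,
  getAvailableContainers,
  getAvailableVideoCodecs,
} from "@remotion/webcodecs";

console.log(getAvailableContainers()); // ["webm", "mp4"] per the bundle
console.log(getAvailableVideoCodecs("mp4")); // ["h264"] per the bundle

const result = await convertMedia({
  src: "https://example.com/input.webm", // hypothetical input
  container: "mp4",
  // Replaces onMediaStateUpdate: called at most once per progressIntervalInMs.
  onProgress: (progress) => {
    console.log(progress.overallProgress, progress.encodedVideoFrames);
  },
  progressIntervalInMs: 250,
});

// The promise now resolves with { save, remove, finalState }.
console.log(result.finalState.encodedVideoFrames);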
@@ -1,3 +1,18 @@
+ // src/set-remotion-imported.ts
+ import { VERSION } from "@remotion/media-parser";
+ var setRemotionImported = () => {
+ if (typeof globalThis === "undefined") {
+ return;
+ }
+ if (globalThis.remotion_imported) {
+ return;
+ }
+ globalThis.remotion_imported = VERSION;
+ if (typeof window !== "undefined") {
+ window.remotion_imported = `${VERSION}-webcodecs`;
+ }
+ };
+
  // src/log.ts
  import { MediaParserInternals } from "@remotion/media-parser";
  var { Log } = MediaParserInternals;
@@ -59,11 +74,11 @@ var makeIoSynchronizer = (logLevel, label) => {
  let unprocessed = 0;
  const getUnprocessed = () => unprocessed;
  const getUnemittedItems = () => {
- inputs = inputs.filter((input) => input > lastOutput);
+ inputs = inputs.filter((input) => Math.floor(input) > Math.floor(lastOutput));
  return inputs.length;
  };
  const getUnemittedKeyframes = () => {
- keyframes = keyframes.filter((keyframe) => keyframe > lastOutput);
+ keyframes = keyframes.filter((keyframe) => Math.floor(keyframe) > Math.floor(lastOutput));
  return keyframes.length;
  };
  const printState = (prefix) => {
@@ -221,7 +236,8 @@ var createAudioEncoder = ({
  codec,
  signal,
  config: audioEncoderConfig,
- logLevel
+ logLevel,
+ onNewAudioSampleRate
  }) => {
  if (signal.aborted) {
  throw new Error("Not creating audio encoder, already aborted");
@@ -258,18 +274,29 @@ var createAudioEncoder = ({
  close();
  };
  signal.addEventListener("abort", onAbort);
- if (codec !== "opus") {
- throw new Error('Only `codec: "opus"` is supported currently');
+ if (codec !== "opus" && codec !== "aac") {
+ throw new Error('Only `codec: "opus"` and `codec: "aac"` is supported currently');
  }
- encoder.configure(audioEncoderConfig);
+ const wantedSampleRate = audioEncoderConfig.sampleRate;
  const encodeFrame = async (audioData) => {
  if (encoder.state === "closed") {
  return;
  }
- await ioSynchronizer.waitFor({ unemitted: 2, _unprocessed: 2 });
+ await ioSynchronizer.waitFor({ unemitted: 20, _unprocessed: 20 });
  if (encoder.state === "closed") {
  return;
  }
+ if (encoder.state === "unconfigured") {
+ if (audioData.sampleRate === wantedSampleRate) {
+ encoder.configure(audioEncoderConfig);
+ } else {
+ encoder.configure({
+ ...audioEncoderConfig,
+ sampleRate: audioData.sampleRate
+ });
+ onNewAudioSampleRate(audioData.sampleRate);
+ }
+ }
  encoder.encode(audioData);
  ioSynchronizer.inputItem(audioData.timestamp, true);
  };
@@ -293,27 +320,28 @@ var createAudioEncoder = ({
  // src/can-copy-audio-track.ts
  var canCopyAudioTrack = ({
  inputCodec,
- outputCodec,
  container
  }) => {
- if (outputCodec === "opus") {
- return inputCodec === "opus" && container === "webm";
+ if (container === "webm") {
+ return inputCodec === "opus";
+ }
+ if (container === "mp4") {
+ return inputCodec === "aac";
  }
- throw new Error(`Unhandled codec: ${outputCodec}`);
+ throw new Error(`Unhandled codec: ${container}`);
  };
  // src/can-copy-video-track.ts
  var canCopyVideoTrack = ({
  inputCodec,
- outputCodec,
  container
  }) => {
- if (outputCodec === "vp8") {
- return inputCodec === "vp8" && container === "webm";
+ if (container === "webm") {
+ return inputCodec === "vp8" || inputCodec === "vp9";
  }
- if (outputCodec === "vp9") {
- return inputCodec === "vp9" && container === "webm";
+ if (container === "mp4") {
+ return inputCodec === "h264" || inputCodec === "h265";
  }
- throw new Error(`Unhandled codec: ${outputCodec}`);
+ throw new Error(`Unhandled codec: ${container}`);
  };
  // src/audio-decoder-config.ts
  var getAudioDecoderConfig = async (config) => {
@@ -330,12 +358,25 @@ var getAudioDecoderConfig = async (config) => {
  };

  // src/audio-encoder-config.ts
+ var getCodecString = (audioCodec) => {
+ if (audioCodec === "opus") {
+ return "opus";
+ }
+ if (audioCodec === "aac") {
+ return "mp4a.40.02";
+ }
+ throw new Error(`Unsupported audio codec: ${audioCodec}`);
+ };
  var getAudioEncoderConfig = async (config) => {
+ const actualConfig = {
+ ...config,
+ codec: getCodecString(config.codec)
+ };
  if (typeof AudioEncoder === "undefined") {
  return null;
  }
- if ((await AudioEncoder.isConfigSupported(config)).supported) {
- return config;
+ if ((await AudioEncoder.isConfigSupported(actualConfig)).supported) {
+ return actualConfig;
  }
  return null;
  };
@@ -346,12 +387,7 @@ var canReencodeAudioTrack = async ({
  audioCodec,
  bitrate
  }) => {
- const audioDecoderConfig = await getAudioDecoderConfig({
- codec: track.codec,
- numberOfChannels: track.numberOfChannels,
- sampleRate: track.sampleRate,
- description: track.description
- });
+ const audioDecoderConfig = await getAudioDecoderConfig(track);
  const audioEncoderConfig = await getAudioEncoderConfig({
  codec: audioCodec,
  numberOfChannels: track.numberOfChannels,
@@ -382,19 +418,64 @@ var getVideoDecoderConfigWithHardwareAcceleration = async (config) => {
  return null;
  };

+ // src/browser-quirks.ts
+ var isFirefox = () => {
+ return navigator.userAgent.toLowerCase().indexOf("firefox") > -1;
+ };
+ var isSafari = () => {
+ return /^((?!chrome|android).)*safari/i.test(navigator.userAgent);
+ };
+
+ // src/choose-correct-avc1-profile.ts
+ var chooseCorrectAvc1Profile = ({
+ width,
+ height,
+ fps
+ }) => {
+ const profiles = [
+ { level: "3.1", hex: "1F", width: 1280, height: 720, fps: 30 },
+ { level: "3.2", hex: "20", width: 1280, height: 1024, fps: 42.2 },
+ { level: "4.0", hex: "28", width: 2048, height: 1024, fps: 30 },
+ { level: "4.1", hex: "29", width: 2048, height: 1024, fps: 30 },
+ { level: "4.2", hex: "2A", width: 2048, height: 1080, fps: 60 },
+ { level: "5.0", hex: "32", width: 3672, height: 1536, fps: 26.7 },
+ { level: "5.1", hex: "33", width: 4096, height: 2304, fps: 26.7 },
+ { level: "5.2", hex: "34", width: 4096, height: 2304, fps: 56.3 },
+ { level: "6.0", hex: "3C", width: 8192, height: 4320, fps: 30.2 },
+ { level: "6.1", hex: "3D", width: 8192, height: 4320, fps: 60.4 },
+ { level: "6.2", hex: "3E", width: 8192, height: 4320, fps: 120.8 }
+ ];
+ const profile = profiles.find((p) => {
+ if (width > p.width) {
+ return false;
+ }
+ if (height > p.height) {
+ return false;
+ }
+ const fallbackFps = fps ?? 60;
+ return fallbackFps <= p.fps;
+ });
+ if (!profile) {
+ throw new Error(`No suitable AVC1 profile found for ${width}x${height}@${fps}fps`);
+ }
+ return `avc1.6400${profile.hex}`;
+ };
+
  // src/video-encoder-config.ts
  var getVideoEncoderConfig = async ({
- width,
+ codec,
  height,
- codec
+ width,
+ fps
  }) => {
  if (typeof VideoEncoder === "undefined") {
  return null;
  }
  const config = {
- codec: codec === "vp9" ? "vp09.00.10.08" : codec,
+ codec: codec === "h264" ? chooseCorrectAvc1Profile({ fps, height, width }) : codec === "vp9" ? "vp09.00.10.08" : codec,
  height,
- width
+ width,
+ bitrate: isSafari() ? 3000000 : undefined
  };
  const hardware = {
  ...config,
@@ -421,19 +502,38 @@ var canReencodeVideoTrack = async ({
  const videoEncoderConfig = await getVideoEncoderConfig({
  codec: videoCodec,
  height: track.displayAspectHeight,
- width: track.displayAspectWidth
+ width: track.displayAspectWidth,
+ fps: track.fps
  });
  const videoDecoderConfig = await getVideoDecoderConfigWithHardwareAcceleration(track);
  return Boolean(videoDecoderConfig && videoEncoderConfig);
  };
  // src/codec-id.ts
- var availableVideoCodecs = ["vp8", "vp9"];
- var getAvailableVideoCodecs = () => availableVideoCodecs;
- var availableAudioCodecs = ["opus"];
- var getAvailableAudioCodecs = () => availableAudioCodecs;
+ var availableContainers = ["webm", "mp4"];
+ var getAvailableContainers = () => {
+ return availableContainers;
+ };
+ var getAvailableVideoCodecs = (container) => {
+ if (container === "mp4") {
+ return ["h264"];
+ }
+ if (container === "webm") {
+ return ["vp8", "vp9"];
+ }
+ throw new Error(`Unsupported container: ${container}`);
+ };
+ var getAvailableAudioCodecs = (container) => {
+ if (container === "mp4") {
+ return ["aac"];
+ }
+ if (container === "webm") {
+ return ["opus"];
+ }
+ throw new Error(`Unsupported container: ${container}`);
+ };
  // src/convert-media.ts
  import {
- MediaParserInternals as MediaParserInternals2,
+ MediaParserInternals as MediaParserInternals4,
  parseMedia
  } from "@remotion/media-parser";

@@ -468,54 +568,84 @@ var calculateProgress = ({
  // src/error-cause.ts
  var error_cause_default = Error;

+ // src/generate-output-filename.ts
+ var generateOutputFilename = (source, container) => {
+ const filename = typeof source === "string" ? source : source instanceof File ? source.name : "converted";
+ const behindSlash = filename.split("/").pop();
+ const withoutExtension = behindSlash.split(".").slice(0, -1).join(".");
+ return `${withoutExtension}.${container}`;
+ };
+
  // src/convert-encoded-chunk.ts
- var convertEncodedChunk = (chunk) => {
+ var convertEncodedChunk = (chunk, trackId) => {
  const arr = new Uint8Array(chunk.byteLength);
  chunk.copyTo(arr);
  return {
  data: arr,
  duration: chunk.duration ?? undefined,
  timestamp: chunk.timestamp,
- type: chunk.type
+ type: chunk.type,
+ cts: chunk.timestamp,
+ dts: chunk.timestamp,
+ trackId
  };
  };

- // src/resolve-audio-action.ts
+ // src/default-on-audio-track-handler.ts
+ import { MediaParserInternals as MediaParserInternals2 } from "@remotion/media-parser";
+
+ // src/get-default-audio-codec.ts
+ var getDefaultAudioCodec = ({
+ container
+ }) => {
+ if (container === "webm") {
+ return "opus";
+ }
+ if (container === "mp4") {
+ return "aac";
+ }
+ throw new Error(`Unhandled container: ${container}`);
+ };
+
+ // src/default-on-audio-track-handler.ts
  var DEFAULT_BITRATE = 128000;
- var defaultResolveAudioAction = async ({
+ var defaultOnAudioTrackHandler = async ({
  track,
- audioCodec,
+ defaultAudioCodec,
  logLevel,
  container
  }) => {
  const bitrate = DEFAULT_BITRATE;
  const canCopy = canCopyAudioTrack({
  inputCodec: track.codecWithoutConfig,
- outputCodec: audioCodec,
  container
  });
  if (canCopy) {
- Log.verbose(logLevel, `Track ${track.trackId} (audio): Can copy = ${canCopy}, action = copy`);
+ MediaParserInternals2.Log.verbose(logLevel, `Track ${track.trackId} (audio): Can copy track, therefore copying`);
  return Promise.resolve({ type: "copy" });
  }
+ const audioCodec = defaultAudioCodec ?? getDefaultAudioCodec({ container });
  const canReencode = await canReencodeAudioTrack({
  audioCodec,
  track,
  bitrate
  });
  if (canReencode) {
- Log.verbose(logLevel, `Track ${track.trackId} (audio): Can re-encode = ${canReencode}, can copy = ${canCopy}, action = reencode`);
- return Promise.resolve({ type: "reencode", bitrate, audioCodec });
+ MediaParserInternals2.Log.verbose(logLevel, `Track ${track.trackId} (audio): Cannot copy, but re-encode, therefore re-encoding`);
+ return Promise.resolve({
+ type: "reencode",
+ bitrate,
+ audioCodec
+ });
  }
- Log.verbose(logLevel, `Track ${track.trackId} (audio): Can re-encode = ${canReencode}, can copy = ${canCopy}, action = drop`);
- return Promise.resolve({ type: "drop" });
+ MediaParserInternals2.Log.verbose(logLevel, `Track ${track.trackId} (audio): Can neither re-encode nor copy, failing render`);
+ return Promise.resolve({ type: "fail" });
  };

  // src/on-audio-track.ts
  var makeAudioTrackHandler = ({
  state,
- audioCodec,
- convertMediaState,
+ defaultAudioCodec: audioCodec,
  controller,
  abortConversion,
  onMediaStateUpdate,
@@ -523,8 +653,8 @@ var makeAudioTrackHandler = ({
  logLevel,
  container
  }) => async (track) => {
- const audioOperation = await (onAudioTrack ?? defaultResolveAudioAction)({
- audioCodec,
+ const audioOperation = await (onAudioTrack ?? defaultOnAudioTrackHandler)({
+ defaultAudioCodec: audioCodec,
  track,
  logLevel,
  container
@@ -532,18 +662,33 @@ var makeAudioTrackHandler = ({
  if (audioOperation.type === "drop") {
  return null;
  }
+ if (audioOperation.type === "fail") {
+ throw new error_cause_default(`Audio track with ID ${track.trackId} could resolved with {"type": "fail"}. This could mean that this audio track could neither be copied to the output container or re-encoded. You have the option to drop the track instead of failing it: https://remotion.dev/docs/webcodecs/track-transformation`);
+ }
  if (audioOperation.type === "copy") {
  const addedTrack = await state.addTrack({
  type: "audio",
- codec: audioCodec,
+ codec: track.codecWithoutConfig,
  numberOfChannels: track.numberOfChannels,
  sampleRate: track.sampleRate,
- codecPrivate: track.codecPrivate
+ codecPrivate: track.codecPrivate,
+ timescale: track.timescale
  });
+ Log.verbose(logLevel, `Copying audio track ${track.trackId} as track ${addedTrack.trackNumber}. Timescale = ${track.timescale}, codec = ${track.codecWithoutConfig} (${track.codec}) `);
  return async (audioSample) => {
- await state.addSample(audioSample, addedTrack.trackNumber, false);
- convertMediaState.encodedAudioFrames++;
- onMediaStateUpdate?.({ ...convertMediaState });
+ await state.addSample({
+ chunk: audioSample,
+ trackNumber: addedTrack.trackNumber,
+ isVideo: false,
+ timescale: track.timescale,
+ codecPrivate: track.codecPrivate
+ });
+ onMediaStateUpdate?.((prevState) => {
+ return {
+ ...prevState,
+ encodedAudioFrames: prevState.encodedAudioFrames + 1
+ };
+ });
  };
  }
  const audioEncoderConfig = await getAudioEncoderConfig({
@@ -566,25 +711,40 @@ var makeAudioTrackHandler = ({
  abortConversion(new error_cause_default(`Could not configure audio decoder of track ${track.trackId}`));
  return null;
  }
+ const codecPrivate = audioOperation.audioCodec === "aac" ? new Uint8Array([17, 144]) : null;
  const { trackNumber } = await state.addTrack({
  type: "audio",
- codec: audioCodec,
+ codec: audioOperation.audioCodec,
  numberOfChannels: track.numberOfChannels,
  sampleRate: track.sampleRate,
- codecPrivate: null
+ codecPrivate,
+ timescale: track.timescale
  });
  const audioEncoder = createAudioEncoder({
+ onNewAudioSampleRate: (sampleRate) => {
+ state.updateTrackSampleRate({ sampleRate, trackNumber });
+ },
  onChunk: async (chunk) => {
- await state.addSample(convertEncodedChunk(chunk), trackNumber, false);
- convertMediaState.encodedAudioFrames++;
- onMediaStateUpdate?.({ ...convertMediaState });
+ await state.addSample({
+ chunk: convertEncodedChunk(chunk, trackNumber),
+ trackNumber,
+ isVideo: false,
+ timescale: track.timescale,
+ codecPrivate
+ });
+ onMediaStateUpdate?.((prevState) => {
+ return {
+ ...prevState,
+ encodedAudioFrames: prevState.encodedAudioFrames + 1
+ };
+ });
  },
  onError: (err) => {
  abortConversion(new error_cause_default(`Audio encoder of ${track.trackId} failed (see .cause of this error)`, {
  cause: err
  }));
  },
- codec: audioCodec,
+ codec: audioOperation.audioCodec,
  signal: controller.signal,
  config: audioEncoderConfig,
  logLevel
@@ -592,8 +752,12 @@ var makeAudioTrackHandler = ({
  const audioDecoder = createAudioDecoder({
  onFrame: async (frame) => {
  await audioEncoder.encodeFrame(frame);
- convertMediaState.decodedAudioFrames++;
- onMediaStateUpdate?.(convertMediaState);
+ onMediaStateUpdate?.((prevState) => {
+ return {
+ ...prevState,
+ decodedAudioFrames: prevState.decodedAudioFrames + 1
+ };
+ });
  frame.close();
  },
  onError(error) {
@@ -616,21 +780,96 @@ var makeAudioTrackHandler = ({
  };
  };

+ // src/arraybuffer-to-uint8-array.ts
+ var arrayBufferToUint8Array = (buffer) => {
+ return buffer ? new Uint8Array(buffer) : null;
+ };
+
+ // src/default-on-video-track-handler.ts
+ import { MediaParserInternals as MediaParserInternals3 } from "@remotion/media-parser";
+
+ // src/get-default-video-codec.ts
+ var getDefaultVideoCodec = ({
+ container
+ }) => {
+ if (container === "webm") {
+ return "vp8";
+ }
+ throw new Error(`Unhandled container: ${container} satisfies never`);
+ };
+
+ // src/default-on-video-track-handler.ts
+ var defaultOnVideoTrackHandler = async ({
+ track,
+ defaultVideoCodec,
+ logLevel,
+ container
+ }) => {
+ const canCopy = canCopyVideoTrack({
+ inputCodec: track.codecWithoutConfig,
+ container
+ });
+ if (canCopy) {
+ MediaParserInternals3.Log.verbose(logLevel, `Track ${track.trackId} (video): Can copy, therefore copying`);
+ return Promise.resolve({ type: "copy" });
+ }
+ const videoCodec = defaultVideoCodec ?? getDefaultVideoCodec({ container });
+ const canReencode = await canReencodeVideoTrack({
+ videoCodec,
+ track
+ });
+ if (canReencode) {
+ MediaParserInternals3.Log.verbose(logLevel, `Track ${track.trackId} (video): Cannot copy, but re-enconde, therefore re-encoding`);
+ return Promise.resolve({ type: "reencode", videoCodec });
+ }
+ MediaParserInternals3.Log.verbose(logLevel, `Track ${track.trackId} (video): Can neither copy nor re-encode, therefore failing`);
+ return Promise.resolve({ type: "fail" });
+ };
+
+ // src/convert-to-correct-videoframe.ts
+ var needsToCorrectVideoFrame = ({
+ videoFrame,
+ outputCodec
+ }) => {
+ return isFirefox() && videoFrame.format === "BGRX" && outputCodec === "h264";
+ };
+ var convertToCorrectVideoFrame = ({
+ videoFrame,
+ outputCodec
+ }) => {
+ if (!needsToCorrectVideoFrame({ videoFrame, outputCodec })) {
+ return videoFrame;
+ }
+ const canvas = new OffscreenCanvas(videoFrame.displayWidth, videoFrame.displayHeight);
+ canvas.width = videoFrame.displayWidth;
+ canvas.height = videoFrame.displayHeight;
+ const ctx = canvas.getContext("2d");
+ if (!ctx) {
+ throw new Error("Could not get 2d context");
+ }
+ ctx.drawImage(videoFrame, 0, 0);
+ return new VideoFrame(canvas, {
+ displayHeight: videoFrame.displayHeight,
+ displayWidth: videoFrame.displayWidth,
+ duration: videoFrame.duration,
+ timestamp: videoFrame.timestamp
+ });
+ };
+
  // src/on-frame.ts
  var onFrame = async ({
  frame,
  onVideoFrame,
  videoEncoder,
- onMediaStateUpdate,
  track,
- convertMediaState
+ outputCodec
  }) => {
  const newFrame = onVideoFrame ? await onVideoFrame({ frame, track }) : frame;
- if (newFrame.codedHeight !== frame.codedHeight) {
- throw new Error(`Returned VideoFrame of track ${track.trackId} has different codedHeight (${newFrame.codedHeight}) than the input frame (${frame.codedHeight})`);
+ if (newFrame.codedHeight !== frame.displayHeight) {
+ throw new Error(`Returned VideoFrame of track ${track.trackId} has different codedHeight (${newFrame.codedHeight}) than the input frame displayHeight (${frame.displayHeight})`);
  }
- if (newFrame.codedWidth !== frame.codedWidth) {
- throw new Error(`Returned VideoFrame of track ${track.trackId} has different codedWidth (${newFrame.codedWidth}) than the input frame (${frame.codedWidth})`);
+ if (newFrame.codedWidth !== frame.displayWidth) {
+ throw new Error(`Returned VideoFrame of track ${track.trackId} has different codedWidth (${newFrame.codedWidth}) than the input frame displayWidth (${frame.displayWidth})`);
  }
  if (newFrame.displayWidth !== frame.displayWidth) {
  throw new Error(`Returned VideoFrame of track ${track.trackId} has different displayWidth (${newFrame.displayWidth}) than the input frame (${newFrame.displayHeight})`);
@@ -644,38 +883,18 @@ var onFrame = async ({
  if (newFrame.duration !== frame.duration) {
  throw new Error(`Returned VideoFrame of track ${track.trackId} has different duration (${newFrame.duration}) than the input frame (${newFrame.duration}). When calling new VideoFrame(), pass {duration: frame.duration} as second argument`);
  }
- await videoEncoder.encodeFrame(newFrame, newFrame.timestamp);
- convertMediaState.decodedVideoFrames++;
- onMediaStateUpdate?.({ ...convertMediaState });
- newFrame.close();
+ const fixedFrame = convertToCorrectVideoFrame({
+ videoFrame: newFrame,
+ outputCodec
+ });
+ await videoEncoder.encodeFrame(fixedFrame, fixedFrame.timestamp);
+ fixedFrame.close();
  if (frame !== newFrame) {
  frame.close();
  }
- };
-
- // src/resolve-video-action.ts
- var defaultResolveVideoAction = async ({
- track,
- videoCodec,
- logLevel,
- container
- }) => {
- const canCopy = canCopyVideoTrack({
- inputCodec: track.codecWithoutConfig,
- outputCodec: videoCodec,
- container
- });
- if (canCopy) {
- Log.verbose(logLevel, `Track ${track.trackId} (video): Can copy, therefore copying`);
- return Promise.resolve({ type: "copy" });
- }
- const canReencode = await canReencodeVideoTrack({ videoCodec, track });
- if (canReencode) {
- Log.verbose(logLevel, `Track ${track.trackId} (video): Cannot copy, but re-enconde, therefore re-encoding`);
- return Promise.resolve({ type: "reencode", videoCodec });
+ if (fixedFrame !== newFrame) {
+ fixedFrame.close();
  }
- Log.verbose(logLevel, `Track ${track.trackId} (video): Can neither copy nor re-encode, therefore dropping`);
- return Promise.resolve({ type: "drop" });
  };

  // src/video-decoder.ts
@@ -747,9 +966,13 @@ var createVideoDecoder = ({
  },
  waitForFinish: async () => {
  await videoDecoder.flush();
+ Log.verbose(logLevel, "Flushed video decoder");
  await ioSynchronizer.waitForFinish();
+ Log.verbose(logLevel, "IO synchro finished");
  await outputQueue;
+ Log.verbose(logLevel, "Output queue finished");
  await inputQueue;
+ Log.verbose(logLevel, "Input queue finished");
  },
  close,
  flush: async () => {
@@ -764,7 +987,8 @@ var createVideoEncoder = ({
  onError,
  signal,
  config,
- logLevel
+ logLevel,
+ outputCodec
  }) => {
  if (signal.aborted) {
  throw new Error("Not creating video encoder, already aborted");
@@ -775,17 +999,14 @@ var createVideoEncoder = ({
  error(error) {
  onError(error);
  },
- output(chunk) {
- if (chunk.duration === null) {
- throw new Error("Duration is null");
- }
- const timestamp = chunk.timestamp + chunk.duration;
+ output(chunk, metadata) {
+ const timestamp = chunk.timestamp + (chunk.duration ?? 0);
  ioSynchronizer.onOutput(timestamp);
  outputQueue = outputQueue.then(() => {
  if (signal.aborted) {
  return;
  }
- return onChunk(chunk);
+ return onChunk(chunk, metadata ?? null);
  }).then(() => {
  ioSynchronizer.onProcessed();
  return Promise.resolve();
@@ -805,6 +1026,7 @@ var createVideoEncoder = ({
  close();
  };
  signal.addEventListener("abort", onAbort);
+ Log.verbose(logLevel, "Configuring video encoder", config);
  encoder.configure(config);
  let framesProcessed = 0;
  const encodeFrame = async (frame) => {
@@ -812,14 +1034,14 @@ var createVideoEncoder = ({
  return;
  }
  await ioSynchronizer.waitFor({
- unemitted: 2,
- _unprocessed: 2
+ unemitted: 10,
+ _unprocessed: 10
  });
  if (encoder.state === "closed") {
  return;
  }
  const keyFrame = framesProcessed % 40 === 0;
- encoder.encode(frame, {
+ encoder.encode(convertToCorrectVideoFrame({ videoFrame: frame, outputCodec }), {
  keyFrame
  });
  ioSynchronizer.inputItem(frame.timestamp, keyFrame);
@@ -849,9 +1071,8 @@ var makeVideoTrackHandler = ({
  onVideoFrame,
  onMediaStateUpdate,
  abortConversion,
- convertMediaState,
  controller,
- videoCodec,
+ defaultVideoCodec,
  onVideoTrack,
  logLevel,
  container
@@ -859,34 +1080,50 @@ var makeVideoTrackHandler = ({
  if (controller.signal.aborted) {
  throw new error_cause_default("Aborted");
  }
- const videoOperation = await (onVideoTrack ?? defaultResolveVideoAction)({
+ const videoOperation = await (onVideoTrack ?? defaultOnVideoTrackHandler)({
  track,
- videoCodec,
+ defaultVideoCodec,
  logLevel,
  container
  });
  if (videoOperation.type === "drop") {
  return null;
  }
+ if (videoOperation.type === "fail") {
+ throw new error_cause_default(`Video track with ID ${track.trackId} could resolved with {"type": "fail"}. This could mean that this video track could neither be copied to the output container or re-encoded. You have the option to drop the track instead of failing it: https://remotion.dev/docs/webcodecs/track-transformation`);
+ }
  if (videoOperation.type === "copy") {
+ Log.verbose(logLevel, `Copying video track with codec ${track.codec} and timescale ${track.timescale}`);
  const videoTrack = await state.addTrack({
  type: "video",
  color: track.color,
  width: track.codedWidth,
  height: track.codedHeight,
  codec: track.codecWithoutConfig,
- codecPrivate: track.codecPrivate
+ codecPrivate: track.codecPrivate,
+ timescale: track.timescale
  });
  return async (sample) => {
- await state.addSample(sample, videoTrack.trackNumber, true);
- convertMediaState.decodedVideoFrames++;
- onMediaStateUpdate?.({ ...convertMediaState });
+ await state.addSample({
+ chunk: sample,
+ trackNumber: videoTrack.trackNumber,
+ isVideo: true,
+ timescale: track.timescale,
+ codecPrivate: track.codecPrivate
+ });
+ onMediaStateUpdate?.((prevState) => {
+ return {
+ ...prevState,
+ decodedVideoFrames: prevState.decodedVideoFrames + 1
+ };
+ });
  };
  }
  const videoEncoderConfig = await getVideoEncoderConfig({
  codec: videoOperation.videoCodec,
  height: track.displayAspectHeight,
- width: track.displayAspectWidth
+ width: track.displayAspectWidth,
+ fps: track.fps
  });
  const videoDecoderConfig = await getVideoDecoderConfigWithHardwareAcceleration(track);
  if (videoEncoderConfig === null) {
@@ -902,14 +1139,26 @@ var makeVideoTrackHandler = ({
  color: track.color,
  width: track.codedWidth,
  height: track.codedHeight,
- codec: videoCodec,
- codecPrivate: null
+ codec: videoOperation.videoCodec,
+ codecPrivate: null,
+ timescale: track.timescale
  });
+ Log.verbose(logLevel, `Created new video track with ID ${trackNumber}, codec ${videoOperation.videoCodec} and timescale ${track.timescale}`);
  const videoEncoder = createVideoEncoder({
- onChunk: async (chunk) => {
- await state.addSample(convertEncodedChunk(chunk), trackNumber, true);
- convertMediaState.encodedVideoFrames++;
- onMediaStateUpdate?.({ ...convertMediaState });
+ onChunk: async (chunk, metadata) => {
+ await state.addSample({
+ chunk: convertEncodedChunk(chunk, trackNumber),
+ trackNumber,
+ isVideo: true,
+ timescale: track.timescale,
+ codecPrivate: arrayBufferToUint8Array(metadata?.decoderConfig?.description ?? null)
+ });
+ onMediaStateUpdate?.((prevState) => {
+ return {
+ ...prevState,
+ encodedVideoFrames: prevState.encodedVideoFrames + 1
+ };
+ });
  },
  onError: (err) => {
  abortConversion(new error_cause_default(`Video encoder of track ${track.trackId} failed (see .cause of this error)`, {
@@ -918,18 +1167,18 @@ var makeVideoTrackHandler = ({
  },
  signal: controller.signal,
  config: videoEncoderConfig,
- logLevel
+ logLevel,
+ outputCodec: videoOperation.videoCodec
  });
  const videoDecoder = createVideoDecoder({
  config: videoDecoderConfig,
  onFrame: async (frame) => {
  await onFrame({
- convertMediaState,
  frame,
- onMediaStateUpdate,
  track,
  videoEncoder,
- onVideoFrame
+ onVideoFrame,
+ outputCodec: videoOperation.videoCodec
  });
  },
  onError: (err) => {
@@ -941,21 +1190,76 @@ var makeVideoTrackHandler = ({
  logLevel
  });
  state.addWaitForFinishPromise(async () => {
+ Log.verbose(logLevel, "Waiting for video decoder to finish");
  await videoDecoder.waitForFinish();
- await videoEncoder.waitForFinish();
  videoDecoder.close();
+ Log.verbose(logLevel, "Video decoder finished. Waiting for encoder to finish");
+ await videoEncoder.waitForFinish();
  videoEncoder.close();
+ Log.verbose(logLevel, "Encoder finished");
  });
  return async (chunk) => {
  await videoDecoder.processSample(chunk);
  };
  };

+ // src/throttled-state-update.ts
+ var throttledStateUpdate = ({
+ updateFn,
+ everyMilliseconds,
+ signal
+ }) => {
+ let currentState = {
+ decodedAudioFrames: 0,
+ decodedVideoFrames: 0,
+ encodedVideoFrames: 0,
+ encodedAudioFrames: 0,
+ bytesWritten: 0,
+ millisecondsWritten: 0,
+ expectedOutputDurationInMs: null,
+ overallProgress: 0
+ };
+ if (!updateFn) {
+ return {
+ get: () => currentState,
+ update: null,
+ stopAndGetLastProgress: () => {
+ }
+ };
+ }
+ let lastUpdated = null;
+ const callUpdateIfChanged = () => {
+ if (currentState === lastUpdated) {
+ return;
+ }
+ updateFn(currentState);
+ lastUpdated = currentState;
+ };
+ const interval = setInterval(() => {
+ callUpdateIfChanged();
+ }, everyMilliseconds);
+ const onAbort = () => {
+ clearInterval(interval);
+ };
+ signal.addEventListener("abort", onAbort, { once: true });
+ return {
+ get: () => currentState,
+ update: (fn) => {
+ currentState = fn(currentState);
+ },
+ stopAndGetLastProgress: () => {
+ clearInterval(interval);
+ signal.removeEventListener("abort", onAbort);
+ return currentState;
+ }
+ };
+ };
+
  // src/convert-media.ts
  var convertMedia = async function({
  src,
  onVideoFrame,
- onMediaStateUpdate: onMediaStateDoNoCallDirectly,
+ onProgress: onProgressDoNotCallDirectly,
  audioCodec,
  container,
  videoCodec,
@@ -966,18 +1270,16 @@ var convertMedia = async function({
  fields,
  logLevel = "info",
  writer,
+ progressIntervalInMs,
  ...more
  }) {
  if (userPassedAbortSignal?.aborted) {
  return Promise.reject(new error_cause_default("Aborted"));
  }
- if (container !== "webm") {
- return Promise.reject(new TypeError('Only `to: "webm"` is supported currently'));
+ if (container !== "webm" && container !== "mp4") {
+ return Promise.reject(new TypeError('Only `to: "webm"` and `to: "mp4"` is supported currently'));
  }
- if (audioCodec !== "opus") {
- return Promise.reject(new TypeError('Only `audioCodec: "opus"` is supported currently'));
- }
- if (videoCodec !== "vp8" && videoCodec !== "vp9") {
+ if (videoCodec && videoCodec !== "vp8" && videoCodec !== "vp9") {
  return Promise.reject(new TypeError('Only `videoCodec: "vp8"` and `videoCodec: "vp9"` are supported currently'));
  }
  const { resolve, reject, getPromiseToImmediatelyReturn } = withResolversAndWaitForReturn();
@@ -992,63 +1294,63 @@ var convertMedia = async function({
  abortConversion(new error_cause_default("Conversion aborted by user"));
  };
  userPassedAbortSignal?.addEventListener("abort", onUserAbort);
- const convertMediaState = {
- decodedAudioFrames: 0,
- decodedVideoFrames: 0,
- encodedVideoFrames: 0,
- encodedAudioFrames: 0,
- bytesWritten: 0,
- millisecondsWritten: 0,
- expectedOutputDurationInMs: null,
- overallProgress: 0
- };
- const onMediaStateUpdate = (newState) => {
- if (controller.signal.aborted) {
- return;
- }
- onMediaStateDoNoCallDirectly?.(newState);
- };
- const state = await MediaParserInternals2.createMedia({
+ const creator = container === "webm" ? MediaParserInternals4.createMatroskaMedia : MediaParserInternals4.createIsoBaseMedia;
+ const throttledState = throttledStateUpdate({
+ updateFn: onProgressDoNotCallDirectly ?? null,
+ everyMilliseconds: progressIntervalInMs ?? 100,
+ signal: controller.signal
+ });
+ const state = await creator({
+ filename: generateOutputFilename(src, container),
  writer: await autoSelectWriter(writer, logLevel),
  onBytesProgress: (bytesWritten) => {
- convertMediaState.bytesWritten = bytesWritten;
- onMediaStateUpdate?.(convertMediaState);
+ throttledState.update?.((prevState) => {
+ return {
+ ...prevState,
+ bytesWritten
+ };
+ });
  },
  onMillisecondsProgress: (millisecondsWritten) => {
- if (millisecondsWritten > convertMediaState.millisecondsWritten) {
- convertMediaState.millisecondsWritten = millisecondsWritten;
- convertMediaState.overallProgress = calculateProgress({
- millisecondsWritten: convertMediaState.millisecondsWritten,
- expectedOutputDurationInMs: convertMediaState.expectedOutputDurationInMs
- });
- onMediaStateUpdate?.(convertMediaState);
- }
- }
+ throttledState.update?.((prevState) => {
+ if (millisecondsWritten > prevState.millisecondsWritten) {
+ return {
+ ...prevState,
+ millisecondsWritten,
+ overallProgress: calculateProgress({
+ millisecondsWritten: prevState.millisecondsWritten,
+ expectedOutputDurationInMs: prevState.expectedOutputDurationInMs
+ })
+ };
+ }
+ return prevState;
+ });
+ },
+ logLevel
  });
  const onVideoTrack = makeVideoTrackHandler({
  state,
  onVideoFrame: onVideoFrame ?? null,
- onMediaStateUpdate: onMediaStateUpdate ?? null,
+ onMediaStateUpdate: throttledState.update ?? null,
  abortConversion,
- convertMediaState,
  controller,
- videoCodec,
+ defaultVideoCodec: videoCodec ?? null,
  onVideoTrack: userVideoResolver ?? null,
  logLevel,
  container
  });
  const onAudioTrack = makeAudioTrackHandler({
  abortConversion,
- audioCodec,
+ defaultAudioCodec: audioCodec ?? null,
  controller,
- convertMediaState,
- onMediaStateUpdate: onMediaStateUpdate ?? null,
+ onMediaStateUpdate: throttledState.update ?? null,
  state,
  onAudioTrack: userAudioResolver ?? null,
  logLevel,
  container
  });
  parseMedia({
+ logLevel,
  src,
  onVideoTrack,
  onAudioTrack,
@@ -1068,27 +1370,44 @@ var convertMedia = async function({
  casted.onDurationInSeconds(durationInSeconds);
  }
  const expectedOutputDurationInMs = durationInSeconds * 1000;
- convertMediaState.expectedOutputDurationInMs = expectedOutputDurationInMs;
- convertMediaState.overallProgress = calculateProgress({
- millisecondsWritten: convertMediaState.millisecondsWritten,
- expectedOutputDurationInMs
+ throttledState.update?.((prevState) => {
+ return {
+ ...prevState,
+ expectedOutputDurationInMs,
+ overallProgress: calculateProgress({
+ millisecondsWritten: prevState.millisecondsWritten,
+ expectedOutputDurationInMs
+ })
+ };
  });
- onMediaStateUpdate(convertMediaState);
  }
  }).then(() => {
  return state.waitForFinish();
  }).then(() => {
- resolve({ save: state.save, remove: state.remove });
+ resolve({
+ save: state.save,
+ remove: state.remove,
+ finalState: throttledState.get()
+ });
  }).catch((err) => {
  reject(err);
+ }).finally(() => {
+ throttledState.stopAndGetLastProgress();
  });
  return getPromiseToImmediatelyReturn().finally(() => {
  userPassedAbortSignal?.removeEventListener("abort", onUserAbort);
  });
  };
+ // src/index.ts
+ setRemotionImported();
  export {
+ getDefaultVideoCodec,
+ getDefaultAudioCodec,
  getAvailableVideoCodecs,
+ getAvailableContainers,
  getAvailableAudioCodecs,
+ defaultOnVideoTrackHandler,
+ defaultOnAudioTrackHandler,
  createVideoEncoder,
  createVideoDecoder,
  createAudioEncoder,