@remotion/webcodecs 4.0.305 → 4.0.306
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/audio-decoder.d.ts +15 -10
- package/dist/audio-decoder.js +49 -52
- package/dist/audio-encoder.d.ts +5 -5
- package/dist/audio-encoder.js +20 -42
- package/dist/convert-media.js +2 -2
- package/dist/copy-audio-track.d.ts +11 -0
- package/dist/copy-audio-track.js +31 -0
- package/dist/copy-video-track.d.ts +11 -0
- package/dist/copy-video-track.js +32 -0
- package/dist/create/event-emitter.d.ts +0 -1
- package/dist/create/progress-tracker.d.ts +0 -2
- package/dist/create/progress-tracker.js +3 -20
- package/dist/esm/index.mjs +560 -470
- package/dist/get-wave-audio-decoder.d.ts +6 -1
- package/dist/get-wave-audio-decoder.js +16 -11
- package/dist/io-manager/io-synchronizer.d.ts +6 -13
- package/dist/io-manager/io-synchronizer.js +31 -72
- package/dist/io-manager/make-timeout-promise.d.ts +1 -1
- package/dist/io-manager/make-timeout-promise.js +8 -4
- package/dist/on-audio-track.d.ts +2 -2
- package/dist/on-audio-track.js +15 -150
- package/dist/on-frame.d.ts +2 -4
- package/dist/on-frame.js +8 -9
- package/dist/on-video-track.d.ts +2 -2
- package/dist/on-video-track.js +18 -129
- package/dist/processing-queue.d.ts +19 -0
- package/dist/processing-queue.js +47 -0
- package/dist/reencode-audio-track.d.ts +18 -0
- package/dist/reencode-audio-track.js +164 -0
- package/dist/reencode-video-track.d.ts +19 -0
- package/dist/reencode-video-track.js +151 -0
- package/dist/sort-video-frames.d.ts +4 -3
- package/dist/sort-video-frames.js +7 -3
- package/dist/video-decoder.d.ts +14 -8
- package/dist/video-decoder.js +37 -72
- package/dist/video-encoder.d.ts +6 -5
- package/dist/video-encoder.js +16 -40
- package/dist/wav-audio-encoder.d.ts +4 -1
- package/dist/wav-audio-encoder.js +3 -2
- package/package.json +5 -5
package/dist/esm/index.mjs
CHANGED

@@ -462,31 +462,38 @@ var getBytesPerSample = (sampleFormat) => {
 };
 var getWaveAudioDecoder = ({
   onFrame,
-
-  sampleFormat
+  config,
+  sampleFormat,
+  ioSynchronizer,
+  onError
 }) => {
-  let queue = Promise.resolve();
   const processSample = async (audioSample) => {
     const bytesPerSample = getBytesPerSample(sampleFormat);
-
+    const audioData = new AudioData({
       data: audioSample.data,
       format: sampleFormat,
-      numberOfChannels:
-      numberOfFrames: audioSample.data.byteLength / bytesPerSample /
-      sampleRate:
+      numberOfChannels: config.numberOfChannels,
+      numberOfFrames: audioSample.data.byteLength / bytesPerSample / config.numberOfChannels,
+      sampleRate: config.sampleRate,
       timestamp: audioSample.timestamp
-    })
+    });
+    try {
+      await onFrame(audioData);
+    } catch (err) {
+      audioData.close();
+      onError(err);
+    }
   };
   return {
     close() {
       return Promise.resolve();
     },
-
-
-      return queue;
+    decode(audioSample) {
+      processSample(audioSample);
     },
     flush: () => Promise.resolve(),
-    waitForFinish: () => Promise.resolve()
+    waitForFinish: () => Promise.resolve(),
+    waitForQueueToBeLessThan: ioSynchronizer.waitForQueueSize
   };
 };
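In the hunk above, the WAV decoder now takes channel count and sample rate from the decoder config instead of the sample, and computes the frame count as byteLength / bytesPerSample / numberOfChannels. A minimal sketch of that arithmetic; the helper and config type are illustrative, not part of the package API:

// Sketch of the frame-count formula for interleaved PCM.
// Assumption: "s16" means 2 bytes per sample, per WebCodecs AudioSampleFormat.
type PcmConfig = {numberOfChannels: number; sampleRate: number};

const framesInChunk = (
  byteLength: number,
  bytesPerSample: number,
  config: PcmConfig,
): number => byteLength / bytesPerSample / config.numberOfChannels;

// 4096 bytes of stereo s16 audio → 4096 / 2 / 2 = 1024 frames,
// which is about 23 ms at 44100 Hz.
console.log(framesInChunk(4096, 2, {numberOfChannels: 2, sampleRate: 44100}));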
@@ -562,8 +569,10 @@ var makeTimeoutPromise = ({
   const onResume = () => {
     set();
   };
-  controller
-
+  if (controller) {
+    controller.addEventListener("pause", onPause);
+    controller.addEventListener("resume", onResume);
+  }
   return {
     timeoutPromise: promise,
     clear: () => {
@@ -571,8 +580,10 @@ var makeTimeoutPromise = ({
         clearTimeout(timeout);
       }
       resolve();
-      controller
-
+      if (controller) {
+        controller.removeEventListener("pause", onPause);
+        controller.removeEventListener("resume", onResume);
+      }
     }
   };
 };
@@ -581,39 +592,26 @@ var makeTimeoutPromise = ({
 var makeIoSynchronizer = ({
   logLevel,
   label,
-
+  controller
 }) => {
   const eventEmitter = new IoEventEmitter;
   let lastInput = 0;
-  let lastInputKeyframe = 0;
   let lastOutput = 0;
   let inputsSinceLastOutput = 0;
   let inputs = [];
-
-  let _unprocessed = 0;
-  const getUnprocessed = () => _unprocessed;
-  const getUnemittedItems = () => {
+  const getQueuedItems = () => {
     inputs = inputs.filter((input) => Math.floor(input) > Math.floor(lastOutput));
     return inputs.length;
   };
-  const getUnemittedKeyframes = () => {
-    keyframes = keyframes.filter((keyframe) => Math.floor(keyframe) > Math.floor(lastOutput));
-    return keyframes.length;
-  };
   const printState = (prefix) => {
-    Log.trace(logLevel, `[${label}] ${prefix}, state: Last input = ${lastInput} Last
+    Log.trace(logLevel, `[${label}] ${prefix}, state: Last input = ${lastInput} Last output = ${lastOutput} Inputs since last output = ${inputsSinceLastOutput}, Queue = ${getQueuedItems()}`);
   };
-  const inputItem = (timestamp
+  const inputItem = (timestamp) => {
     lastInput = timestamp;
-    if (keyFrame) {
-      lastInputKeyframe = timestamp;
-      keyframes.push(timestamp);
-    }
     inputsSinceLastOutput++;
     inputs.push(timestamp);
     eventEmitter.dispatchEvent("input", {
-      timestamp
-      keyFrame
+      timestamp
     });
     printState("Input item");
   };
@@ -623,7 +621,6 @@ var makeIoSynchronizer = ({
     eventEmitter.dispatchEvent("output", {
       timestamp
     });
-    _unprocessed++;
    printState("Got output");
  };
  const waitForOutput = () => {
@@ -635,133 +632,98 @@ var makeIoSynchronizer = ({
     eventEmitter.addEventListener("output", on);
     return promise;
   };
-  const
-
-
-
-
-
-
-    return promise;
+  const makeErrorBanner = () => {
+    return [
+      `Waited too long for ${label} to finish:`,
+      `${getQueuedItems()} queued items`,
+      `inputs: ${JSON.stringify(inputs)}`,
+      `last output: ${lastOutput}`
+    ];
   };
-  const
-
-
-
-    controller
-  }) => {
-    await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+  const waitForQueueSize = async (queueSize) => {
+    if (getQueuedItems() <= queueSize) {
+      return Promise.resolve();
+    }
     const { timeoutPromise, clear } = makeTimeoutPromise({
       label: () => [
-
-
-        `${getUnprocessed()} unprocessed items: ${JSON.stringify(_unprocessed)}`,
-        `smallest progress: ${progress.getSmallestProgress()}`,
-        `inputs: ${JSON.stringify(inputs)}`,
-        `last output: ${lastOutput}`,
-        `wanted: ${unemitted} unemitted items, ${unprocessed} unprocessed items, minimum progress ${minimumProgress}`,
+        ...makeErrorBanner(),
+        `wanted: <${queueSize} queued items`,
         `Report this at https://remotion.dev/report`
       ].join(`
`),
       ms: 1e4,
       controller
     });
-    controller
+    if (controller) {
+      controller._internals._mediaParserController._internals.signal.addEventListener("abort", clear);
+    }
     await Promise.race([
       timeoutPromise,
-
-
-
-
-
-      })(),
-      (async () => {
-        while (getUnprocessed() > unprocessed) {
-          await waitForProcessed();
-        }
-      })(),
-      minimumProgress === null || progress.getSmallestProgress() === null ? Promise.resolve() : (async () => {
-        while (progress.getSmallestProgress() < minimumProgress) {
-          await progress.waitForProgress();
-        }
-      })()
-    ])
+      (async () => {
+        while (getQueuedItems() > queueSize) {
+          await waitForOutput();
+        }
+      })()
     ]).finally(() => clear());
-    controller
-
-
-    await waitFor({
-      unprocessed: 0,
-      unemitted: 0,
-      minimumProgress: null,
-      controller
-    });
+    if (controller) {
+      controller._internals._mediaParserController._internals.signal.removeEventListener("abort", clear);
+    }
   };
-  const
-
-    _unprocessed--;
+  const waitForFinish = async () => {
+    await waitForQueueSize(0);
   };
   return {
     inputItem,
     onOutput,
-    waitFor,
     waitForFinish,
-
-    getUnprocessed
+    waitForQueueSize
   };
 };

 // src/audio-decoder.ts
-var
+var internalCreateAudioDecoder = ({
   onFrame,
   onError,
   controller,
   config,
-  logLevel
-  track,
-  progressTracker
+  logLevel
 }) => {
-  if (controller._internals._mediaParserController._internals.signal.aborted) {
+  if (controller && controller._internals._mediaParserController._internals.signal.aborted) {
     throw new Error("Not creating audio decoder, already aborted");
   }
-  if (config.codec === "pcm-s16") {
-    return getWaveAudioDecoder({ onFrame, track, sampleFormat: "s16" });
-  }
   const ioSynchronizer = makeIoSynchronizer({
     logLevel,
     label: "Audio decoder",
-
+    controller
   });
-
+  if (config.codec === "pcm-s16") {
+    return getWaveAudioDecoder({
+      onFrame,
+      config,
+      sampleFormat: "s16",
+      logLevel,
+      ioSynchronizer,
+      onError
+    });
+  }
   const audioDecoder = new AudioDecoder({
-    output(frame) {
-
-
-
-      };
-      controller._internals._mediaParserController._internals.signal.addEventListener("abort", abortHandler, {
-        once: true
-      });
-      outputQueue = outputQueue.then(() => {
-        if (controller._internals._mediaParserController._internals.signal.aborted) {
-          return;
-        }
-        return onFrame(frame);
-      }).then(() => {
-        ioSynchronizer.onProcessed();
-        controller._internals._mediaParserController._internals.signal.removeEventListener("abort", abortHandler);
-        return Promise.resolve();
-      }).catch((err) => {
+    async output(frame) {
+      try {
+        await onFrame(frame);
+      } catch (err) {
         frame.close();
         onError(err);
-      }
+      }
+      ioSynchronizer.onOutput(frame.timestamp + (frame.duration ?? 0));
     },
     error(error) {
       onError(error);
     }
   });
   const close = () => {
-    controller
+    if (controller) {
+      controller._internals._mediaParserController._internals.signal.removeEventListener("abort", onAbort);
+    }
     if (audioDecoder.state === "closed") {
       return;
     }
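The synchronizer rework above drops keyframe tracking, the separate "unprocessed" counter, and progress-based waiting. It now exposes one primitive, waitForQueueSize(n), which resolves once the number of queued inputs is at or below n; waitForFinish() is simply waitForQueueSize(0). A standalone sketch of the same pattern; the class and its members are illustrative, not the package's internals:

// Sketch of queue-size backpressure: resolve once the in-flight count
// drops to the target. Illustrative only.
class QueueGate {
  private inFlight = 0;
  private waiters: Array<() => void> = [];

  input(): void {
    this.inFlight++;
  }

  output(): void {
    this.inFlight--;
    // Wake all waiters; each one re-checks its own threshold.
    const current = this.waiters;
    this.waiters = [];
    for (const wake of current) wake();
  }

  async waitForQueueSize(target: number): Promise<void> {
    while (this.inFlight > target) {
      await new Promise<void>((resolve) => this.waiters.push(resolve));
    }
  }

  waitForFinish(): Promise<void> {
    return this.waitForQueueSize(0);
  }
}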
@@ -770,45 +732,52 @@ var createAudioDecoder = ({
   const onAbort = () => {
     close();
   };
-  controller
+  if (controller) {
+    controller._internals._mediaParserController._internals.signal.addEventListener("abort", onAbort);
+  }
   audioDecoder.configure(config);
-  const processSample =
+  const processSample = (audioSample) => {
     if (audioDecoder.state === "closed") {
       return;
     }
-
-    await ioSynchronizer.waitFor({
-      unemitted: 20,
-      unprocessed: 20,
-      minimumProgress: audioSample.timestamp - 1e7,
-      controller
-    });
-    const chunk = new EncodedAudioChunk(audioSample);
+    const chunk = audioSample instanceof EncodedAudioChunk ? audioSample : new EncodedAudioChunk(audioSample);
     audioDecoder.decode(chunk);
     if (chunk.byteLength > 16) {
-      ioSynchronizer.inputItem(chunk.timestamp
+      ioSynchronizer.inputItem(chunk.timestamp);
     }
   };
-  let queue = Promise.resolve();
   return {
-
-
-      return queue;
+    decode: (sample) => {
+      processSample(sample);
     },
     waitForFinish: async () => {
       try {
         await audioDecoder.flush();
       } catch {}
-      await
-      await ioSynchronizer.waitForFinish(controller);
-      await outputQueue;
+      await ioSynchronizer.waitForFinish();
     },
     close,
     flush: async () => {
       await audioDecoder.flush();
-    }
+    },
+    waitForQueueToBeLessThan: ioSynchronizer.waitForQueueSize
   };
 };
+var createAudioDecoder = ({
+  onFrame,
+  onError,
+  controller,
+  track,
+  logLevel
+}) => {
+  return internalCreateAudioDecoder({
+    onFrame,
+    onError,
+    controller: controller ?? null,
+    config: track,
+    logLevel: logLevel ?? "error"
+  });
+};
 // src/audio-encoder.ts
 import {
   MediaParserAbortError
@@ -928,13 +897,14 @@ var convertAudioData = ({
 var getWaveAudioEncoder = ({
   onChunk,
   controller,
-  config
+  config,
+  ioSynchronizer
 }) => {
   return {
     close: () => {
       return Promise.resolve();
     },
-
+    encode: (unconvertedAudioData) => {
       if (controller._internals._mediaParserController._internals.signal.aborted) {
         return Promise.resolve();
       }
@@ -954,7 +924,8 @@ var getWaveAudioEncoder = ({
       return onChunk(chunk);
     },
     flush: () => Promise.resolve(),
-    waitForFinish: () => Promise.resolve()
+    waitForFinish: () => Promise.resolve(),
+    ioSynchronizer
   };
 };

@@ -966,39 +937,32 @@ var createAudioEncoder = ({
   controller,
   config: audioEncoderConfig,
   logLevel,
-  onNewAudioSampleRate
-  progressTracker
+  onNewAudioSampleRate
 }) => {
   if (controller._internals._mediaParserController._internals.signal.aborted) {
     throw new MediaParserAbortError("Not creating audio encoder, already aborted");
   }
+  const ioSynchronizer = makeIoSynchronizer({
+    logLevel,
+    label: "Audio encoder",
+    controller
+  });
   if (codec === "wav") {
     return getWaveAudioEncoder({
       onChunk,
       controller,
-      config: audioEncoderConfig
+      config: audioEncoderConfig,
+      ioSynchronizer
     });
   }
-  const ioSynchronizer = makeIoSynchronizer({
-    logLevel,
-    label: "Audio encoder",
-    progress: progressTracker
-  });
-  let prom = Promise.resolve();
   const encoder = new AudioEncoder({
-    output: (chunk) => {
-
-
-
-        return;
-      }
-      return onChunk(chunk);
-    }).then(() => {
-      ioSynchronizer.onProcessed();
-      return Promise.resolve();
-    }).catch((err) => {
+    output: async (chunk) => {
+      try {
+        await onChunk(chunk);
+      } catch (err) {
        onError(err);
-    }
+      }
+      ioSynchronizer.onOutput(chunk.timestamp);
     },
     error(error) {
       onError(error);
@@ -1019,17 +983,7 @@ var createAudioEncoder = ({
     throw new Error('Only `codec: "opus"` and `codec: "aac"` is supported currently');
   }
   const wantedSampleRate = audioEncoderConfig.sampleRate;
-  const encodeFrame =
-    if (encoder.state === "closed") {
-      return;
-    }
-    progressTracker.setPossibleLowestTimestamp(audioData.timestamp);
-    await ioSynchronizer.waitFor({
-      unemitted: 20,
-      unprocessed: 20,
-      minimumProgress: audioData.timestamp - 1e7,
-      controller
-    });
+  const encodeFrame = (audioData) => {
     if (encoder.state === "closed") {
       return;
     }
@@ -1045,23 +999,21 @@ var createAudioEncoder = ({
       }
     }
     encoder.encode(audioData);
-    ioSynchronizer.inputItem(audioData.timestamp
+    ioSynchronizer.inputItem(audioData.timestamp);
   };
-  let queue = Promise.resolve();
   return {
-
-
-      return queue;
+    encode: (audioData) => {
+      encodeFrame(audioData);
     },
     waitForFinish: async () => {
       await encoder.flush();
-      await ioSynchronizer.waitForFinish(
-      await prom;
+      await ioSynchronizer.waitForFinish();
     },
     close,
     flush: async () => {
       await encoder.flush();
-    }
+    },
+    ioSynchronizer
   };
 };
 // src/can-copy-audio-track.ts
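Both encoder variants above now return their ioSynchronizer, so upstream stages throttle against encoder queue capacity instead of the removed waitFor({unemitted, unprocessed, minimumProgress}) call. A sketch of that consumer-side pattern; the interfaces are illustrative, not the package's types:

// Sketch: an upstream stage throttling against a downstream encoder's queue.
interface IoSynchronizer {
  waitForQueueSize(queueSize: number): Promise<void>;
}
interface Encoder<T> {
  encode(item: T): void;
  ioSynchronizer: IoSynchronizer;
}

async function pushWithBackpressure<T>(encoder: Encoder<T>, items: T[]) {
  for (const item of items) {
    // Stall until at most 10 items are in flight, the same threshold
    // the re-encode pipelines in this diff use.
    await encoder.ioSynchronizer.waitForQueueSize(10);
    encoder.encode(item);
  }
}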
@@ -4132,13 +4084,7 @@ var makeProgressTracker = () => {
       startingTimestamp = Math.min(startingTimestamp, timestamp);
     }
   };
-  const
-    if (startingTimestamp === null) {
-      throw new Error("No starting timestamp");
-    }
-    return startingTimestamp;
-  };
-  const calculateSmallestProgress = () => {
+  const getSmallestProgress = () => {
     const progressValues = Object.values(trackNumberProgresses).map((p) => {
       if (p !== null) {
         return p;
@@ -4154,26 +4100,16 @@ var makeProgressTracker = () => {
     registerTrack: (trackNumber) => {
       trackNumberProgresses[trackNumber] = null;
     },
-    getSmallestProgress
+    getSmallestProgress,
     updateTrackProgress: (trackNumber, progress) => {
       if (trackNumberProgresses[trackNumber] === undefined) {
         throw new Error(`Tried to update progress for a track that was not registered: ${trackNumber}`);
       }
       trackNumberProgresses[trackNumber] = progress;
       eventEmitter.dispatchEvent("progress", {
-        smallestProgress:
+        smallestProgress: getSmallestProgress()
       });
     },
-    waitForProgress: () => {
-      const { promise, resolve } = withResolvers();
-      const on = () => {
-        eventEmitter.removeEventListener("progress", on);
-        resolve();
-      };
-      eventEmitter.addEventListener("progress", on);
-      return promise;
-    },
-    getStartingTimestamp,
     setPossibleLowestTimestamp
   };
 };
@@ -4209,22 +4145,37 @@ var getAvailableVideoCodecs = ({
   throw new Error(`Unsupported container: ${container}`);
 };

-// src/
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+// src/copy-audio-track.ts
+var copyAudioTrack = async ({
+  state,
+  track,
+  logLevel,
+  onMediaStateUpdate,
+  progressTracker
+}) => {
+  const addedTrack = await state.addTrack({
+    type: "audio",
+    codec: track.codecEnum,
+    numberOfChannels: track.numberOfChannels,
+    sampleRate: track.sampleRate,
+    codecPrivate: track.codecData?.data ?? null,
+    timescale: track.originalTimescale
+  });
+  Log.verbose(logLevel, `Copying audio track ${track.trackId} as track ${addedTrack.trackNumber}. Timescale = ${track.originalTimescale}, codec = ${track.codecEnum} (${track.codec}) `);
+  return async (audioSample) => {
+    progressTracker.setPossibleLowestTimestamp(Math.min(audioSample.timestamp, audioSample.decodingTimestamp ?? Infinity));
+    await state.addSample({
+      chunk: audioSample,
+      trackNumber: addedTrack.trackNumber,
+      isVideo: false,
+      codecPrivate: track.codecData?.data ?? null
+    });
+    onMediaStateUpdate?.((prevState) => {
+      return {
+        ...prevState,
+        encodedAudioFrames: prevState.encodedAudioFrames + 1
+      };
+    });
   };
 };

@@ -4281,62 +4232,85 @@ var getDefaultAudioCodec = ({
   throw new Error(`Unhandled container: ${container}`);
 };

-// src/
-
-
-
-
-
-
-
+// src/reencode-audio-track.ts
+import { MediaParserInternals as MediaParserInternals7 } from "@remotion/media-parser";
+
+// src/convert-encoded-chunk.ts
+var convertEncodedChunk = (chunk) => {
+  const arr = new Uint8Array(chunk.byteLength);
+  chunk.copyTo(arr);
+  return {
+    data: arr,
+    duration: chunk.duration ?? undefined,
+    timestamp: chunk.timestamp,
+    type: chunk.type,
+    decodingTimestamp: chunk.timestamp,
+    offset: 0
+  };
+};
+
+// src/processing-queue.ts
+function processingQueue({
+  onOutput,
   logLevel,
-
-
-
-  })
-  const
-    inputCodec: track.codecEnum,
-    outputContainer,
-    inputContainer
-  });
-  const audioOperation = await (onAudioTrack ?? defaultOnAudioTrackHandler)({
-    defaultAudioCodec: audioCodec ?? getDefaultAudioCodec({ container: outputContainer }),
-    track,
+  label,
+  onError,
+  controller
+}) {
+  const ioSynchronizer = makeIoSynchronizer({
     logLevel,
-
-
-    canCopyTrack
+    label,
+    controller
   });
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  let queue = Promise.resolve();
+  let stopped = false;
+  const input = (item) => {
+    if (stopped) {
+      return;
+    }
+    if (controller._internals._mediaParserController._internals.signal.aborted) {
+      stopped = true;
+      return;
+    }
+    const { timestamp } = item;
+    ioSynchronizer.inputItem(timestamp);
+    queue = queue.then(() => {
+      if (stopped) {
+        return;
+      }
+      if (controller._internals._mediaParserController._internals.signal.aborted) {
+        stopped = true;
+        return;
+      }
+      return onOutput(item);
+    }).then(() => {
+      ioSynchronizer.onOutput(timestamp);
+      return Promise.resolve();
+    }).catch((err) => {
+      stopped = true;
+      onError(err);
    });
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  };
+  return {
+    input,
+    ioSynchronizer
+  };
+}
+
+// src/reencode-audio-track.ts
+var reencodeAudioTrack = async ({
+  audioOperation,
+  track,
+  logLevel,
+  abortConversion,
+  state,
+  controller,
+  onMediaStateUpdate,
+  onAudioData,
+  progressTracker
+}) => {
+  if (audioOperation.type !== "reencode") {
+    throw new Error(`Audio track with ID ${track.trackId} could not be resolved with a valid operation. Received ${JSON.stringify(audioOperation)}, but must be either "copy", "reencode", "drop" or "fail"`);
  }
  const audioEncoderConfig = await getAudioEncoderConfig({
    numberOfChannels: track.numberOfChannels,
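The new processing-queue module above serializes async work on a single promise chain: each input extends the chain, the I/O synchronizer counts it, and the first error permanently stops the queue. A standalone sketch of the same pattern, simplified to omit the abort controller and queue accounting:

// Sketch of the serialized processing-queue pattern: items are handled
// strictly in order, and the first failure stops all later work.
function makeProcessingQueue<T>(
  onOutput: (item: T) => Promise<void> | void,
  onError: (err: Error) => void,
) {
  let queue: Promise<void> = Promise.resolve();
  let stopped = false;

  const input = (item: T) => {
    if (stopped) return;
    queue = queue
      .then(() => (stopped ? undefined : onOutput(item)))
      .catch((err) => {
        stopped = true;
        onError(err as Error);
      });
  };

  return {input, waitUntilIdle: () => queue};
}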
@@ -4400,11 +4374,18 @@ var makeAudioTrackHandler = ({
   codec: audioOperation.audioCodec,
   controller,
   config: audioEncoderConfig,
-  logLevel
-  progressTracker
+  logLevel
 });
-const
-
+const audioProcessingQueue = processingQueue({
+  controller,
+  label: "AudioData processing queue",
+  logLevel,
+  onError(error) {
+    abortConversion(new Error(`Audio decoder of track ${track.trackId} failed. Config: ${JSON.stringify(audioDecoderConfig)} (see .cause of this error)`, {
+      cause: error
+    }));
+  },
+  onOutput: async (audioData) => {
   const newAudioData = onAudioData ? await onAudioData?.({ audioData, track }) : audioData;
   if (newAudioData !== audioData) {
     if (newAudioData.duration !== audioData.duration) {
@@ -4424,7 +4405,10 @@ var makeAudioTrackHandler = ({
     }
     audioData.close();
   }
-  await
+  await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+  await audioEncoder.ioSynchronizer.waitForQueueSize(10);
+  await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+  audioEncoder.encode(newAudioData);
   onMediaStateUpdate?.((prevState) => {
     return {
       ...prevState,
@@ -4432,6 +4416,13 @@ var makeAudioTrackHandler = ({
     };
   });
   newAudioData.close();
+  }
+});
+const audioDecoder = internalCreateAudioDecoder({
+  onFrame: async (audioData) => {
+    await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+    await audioProcessingQueue.ioSynchronizer.waitForQueueSize(10);
+    audioProcessingQueue.input(audioData);
   },
   onError(error) {
     abortConversion(new Error(`Audio decoder of track ${track.trackId} failed. Config: ${JSON.stringify(audioDecoderConfig)} (see .cause of this error)`, {
@@ -4440,24 +4431,113 @@ var makeAudioTrackHandler = ({
   },
   controller,
   config: audioDecoderConfig,
-  logLevel
-  track,
-  progressTracker
+  logLevel
 });
 state.addWaitForFinishPromise(async () => {
   await audioDecoder.waitForFinish();
-
+  Log.verbose(logLevel, "Audio decoder finished");
   audioDecoder.close();
+  await audioProcessingQueue.ioSynchronizer.waitForFinish();
+  Log.verbose(logLevel, "Audio processing queue finished");
+  await audioEncoder.waitForFinish();
+  Log.verbose(logLevel, "Audio encoder finished");
   audioEncoder.close();
 });
 return async (audioSample) => {
-
+  progressTracker.setPossibleLowestTimestamp(Math.min(audioSample.timestamp, audioSample.decodingTimestamp ?? Infinity));
+  await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+  await audioDecoder.waitForQueueToBeLessThan(10);
+  audioDecoder.decode(audioSample);
 };
};

-// src/
-var
-
+// src/on-audio-track.ts
+var makeAudioTrackHandler = ({
+  state,
+  defaultAudioCodec: audioCodec,
+  controller,
+  abortConversion,
+  onMediaStateUpdate,
+  onAudioTrack,
+  logLevel,
+  outputContainer,
+  onAudioData,
+  progressTracker
+}) => async ({ track, container: inputContainer }) => {
+  const canCopyTrack = canCopyAudioTrack({
+    inputCodec: track.codecEnum,
+    outputContainer,
+    inputContainer
+  });
+  const audioOperation = await (onAudioTrack ?? defaultOnAudioTrackHandler)({
+    defaultAudioCodec: audioCodec ?? getDefaultAudioCodec({ container: outputContainer }),
+    track,
+    logLevel,
+    outputContainer,
+    inputContainer,
+    canCopyTrack
+  });
+  if (audioOperation.type === "drop") {
+    return null;
+  }
+  if (audioOperation.type === "fail") {
+    throw new Error(`Audio track with ID ${track.trackId} resolved with {"type": "fail"}. This could mean that this audio track could neither be copied to the output container or re-encoded. You have the option to drop the track instead of failing it: https://remotion.dev/docs/webcodecs/track-transformation`);
+  }
+  if (audioOperation.type === "copy") {
+    return copyAudioTrack({
+      logLevel,
+      onMediaStateUpdate,
+      state,
+      track,
+      progressTracker
+    });
+  }
+  return reencodeAudioTrack({
+    abortConversion,
+    controller,
+    logLevel,
+    onMediaStateUpdate,
+    audioOperation,
+    onAudioData,
+    state,
+    track,
+    progressTracker
+  });
+};
+
+// src/copy-video-track.ts
+var copyVideoTrack = async ({
+  logLevel,
+  state,
+  track,
+  onMediaStateUpdate,
+  progressTracker
+}) => {
+  Log.verbose(logLevel, `Copying video track with codec ${track.codec} and timescale ${track.originalTimescale}`);
+  const videoTrack = await state.addTrack({
+    type: "video",
+    color: track.advancedColor,
+    width: track.codedWidth,
+    height: track.codedHeight,
+    codec: track.codecEnum,
+    codecPrivate: track.codecData?.data ?? null,
+    timescale: track.originalTimescale
+  });
+  return async (sample) => {
+    progressTracker.setPossibleLowestTimestamp(Math.min(sample.timestamp, sample.decodingTimestamp ?? Infinity));
+    await state.addSample({
+      chunk: sample,
+      trackNumber: videoTrack.trackNumber,
+      isVideo: true,
+      codecPrivate: track.codecData?.data ?? null
+    });
+    onMediaStateUpdate?.((prevState) => {
+      return {
+        ...prevState,
+        decodedVideoFrames: prevState.decodedVideoFrames + 1
+      };
+    });
+  };
 };

 // src/default-on-video-track-handler.ts
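Per-track handling is now split into dedicated modules: makeAudioTrackHandler only resolves the operation (copy, reencode, drop, fail) and delegates to copyAudioTrack or reencodeAudioTrack. The re-encode path is a three-stage pipeline, decoder → processing queue → encoder, where each stage stalls until the next stage's queue is below 10 before pushing. A schematic sketch of that staging; the stage type is illustrative, not the package's:

// Schematic of the staged backpressure in the re-encode path. A producer
// stalls on the consumer's queue gate before pushing the next item.
interface Stage<T> {
  push(item: T): void;
  waitForQueueToBeLessThan(n: number): Promise<void>;
}

async function pump<T>(source: AsyncIterable<T>, stage: Stage<T>) {
  for await (const item of source) {
    await stage.waitForQueueToBeLessThan(10); // same threshold as the diff
    stage.push(item);
  }
}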
@@ -4513,6 +4593,11 @@ var getDefaultVideoCodec = ({
   throw new Error(`Unhandled container: ${container}`);
 };

+// src/arraybuffer-to-uint8-array.ts
+var arrayBufferToUint8Array = (buffer) => {
+  return buffer ? new Uint8Array(buffer) : null;
+};
+
 // src/convert-to-correct-videoframe.ts
 var needsToCorrectVideoFrame = ({
   videoFrame,
@@ -4553,7 +4638,6 @@ var convertToCorrectVideoFrame = ({
 var onFrame = async ({
   frame: unrotatedFrame,
   onVideoFrame,
-  videoEncoder,
   track,
   outputCodec,
   rotation,
@@ -4570,10 +4654,10 @@ var onFrame = async ({
   }
   const userProcessedFrame = onVideoFrame ? await onVideoFrame({ frame: rotated, track }) : rotated;
   if (userProcessedFrame.displayWidth !== rotated.displayWidth) {
-    throw new Error(`Returned VideoFrame of track ${track.trackId} has different displayWidth (${userProcessedFrame.displayWidth}) than the input frame (${
+    throw new Error(`Returned VideoFrame of track ${track.trackId} has different displayWidth (${userProcessedFrame.displayWidth}) than the input frame (${rotated.displayWidth})`);
   }
   if (userProcessedFrame.displayHeight !== rotated.displayHeight) {
-    throw new Error(`Returned VideoFrame of track ${track.trackId} has different displayHeight (${userProcessedFrame.displayHeight}) than the input frame (${
+    throw new Error(`Returned VideoFrame of track ${track.trackId} has different displayHeight (${userProcessedFrame.displayHeight}) than the input frame (${rotated.displayHeight})`);
   }
   if (userProcessedFrame.timestamp !== rotated.timestamp && !isSafari()) {
     throw new Error(`Returned VideoFrame of track ${track.trackId} has different timestamp (${userProcessedFrame.timestamp}) than the input frame (${rotated.timestamp}). When calling new VideoFrame(), pass {timestamp: frame.timestamp} as second argument`);
@@ -4581,32 +4665,31 @@ var onFrame = async ({
   if ((userProcessedFrame.duration ?? 0) !== (rotated.duration ?? 0)) {
     throw new Error(`Returned VideoFrame of track ${track.trackId} has different duration (${userProcessedFrame.duration}) than the input frame (${rotated.duration}). When calling new VideoFrame(), pass {duration: frame.duration} as second argument`);
   }
+  if (rotated !== userProcessedFrame) {
+    rotated.close();
+  }
   const fixedFrame = convertToCorrectVideoFrame({
     videoFrame: userProcessedFrame,
     outputCodec
   });
-  await videoEncoder.encodeFrame(fixedFrame, fixedFrame.timestamp);
-  fixedFrame.close();
-  if (rotated !== userProcessedFrame) {
-    rotated.close();
-  }
   if (fixedFrame !== userProcessedFrame) {
-
+    userProcessedFrame.close();
   }
+  return fixedFrame;
 };

 // src/sort-video-frames.ts
 var MAX_QUEUE_SIZE = 5;
 var videoFrameSorter = ({
-
-
+  controller,
+  onOutput
 }) => {
   const frames = [];
   const releaseFrame = async () => {
     await controller._internals._mediaParserController._internals.checkForAbortAndPause();
     const frame = frames.shift();
     if (frame) {
-      await
+      await onOutput(frame);
     }
   };
   const sortFrames = () => {
@@ -4642,66 +4725,47 @@ var videoFrameSorter = ({
     controller._internals._mediaParserController._internals.signal.removeEventListener("abort", onAbort);
   };
   controller._internals._mediaParserController._internals.signal.addEventListener("abort", onAbort);
+  let promise = Promise.resolve();
   return {
-    inputFrame
+    inputFrame: (frame) => {
+      promise = promise.then(() => inputFrame(frame));
+    },
+    waitUntilProcessed: () => promise,
     flush
   };
 };

 // src/video-decoder.ts
-var
+var internalCreateVideoDecoder = ({
   onFrame: onFrame2,
   onError,
   controller,
   config,
-  logLevel
-  progress
+  logLevel
 }) => {
   const ioSynchronizer = makeIoSynchronizer({
     logLevel,
     label: "Video decoder",
-
-  });
-  let outputQueue = Promise.resolve();
-  const addToQueue = (frame) => {
-    const cleanup = () => {
-      frame.close();
-    };
-    controller._internals._mediaParserController._internals.signal.addEventListener("abort", cleanup, {
-      once: true
-    });
-    outputQueue = outputQueue.then(() => {
-      if (controller._internals._mediaParserController._internals.signal.aborted) {
-        return;
-      }
-      return onFrame2(frame);
-    }).then(() => {
-      ioSynchronizer.onProcessed();
-    }).catch((err) => {
-      onError(err);
-    }).finally(() => {
-      controller._internals._mediaParserController._internals.signal.removeEventListener("abort", cleanup);
-      cleanup();
-    });
-    return outputQueue;
-  };
-  const frameSorter = videoFrameSorter({
-    controller,
-    onRelease: async (frame) => {
-      await addToQueue(frame);
-    }
+    controller
   });
   const videoDecoder = new VideoDecoder({
-    output(frame) {
+    async output(frame) {
+      try {
+        await onFrame2(frame);
+      } catch (err) {
+        onError(err);
+        frame.close();
+      }
       ioSynchronizer.onOutput(frame.timestamp);
-      frameSorter.inputFrame(frame);
     },
     error(error) {
       onError(error);
     }
   });
   const close = () => {
-    controller
+    if (controller) {
+      controller._internals._mediaParserController._internals.signal.removeEventListener("abort", onAbort);
+    }
     if (videoDecoder.state === "closed") {
       return;
     }
|
|
|
4710
4774
|
const onAbort = () => {
|
|
4711
4775
|
close();
|
|
4712
4776
|
};
|
|
4713
|
-
controller
|
|
4777
|
+
if (controller) {
|
|
4778
|
+
controller._internals._mediaParserController._internals.signal.addEventListener("abort", onAbort);
|
|
4779
|
+
}
|
|
4714
4780
|
videoDecoder.configure(config);
|
|
4715
|
-
const
|
|
4781
|
+
const decode = (sample) => {
|
|
4716
4782
|
if (videoDecoder.state === "closed") {
|
|
4717
4783
|
return;
|
|
4718
4784
|
}
|
|
4719
|
-
|
|
4720
|
-
|
|
4721
|
-
|
|
4722
|
-
progress.setPossibleLowestTimestamp(Math.min(sample.timestamp, sample.decodingTimestamp ?? Infinity));
|
|
4723
|
-
await ioSynchronizer.waitFor({
|
|
4724
|
-
unemitted: 20,
|
|
4725
|
-
unprocessed: 10,
|
|
4726
|
-
minimumProgress: sample.timestamp - 1e7,
|
|
4727
|
-
controller
|
|
4728
|
-
});
|
|
4729
|
-
videoDecoder.decode(new EncodedVideoChunk(sample));
|
|
4730
|
-
ioSynchronizer.inputItem(sample.timestamp, sample.type === "key");
|
|
4785
|
+
const encodedChunk = sample instanceof EncodedVideoChunk ? sample : new EncodedVideoChunk(sample);
|
|
4786
|
+
videoDecoder.decode(encodedChunk);
|
|
4787
|
+
ioSynchronizer.inputItem(sample.timestamp);
|
|
4731
4788
|
};
|
|
4732
|
-
let inputQueue = Promise.resolve();
|
|
4733
4789
|
return {
|
|
4734
|
-
|
|
4735
|
-
inputQueue = inputQueue.then(() => processSample(sample));
|
|
4736
|
-
return inputQueue;
|
|
4737
|
-
},
|
|
4790
|
+
decode,
|
|
4738
4791
|
waitForFinish: async () => {
|
|
4739
4792
|
await videoDecoder.flush();
|
|
4740
4793
|
Log.verbose(logLevel, "Flushed video decoder");
|
|
4741
|
-
await
|
|
4742
|
-
Log.verbose(logLevel, "Frame sorter flushed");
|
|
4743
|
-
await ioSynchronizer.waitForFinish(controller);
|
|
4794
|
+
await ioSynchronizer.waitForFinish();
|
|
4744
4795
|
Log.verbose(logLevel, "IO synchro finished");
|
|
4745
|
-
await outputQueue;
|
|
4746
|
-
Log.verbose(logLevel, "Output queue finished");
|
|
4747
|
-
await inputQueue;
|
|
4748
|
-
Log.verbose(logLevel, "Input queue finished");
|
|
4749
4796
|
},
|
|
4750
4797
|
close,
|
|
4751
4798
|
flush: async () => {
|
|
4752
4799
|
await videoDecoder.flush();
|
|
4753
|
-
}
|
|
4800
|
+
},
|
|
4801
|
+
waitForQueueToBeLessThan: ioSynchronizer.waitForQueueSize
|
|
4754
4802
|
};
|
|
4755
4803
|
};
|
|
4804
|
+
var createVideoDecoder = ({
|
|
4805
|
+
onFrame: onFrame2,
|
|
4806
|
+
onError,
|
|
4807
|
+
controller,
|
|
4808
|
+
track,
|
|
4809
|
+
logLevel
|
|
4810
|
+
}) => {
|
|
4811
|
+
return internalCreateVideoDecoder({
|
|
4812
|
+
onFrame: onFrame2,
|
|
4813
|
+
onError,
|
|
4814
|
+
controller: controller ?? null,
|
|
4815
|
+
config: track,
|
|
4816
|
+
logLevel: logLevel ?? "info"
|
|
4817
|
+
});
|
|
4818
|
+
};
|
|
4756
4819
|
|
|
4757
4820
|
// src/video-encoder.ts
|
|
4758
4821
|
import {
|
|
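Frame sorting is no longer wired into the decoder: internalCreateVideoDecoder hands each decoded frame straight to onFrame, and reordering happens in the caller (see videoFrameSorter in the re-encode path below). The public createVideoDecoder mirrors the audio wrapper. A hedged usage sketch based only on the option and method names in this diff; the track and chunk values are placeholders:

// Hedged usage sketch; `track` and `chunks` are placeholders.
import {createVideoDecoder} from '@remotion/webcodecs';

async function decodeAllFrames(track: VideoDecoderConfig, chunks: EncodedVideoChunk[]) {
  const decoder = createVideoDecoder({
    track, // forwarded as the decoder config
    onFrame: async (frame: VideoFrame) => {
      // Frames arrive in decode order; reorder to presentation order
      // yourself if needed.
      frame.close();
    },
    onError: (err: Error) => console.error(err),
  });
  for (const chunk of chunks) {
    await decoder.waitForQueueToBeLessThan(10);
    decoder.decode(chunk);
  }
  await decoder.waitForFinish();
  decoder.close();
}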
@@ -4765,7 +4828,7 @@ var createVideoEncoder = ({
   config,
   logLevel,
   outputCodec,
-
+  keyframeInterval
 }) => {
   if (controller._internals._mediaParserController._internals.signal.aborted) {
     throw new MediaParserAbortError2("Not creating video encoder, already aborted");
@@ -4773,27 +4836,20 @@ var createVideoEncoder = ({
   const ioSynchronizer = makeIoSynchronizer({
     logLevel,
     label: "Video encoder",
-
+    controller
   });
-  let outputQueue = Promise.resolve();
   const encoder = new VideoEncoder({
     error(error) {
       onError(error);
     },
-    output(chunk, metadata) {
+    async output(chunk, metadata) {
       const timestamp = chunk.timestamp + (chunk.duration ?? 0);
-
-
-
-        return;
-      }
-      return onChunk(chunk, metadata ?? null);
-    }).then(() => {
-      ioSynchronizer.onProcessed();
-      return Promise.resolve();
-    }).catch((err) => {
+      try {
+        await onChunk(chunk, metadata ?? null);
+      } catch (err) {
        onError(err);
-      }
+      }
+      ioSynchronizer.onOutput(timestamp);
     }
   });
   const close = () => {
@@ -4810,115 +4866,49 @@ var createVideoEncoder = ({
   Log.verbose(logLevel, "Configuring video encoder", config);
   encoder.configure(config);
   let framesProcessed = 0;
-  const encodeFrame =
+  const encodeFrame = (frame) => {
     if (encoder.state === "closed") {
       return;
     }
-
-    await ioSynchronizer.waitFor({
-      unemitted: 10,
-      unprocessed: 10,
-      minimumProgress: frame.timestamp - 1e7,
-      controller
-    });
-    if (encoder.state === "closed") {
-      return;
-    }
-    const keyFrame = framesProcessed % 40 === 0;
+    const keyFrame = framesProcessed % keyframeInterval === 0;
     encoder.encode(convertToCorrectVideoFrame({ videoFrame: frame, outputCodec }), {
       keyFrame,
       vp9: {
         quantizer: 36
       }
     });
-    ioSynchronizer.inputItem(frame.timestamp
+    ioSynchronizer.inputItem(frame.timestamp);
     framesProcessed++;
   };
-  let inputQueue = Promise.resolve();
   return {
-
-
-      return inputQueue;
+    encode: (frame) => {
+      encodeFrame(frame);
     },
     waitForFinish: async () => {
       await encoder.flush();
-      await
-      await ioSynchronizer.waitForFinish(controller);
+      await ioSynchronizer.waitForFinish();
     },
     close,
     flush: async () => {
       await encoder.flush();
-    }
+    },
+    ioSynchronizer
   };
 };

-// src/
-var
-
-
+// src/reencode-video-track.ts
+var reencodeVideoTrack = async ({
+  videoOperation,
+  rotate,
+  track,
+  logLevel,
   abortConversion,
+  onMediaStateUpdate,
   controller,
-
-
-
-
-  rotate,
-  progress,
-  resizeOperation
-}) => async ({ track, container: inputContainer }) => {
-  if (controller._internals._mediaParserController._internals.signal.aborted) {
-    throw new Error("Aborted");
-  }
-  const canCopyTrack = canCopyVideoTrack({
-    inputContainer,
-    outputContainer,
-    rotationToApply: rotate,
-    inputTrack: track,
-    resizeOperation
-  });
-  const videoOperation = await (onVideoTrack ?? defaultOnVideoTrackHandler)({
-    track,
-    defaultVideoCodec: defaultVideoCodec ?? getDefaultVideoCodec({ container: outputContainer }),
-    logLevel,
-    outputContainer,
-    rotate,
-    inputContainer,
-    canCopyTrack,
-    resizeOperation
-  });
-  if (videoOperation.type === "drop") {
-    return null;
-  }
-  if (videoOperation.type === "fail") {
-    throw new Error(`Video track with ID ${track.trackId} resolved with {"type": "fail"}. This could mean that this video track could neither be copied to the output container or re-encoded. You have the option to drop the track instead of failing it: https://remotion.dev/docs/webcodecs/track-transformation`);
-  }
-  if (videoOperation.type === "copy") {
-    Log.verbose(logLevel, `Copying video track with codec ${track.codec} and timescale ${track.originalTimescale}`);
-    const videoTrack = await state.addTrack({
-      type: "video",
-      color: track.advancedColor,
-      width: track.codedWidth,
-      height: track.codedHeight,
-      codec: track.codecEnum,
-      codecPrivate: track.codecData?.data ?? null,
-      timescale: track.originalTimescale
-    });
-    return async (sample) => {
-      await state.addSample({
-        chunk: sample,
-        trackNumber: videoTrack.trackNumber,
-        isVideo: true,
-        codecPrivate: track.codecData?.data ?? null
-      });
-      onMediaStateUpdate?.((prevState) => {
-        return {
-          ...prevState,
-          decodedVideoFrames: prevState.decodedVideoFrames + 1
-        };
-      });
-    };
-  }
+  onVideoFrame,
+  state,
+  progressTracker
+}) => {
   if (videoOperation.type !== "reencode") {
     throw new Error(`Video track with ID ${track.trackId} could not be resolved with a valid operation. Received ${JSON.stringify(videoOperation)}, but must be either "copy", "reencode", "drop" or "fail"`);
   }
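The hard-coded keyframe cadence (framesProcessed % 40 === 0) becomes a keyframeInterval parameter; the re-encode path below still passes 40. A tiny sketch of the cadence decision, with illustrative values:

// Sketch: keyframe cadence as now parameterized above.
const shouldBeKeyFrame = (framesProcessed: number, keyframeInterval: number) =>
  framesProcessed % keyframeInterval === 0;

// With the interval of 40 used by the re-encode path:
// frames 0, 40, 80, ... are keyframes; at 30 fps that is roughly
// one keyframe every 1.33 seconds.
console.log(shouldBeKeyFrame(0, 40));  // true
console.log(shouldBeKeyFrame(39, 40)); // false
console.log(shouldBeKeyFrame(40, 40)); // true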
@@ -4981,20 +4971,47 @@ var makeVideoTrackHandler = ({
   config: videoEncoderConfig,
   logLevel,
   outputCodec: videoOperation.videoCodec,
-
+  keyframeInterval: 40
 });
-const
-
-
-
+const videoProcessingQueue = processingQueue({
+  controller,
+  label: "VideoFrame processing queue",
+  logLevel,
+  onError: (err) => {
+    abortConversion(new Error(`VideoFrame processing queue of track ${track.trackId} failed (see .cause of this error)`, {
+      cause: err
+    }));
+  },
+  onOutput: async (frame) => {
+    await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+    const processedFrame = await onFrame({
      frame,
      track,
-      videoEncoder,
      onVideoFrame,
      outputCodec: videoOperation.videoCodec,
      rotation,
      resizeOperation: videoOperation.resize ?? null
    });
+    await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+    await videoEncoder.ioSynchronizer.waitForQueueSize(10);
+    await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+    videoEncoder.encode(processedFrame);
+    processedFrame.close();
+  }
+});
+const frameSorter = videoFrameSorter({
+  controller,
+  onOutput: async (frame) => {
+    await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+    await videoProcessingQueue.ioSynchronizer.waitForQueueSize(10);
+    videoProcessingQueue.input(frame);
+  }
+});
+const videoDecoder = createVideoDecoder({
+  track: videoDecoderConfig,
+  onFrame: async (frame) => {
+    await frameSorter.waitUntilProcessed();
+    frameSorter.inputFrame(frame);
   },
   onError: (err) => {
     abortConversion(new Error(`Video decoder of track ${track.trackId} failed (see .cause of this error)`, {
@@ -5002,23 +5019,96 @@ var makeVideoTrackHandler = ({
     }));
   },
   controller,
-  logLevel
-  progress
+  logLevel
 });
 state.addWaitForFinishPromise(async () => {
   Log.verbose(logLevel, "Waiting for video decoder to finish");
   await videoDecoder.waitForFinish();
   videoDecoder.close();
   Log.verbose(logLevel, "Video decoder finished. Waiting for encoder to finish");
+  await frameSorter.flush();
+  Log.verbose(logLevel, "Frame sorter flushed");
+  await videoProcessingQueue.ioSynchronizer.waitForFinish();
+  Log.verbose(logLevel, "Video processing queue finished");
   await videoEncoder.waitForFinish();
   videoEncoder.close();
-  Log.verbose(logLevel, "
+  Log.verbose(logLevel, "Video encoder finished");
 });
 return async (chunk) => {
-
+  progressTracker.setPossibleLowestTimestamp(Math.min(chunk.timestamp, chunk.decodingTimestamp ?? Infinity));
+  await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+  await videoDecoder.waitForQueueToBeLessThan(10);
+  if (chunk.type === "key") {
+    await videoDecoder.flush();
+  }
+  videoDecoder.decode(chunk);
 };
};

+// src/on-video-track.ts
+var makeVideoTrackHandler = ({
+  state,
+  onVideoFrame,
+  onMediaStateUpdate,
+  abortConversion,
+  controller,
+  defaultVideoCodec,
+  onVideoTrack,
+  logLevel,
+  outputContainer,
+  rotate,
+  resizeOperation,
+  progressTracker
+}) => async ({ track, container: inputContainer }) => {
+  if (controller._internals._mediaParserController._internals.signal.aborted) {
+    throw new Error("Aborted");
+  }
+  const canCopyTrack = canCopyVideoTrack({
+    inputContainer,
+    outputContainer,
+    rotationToApply: rotate,
+    inputTrack: track,
+    resizeOperation
+  });
+  const videoOperation = await (onVideoTrack ?? defaultOnVideoTrackHandler)({
+    track,
+    defaultVideoCodec: defaultVideoCodec ?? getDefaultVideoCodec({ container: outputContainer }),
+    logLevel,
+    outputContainer,
+    rotate,
+    inputContainer,
+    canCopyTrack,
+    resizeOperation
+  });
+  if (videoOperation.type === "drop") {
+    return null;
+  }
+  if (videoOperation.type === "fail") {
+    throw new Error(`Video track with ID ${track.trackId} resolved with {"type": "fail"}. This could mean that this video track could neither be copied to the output container or re-encoded. You have the option to drop the track instead of failing it: https://remotion.dev/docs/webcodecs/track-transformation`);
+  }
+  if (videoOperation.type === "copy") {
+    return copyVideoTrack({
+      logLevel,
+      onMediaStateUpdate,
+      state,
+      track,
+      progressTracker
+    });
+  }
+  return reencodeVideoTrack({
+    videoOperation,
+    abortConversion,
+    controller,
+    logLevel,
+    rotate,
+    track,
+    onVideoFrame,
+    state,
+    onMediaStateUpdate,
+    progressTracker
+  });
+};
+
 // src/send-telemetry-event.ts
 var import_licensing = __toESM(require_dist(), 1);
 var sendUsageEvent = async ({
@@ -5216,6 +5306,7 @@ var convertMedia = async function({
   expectedFrameRate: expectedFrameRate ?? null
 });
 const onVideoTrack = makeVideoTrackHandler({
+  progressTracker,
   state,
   onVideoFrame: onVideoFrame ?? null,
   onMediaStateUpdate: throttledState.update ?? null,
@@ -5226,10 +5317,10 @@ var convertMedia = async function({
   logLevel,
   outputContainer: container,
   rotate: rotate ?? 0,
-  progress: progressTracker,
   resizeOperation: resize ?? null
 });
 const onAudioTrack = makeAudioTrackHandler({
+  progressTracker,
   abortConversion,
   defaultAudioCodec: audioCodec ?? null,
   controller,
@@ -5238,7 +5329,6 @@ var convertMedia = async function({
   onAudioTrack: userAudioResolver ?? null,
   logLevel,
   outputContainer: container,
-  progressTracker,
   onAudioData: onAudioData ?? null
 });
 MediaParserInternals9.internalParseMedia({