@remotion/webcodecs 4.0.304 → 4.0.306
- package/dist/audio-decoder.d.ts +15 -10
- package/dist/audio-decoder.js +49 -52
- package/dist/audio-encoder.d.ts +5 -5
- package/dist/audio-encoder.js +20 -42
- package/dist/convert-encoded-chunk.d.ts +1 -1
- package/dist/convert-encoded-chunk.js +2 -5
- package/dist/convert-media.js +2 -2
- package/dist/copy-audio-track.d.ts +11 -0
- package/dist/copy-audio-track.js +31 -0
- package/dist/copy-video-track.d.ts +11 -0
- package/dist/copy-video-track.js +32 -0
- package/dist/create/event-emitter.d.ts +0 -1
- package/dist/create/iso-base-media/create-iso-base-media.js +3 -3
- package/dist/create/iso-base-media/example-stts.js +620 -620
- package/dist/create/iso-base-media/trak/mdia/minf/create-stbl.js +3 -1
- package/dist/create/iso-base-media/trak/mdia/minf/stbl/create-ctts.js +1 -1
- package/dist/create/iso-base-media/trak/mdia/minf/stbl/create-stts.js +3 -2
- package/dist/create/matroska/create-matroska-media.js +1 -1
- package/dist/create/progress-tracker.d.ts +0 -2
- package/dist/create/progress-tracker.js +3 -20
- package/dist/esm/index.mjs +583 -496
- package/dist/get-wave-audio-decoder.d.ts +6 -1
- package/dist/get-wave-audio-decoder.js +16 -11
- package/dist/io-manager/io-synchronizer.d.ts +6 -13
- package/dist/io-manager/io-synchronizer.js +31 -72
- package/dist/io-manager/make-timeout-promise.d.ts +1 -1
- package/dist/io-manager/make-timeout-promise.js +8 -4
- package/dist/on-audio-track.d.ts +2 -2
- package/dist/on-audio-track.js +15 -150
- package/dist/on-frame.d.ts +2 -4
- package/dist/on-frame.js +8 -9
- package/dist/on-video-track.d.ts +2 -2
- package/dist/on-video-track.js +18 -129
- package/dist/processing-queue.d.ts +19 -0
- package/dist/processing-queue.js +47 -0
- package/dist/reencode-audio-track.d.ts +18 -0
- package/dist/reencode-audio-track.js +164 -0
- package/dist/reencode-video-track.d.ts +19 -0
- package/dist/reencode-video-track.js +151 -0
- package/dist/sort-video-frames.d.ts +4 -3
- package/dist/sort-video-frames.js +7 -3
- package/dist/video-decoder.d.ts +14 -8
- package/dist/video-decoder.js +37 -72
- package/dist/video-encoder.d.ts +6 -5
- package/dist/video-encoder.js +16 -40
- package/dist/wav-audio-encoder.d.ts +4 -1
- package/dist/wav-audio-encoder.js +3 -2
- package/package.json +5 -5
package/dist/esm/index.mjs
CHANGED
```diff
@@ -462,31 +462,38 @@ var getBytesPerSample = (sampleFormat) => {
 };
 var getWaveAudioDecoder = ({
   onFrame,
-
-  sampleFormat
+  config,
+  sampleFormat,
+  ioSynchronizer,
+  onError
 }) => {
-  let queue = Promise.resolve();
   const processSample = async (audioSample) => {
     const bytesPerSample = getBytesPerSample(sampleFormat);
-
+    const audioData = new AudioData({
       data: audioSample.data,
       format: sampleFormat,
-      numberOfChannels:
-      numberOfFrames: audioSample.data.byteLength / bytesPerSample /
-      sampleRate:
+      numberOfChannels: config.numberOfChannels,
+      numberOfFrames: audioSample.data.byteLength / bytesPerSample / config.numberOfChannels,
+      sampleRate: config.sampleRate,
       timestamp: audioSample.timestamp
-    })
+    });
+    try {
+      await onFrame(audioData);
+    } catch (err) {
+      audioData.close();
+      onError(err);
+    }
   };
   return {
     close() {
       return Promise.resolve();
     },
-
-
-      return queue;
+    decode(audioSample) {
+      processSample(audioSample);
     },
     flush: () => Promise.resolve(),
-    waitForFinish: () => Promise.resolve()
+    waitForFinish: () => Promise.resolve(),
+    waitForQueueToBeLessThan: ioSynchronizer.waitForQueueSize
   };
 };
 
```
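The rewritten wave decoder derives the frame count from the raw byte length as `byteLength / bytesPerSample / numberOfChannels`. A quick arithmetic check of that formula with hypothetical numbers — one second of 16-bit stereo PCM at 48 kHz:

```js
const byteLength = 48000 * 2 * 2; // 192000 bytes: 48000 frames × 2 bytes × 2 channels
const bytesPerSample = 2; // "s16" is 2 bytes per sample
const numberOfChannels = 2;
const numberOfFrames = byteLength / bytesPerSample / numberOfChannels;
console.log(numberOfFrames); // 48000 — exactly one second at 48000 Hz
```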
```diff
@@ -562,8 +569,10 @@ var makeTimeoutPromise = ({
   const onResume = () => {
     set();
   };
-  controller
-
+  if (controller) {
+    controller.addEventListener("pause", onPause);
+    controller.addEventListener("resume", onResume);
+  }
   return {
     timeoutPromise: promise,
     clear: () => {
@@ -571,8 +580,10 @@ var makeTimeoutPromise = ({
         clearTimeout(timeout);
       }
       resolve();
-      controller
-
+      if (controller) {
+        controller.removeEventListener("pause", onPause);
+        controller.removeEventListener("resume", onResume);
+      }
     }
   };
 };
```
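Both listener sites now tolerate a missing controller, consistent with `controller` becoming optional in the standalone decoder wrappers later in this diff. The shape of the guard as a standalone sketch (names illustrative, not from the package):

```js
// Attach pause/resume listeners only when a controller exists; return a
// cleanup function either way so call sites stay uniform.
const attachPauseResume = (controller, onPause, onResume) => {
  if (!controller) {
    return () => {};
  }
  controller.addEventListener('pause', onPause);
  controller.addEventListener('resume', onResume);
  return () => {
    controller.removeEventListener('pause', onPause);
    controller.removeEventListener('resume', onResume);
  };
};
```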
```diff
@@ -581,39 +592,26 @@ var makeTimeoutPromise = ({
 var makeIoSynchronizer = ({
   logLevel,
   label,
-
+  controller
 }) => {
   const eventEmitter = new IoEventEmitter;
   let lastInput = 0;
-  let lastInputKeyframe = 0;
   let lastOutput = 0;
   let inputsSinceLastOutput = 0;
   let inputs = [];
-
-  let _unprocessed = 0;
-  const getUnprocessed = () => _unprocessed;
-  const getUnemittedItems = () => {
+  const getQueuedItems = () => {
     inputs = inputs.filter((input) => Math.floor(input) > Math.floor(lastOutput));
     return inputs.length;
   };
-  const getUnemittedKeyframes = () => {
-    keyframes = keyframes.filter((keyframe) => Math.floor(keyframe) > Math.floor(lastOutput));
-    return keyframes.length;
-  };
   const printState = (prefix) => {
-    Log.trace(logLevel, `[${label}] ${prefix}, state: Last input = ${lastInput} Last
+    Log.trace(logLevel, `[${label}] ${prefix}, state: Last input = ${lastInput} Last output = ${lastOutput} Inputs since last output = ${inputsSinceLastOutput}, Queue = ${getQueuedItems()}`);
   };
-  const inputItem = (timestamp
+  const inputItem = (timestamp) => {
     lastInput = timestamp;
-    if (keyFrame) {
-      lastInputKeyframe = timestamp;
-      keyframes.push(timestamp);
-    }
     inputsSinceLastOutput++;
     inputs.push(timestamp);
     eventEmitter.dispatchEvent("input", {
-      timestamp
-      keyFrame
+      timestamp
     });
     printState("Input item");
   };
@@ -623,7 +621,6 @@ var makeIoSynchronizer = ({
     eventEmitter.dispatchEvent("output", {
       timestamp
     });
-    _unprocessed++;
     printState("Got output");
   };
   const waitForOutput = () => {
@@ -635,133 +632,98 @@ var makeIoSynchronizer = ({
     eventEmitter.addEventListener("output", on);
     return promise;
   };
-  const
-
-
-
-
-
-
-    return promise;
+  const makeErrorBanner = () => {
+    return [
+      `Waited too long for ${label} to finish:`,
+      `${getQueuedItems()} queued items`,
+      `inputs: ${JSON.stringify(inputs)}`,
+      `last output: ${lastOutput}`
+    ];
   };
-  const
-
-
-
-    controller
-  }) => {
-    await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+  const waitForQueueSize = async (queueSize) => {
+    if (getQueuedItems() <= queueSize) {
+      return Promise.resolve();
+    }
     const { timeoutPromise, clear } = makeTimeoutPromise({
       label: () => [
-
-
-        `${getUnprocessed()} unprocessed items: ${JSON.stringify(_unprocessed)}`,
-        `smallest progress: ${progress.getSmallestProgress()}`,
-        `inputs: ${JSON.stringify(inputs)}`,
-        `last output: ${lastOutput}`,
-        `wanted: ${unemitted} unemitted items, ${unprocessed} unprocessed items, minimum progress ${minimumProgress}`,
+        ...makeErrorBanner(),
+        `wanted: <${queueSize} queued items`,
         `Report this at https://remotion.dev/report`
       ].join(`
 `),
       ms: 1e4,
       controller
     });
-    controller
+    if (controller) {
+      controller._internals._mediaParserController._internals.signal.addEventListener("abort", clear);
+    }
     await Promise.race([
       timeoutPromise,
-
-
-
-
-
-      })(),
-      (async () => {
-        while (getUnprocessed() > unprocessed) {
-          await waitForProcessed();
-        }
-      })(),
-      minimumProgress === null || progress.getSmallestProgress() === null ? Promise.resolve() : (async () => {
-        while (progress.getSmallestProgress() < minimumProgress) {
-          await progress.waitForProgress();
-        }
-      })()
-    ])
+      (async () => {
+        while (getQueuedItems() > queueSize) {
+          await waitForOutput();
+        }
+      })()
     ]).finally(() => clear());
-    controller
-
-
-    await waitFor({
-      unprocessed: 0,
-      unemitted: 0,
-      minimumProgress: null,
-      controller
-    });
+    if (controller) {
+      controller._internals._mediaParserController._internals.signal.removeEventListener("abort", clear);
+    }
   };
-  const
-
-    _unprocessed--;
+  const waitForFinish = async () => {
+    await waitForQueueSize(0);
   };
   return {
     inputItem,
     onOutput,
-    waitFor,
     waitForFinish,
-
-    getUnprocessed
+    waitForQueueSize
   };
 };
 
 // src/audio-decoder.ts
-var
+var internalCreateAudioDecoder = ({
   onFrame,
   onError,
   controller,
   config,
-  logLevel
-  track,
-  progressTracker
+  logLevel
 }) => {
-  if (controller._internals._mediaParserController._internals.signal.aborted) {
+  if (controller && controller._internals._mediaParserController._internals.signal.aborted) {
     throw new Error("Not creating audio decoder, already aborted");
   }
-  if (config.codec === "pcm-s16") {
-    return getWaveAudioDecoder({ onFrame, track, sampleFormat: "s16" });
-  }
   const ioSynchronizer = makeIoSynchronizer({
     logLevel,
     label: "Audio decoder",
-
+    controller
   });
-
+  if (config.codec === "pcm-s16") {
+    return getWaveAudioDecoder({
+      onFrame,
+      config,
+      sampleFormat: "s16",
+      logLevel,
+      ioSynchronizer,
+      onError
+    });
+  }
   const audioDecoder = new AudioDecoder({
-    output(frame) {
-
-
-
-    };
-    controller._internals._mediaParserController._internals.signal.addEventListener("abort", abortHandler, {
-      once: true
-    });
-    outputQueue = outputQueue.then(() => {
-      if (controller._internals._mediaParserController._internals.signal.aborted) {
-        return;
-      }
-      return onFrame(frame);
-    }).then(() => {
-      ioSynchronizer.onProcessed();
-      controller._internals._mediaParserController._internals.signal.removeEventListener("abort", abortHandler);
-      return Promise.resolve();
-    }).catch((err) => {
+    async output(frame) {
+      try {
+        await onFrame(frame);
+      } catch (err) {
         frame.close();
         onError(err);
-      }
+      }
+      ioSynchronizer.onOutput(frame.timestamp + (frame.duration ?? 0));
     },
     error(error) {
       onError(error);
     }
   });
   const close = () => {
-    controller
+    if (controller) {
+      controller._internals._mediaParserController._internals.signal.removeEventListener("abort", onAbort);
+    }
     if (audioDecoder.state === "closed") {
       return;
     }
```
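The synchronizer's bookkeeping now reduces to three operations. A toy model of the accounting only (not the package's implementation, which also emits events and trace logs): every `decode()` records an input timestamp, every emitted frame records an output timestamp, and the "queue" is the number of inputs not yet superseded by an output.

```js
const makeToyQueue = () => {
  let inputs = [];
  let lastOutput = 0;
  return {
    inputItem: (t) => inputs.push(t),
    onOutput: (t) => { lastOutput = t; },
    getQueuedItems: () => {
      // Same filter as getQueuedItems() above: keep inputs newer than the last output.
      inputs = inputs.filter((t) => Math.floor(t) > Math.floor(lastOutput));
      return inputs.length;
    },
  };
};

const q = makeToyQueue();
q.inputItem(0);
q.inputItem(20000);
q.onOutput(0);
console.log(q.getQueuedItems()); // 1 — only the 20000µs input is still pending
```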
```diff
@@ -770,45 +732,52 @@ var createAudioDecoder = ({
   const onAbort = () => {
     close();
   };
-  controller
+  if (controller) {
+    controller._internals._mediaParserController._internals.signal.addEventListener("abort", onAbort);
+  }
   audioDecoder.configure(config);
-  const processSample =
+  const processSample = (audioSample) => {
     if (audioDecoder.state === "closed") {
       return;
     }
-
-    await ioSynchronizer.waitFor({
-      unemitted: 20,
-      unprocessed: 20,
-      minimumProgress: audioSample.timestamp - 1e7,
-      controller
-    });
-    const chunk = new EncodedAudioChunk(audioSample);
+    const chunk = audioSample instanceof EncodedAudioChunk ? audioSample : new EncodedAudioChunk(audioSample);
     audioDecoder.decode(chunk);
     if (chunk.byteLength > 16) {
-      ioSynchronizer.inputItem(chunk.timestamp
+      ioSynchronizer.inputItem(chunk.timestamp);
     }
   };
-  let queue = Promise.resolve();
   return {
-
-
-      return queue;
+    decode: (sample) => {
+      processSample(sample);
     },
     waitForFinish: async () => {
       try {
         await audioDecoder.flush();
       } catch {}
-      await
-      await ioSynchronizer.waitForFinish(controller);
-      await outputQueue;
+      await ioSynchronizer.waitForFinish();
     },
     close,
     flush: async () => {
       await audioDecoder.flush();
-    }
+    },
+    waitForQueueToBeLessThan: ioSynchronizer.waitForQueueSize
   };
 };
+var createAudioDecoder = ({
+  onFrame,
+  onError,
+  controller,
+  track,
+  logLevel
+}) => {
+  return internalCreateAudioDecoder({
+    onFrame,
+    onError,
+    controller: controller ?? null,
+    config: track,
+    logLevel: logLevel ?? "error"
+  });
+};
 // src/audio-encoder.ts
 import {
   MediaParserAbortError
```
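The audio decoder is now split into `internalCreateAudioDecoder` and a thin `createAudioDecoder` wrapper in which `controller` is optional and the track doubles as the `AudioDecoder` config. A hedged usage sketch, assuming the wrapper is exported from `@remotion/webcodecs` and that `track` and `samples` are supplied by the caller (e.g. via `@remotion/media-parser`):

```js
import {createAudioDecoder} from '@remotion/webcodecs';

export const decodeAudioSamples = async ({track, samples}) => {
  const decoder = createAudioDecoder({
    track, // also serves as the AudioDecoder config
    onFrame: async (audioData) => {
      // consume the AudioData, then release it
      audioData.close();
    },
    onError: (err) => {
      console.error(err);
    },
  });
  for (const sample of samples) {
    // decode() is fire-and-forget now — backpressure is explicit:
    await decoder.waitForQueueToBeLessThan(10);
    decoder.decode(sample);
  }
  await decoder.waitForFinish();
  decoder.close();
};
```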
```diff
@@ -928,13 +897,14 @@ var convertAudioData = ({
 var getWaveAudioEncoder = ({
   onChunk,
   controller,
-  config
+  config,
+  ioSynchronizer
 }) => {
   return {
     close: () => {
       return Promise.resolve();
     },
-
+    encode: (unconvertedAudioData) => {
       if (controller._internals._mediaParserController._internals.signal.aborted) {
         return Promise.resolve();
       }
@@ -954,7 +924,8 @@ var getWaveAudioEncoder = ({
       return onChunk(chunk);
     },
     flush: () => Promise.resolve(),
-    waitForFinish: () => Promise.resolve()
+    waitForFinish: () => Promise.resolve(),
+    ioSynchronizer
   };
 };
 
```
```diff
@@ -966,39 +937,32 @@ var createAudioEncoder = ({
   controller,
   config: audioEncoderConfig,
   logLevel,
-  onNewAudioSampleRate
-  progressTracker
+  onNewAudioSampleRate
 }) => {
   if (controller._internals._mediaParserController._internals.signal.aborted) {
     throw new MediaParserAbortError("Not creating audio encoder, already aborted");
   }
+  const ioSynchronizer = makeIoSynchronizer({
+    logLevel,
+    label: "Audio encoder",
+    controller
+  });
   if (codec === "wav") {
     return getWaveAudioEncoder({
       onChunk,
       controller,
-      config: audioEncoderConfig
+      config: audioEncoderConfig,
+      ioSynchronizer
     });
   }
-  const ioSynchronizer = makeIoSynchronizer({
-    logLevel,
-    label: "Audio encoder",
-    progress: progressTracker
-  });
-  let prom = Promise.resolve();
   const encoder = new AudioEncoder({
-    output: (chunk) => {
-
-
-
-        return;
-      }
-      return onChunk(chunk);
-    }).then(() => {
-      ioSynchronizer.onProcessed();
-      return Promise.resolve();
-    }).catch((err) => {
+    output: async (chunk) => {
+      try {
+        await onChunk(chunk);
+      } catch (err) {
         onError(err);
-      }
+      }
+      ioSynchronizer.onOutput(chunk.timestamp);
     },
     error(error) {
       onError(error);
@@ -1019,17 +983,7 @@ var createAudioEncoder = ({
     throw new Error('Only `codec: "opus"` and `codec: "aac"` is supported currently');
   }
   const wantedSampleRate = audioEncoderConfig.sampleRate;
-  const encodeFrame =
-    if (encoder.state === "closed") {
-      return;
-    }
-    progressTracker.setPossibleLowestTimestamp(audioData.timestamp);
-    await ioSynchronizer.waitFor({
-      unemitted: 20,
-      unprocessed: 20,
-      minimumProgress: audioData.timestamp - 1e7,
-      controller
-    });
+  const encodeFrame = (audioData) => {
     if (encoder.state === "closed") {
       return;
     }
@@ -1045,23 +999,21 @@ var createAudioEncoder = ({
       }
     }
     encoder.encode(audioData);
-    ioSynchronizer.inputItem(audioData.timestamp
+    ioSynchronizer.inputItem(audioData.timestamp);
   };
-  let queue = Promise.resolve();
   return {
-
-
-      return queue;
+    encode: (audioData) => {
+      encodeFrame(audioData);
     },
     waitForFinish: async () => {
       await encoder.flush();
-      await ioSynchronizer.waitForFinish(
-      await prom;
+      await ioSynchronizer.waitForFinish();
     },
     close,
     flush: async () => {
       await encoder.flush();
-    }
+    },
+    ioSynchronizer
   };
 };
 // src/can-copy-audio-track.ts
```
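The encoder's internal `waitFor({unemitted, unprocessed, minimumProgress})` call is gone; instead the returned object exposes its `ioSynchronizer`, moving backpressure to the caller. A sketch of the intended call pattern:

```js
// Caller-driven backpressure: cap the number of in-flight items, then hand
// the AudioData to the encoder fire-and-forget.
const pushAudioData = async (audioEncoder, audioData) => {
  await audioEncoder.ioSynchronizer.waitForQueueSize(10); // at most 10 pending
  audioEncoder.encode(audioData); // synchronous; output() drains the queue
};
```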
```diff
@@ -2552,7 +2504,7 @@ var makeEntry = (entry) => {
   ]);
 };
 var createCttsBox = (samplePositions) => {
-  const offsets = samplePositions.map((s) => s.
+  const offsets = samplePositions.map((s) => s.timestamp - s.decodingTimestamp);
   const entries = [];
   let lastOffset = null;
   for (const offset of offsets) {
@@ -2684,9 +2636,9 @@ var createSttsAtom = (samplePositions) => {
   const durations = samplePositions.map((_, i, a) => {
     if (a[i].duration === undefined || a[i].duration === 0) {
       if (a[i + 1] === undefined) {
-        return a[i].
+        return a[i].decodingTimestamp - (a[i - 1]?.decodingTimestamp ?? a[i].decodingTimestamp);
       }
-      return a[i + 1].
+      return a[i + 1].decodingTimestamp - a[i].decodingTimestamp;
     }
     return a[i].duration;
   });
```
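The `stts` duration fallback now works on `decodingTimestamp` deltas. A worked example with hypothetical values in timescale units, mirroring the logic above:

```js
const samplePositions = [
  {decodingTimestamp: 0, duration: 0},
  {decodingTimestamp: 512, duration: 0},
  {decodingTimestamp: 1024, duration: 0},
];
const durations = samplePositions.map((s, i, a) => {
  if (s.duration === undefined || s.duration === 0) {
    if (a[i + 1] === undefined) {
      // last sample: fall back to the delta from the previous sample
      return s.decodingTimestamp - (a[i - 1]?.decodingTimestamp ?? s.decodingTimestamp);
    }
    // otherwise: delta to the next sample
    return a[i + 1].decodingTimestamp - s.decodingTimestamp;
  }
  return s.duration;
});
console.log(durations); // [512, 512, 512]
```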
```diff
@@ -2728,7 +2680,7 @@ var createStbl = ({
   codecSpecificData,
   isVideo
 }) => {
-  const sorted = samplePositions.slice().sort((a, b) => a.
+  const sorted = samplePositions.slice().sort((a, b) => a.decodingTimestamp - b.decodingTimestamp);
   return addSize(combineUint8Arrays([
     stringsToUint8Array("stbl"),
     createStsdData(codecSpecificData),
@@ -3019,7 +2971,7 @@ var createIsoBaseMedia = async ({
   await w.write(chunk.data);
   mdatSize += chunk.data.length;
   onBytesProgress(w.getWrittenByteCount());
-  progressTracker.setPossibleLowestTimestamp(Math.min(chunk.timestamp, chunk.
+  progressTracker.setPossibleLowestTimestamp(Math.min(chunk.timestamp, chunk.decodingTimestamp ?? Infinity));
   progressTracker.updateTrackProgress(trackNumber, chunk.timestamp);
   if (codecPrivate) {
     addCodecPrivateToTrack({ trackNumber, codecPrivate });
@@ -3059,8 +3011,8 @@ var createIsoBaseMedia = async ({
   isKeyframe: chunk.type === "key",
   offset: position,
   chunk: sampleChunkIndices[trackNumber],
-
-
+  timestamp: Math.round(chunk.timestamp / 1e6 * currentTrack.timescale),
+  decodingTimestamp: Math.round(chunk.decodingTimestamp / 1e6 * currentTrack.timescale),
   duration: Math.round((chunk.duration ?? 0) / 1e6 * currentTrack.timescale),
   size: chunk.data.length,
   bigEndian: false,
```
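Sample timestamps arrive in microseconds and are rescaled to track timescale units. A worked example of the `Math.round(chunk.timestamp / 1e6 * timescale)` conversion above, with hypothetical numbers:

```js
const timescale = 90000; // a common video track timescale
const timestampInMicroseconds = 1500000; // 1.5 s
const inTimescaleUnits = Math.round(timestampInMicroseconds / 1e6 * timescale);
console.log(inTimescaleUnits); // 135000 ticks = 1.5 s × 90000 ticks/s
```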
```diff
@@ -3881,7 +3833,7 @@ var createMatroskaMedia = async ({
   chunk,
   isVideo
 }) => {
-  progressTracker.setPossibleLowestTimestamp(Math.min(chunk.timestamp, chunk.
+  progressTracker.setPossibleLowestTimestamp(Math.min(chunk.timestamp, chunk.decodingTimestamp ?? Infinity));
   const smallestProgress = progressTracker.getSmallestProgress();
   if (!currentCluster.shouldMakeNewCluster({
     newT: smallestProgress,
@@ -4132,13 +4084,7 @@ var makeProgressTracker = () => {
     startingTimestamp = Math.min(startingTimestamp, timestamp);
     }
   };
-  const
-    if (startingTimestamp === null) {
-      throw new Error("No starting timestamp");
-    }
-    return startingTimestamp;
-  };
-  const calculateSmallestProgress = () => {
+  const getSmallestProgress = () => {
     const progressValues = Object.values(trackNumberProgresses).map((p) => {
       if (p !== null) {
         return p;
@@ -4154,26 +4100,16 @@ var makeProgressTracker = () => {
     registerTrack: (trackNumber) => {
       trackNumberProgresses[trackNumber] = null;
     },
-    getSmallestProgress
+    getSmallestProgress,
     updateTrackProgress: (trackNumber, progress) => {
       if (trackNumberProgresses[trackNumber] === undefined) {
         throw new Error(`Tried to update progress for a track that was not registered: ${trackNumber}`);
       }
       trackNumberProgresses[trackNumber] = progress;
       eventEmitter.dispatchEvent("progress", {
-        smallestProgress:
+        smallestProgress: getSmallestProgress()
       });
     },
-    waitForProgress: () => {
-      const { promise, resolve } = withResolvers();
-      const on = () => {
-        eventEmitter.removeEventListener("progress", on);
-        resolve();
-      };
-      eventEmitter.addEventListener("progress", on);
-      return promise;
-    },
-    getStartingTimestamp,
     setPossibleLowestTimestamp
   };
 };
@@ -4209,25 +4145,37 @@ var getAvailableVideoCodecs = ({
   throw new Error(`Unsupported container: ${container}`);
 };
 
-// src/
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+// src/copy-audio-track.ts
+var copyAudioTrack = async ({
+  state,
+  track,
+  logLevel,
+  onMediaStateUpdate,
+  progressTracker
+}) => {
+  const addedTrack = await state.addTrack({
+    type: "audio",
+    codec: track.codecEnum,
+    numberOfChannels: track.numberOfChannels,
+    sampleRate: track.sampleRate,
+    codecPrivate: track.codecData?.data ?? null,
+    timescale: track.originalTimescale
+  });
+  Log.verbose(logLevel, `Copying audio track ${track.trackId} as track ${addedTrack.trackNumber}. Timescale = ${track.originalTimescale}, codec = ${track.codecEnum} (${track.codec}) `);
+  return async (audioSample) => {
+    progressTracker.setPossibleLowestTimestamp(Math.min(audioSample.timestamp, audioSample.decodingTimestamp ?? Infinity));
+    await state.addSample({
+      chunk: audioSample,
+      trackNumber: addedTrack.trackNumber,
+      isVideo: false,
+      codecPrivate: track.codecData?.data ?? null
+    });
+    onMediaStateUpdate?.((prevState) => {
+      return {
+        ...prevState,
+        encodedAudioFrames: prevState.encodedAudioFrames + 1
+      };
+    });
   };
 };
 
```
```diff
@@ -4284,62 +4232,85 @@ var getDefaultAudioCodec = ({
   throw new Error(`Unhandled container: ${container}`);
 };
 
-// src/
-
-
-
-
-
-
-
+// src/reencode-audio-track.ts
+import { MediaParserInternals as MediaParserInternals7 } from "@remotion/media-parser";
+
+// src/convert-encoded-chunk.ts
+var convertEncodedChunk = (chunk) => {
+  const arr = new Uint8Array(chunk.byteLength);
+  chunk.copyTo(arr);
+  return {
+    data: arr,
+    duration: chunk.duration ?? undefined,
+    timestamp: chunk.timestamp,
+    type: chunk.type,
+    decodingTimestamp: chunk.timestamp,
+    offset: 0
+  };
+};
+
+// src/processing-queue.ts
+function processingQueue({
+  onOutput,
   logLevel,
-
-
-
-  })
-  const
-    inputCodec: track.codecEnum,
-    outputContainer,
-    inputContainer
-  });
-  const audioOperation = await (onAudioTrack ?? defaultOnAudioTrackHandler)({
-    defaultAudioCodec: audioCodec ?? getDefaultAudioCodec({ container: outputContainer }),
-    track,
+  label,
+  onError,
+  controller
+}) {
+  const ioSynchronizer = makeIoSynchronizer({
     logLevel,
-
-
-    canCopyTrack
+    label,
+    controller
   });
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  let queue = Promise.resolve();
+  let stopped = false;
+  const input = (item) => {
+    if (stopped) {
+      return;
+    }
+    if (controller._internals._mediaParserController._internals.signal.aborted) {
+      stopped = true;
+      return;
+    }
+    const { timestamp } = item;
+    ioSynchronizer.inputItem(timestamp);
+    queue = queue.then(() => {
+      if (stopped) {
+        return;
+      }
+      if (controller._internals._mediaParserController._internals.signal.aborted) {
+        stopped = true;
+        return;
+      }
+      return onOutput(item);
+    }).then(() => {
+      ioSynchronizer.onOutput(timestamp);
+      return Promise.resolve();
+    }).catch((err) => {
+      stopped = true;
+      onError(err);
     });
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  };
+  return {
+    input,
+    ioSynchronizer
+  };
+}
+
+// src/reencode-audio-track.ts
+var reencodeAudioTrack = async ({
+  audioOperation,
+  track,
+  logLevel,
+  abortConversion,
+  state,
+  controller,
+  onMediaStateUpdate,
+  onAudioData,
+  progressTracker
+}) => {
+  if (audioOperation.type !== "reencode") {
+    throw new Error(`Audio track with ID ${track.trackId} could not be resolved with a valid operation. Received ${JSON.stringify(audioOperation)}, but must be either "copy", "reencode", "drop" or "fail"`);
   }
   const audioEncoderConfig = await getAudioEncoderConfig({
     numberOfChannels: track.numberOfChannels,
```
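`processingQueue` serializes async work onto a promise chain while reusing the io-synchronizer for depth tracking. How the reencode paths below drive it, as a sketch (`queue` is the object returned by `processingQueue({onOutput, onError, ...})`; the items are hypothetical):

```js
const feed = async (queue, items) => {
  for (const item of items) {
    // Throttle the producer until the internal chain has drained enough:
    await queue.ioSynchronizer.waitForQueueSize(10);
    queue.input(item); // synchronous; onOutput runs on the internal chain
  }
  await queue.ioSynchronizer.waitForFinish(); // queue size 0 = fully drained
};
```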
```diff
@@ -4375,7 +4346,7 @@ var makeAudioTrackHandler = ({
   numberOfChannels: audioEncoderConfig.numberOfChannels,
   sampleRate: audioOperation.sampleRate ?? audioEncoderConfig.sampleRate,
   codecPrivate,
-  timescale: track.
+  timescale: track.originalTimescale
   });
   const audioEncoder = createAudioEncoder({
     onNewAudioSampleRate: (sampleRate) => {
@@ -4383,7 +4354,7 @@ var makeAudioTrackHandler = ({
     },
     onChunk: async (chunk) => {
       await state.addSample({
-        chunk: convertEncodedChunk(chunk
+        chunk: convertEncodedChunk(chunk),
         trackNumber,
         isVideo: false,
         codecPrivate
@@ -4403,11 +4374,18 @@ var makeAudioTrackHandler = ({
     codec: audioOperation.audioCodec,
     controller,
     config: audioEncoderConfig,
-    logLevel
-    progressTracker
+    logLevel
   });
-  const
-
+  const audioProcessingQueue = processingQueue({
+    controller,
+    label: "AudioData processing queue",
+    logLevel,
+    onError(error) {
+      abortConversion(new Error(`Audio decoder of track ${track.trackId} failed. Config: ${JSON.stringify(audioDecoderConfig)} (see .cause of this error)`, {
+        cause: error
+      }));
+    },
+    onOutput: async (audioData) => {
     const newAudioData = onAudioData ? await onAudioData?.({ audioData, track }) : audioData;
     if (newAudioData !== audioData) {
       if (newAudioData.duration !== audioData.duration) {
```
```diff
@@ -4427,7 +4405,10 @@ var makeAudioTrackHandler = ({
       }
       audioData.close();
     }
-    await
+    await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+    await audioEncoder.ioSynchronizer.waitForQueueSize(10);
+    await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+    audioEncoder.encode(newAudioData);
     onMediaStateUpdate?.((prevState) => {
       return {
         ...prevState,
@@ -4435,6 +4416,13 @@ var makeAudioTrackHandler = ({
       };
     });
     newAudioData.close();
+    }
+  });
+  const audioDecoder = internalCreateAudioDecoder({
+    onFrame: async (audioData) => {
+      await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+      await audioProcessingQueue.ioSynchronizer.waitForQueueSize(10);
+      audioProcessingQueue.input(audioData);
     },
     onError(error) {
       abortConversion(new Error(`Audio decoder of track ${track.trackId} failed. Config: ${JSON.stringify(audioDecoderConfig)} (see .cause of this error)`, {
@@ -4443,34 +4431,123 @@ var makeAudioTrackHandler = ({
     },
     controller,
     config: audioDecoderConfig,
-    logLevel
-    track,
-    progressTracker
+    logLevel
   });
   state.addWaitForFinishPromise(async () => {
     await audioDecoder.waitForFinish();
-
+    Log.verbose(logLevel, "Audio decoder finished");
     audioDecoder.close();
+    await audioProcessingQueue.ioSynchronizer.waitForFinish();
+    Log.verbose(logLevel, "Audio processing queue finished");
+    await audioEncoder.waitForFinish();
+    Log.verbose(logLevel, "Audio encoder finished");
     audioEncoder.close();
   });
   return async (audioSample) => {
-
+    progressTracker.setPossibleLowestTimestamp(Math.min(audioSample.timestamp, audioSample.decodingTimestamp ?? Infinity));
+    await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+    await audioDecoder.waitForQueueToBeLessThan(10);
+    audioDecoder.decode(audioSample);
   };
 };
 
-// src/
-var
-
-
-
-
-
-
-
-
-
-
-
+// src/on-audio-track.ts
+var makeAudioTrackHandler = ({
+  state,
+  defaultAudioCodec: audioCodec,
+  controller,
+  abortConversion,
+  onMediaStateUpdate,
+  onAudioTrack,
+  logLevel,
+  outputContainer,
+  onAudioData,
+  progressTracker
+}) => async ({ track, container: inputContainer }) => {
+  const canCopyTrack = canCopyAudioTrack({
+    inputCodec: track.codecEnum,
+    outputContainer,
+    inputContainer
+  });
+  const audioOperation = await (onAudioTrack ?? defaultOnAudioTrackHandler)({
+    defaultAudioCodec: audioCodec ?? getDefaultAudioCodec({ container: outputContainer }),
+    track,
+    logLevel,
+    outputContainer,
+    inputContainer,
+    canCopyTrack
+  });
+  if (audioOperation.type === "drop") {
+    return null;
+  }
+  if (audioOperation.type === "fail") {
+    throw new Error(`Audio track with ID ${track.trackId} resolved with {"type": "fail"}. This could mean that this audio track could neither be copied to the output container or re-encoded. You have the option to drop the track instead of failing it: https://remotion.dev/docs/webcodecs/track-transformation`);
+  }
+  if (audioOperation.type === "copy") {
+    return copyAudioTrack({
+      logLevel,
+      onMediaStateUpdate,
+      state,
+      track,
+      progressTracker
+    });
+  }
+  return reencodeAudioTrack({
+    abortConversion,
+    controller,
+    logLevel,
+    onMediaStateUpdate,
+    audioOperation,
+    onAudioData,
+    state,
+    track,
+    progressTracker
+  });
+};
+
+// src/copy-video-track.ts
+var copyVideoTrack = async ({
+  logLevel,
+  state,
+  track,
+  onMediaStateUpdate,
+  progressTracker
+}) => {
+  Log.verbose(logLevel, `Copying video track with codec ${track.codec} and timescale ${track.originalTimescale}`);
+  const videoTrack = await state.addTrack({
+    type: "video",
+    color: track.advancedColor,
+    width: track.codedWidth,
+    height: track.codedHeight,
+    codec: track.codecEnum,
+    codecPrivate: track.codecData?.data ?? null,
+    timescale: track.originalTimescale
+  });
+  return async (sample) => {
+    progressTracker.setPossibleLowestTimestamp(Math.min(sample.timestamp, sample.decodingTimestamp ?? Infinity));
+    await state.addSample({
+      chunk: sample,
+      trackNumber: videoTrack.trackNumber,
+      isVideo: true,
+      codecPrivate: track.codecData?.data ?? null
+    });
+    onMediaStateUpdate?.((prevState) => {
+      return {
+        ...prevState,
+        decodedVideoFrames: prevState.decodedVideoFrames + 1
+      };
+    });
+  };
+};
+
+// src/default-on-video-track-handler.ts
+import { MediaParserInternals as MediaParserInternals8 } from "@remotion/media-parser";
+var defaultOnVideoTrackHandler = async ({
+  track,
+  defaultVideoCodec,
+  logLevel,
+  rotate,
+  canCopyTrack,
   resizeOperation
 }) => {
   if (canCopyTrack) {
```
```diff
@@ -4516,6 +4593,11 @@ var getDefaultVideoCodec = ({
   throw new Error(`Unhandled container: ${container}`);
 };
 
+// src/arraybuffer-to-uint8-array.ts
+var arrayBufferToUint8Array = (buffer) => {
+  return buffer ? new Uint8Array(buffer) : null;
+};
+
 // src/convert-to-correct-videoframe.ts
 var needsToCorrectVideoFrame = ({
   videoFrame,
@@ -4556,7 +4638,6 @@ var convertToCorrectVideoFrame = ({
 var onFrame = async ({
   frame: unrotatedFrame,
   onVideoFrame,
-  videoEncoder,
   track,
   outputCodec,
   rotation,
@@ -4573,10 +4654,10 @@ var onFrame = async ({
   }
   const userProcessedFrame = onVideoFrame ? await onVideoFrame({ frame: rotated, track }) : rotated;
   if (userProcessedFrame.displayWidth !== rotated.displayWidth) {
-    throw new Error(`Returned VideoFrame of track ${track.trackId} has different displayWidth (${userProcessedFrame.displayWidth}) than the input frame (${
+    throw new Error(`Returned VideoFrame of track ${track.trackId} has different displayWidth (${userProcessedFrame.displayWidth}) than the input frame (${rotated.displayWidth})`);
   }
   if (userProcessedFrame.displayHeight !== rotated.displayHeight) {
-    throw new Error(`Returned VideoFrame of track ${track.trackId} has different displayHeight (${userProcessedFrame.displayHeight}) than the input frame (${
+    throw new Error(`Returned VideoFrame of track ${track.trackId} has different displayHeight (${userProcessedFrame.displayHeight}) than the input frame (${rotated.displayHeight})`);
   }
   if (userProcessedFrame.timestamp !== rotated.timestamp && !isSafari()) {
     throw new Error(`Returned VideoFrame of track ${track.trackId} has different timestamp (${userProcessedFrame.timestamp}) than the input frame (${rotated.timestamp}). When calling new VideoFrame(), pass {timestamp: frame.timestamp} as second argument`);
@@ -4584,32 +4665,31 @@ var onFrame = async ({
   if ((userProcessedFrame.duration ?? 0) !== (rotated.duration ?? 0)) {
     throw new Error(`Returned VideoFrame of track ${track.trackId} has different duration (${userProcessedFrame.duration}) than the input frame (${rotated.duration}). When calling new VideoFrame(), pass {duration: frame.duration} as second argument`);
   }
+  if (rotated !== userProcessedFrame) {
+    rotated.close();
+  }
   const fixedFrame = convertToCorrectVideoFrame({
     videoFrame: userProcessedFrame,
     outputCodec
   });
-  await videoEncoder.encodeFrame(fixedFrame, fixedFrame.timestamp);
-  fixedFrame.close();
-  if (rotated !== userProcessedFrame) {
-    rotated.close();
-  }
   if (fixedFrame !== userProcessedFrame) {
-
+    userProcessedFrame.close();
   }
+  return fixedFrame;
 };
 
 // src/sort-video-frames.ts
 var MAX_QUEUE_SIZE = 5;
 var videoFrameSorter = ({
-
-
+  controller,
+  onOutput
 }) => {
   const frames = [];
   const releaseFrame = async () => {
     await controller._internals._mediaParserController._internals.checkForAbortAndPause();
     const frame = frames.shift();
     if (frame) {
-      await
+      await onOutput(frame);
     }
   };
   const sortFrames = () => {
```
```diff
@@ -4645,66 +4725,47 @@ var videoFrameSorter = ({
     controller._internals._mediaParserController._internals.signal.removeEventListener("abort", onAbort);
   };
   controller._internals._mediaParserController._internals.signal.addEventListener("abort", onAbort);
+  let promise = Promise.resolve();
   return {
-    inputFrame
+    inputFrame: (frame) => {
+      promise = promise.then(() => inputFrame(frame));
+    },
+    waitUntilProcessed: () => promise,
     flush
   };
 };
 
 // src/video-decoder.ts
-var
+var internalCreateVideoDecoder = ({
   onFrame: onFrame2,
   onError,
   controller,
   config,
-  logLevel
-  progress
+  logLevel
 }) => {
   const ioSynchronizer = makeIoSynchronizer({
     logLevel,
     label: "Video decoder",
-
-  });
-  let outputQueue = Promise.resolve();
-  const addToQueue = (frame) => {
-    const cleanup = () => {
-      frame.close();
-    };
-    controller._internals._mediaParserController._internals.signal.addEventListener("abort", cleanup, {
-      once: true
-    });
-    outputQueue = outputQueue.then(() => {
-      if (controller._internals._mediaParserController._internals.signal.aborted) {
-        return;
-      }
-      return onFrame2(frame);
-    }).then(() => {
-      ioSynchronizer.onProcessed();
-    }).catch((err) => {
-      onError(err);
-    }).finally(() => {
-      controller._internals._mediaParserController._internals.signal.removeEventListener("abort", cleanup);
-      cleanup();
-    });
-    return outputQueue;
-  };
-  const frameSorter = videoFrameSorter({
-    controller,
-    onRelease: async (frame) => {
-      await addToQueue(frame);
-    }
+    controller
   });
   const videoDecoder = new VideoDecoder({
-    output(frame) {
+    async output(frame) {
+      try {
+        await onFrame2(frame);
+      } catch (err) {
+        onError(err);
+        frame.close();
+      }
       ioSynchronizer.onOutput(frame.timestamp);
-      frameSorter.inputFrame(frame);
     },
     error(error) {
       onError(error);
     }
   });
   const close = () => {
-    controller
+    if (controller) {
+      controller._internals._mediaParserController._internals.signal.removeEventListener("abort", onAbort);
+    }
     if (videoDecoder.state === "closed") {
       return;
     }
@@ -4713,49 +4774,48 @@ var createVideoDecoder = ({
   const onAbort = () => {
     close();
   };
-  controller
+  if (controller) {
+    controller._internals._mediaParserController._internals.signal.addEventListener("abort", onAbort);
+  }
   videoDecoder.configure(config);
-  const
+  const decode = (sample) => {
     if (videoDecoder.state === "closed") {
       return;
     }
-
-
-
-    progress.setPossibleLowestTimestamp(Math.min(sample.timestamp, sample.dts ?? Infinity, sample.cts ?? Infinity));
-    await ioSynchronizer.waitFor({
-      unemitted: 20,
-      unprocessed: 10,
-      minimumProgress: sample.timestamp - 1e7,
-      controller
-    });
-    videoDecoder.decode(new EncodedVideoChunk(sample));
-    ioSynchronizer.inputItem(sample.timestamp, sample.type === "key");
+    const encodedChunk = sample instanceof EncodedVideoChunk ? sample : new EncodedVideoChunk(sample);
+    videoDecoder.decode(encodedChunk);
+    ioSynchronizer.inputItem(sample.timestamp);
   };
-  let inputQueue = Promise.resolve();
   return {
-
-      inputQueue = inputQueue.then(() => processSample(sample));
-      return inputQueue;
-    },
+    decode,
     waitForFinish: async () => {
       await videoDecoder.flush();
       Log.verbose(logLevel, "Flushed video decoder");
-      await
-      Log.verbose(logLevel, "Frame sorter flushed");
-      await ioSynchronizer.waitForFinish(controller);
+      await ioSynchronizer.waitForFinish();
       Log.verbose(logLevel, "IO synchro finished");
-      await outputQueue;
-      Log.verbose(logLevel, "Output queue finished");
-      await inputQueue;
-      Log.verbose(logLevel, "Input queue finished");
     },
     close,
     flush: async () => {
       await videoDecoder.flush();
-    }
+    },
+    waitForQueueToBeLessThan: ioSynchronizer.waitForQueueSize
  };
 };
+var createVideoDecoder = ({
+  onFrame: onFrame2,
+  onError,
+  controller,
+  track,
+  logLevel
+}) => {
+  return internalCreateVideoDecoder({
+    onFrame: onFrame2,
+    onError,
+    controller: controller ?? null,
+    config: track,
+    logLevel: logLevel ?? "info"
+  });
+};
 
 // src/video-encoder.ts
 import {
```
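Mirroring the audio side, the video decoder gains a public `createVideoDecoder` wrapper with an optional controller, and `decode()` now accepts either an `EncodedVideoChunk` or a parser sample. A hedged usage sketch, assuming the wrapper is exported from `@remotion/webcodecs` and that `track` and `samples` come from the caller:

```js
import {createVideoDecoder} from '@remotion/webcodecs';

export const decodeVideoSamples = async ({track, samples}) => {
  const decoder = createVideoDecoder({
    track, // also serves as the VideoDecoder config
    onFrame: async (frame) => {
      // consume the VideoFrame, then release it
      frame.close();
    },
    onError: (err) => {
      console.error(err);
    },
  });
  for (const sample of samples) {
    await decoder.waitForQueueToBeLessThan(10);
    decoder.decode(sample); // EncodedVideoChunk or media-parser sample
  }
  await decoder.waitForFinish();
  decoder.close();
};
```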
```diff
@@ -4768,7 +4828,7 @@ var createVideoEncoder = ({
   config,
   logLevel,
   outputCodec,
-
+  keyframeInterval
 }) => {
   if (controller._internals._mediaParserController._internals.signal.aborted) {
     throw new MediaParserAbortError2("Not creating video encoder, already aborted");
@@ -4776,27 +4836,20 @@ var createVideoEncoder = ({
   const ioSynchronizer = makeIoSynchronizer({
     logLevel,
     label: "Video encoder",
-
+    controller
   });
-  let outputQueue = Promise.resolve();
   const encoder = new VideoEncoder({
     error(error) {
       onError(error);
     },
-    output(chunk, metadata) {
+    async output(chunk, metadata) {
       const timestamp = chunk.timestamp + (chunk.duration ?? 0);
-
-
-
-        return;
-      }
-      return onChunk(chunk, metadata ?? null);
-    }).then(() => {
-      ioSynchronizer.onProcessed();
-      return Promise.resolve();
-    }).catch((err) => {
+      try {
+        await onChunk(chunk, metadata ?? null);
+      } catch (err) {
        onError(err);
-      }
+      }
+      ioSynchronizer.onOutput(timestamp);
     }
   });
   const close = () => {
@@ -4813,115 +4866,49 @@ var createVideoEncoder = ({
   Log.verbose(logLevel, "Configuring video encoder", config);
   encoder.configure(config);
   let framesProcessed = 0;
-  const encodeFrame =
-    if (encoder.state === "closed") {
-      return;
-    }
-    progress.setPossibleLowestTimestamp(frame.timestamp);
-    await ioSynchronizer.waitFor({
-      unemitted: 10,
-      unprocessed: 10,
-      minimumProgress: frame.timestamp - 1e7,
-      controller
-    });
+  const encodeFrame = (frame) => {
     if (encoder.state === "closed") {
       return;
     }
-    const keyFrame = framesProcessed %
+    const keyFrame = framesProcessed % keyframeInterval === 0;
     encoder.encode(convertToCorrectVideoFrame({ videoFrame: frame, outputCodec }), {
       keyFrame,
       vp9: {
         quantizer: 36
       }
     });
-    ioSynchronizer.inputItem(frame.timestamp
+    ioSynchronizer.inputItem(frame.timestamp);
     framesProcessed++;
   };
-  let inputQueue = Promise.resolve();
   return {
-
-
-      return inputQueue;
+    encode: (frame) => {
+      encodeFrame(frame);
     },
     waitForFinish: async () => {
       await encoder.flush();
-      await
-      await ioSynchronizer.waitForFinish(controller);
+      await ioSynchronizer.waitForFinish();
     },
     close,
     flush: async () => {
       await encoder.flush();
-    }
+    },
+    ioSynchronizer
   };
 };
 
-// src/
-var
-
-
-
+// src/reencode-video-track.ts
+var reencodeVideoTrack = async ({
+  videoOperation,
+  rotate,
+  track,
+  logLevel,
   abortConversion,
+  onMediaStateUpdate,
   controller,
-
-
-
-
-  rotate,
-  progress,
-  resizeOperation
-}) => async ({ track, container: inputContainer }) => {
-  if (controller._internals._mediaParserController._internals.signal.aborted) {
-    throw new Error("Aborted");
-  }
-  const canCopyTrack = canCopyVideoTrack({
-    inputContainer,
-    outputContainer,
-    rotationToApply: rotate,
-    inputTrack: track,
-    resizeOperation
-  });
-  const videoOperation = await (onVideoTrack ?? defaultOnVideoTrackHandler)({
-    track,
-    defaultVideoCodec: defaultVideoCodec ?? getDefaultVideoCodec({ container: outputContainer }),
-    logLevel,
-    outputContainer,
-    rotate,
-    inputContainer,
-    canCopyTrack,
-    resizeOperation
-  });
-  if (videoOperation.type === "drop") {
-    return null;
-  }
-  if (videoOperation.type === "fail") {
-    throw new Error(`Video track with ID ${track.trackId} resolved with {"type": "fail"}. This could mean that this video track could neither be copied to the output container or re-encoded. You have the option to drop the track instead of failing it: https://remotion.dev/docs/webcodecs/track-transformation`);
-  }
-  if (videoOperation.type === "copy") {
-    Log.verbose(logLevel, `Copying video track with codec ${track.codec} and timescale ${track.timescale}`);
-    const videoTrack = await state.addTrack({
-      type: "video",
-      color: track.advancedColor,
-      width: track.codedWidth,
-      height: track.codedHeight,
-      codec: track.codecEnum,
-      codecPrivate: track.codecData?.data ?? null,
-      timescale: track.timescale
-    });
-    return async (sample) => {
-      await state.addSample({
-        chunk: sample,
-        trackNumber: videoTrack.trackNumber,
-        isVideo: true,
-        codecPrivate: track.codecData?.data ?? null
-      });
-      onMediaStateUpdate?.((prevState) => {
-        return {
-          ...prevState,
-          decodedVideoFrames: prevState.decodedVideoFrames + 1
-        };
-      });
-    };
-  }
+  onVideoFrame,
+  state,
+  progressTracker
+}) => {
   if (videoOperation.type !== "reencode") {
     throw new Error(`Video track with ID ${track.trackId} could not be resolved with a valid operation. Received ${JSON.stringify(videoOperation)}, but must be either "copy", "reencode", "drop" or "fail"`);
   }
```
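The key-frame cadence is now a caller-supplied `keyframeInterval` parameter (the reencode path later in this diff passes 40). The resulting pattern:

```js
const keyframeInterval = 40;
const isKeyFrame = (framesProcessed) => framesProcessed % keyframeInterval === 0;
console.log([0, 1, 39, 40, 41, 80].map(isKeyFrame));
// [true, false, false, true, false, true] — frames 0, 40, 80, … are key frames
```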
@@ -4957,13 +4944,13 @@ var makeVideoTrackHandler = ({
|
|
|
4957
4944
|
height: newHeight,
|
|
4958
4945
|
codec: videoOperation.videoCodec,
|
|
4959
4946
|
codecPrivate: null,
|
|
4960
|
-
timescale: track.
|
|
4947
|
+
timescale: track.originalTimescale
|
|
4961
4948
|
});
|
|
4962
|
-
Log.verbose(logLevel, `Created new video track with ID ${trackNumber}, codec ${videoOperation.videoCodec} and timescale ${track.
|
|
4949
|
+
Log.verbose(logLevel, `Created new video track with ID ${trackNumber}, codec ${videoOperation.videoCodec} and timescale ${track.originalTimescale}`);
|
|
4963
4950
|
const videoEncoder = createVideoEncoder({
|
|
4964
4951
|
onChunk: async (chunk, metadata) => {
|
|
4965
4952
|
await state.addSample({
|
|
4966
|
-
chunk: convertEncodedChunk(chunk
|
|
4953
|
+
chunk: convertEncodedChunk(chunk),
|
|
4967
4954
|
trackNumber,
|
|
4968
4955
|
isVideo: true,
|
|
4969
4956
|
codecPrivate: arrayBufferToUint8Array(metadata?.decoderConfig?.description ?? null)
|
|
@@ -4984,20 +4971,47 @@ var makeVideoTrackHandler = ({
     config: videoEncoderConfig,
     logLevel,
     outputCodec: videoOperation.videoCodec,
-
+    keyframeInterval: 40
   });
-  const
-
-
-
+  const videoProcessingQueue = processingQueue({
+    controller,
+    label: "VideoFrame processing queue",
+    logLevel,
+    onError: (err) => {
+      abortConversion(new Error(`VideoFrame processing queue of track ${track.trackId} failed (see .cause of this error)`, {
+        cause: err
+      }));
+    },
+    onOutput: async (frame) => {
+      await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+      const processedFrame = await onFrame({
         frame,
         track,
-        videoEncoder,
         onVideoFrame,
         outputCodec: videoOperation.videoCodec,
         rotation,
         resizeOperation: videoOperation.resize ?? null
       });
+      await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+      await videoEncoder.ioSynchronizer.waitForQueueSize(10);
+      await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+      videoEncoder.encode(processedFrame);
+      processedFrame.close();
+    }
+  });
+  const frameSorter = videoFrameSorter({
+    controller,
+    onOutput: async (frame) => {
+      await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+      await videoProcessingQueue.ioSynchronizer.waitForQueueSize(10);
+      videoProcessingQueue.input(frame);
+    }
+  });
+  const videoDecoder = createVideoDecoder({
+    track: videoDecoderConfig,
+    onFrame: async (frame) => {
+      await frameSorter.waitUntilProcessed();
+      frameSorter.inputFrame(frame);
     },
     onError: (err) => {
       abortConversion(new Error(`Video decoder of track ${track.trackId} failed (see .cause of this error)`, {
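Note: the hunk above turns the re-encode path into a staged pipeline (decoder -> frame sorter -> processing queue -> encoder), where each stage awaits ioSynchronizer.waitForQueueSize(10) before pushing into the next, so no stage can run unboundedly ahead. A minimal sketch of a serial queue with the input/onOutput/onError surface used above (the real dist/processing-queue.js additionally wires in the controller and exposes an ioSynchronizer):

// Minimal serial async queue: items are processed one at a time, in order,
// and failures are routed to onError instead of rejecting input().
const makeProcessingQueue = <T>({
  onOutput,
  onError,
}: {
  onOutput: (item: T) => Promise<void>;
  onError: (err: Error) => void;
}) => {
  let chain: Promise<void> = Promise.resolve();
  let pending = 0;
  return {
    input: (item: T) => {
      pending++;
      chain = chain
        .then(() => onOutput(item))
        .catch((err) => onError(err as Error))
        .finally(() => {
          pending--;
        });
    },
    getPendingCount: () => pending, // hypothetical accessor for illustration
  };
};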
@@ -5005,23 +5019,96 @@ var makeVideoTrackHandler = ({
       }));
     },
     controller,
-    logLevel,
-    progress
+    logLevel
   });
   state.addWaitForFinishPromise(async () => {
     Log.verbose(logLevel, "Waiting for video decoder to finish");
     await videoDecoder.waitForFinish();
     videoDecoder.close();
     Log.verbose(logLevel, "Video decoder finished. Waiting for encoder to finish");
+    await frameSorter.flush();
+    Log.verbose(logLevel, "Frame sorter flushed");
+    await videoProcessingQueue.ioSynchronizer.waitForFinish();
+    Log.verbose(logLevel, "Video processing queue finished");
     await videoEncoder.waitForFinish();
     videoEncoder.close();
-    Log.verbose(logLevel, "
+    Log.verbose(logLevel, "Video encoder finished");
   });
   return async (chunk) => {
-
+    progressTracker.setPossibleLowestTimestamp(Math.min(chunk.timestamp, chunk.decodingTimestamp ?? Infinity));
+    await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+    await videoDecoder.waitForQueueToBeLessThan(10);
+    if (chunk.type === "key") {
+      await videoDecoder.flush();
+    }
+    videoDecoder.decode(chunk);
   };
 };
 
+// src/on-video-track.ts
+var makeVideoTrackHandler = ({
+  state,
+  onVideoFrame,
+  onMediaStateUpdate,
+  abortConversion,
+  controller,
+  defaultVideoCodec,
+  onVideoTrack,
+  logLevel,
+  outputContainer,
+  rotate,
+  resizeOperation,
+  progressTracker
+}) => async ({ track, container: inputContainer }) => {
+  if (controller._internals._mediaParserController._internals.signal.aborted) {
+    throw new Error("Aborted");
+  }
+  const canCopyTrack = canCopyVideoTrack({
+    inputContainer,
+    outputContainer,
+    rotationToApply: rotate,
+    inputTrack: track,
+    resizeOperation
+  });
+  const videoOperation = await (onVideoTrack ?? defaultOnVideoTrackHandler)({
+    track,
+    defaultVideoCodec: defaultVideoCodec ?? getDefaultVideoCodec({ container: outputContainer }),
+    logLevel,
+    outputContainer,
+    rotate,
+    inputContainer,
+    canCopyTrack,
+    resizeOperation
+  });
+  if (videoOperation.type === "drop") {
+    return null;
+  }
+  if (videoOperation.type === "fail") {
+    throw new Error(`Video track with ID ${track.trackId} resolved with {"type": "fail"}. This could mean that this video track could neither be copied to the output container or re-encoded. You have the option to drop the track instead of failing it: https://remotion.dev/docs/webcodecs/track-transformation`);
+  }
+  if (videoOperation.type === "copy") {
+    return copyVideoTrack({
+      logLevel,
+      onMediaStateUpdate,
+      state,
+      track,
+      progressTracker
+    });
+  }
+  return reencodeVideoTrack({
+    videoOperation,
+    abortConversion,
+    controller,
+    logLevel,
+    rotate,
+    track,
+    onVideoFrame,
+    state,
+    onMediaStateUpdate,
+    progressTracker
+  });
+};
+
 // src/send-telemetry-event.ts
 var import_licensing = __toESM(require_dist(), 1);
 var sendUsageEvent = async ({
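Note: the new per-chunk callback above applies backpressure (waitForQueueToBeLessThan(10)) and flushes the decoder at every keyframe, so all frames of the previous group of pictures are emitted before the next one starts. Against the raw WebCodecs API the same pattern looks like this (a sketch; the package wraps this inside its createVideoDecoder):

// Backpressure plus flush-on-keyframe against a plain VideoDecoder.
const feedChunk = async (decoder: VideoDecoder, init: EncodedVideoChunkInit) => {
  // Keep at most ~10 chunks in flight; 'dequeue' fires when the queue shrinks.
  while (decoder.decodeQueueSize >= 10) {
    await new Promise((resolve) => {
      decoder.addEventListener('dequeue', resolve, {once: true});
    });
  }
  if (init.type === 'key') {
    // Drain all pending frames before starting a new group of pictures.
    await decoder.flush();
  }
  decoder.decode(new EncodedVideoChunk(init));
};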
@@ -5219,6 +5306,7 @@ var convertMedia = async function({
     expectedFrameRate: expectedFrameRate ?? null
   });
   const onVideoTrack = makeVideoTrackHandler({
+    progressTracker,
     state,
     onVideoFrame: onVideoFrame ?? null,
     onMediaStateUpdate: throttledState.update ?? null,
@@ -5229,10 +5317,10 @@ var convertMedia = async function({
     logLevel,
     outputContainer: container,
     rotate: rotate ?? 0,
-    progress: progressTracker,
     resizeOperation: resize ?? null
   });
   const onAudioTrack = makeAudioTrackHandler({
+    progressTracker,
     abortConversion,
     defaultAudioCodec: audioCodec ?? null,
     controller,
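Note: progressTracker is now threaded into both makeVideoTrackHandler and makeAudioTrackHandler (replacing the per-handler progress/progressTracker options), so audio and video report into one shared tracker; the re-encode path above feeds it via setPossibleLowestTimestamp on every chunk. A hypothetical minimal tracker with just that method (the real dist/create/progress-tracker.js keeps more state):

// Hypothetical sketch: track the lowest timestamp seen across all tracks.
const makeProgressTracker = () => {
  let lowest = Infinity;
  return {
    setPossibleLowestTimestamp: (timestampInMicroseconds: number) => {
      lowest = Math.min(lowest, timestampInMicroseconds);
    },
    getLowestTimestamp: () => lowest, // hypothetical accessor
  };
};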
@@ -5241,7 +5329,6 @@ var convertMedia = async function({
     onAudioTrack: userAudioResolver ?? null,
     logLevel,
     outputContainer: container,
-    progressTracker,
     onAudioData: onAudioData ?? null
   });
   MediaParserInternals9.internalParseMedia({