@remotion/webcodecs 4.0.305 → 4.0.308
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/audio-decoder.d.ts +15 -10
- package/dist/audio-decoder.js +49 -52
- package/dist/audio-encoder.d.ts +5 -5
- package/dist/audio-encoder.js +20 -42
- package/dist/can-copy-audio-track.d.ts +3 -1
- package/dist/can-copy-audio-track.js +10 -1
- package/dist/can-copy-video-track.d.ts +3 -1
- package/dist/can-copy-video-track.js +10 -1
- package/dist/convert-media.js +2 -2
- package/dist/copy-audio-track.d.ts +11 -0
- package/dist/copy-audio-track.js +31 -0
- package/dist/copy-video-track.d.ts +11 -0
- package/dist/copy-video-track.js +32 -0
- package/dist/create/event-emitter.d.ts +0 -1
- package/dist/create/matroska/matroska-utils.d.ts +1 -1
- package/dist/create/progress-tracker.d.ts +0 -2
- package/dist/create/progress-tracker.js +3 -20
- package/dist/create-audio-decoder.d.ts +24 -0
- package/dist/create-audio-decoder.js +112 -0
- package/dist/create-video-decoder.d.ts +23 -0
- package/dist/create-video-decoder.js +89 -0
- package/dist/esm/index.mjs +794 -631
- package/dist/get-wave-audio-decoder.d.ts +7 -2
- package/dist/get-wave-audio-decoder.js +27 -13
- package/dist/index.d.ts +4 -4
- package/dist/index.js +5 -5
- package/dist/io-manager/io-synchronizer.d.ts +5 -13
- package/dist/io-manager/io-synchronizer.js +29 -74
- package/dist/io-manager/make-timeout-promise.d.ts +1 -1
- package/dist/io-manager/make-timeout-promise.js +8 -4
- package/dist/is-different-video-codec.d.ts +11 -0
- package/dist/is-different-video-codec.js +36 -0
- package/dist/on-audio-track.d.ts +2 -2
- package/dist/on-audio-track.js +16 -150
- package/dist/on-frame.d.ts +2 -4
- package/dist/on-frame.js +8 -9
- package/dist/on-video-track.d.ts +2 -2
- package/dist/on-video-track.js +19 -129
- package/dist/processing-queue.d.ts +18 -0
- package/dist/processing-queue.js +47 -0
- package/dist/reencode-audio-track.d.ts +18 -0
- package/dist/reencode-audio-track.js +164 -0
- package/dist/reencode-video-track.d.ts +19 -0
- package/dist/reencode-video-track.js +151 -0
- package/dist/sort-video-frames.d.ts +4 -3
- package/dist/sort-video-frames.js +7 -3
- package/dist/video-decoder.d.ts +14 -8
- package/dist/video-decoder.js +37 -72
- package/dist/video-encoder.d.ts +6 -5
- package/dist/video-encoder.js +16 -40
- package/dist/wav-audio-encoder.d.ts +4 -1
- package/dist/wav-audio-encoder.js +3 -2
- package/package.json +5 -5
- package/dist/select-container-creator.d.ts +0 -2
- package/dist/select-container-creator.js +0 -19
package/dist/esm/index.mjs
CHANGED
@@ -432,63 +432,10 @@ var setRemotionImported = () => {
   }
 };
 
-// src/get-wave-audio-decoder.ts
-var getBytesPerSample = (sampleFormat) => {
-  if (sampleFormat === "s16") {
-    return 2;
-  }
-  if (sampleFormat === "s32") {
-    return 4;
-  }
-  if (sampleFormat === "f32") {
-    return 4;
-  }
-  if (sampleFormat === "u8") {
-    return 1;
-  }
-  if (sampleFormat === "f32-planar") {
-    return 4;
-  }
-  if (sampleFormat === "s16-planar") {
-    return 2;
-  }
-  if (sampleFormat === "s32-planar") {
-    return 4;
-  }
-  if (sampleFormat === "u8-planar") {
-    return 1;
-  }
-  throw new Error(`Unsupported sample format: ${sampleFormat}`);
-};
-var getWaveAudioDecoder = ({
-  onFrame,
-  track,
-  sampleFormat
-}) => {
-  let queue = Promise.resolve();
-  const processSample = async (audioSample) => {
-    const bytesPerSample = getBytesPerSample(sampleFormat);
-    await onFrame(new AudioData({
-      data: audioSample.data,
-      format: sampleFormat,
-      numberOfChannels: track.numberOfChannels,
-      numberOfFrames: audioSample.data.byteLength / bytesPerSample / track.numberOfChannels,
-      sampleRate: track.sampleRate,
-      timestamp: audioSample.timestamp
-    }));
-  };
-  return {
-    close() {
-      return Promise.resolve();
-    },
-    processSample(audioSample) {
-      queue = queue.then(() => processSample(audioSample));
-      return queue;
-    },
-    flush: () => Promise.resolve(),
-    waitForFinish: () => Promise.resolve()
-  };
-};
+// src/audio-encoder.ts
+import {
+  MediaParserAbortError
+} from "@remotion/media-parser";
 
 // src/create/event-emitter.ts
 class IoEventEmitter {
@@ -562,8 +509,10 @@ var makeTimeoutPromise = ({
   const onResume = () => {
     set();
   };
-  controller.addEventListener("pause", onPause);
-  controller.addEventListener("resume", onResume);
+  if (controller) {
+    controller.addEventListener("pause", onPause);
+    controller.addEventListener("resume", onResume);
+  }
   return {
     timeoutPromise: promise,
     clear: () => {
@@ -571,8 +520,10 @@ var makeTimeoutPromise = ({
         clearTimeout(timeout);
       }
       resolve();
-      controller.removeEventListener("pause", onPause);
-      controller.removeEventListener("resume", onResume);
+      if (controller) {
+        controller.removeEventListener("pause", onPause);
+        controller.removeEventListener("resume", onResume);
+      }
     }
   };
 };
@@ -581,39 +532,26 @@ var makeTimeoutPromise = ({
 var makeIoSynchronizer = ({
   logLevel,
   label,
-  progress
+  controller
 }) => {
   const eventEmitter = new IoEventEmitter;
   let lastInput = 0;
-  let lastInputKeyframe = 0;
   let lastOutput = 0;
   let inputsSinceLastOutput = 0;
   let inputs = [];
-  let keyframes = [];
-  let _unprocessed = 0;
-  const getUnprocessed = () => _unprocessed;
-  const getUnemittedItems = () => {
+  const getQueuedItems = () => {
     inputs = inputs.filter((input) => Math.floor(input) > Math.floor(lastOutput));
     return inputs.length;
   };
-  const getUnemittedKeyframes = () => {
-    keyframes = keyframes.filter((keyframe) => Math.floor(keyframe) > Math.floor(lastOutput));
-    return keyframes.length;
-  };
   const printState = (prefix) => {
-    Log.trace(logLevel, `[${label}] ${prefix}, state: Last input = ${lastInput} Last
+    Log.trace(logLevel, `[${label}] ${prefix}, state: Last input = ${lastInput} Last output = ${lastOutput} Inputs since last output = ${inputsSinceLastOutput}, Queue = ${getQueuedItems()}`);
   };
-  const inputItem = (timestamp, keyFrame) => {
+  const inputItem = (timestamp) => {
     lastInput = timestamp;
-    if (keyFrame) {
-      lastInputKeyframe = timestamp;
-      keyframes.push(timestamp);
-    }
     inputsSinceLastOutput++;
     inputs.push(timestamp);
     eventEmitter.dispatchEvent("input", {
-      timestamp,
-      keyFrame
+      timestamp
     });
     printState("Input item");
   };
@@ -623,7 +561,6 @@ var makeIoSynchronizer = ({
     eventEmitter.dispatchEvent("output", {
       timestamp
     });
-    _unprocessed++;
     printState("Got output");
   };
   const waitForOutput = () => {
@@ -635,184 +572,49 @@ var makeIoSynchronizer = ({
     eventEmitter.addEventListener("output", on);
     return promise;
   };
-  const waitForProcessed = () => {
-    const { promise, resolve } = withResolvers();
-    const on = () => {
-      eventEmitter.removeEventListener("processed", on);
-      resolve();
-    };
-    eventEmitter.addEventListener("processed", on);
-    return promise;
+  const makeErrorBanner = () => {
+    return [
+      `Waited too long for ${label} to finish:`,
+      `${getQueuedItems()} queued items`,
+      `inputs: ${JSON.stringify(inputs)}`,
+      `last output: ${lastOutput}`
+    ];
   };
-  const waitFor = async ({
-    unemitted,
-    unprocessed,
-    minimumProgress,
-    controller
-  }) => {
-    await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+  const waitForQueueSize = async (queueSize) => {
+    if (getQueuedItems() <= queueSize) {
+      return Promise.resolve();
+    }
     const { timeoutPromise, clear } = makeTimeoutPromise({
       label: () => [
-        `Waited too long for ${label} to finish:`,
-        `${getUnemittedItems()} unemitted items`,
-        `${getUnprocessed()} unprocessed items: ${JSON.stringify(_unprocessed)}`,
-        `smallest progress: ${progress.getSmallestProgress()}`,
-        `inputs: ${JSON.stringify(inputs)}`,
-        `last output: ${lastOutput}`,
-        `wanted: ${unemitted} unemitted items, ${unprocessed} unprocessed items, minimum progress ${minimumProgress}`,
+        ...makeErrorBanner(),
+        `wanted: <${queueSize} queued items`,
         `Report this at https://remotion.dev/report`
       ].join(`
 `),
       ms: 1e4,
       controller
     });
-    controller._internals._mediaParserController._internals.signal.addEventListener("abort", clear);
+    if (controller) {
+      controller._internals._mediaParserController._internals.signal.addEventListener("abort", clear);
+    }
     await Promise.race([
       timeoutPromise,
-
-
-
-
-
-      })(),
-      (async () => {
-        while (getUnprocessed() > unprocessed) {
-          await waitForProcessed();
-        }
-      })(),
-      minimumProgress === null || progress.getSmallestProgress() === null ? Promise.resolve() : (async () => {
-        while (progress.getSmallestProgress() < minimumProgress) {
-          await progress.waitForProgress();
-        }
-      })()
-    ])
+      (async () => {
+        while (getQueuedItems() > queueSize) {
+          await waitForOutput();
+        }
+      })()
     ]).finally(() => clear());
-    controller._internals._mediaParserController._internals.signal.removeEventListener("abort", clear);
-  };
-  const waitForFinish = async (controller) => {
-    await waitFor({
-      unprocessed: 0,
-      unemitted: 0,
-      minimumProgress: null,
-      controller
-    });
-  };
-  const onProcessed = () => {
-    eventEmitter.dispatchEvent("processed", {});
-    _unprocessed--;
+    if (controller) {
+      controller._internals._mediaParserController._internals.signal.removeEventListener("abort", clear);
+    }
   };
   return {
     inputItem,
     onOutput,
-    waitFor,
-    waitForFinish,
-    onProcessed,
-    getUnprocessed
-  };
-};
-
-// src/audio-decoder.ts
-var createAudioDecoder = ({
-  onFrame,
-  onError,
-  controller,
-  config,
-  logLevel,
-  track,
-  progressTracker
-}) => {
-  if (controller._internals._mediaParserController._internals.signal.aborted) {
-    throw new Error("Not creating audio decoder, already aborted");
-  }
-  if (config.codec === "pcm-s16") {
-    return getWaveAudioDecoder({ onFrame, track, sampleFormat: "s16" });
-  }
-  const ioSynchronizer = makeIoSynchronizer({
-    logLevel,
-    label: "Audio decoder",
-    progress: progressTracker
-  });
-  let outputQueue = Promise.resolve();
-  const audioDecoder = new AudioDecoder({
-    output(frame) {
-      ioSynchronizer.onOutput(frame.timestamp + (frame.duration ?? 0));
-      const abortHandler = () => {
-        frame.close();
-      };
-      controller._internals._mediaParserController._internals.signal.addEventListener("abort", abortHandler, {
-        once: true
-      });
-      outputQueue = outputQueue.then(() => {
-        if (controller._internals._mediaParserController._internals.signal.aborted) {
-          return;
-        }
-        return onFrame(frame);
-      }).then(() => {
-        ioSynchronizer.onProcessed();
-        controller._internals._mediaParserController._internals.signal.removeEventListener("abort", abortHandler);
-        return Promise.resolve();
-      }).catch((err) => {
-        frame.close();
-        onError(err);
-      });
-    },
-    error(error) {
-      onError(error);
-    }
-  });
-  const close = () => {
-    controller._internals._mediaParserController._internals.signal.removeEventListener("abort", onAbort);
-    if (audioDecoder.state === "closed") {
-      return;
-    }
-    audioDecoder.close();
-  };
-  const onAbort = () => {
-    close();
-  };
-  controller._internals._mediaParserController._internals.signal.addEventListener("abort", onAbort);
-  audioDecoder.configure(config);
-  const processSample = async (audioSample) => {
-    if (audioDecoder.state === "closed") {
-      return;
-    }
-    progressTracker.setPossibleLowestTimestamp(Math.min(audioSample.timestamp, audioSample.decodingTimestamp ?? Infinity));
-    await ioSynchronizer.waitFor({
-      unemitted: 20,
-      unprocessed: 20,
-      minimumProgress: audioSample.timestamp - 1e7,
-      controller
-    });
-    const chunk = new EncodedAudioChunk(audioSample);
-    audioDecoder.decode(chunk);
-    if (chunk.byteLength > 16) {
-      ioSynchronizer.inputItem(chunk.timestamp, audioSample.type === "key");
-    }
-  };
-  let queue = Promise.resolve();
-  return {
-    processSample: (sample) => {
-      queue = queue.then(() => processSample(sample));
-      return queue;
-    },
-    waitForFinish: async () => {
-      try {
-        await audioDecoder.flush();
-      } catch {}
-      await queue;
-      await ioSynchronizer.waitForFinish(controller);
-      await outputQueue;
-    },
-    close,
-    flush: async () => {
-      await audioDecoder.flush();
-    }
+    waitForQueueSize
   };
 };
-// src/audio-encoder.ts
-import {
-  MediaParserAbortError
-} from "@remotion/media-parser";
 
 // src/audio-data/data-types.ts
 var getDataTypeForAudioFormat = (format) => {
@@ -928,13 +730,14 @@ var convertAudioData = ({
 var getWaveAudioEncoder = ({
   onChunk,
   controller,
-  config
+  config,
+  ioSynchronizer
 }) => {
   return {
     close: () => {
       return Promise.resolve();
     },
-    encodeFrame: (unconvertedAudioData) => {
+    encode: (unconvertedAudioData) => {
       if (controller._internals._mediaParserController._internals.signal.aborted) {
         return Promise.resolve();
       }
@@ -954,7 +757,8 @@ var getWaveAudioEncoder = ({
       return onChunk(chunk);
     },
     flush: () => Promise.resolve(),
-    waitForFinish: () => Promise.resolve()
+    waitForFinish: () => Promise.resolve(),
+    ioSynchronizer
   };
 };
 
@@ -966,39 +770,32 @@ var createAudioEncoder = ({
   controller,
   config: audioEncoderConfig,
   logLevel,
-  onNewAudioSampleRate,
-  progressTracker
+  onNewAudioSampleRate
 }) => {
   if (controller._internals._mediaParserController._internals.signal.aborted) {
     throw new MediaParserAbortError("Not creating audio encoder, already aborted");
   }
+  const ioSynchronizer = makeIoSynchronizer({
+    logLevel,
+    label: "Audio encoder",
+    controller
+  });
   if (codec === "wav") {
     return getWaveAudioEncoder({
       onChunk,
       controller,
-      config: audioEncoderConfig
+      config: audioEncoderConfig,
+      ioSynchronizer
     });
   }
-  const ioSynchronizer = makeIoSynchronizer({
-    logLevel,
-    label: "Audio encoder",
-    progress: progressTracker
-  });
-  let prom = Promise.resolve();
   const encoder = new AudioEncoder({
-    output: (chunk) => {
-      ioSynchronizer.onOutput(chunk.timestamp);
-      prom = prom.then(() => {
-        if (controller._internals._mediaParserController._internals.signal.aborted) {
-          return;
-        }
-        return onChunk(chunk);
-      }).then(() => {
-        ioSynchronizer.onProcessed();
-        return Promise.resolve();
-      }).catch((err) => {
+    output: async (chunk) => {
+      try {
+        await onChunk(chunk);
+      } catch (err) {
         onError(err);
-      });
+      }
+      ioSynchronizer.onOutput(chunk.timestamp);
     },
     error(error) {
       onError(error);
@@ -1019,17 +816,7 @@ var createAudioEncoder = ({
     throw new Error('Only `codec: "opus"` and `codec: "aac"` is supported currently');
   }
   const wantedSampleRate = audioEncoderConfig.sampleRate;
-  const encodeFrame = async (audioData) => {
-    if (encoder.state === "closed") {
-      return;
-    }
-    progressTracker.setPossibleLowestTimestamp(audioData.timestamp);
-    await ioSynchronizer.waitFor({
-      unemitted: 20,
-      unprocessed: 20,
-      minimumProgress: audioData.timestamp - 1e7,
-      controller
-    });
+  const encodeFrame = (audioData) => {
     if (encoder.state === "closed") {
       return;
     }
@@ -1045,31 +832,73 @@ var createAudioEncoder = ({
     }
   }
   encoder.encode(audioData);
-  ioSynchronizer.inputItem(audioData.timestamp
+  ioSynchronizer.inputItem(audioData.timestamp);
   };
-  let queue = Promise.resolve();
   return {
-    encodeFrame: (audioData) => {
-      queue = queue.then(() => encodeFrame(audioData));
-      return queue;
+    encode: (audioData) => {
+      encodeFrame(audioData);
     },
     waitForFinish: async () => {
       await encoder.flush();
-      await ioSynchronizer.waitForFinish(controller);
-      await prom;
+      await ioSynchronizer.waitForQueueSize(0);
     },
     close,
     flush: async () => {
       await encoder.flush();
-    }
+    },
+    ioSynchronizer
   };
 };
+// src/is-different-video-codec.ts
+var isSameVideoCodec = ({
+  inputVideoCodec,
+  outputCodec
+}) => {
+  if (outputCodec === "h264") {
+    return inputVideoCodec === "h264";
+  }
+  if (outputCodec === "h265") {
+    return inputVideoCodec === "h265";
+  }
+  if (outputCodec === "vp8") {
+    return inputVideoCodec === "vp8";
+  }
+  if (outputCodec === "vp9") {
+    return inputVideoCodec === "vp9";
+  }
+  throw new Error(`Unsupported output codec: ${outputCodec}`);
+};
+var isSameAudioCodec = ({
+  inputAudioCodec,
+  outputCodec
+}) => {
+  if (outputCodec === "aac") {
+    return inputAudioCodec === "aac";
+  }
+  if (outputCodec === "opus") {
+    return inputAudioCodec === "opus";
+  }
+  if (outputCodec === "wav") {
+    return inputAudioCodec === "pcm-f32" || inputAudioCodec === "pcm-s16" || inputAudioCodec === "pcm-s24" || inputAudioCodec === "pcm-s32" || inputAudioCodec === "pcm-u8";
+  }
+  throw new Error(`Unsupported output codec: ${outputCodec}`);
+};
+
 
 // src/can-copy-audio-track.ts
 var canCopyAudioTrack = ({
   inputCodec,
   outputContainer,
-  inputContainer
+  inputContainer,
+  outputAudioCodec
 }) => {
+  if (outputAudioCodec) {
+    if (!isSameAudioCodec({
+      inputAudioCodec: inputCodec,
+      outputCodec: outputAudioCodec
+    })) {
+      return false;
+    }
+  }
   if (outputContainer === "webm") {
     return inputCodec === "opus";
   }
@@ -1087,11 +916,20 @@ var canCopyVideoTrack = ({
   rotationToApply,
   inputContainer,
   resizeOperation,
-  inputTrack
+  inputTrack,
+  outputVideoCodec
 }) => {
   if (normalizeVideoRotation(inputTrack.rotation) !== normalizeVideoRotation(rotationToApply)) {
     return false;
   }
+  if (outputVideoCodec) {
+    if (!isSameVideoCodec({
+      inputVideoCodec: inputTrack.codecEnum,
+      outputCodec: outputVideoCodec
+    })) {
+      return false;
+    }
+  }
   const newDimensions = calculateNewDimensionsFromRotateAndScale({
     height: inputTrack.height,
     resizeOperation,
@@ -4132,13 +3970,7 @@ var makeProgressTracker = () => {
       startingTimestamp = Math.min(startingTimestamp, timestamp);
     }
   };
-  const getStartingTimestamp = () => {
-    if (startingTimestamp === null) {
-      throw new Error("No starting timestamp");
-    }
-    return startingTimestamp;
-  };
-  const calculateSmallestProgress = () => {
+  const getSmallestProgress = () => {
     const progressValues = Object.values(trackNumberProgresses).map((p) => {
       if (p !== null) {
         return p;
@@ -4154,26 +3986,16 @@ var makeProgressTracker = () => {
     registerTrack: (trackNumber) => {
       trackNumberProgresses[trackNumber] = null;
     },
-    getSmallestProgress: calculateSmallestProgress,
+    getSmallestProgress,
     updateTrackProgress: (trackNumber, progress) => {
       if (trackNumberProgresses[trackNumber] === undefined) {
         throw new Error(`Tried to update progress for a track that was not registered: ${trackNumber}`);
       }
       trackNumberProgresses[trackNumber] = progress;
       eventEmitter.dispatchEvent("progress", {
-        smallestProgress: calculateSmallestProgress()
+        smallestProgress: getSmallestProgress()
       });
     },
-    waitForProgress: () => {
-      const { promise, resolve } = withResolvers();
-      const on = () => {
-        eventEmitter.removeEventListener("progress", on);
-        resolve();
-      };
-      eventEmitter.addEventListener("progress", on);
-      return promise;
-    },
-    getStartingTimestamp,
     setPossibleLowestTimestamp
   };
 };
@@ -4209,22 +4031,37 @@ var getAvailableVideoCodecs = ({
   throw new Error(`Unsupported container: ${container}`);
 };
 
-// src/
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+// src/copy-audio-track.ts
+var copyAudioTrack = async ({
+  state,
+  track,
+  logLevel,
+  onMediaStateUpdate,
+  progressTracker
+}) => {
+  const addedTrack = await state.addTrack({
+    type: "audio",
+    codec: track.codecEnum,
+    numberOfChannels: track.numberOfChannels,
+    sampleRate: track.sampleRate,
+    codecPrivate: track.codecData?.data ?? null,
+    timescale: track.originalTimescale
+  });
+  Log.verbose(logLevel, `Copying audio track ${track.trackId} as track ${addedTrack.trackNumber}. Timescale = ${track.originalTimescale}, codec = ${track.codecEnum} (${track.codec}) `);
+  return async (audioSample) => {
+    progressTracker.setPossibleLowestTimestamp(Math.min(audioSample.timestamp, audioSample.decodingTimestamp ?? Infinity));
+    await state.addSample({
+      chunk: audioSample,
+      trackNumber: addedTrack.trackNumber,
+      isVideo: false,
+      codecPrivate: track.codecData?.data ?? null
+    });
+    onMediaStateUpdate?.((prevState) => {
+      return {
+        ...prevState,
+        encodedAudioFrames: prevState.encodedAudioFrames + 1
+      };
+    });
   };
 };
 
@@ -4281,62 +4118,263 @@ var getDefaultAudioCodec = ({
   throw new Error(`Unhandled container: ${container}`);
 };
 
-// src/
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+// src/reencode-audio-track.ts
+import { MediaParserInternals as MediaParserInternals7 } from "@remotion/media-parser";
+
+// src/convert-encoded-chunk.ts
+var convertEncodedChunk = (chunk) => {
+  const arr = new Uint8Array(chunk.byteLength);
+  chunk.copyTo(arr);
+  return {
+    data: arr,
+    duration: chunk.duration ?? undefined,
+    timestamp: chunk.timestamp,
+    type: chunk.type,
+    decodingTimestamp: chunk.timestamp,
+    offset: 0
+  };
+};
+
+// src/get-wave-audio-decoder.ts
+var getBytesPerSample = (sampleFormat) => {
+  if (sampleFormat === "s16") {
+    return 2;
+  }
+  if (sampleFormat === "s32") {
+    return 4;
+  }
+  if (sampleFormat === "f32") {
+    return 4;
+  }
+  if (sampleFormat === "u8") {
+    return 1;
+  }
+  if (sampleFormat === "f32-planar") {
+    return 4;
+  }
+  if (sampleFormat === "s16-planar") {
+    return 2;
+  }
+  if (sampleFormat === "s32-planar") {
+    return 4;
+  }
+  if (sampleFormat === "u8-planar") {
+    return 1;
+  }
+  throw new Error(`Unsupported sample format: ${sampleFormat}`);
+};
+var getAudioData = (audioSample) => {
+  if (audioSample instanceof EncodedAudioChunk) {
+    const data = new Uint8Array(audioSample.byteLength);
+    audioSample.copyTo(data);
+    return data;
+  }
+  return audioSample.data;
+};
+var getWaveAudioDecoder = ({
+  onFrame,
+  config,
+  sampleFormat,
+  ioSynchronizer,
+  onError
+}) => {
+  const processSample = async (audioSample) => {
+    const bytesPerSample = getBytesPerSample(sampleFormat);
+    const data = getAudioData(audioSample);
+    const audioData = new AudioData({
+      data,
+      format: sampleFormat,
+      numberOfChannels: config.numberOfChannels,
+      numberOfFrames: data.byteLength / bytesPerSample / config.numberOfChannels,
+      sampleRate: config.sampleRate,
+      timestamp: audioSample.timestamp
+    });
+    try {
+      await onFrame(audioData);
+    } catch (err) {
+      audioData.close();
+      onError(err);
+    }
+  };
+  return {
+    close() {
+      return Promise.resolve();
+    },
+    decode(audioSample) {
+      return processSample(audioSample);
+    },
+    flush: () => Promise.resolve(),
+    waitForQueueToBeLessThan: ioSynchronizer.waitForQueueSize,
+    reset: () => {}
+  };
+};
+
+// src/create-audio-decoder.ts
+var internalCreateAudioDecoder = ({
+  onFrame,
+  onError,
+  controller,
+  config,
+  logLevel
+}) => {
+  if (controller && controller._internals._mediaParserController._internals.signal.aborted) {
+    throw new Error("Not creating audio decoder, already aborted");
+  }
+  const ioSynchronizer = makeIoSynchronizer({
   logLevel,
-  outputContainer,
-  inputContainer,
-  canCopyTrack
+    label: "Audio decoder",
+    controller
   });
-  if (audioOperation.type === "drop") {
-    return null;
+  if (config.codec === "pcm-s16") {
+    return getWaveAudioDecoder({
+      onFrame,
+      config,
+      sampleFormat: "s16",
+      logLevel,
+      ioSynchronizer,
+      onError
+    });
   }
-  if (audioOperation.type === "fail") {
-    throw new Error(`Audio track with ID ${track.trackId} resolved with {"type": "fail"}. This could mean that this audio track could neither be copied to the output container or re-encoded. You have the option to drop the track instead of failing it: https://remotion.dev/docs/webcodecs/track-transformation`);
+  const audioDecoder = new AudioDecoder({
+    async output(frame) {
+      try {
+        await onFrame(frame);
+      } catch (err) {
+        frame.close();
+        onError(err);
+      }
+      ioSynchronizer.onOutput(frame.timestamp + (frame.duration ?? 0));
+    },
+    error(error) {
+      onError(error);
+    }
+  });
+  const close = () => {
+    if (controller) {
+      controller._internals._mediaParserController._internals.signal.removeEventListener("abort", onAbort);
+    }
+    if (audioDecoder.state === "closed") {
+      return;
+    }
+    audioDecoder.close();
+  };
+  const onAbort = () => {
+    close();
+  };
+  if (controller) {
+    controller._internals._mediaParserController._internals.signal.addEventListener("abort", onAbort);
   }
-
-
-
-
-
-
-
-
+  audioDecoder.configure(config);
+  const decode = async (audioSample) => {
+    if (audioDecoder.state === "closed") {
+      return;
+    }
+    try {
+      await controller?._internals._mediaParserController._internals.checkForAbortAndPause();
+    } catch (err) {
+      onError(err);
+      return;
+    }
+    const chunk = audioSample instanceof EncodedAudioChunk ? audioSample : new EncodedAudioChunk(audioSample);
+    audioDecoder.decode(chunk);
+    if (chunk.byteLength > 16) {
+      ioSynchronizer.inputItem(chunk.timestamp);
+    }
+  };
+  return {
+    decode,
+    close,
+    flush: async () => {
+      try {
+        await audioDecoder.flush();
+      } catch {}
+      await ioSynchronizer.waitForQueueSize(0);
+    },
+    waitForQueueToBeLessThan: ioSynchronizer.waitForQueueSize,
+    reset: () => {
+      audioDecoder.reset();
+      audioDecoder.configure(config);
+    }
+  };
+};
+var createAudioDecoder = ({
+  track,
+  onFrame,
+  onError,
+  controller,
+  logLevel
+}) => {
+  return internalCreateAudioDecoder({
+    onFrame,
+    onError,
+    controller: controller ?? null,
+    config: track,
+    logLevel: logLevel ?? "error"
+  });
+};
+
+// src/processing-queue.ts
+function processingQueue({
+  onOutput,
+  logLevel,
+  label,
+  onError,
+  controller
+}) {
+  const ioSynchronizer = makeIoSynchronizer({
+    logLevel,
+    label,
+    controller
+  });
+  let queue = Promise.resolve();
+  let stopped = false;
+  const input = (item) => {
+    if (stopped) {
+      return;
+    }
+    if (controller._internals._mediaParserController._internals.signal.aborted) {
+      stopped = true;
+      return;
+    }
+    const { timestamp } = item;
+    ioSynchronizer.inputItem(timestamp);
+    queue = queue.then(() => {
+      if (stopped) {
+        return;
+      }
+      if (controller._internals._mediaParserController._internals.signal.aborted) {
+        stopped = true;
+        return;
+      }
+      return onOutput(item);
+    }).then(() => {
+      ioSynchronizer.onOutput(timestamp);
+      return Promise.resolve();
+    }).catch((err) => {
+      stopped = true;
+      onError(err);
     });
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  };
+  return {
+    input,
+    ioSynchronizer
+  };
+}
+
+// src/reencode-audio-track.ts
+var reencodeAudioTrack = async ({
+  audioOperation,
+  track,
+  logLevel,
+  abortConversion,
+  state,
+  controller,
+  onMediaStateUpdate,
+  onAudioData,
+  progressTracker
+}) => {
+  if (audioOperation.type !== "reencode") {
+    throw new Error(`Audio track with ID ${track.trackId} could not be resolved with a valid operation. Received ${JSON.stringify(audioOperation)}, but must be either "copy", "reencode", "drop" or "fail"`);
   }
   const audioEncoderConfig = await getAudioEncoderConfig({
     numberOfChannels: track.numberOfChannels,
@@ -4400,11 +4438,18 @@ var makeAudioTrackHandler = ({
     codec: audioOperation.audioCodec,
     controller,
     config: audioEncoderConfig,
-    logLevel,
-    progressTracker
+    logLevel
   });
-  const audioDecoder = createAudioDecoder({
-    onFrame: async (audioData) => {
+  const audioProcessingQueue = processingQueue({
+    controller,
+    label: "AudioData processing queue",
+    logLevel,
+    onError(error) {
+      abortConversion(new Error(`Audio decoder of track ${track.trackId} failed. Config: ${JSON.stringify(audioDecoderConfig)} (see .cause of this error)`, {
+        cause: error
+      }));
+    },
+    onOutput: async (audioData) => {
       const newAudioData = onAudioData ? await onAudioData?.({ audioData, track }) : audioData;
       if (newAudioData !== audioData) {
         if (newAudioData.duration !== audioData.duration) {
@@ -4424,7 +4469,10 @@ var makeAudioTrackHandler = ({
         }
         audioData.close();
       }
-      await audioEncoder.encodeFrame(newAudioData);
+      await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+      await audioEncoder.ioSynchronizer.waitForQueueSize(10);
+      await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+      audioEncoder.encode(newAudioData);
       onMediaStateUpdate?.((prevState) => {
         return {
           ...prevState,
@@ -4432,6 +4480,13 @@ var makeAudioTrackHandler = ({
         };
       });
       newAudioData.close();
+    }
+  });
+  const audioDecoder = internalCreateAudioDecoder({
+    onFrame: async (audioData) => {
+      await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+      await audioProcessingQueue.ioSynchronizer.waitForQueueSize(10);
+      audioProcessingQueue.input(audioData);
     },
     onError(error) {
       abortConversion(new Error(`Audio decoder of track ${track.trackId} failed. Config: ${JSON.stringify(audioDecoderConfig)} (see .cause of this error)`, {
@@ -4440,24 +4495,114 @@ var makeAudioTrackHandler = ({
     },
     controller,
     config: audioDecoderConfig,
-    logLevel,
-    track,
-    progressTracker
+    logLevel
   });
   state.addWaitForFinishPromise(async () => {
-    await audioDecoder.waitForFinish();
-
+    await audioDecoder.waitForQueueToBeLessThan(0);
+    Log.verbose(logLevel, "Audio decoder finished");
     audioDecoder.close();
+    await audioProcessingQueue.ioSynchronizer.waitForQueueSize(0);
+    Log.verbose(logLevel, "Audio processing queue finished");
+    await audioEncoder.waitForFinish();
+    Log.verbose(logLevel, "Audio encoder finished");
     audioEncoder.close();
   });
   return async (audioSample) => {
-    await audioDecoder.processSample(audioSample);
+    progressTracker.setPossibleLowestTimestamp(Math.min(audioSample.timestamp, audioSample.decodingTimestamp ?? Infinity));
+    await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+    await audioDecoder.waitForQueueToBeLessThan(10);
+    audioDecoder.decode(audioSample);
   };
 };
 
-// src/
-var
-
+// src/on-audio-track.ts
+var makeAudioTrackHandler = ({
+  state,
+  defaultAudioCodec: audioCodec,
+  controller,
+  abortConversion,
+  onMediaStateUpdate,
+  onAudioTrack,
+  logLevel,
+  outputContainer,
+  onAudioData,
+  progressTracker
+}) => async ({ track, container: inputContainer }) => {
+  const canCopyTrack = canCopyAudioTrack({
+    inputCodec: track.codecEnum,
+    outputContainer,
+    inputContainer,
+    outputAudioCodec: audioCodec
+  });
+  const audioOperation = await (onAudioTrack ?? defaultOnAudioTrackHandler)({
+    defaultAudioCodec: audioCodec ?? getDefaultAudioCodec({ container: outputContainer }),
+    track,
+    logLevel,
+    outputContainer,
+    inputContainer,
+    canCopyTrack
+  });
+  if (audioOperation.type === "drop") {
+    return null;
+  }
+  if (audioOperation.type === "fail") {
+    throw new Error(`Audio track with ID ${track.trackId} resolved with {"type": "fail"}. This could mean that this audio track could neither be copied to the output container or re-encoded. You have the option to drop the track instead of failing it: https://remotion.dev/docs/webcodecs/track-transformation`);
+  }
+  if (audioOperation.type === "copy") {
+    return copyAudioTrack({
+      logLevel,
+      onMediaStateUpdate,
+      state,
+      track,
+      progressTracker
+    });
+  }
+  return reencodeAudioTrack({
+    abortConversion,
+    controller,
+    logLevel,
+    onMediaStateUpdate,
+    audioOperation,
+    onAudioData,
+    state,
+    track,
+    progressTracker
+  });
+};
+
+// src/copy-video-track.ts
+var copyVideoTrack = async ({
+  logLevel,
+  state,
+  track,
+  onMediaStateUpdate,
+  progressTracker
+}) => {
+  Log.verbose(logLevel, `Copying video track with codec ${track.codec} and timescale ${track.originalTimescale}`);
+  const videoTrack = await state.addTrack({
+    type: "video",
+    color: track.advancedColor,
+    width: track.codedWidth,
+    height: track.codedHeight,
+    codec: track.codecEnum,
+    codecPrivate: track.codecData?.data ?? null,
+    timescale: track.originalTimescale
+  });
+  return async (sample) => {
+    progressTracker.setPossibleLowestTimestamp(Math.min(sample.timestamp, sample.decodingTimestamp ?? Infinity));
+    await state.addSample({
+      chunk: sample,
+      trackNumber: videoTrack.trackNumber,
+      isVideo: true,
+      codecPrivate: track.codecData?.data ?? null
+    });
+    onMediaStateUpdate?.((prevState) => {
+      return {
+        ...prevState,
+        decodedVideoFrames: prevState.decodedVideoFrames + 1
+      };
+    });
+  };
 };
 
 // src/default-on-video-track-handler.ts
@@ -4496,21 +4641,115 @@ var defaultOnVideoTrackHandler = async ({
   MediaParserInternals8.Log.verbose(logLevel, `Track ${track.trackId} (video): Can neither copy nor re-encode, therefore failing`);
   return Promise.resolve({ type: "fail" });
 };
-
-// src/get-default-video-codec.ts
-var getDefaultVideoCodec = ({
-  container
+
+// src/get-default-video-codec.ts
+var getDefaultVideoCodec = ({
+  container
+}) => {
+  if (container === "webm") {
+    return "vp8";
+  }
+  if (container === "mp4") {
+    return "h264";
+  }
+  if (container === "wav") {
+    return null;
+  }
+  throw new Error(`Unhandled container: ${container}`);
+};
+
+// src/arraybuffer-to-uint8-array.ts
+var arrayBufferToUint8Array = (buffer) => {
+  return buffer ? new Uint8Array(buffer) : null;
+};
+
+// src/create-video-decoder.ts
+var internalCreateVideoDecoder = ({
+  onFrame,
+  onError,
+  controller,
+  config,
+  logLevel
+}) => {
+  const ioSynchronizer = makeIoSynchronizer({
+    logLevel,
+    label: "Video decoder",
+    controller
+  });
+  const videoDecoder = new VideoDecoder({
+    async output(frame) {
+      try {
+        await onFrame(frame);
+      } catch (err) {
+        onError(err);
+        frame.close();
+      }
+      ioSynchronizer.onOutput(frame.timestamp);
+    },
+    error(error) {
+      onError(error);
+    }
+  });
+  const close = () => {
+    if (controller) {
+      controller._internals._mediaParserController._internals.signal.removeEventListener("abort", onAbort);
+    }
+    if (videoDecoder.state === "closed") {
+      return;
+    }
+    videoDecoder.close();
+  };
+  const onAbort = () => {
+    close();
+  };
+  if (controller) {
+    controller._internals._mediaParserController._internals.signal.addEventListener("abort", onAbort);
+  }
+  videoDecoder.configure(config);
+  const decode = async (sample) => {
+    if (videoDecoder.state === "closed") {
+      return;
+    }
+    try {
+      await controller?._internals._mediaParserController._internals.checkForAbortAndPause();
+    } catch (err) {
+      onError(err);
+      return;
+    }
+    const encodedChunk = sample instanceof EncodedVideoChunk ? sample : new EncodedVideoChunk(sample);
+    videoDecoder.decode(encodedChunk);
+    ioSynchronizer.inputItem(sample.timestamp);
+  };
+  return {
+    decode,
+    close,
+    flush: async () => {
+      try {
+        await videoDecoder.flush();
+      } catch {}
+      await ioSynchronizer.waitForQueueSize(0);
+    },
+    waitForQueueToBeLessThan: ioSynchronizer.waitForQueueSize,
+    reset: () => {
+      videoDecoder.reset();
+      videoDecoder.configure(config);
+    }
+  };
+};
+var createVideoDecoder = ({
+  onFrame,
+  onError,
+  controller,
+  track,
+  logLevel
 }) => {
-  if (container === "webm") {
-    return "vp8";
-  }
-  if (container === "mp4") {
-    return "h264";
-  }
-  if (container === "wav") {
-    return null;
-  }
-  throw new Error(`Unhandled container: ${container}`);
+  return internalCreateVideoDecoder({
+    onFrame,
+    onError,
+    controller: controller ?? null,
+    config: track,
+    logLevel: logLevel ?? "info"
+  });
 };
 
 // src/convert-to-correct-videoframe.ts
@@ -4553,7 +4792,6 @@ var convertToCorrectVideoFrame = ({
 var onFrame = async ({
   frame: unrotatedFrame,
   onVideoFrame,
-  videoEncoder,
   track,
   outputCodec,
   rotation,
@@ -4570,10 +4808,10 @@ var onFrame = async ({
   }
   const userProcessedFrame = onVideoFrame ? await onVideoFrame({ frame: rotated, track }) : rotated;
   if (userProcessedFrame.displayWidth !== rotated.displayWidth) {
-    throw new Error(`Returned VideoFrame of track ${track.trackId} has different displayWidth (${userProcessedFrame.displayWidth}) than the input frame (${
+    throw new Error(`Returned VideoFrame of track ${track.trackId} has different displayWidth (${userProcessedFrame.displayWidth}) than the input frame (${rotated.displayWidth})`);
   }
   if (userProcessedFrame.displayHeight !== rotated.displayHeight) {
-    throw new Error(`Returned VideoFrame of track ${track.trackId} has different displayHeight (${userProcessedFrame.displayHeight}) than the input frame (${
+    throw new Error(`Returned VideoFrame of track ${track.trackId} has different displayHeight (${userProcessedFrame.displayHeight}) than the input frame (${rotated.displayHeight})`);
   }
   if (userProcessedFrame.timestamp !== rotated.timestamp && !isSafari()) {
     throw new Error(`Returned VideoFrame of track ${track.trackId} has different timestamp (${userProcessedFrame.timestamp}) than the input frame (${rotated.timestamp}). When calling new VideoFrame(), pass {timestamp: frame.timestamp} as second argument`);
@@ -4581,32 +4819,31 @@ var onFrame = async ({
   if ((userProcessedFrame.duration ?? 0) !== (rotated.duration ?? 0)) {
     throw new Error(`Returned VideoFrame of track ${track.trackId} has different duration (${userProcessedFrame.duration}) than the input frame (${rotated.duration}). When calling new VideoFrame(), pass {duration: frame.duration} as second argument`);
   }
+  if (rotated !== userProcessedFrame) {
+    rotated.close();
+  }
   const fixedFrame = convertToCorrectVideoFrame({
     videoFrame: userProcessedFrame,
     outputCodec
   });
-  await videoEncoder.encodeFrame(fixedFrame, fixedFrame.timestamp);
-  fixedFrame.close();
-  if (rotated !== userProcessedFrame) {
-    rotated.close();
-  }
   if (fixedFrame !== userProcessedFrame) {
-
+    userProcessedFrame.close();
   }
+  return fixedFrame;
 };
 
 // src/sort-video-frames.ts
 var MAX_QUEUE_SIZE = 5;
 var videoFrameSorter = ({
-  onRelease,
-  controller
+  controller,
+  onOutput
 }) => {
   const frames = [];
   const releaseFrame = async () => {
     await controller._internals._mediaParserController._internals.checkForAbortAndPause();
     const frame = frames.shift();
     if (frame) {
-      await onRelease(frame);
+      await onOutput(frame);
     }
   };
   const sortFrames = () => {
@@ -4642,115 +4879,13 @@ var videoFrameSorter = ({
     controller._internals._mediaParserController._internals.signal.removeEventListener("abort", onAbort);
   };
   controller._internals._mediaParserController._internals.signal.addEventListener("abort", onAbort);
+  let promise = Promise.resolve();
   return {
-    inputFrame,
-    flush
-  };
-};
-
-// src/video-decoder.ts
-var createVideoDecoder = ({
-  onFrame: onFrame2,
-  onError,
-  controller,
-  config,
-  logLevel,
-  progress
-}) => {
-  const ioSynchronizer = makeIoSynchronizer({
-    logLevel,
-    label: "Video decoder",
-    progress
-  });
-  let outputQueue = Promise.resolve();
-  const addToQueue = (frame) => {
-    const cleanup = () => {
-      frame.close();
-    };
-    controller._internals._mediaParserController._internals.signal.addEventListener("abort", cleanup, {
-      once: true
-    });
-    outputQueue = outputQueue.then(() => {
-      if (controller._internals._mediaParserController._internals.signal.aborted) {
-        return;
-      }
-      return onFrame2(frame);
-    }).then(() => {
-      ioSynchronizer.onProcessed();
-    }).catch((err) => {
-      onError(err);
-    }).finally(() => {
-      controller._internals._mediaParserController._internals.signal.removeEventListener("abort", cleanup);
-      cleanup();
-    });
-    return outputQueue;
-  };
-  const frameSorter = videoFrameSorter({
-    controller,
-    onRelease: async (frame) => {
-      await addToQueue(frame);
-    }
-  });
-  const videoDecoder = new VideoDecoder({
-    output(frame) {
-      ioSynchronizer.onOutput(frame.timestamp);
-      frameSorter.inputFrame(frame);
-    },
-    error(error) {
-      onError(error);
-    }
-  });
-  const close = () => {
-    controller._internals._mediaParserController._internals.signal.removeEventListener("abort", onAbort);
-    if (videoDecoder.state === "closed") {
-      return;
-    }
-    videoDecoder.close();
-  };
-  const onAbort = () => {
-    close();
-  };
-  controller._internals._mediaParserController._internals.signal.addEventListener("abort", onAbort);
-  videoDecoder.configure(config);
-  const processSample = async (sample) => {
-    if (videoDecoder.state === "closed") {
-      return;
-    }
-    if (videoDecoder.state === "closed") {
-      return;
-    }
-    progress.setPossibleLowestTimestamp(Math.min(sample.timestamp, sample.decodingTimestamp ?? Infinity));
-    await ioSynchronizer.waitFor({
-      unemitted: 20,
-      unprocessed: 10,
-      minimumProgress: sample.timestamp - 1e7,
-      controller
-    });
-    videoDecoder.decode(new EncodedVideoChunk(sample));
-    ioSynchronizer.inputItem(sample.timestamp, sample.type === "key");
-  };
-  let inputQueue = Promise.resolve();
-  return {
-    processSample: (sample) => {
-      inputQueue = inputQueue.then(() => processSample(sample));
-      return inputQueue;
-    },
-    waitForFinish: async () => {
-      await videoDecoder.flush();
-      Log.verbose(logLevel, "Flushed video decoder");
-      await frameSorter.flush();
-      Log.verbose(logLevel, "Frame sorter flushed");
-      await ioSynchronizer.waitForFinish(controller);
-      Log.verbose(logLevel, "IO synchro finished");
-      await outputQueue;
-      Log.verbose(logLevel, "Output queue finished");
-      await inputQueue;
-      Log.verbose(logLevel, "Input queue finished");
+    inputFrame: (frame) => {
+      promise = promise.then(() => inputFrame(frame));
     },
-    close,
-    flush: async () => {
-      await videoDecoder.flush();
-    }
+    waitUntilProcessed: () => promise,
+    flush
   };
 };
 
@@ -4765,7 +4900,7 @@ var createVideoEncoder = ({
   config,
   logLevel,
   outputCodec,
-  progress
+  keyframeInterval
 }) => {
   if (controller._internals._mediaParserController._internals.signal.aborted) {
     throw new MediaParserAbortError2("Not creating video encoder, already aborted");
@@ -4773,27 +4908,20 @@ var createVideoEncoder = ({
   const ioSynchronizer = makeIoSynchronizer({
     logLevel,
     label: "Video encoder",
-    progress
+    controller
   });
-  let outputQueue = Promise.resolve();
   const encoder = new VideoEncoder({
     error(error) {
       onError(error);
     },
-    output(chunk, metadata) {
+    async output(chunk, metadata) {
       const timestamp = chunk.timestamp + (chunk.duration ?? 0);
-      ioSynchronizer.onOutput(timestamp);
-      outputQueue = outputQueue.then(() => {
-        if (controller._internals._mediaParserController._internals.signal.aborted) {
-          return;
-        }
-        return onChunk(chunk, metadata ?? null);
-      }).then(() => {
-        ioSynchronizer.onProcessed();
-        return Promise.resolve();
-      }).catch((err) => {
+      try {
+        await onChunk(chunk, metadata ?? null);
+      } catch (err) {
         onError(err);
-      });
+      }
+      ioSynchronizer.onOutput(timestamp);
     }
   });
   const close = () => {
@@ -4810,115 +4938,49 @@ var createVideoEncoder = ({
   Log.verbose(logLevel, "Configuring video encoder", config);
   encoder.configure(config);
   let framesProcessed = 0;
-  const encodeFrame = async (frame) => {
-    if (encoder.state === "closed") {
-      return;
-    }
-    progress.setPossibleLowestTimestamp(frame.timestamp);
-    await ioSynchronizer.waitFor({
-      unemitted: 10,
-      unprocessed: 10,
-      minimumProgress: frame.timestamp - 1e7,
-      controller
-    });
+  const encodeFrame = (frame) => {
     if (encoder.state === "closed") {
       return;
     }
-    const keyFrame = framesProcessed %
+    const keyFrame = framesProcessed % keyframeInterval === 0;
     encoder.encode(convertToCorrectVideoFrame({ videoFrame: frame, outputCodec }), {
       keyFrame,
       vp9: {
         quantizer: 36
       }
     });
-    ioSynchronizer.inputItem(frame.timestamp, keyFrame);
+    ioSynchronizer.inputItem(frame.timestamp);
     framesProcessed++;
   };
-  let inputQueue = Promise.resolve();
   return {
-    encodeFrame: (frame) => {
-      inputQueue = inputQueue.then(() => encodeFrame(frame));
-      return inputQueue;
+    encode: (frame) => {
+      encodeFrame(frame);
     },
     waitForFinish: async () => {
       await encoder.flush();
-      await outputQueue;
-      await ioSynchronizer.waitForFinish(controller);
+      await ioSynchronizer.waitForQueueSize(0);
     },
     close,
     flush: async () => {
       await encoder.flush();
-    }
+    },
+    ioSynchronizer
   };
 };
 
-// src/on-video-track.ts
-var makeVideoTrackHandler = ({
-
-
+// src/reencode-video-track.ts
+var reencodeVideoTrack = async ({
+  videoOperation,
+  rotate,
+  track,
+  logLevel,
   abortConversion,
+  onMediaStateUpdate,
   controller,
-
-
-
-
-  rotate,
-  progress,
-  resizeOperation
-}) => async ({ track, container: inputContainer }) => {
-  if (controller._internals._mediaParserController._internals.signal.aborted) {
-    throw new Error("Aborted");
-  }
-  const canCopyTrack = canCopyVideoTrack({
-    inputContainer,
-    outputContainer,
-    rotationToApply: rotate,
-    inputTrack: track,
-    resizeOperation
-  });
-  const videoOperation = await (onVideoTrack ?? defaultOnVideoTrackHandler)({
-    track,
-    defaultVideoCodec: defaultVideoCodec ?? getDefaultVideoCodec({ container: outputContainer }),
-    logLevel,
-    outputContainer,
-    rotate,
-    inputContainer,
-    canCopyTrack,
-    resizeOperation
-  });
-  if (videoOperation.type === "drop") {
-    return null;
-  }
-  if (videoOperation.type === "fail") {
-    throw new Error(`Video track with ID ${track.trackId} resolved with {"type": "fail"}. This could mean that this video track could neither be copied to the output container or re-encoded. You have the option to drop the track instead of failing it: https://remotion.dev/docs/webcodecs/track-transformation`);
-  }
-  if (videoOperation.type === "copy") {
-    Log.verbose(logLevel, `Copying video track with codec ${track.codec} and timescale ${track.originalTimescale}`);
-    const videoTrack = await state.addTrack({
-      type: "video",
-      color: track.advancedColor,
-      width: track.codedWidth,
-      height: track.codedHeight,
-      codec: track.codecEnum,
-      codecPrivate: track.codecData?.data ?? null,
|
|
4897
|
-
Log.verbose(logLevel, `Copying video track with codec ${track.codec} and timescale ${track.originalTimescale}`);
|
|
4898
|
-
const videoTrack = await state.addTrack({
|
|
4899
|
-
type: "video",
|
|
4900
|
-
color: track.advancedColor,
|
|
4901
|
-
width: track.codedWidth,
|
|
4902
|
-
height: track.codedHeight,
|
|
4903
|
-
codec: track.codecEnum,
|
|
4904
|
-
codecPrivate: track.codecData?.data ?? null,
|
|
4905
|
-
timescale: track.originalTimescale
|
|
4906
|
-
});
|
|
4907
|
-
return async (sample) => {
|
|
4908
|
-
await state.addSample({
|
|
4909
|
-
chunk: sample,
|
|
4910
|
-
trackNumber: videoTrack.trackNumber,
|
|
4911
|
-
isVideo: true,
|
|
4912
|
-
codecPrivate: track.codecData?.data ?? null
|
|
4913
|
-
});
|
|
4914
|
-
onMediaStateUpdate?.((prevState) => {
|
|
4915
|
-
return {
|
|
4916
|
-
...prevState,
|
|
4917
|
-
decodedVideoFrames: prevState.decodedVideoFrames + 1
|
|
4918
|
-
};
|
|
4919
|
-
});
|
|
4920
|
-
};
|
|
4921
|
-
}
|
|
4980
|
+
onVideoFrame,
|
|
4981
|
+
state,
|
|
4982
|
+
progressTracker
|
|
4983
|
+
}) => {
|
|
4922
4984
|
if (videoOperation.type !== "reencode") {
|
|
4923
4985
|
throw new Error(`Video track with ID ${track.trackId} could not be resolved with a valid operation. Received ${JSON.stringify(videoOperation)}, but must be either "copy", "reencode", "drop" or "fail"`);
|
|
4924
4986
|
}
|
|
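`encodeFrame` now derives keyframes from the new `keyframeInterval` parameter instead of a value baked into the function; the call site in the next hunk passes `keyframeInterval: 40`. A minimal sketch of interval-based keyframe forcing with the WebCodecs `VideoEncoder` (a simplification, not the package's implementation):

```ts
// Sketch: force a keyframe every `keyframeInterval` frames.
// Assumes `encoder` has already been configured via encoder.configure().
const makeFrameEncoder = (encoder: VideoEncoder, keyframeInterval: number) => {
  let framesProcessed = 0;
  return (frame: VideoFrame) => {
    if (encoder.state === "closed") {
      return;
    }
    // Frames 0, 40, 80, ... become keyframes; everything else is a delta frame.
    const keyFrame = framesProcessed % keyframeInterval === 0;
    encoder.encode(frame, {keyFrame});
    framesProcessed++;
  };
};
```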
@@ -4981,20 +5043,47 @@ var makeVideoTrackHandler = ({
     config: videoEncoderConfig,
     logLevel,
     outputCodec: videoOperation.videoCodec,
-    progress
+    keyframeInterval: 40
   });
-  const …
-  …
-  …
-  …
+  const videoProcessingQueue = processingQueue({
+    controller,
+    label: "VideoFrame processing queue",
+    logLevel,
+    onError: (err) => {
+      abortConversion(new Error(`VideoFrame processing queue of track ${track.trackId} failed (see .cause of this error)`, {
+        cause: err
+      }));
+    },
+    onOutput: async (frame) => {
+      await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+      const processedFrame = await onFrame({
         frame,
         track,
-        videoEncoder,
         onVideoFrame,
         outputCodec: videoOperation.videoCodec,
         rotation,
         resizeOperation: videoOperation.resize ?? null
       });
+      await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+      await videoEncoder.ioSynchronizer.waitForQueueSize(10);
+      await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+      videoEncoder.encode(processedFrame);
+      processedFrame.close();
+    }
+  });
+  const frameSorter = videoFrameSorter({
+    controller,
+    onOutput: async (frame) => {
+      await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+      await videoProcessingQueue.ioSynchronizer.waitForQueueSize(10);
+      videoProcessingQueue.input(frame);
+    }
+  });
+  const videoDecoder = createVideoDecoder({
+    track: videoDecoderConfig,
+    onFrame: async (frame) => {
+      await frameSorter.waitUntilProcessed();
+      frameSorter.inputFrame(frame);
     },
     onError: (err) => {
       abortConversion(new Error(`Video decoder of track ${track.trackId} failed (see .cause of this error)`, {
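The rewritten pipeline chains decoder → frame sorter → processing queue → encoder, and each stage throttles the one upstream with `ioSynchronizer.waitForQueueSize(n)`, i.e. it waits until at most `n` items are in flight before accepting more. A counter-based sketch of that backpressure primitive (the semantics are inferred from this diff; the actual implementation is not shown here):

```ts
// Sketch of counter-based backpressure like ioSynchronizer.waitForQueueSize().
// input() increments the in-flight count, output() decrements it, and
// waiters resolve once the count is at or below their threshold.
const makeQueueCounter = () => {
  let size = 0;
  let waiters: Array<{max: number; resolve: () => void}> = [];
  const notify = () => {
    waiters = waiters.filter((w) => {
      if (size <= w.max) {
        w.resolve();
        return false; // drop satisfied waiters
      }
      return true;
    });
  };
  return {
    input: () => {
      size++;
    },
    output: () => {
      size--;
      notify();
    },
    waitForQueueSize: (max: number) =>
      size <= max
        ? Promise.resolve()
        : new Promise<void>((resolve) => {
            waiters.push({max, resolve});
          }),
  };
};
```

A producer then awaits `waitForQueueSize(10)` before handing over the next frame, which is how the diff keeps roughly ten frames in flight per stage instead of buffering unboundedly.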
@@ -5002,23 +5091,97 @@ var makeVideoTrackHandler = ({
       }));
     },
     controller,
-    logLevel,
-    progress
+    logLevel
   });
   state.addWaitForFinishPromise(async () => {
     Log.verbose(logLevel, "Waiting for video decoder to finish");
-    await videoDecoder.waitForFinish();
+    await videoDecoder.waitForQueueToBeLessThan(0);
     videoDecoder.close();
     Log.verbose(logLevel, "Video decoder finished. Waiting for encoder to finish");
+    await frameSorter.flush();
+    Log.verbose(logLevel, "Frame sorter flushed");
+    await videoProcessingQueue.ioSynchronizer.waitForQueueSize(0);
+    Log.verbose(logLevel, "Video processing queue finished");
     await videoEncoder.waitForFinish();
     videoEncoder.close();
-    Log.verbose(logLevel, "
+    Log.verbose(logLevel, "Video encoder finished");
   });
   return async (chunk) => {
-    …
+    progressTracker.setPossibleLowestTimestamp(Math.min(chunk.timestamp, chunk.decodingTimestamp ?? Infinity));
+    await controller._internals._mediaParserController._internals.checkForAbortAndPause();
+    await videoDecoder.waitForQueueToBeLessThan(10);
+    if (chunk.type === "key") {
+      await videoDecoder.flush();
+    }
+    videoDecoder.decode(chunk);
   };
 };
 
+// src/on-video-track.ts
+var makeVideoTrackHandler = ({
+  state,
+  onVideoFrame,
+  onMediaStateUpdate,
+  abortConversion,
+  controller,
+  defaultVideoCodec,
+  onVideoTrack,
+  logLevel,
+  outputContainer,
+  rotate,
+  resizeOperation,
+  progressTracker
+}) => async ({ track, container: inputContainer }) => {
+  if (controller._internals._mediaParserController._internals.signal.aborted) {
+    throw new Error("Aborted");
+  }
+  const canCopyTrack = canCopyVideoTrack({
+    inputContainer,
+    outputContainer,
+    rotationToApply: rotate,
+    inputTrack: track,
+    resizeOperation,
+    outputVideoCodec: defaultVideoCodec
+  });
+  const videoOperation = await (onVideoTrack ?? defaultOnVideoTrackHandler)({
+    track,
+    defaultVideoCodec: defaultVideoCodec ?? getDefaultVideoCodec({ container: outputContainer }),
+    logLevel,
+    outputContainer,
+    rotate,
+    inputContainer,
+    canCopyTrack,
+    resizeOperation
+  });
+  if (videoOperation.type === "drop") {
+    return null;
+  }
+  if (videoOperation.type === "fail") {
+    throw new Error(`Video track with ID ${track.trackId} resolved with {"type": "fail"}. This could mean that this video track could neither be copied to the output container or re-encoded. You have the option to drop the track instead of failing it: https://remotion.dev/docs/webcodecs/track-transformation`);
+  }
+  if (videoOperation.type === "copy") {
+    return copyVideoTrack({
+      logLevel,
+      onMediaStateUpdate,
+      state,
+      track,
+      progressTracker
+    });
+  }
+  return reencodeVideoTrack({
+    videoOperation,
+    abortConversion,
+    controller,
+    logLevel,
+    rotate,
+    track,
+    onVideoFrame,
+    state,
+    onMediaStateUpdate,
+    progressTracker
+  });
+};
+
 // src/send-telemetry-event.ts
 var import_licensing = __toESM(require_dist(), 1);
 var sendUsageEvent = async ({
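`makeVideoTrackHandler` is now a thin dispatcher: `drop` returns `null`, `fail` throws, `copy` delegates to the new `copyVideoTrack`, and everything else goes through `reencodeVideoTrack`. A simplified sketch of that four-way dispatch (types and helper names are illustrative, not the package's exports):

```ts
// Sketch of the four-way track-operation dispatch. copyTrack/reencodeTrack
// stand in for copyVideoTrack/reencodeVideoTrack from this diff.
type VideoOperation =
  | {type: "drop"}
  | {type: "fail"}
  | {type: "copy"}
  | {type: "reencode"; videoCodec: string};

type SampleHandler = (chunk: EncodedVideoChunk) => Promise<void>;

declare const copyTrack: () => Promise<SampleHandler>;
declare const reencodeTrack: (op: {videoCodec: string}) => Promise<SampleHandler>;

const resolveOperation = async (
  operation: VideoOperation,
  trackId: number,
): Promise<SampleHandler | null> => {
  if (operation.type === "drop") {
    return null; // samples for this track are discarded
  }
  if (operation.type === "fail") {
    throw new Error(`Track ${trackId} resolved with "fail"`);
  }
  if (operation.type === "copy") {
    return copyTrack(); // passthrough without re-encoding
  }
  return reencodeTrack(operation); // decode → transform → encode
};
```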
@@ -5216,6 +5379,7 @@ var convertMedia = async function({
     expectedFrameRate: expectedFrameRate ?? null
   });
   const onVideoTrack = makeVideoTrackHandler({
+    progressTracker,
     state,
     onVideoFrame: onVideoFrame ?? null,
     onMediaStateUpdate: throttledState.update ?? null,
@@ -5226,10 +5390,10 @@ var convertMedia = async function({
     logLevel,
     outputContainer: container,
     rotate: rotate ?? 0,
-    progress: progressTracker,
     resizeOperation: resize ?? null
   });
   const onAudioTrack = makeAudioTrackHandler({
+    progressTracker,
     abortConversion,
     defaultAudioCodec: audioCodec ?? null,
     controller,
@@ -5238,7 +5402,6 @@ var convertMedia = async function({
     onAudioTrack: userAudioResolver ?? null,
     logLevel,
     outputContainer: container,
-    progressTracker,
     onAudioData: onAudioData ?? null
   });
   MediaParserInternals9.internalParseMedia({