@remotion/webcodecs 4.0.297 → 4.0.300
This diff shows the content changes between publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- package/dist/audio-decoder.d.ts +4 -4
- package/dist/audio-decoder.js +8 -6
- package/dist/audio-encoder.d.ts +2 -2
- package/dist/audio-encoder.js +6 -4
- package/dist/auto-select-writer.d.ts +2 -3
- package/dist/can-copy-video-track.d.ts +2 -2
- package/dist/can-copy-video-track.js +3 -5
- package/dist/can-reencode-audio-track.d.ts +2 -2
- package/dist/can-reencode-video-track.d.ts +2 -2
- package/dist/convert-encoded-chunk.d.ts +2 -2
- package/dist/convert-media.d.ts +7 -8
- package/dist/convert-media.js +8 -8
- package/dist/create/iso-base-media/example-stts.d.ts +3 -3
- package/dist/create/iso-base-media/mp4-header.d.ts +2 -2
- package/dist/create/iso-base-media/serialize-track.d.ts +2 -2
- package/dist/create/iso-base-media/trak/mdia/minf/create-stbl.d.ts +2 -2
- package/dist/create/iso-base-media/trak/mdia/minf/stbl/create-ctts.d.ts +2 -2
- package/dist/create/iso-base-media/trak/mdia/minf/stbl/create-stco.d.ts +2 -2
- package/dist/create/iso-base-media/trak/mdia/minf/stbl/create-stsc.d.ts +2 -2
- package/dist/create/iso-base-media/trak/mdia/minf/stbl/create-stss.d.ts +2 -2
- package/dist/create/iso-base-media/trak/mdia/minf/stbl/create-stsz.d.ts +2 -2
- package/dist/create/iso-base-media/trak/mdia/minf/stbl/create-stts.d.ts +2 -2
- package/dist/create/make-track-info.d.ts +2 -2
- package/dist/create/matroska/cluster.d.ts +7 -8
- package/dist/create/matroska/color.d.ts +2 -2
- package/dist/create/matroska/color.js +83 -26
- package/dist/create/matroska/matroska-seek.d.ts +2 -2
- package/dist/create/matroska/matroska-trackentry.d.ts +2 -2
- package/dist/create/matroska/matroska-utils.d.ts +7 -3
- package/dist/create/media-fn.d.ts +4 -5
- package/dist/esm/buffer.mjs +4 -1
- package/dist/esm/index.mjs +147 -62
- package/dist/esm/web-fs.mjs +3 -1
- package/dist/index.d.ts +1 -1
- package/dist/io-manager/io-synchronizer.js +3 -3
- package/dist/log.d.ts +6 -6
- package/dist/on-audio-track-handler.d.ts +3 -3
- package/dist/on-audio-track.d.ts +3 -3
- package/dist/on-audio-track.js +5 -5
- package/dist/on-frame.d.ts +2 -2
- package/dist/on-video-track-handler.d.ts +3 -3
- package/dist/on-video-track.d.ts +3 -3
- package/dist/on-video-track.js +6 -6
- package/dist/resizing/calculate-new-size.d.ts +4 -4
- package/dist/resizing/mode.d.ts +0 -4
- package/dist/rotation.d.ts +3 -3
- package/dist/sort-video-frames.js +3 -3
- package/dist/video-decoder.d.ts +3 -3
- package/dist/video-decoder.js +6 -5
- package/dist/video-encoder.d.ts +2 -2
- package/dist/video-encoder.js +6 -4
- package/dist/wav-audio-encoder.js +1 -1
- package/dist/webcodecs-controller.d.ts +1 -3
- package/dist/webcodecs-controller.js +1 -2
- package/dist/writers/buffer-implementation/writer.d.ts +2 -2
- package/dist/writers/buffer-implementation/writer.js +2 -4
- package/dist/writers/buffer.d.ts +2 -2
- package/dist/writers/web-fs.d.ts +2 -2
- package/dist/writers/web-fs.js +1 -1
- package/package.json +5 -5
package/dist/create/matroska/matroska-utils.d.ts
CHANGED
@@ -1,5 +1,8 @@
-import type {
+import type { _InternalEbmlValue, MediaParserInternalTypes } from '@remotion/media-parser';
 import { MediaParserInternals } from '@remotion/media-parser';
+type Prettify<T> = {
+[K in keyof T]: T[K];
+} & {};
 export declare const getIdForName: (name: string) => EbmlMapKey;
 export declare const combineUint8Arrays: (arrays: Uint8Array[]) => Uint8Array<ArrayBufferLike>;
 export type OffsetAndChildren = {
@@ -13,8 +16,8 @@ export type BytesAndOffset = {
 bytes: Uint8Array;
 offsets: OffsetAndChildren;
 };
-export type EbmlValueOrUint8Array<T extends Ebml> = Uint8Array |
-export type EbmlParsedOrUint8Array<T extends Ebml> = {
+export type EbmlValueOrUint8Array<T extends MediaParserInternalTypes['Ebml']> = Uint8Array | _InternalEbmlValue<T, PossibleEbmlOrUint8Array>;
+export type EbmlParsedOrUint8Array<T extends MediaParserInternalTypes['Ebml']> = {
 type: T['name'];
 value: EbmlValueOrUint8Array<T>;
 minVintWidth: number | null;
@@ -28,3 +31,4 @@ export type PossibleEbmlOrUint8Array = Prettify<{
 export type EbmlMapKey = keyof typeof MediaParserInternals.ebmlMap;
 export declare const padMatroskaBytes: (fields: PossibleEbmlOrUint8Array, totalLength: number) => BytesAndOffset[];
 export declare function serializeUint16(value: number): Uint8Array;
+export {};
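The `Prettify<T>` helper added above is the common mapped-type trick for flattening an intersection into a single object type in editor hovers; it changes neither runtime behavior nor assignability. A minimal illustrative sketch (the `WithOffsets`/`Flat` names below are not part of the package):

```ts
// Illustrative only: Prettify re-maps the keys of T so intersections display as one flat object.
type Prettify<T> = {
  [K in keyof T]: T[K];
} & {};

type WithOffsets = {bytes: Uint8Array} & {offset: number};

// Hovering Flat shows {bytes: Uint8Array; offset: number} rather than the raw intersection.
type Flat = Prettify<WithOffsets>;
```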
package/dist/create/media-fn.d.ts
CHANGED
@@ -1,12 +1,11 @@
-import type {
-import type { LogLevel } from '../log';
+import type { MediaParserAudioSample, MediaParserInternalTypes, MediaParserLogLevel, MediaParserVideoSample } from '@remotion/media-parser';
 import type { MakeTrackAudio, MakeTrackVideo } from './make-track-info';
 import type { ProgressTracker } from './progress-tracker';
 export type MediaFn = {
 getBlob: () => Promise<Blob>;
 remove: () => Promise<void>;
 addSample: (options: {
-chunk:
+chunk: MediaParserAudioSample | MediaParserVideoSample;
 trackNumber: number;
 isVideo: boolean;
 codecPrivate: Uint8Array | null;
@@ -22,10 +21,10 @@ export type MediaFn = {
 }) => void;
 };
 export type MediaFnGeneratorInput = {
-writer: WriterInterface;
+writer: MediaParserInternalTypes['WriterInterface'];
 onBytesProgress: (totalBytes: number) => void;
 onMillisecondsProgress: (totalMilliseconds: number) => void;
-logLevel:
+logLevel: MediaParserLogLevel;
 filename: string;
 progressTracker: ProgressTracker;
 expectedDurationInSeconds: number | null;
package/dist/esm/buffer.mjs
CHANGED
package/dist/esm/index.mjs
CHANGED
@@ -93,7 +93,9 @@ var require_dist = __commonJS((exports) => {
 });

 // src/writers/web-fs.ts
-var createContent = async ({
+var createContent = async ({
+filename
+}) => {
 const directoryHandle = await navigator.storage.getDirectory();
 const actualFilename = `__remotion_mediaparser:${filename}`;
 const remove = async () => {
@@ -169,7 +171,10 @@ var canUseWebFsWriter = async () => {
 };

 // src/writers/buffer-implementation/writer.ts
-var createContent2 = ({
+var createContent2 = ({
+filename,
+mimeType
+}) => {
 const buf = new ArrayBuffer(0, {
 maxByteLength: 2000000000
 });
@@ -645,7 +650,7 @@ var makeIoSynchronizer = ({
 minimumProgress,
 controller
 }) => {
-await controller._internals.checkForAbortAndPause();
+await controller._internals._mediaParserController._internals.checkForAbortAndPause();
 const { timeoutPromise, clear } = makeTimeoutPromise({
 label: () => [
 `Waited too long for ${label} to finish:`,
@@ -661,7 +666,7 @@ var makeIoSynchronizer = ({
 ms: 1e4,
 controller
 });
-controller._internals.signal.addEventListener("abort", clear);
+controller._internals._mediaParserController._internals.signal.addEventListener("abort", clear);
 await Promise.race([
 timeoutPromise,
 Promise.all([
@@ -682,7 +687,7 @@ var makeIoSynchronizer = ({
 })()
 ])
 ]).finally(() => clear());
-controller._internals.signal.removeEventListener("abort", clear);
+controller._internals._mediaParserController._internals.signal.removeEventListener("abort", clear);
 };
 const waitForFinish = async (controller) => {
 await waitFor({
@@ -716,7 +721,7 @@ var createAudioDecoder = ({
 track,
 progressTracker
 }) => {
-if (controller._internals.signal.aborted) {
+if (controller._internals._mediaParserController._internals.signal.aborted) {
 throw new Error("Not creating audio decoder, already aborted");
 }
 if (config.codec === "pcm-s16") {
@@ -734,17 +739,17 @@ var createAudioDecoder = ({
 const abortHandler = () => {
 frame.close();
 };
-controller._internals.signal.addEventListener("abort", abortHandler, {
+controller._internals._mediaParserController._internals.signal.addEventListener("abort", abortHandler, {
 once: true
 });
 outputQueue = outputQueue.then(() => {
-if (controller._internals.signal.aborted) {
+if (controller._internals._mediaParserController._internals.signal.aborted) {
 return;
 }
 return onFrame(frame);
 }).then(() => {
 ioSynchronizer.onProcessed();
-controller._internals.signal.removeEventListener("abort", abortHandler);
+controller._internals._mediaParserController._internals.signal.removeEventListener("abort", abortHandler);
 return Promise.resolve();
 }).catch((err) => {
 frame.close();
@@ -756,7 +761,7 @@ var createAudioDecoder = ({
 }
 });
 const close = () => {
-controller._internals.signal.removeEventListener("abort", onAbort);
+controller._internals._mediaParserController._internals.signal.removeEventListener("abort", onAbort);
 if (audioDecoder.state === "closed") {
 return;
 }
@@ -765,7 +770,7 @@ var createAudioDecoder = ({
 const onAbort = () => {
 close();
 };
-controller._internals.signal.addEventListener("abort", onAbort);
+controller._internals._mediaParserController._internals.signal.addEventListener("abort", onAbort);
 audioDecoder.configure(config);
 const processSample = async (audioSample) => {
 if (audioDecoder.state === "closed") {
@@ -805,7 +810,9 @@ var createAudioDecoder = ({
 };
 };
 // src/audio-encoder.ts
-import {
+import {
+MediaParserAbortError
+} from "@remotion/media-parser";

 // src/audio-data/data-types.ts
 var getDataTypeForAudioFormat = (format) => {
@@ -928,7 +935,7 @@ var getWaveAudioEncoder = ({
 return Promise.resolve();
 },
 encodeFrame: (unconvertedAudioData) => {
-if (controller._internals.signal.aborted) {
+if (controller._internals._mediaParserController._internals.signal.aborted) {
 return Promise.resolve();
 }
 const audioData = convertAudioData({
@@ -962,7 +969,7 @@ var createAudioEncoder = ({
 onNewAudioSampleRate,
 progressTracker
 }) => {
-if (controller._internals.signal.aborted) {
+if (controller._internals._mediaParserController._internals.signal.aborted) {
 throw new MediaParserAbortError("Not creating audio encoder, already aborted");
 }
 if (codec === "wav") {
@@ -982,7 +989,7 @@ var createAudioEncoder = ({
 output: (chunk) => {
 ioSynchronizer.onOutput(chunk.timestamp);
 prom = prom.then(() => {
-if (controller._internals.signal.aborted) {
+if (controller._internals._mediaParserController._internals.signal.aborted) {
 return;
 }
 return onChunk(chunk);
@@ -998,7 +1005,7 @@ var createAudioEncoder = ({
 }
 });
 const close = () => {
-controller._internals.signal.removeEventListener("abort", onAbort);
+controller._internals._mediaParserController._internals.signal.removeEventListener("abort", onAbort);
 if (encoder.state === "closed") {
 return;
 }
@@ -1007,7 +1014,7 @@ var createAudioEncoder = ({
 const onAbort = () => {
 close();
 };
-controller._internals.signal.addEventListener("abort", onAbort);
+controller._internals._mediaParserController._internals.signal.addEventListener("abort", onAbort);
 if (codec !== "opus" && codec !== "aac") {
 throw new Error('Only `codec: "opus"` and `codec: "aac"` is supported currently');
 }
@@ -1089,17 +1096,17 @@ var canCopyVideoTrack = ({
 height: inputTrack.height,
 resizeOperation,
 rotation: rotationToApply,
-videoCodec: inputTrack.
+videoCodec: inputTrack.codecEnum,
 width: inputTrack.width
 });
 if (newDimensions.height !== inputTrack.height || newDimensions.width !== inputTrack.width) {
 return false;
 }
 if (outputContainer === "webm") {
-return inputTrack.
+return inputTrack.codecEnum === "vp8" || inputTrack.codecEnum === "vp9";
 }
 if (outputContainer === "mp4") {
-return (inputTrack.
+return (inputTrack.codecEnum === "h264" || inputTrack.codecEnum === "h265") && (inputContainer === "mp4" || inputContainer === "avi" || inputContainer === "m3u8" && inputTrack.m3uStreamFormat === "mp4");
 }
 if (outputContainer === "wav") {
 return false;
@@ -1815,7 +1822,7 @@ var makeAudioTrackHandler = ({
 onAudioData
 }) => async ({ track, container: inputContainer }) => {
 const canCopyTrack = canCopyAudioTrack({
-inputCodec: track.
+inputCodec: track.codecEnum,
 outputContainer,
 inputContainer
 });
@@ -1836,19 +1843,19 @@ var makeAudioTrackHandler = ({
 if (audioOperation.type === "copy") {
 const addedTrack = await state.addTrack({
 type: "audio",
-codec: track.
+codec: track.codecEnum,
 numberOfChannels: track.numberOfChannels,
 sampleRate: track.sampleRate,
-codecPrivate: track.
+codecPrivate: track.codecData?.data ?? null,
 timescale: track.timescale
 });
-Log.verbose(logLevel, `Copying audio track ${track.trackId} as track ${addedTrack.trackNumber}. Timescale = ${track.timescale}, codec = ${track.
+Log.verbose(logLevel, `Copying audio track ${track.trackId} as track ${addedTrack.trackNumber}. Timescale = ${track.timescale}, codec = ${track.codecEnum} (${track.codec}) `);
 return async (audioSample) => {
 await state.addSample({
 chunk: audioSample,
 trackNumber: addedTrack.trackNumber,
 isVideo: false,
-codecPrivate: track.
+codecPrivate: track.codecData?.data ?? null
 });
 onMediaStateUpdate?.((prevState) => {
 return {
@@ -2123,7 +2130,7 @@ var videoFrameSorter = ({
 }) => {
 const frames = [];
 const releaseFrame = async () => {
-await controller._internals.checkForAbortAndPause();
+await controller._internals._mediaParserController._internals.checkForAbortAndPause();
 const frame = frames.shift();
 if (frame) {
 await onRelease(frame);
@@ -2159,9 +2166,9 @@ var videoFrameSorter = ({
 while (frames.length > 0) {
 await releaseFrame();
 }
-controller._internals.signal.removeEventListener("abort", onAbort);
+controller._internals._mediaParserController._internals.signal.removeEventListener("abort", onAbort);
 };
-controller._internals.signal.addEventListener("abort", onAbort);
+controller._internals._mediaParserController._internals.signal.addEventListener("abort", onAbort);
 return {
 inputFrame,
 flush
@@ -2187,11 +2194,11 @@ var createVideoDecoder = ({
 const cleanup = () => {
 frame.close();
 };
-controller._internals.signal.addEventListener("abort", cleanup, {
+controller._internals._mediaParserController._internals.signal.addEventListener("abort", cleanup, {
 once: true
 });
 outputQueue = outputQueue.then(() => {
-if (controller._internals.signal.aborted) {
+if (controller._internals._mediaParserController._internals.signal.aborted) {
 return;
 }
 return onFrame2(frame);
@@ -2200,7 +2207,7 @@ var createVideoDecoder = ({
 }).catch((err) => {
 onError(err);
 }).finally(() => {
-controller._internals.signal.removeEventListener("abort", cleanup);
+controller._internals._mediaParserController._internals.signal.removeEventListener("abort", cleanup);
 cleanup();
 });
 return outputQueue;
@@ -2221,7 +2228,7 @@ var createVideoDecoder = ({
 }
 });
 const close = () => {
-controller._internals.signal.removeEventListener("abort", onAbort);
+controller._internals._mediaParserController._internals.signal.removeEventListener("abort", onAbort);
 if (videoDecoder.state === "closed") {
 return;
 }
@@ -2230,7 +2237,7 @@ var createVideoDecoder = ({
 const onAbort = () => {
 close();
 };
-controller._internals.signal.addEventListener("abort", onAbort);
+controller._internals._mediaParserController._internals.signal.addEventListener("abort", onAbort);
 videoDecoder.configure(config);
 const processSample = async (sample) => {
 if (videoDecoder.state === "closed") {
@@ -2275,7 +2282,9 @@ var createVideoDecoder = ({
 };

 // src/video-encoder.ts
-import {
+import {
+MediaParserAbortError as MediaParserAbortError2
+} from "@remotion/media-parser";
 var createVideoEncoder = ({
 onChunk,
 onError,
@@ -2285,7 +2294,7 @@ var createVideoEncoder = ({
 outputCodec,
 progress
 }) => {
-if (controller._internals.signal.aborted) {
+if (controller._internals._mediaParserController._internals.signal.aborted) {
 throw new MediaParserAbortError2("Not creating video encoder, already aborted");
 }
 const ioSynchronizer = makeIoSynchronizer({
@@ -2302,7 +2311,7 @@ var createVideoEncoder = ({
 const timestamp = chunk.timestamp + (chunk.duration ?? 0);
 ioSynchronizer.onOutput(timestamp);
 outputQueue = outputQueue.then(() => {
-if (controller._internals.signal.aborted) {
+if (controller._internals._mediaParserController._internals.signal.aborted) {
 return;
 }
 return onChunk(chunk, metadata ?? null);
@@ -2315,7 +2324,7 @@ var createVideoEncoder = ({
 }
 });
 const close = () => {
-controller._internals.signal.removeEventListener("abort", onAbort);
+controller._internals._mediaParserController._internals.signal.removeEventListener("abort", onAbort);
 if (encoder.state === "closed") {
 return;
 }
@@ -2324,7 +2333,7 @@ var createVideoEncoder = ({
 const onAbort = () => {
 close();
 };
-controller._internals.signal.addEventListener("abort", onAbort);
+controller._internals._mediaParserController._internals.signal.addEventListener("abort", onAbort);
 Log.verbose(logLevel, "Configuring video encoder", config);
 encoder.configure(config);
 let framesProcessed = 0;
@@ -2385,7 +2394,7 @@ var makeVideoTrackHandler = ({
 progress,
 resizeOperation
 }) => async ({ track, container: inputContainer }) => {
-if (controller._internals.signal.aborted) {
+if (controller._internals._mediaParserController._internals.signal.aborted) {
 throw new Error("Aborted");
 }
 const canCopyTrack = canCopyVideoTrack({
@@ -2415,11 +2424,11 @@ var makeVideoTrackHandler = ({
 Log.verbose(logLevel, `Copying video track with codec ${track.codec} and timescale ${track.timescale}`);
 const videoTrack = await state.addTrack({
 type: "video",
-color: track.
+color: track.advancedColor,
 width: track.codedWidth,
 height: track.codedHeight,
-codec: track.
-codecPrivate: track.
+codec: track.codecEnum,
+codecPrivate: track.codecData?.data ?? null,
 timescale: track.timescale
 });
 return async (sample) => {
@@ -2427,7 +2436,7 @@ var makeVideoTrackHandler = ({
 chunk: sample,
 trackNumber: videoTrack.trackNumber,
 isVideo: true,
-codecPrivate: track.
+codecPrivate: track.codecData?.data ?? null
 });
 onMediaStateUpdate?.((prevState) => {
 return {
@@ -2467,7 +2476,7 @@ var makeVideoTrackHandler = ({
 }
 const { trackNumber } = await state.addTrack({
 type: "video",
-color: track.
+color: track.advancedColor,
 width: newWidth,
 height: newHeight,
 codec: videoOperation.videoCodec,
@@ -4331,24 +4340,101 @@ var createMatroskaSegment = (children) => {
 };

 // src/create/matroska/color.ts
-var
+var getRangeValue = ({
 transferCharacteristics,
 matrixCoefficients,
+fullRange
+}) => {
+return transferCharacteristics && matrixCoefficients ? 3 : fullRange === true ? 2 : fullRange === false ? 1 : 0;
+};
+var getPrimariesValue = (primaries) => {
+if (primaries === null) {
+return null;
+}
+if (primaries === "bt709") {
+return 1;
+}
+if (primaries === "bt470bg") {
+return 5;
+}
+if (primaries === "smpte170m") {
+return 6;
+}
+if (primaries === "bt2020") {
+return 9;
+}
+if (primaries === "smpte432") {
+return 12;
+}
+throw new Error("Unknown primaries " + primaries);
+};
+var getTransferCharacteristicsValue = (transferCharacteristics) => {
+if (transferCharacteristics === null) {
+return null;
+}
+if (transferCharacteristics === "bt709") {
+return 1;
+}
+if (transferCharacteristics === "smpte170m") {
+return 6;
+}
+if (transferCharacteristics === "iec61966-2-1") {
+return 13;
+}
+if (transferCharacteristics === "linear") {
+return 8;
+}
+if (transferCharacteristics === "pq") {
+return 16;
+}
+if (transferCharacteristics === "hlg") {
+return 18;
+}
+throw new Error("Unknown transfer characteristics " + transferCharacteristics);
+};
+var getMatrixCoefficientsValue = (matrixCoefficients) => {
+if (matrixCoefficients === null) {
+return null;
+}
+if (matrixCoefficients === "rgb") {
+return 0;
+}
+if (matrixCoefficients === "bt709") {
+return 1;
+}
+if (matrixCoefficients === "bt470bg") {
+return 5;
+}
+if (matrixCoefficients === "smpte170m") {
+return 6;
+}
+if (matrixCoefficients === "bt2020-ncl") {
+return 9;
+}
+throw new Error("Unknown matrix coefficients " + matrixCoefficients);
+};
+var makeMatroskaColorBytes = ({
+transfer: transferCharacteristics,
+matrix: matrixCoefficients,
 primaries,
 fullRange
 }) => {
-const rangeValue =
-
-
+const rangeValue = getRangeValue({
+transferCharacteristics,
+matrixCoefficients,
+fullRange
+});
+const primariesValue = getPrimariesValue(primaries);
+const transferChracteristicsValue = getTransferCharacteristicsValue(transferCharacteristics);
 if (matrixCoefficients === "rgb") {
 throw new Error("Cannot encode Matroska in RGB");
 }
-const matrixCoefficientsValue = matrixCoefficients
+const matrixCoefficientsValue = getMatrixCoefficientsValue(matrixCoefficients);
 return makeMatroskaBytes({
 type: "Colour",
 minVintWidth: null,
 value: [
-transferChracteristicsValue ===
+transferChracteristicsValue === null ? null : {
 type: "TransferCharacteristics",
 value: {
 value: transferChracteristicsValue,
@@ -4356,7 +4442,7 @@ var makeMatroskaColorBytes = ({
 },
 minVintWidth: null
 },
-matrixCoefficientsValue ===
+matrixCoefficientsValue === null ? null : {
 type: "MatrixCoefficients",
 value: {
 value: matrixCoefficientsValue,
@@ -4364,7 +4450,7 @@ var makeMatroskaColorBytes = ({
 },
 minVintWidth: null
 },
-primariesValue ===
+primariesValue === null ? null : {
 type: "Primaries",
 value: {
 value: primariesValue,
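The rewritten `src/create/matroska/color.ts` above replaces inline expressions with small lookup helpers that translate parsed color metadata (primaries, transfer characteristics, matrix coefficients, range) into the integer codes stored in the Matroska `Colour` element, returning `null` when a value is absent so the corresponding child element is omitted. A rough table-based sketch of the same mapping idea, using only codes visible in the diff (not the package's actual, unexported helpers):

```ts
// Illustrative equivalent of getPrimariesValue above; codes taken from the diff.
const primariesCodes: Record<string, number> = {
  bt709: 1,
  bt470bg: 5,
  smpte170m: 6,
  bt2020: 9,
  smpte432: 12,
};

const toPrimariesCode = (primaries: string | null): number | null => {
  if (primaries === null) {
    return null; // no Primaries child element gets written
  }
  const code = primariesCodes[primaries];
  if (code === undefined) {
    throw new Error('Unknown primaries ' + primaries);
  }
  return code;
};

// toPrimariesCode('bt709') === 1; toPrimariesCode(null) === null
```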
@@ -5015,8 +5101,7 @@ var webcodecsController = () => {
 resume: controller.resume,
 addEventListener: controller.addEventListener,
 removeEventListener: controller.removeEventListener,
-_internals: controller
-_mediaParserController: controller
+_internals: { _mediaParserController: controller }
 };
 };

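Most edits in this bundle follow from the reshaped return value of `webcodecsController()` shown above: `_internals` now wraps the underlying media-parser controller as `{ _mediaParserController: controller }`, so internal reads of the abort signal become `controller._internals._mediaParserController._internals.signal` instead of `controller._internals.signal`. A type-level sketch of that nesting, reconstructed from the hunks (the type names are made up for illustration, and `_internals` is private API, not meant to be consumed directly):

```ts
// Illustrative only; not a public type of @remotion/webcodecs.
type AbortableInternals = {
  signal: AbortSignal;
  checkForAbortAndPause: () => Promise<void>;
};

type WebCodecsControllerInternals = {
  _mediaParserController: {
    _internals: AbortableInternals;
  };
};

declare const controller: {_internals: WebCodecsControllerInternals};

// 4.0.297 read controller._internals.signal; 4.0.300 reads the nested path:
const signal: AbortSignal =
  controller._internals._mediaParserController._internals.signal;
```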
@@ -5063,7 +5148,7 @@ var convertMedia = async function({
 onSlowKeyframes,
 onSlowNumberOfFrames,
 onSlowVideoBitrate,
-
+onSlowStructure,
 onTracks,
 onUnrotatedDimensions,
 onVideoCodec,
@@ -5075,7 +5160,7 @@ var convertMedia = async function({
 seekingHints,
 ...more
 }) {
-if (controller._internals.signal.aborted) {
+if (controller._internals._mediaParserController._internals.signal.aborted) {
 return Promise.reject(new MediaParserAbortError3("Aborted"));
 }
 if (availableContainers.indexOf(container) === -1) {
@@ -5087,19 +5172,19 @@ var convertMedia = async function({
 const { resolve, reject, getPromiseToImmediatelyReturn } = withResolversAndWaitForReturn();
 const abortConversion = (errCause) => {
 reject(errCause);
-if (!controller._internals.signal.aborted) {
+if (!controller._internals._mediaParserController._internals.signal.aborted) {
 controller.abort();
 }
 };
 const onUserAbort = () => {
 abortConversion(new MediaParserAbortError3("Conversion aborted by user"));
 };
-controller._internals.signal.addEventListener("abort", onUserAbort);
+controller._internals._mediaParserController._internals.signal.addEventListener("abort", onUserAbort);
 const creator = selectContainerCreator(container);
 const throttledState = throttledStateUpdate({
 updateFn: onProgressDoNotCallDirectly ?? null,
 everyMilliseconds: progressIntervalInMs ?? 100,
-signal: controller._internals.signal
+signal: controller._internals._mediaParserController._internals.signal
 });
 const progressTracker = makeProgressTracker();
 const state = await creator({
@@ -5164,7 +5249,7 @@ var convertMedia = async function({
 src,
 onVideoTrack,
 onAudioTrack,
-controller: controller._mediaParserController,
+controller: controller._internals._mediaParserController,
 fields: {
 ...fields,
 durationInSeconds: true
@@ -5219,7 +5304,7 @@ var convertMedia = async function({
 onSlowKeyframes: onSlowKeyframes ?? null,
 onSlowNumberOfFrames: onSlowNumberOfFrames ?? null,
 onSlowVideoBitrate: onSlowVideoBitrate ?? null,
-
+onSlowStructure: onSlowStructure ?? null,
 onTracks: onTracks ?? null,
 onUnrotatedDimensions: onUnrotatedDimensions ?? null,
 onVideoCodec: onVideoCodec ?? null,
@@ -5251,7 +5336,7 @@ var convertMedia = async function({
 throttledState.stopAndGetLastProgress();
 });
 return getPromiseToImmediatelyReturn().finally(() => {
-controller._internals.signal.removeEventListener("abort", onUserAbort);
+controller._internals._mediaParserController._internals.signal.removeEventListener("abort", onUserAbort);
 });
 };
 // src/get-available-audio-codecs.ts
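The `convertMedia()` hunks above also show a new `onSlowStructure` callback being destructured from the options and forwarded as `onSlowStructure ?? null` next to the other `onSlow*` callbacks. A hedged usage sketch based only on option names visible in this diff (placeholder URL; other options and the callback's exact parameters are omitted, see the docs for the full signature):

```ts
import {convertMedia, webcodecsController} from '@remotion/webcodecs';

const controller = webcodecsController();

await convertMedia({
  src: 'https://example.com/input.mp4', // placeholder
  container: 'webm',
  controller,
  onSlowStructure: () => {
    // One of the onSlow* callbacks forwarded by convertMedia(); fired once the
    // file's structure (a "slow" field) has been parsed.
  },
});
```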
package/dist/esm/web-fs.mjs
CHANGED
@@ -1,5 +1,7 @@
 // src/writers/web-fs.ts
-var createContent = async ({
+var createContent = async ({
+filename
+}) => {
 const directoryHandle = await navigator.storage.getDirectory();
 const actualFilename = `__remotion_mediaparser:${filename}`;
 const remove = async () => {
package/dist/index.d.ts
CHANGED
@@ -42,5 +42,5 @@ export declare const WebCodecsInternals: {
 rotation: number;
 resizeOperation: import("./resizing/mode").ResizeOperation | null;
 videoCodec: import("./get-available-video-codecs").ConvertMediaVideoCodec | import("@remotion/media-parser").MediaParserVideoCodec;
-}) => import("
+}) => import("@remotion/media-parser").MediaParserDimensions;
 };
package/dist/io-manager/io-synchronizer.js
CHANGED
@@ -70,7 +70,7 @@ const makeIoSynchronizer = ({ logLevel, label, progress, }) => {
 return promise;
 };
 const waitFor = async ({ unprocessed, unemitted, minimumProgress, controller, }) => {
-await controller._internals.checkForAbortAndPause();
+await controller._internals._mediaParserController._internals.checkForAbortAndPause();
 const { timeoutPromise, clear } = (0, make_timeout_promise_1.makeTimeoutPromise)({
 label: () => [
 `Waited too long for ${label} to finish:`,
@@ -85,7 +85,7 @@ const makeIoSynchronizer = ({ logLevel, label, progress, }) => {
 ms: 10000,
 controller,
 });
-controller._internals.signal.addEventListener('abort', clear);
+controller._internals._mediaParserController._internals.signal.addEventListener('abort', clear);
 await Promise.race([
 timeoutPromise,
 Promise.all([
@@ -108,7 +108,7 @@ const makeIoSynchronizer = ({ logLevel, label, progress, }) => {
 })(),
 ]),
 ]).finally(() => clear());
-controller._internals.signal.removeEventListener('abort', clear);
+controller._internals._mediaParserController._internals.signal.removeEventListener('abort', clear);
 };
 const waitForFinish = async (controller) => {
 await waitFor({
package/dist/log.d.ts
CHANGED
@@ -1,10 +1,10 @@
-import type {
+import type { MediaParserLogLevel } from '@remotion/media-parser';
 declare const Log: {
-trace: (logLevel:
-verbose: (logLevel:
-info: (logLevel:
-warn: (logLevel:
+trace: (logLevel: MediaParserLogLevel, message?: any, ...optionalParams: any[]) => void;
+verbose: (logLevel: MediaParserLogLevel, message?: any, ...optionalParams: any[]) => void;
+info: (logLevel: MediaParserLogLevel, message?: any, ...optionalParams: any[]) => void;
+warn: (logLevel: MediaParserLogLevel, message?: any, ...optionalParams: any[]) => void;
 error: (message?: any, ...optionalParams: any[]) => void;
 };
 export { Log };
-export type { LogLevel };
+export type { MediaParserLogLevel as LogLevel };