@remotion/webcodecs 4.0.344 → 4.0.345
- package/dist/audio-decoder.d.ts +24 -0
- package/dist/audio-decoder.js +106 -0
- package/dist/create-audio-decoder.js +10 -0
- package/dist/create-frames.d.ts +8 -0
- package/dist/create-frames.js +69 -0
- package/dist/esm/index.mjs +31 -22
- package/dist/get-wave-audio-decoder.d.ts +1 -1
- package/dist/get-wave-audio-decoder.js +23 -22
- package/dist/video-decoder.d.ts +23 -0
- package/dist/video-decoder.js +80 -0
- package/package.json +6 -6

package/dist/audio-decoder.d.ts
ADDED
@@ -0,0 +1,24 @@
+import type { MediaParserLogLevel } from '@remotion/media-parser';
+import type { WebCodecsController } from './webcodecs-controller';
+export type WebCodecsAudioDecoder = {
+    decode: (audioSample: EncodedAudioChunkInit | EncodedAudioChunk) => void;
+    close: () => void;
+    flush: () => Promise<void>;
+    waitForFinish: () => Promise<void>;
+    waitForQueueToBeLessThan: (items: number) => Promise<void>;
+};
+export type CreateAudioDecoderInit = {
+    onFrame: (frame: AudioData) => Promise<void> | void;
+    onError: (error: Error) => void;
+    controller: WebCodecsController | null;
+    config: AudioDecoderConfig;
+    logLevel: MediaParserLogLevel;
+};
+export declare const internalCreateAudioDecoder: ({ onFrame, onError, controller, config, logLevel, }: CreateAudioDecoderInit) => WebCodecsAudioDecoder;
+export declare const createAudioDecoder: ({ onFrame, onError, controller, track, logLevel, }: {
+    track: AudioDecoderConfig;
+    onFrame: (frame: AudioData) => Promise<void> | void;
+    onError: (error: Error) => void;
+    controller?: WebCodecsController | null;
+    logLevel?: MediaParserLogLevel;
+}) => WebCodecsAudioDecoder;
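
For orientation, here is a minimal usage sketch of the new createAudioDecoder export, based only on the type declarations above. The parseMedia wiring and the source URL are illustrative assumptions rather than part of this diff, and the parsed audio track is assumed to be usable as the AudioDecoderConfig that the signature expects.

import {parseMedia} from '@remotion/media-parser';
import {createAudioDecoder} from '@remotion/webcodecs';

// Sketch: decode every audio sample of a file into AudioData frames.
await parseMedia({
  src: 'https://example.com/audio.webm', // placeholder URL
  acknowledgeRemotionLicense: true,
  onAudioTrack: ({track}) => {
    const decoder = createAudioDecoder({
      track, // assumed to satisfy AudioDecoderConfig
      onFrame: (frame) => {
        // Consume the decoded AudioData, then release it
        frame.close();
      },
      onError: (err) => console.error(err),
    });
    // Feed each encoded sample to the decoder
    return async (sample) => {
      decoder.decode(sample);
    };
  },
});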

package/dist/audio-decoder.js
ADDED
@@ -0,0 +1,106 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.createAudioDecoder = exports.internalCreateAudioDecoder = void 0;
+const get_wave_audio_decoder_1 = require("./get-wave-audio-decoder");
+const io_synchronizer_1 = require("./io-manager/io-synchronizer");
+const internalCreateAudioDecoder = ({ onFrame, onError, controller, config, logLevel, }) => {
+    if (controller &&
+        controller._internals._mediaParserController._internals.signal.aborted) {
+        throw new Error('Not creating audio decoder, already aborted');
+    }
+    const ioSynchronizer = (0, io_synchronizer_1.makeIoSynchronizer)({
+        logLevel,
+        label: 'Audio decoder',
+        controller,
+    });
+    if (config.codec === 'pcm-s16') {
+        return (0, get_wave_audio_decoder_1.getWaveAudioDecoder)({
+            onFrame,
+            config,
+            sampleFormat: 's16',
+            logLevel,
+            ioSynchronizer,
+            onError,
+        });
+    }
+    const audioDecoder = new AudioDecoder({
+        async output(frame) {
+            try {
+                await onFrame(frame);
+            }
+            catch (err) {
+                frame.close();
+                onError(err);
+            }
+            ioSynchronizer.onOutput(frame.timestamp + (frame.duration ?? 0));
+        },
+        error(error) {
+            onError(error);
+        },
+    });
+    const close = () => {
+        if (controller) {
+            controller._internals._mediaParserController._internals.signal.removeEventListener('abort',
+            // eslint-disable-next-line @typescript-eslint/no-use-before-define
+            onAbort);
+        }
+        if (audioDecoder.state === 'closed') {
+            return;
+        }
+        audioDecoder.close();
+    };
+    const onAbort = () => {
+        close();
+    };
+    if (controller) {
+        controller._internals._mediaParserController._internals.signal.addEventListener('abort', onAbort);
+    }
+    audioDecoder.configure(config);
+    const processSample = (audioSample) => {
+        if (audioDecoder.state === 'closed') {
+            return;
+        }
+        // Don't flush, it messes up the audio
+        const chunk = audioSample instanceof EncodedAudioChunk
+            ? audioSample
+            : new EncodedAudioChunk(audioSample);
+        audioDecoder.decode(chunk);
+        // https://test-streams.mux.dev/x36xhzz/url_0/url_525/193039199_mp4_h264_aac_hd_7.ts
+        // has a 16 byte audio sample at the end which chrome does not decode
+        // Might be empty audio
+        // For now only reporting chunks that are bigger than that
+        // 16 was chosen arbitrarily, can be improved
+        if (chunk.byteLength > 16) {
+            ioSynchronizer.inputItem(chunk.timestamp);
+        }
+    };
+    return {
+        decode: (sample) => {
+            processSample(sample);
+        },
+        waitForFinish: async () => {
+            // Firefox might throw "Needs to be configured first"
+            try {
+                await audioDecoder.flush();
+            }
+            catch { }
+            await ioSynchronizer.waitForFinish();
+        },
+        close,
+        flush: async () => {
+            await audioDecoder.flush();
+        },
+        waitForQueueToBeLessThan: ioSynchronizer.waitForQueueSize,
+    };
+};
+exports.internalCreateAudioDecoder = internalCreateAudioDecoder;
+const createAudioDecoder = ({ onFrame, onError, controller, track, logLevel, }) => {
+    return (0, exports.internalCreateAudioDecoder)({
+        onFrame,
+        onError,
+        controller: controller ?? null,
+        config: track,
+        logLevel: logLevel ?? 'error',
+    });
+};
+exports.createAudioDecoder = createAudioDecoder;

package/dist/create-audio-decoder.js
CHANGED
@@ -26,6 +26,16 @@ const internalCreateAudioDecoder = async ({ onFrame, onError, controller, config
             onError,
         });
     }
+    if (config.codec === 'pcm-s24') {
+        return (0, get_wave_audio_decoder_1.getWaveAudioDecoder)({
+            onFrame,
+            config,
+            sampleFormat: 's24',
+            logLevel,
+            ioSynchronizer,
+            onError,
+        });
+    }
     const audioDecoder = new AudioDecoder({
         async output(frame) {
             try {

package/dist/create-frames.js
ADDED
@@ -0,0 +1,69 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.extractFrames = void 0;
+const media_parser_1 = require("@remotion/media-parser");
+const worker_1 = require("@remotion/media-parser/worker");
+const create_video_decoder_1 = require("./create-video-decoder");
+const extractFrames = async ({ fromSeconds, toSeconds, width, height, src, onFrame, }) => {
+    const controller = (0, media_parser_1.mediaParserController)();
+    controller.seek(fromSeconds);
+    const segmentDuration = toSeconds - fromSeconds;
+    const expectedFrames = [];
+    try {
+        await (0, worker_1.parseMediaOnWebWorker)({
+            src: new URL(src, window.location.href).toString(),
+            acknowledgeRemotionLicense: true,
+            controller,
+            onVideoTrack: ({ track }) => {
+                const aspectRatio = track.width / track.height;
+                const framesFitInWidth = Math.ceil(width / (height * aspectRatio));
+                const timestampTargets = [];
+                for (let i = 0; i < framesFitInWidth; i++) {
+                    timestampTargets.push(fromSeconds +
+                        ((segmentDuration * media_parser_1.WEBCODECS_TIMESCALE) / framesFitInWidth) *
+                            (i + 0.5));
+                }
+                const decoder = (0, create_video_decoder_1.createVideoDecoder)({
+                    onFrame: (frame) => {
+                        if (frame.timestamp >= expectedFrames[0] - 1) {
+                            expectedFrames.shift();
+                            onFrame(frame);
+                        }
+                        frame.close();
+                    },
+                    onError: console.error,
+                    track,
+                });
+                const queued = [];
+                return async (sample) => {
+                    const nextTimestampWeWant = timestampTargets[0];
+                    if (nextTimestampWeWant === undefined) {
+                        throw new Error('this should not happen');
+                    }
+                    if (sample.type === 'key') {
+                        queued.length = 0;
+                    }
+                    queued.push(sample);
+                    if (sample.timestamp > nextTimestampWeWant) {
+                        expectedFrames.push(timestampTargets.shift());
+                        while (queued.length > 0) {
+                            const sam = queued.shift();
+                            await decoder.waitForQueueToBeLessThan(10);
+                            await decoder.decode(sam);
+                        }
+                        if (timestampTargets.length === 0) {
+                            await decoder.flush();
+                            controller.abort();
+                        }
+                    }
+                };
+            },
+        });
+    }
+    catch (e) {
+        if (!(0, media_parser_1.hasBeenAborted)(e)) {
+            throw e;
+        }
+    }
+};
+exports.extractFrames = extractFrames;
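
For orientation: extractFrames builds a filmstrip. It seeks to fromSeconds, computes how many thumbnails of the track's aspect ratio fit into the requested strip width, aims for one frame at the midpoint of each equal sub-segment, and aborts the parse once all targets have been decoded. Below is a minimal sketch of just that slot-and-midpoint computation, written in plain seconds for clarity (the shipped code above scales by WEBCODECS_TIMESCALE); the function name and parameters are illustrative, not part of the package.

// Sketch: evenly spaced midpoint timestamps for a filmstrip that is `width` pixels wide,
// where each thumbnail is `height` pixels tall and keeps the track's aspect ratio.
const filmstripTargets = ({fromSeconds, toSeconds, width, height, trackWidth, trackHeight}: {
  fromSeconds: number;
  toSeconds: number;
  width: number;
  height: number;
  trackWidth: number;
  trackHeight: number;
}): number[] => {
  const aspectRatio = trackWidth / trackHeight;
  const slots = Math.ceil(width / (height * aspectRatio)); // thumbnails that fit side by side
  const segment = (toSeconds - fromSeconds) / slots; // seconds covered by each slot
  return Array.from({length: slots}, (_, i) => fromSeconds + segment * (i + 0.5));
};

// A square track in a 300x100 strip gives 3 slots over seconds 10..16: [11, 13, 15]
console.log(filmstripTargets({fromSeconds: 10, toSeconds: 16, width: 300, height: 100, trackWidth: 160, trackHeight: 160}));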
package/dist/esm/index.mjs
CHANGED
@@ -4185,29 +4185,26 @@ var getBytesPerSample = (sampleFormat) => {
   if (sampleFormat === "s16") {
     return 2;
   }
-  if (sampleFormat === "
+  if (sampleFormat === "s24") {
     return 4;
   }
-  if (sampleFormat === "f32") {
-    return 4;
-  }
-  if (sampleFormat === "u8") {
-    return 1;
-  }
-  if (sampleFormat === "f32-planar") {
-    return 4;
-  }
-  if (sampleFormat === "s16-planar") {
-    return 2;
-  }
-  if (sampleFormat === "s32-planar") {
-    return 4;
-  }
-  if (sampleFormat === "u8-planar") {
-    return 1;
-  }
   throw new Error(`Unsupported sample format: ${sampleFormat}`);
 };
+function uint8_24le_to_uint32(u8) {
+  if (u8.length % 3 !== 0) {
+    throw new Error("Input length must be a multiple of 3");
+  }
+  const count = u8.length / 3;
+  const out = new Uint32Array(count);
+  let j = 0;
+  for (let i = 0;i < count; i++) {
+    const b0 = u8[j++];
+    const b1 = u8[j++];
+    const b2 = u8[j++];
+    out[i] = (b0 | b1 << 8 | b2 << 16) << 8;
+  }
+  return out;
+}
 var getAudioData = (audioSample) => {
   if (audioSample instanceof EncodedAudioChunk) {
     const data = new Uint8Array(audioSample.byteLength);
@@ -4225,12 +4222,14 @@ var getWaveAudioDecoder = ({
 }) => {
   const processSample = async (audioSample) => {
     const bytesPerSample = getBytesPerSample(sampleFormat);
-    const
+    const rawData = getAudioData(audioSample);
+    const data = sampleFormat === "s24" && rawData instanceof Uint8Array ? uint8_24le_to_uint32(rawData) : rawData;
+    const numberOfFrames = data.byteLength / bytesPerSample / config.numberOfChannels;
     const audioData = new AudioData({
       data,
-      format: sampleFormat,
+      format: sampleFormat === "s16" ? "s16" : "s32",
       numberOfChannels: config.numberOfChannels,
-      numberOfFrames
+      numberOfFrames,
       sampleRate: config.sampleRate,
       timestamp: audioSample.timestamp
     });
@@ -4321,6 +4320,16 @@ var internalCreateAudioDecoder = async ({
       onError
     });
   }
+  if (config.codec === "pcm-s24") {
+    return getWaveAudioDecoder({
+      onFrame,
+      config,
+      sampleFormat: "s24",
+      logLevel,
+      ioSynchronizer,
+      onError
+    });
+  }
   const audioDecoder = new AudioDecoder({
     async output(frame) {
       try {

package/dist/get-wave-audio-decoder.d.ts
CHANGED
@@ -2,7 +2,7 @@ import type { MediaParserLogLevel } from '@remotion/media-parser';
 import type { CreateAudioDecoderInit, WebCodecsAudioDecoder } from './create-audio-decoder';
 import type { IoSynchronizer } from './io-manager/io-synchronizer';
 export declare const getWaveAudioDecoder: ({ onFrame, config, sampleFormat, ioSynchronizer, onError, }: Pick<CreateAudioDecoderInit, "onFrame" | "config"> & {
-    sampleFormat:
+    sampleFormat: "s16" | "s24";
     logLevel: MediaParserLogLevel;
     ioSynchronizer: IoSynchronizer;
     onError: (error: Error) => void;

package/dist/get-wave-audio-decoder.js
CHANGED
@@ -5,29 +5,26 @@ const getBytesPerSample = (sampleFormat) => {
     if (sampleFormat === 's16') {
         return 2;
     }
-    if (sampleFormat === '
+    if (sampleFormat === 's24') {
         return 4;
     }
-    if (sampleFormat === 'f32') {
-        return 4;
-    }
-    if (sampleFormat === 'u8') {
-        return 1;
-    }
-    if (sampleFormat === 'f32-planar') {
-        return 4;
-    }
-    if (sampleFormat === 's16-planar') {
-        return 2;
-    }
-    if (sampleFormat === 's32-planar') {
-        return 4;
-    }
-    if (sampleFormat === 'u8-planar') {
-        return 1;
-    }
     throw new Error(`Unsupported sample format: ${sampleFormat}`);
 };
+function uint8_24le_to_uint32(u8) {
+    if (u8.length % 3 !== 0) {
+        throw new Error('Input length must be a multiple of 3');
+    }
+    const count = u8.length / 3;
+    const out = new Uint32Array(count);
+    let j = 0;
+    for (let i = 0; i < count; i++) {
+        const b0 = u8[j++];
+        const b1 = u8[j++];
+        const b2 = u8[j++];
+        out[i] = (b0 | (b1 << 8) | (b2 << 16)) << 8;
+    }
+    return out;
+}
 const getAudioData = (audioSample) => {
     if (audioSample instanceof EncodedAudioChunk) {
         const data = new Uint8Array(audioSample.byteLength);
@@ -39,12 +36,16 @@ const getAudioData = (audioSample) => {
 const getWaveAudioDecoder = ({ onFrame, config, sampleFormat, ioSynchronizer, onError, }) => {
     const processSample = async (audioSample) => {
         const bytesPerSample = getBytesPerSample(sampleFormat);
-        const
+        const rawData = getAudioData(audioSample);
+        const data = sampleFormat === 's24' && rawData instanceof Uint8Array
+            ? uint8_24le_to_uint32(rawData)
+            : rawData;
+        const numberOfFrames = data.byteLength / bytesPerSample / config.numberOfChannels;
         const audioData = new AudioData({
            data,
-            format: sampleFormat,
+            format: sampleFormat === 's16' ? 's16' : 's32',
            numberOfChannels: config.numberOfChannels,
-            numberOfFrames
+            numberOfFrames,
            sampleRate: config.sampleRate,
            timestamp: audioSample.timestamp,
        });
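
Some context on the new s24 path: WebCodecs AudioData does not define a packed 24-bit sample format, so the wave decoder widens each little-endian 3-byte sample into the upper 24 bits of a 32-bit integer (the final << 8) and reports the result as 's32', which is why the format field changes to sampleFormat === 's16' ? 's16' : 's32' above. A small worked example of that widening; the byte values are arbitrary.

// Two 24-bit little-endian samples: 0x030201 and 0xffffff (-1 in signed 24-bit)
const bytes = new Uint8Array([0x01, 0x02, 0x03, 0xff, 0xff, 0xff]);
const out = new Uint32Array(bytes.length / 3);
for (let i = 0, j = 0; i < out.length; i++) {
  const b0 = bytes[j++];
  const b1 = bytes[j++];
  const b2 = bytes[j++];
  // Assemble the 24-bit value, then shift it into the top of the 32-bit sample
  out[i] = (b0 | (b1 << 8) | (b2 << 16)) << 8;
}
// Sample 0: 0x030201 << 8 = 0x03020100 (50462976)
// Sample 1: 0xffffff << 8 = 0xffffff00, i.e. -256 when read as signed s32 (-1 scaled by 256)
console.log(new Int32Array(out.buffer)); // Int32Array [ 50462976, -256 ]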

package/dist/video-decoder.d.ts
ADDED
@@ -0,0 +1,23 @@
+import type { MediaParserLogLevel, MediaParserVideoSample } from '@remotion/media-parser';
+import type { WebCodecsController } from './webcodecs-controller';
+export type WebCodecsVideoDecoder = {
+    decode: (videoSample: MediaParserVideoSample | EncodedVideoChunk) => void;
+    close: () => void;
+    flush: () => Promise<void>;
+    waitForFinish: () => Promise<void>;
+    waitForQueueToBeLessThan: (items: number) => Promise<void>;
+};
+export declare const internalCreateVideoDecoder: ({ onFrame, onError, controller, config, logLevel, }: {
+    onFrame: (frame: VideoFrame) => Promise<void> | void;
+    onError: (error: Error) => void;
+    controller: WebCodecsController | null;
+    config: VideoDecoderConfig;
+    logLevel: MediaParserLogLevel;
+}) => WebCodecsVideoDecoder;
+export declare const createVideoDecoder: ({ onFrame, onError, controller, track, logLevel, }: {
+    track: VideoDecoderConfig;
+    onFrame: (frame: VideoFrame) => Promise<void> | void;
+    onError: (error: Error) => void;
+    controller?: WebCodecsController;
+    logLevel?: MediaParserLogLevel;
+}) => WebCodecsVideoDecoder;
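
As with the audio decoder, here is a minimal usage sketch of the new createVideoDecoder export, mirroring how create-frames.js above wires it into a media-parser video track. The source URL is a placeholder and error handling is kept trivial; this is a sketch, not the package's documented example.

import {parseMedia} from '@remotion/media-parser';
import {createVideoDecoder} from '@remotion/webcodecs';

// Sketch: decode every sample of the first video track into VideoFrames.
await parseMedia({
  src: 'https://example.com/video.mp4', // placeholder URL
  acknowledgeRemotionLicense: true,
  onVideoTrack: ({track}) => {
    const decoder = createVideoDecoder({
      track, // used as the VideoDecoderConfig
      onFrame: (frame) => {
        // Draw the frame to a canvas, re-encode it, etc., then release it
        frame.close();
      },
      onError: (err) => console.error(err),
    });
    return async (sample) => {
      // Apply backpressure before feeding the next encoded chunk
      await decoder.waitForQueueToBeLessThan(10);
      decoder.decode(sample);
    };
  },
});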

package/dist/video-decoder.js
ADDED
@@ -0,0 +1,80 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.createVideoDecoder = exports.internalCreateVideoDecoder = void 0;
+const io_synchronizer_1 = require("./io-manager/io-synchronizer");
+const log_1 = require("./log");
+const internalCreateVideoDecoder = ({ onFrame, onError, controller, config, logLevel, }) => {
+    const ioSynchronizer = (0, io_synchronizer_1.makeIoSynchronizer)({
+        logLevel,
+        label: 'Video decoder',
+        controller,
+    });
+    const videoDecoder = new VideoDecoder({
+        async output(frame) {
+            try {
+                await onFrame(frame);
+            }
+            catch (err) {
+                onError(err);
+                frame.close();
+            }
+            ioSynchronizer.onOutput(frame.timestamp);
+        },
+        error(error) {
+            onError(error);
+        },
+    });
+    const close = () => {
+        if (controller) {
+            controller._internals._mediaParserController._internals.signal.removeEventListener('abort',
+            // eslint-disable-next-line @typescript-eslint/no-use-before-define
+            onAbort);
+        }
+        if (videoDecoder.state === 'closed') {
+            return;
+        }
+        videoDecoder.close();
+    };
+    const onAbort = () => {
+        close();
+    };
+    if (controller) {
+        controller._internals._mediaParserController._internals.signal.addEventListener('abort', onAbort);
+    }
+    videoDecoder.configure(config);
+    const decode = (sample) => {
+        if (videoDecoder.state === 'closed') {
+            return;
+        }
+        const encodedChunk = sample instanceof EncodedVideoChunk
+            ? sample
+            : new EncodedVideoChunk(sample);
+        videoDecoder.decode(encodedChunk);
+        ioSynchronizer.inputItem(sample.timestamp);
+    };
+    return {
+        decode,
+        waitForFinish: async () => {
+            await videoDecoder.flush();
+            log_1.Log.verbose(logLevel, 'Flushed video decoder');
+            await ioSynchronizer.waitForFinish();
+            log_1.Log.verbose(logLevel, 'IO synchro finished');
+        },
+        close,
+        flush: async () => {
+            await videoDecoder.flush();
+        },
+        waitForQueueToBeLessThan: ioSynchronizer.waitForQueueSize,
+    };
+};
+exports.internalCreateVideoDecoder = internalCreateVideoDecoder;
+const createVideoDecoder = ({ onFrame, onError, controller, track, logLevel, }) => {
+    return (0, exports.internalCreateVideoDecoder)({
+        onFrame,
+        onError,
+        controller: controller ?? null,
+        config: track,
+        logLevel: logLevel ?? 'info',
+    });
+};
+exports.createVideoDecoder = createVideoDecoder;
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@remotion/webcodecs",
-  "version": "4.0.344",
+  "version": "4.0.345",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
   "module": "dist/esm/index.mjs",
@@ -19,18 +19,18 @@
   "author": "Jonny Burger <jonny@remotion.dev>",
   "license": "Remotion License (See https://remotion.dev/docs/webcodecs#license)",
   "dependencies": {
-    "@remotion/media-parser": "4.0.
-    "@remotion/licensing": "4.0.
+    "@remotion/media-parser": "4.0.345",
+    "@remotion/licensing": "4.0.345"
   },
   "peerDependencies": {},
   "devDependencies": {
     "@types/dom-webcodecs": "0.1.11",
     "playwright": "1.51.1",
-    "vite": "5.4.
+    "vite": "5.4.20",
     "@playwright/test": "1.51.1",
     "eslint": "9.19.0",
-    "@remotion/
-    "@remotion/
+    "@remotion/eslint-config-internal": "4.0.345",
+    "@remotion/example-videos": "4.0.345"
   },
   "keywords": [],
   "publishConfig": {