@remotion/webcodecs 4.0.310 → 4.0.312
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/can-copy-video-track.js +2 -1
- package/dist/can-reencode-video-track.js +1 -1
- package/dist/create-audio-decoder.d.ts +4 -0
- package/dist/create-audio-decoder.js +31 -6
- package/dist/create-frames.d.ts +8 -0
- package/dist/create-frames.js +69 -0
- package/dist/create-video-decoder.d.ts +5 -1
- package/dist/create-video-decoder.js +38 -6
- package/dist/esm/index.mjs +223 -27
- package/dist/extract-frames.d.ts +13 -0
- package/dist/extract-frames.js +114 -0
- package/dist/flush-pending.d.ts +10 -0
- package/dist/flush-pending.js +13 -0
- package/dist/get-wave-audio-decoder.js +10 -1
- package/dist/index.d.ts +6 -4
- package/dist/index.js +3 -1
- package/dist/io-manager/io-synchronizer.d.ts +1 -0
- package/dist/io-manager/io-synchronizer.js +18 -1
- package/dist/on-frame.js +1 -1
- package/dist/processing-queue.d.ts +1 -0
- package/dist/reencode-audio-track.js +2 -1
- package/dist/reencode-video-track.js +3 -3
- package/dist/resizing/calculate-new-size.d.ts +3 -4
- package/dist/resizing/calculate-new-size.js +1 -2
- package/dist/rotate-and-resize-video-frame.d.ts +2 -3
- package/dist/rotate-and-resize-video-frame.js +2 -2
- package/dist/rotation.d.ts +3 -4
- package/dist/rotation.js +2 -2
- package/package.json +6 -5
package/dist/can-copy-video-track.js
CHANGED
@@ -17,12 +17,13 @@ const canCopyVideoTrack = ({ outputContainer, rotationToApply, inputContainer, r
             return false;
         }
     }
+    const needsToBeMultipleOfTwo = inputTrack.codecEnum === 'h264';
     const newDimensions = (0, rotation_1.calculateNewDimensionsFromRotateAndScale)({
         height: inputTrack.height,
         resizeOperation,
         rotation: rotationToApply,
-        videoCodec: inputTrack.codecEnum,
         width: inputTrack.width,
+        needsToBeMultipleOfTwo,
     });
     if (newDimensions.height !== inputTrack.height ||
         newDimensions.width !== inputTrack.width) {

package/dist/can-reencode-video-track.js
CHANGED
@@ -9,7 +9,7 @@ const canReencodeVideoTrack = async ({ videoCodec, track, resizeOperation, rotat
         height: track.displayAspectHeight,
         resizeOperation,
         rotation: rotate ?? 0,
-        videoCodec,
+        needsToBeMultipleOfTwo: videoCodec === 'h264',
         width: track.displayAspectWidth,
     });
     const videoEncoderConfig = await (0, video_encoder_config_1.getVideoEncoderConfig)({
package/dist/create-audio-decoder.d.ts
CHANGED
@@ -6,6 +6,10 @@ export type WebCodecsAudioDecoder = {
     flush: () => Promise<void>;
     waitForQueueToBeLessThan: (items: number) => Promise<void>;
     reset: () => void;
+    checkReset: () => {
+        wasReset: () => boolean;
+    };
+    getMostRecentSampleInput: () => number | null;
 };
 export type CreateAudioDecoderInit = {
     onFrame: (frame: AudioData) => Promise<void> | void;
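The two members added above let a caller detect whether reset() was invoked while an asynchronous step was still pending, and inspect the timestamp of the last sample passed to decode(). A minimal sketch of how such a handle could be used; the creation of `decoder` and the `samples` iterable are assumptions, not shown in this diff:

    // TypeScript sketch against the WebCodecsAudioDecoder shape above (illustrative only).
    const guard = decoder.checkReset();
    for (const sample of samples) {
        await decoder.waitForQueueToBeLessThan(10);
        if (guard.wasReset()) {
            break; // a reset() happened while we were waiting; abandon this batch
        }
        decoder.decode(sample);
    }
    console.log('last queued timestamp:', decoder.getMostRecentSampleInput());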
package/dist/create-audio-decoder.js
CHANGED
@@ -1,6 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.createAudioDecoder = exports.internalCreateAudioDecoder = void 0;
+const flush_pending_1 = require("./flush-pending");
 const get_wave_audio_decoder_1 = require("./get-wave-audio-decoder");
 const io_synchronizer_1 = require("./io-manager/io-synchronizer");
 const internalCreateAudioDecoder = ({ onFrame, onError, controller, config, logLevel, }) => {
@@ -13,6 +14,7 @@ const internalCreateAudioDecoder = ({ onFrame, onError, controller, config, logL
         label: 'Audio decoder',
         controller,
     });
+    let mostRecentSampleReceived = null;
     if (config.codec === 'pcm-s16') {
         return (0, get_wave_audio_decoder_1.getWaveAudioDecoder)({
             onFrame,
@@ -67,6 +69,7 @@ const internalCreateAudioDecoder = ({ onFrame, onError, controller, config, logL
             onError(err);
             return;
         }
+        mostRecentSampleReceived = audioSample.timestamp;
         // Don't flush, it messes up the audio
         const chunk = audioSample instanceof EncodedAudioChunk
             ? audioSample
@@ -81,22 +84,44 @@ const internalCreateAudioDecoder = ({ onFrame, onError, controller, config, logL
             ioSynchronizer.inputItem(chunk.timestamp);
         }
     };
+    let flushPending = null;
+    const lastReset = null;
     return {
         decode,
         close,
-        flush:
-
-
-            await audioDecoder.flush();
+        flush: () => {
+            if (flushPending) {
+                throw new Error('Flush already pending');
             }
-
-
+            const pendingFlush = (0, flush_pending_1.makeFlushPending)();
+            flushPending = pendingFlush;
+            Promise.resolve()
+                .then(() => {
+                return audioDecoder.flush();
+            })
+                .catch(() => {
+                // Firefox might throw "Needs to be configured first"
+            })
+                .finally(() => {
+                pendingFlush.resolve();
+                flushPending = null;
+            });
+            return pendingFlush.promise;
         },
         waitForQueueToBeLessThan: ioSynchronizer.waitForQueueSize,
         reset: () => {
             audioDecoder.reset();
             audioDecoder.configure(config);
         },
+        checkReset: () => {
+            const initTime = Date.now();
+            return {
+                wasReset: () => lastReset !== null && lastReset > initTime,
+            };
+        },
+        getMostRecentSampleInput() {
+            return mostRecentSampleReceived;
+        },
     };
 };
 exports.internalCreateAudioDecoder = internalCreateAudioDecoder;
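The rewritten flush() above no longer awaits the WebCodecs decoder directly: it funnels every call through a single pending promise, rejects a second flush while one is in flight, and still completes the waiter if the browser throws (the comment notes Firefox's "Needs to be configured first"). A generic sketch of the same pattern, with illustrative names, not the module's actual code:

    // TypeScript sketch of the "single pending flush" idea shown in the hunk above.
    let pending: Promise<void> | null = null;

    const flushOnce = (doFlush: () => Promise<void>): Promise<void> => {
        if (pending) {
            throw new Error('Flush already pending');
        }
        pending = doFlush()
            .catch(() => undefined) // tolerate engine-specific flush errors
            .finally(() => {
                pending = null;
            });
        return pending;
    };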
package/dist/create-frames.js
ADDED
@@ -0,0 +1,69 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.extractFrames = void 0;
+const media_parser_1 = require("@remotion/media-parser");
+const worker_1 = require("@remotion/media-parser/worker");
+const create_video_decoder_1 = require("./create-video-decoder");
+const extractFrames = async ({ fromSeconds, toSeconds, width, height, src, onFrame, }) => {
+    const controller = (0, media_parser_1.mediaParserController)();
+    controller.seek(fromSeconds);
+    const segmentDuration = toSeconds - fromSeconds;
+    const expectedFrames = [];
+    try {
+        await (0, worker_1.parseMediaOnWebWorker)({
+            src: new URL(src, window.location.href).toString(),
+            acknowledgeRemotionLicense: true,
+            controller,
+            onVideoTrack: ({ track }) => {
+                const aspectRatio = track.width / track.height;
+                const framesFitInWidth = Math.ceil(width / (height * aspectRatio));
+                const timestampTargets = [];
+                for (let i = 0; i < framesFitInWidth; i++) {
+                    timestampTargets.push(fromSeconds +
+                        ((segmentDuration * media_parser_1.WEBCODECS_TIMESCALE) / framesFitInWidth) *
+                            (i + 0.5));
+                }
+                const decoder = (0, create_video_decoder_1.createVideoDecoder)({
+                    onFrame: (frame) => {
+                        if (frame.timestamp >= expectedFrames[0] - 1) {
+                            expectedFrames.shift();
+                            onFrame(frame);
+                        }
+                        frame.close();
+                    },
+                    onError: console.error,
+                    track,
+                });
+                const queued = [];
+                return async (sample) => {
+                    const nextTimestampWeWant = timestampTargets[0];
+                    if (nextTimestampWeWant === undefined) {
+                        throw new Error('this should not happen');
+                    }
+                    if (sample.type === 'key') {
+                        queued.length = 0;
+                    }
+                    queued.push(sample);
+                    if (sample.timestamp > nextTimestampWeWant) {
+                        expectedFrames.push(timestampTargets.shift());
+                        while (queued.length > 0) {
+                            const sam = queued.shift();
+                            await decoder.waitForQueueToBeLessThan(10);
+                            await decoder.decode(sam);
+                        }
+                        if (timestampTargets.length === 0) {
+                            await decoder.flush();
+                            controller.abort();
+                        }
+                    }
+                };
+            },
+        });
+    }
+    catch (e) {
+        if (!(0, media_parser_1.hasBeenAborted)(e)) {
+            throw e;
+        }
+    }
+};
+exports.extractFrames = extractFrames;
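The loop above sizes a filmstrip: it works out how many thumbnails with the track's aspect ratio fit into a strip of `width` by `height` pixels, then aims for the middle of each slot between `fromSeconds` and `toSeconds`. A worked example with assumed numbers (16:9 track, 600 by 40 pixel strip, 10-second segment), ignoring the WEBCODECS_TIMESCALE scaling the code applies for decoder timestamps:

    // TypeScript sketch of the slot arithmetic above (values are illustrative).
    const aspectRatio = 16 / 9;                                        // track.width / track.height
    const framesFitInWidth = Math.ceil(600 / (40 * aspectRatio));      // 9 thumbnails
    const fromSeconds = 0;
    const segmentDuration = 10;
    const targetsInSeconds = Array.from({ length: framesFitInWidth }, (_, i) =>
        fromSeconds + (segmentDuration / framesFitInWidth) * (i + 0.5),
    ); // ~[0.56, 1.67, 2.78, ...]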
package/dist/create-video-decoder.d.ts
CHANGED
@@ -1,11 +1,15 @@
 import type { MediaParserLogLevel } from '@remotion/media-parser';
 import type { WebCodecsController } from './webcodecs-controller';
 export type WebCodecsVideoDecoder = {
-    decode: (videoSample: EncodedVideoChunkInit | EncodedVideoChunk) => void
+    decode: (videoSample: EncodedVideoChunkInit | EncodedVideoChunk) => Promise<void>;
     close: () => void;
     flush: () => Promise<void>;
     waitForQueueToBeLessThan: (items: number) => Promise<void>;
     reset: () => void;
+    checkReset: () => {
+        wasReset: () => boolean;
+    };
+    getMostRecentSampleInput: () => number | null;
 };
 export declare const internalCreateVideoDecoder: ({ onFrame, onError, controller, config, logLevel, }: {
     onFrame: (frame: VideoFrame) => Promise<void> | void;
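decode() now returns a Promise, so a caller can apply backpressure by awaiting it together with waitForQueueToBeLessThan(). A minimal driving loop; the `decoder` (a WebCodecsVideoDecoder) and the `samples` iterable are assumed:

    // TypeScript sketch; not part of the package, names are illustrative.
    for (const sample of samples) {
        await decoder.waitForQueueToBeLessThan(10); // keep the internal queue bounded
        await decoder.decode(sample);               // decode() is awaitable as of this version
    }
    await decoder.flush(); // wait for the remaining frames to be emitted
    decoder.close();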
package/dist/create-video-decoder.js
CHANGED
@@ -1,13 +1,19 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.createVideoDecoder = exports.internalCreateVideoDecoder = void 0;
+const flush_pending_1 = require("./flush-pending");
 const io_synchronizer_1 = require("./io-manager/io-synchronizer");
 const internalCreateVideoDecoder = ({ onFrame, onError, controller, config, logLevel, }) => {
+    if (controller &&
+        controller._internals._mediaParserController._internals.signal.aborted) {
+        throw new Error('Not creating audio decoder, already aborted');
+    }
     const ioSynchronizer = (0, io_synchronizer_1.makeIoSynchronizer)({
         logLevel,
         label: 'Video decoder',
         controller,
     });
+    let mostRecentSampleReceived = null;
     const videoDecoder = new VideoDecoder({
         async output(frame) {
             try {
@@ -52,28 +58,54 @@ const internalCreateVideoDecoder = ({ onFrame, onError, controller, config, logL
             onError(err);
             return;
         }
+        mostRecentSampleReceived = sample.timestamp;
         const encodedChunk = sample instanceof EncodedVideoChunk
             ? sample
             : new EncodedVideoChunk(sample);
         videoDecoder.decode(encodedChunk);
         ioSynchronizer.inputItem(sample.timestamp);
     };
+    let flushPending = null;
+    let lastReset = null;
     return {
         decode,
         close,
-        flush:
-
-
-            await videoDecoder.flush();
+        flush: () => {
+            if (flushPending) {
+                throw new Error('Flush already pending');
             }
-
-
+            const pendingFlush = (0, flush_pending_1.makeFlushPending)();
+            flushPending = pendingFlush;
+            Promise.resolve()
+                .then(() => {
+                return videoDecoder.flush();
+            })
+                .catch(() => {
+                // Firefox might throw "Needs to be configured first"
+            })
+                .finally(() => {
+                pendingFlush.resolve();
+                flushPending = null;
+            });
+            return pendingFlush.promise;
         },
         waitForQueueToBeLessThan: ioSynchronizer.waitForQueueSize,
         reset: () => {
+            lastReset = Date.now();
+            flushPending?.resolve();
+            ioSynchronizer.clearQueue();
             videoDecoder.reset();
             videoDecoder.configure(config);
         },
+        checkReset: () => {
+            const initTime = Date.now();
+            return {
+                wasReset: () => lastReset !== null && lastReset > initTime,
+            };
+        },
+        getMostRecentSampleInput() {
+            return mostRecentSampleReceived;
+        },
     };
 };
 exports.internalCreateVideoDecoder = internalCreateVideoDecoder;
package/dist/esm/index.mjs
CHANGED
@@ -244,9 +244,8 @@ var ensureMultipleOfTwo = ({
 var calculateNewSizeAfterResizing = ({
   dimensions,
   resizeOperation,
-
+  needsToBeMultipleOfTwo
 }) => {
-  const needsToBeMultipleOfTwo = videoCodec === "h264";
   if (resizeOperation === null) {
     return ensureMultipleOfTwo({
       dimensions,
@@ -342,7 +341,7 @@ var calculateNewDimensionsFromRotateAndScale = ({
   height,
   rotation,
   resizeOperation,
-
+  needsToBeMultipleOfTwo
 }) => {
   const { height: newHeight, width: newWidth } = calculateNewDimensionsFromRotate({
     height,
@@ -352,7 +351,7 @@ var calculateNewDimensionsFromRotateAndScale = ({
   return calculateNewSizeAfterResizing({
     dimensions: { height: newHeight, width: newWidth },
     resizeOperation,
-
+    needsToBeMultipleOfTwo
   });
 };

@@ -363,7 +362,7 @@ var normalizeVideoRotation = (rotation) => {
 var rotateAndResizeVideoFrame = ({
   frame,
   rotation,
-
+  needsToBeMultipleOfTwo,
   resizeOperation
 }) => {
   const normalized = (rotation % 360 + 360) % 360;
@@ -377,7 +376,7 @@ var rotateAndResizeVideoFrame = ({
     height: frame.displayHeight,
     width: frame.displayWidth,
     rotation,
-
+    needsToBeMultipleOfTwo,
     resizeOperation
   });
   if (normalized === 0 && height === frame.displayHeight && width === frame.displayWidth) {
@@ -539,8 +538,9 @@ var makeIoSynchronizer = ({
   let lastOutput = 0;
   let inputsSinceLastOutput = 0;
   let inputs = [];
+  let resolvers = [];
   const getQueuedItems = () => {
-    inputs = inputs.filter((input) => Math.floor(input) > Math.floor(lastOutput));
+    inputs = inputs.filter((input) => Math.floor(input) > Math.floor(lastOutput) + 1);
     return inputs.length;
   };
   const printState = (prefix) => {
@@ -568,8 +568,10 @@ var makeIoSynchronizer = ({
     const on = () => {
       eventEmitter.removeEventListener("output", on);
       resolve();
+      resolvers = resolvers.filter((resolver) => resolver !== resolve);
     };
     eventEmitter.addEventListener("output", on);
+    resolvers.push(resolve);
     return promise;
   };
   const makeErrorBanner = () => {
@@ -609,10 +611,22 @@ var makeIoSynchronizer = ({
       controller._internals._mediaParserController._internals.signal.removeEventListener("abort", clear);
     }
   };
+  const clearQueue = () => {
+    inputs.length = 0;
+    lastInput = 0;
+    lastOutput = 0;
+    inputsSinceLastOutput = 0;
+    resolvers.forEach((resolver) => {
+      return resolver();
+    });
+    resolvers.length = 0;
+    inputs.length = 0;
+  };
   return {
     inputItem,
     onOutput,
-    waitForQueueSize
+    waitForQueueSize,
+    clearQueue
   };
 };
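clearQueue() has to wake anything blocked in waitForQueueSize(), which is why pending resolve callbacks are now kept in `resolvers` and invoked on clear. A stripped-down sketch of that bookkeeping, illustrative rather than the actual module:

    // TypeScript sketch: tracking waiters so a queue reset can release them.
    let waiters: Array<() => void> = [];

    const waitForDrain = (): Promise<void> =>
        new Promise((resolve) => {
            waiters.push(resolve);
        });

    const clearQueue = (): void => {
        waiters.forEach((resolve) => resolve()); // unblock every pending waitForDrain()
        waiters = [];
    };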
@@ -930,12 +944,13 @@ var canCopyVideoTrack = ({
       return false;
     }
   }
+  const needsToBeMultipleOfTwo = inputTrack.codecEnum === "h264";
   const newDimensions = calculateNewDimensionsFromRotateAndScale({
     height: inputTrack.height,
     resizeOperation,
     rotation: rotationToApply,
-
-
+    width: inputTrack.width,
+    needsToBeMultipleOfTwo
   });
   if (newDimensions.height !== inputTrack.height || newDimensions.width !== inputTrack.width) {
     return false;
@@ -1416,7 +1431,7 @@ var canReencodeVideoTrack = async ({
     height: track.displayAspectHeight,
     resizeOperation,
     rotation: rotate ?? 0,
-    videoCodec,
+    needsToBeMultipleOfTwo: videoCodec === "h264",
     width: track.displayAspectWidth
   });
   const videoEncoderConfig = await getVideoEncoderConfig({
@@ -4135,6 +4150,16 @@ var convertEncodedChunk = (chunk) => {
   };
 };

+// src/flush-pending.ts
+var makeFlushPending = () => {
+  const { promise, resolve, reject } = withResolvers();
+  return {
+    promise,
+    resolve,
+    reject
+  };
+};
+
 // src/get-wave-audio-decoder.ts
 var getBytesPerSample = (sampleFormat) => {
   if (sampleFormat === "s16") {
@@ -4196,16 +4221,25 @@ var getWaveAudioDecoder = ({
      onError(err);
    }
  };
+  let lastReset = null;
+  let mostRecentSampleInput = null;
  return {
    close() {
      return Promise.resolve();
    },
    decode(audioSample) {
+      mostRecentSampleInput = audioSample.timestamp;
      return processSample(audioSample);
    },
    flush: () => Promise.resolve(),
    waitForQueueToBeLessThan: ioSynchronizer.waitForQueueSize,
-    reset: () => {
+    reset: () => {
+      lastReset = Date.now();
+    },
+    checkReset: () => ({
+      wasReset: () => lastReset !== null && lastReset > Date.now()
+    }),
+    getMostRecentSampleInput: () => mostRecentSampleInput
  };
};

@@ -4225,6 +4259,7 @@ var internalCreateAudioDecoder = ({
    label: "Audio decoder",
    controller
  });
+  let mostRecentSampleReceived = null;
  if (config.codec === "pcm-s16") {
    return getWaveAudioDecoder({
      onFrame,
@@ -4275,25 +4310,45 @@ var internalCreateAudioDecoder = ({
      onError(err);
      return;
    }
+    mostRecentSampleReceived = audioSample.timestamp;
    const chunk = audioSample instanceof EncodedAudioChunk ? audioSample : new EncodedAudioChunk(audioSample);
    audioDecoder.decode(chunk);
    if (chunk.byteLength > 16) {
      ioSynchronizer.inputItem(chunk.timestamp);
    }
  };
+  let flushPending = null;
+  const lastReset = null;
  return {
    decode,
    close,
-    flush:
-
-
-    }
-
+    flush: () => {
+      if (flushPending) {
+        throw new Error("Flush already pending");
+      }
+      const pendingFlush = makeFlushPending();
+      flushPending = pendingFlush;
+      Promise.resolve().then(() => {
+        return audioDecoder.flush();
+      }).catch(() => {}).finally(() => {
+        pendingFlush.resolve();
+        flushPending = null;
+      });
+      return pendingFlush.promise;
    },
    waitForQueueToBeLessThan: ioSynchronizer.waitForQueueSize,
    reset: () => {
      audioDecoder.reset();
      audioDecoder.configure(config);
+    },
+    checkReset: () => {
+      const initTime = Date.now();
+      return {
+        wasReset: () => lastReset !== null && lastReset > initTime
+      };
+    },
+    getMostRecentSampleInput() {
+      return mostRecentSampleReceived;
    }
  };
 };
@@ -4498,7 +4553,8 @@ var reencodeAudioTrack = async ({
    logLevel
  });
  state.addWaitForFinishPromise(async () => {
-
+    Log.verbose(logLevel, "Waiting for audio decoder to finish");
+    await audioDecoder.flush();
    Log.verbose(logLevel, "Audio decoder finished");
    audioDecoder.close();
    await audioProcessingQueue.ioSynchronizer.waitForQueueSize(0);
@@ -4671,11 +4727,15 @@ var internalCreateVideoDecoder = ({
  config,
  logLevel
}) => {
+  if (controller && controller._internals._mediaParserController._internals.signal.aborted) {
+    throw new Error("Not creating audio decoder, already aborted");
+  }
  const ioSynchronizer = makeIoSynchronizer({
    logLevel,
    label: "Video decoder",
    controller
  });
+  let mostRecentSampleReceived = null;
  const videoDecoder = new VideoDecoder({
    async output(frame) {
      try {
@@ -4716,23 +4776,46 @@ var internalCreateVideoDecoder = ({
      onError(err);
      return;
    }
+    mostRecentSampleReceived = sample.timestamp;
    const encodedChunk = sample instanceof EncodedVideoChunk ? sample : new EncodedVideoChunk(sample);
    videoDecoder.decode(encodedChunk);
    ioSynchronizer.inputItem(sample.timestamp);
  };
+  let flushPending = null;
+  let lastReset = null;
  return {
    decode,
    close,
-    flush:
-
-
-    }
-
+    flush: () => {
+      if (flushPending) {
+        throw new Error("Flush already pending");
+      }
+      const pendingFlush = makeFlushPending();
+      flushPending = pendingFlush;
+      Promise.resolve().then(() => {
+        return videoDecoder.flush();
+      }).catch(() => {}).finally(() => {
+        pendingFlush.resolve();
+        flushPending = null;
+      });
+      return pendingFlush.promise;
    },
    waitForQueueToBeLessThan: ioSynchronizer.waitForQueueSize,
    reset: () => {
+      lastReset = Date.now();
+      flushPending?.resolve();
+      ioSynchronizer.clearQueue();
      videoDecoder.reset();
      videoDecoder.configure(config);
+    },
+    checkReset: () => {
+      const initTime = Date.now();
+      return {
+        wasReset: () => lastReset !== null && lastReset > initTime
+      };
+    },
+    getMostRecentSampleInput() {
+      return mostRecentSampleReceived;
    }
  };
 };
@@ -4801,7 +4884,7 @@ var onFrame = async ({
    rotation,
    frame: unrotatedFrame,
    resizeOperation,
-
+    needsToBeMultipleOfTwo: outputCodec === "h264"
  });
  if (unrotatedFrame !== rotated) {
    unrotatedFrame.close();
@@ -4989,7 +5072,7 @@ var reencodeVideoTrack = async ({
    width: track.codedWidth,
    height: track.codedHeight,
    rotation,
-
+    needsToBeMultipleOfTwo: videoOperation.videoCodec === "h264",
    resizeOperation: videoOperation.resize ?? null
  });
  const videoEncoderConfig = await getVideoEncoderConfig({
@@ -5095,7 +5178,7 @@ var reencodeVideoTrack = async ({
  });
  state.addWaitForFinishPromise(async () => {
    Log.verbose(logLevel, "Waiting for video decoder to finish");
-    await videoDecoder.
+    await videoDecoder.flush();
    videoDecoder.close();
    Log.verbose(logLevel, "Video decoder finished. Waiting for encoder to finish");
    await frameSorter.flush();
@@ -5109,7 +5192,7 @@ var reencodeVideoTrack = async ({
  return async (chunk) => {
    progressTracker.setPossibleLowestTimestamp(Math.min(chunk.timestamp, chunk.decodingTimestamp ?? Infinity));
    await controller._internals._mediaParserController._internals.checkForAbortAndPause();
-    await videoDecoder.waitForQueueToBeLessThan(
+    await videoDecoder.waitForQueueToBeLessThan(15);
    if (chunk.type === "key") {
      await videoDecoder.flush();
    }
@@ -5499,6 +5582,118 @@ var convertMedia = async function({
    controller._internals._mediaParserController._internals.signal.removeEventListener("abort", onUserAbort);
  });
 };
+// src/extract-frames.ts
+import {
+  hasBeenAborted,
+  MediaParserAbortError as MediaParserAbortError4,
+  mediaParserController as mediaParserController2,
+  WEBCODECS_TIMESCALE
+} from "@remotion/media-parser";
+import { parseMediaOnWebWorker } from "@remotion/media-parser/worker";
+var internalExtractFrames = ({
+  src,
+  onFrame: onFrame2,
+  signal,
+  timestampsInSeconds,
+  acknowledgeRemotionLicense,
+  logLevel
+}) => {
+  const controller = mediaParserController2();
+  const expectedFrames = [];
+  const resolvers = withResolvers();
+  const abortListener = () => {
+    controller.abort();
+    resolvers.reject(new MediaParserAbortError4("Aborted by user"));
+  };
+  signal?.addEventListener("abort", abortListener, { once: true });
+  let dur = null;
+  parseMediaOnWebWorker({
+    src: new URL(src, window.location.href),
+    acknowledgeRemotionLicense,
+    controller,
+    logLevel,
+    onDurationInSeconds(durationInSeconds) {
+      dur = durationInSeconds;
+    },
+    onVideoTrack: async ({ track }) => {
+      const timestampTargetsUnsorted = typeof timestampsInSeconds === "function" ? await timestampsInSeconds({
+        track,
+        durationInSeconds: dur
+      }) : timestampsInSeconds;
+      const timestampTargets = timestampTargetsUnsorted.sort((a, b) => a - b);
+      controller.seek(timestampTargets[0]);
+      const decoder = createVideoDecoder({
+        onFrame: (frame) => {
+          if (frame.timestamp >= expectedFrames[0] - 1) {
+            expectedFrames.shift();
+            onFrame2(frame);
+          } else {
+            frame.close();
+          }
+        },
+        onError: (e) => {
+          controller.abort();
+          try {
+            decoder.close();
+          } catch {}
+          resolvers.reject(e);
+        },
+        track
+      });
+      const queued = [];
+      const doProcess = async () => {
+        expectedFrames.push(timestampTargets.shift() * WEBCODECS_TIMESCALE);
+        while (queued.length > 0) {
+          const sam = queued.shift();
+          await decoder.waitForQueueToBeLessThan(10);
+          await decoder.decode(sam);
+        }
+      };
+      return async (sample) => {
+        const nextTimestampWeWant = timestampTargets[0];
+        if (sample.type === "key") {
+          queued.length = 0;
+        }
+        queued.push(sample);
+        if (sample.timestamp >= timestampTargets[timestampTargets.length - 1] * WEBCODECS_TIMESCALE) {
+          await doProcess();
+          await decoder.flush();
+          controller.abort();
+          return;
+        }
+        if (nextTimestampWeWant === undefined) {
+          throw new Error("this should not happen");
+        }
+        if (sample.timestamp >= nextTimestampWeWant * WEBCODECS_TIMESCALE) {
+          await doProcess();
+          if (timestampTargets.length === 0) {
+            await decoder.flush();
+            controller.abort();
+          }
+        }
+      };
+    }
+  }).then(() => {
+    resolvers.resolve();
+  }).catch((e) => {
+    if (!hasBeenAborted(e)) {
+      resolvers.reject(e);
+    } else {
+      resolvers.resolve();
+    }
+  }).finally(() => {
+    signal?.removeEventListener("abort", abortListener);
+  });
+  return resolvers.promise;
+};
+var extractFrames = (options) => {
+  return internalExtractFrames({
+    ...options,
+    signal: options.signal ?? null,
+    acknowledgeRemotionLicense: options.acknowledgeRemotionLicense ?? false,
+    logLevel: options.logLevel ?? "info"
+  });
+};
 // src/get-available-audio-codecs.ts
 var getAvailableAudioCodecs = ({
   container
@@ -5529,6 +5724,7 @@ export {
   getAvailableVideoCodecs,
   getAvailableContainers,
   getAvailableAudioCodecs,
+  extractFrames,
   defaultOnVideoTrackHandler,
   defaultOnAudioTrackHandler,
   createVideoEncoder,
package/dist/extract-frames.d.ts
ADDED
@@ -0,0 +1,13 @@
+import type { MediaParserLogLevel, MediaParserVideoTrack } from '@remotion/media-parser';
+export type ExtractFramesTimestampsInSecondsFn = (options: {
+    track: MediaParserVideoTrack;
+    durationInSeconds: number | null;
+}) => Promise<number[]> | number[];
+export declare const extractFrames: (options: {
+    src: string;
+    timestampsInSeconds: number[] | ExtractFramesTimestampsInSecondsFn;
+    onFrame: (frame: VideoFrame) => void;
+    signal?: AbortSignal;
+    acknowledgeRemotionLicense?: boolean;
+    logLevel?: MediaParserLogLevel;
+}) => Promise<void>;
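Per the declaration above, the new public extractFrames() API accepts either a fixed list of timestamps or a callback that derives them from the track. A usage sketch; the source URL and timestamps are illustrative:

    // TypeScript sketch of calling the API declared above.
    import {extractFrames} from '@remotion/webcodecs';

    await extractFrames({
        src: 'https://example.com/video.mp4',
        timestampsInSeconds: [0, 1, 2.5],
        acknowledgeRemotionLicense: true,
        onFrame: (frame) => {
            // Use the VideoFrame (e.g. draw it to a canvas), then release it.
            frame.close();
        },
    });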
package/dist/extract-frames.js
ADDED
@@ -0,0 +1,114 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.extractFrames = void 0;
+const media_parser_1 = require("@remotion/media-parser");
+const worker_1 = require("@remotion/media-parser/worker");
+const create_video_decoder_1 = require("./create-video-decoder");
+const with_resolvers_1 = require("./create/with-resolvers");
+const internalExtractFrames = ({ src, onFrame, signal, timestampsInSeconds, acknowledgeRemotionLicense, logLevel, }) => {
+    const controller = (0, media_parser_1.mediaParserController)();
+    const expectedFrames = [];
+    const resolvers = (0, with_resolvers_1.withResolvers)();
+    const abortListener = () => {
+        controller.abort();
+        resolvers.reject(new media_parser_1.MediaParserAbortError('Aborted by user'));
+    };
+    signal?.addEventListener('abort', abortListener, { once: true });
+    let dur = null;
+    (0, worker_1.parseMediaOnWebWorker)({
+        src: new URL(src, window.location.href),
+        acknowledgeRemotionLicense,
+        controller,
+        logLevel,
+        onDurationInSeconds(durationInSeconds) {
+            dur = durationInSeconds;
+        },
+        onVideoTrack: async ({ track }) => {
+            const timestampTargetsUnsorted = typeof timestampsInSeconds === 'function'
+                ? await timestampsInSeconds({
+                    track,
+                    durationInSeconds: dur,
+                })
+                : timestampsInSeconds;
+            const timestampTargets = timestampTargetsUnsorted.sort((a, b) => a - b);
+            controller.seek(timestampTargets[0]);
+            const decoder = (0, create_video_decoder_1.createVideoDecoder)({
+                onFrame: (frame) => {
+                    if (frame.timestamp >= expectedFrames[0] - 1) {
+                        expectedFrames.shift();
+                        onFrame(frame);
+                    }
+                    else {
+                        frame.close();
+                    }
+                },
+                onError: (e) => {
+                    controller.abort();
+                    try {
+                        decoder.close();
+                    }
+                    catch { }
+                    resolvers.reject(e);
+                },
+                track,
+            });
+            const queued = [];
+            const doProcess = async () => {
+                expectedFrames.push(timestampTargets.shift() * media_parser_1.WEBCODECS_TIMESCALE);
+                while (queued.length > 0) {
+                    const sam = queued.shift();
+                    await decoder.waitForQueueToBeLessThan(10);
+                    await decoder.decode(sam);
+                }
+            };
+            return async (sample) => {
+                const nextTimestampWeWant = timestampTargets[0];
+                if (sample.type === 'key') {
+                    queued.length = 0;
+                }
+                queued.push(sample);
+                if (sample.timestamp >=
+                    timestampTargets[timestampTargets.length - 1] * media_parser_1.WEBCODECS_TIMESCALE) {
+                    await doProcess();
+                    await decoder.flush();
+                    controller.abort();
+                    return;
+                }
+                if (nextTimestampWeWant === undefined) {
+                    throw new Error('this should not happen');
+                }
+                if (sample.timestamp >= nextTimestampWeWant * media_parser_1.WEBCODECS_TIMESCALE) {
+                    await doProcess();
+                    if (timestampTargets.length === 0) {
+                        await decoder.flush();
+                        controller.abort();
+                    }
+                }
+            };
+        },
+    })
+        .then(() => {
+        resolvers.resolve();
+    })
+        .catch((e) => {
+        if (!(0, media_parser_1.hasBeenAborted)(e)) {
+            resolvers.reject(e);
+        }
+        else {
+            resolvers.resolve();
+        }
+    })
+        .finally(() => {
+        signal?.removeEventListener('abort', abortListener);
+    });
+    return resolvers.promise;
+};
+const extractFrames = (options) => {
+    return internalExtractFrames({
+        ...options,
+        signal: options.signal ?? null,
+        acknowledgeRemotionLicense: options.acknowledgeRemotionLicense ?? false,
+        logLevel: options.logLevel ?? 'info',
+    });
+};
+exports.extractFrames = extractFrames;
package/dist/flush-pending.d.ts
ADDED
@@ -0,0 +1,10 @@
+export type FlushPending = {
+    resolve: (value: void | PromiseLike<void>) => void;
+    reject: (reason?: any) => void;
+    promise: Promise<void>;
+};
+export declare const makeFlushPending: () => {
+    promise: Promise<void>;
+    resolve: (value: void | PromiseLike<void>) => void;
+    reject: (reason?: any) => void;
+};

package/dist/flush-pending.js
ADDED
@@ -0,0 +1,13 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.makeFlushPending = void 0;
+const with_resolvers_1 = require("./create/with-resolvers");
+const makeFlushPending = () => {
+    const { promise, resolve, reject } = (0, with_resolvers_1.withResolvers)();
+    return {
+        promise,
+        resolve,
+        reject,
+    };
+};
+exports.makeFlushPending = makeFlushPending;
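makeFlushPending builds on a withResolvers() helper from ./create/with-resolvers, which is not part of this diff. Such a helper is conventionally equivalent to Promise.withResolvers(); a sketch of what it is assumed to look like:

    // TypeScript sketch of a withResolvers()-style helper (assumed, not shown in this diff).
    const withResolvers = <T = void>() => {
        let resolve!: (value: T | PromiseLike<T>) => void;
        let reject!: (reason?: unknown) => void;
        const promise = new Promise<T>((res, rej) => {
            resolve = res;
            reject = rej;
        });
        return {promise, resolve, reject};
    };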
package/dist/get-wave-audio-decoder.js
CHANGED
@@ -56,16 +56,25 @@ const getWaveAudioDecoder = ({ onFrame, config, sampleFormat, ioSynchronizer, on
            onError(err);
        }
    };
+    let lastReset = null;
+    let mostRecentSampleInput = null;
    return {
        close() {
            return Promise.resolve();
        },
        decode(audioSample) {
+            mostRecentSampleInput = audioSample.timestamp;
            return processSample(audioSample);
        },
        flush: () => Promise.resolve(),
        waitForQueueToBeLessThan: ioSynchronizer.waitForQueueSize,
-        reset: () => {
+        reset: () => {
+            lastReset = Date.now();
+        },
+        checkReset: () => ({
+            wasReset: () => lastReset !== null && lastReset > Date.now(),
+        }),
+        getMostRecentSampleInput: () => mostRecentSampleInput,
    };
 };
 exports.getWaveAudioDecoder = getWaveAudioDecoder;
package/dist/index.d.ts
CHANGED
@@ -13,6 +13,8 @@ export { createVideoDecoder } from './create-video-decoder';
 export type { WebCodecsVideoDecoder } from './create-video-decoder';
 export { defaultOnAudioTrackHandler } from './default-on-audio-track-handler';
 export { defaultOnVideoTrackHandler } from './default-on-video-track-handler';
+export { extractFrames } from './extract-frames';
+export type { ExtractFramesTimestampsInSecondsFn } from './extract-frames';
 export { getAvailableAudioCodecs } from './get-available-audio-codecs';
 export type { ConvertMediaAudioCodec } from './get-available-audio-codecs';
 export { getAvailableContainers } from './get-available-containers';
@@ -29,18 +31,18 @@ export type { WebCodecsVideoEncoder } from './video-encoder';
 export { webcodecsController } from './webcodecs-controller';
 export type { WebCodecsController } from './webcodecs-controller';
 export declare const WebCodecsInternals: {
-    rotateAndResizeVideoFrame: ({ frame, rotation,
+    rotateAndResizeVideoFrame: ({ frame, rotation, needsToBeMultipleOfTwo, resizeOperation, }: {
         frame: VideoFrame;
         rotation: number;
-
+        needsToBeMultipleOfTwo: boolean;
         resizeOperation: import("./resizing/mode").ResizeOperation | null;
     }) => VideoFrame;
     normalizeVideoRotation: (rotation: number) => number;
-    calculateNewDimensionsFromDimensions: ({ width, height, rotation, resizeOperation,
+    calculateNewDimensionsFromDimensions: ({ width, height, rotation, resizeOperation, needsToBeMultipleOfTwo, }: {
        width: number;
        height: number;
        rotation: number;
        resizeOperation: import("./resizing/mode").ResizeOperation | null;
-
+        needsToBeMultipleOfTwo: boolean;
    }) => import("@remotion/media-parser").MediaParserDimensions;
 };
package/dist/index.js
CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.WebCodecsInternals = exports.webcodecsController = exports.createVideoEncoder = exports.getDefaultVideoCodec = exports.getDefaultAudioCodec = exports.getAvailableVideoCodecs = exports.getAvailableContainers = exports.getAvailableAudioCodecs = exports.defaultOnVideoTrackHandler = exports.defaultOnAudioTrackHandler = exports.createVideoDecoder = exports.createAudioDecoder = exports.convertMedia = exports.convertAudioData = exports.canReencodeVideoTrack = exports.canReencodeAudioTrack = exports.canCopyVideoTrack = exports.canCopyAudioTrack = exports.createAudioEncoder = void 0;
+exports.WebCodecsInternals = exports.webcodecsController = exports.createVideoEncoder = exports.getDefaultVideoCodec = exports.getDefaultAudioCodec = exports.getAvailableVideoCodecs = exports.getAvailableContainers = exports.getAvailableAudioCodecs = exports.extractFrames = exports.defaultOnVideoTrackHandler = exports.defaultOnAudioTrackHandler = exports.createVideoDecoder = exports.createAudioDecoder = exports.convertMedia = exports.convertAudioData = exports.canReencodeVideoTrack = exports.canReencodeAudioTrack = exports.canCopyVideoTrack = exports.canCopyAudioTrack = exports.createAudioEncoder = void 0;
 const rotate_and_resize_video_frame_1 = require("./rotate-and-resize-video-frame");
 const rotation_1 = require("./rotation");
 const set_remotion_imported_1 = require("./set-remotion-imported");
@@ -26,6 +26,8 @@ var default_on_audio_track_handler_1 = require("./default-on-audio-track-handler
 Object.defineProperty(exports, "defaultOnAudioTrackHandler", { enumerable: true, get: function () { return default_on_audio_track_handler_1.defaultOnAudioTrackHandler; } });
 var default_on_video_track_handler_1 = require("./default-on-video-track-handler");
 Object.defineProperty(exports, "defaultOnVideoTrackHandler", { enumerable: true, get: function () { return default_on_video_track_handler_1.defaultOnVideoTrackHandler; } });
+var extract_frames_1 = require("./extract-frames");
+Object.defineProperty(exports, "extractFrames", { enumerable: true, get: function () { return extract_frames_1.extractFrames; } });
 var get_available_audio_codecs_1 = require("./get-available-audio-codecs");
 Object.defineProperty(exports, "getAvailableAudioCodecs", { enumerable: true, get: function () { return get_available_audio_codecs_1.getAvailableAudioCodecs; } });
 var get_available_containers_1 = require("./get-available-containers");
package/dist/io-manager/io-synchronizer.d.ts
CHANGED
@@ -8,5 +8,6 @@ export declare const makeIoSynchronizer: ({ logLevel, label, controller, }: {
     inputItem: (timestamp: number) => void;
     onOutput: (timestamp: number) => void;
     waitForQueueSize: (queueSize: number) => Promise<void>;
+    clearQueue: () => void;
 };
 export type IoSynchronizer = ReturnType<typeof makeIoSynchronizer>;

package/dist/io-manager/io-synchronizer.js
CHANGED
@@ -11,8 +11,11 @@ const makeIoSynchronizer = ({ logLevel, label, controller, }) => {
     let lastOutput = 0;
     let inputsSinceLastOutput = 0;
     let inputs = [];
+    let resolvers = [];
     const getQueuedItems = () => {
-        inputs = inputs.filter(
+        inputs = inputs.filter(
+        // In chrome, the last output sometimes shifts the timestamp by 1 macrosecond - allowing this to happen
+        (input) => Math.floor(input) > Math.floor(lastOutput) + 1);
         return inputs.length;
     };
     const printState = (prefix) => {
@@ -40,8 +43,10 @@ const makeIoSynchronizer = ({ logLevel, label, controller, }) => {
         const on = () => {
             eventEmitter.removeEventListener('output', on);
             resolve();
+            resolvers = resolvers.filter((resolver) => resolver !== resolve);
         };
         eventEmitter.addEventListener('output', on);
+        resolvers.push(resolve);
         return promise;
     };
     const makeErrorBanner = () => {
@@ -80,10 +85,22 @@ const makeIoSynchronizer = ({ logLevel, label, controller, }) => {
            controller._internals._mediaParserController._internals.signal.removeEventListener('abort', clear);
        }
    };
+    const clearQueue = () => {
+        inputs.length = 0;
+        lastInput = 0;
+        lastOutput = 0;
+        inputsSinceLastOutput = 0;
+        resolvers.forEach((resolver) => {
+            return resolver();
+        });
+        resolvers.length = 0;
+        inputs.length = 0;
+    };
     return {
         inputItem,
         onOutput,
         waitForQueueSize,
+        clearQueue,
     };
 };
 exports.makeIoSynchronizer = makeIoSynchronizer;
package/dist/on-frame.js
CHANGED
@@ -9,7 +9,7 @@ const onFrame = async ({ frame: unrotatedFrame, onVideoFrame, track, outputCodec
         rotation,
         frame: unrotatedFrame,
         resizeOperation,
-
+        needsToBeMultipleOfTwo: outputCodec === 'h264',
     });
     if (unrotatedFrame !== rotated) {
         unrotatedFrame.close();
package/dist/processing-queue.d.ts
CHANGED
@@ -13,6 +13,7 @@ export declare function processingQueue<T extends Processable>({ onOutput, logLe
     inputItem: (timestamp: number) => void;
     onOutput: (timestamp: number) => void;
     waitForQueueSize: (queueSize: number) => Promise<void>;
+    clearQueue: () => void;
 };
 };
 export {};

package/dist/reencode-audio-track.js
CHANGED
@@ -145,7 +145,8 @@ const reencodeAudioTrack = async ({ audioOperation, track, logLevel, abortConver
         logLevel,
     });
     state.addWaitForFinishPromise(async () => {
-
+        log_1.Log.verbose(logLevel, 'Waiting for audio decoder to finish');
+        await audioDecoder.flush();
         log_1.Log.verbose(logLevel, 'Audio decoder finished');
         audioDecoder.close();
         await audioProcessingQueue.ioSynchronizer.waitForQueueSize(0);
package/dist/reencode-video-track.js
CHANGED
@@ -21,7 +21,7 @@ const reencodeVideoTrack = async ({ videoOperation, rotate, track, logLevel, abo
         width: track.codedWidth,
         height: track.codedHeight,
         rotation,
-
+        needsToBeMultipleOfTwo: videoOperation.videoCodec === 'h264',
         resizeOperation: videoOperation.resize ?? null,
     });
     const videoEncoderConfig = await (0, video_encoder_config_1.getVideoEncoderConfig)({
@@ -127,7 +127,7 @@ const reencodeVideoTrack = async ({ videoOperation, rotate, track, logLevel, abo
     });
     state.addWaitForFinishPromise(async () => {
         log_1.Log.verbose(logLevel, 'Waiting for video decoder to finish');
-        await videoDecoder.
+        await videoDecoder.flush();
         videoDecoder.close();
         log_1.Log.verbose(logLevel, 'Video decoder finished. Waiting for encoder to finish');
         await frameSorter.flush();
@@ -141,7 +141,7 @@ const reencodeVideoTrack = async ({ videoOperation, rotate, track, logLevel, abo
     return async (chunk) => {
         progressTracker.setPossibleLowestTimestamp(Math.min(chunk.timestamp, chunk.decodingTimestamp ?? Infinity));
         await controller._internals._mediaParserController._internals.checkForAbortAndPause();
-        await videoDecoder.waitForQueueToBeLessThan(
+        await videoDecoder.waitForQueueToBeLessThan(15);
         if (chunk.type === 'key') {
             await videoDecoder.flush();
         }
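The chunk handler above now bounds the decoder queue at 15 items and flushes whenever a keyframe arrives, so decoded frames from the previous group of pictures are emitted before the next one starts. A simplified sketch of that driving pattern; the decoder, the chunk source, and the final decode call are assumptions:

    // TypeScript sketch of the per-chunk backpressure/keyframe-flush pattern above.
    const onChunk = async (chunk: EncodedVideoChunkInit) => {
        await videoDecoder.waitForQueueToBeLessThan(15); // apply backpressure
        if (chunk.type === 'key') {
            await videoDecoder.flush(); // drain the previous GOP before a new one begins
        }
        await videoDecoder.decode(chunk);
    };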
package/dist/resizing/calculate-new-size.d.ts
CHANGED
@@ -1,8 +1,7 @@
-import type { MediaParserDimensions
-import type { ConvertMediaVideoCodec } from '../get-available-video-codecs';
+import type { MediaParserDimensions } from '@remotion/media-parser';
 import type { ResizeOperation } from './mode';
-export declare const calculateNewSizeAfterResizing: ({ dimensions, resizeOperation,
+export declare const calculateNewSizeAfterResizing: ({ dimensions, resizeOperation, needsToBeMultipleOfTwo, }: {
     dimensions: MediaParserDimensions;
     resizeOperation: ResizeOperation | null;
-
+    needsToBeMultipleOfTwo: boolean;
 }) => MediaParserDimensions;

package/dist/resizing/calculate-new-size.js
CHANGED
@@ -10,8 +10,7 @@ const ensureMultipleOfTwo = ({ dimensions, needsToBeMultipleOfTwo, }) => {
         height: Math.floor(dimensions.height / 2) * 2,
     };
 };
-const calculateNewSizeAfterResizing = ({ dimensions, resizeOperation,
-    const needsToBeMultipleOfTwo = videoCodec === 'h264';
+const calculateNewSizeAfterResizing = ({ dimensions, resizeOperation, needsToBeMultipleOfTwo, }) => {
     if (resizeOperation === null) {
         return ensureMultipleOfTwo({
             dimensions,
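ensureMultipleOfTwo, partially visible above, floors each dimension to an even number, which H.264 encoders require; the needsToBeMultipleOfTwo flag now carries that decision explicitly instead of passing the codec name around. A worked example with illustrative values:

    // TypeScript sketch of the even-dimension rule applied for H.264 output.
    const ensureEven = (n: number) => Math.floor(n / 2) * 2;
    ensureEven(1281); // 1280
    ensureEven(721);  // 720
    // With needsToBeMultipleOfTwo set to false (other codecs), dimensions are left untouched.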
package/dist/rotate-and-resize-video-frame.d.ts
CHANGED
@@ -1,9 +1,8 @@
-import type { ConvertMediaVideoCodec } from './get-available-video-codecs';
 import type { ResizeOperation } from './resizing/mode';
 export declare const normalizeVideoRotation: (rotation: number) => number;
-export declare const rotateAndResizeVideoFrame: ({ frame, rotation,
+export declare const rotateAndResizeVideoFrame: ({ frame, rotation, needsToBeMultipleOfTwo, resizeOperation, }: {
     frame: VideoFrame;
     rotation: number;
-
+    needsToBeMultipleOfTwo: boolean;
     resizeOperation: ResizeOperation | null;
 }) => VideoFrame;

package/dist/rotate-and-resize-video-frame.js
CHANGED
@@ -6,7 +6,7 @@ const normalizeVideoRotation = (rotation) => {
     return ((rotation % 360) + 360) % 360;
 };
 exports.normalizeVideoRotation = normalizeVideoRotation;
-const rotateAndResizeVideoFrame = ({ frame, rotation,
+const rotateAndResizeVideoFrame = ({ frame, rotation, needsToBeMultipleOfTwo, resizeOperation, }) => {
     const normalized = ((rotation % 360) + 360) % 360;
     // No resize, no rotation
     if (normalized === 0 && resizeOperation === null) {
@@ -19,7 +19,7 @@ const rotateAndResizeVideoFrame = ({ frame, rotation, videoCodec, resizeOperatio
         height: frame.displayHeight,
         width: frame.displayWidth,
         rotation,
-
+        needsToBeMultipleOfTwo,
         resizeOperation,
     });
     // No rotation, and resize turned out to be same dimensions
package/dist/rotation.d.ts
CHANGED
@@ -1,5 +1,4 @@
-import type { MediaParserDimensions
-import type { ConvertMediaVideoCodec } from './get-available-video-codecs';
+import type { MediaParserDimensions } from '@remotion/media-parser';
 import type { ResizeOperation } from './resizing/mode';
 export declare const calculateNewDimensionsFromRotate: ({ height, width, rotation, }: MediaParserDimensions & {
     rotation: number;
@@ -7,10 +6,10 @@ export declare const calculateNewDimensionsFromRotate: ({ height, width, rotatio
     height: number;
     width: number;
 };
-export declare const calculateNewDimensionsFromRotateAndScale: ({ width, height, rotation, resizeOperation,
+export declare const calculateNewDimensionsFromRotateAndScale: ({ width, height, rotation, resizeOperation, needsToBeMultipleOfTwo, }: {
     width: number;
     height: number;
     rotation: number;
     resizeOperation: ResizeOperation | null;
-
+    needsToBeMultipleOfTwo: boolean;
 }) => MediaParserDimensions;
package/dist/rotation.js
CHANGED
@@ -14,7 +14,7 @@ const calculateNewDimensionsFromRotate = ({ height, width, rotation, }) => {
     };
 };
 exports.calculateNewDimensionsFromRotate = calculateNewDimensionsFromRotate;
-const calculateNewDimensionsFromRotateAndScale = ({ width, height, rotation, resizeOperation,
+const calculateNewDimensionsFromRotateAndScale = ({ width, height, rotation, resizeOperation, needsToBeMultipleOfTwo, }) => {
     const { height: newHeight, width: newWidth } = (0, exports.calculateNewDimensionsFromRotate)({
         height,
         rotation,
@@ -23,7 +23,7 @@ const calculateNewDimensionsFromRotateAndScale = ({ width, height, rotation, res
     return (0, calculate_new_size_1.calculateNewSizeAfterResizing)({
         dimensions: { height: newHeight, width: newWidth },
         resizeOperation,
-
+        needsToBeMultipleOfTwo,
     });
 };
 exports.calculateNewDimensionsFromRotateAndScale = calculateNewDimensionsFromRotateAndScale;
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@remotion/webcodecs",
-  "version": "4.0.
+  "version": "4.0.312",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
   "module": "dist/esm/index.mjs",
@@ -19,17 +19,18 @@
   "author": "Jonny Burger <jonny@remotion.dev>",
   "license": "Remotion License (See https://remotion.dev/docs/webcodecs#license)",
   "dependencies": {
-    "@remotion/
-    "@remotion/
+    "@remotion/licensing": "4.0.312",
+    "@remotion/media-parser": "4.0.312"
   },
   "peerDependencies": {},
   "devDependencies": {
     "@types/dom-webcodecs": "0.1.11",
     "playwright": "1.51.1",
+    "vite": "5.4.19",
     "@playwright/test": "1.51.1",
     "eslint": "9.19.0",
-    "@remotion/
-    "@remotion/
+    "@remotion/example-videos": "4.0.312",
+    "@remotion/eslint-config-internal": "4.0.312"
   },
   "keywords": [],
   "publishConfig": {