@remotion/media 4.0.353 → 4.0.354
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/audio/audio-for-rendering.js +37 -27
- package/dist/audio/audio.js +6 -3
- package/dist/audio/props.d.ts +0 -5
- package/dist/audio-extraction/extract-audio.d.ts +6 -3
- package/dist/audio-extraction/extract-audio.js +16 -7
- package/dist/audio-for-rendering.d.ts +3 -0
- package/dist/audio-for-rendering.js +94 -0
- package/dist/audio.d.ts +3 -0
- package/dist/audio.js +60 -0
- package/dist/audiodata-to-array.d.ts +0 -0
- package/dist/audiodata-to-array.js +1 -0
- package/dist/convert-audiodata/apply-volume.d.ts +1 -0
- package/dist/convert-audiodata/apply-volume.js +17 -0
- package/dist/convert-audiodata/convert-audiodata.d.ts +2 -2
- package/dist/convert-audiodata/convert-audiodata.js +13 -7
- package/dist/convert-audiodata/data-types.d.ts +1 -0
- package/dist/convert-audiodata/data-types.js +22 -0
- package/dist/convert-audiodata/is-planar-format.d.ts +1 -0
- package/dist/convert-audiodata/is-planar-format.js +3 -0
- package/dist/convert-audiodata/log-audiodata.d.ts +1 -0
- package/dist/convert-audiodata/log-audiodata.js +8 -0
- package/dist/convert-audiodata/resample-audiodata.d.ts +1 -2
- package/dist/convert-audiodata/resample-audiodata.js +39 -18
- package/dist/convert-audiodata/trim-audiodata.d.ts +0 -0
- package/dist/convert-audiodata/trim-audiodata.js +1 -0
- package/dist/deserialized-audiodata.d.ts +15 -0
- package/dist/deserialized-audiodata.js +26 -0
- package/dist/esm/index.mjs +206 -120
- package/dist/extract-audio.d.ts +7 -0
- package/dist/extract-audio.js +98 -0
- package/dist/extract-frame-and-audio.d.ts +3 -2
- package/dist/extract-frame-and-audio.js +4 -3
- package/dist/extract-frame-via-broadcast-channel.d.ts +15 -0
- package/dist/extract-frame-via-broadcast-channel.js +104 -0
- package/dist/extract-frame.d.ts +27 -0
- package/dist/extract-frame.js +21 -0
- package/dist/extrct-audio.d.ts +7 -0
- package/dist/extrct-audio.js +94 -0
- package/dist/get-frames-since-keyframe.d.ts +22 -0
- package/dist/get-frames-since-keyframe.js +41 -0
- package/dist/keyframe-bank.d.ts +25 -0
- package/dist/keyframe-bank.js +120 -0
- package/dist/keyframe-manager.d.ts +23 -0
- package/dist/keyframe-manager.js +170 -0
- package/dist/looped-frame.d.ts +9 -0
- package/dist/looped-frame.js +10 -0
- package/dist/new-video-for-rendering.d.ts +3 -0
- package/dist/new-video-for-rendering.js +108 -0
- package/dist/new-video.d.ts +3 -0
- package/dist/new-video.js +37 -0
- package/dist/props.d.ts +29 -0
- package/dist/props.js +1 -0
- package/dist/remember-actual-matroska-timestamps.d.ts +4 -0
- package/dist/remember-actual-matroska-timestamps.js +19 -0
- package/dist/serialize-videoframe.d.ts +0 -0
- package/dist/serialize-videoframe.js +1 -0
- package/dist/video/media-player.d.ts +62 -0
- package/dist/video/media-player.js +361 -0
- package/dist/video/new-video-for-preview.d.ts +10 -0
- package/dist/video/new-video-for-preview.js +108 -0
- package/dist/video/props.d.ts +0 -5
- package/dist/video/timeout-utils.d.ts +2 -0
- package/dist/video/timeout-utils.js +18 -0
- package/dist/video/video-for-preview.d.ts +11 -0
- package/dist/video/video-for-preview.js +113 -0
- package/dist/video/video-for-rendering.js +41 -31
- package/dist/video/video.js +2 -2
- package/dist/video-extraction/extract-frame-via-broadcast-channel.d.ts +3 -2
- package/dist/video-extraction/extract-frame-via-broadcast-channel.js +9 -5
- package/dist/video-extraction/extract-frame.js +3 -0
- package/dist/video-extraction/get-frames-since-keyframe.d.ts +1 -1
- package/dist/video-extraction/get-frames-since-keyframe.js +6 -7
- package/dist/video-extraction/media-player.d.ts +64 -0
- package/dist/video-extraction/media-player.js +501 -0
- package/dist/video-extraction/new-video-for-preview.d.ts +10 -0
- package/dist/video-extraction/new-video-for-preview.js +114 -0
- package/dist/video-for-rendering.d.ts +3 -0
- package/dist/video-for-rendering.js +108 -0
- package/dist/video.d.ts +3 -0
- package/dist/video.js +37 -0
- package/package.json +3 -3
package/dist/video/video-for-rendering.js CHANGED

@@ -1,40 +1,24 @@
 import { jsx as _jsx } from "react/jsx-runtime";
 import { useContext, useLayoutEffect, useMemo, useRef, useState, } from 'react';
 import { cancelRender, Internals, useCurrentFrame, useDelayRender, useRemotionEnvironment, useVideoConfig, } from 'remotion';
+import { applyVolume } from '../convert-audiodata/apply-volume';
+import { frameForVolumeProp } from '../looped-frame';
 import { extractFrameViaBroadcastChannel } from '../video-extraction/extract-frame-via-broadcast-channel';
 export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted, loopVolumeCurveBehavior, delayRenderRetries, delayRenderTimeoutInMilliseconds,
 // call when a frame of the video, i.e. frame drawn on canvas
 onVideoFrame, logLevel = window.remotion_logLevel, loop, style, className, }) => {
+if (!src) {
+throw new TypeError('No `src` was passed to <Video>.');
+}
+const frame = useCurrentFrame();
 const absoluteFrame = Internals.useTimelinePosition();
 const { fps } = useVideoConfig();
-const canvasRef = useRef(null);
 const { registerRenderAsset, unregisterRenderAsset } = useContext(Internals.RenderAssetManager);
-const
-const volumePropsFrame = Internals.useFrameForVolumeProp(loopVolumeCurveBehavior ?? 'repeat');
-const environment = useRemotionEnvironment();
+const startsAt = Internals.useMediaStartsAt();
 const [id] = useState(() => `${Math.random()}`.replace('0.', ''));
-
-throw new TypeError('No `src` was passed to <Video>.');
-}
-const volume = Internals.evaluateVolume({
-volume: volumeProp,
-frame: volumePropsFrame,
-mediaVolume: 1,
-});
-Internals.warnAboutTooHighVolume(volume);
-const shouldRenderAudio = useMemo(() => {
-if (!window.remotion_audioEnabled) {
-return false;
-}
-if (muted) {
-return false;
-}
-if (volume <= 0) {
-return false;
-}
-return true;
-}, [muted, volume]);
+const environment = useRemotionEnvironment();
 const { delayRender, continueRender } = useDelayRender();
+const canvasRef = useRef(null);
 useLayoutEffect(() => {
 if (!canvasRef.current) {
 return;

@@ -46,18 +30,27 @@ onVideoFrame, logLevel = window.remotion_logLevel, loop, style, className, }) =>
 retries: delayRenderRetries ?? undefined,
 timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined,
 });
+const shouldRenderAudio = (() => {
+if (!window.remotion_audioEnabled) {
+return false;
+}
+if (muted) {
+return false;
+}
+return true;
+})();
 extractFrameViaBroadcastChannel({
 src,
 timeInSeconds: timestamp,
 durationInSeconds,
+playbackRate: playbackRate ?? 1,
 logLevel: logLevel ?? 'info',
 includeAudio: shouldRenderAudio,
 includeVideo: window.remotion_videoEnabled,
 isClientSideRendering: environment.isClientSideRendering,
-volume,
 loop: loop ?? false,
 })
-.then(({ frame: imageBitmap, audio }) => {
+.then(({ frame: imageBitmap, audio, durationInSeconds: assetDurationInSeconds, }) => {
 if (imageBitmap) {
 onVideoFrame?.(imageBitmap);
 const context = canvasRef.current?.getContext('2d');

@@ -79,7 +72,22 @@ onVideoFrame, logLevel = window.remotion_logLevel, loop, style, className, }) =>
 else if (window.remotion_videoEnabled) {
 cancelRender(new Error('No video frame found'));
 }
-
+const volumePropsFrame = frameForVolumeProp({
+behavior: loopVolumeCurveBehavior ?? 'repeat',
+loop: loop ?? false,
+assetDurationInSeconds: assetDurationInSeconds ?? 0,
+fps,
+frame,
+startsAt,
+});
+const volume = Internals.evaluateVolume({
+volume: volumeProp,
+frame: volumePropsFrame,
+mediaVolume: 1,
+});
+Internals.warnAboutTooHighVolume(volume);
+if (audio && volume > 0) {
+applyVolume(audio.data, volume);
 registerRenderAsset({
 type: 'inline-audio',
 id,

@@ -111,14 +119,16 @@ onVideoFrame, logLevel = window.remotion_logLevel, loop, style, className, }) =>
 frame,
 id,
 logLevel,
+loop,
+loopVolumeCurveBehavior,
+muted,
 onVideoFrame,
 playbackRate,
 registerRenderAsset,
-shouldRenderAudio,
 src,
+startsAt,
 unregisterRenderAsset,
-
-loop,
+volumeProp,
 ]);
 const classNameValue = useMemo(() => {
 return [Internals.OBJECTFIT_CONTAIN_CLASS_NAME, className]
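The net effect of the hunks above: `volume` is no longer forwarded to the extractor. Instead, the per-frame volume is evaluated after the frame and audio come back, and `applyVolume(audio.data, volume)` bakes the gain directly into the extracted PCM samples. The new `convert-audiodata/apply-volume.js` helper itself is not rendered in this diff, so the sketch below is only an assumption of its shape, based on the call site above and the `PcmS16AudioData` type that appears later in this diff (assuming `data` is an `Int16Array` of interleaved 16-bit samples):

```ts
// Hypothetical sketch of applyVolume for 16-bit PCM data; the actual
// implementation in package/dist/convert-audiodata/apply-volume.js is not
// shown in this diff.
export const applyVolume = (data: Int16Array, volume: number): void => {
  if (volume === 1) {
    return; // unity gain: nothing to do
  }
  for (let i = 0; i < data.length; i++) {
    // Scale in place and clamp to the signed 16-bit range.
    const scaled = Math.round(data[i] * volume);
    data[i] = Math.max(-32768, Math.min(32767, scaled));
  }
};
```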
package/dist/video/video.js CHANGED

@@ -32,6 +32,6 @@ export const Video = (props) => {
 if (environment.isRendering) {
 return _jsx(VideoForRendering, { ...otherProps });
 }
-const {
-return (_jsx(VideoForPreview, { _remotionInternalStack: stack ?? null, _remotionInternalNativeLoopPassed: false, onDuration: onDuration, onlyWarnForMediaSeekingError: true, pauseWhenBuffering: pauseWhenBuffering ?? false, showInTimeline: showInTimeline ?? true,
+const { onVideoFrame, delayRenderRetries, delayRenderTimeoutInMilliseconds, ...propsForPreview } = otherProps;
+return (_jsx(VideoForPreview, { _remotionInternalStack: stack ?? null, _remotionInternalNativeLoopPassed: false, onDuration: onDuration, onlyWarnForMediaSeekingError: true, pauseWhenBuffering: pauseWhenBuffering ?? false, showInTimeline: showInTimeline ?? true, onVideoFrame: onVideoFrame ?? null, ...propsForPreview }));
 };
package/dist/video-extraction/extract-frame-via-broadcast-channel.d.ts CHANGED

@@ -1,16 +1,17 @@
 import { type LogLevel } from 'remotion';
 import type { PcmS16AudioData } from '../convert-audiodata/convert-audiodata';
-export declare const extractFrameViaBroadcastChannel: ({ src, timeInSeconds, logLevel, durationInSeconds, includeAudio, includeVideo, isClientSideRendering,
+export declare const extractFrameViaBroadcastChannel: ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, isClientSideRendering, loop, }: {
 src: string;
 timeInSeconds: number;
 durationInSeconds: number;
+playbackRate: number;
 logLevel: LogLevel;
 includeAudio: boolean;
 includeVideo: boolean;
 isClientSideRendering: boolean;
-volume: number;
 loop: boolean;
 }) => Promise<{
 frame: ImageBitmap | VideoFrame | null;
 audio: PcmS16AudioData | null;
+durationInSeconds: number | null;
 }>;
package/dist/video-extraction/extract-frame-via-broadcast-channel.js CHANGED

@@ -5,14 +5,14 @@ if (window.remotion_broadcastChannel && window.remotion_isMainTab) {
 const data = event.data;
 if (data.type === 'request') {
 try {
-const { frame, audio } = await extractFrameAndAudio({
+const { frame, audio, durationInSeconds } = await extractFrameAndAudio({
 src: data.src,
 timeInSeconds: data.timeInSeconds,
 logLevel: data.logLevel,
 durationInSeconds: data.durationInSeconds,
+playbackRate: data.playbackRate,
 includeAudio: data.includeAudio,
 includeVideo: data.includeVideo,
-volume: data.volume,
 loop: data.loop,
 });
 const videoFrame = frame;

@@ -27,6 +27,7 @@ if (window.remotion_broadcastChannel && window.remotion_isMainTab) {
 id: data.id,
 frame: imageBitmap,
 audio,
+durationInSeconds: durationInSeconds ?? null,
 };
 window.remotion_broadcastChannel.postMessage(response);
 videoFrame?.close();

@@ -45,16 +46,16 @@ if (window.remotion_broadcastChannel && window.remotion_isMainTab) {
 }
 });
 }
-export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel, durationInSeconds, includeAudio, includeVideo, isClientSideRendering,
+export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, isClientSideRendering, loop, }) => {
 if (isClientSideRendering || window.remotion_isMainTab) {
 return extractFrameAndAudio({
 logLevel,
 src,
 timeInSeconds,
 durationInSeconds,
+playbackRate,
 includeAudio,
 includeVideo,
-volume,
 loop,
 });
 }

@@ -69,6 +70,9 @@ export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel,
 resolve({
 frame: data.frame ? data.frame : null,
 audio: data.audio ? data.audio : null,
+durationInSeconds: data.durationInSeconds
+? data.durationInSeconds
+: null,
 });
 window.remotion_broadcastChannel.removeEventListener('message', onMessage);
 }

@@ -86,9 +90,9 @@ export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel,
 id: requestId,
 logLevel,
 durationInSeconds,
+playbackRate,
 includeAudio,
 includeVideo,
-volume,
 loop,
 };
 window.remotion_broadcastChannel.postMessage(request);
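For orientation, a call shaped to the updated signature might look as follows. The values and the deep import path are illustrative assumptions; only the parameter list and return shape are taken from the declaration shown above:

```ts
// Assumed deep import into the package's dist output (not a documented entry point).
import {extractFrameViaBroadcastChannel} from '@remotion/media/dist/video-extraction/extract-frame-via-broadcast-channel';

const {frame, audio, durationInSeconds} = await extractFrameViaBroadcastChannel({
  src: 'https://example.com/video.mp4', // illustrative URL
  timeInSeconds: 2.5,
  durationInSeconds: 1 / 30,
  playbackRate: 1, // new in 4.0.354
  logLevel: 'info',
  includeAudio: true,
  includeVideo: true,
  isClientSideRendering: false,
  loop: false,
});
// `volume` is no longer accepted here; callers apply gain to the returned
// PCM data afterwards (see the applyVolume sketch above).
```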
package/dist/video-extraction/extract-frame.js CHANGED

@@ -6,6 +6,9 @@ export const extractFrame = async ({ src, timeInSeconds: unloopedTimeinSeconds,
 sinkPromises[src] = getSinks(src);
 }
 const { video, getDuration } = await sinkPromises[src];
+if (video === null) {
+throw new Error(`No video track found for ${src}`);
+}
 const timeInSeconds = loop
 ? unloopedTimeinSeconds % (await getDuration())
 : unloopedTimeinSeconds;
package/dist/video-extraction/get-frames-since-keyframe.js CHANGED

@@ -8,16 +8,15 @@ export const getSinks = async (src) => {
 });
 const format = await input.getFormat();
 const videoTrack = await input.getPrimaryVideoTrack();
-if (!videoTrack) {
-throw new Error(`No video track found for ${src}`);
-}
 const audioTrack = await input.getPrimaryAudioTrack();
 const isMatroska = format === MATROSKA;
 return {
-video:
-
-
-
+video: videoTrack
+? {
+sampleSink: new VideoSampleSink(videoTrack),
+packetSink: new EncodedPacketSink(videoTrack),
+}
+: null,
 audio: audioTrack
 ? {
 sampleSink: new AudioSampleSink(audioTrack),
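With this change, `getSinks()` no longer throws for sources without a video track; the `video` field is simply `null`, and the error is raised by the caller that actually needs video (see `extract-frame.js` above). A hypothetical audio-only consumer under that assumption, using only the return shape visible in the diff:

```ts
// Hypothetical caller of the getSinks() function modified above; only the
// { video, audio } shape of its return value comes from the diff.
const extractAudioOnly = async (src: string) => {
  const {video, audio} = await getSinks(src);
  if (!audio) {
    throw new Error(`No audio track found for ${src}`);
  }
  if (video === null) {
    // Audio-only media: no video sinks, but audio extraction can proceed.
  }
  return audio.sampleSink;
};
```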
package/dist/video-extraction/media-player.d.ts ADDED

@@ -0,0 +1,64 @@
+import { type LogLevel } from '../log';
+export declare class MediaPlayer {
+private canvas;
+private context;
+private src;
+private logLevel;
+private canvasSink;
+private videoFrameIterator;
+private nextFrame;
+private audioSink;
+private audioBufferIterator;
+private queuedAudioNodes;
+private gainNode;
+private expectedAudioTime;
+private sharedAudioContext;
+private mediaTimeOffset;
+private playing;
+private animationFrameId;
+private asyncId;
+private initialized;
+private totalDuration;
+private actualFps;
+private isStalled;
+private onStalledChangeCallback?;
+private lastAudioProgressAtMs;
+private lastNetworkActivityAtMs;
+private isNetworkActive;
+private isSeeking;
+private canStartAudio;
+constructor({ canvas, src, logLevel, sharedAudioContext, }: {
+canvas: HTMLCanvasElement;
+src: string;
+logLevel: LogLevel;
+sharedAudioContext?: AudioContext | null;
+});
+initialize(startTime?: number): Promise<void>;
+seekTo(time: number): void;
+drawInitialFrame(time?: number): Promise<void>;
+play(): Promise<void>;
+pause(): void;
+dispose(): void;
+get currentTime(): number;
+private getPlaybackTime;
+get duration(): number;
+get isPlaying(): boolean;
+get stalled(): boolean;
+onStalledChange(callback: (isStalled: boolean) => void): void;
+private renderSingleFrame;
+private startRenderLoop;
+private stopRenderLoop;
+private render;
+private startVideoIterator;
+private updateNextFrame;
+private tryStartAudio;
+private getCurrentTimeMs;
+private resetAudioProgressStopwatch;
+private getAudioLookaheadSec;
+private calculateAudioStallThresholdSec;
+private isNetworkStalled;
+private checkVideoStall;
+private checkIfStalled;
+private updateStalledState;
+private runAudioIterator;
+}
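The declaration above is the entire public surface of the new `MediaPlayer` class, which appears to drive canvas-based preview playback with stall detection. A minimal usage sketch, assuming the class is reachable via a deep import into the package's dist output (it is not documented as public API, and the URL is illustrative):

```ts
// Assumed deep import path; MediaPlayer is internal to @remotion/media.
import {MediaPlayer} from '@remotion/media/dist/video-extraction/media-player';

const canvas = document.createElement('canvas');
const player = new MediaPlayer({
  canvas,
  src: 'https://example.com/video.mp4', // illustrative URL
  logLevel: 'info',
  sharedAudioContext: null,
});

await player.initialize(0); // set up sinks, starting at 0 seconds
await player.drawInitialFrame(0); // paint a frame before playback starts
player.onStalledChange((stalled) => {
  console.log('stalled:', stalled, 'at', player.currentTime, 'of', player.duration);
});
await player.play();
// ...later:
player.pause();
player.dispose();
```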
|