@remotion/media 4.0.353 → 4.0.355
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/audio/audio-for-rendering.js +37 -27
- package/dist/audio/audio.js +6 -3
- package/dist/audio/props.d.ts +0 -5
- package/dist/audio-extraction/extract-audio.d.ts +6 -3
- package/dist/audio-extraction/extract-audio.js +17 -12
- package/dist/convert-audiodata/apply-volume.d.ts +1 -0
- package/dist/convert-audiodata/apply-volume.js +17 -0
- package/dist/convert-audiodata/convert-audiodata.d.ts +2 -2
- package/dist/convert-audiodata/convert-audiodata.js +13 -7
- package/dist/convert-audiodata/resample-audiodata.d.ts +1 -2
- package/dist/convert-audiodata/resample-audiodata.js +39 -18
- package/dist/esm/index.mjs +328 -13149
- package/dist/extract-frame-and-audio.d.ts +3 -2
- package/dist/extract-frame-and-audio.js +4 -3
- package/dist/get-sink-weak.d.ts +18 -0
- package/dist/get-sink-weak.js +23 -0
- package/dist/looped-frame.d.ts +9 -0
- package/dist/looped-frame.js +10 -0
- package/dist/video/props.d.ts +0 -5
- package/dist/video/video-for-rendering.js +41 -31
- package/dist/video/video.js +2 -2
- package/dist/video-extraction/extract-frame-via-broadcast-channel.d.ts +3 -2
- package/dist/video-extraction/extract-frame-via-broadcast-channel.js +9 -5
- package/dist/video-extraction/extract-frame.d.ts +0 -2
- package/dist/video-extraction/extract-frame.js +5 -5
- package/dist/video-extraction/get-frames-since-keyframe.d.ts +3 -3
- package/dist/video-extraction/get-frames-since-keyframe.js +8 -9
- package/package.json +5 -5
package/dist/extract-frame-and-audio.d.ts
CHANGED

@@ -1,15 +1,16 @@
 import type { LogLevel } from 'remotion';
 import type { PcmS16AudioData } from './convert-audiodata/convert-audiodata';
-export declare const extractFrameAndAudio: ({ src, timeInSeconds, logLevel, durationInSeconds, includeAudio, includeVideo,
+export declare const extractFrameAndAudio: ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, loop, }: {
 src: string;
 timeInSeconds: number;
 logLevel: LogLevel;
 durationInSeconds: number;
+playbackRate: number;
 includeAudio: boolean;
 includeVideo: boolean;
-volume: number;
 loop: boolean;
 }) => Promise<{
 frame: VideoFrame | null;
 audio: PcmS16AudioData | null;
+durationInSeconds: number | null;
 }>;
package/dist/extract-frame-and-audio.js
CHANGED

@@ -1,6 +1,6 @@
 import { extractAudio } from './audio-extraction/extract-audio';
 import { extractFrame } from './video-extraction/extract-frame';
-export const extractFrameAndAudio = async ({ src, timeInSeconds, logLevel, durationInSeconds, includeAudio, includeVideo,
+export const extractFrameAndAudio = async ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, loop, }) => {
 const [frame, audio] = await Promise.all([
 includeVideo
 ? extractFrame({
@@ -15,14 +15,15 @@ export const extractFrameAndAudio = async ({ src, timeInSeconds, logLevel, durat
 src,
 timeInSeconds,
 durationInSeconds,
-volume,
 logLevel,
 loop,
+playbackRate,
 })
 : null,
 ]);
 return {
 frame: frame?.toVideoFrame() ?? null,
-audio,
+audio: audio?.data ?? null,
+durationInSeconds: audio?.durationInSeconds ?? null,
 };
 };
package/dist/get-sink-weak.d.ts
ADDED

@@ -0,0 +1,18 @@
+import type { LogLevel } from 'remotion';
+import type { GetSink } from './video-extraction/get-frames-since-keyframe';
+export declare const sinkPromises: Record<string, Promise<GetSink>>;
+export declare const getSinkWeak: (src: string, logLevel: LogLevel) => Promise<{
+video: {
+sampleSink: import("mediabunny").VideoSampleSink;
+packetSink: import("mediabunny").EncodedPacketSink;
+} | null;
+audio: {
+sampleSink: import("mediabunny").AudioSampleSink;
+} | null;
+actualMatroskaTimestamps: {
+observeTimestamp: (startTime: number) => void;
+getRealTimestamp: (observedTimestamp: number) => number | null;
+};
+isMatroska: boolean;
+getDuration: () => Promise<number>;
+}>;
package/dist/get-sink-weak.js
ADDED

@@ -0,0 +1,23 @@
+import { Internals } from 'remotion';
+import { getSinks } from './video-extraction/get-frames-since-keyframe';
+export const sinkPromises = {};
+export const getSinkWeak = async (src, logLevel) => {
+let promise = sinkPromises[src];
+if (!promise) {
+promise = getSinks(src);
+sinkPromises[src] = promise;
+}
+let awaited = await promise;
+let deferredValue = awaited.deref();
+if (!deferredValue) {
+Internals.Log.verbose({
+logLevel,
+tag: '@remotion/media',
+}, `Sink for ${src} was garbage collected, creating new sink`);
+promise = getSinks(src);
+sinkPromises[src] = promise;
+awaited = await promise;
+deferredValue = awaited.deref();
+}
+return deferredValue;
+};
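Note on the new caching strategy: getSinkWeak keeps the per-src promise cached, but the resolved sinks are only held through a WeakRef, so they can be garbage collected and are re-created when deref() comes back empty. A minimal standalone sketch of that pattern in TypeScript (hypothetical names, not the package's code):

    // Sketch: cache a promise, hold the result only via WeakRef, re-create after GC.
    const cache: Record<string, Promise<WeakRef<{ close: () => void }>>> = {};

    async function getWeaklyCached(
      key: string,
      open: (key: string) => Promise<WeakRef<{ close: () => void }>>,
    ) {
      let promise = cache[key] ?? (cache[key] = open(key));
      let value = (await promise).deref();
      if (!value) {
        // The referent was collected; open the resource again and replace the cache entry.
        promise = open(key);
        cache[key] = promise;
        value = (await promise).deref();
      }
      return value; // may still be undefined if collected again right away
    }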
package/dist/looped-frame.d.ts
ADDED

@@ -0,0 +1,9 @@
+import { type LoopVolumeCurveBehavior } from 'remotion';
+export declare const frameForVolumeProp: ({ behavior, loop, assetDurationInSeconds, fps, frame, startsAt, }: {
+behavior: LoopVolumeCurveBehavior;
+loop: boolean;
+assetDurationInSeconds: number;
+fps: number;
+frame: number;
+startsAt: number;
+}) => number;
package/dist/looped-frame.js
ADDED

@@ -0,0 +1,10 @@
+export const frameForVolumeProp = ({ behavior, loop, assetDurationInSeconds, fps, frame, startsAt, }) => {
+if (!loop) {
+return frame + startsAt;
+}
+if (behavior === 'extend') {
+return frame + startsAt;
+}
+const assetDurationInFrames = Math.floor(assetDurationInSeconds * fps) - startsAt;
+return (frame % assetDurationInFrames) + startsAt;
+};
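With loop enabled and the default 'repeat' behavior, the volume curve is now evaluated against the frame position inside the current loop iteration rather than the absolute timeline frame. A small worked example, assuming frameForVolumeProp from the new looped-frame module is in scope (the package does not document a public import path for it):

    // Numbers are made up for illustration: a 2-second asset at 30 fps, starting at frame 0.
    // assetDurationInFrames = Math.floor(2 * 30) - 0 = 60
    frameForVolumeProp({
      behavior: 'repeat',
      loop: true,
      assetDurationInSeconds: 2,
      fps: 30,
      frame: 75,
      startsAt: 0,
    }); // 75 % 60 + 0 = 15: the volume callback sees frame 15 of the loop iteration
    // With behavior: 'extend' (or with loop: false), the same call returns 75.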
package/dist/video/props.d.ts
CHANGED

@@ -9,17 +9,12 @@ export type VideoProps = {
 name?: string;
 pauseWhenBuffering?: boolean;
 showInTimeline?: boolean;
-onAutoPlayError?: null | (() => void);
 onVideoFrame?: OnVideoFrame;
 playbackRate?: number;
 muted?: boolean;
 delayRenderRetries?: number;
 delayRenderTimeoutInMilliseconds?: number;
-crossOrigin?: '' | 'anonymous' | 'use-credentials';
 style?: React.CSSProperties;
-onError?: (err: Error) => void;
-useWebAudioApi?: boolean;
-acceptableTimeShiftInSeconds?: number;
 /**
 * @deprecated For internal use only
 */
package/dist/video/video-for-rendering.js
CHANGED

@@ -1,40 +1,24 @@
 import { jsx as _jsx } from "react/jsx-runtime";
 import { useContext, useLayoutEffect, useMemo, useRef, useState, } from 'react';
 import { cancelRender, Internals, useCurrentFrame, useDelayRender, useRemotionEnvironment, useVideoConfig, } from 'remotion';
+import { applyVolume } from '../convert-audiodata/apply-volume';
+import { frameForVolumeProp } from '../looped-frame';
 import { extractFrameViaBroadcastChannel } from '../video-extraction/extract-frame-via-broadcast-channel';
 export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted, loopVolumeCurveBehavior, delayRenderRetries, delayRenderTimeoutInMilliseconds,
 // call when a frame of the video, i.e. frame drawn on canvas
 onVideoFrame, logLevel = window.remotion_logLevel, loop, style, className, }) => {
+if (!src) {
+throw new TypeError('No `src` was passed to <Video>.');
+}
+const frame = useCurrentFrame();
 const absoluteFrame = Internals.useTimelinePosition();
 const { fps } = useVideoConfig();
-const canvasRef = useRef(null);
 const { registerRenderAsset, unregisterRenderAsset } = useContext(Internals.RenderAssetManager);
-const
-const volumePropsFrame = Internals.useFrameForVolumeProp(loopVolumeCurveBehavior ?? 'repeat');
-const environment = useRemotionEnvironment();
+const startsAt = Internals.useMediaStartsAt();
 const [id] = useState(() => `${Math.random()}`.replace('0.', ''));
-
-throw new TypeError('No `src` was passed to <Video>.');
-}
-const volume = Internals.evaluateVolume({
-volume: volumeProp,
-frame: volumePropsFrame,
-mediaVolume: 1,
-});
-Internals.warnAboutTooHighVolume(volume);
-const shouldRenderAudio = useMemo(() => {
-if (!window.remotion_audioEnabled) {
-return false;
-}
-if (muted) {
-return false;
-}
-if (volume <= 0) {
-return false;
-}
-return true;
-}, [muted, volume]);
+const environment = useRemotionEnvironment();
 const { delayRender, continueRender } = useDelayRender();
+const canvasRef = useRef(null);
 useLayoutEffect(() => {
 if (!canvasRef.current) {
 return;
@@ -46,18 +30,27 @@ onVideoFrame, logLevel = window.remotion_logLevel, loop, style, className, }) =>
 retries: delayRenderRetries ?? undefined,
 timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined,
 });
+const shouldRenderAudio = (() => {
+if (!window.remotion_audioEnabled) {
+return false;
+}
+if (muted) {
+return false;
+}
+return true;
+})();
 extractFrameViaBroadcastChannel({
 src,
 timeInSeconds: timestamp,
 durationInSeconds,
+playbackRate: playbackRate ?? 1,
 logLevel: logLevel ?? 'info',
 includeAudio: shouldRenderAudio,
 includeVideo: window.remotion_videoEnabled,
 isClientSideRendering: environment.isClientSideRendering,
-volume,
 loop: loop ?? false,
 })
-.then(({ frame: imageBitmap, audio }) => {
+.then(({ frame: imageBitmap, audio, durationInSeconds: assetDurationInSeconds, }) => {
 if (imageBitmap) {
 onVideoFrame?.(imageBitmap);
 const context = canvasRef.current?.getContext('2d');
@@ -79,7 +72,22 @@ onVideoFrame, logLevel = window.remotion_logLevel, loop, style, className, }) =>
 else if (window.remotion_videoEnabled) {
 cancelRender(new Error('No video frame found'));
 }
-
+const volumePropsFrame = frameForVolumeProp({
+behavior: loopVolumeCurveBehavior ?? 'repeat',
+loop: loop ?? false,
+assetDurationInSeconds: assetDurationInSeconds ?? 0,
+fps,
+frame,
+startsAt,
+});
+const volume = Internals.evaluateVolume({
+volume: volumeProp,
+frame: volumePropsFrame,
+mediaVolume: 1,
+});
+Internals.warnAboutTooHighVolume(volume);
+if (audio && volume > 0) {
+applyVolume(audio.data, volume);
 registerRenderAsset({
 type: 'inline-audio',
 id,
@@ -111,14 +119,16 @@ onVideoFrame, logLevel = window.remotion_logLevel, loop, style, className, }) =>
 frame,
 id,
 logLevel,
+loop,
+loopVolumeCurveBehavior,
+muted,
 onVideoFrame,
 playbackRate,
 registerRenderAsset,
-shouldRenderAudio,
 src,
+startsAt,
 unregisterRenderAsset,
-
-loop,
+volumeProp,
 ]);
 const classNameValue = useMemo(() => {
 return [Internals.OBJECTFIT_CONTAIN_CLASS_NAME, className]
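The volume prop is no longer forwarded to the extraction layer. It is instead evaluated per frame (frameForVolumeProp plus Internals.evaluateVolume) and applied to the extracted PCM samples via applyVolume before the inline-audio render asset is registered. The apply-volume implementation itself is not part of this excerpt; a rough, illustrative sketch of what a gain over signed 16-bit PCM typically looks like (not the package's code):

    // Illustrative only: scale Int16 PCM samples by a gain factor, with clamping.
    function applyGainToPcmS16(samples: Int16Array, volume: number): void {
      if (volume === 1) {
        return; // nothing to do
      }
      for (let i = 0; i < samples.length; i++) {
        const scaled = Math.round(samples[i] * volume);
        // Clamp to the signed 16-bit range to avoid wrap-around distortion.
        samples[i] = Math.max(-32768, Math.min(32767, scaled));
      }
    }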
package/dist/video/video.js
CHANGED

@@ -32,6 +32,6 @@ export const Video = (props) => {
 if (environment.isRendering) {
 return _jsx(VideoForRendering, { ...otherProps });
 }
-const {
-return (_jsx(VideoForPreview, { _remotionInternalStack: stack ?? null, _remotionInternalNativeLoopPassed: false, onDuration: onDuration, onlyWarnForMediaSeekingError: true, pauseWhenBuffering: pauseWhenBuffering ?? false, showInTimeline: showInTimeline ?? true,
+const { onVideoFrame, delayRenderRetries, delayRenderTimeoutInMilliseconds, ...propsForPreview } = otherProps;
+return (_jsx(VideoForPreview, { _remotionInternalStack: stack ?? null, _remotionInternalNativeLoopPassed: false, onDuration: onDuration, onlyWarnForMediaSeekingError: true, pauseWhenBuffering: pauseWhenBuffering ?? false, showInTimeline: showInTimeline ?? true, onVideoFrame: onVideoFrame ?? null, ...propsForPreview }));
 };
package/dist/video-extraction/extract-frame-via-broadcast-channel.d.ts
CHANGED

@@ -1,16 +1,17 @@
 import { type LogLevel } from 'remotion';
 import type { PcmS16AudioData } from '../convert-audiodata/convert-audiodata';
-export declare const extractFrameViaBroadcastChannel: ({ src, timeInSeconds, logLevel, durationInSeconds, includeAudio, includeVideo, isClientSideRendering,
+export declare const extractFrameViaBroadcastChannel: ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, isClientSideRendering, loop, }: {
 src: string;
 timeInSeconds: number;
 durationInSeconds: number;
+playbackRate: number;
 logLevel: LogLevel;
 includeAudio: boolean;
 includeVideo: boolean;
 isClientSideRendering: boolean;
-volume: number;
 loop: boolean;
 }) => Promise<{
 frame: ImageBitmap | VideoFrame | null;
 audio: PcmS16AudioData | null;
+durationInSeconds: number | null;
 }>;
package/dist/video-extraction/extract-frame-via-broadcast-channel.js
CHANGED

@@ -5,14 +5,14 @@ if (window.remotion_broadcastChannel && window.remotion_isMainTab) {
 const data = event.data;
 if (data.type === 'request') {
 try {
-const { frame, audio } = await extractFrameAndAudio({
+const { frame, audio, durationInSeconds } = await extractFrameAndAudio({
 src: data.src,
 timeInSeconds: data.timeInSeconds,
 logLevel: data.logLevel,
 durationInSeconds: data.durationInSeconds,
+playbackRate: data.playbackRate,
 includeAudio: data.includeAudio,
 includeVideo: data.includeVideo,
-volume: data.volume,
 loop: data.loop,
 });
 const videoFrame = frame;
@@ -27,6 +27,7 @@ if (window.remotion_broadcastChannel && window.remotion_isMainTab) {
 id: data.id,
 frame: imageBitmap,
 audio,
+durationInSeconds: durationInSeconds ?? null,
 };
 window.remotion_broadcastChannel.postMessage(response);
 videoFrame?.close();
@@ -45,16 +46,16 @@ if (window.remotion_broadcastChannel && window.remotion_isMainTab) {
 }
 });
 }
-export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel, durationInSeconds, includeAudio, includeVideo, isClientSideRendering,
+export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel, durationInSeconds, playbackRate, includeAudio, includeVideo, isClientSideRendering, loop, }) => {
 if (isClientSideRendering || window.remotion_isMainTab) {
 return extractFrameAndAudio({
 logLevel,
 src,
 timeInSeconds,
 durationInSeconds,
+playbackRate,
 includeAudio,
 includeVideo,
-volume,
 loop,
 });
 }
@@ -69,6 +70,9 @@ export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel,
 resolve({
 frame: data.frame ? data.frame : null,
 audio: data.audio ? data.audio : null,
+durationInSeconds: data.durationInSeconds
+? data.durationInSeconds
+: null,
 });
 window.remotion_broadcastChannel.removeEventListener('message', onMessage);
 }
@@ -86,9 +90,9 @@ export const extractFrameViaBroadcastChannel = ({ src, timeInSeconds, logLevel,
 id: requestId,
 logLevel,
 durationInSeconds,
+playbackRate,
 includeAudio,
 includeVideo,
-volume,
 loop,
 };
 window.remotion_broadcastChannel.postMessage(request);
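Taken together, these hunks suggest the broadcast-channel protocol now carries playbackRate on the request and the asset's durationInSeconds on the response, and no longer transports volume. A sketch of the message shapes inferred only from the fields visible in this diff (the real types are internal to the package and may contain additional fields):

    import type { LogLevel } from 'remotion';
    // PcmS16AudioData is defined in dist/convert-audiodata/convert-audiodata.d.ts (not shown here).
    type PcmS16AudioData = unknown;

    // Inferred request shape, as posted by extractFrameViaBroadcastChannel.
    type ExtractFrameRequest = {
      type: 'request';
      id: string;
      src: string;
      timeInSeconds: number;
      durationInSeconds: number;
      playbackRate: number; // added in 4.0.355
      logLevel: LogLevel;
      includeAudio: boolean;
      includeVideo: boolean;
      loop: boolean;
    };

    // Inferred response shape, as posted back by the main tab.
    type ExtractFrameResponse = {
      id: string;
      frame: ImageBitmap | null;
      audio: PcmS16AudioData | null;
      durationInSeconds: number | null; // added in 4.0.355
    };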
package/dist/video-extraction/extract-frame.d.ts
CHANGED

@@ -1,6 +1,4 @@
 import { type LogLevel } from 'remotion';
-import { type GetSink } from './get-frames-since-keyframe';
-export declare const sinkPromises: Record<string, Promise<GetSink>>;
 export declare const extractFrame: ({ src, timeInSeconds: unloopedTimeinSeconds, logLevel, loop, }: {
 src: string;
 timeInSeconds: number;
package/dist/video-extraction/extract-frame.js
CHANGED

@@ -1,11 +1,11 @@
 import { keyframeManager } from '../caches';
-import {
-export const sinkPromises = {};
+import { getSinkWeak } from '../get-sink-weak';
 export const extractFrame = async ({ src, timeInSeconds: unloopedTimeinSeconds, logLevel, loop, }) => {
-
-
+const sink = await getSinkWeak(src, logLevel);
+const { video, getDuration } = sink;
+if (video === null) {
+throw new Error(`No video track found for ${src}`);
 }
-const { video, getDuration } = await sinkPromises[src];
 const timeInSeconds = loop
 ? unloopedTimeinSeconds % (await getDuration())
 : unloopedTimeinSeconds;
package/dist/video-extraction/get-frames-since-keyframe.d.ts
CHANGED

@@ -1,10 +1,10 @@
 import type { EncodedPacket } from 'mediabunny';
 import { AudioSampleSink, EncodedPacketSink, VideoSampleSink } from 'mediabunny';
-export declare const getSinks: (src: string) => Promise<{
+export declare const getSinks: (src: string) => Promise<WeakRef<{
 video: {
 sampleSink: VideoSampleSink;
 packetSink: EncodedPacketSink;
-};
+} | null;
 audio: {
 sampleSink: AudioSampleSink;
 } | null;
@@ -14,7 +14,7 @@ export declare const getSinks: (src: string) => Promise<{
 };
 isMatroska: boolean;
 getDuration: () => Promise<number>;
-}
+}>>;
 export type GetSink = Awaited<ReturnType<typeof getSinks>>;
 export declare const getFramesSinceKeyframe: ({ packetSink, videoSampleSink, startPacket, }: {
 packetSink: EncodedPacketSink;
package/dist/video-extraction/get-frames-since-keyframe.js
CHANGED

@@ -8,16 +8,15 @@ export const getSinks = async (src) => {
 });
 const format = await input.getFormat();
 const videoTrack = await input.getPrimaryVideoTrack();
-if (!videoTrack) {
-throw new Error(`No video track found for ${src}`);
-}
 const audioTrack = await input.getPrimaryAudioTrack();
 const isMatroska = format === MATROSKA;
-return {
-video:
-
-
-
+return new WeakRef({
+video: videoTrack
+? {
+sampleSink: new VideoSampleSink(videoTrack),
+packetSink: new EncodedPacketSink(videoTrack),
+}
+: null,
 audio: audioTrack
 ? {
 sampleSink: new AudioSampleSink(audioTrack),
@@ -26,7 +25,7 @@ export const getSinks = async (src) => {
 actualMatroskaTimestamps: rememberActualMatroskaTimestamps(isMatroska),
 isMatroska,
 getDuration: () => input.computeDuration(),
-};
+});
 };
 export const getFramesSinceKeyframe = async ({ packetSink, videoSampleSink, startPacket, }) => {
 const nextKeyPacket = await packetSink.getNextKeyPacket(startPacket, {
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
 "name": "@remotion/media",
-"version": "4.0.
+"version": "4.0.355",
 "main": "dist/index.js",
 "types": "dist/index.d.ts",
 "module": "dist/esm/index.mjs",
@@ -8,14 +8,14 @@
 "url": "https://github.com/remotion-dev/remotion/tree/main/packages/media"
 },
 "sideEffects": false,
-"author": "Hunain Ahmed <junaidhunain6@gmail.com>",
+"author": "Jonny Burger <jonny@remotion.dev>, Hunain Ahmed <junaidhunain6@gmail.com>",
 "bugs": {
 "url": "https://github.com/remotion-dev/remotion/issues"
 },
 "dependencies": {
-"mediabunny": "1.
+"mediabunny": "1.21.0",
 "webdriverio": "9.19.2",
-"remotion": "4.0.
+"remotion": "4.0.355"
 },
 "peerDependencies": {
 "react": ">=16.8.0",
@@ -27,7 +27,7 @@
 "react": "19.0.0",
 "react-dom": "19.0.0",
 "vitest": "3.2.4",
-"@remotion/eslint-config-internal": "4.0.
+"@remotion/eslint-config-internal": "4.0.355"
 },
 "keywords": [],
 "publishConfig": {