@remotion/media 4.0.357 → 4.0.361
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/audio/audio-for-preview.js +29 -13
- package/dist/audio/audio-for-rendering.js +8 -6
- package/dist/audio-extraction/extract-audio.js +1 -0
- package/dist/browser-can-use-webgl2.d.ts +1 -0
- package/dist/browser-can-use-webgl2.js +13 -0
- package/dist/caches.d.ts +1 -1
- package/dist/caches.js +3 -3
- package/dist/esm/index.mjs +568 -327
- package/dist/extract-frame-and-audio.js +6 -0
- package/dist/get-time-in-seconds.d.ts +2 -1
- package/dist/get-time-in-seconds.js +10 -10
- package/dist/show-in-timeline.d.ts +8 -0
- package/dist/show-in-timeline.js +31 -0
- package/dist/use-media-in-timeline.d.ts +19 -0
- package/dist/use-media-in-timeline.js +103 -0
- package/dist/video/media-player.d.ts +11 -5
- package/dist/video/media-player.js +74 -36
- package/dist/video/video-for-preview.d.ts +9 -9
- package/dist/video/video-for-preview.js +43 -20
- package/dist/video/video-for-rendering.js +21 -5
- package/dist/video-extraction/extract-frame-via-broadcast-channel.d.ts +3 -0
- package/dist/video-extraction/extract-frame-via-broadcast-channel.js +17 -0
- package/dist/video-extraction/extract-frame.d.ts +3 -0
- package/dist/video-extraction/extract-frame.js +7 -0
- package/dist/video-extraction/keyframe-manager.d.ts +1 -1
- package/dist/video-extraction/keyframe-manager.js +5 -0
- package/package.json +54 -54
- package/LICENSE.md +0 -49
- package/dist/convert-audiodata/apply-tonefrequency.d.ts +0 -2
- package/dist/convert-audiodata/apply-tonefrequency.js +0 -43
- package/dist/convert-audiodata/wsola.d.ts +0 -13
- package/dist/convert-audiodata/wsola.js +0 -197
- package/dist/get-sink-weak.d.ts +0 -13
- package/dist/get-sink-weak.js +0 -15
- package/dist/log.d.ts +0 -10
- package/dist/log.js +0 -33
- package/dist/video/resolve-playback-time.d.ts +0 -8
- package/dist/video/resolve-playback-time.js +0 -22

package/dist/extract-frame-and-audio.js:
@@ -40,6 +40,12 @@ export const extractFrameAndAudio = async ({ src, timeInSeconds, logLevel, durat
     if (frame?.type === 'unknown-container-format') {
         return { type: 'unknown-container-format' };
     }
+    if (frame?.type === 'cannot-decode-alpha') {
+        return {
+            type: 'cannot-decode-alpha',
+            durationInSeconds: frame.durationInSeconds,
+        };
+    }
     if (audio === 'unknown-container-format') {
         if (frame !== null) {
             frame?.frame?.close();

package/dist/get-time-in-seconds.d.ts:
@@ -1,4 +1,4 @@
-export declare const getTimeInSeconds: ({ loop, mediaDurationInSeconds, unloopedTimeInSeconds, src, trimAfter, trimBefore, fps, playbackRate, }: {
+export declare const getTimeInSeconds: ({ loop, mediaDurationInSeconds, unloopedTimeInSeconds, src, trimAfter, trimBefore, fps, playbackRate, ifNoMediaDuration, }: {
     loop: boolean;
     mediaDurationInSeconds: number | null;
     unloopedTimeInSeconds: number;
@@ -7,4 +7,5 @@ export declare const getTimeInSeconds: ({ loop, mediaDurationInSeconds, unlooped
     trimBefore: number | undefined;
     playbackRate: number;
     fps: number;
+    ifNoMediaDuration: "fail" | "infinity";
 }) => number | null;

package/dist/get-time-in-seconds.js:
@@ -1,24 +1,24 @@
 import { Internals } from 'remotion';
-export const getTimeInSeconds = ({ loop, mediaDurationInSeconds, unloopedTimeInSeconds, src, trimAfter, trimBefore, fps, playbackRate, }) => {
-    if (mediaDurationInSeconds === null && loop) {
+export const getTimeInSeconds = ({ loop, mediaDurationInSeconds, unloopedTimeInSeconds, src, trimAfter, trimBefore, fps, playbackRate, ifNoMediaDuration, }) => {
+    if (mediaDurationInSeconds === null && loop && ifNoMediaDuration === 'fail') {
         throw new Error(`Could not determine duration of ${src}, but "loop" was set.`);
     }
     const loopDuration = loop
-        ? Internals.
+        ? Internals.calculateMediaDuration({
             trimAfter,
-            mediaDurationInFrames: mediaDurationInSeconds
+            mediaDurationInFrames: mediaDurationInSeconds
+                ? mediaDurationInSeconds * fps
+                : Infinity,
             // Playback rate was already specified before
             playbackRate: 1,
             trimBefore,
         }) / fps
         : Infinity;
     const timeInSeconds = (unloopedTimeInSeconds * playbackRate) % loopDuration;
-    if ((trimAfter ?? null) !== null) {
-
-
-
-        return null;
-    }
+    if ((trimAfter ?? null) !== null && !loop) {
+        const time = (trimAfter - (trimBefore ?? 0)) / fps;
+        if (timeInSeconds >= time) {
+            return null;
         }
     }
     return timeInSeconds + (trimBefore ?? 0) / fps;
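
For orientation, here is a minimal sketch of how the reworked helper now behaves. `getTimeInSeconds` is an internal module of `@remotion/media` (the deep import path below is illustrative, not a supported entry point), and the expected result assumes `Internals.calculateMediaDuration` passes an infinite duration through unchanged:

```ts
import {getTimeInSeconds} from '@remotion/media/dist/get-time-in-seconds'; // internal path, illustrative

// With ifNoMediaDuration: 'infinity', a missing media duration no longer throws
// when `loop` is set; the loop duration is treated as Infinity instead.
const time = getTimeInSeconds({
  loop: true,
  mediaDurationInSeconds: null, // duration not known yet
  unloopedTimeInSeconds: 3.2,
  src: 'https://example.com/video.mp4',
  trimAfter: undefined,
  trimBefore: undefined,
  fps: 30,
  playbackRate: 1,
  ifNoMediaDuration: 'infinity', // 'fail' keeps the previous throwing behavior
});
// Expected: 3.2 — (3.2 * 1) % Infinity === 3.2, and no trim offset is added.
```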

package/dist/show-in-timeline.d.ts:
@@ -0,0 +1,8 @@
+import type { _InternalTypes } from 'remotion';
+export declare const useLoopDisplay: ({ loop, mediaDurationInSeconds, playbackRate, trimAfter, trimBefore, }: {
+    loop: boolean;
+    mediaDurationInSeconds: number | null;
+    trimAfter: number | undefined;
+    trimBefore: number | undefined;
+    playbackRate: number;
+}) => _InternalTypes["LoopDisplay"] | undefined;

package/dist/show-in-timeline.js:
@@ -0,0 +1,31 @@
+import { useMemo } from 'react';
+import { Internals, useVideoConfig } from 'remotion';
+export const useLoopDisplay = ({ loop, mediaDurationInSeconds, playbackRate, trimAfter, trimBefore, }) => {
+    const { durationInFrames: compDuration, fps } = useVideoConfig();
+    const loopDisplay = useMemo(() => {
+        if (!loop || !mediaDurationInSeconds) {
+            return undefined;
+        }
+        const durationInFrames = Internals.calculateMediaDuration({
+            mediaDurationInFrames: mediaDurationInSeconds * fps,
+            playbackRate,
+            trimAfter,
+            trimBefore,
+        });
+        const maxTimes = compDuration / durationInFrames;
+        return {
+            numberOfTimes: maxTimes,
+            startOffset: 0,
+            durationInFrames,
+        };
+    }, [
+        compDuration,
+        fps,
+        loop,
+        mediaDurationInSeconds,
+        playbackRate,
+        trimAfter,
+        trimBefore,
+    ]);
+    return loopDisplay;
+};
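
As a rough illustration of what the new hook computes: in a hypothetical 300-frame, 30 fps composition, a 2.5-second looping clip yields a 75-frame loop shown 4 times. The hook is internal and must run inside a composition (it calls `useVideoConfig`); the import path, component, and numbers below are assumptions, not part of the public API:

```ts
import React from 'react';
import {useLoopDisplay} from '@remotion/media/dist/show-in-timeline'; // internal path, illustrative

const LoopedClipTimeline: React.FC = () => {
  const loopDisplay = useLoopDisplay({
    loop: true,
    mediaDurationInSeconds: 2.5,
    playbackRate: 1,
    trimAfter: undefined,
    trimBefore: undefined,
  });
  // In a 300-frame, 30 fps composition this should be roughly:
  // {durationInFrames: 75, numberOfTimes: 4, startOffset: 0}
  return null;
};
```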

package/dist/use-media-in-timeline.d.ts:
@@ -0,0 +1,19 @@
+import type { _InternalTypes } from 'remotion';
+import { type VolumeProp } from 'remotion';
+export declare const useMediaInTimeline: ({ volume, mediaVolume, src, mediaType, playbackRate, displayName, stack, showInTimeline, premountDisplay, postmountDisplay, loopDisplay, trimBefore, trimAfter, }: {
+    volume: VolumeProp | undefined;
+    mediaVolume: number;
+    src: string | undefined;
+    mediaType: "audio" | "video";
+    playbackRate: number;
+    displayName: string | null;
+    stack: string | null;
+    showInTimeline: boolean;
+    premountDisplay: number | null;
+    postmountDisplay: number | null;
+    loopDisplay: _InternalTypes["LoopDisplay"] | undefined;
+    trimBefore: number | undefined;
+    trimAfter: number | undefined;
+}) => {
+    id: string;
+};

package/dist/use-media-in-timeline.js:
@@ -0,0 +1,103 @@
+import { useContext, useEffect, useState } from 'react';
+import { Internals, useCurrentFrame } from 'remotion';
+export const useMediaInTimeline = ({ volume, mediaVolume, src, mediaType, playbackRate, displayName, stack, showInTimeline, premountDisplay, postmountDisplay, loopDisplay, trimBefore, trimAfter, }) => {
+    const parentSequence = useContext(Internals.SequenceContext);
+    const startsAt = Internals.useMediaStartsAt();
+    const { registerSequence, unregisterSequence } = useContext(Internals.SequenceManager);
+    const [sequenceId] = useState(() => String(Math.random()));
+    const [mediaId] = useState(() => String(Math.random()));
+    const frame = useCurrentFrame();
+    const { volumes, duration, doesVolumeChange, nonce, rootId, isStudio, finalDisplayName, } = Internals.useBasicMediaInTimeline({
+        volume,
+        mediaVolume,
+        mediaType,
+        src,
+        displayName,
+        trimBefore,
+        trimAfter,
+        playbackRate,
+    });
+    useEffect(() => {
+        if (!src) {
+            throw new Error('No src passed');
+        }
+        if (!isStudio && window.process?.env?.NODE_ENV !== 'test') {
+            return;
+        }
+        if (!showInTimeline) {
+            return;
+        }
+        const loopIteration = loopDisplay
+            ? Math.floor(frame / loopDisplay.durationInFrames)
+            : 0;
+        if (loopDisplay) {
+            registerSequence({
+                type: 'sequence',
+                premountDisplay,
+                postmountDisplay,
+                parent: parentSequence?.id ?? null,
+                displayName: finalDisplayName,
+                rootId,
+                showInTimeline: true,
+                nonce,
+                loopDisplay,
+                stack,
+                from: 0,
+                duration,
+                id: sequenceId,
+            });
+        }
+        registerSequence({
+            type: mediaType,
+            src,
+            id: mediaId,
+            duration: loopDisplay?.durationInFrames ?? duration,
+            from: loopDisplay ? loopIteration * loopDisplay.durationInFrames : 0,
+            parent: loopDisplay ? sequenceId : (parentSequence?.id ?? null),
+            displayName: finalDisplayName,
+            rootId,
+            volume: volumes,
+            showInTimeline: true,
+            nonce,
+            startMediaFrom: 0 - startsAt,
+            doesVolumeChange,
+            loopDisplay: undefined,
+            playbackRate,
+            stack,
+            premountDisplay: null,
+            postmountDisplay: null,
+        });
+        return () => {
+            if (loopDisplay) {
+                unregisterSequence(sequenceId);
+            }
+            unregisterSequence(mediaId);
+        };
+    }, [
+        doesVolumeChange,
+        duration,
+        finalDisplayName,
+        isStudio,
+        loopDisplay,
+        mediaId,
+        mediaType,
+        nonce,
+        parentSequence?.id,
+        playbackRate,
+        postmountDisplay,
+        premountDisplay,
+        registerSequence,
+        rootId,
+        sequenceId,
+        showInTimeline,
+        src,
+        stack,
+        startsAt,
+        unregisterSequence,
+        volumes,
+        frame,
+    ]);
+    return {
+        id: mediaId,
+    };
+};
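
Read together with `useLoopDisplay`, this hook registers the media entry (plus a wrapping `sequence` entry when a `LoopDisplay` is passed) with the Studio timeline and unregisters it on unmount. A hypothetical wrapper component, with props taken from the declaration above (internal API; values are examples only):

```ts
import React from 'react';
import {useMediaInTimeline} from '@remotion/media/dist/use-media-in-timeline'; // internal path, illustrative

const TimelineEntry: React.FC<{readonly src: string}> = ({src}) => {
  const {id} = useMediaInTimeline({
    volume: 1,
    mediaVolume: 1,
    src,
    mediaType: 'video',
    playbackRate: 1,
    displayName: null,
    stack: null,
    showInTimeline: true,
    premountDisplay: null,
    postmountDisplay: null,
    loopDisplay: undefined, // pass the result of useLoopDisplay() to get the loop wrapper
    trimBefore: undefined,
    trimAfter: undefined,
  });
  // `id` identifies the registered timeline entry.
  return null;
};
```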

package/dist/video/media-player.d.ts:
@@ -2,6 +2,7 @@ import type { LogLevel } from 'remotion';
 export declare const SEEK_THRESHOLD = 0.05;
 export type MediaPlayerInitResult = {
     type: 'success';
+    durationInSeconds: number;
 } | {
     type: 'unknown-container-format';
 } | {
@@ -25,13 +26,15 @@ export declare class MediaPlayer {
     private audioBufferIterator;
     private queuedAudioNodes;
     private gainNode;
+    private currentVolume;
     private sharedAudioContext;
     private audioSyncAnchor;
     private playing;
     private muted;
     private loop;
-    private
-    private
+    private fps;
+    private trimBefore;
+    private trimAfter;
     private animationFrameId;
     private videoAsyncId;
     private audioAsyncId;
@@ -43,22 +46,24 @@ export declare class MediaPlayer {
     private audioIteratorStarted;
     private readonly HEALTHY_BUFER_THRESHOLD_SECONDS;
     private onVideoFrameCallback?;
-    constructor({ canvas, src, logLevel, sharedAudioContext, loop,
+    constructor({ canvas, src, logLevel, sharedAudioContext, loop, trimBefore, trimAfter, playbackRate, audioStreamIndex, fps, }: {
         canvas: HTMLCanvasElement | null;
         src: string;
         logLevel: LogLevel;
         sharedAudioContext: AudioContext;
         loop: boolean;
-
-
+        trimBefore: number | undefined;
+        trimAfter: number | undefined;
         playbackRate: number;
         audioStreamIndex: number;
+        fps: number;
     });
     private input;
     private isReady;
     private hasAudio;
     private isCurrentlyBuffering;
     initialize(startTimeUnresolved: number): Promise<MediaPlayerInitResult>;
+    private clearCanvas;
     private cleanupAudioQueue;
     private cleanAudioIteratorAndNodes;
     seekTo(time: number): Promise<void>;
@@ -67,6 +72,7 @@ export declare class MediaPlayer {
     setMuted(muted: boolean): void;
     setVolume(volume: number): void;
     setPlaybackRate(rate: number): void;
+    setFps(fps: number): void;
     setLoop(loop: boolean): void;
     dispose(): void;
     private getPlaybackTime;
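
Putting the declaration changes together: the constructor now takes `trimBefore`/`trimAfter` (in frames) and `fps`, and a successful `initialize()` also reports the media duration. A rough sketch, assuming the internal module path and example values:

```ts
import {MediaPlayer} from '@remotion/media/dist/video/media-player'; // internal path, illustrative

const setUpPlayer = async (canvas: HTMLCanvasElement) => {
  const player = new MediaPlayer({
    canvas,
    src: 'https://example.com/video.mp4',
    logLevel: 'info',
    sharedAudioContext: new AudioContext(),
    loop: false,
    trimBefore: 30, // frames; converted to seconds internally using `fps`
    trimAfter: 300, // frames
    playbackRate: 1,
    audioStreamIndex: 0,
    fps: 30,
  });
  const result = await player.initialize(0);
  if (result.type === 'success') {
    // New in this release: the success result carries the media duration.
    console.log('duration', result.durationInSeconds);
  }
  return player;
};
```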

package/dist/video/media-player.js:
@@ -1,12 +1,12 @@
 import { ALL_FORMATS, AudioBufferSink, CanvasSink, Input, UrlSource, } from 'mediabunny';
 import { Internals } from 'remotion';
+import { getTimeInSeconds } from '../get-time-in-seconds';
 import { isNetworkError } from '../is-network-error';
-import { resolvePlaybackTime } from './resolve-playback-time';
 import { sleep, withTimeout } from './timeout-utils';
 export const SEEK_THRESHOLD = 0.05;
 const AUDIO_BUFFER_TOLERANCE_THRESHOLD = 0.1;
 export class MediaPlayer {
-    constructor({ canvas, src, logLevel, sharedAudioContext, loop,
+    constructor({ canvas, src, logLevel, sharedAudioContext, loop, trimBefore, trimAfter, playbackRate, audioStreamIndex, fps, }) {
         this.canvasSink = null;
         this.videoFrameIterator = null;
         this.nextFrame = null;
@@ -14,7 +14,9 @@ export class MediaPlayer {
         this.audioBufferIterator = null;
         this.queuedAudioNodes = new Set();
         this.gainNode = null;
-
+        this.currentVolume = 1;
+        // this is the time difference between Web Audio timeline
+        // and media file timeline
         this.audioSyncAnchor = 0;
         this.playing = false;
         this.muted = false;
@@ -100,7 +102,11 @@ export class MediaPlayer {
             if (!newNextFrame) {
                 break;
             }
-
+            const playbackTime = this.getPlaybackTime();
+            if (playbackTime === null) {
+                continue;
+            }
+            if (newNextFrame.timestamp <= playbackTime) {
                 continue;
             }
             else {
@@ -144,7 +150,7 @@ export class MediaPlayer {
             totalBufferDuration += duration;
             this.audioBufferHealth = Math.max(0, totalBufferDuration / this.playbackRate);
             this.maybeResumeFromBuffering(totalBufferDuration / this.playbackRate);
-            if (this.playing
+            if (this.playing) {
                 if (isFirstBuffer) {
                     this.audioSyncAnchor =
                         this.sharedAudioContext.currentTime - timestamp;
@@ -157,10 +163,16 @@ export class MediaPlayer {
                }
                this.scheduleAudioChunk(buffer, timestamp);
            }
-
+            const playbackTime = this.getPlaybackTime();
+            if (playbackTime === null) {
+                continue;
+            }
+            if (timestamp - playbackTime >= 1) {
                await new Promise((resolve) => {
                    const check = () => {
-
+                        const currentPlaybackTime = this.getPlaybackTime();
+                        if (currentPlaybackTime !== null &&
+                            timestamp - currentPlaybackTime < 1) {
                            resolve();
                        }
                        else {
@@ -182,12 +194,13 @@ export class MediaPlayer {
         this.sharedAudioContext = sharedAudioContext;
         this.playbackRate = playbackRate;
         this.loop = loop;
-        this.
-        this.
+        this.trimBefore = trimBefore;
+        this.trimAfter = trimAfter;
         this.audioStreamIndex = audioStreamIndex ?? 0;
+        this.fps = fps;
         if (canvas) {
             const context = canvas.getContext('2d', {
-                alpha:
+                alpha: true,
                 desynchronized: true,
             });
             if (!context) {
@@ -227,12 +240,12 @@ export class MediaPlayer {
            Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, `[MediaPlayer] Failed to recognize format for ${this.src}`, error);
            return { type: 'unknown-container-format' };
        }
-        const [
+        const [durationInSeconds, videoTrack, audioTracks] = await Promise.all([
            input.computeDuration(),
            input.getPrimaryVideoTrack(),
            input.getAudioTracks(),
        ]);
-        this.totalDuration =
+        this.totalDuration = durationInSeconds;
        const audioTrack = audioTracks[this.audioStreamIndex] ?? null;
        if (!videoTrack && !audioTrack) {
            return { type: 'no-tracks' };
@@ -245,6 +258,7 @@ export class MediaPlayer {
            this.canvasSink = new CanvasSink(videoTrack, {
                poolSize: 2,
                fit: 'contain',
+                alpha: true,
            });
            this.canvas.width = videoTrack.displayWidth;
            this.canvas.height = videoTrack.displayHeight;
@@ -254,14 +268,21 @@ export class MediaPlayer {
            this.gainNode = this.sharedAudioContext.createGain();
            this.gainNode.connect(this.sharedAudioContext.destination);
        }
-        const startTime =
-
+        const startTime = getTimeInSeconds({
+            unloopedTimeInSeconds: startTimeUnresolved,
            playbackRate: this.playbackRate,
            loop: this.loop,
-
-
+            trimBefore: this.trimBefore,
+            trimAfter: this.trimAfter,
            mediaDurationInSeconds: this.totalDuration,
+            fps: this.fps,
+            ifNoMediaDuration: 'infinity',
+            src: this.src,
        });
+        if (startTime === null) {
+            this.clearCanvas();
+            return { type: 'success', durationInSeconds: this.totalDuration };
+        }
        if (this.sharedAudioContext) {
            this.audioSyncAnchor = this.sharedAudioContext.currentTime - startTime;
        }
@@ -271,7 +292,7 @@ export class MediaPlayer {
            this.startVideoIterator(startTime),
        ]);
        this.startRenderLoop();
-        return { type: 'success' };
+        return { type: 'success', durationInSeconds };
    }
    catch (error) {
        const err = error;
@@ -283,6 +304,11 @@ export class MediaPlayer {
            throw error;
        }
    }
+    clearCanvas() {
+        if (this.context && this.canvas) {
+            this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
+        }
+    }
    cleanupAudioQueue() {
        for (const node of this.queuedAudioNodes) {
            node.stop();
@@ -299,16 +325,25 @@ export class MediaPlayer {
    async seekTo(time) {
        if (!this.isReady())
            return;
-        const newTime =
-
+        const newTime = getTimeInSeconds({
+            unloopedTimeInSeconds: time,
            playbackRate: this.playbackRate,
            loop: this.loop,
-
-
-            mediaDurationInSeconds: this.totalDuration,
+            trimBefore: this.trimBefore,
+            trimAfter: this.trimAfter,
+            mediaDurationInSeconds: this.totalDuration ?? null,
+            fps: this.fps,
+            ifNoMediaDuration: 'infinity',
+            src: this.src,
        });
+        if (newTime === null) {
+            this.clearCanvas();
+            await this.cleanAudioIteratorAndNodes();
+            return;
+        }
        const currentPlaybackTime = this.getPlaybackTime();
-        const isSignificantSeek =
+        const isSignificantSeek = currentPlaybackTime === null ||
+            Math.abs(newTime - currentPlaybackTime) > SEEK_THRESHOLD;
        if (isSignificantSeek) {
            this.nextFrame = null;
            this.audioSyncAnchor = this.sharedAudioContext.currentTime - newTime;
@@ -342,8 +377,8 @@ export class MediaPlayer {
    }
    setMuted(muted) {
        this.muted = muted;
-        if (
-            this.
+        if (this.gainNode) {
+            this.gainNode.gain.value = muted ? 0 : this.currentVolume;
        }
    }
    setVolume(volume) {
@@ -351,11 +386,17 @@ export class MediaPlayer {
            return;
        }
        const appliedVolume = Math.max(0, volume);
-        this.
+        this.currentVolume = appliedVolume;
+        if (!this.muted) {
+            this.gainNode.gain.value = appliedVolume;
+        }
    }
    setPlaybackRate(rate) {
        this.playbackRate = rate;
    }
+    setFps(fps) {
+        this.fps = fps;
+    }
    setLoop(loop) {
        this.loop = loop;
    }
@@ -367,15 +408,7 @@ export class MediaPlayer {
        this.videoAsyncId++;
    }
    getPlaybackTime() {
-
-        return resolvePlaybackTime({
-            absolutePlaybackTimeInSeconds: absoluteTime,
-            playbackRate: this.playbackRate,
-            loop: this.loop,
-            trimBeforeInSeconds: this.trimBeforeSeconds,
-            trimAfterInSeconds: this.trimAfterSeconds,
-            mediaDurationInSeconds: this.totalDuration,
-        });
+        return this.sharedAudioContext.currentTime - this.audioSyncAnchor;
    }
    scheduleAudioChunk(buffer, mediaTimestamp) {
        const targetTime = mediaTimestamp + this.audioSyncAnchor;
@@ -430,13 +463,18 @@ export class MediaPlayer {
        }
    }
    shouldRenderFrame() {
+        const playbackTime = this.getPlaybackTime();
+        if (playbackTime === null) {
+            return false;
+        }
        return (!this.isBuffering &&
            this.canRenderVideo() &&
            this.nextFrame !== null &&
-            this.nextFrame.timestamp <=
+            this.nextFrame.timestamp <= playbackTime);
    }
    drawCurrentFrame() {
        if (this.context && this.nextFrame) {
+            this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
            this.context.drawImage(this.nextFrame.canvas, 0, 0);
        }
        if (this.onVideoFrameCallback && this.canvas) {
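
One behavioral detail from the hunks above: the player now caches `currentVolume`, so muting only zeroes the gain node, and `setVolume` while muted only updates the cache. Continuing the `player` sketch from the constructor example (illustrative, not public API):

```ts
player.setVolume(0.5);  // gain -> 0.5, cached as currentVolume
player.setMuted(true);  // gain -> 0
player.setVolume(0.8);  // cached only; gain stays at 0 while muted
player.setMuted(false); // gain restored to 0.8 from the cache
```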

package/dist/video/video-for-preview.d.ts:
@@ -1,19 +1,19 @@
 import React from 'react';
-import type { LogLevel, LoopVolumeCurveBehavior,
+import type { LogLevel, LoopVolumeCurveBehavior, VolumeProp } from 'remotion';
 import type { FallbackOffthreadVideoProps } from './props';
-type
-    readonly className: string | undefined;
-    readonly loop: boolean;
+type NewVideoForPreviewProps = {
     readonly src: string;
+    readonly style: React.CSSProperties | undefined;
+    readonly playbackRate: number;
     readonly logLevel: LogLevel;
+    readonly className: string | undefined;
     readonly muted: boolean;
-    readonly name: string | undefined;
     readonly volume: VolumeProp;
     readonly loopVolumeCurveBehavior: LoopVolumeCurveBehavior;
-    readonly onVideoFrame:
-    readonly playbackRate: number;
-    readonly style: React.CSSProperties;
+    readonly onVideoFrame: undefined | ((frame: CanvasImageSource) => void);
     readonly showInTimeline: boolean;
+    readonly loop: boolean;
+    readonly name: string | undefined;
     readonly trimAfter: number | undefined;
     readonly trimBefore: number | undefined;
     readonly stack: string | null;
@@ -21,5 +21,5 @@ type InnerVideoProps = {
     readonly fallbackOffthreadVideoProps: FallbackOffthreadVideoProps;
     readonly audioStreamIndex: number;
 };
-export declare const VideoForPreview: React.FC<
+export declare const VideoForPreview: React.FC<NewVideoForPreviewProps>;
 export {};