@remotion/media 4.0.357 → 4.0.358
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/audio/audio-for-preview.js +29 -13
- package/dist/audio/audio-for-rendering.js +5 -6
- package/dist/audio-extraction/extract-audio.js +1 -0
- package/dist/esm/index.mjs +493 -321
- package/dist/get-time-in-seconds.d.ts +2 -1
- package/dist/get-time-in-seconds.js +10 -10
- package/dist/show-in-timeline.d.ts +8 -0
- package/dist/show-in-timeline.js +31 -0
- package/dist/use-media-in-timeline.d.ts +19 -0
- package/dist/use-media-in-timeline.js +103 -0
- package/dist/video/media-player.d.ts +11 -5
- package/dist/video/media-player.js +72 -36
- package/dist/video/video-for-preview.d.ts +9 -9
- package/dist/video/video-for-preview.js +43 -20
- package/dist/video/video-for-rendering.js +2 -2
- package/dist/video-extraction/extract-frame.js +1 -0
- package/package.json +54 -54
- package/LICENSE.md +0 -49
- package/dist/convert-audiodata/apply-tonefrequency.d.ts +0 -2
- package/dist/convert-audiodata/apply-tonefrequency.js +0 -43
- package/dist/convert-audiodata/wsola.d.ts +0 -13
- package/dist/convert-audiodata/wsola.js +0 -197
- package/dist/get-sink-weak.d.ts +0 -13
- package/dist/get-sink-weak.js +0 -15
- package/dist/log.d.ts +0 -10
- package/dist/log.js +0 -33
- package/dist/video/resolve-playback-time.d.ts +0 -8
- package/dist/video/resolve-playback-time.js +0 -22
package/dist/get-time-in-seconds.d.ts

@@ -1,4 +1,4 @@
-export declare const getTimeInSeconds: ({ loop, mediaDurationInSeconds, unloopedTimeInSeconds, src, trimAfter, trimBefore, fps, playbackRate, }: {
+export declare const getTimeInSeconds: ({ loop, mediaDurationInSeconds, unloopedTimeInSeconds, src, trimAfter, trimBefore, fps, playbackRate, ifNoMediaDuration, }: {
     loop: boolean;
     mediaDurationInSeconds: number | null;
     unloopedTimeInSeconds: number;
@@ -7,4 +7,5 @@ export declare const getTimeInSeconds: ({ loop, mediaDurationInSeconds, unlooped
     trimBefore: number | undefined;
     playbackRate: number;
     fps: number;
+    ifNoMediaDuration: "fail" | "infinity";
 }) => number | null;

package/dist/get-time-in-seconds.js

@@ -1,24 +1,24 @@
 import { Internals } from 'remotion';
-export const getTimeInSeconds = ({ loop, mediaDurationInSeconds, unloopedTimeInSeconds, src, trimAfter, trimBefore, fps, playbackRate, }) => {
-    if (mediaDurationInSeconds === null && loop) {
+export const getTimeInSeconds = ({ loop, mediaDurationInSeconds, unloopedTimeInSeconds, src, trimAfter, trimBefore, fps, playbackRate, ifNoMediaDuration, }) => {
+    if (mediaDurationInSeconds === null && loop && ifNoMediaDuration === 'fail') {
         throw new Error(`Could not determine duration of ${src}, but "loop" was set.`);
     }
     const loopDuration = loop
-        ? Internals.
+        ? Internals.calculateMediaDuration({
             trimAfter,
-            mediaDurationInFrames: mediaDurationInSeconds
+            mediaDurationInFrames: mediaDurationInSeconds
+                ? mediaDurationInSeconds * fps
+                : Infinity,
             // Playback rate was already specified before
             playbackRate: 1,
             trimBefore,
         }) / fps
         : Infinity;
     const timeInSeconds = (unloopedTimeInSeconds * playbackRate) % loopDuration;
-    if ((trimAfter ?? null) !== null) {
-
-
-
-        return null;
-    }
+    if ((trimAfter ?? null) !== null && !loop) {
+        const time = (trimAfter - (trimBefore ?? 0)) / fps;
+        if (timeInSeconds >= time) {
+            return null;
         }
     }
     return timeInSeconds + (trimBefore ?? 0) / fps;
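
As a rough illustration (not part of the published diff): the new ifNoMediaDuration flag decides what happens when the media duration has not been probed yet — 'fail' keeps the old behavior of throwing for looped media, 'infinity' resolves against an unbounded loop instead. A minimal sketch, assuming a hypothetical deep import of the compiled helper and assuming Internals.calculateMediaDuration passes Infinity through when no trims are set:

    import {getTimeInSeconds} from '@remotion/media/dist/get-time-in-seconds'; // hypothetical deep import

    // Looping video whose duration is not known yet.
    const time = getTimeInSeconds({
        loop: true,
        mediaDurationInSeconds: null, // not probed yet
        unloopedTimeInSeconds: 2.5,
        src: 'video.mp4',
        trimAfter: undefined,
        trimBefore: undefined,
        fps: 30,
        playbackRate: 1,
        ifNoMediaDuration: 'infinity', // with 'fail', this call would throw
    });
    // The loop duration becomes Infinity, the modulo is a no-op, and time === 2.5.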

package/dist/show-in-timeline.d.ts

@@ -0,0 +1,8 @@
+import type { _InternalTypes } from 'remotion';
+export declare const useLoopDisplay: ({ loop, mediaDurationInSeconds, playbackRate, trimAfter, trimBefore, }: {
+    loop: boolean;
+    mediaDurationInSeconds: number | null;
+    trimAfter: number | undefined;
+    trimBefore: number | undefined;
+    playbackRate: number;
+}) => _InternalTypes["LoopDisplay"] | undefined;

package/dist/show-in-timeline.js

@@ -0,0 +1,31 @@
+import { useMemo } from 'react';
+import { Internals, useVideoConfig } from 'remotion';
+export const useLoopDisplay = ({ loop, mediaDurationInSeconds, playbackRate, trimAfter, trimBefore, }) => {
+    const { durationInFrames: compDuration, fps } = useVideoConfig();
+    const loopDisplay = useMemo(() => {
+        if (!loop || !mediaDurationInSeconds) {
+            return undefined;
+        }
+        const durationInFrames = Internals.calculateMediaDuration({
+            mediaDurationInFrames: mediaDurationInSeconds * fps,
+            playbackRate,
+            trimAfter,
+            trimBefore,
+        });
+        const maxTimes = compDuration / durationInFrames;
+        return {
+            numberOfTimes: maxTimes,
+            startOffset: 0,
+            durationInFrames,
+        };
+    }, [
+        compDuration,
+        fps,
+        loop,
+        mediaDurationInSeconds,
+        playbackRate,
+        trimAfter,
+        trimBefore,
+    ]);
+    return loopDisplay;
+};
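
As a worked example (not from the diff): useLoopDisplay tells the Studio timeline how many repetitions of a looped medium fit into the composition. Replacing the Internals.calculateMediaDuration call with plain arithmetic for the untrimmed case — an assumption made only for illustration — the math looks like this:

    // 300-frame composition at 30 fps, looping a 2.5-second clip at playbackRate 2.
    const compDuration = 300;
    const fps = 30;
    const mediaDurationInSeconds = 2.5;
    const playbackRate = 2;

    // Assumed equivalent of Internals.calculateMediaDuration with no trims:
    const durationInFrames = (mediaDurationInSeconds * fps) / playbackRate; // 37.5 frames per loop
    const numberOfTimes = compDuration / durationInFrames; // 8 repetitions shown in the timeline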

package/dist/use-media-in-timeline.d.ts

@@ -0,0 +1,19 @@
+import type { _InternalTypes } from 'remotion';
+import { type VolumeProp } from 'remotion';
+export declare const useMediaInTimeline: ({ volume, mediaVolume, src, mediaType, playbackRate, displayName, stack, showInTimeline, premountDisplay, postmountDisplay, loopDisplay, trimBefore, trimAfter, }: {
+    volume: VolumeProp | undefined;
+    mediaVolume: number;
+    src: string | undefined;
+    mediaType: "audio" | "video";
+    playbackRate: number;
+    displayName: string | null;
+    stack: string | null;
+    showInTimeline: boolean;
+    premountDisplay: number | null;
+    postmountDisplay: number | null;
+    loopDisplay: _InternalTypes["LoopDisplay"] | undefined;
+    trimBefore: number | undefined;
+    trimAfter: number | undefined;
+}) => {
+    id: string;
+};

package/dist/use-media-in-timeline.js

@@ -0,0 +1,103 @@
+import { useContext, useEffect, useState } from 'react';
+import { Internals, useCurrentFrame } from 'remotion';
+export const useMediaInTimeline = ({ volume, mediaVolume, src, mediaType, playbackRate, displayName, stack, showInTimeline, premountDisplay, postmountDisplay, loopDisplay, trimBefore, trimAfter, }) => {
+    const parentSequence = useContext(Internals.SequenceContext);
+    const startsAt = Internals.useMediaStartsAt();
+    const { registerSequence, unregisterSequence } = useContext(Internals.SequenceManager);
+    const [sequenceId] = useState(() => String(Math.random()));
+    const [mediaId] = useState(() => String(Math.random()));
+    const frame = useCurrentFrame();
+    const { volumes, duration, doesVolumeChange, nonce, rootId, isStudio, finalDisplayName, } = Internals.useBasicMediaInTimeline({
+        volume,
+        mediaVolume,
+        mediaType,
+        src,
+        displayName,
+        trimBefore,
+        trimAfter,
+        playbackRate,
+    });
+    useEffect(() => {
+        if (!src) {
+            throw new Error('No src passed');
+        }
+        if (!isStudio && window.process?.env?.NODE_ENV !== 'test') {
+            return;
+        }
+        if (!showInTimeline) {
+            return;
+        }
+        const loopIteration = loopDisplay
+            ? Math.floor(frame / loopDisplay.durationInFrames)
+            : 0;
+        if (loopDisplay) {
+            registerSequence({
+                type: 'sequence',
+                premountDisplay,
+                postmountDisplay,
+                parent: parentSequence?.id ?? null,
+                displayName: finalDisplayName,
+                rootId,
+                showInTimeline: true,
+                nonce,
+                loopDisplay,
+                stack,
+                from: 0,
+                duration,
+                id: sequenceId,
+            });
+        }
+        registerSequence({
+            type: mediaType,
+            src,
+            id: mediaId,
+            duration: loopDisplay?.durationInFrames ?? duration,
+            from: loopDisplay ? loopIteration * loopDisplay.durationInFrames : 0,
+            parent: loopDisplay ? sequenceId : (parentSequence?.id ?? null),
+            displayName: finalDisplayName,
+            rootId,
+            volume: volumes,
+            showInTimeline: true,
+            nonce,
+            startMediaFrom: 0 - startsAt,
+            doesVolumeChange,
+            loopDisplay: undefined,
+            playbackRate,
+            stack,
+            premountDisplay: null,
+            postmountDisplay: null,
+        });
+        return () => {
+            if (loopDisplay) {
+                unregisterSequence(sequenceId);
+            }
+            unregisterSequence(mediaId);
+        };
+    }, [
+        doesVolumeChange,
+        duration,
+        finalDisplayName,
+        isStudio,
+        loopDisplay,
+        mediaId,
+        mediaType,
+        nonce,
+        parentSequence?.id,
+        playbackRate,
+        postmountDisplay,
+        premountDisplay,
+        registerSequence,
+        rootId,
+        sequenceId,
+        showInTimeline,
+        src,
+        stack,
+        startsAt,
+        unregisterSequence,
+        volumes,
+        frame,
+    ]);
+    return {
+        id: mediaId,
+    };
+};
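
As an annotation (not part of the diff): when a loopDisplay is present, the hook registers a wrapping 'sequence' item plus a media item whose `from` follows the current loop iteration. The arithmetic in isolation, with hypothetical numbers:

    // Media loops every 90 frames; the playhead is at frame 200.
    const frame = 200;
    const loopDisplay = {durationInFrames: 90, numberOfTimes: 4, startOffset: 0};

    const loopIteration = Math.floor(frame / loopDisplay.durationInFrames); // 2
    const from = loopIteration * loopDisplay.durationInFrames;              // 180
    // The media item is registered 90 frames long, starting at frame 180,
    // nested under the wrapping sequence item.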

package/dist/video/media-player.d.ts

@@ -2,6 +2,7 @@ import type { LogLevel } from 'remotion';
 export declare const SEEK_THRESHOLD = 0.05;
 export type MediaPlayerInitResult = {
     type: 'success';
+    durationInSeconds: number;
 } | {
     type: 'unknown-container-format';
 } | {
@@ -25,13 +26,15 @@ export declare class MediaPlayer {
     private audioBufferIterator;
     private queuedAudioNodes;
     private gainNode;
+    private currentVolume;
     private sharedAudioContext;
     private audioSyncAnchor;
     private playing;
     private muted;
     private loop;
-    private
-    private
+    private fps;
+    private trimBefore;
+    private trimAfter;
     private animationFrameId;
     private videoAsyncId;
     private audioAsyncId;
@@ -43,22 +46,24 @@ export declare class MediaPlayer {
     private audioIteratorStarted;
     private readonly HEALTHY_BUFER_THRESHOLD_SECONDS;
     private onVideoFrameCallback?;
-    constructor({ canvas, src, logLevel, sharedAudioContext, loop,
+    constructor({ canvas, src, logLevel, sharedAudioContext, loop, trimBefore, trimAfter, playbackRate, audioStreamIndex, fps, }: {
         canvas: HTMLCanvasElement | null;
         src: string;
         logLevel: LogLevel;
         sharedAudioContext: AudioContext;
         loop: boolean;
-
-
+        trimBefore: number | undefined;
+        trimAfter: number | undefined;
         playbackRate: number;
         audioStreamIndex: number;
+        fps: number;
     });
     private input;
     private isReady;
     private hasAudio;
     private isCurrentlyBuffering;
     initialize(startTimeUnresolved: number): Promise<MediaPlayerInitResult>;
+    private clearCanvas;
     private cleanupAudioQueue;
     private cleanAudioIteratorAndNodes;
     seekTo(time: number): Promise<void>;
@@ -67,6 +72,7 @@ export declare class MediaPlayer {
     setMuted(muted: boolean): void;
     setVolume(volume: number): void;
     setPlaybackRate(rate: number): void;
+    setFps(fps: number): void;
     setLoop(loop: boolean): void;
     dispose(): void;
     private getPlaybackTime;
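
As a sketch of the updated surface, based only on the declarations above (the deep import path is an assumption and the class is internal to the package):

    import {MediaPlayer} from '@remotion/media/dist/video/media-player'; // hypothetical deep import

    declare const canvas: HTMLCanvasElement;
    declare const sharedAudioContext: AudioContext;

    const player = new MediaPlayer({
        canvas,
        src: 'https://example.com/video.mp4',
        logLevel: 'info',
        sharedAudioContext,
        loop: true,
        trimBefore: 30,       // trims are passed as frame counts…
        trimAfter: 300,
        playbackRate: 1,
        audioStreamIndex: 0,
        fps: 30,              // …together with the composition fps to resolve them
    });

    const result = await player.initialize(0);
    if (result.type === 'success') {
        // initialize() now also reports the probed media duration.
        console.log(result.durationInSeconds);
    }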

package/dist/video/media-player.js

@@ -1,12 +1,12 @@
 import { ALL_FORMATS, AudioBufferSink, CanvasSink, Input, UrlSource, } from 'mediabunny';
 import { Internals } from 'remotion';
+import { getTimeInSeconds } from '../get-time-in-seconds';
 import { isNetworkError } from '../is-network-error';
-import { resolvePlaybackTime } from './resolve-playback-time';
 import { sleep, withTimeout } from './timeout-utils';
 export const SEEK_THRESHOLD = 0.05;
 const AUDIO_BUFFER_TOLERANCE_THRESHOLD = 0.1;
 export class MediaPlayer {
-    constructor({ canvas, src, logLevel, sharedAudioContext, loop,
+    constructor({ canvas, src, logLevel, sharedAudioContext, loop, trimBefore, trimAfter, playbackRate, audioStreamIndex, fps, }) {
         this.canvasSink = null;
         this.videoFrameIterator = null;
         this.nextFrame = null;
@@ -14,7 +14,9 @@ export class MediaPlayer {
         this.audioBufferIterator = null;
         this.queuedAudioNodes = new Set();
         this.gainNode = null;
-
+        this.currentVolume = 1;
+        // this is the time difference between Web Audio timeline
+        // and media file timeline
         this.audioSyncAnchor = 0;
         this.playing = false;
         this.muted = false;
@@ -100,7 +102,11 @@ export class MediaPlayer {
             if (!newNextFrame) {
                 break;
             }
-
+            const playbackTime = this.getPlaybackTime();
+            if (playbackTime === null) {
+                continue;
+            }
+            if (newNextFrame.timestamp <= playbackTime) {
                 continue;
             }
             else {
@@ -144,7 +150,7 @@ export class MediaPlayer {
             totalBufferDuration += duration;
             this.audioBufferHealth = Math.max(0, totalBufferDuration / this.playbackRate);
             this.maybeResumeFromBuffering(totalBufferDuration / this.playbackRate);
-            if (this.playing
+            if (this.playing) {
                 if (isFirstBuffer) {
                     this.audioSyncAnchor =
                         this.sharedAudioContext.currentTime - timestamp;
@@ -157,10 +163,16 @@ export class MediaPlayer {
                 }
                 this.scheduleAudioChunk(buffer, timestamp);
             }
-
+            const playbackTime = this.getPlaybackTime();
+            if (playbackTime === null) {
+                continue;
+            }
+            if (timestamp - playbackTime >= 1) {
                 await new Promise((resolve) => {
                     const check = () => {
-
+                        const currentPlaybackTime = this.getPlaybackTime();
+                        if (currentPlaybackTime !== null &&
+                            timestamp - currentPlaybackTime < 1) {
                             resolve();
                         }
                         else {
@@ -182,12 +194,13 @@ export class MediaPlayer {
         this.sharedAudioContext = sharedAudioContext;
         this.playbackRate = playbackRate;
         this.loop = loop;
-        this.
-        this.
+        this.trimBefore = trimBefore;
+        this.trimAfter = trimAfter;
         this.audioStreamIndex = audioStreamIndex ?? 0;
+        this.fps = fps;
         if (canvas) {
             const context = canvas.getContext('2d', {
-                alpha:
+                alpha: true,
                 desynchronized: true,
             });
             if (!context) {
@@ -227,12 +240,12 @@ export class MediaPlayer {
             Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, `[MediaPlayer] Failed to recognize format for ${this.src}`, error);
             return { type: 'unknown-container-format' };
         }
-        const [
+        const [durationInSeconds, videoTrack, audioTracks] = await Promise.all([
            input.computeDuration(),
            input.getPrimaryVideoTrack(),
            input.getAudioTracks(),
        ]);
-        this.totalDuration =
+        this.totalDuration = durationInSeconds;
         const audioTrack = audioTracks[this.audioStreamIndex] ?? null;
         if (!videoTrack && !audioTrack) {
             return { type: 'no-tracks' };
@@ -254,14 +267,21 @@ export class MediaPlayer {
             this.gainNode = this.sharedAudioContext.createGain();
             this.gainNode.connect(this.sharedAudioContext.destination);
         }
-        const startTime =
-
+        const startTime = getTimeInSeconds({
+            unloopedTimeInSeconds: startTimeUnresolved,
             playbackRate: this.playbackRate,
             loop: this.loop,
-
-
+            trimBefore: this.trimBefore,
+            trimAfter: this.trimAfter,
             mediaDurationInSeconds: this.totalDuration,
+            fps: this.fps,
+            ifNoMediaDuration: 'infinity',
+            src: this.src,
         });
+        if (startTime === null) {
+            this.clearCanvas();
+            return { type: 'success', durationInSeconds: this.totalDuration };
+        }
         if (this.sharedAudioContext) {
             this.audioSyncAnchor = this.sharedAudioContext.currentTime - startTime;
         }
@@ -271,7 +291,7 @@ export class MediaPlayer {
             this.startVideoIterator(startTime),
         ]);
         this.startRenderLoop();
-        return { type: 'success' };
+        return { type: 'success', durationInSeconds };
         }
         catch (error) {
             const err = error;
@@ -283,6 +303,11 @@ export class MediaPlayer {
             throw error;
         }
     }
+    clearCanvas() {
+        if (this.context && this.canvas) {
+            this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
+        }
+    }
     cleanupAudioQueue() {
         for (const node of this.queuedAudioNodes) {
             node.stop();
@@ -299,16 +324,25 @@ export class MediaPlayer {
     async seekTo(time) {
         if (!this.isReady())
             return;
-        const newTime =
-
+        const newTime = getTimeInSeconds({
+            unloopedTimeInSeconds: time,
             playbackRate: this.playbackRate,
             loop: this.loop,
-
-
-            mediaDurationInSeconds: this.totalDuration,
+            trimBefore: this.trimBefore,
+            trimAfter: this.trimAfter,
+            mediaDurationInSeconds: this.totalDuration ?? null,
+            fps: this.fps,
+            ifNoMediaDuration: 'infinity',
+            src: this.src,
         });
+        if (newTime === null) {
+            this.clearCanvas();
+            await this.cleanAudioIteratorAndNodes();
+            return;
+        }
         const currentPlaybackTime = this.getPlaybackTime();
-        const isSignificantSeek =
+        const isSignificantSeek = currentPlaybackTime === null ||
+            Math.abs(newTime - currentPlaybackTime) > SEEK_THRESHOLD;
         if (isSignificantSeek) {
             this.nextFrame = null;
             this.audioSyncAnchor = this.sharedAudioContext.currentTime - newTime;
@@ -342,8 +376,8 @@ export class MediaPlayer {
     }
     setMuted(muted) {
         this.muted = muted;
-        if (
-            this.
+        if (this.gainNode) {
+            this.gainNode.gain.value = muted ? 0 : this.currentVolume;
         }
     }
     setVolume(volume) {
@@ -351,11 +385,17 @@ export class MediaPlayer {
             return;
         }
         const appliedVolume = Math.max(0, volume);
-        this.
+        this.currentVolume = appliedVolume;
+        if (!this.muted) {
+            this.gainNode.gain.value = appliedVolume;
+        }
     }
     setPlaybackRate(rate) {
         this.playbackRate = rate;
     }
+    setFps(fps) {
+        this.fps = fps;
+    }
     setLoop(loop) {
         this.loop = loop;
     }
@@ -367,15 +407,7 @@ export class MediaPlayer {
         this.videoAsyncId++;
     }
     getPlaybackTime() {
-
-        return resolvePlaybackTime({
-            absolutePlaybackTimeInSeconds: absoluteTime,
-            playbackRate: this.playbackRate,
-            loop: this.loop,
-            trimBeforeInSeconds: this.trimBeforeSeconds,
-            trimAfterInSeconds: this.trimAfterSeconds,
-            mediaDurationInSeconds: this.totalDuration,
-        });
+        return this.sharedAudioContext.currentTime - this.audioSyncAnchor;
     }
     scheduleAudioChunk(buffer, mediaTimestamp) {
         const targetTime = mediaTimestamp + this.audioSyncAnchor;
@@ -430,10 +462,14 @@ export class MediaPlayer {
         }
     }
     shouldRenderFrame() {
+        const playbackTime = this.getPlaybackTime();
+        if (playbackTime === null) {
+            return false;
+        }
         return (!this.isBuffering &&
             this.canRenderVideo() &&
             this.nextFrame !== null &&
-            this.nextFrame.timestamp <=
+            this.nextFrame.timestamp <= playbackTime);
     }
     drawCurrentFrame() {
         if (this.context && this.nextFrame) {
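
As an annotation (not from the diff): getPlaybackTime now reads the media time straight off the audio-clock anchor instead of going through the removed resolve-playback-time helper. The relationship it relies on, shown in isolation with hypothetical numbers:

    // When playback starts or a significant seek happens:
    //     audioSyncAnchor = audioContext.currentTime - mediaTime
    // so at any later instant:
    //     mediaTime = audioContext.currentTime - audioSyncAnchor
    const anchorFor = (audioClockNow: number, mediaTime: number) => audioClockNow - mediaTime;
    const playbackTimeAt = (audioClockNow: number, anchor: number) => audioClockNow - anchor;

    const anchor = anchorFor(12.0, 3.5); // seek to 3.5s while the audio clock reads 12.0s
    playbackTimeAt(12.75, anchor);       // 0.75s later the playback time is 4.25s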

package/dist/video/video-for-preview.d.ts

@@ -1,19 +1,19 @@
 import React from 'react';
-import type { LogLevel, LoopVolumeCurveBehavior,
+import type { LogLevel, LoopVolumeCurveBehavior, VolumeProp } from 'remotion';
 import type { FallbackOffthreadVideoProps } from './props';
-type
-    readonly className: string | undefined;
-    readonly loop: boolean;
+type NewVideoForPreviewProps = {
     readonly src: string;
+    readonly style: React.CSSProperties | undefined;
+    readonly playbackRate: number;
     readonly logLevel: LogLevel;
+    readonly className: string | undefined;
     readonly muted: boolean;
-    readonly name: string | undefined;
     readonly volume: VolumeProp;
     readonly loopVolumeCurveBehavior: LoopVolumeCurveBehavior;
-    readonly onVideoFrame:
-    readonly playbackRate: number;
-    readonly style: React.CSSProperties;
+    readonly onVideoFrame: undefined | ((frame: CanvasImageSource) => void);
     readonly showInTimeline: boolean;
+    readonly loop: boolean;
+    readonly name: string | undefined;
     readonly trimAfter: number | undefined;
     readonly trimBefore: number | undefined;
     readonly stack: string | null;
@@ -21,5 +21,5 @@ type InnerVideoProps = {
     readonly fallbackOffthreadVideoProps: FallbackOffthreadVideoProps;
     readonly audioStreamIndex: number;
 };
-export declare const VideoForPreview: React.FC<
+export declare const VideoForPreview: React.FC<NewVideoForPreviewProps>;
 export {};