@remotion/media 4.0.351 → 4.0.352
- package/dist/audio-extraction/audio-cache.js +4 -0
- package/dist/audio-extraction/audio-iterator.js +5 -2
- package/dist/audio-extraction/audio-manager.d.ts +2 -1
- package/dist/audio-extraction/audio-manager.js +4 -3
- package/dist/audio-extraction/extract-audio.js +1 -0
- package/dist/caches.d.ts +4 -2
- package/dist/caches.js +38 -1
- package/dist/esm/index.mjs +75 -23
- package/dist/video/media-player.d.ts +64 -0
- package/dist/video/media-player.js +501 -0
- package/dist/video/new-video-for-preview.d.ts +10 -0
- package/dist/video/new-video-for-preview.js +114 -0
- package/dist/video/props.d.ts +1 -0
- package/dist/video/video-for-rendering.js +23 -9
- package/dist/video-extraction/keyframe-manager.js +3 -2
- package/dist/video-extraction/media-player.d.ts +64 -0
- package/dist/video-extraction/media-player.js +501 -0
- package/dist/video-extraction/new-video-for-preview.d.ts +10 -0
- package/dist/video-extraction/new-video-for-preview.js +114 -0
- package/package.json +3 -3
`package/dist/video/video-for-rendering.js`:

```diff
@@ -1,21 +1,18 @@
 import { jsx as _jsx } from "react/jsx-runtime";
 import { useContext, useLayoutEffect, useMemo, useRef, useState, } from 'react';
-import { cancelRender, Internals, useCurrentFrame, useDelayRender, useRemotionEnvironment, } from 'remotion';
+import { cancelRender, Internals, useCurrentFrame, useDelayRender, useRemotionEnvironment, useVideoConfig, } from 'remotion';
 import { extractFrameViaBroadcastChannel } from '../video-extraction/extract-frame-via-broadcast-channel';
 export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted, loopVolumeCurveBehavior, delayRenderRetries, delayRenderTimeoutInMilliseconds,
 // call when a frame of the video, i.e. frame drawn on canvas
-onVideoFrame, logLevel = window.remotion_logLevel, loop, style, }) => {
+onVideoFrame, logLevel = window.remotion_logLevel, loop, style, className, }) => {
     const absoluteFrame = Internals.useTimelinePosition();
-    const
+    const { fps } = useVideoConfig();
     const canvasRef = useRef(null);
     const { registerRenderAsset, unregisterRenderAsset } = useContext(Internals.RenderAssetManager);
     const frame = useCurrentFrame();
     const volumePropsFrame = Internals.useFrameForVolumeProp(loopVolumeCurveBehavior ?? 'repeat');
     const environment = useRemotionEnvironment();
     const [id] = useState(() => `${Math.random()}`.replace('0.', ''));
-    if (!videoConfig) {
-        throw new Error('No video config found');
-    }
     if (!src) {
         throw new TypeError('No `src` was passed to <Video>.');
     }
```
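This hunk swaps the manual `videoConfig` null check for the `useVideoConfig()` hook, which itself throws when no composition config is available, so only `fps` needs to be destructured. A minimal sketch of the pattern:

```tsx
import React from 'react';
import {useVideoConfig} from 'remotion';

const Example: React.FC = () => {
  // useVideoConfig() throws on its own if no composition config is
  // available, replacing the removed manual `if (!videoConfig)` guard.
  const {fps} = useVideoConfig();
  return <span>{fps} fps</span>;
};
```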
```diff
@@ -37,7 +34,6 @@ onVideoFrame, logLevel = window.remotion_logLevel, loop, style, }) => {
         }
         return true;
     }, [muted, volume]);
-    const { fps } = videoConfig;
     const { delayRender, continueRender } = useDelayRender();
     useLayoutEffect(() => {
         if (!canvasRef.current) {
```
```diff
@@ -64,7 +60,20 @@ onVideoFrame, logLevel = window.remotion_logLevel, loop, style, }) => {
             .then(({ frame: imageBitmap, audio }) => {
             if (imageBitmap) {
                 onVideoFrame?.(imageBitmap);
-                canvasRef.current?.getContext('2d')
+                const context = canvasRef.current?.getContext('2d');
+                if (!context) {
+                    return;
+                }
+                context.canvas.width =
+                    imageBitmap instanceof ImageBitmap
+                        ? imageBitmap.width
+                        : imageBitmap.displayWidth;
+                context.canvas.height =
+                    imageBitmap instanceof ImageBitmap
+                        ? imageBitmap.height
+                        : imageBitmap.displayHeight;
+                context.canvas.style.aspectRatio = `${context.canvas.width} / ${context.canvas.height}`;
+                context.drawImage(imageBitmap, 0, 0);
                 imageBitmap.close();
             }
             else if (window.remotion_videoEnabled) {
```
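The new drawing path sizes the canvas from the decoded frame before drawing. The frame can be either an `ImageBitmap` or a WebCodecs `VideoFrame`, and the two expose their dimensions under different property names, which is what the `instanceof` branches handle. A sketch of that rule (the helper name is ours, for illustration):

```ts
// Sketch only: ImageBitmap exposes width/height, while a WebCodecs
// VideoFrame exposes displayWidth/displayHeight.
const frameDimensions = (frame: ImageBitmap | VideoFrame) =>
  frame instanceof ImageBitmap
    ? {width: frame.width, height: frame.height}
    : {width: frame.displayWidth, height: frame.displayHeight};
```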
```diff
@@ -111,5 +120,10 @@ onVideoFrame, logLevel = window.remotion_logLevel, loop, style, }) => {
         volume,
         loop,
     ]);
-
+    const classNameValue = useMemo(() => {
+        return [Internals.OBJECTFIT_CONTAIN_CLASS_NAME, className]
+            .filter(Internals.truthy)
+            .join(' ');
+    }, [className]);
+    return _jsx("canvas", { ref: canvasRef, style: style, className: classNameValue });
 };
```
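The new return statement merges Remotion's internal object-fit class with the user-supplied `className`, dropping empty values before joining. A hedged sketch of the same pattern, with a stand-in for `Internals.truthy`:

```ts
// Stand-in for Internals.truthy: a type guard that filters out
// null/undefined entries before joining.
const truthy = <T>(value: T | null | undefined): value is T => Boolean(value);

const mergeClassNames = (...names: (string | undefined)[]): string =>
  names.filter(truthy).join(' ');
```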
`package/dist/video-extraction/keyframe-manager.js`:

```diff
@@ -1,4 +1,4 @@
-import {
+import { getMaxVideoCacheSize, getTotalCacheStats, SAFE_BACK_WINDOW_IN_SECONDS, } from '../caches';
 import { Log } from '../log';
 import { getFramesSinceKeyframe } from './get-frames-since-keyframe';
 export const makeKeyframeManager = () => {
```
```diff
@@ -69,7 +69,8 @@ export const makeKeyframeManager = () => {
     };
     const ensureToStayUnderMaxCacheSize = async (logLevel) => {
         let cacheStats = await getTotalCacheStats();
-
+        const maxCacheSize = getMaxVideoCacheSize(logLevel);
+        while (cacheStats.totalSize > maxCacheSize) {
             await deleteOldestKeyframeBank(logLevel);
             cacheStats = await getTotalCacheStats();
         }
```
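The change makes the cache budget explicit: `ensureToStayUnderMaxCacheSize` now reads the maximum once via `getMaxVideoCacheSize(logLevel)` and evicts the oldest keyframe bank until the total size fits. A self-contained sketch of that evict-until-under-budget loop (the callback signatures are ours):

```ts
type CacheStats = {totalSize: number};

// Evict oldest entries until the cache fits the budget again.
const stayUnderBudget = async (opts: {
  getTotalCacheStats: () => Promise<CacheStats>;
  maxCacheSize: number;
  deleteOldest: () => Promise<void>;
}): Promise<void> => {
  let stats = await opts.getTotalCacheStats();
  while (stats.totalSize > opts.maxCacheSize) {
    await opts.deleteOldest();
    stats = await opts.getTotalCacheStats();
  }
};
```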
`media-player.d.ts` (new file, listed under both `package/dist/video/` and `package/dist/video-extraction/`):

```diff
@@ -0,0 +1,64 @@
+import { type LogLevel } from '../log';
+export declare class MediaPlayer {
+    private canvas;
+    private context;
+    private src;
+    private logLevel;
+    private canvasSink;
+    private videoFrameIterator;
+    private nextFrame;
+    private audioSink;
+    private audioBufferIterator;
+    private queuedAudioNodes;
+    private gainNode;
+    private expectedAudioTime;
+    private sharedAudioContext;
+    private mediaTimeOffset;
+    private playing;
+    private animationFrameId;
+    private asyncId;
+    private initialized;
+    private totalDuration;
+    private actualFps;
+    private isStalled;
+    private onStalledChangeCallback?;
+    private lastAudioProgressAtMs;
+    private lastNetworkActivityAtMs;
+    private isNetworkActive;
+    private isSeeking;
+    private canStartAudio;
+    constructor({ canvas, src, logLevel, sharedAudioContext, }: {
+        canvas: HTMLCanvasElement;
+        src: string;
+        logLevel: LogLevel;
+        sharedAudioContext?: AudioContext | null;
+    });
+    initialize(startTime?: number): Promise<void>;
+    seekTo(time: number): void;
+    drawInitialFrame(time?: number): Promise<void>;
+    play(): Promise<void>;
+    pause(): void;
+    dispose(): void;
+    get currentTime(): number;
+    private getPlaybackTime;
+    get duration(): number;
+    get isPlaying(): boolean;
+    get stalled(): boolean;
+    onStalledChange(callback: (isStalled: boolean) => void): void;
+    private renderSingleFrame;
+    private startRenderLoop;
+    private stopRenderLoop;
+    private render;
+    private startVideoIterator;
+    private updateNextFrame;
+    private tryStartAudio;
+    private getCurrentTimeMs;
+    private resetAudioProgressStopwatch;
+    private getAudioLookaheadSec;
+    private calculateAudioStallThresholdSec;
+    private isNetworkStalled;
+    private checkVideoStall;
+    private checkIfStalled;
+    private updateStalledState;
+    private runAudioIterator;
+}
```
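The declaration above is the whole public surface: `initialize`, `seekTo`, `drawInitialFrame`, `play`, `pause`, `dispose`, plus read-only accessors and a stall callback. A hedged usage sketch, assuming `MediaPlayer` is imported from the built file above and this runs in an async context (canvas, URL, and log level are placeholders):

```ts
const canvas = document.querySelector('canvas') as HTMLCanvasElement;

const player = new MediaPlayer({
  canvas,
  src: 'https://example.com/video.mp4', // placeholder URL
  logLevel: 'info',
  sharedAudioContext: new AudioContext(),
});

await player.initialize(0); // probe tracks, draw first frame, start iterators
player.onStalledChange((stalled) => {
  console.log('buffering:', stalled); // feeds Remotion's buffer state
});
await player.play();  // resumes the AudioContext and the render loop
player.seekTo(12.5);  // seconds; large jumps restart the iterators
player.pause();
player.dispose();
```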
`media-player.js` (new file, listed under both `package/dist/video/` and `package/dist/video-extraction/`):

```diff
@@ -0,0 +1,501 @@
+import { ALL_FORMATS, AudioBufferSink, CanvasSink, Input, UrlSource, } from 'mediabunny';
+import { Log } from '../log';
+const SEEK_THRESHOLD = 0.05;
+export class MediaPlayer {
+    constructor({ canvas, src, logLevel, sharedAudioContext, }) {
+        this.canvasSink = null;
+        this.videoFrameIterator = null;
+        this.nextFrame = null;
+        this.audioSink = null;
+        this.audioBufferIterator = null;
+        this.queuedAudioNodes = new Set();
+        this.gainNode = null;
+        this.expectedAudioTime = 0;
+        this.sharedAudioContext = null;
+        this.mediaTimeOffset = 0;
+        this.playing = false;
+        this.animationFrameId = null;
+        this.asyncId = 0;
+        this.initialized = false;
+        this.totalDuration = 0;
+        this.actualFps = null;
+        // for remotion buffer state
+        this.isStalled = false;
+        this.lastAudioProgressAtMs = 0;
+        this.lastNetworkActivityAtMs = 0;
+        this.isNetworkActive = false;
+        this.isSeeking = false;
+        // A/V sync coordination
+        this.canStartAudio = false;
+        this.render = () => {
+            const currentPlaybackTime = this.getPlaybackTime();
+            if (this.nextFrame && this.nextFrame.timestamp <= currentPlaybackTime) {
+                Log.trace(this.logLevel, `[MediaPlayer] Drawing frame at ${this.nextFrame.timestamp.toFixed(3)}s (playback time: ${currentPlaybackTime.toFixed(3)}s)`);
+                this.context.drawImage(this.nextFrame.canvas, 0, 0);
+                // For video-only content, track video progress as audio progress
+                if (!this.audioSink) {
+                    this.resetAudioProgressStopwatch();
+                }
+                this.nextFrame = null;
+                this.updateNextFrame();
+            }
+            this.updateStalledState();
+            // continue render loop only if playing
+            if (this.playing) {
+                this.animationFrameId = requestAnimationFrame(this.render);
+            }
+            else {
+                this.animationFrameId = null;
+            }
+        };
+        this.startVideoIterator = async (timeToSeek) => {
+            if (!this.canvasSink) {
+                return;
+            }
+            this.asyncId++;
+            const currentAsyncId = this.asyncId;
+            await this.videoFrameIterator?.return();
+            this.videoFrameIterator = this.canvasSink.canvases(timeToSeek);
+            try {
+                const firstFrame = (await this.videoFrameIterator.next()).value ?? null;
+                const secondFrame = (await this.videoFrameIterator.next()).value ?? null;
+                if (currentAsyncId !== this.asyncId) {
+                    Log.trace(this.logLevel, `[MediaPlayer] Race condition detected, aborting startVideoIterator for ${timeToSeek.toFixed(3)}s`);
+                    return;
+                }
+                if (firstFrame) {
+                    Log.trace(this.logLevel, `[MediaPlayer] Drew initial frame ${firstFrame.timestamp.toFixed(3)}s`);
+                    this.context.drawImage(firstFrame.canvas, 0, 0);
+                    // For video-only content, track video progress as audio progress
+                    if (!this.audioSink) {
+                        this.resetAudioProgressStopwatch();
+                    }
+                    this.canStartAudio = true;
+                    this.isSeeking = false;
+                    this.tryStartAudio();
+                }
+                this.nextFrame = secondFrame ?? null;
+                if (secondFrame) {
+                    Log.trace(this.logLevel, `[MediaPlayer] Buffered next frame ${secondFrame.timestamp.toFixed(3)}s`);
+                    // For video-only content, track video progress as audio progress
+                    if (!this.audioSink) {
+                        this.resetAudioProgressStopwatch();
+                    }
+                    if (!this.canStartAudio) {
+                        this.canStartAudio = true;
+                        this.tryStartAudio();
+                    }
+                }
+                this.updateStalledState();
+            }
+            catch (error) {
+                Log.error('[MediaPlayer] Failed to start video iterator', error);
+            }
+        };
+        this.updateNextFrame = async () => {
+            if (!this.videoFrameIterator) {
+                return;
+            }
+            const currentAsyncId = this.asyncId;
+            try {
+                while (true) {
+                    const newNextFrame = (await this.videoFrameIterator.next()).value ?? null;
+                    if (!newNextFrame) {
+                        break;
+                    }
+                    if (currentAsyncId !== this.asyncId) {
+                        Log.trace(this.logLevel, `[MediaPlayer] Race condition detected in updateNextFrame`);
+                        break;
+                    }
+                    if (newNextFrame.timestamp <= this.getPlaybackTime()) {
+                        Log.trace(this.logLevel, `[MediaPlayer] Drawing immediate frame ${newNextFrame.timestamp.toFixed(3)}s`);
+                        this.context.drawImage(newNextFrame.canvas, 0, 0);
+                        // For video-only content, track video progress as audio progress
+                        if (!this.audioSink) {
+                            this.resetAudioProgressStopwatch();
+                        }
+                    }
+                    else {
+                        this.nextFrame = newNextFrame;
+                        Log.trace(this.logLevel, `[MediaPlayer] Buffered next frame ${newNextFrame.timestamp.toFixed(3)}s`);
+                        // For video-only content, track video progress as audio progress
+                        if (!this.audioSink) {
+                            this.resetAudioProgressStopwatch();
+                        }
+                        // Open audio gate when new frames become available
+                        if (!this.canStartAudio) {
+                            this.canStartAudio = true;
+                            this.tryStartAudio();
+                        }
+                        break;
+                    }
+                }
+            }
+            catch (error) {
+                Log.error('[MediaPlayer] Failed to update next frame', error);
+            }
+            this.updateStalledState();
+        };
+        this.runAudioIterator = async () => {
+            if (!this.audioSink ||
+                !this.sharedAudioContext ||
+                !this.audioBufferIterator ||
+                !this.gainNode) {
+                return;
+            }
+            try {
+                this.expectedAudioTime = this.sharedAudioContext.currentTime;
+                for await (const { buffer, timestamp } of this.audioBufferIterator) {
+                    const node = this.sharedAudioContext.createBufferSource();
+                    node.buffer = buffer;
+                    node.connect(this.gainNode);
+                    if (this.expectedAudioTime >= this.sharedAudioContext.currentTime) {
+                        node.start(this.expectedAudioTime);
+                    }
+                    else {
+                        const offset = this.sharedAudioContext.currentTime - this.expectedAudioTime;
+                        node.start(this.sharedAudioContext.currentTime, offset);
+                    }
+                    this.queuedAudioNodes.add(node);
+                    node.onended = () => {
+                        this.queuedAudioNodes.delete(node);
+                    };
+                    this.expectedAudioTime += buffer.duration;
+                    this.updateStalledState();
+                    // If we're more than a second ahead of the current playback time, let's slow down the loop until time has
+                    // passed. Use timestamp for throttling logic as it represents media time.
+                    if (timestamp - this.getPlaybackTime() >= 1) {
+                        await new Promise((resolve) => {
+                            const check = () => {
+                                if (timestamp - this.getPlaybackTime() < 1) {
+                                    resolve();
+                                }
+                                else {
+                                    requestAnimationFrame(check);
+                                }
+                            };
+                            check();
+                        });
+                    }
+                }
+            }
+            catch (error) {
+                Log.error('[MediaPlayer] Failed to run audio iterator', error);
+            }
+        };
+        this.canvas = canvas;
+        this.src = src;
+        this.logLevel = logLevel ?? 'info';
+        this.sharedAudioContext = sharedAudioContext || null;
+        const context = canvas.getContext('2d', {
+            alpha: false,
+            desynchronized: true,
+        });
+        if (!context) {
+            throw new Error('Could not get 2D context from canvas');
+        }
+        this.context = context;
+        // Initialize audio progress stopwatch
+        this.resetAudioProgressStopwatch();
+        Log.trace(this.logLevel, `[MediaPlayer] Created for src: ${src}`);
+    }
+    async initialize(startTime = 0) {
+        if (this.initialized) {
+            Log.trace(this.logLevel, `[MediaPlayer] Already initialized, skipping`);
+            return;
+        }
+        try {
+            Log.trace(this.logLevel, `[MediaPlayer] Initializing at startTime: ${startTime.toFixed(3)}s...`);
+            const urlSource = new UrlSource(this.src);
+            urlSource.onread = () => {
+                this.lastNetworkActivityAtMs = this.getCurrentTimeMs();
+                this.isNetworkActive = true;
+            };
+            const input = new Input({
+                source: urlSource,
+                formats: ALL_FORMATS,
+            });
+            this.totalDuration = await input.computeDuration();
+            const videoTrack = await input.getPrimaryVideoTrack();
+            const audioTrack = await input.getPrimaryAudioTrack();
+            if (!videoTrack && !audioTrack) {
+                throw new Error(`No video or audio track found for ${this.src}`);
+            }
+            if (videoTrack) {
+                this.canvasSink = new CanvasSink(videoTrack, {
+                    poolSize: 2,
+                    fit: 'contain',
+                });
+                this.canvas.width = videoTrack.displayWidth;
+                this.canvas.height = videoTrack.displayHeight;
+                // Extract actual FPS for stall detection
+                const packetStats = await videoTrack.computePacketStats();
+                this.actualFps = packetStats.averagePacketRate;
+                Log.trace(this.logLevel, `[MediaPlayer] Detected video FPS: ${this.actualFps}`);
+            }
+            if (audioTrack && this.sharedAudioContext) {
+                this.audioSink = new AudioBufferSink(audioTrack);
+                this.gainNode = this.sharedAudioContext.createGain();
+                this.gainNode.connect(this.sharedAudioContext.destination);
+            }
+            // For audio-only content, allow audio to start immediately
+            if (!videoTrack && audioTrack) {
+                this.canStartAudio = true;
+            }
+            // Initialize timing offset based on actual starting position
+            if (this.sharedAudioContext) {
+                this.mediaTimeOffset = this.sharedAudioContext.currentTime - startTime;
+                Log.trace(this.logLevel, `[MediaPlayer] Set mediaTimeOffset to ${this.mediaTimeOffset.toFixed(3)}s (audioContext: ${this.sharedAudioContext.currentTime.toFixed(3)}s, startTime: ${startTime.toFixed(3)}s)`);
+                this.lastAudioProgressAtMs = this.getCurrentTimeMs();
+                this.lastNetworkActivityAtMs = this.getCurrentTimeMs();
+            }
+            this.initialized = true;
+            await this.startVideoIterator(startTime);
+            this.startRenderLoop();
+            Log.trace(this.logLevel, `[MediaPlayer] Initialized successfully with iterators started, duration: ${this.totalDuration}s`);
+        }
+        catch (error) {
+            Log.error('[MediaPlayer] Failed to initialize', error);
+            throw error;
+        }
+    }
+    seekTo(time) {
+        if (!this.initialized || !this.sharedAudioContext) {
+            return;
+        }
+        // Ensure mediaTimeOffset is initialized (safety fallback)
+        if (this.mediaTimeOffset === 0) {
+            this.mediaTimeOffset = this.sharedAudioContext.currentTime - time;
+            Log.trace(this.logLevel, `[MediaPlayer] Late-initialized mediaTimeOffset to ${this.mediaTimeOffset.toFixed(3)}s`);
+        }
+        const newTime = Math.max(0, Math.min(time, this.totalDuration));
+        const currentPlaybackTime = this.getPlaybackTime();
+        const isSignificantSeek = Math.abs(newTime - currentPlaybackTime) > SEEK_THRESHOLD;
+        // Update offset to make audio context time correspond to new media time
+        this.mediaTimeOffset = this.sharedAudioContext.currentTime - newTime;
+        if (isSignificantSeek) {
+            Log.trace(this.logLevel, `[MediaPlayer] Significant seek to ${newTime.toFixed(3)}s - creating new iterator`);
+            this.isSeeking = true;
+            this.canStartAudio = false;
+            this.updateStalledState();
+            // Stop existing audio first
+            if (this.playing && this.audioSink) {
+                this.audioBufferIterator?.return();
+                this.audioBufferIterator = null;
+                // Stop current audio nodes
+                for (const node of this.queuedAudioNodes) {
+                    node.stop();
+                }
+                this.queuedAudioNodes.clear();
+            }
+            // Start video iterator (which will open audio gate when ready)
+            this.startVideoIterator(newTime);
+        }
+        else {
+            Log.trace(this.logLevel, `[MediaPlayer] Minor time update to ${newTime.toFixed(3)}s - using existing iterator`);
+            // if paused, trigger a single frame update to show current position
+            if (!this.playing) {
+                this.renderSingleFrame();
+            }
+        }
+    }
+    async drawInitialFrame(time = 0) {
+        if (!this.initialized || !this.canvasSink) {
+            Log.trace(this.logLevel, `[MediaPlayer] Cannot draw initial frame - not initialized or no canvas sink`);
+            return;
+        }
+        try {
+            Log.trace(this.logLevel, `[MediaPlayer] Drawing initial frame at ${time.toFixed(3)}s`);
+            // create temporary iterator just to get the first frame
+            const tempIterator = this.canvasSink.canvases(time);
+            const firstFrame = (await tempIterator.next()).value;
+            if (firstFrame) {
+                this.context.drawImage(firstFrame.canvas, 0, 0);
+                Log.trace(this.logLevel, `[MediaPlayer] Drew initial frame at timestamp ${firstFrame.timestamp.toFixed(3)}s`);
+            }
+            else {
+                Log.trace(this.logLevel, `[MediaPlayer] No frame available at ${time.toFixed(3)}s`);
+            }
+            // clean up the temporary iterator
+            await tempIterator.return();
+        }
+        catch (error) {
+            Log.error('[MediaPlayer] Failed to draw initial frame', error);
+        }
+    }
+    async play() {
+        if (!this.initialized || !this.sharedAudioContext) {
+            return;
+        }
+        if (!this.playing) {
+            if (this.sharedAudioContext.state === 'suspended') {
+                await this.sharedAudioContext.resume();
+            }
+            this.playing = true;
+            Log.trace(this.logLevel, `[MediaPlayer] Play - starting render loop`);
+            this.startRenderLoop();
+            // Audio will start automatically when video signals readiness via tryStartAudio()
+            this.tryStartAudio();
+        }
+    }
+    pause() {
+        if (this.playing) {
+            this.playing = false;
+            // stop audio iterator
+            this.audioBufferIterator?.return();
+            this.audioBufferIterator = null;
+            // stop all playing audio nodes
+            for (const node of this.queuedAudioNodes) {
+                node.stop();
+            }
+            this.queuedAudioNodes.clear();
+            Log.trace(this.logLevel, `[MediaPlayer] Pause - stopping render loop`);
+            this.stopRenderLoop();
+        }
+    }
+    dispose() {
+        Log.trace(this.logLevel, `[MediaPlayer] Disposing...`);
+        this.stopRenderLoop();
+        // clean up video resources
+        this.videoFrameIterator?.return();
+        this.videoFrameIterator = null;
+        this.nextFrame = null;
+        this.canvasSink = null;
+        // Clean up audio resources
+        for (const node of this.queuedAudioNodes) {
+            node.stop();
+        }
+        this.queuedAudioNodes.clear();
+        this.audioBufferIterator?.return();
+        this.audioBufferIterator = null;
+        this.audioSink = null;
+        this.gainNode = null;
+        this.initialized = false;
+        this.asyncId++;
+    }
+    get currentTime() {
+        return this.getPlaybackTime();
+    }
+    // current position in the media
+    getPlaybackTime() {
+        if (!this.sharedAudioContext) {
+            return 0;
+        }
+        // Audio context is single source of truth
+        return this.sharedAudioContext.currentTime - this.mediaTimeOffset;
+    }
+    get duration() {
+        return this.totalDuration;
+    }
+    get isPlaying() {
+        return this.playing;
+    }
+    get stalled() {
+        return this.isStalled;
+    }
+    onStalledChange(callback) {
+        this.onStalledChangeCallback = callback;
+    }
+    renderSingleFrame() {
+        const currentPlaybackTime = this.getPlaybackTime();
+        if (this.nextFrame && this.nextFrame.timestamp <= currentPlaybackTime) {
+            Log.trace(this.logLevel, `[MediaPlayer] Single frame update at ${this.nextFrame.timestamp.toFixed(3)}s`);
+            this.context.drawImage(this.nextFrame.canvas, 0, 0);
+            // For video-only content, track video progress as audio progress
+            if (!this.audioSink) {
+                this.resetAudioProgressStopwatch();
+            }
+            this.nextFrame = null;
+            this.updateNextFrame();
+        }
+    }
+    startRenderLoop() {
+        if (this.animationFrameId !== null) {
+            return;
+        }
+        Log.trace(this.logLevel, `[MediaPlayer] Starting render loop`);
+        this.render();
+    }
+    stopRenderLoop() {
+        if (this.animationFrameId !== null) {
+            cancelAnimationFrame(this.animationFrameId);
+            this.animationFrameId = null;
+            Log.trace(this.logLevel, `[MediaPlayer] Stopped render loop`);
+        }
+    }
+    // A/V sync coordination methods (WIP)
+    tryStartAudio() {
+        // Only start if: playing + audio exists + gate is open + not already started
+        if (this.playing &&
+            this.audioSink &&
+            this.canStartAudio &&
+            !this.audioBufferIterator) {
+            this.audioBufferIterator = this.audioSink.buffers(this.getPlaybackTime());
+            this.runAudioIterator();
+            this.resetAudioProgressStopwatch();
+            Log.trace(this.logLevel, '[MediaPlayer] Audio started - A/V sync established');
+        }
+    }
+    // Unified time reference for stall detection
+    getCurrentTimeMs() {
+        if (!this.sharedAudioContext) {
+            return performance.now();
+        }
+        return this.sharedAudioContext.currentTime * 1000;
+    }
+    // Stall detection methods
+    resetAudioProgressStopwatch() {
+        this.lastAudioProgressAtMs = this.getCurrentTimeMs();
+    }
+    getAudioLookaheadSec() {
+        if (!this.sharedAudioContext)
+            return 0;
+        return this.expectedAudioTime - this.sharedAudioContext.currentTime;
+    }
+    calculateAudioStallThresholdSec() {
+        return 0.2; // Need 200ms of audio scheduled ahead
+    }
+    isNetworkStalled() {
+        const nowMs = this.getCurrentTimeMs();
+        const timeSinceNetworkMs = nowMs - this.lastNetworkActivityAtMs;
+        if (timeSinceNetworkMs > 100) {
+            this.isNetworkActive = false;
+        }
+        return !this.isNetworkActive && timeSinceNetworkMs >= 500;
+    }
+    checkVideoStall() {
+        if (!this.actualFps)
+            return false;
+        const nowMs = this.getCurrentTimeMs();
+        const frameIntervalMs = 1000 / this.actualFps;
+        const STALL_FRAME_COUNT = 6;
+        const calculatedThresholdMs = frameIntervalMs * STALL_FRAME_COUNT;
+        const MIN_THRESHOLD_MS = 150;
+        const MAX_THRESHOLD_MS = 300;
+        const threshold = Math.min(Math.max(calculatedThresholdMs, MIN_THRESHOLD_MS), MAX_THRESHOLD_MS);
+        // Use a separate video progress tracker for video-only content
+        const timeSinceVideoProgressMs = nowMs - this.lastAudioProgressAtMs; // Reuse for now
+        return (!this.nextFrame &&
+            timeSinceVideoProgressMs > threshold &&
+            this.playing &&
+            this.currentTime < this.duration);
+    }
+    checkIfStalled() {
+        // Only check what matters for playback readiness
+        if (this.audioSink && this.playing) {
+            const audioLookaheadSec = this.getAudioLookaheadSec();
+            const isAudioStarved = audioLookaheadSec < this.calculateAudioStallThresholdSec();
+            return isAudioStarved && this.isNetworkStalled();
+        }
+        // Video-only fallback
+        if (!this.audioSink) {
+            return this.checkVideoStall() && this.isNetworkStalled();
+        }
+        return false; // Remove: return this.isSeeking;
+    }
+    updateStalledState() {
+        const isStalled = this.checkIfStalled();
+        this.isStalled = isStalled;
+        this.onStalledChangeCallback?.(isStalled);
+    }
+}
```
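Most of the class hangs off one clock rule: the shared `AudioContext` is the single source of truth, and media time is derived as `audioContext.currentTime - mediaTimeOffset`. Seeking never touches the clock; it re-anchors the offset. The arithmetic, isolated as a sketch:

```ts
// Media time derived from the audio clock.
const getPlaybackTime = (ctx: AudioContext, mediaTimeOffset: number): number =>
  ctx.currentTime - mediaTimeOffset;

// Seeking to `t` seconds re-anchors the offset so that
// getPlaybackTime(ctx, offset) === t at this instant.
const offsetForSeek = (ctx: AudioContext, t: number): number =>
  ctx.currentTime - t;
```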
`new-video-for-preview.d.ts` (new file, listed under both `package/dist/video/` and `package/dist/video-extraction/`):

```diff
@@ -0,0 +1,10 @@
+import React from 'react';
+import { type LogLevel } from '../log';
+type NewVideoForPreviewProps = {
+    readonly src: string;
+    readonly style?: React.CSSProperties;
+    readonly playbackRate?: number;
+    readonly logLevel?: LogLevel;
+};
+export declare const NewVideoForPreview: React.FC<NewVideoForPreviewProps>;
+export {};
```