@remotion/media 4.0.353 → 4.0.354
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/audio/audio-for-rendering.js +37 -27
- package/dist/audio/audio.js +6 -3
- package/dist/audio/props.d.ts +0 -5
- package/dist/audio-extraction/extract-audio.d.ts +6 -3
- package/dist/audio-extraction/extract-audio.js +16 -7
- package/dist/audio-for-rendering.d.ts +3 -0
- package/dist/audio-for-rendering.js +94 -0
- package/dist/audio.d.ts +3 -0
- package/dist/audio.js +60 -0
- package/dist/audiodata-to-array.d.ts +0 -0
- package/dist/audiodata-to-array.js +1 -0
- package/dist/convert-audiodata/apply-volume.d.ts +1 -0
- package/dist/convert-audiodata/apply-volume.js +17 -0
- package/dist/convert-audiodata/convert-audiodata.d.ts +2 -2
- package/dist/convert-audiodata/convert-audiodata.js +13 -7
- package/dist/convert-audiodata/data-types.d.ts +1 -0
- package/dist/convert-audiodata/data-types.js +22 -0
- package/dist/convert-audiodata/is-planar-format.d.ts +1 -0
- package/dist/convert-audiodata/is-planar-format.js +3 -0
- package/dist/convert-audiodata/log-audiodata.d.ts +1 -0
- package/dist/convert-audiodata/log-audiodata.js +8 -0
- package/dist/convert-audiodata/resample-audiodata.d.ts +1 -2
- package/dist/convert-audiodata/resample-audiodata.js +39 -18
- package/dist/convert-audiodata/trim-audiodata.d.ts +0 -0
- package/dist/convert-audiodata/trim-audiodata.js +1 -0
- package/dist/deserialized-audiodata.d.ts +15 -0
- package/dist/deserialized-audiodata.js +26 -0
- package/dist/esm/index.mjs +206 -120
- package/dist/extract-audio.d.ts +7 -0
- package/dist/extract-audio.js +98 -0
- package/dist/extract-frame-and-audio.d.ts +3 -2
- package/dist/extract-frame-and-audio.js +4 -3
- package/dist/extract-frame-via-broadcast-channel.d.ts +15 -0
- package/dist/extract-frame-via-broadcast-channel.js +104 -0
- package/dist/extract-frame.d.ts +27 -0
- package/dist/extract-frame.js +21 -0
- package/dist/extrct-audio.d.ts +7 -0
- package/dist/extrct-audio.js +94 -0
- package/dist/get-frames-since-keyframe.d.ts +22 -0
- package/dist/get-frames-since-keyframe.js +41 -0
- package/dist/keyframe-bank.d.ts +25 -0
- package/dist/keyframe-bank.js +120 -0
- package/dist/keyframe-manager.d.ts +23 -0
- package/dist/keyframe-manager.js +170 -0
- package/dist/looped-frame.d.ts +9 -0
- package/dist/looped-frame.js +10 -0
- package/dist/new-video-for-rendering.d.ts +3 -0
- package/dist/new-video-for-rendering.js +108 -0
- package/dist/new-video.d.ts +3 -0
- package/dist/new-video.js +37 -0
- package/dist/props.d.ts +29 -0
- package/dist/props.js +1 -0
- package/dist/remember-actual-matroska-timestamps.d.ts +4 -0
- package/dist/remember-actual-matroska-timestamps.js +19 -0
- package/dist/serialize-videoframe.d.ts +0 -0
- package/dist/serialize-videoframe.js +1 -0
- package/dist/video/media-player.d.ts +62 -0
- package/dist/video/media-player.js +361 -0
- package/dist/video/new-video-for-preview.d.ts +10 -0
- package/dist/video/new-video-for-preview.js +108 -0
- package/dist/video/props.d.ts +0 -5
- package/dist/video/timeout-utils.d.ts +2 -0
- package/dist/video/timeout-utils.js +18 -0
- package/dist/video/video-for-preview.d.ts +11 -0
- package/dist/video/video-for-preview.js +113 -0
- package/dist/video/video-for-rendering.js +41 -31
- package/dist/video/video.js +2 -2
- package/dist/video-extraction/extract-frame-via-broadcast-channel.d.ts +3 -2
- package/dist/video-extraction/extract-frame-via-broadcast-channel.js +9 -5
- package/dist/video-extraction/extract-frame.js +3 -0
- package/dist/video-extraction/get-frames-since-keyframe.d.ts +1 -1
- package/dist/video-extraction/get-frames-since-keyframe.js +6 -7
- package/dist/video-extraction/media-player.d.ts +64 -0
- package/dist/video-extraction/media-player.js +501 -0
- package/dist/video-extraction/new-video-for-preview.d.ts +10 -0
- package/dist/video-extraction/new-video-for-preview.js +114 -0
- package/dist/video-for-rendering.d.ts +3 -0
- package/dist/video-for-rendering.js +108 -0
- package/dist/video.d.ts +3 -0
- package/dist/video.js +37 -0
- package/package.json +3 -3
|
@@ -0,0 +1,361 @@
|
|
|
1
|
+
import { ALL_FORMATS, AudioBufferSink, CanvasSink, Input, UrlSource, } from 'mediabunny';
|
|
2
|
+
import { Internals } from 'remotion';
|
|
3
|
+
import { sleep, withTimeout } from './timeout-utils';
|
|
4
|
+
// Seek distance in seconds below which seekTo() considers the request
// insignificant and keeps the current audio/video iterators running.
export const SEEK_THRESHOLD = 0.05;
|
|
5
|
+
/**
 * Plays a media file (via mediabunny) into a 2D canvas, scheduling decoded
 * audio on a shared Web Audio context and drawing decoded video frames in a
 * requestAnimationFrame loop. Keeps audio and video in sync through
 * `audioSyncAnchor` (see formula below) and reports buffering state to a
 * callback so the host (Remotion preview) can block playback.
 */
export class MediaPlayer {
    constructor({ canvas, src, logLevel, sharedAudioContext, }) {
        // Video decode pipeline (null until initialize() finds a video track).
        this.canvasSink = null;
        this.videoFrameIterator = null;
        // The next decoded-but-not-yet-drawn frame.
        this.nextFrame = null;
        // Audio decode pipeline (null until initialize() finds an audio track).
        this.audioSink = null;
        this.audioBufferIterator = null;
        // AudioBufferSourceNodes that are scheduled but not yet ended.
        this.queuedAudioNodes = new Set();
        this.gainNode = null;
        // audioDelay = mediaTimestamp + audioSyncAnchor - sharedAudioContext.currentTime
        this.audioSyncAnchor = 0;
        this.playing = false;
        this.animationFrameId = null;
        // Incremented on every startVideoIterator()/dispose() so stale async
        // continuations can detect they were superseded and bail out.
        this.videoAsyncId = 0;
        this.initialized = false;
        this.totalDuration = 0;
        // for remotion buffer state
        this.isBuffering = false;
        this.audioBufferHealth = 0;
        this.audioIteratorStarted = false;
        // NOTE(review): field name has a typo ("BUFER"); kept as-is because
        // renaming an instance field is a visible change.
        this.HEALTHY_BUFER_THRESHOLD_SECONDS = 1;
        this.input = null;
        // One tick of the render loop: try to recover from a stuck buffering
        // state, draw a due frame, and re-arm the rAF only while playing.
        this.render = () => {
            if (this.isBuffering) {
                this.maybeForceResumeFromBuffering();
            }
            if (this.shouldRenderFrame()) {
                this.drawCurrentFrame();
            }
            if (this.playing) {
                this.animationFrameId = requestAnimationFrame(this.render);
            }
            else {
                this.animationFrameId = null;
            }
        };
        // (Re)starts the audio buffer iterator at `timeToSeek` seconds and
        // kicks off runAudioIterator() to consume it (not awaited on purpose).
        this.startAudioIterator = async (timeToSeek) => {
            if (!this.hasAudio())
                return;
            // Clean up existing audio iterator
            await this.audioBufferIterator?.return();
            this.audioIteratorStarted = false;
            this.audioBufferHealth = 0;
            try {
                this.audioBufferIterator = this.audioSink.buffers(timeToSeek);
                this.runAudioIterator();
            }
            catch (error) {
                Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, '[MediaPlayer] Failed to start audio iterator', error);
            }
        };
        // (Re)starts the video frame iterator at `timeToSeek`, immediately
        // draws the first decoded frame and buffers the second as `nextFrame`.
        this.startVideoIterator = async (timeToSeek) => {
            if (!this.canvasSink) {
                return;
            }
            this.videoAsyncId++;
            const currentAsyncId = this.videoAsyncId;
            await this.videoFrameIterator?.return();
            this.videoFrameIterator = this.canvasSink.canvases(timeToSeek);
            try {
                const firstFrame = (await this.videoFrameIterator.next()).value ?? null;
                const secondFrame = (await this.videoFrameIterator.next()).value ?? null;
                // A newer seek superseded this one while we awaited; discard.
                if (currentAsyncId !== this.videoAsyncId) {
                    return;
                }
                if (firstFrame) {
                    Internals.Log.trace({ logLevel: this.logLevel, tag: '@remotion/media' }, `[MediaPlayer] Drew initial frame ${firstFrame.timestamp.toFixed(3)}s`);
                    this.context.drawImage(firstFrame.canvas, 0, 0);
                }
                this.nextFrame = secondFrame ?? null;
                if (secondFrame) {
                    Internals.Log.trace({ logLevel: this.logLevel, tag: '@remotion/media' }, `[MediaPlayer] Buffered next frame ${secondFrame.timestamp.toFixed(3)}s`);
                }
            }
            catch (error) {
                Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, '[MediaPlayer] Failed to start video iterator', error);
            }
        };
        // Advances the video iterator, skipping frames whose timestamp is
        // already in the past, until one in the future becomes `nextFrame`.
        this.updateNextFrame = async () => {
            if (!this.videoFrameIterator) {
                return;
            }
            try {
                while (true) {
                    const newNextFrame = (await this.videoFrameIterator.next()).value ?? null;
                    if (!newNextFrame) {
                        break;
                    }
                    if (newNextFrame.timestamp <= this.getPlaybackTime()) {
                        // Frame is already late — drop it and keep decoding.
                        continue;
                    }
                    else {
                        this.nextFrame = newNextFrame;
                        Internals.Log.trace({ logLevel: this.logLevel, tag: '@remotion/media' }, `[MediaPlayer] Buffered next frame ${newNextFrame.timestamp.toFixed(3)}s`);
                        break;
                    }
                }
            }
            catch (error) {
                Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, '[MediaPlayer] Failed to update next frame', error);
            }
        };
        this.bufferingStartedAtMs = null;
        // Minimum time we stay in the buffering state before resuming.
        this.minBufferingTimeoutMs = 500;
        // Consumes the audio iterator: tracks buffer health, enters buffering
        // when the decoder stalls (>50ms per chunk), schedules chunks while
        // playing, and throttles decoding to ~1s ahead of playback.
        this.runAudioIterator = async () => {
            if (!this.hasAudio() || !this.audioBufferIterator)
                return;
            try {
                let totalBufferDuration = 0;
                let isFirstBuffer = true;
                this.audioIteratorStarted = true;
                while (true) {
                    const BUFFERING_TIMEOUT_MS = 50;
                    let result;
                    try {
                        result = await withTimeout(this.audioBufferIterator.next(), BUFFERING_TIMEOUT_MS, 'Iterator timeout');
                    }
                    catch {
                        // Decoder did not deliver in time — flag buffering and retry.
                        this.setBufferingState(true);
                        await sleep(10);
                        continue;
                    }
                    if (result.done || !result.value) {
                        break;
                    }
                    const { buffer, timestamp, duration } = result.value;
                    totalBufferDuration += duration;
                    this.audioBufferHealth = Math.max(0, totalBufferDuration);
                    this.maybeResumeFromBuffering(totalBufferDuration);
                    if (this.playing) {
                        if (isFirstBuffer) {
                            // Anchor the media clock to the audio clock on the
                            // first chunk after (re)start.
                            this.audioSyncAnchor =
                                this.sharedAudioContext.currentTime - timestamp;
                            isFirstBuffer = false;
                        }
                        this.scheduleAudioChunk(buffer, timestamp);
                    }
                    // Don't decode more than ~1s ahead of the playback head;
                    // poll via rAF until playback catches up.
                    if (timestamp - this.getPlaybackTime() >= 1) {
                        await new Promise((resolve) => {
                            const check = () => {
                                if (timestamp - this.getPlaybackTime() < 1) {
                                    resolve();
                                }
                                else {
                                    requestAnimationFrame(check);
                                }
                            };
                            check();
                        });
                    }
                }
            }
            catch (error) {
                Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, '[MediaPlayer] Failed to run audio iterator', error);
            }
        };
        this.canvas = canvas;
        this.src = src;
        this.logLevel = logLevel ?? 'info';
        this.sharedAudioContext = sharedAudioContext;
        const context = canvas.getContext('2d', {
            alpha: false,
            desynchronized: true,
        });
        if (!context) {
            throw new Error('Could not get 2D context from canvas');
        }
        this.context = context;
    }
    // True once initialize() completed and an audio context is available.
    isReady() {
        return this.initialized && Boolean(this.sharedAudioContext);
    }
    // True when the full audio pipeline (sink, context, gain node) exists.
    hasAudio() {
        return Boolean(this.audioSink && this.sharedAudioContext && this.gainNode);
    }
    isCurrentlyBuffering() {
        return this.isBuffering && Boolean(this.bufferingStartedAtMs);
    }
    /**
     * Opens the source, probes its tracks, sizes the canvas to the video
     * track, wires up the audio graph, and starts both iterators plus the
     * render loop at `startTime` seconds. Rethrows after logging on failure.
     */
    async initialize(startTime = 0) {
        try {
            const urlSource = new UrlSource(this.src);
            const input = new Input({
                source: urlSource,
                formats: ALL_FORMATS,
            });
            this.input = input;
            this.totalDuration = await input.computeDuration();
            const videoTrack = await input.getPrimaryVideoTrack();
            const audioTrack = await input.getPrimaryAudioTrack();
            if (!videoTrack && !audioTrack) {
                throw new Error(`No video or audio track found for ${this.src}`);
            }
            if (videoTrack) {
                this.canvasSink = new CanvasSink(videoTrack, {
                    poolSize: 2,
                    fit: 'contain',
                });
                this.canvas.width = videoTrack.displayWidth;
                this.canvas.height = videoTrack.displayHeight;
            }
            if (audioTrack && this.sharedAudioContext) {
                this.audioSink = new AudioBufferSink(audioTrack);
                this.gainNode = this.sharedAudioContext.createGain();
                this.gainNode.connect(this.sharedAudioContext.destination);
            }
            if (this.sharedAudioContext) {
                this.audioSyncAnchor = this.sharedAudioContext.currentTime - startTime;
            }
            this.initialized = true;
            await this.startAudioIterator(startTime);
            await this.startVideoIterator(startTime);
            this.startRenderLoop();
        }
        catch (error) {
            Internals.Log.error({ logLevel: this.logLevel, tag: '@remotion/media' }, '[MediaPlayer] Failed to initialize', error);
            throw error;
        }
    }
    // Stops and forgets every scheduled audio node.
    cleanupAudioQueue() {
        for (const node of this.queuedAudioNodes) {
            node.stop();
        }
        this.queuedAudioNodes.clear();
    }
    // Tears down the audio iterator and all queued nodes; resets health state.
    async cleanAudioIteratorAndNodes() {
        await this.audioBufferIterator?.return();
        this.audioBufferIterator = null;
        this.audioIteratorStarted = false;
        this.audioBufferHealth = 0;
        this.cleanupAudioQueue();
    }
    /**
     * Moves the playback head to `time` (clamped to [0, totalDuration]).
     * Only restarts the iterators when the jump exceeds SEEK_THRESHOLD;
     * when paused, triggers a single render tick so the frame updates.
     */
    async seekTo(time) {
        if (!this.isReady())
            return;
        const newTime = Math.max(0, Math.min(time, this.totalDuration));
        const currentPlaybackTime = this.getPlaybackTime();
        const isSignificantSeek = Math.abs(newTime - currentPlaybackTime) > SEEK_THRESHOLD;
        if (isSignificantSeek) {
            this.audioSyncAnchor = this.sharedAudioContext.currentTime - newTime;
            if (this.audioSink) {
                await this.cleanAudioIteratorAndNodes();
            }
            await this.startAudioIterator(newTime);
            await this.startVideoIterator(newTime);
        }
        if (!this.playing) {
            this.render();
        }
    }
    // Resumes the (possibly suspended) audio context and starts rendering.
    async play() {
        if (!this.isReady())
            return;
        if (!this.playing) {
            if (this.sharedAudioContext.state === 'suspended') {
                await this.sharedAudioContext.resume();
            }
            this.playing = true;
            this.startRenderLoop();
        }
    }
    // Pauses playback, silencing already-scheduled audio immediately.
    pause() {
        this.playing = false;
        this.cleanupAudioQueue();
        this.stopRenderLoop();
    }
    // Releases all resources. Safe to call more than once.
    dispose() {
        this.input?.dispose();
        this.stopRenderLoop();
        this.videoFrameIterator?.return();
        // NOTE(review): async cleanup is not awaited here — presumably
        // fire-and-forget by design; confirm no teardown race matters.
        this.cleanAudioIteratorAndNodes();
        this.videoAsyncId++;
    }
    // Current media time in seconds, derived from the shared audio clock.
    getPlaybackTime() {
        return this.sharedAudioContext.currentTime - this.audioSyncAnchor;
    }
    // Schedules one decoded AudioBuffer at its media timestamp; if that
    // moment already passed, starts immediately with an offset into the
    // buffer so playback stays aligned.
    scheduleAudioChunk(buffer, mediaTimestamp) {
        const targetTime = mediaTimestamp + this.audioSyncAnchor;
        const delay = targetTime - this.sharedAudioContext.currentTime;
        const node = this.sharedAudioContext.createBufferSource();
        node.buffer = buffer;
        node.connect(this.gainNode);
        if (delay >= 0) {
            node.start(targetTime);
        }
        else {
            node.start(this.sharedAudioContext.currentTime, -delay);
        }
        this.queuedAudioNodes.add(node);
        node.onended = () => this.queuedAudioNodes.delete(node);
    }
    // Registers the (single) listener notified on buffering transitions.
    onBufferingChange(callback) {
        this.onBufferingChangeCallback = callback;
    }
    // Video may only be drawn once audio decoding is under way and at least
    // one second of audio is buffered.
    canRenderVideo() {
        return (this.audioIteratorStarted &&
            this.audioBufferHealth >= this.HEALTHY_BUFER_THRESHOLD_SECONDS);
    }
    startRenderLoop() {
        // Already running — render() re-arms itself via rAF.
        if (this.animationFrameId !== null) {
            return;
        }
        this.render();
    }
    stopRenderLoop() {
        if (this.animationFrameId !== null) {
            cancelAnimationFrame(this.animationFrameId);
            this.animationFrameId = null;
        }
    }
    // A frame is drawn only when not buffering, audio is healthy, and the
    // buffered frame's timestamp has been reached by the playback head.
    shouldRenderFrame() {
        return (!this.isBuffering &&
            this.canRenderVideo() &&
            this.nextFrame !== null &&
            this.nextFrame.timestamp <= this.getPlaybackTime());
    }
    // Draws the buffered frame, then asynchronously fetches the next one.
    drawCurrentFrame() {
        this.context.drawImage(this.nextFrame.canvas, 0, 0);
        this.nextFrame = null;
        this.updateNextFrame();
    }
    // Transitions the buffering flag, timestamps the start of a buffering
    // episode, and notifies the registered callback on every change.
    setBufferingState(isBuffering) {
        if (this.isBuffering !== isBuffering) {
            this.isBuffering = isBuffering;
            if (isBuffering) {
                this.bufferingStartedAtMs = performance.now();
                this.onBufferingChangeCallback?.(true);
            }
            else {
                this.bufferingStartedAtMs = null;
                this.onBufferingChangeCallback?.(false);
            }
        }
    }
    // Leaves the buffering state once the minimum dwell time has elapsed
    // AND the audio buffer has recovered to a healthy level.
    maybeResumeFromBuffering(currentBufferDuration) {
        if (!this.isCurrentlyBuffering())
            return;
        const now = performance.now();
        const bufferingDuration = now - this.bufferingStartedAtMs;
        const minTimeElapsed = bufferingDuration >= this.minBufferingTimeoutMs;
        const bufferHealthy = currentBufferDuration >= this.HEALTHY_BUFER_THRESHOLD_SECONDS;
        if (minTimeElapsed && bufferHealthy) {
            Internals.Log.trace({ logLevel: this.logLevel, tag: '@remotion/media' }, `[MediaPlayer] Resuming from buffering after ${bufferingDuration}ms - buffer recovered`);
            this.setBufferingState(false);
        }
    }
    // Escape hatch: forcibly leaves the buffering state after 10x the
    // minimum timeout, even if the buffer never recovered.
    maybeForceResumeFromBuffering() {
        if (!this.isCurrentlyBuffering())
            return;
        const now = performance.now();
        const bufferingDuration = now - this.bufferingStartedAtMs;
        const forceTimeout = bufferingDuration > this.minBufferingTimeoutMs * 10;
        if (forceTimeout) {
            Internals.Log.trace({ logLevel: this.logLevel, tag: '@remotion/media' }, `[MediaPlayer] Force resuming from buffering after ${bufferingDuration}ms`);
            this.setBufferingState(false);
        }
    }
}
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import React from 'react';
|
|
2
|
+
import type { LogLevel } from 'remotion';
|
|
3
|
+
/** Props accepted by the preview-only `<NewVideoForPreview>` component. */
type NewVideoForPreviewProps = {
    /** URL of the media file to decode and display. */
    readonly src: string;
    /** Inline styles forwarded to the rendered `<canvas>`. */
    readonly style?: React.CSSProperties;
    /** Playback speed multiplier; defaults to 1 (realtime). */
    readonly playbackRate?: number;
    /** Verbosity for internal logging; defaults to 'info'. */
    readonly logLevel?: LogLevel;
};
export declare const NewVideoForPreview: React.FC<NewVideoForPreviewProps>;
export {};
|
|
@@ -0,0 +1,108 @@
|
|
|
1
|
+
import { jsx as _jsx } from "react/jsx-runtime";
|
|
2
|
+
import { useContext, useEffect, useRef, useState } from 'react';
|
|
3
|
+
import { Internals, useBufferState, useCurrentFrame } from 'remotion';
|
|
4
|
+
import { MediaPlayer } from './media-player';
|
|
5
|
+
const { useUnsafeVideoConfig, Timeline, SharedAudioContext } = Internals;
/**
 * Preview implementation of the new video tag: decodes media with
 * MediaPlayer into a <canvas> and keeps it in sync with the Remotion
 * timeline (play/pause state, current frame, and buffering).
 */
export const NewVideoForPreview = ({ src, style, playbackRate = 1, logLevel = 'info', }) => {
    const canvasRef = useRef(null);
    const videoConfig = useUnsafeVideoConfig();
    const frame = useCurrentFrame();
    const mediaPlayerRef = useRef(null);
    const [mediaPlayerReady, setMediaPlayerReady] = useState(false);
    const [playing] = Timeline.usePlayingState();
    const sharedAudioContext = useContext(SharedAudioContext);
    const buffer = useBufferState();
    // Handle returned by buffer.delayPlayback() while we block Remotion.
    const delayHandleRef = useRef(null);
    if (!videoConfig) {
        throw new Error('No video config found');
    }
    if (!src) {
        throw new TypeError('No `src` was passed to <NewVideoForPreview>.');
    }
    // Media time in seconds: playbackRate stretches/compresses the fps.
    const actualFps = videoConfig.fps / playbackRate;
    const currentTime = frame / actualFps;
    // Captured once on mount; the player is initialized at this timestamp.
    const [initialTimestamp] = useState(currentTime);
    // Create/destroy the MediaPlayer whenever src or the audio context changes.
    useEffect(() => {
        if (!canvasRef.current)
            return;
        if (!sharedAudioContext)
            return;
        if (!sharedAudioContext.audioContext)
            return;
        try {
            const player = new MediaPlayer({
                canvas: canvasRef.current,
                src,
                logLevel,
                sharedAudioContext: sharedAudioContext.audioContext,
            });
            mediaPlayerRef.current = player;
            player
                .initialize(initialTimestamp)
                .then(() => {
                setMediaPlayerReady(true);
                Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[NewVideoForPreview] MediaPlayer initialized successfully`);
            })
                .catch((error) => {
                Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[NewVideoForPreview] Failed to initialize MediaPlayer', error);
            });
        }
        catch (error) {
            Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[NewVideoForPreview] MediaPlayer initialization failed', error);
        }
        return () => {
            // Release any playback block we still hold before tearing down.
            if (delayHandleRef.current) {
                delayHandleRef.current.unblock();
                delayHandleRef.current = null;
            }
            if (mediaPlayerRef.current) {
                Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[NewVideoForPreview] Disposing MediaPlayer`);
                mediaPlayerRef.current.dispose();
                mediaPlayerRef.current = null;
            }
            setMediaPlayerReady(false);
        };
    }, [src, logLevel, sharedAudioContext, initialTimestamp]);
    // sync play/pause state with Remotion timeline (like old VideoForPreview video does)
    useEffect(() => {
        const mediaPlayer = mediaPlayerRef.current;
        if (!mediaPlayer)
            return;
        if (playing) {
            mediaPlayer.play().catch((error) => {
                Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[NewVideoForPreview] Failed to play', error);
            });
        }
        else {
            mediaPlayer.pause();
        }
    }, [playing, logLevel, mediaPlayerReady]);
    // sync target time with MediaPlayer
    useEffect(() => {
        const mediaPlayer = mediaPlayerRef.current;
        if (!mediaPlayer || !mediaPlayerReady)
            return;
        // NOTE(review): seekTo is async and intentionally not awaited here.
        mediaPlayer.seekTo(currentTime);
        Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[NewVideoForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
    }, [currentTime, logLevel, mediaPlayerReady]);
    // sync MediaPlayer buffering with Remotion buffering
    useEffect(() => {
        const mediaPlayer = mediaPlayerRef.current;
        if (!mediaPlayer || !mediaPlayerReady)
            return;
        mediaPlayer.onBufferingChange((newBufferingState) => {
            if (newBufferingState && !delayHandleRef.current) {
                // Start blocking Remotion playback
                delayHandleRef.current = buffer.delayPlayback();
                Internals.Log.trace({ logLevel, tag: '@remotion/media' }, '[NewVideoForPreview] MediaPlayer buffering - blocking Remotion playback');
            }
            else if (!newBufferingState && delayHandleRef.current) {
                // Unblock Remotion playback
                delayHandleRef.current.unblock();
                delayHandleRef.current = null;
                Internals.Log.trace({ logLevel, tag: '@remotion/media' }, '[NewVideoForPreview] MediaPlayer unbuffering - unblocking Remotion playback');
            }
        });
    }, [mediaPlayerReady, buffer, logLevel]);
    return (_jsx("canvas", { ref: canvasRef, width: videoConfig.width, height: videoConfig.height, style: style }));
};
|
package/dist/video/props.d.ts
CHANGED
|
@@ -9,17 +9,12 @@ export type VideoProps = {
|
|
|
9
9
|
name?: string;
|
|
10
10
|
pauseWhenBuffering?: boolean;
|
|
11
11
|
showInTimeline?: boolean;
|
|
12
|
-
onAutoPlayError?: null | (() => void);
|
|
13
12
|
onVideoFrame?: OnVideoFrame;
|
|
14
13
|
playbackRate?: number;
|
|
15
14
|
muted?: boolean;
|
|
16
15
|
delayRenderRetries?: number;
|
|
17
16
|
delayRenderTimeoutInMilliseconds?: number;
|
|
18
|
-
crossOrigin?: '' | 'anonymous' | 'use-credentials';
|
|
19
17
|
style?: React.CSSProperties;
|
|
20
|
-
onError?: (err: Error) => void;
|
|
21
|
-
useWebAudioApi?: boolean;
|
|
22
|
-
acceptableTimeShiftInSeconds?: number;
|
|
23
18
|
/**
|
|
24
19
|
* @deprecated For internal use only
|
|
25
20
|
*/
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
/* eslint-disable no-promise-executor-return */
// Resolves after `ms` milliseconds — a promise-flavored setTimeout.
export const sleep = (ms) => {
    return new Promise((resolve) => {
        setTimeout(resolve, ms);
    });
};
|
|
3
|
+
/**
 * Races `promise` against a timer: resolves/rejects with the promise's
 * outcome if it settles within `timeoutMs` milliseconds, otherwise rejects
 * with `new Error(errorMessage)`.
 *
 * The timer is cleared as soon as the underlying promise settles, so a
 * completed call cannot fire a stray timeout later or keep the runtime alive.
 *
 * @param {Promise<any>} promise - The operation to bound in time.
 * @param {number} timeoutMs - Maximum time to wait, in milliseconds.
 * @param {string} [errorMessage] - Message for the timeout rejection.
 * @returns {Promise<any>} Settles like `promise`, or rejects on timeout.
 */
export function withTimeout(promise, timeoutMs, errorMessage = 'Operation timed out') {
    let timeoutId = null;
    const timeoutPromise = new Promise((_, reject) => {
        // Plain `setTimeout` (not `window.setTimeout`) so the helper also
        // works outside the main browser thread (workers, Node tooling);
        // behavior in the browser is identical.
        timeoutId = setTimeout(() => {
            reject(new Error(errorMessage));
        }, timeoutMs);
    });
    return Promise.race([
        promise.finally(() => {
            if (timeoutId !== null) {
                clearTimeout(timeoutId);
            }
        }),
        timeoutPromise,
    ]);
}
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
import React from 'react';
|
|
2
|
+
import type { LogLevel } from 'remotion';
|
|
3
|
+
/** Props accepted by the preview-only `<NewVideoForPreview>` component. */
type NewVideoForPreviewProps = {
    /** URL of the media file to decode and display. */
    readonly src: string;
    /** Inline styles forwarded to the rendered `<canvas>`. */
    readonly style?: React.CSSProperties;
    /** Playback speed multiplier; defaults to 1 (realtime). */
    readonly playbackRate?: number;
    /** Verbosity for internal logging; defaults to 'info'. */
    readonly logLevel?: LogLevel;
    /** Extra class name(s) merged onto the rendered `<canvas>`. */
    readonly className?: string;
};
export declare const NewVideoForPreview: React.FC<NewVideoForPreviewProps>;
export {};
|
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
import { jsx as _jsx } from "react/jsx-runtime";
|
|
2
|
+
import { useContext, useEffect, useMemo, useRef, useState } from 'react';
|
|
3
|
+
import { Internals, useBufferState, useCurrentFrame } from 'remotion';
|
|
4
|
+
import { MediaPlayer } from './media-player';
|
|
5
|
+
const { useUnsafeVideoConfig, Timeline, SharedAudioContext } = Internals;
/**
 * Preview implementation of the new video tag: decodes media with
 * MediaPlayer into a <canvas> and keeps it in sync with the Remotion
 * timeline (play/pause state, current frame, and buffering). This variant
 * also merges a caller-supplied className with the object-fit class.
 */
export const NewVideoForPreview = ({ src, style, playbackRate = 1, logLevel = 'info', className, }) => {
    const canvasRef = useRef(null);
    const videoConfig = useUnsafeVideoConfig();
    const frame = useCurrentFrame();
    const mediaPlayerRef = useRef(null);
    const [mediaPlayerReady, setMediaPlayerReady] = useState(false);
    const [playing] = Timeline.usePlayingState();
    const sharedAudioContext = useContext(SharedAudioContext);
    const buffer = useBufferState();
    // Handle returned by buffer.delayPlayback() while we block Remotion.
    const delayHandleRef = useRef(null);
    if (!videoConfig) {
        throw new Error('No video config found');
    }
    if (!src) {
        throw new TypeError('No `src` was passed to <NewVideoForPreview>.');
    }
    // Media time in seconds: playbackRate stretches/compresses the fps.
    const actualFps = videoConfig.fps / playbackRate;
    const currentTime = frame / actualFps;
    // Captured once on mount; the player is initialized at this timestamp.
    const [initialTimestamp] = useState(currentTime);
    // Create/destroy the MediaPlayer whenever src or the audio context changes.
    useEffect(() => {
        if (!canvasRef.current)
            return;
        if (!sharedAudioContext)
            return;
        if (!sharedAudioContext.audioContext)
            return;
        try {
            const player = new MediaPlayer({
                canvas: canvasRef.current,
                src,
                logLevel,
                sharedAudioContext: sharedAudioContext.audioContext,
            });
            mediaPlayerRef.current = player;
            player
                .initialize(initialTimestamp)
                .then(() => {
                setMediaPlayerReady(true);
                Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[NewVideoForPreview] MediaPlayer initialized successfully`);
            })
                .catch((error) => {
                Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[NewVideoForPreview] Failed to initialize MediaPlayer', error);
            });
        }
        catch (error) {
            Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[NewVideoForPreview] MediaPlayer initialization failed', error);
        }
        return () => {
            // Release any playback block we still hold before tearing down.
            if (delayHandleRef.current) {
                delayHandleRef.current.unblock();
                delayHandleRef.current = null;
            }
            if (mediaPlayerRef.current) {
                Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[NewVideoForPreview] Disposing MediaPlayer`);
                mediaPlayerRef.current.dispose();
                mediaPlayerRef.current = null;
            }
            setMediaPlayerReady(false);
        };
    }, [src, logLevel, sharedAudioContext, initialTimestamp]);
    // Merge the built-in object-fit class with the caller's className.
    const classNameValue = useMemo(() => {
        return [Internals.OBJECTFIT_CONTAIN_CLASS_NAME, className]
            .filter(Internals.truthy)
            .join(' ');
    }, [className]);
    // sync play/pause state with Remotion timeline (like old VideoForPreview video does)
    useEffect(() => {
        const mediaPlayer = mediaPlayerRef.current;
        if (!mediaPlayer)
            return;
        if (playing) {
            mediaPlayer.play().catch((error) => {
                Internals.Log.error({ logLevel, tag: '@remotion/media' }, '[NewVideoForPreview] Failed to play', error);
            });
        }
        else {
            mediaPlayer.pause();
        }
    }, [playing, logLevel, mediaPlayerReady]);
    // sync target time with MediaPlayer
    useEffect(() => {
        const mediaPlayer = mediaPlayerRef.current;
        if (!mediaPlayer || !mediaPlayerReady)
            return;
        // NOTE(review): seekTo is async and intentionally not awaited here.
        mediaPlayer.seekTo(currentTime);
        Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[NewVideoForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
    }, [currentTime, logLevel, mediaPlayerReady]);
    // sync MediaPlayer buffering with Remotion buffering
    useEffect(() => {
        const mediaPlayer = mediaPlayerRef.current;
        if (!mediaPlayer || !mediaPlayerReady)
            return;
        mediaPlayer.onBufferingChange((newBufferingState) => {
            if (newBufferingState && !delayHandleRef.current) {
                // Start blocking Remotion playback
                delayHandleRef.current = buffer.delayPlayback();
                Internals.Log.trace({ logLevel, tag: '@remotion/media' }, '[NewVideoForPreview] MediaPlayer buffering - blocking Remotion playback');
            }
            else if (!newBufferingState && delayHandleRef.current) {
                // Unblock Remotion playback
                delayHandleRef.current.unblock();
                delayHandleRef.current = null;
                Internals.Log.trace({ logLevel, tag: '@remotion/media' }, '[NewVideoForPreview] MediaPlayer unbuffering - unblocking Remotion playback');
            }
        });
    }, [mediaPlayerReady, buffer, logLevel]);
    return (_jsx("canvas", { ref: canvasRef, width: videoConfig.width, height: videoConfig.height, style: style, className: classNameValue }));
};
|