@remotion/media 4.0.378 → 4.0.380

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. package/dist/audio/audio-for-preview.js +1 -1
  2. package/dist/audio/audio-preview-iterator.js +27 -4
  3. package/dist/audio/props.d.ts +1 -0
  4. package/dist/audio-extraction/extract-audio.d.ts +1 -1
  5. package/dist/audio-extraction/extract-audio.js +3 -0
  6. package/dist/audio-for-rendering.d.ts +3 -0
  7. package/dist/audio-for-rendering.js +94 -0
  8. package/dist/audio.d.ts +3 -0
  9. package/dist/audio.js +60 -0
  10. package/dist/audiodata-to-array.d.ts +0 -0
  11. package/dist/audiodata-to-array.js +1 -0
  12. package/dist/convert-audiodata/data-types.d.ts +1 -0
  13. package/dist/convert-audiodata/data-types.js +22 -0
  14. package/dist/convert-audiodata/is-planar-format.d.ts +1 -0
  15. package/dist/convert-audiodata/is-planar-format.js +3 -0
  16. package/dist/convert-audiodata/log-audiodata.d.ts +1 -0
  17. package/dist/convert-audiodata/log-audiodata.js +8 -0
  18. package/dist/convert-audiodata/trim-audiodata.d.ts +0 -0
  19. package/dist/convert-audiodata/trim-audiodata.js +1 -0
  20. package/dist/deserialized-audiodata.d.ts +15 -0
  21. package/dist/deserialized-audiodata.js +26 -0
  22. package/dist/esm/index.mjs +56 -8
  23. package/dist/extract-audio.d.ts +7 -0
  24. package/dist/extract-audio.js +98 -0
  25. package/dist/extract-frame-and-audio.js +9 -0
  26. package/dist/extract-frame-via-broadcast-channel.d.ts +15 -0
  27. package/dist/extract-frame-via-broadcast-channel.js +104 -0
  28. package/dist/extract-frame.d.ts +27 -0
  29. package/dist/extract-frame.js +21 -0
  30. package/dist/extrct-audio.d.ts +7 -0
  31. package/dist/extrct-audio.js +94 -0
  32. package/dist/get-frames-since-keyframe.d.ts +22 -0
  33. package/dist/get-frames-since-keyframe.js +41 -0
  34. package/dist/keyframe-bank.d.ts +25 -0
  35. package/dist/keyframe-bank.js +120 -0
  36. package/dist/keyframe-manager.d.ts +23 -0
  37. package/dist/keyframe-manager.js +170 -0
  38. package/dist/log.d.ts +10 -0
  39. package/dist/log.js +33 -0
  40. package/dist/new-video-for-rendering.d.ts +3 -0
  41. package/dist/new-video-for-rendering.js +108 -0
  42. package/dist/new-video.d.ts +3 -0
  43. package/dist/new-video.js +37 -0
  44. package/dist/props.d.ts +29 -0
  45. package/dist/props.js +1 -0
  46. package/dist/remember-actual-matroska-timestamps.d.ts +4 -0
  47. package/dist/remember-actual-matroska-timestamps.js +19 -0
  48. package/dist/serialize-videoframe.d.ts +0 -0
  49. package/dist/serialize-videoframe.js +1 -0
  50. package/dist/video/media-player.d.ts +62 -0
  51. package/dist/video/media-player.js +361 -0
  52. package/dist/video/new-video-for-preview.d.ts +10 -0
  53. package/dist/video/new-video-for-preview.js +108 -0
  54. package/dist/video/props.d.ts +1 -0
  55. package/dist/video/timeout-utils.d.ts +2 -0
  56. package/dist/video/timeout-utils.js +18 -0
  57. package/dist/video/video-for-rendering.js +1 -1
  58. package/dist/video-extraction/extract-frame.d.ts +2 -0
  59. package/dist/video-extraction/extract-frame.js +3 -0
  60. package/dist/video-extraction/get-frames-since-keyframe.d.ts +2 -2
  61. package/dist/video-extraction/get-frames-since-keyframe.js +13 -3
  62. package/dist/video-extraction/media-player.d.ts +64 -0
  63. package/dist/video-extraction/media-player.js +501 -0
  64. package/dist/video-extraction/new-video-for-preview.d.ts +10 -0
  65. package/dist/video-extraction/new-video-for-preview.js +114 -0
  66. package/dist/video-for-rendering.d.ts +3 -0
  67. package/dist/video-for-rendering.js +108 -0
  68. package/dist/video.d.ts +3 -0
  69. package/dist/video.js +37 -0
  70. package/package.json +3 -3
@@ -0,0 +1,501 @@
1
+ import { ALL_FORMATS, AudioBufferSink, CanvasSink, Input, UrlSource, } from 'mediabunny';
2
+ import { Log } from '../log';
3
+ const SEEK_THRESHOLD = 0.05;
4
+ export class MediaPlayer {
5
+ constructor({ canvas, src, logLevel, sharedAudioContext, }) {
6
+ this.canvasSink = null;
7
+ this.videoFrameIterator = null;
8
+ this.nextFrame = null;
9
+ this.audioSink = null;
10
+ this.audioBufferIterator = null;
11
+ this.queuedAudioNodes = new Set();
12
+ this.gainNode = null;
13
+ this.expectedAudioTime = 0;
14
+ this.sharedAudioContext = null;
15
+ this.mediaTimeOffset = 0;
16
+ this.playing = false;
17
+ this.animationFrameId = null;
18
+ this.asyncId = 0;
19
+ this.initialized = false;
20
+ this.totalDuration = 0;
21
+ this.actualFps = null;
22
+ // for remotion buffer state
23
+ this.isStalled = false;
24
+ this.lastAudioProgressAtMs = 0;
25
+ this.lastNetworkActivityAtMs = 0;
26
+ this.isNetworkActive = false;
27
+ this.isSeeking = false;
28
+ // A/V sync coordination
29
+ this.canStartAudio = false;
30
+ this.render = () => {
31
+ const currentPlaybackTime = this.getPlaybackTime();
32
+ if (this.nextFrame && this.nextFrame.timestamp <= currentPlaybackTime) {
33
+ Log.trace(this.logLevel, `[MediaPlayer] Drawing frame at ${this.nextFrame.timestamp.toFixed(3)}s (playback time: ${currentPlaybackTime.toFixed(3)}s)`);
34
+ this.context.drawImage(this.nextFrame.canvas, 0, 0);
35
+ // For video-only content, track video progress as audio progress
36
+ if (!this.audioSink) {
37
+ this.resetAudioProgressStopwatch();
38
+ }
39
+ this.nextFrame = null;
40
+ this.updateNextFrame();
41
+ }
42
+ this.updateStalledState();
43
+ // continue render loop only if playing
44
+ if (this.playing) {
45
+ this.animationFrameId = requestAnimationFrame(this.render);
46
+ }
47
+ else {
48
+ this.animationFrameId = null;
49
+ }
50
+ };
51
+ this.startVideoIterator = async (timeToSeek) => {
52
+ if (!this.canvasSink) {
53
+ return;
54
+ }
55
+ this.asyncId++;
56
+ const currentAsyncId = this.asyncId;
57
+ await this.videoFrameIterator?.return();
58
+ this.videoFrameIterator = this.canvasSink.canvases(timeToSeek);
59
+ try {
60
+ const firstFrame = (await this.videoFrameIterator.next()).value ?? null;
61
+ const secondFrame = (await this.videoFrameIterator.next()).value ?? null;
62
+ if (currentAsyncId !== this.asyncId) {
63
+ Log.trace(this.logLevel, `[MediaPlayer] Race condition detected, aborting startVideoIterator for ${timeToSeek.toFixed(3)}s`);
64
+ return;
65
+ }
66
+ if (firstFrame) {
67
+ Log.trace(this.logLevel, `[MediaPlayer] Drew initial frame ${firstFrame.timestamp.toFixed(3)}s`);
68
+ this.context.drawImage(firstFrame.canvas, 0, 0);
69
+ // For video-only content, track video progress as audio progress
70
+ if (!this.audioSink) {
71
+ this.resetAudioProgressStopwatch();
72
+ }
73
+ this.canStartAudio = true;
74
+ this.isSeeking = false;
75
+ this.tryStartAudio();
76
+ }
77
+ this.nextFrame = secondFrame ?? null;
78
+ if (secondFrame) {
79
+ Log.trace(this.logLevel, `[MediaPlayer] Buffered next frame ${secondFrame.timestamp.toFixed(3)}s`);
80
+ // For video-only content, track video progress as audio progress
81
+ if (!this.audioSink) {
82
+ this.resetAudioProgressStopwatch();
83
+ }
84
+ if (!this.canStartAudio) {
85
+ this.canStartAudio = true;
86
+ this.tryStartAudio();
87
+ }
88
+ }
89
+ this.updateStalledState();
90
+ }
91
+ catch (error) {
92
+ Log.error('[MediaPlayer] Failed to start video iterator', error);
93
+ }
94
+ };
95
+ this.updateNextFrame = async () => {
96
+ if (!this.videoFrameIterator) {
97
+ return;
98
+ }
99
+ const currentAsyncId = this.asyncId;
100
+ try {
101
+ while (true) {
102
+ const newNextFrame = (await this.videoFrameIterator.next()).value ?? null;
103
+ if (!newNextFrame) {
104
+ break;
105
+ }
106
+ if (currentAsyncId !== this.asyncId) {
107
+ Log.trace(this.logLevel, `[MediaPlayer] Race condition detected in updateNextFrame`);
108
+ break;
109
+ }
110
+ if (newNextFrame.timestamp <= this.getPlaybackTime()) {
111
+ Log.trace(this.logLevel, `[MediaPlayer] Drawing immediate frame ${newNextFrame.timestamp.toFixed(3)}s`);
112
+ this.context.drawImage(newNextFrame.canvas, 0, 0);
113
+ // For video-only content, track video progress as audio progress
114
+ if (!this.audioSink) {
115
+ this.resetAudioProgressStopwatch();
116
+ }
117
+ }
118
+ else {
119
+ this.nextFrame = newNextFrame;
120
+ Log.trace(this.logLevel, `[MediaPlayer] Buffered next frame ${newNextFrame.timestamp.toFixed(3)}s`);
121
+ // For video-only content, track video progress as audio progress
122
+ if (!this.audioSink) {
123
+ this.resetAudioProgressStopwatch();
124
+ }
125
+ // Open audio gate when new frames become available
126
+ if (!this.canStartAudio) {
127
+ this.canStartAudio = true;
128
+ this.tryStartAudio();
129
+ }
130
+ break;
131
+ }
132
+ }
133
+ }
134
+ catch (error) {
135
+ Log.error('[MediaPlayer] Failed to update next frame', error);
136
+ }
137
+ this.updateStalledState();
138
+ };
139
+ this.runAudioIterator = async () => {
140
+ if (!this.audioSink ||
141
+ !this.sharedAudioContext ||
142
+ !this.audioBufferIterator ||
143
+ !this.gainNode) {
144
+ return;
145
+ }
146
+ try {
147
+ this.expectedAudioTime = this.sharedAudioContext.currentTime;
148
+ for await (const { buffer, timestamp } of this.audioBufferIterator) {
149
+ const node = this.sharedAudioContext.createBufferSource();
150
+ node.buffer = buffer;
151
+ node.connect(this.gainNode);
152
+ if (this.expectedAudioTime >= this.sharedAudioContext.currentTime) {
153
+ node.start(this.expectedAudioTime);
154
+ }
155
+ else {
156
+ const offset = this.sharedAudioContext.currentTime - this.expectedAudioTime;
157
+ node.start(this.sharedAudioContext.currentTime, offset);
158
+ }
159
+ this.queuedAudioNodes.add(node);
160
+ node.onended = () => {
161
+ this.queuedAudioNodes.delete(node);
162
+ };
163
+ this.expectedAudioTime += buffer.duration;
164
+ this.updateStalledState();
165
+ // If we're more than a second ahead of the current playback time, let's slow down the loop until time has
166
+ // passed. Use timestamp for throttling logic as it represents media time.
167
+ if (timestamp - this.getPlaybackTime() >= 1) {
168
+ await new Promise((resolve) => {
169
+ const check = () => {
170
+ if (timestamp - this.getPlaybackTime() < 1) {
171
+ resolve();
172
+ }
173
+ else {
174
+ requestAnimationFrame(check);
175
+ }
176
+ };
177
+ check();
178
+ });
179
+ }
180
+ }
181
+ }
182
+ catch (error) {
183
+ Log.error('[MediaPlayer] Failed to run audio iterator', error);
184
+ }
185
+ };
186
+ this.canvas = canvas;
187
+ this.src = src;
188
+ this.logLevel = logLevel ?? 'info';
189
+ this.sharedAudioContext = sharedAudioContext || null;
190
+ const context = canvas.getContext('2d', {
191
+ alpha: false,
192
+ desynchronized: true,
193
+ });
194
+ if (!context) {
195
+ throw new Error('Could not get 2D context from canvas');
196
+ }
197
+ this.context = context;
198
+ // Initialize audio progress stopwatch
199
+ this.resetAudioProgressStopwatch();
200
+ Log.trace(this.logLevel, `[MediaPlayer] Created for src: ${src}`);
201
+ }
202
+ async initialize(startTime = 0) {
203
+ if (this.initialized) {
204
+ Log.trace(this.logLevel, `[MediaPlayer] Already initialized, skipping`);
205
+ return;
206
+ }
207
+ try {
208
+ Log.trace(this.logLevel, `[MediaPlayer] Initializing at startTime: ${startTime.toFixed(3)}s...`);
209
+ const urlSource = new UrlSource(this.src);
210
+ urlSource.onread = () => {
211
+ this.lastNetworkActivityAtMs = this.getCurrentTimeMs();
212
+ this.isNetworkActive = true;
213
+ };
214
+ const input = new Input({
215
+ source: urlSource,
216
+ formats: ALL_FORMATS,
217
+ });
218
+ this.totalDuration = await input.computeDuration();
219
+ const videoTrack = await input.getPrimaryVideoTrack();
220
+ const audioTrack = await input.getPrimaryAudioTrack();
221
+ if (!videoTrack && !audioTrack) {
222
+ throw new Error(`No video or audio track found for ${this.src}`);
223
+ }
224
+ if (videoTrack) {
225
+ this.canvasSink = new CanvasSink(videoTrack, {
226
+ poolSize: 2,
227
+ fit: 'contain',
228
+ });
229
+ this.canvas.width = videoTrack.displayWidth;
230
+ this.canvas.height = videoTrack.displayHeight;
231
+ // Extract actual FPS for stall detection
232
+ const packetStats = await videoTrack.computePacketStats();
233
+ this.actualFps = packetStats.averagePacketRate;
234
+ Log.trace(this.logLevel, `[MediaPlayer] Detected video FPS: ${this.actualFps}`);
235
+ }
236
+ if (audioTrack && this.sharedAudioContext) {
237
+ this.audioSink = new AudioBufferSink(audioTrack);
238
+ this.gainNode = this.sharedAudioContext.createGain();
239
+ this.gainNode.connect(this.sharedAudioContext.destination);
240
+ }
241
+ // For audio-only content, allow audio to start immediately
242
+ if (!videoTrack && audioTrack) {
243
+ this.canStartAudio = true;
244
+ }
245
+ // Initialize timing offset based on actual starting position
246
+ if (this.sharedAudioContext) {
247
+ this.mediaTimeOffset = this.sharedAudioContext.currentTime - startTime;
248
+ Log.trace(this.logLevel, `[MediaPlayer] Set mediaTimeOffset to ${this.mediaTimeOffset.toFixed(3)}s (audioContext: ${this.sharedAudioContext.currentTime.toFixed(3)}s, startTime: ${startTime.toFixed(3)}s)`);
249
+ this.lastAudioProgressAtMs = this.getCurrentTimeMs();
250
+ this.lastNetworkActivityAtMs = this.getCurrentTimeMs();
251
+ }
252
+ this.initialized = true;
253
+ await this.startVideoIterator(startTime);
254
+ this.startRenderLoop();
255
+ Log.trace(this.logLevel, `[MediaPlayer] Initialized successfully with iterators started, duration: ${this.totalDuration}s`);
256
+ }
257
+ catch (error) {
258
+ Log.error('[MediaPlayer] Failed to initialize', error);
259
+ throw error;
260
+ }
261
+ }
262
+ seekTo(time) {
263
+ if (!this.initialized || !this.sharedAudioContext) {
264
+ return;
265
+ }
266
+ // Ensure mediaTimeOffset is initialized (safety fallback)
267
+ if (this.mediaTimeOffset === 0) {
268
+ this.mediaTimeOffset = this.sharedAudioContext.currentTime - time;
269
+ Log.trace(this.logLevel, `[MediaPlayer] Late-initialized mediaTimeOffset to ${this.mediaTimeOffset.toFixed(3)}s`);
270
+ }
271
+ const newTime = Math.max(0, Math.min(time, this.totalDuration));
272
+ const currentPlaybackTime = this.getPlaybackTime();
273
+ const isSignificantSeek = Math.abs(newTime - currentPlaybackTime) > SEEK_THRESHOLD;
274
+ // Update offset to make audio context time correspond to new media time
275
+ this.mediaTimeOffset = this.sharedAudioContext.currentTime - newTime;
276
+ if (isSignificantSeek) {
277
+ Log.trace(this.logLevel, `[MediaPlayer] Significant seek to ${newTime.toFixed(3)}s - creating new iterator`);
278
+ this.isSeeking = true;
279
+ this.canStartAudio = false;
280
+ this.updateStalledState();
281
+ // Stop existing audio first
282
+ if (this.playing && this.audioSink) {
283
+ this.audioBufferIterator?.return();
284
+ this.audioBufferIterator = null;
285
+ // Stop current audio nodes
286
+ for (const node of this.queuedAudioNodes) {
287
+ node.stop();
288
+ }
289
+ this.queuedAudioNodes.clear();
290
+ }
291
+ // Start video iterator (which will open audio gate when ready)
292
+ this.startVideoIterator(newTime);
293
+ }
294
+ else {
295
+ Log.trace(this.logLevel, `[MediaPlayer] Minor time update to ${newTime.toFixed(3)}s - using existing iterator`);
296
+ // if paused, trigger a single frame update to show current position
297
+ if (!this.playing) {
298
+ this.renderSingleFrame();
299
+ }
300
+ }
301
+ }
302
+ async drawInitialFrame(time = 0) {
303
+ if (!this.initialized || !this.canvasSink) {
304
+ Log.trace(this.logLevel, `[MediaPlayer] Cannot draw initial frame - not initialized or no canvas sink`);
305
+ return;
306
+ }
307
+ try {
308
+ Log.trace(this.logLevel, `[MediaPlayer] Drawing initial frame at ${time.toFixed(3)}s`);
309
+ // create temporary iterator just to get the first frame
310
+ const tempIterator = this.canvasSink.canvases(time);
311
+ const firstFrame = (await tempIterator.next()).value;
312
+ if (firstFrame) {
313
+ this.context.drawImage(firstFrame.canvas, 0, 0);
314
+ Log.trace(this.logLevel, `[MediaPlayer] Drew initial frame at timestamp ${firstFrame.timestamp.toFixed(3)}s`);
315
+ }
316
+ else {
317
+ Log.trace(this.logLevel, `[MediaPlayer] No frame available at ${time.toFixed(3)}s`);
318
+ }
319
+ // clean up the temporary iterator
320
+ await tempIterator.return();
321
+ }
322
+ catch (error) {
323
+ Log.error('[MediaPlayer] Failed to draw initial frame', error);
324
+ }
325
+ }
326
+ async play() {
327
+ if (!this.initialized || !this.sharedAudioContext) {
328
+ return;
329
+ }
330
+ if (!this.playing) {
331
+ if (this.sharedAudioContext.state === 'suspended') {
332
+ await this.sharedAudioContext.resume();
333
+ }
334
+ this.playing = true;
335
+ Log.trace(this.logLevel, `[MediaPlayer] Play - starting render loop`);
336
+ this.startRenderLoop();
337
+ // Audio will start automatically when video signals readiness via tryStartAudio()
338
+ this.tryStartAudio();
339
+ }
340
+ }
341
+ pause() {
342
+ if (this.playing) {
343
+ this.playing = false;
344
+ // stop audio iterator
345
+ this.audioBufferIterator?.return();
346
+ this.audioBufferIterator = null;
347
+ // stop all playing audio nodes
348
+ for (const node of this.queuedAudioNodes) {
349
+ node.stop();
350
+ }
351
+ this.queuedAudioNodes.clear();
352
+ Log.trace(this.logLevel, `[MediaPlayer] Pause - stopping render loop`);
353
+ this.stopRenderLoop();
354
+ }
355
+ }
356
+ dispose() {
357
+ Log.trace(this.logLevel, `[MediaPlayer] Disposing...`);
358
+ this.stopRenderLoop();
359
+ // clean up video resources
360
+ this.videoFrameIterator?.return();
361
+ this.videoFrameIterator = null;
362
+ this.nextFrame = null;
363
+ this.canvasSink = null;
364
+ // Clean up audio resources
365
+ for (const node of this.queuedAudioNodes) {
366
+ node.stop();
367
+ }
368
+ this.queuedAudioNodes.clear();
369
+ this.audioBufferIterator?.return();
370
+ this.audioBufferIterator = null;
371
+ this.audioSink = null;
372
+ this.gainNode = null;
373
+ this.initialized = false;
374
+ this.asyncId++;
375
+ }
376
+ get currentTime() {
377
+ return this.getPlaybackTime();
378
+ }
379
+ // current position in the media
380
+ getPlaybackTime() {
381
+ if (!this.sharedAudioContext) {
382
+ return 0;
383
+ }
384
+ // Audio context is single source of truth
385
+ return this.sharedAudioContext.currentTime - this.mediaTimeOffset;
386
+ }
387
+ get duration() {
388
+ return this.totalDuration;
389
+ }
390
+ get isPlaying() {
391
+ return this.playing;
392
+ }
393
+ get stalled() {
394
+ return this.isStalled;
395
+ }
396
+ onStalledChange(callback) {
397
+ this.onStalledChangeCallback = callback;
398
+ }
399
+ renderSingleFrame() {
400
+ const currentPlaybackTime = this.getPlaybackTime();
401
+ if (this.nextFrame && this.nextFrame.timestamp <= currentPlaybackTime) {
402
+ Log.trace(this.logLevel, `[MediaPlayer] Single frame update at ${this.nextFrame.timestamp.toFixed(3)}s`);
403
+ this.context.drawImage(this.nextFrame.canvas, 0, 0);
404
+ // For video-only content, track video progress as audio progress
405
+ if (!this.audioSink) {
406
+ this.resetAudioProgressStopwatch();
407
+ }
408
+ this.nextFrame = null;
409
+ this.updateNextFrame();
410
+ }
411
+ }
412
+ startRenderLoop() {
413
+ if (this.animationFrameId !== null) {
414
+ return;
415
+ }
416
+ Log.trace(this.logLevel, `[MediaPlayer] Starting render loop`);
417
+ this.render();
418
+ }
419
+ stopRenderLoop() {
420
+ if (this.animationFrameId !== null) {
421
+ cancelAnimationFrame(this.animationFrameId);
422
+ this.animationFrameId = null;
423
+ Log.trace(this.logLevel, `[MediaPlayer] Stopped render loop`);
424
+ }
425
+ }
426
+ // A/V sync coordination methods (WIP)
427
+ tryStartAudio() {
428
+ // Only start if: playing + audio exists + gate is open + not already started
429
+ if (this.playing &&
430
+ this.audioSink &&
431
+ this.canStartAudio &&
432
+ !this.audioBufferIterator) {
433
+ this.audioBufferIterator = this.audioSink.buffers(this.getPlaybackTime());
434
+ this.runAudioIterator();
435
+ this.resetAudioProgressStopwatch();
436
+ Log.trace(this.logLevel, '[MediaPlayer] Audio started - A/V sync established');
437
+ }
438
+ }
439
+ // Unified time reference for stall detection
440
+ getCurrentTimeMs() {
441
+ if (!this.sharedAudioContext) {
442
+ return performance.now();
443
+ }
444
+ return this.sharedAudioContext.currentTime * 1000;
445
+ }
446
+ // Stall detection methods
447
+ resetAudioProgressStopwatch() {
448
+ this.lastAudioProgressAtMs = this.getCurrentTimeMs();
449
+ }
450
+ getAudioLookaheadSec() {
451
+ if (!this.sharedAudioContext)
452
+ return 0;
453
+ return this.expectedAudioTime - this.sharedAudioContext.currentTime;
454
+ }
455
+ calculateAudioStallThresholdSec() {
456
+ return 0.2; // Need 200ms of audio scheduled ahead
457
+ }
458
+ isNetworkStalled() {
459
+ const nowMs = this.getCurrentTimeMs();
460
+ const timeSinceNetworkMs = nowMs - this.lastNetworkActivityAtMs;
461
+ if (timeSinceNetworkMs > 100) {
462
+ this.isNetworkActive = false;
463
+ }
464
+ return !this.isNetworkActive && timeSinceNetworkMs >= 500;
465
+ }
466
+ checkVideoStall() {
467
+ if (!this.actualFps)
468
+ return false;
469
+ const nowMs = this.getCurrentTimeMs();
470
+ const frameIntervalMs = 1000 / this.actualFps;
471
+ const STALL_FRAME_COUNT = 6;
472
+ const calculatedThresholdMs = frameIntervalMs * STALL_FRAME_COUNT;
473
+ const MIN_THRESHOLD_MS = 150;
474
+ const MAX_THRESHOLD_MS = 300;
475
+ const threshold = Math.min(Math.max(calculatedThresholdMs, MIN_THRESHOLD_MS), MAX_THRESHOLD_MS);
476
+ // Use a separate video progress tracker for video-only content
477
+ const timeSinceVideoProgressMs = nowMs - this.lastAudioProgressAtMs; // Reuse for now
478
+ return (!this.nextFrame &&
479
+ timeSinceVideoProgressMs > threshold &&
480
+ this.playing &&
481
+ this.currentTime < this.duration);
482
+ }
483
+ checkIfStalled() {
484
+ // Only check what matters for playback readiness
485
+ if (this.audioSink && this.playing) {
486
+ const audioLookaheadSec = this.getAudioLookaheadSec();
487
+ const isAudioStarved = audioLookaheadSec < this.calculateAudioStallThresholdSec();
488
+ return isAudioStarved && this.isNetworkStalled();
489
+ }
490
+ // Video-only fallback
491
+ if (!this.audioSink) {
492
+ return this.checkVideoStall() && this.isNetworkStalled();
493
+ }
494
+ return false; // Remove: return this.isSeeking;
495
+ }
496
+ updateStalledState() {
497
+ const isStalled = this.checkIfStalled();
498
+ this.isStalled = isStalled;
499
+ this.onStalledChangeCallback?.(isStalled);
500
+ }
501
+ }
@@ -0,0 +1,10 @@
1
import React from 'react';
import { type LogLevel } from '../log';
// Props for the canvas-based preview video component.
type NewVideoForPreviewProps = {
    // URL of the media to play.
    readonly src: string;
    // Inline styles applied to the rendered <canvas>.
    readonly style?: React.CSSProperties;
    // Playback speed multiplier; defaults to 1 in the implementation.
    readonly playbackRate?: number;
    // Verbosity for the internal Log helper; defaults to 'info'.
    readonly logLevel?: LogLevel;
};
export declare const NewVideoForPreview: React.FC<NewVideoForPreviewProps>;
export {};
@@ -0,0 +1,114 @@
1
// NewVideoForPreview: React component that renders a <canvas> and drives a
// MediaPlayer from the Remotion timeline — creating/disposing the player per
// src, mirroring play/pause, seeking to the current frame's time, and bridging
// MediaPlayer stalls into Remotion's buffering state.
import { jsx as _jsx } from "react/jsx-runtime";
import { useContext, useEffect, useRef, useState } from 'react';
import { Internals, useBufferState, useCurrentFrame } from 'remotion';
import { Log } from '../log';
import { MediaPlayer } from '../video/media-player';
const { useUnsafeVideoConfig, Timeline, SharedAudioContext } = Internals;
export const NewVideoForPreview = ({ src, style, playbackRate = 1, logLevel = 'info', }) => {
    const canvasRef = useRef(null);
    const videoConfig = useUnsafeVideoConfig();
    const frame = useCurrentFrame();
    const lastCurrentTimeRef = useRef(-1); // last media time pushed to the player
    const mediaPlayerRef = useRef(null);
    const [mediaPlayerReady, setMediaPlayerReady] = useState(false);
    const [playing] = Timeline.usePlayingState();
    const sharedAudioContext = useContext(SharedAudioContext);
    const buffer = useBufferState();
    const delayHandleRef = useRef(null); // active buffer.delayPlayback() handle, if any
    if (!videoConfig) {
        throw new Error('No video config found');
    }
    if (!src) {
        throw new TypeError('No `src` was passed to <NewVideoForPreview>.');
    }
    // Map the composition frame to media time, scaled by playbackRate.
    const actualFps = videoConfig.fps / playbackRate;
    const currentTime = frame / actualFps;
    // Captured once on mount; the player is initialized at this position.
    const [initialTimestamp] = useState(currentTime);
    // Create the MediaPlayer once the canvas and shared audio context exist;
    // dispose it (and release any buffering block) on cleanup.
    useEffect(() => {
        if (!canvasRef.current)
            return;
        if (!sharedAudioContext)
            return;
        if (!sharedAudioContext.audioContext)
            return;
        try {
            Log.trace(logLevel, `[NewVideoForPreview] Creating MediaPlayer for src: ${src}`);
            const player = new MediaPlayer({
                canvas: canvasRef.current,
                src,
                logLevel,
                sharedAudioContext: sharedAudioContext.audioContext,
            });
            mediaPlayerRef.current = player;
            player
                .initialize(initialTimestamp)
                .then(() => {
                setMediaPlayerReady(true);
                Log.trace(logLevel, `[NewVideoForPreview] MediaPlayer initialized successfully`);
            })
                .catch((error) => {
                Log.error('[NewVideoForPreview] Failed to initialize MediaPlayer', error);
            });
        }
        catch (error) {
            Log.error('[NewVideoForPreview] MediaPlayer initialization failed', error);
        }
        return () => {
            // Release a pending Remotion buffering block before tearing down.
            if (delayHandleRef.current) {
                delayHandleRef.current.unblock();
                delayHandleRef.current = null;
            }
            if (mediaPlayerRef.current) {
                Log.trace(logLevel, `[NewVideoForPreview] Disposing MediaPlayer`);
                mediaPlayerRef.current.dispose();
                mediaPlayerRef.current = null;
            }
            setMediaPlayerReady(false);
        };
    }, [src, logLevel, sharedAudioContext, initialTimestamp]);
    // sync play/pause state with Remotion timeline (like old VideoForPreview video does)
    useEffect(() => {
        const mediaPlayer = mediaPlayerRef.current;
        if (!mediaPlayer)
            return;
        if (playing) {
            Log.trace(logLevel, `[NewVideoForPreview] Remotion playing - calling MediaPlayer.play()`);
            mediaPlayer.play().catch((error) => {
                Log.error('[NewVideoForPreview] Failed to play', error);
            });
        }
        else {
            Log.trace(logLevel, `[NewVideoForPreview] Remotion paused - calling MediaPlayer.pause()`);
            mediaPlayer.pause();
        }
    }, [playing, logLevel, mediaPlayerReady]);
    // sync target time with MediaPlayer (runs on every frame change)
    useEffect(() => {
        const mediaPlayer = mediaPlayerRef.current;
        if (!mediaPlayer || !mediaPlayerReady)
            return;
        mediaPlayer.seekTo(currentTime);
        Log.trace(logLevel, `[NewVideoForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
        lastCurrentTimeRef.current = currentTime;
    }, [currentTime, logLevel, mediaPlayerReady]);
    // sync MediaPlayer stalling with Remotion buffering: block playback while
    // stalled, unblock when recovered.
    useEffect(() => {
        const mediaPlayer = mediaPlayerRef.current;
        if (!mediaPlayer || !mediaPlayerReady)
            return;
        mediaPlayer.onStalledChange((isStalled) => {
            if (isStalled && !delayHandleRef.current) {
                // Start blocking Remotion playback
                delayHandleRef.current = buffer.delayPlayback();
                Log.trace(logLevel, '[NewVideoForPreview] MediaPlayer stalled - blocking Remotion playback');
            }
            else if (!isStalled && delayHandleRef.current) {
                // Unblock Remotion playback
                delayHandleRef.current.unblock();
                delayHandleRef.current = null;
                Log.trace(logLevel, '[NewVideoForPreview] MediaPlayer unstalled - unblocking Remotion playback');
            }
        });
    }, [mediaPlayerReady, buffer, logLevel]);
    // The canvas is sized to the composition; MediaPlayer draws into it directly.
    return (_jsx("canvas", { ref: canvasRef, width: videoConfig.width, height: videoConfig.height, style: style }));
};
@@ -0,0 +1,3 @@
1
import React from 'react';
import type { VideoProps } from './video/props';
// Video component variant used during rendering (as opposed to preview).
export declare const VideoForRendering: React.FC<VideoProps>;