@remotion/media 4.0.364 → 4.0.365
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/audio/audio-for-preview.js +8 -21
- package/dist/audio/audio-preview-iterator.d.ts +24 -7
- package/dist/audio/audio-preview-iterator.js +143 -18
- package/dist/debug-overlay/preview-overlay.d.ts +15 -1
- package/dist/debug-overlay/preview-overlay.js +32 -8
- package/dist/esm/index.mjs +322 -249
- package/dist/media-player.d.ts +3 -11
- package/dist/media-player.js +137 -158
- package/dist/video/video-for-preview.js +8 -29
- package/package.json +4 -4
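Two small helpers that appear in the `index.mjs` diff below, `roundTo4Digits` and `isAlreadyQueued`, drive the reworked seek logic: a seek target is only re-fetched when it does not already fall inside the period of audio that is queued. A minimal standalone sketch (TypeScript; the helper bodies are copied from the compiled output in the diff below, and the sample timestamps are illustrative only):

```ts
// Copied from the compiled output in the diff below: rounds a timestamp
// (in seconds) to millisecond precision before comparisons.
const roundTo4Digits = (timestamp: number) => {
  return Math.round(timestamp * 1000) / 1000;
};

// Shape returned by the iterator's getQueuedPeriod() in the diff below.
type QueuedPeriod = {from: number; until: number} | null;

// Copied from the diff below: a time counts as "already queued" if it lies
// in the half-open interval [from, until).
const isAlreadyQueued = (time: number, queuedPeriod: QueuedPeriod) => {
  if (!queuedPeriod) {
    return false;
  }
  return time >= queuedPeriod.from && time < queuedPeriod.until;
};

// Illustrative usage with made-up numbers:
const period: QueuedPeriod = {from: 1, until: 2.5};
console.log(isAlreadyQueued(roundTo4Digits(1.23456), period)); // true  (rounds to 1.235)
console.log(isAlreadyQueued(2.5, period));                     // false (upper bound is exclusive)
```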
package/dist/esm/index.mjs
CHANGED
@@ -20,22 +20,124 @@ import {
 } from "mediabunny";
 import { Internals as Internals2 } from "remotion";

+// src/helpers/round-to-4-digits.ts
+var roundTo4Digits = (timestamp) => {
+return Math.round(timestamp * 1000) / 1000;
+};
+
 // src/audio/audio-preview-iterator.ts
-var HEALTHY_BUFFER_THRESHOLD_SECONDS = 1;
 var makeAudioIterator = (audioSink, startFromSecond) => {
 let destroyed = false;
 const iterator = audioSink.buffers(startFromSecond);
-
-let audioBufferHealth = 0;
-const queuedAudioNodes = new Set;
+const queuedAudioNodes = [];
 const cleanupAudioQueue = () => {
 for (const node of queuedAudioNodes) {
-node.stop();
+node.node.stop();
+}
+queuedAudioNodes.length = 0;
+};
+let lastReturnedBuffer = null;
+let iteratorEnded = false;
+const getNextOrNullIfNotAvailable = async () => {
+const next = iterator.next();
+const result = await Promise.race([
+next,
+new Promise((resolve) => {
+Promise.resolve().then(() => resolve());
+})
+]);
+if (!result) {
+return {
+type: "need-to-wait-for-it",
+waitPromise: async () => {
+const res = await next;
+if (res.value) {
+lastReturnedBuffer = res.value;
+} else {
+iteratorEnded = true;
+}
+return res.value;
+}
+};
+}
+if (result.value) {
+lastReturnedBuffer = result.value;
+} else {
+iteratorEnded = true;
+}
+return {
+type: "got-buffer-or-end",
+buffer: result.value ?? null
+};
+};
+const tryToSatisfySeek = async (time) => {
+if (lastReturnedBuffer) {
+const bufferTimestamp = roundTo4Digits(lastReturnedBuffer.timestamp);
+const bufferEndTimestamp = roundTo4Digits(lastReturnedBuffer.timestamp + lastReturnedBuffer.duration);
+if (roundTo4Digits(time) < bufferTimestamp) {
+return {
+type: "not-satisfied",
+reason: `iterator is too far, most recently returned ${bufferTimestamp}-${bufferEndTimestamp}, requested ${time}`
+};
+}
+if (roundTo4Digits(time) <= bufferEndTimestamp) {
+return {
+type: "satisfied",
+buffers: [lastReturnedBuffer]
+};
+}
+}
+if (iteratorEnded) {
+if (lastReturnedBuffer) {
+return {
+type: "satisfied",
+buffers: [lastReturnedBuffer]
+};
+}
+return {
+type: "not-satisfied",
+reason: "iterator ended"
+};
+}
+const toBeReturned = [];
+while (true) {
+const buffer = await getNextOrNullIfNotAvailable();
+if (buffer.type === "need-to-wait-for-it") {
+return {
+type: "not-satisfied",
+reason: "iterator did not have buffer ready"
+};
+}
+if (buffer.type === "got-buffer-or-end") {
+if (buffer.buffer === null) {
+iteratorEnded = true;
+if (lastReturnedBuffer) {
+return {
+type: "satisfied",
+buffers: [lastReturnedBuffer]
+};
+}
+return {
+type: "not-satisfied",
+reason: "iterator ended and did not have buffer ready"
+};
+}
+const bufferTimestamp = roundTo4Digits(buffer.buffer.timestamp);
+const bufferEndTimestamp = roundTo4Digits(buffer.buffer.timestamp + buffer.buffer.duration);
+const timestamp = roundTo4Digits(time);
+if (bufferTimestamp <= timestamp && bufferEndTimestamp > timestamp) {
+return {
+type: "satisfied",
+buffers: [...toBeReturned, buffer.buffer]
+};
+}
+toBeReturned.push(buffer.buffer);
+continue;
+}
+throw new Error("Unreachable");
 }
-queuedAudioNodes.clear();
 };
 return {
-cleanupAudioQueue,
 destroy: () => {
 cleanupAudioQueue();
 destroyed = true;
@@ -43,42 +145,96 @@ var makeAudioIterator = (audioSink, startFromSecond) => {
 return;
 });
 },
-isReadyToPlay: () => {
-return audioIteratorStarted && audioBufferHealth > 0;
-},
-setAudioIteratorStarted: (started) => {
-audioIteratorStarted = started;
-},
 getNext: () => {
 return iterator.next();
 },
-setAudioBufferHealth: (health) => {
-audioBufferHealth = health;
-},
 isDestroyed: () => {
 return destroyed;
 },
-addQueuedAudioNode: (node) => {
-queuedAudioNodes.
+addQueuedAudioNode: (node, timestamp, buffer) => {
+queuedAudioNodes.push({ node, timestamp, buffer });
 },
 removeQueuedAudioNode: (node) => {
-queuedAudioNodes.
-
+const index = queuedAudioNodes.findIndex((n) => n.node === node);
+if (index !== -1) {
+queuedAudioNodes.splice(index, 1);
+}
+},
+removeAndReturnAllQueuedAudioNodes: () => {
+const nodes = queuedAudioNodes.slice();
+for (const node of nodes) {
+node.node.stop();
+}
+queuedAudioNodes.length = 0;
+return nodes;
+},
+getQueuedPeriod: () => {
+const lastNode = queuedAudioNodes[queuedAudioNodes.length - 1];
+if (!lastNode) {
+return null;
+}
+const firstNode = queuedAudioNodes[0];
+if (!firstNode) {
+return null;
+}
+return {
+from: firstNode.timestamp,
+until: lastNode.timestamp + lastNode.buffer.duration
+};
+},
+tryToSatisfySeek
 };
 };
+var isAlreadyQueued = (time, queuedPeriod) => {
+if (!queuedPeriod) {
+return false;
+}
+return time >= queuedPeriod.from && time < queuedPeriod.until;
+};

 // src/debug-overlay/preview-overlay.ts
-var drawPreviewOverlay = (
+var drawPreviewOverlay = ({
+context,
+stats,
+audioTime,
+audioContextState,
+audioIterator,
+audioSyncAnchor,
+audioChunksForAfterResuming,
+playing
+}) => {
+const lines = [
+"Debug overlay",
+`Video iterators created: ${stats.videoIteratorsCreated}`,
+`Audio iterators created: ${stats.audioIteratorsCreated}`,
+`Frames rendered: ${stats.framesRendered}`,
+`Audio context state: ${audioContextState}`,
+`Audio time: ${(audioTime - audioSyncAnchor).toFixed(3)}s`
+];
+if (audioIterator) {
+const queuedPeriod = audioIterator.getQueuedPeriod();
+if (queuedPeriod) {
+lines.push(`Audio queued until: ${(queuedPeriod.until - (audioTime - audioSyncAnchor)).toFixed(3)}s`);
+} else if (audioChunksForAfterResuming.length > 0) {
+lines.push(`Audio chunks for after resuming: ${audioChunksForAfterResuming.length}`);
+}
+lines.push(`Playing: ${playing}`);
+}
+const lineHeight = 30;
+const boxPaddingX = 10;
+const boxPaddingY = 10;
+const boxLeft = 20;
+const boxTop = 20;
+const boxWidth = 600;
+const boxHeight = lines.length * lineHeight + 2 * boxPaddingY;
 context.fillStyle = "rgba(0, 0, 0, 1)";
-context.fillRect(
+context.fillRect(boxLeft, boxTop, boxWidth, boxHeight);
 context.fillStyle = "white";
 context.font = "24px sans-serif";
 context.textBaseline = "top";
-
-
-
-context.fillText(`Audio context state: ${audioContextState}`, 30, 120);
-context.fillText(`Audio time: ${audioSyncAnchor.toFixed(3)}s`, 30, 150);
+for (let i = 0;i < lines.length; i++) {
+context.fillText(lines[i], boxLeft + boxPaddingX, boxTop + boxPaddingY + i * lineHeight);
+}
 };

 // src/get-time-in-seconds.ts
@@ -121,37 +277,6 @@ function isNetworkError(error) {
 return false;
 }

-// src/video/timeout-utils.ts
-var sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
-
-class TimeoutError extends Error {
-constructor(message = "Operation timed out") {
-super(message);
-this.name = "TimeoutError";
-}
-}
-function withTimeout(promise, timeoutMs, errorMessage = "Operation timed out") {
-let timeoutId = null;
-const timeoutPromise = new Promise((_, reject) => {
-timeoutId = window.setTimeout(() => {
-reject(new TimeoutError(errorMessage));
-}, timeoutMs);
-});
-return Promise.race([
-promise.finally(() => {
-if (timeoutId) {
-clearTimeout(timeoutId);
-}
-}),
-timeoutPromise
-]);
-}
-
-// src/helpers/round-to-4-digits.ts
-var roundTo4Digits = (timestamp) => {
-return Math.round(timestamp * 1000) / 1000;
-};
-
 // src/video/video-preview-iterator.ts
 var createVideoIterator = (timeToSeek, videoSink) => {
 let destroyed = false;
@@ -276,8 +401,6 @@ var createVideoIterator = (timeToSeek, videoSink) => {
 };

 // src/media-player.ts
-var AUDIO_BUFFER_TOLERANCE_THRESHOLD = 0.1;
-
 class MediaPlayer {
 canvas;
 context;
@@ -289,6 +412,7 @@ class MediaPlayer {
 videoFrameIterator = null;
 debugStats = {
 videoIteratorsCreated: 0,
+audioIteratorsCreated: 0,
 framesRendered: 0
 };
 audioSink = null;
@@ -305,9 +429,6 @@ class MediaPlayer {
 trimAfter;
 initialized = false;
 totalDuration;
-isBuffering = false;
-onBufferingChangeCallback;
-mediaEnded = false;
 debugOverlay = false;
 onVideoFrameCallback;
 initializationPromise = null;
@@ -358,9 +479,6 @@ class MediaPlayer {
 hasAudio() {
 return Boolean(this.audioSink && this.sharedAudioContext && this.gainNode);
 }
-isCurrentlyBuffering() {
-return this.isBuffering && Boolean(this.bufferingStartedAtMs);
-}
 isDisposalError() {
 return this.input?.disposed === true;
 }
@@ -437,11 +555,11 @@ class MediaPlayer {
 return { type: "success", durationInSeconds: this.totalDuration };
 }
 if (this.sharedAudioContext) {
-this.
+this.setPlaybackTime(startTime);
 }
 this.initialized = true;
 try {
-this.startAudioIterator(startTime);
+this.startAudioIterator(startTime, this.currentSeekNonce);
 await this.startVideoIterator(startTime, this.currentSeekNonce);
 } catch (error) {
 if (this.isDisposalError()) {
@@ -503,32 +621,87 @@ class MediaPlayer {
 if (currentPlaybackTime === newTime) {
 return;
 }
-const
-
-
-
+const newAudioSyncAnchor = this.sharedAudioContext.currentTime - newTime;
+const diff = Math.abs(newAudioSyncAnchor - this.audioSyncAnchor);
+if (diff > 0.1) {
+this.setPlaybackTime(newTime);
+}
+const videoSatisfyResult = await this.videoFrameIterator?.tryToSatisfySeek(newTime);
+if (videoSatisfyResult?.type === "satisfied") {
+this.drawFrame(videoSatisfyResult.frame);
+} else if (videoSatisfyResult && this.currentSeekNonce === nonce) {
+this.startVideoIterator(newTime, nonce);
+}
+const queuedPeriod = this.audioBufferIterator?.getQueuedPeriod();
+const currentTimeIsAlreadyQueued = isAlreadyQueued(newTime, queuedPeriod);
+const toBeScheduled = [];
+if (!currentTimeIsAlreadyQueued) {
+const audioSatisfyResult = await this.audioBufferIterator?.tryToSatisfySeek(newTime);
+if (this.currentSeekNonce !== nonce) {
+return;
+}
+if (!audioSatisfyResult) {
+return;
+}
+if (audioSatisfyResult.type === "not-satisfied") {
+await this.startAudioIterator(newTime, nonce);
+return;
+}
+toBeScheduled.push(...audioSatisfyResult.buffers);
 }
-
-
+const nextTime = newTime + 1 / this.fps * this.playbackRate + 1 / this.fps * this.playbackRate;
+const nextIsAlreadyQueued = isAlreadyQueued(nextTime, queuedPeriod);
+if (!nextIsAlreadyQueued) {
+const audioSatisfyResult = await this.audioBufferIterator?.tryToSatisfySeek(nextTime);
+if (this.currentSeekNonce !== nonce) {
+return;
+}
+if (!audioSatisfyResult) {
+return;
+}
+if (audioSatisfyResult.type === "not-satisfied") {
+await this.startAudioIterator(nextTime, nonce);
+return;
+}
+toBeScheduled.push(...audioSatisfyResult.buffers);
+}
+for (const buffer of toBeScheduled) {
+if (this.playing) {
+this.scheduleAudioChunk(buffer.buffer, buffer.timestamp);
+} else {
+this.audioChunksForAfterResuming.push({
+buffer: buffer.buffer,
+timestamp: buffer.timestamp
+});
+}
 }
-this.mediaEnded = false;
-this.audioSyncAnchor = this.sharedAudioContext.currentTime - newTime;
-this.startAudioIterator(newTime);
-this.startVideoIterator(newTime, nonce);
 }
-async play() {
+async play(time) {
 if (!this.isReady())
 return;
-
-
-
-
-
+this.setPlaybackTime(time);
+this.playing = true;
+for (const chunk of this.audioChunksForAfterResuming) {
+this.scheduleAudioChunk(chunk.buffer, chunk.timestamp);
+}
+if (this.sharedAudioContext.state === "suspended") {
+await this.sharedAudioContext.resume();
 }
+this.audioChunksForAfterResuming.length = 0;
+this.drawDebugOverlay();
 }
 pause() {
 this.playing = false;
-this.audioBufferIterator?.
+const toQueue = this.audioBufferIterator?.removeAndReturnAllQueuedAudioNodes();
+if (toQueue) {
+for (const chunk of toQueue) {
+this.audioChunksForAfterResuming.push({
+buffer: chunk.buffer,
+timestamp: chunk.timestamp
+});
+}
+}
+this.drawDebugOverlay();
 }
 setMuted(muted) {
 this.muted = muted;
@@ -574,27 +747,25 @@ class MediaPlayer {
 getPlaybackTime() {
 return this.sharedAudioContext.currentTime - this.audioSyncAnchor;
 }
+setPlaybackTime(time) {
+this.audioSyncAnchor = this.sharedAudioContext.currentTime - time;
+}
+audioChunksForAfterResuming = [];
 scheduleAudioChunk(buffer, mediaTimestamp) {
-const targetTime = mediaTimestamp
-const delay = targetTime - this.sharedAudioContext.currentTime;
+const targetTime = (mediaTimestamp - (this.trimBefore ?? 0) / this.fps) / this.playbackRate;
+const delay = targetTime + this.audioSyncAnchor - this.sharedAudioContext.currentTime;
 const node = this.sharedAudioContext.createBufferSource();
 node.buffer = buffer;
 node.playbackRate.value = this.playbackRate;
 node.connect(this.gainNode);
 if (delay >= 0) {
-node.start(targetTime);
+node.start(targetTime + this.audioSyncAnchor);
 } else {
 node.start(this.sharedAudioContext.currentTime, -delay);
 }
-this.audioBufferIterator
-node.onended = () =>
-
-onBufferingChange(callback) {
-this.onBufferingChangeCallback = callback;
-return () => {
-if (this.onBufferingChangeCallback === callback) {
-this.onBufferingChangeCallback = undefined;
-}
+this.audioBufferIterator.addQueuedAudioNode(node, mediaTimestamp, buffer);
+node.onended = () => {
+return this.audioBufferIterator.removeQueuedAudioNode(node);
 };
 }
 onVideoFrame(callback) {
@@ -621,26 +792,51 @@ class MediaPlayer {
 }
 Internals2.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Drew frame ${frame.timestamp.toFixed(3)}s`);
 };
-startAudioIterator = (startFromSecond) => {
+startAudioIterator = async (startFromSecond, nonce) => {
 if (!this.hasAudio())
 return;
 this.audioBufferIterator?.destroy();
-
-
-
-
-
-
+this.audioChunksForAfterResuming = [];
+const delayHandle = this.bufferState.delayPlayback();
+const iterator = makeAudioIterator(this.audioSink, startFromSecond);
+this.debugStats.audioIteratorsCreated++;
+this.audioBufferIterator = iterator;
+for (let i = 0;i < 3; i++) {
+const result = await iterator.getNext();
+if (iterator.isDestroyed()) {
+delayHandle.unblock();
+return;
+}
+if (nonce !== this.currentSeekNonce) {
+delayHandle.unblock();
+return;
+}
+if (!result.value) {
+delayHandle.unblock();
 return;
 }
-
+const { buffer, timestamp } = result.value;
+this.audioChunksForAfterResuming.push({
+buffer,
+timestamp
+});
 }
+delayHandle.unblock();
 };
 drawDebugOverlay() {
 if (!this.debugOverlay)
 return;
 if (this.context && this.canvas) {
-drawPreviewOverlay(
+drawPreviewOverlay({
+context: this.context,
+stats: this.debugStats,
+audioTime: this.sharedAudioContext.currentTime,
+audioContextState: this.sharedAudioContext.state,
+audioSyncAnchor: this.audioSyncAnchor,
+audioIterator: this.audioBufferIterator,
+audioChunksForAfterResuming: this.audioChunksForAfterResuming,
+playing: this.playing
+});
 }
 }
 startVideoIterator = async (timeToSeek, nonce) => {
@@ -651,9 +847,9 @@ class MediaPlayer {
 const iterator = createVideoIterator(timeToSeek, this.canvasSink);
 this.debugStats.videoIteratorsCreated++;
 this.videoFrameIterator = iterator;
-const delayHandle = this.bufferState
+const delayHandle = this.bufferState.delayPlayback();
 const frameResult = await iterator.getNext();
-delayHandle
+delayHandle.unblock();
 if (iterator.isDestroyed()) {
 return;
 }
@@ -663,101 +859,10 @@ class MediaPlayer {
 if (this.videoFrameIterator.isDestroyed()) {
 return;
 }
-if (frameResult.value) {
-this.audioSyncAnchor = this.sharedAudioContext.currentTime - frameResult.value.timestamp;
-this.drawFrame(frameResult.value);
-} else {}
-};
-bufferingStartedAtMs = null;
-minBufferingTimeoutMs = 500;
-setBufferingState(isBuffering) {
-if (this.isBuffering !== isBuffering) {
-this.isBuffering = isBuffering;
-if (isBuffering) {
-this.bufferingStartedAtMs = performance.now();
-this.onBufferingChangeCallback?.(true);
-} else {
-this.bufferingStartedAtMs = null;
-this.onBufferingChangeCallback?.(false);
-}
-}
-}
-maybeResumeFromBuffering(currentBufferDuration) {
-if (!this.isCurrentlyBuffering())
+if (!frameResult.value) {
 return;
-const now = performance.now();
-const bufferingDuration = now - this.bufferingStartedAtMs;
-const minTimeElapsed = bufferingDuration >= this.minBufferingTimeoutMs;
-const bufferHealthy = currentBufferDuration >= HEALTHY_BUFFER_THRESHOLD_SECONDS;
-if (minTimeElapsed && bufferHealthy) {
-Internals2.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Resuming from buffering after ${bufferingDuration}ms - buffer recovered`);
-this.setBufferingState(false);
-}
-}
-runAudioIterator = async (startFromSecond, audioIterator) => {
-if (!this.hasAudio())
-return;
-try {
-let totalBufferDuration = 0;
-let isFirstBuffer = true;
-audioIterator.setAudioIteratorStarted(true);
-while (true) {
-if (audioIterator.isDestroyed()) {
-return;
-}
-const BUFFERING_TIMEOUT_MS = 50;
-let result;
-try {
-result = await withTimeout(audioIterator.getNext(), BUFFERING_TIMEOUT_MS, "Iterator timeout");
-} catch (error) {
-if (error instanceof TimeoutError && !this.mediaEnded) {
-this.setBufferingState(true);
-}
-await sleep(10);
-continue;
-}
-if (result.done || !result.value) {
-this.mediaEnded = true;
-break;
-}
-const { buffer, timestamp, duration } = result.value;
-totalBufferDuration += duration;
-audioIterator.setAudioBufferHealth(Math.max(0, totalBufferDuration / this.playbackRate));
-this.maybeResumeFromBuffering(totalBufferDuration / this.playbackRate);
-if (this.playing) {
-if (isFirstBuffer) {
-this.audioSyncAnchor = this.sharedAudioContext.currentTime - timestamp;
-isFirstBuffer = false;
-}
-if (timestamp < startFromSecond - AUDIO_BUFFER_TOLERANCE_THRESHOLD) {
-continue;
-}
-this.scheduleAudioChunk(buffer, timestamp);
-}
-const playbackTime = this.getPlaybackTime();
-if (playbackTime === null) {
-continue;
-}
-if (timestamp - playbackTime >= 1) {
-await new Promise((resolve) => {
-const check = () => {
-const currentPlaybackTime = this.getPlaybackTime();
-if (currentPlaybackTime !== null && timestamp - currentPlaybackTime < 1) {
-resolve();
-} else {
-requestAnimationFrame(check);
-}
-};
-check();
-});
-}
-}
-} catch (error) {
-if (this.isDisposalError()) {
-return;
-}
-Internals2.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to run audio iterator", error);
 }
+this.drawFrame(frameResult.value);
 };
 }

@@ -1010,6 +1115,11 @@ var NewAudioForPreview = ({
 trimAfter,
 trimBefore
 });
+const buffering = useContext2(Internals5.BufferingContextReact);
+if (!buffering) {
+throw new Error("useMediaPlayback must be used inside a <BufferingContext>");
+}
+const isPlayerBuffering = Internals5.useIsPlayerBuffering(buffering);
 useEffect2(() => {
 if (!sharedAudioContext)
 return;
@@ -1107,14 +1217,12 @@ var NewAudioForPreview = ({
 const audioPlayer = mediaPlayerRef.current;
 if (!audioPlayer)
 return;
-if (playing) {
-audioPlayer.play(
-Internals5.Log.error({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] Failed to play", error);
-});
+if (playing && !isPlayerBuffering) {
+audioPlayer.play(currentTimeRef.current);
 } else {
 audioPlayer.pause();
 }
-}, [
+}, [isPlayerBuffering, logLevel, playing]);
 useEffect2(() => {
 const audioPlayer = mediaPlayerRef.current;
 if (!audioPlayer || !mediaPlayerReady)
@@ -1122,21 +1230,6 @@ var NewAudioForPreview = ({
 audioPlayer.seekTo(currentTime);
 Internals5.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewAudioForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
 }, [currentTime, logLevel, mediaPlayerReady]);
-useEffect2(() => {
-const audioPlayer = mediaPlayerRef.current;
-if (!audioPlayer || !mediaPlayerReady)
-return;
-audioPlayer.onBufferingChange((newBufferingState) => {
-if (newBufferingState && !delayHandleRef.current) {
-delayHandleRef.current = buffer.delayPlayback();
-Internals5.Log.trace({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] MediaPlayer buffering - blocking Remotion playback");
-} else if (!newBufferingState && delayHandleRef.current) {
-delayHandleRef.current.unblock();
-delayHandleRef.current = null;
-Internals5.Log.trace({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] MediaPlayer unbuffering - unblocking Remotion playback");
-}
-});
-}, [mediaPlayerReady, buffer, logLevel]);
 const effectiveMuted = muted || mediaMuted || userPreferredVolume <= 0;
 useEffect2(() => {
 const audioPlayer = mediaPlayerRef.current;
@@ -3137,6 +3230,11 @@ var VideoForPreview = ({
 const currentTimeRef = useRef2(currentTime);
 currentTimeRef.current = currentTime;
 const preloadedSrc = usePreload2(src);
+const buffering = useContext4(Internals14.BufferingContextReact);
+if (!buffering) {
+throw new Error("useMediaPlayback must be used inside a <BufferingContext>");
+}
+const isPlayerBuffering = Internals14.useIsPlayerBuffering(buffering);
 useEffect3(() => {
 if (!canvasRef.current)
 return;
@@ -3238,14 +3336,12 @@ var VideoForPreview = ({
 const mediaPlayer = mediaPlayerRef.current;
 if (!mediaPlayer)
 return;
-if (playing) {
-mediaPlayer.play(
-Internals14.Log.error({ logLevel, tag: "@remotion/media" }, "[VideoForPreview] Failed to play", error);
-});
+if (playing && !isPlayerBuffering) {
+mediaPlayer.play(currentTimeRef.current);
 } else {
 mediaPlayer.pause();
 }
-}, [playing, logLevel, mediaPlayerReady]);
+}, [isPlayerBuffering, playing, logLevel, mediaPlayerReady]);
 useLayoutEffect2(() => {
 const mediaPlayer = mediaPlayerRef.current;
 if (!mediaPlayer || !mediaPlayerReady)
@@ -3253,29 +3349,6 @@ var VideoForPreview = ({
 mediaPlayer.seekTo(currentTime);
 Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, `[VideoForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
 }, [currentTime, logLevel, mediaPlayerReady]);
-useEffect3(() => {
-const mediaPlayer = mediaPlayerRef.current;
-if (!mediaPlayer || !mediaPlayerReady)
-return;
-let currentBlock = null;
-const unsubscribe = mediaPlayer.onBufferingChange((newBufferingState) => {
-if (newBufferingState && !currentBlock) {
-currentBlock = buffer.delayPlayback();
-Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, "[VideoForPreview] MediaPlayer buffering - blocking Remotion playback");
-} else if (!newBufferingState && currentBlock) {
-currentBlock.unblock();
-currentBlock = null;
-Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, "[VideoForPreview] MediaPlayer unbuffering - unblocking Remotion playback");
-}
-});
-return () => {
-unsubscribe();
-if (currentBlock) {
-currentBlock.unblock();
-currentBlock = null;
-}
-};
-}, [mediaPlayerReady, buffer, logLevel]);
 const effectiveMuted = isSequenceHidden || muted || mediaMuted || userPreferredVolume <= 0;
 useEffect3(() => {
 const mediaPlayer = mediaPlayerRef.current;