@remotion/media 4.0.363 → 4.0.365
This diff shows the content of publicly available package versions that have been released to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their public registries.
- package/dist/audio/audio-for-preview.js +12 -22
- package/dist/audio/audio-iterator.d.ts +11 -0
- package/dist/audio/audio-iterator.js +24 -0
- package/dist/audio/audio-preview-iterator.d.ts +31 -0
- package/dist/audio/audio-preview-iterator.js +168 -0
- package/dist/debug-overlay/preview-overlay.d.ts +19 -0
- package/dist/debug-overlay/preview-overlay.js +37 -0
- package/dist/esm/index.mjs +786 -542
- package/dist/helpers/round-to-4-digits.d.ts +1 -0
- package/dist/helpers/round-to-4-digits.js +4 -0
- package/dist/media-player.d.ts +87 -0
- package/dist/media-player.js +475 -0
- package/dist/video/props.d.ts +1 -0
- package/dist/video/video-for-preview.d.ts +3 -2
- package/dist/video/video-for-preview.js +30 -40
- package/dist/video/video-preview-iterator.d.ts +14 -0
- package/dist/video/video-preview-iterator.js +122 -0
- package/dist/video/video.js +4 -4
- package/dist/video-extraction/keyframe-bank.js +1 -4
- package/package.json +4 -4
package/dist/esm/index.mjs
CHANGED
@@ -10,182 +10,235 @@ import {
 useCurrentFrame as useCurrentFrame2
 } from "remotion";
 
-// src/
-import {
-
-
-
-
-
-
-
-
-
-
-
-return;
-}
-const durationInFrames = Internals.calculateMediaDuration({
-mediaDurationInFrames: mediaDurationInSeconds * fps,
-playbackRate,
-trimAfter,
-trimBefore
-});
-const maxTimes = compDuration / durationInFrames;
-return {
-numberOfTimes: maxTimes,
-startOffset: 0,
-durationInFrames
-};
-}, [
-compDuration,
-fps,
-loop,
-mediaDurationInSeconds,
-playbackRate,
-trimAfter,
-trimBefore
-]);
-return loopDisplay;
+// src/media-player.ts
+import {
+ALL_FORMATS,
+AudioBufferSink,
+CanvasSink,
+Input,
+UrlSource
+} from "mediabunny";
+import { Internals as Internals2 } from "remotion";
+
+// src/helpers/round-to-4-digits.ts
+var roundTo4Digits = (timestamp) => {
+return Math.round(timestamp * 1000) / 1000;
 };
 
-// src/
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-mediaVolume,
-mediaType,
-src,
-displayName,
-trimBefore,
-trimAfter,
-playbackRate
-});
-useEffect(() => {
-if (!src) {
-throw new Error("No src passed");
+// src/audio/audio-preview-iterator.ts
+var makeAudioIterator = (audioSink, startFromSecond) => {
+let destroyed = false;
+const iterator = audioSink.buffers(startFromSecond);
+const queuedAudioNodes = [];
+const cleanupAudioQueue = () => {
+for (const node of queuedAudioNodes) {
+node.node.stop();
+}
+queuedAudioNodes.length = 0;
+};
+let lastReturnedBuffer = null;
+let iteratorEnded = false;
+const getNextOrNullIfNotAvailable = async () => {
+const next = iterator.next();
+const result = await Promise.race([
+next,
+new Promise((resolve) => {
+Promise.resolve().then(() => resolve());
+})
+]);
+if (!result) {
+return {
+type: "need-to-wait-for-it",
+waitPromise: async () => {
+const res = await next;
+if (res.value) {
+lastReturnedBuffer = res.value;
+} else {
+iteratorEnded = true;
+}
+return res.value;
+}
+};
 }
-if (
-
+if (result.value) {
+lastReturnedBuffer = result.value;
+} else {
+iteratorEnded = true;
 }
-
-
+return {
+type: "got-buffer-or-end",
+buffer: result.value ?? null
+};
+};
+const tryToSatisfySeek = async (time) => {
+if (lastReturnedBuffer) {
+const bufferTimestamp = roundTo4Digits(lastReturnedBuffer.timestamp);
+const bufferEndTimestamp = roundTo4Digits(lastReturnedBuffer.timestamp + lastReturnedBuffer.duration);
+if (roundTo4Digits(time) < bufferTimestamp) {
+return {
+type: "not-satisfied",
+reason: `iterator is too far, most recently returned ${bufferTimestamp}-${bufferEndTimestamp}, requested ${time}`
+};
+}
+if (roundTo4Digits(time) <= bufferEndTimestamp) {
+return {
+type: "satisfied",
+buffers: [lastReturnedBuffer]
+};
+}
 }
-
-
-
-
-
-
-
-
-
-
-
-loopDisplay,
-stack,
-from: 0,
-duration,
-id: sequenceId
-});
+if (iteratorEnded) {
+if (lastReturnedBuffer) {
+return {
+type: "satisfied",
+buffers: [lastReturnedBuffer]
+};
+}
+return {
+type: "not-satisfied",
+reason: "iterator ended"
+};
 }
-
-
-
-
-
-
-
-
-rootId,
-volume: volumes,
-showInTimeline: true,
-nonce,
-startMediaFrom: 0 - startsAt,
-doesVolumeChange,
-loopDisplay: undefined,
-playbackRate,
-stack,
-premountDisplay: null,
-postmountDisplay: null
-});
-return () => {
-if (loopDisplay) {
-unregisterSequence(sequenceId);
+const toBeReturned = [];
+while (true) {
+const buffer = await getNextOrNullIfNotAvailable();
+if (buffer.type === "need-to-wait-for-it") {
+return {
+type: "not-satisfied",
+reason: "iterator did not have buffer ready"
+};
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+if (buffer.type === "got-buffer-or-end") {
+if (buffer.buffer === null) {
+iteratorEnded = true;
+if (lastReturnedBuffer) {
+return {
+type: "satisfied",
+buffers: [lastReturnedBuffer]
+};
+}
+return {
+type: "not-satisfied",
+reason: "iterator ended and did not have buffer ready"
+};
+}
+const bufferTimestamp = roundTo4Digits(buffer.buffer.timestamp);
+const bufferEndTimestamp = roundTo4Digits(buffer.buffer.timestamp + buffer.buffer.duration);
+const timestamp = roundTo4Digits(time);
+if (bufferTimestamp <= timestamp && bufferEndTimestamp > timestamp) {
+return {
+type: "satisfied",
+buffers: [...toBeReturned, buffer.buffer]
+};
+}
+toBeReturned.push(buffer.buffer);
+continue;
+}
+throw new Error("Unreachable");
+}
+};
 return {
-
+destroy: () => {
+cleanupAudioQueue();
+destroyed = true;
+iterator.return().catch(() => {
+return;
+});
+},
+getNext: () => {
+return iterator.next();
+},
+isDestroyed: () => {
+return destroyed;
+},
+addQueuedAudioNode: (node, timestamp, buffer) => {
+queuedAudioNodes.push({ node, timestamp, buffer });
+},
+removeQueuedAudioNode: (node) => {
+const index = queuedAudioNodes.findIndex((n) => n.node === node);
+if (index !== -1) {
+queuedAudioNodes.splice(index, 1);
+}
+},
+removeAndReturnAllQueuedAudioNodes: () => {
+const nodes = queuedAudioNodes.slice();
+for (const node of nodes) {
+node.node.stop();
+}
+queuedAudioNodes.length = 0;
+return nodes;
+},
+getQueuedPeriod: () => {
+const lastNode = queuedAudioNodes[queuedAudioNodes.length - 1];
+if (!lastNode) {
+return null;
+}
+const firstNode = queuedAudioNodes[0];
+if (!firstNode) {
+return null;
+}
+return {
+from: firstNode.timestamp,
+until: lastNode.timestamp + lastNode.buffer.duration
+};
+},
+tryToSatisfySeek
 };
 };
+var isAlreadyQueued = (time, queuedPeriod) => {
+if (!queuedPeriod) {
+return false;
+}
+return time >= queuedPeriod.from && time < queuedPeriod.until;
+};
 
-// src/
-
-
-
-
-
-
-
-
+// src/debug-overlay/preview-overlay.ts
+var drawPreviewOverlay = ({
+context,
+stats,
+audioTime,
+audioContextState,
+audioIterator,
+audioSyncAnchor,
+audioChunksForAfterResuming,
+playing
+}) => {
+const lines = [
+"Debug overlay",
+`Video iterators created: ${stats.videoIteratorsCreated}`,
+`Audio iterators created: ${stats.audioIteratorsCreated}`,
+`Frames rendered: ${stats.framesRendered}`,
+`Audio context state: ${audioContextState}`,
+`Audio time: ${(audioTime - audioSyncAnchor).toFixed(3)}s`
+];
+if (audioIterator) {
+const queuedPeriod = audioIterator.getQueuedPeriod();
+if (queuedPeriod) {
+lines.push(`Audio queued until: ${(queuedPeriod.until - (audioTime - audioSyncAnchor)).toFixed(3)}s`);
+} else if (audioChunksForAfterResuming.length > 0) {
+lines.push(`Audio chunks for after resuming: ${audioChunksForAfterResuming.length}`);
+}
+lines.push(`Playing: ${playing}`);
+}
+const lineHeight = 30;
+const boxPaddingX = 10;
+const boxPaddingY = 10;
+const boxLeft = 20;
+const boxTop = 20;
+const boxWidth = 600;
+const boxHeight = lines.length * lineHeight + 2 * boxPaddingY;
+context.fillStyle = "rgba(0, 0, 0, 1)";
+context.fillRect(boxLeft, boxTop, boxWidth, boxHeight);
+context.fillStyle = "white";
+context.font = "24px sans-serif";
+context.textBaseline = "top";
+for (let i = 0;i < lines.length; i++) {
+context.fillText(lines[i], boxLeft + boxPaddingX, boxTop + boxPaddingY + i * lineHeight);
+}
+};
 
 // src/get-time-in-seconds.ts
-import { Internals
+import { Internals } from "remotion";
 var getTimeInSeconds = ({
 loop,
 mediaDurationInSeconds,
@@ -200,7 +253,7 @@ var getTimeInSeconds = ({
 if (mediaDurationInSeconds === null && loop && ifNoMediaDuration === "fail") {
 throw new Error(`Could not determine duration of ${src}, but "loop" was set.`);
 }
-const loopDuration = loop ?
+const loopDuration = loop ? Internals.calculateMediaDuration({
 trimAfter,
 mediaDurationInFrames: mediaDurationInSeconds ? mediaDurationInSeconds * fps : Infinity,
 playbackRate: 1,
@@ -224,36 +277,130 @@ function isNetworkError(error) {
 return false;
 }
 
-// src/video/
-var
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+// src/video/video-preview-iterator.ts
+var createVideoIterator = (timeToSeek, videoSink) => {
+let destroyed = false;
+const iterator = videoSink.canvases(timeToSeek);
+let lastReturnedFrame = null;
+let iteratorEnded = false;
+const getNextOrNullIfNotAvailable = async () => {
+const next = iterator.next();
+const result = await Promise.race([
+next,
+new Promise((resolve) => {
+Promise.resolve().then(() => resolve());
+})
+]);
+if (!result) {
+return {
+type: "need-to-wait-for-it",
+waitPromise: async () => {
+const res = await next;
+if (res.value) {
+lastReturnedFrame = res.value;
+} else {
+iteratorEnded = true;
+}
+return res.value;
+}
+};
+}
+if (result.value) {
+lastReturnedFrame = result.value;
+} else {
+iteratorEnded = true;
+}
+return {
+type: "got-frame-or-end",
+frame: result.value ?? null
+};
+};
+const destroy = () => {
+destroyed = true;
+lastReturnedFrame = null;
+iterator.return().catch(() => {
+return;
+});
+};
+const tryToSatisfySeek = async (time) => {
+if (lastReturnedFrame) {
+const frameTimestamp = roundTo4Digits(lastReturnedFrame.timestamp);
+if (roundTo4Digits(time) < frameTimestamp) {
+return {
+type: "not-satisfied",
+reason: `iterator is too far, most recently returned ${frameTimestamp}`
+};
 }
-
-
-
-
-
-
-
-
+const frameEndTimestamp = roundTo4Digits(lastReturnedFrame.timestamp + lastReturnedFrame.duration);
+const timestamp = roundTo4Digits(time);
+if (frameTimestamp <= timestamp && frameEndTimestamp > timestamp) {
+return {
+type: "satisfied",
+frame: lastReturnedFrame
+};
+}
+}
+if (iteratorEnded) {
+if (lastReturnedFrame) {
+return {
+type: "satisfied",
+frame: lastReturnedFrame
+};
+}
+return {
+type: "not-satisfied",
+reason: "iterator ended"
+};
+}
+while (true) {
+const frame = await getNextOrNullIfNotAvailable();
+if (frame.type === "need-to-wait-for-it") {
+return {
+type: "not-satisfied",
+reason: "iterator did not have frame ready"
+};
+}
+if (frame.type === "got-frame-or-end") {
+if (frame.frame === null) {
+iteratorEnded = true;
+if (lastReturnedFrame) {
+return {
+type: "satisfied",
+frame: lastReturnedFrame
+};
+}
+return {
+type: "not-satisfied",
+reason: "iterator ended and did not have frame ready"
+};
+}
+const frameTimestamp = roundTo4Digits(frame.frame.timestamp);
+const frameEndTimestamp = roundTo4Digits(frame.frame.timestamp + frame.frame.duration);
+const timestamp = roundTo4Digits(time);
+if (frameTimestamp <= timestamp && frameEndTimestamp > timestamp) {
+return {
+type: "satisfied",
+frame: frame.frame
+};
+}
+continue;
+}
+throw new Error("Unreachable");
+}
+};
+return {
+destroy,
+getNext: () => {
+return iterator.next();
+},
+isDestroyed: () => {
+return destroyed;
+},
+tryToSatisfySeek
+};
+};
 
+// src/media-player.ts
 class MediaPlayer {
 canvas;
 context;
@@ -263,10 +410,13 @@ class MediaPlayer {
 audioStreamIndex;
 canvasSink = null;
 videoFrameIterator = null;
-
+debugStats = {
+videoIteratorsCreated: 0,
+audioIteratorsCreated: 0,
+framesRendered: 0
+};
 audioSink = null;
 audioBufferIterator = null;
-queuedAudioNodes = new Set;
 gainNode = null;
 currentVolume = 1;
 sharedAudioContext;
@@ -277,18 +427,12 @@ class MediaPlayer {
 fps;
 trimBefore;
 trimAfter;
-animationFrameId = null;
-videoAsyncId = 0;
-audioAsyncId = 0;
 initialized = false;
 totalDuration;
-
-onBufferingChangeCallback;
-audioBufferHealth = 0;
-audioIteratorStarted = false;
-HEALTHY_BUFER_THRESHOLD_SECONDS = 1;
-mediaEnded = false;
+debugOverlay = false;
 onVideoFrameCallback;
+initializationPromise = null;
+bufferState;
 constructor({
 canvas,
 src,
@@ -299,7 +443,9 @@ class MediaPlayer {
 trimAfter,
 playbackRate,
 audioStreamIndex,
-fps
+fps,
+debugOverlay,
+bufferState
 }) {
 this.canvas = canvas ?? null;
 this.src = src;
@@ -311,6 +457,8 @@ class MediaPlayer {
 this.trimAfter = trimAfter;
 this.audioStreamIndex = audioStreamIndex ?? 0;
 this.fps = fps;
+this.debugOverlay = debugOverlay;
+this.bufferState = bufferState;
 if (canvas) {
 const context = canvas.getContext("2d", {
 alpha: true,
@@ -326,15 +474,20 @@ class MediaPlayer {
 }
 input = null;
 isReady() {
-return this.initialized && Boolean(this.sharedAudioContext);
+return this.initialized && Boolean(this.sharedAudioContext) && !this.input?.disposed;
 }
 hasAudio() {
 return Boolean(this.audioSink && this.sharedAudioContext && this.gainNode);
 }
-
-return this.
+isDisposalError() {
+return this.input?.disposed === true;
 }
-
+initialize(startTimeUnresolved) {
+const promise = this._initialize(startTimeUnresolved);
+this.initializationPromise = promise;
+return promise;
+}
+async _initialize(startTimeUnresolved) {
 try {
 const urlSource = new UrlSource(this.src);
 const input = new Input({
@@ -342,14 +495,20 @@ class MediaPlayer {
 formats: ALL_FORMATS
 });
 this.input = input;
+if (input.disposed) {
+return { type: "disposed" };
+}
 try {
-await
+await input.getFormat();
 } catch (error) {
+if (this.isDisposalError()) {
+return { type: "disposed" };
+}
 const err = error;
 if (isNetworkError(err)) {
 throw error;
 }
-
+Internals2.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Failed to recognize format for ${this.src}`, error);
 return { type: "unknown-container-format" };
 }
 const [durationInSeconds, videoTrack, audioTracks] = await Promise.all([
@@ -396,22 +555,26 @@ class MediaPlayer {
 return { type: "success", durationInSeconds: this.totalDuration };
 }
 if (this.sharedAudioContext) {
-this.
+this.setPlaybackTime(startTime);
 }
 this.initialized = true;
-
-this.startAudioIterator(startTime)
-this.startVideoIterator(startTime)
-
-
+try {
+this.startAudioIterator(startTime, this.currentSeekNonce);
+await this.startVideoIterator(startTime, this.currentSeekNonce);
+} catch (error) {
+if (this.isDisposalError()) {
+return { type: "disposed" };
+}
+Internals2.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to start audio and video iterators", error);
+}
 return { type: "success", durationInSeconds };
 } catch (error) {
 const err = error;
 if (isNetworkError(err)) {
-
+Internals2.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Network/CORS error for ${this.src}`, err);
 return { type: "network-error" };
 }
-
+Internals2.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to initialize", error);
 throw error;
 }
 }
@@ -420,20 +583,19 @@ class MediaPlayer {
 this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
 }
 }
-
-
-node.stop();
-}
-this.queuedAudioNodes.clear();
-}
-async cleanAudioIteratorAndNodes() {
-await this.audioBufferIterator?.return();
-this.audioBufferIterator = null;
-this.audioIteratorStarted = false;
-this.audioBufferHealth = 0;
-this.cleanupAudioQueue();
-}
+currentSeekNonce = 0;
+seekPromiseChain = Promise.resolve();
 async seekTo(time) {
+this.currentSeekNonce++;
+const nonce = this.currentSeekNonce;
+await this.seekPromiseChain;
+this.seekPromiseChain = this.seekToDoNotCallDirectly(time, nonce);
+await this.seekPromiseChain;
+}
+async seekToDoNotCallDirectly(time, nonce) {
+if (nonce !== this.currentSeekNonce) {
+return;
+}
 if (!this.isReady())
 return;
 const newTime = getTimeInSeconds({
@@ -448,45 +610,98 @@ class MediaPlayer {
 src: this.src
 });
 if (newTime === null) {
-this.
-this.
+this.videoFrameIterator?.destroy();
+this.videoFrameIterator = null;
 this.clearCanvas();
-
+this.audioBufferIterator?.destroy();
+this.audioBufferIterator = null;
 return;
 }
 const currentPlaybackTime = this.getPlaybackTime();
-
-
-
-
-
-
-
-
-
-
-
-
+if (currentPlaybackTime === newTime) {
+return;
+}
+const newAudioSyncAnchor = this.sharedAudioContext.currentTime - newTime;
+const diff = Math.abs(newAudioSyncAnchor - this.audioSyncAnchor);
+if (diff > 0.1) {
+this.setPlaybackTime(newTime);
+}
+const videoSatisfyResult = await this.videoFrameIterator?.tryToSatisfySeek(newTime);
+if (videoSatisfyResult?.type === "satisfied") {
+this.drawFrame(videoSatisfyResult.frame);
+} else if (videoSatisfyResult && this.currentSeekNonce === nonce) {
+this.startVideoIterator(newTime, nonce);
+}
+const queuedPeriod = this.audioBufferIterator?.getQueuedPeriod();
+const currentTimeIsAlreadyQueued = isAlreadyQueued(newTime, queuedPeriod);
+const toBeScheduled = [];
+if (!currentTimeIsAlreadyQueued) {
+const audioSatisfyResult = await this.audioBufferIterator?.tryToSatisfySeek(newTime);
+if (this.currentSeekNonce !== nonce) {
+return;
+}
+if (!audioSatisfyResult) {
+return;
+}
+if (audioSatisfyResult.type === "not-satisfied") {
+await this.startAudioIterator(newTime, nonce);
+return;
+}
+toBeScheduled.push(...audioSatisfyResult.buffers);
 }
-
-
+const nextTime = newTime + 1 / this.fps * this.playbackRate + 1 / this.fps * this.playbackRate;
+const nextIsAlreadyQueued = isAlreadyQueued(nextTime, queuedPeriod);
+if (!nextIsAlreadyQueued) {
+const audioSatisfyResult = await this.audioBufferIterator?.tryToSatisfySeek(nextTime);
+if (this.currentSeekNonce !== nonce) {
+return;
+}
+if (!audioSatisfyResult) {
+return;
+}
+if (audioSatisfyResult.type === "not-satisfied") {
+await this.startAudioIterator(nextTime, nonce);
+return;
+}
+toBeScheduled.push(...audioSatisfyResult.buffers);
+}
+for (const buffer of toBeScheduled) {
+if (this.playing) {
+this.scheduleAudioChunk(buffer.buffer, buffer.timestamp);
+} else {
+this.audioChunksForAfterResuming.push({
+buffer: buffer.buffer,
+timestamp: buffer.timestamp
+});
+}
 }
 }
-async play() {
+async play(time) {
 if (!this.isReady())
 return;
-
-
-
-
-
-
+this.setPlaybackTime(time);
+this.playing = true;
+for (const chunk of this.audioChunksForAfterResuming) {
+this.scheduleAudioChunk(chunk.buffer, chunk.timestamp);
+}
+if (this.sharedAudioContext.state === "suspended") {
+await this.sharedAudioContext.resume();
 }
+this.audioChunksForAfterResuming.length = 0;
+this.drawDebugOverlay();
 }
 pause() {
 this.playing = false;
-this.
-
+const toQueue = this.audioBufferIterator?.removeAndReturnAllQueuedAudioNodes();
+if (toQueue) {
+for (const chunk of toQueue) {
+this.audioChunksForAfterResuming.push({
+buffer: chunk.buffer,
+timestamp: chunk.timestamp
+});
+}
+}
+this.drawDebugOverlay();
 }
 setMuted(muted) {
 this.muted = muted;
@@ -504,6 +719,9 @@ class MediaPlayer {
 this.gainNode.gain.value = appliedVolume;
 }
 }
+setDebugOverlay(debugOverlay) {
+this.debugOverlay = debugOverlay;
+}
 setPlaybackRate(rate) {
 this.playbackRate = rate;
 }
@@ -513,37 +731,41 @@ class MediaPlayer {
 setLoop(loop) {
 this.loop = loop;
 }
-dispose() {
+async dispose() {
+this.initialized = false;
+if (this.initializationPromise) {
+try {
+await this.initializationPromise;
+} catch {}
+}
 this.input?.dispose();
-this.
-this.videoFrameIterator
-this.
-this.
+this.videoFrameIterator?.destroy();
+this.videoFrameIterator = null;
+this.audioBufferIterator?.destroy();
+this.audioBufferIterator = null;
 }
 getPlaybackTime() {
 return this.sharedAudioContext.currentTime - this.audioSyncAnchor;
 }
+setPlaybackTime(time) {
+this.audioSyncAnchor = this.sharedAudioContext.currentTime - time;
+}
+audioChunksForAfterResuming = [];
 scheduleAudioChunk(buffer, mediaTimestamp) {
-const targetTime = mediaTimestamp
-const delay = targetTime - this.sharedAudioContext.currentTime;
+const targetTime = (mediaTimestamp - (this.trimBefore ?? 0) / this.fps) / this.playbackRate;
+const delay = targetTime + this.audioSyncAnchor - this.sharedAudioContext.currentTime;
 const node = this.sharedAudioContext.createBufferSource();
 node.buffer = buffer;
 node.playbackRate.value = this.playbackRate;
 node.connect(this.gainNode);
 if (delay >= 0) {
-node.start(targetTime);
+node.start(targetTime + this.audioSyncAnchor);
 } else {
 node.start(this.sharedAudioContext.currentTime, -delay);
 }
-this.
-node.onended = () =>
-
-onBufferingChange(callback) {
-this.onBufferingChangeCallback = callback;
-return () => {
-if (this.onBufferingChangeCallback === callback) {
-this.onBufferingChangeCallback = undefined;
-}
+this.audioBufferIterator.addQueuedAudioNode(node, mediaTimestamp, buffer);
+node.onended = () => {
+return this.audioBufferIterator.removeQueuedAudioNode(node);
 };
 }
 onVideoFrame(callback) {
@@ -557,225 +779,256 @@ class MediaPlayer {
 }
 };
 }
-
-
-
-startRenderLoop() {
-if (this.animationFrameId !== null) {
-return;
-}
-this.render();
-}
-stopRenderLoop() {
-if (this.animationFrameId !== null) {
-cancelAnimationFrame(this.animationFrameId);
-this.animationFrameId = null;
+drawFrame = (frame) => {
+if (!this.context) {
+throw new Error("Context not initialized");
 }
-
-
-
-
-
-
-this.drawCurrentFrame();
+this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
+this.context.drawImage(frame.canvas, 0, 0);
+this.debugStats.framesRendered++;
+this.drawDebugOverlay();
+if (this.onVideoFrameCallback && this.canvas) {
+this.onVideoFrameCallback(this.canvas);
 }
-
-
-
-
+Internals2.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Drew frame ${frame.timestamp.toFixed(3)}s`);
+};
+startAudioIterator = async (startFromSecond, nonce) => {
+if (!this.hasAudio())
+return;
+this.audioBufferIterator?.destroy();
+this.audioChunksForAfterResuming = [];
+const delayHandle = this.bufferState.delayPlayback();
+const iterator = makeAudioIterator(this.audioSink, startFromSecond);
+this.debugStats.audioIteratorsCreated++;
+this.audioBufferIterator = iterator;
+for (let i = 0;i < 3; i++) {
+const result = await iterator.getNext();
+if (iterator.isDestroyed()) {
+delayHandle.unblock();
+return;
+}
+if (nonce !== this.currentSeekNonce) {
+delayHandle.unblock();
+return;
+}
+if (!result.value) {
+delayHandle.unblock();
+return;
+}
+const { buffer, timestamp } = result.value;
+this.audioChunksForAfterResuming.push({
+buffer,
+timestamp
+});
 }
+delayHandle.unblock();
 };
-
-
-
-
+drawDebugOverlay() {
+if (!this.debugOverlay)
+return;
+if (this.context && this.canvas) {
+drawPreviewOverlay({
+context: this.context,
+stats: this.debugStats,
+audioTime: this.sharedAudioContext.currentTime,
+audioContextState: this.sharedAudioContext.state,
+audioSyncAnchor: this.audioSyncAnchor,
+audioIterator: this.audioBufferIterator,
+audioChunksForAfterResuming: this.audioChunksForAfterResuming,
+playing: this.playing
+});
 }
-return !this.isBuffering && this.canRenderVideo() && this.nextFrame !== null && this.nextFrame.timestamp <= playbackTime;
 }
-
-if (this.
-
-this.context.drawImage(this.nextFrame.canvas, 0, 0);
+startVideoIterator = async (timeToSeek, nonce) => {
+if (!this.canvasSink) {
+return;
 }
-
-
+this.videoFrameIterator?.destroy();
+const iterator = createVideoIterator(timeToSeek, this.canvasSink);
+this.debugStats.videoIteratorsCreated++;
+this.videoFrameIterator = iterator;
+const delayHandle = this.bufferState.delayPlayback();
+const frameResult = await iterator.getNext();
+delayHandle.unblock();
+if (iterator.isDestroyed()) {
+return;
 }
-this.
-this.updateNextFrame();
-}
-startAudioIterator = async (startFromSecond) => {
-if (!this.hasAudio())
+if (nonce !== this.currentSeekNonce) {
 return;
-this.audioAsyncId++;
-const currentAsyncId = this.audioAsyncId;
-await this.audioBufferIterator?.return();
-this.audioIteratorStarted = false;
-this.audioBufferHealth = 0;
-try {
-this.audioBufferIterator = this.audioSink.buffers(startFromSecond);
-this.runAudioIterator(startFromSecond, currentAsyncId);
-} catch (error) {
-Internals4.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to start audio iterator", error);
 }
-
-startVideoIterator = async (timeToSeek) => {
-if (!this.canvasSink) {
+if (this.videoFrameIterator.isDestroyed()) {
 return;
 }
-
-const currentAsyncId = this.videoAsyncId;
-this.videoFrameIterator?.return().catch(() => {
+if (!frameResult.value) {
 return;
-});
-this.videoFrameIterator = this.canvasSink.canvases(timeToSeek);
-try {
-const firstFrame = (await this.videoFrameIterator.next()).value ?? null;
-const secondFrame = (await this.videoFrameIterator.next()).value ?? null;
-if (currentAsyncId !== this.videoAsyncId) {
-return;
-}
-if (firstFrame && this.context) {
-Internals4.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Drew initial frame ${firstFrame.timestamp.toFixed(3)}s`);
-this.context.drawImage(firstFrame.canvas, 0, 0);
-if (this.onVideoFrameCallback && this.canvas) {
-this.onVideoFrameCallback(this.canvas);
-}
-}
-this.nextFrame = secondFrame ?? null;
-if (secondFrame) {
-Internals4.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Buffered next frame ${secondFrame.timestamp.toFixed(3)}s`);
-}
-} catch (error) {
-Internals4.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to start video iterator", error);
 }
+this.drawFrame(frameResult.value);
 };
-
-
+}
+
+// src/show-in-timeline.ts
+import { useMemo } from "react";
+import { Internals as Internals3, useVideoConfig } from "remotion";
+var useLoopDisplay = ({
+loop,
+mediaDurationInSeconds,
+playbackRate,
+trimAfter,
+trimBefore
+}) => {
+const { durationInFrames: compDuration, fps } = useVideoConfig();
+const loopDisplay = useMemo(() => {
+if (!loop || !mediaDurationInSeconds) {
 return;
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+const durationInFrames = Internals3.calculateMediaDuration({
+mediaDurationInFrames: mediaDurationInSeconds * fps,
+playbackRate,
+trimAfter,
+trimBefore
+});
+const maxTimes = compDuration / durationInFrames;
+return {
+numberOfTimes: maxTimes,
+startOffset: 0,
+durationInFrames
+};
+}, [
+compDuration,
+fps,
+loop,
+mediaDurationInSeconds,
+playbackRate,
+trimAfter,
+trimBefore
+]);
+return loopDisplay;
+};
+
+// src/use-media-in-timeline.ts
+import { useContext, useEffect, useState } from "react";
+import { Internals as Internals4, useCurrentFrame } from "remotion";
+var useMediaInTimeline = ({
+volume,
+mediaVolume,
+src,
+mediaType,
+playbackRate,
+displayName,
+stack,
+showInTimeline,
+premountDisplay,
+postmountDisplay,
+loopDisplay,
+trimBefore,
+trimAfter
+}) => {
+const parentSequence = useContext(Internals4.SequenceContext);
+const startsAt = Internals4.useMediaStartsAt();
+const { registerSequence, unregisterSequence } = useContext(Internals4.SequenceManager);
+const [sequenceId] = useState(() => String(Math.random()));
+const [mediaId] = useState(() => String(Math.random()));
+const frame = useCurrentFrame();
+const {
+volumes,
+duration,
+doesVolumeChange,
+nonce,
+rootId,
+isStudio,
+finalDisplayName
+} = Internals4.useBasicMediaInTimeline({
+volume,
+mediaVolume,
+mediaType,
+src,
+displayName,
+trimBefore,
+trimAfter,
+playbackRate
+});
+useEffect(() => {
+if (!src) {
+throw new Error("No src passed");
 }
-
-maybeResumeFromBuffering(currentBufferDuration) {
-if (!this.isCurrentlyBuffering())
+if (!isStudio && window.process?.env?.NODE_ENV !== "test") {
 return;
-const now = performance.now();
-const bufferingDuration = now - this.bufferingStartedAtMs;
-const minTimeElapsed = bufferingDuration >= this.minBufferingTimeoutMs;
-const bufferHealthy = currentBufferDuration >= this.HEALTHY_BUFER_THRESHOLD_SECONDS;
-if (minTimeElapsed && bufferHealthy) {
-Internals4.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Resuming from buffering after ${bufferingDuration}ms - buffer recovered`);
-this.setBufferingState(false);
 }
-
-maybeForceResumeFromBuffering() {
-if (!this.isCurrentlyBuffering())
+if (!showInTimeline) {
 return;
-const now = performance.now();
-const bufferingDuration = now - this.bufferingStartedAtMs;
-const forceTimeout = bufferingDuration > this.minBufferingTimeoutMs * 10;
-if (forceTimeout) {
-Internals4.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Force resuming from buffering after ${bufferingDuration}ms`);
-this.setBufferingState(false);
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-if (error instanceof TimeoutError && !this.mediaEnded) {
-this.setBufferingState(true);
-}
-await sleep(10);
-continue;
-}
-if (result.done || !result.value) {
-this.mediaEnded = true;
-break;
-}
-const { buffer, timestamp, duration } = result.value;
-totalBufferDuration += duration;
-this.audioBufferHealth = Math.max(0, totalBufferDuration / this.playbackRate);
-this.maybeResumeFromBuffering(totalBufferDuration / this.playbackRate);
-if (this.playing) {
-if (isFirstBuffer) {
-this.audioSyncAnchor = this.sharedAudioContext.currentTime - timestamp;
-isFirstBuffer = false;
-}
-if (timestamp < startFromSecond - AUDIO_BUFFER_TOLERANCE_THRESHOLD) {
-continue;
-}
-this.scheduleAudioChunk(buffer, timestamp);
-}
-const playbackTime = this.getPlaybackTime();
-if (playbackTime === null) {
-continue;
-}
-if (timestamp - playbackTime >= 1) {
-await new Promise((resolve) => {
-const check = () => {
-const currentPlaybackTime = this.getPlaybackTime();
-if (currentPlaybackTime !== null && timestamp - currentPlaybackTime < 1) {
-resolve();
-} else {
-requestAnimationFrame(check);
-}
-};
-check();
-});
-}
-}
-} catch (error) {
-Internals4.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to run audio iterator", error);
+const loopIteration = loopDisplay ? Math.floor(frame / loopDisplay.durationInFrames) : 0;
+if (loopDisplay) {
+registerSequence({
+type: "sequence",
+premountDisplay,
+postmountDisplay,
+parent: parentSequence?.id ?? null,
+displayName: finalDisplayName,
+rootId,
+showInTimeline: true,
+nonce,
+loopDisplay,
+stack,
+from: 0,
+duration,
+id: sequenceId
+});
 }
+registerSequence({
+type: mediaType,
+src,
+id: mediaId,
+duration: loopDisplay?.durationInFrames ?? duration,
+from: loopDisplay ? loopIteration * loopDisplay.durationInFrames : 0,
+parent: loopDisplay ? sequenceId : parentSequence?.id ?? null,
+displayName: finalDisplayName,
+rootId,
+volume: volumes,
+showInTimeline: true,
+nonce,
+startMediaFrom: 0 - startsAt,
+doesVolumeChange,
+loopDisplay: undefined,
+playbackRate,
+stack,
+premountDisplay: null,
+postmountDisplay: null
+});
+return () => {
+if (loopDisplay) {
+unregisterSequence(sequenceId);
+}
+unregisterSequence(mediaId);
+};
+}, [
+doesVolumeChange,
+duration,
+finalDisplayName,
+isStudio,
+loopDisplay,
+mediaId,
+mediaType,
+nonce,
+parentSequence?.id,
+playbackRate,
+postmountDisplay,
+premountDisplay,
+registerSequence,
+rootId,
+sequenceId,
+showInTimeline,
+src,
+stack,
+startsAt,
+unregisterSequence,
+volumes,
+frame
+]);
+return {
+id: mediaId
 };
-}
+};
 
 // src/audio/audio-for-preview.tsx
 import { jsx } from "react/jsx-runtime";
@@ -862,6 +1115,11 @@ var NewAudioForPreview = ({
 trimAfter,
 trimBefore
 });
+const buffering = useContext2(Internals5.BufferingContextReact);
+if (!buffering) {
+throw new Error("useMediaPlayback must be used inside a <BufferingContext>");
+}
+const isPlayerBuffering = Internals5.useIsPlayerBuffering(buffering);
 useEffect2(() => {
 if (!sharedAudioContext)
 return;
@@ -878,7 +1136,9 @@ var NewAudioForPreview = ({
 fps: videoConfig.fps,
 canvas: null,
 playbackRate,
-audioStreamIndex: audioStreamIndex ?? 0
+audioStreamIndex: audioStreamIndex ?? 0,
+debugOverlay: false,
+bufferState: buffer
 });
 mediaPlayerRef.current = player;
 player.initialize(currentTimeRef.current).then((result) => {
@@ -950,20 +1210,19 @@ var NewAudioForPreview = ({
 playbackRate,
 videoConfig.fps,
 audioStreamIndex,
-disallowFallbackToHtml5Audio
+disallowFallbackToHtml5Audio,
+buffer
 ]);
 useEffect2(() => {
 const audioPlayer = mediaPlayerRef.current;
 if (!audioPlayer)
 return;
-if (playing) {
-audioPlayer.play(
-Internals5.Log.error({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] Failed to play", error);
-});
+if (playing && !isPlayerBuffering) {
+audioPlayer.play(currentTimeRef.current);
 } else {
 audioPlayer.pause();
 }
-}, [
+}, [isPlayerBuffering, logLevel, playing]);
 useEffect2(() => {
 const audioPlayer = mediaPlayerRef.current;
 if (!audioPlayer || !mediaPlayerReady)
@@ -971,21 +1230,6 @@ var NewAudioForPreview = ({
 audioPlayer.seekTo(currentTime);
 Internals5.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewAudioForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
 }, [currentTime, logLevel, mediaPlayerReady]);
-useEffect2(() => {
-const audioPlayer = mediaPlayerRef.current;
-if (!audioPlayer || !mediaPlayerReady)
-return;
-audioPlayer.onBufferingChange((newBufferingState) => {
-if (newBufferingState && !delayHandleRef.current) {
-delayHandleRef.current = buffer.delayPlayback();
-Internals5.Log.trace({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] MediaPlayer buffering - blocking Remotion playback");
-} else if (!newBufferingState && delayHandleRef.current) {
-delayHandleRef.current.unblock();
-delayHandleRef.current = null;
-Internals5.Log.trace({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] MediaPlayer unbuffering - unblocking Remotion playback");
-}
-});
-}, [mediaPlayerReady, buffer, logLevel]);
 const effectiveMuted = muted || mediaMuted || userPreferredVolume <= 0;
 useEffect2(() => {
 const audioPlayer = mediaPlayerRef.current;
@@ -1297,7 +1541,7 @@ var warnAboutMatroskaOnce = (src, logLevel) => {
 warned[src] = true;
 Internals6.Log.warn({ logLevel, tag: "@remotion/media" }, `Audio from ${src} will need to be read from the beginning. https://www.remotion.dev/docs/media/support#matroska-limitation`);
 };
-var
+var makeAudioIterator2 = ({
 audioSampleSink,
 isMatroska,
 startTimestamp,
@@ -1422,7 +1666,7 @@ var makeAudioManager = () => {
 actualMatroskaTimestamps,
 logLevel
 }) => {
-const iterator =
+const iterator = makeAudioIterator2({
 audioSampleSink,
 isMatroska,
 startTimestamp: timeInSeconds,
@@ -1584,9 +1828,6 @@ import {
 
 // src/video-extraction/keyframe-bank.ts
 import { Internals as Internals8 } from "remotion";
-var roundTo4Digits = (timestamp) => {
-return Math.round(timestamp * 1000) / 1000;
-};
 var makeKeyframeBank = ({
 startTimestampInSeconds,
 endTimestampInSeconds,
@@ -2891,7 +3132,14 @@ Internals13.addSequenceStackTraces(Audio);
 import { Internals as Internals16, useRemotionEnvironment as useRemotionEnvironment4 } from "remotion";
 
 // src/video/video-for-preview.tsx
-import {
+import {
+useContext as useContext4,
+useEffect as useEffect3,
+useLayoutEffect as useLayoutEffect2,
+useMemo as useMemo4,
+useRef as useRef2,
+useState as useState4
+} from "react";
 import { Html5Video, Internals as Internals14, useBufferState as useBufferState2, useCurrentFrame as useCurrentFrame4 } from "remotion";
 import { jsx as jsx4 } from "react/jsx-runtime";
 var {
@@ -2925,7 +3173,8 @@ var VideoForPreview = ({
 stack,
 disallowFallbackToOffthreadVideo,
 fallbackOffthreadVideoProps,
-audioStreamIndex
+audioStreamIndex,
+debugOverlay
 }) => {
 const src = usePreload2(unpreloadedSrc);
 const canvasRef = useRef2(null);
@@ -2977,13 +3226,15 @@ var VideoForPreview = ({
 if (!videoConfig) {
 throw new Error("No video config found");
 }
-if (!src) {
-throw new TypeError("No `src` was passed to <NewVideoForPreview>.");
-}
 const currentTime = frame / videoConfig.fps;
 const currentTimeRef = useRef2(currentTime);
 currentTimeRef.current = currentTime;
 const preloadedSrc = usePreload2(src);
+const buffering = useContext4(Internals14.BufferingContextReact);
+if (!buffering) {
+throw new Error("useMediaPlayback must be used inside a <BufferingContext>");
+}
+const isPlayerBuffering = Internals14.useIsPlayerBuffering(buffering);
 useEffect3(() => {
 if (!canvasRef.current)
 return;
@@ -3002,10 +3253,15 @@ var VideoForPreview = ({
 trimBefore,
 fps: videoConfig.fps,
 playbackRate,
-audioStreamIndex
+audioStreamIndex,
+debugOverlay,
+bufferState: buffer
 });
 mediaPlayerRef.current = player;
 player.initialize(currentTimeRef.current).then((result) => {
+if (result.type === "disposed") {
+return;
+}
 if (result.type === "unknown-container-format") {
 if (disallowFallbackToOffthreadVideo) {
 throw new Error(`Unknown container format ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
@@ -3043,16 +3299,16 @@ var VideoForPreview = ({
 setMediaDurationInSeconds(result.durationInSeconds);
 }
 }).catch((error) => {
-Internals14.Log.error({ logLevel, tag: "@remotion/media" }, "[
+Internals14.Log.error({ logLevel, tag: "@remotion/media" }, "[VideoForPreview] Failed to initialize MediaPlayer", error);
 setShouldFallbackToNativeVideo(true);
 });
 } catch (error) {
-Internals14.Log.error({ logLevel, tag: "@remotion/media" }, "[
+Internals14.Log.error({ logLevel, tag: "@remotion/media" }, "[VideoForPreview] MediaPlayer initialization failed", error);
 setShouldFallbackToNativeVideo(true);
 }
 return () => {
 if (mediaPlayerRef.current) {
-Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, `[
+Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, `[VideoForPreview] Disposing MediaPlayer`);
 mediaPlayerRef.current.dispose();
 mediaPlayerRef.current = null;
 }
@@ -3069,7 +3325,9 @@ var VideoForPreview = ({
 videoConfig.fps,
 playbackRate,
 disallowFallbackToOffthreadVideo,
-audioStreamIndex
+audioStreamIndex,
+debugOverlay,
+buffer
 ]);
 const classNameValue = useMemo4(() => {
 return [Internals14.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals14.truthy).join(" ");
@@ -3078,44 +3336,19 @@ var VideoForPreview = ({
 const mediaPlayer = mediaPlayerRef.current;
 if (!mediaPlayer)
 return;
-if (playing) {
-mediaPlayer.play(
-Internals14.Log.error({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] Failed to play", error);
-});
+if (playing && !isPlayerBuffering) {
+mediaPlayer.play(currentTimeRef.current);
 } else {
 mediaPlayer.pause();
 }
-}, [playing, logLevel, mediaPlayerReady]);
-
+}, [isPlayerBuffering, playing, logLevel, mediaPlayerReady]);
+useLayoutEffect2(() => {
 const mediaPlayer = mediaPlayerRef.current;
 if (!mediaPlayer || !mediaPlayerReady)
 return;
 mediaPlayer.seekTo(currentTime);
-Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, `[
+Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, `[VideoForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
 }, [currentTime, logLevel, mediaPlayerReady]);
-useEffect3(() => {
-const mediaPlayer = mediaPlayerRef.current;
-if (!mediaPlayer || !mediaPlayerReady)
-return;
-let currentBlock = null;
-const unsubscribe = mediaPlayer.onBufferingChange((newBufferingState) => {
-if (newBufferingState && !currentBlock) {
-currentBlock = buffer.delayPlayback();
-Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] MediaPlayer buffering - blocking Remotion playback");
-} else if (!newBufferingState && currentBlock) {
-currentBlock.unblock();
-currentBlock = null;
-Internals14.Log.trace({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] MediaPlayer unbuffering - unblocking Remotion playback");
-}
-});
-return () => {
-unsubscribe();
-if (currentBlock) {
-currentBlock.unblock();
-currentBlock = null;
-}
-};
-}, [mediaPlayerReady, buffer, logLevel]);
 const effectiveMuted = isSequenceHidden || muted || mediaMuted || userPreferredVolume <= 0;
 useEffect3(() => {
 const mediaPlayer = mediaPlayerRef.current;
@@ -3130,6 +3363,13 @@ var VideoForPreview = ({
 }
 mediaPlayer.setVolume(userPreferredVolume);
 }, [userPreferredVolume, mediaPlayerReady]);
+useEffect3(() => {
+const mediaPlayer = mediaPlayerRef.current;
+if (!mediaPlayer || !mediaPlayerReady) {
+return;
+}
+mediaPlayer.setDebugOverlay(debugOverlay);
+}, [debugOverlay, mediaPlayerReady]);
 const effectivePlaybackRate = useMemo4(() => playbackRate * globalPlaybackRate, [playbackRate, globalPlaybackRate]);
 useEffect3(() => {
 const mediaPlayer = mediaPlayerRef.current;
@@ -3198,7 +3438,7 @@ var VideoForPreview = ({
 // src/video/video-for-rendering.tsx
 import {
 useContext as useContext5,
-useLayoutEffect as
+useLayoutEffect as useLayoutEffect3,
 useMemo as useMemo5,
 useRef as useRef3,
 useState as useState5
@@ -3257,7 +3497,7 @@ var VideoForRendering = ({
 const [replaceWithOffthreadVideo, setReplaceWithOffthreadVideo] = useState5(false);
 const audioEnabled = Internals15.useAudioEnabled();
 const videoEnabled = Internals15.useVideoEnabled();
-
+useLayoutEffect3(() => {
 if (!canvasRef.current) {
 return;
 }
@@ -3514,7 +3754,8 @@ var InnerVideo = ({
 volume,
 stack,
 toneFrequency,
-showInTimeline
+showInTimeline,
+debugOverlay
 }) => {
 const environment = useRemotionEnvironment4();
 if (typeof src !== "string") {
@@ -3575,7 +3816,8 @@ var InnerVideo = ({
 trimBefore: trimBeforeValue,
 stack: stack ?? null,
 disallowFallbackToOffthreadVideo,
-fallbackOffthreadVideoProps
+fallbackOffthreadVideoProps,
+debugOverlay: debugOverlay ?? false
 });
 };
 var Video = ({
@@ -3599,7 +3841,8 @@ var Video = ({
 trimBefore,
 volume,
 stack,
-toneFrequency
+toneFrequency,
+debugOverlay
 }) => {
 return /* @__PURE__ */ jsx6(InnerVideo, {
 audioStreamIndex: audioStreamIndex ?? 0,
@@ -3622,7 +3865,8 @@ var Video = ({
 trimBefore,
 volume: volume ?? 1,
 toneFrequency: toneFrequency ?? 1,
-stack
+stack,
+debugOverlay: debugOverlay ?? false
 });
 };
 Internals16.addSequenceStackTraces(Video);