@remotion/media 4.0.355 → 4.0.356
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/audio/audio-for-rendering.js +37 -3
- package/dist/audio/audio.js +1 -1
- package/dist/audio/props.d.ts +15 -0
- package/dist/audio-extraction/audio-iterator.d.ts +3 -2
- package/dist/audio-extraction/audio-iterator.js +13 -2
- package/dist/audio-extraction/audio-manager.d.ts +6 -5
- package/dist/audio-extraction/audio-manager.js +5 -3
- package/dist/audio-extraction/extract-audio.d.ts +3 -2
- package/dist/audio-extraction/extract-audio.js +11 -4
- package/dist/caches.d.ts +6 -5
- package/dist/convert-audiodata/apply-tonefrequency.d.ts +2 -0
- package/dist/convert-audiodata/apply-tonefrequency.js +44 -0
- package/dist/convert-audiodata/wsola.d.ts +13 -0
- package/dist/convert-audiodata/wsola.js +197 -0
- package/dist/esm/index.mjs +1297 -140
- package/dist/extract-frame-and-audio.d.ts +3 -2
- package/dist/extract-frame-and-audio.js +60 -26
- package/dist/get-sink-weak.d.ts +2 -7
- package/dist/index.d.ts +12 -3
- package/dist/index.js +11 -2
- package/dist/video/media-player.d.ts +70 -0
- package/dist/video/media-player.js +419 -0
- package/dist/video/props.d.ts +36 -18
- package/dist/video/timeout-utils.d.ts +2 -0
- package/dist/video/timeout-utils.js +18 -0
- package/dist/video/video-for-preview.d.ts +17 -0
- package/dist/video/video-for-preview.js +218 -0
- package/dist/video/video-for-rendering.d.ts +23 -2
- package/dist/video/video-for-rendering.js +47 -4
- package/dist/video/video.js +13 -14
- package/dist/video-extraction/extract-frame-via-broadcast-channel.d.ts +3 -2
- package/dist/video-extraction/extract-frame-via-broadcast-channel.js +53 -4
- package/dist/video-extraction/extract-frame.d.ts +2 -1
- package/dist/video-extraction/extract-frame.js +9 -3
- package/dist/video-extraction/get-frames-since-keyframe.d.ts +12 -7
- package/dist/video-extraction/get-frames-since-keyframe.js +70 -17
- package/package.json +3 -3
package/dist/esm/index.mjs
CHANGED
|
@@ -44,6 +44,7 @@ var SharedAudioContext = createContext2(null);
|
|
|
44
44
|
// src/audio/audio-for-rendering.tsx
|
|
45
45
|
import { useContext as useContext4, useLayoutEffect, useState as useState3 } from "react";
|
|
46
46
|
import {
|
|
47
|
+
Audio,
|
|
47
48
|
cancelRender as cancelRender2,
|
|
48
49
|
Internals as Internals6,
|
|
49
50
|
useCurrentFrame,
|
|
@@ -159,14 +160,26 @@ var makeAudioCache = () => {
|
|
|
159
160
|
|
|
160
161
|
// src/audio-extraction/audio-iterator.ts
|
|
161
162
|
var extraThreshold = 1.5;
|
|
163
|
+
var warned = {};
|
|
164
|
+
var warnAboutMatroskaOnce = (src, logLevel) => {
|
|
165
|
+
if (warned[src]) {
|
|
166
|
+
return;
|
|
167
|
+
}
|
|
168
|
+
warned[src] = true;
|
|
169
|
+
Internals.Log.warn({ logLevel, tag: "@remotion/media" }, `Audio from ${src} will need to be read from the beginning. https://www.remotion.dev/docs/media/support#matroska-limitation`);
|
|
170
|
+
};
|
|
162
171
|
var makeAudioIterator = ({
|
|
163
172
|
audioSampleSink,
|
|
164
173
|
isMatroska,
|
|
165
174
|
startTimestamp,
|
|
166
175
|
src,
|
|
167
|
-
actualMatroskaTimestamps
|
|
176
|
+
actualMatroskaTimestamps,
|
|
177
|
+
logLevel
|
|
168
178
|
}) => {
|
|
169
179
|
const sampleIterator = audioSampleSink.samples(isMatroska ? 0 : Math.max(0, startTimestamp - extraThreshold));
|
|
180
|
+
if (isMatroska) {
|
|
181
|
+
warnAboutMatroskaOnce(src, logLevel);
|
|
182
|
+
}
|
|
170
183
|
let fullDuration = null;
|
|
171
184
|
const cache = makeAudioCache();
|
|
172
185
|
let lastUsed = Date.now();
|
|
@@ -210,7 +223,7 @@ var makeAudioIterator = ({
|
|
|
210
223
|
}
|
|
211
224
|
return samples;
|
|
212
225
|
};
|
|
213
|
-
const logOpenFrames = (
|
|
226
|
+
const logOpenFrames = () => {
|
|
214
227
|
Internals.Log.verbose({ logLevel, tag: "@remotion/media" }, "Open audio samples for src", src, cache.getOpenTimestamps().map((t) => t.toFixed(3)).join(", "));
|
|
215
228
|
};
|
|
216
229
|
const getCacheStats = () => {
|
|
@@ -262,14 +275,16 @@ var makeAudioManager = () => {
|
|
|
262
275
|
src,
|
|
263
276
|
audioSampleSink,
|
|
264
277
|
isMatroska,
|
|
265
|
-
actualMatroskaTimestamps
|
|
278
|
+
actualMatroskaTimestamps,
|
|
279
|
+
logLevel
|
|
266
280
|
}) => {
|
|
267
281
|
const iterator = makeAudioIterator({
|
|
268
282
|
audioSampleSink,
|
|
269
283
|
isMatroska,
|
|
270
284
|
startTimestamp: timeInSeconds,
|
|
271
285
|
src,
|
|
272
|
-
actualMatroskaTimestamps
|
|
286
|
+
actualMatroskaTimestamps,
|
|
287
|
+
logLevel
|
|
273
288
|
});
|
|
274
289
|
iterators.push(iterator);
|
|
275
290
|
return iterator;
|
|
@@ -321,7 +336,8 @@ var makeAudioManager = () => {
|
|
|
321
336
|
timeInSeconds,
|
|
322
337
|
audioSampleSink,
|
|
323
338
|
isMatroska,
|
|
324
|
-
actualMatroskaTimestamps
|
|
339
|
+
actualMatroskaTimestamps,
|
|
340
|
+
logLevel
|
|
325
341
|
});
|
|
326
342
|
};
|
|
327
343
|
const getCacheStats = () => {
|
|
@@ -334,9 +350,9 @@ var makeAudioManager = () => {
|
|
|
334
350
|
}
|
|
335
351
|
return { count: totalCount, totalSize };
|
|
336
352
|
};
|
|
337
|
-
const logOpenFrames = (
|
|
353
|
+
const logOpenFrames = () => {
|
|
338
354
|
for (const iterator of iterators) {
|
|
339
|
-
iterator.logOpenFrames(
|
|
355
|
+
iterator.logOpenFrames();
|
|
340
356
|
}
|
|
341
357
|
};
|
|
342
358
|
return {
|
|
@@ -359,7 +375,8 @@ import {
|
|
|
359
375
|
Input,
|
|
360
376
|
MATROSKA,
|
|
361
377
|
UrlSource,
|
|
362
|
-
VideoSampleSink
|
|
378
|
+
VideoSampleSink,
|
|
379
|
+
WEBM
|
|
363
380
|
} from "mediabunny";
|
|
364
381
|
|
|
365
382
|
// src/video-extraction/keyframe-bank.ts
|
|
@@ -510,23 +527,78 @@ var rememberActualMatroskaTimestamps = (isMatroska) => {
|
|
|
510
527
|
};
|
|
511
528
|
|
|
512
529
|
// src/video-extraction/get-frames-since-keyframe.ts
|
|
530
|
+
var getRetryDelay = () => {
|
|
531
|
+
return null;
|
|
532
|
+
};
|
|
533
|
+
var getFormatOrNull = async (input) => {
|
|
534
|
+
try {
|
|
535
|
+
return await input.getFormat();
|
|
536
|
+
} catch {
|
|
537
|
+
return null;
|
|
538
|
+
}
|
|
539
|
+
};
|
|
513
540
|
var getSinks = async (src) => {
|
|
514
541
|
const input = new Input({
|
|
515
542
|
formats: ALL_FORMATS,
|
|
516
|
-
source: new UrlSource(src
|
|
543
|
+
source: new UrlSource(src, {
|
|
544
|
+
getRetryDelay
|
|
545
|
+
})
|
|
517
546
|
});
|
|
518
|
-
const format = await input
|
|
519
|
-
const
|
|
520
|
-
const
|
|
521
|
-
|
|
522
|
-
|
|
523
|
-
|
|
547
|
+
const format = await getFormatOrNull(input);
|
|
548
|
+
const isMatroska = format === MATROSKA || format === WEBM;
|
|
549
|
+
const getVideoSinks = async () => {
|
|
550
|
+
if (format === null) {
|
|
551
|
+
return "unknown-container-format";
|
|
552
|
+
}
|
|
553
|
+
const videoTrack = await input.getPrimaryVideoTrack();
|
|
554
|
+
if (!videoTrack) {
|
|
555
|
+
return "no-video-track";
|
|
556
|
+
}
|
|
557
|
+
const canDecode = await videoTrack.canDecode();
|
|
558
|
+
if (!canDecode) {
|
|
559
|
+
return "cannot-decode";
|
|
560
|
+
}
|
|
561
|
+
return {
|
|
524
562
|
sampleSink: new VideoSampleSink(videoTrack),
|
|
525
563
|
packetSink: new EncodedPacketSink(videoTrack)
|
|
526
|
-
}
|
|
527
|
-
|
|
564
|
+
};
|
|
565
|
+
};
|
|
566
|
+
let videoSinksPromise = null;
|
|
567
|
+
const getVideoSinksPromise = () => {
|
|
568
|
+
if (videoSinksPromise) {
|
|
569
|
+
return videoSinksPromise;
|
|
570
|
+
}
|
|
571
|
+
videoSinksPromise = getVideoSinks();
|
|
572
|
+
return videoSinksPromise;
|
|
573
|
+
};
|
|
574
|
+
const audioSinksPromise = {};
|
|
575
|
+
const getAudioSinks = async (index) => {
|
|
576
|
+
if (format === null) {
|
|
577
|
+
return "unknown-container-format";
|
|
578
|
+
}
|
|
579
|
+
const audioTracks = await input.getAudioTracks();
|
|
580
|
+
const audioTrack = audioTracks[index];
|
|
581
|
+
if (!audioTrack) {
|
|
582
|
+
return "no-audio-track";
|
|
583
|
+
}
|
|
584
|
+
const canDecode = await audioTrack.canDecode();
|
|
585
|
+
if (!canDecode) {
|
|
586
|
+
return "cannot-decode-audio";
|
|
587
|
+
}
|
|
588
|
+
return {
|
|
528
589
|
sampleSink: new AudioSampleSink(audioTrack)
|
|
529
|
-
}
|
|
590
|
+
};
|
|
591
|
+
};
|
|
592
|
+
const getAudioSinksPromise = (index) => {
|
|
593
|
+
if (audioSinksPromise[index]) {
|
|
594
|
+
return audioSinksPromise[index];
|
|
595
|
+
}
|
|
596
|
+
audioSinksPromise[index] = getAudioSinks(index);
|
|
597
|
+
return audioSinksPromise[index];
|
|
598
|
+
};
|
|
599
|
+
return new WeakRef({
|
|
600
|
+
getVideo: () => getVideoSinksPromise(),
|
|
601
|
+
getAudio: (index) => getAudioSinksPromise(index),
|
|
530
602
|
actualMatroskaTimestamps: rememberActualMatroskaTimestamps(isMatroska),
|
|
531
603
|
isMatroska,
|
|
532
604
|
getDuration: () => input.computeDuration()
|
|
@@ -1006,16 +1078,24 @@ var extractAudio = async ({
|
|
|
1006
1078
|
durationInSeconds,
|
|
1007
1079
|
logLevel,
|
|
1008
1080
|
loop,
|
|
1009
|
-
playbackRate
|
|
1081
|
+
playbackRate,
|
|
1082
|
+
audioStreamIndex
|
|
1010
1083
|
}) => {
|
|
1011
|
-
const {
|
|
1084
|
+
const { getAudio, actualMatroskaTimestamps, isMatroska, getDuration } = await getSinkWeak(src, logLevel);
|
|
1012
1085
|
let duration = null;
|
|
1013
1086
|
if (loop) {
|
|
1014
1087
|
duration = await getDuration();
|
|
1015
1088
|
}
|
|
1016
|
-
|
|
1089
|
+
const audio = await getAudio(audioStreamIndex);
|
|
1090
|
+
if (audio === "no-audio-track") {
|
|
1017
1091
|
return { data: null, durationInSeconds: null };
|
|
1018
1092
|
}
|
|
1093
|
+
if (audio === "cannot-decode-audio") {
|
|
1094
|
+
return "cannot-decode";
|
|
1095
|
+
}
|
|
1096
|
+
if (audio === "unknown-container-format") {
|
|
1097
|
+
return "unknown-container-format";
|
|
1098
|
+
}
|
|
1019
1099
|
const timeInSeconds = loop ? unloopedTimeInSeconds % duration : unloopedTimeInSeconds;
|
|
1020
1100
|
const sampleIterator = await audioManager.getIterator({
|
|
1021
1101
|
src,
|
|
@@ -1026,7 +1106,7 @@ var extractAudio = async ({
|
|
|
1026
1106
|
logLevel
|
|
1027
1107
|
});
|
|
1028
1108
|
const samples = await sampleIterator.getSamples(timeInSeconds, durationInSeconds);
|
|
1029
|
-
audioManager.logOpenFrames(
|
|
1109
|
+
audioManager.logOpenFrames();
|
|
1030
1110
|
const audioDataArray = [];
|
|
1031
1111
|
for (let i = 0;i < samples.length; i++) {
|
|
1032
1112
|
const sample = samples[i];
|
|
@@ -1082,11 +1162,17 @@ var extractFrame = async ({
|
|
|
1082
1162
|
loop
|
|
1083
1163
|
}) => {
|
|
1084
1164
|
const sink = await getSinkWeak(src, logLevel);
|
|
1085
|
-
const
|
|
1086
|
-
if (video ===
|
|
1165
|
+
const video = await sink.getVideo();
|
|
1166
|
+
if (video === "no-video-track") {
|
|
1087
1167
|
throw new Error(`No video track found for ${src}`);
|
|
1088
1168
|
}
|
|
1089
|
-
|
|
1169
|
+
if (video === "cannot-decode") {
|
|
1170
|
+
return "cannot-decode";
|
|
1171
|
+
}
|
|
1172
|
+
if (video === "unknown-container-format") {
|
|
1173
|
+
return "unknown-container-format";
|
|
1174
|
+
}
|
|
1175
|
+
const timeInSeconds = loop ? unloopedTimeinSeconds % await sink.getDuration() : unloopedTimeinSeconds;
|
|
1090
1176
|
const keyframeBank = await keyframeManager.requestKeyframeBank({
|
|
1091
1177
|
packetSink: video.packetSink,
|
|
1092
1178
|
videoSampleSink: video.sampleSink,
|
|
@@ -1107,29 +1193,57 @@ var extractFrameAndAudio = async ({
|
|
|
1107
1193
|
playbackRate,
|
|
1108
1194
|
includeAudio,
|
|
1109
1195
|
includeVideo,
|
|
1110
|
-
loop
|
|
1196
|
+
loop,
|
|
1197
|
+
audioStreamIndex
|
|
1111
1198
|
}) => {
|
|
1112
|
-
|
|
1113
|
-
|
|
1114
|
-
|
|
1115
|
-
|
|
1116
|
-
|
|
1117
|
-
|
|
1118
|
-
|
|
1119
|
-
|
|
1120
|
-
|
|
1121
|
-
|
|
1122
|
-
|
|
1123
|
-
|
|
1124
|
-
|
|
1125
|
-
|
|
1126
|
-
|
|
1127
|
-
|
|
1128
|
-
|
|
1129
|
-
|
|
1130
|
-
|
|
1131
|
-
|
|
1132
|
-
|
|
1199
|
+
try {
|
|
1200
|
+
const [frame, audio] = await Promise.all([
|
|
1201
|
+
includeVideo ? extractFrame({
|
|
1202
|
+
src,
|
|
1203
|
+
timeInSeconds,
|
|
1204
|
+
logLevel,
|
|
1205
|
+
loop
|
|
1206
|
+
}) : null,
|
|
1207
|
+
includeAudio ? extractAudio({
|
|
1208
|
+
src,
|
|
1209
|
+
timeInSeconds,
|
|
1210
|
+
durationInSeconds,
|
|
1211
|
+
logLevel,
|
|
1212
|
+
loop,
|
|
1213
|
+
playbackRate,
|
|
1214
|
+
audioStreamIndex
|
|
1215
|
+
}) : null
|
|
1216
|
+
]);
|
|
1217
|
+
if (frame === "cannot-decode") {
|
|
1218
|
+
return "cannot-decode";
|
|
1219
|
+
}
|
|
1220
|
+
if (frame === "unknown-container-format") {
|
|
1221
|
+
return "unknown-container-format";
|
|
1222
|
+
}
|
|
1223
|
+
if (audio === "unknown-container-format") {
|
|
1224
|
+
if (frame !== null) {
|
|
1225
|
+
frame?.close();
|
|
1226
|
+
}
|
|
1227
|
+
return "unknown-container-format";
|
|
1228
|
+
}
|
|
1229
|
+
if (audio === "cannot-decode") {
|
|
1230
|
+
if (frame !== null) {
|
|
1231
|
+
frame?.close();
|
|
1232
|
+
}
|
|
1233
|
+
return "cannot-decode";
|
|
1234
|
+
}
|
|
1235
|
+
return {
|
|
1236
|
+
frame: frame?.toVideoFrame() ?? null,
|
|
1237
|
+
audio: audio?.data ?? null,
|
|
1238
|
+
durationInSeconds: audio?.durationInSeconds ?? null
|
|
1239
|
+
};
|
|
1240
|
+
} catch (err) {
|
|
1241
|
+
const error = err;
|
|
1242
|
+
if (error.message.includes("Failed to fetch") || error.message.includes("Load failed") || error.message.includes("NetworkError when attempting to fetch resource")) {
|
|
1243
|
+
return "network-error";
|
|
1244
|
+
}
|
|
1245
|
+
throw err;
|
|
1246
|
+
}
|
|
1133
1247
|
};
|
|
1134
1248
|
|
|
1135
1249
|
// src/video-extraction/extract-frame-via-broadcast-channel.ts
|
|
@@ -1138,7 +1252,7 @@ if (window.remotion_broadcastChannel && window.remotion_isMainTab) {
|
|
|
1138
1252
|
const data = event.data;
|
|
1139
1253
|
if (data.type === "request") {
|
|
1140
1254
|
try {
|
|
1141
|
-
const
|
|
1255
|
+
const result = await extractFrameAndAudio({
|
|
1142
1256
|
src: data.src,
|
|
1143
1257
|
timeInSeconds: data.timeInSeconds,
|
|
1144
1258
|
logLevel: data.logLevel,
|
|
@@ -1146,8 +1260,34 @@ if (window.remotion_broadcastChannel && window.remotion_isMainTab) {
|
|
|
1146
1260
|
playbackRate: data.playbackRate,
|
|
1147
1261
|
includeAudio: data.includeAudio,
|
|
1148
1262
|
includeVideo: data.includeVideo,
|
|
1149
|
-
loop: data.loop
|
|
1263
|
+
loop: data.loop,
|
|
1264
|
+
audioStreamIndex: data.audioStreamIndex
|
|
1150
1265
|
});
|
|
1266
|
+
if (result === "cannot-decode") {
|
|
1267
|
+
const cannotDecodeResponse = {
|
|
1268
|
+
type: "response-cannot-decode",
|
|
1269
|
+
id: data.id
|
|
1270
|
+
};
|
|
1271
|
+
window.remotion_broadcastChannel.postMessage(cannotDecodeResponse);
|
|
1272
|
+
return;
|
|
1273
|
+
}
|
|
1274
|
+
if (result === "network-error") {
|
|
1275
|
+
const networkErrorResponse = {
|
|
1276
|
+
type: "response-network-error",
|
|
1277
|
+
id: data.id
|
|
1278
|
+
};
|
|
1279
|
+
window.remotion_broadcastChannel.postMessage(networkErrorResponse);
|
|
1280
|
+
return;
|
|
1281
|
+
}
|
|
1282
|
+
if (result === "unknown-container-format") {
|
|
1283
|
+
const unknownContainerFormatResponse = {
|
|
1284
|
+
type: "response-unknown-container-format",
|
|
1285
|
+
id: data.id
|
|
1286
|
+
};
|
|
1287
|
+
window.remotion_broadcastChannel.postMessage(unknownContainerFormatResponse);
|
|
1288
|
+
return;
|
|
1289
|
+
}
|
|
1290
|
+
const { frame, audio, durationInSeconds } = result;
|
|
1151
1291
|
const videoFrame = frame;
|
|
1152
1292
|
const imageBitmap = videoFrame ? await createImageBitmap(videoFrame) : null;
|
|
1153
1293
|
if (videoFrame) {
|
|
@@ -1184,7 +1324,8 @@ var extractFrameViaBroadcastChannel = ({
|
|
|
1184
1324
|
includeAudio,
|
|
1185
1325
|
includeVideo,
|
|
1186
1326
|
isClientSideRendering,
|
|
1187
|
-
loop
|
|
1327
|
+
loop,
|
|
1328
|
+
audioStreamIndex
|
|
1188
1329
|
}) => {
|
|
1189
1330
|
if (isClientSideRendering || window.remotion_isMainTab) {
|
|
1190
1331
|
return extractFrameAndAudio({
|
|
@@ -1195,7 +1336,8 @@ var extractFrameViaBroadcastChannel = ({
|
|
|
1195
1336
|
playbackRate,
|
|
1196
1337
|
includeAudio,
|
|
1197
1338
|
includeVideo,
|
|
1198
|
-
loop
|
|
1339
|
+
loop,
|
|
1340
|
+
audioStreamIndex
|
|
1199
1341
|
});
|
|
1200
1342
|
}
|
|
1201
1343
|
const requestId = crypto.randomUUID();
|
|
@@ -1205,17 +1347,39 @@ var extractFrameViaBroadcastChannel = ({
|
|
|
1205
1347
|
if (!data) {
|
|
1206
1348
|
return;
|
|
1207
1349
|
}
|
|
1208
|
-
if (data.
|
|
1350
|
+
if (data.id !== requestId) {
|
|
1351
|
+
return;
|
|
1352
|
+
}
|
|
1353
|
+
if (data.type === "response-success") {
|
|
1209
1354
|
resolve({
|
|
1210
1355
|
frame: data.frame ? data.frame : null,
|
|
1211
1356
|
audio: data.audio ? data.audio : null,
|
|
1212
1357
|
durationInSeconds: data.durationInSeconds ? data.durationInSeconds : null
|
|
1213
1358
|
});
|
|
1214
1359
|
window.remotion_broadcastChannel.removeEventListener("message", onMessage);
|
|
1215
|
-
|
|
1360
|
+
return;
|
|
1361
|
+
}
|
|
1362
|
+
if (data.type === "response-error") {
|
|
1216
1363
|
reject(data.errorStack);
|
|
1217
1364
|
window.remotion_broadcastChannel.removeEventListener("message", onMessage);
|
|
1365
|
+
return;
|
|
1366
|
+
}
|
|
1367
|
+
if (data.type === "response-cannot-decode") {
|
|
1368
|
+
resolve("cannot-decode");
|
|
1369
|
+
window.remotion_broadcastChannel.removeEventListener("message", onMessage);
|
|
1370
|
+
return;
|
|
1218
1371
|
}
|
|
1372
|
+
if (data.type === "response-network-error") {
|
|
1373
|
+
resolve("network-error");
|
|
1374
|
+
window.remotion_broadcastChannel.removeEventListener("message", onMessage);
|
|
1375
|
+
return;
|
|
1376
|
+
}
|
|
1377
|
+
if (data.type === "response-unknown-container-format") {
|
|
1378
|
+
resolve("unknown-container-format");
|
|
1379
|
+
window.remotion_broadcastChannel.removeEventListener("message", onMessage);
|
|
1380
|
+
return;
|
|
1381
|
+
}
|
|
1382
|
+
throw new Error(`Invalid message: ${JSON.stringify(data)}`);
|
|
1219
1383
|
};
|
|
1220
1384
|
window.remotion_broadcastChannel.addEventListener("message", onMessage);
|
|
1221
1385
|
});
|
|
@@ -1229,7 +1393,8 @@ var extractFrameViaBroadcastChannel = ({
|
|
|
1229
1393
|
playbackRate,
|
|
1230
1394
|
includeAudio,
|
|
1231
1395
|
includeVideo,
|
|
1232
|
-
loop
|
|
1396
|
+
loop,
|
|
1397
|
+
audioStreamIndex
|
|
1233
1398
|
};
|
|
1234
1399
|
window.remotion_broadcastChannel.postMessage(request);
|
|
1235
1400
|
let timeoutId;
|
|
@@ -1247,6 +1412,7 @@ var extractFrameViaBroadcastChannel = ({
|
|
|
1247
1412
|
};
|
|
1248
1413
|
|
|
1249
1414
|
// src/audio/audio-for-rendering.tsx
|
|
1415
|
+
import { jsx as jsx2 } from "react/jsx-runtime";
|
|
1250
1416
|
var AudioForRendering = ({
|
|
1251
1417
|
volume: volumeProp,
|
|
1252
1418
|
playbackRate,
|
|
@@ -1256,7 +1422,13 @@ var AudioForRendering = ({
|
|
|
1256
1422
|
delayRenderRetries,
|
|
1257
1423
|
delayRenderTimeoutInMilliseconds,
|
|
1258
1424
|
logLevel = window.remotion_logLevel,
|
|
1259
|
-
loop
|
|
1425
|
+
loop,
|
|
1426
|
+
fallbackHtml5AudioProps,
|
|
1427
|
+
audioStreamIndex,
|
|
1428
|
+
showInTimeline,
|
|
1429
|
+
style,
|
|
1430
|
+
name,
|
|
1431
|
+
disallowFallbackToHtml5Audio
|
|
1260
1432
|
}) => {
|
|
1261
1433
|
const frame = useCurrentFrame();
|
|
1262
1434
|
const absoluteFrame = Internals6.useTimelinePosition();
|
|
@@ -1273,6 +1445,7 @@ var AudioForRendering = ({
|
|
|
1273
1445
|
}
|
|
1274
1446
|
const { fps } = videoConfig;
|
|
1275
1447
|
const { delayRender, continueRender } = useDelayRender();
|
|
1448
|
+
const [replaceWithHtml5Audio, setReplaceWithHtml5Audio] = useState3(false);
|
|
1276
1449
|
useLayoutEffect(() => {
|
|
1277
1450
|
const actualFps = playbackRate ? fps / playbackRate : fps;
|
|
1278
1451
|
const timestamp = frame / actualFps;
|
|
@@ -1299,8 +1472,34 @@ var AudioForRendering = ({
|
|
|
1299
1472
|
includeAudio: shouldRenderAudio,
|
|
1300
1473
|
includeVideo: false,
|
|
1301
1474
|
isClientSideRendering: environment.isClientSideRendering,
|
|
1302
|
-
loop: loop ?? false
|
|
1303
|
-
|
|
1475
|
+
loop: loop ?? false,
|
|
1476
|
+
audioStreamIndex: audioStreamIndex ?? 0
|
|
1477
|
+
}).then((result) => {
|
|
1478
|
+
if (result === "unknown-container-format") {
|
|
1479
|
+
if (disallowFallbackToHtml5Audio) {
|
|
1480
|
+
cancelRender2(new Error(`Unknown container format ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
|
|
1481
|
+
}
|
|
1482
|
+
Internals6.Log.warn({ logLevel, tag: "@remotion/media" }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Audio>`);
|
|
1483
|
+
setReplaceWithHtml5Audio(true);
|
|
1484
|
+
return;
|
|
1485
|
+
}
|
|
1486
|
+
if (result === "cannot-decode") {
|
|
1487
|
+
if (disallowFallbackToHtml5Audio) {
|
|
1488
|
+
cancelRender2(new Error(`Cannot decode ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
|
|
1489
|
+
}
|
|
1490
|
+
Internals6.Log.warn({ logLevel, tag: "@remotion/media" }, `Cannot decode ${src}, falling back to <Audio>`);
|
|
1491
|
+
setReplaceWithHtml5Audio(true);
|
|
1492
|
+
return;
|
|
1493
|
+
}
|
|
1494
|
+
if (result === "network-error") {
|
|
1495
|
+
if (disallowFallbackToHtml5Audio) {
|
|
1496
|
+
cancelRender2(new Error(`Cannot decode ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
|
|
1497
|
+
}
|
|
1498
|
+
Internals6.Log.warn({ logLevel, tag: "@remotion/media" }, `Network error fetching ${src}, falling back to <Audio>`);
|
|
1499
|
+
setReplaceWithHtml5Audio(true);
|
|
1500
|
+
return;
|
|
1501
|
+
}
|
|
1502
|
+
const { audio, durationInSeconds: assetDurationInSeconds } = result;
|
|
1304
1503
|
const volumePropsFrame = frameForVolumeProp({
|
|
1305
1504
|
behavior: loopVolumeCurveBehavior ?? "repeat",
|
|
1306
1505
|
loop: loop ?? false,
|
|
@@ -1342,6 +1541,7 @@ var AudioForRendering = ({
|
|
|
1342
1541
|
delayRender,
|
|
1343
1542
|
delayRenderRetries,
|
|
1344
1543
|
delayRenderTimeoutInMilliseconds,
|
|
1544
|
+
disallowFallbackToHtml5Audio,
|
|
1345
1545
|
environment.isClientSideRendering,
|
|
1346
1546
|
fps,
|
|
1347
1547
|
frame,
|
|
@@ -1355,13 +1555,34 @@ var AudioForRendering = ({
|
|
|
1355
1555
|
src,
|
|
1356
1556
|
startsAt,
|
|
1357
1557
|
unregisterRenderAsset,
|
|
1358
|
-
volumeProp
|
|
1558
|
+
volumeProp,
|
|
1559
|
+
audioStreamIndex
|
|
1359
1560
|
]);
|
|
1561
|
+
if (replaceWithHtml5Audio) {
|
|
1562
|
+
return /* @__PURE__ */ jsx2(Audio, {
|
|
1563
|
+
src,
|
|
1564
|
+
playbackRate,
|
|
1565
|
+
muted,
|
|
1566
|
+
loop,
|
|
1567
|
+
volume: volumeProp,
|
|
1568
|
+
delayRenderRetries,
|
|
1569
|
+
delayRenderTimeoutInMilliseconds,
|
|
1570
|
+
style,
|
|
1571
|
+
loopVolumeCurveBehavior,
|
|
1572
|
+
audioStreamIndex,
|
|
1573
|
+
useWebAudioApi: fallbackHtml5AudioProps?.useWebAudioApi,
|
|
1574
|
+
onError: fallbackHtml5AudioProps?.onError,
|
|
1575
|
+
toneFrequency: fallbackHtml5AudioProps?.toneFrequency,
|
|
1576
|
+
acceptableTimeShiftInSeconds: fallbackHtml5AudioProps?.acceptableTimeShiftInSeconds,
|
|
1577
|
+
name,
|
|
1578
|
+
showInTimeline
|
|
1579
|
+
});
|
|
1580
|
+
}
|
|
1360
1581
|
return null;
|
|
1361
1582
|
};
|
|
1362
1583
|
|
|
1363
1584
|
// src/audio/audio.tsx
|
|
1364
|
-
import { jsx as
|
|
1585
|
+
import { jsx as jsx3 } from "react/jsx-runtime";
|
|
1365
1586
|
var {
|
|
1366
1587
|
validateMediaTrimProps,
|
|
1367
1588
|
resolveTrimProps,
|
|
@@ -1369,7 +1590,7 @@ var {
|
|
|
1369
1590
|
AudioForPreview
|
|
1370
1591
|
} = Internals7;
|
|
1371
1592
|
var onRemotionError = (_e) => {};
|
|
1372
|
-
var
|
|
1593
|
+
var Audio2 = (props) => {
|
|
1373
1594
|
const audioContext = useContext5(SharedAudioContext);
|
|
1374
1595
|
const {
|
|
1375
1596
|
trimBefore,
|
|
@@ -1415,21 +1636,21 @@ var Audio = (props) => {
|
|
|
1415
1636
|
}
|
|
1416
1637
|
}, [loop]);
|
|
1417
1638
|
if (typeof trimBeforeValue !== "undefined" || typeof trimAfterValue !== "undefined") {
|
|
1418
|
-
return /* @__PURE__ */
|
|
1639
|
+
return /* @__PURE__ */ jsx3(Sequence, {
|
|
1419
1640
|
layout: "none",
|
|
1420
1641
|
from: 0 - (trimBeforeValue ?? 0),
|
|
1421
1642
|
showInTimeline: false,
|
|
1422
1643
|
durationInFrames: trimAfterValue,
|
|
1423
1644
|
name,
|
|
1424
|
-
children: /* @__PURE__ */
|
|
1645
|
+
children: /* @__PURE__ */ jsx3(Audio2, {
|
|
1425
1646
|
pauseWhenBuffering: pauseWhenBuffering ?? false,
|
|
1426
1647
|
...otherProps
|
|
1427
1648
|
})
|
|
1428
1649
|
});
|
|
1429
1650
|
}
|
|
1430
|
-
validateMediaProps(props, "
|
|
1651
|
+
validateMediaProps({ playbackRate: props.playbackRate, volume: props.volume }, "Audio");
|
|
1431
1652
|
if (environment.isRendering) {
|
|
1432
|
-
return /* @__PURE__ */
|
|
1653
|
+
return /* @__PURE__ */ jsx3(AudioForRendering, {
|
|
1433
1654
|
...otherProps
|
|
1434
1655
|
});
|
|
1435
1656
|
}
|
|
@@ -1438,7 +1659,7 @@ var Audio = (props) => {
|
|
|
1438
1659
|
delayRenderTimeoutInMilliseconds,
|
|
1439
1660
|
...propsForPreview
|
|
1440
1661
|
} = otherProps;
|
|
1441
|
-
return /* @__PURE__ */
|
|
1662
|
+
return /* @__PURE__ */ jsx3(AudioForPreview, {
|
|
1442
1663
|
_remotionInternalNativeLoopPassed: props._remotionInternalNativeLoopPassed ?? false,
|
|
1443
1664
|
_remotionInternalStack: stack ?? null,
|
|
1444
1665
|
shouldPreMountAudioTags: audioContext !== null && audioContext.numberOfAudioTags > 0,
|
|
@@ -1450,27 +1671,796 @@ var Audio = (props) => {
|
|
|
1450
1671
|
showInTimeline: showInTimeline ?? true
|
|
1451
1672
|
});
|
|
1452
1673
|
};
|
|
1674
|
+
|
|
1453
1675
|
// src/video/video.tsx
|
|
1454
|
-
import {
|
|
1455
|
-
|
|
1676
|
+
import { Internals as Internals11, Sequence as Sequence2, useRemotionEnvironment as useRemotionEnvironment5 } from "remotion";
|
|
1677
|
+
|
|
1678
|
+
// src/video/video-for-preview.tsx
|
|
1679
|
+
import { ALL_FORMATS as ALL_FORMATS3, Input as Input3, UrlSource as UrlSource3 } from "mediabunny";
|
|
1680
|
+
import { useContext as useContext6, useEffect as useEffect2, useMemo as useMemo3, useRef as useRef2, useState as useState4 } from "react";
|
|
1681
|
+
import {
|
|
1682
|
+
Internals as Internals9,
|
|
1683
|
+
Loop,
|
|
1684
|
+
useBufferState,
|
|
1685
|
+
useCurrentFrame as useCurrentFrame2,
|
|
1686
|
+
useVideoConfig
|
|
1687
|
+
} from "remotion";
|
|
1688
|
+
|
|
1689
|
+
// src/video/media-player.ts
|
|
1690
|
+
import {
|
|
1691
|
+
ALL_FORMATS as ALL_FORMATS2,
|
|
1692
|
+
AudioBufferSink,
|
|
1693
|
+
CanvasSink,
|
|
1694
|
+
Input as Input2,
|
|
1695
|
+
UrlSource as UrlSource2
|
|
1696
|
+
} from "mediabunny";
|
|
1697
|
+
import { Internals as Internals8 } from "remotion";
|
|
1698
|
+
|
|
1699
|
+
// src/video/timeout-utils.ts
|
|
1700
|
+
var sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
|
|
1701
|
+
function withTimeout(promise, timeoutMs, errorMessage = "Operation timed out") {
|
|
1702
|
+
let timeoutId = null;
|
|
1703
|
+
const timeoutPromise = new Promise((_, reject) => {
|
|
1704
|
+
timeoutId = window.setTimeout(() => {
|
|
1705
|
+
reject(new Error(errorMessage));
|
|
1706
|
+
}, timeoutMs);
|
|
1707
|
+
});
|
|
1708
|
+
return Promise.race([
|
|
1709
|
+
promise.finally(() => {
|
|
1710
|
+
if (timeoutId) {
|
|
1711
|
+
clearTimeout(timeoutId);
|
|
1712
|
+
}
|
|
1713
|
+
}),
|
|
1714
|
+
timeoutPromise
|
|
1715
|
+
]);
|
|
1716
|
+
}
|
|
1717
|
+
|
|
1718
|
+
// src/video/media-player.ts
|
|
1719
|
+
var SEEK_THRESHOLD = 0.05;
|
|
1720
|
+
var AUDIO_BUFFER_TOLERANCE_THRESHOLD = 0.1;
|
|
1721
|
+
|
|
1722
|
+
class MediaPlayer {
|
|
1723
|
+
canvas;
|
|
1724
|
+
context;
|
|
1725
|
+
src;
|
|
1726
|
+
logLevel;
|
|
1727
|
+
playbackRate;
|
|
1728
|
+
canvasSink = null;
|
|
1729
|
+
videoFrameIterator = null;
|
|
1730
|
+
nextFrame = null;
|
|
1731
|
+
audioSink = null;
|
|
1732
|
+
audioBufferIterator = null;
|
|
1733
|
+
queuedAudioNodes = new Set;
|
|
1734
|
+
gainNode = null;
|
|
1735
|
+
sharedAudioContext;
|
|
1736
|
+
audioSyncAnchor = 0;
|
|
1737
|
+
playing = false;
|
|
1738
|
+
muted = false;
|
|
1739
|
+
animationFrameId = null;
|
|
1740
|
+
videoAsyncId = 0;
|
|
1741
|
+
initialized = false;
|
|
1742
|
+
totalDuration = 0;
|
|
1743
|
+
isBuffering = false;
|
|
1744
|
+
onBufferingChangeCallback;
|
|
1745
|
+
audioBufferHealth = 0;
|
|
1746
|
+
audioIteratorStarted = false;
|
|
1747
|
+
HEALTHY_BUFER_THRESHOLD_SECONDS = 1;
|
|
1748
|
+
onVideoFrameCallback;
|
|
1749
|
+
constructor({
|
|
1750
|
+
canvas,
|
|
1751
|
+
src,
|
|
1752
|
+
logLevel,
|
|
1753
|
+
sharedAudioContext
|
|
1754
|
+
}) {
|
|
1755
|
+
this.canvas = canvas;
|
|
1756
|
+
this.src = src;
|
|
1757
|
+
this.logLevel = logLevel ?? "info";
|
|
1758
|
+
this.sharedAudioContext = sharedAudioContext;
|
|
1759
|
+
this.playbackRate = 1;
|
|
1760
|
+
const context = canvas.getContext("2d", {
|
|
1761
|
+
alpha: false,
|
|
1762
|
+
desynchronized: true
|
|
1763
|
+
});
|
|
1764
|
+
if (!context) {
|
|
1765
|
+
throw new Error("Could not get 2D context from canvas");
|
|
1766
|
+
}
|
|
1767
|
+
this.context = context;
|
|
1768
|
+
}
|
|
1769
|
+
input = null;
|
|
1770
|
+
isReady() {
|
|
1771
|
+
return this.initialized && Boolean(this.sharedAudioContext);
|
|
1772
|
+
}
|
|
1773
|
+
hasAudio() {
|
|
1774
|
+
return Boolean(this.audioSink && this.sharedAudioContext && this.gainNode);
|
|
1775
|
+
}
|
|
1776
|
+
isCurrentlyBuffering() {
|
|
1777
|
+
return this.isBuffering && Boolean(this.bufferingStartedAtMs);
|
|
1778
|
+
}
|
|
1779
|
+
async initialize(startTime = 0) {
|
|
1780
|
+
try {
|
|
1781
|
+
const urlSource = new UrlSource2(this.src);
|
|
1782
|
+
const input = new Input2({
|
|
1783
|
+
source: urlSource,
|
|
1784
|
+
formats: ALL_FORMATS2
|
|
1785
|
+
});
|
|
1786
|
+
this.input = input;
|
|
1787
|
+
this.totalDuration = await input.computeDuration();
|
|
1788
|
+
const videoTrack = await input.getPrimaryVideoTrack();
|
|
1789
|
+
const audioTrack = await input.getPrimaryAudioTrack();
|
|
1790
|
+
if (!videoTrack && !audioTrack) {
|
|
1791
|
+
throw new Error(`No video or audio track found for ${this.src}`);
|
|
1792
|
+
}
|
|
1793
|
+
if (videoTrack) {
|
|
1794
|
+
this.canvasSink = new CanvasSink(videoTrack, {
|
|
1795
|
+
poolSize: 2,
|
|
1796
|
+
fit: "contain"
|
|
1797
|
+
});
|
|
1798
|
+
this.canvas.width = videoTrack.displayWidth;
|
|
1799
|
+
this.canvas.height = videoTrack.displayHeight;
|
|
1800
|
+
}
|
|
1801
|
+
if (audioTrack && this.sharedAudioContext) {
|
|
1802
|
+
this.audioSink = new AudioBufferSink(audioTrack);
|
|
1803
|
+
this.gainNode = this.sharedAudioContext.createGain();
|
|
1804
|
+
this.gainNode.connect(this.sharedAudioContext.destination);
|
|
1805
|
+
}
|
|
1806
|
+
if (this.sharedAudioContext) {
|
|
1807
|
+
this.audioSyncAnchor = this.sharedAudioContext.currentTime - startTime;
|
|
1808
|
+
}
|
|
1809
|
+
this.initialized = true;
|
|
1810
|
+
const mediaTime = startTime * this.playbackRate;
|
|
1811
|
+
await this.startAudioIterator(mediaTime);
|
|
1812
|
+
await this.startVideoIterator(mediaTime);
|
|
1813
|
+
this.startRenderLoop();
|
|
1814
|
+
} catch (error) {
|
|
1815
|
+
Internals8.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to initialize", error);
|
|
1816
|
+
throw error;
|
|
1817
|
+
}
|
|
1818
|
+
}
|
|
1819
|
+
cleanupAudioQueue() {
|
|
1820
|
+
for (const node of this.queuedAudioNodes) {
|
|
1821
|
+
node.stop();
|
|
1822
|
+
}
|
|
1823
|
+
this.queuedAudioNodes.clear();
|
|
1824
|
+
}
|
|
1825
|
+
async cleanAudioIteratorAndNodes() {
|
|
1826
|
+
await this.audioBufferIterator?.return();
|
|
1827
|
+
this.audioBufferIterator = null;
|
|
1828
|
+
this.audioIteratorStarted = false;
|
|
1829
|
+
this.audioBufferHealth = 0;
|
|
1830
|
+
this.cleanupAudioQueue();
|
|
1831
|
+
}
|
|
1832
|
+
async seekTo(time) {
|
|
1833
|
+
if (!this.isReady())
|
|
1834
|
+
return;
|
|
1835
|
+
const newTime = Math.max(0, Math.min(time, this.totalDuration));
|
|
1836
|
+
const currentPlaybackTime = this.getPlaybackTime();
|
|
1837
|
+
const isSignificantSeek = Math.abs(newTime - currentPlaybackTime) > SEEK_THRESHOLD;
|
|
1838
|
+
if (isSignificantSeek) {
|
|
1839
|
+
this.nextFrame = null;
|
|
1840
|
+
this.audioSyncAnchor = this.sharedAudioContext.currentTime - newTime;
|
|
1841
|
+
if (this.audioSink) {
|
|
1842
|
+
await this.cleanAudioIteratorAndNodes();
|
|
1843
|
+
}
|
|
1844
|
+
const mediaTime = newTime * this.playbackRate;
|
|
1845
|
+
await this.startAudioIterator(mediaTime);
|
|
1846
|
+
await this.startVideoIterator(mediaTime);
|
|
1847
|
+
}
|
|
1848
|
+
if (!this.playing) {
|
|
1849
|
+
this.render();
|
|
1850
|
+
}
|
|
1851
|
+
}
|
|
1852
|
+
async play() {
|
|
1853
|
+
if (!this.isReady())
|
|
1854
|
+
return;
|
|
1855
|
+
if (!this.playing) {
|
|
1856
|
+
if (this.sharedAudioContext.state === "suspended") {
|
|
1857
|
+
await this.sharedAudioContext.resume();
|
|
1858
|
+
}
|
|
1859
|
+
this.playing = true;
|
|
1860
|
+
this.startRenderLoop();
|
|
1861
|
+
}
|
|
1862
|
+
}
|
|
1863
|
+
pause() {
|
|
1864
|
+
this.playing = false;
|
|
1865
|
+
this.cleanupAudioQueue();
|
|
1866
|
+
this.stopRenderLoop();
|
|
1867
|
+
}
|
|
1868
|
+
setMuted(muted) {
|
|
1869
|
+
this.muted = muted;
|
|
1870
|
+
if (muted) {
|
|
1871
|
+
this.cleanupAudioQueue();
|
|
1872
|
+
}
|
|
1873
|
+
}
|
|
1874
|
+
setVolume(volume) {
|
|
1875
|
+
if (!this.gainNode) {
|
|
1876
|
+
return;
|
|
1877
|
+
}
|
|
1878
|
+
const appliedVolume = Math.max(0, volume);
|
|
1879
|
+
this.gainNode.gain.value = appliedVolume;
|
|
1880
|
+
}
|
|
1881
|
+
async setPlaybackRate(rate) {
|
|
1882
|
+
if (this.playbackRate === rate)
|
|
1883
|
+
return;
|
|
1884
|
+
this.playbackRate = rate;
|
|
1885
|
+
if (this.hasAudio() && this.playing) {
|
|
1886
|
+
const currentPlaybackTime = this.getPlaybackTime();
|
|
1887
|
+
const mediaTime = currentPlaybackTime * rate;
|
|
1888
|
+
await this.cleanAudioIteratorAndNodes();
|
|
1889
|
+
await this.startAudioIterator(mediaTime);
|
|
1890
|
+
}
|
|
1891
|
+
}
|
|
1892
|
+
dispose() {
|
|
1893
|
+
this.input?.dispose();
|
|
1894
|
+
this.stopRenderLoop();
|
|
1895
|
+
this.videoFrameIterator?.return();
|
|
1896
|
+
this.cleanAudioIteratorAndNodes();
|
|
1897
|
+
this.videoAsyncId++;
|
|
1898
|
+
}
|
|
1899
|
+
getPlaybackTime() {
|
|
1900
|
+
return this.sharedAudioContext.currentTime - this.audioSyncAnchor;
|
|
1901
|
+
}
|
|
1902
|
+
getAdjustedTimestamp(mediaTimestamp) {
|
|
1903
|
+
return mediaTimestamp / this.playbackRate;
|
|
1904
|
+
}
|
|
1905
|
+
scheduleAudioChunk(buffer, mediaTimestamp) {
|
|
1906
|
+
const adjustedTimestamp = this.getAdjustedTimestamp(mediaTimestamp);
|
|
1907
|
+
const targetTime = adjustedTimestamp + this.audioSyncAnchor;
|
|
1908
|
+
const delay = targetTime - this.sharedAudioContext.currentTime;
|
|
1909
|
+
const node = this.sharedAudioContext.createBufferSource();
|
|
1910
|
+
node.buffer = buffer;
|
|
1911
|
+
node.playbackRate.value = this.playbackRate;
|
|
1912
|
+
node.connect(this.gainNode);
|
|
1913
|
+
if (delay >= 0) {
|
|
1914
|
+
node.start(targetTime);
|
|
1915
|
+
} else {
|
|
1916
|
+
node.start(this.sharedAudioContext.currentTime, -delay);
|
|
1917
|
+
}
|
|
1918
|
+
this.queuedAudioNodes.add(node);
|
|
1919
|
+
node.onended = () => this.queuedAudioNodes.delete(node);
|
|
1920
|
+
}
|
|
1921
|
+
onBufferingChange(callback) {
|
|
1922
|
+
this.onBufferingChangeCallback = callback;
|
|
1923
|
+
}
|
|
1924
|
+
onVideoFrame(callback) {
|
|
1925
|
+
this.onVideoFrameCallback = callback;
|
|
1926
|
+
if (this.initialized && callback) {
|
|
1927
|
+
callback(this.canvas);
|
|
1928
|
+
}
|
|
1929
|
+
}
|
|
1930
|
+
canRenderVideo() {
|
|
1931
|
+
return !this.hasAudio() || this.audioIteratorStarted && this.audioBufferHealth >= this.HEALTHY_BUFER_THRESHOLD_SECONDS;
|
|
1932
|
+
}
|
|
1933
|
+
startRenderLoop() {
|
|
1934
|
+
if (this.animationFrameId !== null) {
|
|
1935
|
+
return;
|
|
1936
|
+
}
|
|
1937
|
+
this.render();
|
|
1938
|
+
}
|
|
1939
|
+
stopRenderLoop() {
|
|
1940
|
+
if (this.animationFrameId !== null) {
|
|
1941
|
+
cancelAnimationFrame(this.animationFrameId);
|
|
1942
|
+
this.animationFrameId = null;
|
|
1943
|
+
}
|
|
1944
|
+
}
|
|
1945
|
+
// One tick of the render loop: escape a stuck buffering state, draw a
// frame if one is due, and re-schedule itself while playing.
render = () => {
  if (this.isBuffering) {
    this.maybeForceResumeFromBuffering();
  }
  if (this.shouldRenderFrame()) {
    this.drawCurrentFrame();
  }
  this.animationFrameId = this.playing ? requestAnimationFrame(this.render) : null;
};
|
|
1958
|
+
shouldRenderFrame() {
|
|
1959
|
+
return !this.isBuffering && this.canRenderVideo() && this.nextFrame !== null && this.getAdjustedTimestamp(this.nextFrame.timestamp) <= this.getPlaybackTime();
|
|
1960
|
+
}
|
|
1961
|
+
drawCurrentFrame() {
|
|
1962
|
+
this.context.drawImage(this.nextFrame.canvas, 0, 0);
|
|
1963
|
+
if (this.onVideoFrameCallback) {
|
|
1964
|
+
this.onVideoFrameCallback(this.canvas);
|
|
1965
|
+
}
|
|
1966
|
+
this.nextFrame = null;
|
|
1967
|
+
this.updateNextFrame();
|
|
1968
|
+
}
|
|
1969
|
+
// (Re)start the audio buffer iterator at `startFromSecond` (media time)
// and begin draining it in the background via runAudioIterator.
startAudioIterator = async (startFromSecond) => {
  if (!this.hasAudio()) {
    return;
  }
  // Close any previous iterator and reset health tracking before restart.
  await this.audioBufferIterator?.return();
  this.audioIteratorStarted = false;
  this.audioBufferHealth = 0;
  try {
    this.audioBufferIterator = this.audioSink.buffers(startFromSecond);
    this.runAudioIterator(startFromSecond);
  } catch (error) {
    Internals8.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to start audio iterator", error);
  }
};
|
|
1982
|
+
// (Re)start the canvas iterator at `timeToSeek` (media time), immediately
// draw the first decoded frame, and keep the second one buffered as
// `nextFrame` for the render loop. Uses `videoAsyncId` to detect and
// abandon stale restarts when another seek happens mid-await.
startVideoIterator = async (timeToSeek) => {
  if (!this.canvasSink) {
    return;
  }
  // Invalidate any concurrent invocation; only the latest id may commit.
  this.videoAsyncId++;
  const currentAsyncId = this.videoAsyncId;
  await this.videoFrameIterator?.return();
  this.videoFrameIterator = this.canvasSink.canvases(timeToSeek);
  try {
    const firstFrame = (await this.videoFrameIterator.next()).value ?? null;
    const secondFrame = (await this.videoFrameIterator.next()).value ?? null;
    // A newer seek started while we awaited — drop these frames.
    if (currentAsyncId !== this.videoAsyncId) {
      return;
    }
    if (firstFrame) {
      Internals8.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Drew initial frame ${firstFrame.timestamp.toFixed(3)}s`);
      this.context.drawImage(firstFrame.canvas, 0, 0);
      if (this.onVideoFrameCallback) {
        this.onVideoFrameCallback(this.canvas);
      }
    }
    this.nextFrame = secondFrame ?? null;
    if (secondFrame) {
      Internals8.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Buffered next frame ${secondFrame.timestamp.toFixed(3)}s`);
    }
  } catch (error) {
    Internals8.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to start video iterator", error);
  }
};
|
|
2011
|
+
// Advance the video iterator until we hold a frame whose display time is
// still in the future, discarding frames that have already passed.
updateNextFrame = async () => {
  if (!this.videoFrameIterator) {
    return;
  }
  try {
    let candidate = (await this.videoFrameIterator.next()).value ?? null;
    while (candidate) {
      if (this.getAdjustedTimestamp(candidate.timestamp) > this.getPlaybackTime()) {
        this.nextFrame = candidate;
        Internals8.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Buffered next frame ${candidate.timestamp.toFixed(3)}s`);
        break;
      }
      // Frame is already late — pull the next one.
      candidate = (await this.videoFrameIterator.next()).value ?? null;
    }
  } catch (error) {
    Internals8.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to update next frame", error);
  }
};
|
|
2033
|
+
bufferingStartedAtMs = null;
|
|
2034
|
+
minBufferingTimeoutMs = 500;
|
|
2035
|
+
setBufferingState(isBuffering) {
|
|
2036
|
+
if (this.isBuffering !== isBuffering) {
|
|
2037
|
+
this.isBuffering = isBuffering;
|
|
2038
|
+
if (isBuffering) {
|
|
2039
|
+
this.bufferingStartedAtMs = performance.now();
|
|
2040
|
+
this.onBufferingChangeCallback?.(true);
|
|
2041
|
+
} else {
|
|
2042
|
+
this.bufferingStartedAtMs = null;
|
|
2043
|
+
this.onBufferingChangeCallback?.(false);
|
|
2044
|
+
}
|
|
2045
|
+
}
|
|
2046
|
+
}
|
|
2047
|
+
maybeResumeFromBuffering(currentBufferDuration) {
|
|
2048
|
+
if (!this.isCurrentlyBuffering())
|
|
2049
|
+
return;
|
|
2050
|
+
const now = performance.now();
|
|
2051
|
+
const bufferingDuration = now - this.bufferingStartedAtMs;
|
|
2052
|
+
const minTimeElapsed = bufferingDuration >= this.minBufferingTimeoutMs;
|
|
2053
|
+
const bufferHealthy = currentBufferDuration >= this.HEALTHY_BUFER_THRESHOLD_SECONDS;
|
|
2054
|
+
if (minTimeElapsed && bufferHealthy) {
|
|
2055
|
+
Internals8.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Resuming from buffering after ${bufferingDuration}ms - buffer recovered`);
|
|
2056
|
+
this.setBufferingState(false);
|
|
2057
|
+
}
|
|
2058
|
+
}
|
|
2059
|
+
maybeForceResumeFromBuffering() {
|
|
2060
|
+
if (!this.isCurrentlyBuffering())
|
|
2061
|
+
return;
|
|
2062
|
+
const now = performance.now();
|
|
2063
|
+
const bufferingDuration = now - this.bufferingStartedAtMs;
|
|
2064
|
+
const forceTimeout = bufferingDuration > this.minBufferingTimeoutMs * 10;
|
|
2065
|
+
if (forceTimeout) {
|
|
2066
|
+
Internals8.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Force resuming from buffering after ${bufferingDuration}ms`);
|
|
2067
|
+
this.setBufferingState(false);
|
|
2068
|
+
}
|
|
2069
|
+
}
|
|
2070
|
+
// Drain the audio buffer iterator: schedule decoded chunks for playback,
// maintain buffer-health bookkeeping, and flag/clear the buffering state.
// `startFromSecond` is the media-time position the iterator was started at.
runAudioIterator = async (startFromSecond) => {
  if (!this.hasAudio() || !this.audioBufferIterator)
    return;
  try {
    let totalBufferDuration = 0;
    let isFirstBuffer = true;
    this.audioIteratorStarted = true;
    while (true) {
      const BUFFERING_TIMEOUT_MS = 50;
      let result;
      try {
        // If the next chunk does not arrive quickly, we are starved:
        // enter the buffering state and retry shortly.
        result = await withTimeout(this.audioBufferIterator.next(), BUFFERING_TIMEOUT_MS, "Iterator timeout");
      } catch {
        this.setBufferingState(true);
        await sleep(10);
        continue;
      }
      if (result.done || !result.value) {
        break;
      }
      const { buffer, timestamp, duration } = result.value;
      totalBufferDuration += duration;
      // Buffer health is tracked in playback-time seconds (rate-adjusted).
      this.audioBufferHealth = Math.max(0, totalBufferDuration / this.playbackRate);
      this.maybeResumeFromBuffering(totalBufferDuration / this.playbackRate);
      if (this.playing && !this.muted) {
        if (isFirstBuffer) {
          // Anchor the audio clock to the first chunk's timestamp.
          this.audioSyncAnchor = this.sharedAudioContext.currentTime - this.getAdjustedTimestamp(timestamp);
          isFirstBuffer = false;
        }
        // Skip chunks that lie entirely before the requested start point.
        if (timestamp < startFromSecond - AUDIO_BUFFER_TOLERANCE_THRESHOLD) {
          continue;
        }
        this.scheduleAudioChunk(buffer, timestamp);
      }
      // Throttle: don't run more than ~1s ahead of the playback clock;
      // poll once per animation frame until playback catches up.
      if (this.getAdjustedTimestamp(timestamp) - this.getPlaybackTime() >= 1) {
        await new Promise((resolve) => {
          const check = () => {
            if (this.getAdjustedTimestamp(timestamp) - this.getPlaybackTime() < 1) {
              resolve();
            } else {
              requestAnimationFrame(check);
            }
          };
          check();
        });
      }
    }
  } catch (error) {
    Internals8.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to run audio iterator", error);
  }
};
|
|
2121
|
+
}
|
|
2122
|
+
|
|
2123
|
+
// src/video/video-for-preview.tsx
|
|
2124
|
+
import { jsx as jsx4 } from "react/jsx-runtime";
|
|
2125
|
+
var {
|
|
2126
|
+
useUnsafeVideoConfig,
|
|
2127
|
+
Timeline,
|
|
2128
|
+
SharedAudioContext: SharedAudioContext2,
|
|
2129
|
+
useMediaMutedState,
|
|
2130
|
+
useMediaVolumeState,
|
|
2131
|
+
useFrameForVolumeProp,
|
|
2132
|
+
evaluateVolume,
|
|
2133
|
+
warnAboutTooHighVolume,
|
|
2134
|
+
usePreload
|
|
2135
|
+
} = Internals9;
|
|
2136
|
+
// Compute the duration (in frames, floored) of one loop iteration:
// the media duration, optionally cut at `endAt` and offset by `startFrom`,
// divided by the playback rate.
var calculateLoopDuration = ({
  endAt,
  mediaDuration,
  playbackRate,
  startFrom
}) => {
  const end = typeof endAt === "undefined" ? mediaDuration : endAt;
  const trimmed = typeof startFrom === "undefined" ? end : end - startFrom;
  return Math.floor(trimmed / playbackRate);
};
|
|
2152
|
+
// Canvas-based video preview component. Owns a MediaPlayer instance bound
// to a <canvas>, and forwards Remotion timeline state (playing, frame,
// volume, mute, playback rate) to it via effects. Buffering in the player
// blocks Remotion playback through useBufferState's delayPlayback handle.
var NewVideoForPreview = ({
  src,
  style,
  playbackRate,
  logLevel,
  className,
  muted,
  volume,
  loopVolumeCurveBehavior,
  onVideoFrame
}) => {
  const canvasRef = useRef2(null);
  const videoConfig = useUnsafeVideoConfig();
  const frame = useCurrentFrame2();
  const mediaPlayerRef = useRef2(null);
  const [mediaPlayerReady, setMediaPlayerReady] = useState4(false);
  const [playing] = Timeline.usePlayingState();
  const timelineContext = useContext6(Timeline.TimelineContext);
  const globalPlaybackRate = timelineContext.playbackRate;
  const sharedAudioContext = useContext6(SharedAudioContext2);
  const buffer = useBufferState();
  // Handle returned by buffer.delayPlayback() while the player buffers.
  const delayHandleRef = useRef2(null);
  const [mediaMuted] = useMediaMutedState();
  const [mediaVolume] = useMediaVolumeState();
  const volumePropFrame = useFrameForVolumeProp(loopVolumeCurveBehavior ?? "repeat");
  const userPreferredVolume = evaluateVolume({
    frame: volumePropFrame,
    volume,
    mediaVolume
  });
  warnAboutTooHighVolume(userPreferredVolume);
  if (!videoConfig) {
    throw new Error("No video config found");
  }
  if (!src) {
    throw new TypeError("No `src` was passed to <NewVideoForPreview>.");
  }
  const actualFps = videoConfig.fps / playbackRate;
  const currentTime = frame / actualFps;
  // Captured once on mount; the player is initialized at this position.
  const [initialTimestamp] = useState4(currentTime);
  const preloadedSrc = usePreload(src);
  // Create/dispose the MediaPlayer tied to canvas + audio context + src.
  useEffect2(() => {
    if (!canvasRef.current)
      return;
    if (!sharedAudioContext)
      return;
    if (!sharedAudioContext.audioContext)
      return;
    try {
      const player = new MediaPlayer({
        canvas: canvasRef.current,
        src: preloadedSrc,
        logLevel,
        sharedAudioContext: sharedAudioContext.audioContext
      });
      mediaPlayerRef.current = player;
      player.initialize(initialTimestamp).then(() => {
        setMediaPlayerReady(true);
        Internals9.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewVideoForPreview] MediaPlayer initialized successfully`);
      }).catch((error) => {
        Internals9.Log.error({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] Failed to initialize MediaPlayer", error);
      });
    } catch (error) {
      Internals9.Log.error({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] MediaPlayer initialization failed", error);
    }
    return () => {
      // Release any pending playback block before tearing down the player.
      if (delayHandleRef.current) {
        delayHandleRef.current.unblock();
        delayHandleRef.current = null;
      }
      if (mediaPlayerRef.current) {
        Internals9.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewVideoForPreview] Disposing MediaPlayer`);
        mediaPlayerRef.current.dispose();
        mediaPlayerRef.current = null;
      }
      setMediaPlayerReady(false);
    };
  }, [preloadedSrc, logLevel, sharedAudioContext, initialTimestamp]);
  const classNameValue = useMemo3(() => {
    return [Internals9.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals9.truthy).join(" ");
  }, [className]);
  // Mirror the timeline's play/pause state onto the player.
  useEffect2(() => {
    const mediaPlayer = mediaPlayerRef.current;
    if (!mediaPlayer)
      return;
    if (playing) {
      mediaPlayer.play().catch((error) => {
        Internals9.Log.error({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] Failed to play", error);
      });
    } else {
      mediaPlayer.pause();
    }
  }, [playing, logLevel, mediaPlayerReady]);
  // Keep the player's position in sync with the current frame.
  useEffect2(() => {
    const mediaPlayer = mediaPlayerRef.current;
    if (!mediaPlayer || !mediaPlayerReady)
      return;
    mediaPlayer.seekTo(currentTime);
    Internals9.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewVideoForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
  }, [currentTime, logLevel, mediaPlayerReady]);
  // Translate player buffering into Remotion playback delay/resume.
  useEffect2(() => {
    const mediaPlayer = mediaPlayerRef.current;
    if (!mediaPlayer || !mediaPlayerReady)
      return;
    mediaPlayer.onBufferingChange((newBufferingState) => {
      if (newBufferingState && !delayHandleRef.current) {
        delayHandleRef.current = buffer.delayPlayback();
        Internals9.Log.trace({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] MediaPlayer buffering - blocking Remotion playback");
      } else if (!newBufferingState && delayHandleRef.current) {
        delayHandleRef.current.unblock();
        delayHandleRef.current = null;
        Internals9.Log.trace({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] MediaPlayer unbuffering - unblocking Remotion playback");
      }
    });
  }, [mediaPlayerReady, buffer, logLevel]);
  // Muted when explicitly requested, globally muted, or volume is zero.
  const effectiveMuted = muted || mediaMuted || userPreferredVolume <= 0;
  useEffect2(() => {
    const mediaPlayer = mediaPlayerRef.current;
    if (!mediaPlayer || !mediaPlayerReady)
      return;
    mediaPlayer.setMuted(effectiveMuted);
  }, [effectiveMuted, mediaPlayerReady]);
  useEffect2(() => {
    const mediaPlayer = mediaPlayerRef.current;
    if (!mediaPlayer || !mediaPlayerReady) {
      return;
    }
    mediaPlayer.setVolume(userPreferredVolume);
  }, [userPreferredVolume, mediaPlayerReady, logLevel]);
  // Component playback rate compounded with the timeline's global rate.
  const effectivePlaybackRate = useMemo3(() => playbackRate * globalPlaybackRate, [playbackRate, globalPlaybackRate]);
  useEffect2(() => {
    const mediaPlayer = mediaPlayerRef.current;
    if (!mediaPlayer || !mediaPlayerReady) {
      return;
    }
    mediaPlayer.setPlaybackRate(effectivePlaybackRate).catch((error) => {
      Internals9.Log.error({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] Failed to set playback rate", error);
    });
  }, [effectivePlaybackRate, mediaPlayerReady, logLevel]);
  useEffect2(() => {
    const mediaPlayer = mediaPlayerRef.current;
    if (!mediaPlayer || !mediaPlayerReady) {
      return;
    }
    if (onVideoFrame) {
      mediaPlayer.onVideoFrame(onVideoFrame);
    }
  }, [onVideoFrame, mediaPlayerReady]);
  return /* @__PURE__ */ jsx4("canvas", {
    ref: canvasRef,
    width: videoConfig.width,
    height: videoConfig.height,
    style,
    className: classNameValue
  });
};
|
|
2308
|
+
// Wrapper that implements the `loop` prop on top of NewVideoForPreview.
// With a usable duration it wraps a non-looping copy of itself in <Loop>;
// with no finite duration it falls back to non-looping rendering.
var VideoForPreviewWithDuration = ({
  className,
  durationInSeconds,
  logLevel,
  loopVolumeCurveBehavior,
  muted,
  onVideoFrame,
  playbackRate,
  src,
  style,
  volume,
  loop,
  name,
  trimAfter,
  trimBefore
}) => {
  const { fps } = useVideoConfig();
  if (loop) {
    // Cannot compute a loop length without a finite duration — render
    // the same component once with looping disabled.
    if (!Number.isFinite(durationInSeconds) || durationInSeconds === null) {
      return /* @__PURE__ */ jsx4(VideoForPreviewWithDuration, {
        loop: false,
        className,
        durationInSeconds,
        logLevel,
        loopVolumeCurveBehavior,
        muted,
        onVideoFrame,
        playbackRate,
        src,
        style,
        volume,
        name,
        trimAfter,
        trimBefore
      });
    }
    // Duration in frames, before trimming/rate adjustment.
    const mediaDuration = durationInSeconds * fps;
    return /* @__PURE__ */ jsx4(Loop, {
      durationInFrames: calculateLoopDuration({
        endAt: trimAfter,
        mediaDuration,
        playbackRate: playbackRate ?? 1,
        startFrom: trimBefore
      }),
      layout: "none",
      name,
      children: /* @__PURE__ */ jsx4(VideoForPreviewWithDuration, {
        loop: false,
        className,
        durationInSeconds,
        logLevel,
        loopVolumeCurveBehavior,
        muted,
        onVideoFrame,
        playbackRate,
        src,
        style,
        volume,
        name,
        trimAfter,
        trimBefore
      })
    });
  }
  return /* @__PURE__ */ jsx4(NewVideoForPreview, {
    src,
    style,
    playbackRate,
    logLevel,
    muted,
    volume,
    loopVolumeCurveBehavior,
    onVideoFrame,
    className
  });
};
|
|
2384
|
+
// Entry component for video preview. When `loop` is set, asynchronously
// computes the media duration (needed to size the loop) before rendering;
// renders nothing until the duration arrives. Delegates to
// VideoForPreviewWithDuration.
var VideoForPreview = ({
  className,
  loop,
  src,
  logLevel,
  muted,
  name,
  volume,
  loopVolumeCurveBehavior,
  onVideoFrame,
  playbackRate,
  style
}) => {
  const preloadedSrc = usePreload(src);
  const [durationInSeconds, setDurationInSeconds] = useState4(null);
  // Compute the duration only when looping; guarded against updates after
  // unmount via the `cancelled` flag.
  useEffect2(() => {
    if (!loop) {
      return;
    }
    let cancelled = false;
    const computeDuration = async () => {
      const urlSource = new UrlSource3(preloadedSrc);
      const input = new Input3({
        source: urlSource,
        formats: ALL_FORMATS3
      });
      try {
        const duration = await input.computeDuration();
        if (!cancelled) {
          setDurationInSeconds(duration);
        }
      } catch (error) {
        Internals9.Log.error({ logLevel, tag: "@remotion/media" }, "[VideoForPreview] Failed to compute duration", error);
      } finally {
        // Always release the input, even when duration computation fails.
        input.dispose();
      }
    };
    computeDuration();
    return () => {
      cancelled = true;
    };
  }, [loop, preloadedSrc, logLevel]);
  // Looping requires the duration; render nothing until it is known.
  if (loop && durationInSeconds === null) {
    return null;
  }
  return /* @__PURE__ */ jsx4(VideoForPreviewWithDuration, {
    durationInSeconds,
    className,
    logLevel,
    muted,
    onVideoFrame,
    playbackRate,
    src,
    style,
    volume,
    name,
    trimAfter: undefined,
    trimBefore: undefined,
    loop,
    loopVolumeCurveBehavior
  });
};
|
|
1456
2446
|
|
|
1457
2447
|
// src/video/video-for-rendering.tsx
|
|
1458
2448
|
import {
|
|
1459
|
-
useContext as
|
|
2449
|
+
useContext as useContext7,
|
|
1460
2450
|
useLayoutEffect as useLayoutEffect2,
|
|
1461
|
-
useMemo as
|
|
1462
|
-
useRef as
|
|
1463
|
-
useState as
|
|
2451
|
+
useMemo as useMemo4,
|
|
2452
|
+
useRef as useRef3,
|
|
2453
|
+
useState as useState5
|
|
1464
2454
|
} from "react";
|
|
1465
2455
|
import {
|
|
1466
2456
|
cancelRender as cancelRender4,
|
|
1467
|
-
Internals as
|
|
1468
|
-
useCurrentFrame as
|
|
2457
|
+
Internals as Internals10,
|
|
2458
|
+
useCurrentFrame as useCurrentFrame3,
|
|
1469
2459
|
useDelayRender as useDelayRender2,
|
|
1470
2460
|
useRemotionEnvironment as useRemotionEnvironment4,
|
|
1471
|
-
useVideoConfig
|
|
2461
|
+
useVideoConfig as useVideoConfig2
|
|
1472
2462
|
} from "remotion";
|
|
1473
|
-
import { jsx as
|
|
2463
|
+
import { jsx as jsx5 } from "react/jsx-runtime";
|
|
1474
2464
|
var VideoForRendering = ({
|
|
1475
2465
|
volume: volumeProp,
|
|
1476
2466
|
playbackRate,
|
|
@@ -1480,27 +2470,36 @@ var VideoForRendering = ({
|
|
|
1480
2470
|
delayRenderRetries,
|
|
1481
2471
|
delayRenderTimeoutInMilliseconds,
|
|
1482
2472
|
onVideoFrame,
|
|
1483
|
-
logLevel
|
|
2473
|
+
logLevel,
|
|
1484
2474
|
loop,
|
|
1485
2475
|
style,
|
|
1486
|
-
className
|
|
2476
|
+
className,
|
|
2477
|
+
fallbackOffthreadVideoProps,
|
|
2478
|
+
audioStreamIndex,
|
|
2479
|
+
name,
|
|
2480
|
+
disallowFallbackToOffthreadVideo,
|
|
2481
|
+
stack
|
|
1487
2482
|
}) => {
|
|
1488
2483
|
if (!src) {
|
|
1489
2484
|
throw new TypeError("No `src` was passed to <Video>.");
|
|
1490
2485
|
}
|
|
1491
|
-
const frame =
|
|
1492
|
-
const absoluteFrame =
|
|
1493
|
-
const { fps } =
|
|
1494
|
-
const { registerRenderAsset, unregisterRenderAsset } =
|
|
1495
|
-
const startsAt =
|
|
1496
|
-
const [id] =
|
|
2486
|
+
const frame = useCurrentFrame3();
|
|
2487
|
+
const absoluteFrame = Internals10.useTimelinePosition();
|
|
2488
|
+
const { fps } = useVideoConfig2();
|
|
2489
|
+
const { registerRenderAsset, unregisterRenderAsset } = useContext7(Internals10.RenderAssetManager);
|
|
2490
|
+
const startsAt = Internals10.useMediaStartsAt();
|
|
2491
|
+
const [id] = useState5(() => `${Math.random()}`.replace("0.", ""));
|
|
1497
2492
|
const environment = useRemotionEnvironment4();
|
|
1498
2493
|
const { delayRender, continueRender } = useDelayRender2();
|
|
1499
|
-
const canvasRef =
|
|
2494
|
+
const canvasRef = useRef3(null);
|
|
2495
|
+
const [replaceWithOffthreadVideo, setReplaceWithOffthreadVideo] = useState5(false);
|
|
1500
2496
|
useLayoutEffect2(() => {
|
|
1501
2497
|
if (!canvasRef.current) {
|
|
1502
2498
|
return;
|
|
1503
2499
|
}
|
|
2500
|
+
if (replaceWithOffthreadVideo) {
|
|
2501
|
+
return;
|
|
2502
|
+
}
|
|
1504
2503
|
const actualFps = playbackRate ? fps / playbackRate : fps;
|
|
1505
2504
|
const timestamp = frame / actualFps;
|
|
1506
2505
|
const durationInSeconds = 1 / actualFps;
|
|
@@ -1526,12 +2525,44 @@ var VideoForRendering = ({
|
|
|
1526
2525
|
includeAudio: shouldRenderAudio,
|
|
1527
2526
|
includeVideo: window.remotion_videoEnabled,
|
|
1528
2527
|
isClientSideRendering: environment.isClientSideRendering,
|
|
1529
|
-
loop: loop ?? false
|
|
1530
|
-
|
|
1531
|
-
|
|
1532
|
-
|
|
1533
|
-
|
|
1534
|
-
|
|
2528
|
+
loop: loop ?? false,
|
|
2529
|
+
audioStreamIndex: audioStreamIndex ?? 0
|
|
2530
|
+
}).then((result) => {
|
|
2531
|
+
if (result === "unknown-container-format") {
|
|
2532
|
+
if (disallowFallbackToOffthreadVideo) {
|
|
2533
|
+
cancelRender4(new Error(`Unknown container format ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
|
|
2534
|
+
}
|
|
2535
|
+
if (window.remotion_isMainTab) {
|
|
2536
|
+
Internals10.Log.info({ logLevel, tag: "@remotion/media" }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <OffthreadVideo>`);
|
|
2537
|
+
}
|
|
2538
|
+
setReplaceWithOffthreadVideo(true);
|
|
2539
|
+
return;
|
|
2540
|
+
}
|
|
2541
|
+
if (result === "cannot-decode") {
|
|
2542
|
+
if (disallowFallbackToOffthreadVideo) {
|
|
2543
|
+
cancelRender4(new Error(`Cannot decode ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
|
|
2544
|
+
}
|
|
2545
|
+
if (window.remotion_isMainTab) {
|
|
2546
|
+
Internals10.Log.info({ logLevel, tag: "@remotion/media" }, `Cannot decode ${src}, falling back to <OffthreadVideo>`);
|
|
2547
|
+
}
|
|
2548
|
+
setReplaceWithOffthreadVideo(true);
|
|
2549
|
+
return;
|
|
2550
|
+
}
|
|
2551
|
+
if (result === "network-error") {
|
|
2552
|
+
if (disallowFallbackToOffthreadVideo) {
|
|
2553
|
+
cancelRender4(new Error(`Cannot decode ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
|
|
2554
|
+
}
|
|
2555
|
+
if (window.remotion_isMainTab) {
|
|
2556
|
+
Internals10.Log.info({ logLevel, tag: "@remotion/media" }, `Network error fetching ${src}, falling back to <OffthreadVideo>`);
|
|
2557
|
+
}
|
|
2558
|
+
setReplaceWithOffthreadVideo(true);
|
|
2559
|
+
return;
|
|
2560
|
+
}
|
|
2561
|
+
const {
|
|
2562
|
+
frame: imageBitmap,
|
|
2563
|
+
audio,
|
|
2564
|
+
durationInSeconds: assetDurationInSeconds
|
|
2565
|
+
} = result;
|
|
1535
2566
|
if (imageBitmap) {
|
|
1536
2567
|
onVideoFrame?.(imageBitmap);
|
|
1537
2568
|
const context = canvasRef.current?.getContext("2d");
|
|
@@ -1554,12 +2585,12 @@ var VideoForRendering = ({
|
|
|
1554
2585
|
frame,
|
|
1555
2586
|
startsAt
|
|
1556
2587
|
});
|
|
1557
|
-
const volume =
|
|
2588
|
+
const volume = Internals10.evaluateVolume({
|
|
1558
2589
|
volume: volumeProp,
|
|
1559
2590
|
frame: volumePropsFrame,
|
|
1560
2591
|
mediaVolume: 1
|
|
1561
2592
|
});
|
|
1562
|
-
|
|
2593
|
+
Internals10.warnAboutTooHighVolume(volume);
|
|
1563
2594
|
if (audio && volume > 0) {
|
|
1564
2595
|
applyVolume(audio.data, volume);
|
|
1565
2596
|
registerRenderAsset({
|
|
@@ -1601,12 +2632,51 @@ var VideoForRendering = ({
|
|
|
1601
2632
|
src,
|
|
1602
2633
|
startsAt,
|
|
1603
2634
|
unregisterRenderAsset,
|
|
1604
|
-
volumeProp
|
|
2635
|
+
volumeProp,
|
|
2636
|
+
replaceWithOffthreadVideo,
|
|
2637
|
+
audioStreamIndex,
|
|
2638
|
+
disallowFallbackToOffthreadVideo
|
|
1605
2639
|
]);
|
|
1606
|
-
const classNameValue =
|
|
1607
|
-
return [
|
|
2640
|
+
const classNameValue = useMemo4(() => {
|
|
2641
|
+
return [Internals10.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals10.truthy).join(" ");
|
|
1608
2642
|
}, [className]);
|
|
1609
|
-
|
|
2643
|
+
if (replaceWithOffthreadVideo) {
|
|
2644
|
+
return /* @__PURE__ */ jsx5(Internals10.InnerOffthreadVideo, {
|
|
2645
|
+
src,
|
|
2646
|
+
playbackRate: playbackRate ?? 1,
|
|
2647
|
+
muted: muted ?? false,
|
|
2648
|
+
acceptableTimeShiftInSeconds: fallbackOffthreadVideoProps?.acceptableTimeShiftInSeconds,
|
|
2649
|
+
loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? "repeat",
|
|
2650
|
+
delayRenderRetries: delayRenderRetries ?? undefined,
|
|
2651
|
+
delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined,
|
|
2652
|
+
style,
|
|
2653
|
+
allowAmplificationDuringRender: true,
|
|
2654
|
+
transparent: fallbackOffthreadVideoProps?.transparent ?? false,
|
|
2655
|
+
toneMapped: fallbackOffthreadVideoProps?.toneMapped ?? true,
|
|
2656
|
+
audioStreamIndex: audioStreamIndex ?? 0,
|
|
2657
|
+
name,
|
|
2658
|
+
className,
|
|
2659
|
+
onVideoFrame,
|
|
2660
|
+
volume: volumeProp,
|
|
2661
|
+
id,
|
|
2662
|
+
onError: fallbackOffthreadVideoProps?.onError,
|
|
2663
|
+
toneFrequency: fallbackOffthreadVideoProps?.toneFrequency ?? 1,
|
|
2664
|
+
showInTimeline: false,
|
|
2665
|
+
crossOrigin: undefined,
|
|
2666
|
+
onAutoPlayError: () => {
|
|
2667
|
+
return;
|
|
2668
|
+
},
|
|
2669
|
+
pauseWhenBuffering: false,
|
|
2670
|
+
trimAfter: undefined,
|
|
2671
|
+
trimBefore: undefined,
|
|
2672
|
+
useWebAudioApi: false,
|
|
2673
|
+
startFrom: undefined,
|
|
2674
|
+
endAt: undefined,
|
|
2675
|
+
stack,
|
|
2676
|
+
_remotionInternalNativeLoopPassed: false
|
|
2677
|
+
});
|
|
2678
|
+
}
|
|
2679
|
+
return /* @__PURE__ */ jsx5("canvas", {
|
|
1610
2680
|
ref: canvasRef,
|
|
1611
2681
|
style,
|
|
1612
2682
|
className: classNameValue
|
|
@@ -1614,29 +2684,33 @@ var VideoForRendering = ({
|
|
|
1614
2684
|
};
|
|
1615
2685
|
|
|
1616
2686
|
// src/video/video.tsx
|
|
1617
|
-
import { jsx as
|
|
1618
|
-
var {
|
|
1619
|
-
|
|
1620
|
-
|
|
1621
|
-
|
|
1622
|
-
|
|
1623
|
-
|
|
1624
|
-
|
|
1625
|
-
|
|
1626
|
-
|
|
1627
|
-
|
|
1628
|
-
|
|
1629
|
-
|
|
1630
|
-
|
|
1631
|
-
|
|
1632
|
-
|
|
1633
|
-
|
|
2687
|
+
import { jsx as jsx6 } from "react/jsx-runtime";
|
|
2688
|
+
var { validateMediaTrimProps: validateMediaTrimProps2, resolveTrimProps: resolveTrimProps2, validateMediaProps: validateMediaProps2 } = Internals11;
|
|
2689
|
+
var InnerVideo = ({
|
|
2690
|
+
src,
|
|
2691
|
+
audioStreamIndex,
|
|
2692
|
+
className,
|
|
2693
|
+
delayRenderRetries,
|
|
2694
|
+
delayRenderTimeoutInMilliseconds,
|
|
2695
|
+
disallowFallbackToOffthreadVideo,
|
|
2696
|
+
fallbackOffthreadVideoProps,
|
|
2697
|
+
logLevel,
|
|
2698
|
+
loop,
|
|
2699
|
+
loopVolumeCurveBehavior,
|
|
2700
|
+
muted,
|
|
2701
|
+
name,
|
|
2702
|
+
onVideoFrame,
|
|
2703
|
+
playbackRate,
|
|
2704
|
+
style,
|
|
2705
|
+
trimAfter,
|
|
2706
|
+
trimBefore,
|
|
2707
|
+
volume,
|
|
2708
|
+
showInTimeline,
|
|
2709
|
+
stack
|
|
2710
|
+
}) => {
|
|
1634
2711
|
const environment = useRemotionEnvironment5();
|
|
1635
|
-
|
|
1636
|
-
|
|
1637
|
-
}, []);
|
|
1638
|
-
if (typeof props.src !== "string") {
|
|
1639
|
-
throw new TypeError(`The \`<Video>\` tag requires a string for \`src\`, but got ${JSON.stringify(props.src)} instead.`);
|
|
2712
|
+
if (typeof src !== "string") {
|
|
2713
|
+
throw new TypeError(`The \`<Video>\` tag requires a string for \`src\`, but got ${JSON.stringify(src)} instead.`);
|
|
1640
2714
|
}
|
|
1641
2715
|
validateMediaTrimProps2({
|
|
1642
2716
|
startFrom: undefined,
|
|
@@ -1651,42 +2725,125 @@ var Video = (props) => {
|
|
|
1651
2725
|
trimAfter
|
|
1652
2726
|
});
|
|
1653
2727
|
if (typeof trimBeforeValue !== "undefined" || typeof trimAfterValue !== "undefined") {
|
|
1654
|
-
return /* @__PURE__ */
|
|
2728
|
+
return /* @__PURE__ */ jsx6(Sequence2, {
|
|
1655
2729
|
layout: "none",
|
|
1656
2730
|
from: 0 - (trimBeforeValue ?? 0),
|
|
1657
2731
|
showInTimeline: false,
|
|
1658
2732
|
durationInFrames: trimAfterValue,
|
|
1659
2733
|
name,
|
|
1660
|
-
children: /* @__PURE__ */
|
|
1661
|
-
|
|
1662
|
-
|
|
2734
|
+
children: /* @__PURE__ */ jsx6(InnerVideo, {
|
|
2735
|
+
audioStreamIndex,
|
|
2736
|
+
className,
|
|
2737
|
+
delayRenderRetries,
|
|
2738
|
+
delayRenderTimeoutInMilliseconds,
|
|
2739
|
+
disallowFallbackToOffthreadVideo,
|
|
2740
|
+
name,
|
|
2741
|
+
fallbackOffthreadVideoProps,
|
|
2742
|
+
logLevel,
|
|
2743
|
+
loop,
|
|
2744
|
+
loopVolumeCurveBehavior,
|
|
2745
|
+
muted,
|
|
2746
|
+
onVideoFrame,
|
|
2747
|
+
playbackRate,
|
|
2748
|
+
src,
|
|
2749
|
+
stack,
|
|
2750
|
+
style,
|
|
2751
|
+
volume,
|
|
2752
|
+
trimAfter: undefined,
|
|
2753
|
+
trimBefore: undefined,
|
|
2754
|
+
showInTimeline
|
|
1663
2755
|
})
|
|
1664
2756
|
});
|
|
1665
2757
|
}
|
|
1666
|
-
validateMediaProps2(
|
|
2758
|
+
validateMediaProps2({ playbackRate, volume }, "Video");
|
|
1667
2759
|
if (environment.isRendering) {
|
|
1668
|
-
return /* @__PURE__ */
|
|
1669
|
-
|
|
2760
|
+
return /* @__PURE__ */ jsx6(VideoForRendering, {
|
|
2761
|
+
audioStreamIndex: audioStreamIndex ?? 0,
|
|
2762
|
+
className,
|
|
2763
|
+
delayRenderRetries: delayRenderRetries ?? null,
|
|
2764
|
+
delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? null,
|
|
2765
|
+
disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo ?? false,
|
|
2766
|
+
name,
|
|
2767
|
+
fallbackOffthreadVideoProps,
|
|
2768
|
+
logLevel,
|
|
2769
|
+
loop,
|
|
2770
|
+
loopVolumeCurveBehavior,
|
|
2771
|
+
muted,
|
|
2772
|
+
onVideoFrame,
|
|
2773
|
+
playbackRate,
|
|
2774
|
+
src,
|
|
2775
|
+
stack,
|
|
2776
|
+
style,
|
|
2777
|
+
volume
|
|
1670
2778
|
});
|
|
1671
2779
|
}
|
|
1672
|
-
|
|
2780
|
+
return /* @__PURE__ */ jsx6(VideoForPreview, {
|
|
2781
|
+
className,
|
|
2782
|
+
name,
|
|
2783
|
+
logLevel,
|
|
2784
|
+
loop,
|
|
2785
|
+
loopVolumeCurveBehavior,
|
|
2786
|
+
muted,
|
|
1673
2787
|
onVideoFrame,
|
|
1674
|
-
|
|
1675
|
-
|
|
1676
|
-
|
|
1677
|
-
|
|
1678
|
-
|
|
1679
|
-
|
|
1680
|
-
|
|
1681
|
-
|
|
1682
|
-
|
|
1683
|
-
|
|
2788
|
+
playbackRate,
|
|
2789
|
+
src,
|
|
2790
|
+
style,
|
|
2791
|
+
volume
|
|
2792
|
+
});
|
|
2793
|
+
};
|
|
2794
|
+
var Video = ({
|
|
2795
|
+
src,
|
|
2796
|
+
audioStreamIndex,
|
|
2797
|
+
className,
|
|
2798
|
+
delayRenderRetries,
|
|
2799
|
+
delayRenderTimeoutInMilliseconds,
|
|
2800
|
+
disallowFallbackToOffthreadVideo,
|
|
2801
|
+
fallbackOffthreadVideoProps,
|
|
2802
|
+
logLevel,
|
|
2803
|
+
loop,
|
|
2804
|
+
loopVolumeCurveBehavior,
|
|
2805
|
+
muted,
|
|
2806
|
+
name,
|
|
2807
|
+
onVideoFrame,
|
|
2808
|
+
playbackRate,
|
|
2809
|
+
showInTimeline,
|
|
2810
|
+
style,
|
|
2811
|
+
trimAfter,
|
|
2812
|
+
trimBefore,
|
|
2813
|
+
volume,
|
|
2814
|
+
stack
|
|
2815
|
+
}) => {
|
|
2816
|
+
return /* @__PURE__ */ jsx6(InnerVideo, {
|
|
2817
|
+
audioStreamIndex: audioStreamIndex ?? 0,
|
|
2818
|
+
className,
|
|
2819
|
+
delayRenderRetries: delayRenderRetries ?? null,
|
|
2820
|
+
delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? null,
|
|
2821
|
+
disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo ?? false,
|
|
2822
|
+
fallbackOffthreadVideoProps: fallbackOffthreadVideoProps ?? {},
|
|
2823
|
+
logLevel: logLevel ?? "info",
|
|
2824
|
+
loop: loop ?? false,
|
|
2825
|
+
loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? "repeat",
|
|
2826
|
+
muted: muted ?? false,
|
|
2827
|
+
name,
|
|
2828
|
+
onVideoFrame,
|
|
2829
|
+
playbackRate: playbackRate ?? 1,
|
|
1684
2830
|
showInTimeline: showInTimeline ?? true,
|
|
1685
|
-
|
|
1686
|
-
|
|
2831
|
+
src,
|
|
2832
|
+
style: style ?? {},
|
|
2833
|
+
trimAfter,
|
|
2834
|
+
trimBefore,
|
|
2835
|
+
volume: volume ?? 1,
|
|
2836
|
+
stack
|
|
1687
2837
|
});
|
|
1688
2838
|
};
|
|
2839
|
+
Internals11.addSequenceStackTraces(Video);
|
|
2840
|
+
|
|
2841
|
+
// src/index.ts
|
|
2842
|
+
var experimental_Audio = Audio2;
|
|
2843
|
+
var experimental_Video = Video;
|
|
1689
2844
|
export {
|
|
1690
|
-
|
|
1691
|
-
|
|
2845
|
+
experimental_Video,
|
|
2846
|
+
experimental_Audio,
|
|
2847
|
+
Video,
|
|
2848
|
+
Audio2 as Audio
|
|
1692
2849
|
};
|