@remotion/media 4.0.352 → 4.0.354
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/audio/audio-for-rendering.js +37 -27
- package/dist/audio/audio.js +6 -3
- package/dist/audio/props.d.ts +1 -7
- package/dist/audio-extraction/audio-iterator.d.ts +1 -1
- package/dist/audio-extraction/audio-iterator.js +2 -2
- package/dist/audio-extraction/audio-manager.d.ts +1 -1
- package/dist/audio-extraction/extract-audio.d.ts +7 -4
- package/dist/audio-extraction/extract-audio.js +16 -7
- package/dist/caches.d.ts +6 -6
- package/dist/caches.js +5 -6
- package/dist/convert-audiodata/apply-volume.d.ts +1 -0
- package/dist/convert-audiodata/apply-volume.js +17 -0
- package/dist/convert-audiodata/convert-audiodata.d.ts +2 -2
- package/dist/convert-audiodata/convert-audiodata.js +13 -7
- package/dist/convert-audiodata/resample-audiodata.d.ts +1 -2
- package/dist/convert-audiodata/resample-audiodata.js +42 -20
- package/dist/esm/index.mjs +242 -182
- package/dist/extract-frame-and-audio.d.ts +3 -2
- package/dist/extract-frame-and-audio.js +4 -3
- package/dist/looped-frame.d.ts +9 -0
- package/dist/looped-frame.js +10 -0
- package/dist/video/media-player.d.ts +28 -30
- package/dist/video/media-player.js +174 -314
- package/dist/video/new-video-for-preview.d.ts +1 -1
- package/dist/video/new-video-for-preview.js +12 -18
- package/dist/video/props.d.ts +0 -5
- package/dist/video/timeout-utils.d.ts +2 -0
- package/dist/video/timeout-utils.js +18 -0
- package/dist/video/video-for-preview.d.ts +11 -0
- package/dist/video/video-for-preview.js +113 -0
- package/dist/video/video-for-rendering.js +41 -31
- package/dist/video/video.js +2 -2
- package/dist/video-extraction/extract-frame-via-broadcast-channel.d.ts +4 -3
- package/dist/video-extraction/extract-frame-via-broadcast-channel.js +9 -5
- package/dist/video-extraction/extract-frame.d.ts +1 -1
- package/dist/video-extraction/extract-frame.js +3 -0
- package/dist/video-extraction/get-frames-since-keyframe.d.ts +1 -1
- package/dist/video-extraction/get-frames-since-keyframe.js +7 -8
- package/dist/video-extraction/keyframe-bank.d.ts +1 -1
- package/dist/video-extraction/keyframe-bank.js +7 -7
- package/dist/video-extraction/keyframe-manager.d.ts +1 -1
- package/dist/video-extraction/keyframe-manager.js +6 -6
- package/package.json +3 -3
package/dist/esm/index.mjs
CHANGED
@@ -2,7 +2,7 @@
   import { useCallback as useCallback2, useContext as useContext5 } from "react";
   import {
     cancelRender as cancelRender3,
-    Internals as …
+    Internals as Internals6,
     Sequence,
     useRemotionEnvironment as useRemotionEnvironment3
   } from "remotion";

@@ -42,52 +42,57 @@ import { jsx, jsxs } from "react/jsx-runtime";
  var SharedAudioContext = createContext2(null);

  // src/audio/audio-for-rendering.tsx
- import { useContext as useContext4, useLayoutEffect, …
+ import { useContext as useContext4, useLayoutEffect, useState as useState3 } from "react";
  import {
    cancelRender as cancelRender2,
-   Internals,
+   Internals as Internals5,
    useCurrentFrame,
    useDelayRender,
    useRemotionEnvironment as useRemotionEnvironment2
  } from "remotion";

- // src/…
- … (the remaining lines of this removed block are not rendered in the diff view)
-     if (isEqualOrBelowLogLevel(logLevel, "trace")) {
-       return console.log(...args);
-     }
-   },
-   verbose: (logLevel, ...args) => {
-     if (isEqualOrBelowLogLevel(logLevel, "verbose")) {
-       return console.log(...args);
-     }
-   },
-   info: (logLevel, ...args) => {
-     if (isEqualOrBelowLogLevel(logLevel, "info")) {
-       return console.log(...args);
-     }
-   },
-   warn: (logLevel, ...args) => {
-     if (isEqualOrBelowLogLevel(logLevel, "warn")) {
-       return console.warn(...args);
+ // src/convert-audiodata/apply-volume.ts
+ var applyVolume = (array, volume) => {
+   if (volume === 1) {
+     return;
+   }
+   for (let i = 0; i < array.length; i++) {
+     const newValue = array[i] * volume;
+     if (newValue < -32768) {
+       array[i] = -32768;
+     } else if (newValue > 32767) {
+       array[i] = 32767;
+     } else {
+       array[i] = newValue;
      }
-   },
-   error: (...args) => {
-     return console.error(...args);
    }
  };

+ // src/looped-frame.ts
+ var frameForVolumeProp = ({
+   behavior,
+   loop,
+   assetDurationInSeconds,
+   fps,
+   frame,
+   startsAt
+ }) => {
+   if (!loop) {
+     return frame + startsAt;
+   }
+   if (behavior === "extend") {
+     return frame + startsAt;
+   }
+   const assetDurationInFrames = Math.floor(assetDurationInSeconds * fps) - startsAt;
+   return frame % assetDurationInFrames + startsAt;
+ };
+
+ // src/caches.ts
+ import { cancelRender, Internals as Internals4 } from "remotion";
+
+ // src/audio-extraction/audio-iterator.ts
+ import { Internals } from "remotion";
+
  // src/audio-extraction/audio-cache.ts
  var makeAudioCache = () => {
    const timestamps = [];
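Note: two of the helpers added in this hunk are small and self-contained. applyVolume scales interleaved s16 samples in place and clamps them to the signed 16-bit range so that volumes above 1 saturate instead of wrapping, and frameForVolumeProp maps the composition frame onto the (possibly looping) asset before a volume curve is evaluated. A standalone sketch of the clamping behaviour (illustrative, not the bundled code):

    const applyVolume = (samples: Int16Array, volume: number): void => {
      if (volume === 1) {
        return; // nothing to do
      }
      for (let i = 0; i < samples.length; i++) {
        // clamp to [-32768, 32767] so loud volumes saturate instead of overflowing
        samples[i] = Math.max(-32768, Math.min(32767, samples[i] * volume));
      }
    };

    const pcm = new Int16Array([1000, -20000, 30000]);
    applyVolume(pcm, 2);
    // pcm is now [2000, -32768, 32767]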
@@ -206,7 +211,7 @@ var makeAudioIterator = ({
    return samples;
  };
  const logOpenFrames = (logLevel) => {
-   …
+   Internals.Log.verbose({ logLevel, tag: "@remotion/media" }, "Open audio samples for src", src, cache.getOpenTimestamps().map((t) => t.toFixed(3)).join(", "));
  };
  const getCacheStats = () => {
    return {

@@ -343,6 +348,9 @@ var makeAudioManager = () => {
    };
  };

+ // src/video-extraction/keyframe-manager.ts
+ import { Internals as Internals3 } from "remotion";
+
  // ../../node_modules/.pnpm/mediabunny@1.17.0/node_modules/mediabunny/dist/modules/src/misc.js
  /*!
   * Copyright (c) 2025-present, Vanilagy and contributors
@@ -13280,6 +13288,7 @@ class Input {
  */

  // src/video-extraction/keyframe-bank.ts
+ import { Internals as Internals2 } from "remotion";
  var roundTo4Digits = (timestamp) => {
    return Math.round(timestamp * 1000) / 1000;
  };

@@ -13291,7 +13300,7 @@ var makeKeyframeBank = ({
  const frames = {};
  const frameTimestamps = [];
  let lastUsed = Date.now();
- let …
+ let allocationSize = 0;
  const hasDecodedEnoughForTimestamp = (timestamp) => {
    const lastFrameTimestamp = frameTimestamps[frameTimestamps.length - 1];
    if (!lastFrameTimestamp) {

@@ -13306,7 +13315,7 @@ var makeKeyframeBank = ({
  const addFrame = (frame) => {
    frames[frame.timestamp] = frame;
    frameTimestamps.push(frame.timestamp);
-   …
+   allocationSize += frame.allocationSize();
    lastUsed = Date.now();
  };
  const ensureEnoughFramesForTimestamp = async (timestamp) => {

@@ -13353,7 +13362,7 @@ var makeKeyframeBank = ({
    if (!frames[frameTimestamp]) {
      continue;
    }
-   …
+   allocationSize -= frames[frameTimestamp].allocationSize();
    frames[frameTimestamp].close();
    delete frames[frameTimestamp];
  }

@@ -13373,17 +13382,17 @@ var makeKeyframeBank = ({
    if (!frames[frameTimestamp]) {
      continue;
    }
-   …
+   allocationSize -= frames[frameTimestamp].allocationSize();
    frameTimestamps.splice(frameTimestamps.indexOf(frameTimestamp), 1);
    frames[frameTimestamp].close();
    delete frames[frameTimestamp];
-   …
+   Internals2.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted frame ${frameTimestamp} for src ${src}`);
    }
  }
  };
  const getOpenFrameCount = () => {
    return {
-     size: …
+     size: allocationSize,
      timestamps: frameTimestamps
    };
  };
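Note: the keyframe bank now keeps a running byte count of the WebCodecs frames it holds. frame.allocationSize() reports how many bytes are needed to copy the decoded frame out, and close() still has to be called when a frame is evicted. A minimal sketch of the same bookkeeping (illustrative, not the bundled code):

    let allocationSize = 0;

    const trackFrame = (frame: VideoFrame) => {
      allocationSize += frame.allocationSize(); // bytes the decoded frame occupies
    };

    const evictFrame = (frame: VideoFrame) => {
      allocationSize -= frame.allocationSize(); // query before close(); afterwards it throws
      frame.close();                            // release the decoder memory
    };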
@@ -13433,16 +13442,13 @@ var getSinks = async (src) => {
  });
  const format = await input.getFormat();
  const videoTrack = await input.getPrimaryVideoTrack();
- if (!videoTrack) {
-   throw new Error(`No video track found for ${src}`);
- }
  const audioTrack = await input.getPrimaryAudioTrack();
  const isMatroska = format === MATROSKA;
  return {
-   video: {
+   video: videoTrack ? {
      sampleSink: new VideoSampleSink(videoTrack),
      packetSink: new EncodedPacketSink(videoTrack)
-   },
+   } : null,
    audio: audioTrack ? {
      sampleSink: new AudioSampleSink(audioTrack)
    } : null,

@@ -13457,7 +13463,7 @@ var getFramesSinceKeyframe = async ({
  startPacket
  }) => {
  const nextKeyPacket = await packetSink.getNextKeyPacket(startPacket, {
-   verifyKeyPackets: …
+   verifyKeyPackets: true
  });
  const sampleIterator = videoSampleSink.samples(startPacket.timestamp, nextKeyPacket ? nextKeyPacket.timestamp : Infinity);
  const keyframeBank = makeKeyframeBank({
@@ -13491,10 +13497,10 @@ var makeKeyframeManager = () => {
      if (size === 0) {
        continue;
      }
-     …
+     Internals3.Log.verbose({ logLevel, tag: "@remotion/media" }, `Open frames for src ${src}: ${timestamps.join(", ")}`);
    }
  }
- …
+ Internals3.Log.verbose({ logLevel, tag: "@remotion/media" }, `Video cache stats: ${count} open frames, ${totalSize} bytes`);
  };
  const getCacheStats = async () => {
    let count = 0;

@@ -13535,7 +13541,7 @@ var makeKeyframeManager = () => {
  if (mostInThePastBank) {
    await mostInThePastBank.prepareForDeletion();
    delete sources[mostInThePastSrc][mostInThePastBank.startTimestampInSeconds];
-   …
+   Internals3.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted frames for src ${mostInThePastSrc} from ${mostInThePastBank.startTimestampInSeconds}sec to ${mostInThePastBank.endTimestampInSeconds}sec to free up memory.`);
  }
  };
  const ensureToStayUnderMaxCacheSize = async (logLevel) => {

@@ -13561,7 +13567,7 @@ var makeKeyframeManager = () => {
  const { endTimestampInSeconds, startTimestampInSeconds } = bank;
  if (endTimestampInSeconds < threshold) {
    await bank.prepareForDeletion();
-   …
+   Internals3.Log.verbose({ logLevel, tag: "@remotion/media" }, `[Video] Cleared frames for src ${src} from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
    delete sources[src][startTimeInSeconds];
  } else {
    bank.deleteFramesBeforeTimestamp({

@@ -13600,7 +13606,7 @@ var makeKeyframeManager = () => {
  if (await (await existingBank).hasTimestampInSecond(timestamp)) {
    return existingBank;
  }
- …
+ Internals3.Log.verbose({ logLevel, tag: "@remotion/media" }, `Keyframe bank exists but frames have already been evicted!`);
  await (await existingBank).prepareForDeletion();
  delete sources[src][startTimestampInSeconds];
  const replacementKeybank = getFramesSinceKeyframe({
@@ -13672,20 +13678,20 @@ var getUncachedMaxCacheSize = (logLevel) => {
  if (window.remotion_mediaCacheSizeInBytes > 20000 * 1024 * 1024) {
    cancelRender(new Error(`The maximum value for the "mediaCacheSizeInBytes" prop is 20GB (${20000 * 1024 * 1024}), got: ${window.remotion_mediaCacheSizeInBytes}`));
  }
- …
+ Internals4.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set using "mediaCacheSizeInBytes": ${(window.remotion_mediaCacheSizeInBytes / 1024 / 1024).toFixed(1)} MB`);
  return window.remotion_mediaCacheSizeInBytes;
  }
  if (window.remotion_initialMemoryAvailable !== undefined && window.remotion_initialMemoryAvailable !== null) {
    const value = window.remotion_initialMemoryAvailable / 2;
    if (value < 240 * 1024 * 1024) {
-     …
+     Internals4.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on minimum value of 240MB (which is more than half of the available system memory!)`);
      return 240 * 1024 * 1024;
    }
    if (value > 20000 * 1024 * 1024) {
-     …
+     Internals4.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on maximum value of 20GB (which is less than half of the available system memory)`);
      return 20000 * 1024 * 1024;
    }
-   …
+   Internals4.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on available memory (50% of available memory): ${(value / 1024 / 1024).toFixed(1)} MB`);
    return value;
  }
  return 1000 * 1000 * 1000;
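Note: the verbose logs added here spell out the existing cache-size policy: an explicit window.remotion_mediaCacheSizeInBytes wins (capped at 20 GB), otherwise half of the reported available memory is used, clamped to the 240 MB – 20 GB range, with a roughly 1 GB fallback when no memory information is available. For example, with 1 GB of reported available memory the computed value is 512 MB, which lies inside the bounds and becomes the cache budget.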
@@ -13742,36 +13748,53 @@ var combineAudioDataAndClosePrevious = (audioDataArray) => {
  // src/convert-audiodata/resample-audiodata.ts
  var TARGET_NUMBER_OF_CHANNELS = 2;
  var TARGET_SAMPLE_RATE = 48000;
+ var fixFloatingPoint = (value) => {
+   if (value % 1 < 0.0000001) {
+     return Math.floor(value);
+   }
+   if (value % 1 > 0.9999999) {
+     return Math.ceil(value);
+   }
+   return value;
+ };
  var resampleAudioData = ({
    srcNumberOfChannels,
    sourceChannels,
    destination,
    targetFrames,
-   chunkSize,
-   volume
+   chunkSize
  }) => {
-   const getSourceValues = (…
-   const …
-   … (the remaining lines of the previous getSourceValues implementation are not rendered in the diff view)
+   const getSourceValues = (startUnfixed, endUnfixed, channelIndex) => {
+     const start = fixFloatingPoint(startUnfixed);
+     const end = fixFloatingPoint(endUnfixed);
+     const startFloor = Math.floor(start);
+     const startCeil = Math.ceil(start);
+     const startFraction = start - startFloor;
+     const endFraction = end - Math.floor(end);
+     const endFloor = Math.floor(end);
+     let weightedSum = 0;
+     let totalWeight = 0;
+     if (startFraction > 0) {
+       const firstSample = sourceChannels[startFloor * srcNumberOfChannels + channelIndex];
+       weightedSum += firstSample * (1 - startFraction);
+       totalWeight += 1 - startFraction;
+     }
+     for (let k = startCeil; k < endFloor; k++) {
+       const num = sourceChannels[k * srcNumberOfChannels + channelIndex];
+       weightedSum += num;
+       totalWeight += 1;
+     }
+     if (endFraction > 0) {
+       const lastSample = sourceChannels[endFloor * srcNumberOfChannels + channelIndex];
+       weightedSum += lastSample * endFraction;
+       totalWeight += endFraction;
+     }
+     const average = weightedSum / totalWeight;
+     return average;
    };
    for (let newFrameIndex = 0; newFrameIndex < targetFrames; newFrameIndex++) {
-     const start = …
-     const end = …
+     const start = newFrameIndex * chunkSize;
+     const end = start + chunkSize;
      if (TARGET_NUMBER_OF_CHANNELS === srcNumberOfChannels) {
        for (let i = 0; i < srcNumberOfChannels; i++) {
          destination[newFrameIndex * srcNumberOfChannels + i] = getSourceValues(start, end, i);
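Note: the rewritten getSourceValues computes an area average. Each output sample is the mean of the source samples covered by the fractional window [start, end), with the partial samples at both edges weighted by how much of them falls inside the window; fixFloatingPoint only snaps values that are within 1e-7 of an integer. Worked through on a small mono example (my own numbers, not from the diff): for the window [0.5, 2.5) over the samples [0, 10, 20, 30], the average is (0.5·0 + 1·10 + 0.5·20) / 2 = 10.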
@@ -13796,8 +13819,9 @@ var resampleAudioData = ({
  const c = getSourceValues(start, end, 2);
  const sl = getSourceValues(start, end, 3);
  const sr = getSourceValues(start, end, 4);
- const …
- const …
+ const sq = Math.sqrt(1 / 2);
+ const l2 = l + sq * (c + sl);
+ const r2 = r + sq * (c + sr);
  destination[newFrameIndex * 2 + 0] = l2;
  destination[newFrameIndex * 2 + 1] = r2;
  } else {
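Note: in other words, the 5.1-to-stereo branch now folds the centre and surround channels into left and right with a gain of √(1/2) ≈ 0.707: L' = L + 0.707·(C + SL) and R' = R + 0.707·(C + SR), matching the commonly used broadcast downmix coefficients.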
@@ -13810,13 +13834,19 @@ var resampleAudioData = ({

  // src/convert-audiodata/convert-audiodata.ts
  var FORMAT = "s16";
+ var roundButRoundDownZeroPointFive = (value) => {
+   if (value % 1 <= 0.5) {
+     return Math.floor(value);
+   }
+   return Math.ceil(value);
+ };
  var convertAudioData = ({
    audioData,
    newSampleRate,
    trimStartInSeconds,
    trimEndInSeconds,
    targetNumberOfChannels,
-   …
+   playbackRate
  }) => {
    const {
      numberOfChannels: srcNumberOfChannels,

@@ -13824,9 +13854,10 @@ var convertAudioData = ({
    numberOfFrames
  } = audioData;
  const ratio = currentSampleRate / newSampleRate;
- const frameOffset = …
- const …
- const …
+ const frameOffset = roundButRoundDownZeroPointFive(trimStartInSeconds * audioData.sampleRate);
+ const unroundedFrameCount = numberOfFrames - (trimEndInSeconds + trimStartInSeconds) * audioData.sampleRate;
+ const frameCount = Math.round(unroundedFrameCount);
+ const newNumberOfFrames = Math.round(unroundedFrameCount / ratio / playbackRate);
  if (newNumberOfFrames === 0) {
    throw new Error("Cannot resample - the given sample rate would result in less than 1 sample");
  }
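Note: with playbackRate now part of the frame-count math, the output length shrinks or grows with the playback speed. Worked through with concrete numbers (my own example): one second of 44100 Hz audio gives unroundedFrameCount = 44100; resampling to 48000 Hz makes ratio = 44100 / 48000 = 0.91875, and with playbackRate = 2 the result is round(44100 / 0.91875 / 2) = 24000 output frames, i.e. half a second of 48 kHz audio played at double speed.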
@@ -13842,7 +13873,7 @@ var convertAudioData = ({
  });
  const data = new Int16Array(newNumberOfFrames * targetNumberOfChannels);
  const chunkSize = frameCount / newNumberOfFrames;
- if (newNumberOfFrames === frameCount && targetNumberOfChannels === srcNumberOfChannels && …
+ if (newNumberOfFrames === frameCount && targetNumberOfChannels === srcNumberOfChannels && playbackRate === 1) {
  return {
    data: srcChannels,
    numberOfChannels: targetNumberOfChannels,

@@ -13856,8 +13887,7 @@ var convertAudioData = ({
  sourceChannels: srcChannels,
  destination: data,
  targetFrames: newNumberOfFrames,
- chunkSize,
- volume
+ chunkSize
  });
  const newAudioData = {
    data,

@@ -13882,6 +13912,9 @@ var extractFrame = async ({
  sinkPromises[src] = getSinks(src);
  }
  const { video, getDuration } = await sinkPromises[src];
+ if (video === null) {
+   throw new Error(`No video track found for ${src}`);
+ }
  const timeInSeconds = loop ? unloopedTimeinSeconds % await getDuration() : unloopedTimeinSeconds;
  const keyframeBank = await keyframeManager.requestKeyframeBank({
    packetSink: video.packetSink,
@@ -13899,18 +13932,22 @@ var extractAudio = async ({
  src,
  timeInSeconds: unloopedTimeInSeconds,
  durationInSeconds,
- volume,
  logLevel,
- loop
+ loop,
+ playbackRate
  }) => {
  if (!sinkPromises[src]) {
    sinkPromises[src] = getSinks(src);
  }
  const { audio, actualMatroskaTimestamps, isMatroska, getDuration } = await sinkPromises[src];
+ let duration = null;
+ if (loop) {
+   duration = await getDuration();
+ }
  if (audio === null) {
-   return null;
+   return { data: null, durationInSeconds: null };
  }
- const timeInSeconds = loop ? unloopedTimeInSeconds % …
+ const timeInSeconds = loop ? unloopedTimeInSeconds % duration : unloopedTimeInSeconds;
  const sampleIterator = await audioManager.getIterator({
    src,
    timeInSeconds,

@@ -13937,6 +13974,12 @@ var extractAudio = async ({
  let trimEndInSeconds = 0;
  if (isFirstSample) {
    trimStartInSeconds = timeInSeconds - sample.timestamp;
+   if (trimStartInSeconds < 0 && trimStartInSeconds > -0.0000000001) {
+     trimStartInSeconds = 0;
+   }
+   if (trimStartInSeconds < 0) {
+     throw new Error(`trimStartInSeconds is negative: ${trimStartInSeconds}`);
+   }
  }
  if (isLastSample) {
    trimEndInSeconds = Math.max(0, sample.timestamp + sample.duration - (timeInSeconds + durationInSeconds));

@@ -13947,7 +13990,7 @@ var extractAudio = async ({
  trimStartInSeconds,
  trimEndInSeconds,
  targetNumberOfChannels: TARGET_NUMBER_OF_CHANNELS,
- …
+ playbackRate
  });
  audioDataRaw.close();
  if (audioData.numberOfFrames === 0) {

@@ -13956,10 +13999,10 @@ var extractAudio = async ({
  audioDataArray.push(audioData);
  }
  if (audioDataArray.length === 0) {
-   return null;
+   return { data: null, durationInSeconds: duration };
  }
  const combined = combineAudioDataAndClosePrevious(audioDataArray);
- return combined;
+ return { data: combined, durationInSeconds: duration };
  };

  // src/extract-frame-and-audio.ts
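Note: extractAudio no longer returns the combined samples directly; it returns an object so the asset duration (needed for looped volume curves) travels with the data. A hypothetical caller therefore unwraps it like this (the call itself is illustrative):

    const { data, durationInSeconds: assetDuration } = await extractAudio({
      src,
      timeInSeconds: 2,
      durationInSeconds: 1 / fps,
      logLevel: "info",
      loop: true,
      playbackRate: 1,
    });
    if (data === null) {
      // no audio track or no samples in this window; assetDuration may still be set when looping
    }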
@@ -13968,9 +14011,9 @@ var extractFrameAndAudio = async ({
  timeInSeconds,
  logLevel,
  durationInSeconds,
+ playbackRate,
  includeAudio,
  includeVideo,
- volume,
  loop
  }) => {
  const [frame, audio] = await Promise.all([

@@ -13984,14 +14027,15 @@ var extractFrameAndAudio = async ({
  src,
  timeInSeconds,
  durationInSeconds,
- volume,
  logLevel,
- loop
+ loop,
+ playbackRate
  }) : null
  ]);
  return {
    frame: frame?.toVideoFrame() ?? null,
-   audio
+   audio: audio?.data ?? null,
+   durationInSeconds: audio?.durationInSeconds ?? null
  };
  };

@@ -14001,14 +14045,14 @@ if (window.remotion_broadcastChannel && window.remotion_isMainTab) {
  const data = event.data;
  if (data.type === "request") {
    try {
-     const { frame, audio } = await extractFrameAndAudio({
+     const { frame, audio, durationInSeconds } = await extractFrameAndAudio({
        src: data.src,
        timeInSeconds: data.timeInSeconds,
        logLevel: data.logLevel,
        durationInSeconds: data.durationInSeconds,
+       playbackRate: data.playbackRate,
        includeAudio: data.includeAudio,
        includeVideo: data.includeVideo,
-       volume: data.volume,
        loop: data.loop
      });
      const videoFrame = frame;

@@ -14020,7 +14064,8 @@ if (window.remotion_broadcastChannel && window.remotion_isMainTab) {
  type: "response-success",
  id: data.id,
  frame: imageBitmap,
- audio
+ audio,
+ durationInSeconds: durationInSeconds ?? null
  };
  window.remotion_broadcastChannel.postMessage(response);
  videoFrame?.close();

@@ -14042,10 +14087,10 @@ var extractFrameViaBroadcastChannel = ({
  timeInSeconds,
  logLevel,
  durationInSeconds,
+ playbackRate,
  includeAudio,
  includeVideo,
  isClientSideRendering,
- volume,
  loop
  }) => {
  if (isClientSideRendering || window.remotion_isMainTab) {

@@ -14054,9 +14099,9 @@ var extractFrameViaBroadcastChannel = ({
  src,
  timeInSeconds,
  durationInSeconds,
+ playbackRate,
  includeAudio,
  includeVideo,
- volume,
  loop
  });
  }

@@ -14070,7 +14115,8 @@ var extractFrameViaBroadcastChannel = ({
  if (data.type === "response-success" && data.id === requestId) {
    resolve({
      frame: data.frame ? data.frame : null,
-     audio: data.audio ? data.audio : null
+     audio: data.audio ? data.audio : null,
+     durationInSeconds: data.durationInSeconds ? data.durationInSeconds : null
    });
    window.remotion_broadcastChannel.removeEventListener("message", onMessage);
  } else if (data.type === "response-error" && data.id === requestId) {

@@ -14087,9 +14133,9 @@ var extractFrameViaBroadcastChannel = ({
  id: requestId,
  logLevel,
  durationInSeconds,
+ playbackRate,
  includeAudio,
  includeVideo,
- volume,
  loop
  };
  window.remotion_broadcastChannel.postMessage(request);
@@ -14119,11 +14165,11 @@ var AudioForRendering = ({
  logLevel = window.remotion_logLevel,
  loop
  }) => {
- const absoluteFrame = Internals.useTimelinePosition();
- const videoConfig = Internals.useUnsafeVideoConfig();
- const { registerRenderAsset, unregisterRenderAsset } = useContext4(Internals.RenderAssetManager);
  const frame = useCurrentFrame();
- const …
+ const absoluteFrame = Internals5.useTimelinePosition();
+ const videoConfig = Internals5.useUnsafeVideoConfig();
+ const { registerRenderAsset, unregisterRenderAsset } = useContext4(Internals5.RenderAssetManager);
+ const startsAt = Internals5.useMediaStartsAt();
  const environment = useRemotionEnvironment2();
  const [id] = useState3(() => `${Math.random()}`.replace("0.", ""));
  if (!videoConfig) {

@@ -14132,24 +14178,6 @@ var AudioForRendering = ({
  if (!src) {
    throw new TypeError("No `src` was passed to <Audio>.");
  }
- const volume = Internals.evaluateVolume({
-   volume: volumeProp,
-   frame: volumePropsFrame,
-   mediaVolume: 1
- });
- Internals.warnAboutTooHighVolume(volume);
- const shouldRenderAudio = useMemo3(() => {
-   if (!window.remotion_audioEnabled) {
-     return false;
-   }
-   if (muted) {
-     return false;
-   }
-   if (volume <= 0) {
-     return false;
-   }
-   return true;
- }, [muted, volume]);
  const { fps } = videoConfig;
  const { delayRender, continueRender } = useDelayRender();
  useLayoutEffect(() => {

@@ -14160,18 +14188,42 @@ var AudioForRendering = ({
  retries: delayRenderRetries ?? undefined,
  timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined
  });
+ const shouldRenderAudio = (() => {
+   if (!window.remotion_audioEnabled) {
+     return false;
+   }
+   if (muted) {
+     return false;
+   }
+   return true;
+ })();
  extractFrameViaBroadcastChannel({
    src,
    timeInSeconds: timestamp,
    durationInSeconds,
+   playbackRate: playbackRate ?? 1,
    logLevel: logLevel ?? "info",
    includeAudio: shouldRenderAudio,
    includeVideo: false,
    isClientSideRendering: environment.isClientSideRendering,
-   volume,
    loop: loop ?? false
- }).then(({ audio }) => {
-   …
+ }).then(({ audio, durationInSeconds: assetDurationInSeconds }) => {
+   const volumePropsFrame = frameForVolumeProp({
+     behavior: loopVolumeCurveBehavior ?? "repeat",
+     loop: loop ?? false,
+     assetDurationInSeconds: assetDurationInSeconds ?? 0,
+     fps,
+     frame,
+     startsAt
+   });
+   const volume = Internals5.evaluateVolume({
+     volume: volumeProp,
+     frame: volumePropsFrame,
+     mediaVolume: 1
+   });
+   Internals5.warnAboutTooHighVolume(volume);
+   if (audio && volume > 0) {
+     applyVolume(audio.data, volume);
    registerRenderAsset({
      type: "inline-audio",
      id,
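Note: volume is now evaluated inside the .then() callback because, for looping media, the frame at which a volume curve is sampled depends on the asset duration that the extractor just reported. A worked example of the mapping (my own numbers, using the helpers shown earlier in this diff):

    const volumeFrame = frameForVolumeProp({
      behavior: "repeat",        // loopVolumeCurveBehavior
      loop: true,
      assetDurationInSeconds: 4, // reported by the extractor
      fps: 30,
      frame: 150,                // current frame within the sequence
      startsAt: 0,
    });
    // assetDurationInFrames = floor(4 * 30) - 0 = 120, so volumeFrame = 150 % 120 = 30
    // the volume prop is evaluated at frame 30 and baked into the samples via applyVolume()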
@@ -14202,13 +14254,15 @@ var AudioForRendering = ({
  frame,
  id,
  logLevel,
+ loop,
+ loopVolumeCurveBehavior,
+ muted,
  playbackRate,
  registerRenderAsset,
- shouldRenderAudio,
  src,
+ startsAt,
  unregisterRenderAsset,
- …
- loop
+ volumeProp
  ]);
  return null;
  };

@@ -14220,7 +14274,8 @@ var {
  resolveTrimProps,
  validateMediaProps,
  AudioForPreview
- } = …
+ } = Internals6;
+ var onRemotionError = (_e) => {};
  var Audio = (props) => {
  const audioContext = useContext5(SharedAudioContext);
  const {

@@ -14230,7 +14285,6 @@ var Audio = (props) => {
  pauseWhenBuffering,
  stack,
  showInTimeline,
- onError: onRemotionError,
  loop,
  ...otherProps
  } = props;

@@ -14266,7 +14320,7 @@ var Audio = (props) => {
  onRemotionError?.(new Error(errMessage));
  console.warn(errMessage);
  }
- }, [ …
+ }, [loop]);
  if (typeof trimBeforeValue !== "undefined" || typeof trimAfterValue !== "undefined") {
  return /* @__PURE__ */ jsx2(Sequence, {
    layout: "none",

@@ -14287,8 +14341,6 @@ var Audio = (props) => {
  });
  }
  const {
- onAutoPlayError,
- crossOrigin,
  delayRenderRetries,
  delayRenderTimeoutInMilliseconds,
  ...propsForPreview
@@ -14307,19 +14359,19 @@ var Audio = (props) => {
  };
  // src/video/video.tsx
  import { useCallback as useCallback3 } from "react";
- import { Internals as …
+ import { Internals as Internals8, Sequence as Sequence2, useRemotionEnvironment as useRemotionEnvironment5 } from "remotion";

  // src/video/video-for-rendering.tsx
  import {
    useContext as useContext6,
    useLayoutEffect as useLayoutEffect2,
-   useMemo as …
+   useMemo as useMemo3,
    useRef as useRef2,
    useState as useState4
  } from "react";
  import {
    cancelRender as cancelRender4,
-   Internals as …
+   Internals as Internals7,
    useCurrentFrame as useCurrentFrame2,
    useDelayRender as useDelayRender2,
    useRemotionEnvironment as useRemotionEnvironment4,

@@ -14340,36 +14392,18 @@ var VideoForRendering = ({
  style,
  className
  }) => {
- const absoluteFrame = Internals3.useTimelinePosition();
- const { fps } = useVideoConfig();
- const canvasRef = useRef2(null);
- const { registerRenderAsset, unregisterRenderAsset } = useContext6(Internals3.RenderAssetManager);
- const frame = useCurrentFrame2();
- const volumePropsFrame = Internals3.useFrameForVolumeProp(loopVolumeCurveBehavior ?? "repeat");
- const environment = useRemotionEnvironment4();
- const [id] = useState4(() => `${Math.random()}`.replace("0.", ""));
  if (!src) {
    throw new TypeError("No `src` was passed to <Video>.");
  }
- const …
- … (the remaining lines of this removed block are not rendered in the diff view)
- const …
- if (!window.remotion_audioEnabled) {
-   return false;
- }
- if (muted) {
-   return false;
- }
- if (volume <= 0) {
-   return false;
- }
- return true;
- }, [muted, volume]);
+ const frame = useCurrentFrame2();
+ const absoluteFrame = Internals7.useTimelinePosition();
+ const { fps } = useVideoConfig();
+ const { registerRenderAsset, unregisterRenderAsset } = useContext6(Internals7.RenderAssetManager);
+ const startsAt = Internals7.useMediaStartsAt();
+ const [id] = useState4(() => `${Math.random()}`.replace("0.", ""));
+ const environment = useRemotionEnvironment4();
  const { delayRender, continueRender } = useDelayRender2();
+ const canvasRef = useRef2(null);
  useLayoutEffect2(() => {
  if (!canvasRef.current) {
    return;

@@ -14381,17 +14415,30 @@ var VideoForRendering = ({
  retries: delayRenderRetries ?? undefined,
  timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined
  });
+ const shouldRenderAudio = (() => {
+   if (!window.remotion_audioEnabled) {
+     return false;
+   }
+   if (muted) {
+     return false;
+   }
+   return true;
+ })();
  extractFrameViaBroadcastChannel({
    src,
    timeInSeconds: timestamp,
    durationInSeconds,
+   playbackRate: playbackRate ?? 1,
    logLevel: logLevel ?? "info",
    includeAudio: shouldRenderAudio,
    includeVideo: window.remotion_videoEnabled,
    isClientSideRendering: environment.isClientSideRendering,
-   volume,
    loop: loop ?? false
- }).then(({ …
+ }).then(({
+   frame: imageBitmap,
+   audio,
+   durationInSeconds: assetDurationInSeconds
+ }) => {
  if (imageBitmap) {
    onVideoFrame?.(imageBitmap);
    const context = canvasRef.current?.getContext("2d");

@@ -14406,7 +14453,22 @@ var VideoForRendering = ({
  } else if (window.remotion_videoEnabled) {
    cancelRender4(new Error("No video frame found"));
  }
- …
+ const volumePropsFrame = frameForVolumeProp({
+   behavior: loopVolumeCurveBehavior ?? "repeat",
+   loop: loop ?? false,
+   assetDurationInSeconds: assetDurationInSeconds ?? 0,
+   fps,
+   frame,
+   startsAt
+ });
+ const volume = Internals7.evaluateVolume({
+   volume: volumeProp,
+   frame: volumePropsFrame,
+   mediaVolume: 1
+ });
+ Internals7.warnAboutTooHighVolume(volume);
+ if (audio && volume > 0) {
+   applyVolume(audio.data, volume);
  registerRenderAsset({
    type: "inline-audio",
    id,
@@ -14437,17 +14499,19 @@ var VideoForRendering = ({
  frame,
  id,
  logLevel,
+ loop,
+ loopVolumeCurveBehavior,
+ muted,
  onVideoFrame,
  playbackRate,
  registerRenderAsset,
- shouldRenderAudio,
  src,
+ startsAt,
  unregisterRenderAsset,
- …
- loop
+ volumeProp
  ]);
- const classNameValue = …
- return [ …
+ const classNameValue = useMemo3(() => {
+   return [Internals7.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals7.truthy).join(" ");
  }, [className]);
  return /* @__PURE__ */ jsx3("canvas", {
    ref: canvasRef,

@@ -14463,7 +14527,7 @@ var {
  resolveTrimProps: resolveTrimProps2,
  validateMediaProps: validateMediaProps2,
  VideoForPreview
- } = …
+ } = Internals8;
  var Video = (props) => {
  const {
    trimBefore,

@@ -14513,9 +14577,7 @@ var Video = (props) => {
  });
  }
  const {
- onAutoPlayError,
  onVideoFrame,
- crossOrigin,
  delayRenderRetries,
  delayRenderTimeoutInMilliseconds,
  ...propsForPreview

@@ -14527,9 +14589,7 @@ var Video = (props) => {
  onlyWarnForMediaSeekingError: true,
  pauseWhenBuffering: pauseWhenBuffering ?? false,
  showInTimeline: showInTimeline ?? true,
- onAutoPlayError: onAutoPlayError ?? undefined,
  onVideoFrame: onVideoFrame ?? null,
- crossOrigin,
  ...propsForPreview
  });
  };