@remotion/media 4.0.353 → 4.0.354
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/audio/audio-for-rendering.js +37 -27
- package/dist/audio/audio.js +6 -3
- package/dist/audio/props.d.ts +0 -5
- package/dist/audio-extraction/extract-audio.d.ts +6 -3
- package/dist/audio-extraction/extract-audio.js +16 -7
- package/dist/audio-for-rendering.d.ts +3 -0
- package/dist/audio-for-rendering.js +94 -0
- package/dist/audio.d.ts +3 -0
- package/dist/audio.js +60 -0
- package/dist/audiodata-to-array.d.ts +0 -0
- package/dist/audiodata-to-array.js +1 -0
- package/dist/convert-audiodata/apply-volume.d.ts +1 -0
- package/dist/convert-audiodata/apply-volume.js +17 -0
- package/dist/convert-audiodata/convert-audiodata.d.ts +2 -2
- package/dist/convert-audiodata/convert-audiodata.js +13 -7
- package/dist/convert-audiodata/data-types.d.ts +1 -0
- package/dist/convert-audiodata/data-types.js +22 -0
- package/dist/convert-audiodata/is-planar-format.d.ts +1 -0
- package/dist/convert-audiodata/is-planar-format.js +3 -0
- package/dist/convert-audiodata/log-audiodata.d.ts +1 -0
- package/dist/convert-audiodata/log-audiodata.js +8 -0
- package/dist/convert-audiodata/resample-audiodata.d.ts +1 -2
- package/dist/convert-audiodata/resample-audiodata.js +39 -18
- package/dist/convert-audiodata/trim-audiodata.d.ts +0 -0
- package/dist/convert-audiodata/trim-audiodata.js +1 -0
- package/dist/deserialized-audiodata.d.ts +15 -0
- package/dist/deserialized-audiodata.js +26 -0
- package/dist/esm/index.mjs +206 -120
- package/dist/extract-audio.d.ts +7 -0
- package/dist/extract-audio.js +98 -0
- package/dist/extract-frame-and-audio.d.ts +3 -2
- package/dist/extract-frame-and-audio.js +4 -3
- package/dist/extract-frame-via-broadcast-channel.d.ts +15 -0
- package/dist/extract-frame-via-broadcast-channel.js +104 -0
- package/dist/extract-frame.d.ts +27 -0
- package/dist/extract-frame.js +21 -0
- package/dist/extrct-audio.d.ts +7 -0
- package/dist/extrct-audio.js +94 -0
- package/dist/get-frames-since-keyframe.d.ts +22 -0
- package/dist/get-frames-since-keyframe.js +41 -0
- package/dist/keyframe-bank.d.ts +25 -0
- package/dist/keyframe-bank.js +120 -0
- package/dist/keyframe-manager.d.ts +23 -0
- package/dist/keyframe-manager.js +170 -0
- package/dist/looped-frame.d.ts +9 -0
- package/dist/looped-frame.js +10 -0
- package/dist/new-video-for-rendering.d.ts +3 -0
- package/dist/new-video-for-rendering.js +108 -0
- package/dist/new-video.d.ts +3 -0
- package/dist/new-video.js +37 -0
- package/dist/props.d.ts +29 -0
- package/dist/props.js +1 -0
- package/dist/remember-actual-matroska-timestamps.d.ts +4 -0
- package/dist/remember-actual-matroska-timestamps.js +19 -0
- package/dist/serialize-videoframe.d.ts +0 -0
- package/dist/serialize-videoframe.js +1 -0
- package/dist/video/media-player.d.ts +62 -0
- package/dist/video/media-player.js +361 -0
- package/dist/video/new-video-for-preview.d.ts +10 -0
- package/dist/video/new-video-for-preview.js +108 -0
- package/dist/video/props.d.ts +0 -5
- package/dist/video/timeout-utils.d.ts +2 -0
- package/dist/video/timeout-utils.js +18 -0
- package/dist/video/video-for-preview.d.ts +11 -0
- package/dist/video/video-for-preview.js +113 -0
- package/dist/video/video-for-rendering.js +41 -31
- package/dist/video/video.js +2 -2
- package/dist/video-extraction/extract-frame-via-broadcast-channel.d.ts +3 -2
- package/dist/video-extraction/extract-frame-via-broadcast-channel.js +9 -5
- package/dist/video-extraction/extract-frame.js +3 -0
- package/dist/video-extraction/get-frames-since-keyframe.d.ts +1 -1
- package/dist/video-extraction/get-frames-since-keyframe.js +6 -7
- package/dist/video-extraction/media-player.d.ts +64 -0
- package/dist/video-extraction/media-player.js +501 -0
- package/dist/video-extraction/new-video-for-preview.d.ts +10 -0
- package/dist/video-extraction/new-video-for-preview.js +114 -0
- package/dist/video-for-rendering.d.ts +3 -0
- package/dist/video-for-rendering.js +108 -0
- package/dist/video.d.ts +3 -0
- package/dist/video.js +37 -0
- package/package.json +3 -3

@@ -0,0 +1,26 @@
+import { getDataTypeForAudioFormat } from './convert-audiodata/data-types';
+import { isPlanarFormat } from './convert-audiodata/is-planar-format';
+export const turnAudioDataIntoSerializableData = (audioData) => {
+    if (!audioData.format) {
+        throw new Error('AudioData format is not set');
+    }
+    const DataType = getDataTypeForAudioFormat(audioData.format);
+    const isPlanar = isPlanarFormat(audioData.format);
+    const planes = isPlanar ? audioData.numberOfChannels : 1;
+    const srcChannels = new Array(planes)
+        .fill(true)
+        .map(() => new DataType((isPlanar ? 1 : audioData.numberOfChannels) *
+        audioData.numberOfFrames));
+    for (let i = 0; i < planes; i++) {
+        audioData.copyTo(srcChannels[i], {
+            planeIndex: i,
+        });
+    }
+    return {
+        data: srcChannels,
+        format: audioData.format,
+        numberOfChannels: audioData.numberOfChannels,
+        numberOfFrames: audioData.numberOfFrames,
+        sampleRate: audioData.sampleRate,
+    };
+};
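
By the line counts in the list above, this 26-line hunk appears to be the new package/dist/deserialized-audiodata.js (+26): it copies every plane of a WebCodecs AudioData into plain typed arrays together with the metadata needed to rebuild the audio later. A minimal usage sketch, assuming a browser context with WebCodecs; the AudioData and BroadcastChannel wiring below is illustrative and not taken from the package:

// Interleaved 'f32' AudioData: a single plane of numberOfFrames * numberOfChannels samples.
const audioData = new AudioData({
  format: 'f32',
  sampleRate: 48000,
  numberOfFrames: 480,
  numberOfChannels: 2,
  timestamp: 0,
  data: new Float32Array(480 * 2),
});

// `serializable.data` holds plain typed arrays (one per plane for planar formats)
// that can be posted to another tab via structured cloning.
const serializable = turnAudioDataIntoSerializableData(audioData);
new BroadcastChannel('remotion-media-example').postMessage(serializable);
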
package/dist/esm/index.mjs
CHANGED
@@ -42,7 +42,7 @@ import { jsx, jsxs } from "react/jsx-runtime";
 var SharedAudioContext = createContext2(null);
 
 // src/audio/audio-for-rendering.tsx
-import { useContext as useContext4, useLayoutEffect,
+import { useContext as useContext4, useLayoutEffect, useState as useState3 } from "react";
 import {
   cancelRender as cancelRender2,
   Internals as Internals5,
@@ -51,6 +51,42 @@ import {
   useRemotionEnvironment as useRemotionEnvironment2
 } from "remotion";
 
+// src/convert-audiodata/apply-volume.ts
+var applyVolume = (array, volume) => {
+  if (volume === 1) {
+    return;
+  }
+  for (let i = 0;i < array.length; i++) {
+    const newValue = array[i] * volume;
+    if (newValue < -32768) {
+      array[i] = -32768;
+    } else if (newValue > 32767) {
+      array[i] = 32767;
+    } else {
+      array[i] = newValue;
+    }
+  }
+};
+
+// src/looped-frame.ts
+var frameForVolumeProp = ({
+  behavior,
+  loop,
+  assetDurationInSeconds,
+  fps,
+  frame,
+  startsAt
+}) => {
+  if (!loop) {
+    return frame + startsAt;
+  }
+  if (behavior === "extend") {
+    return frame + startsAt;
+  }
+  const assetDurationInFrames = Math.floor(assetDurationInSeconds * fps) - startsAt;
+  return frame % assetDurationInFrames + startsAt;
+};
+
 // src/caches.ts
 import { cancelRender, Internals as Internals4 } from "remotion";
 
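
Two helpers are inlined into the bundle here: applyVolume scales samples in place and clamps them to the signed 16-bit range (matching the s16 PCM the rendering pipeline produces), and frameForVolumeProp picks the frame at which a volume callback is evaluated once looping is taken into account. A small illustrative check; the concrete numbers are mine, not from the diff:

// Int16Array assignments wrap on overflow, which is why applyVolume clamps first.
const samples = new Int16Array([1000, -20000, 30000]);
applyVolume(samples, 2);
// samples is now Int16Array [2000, -32768, 32767]: the last two values are clamped.

// With loop enabled and the default "repeat" behavior, a 2-second asset at 30 fps
// (60 frames) maps frame 75 back into the asset: 75 % 60 + 0 === 15.
frameForVolumeProp({
  behavior: 'repeat',
  loop: true,
  assetDurationInSeconds: 2,
  fps: 30,
  frame: 75,
  startsAt: 0,
}); // -> 15
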
@@ -13406,16 +13442,13 @@ var getSinks = async (src) => {
   });
   const format = await input.getFormat();
   const videoTrack = await input.getPrimaryVideoTrack();
-  if (!videoTrack) {
-    throw new Error(`No video track found for ${src}`);
-  }
   const audioTrack = await input.getPrimaryAudioTrack();
   const isMatroska = format === MATROSKA;
   return {
-    video: {
+    video: videoTrack ? {
       sampleSink: new VideoSampleSink(videoTrack),
       packetSink: new EncodedPacketSink(videoTrack)
-    },
+    } : null,
     audio: audioTrack ? {
       sampleSink: new AudioSampleSink(audioTrack)
     } : null,
@@ -13715,36 +13748,53 @@ var combineAudioDataAndClosePrevious = (audioDataArray) => {
 // src/convert-audiodata/resample-audiodata.ts
 var TARGET_NUMBER_OF_CHANNELS = 2;
 var TARGET_SAMPLE_RATE = 48000;
+var fixFloatingPoint = (value) => {
+  if (value % 1 < 0.0000001) {
+    return Math.floor(value);
+  }
+  if (value % 1 > 0.9999999) {
+    return Math.ceil(value);
+  }
+  return value;
+};
 var resampleAudioData = ({
   srcNumberOfChannels,
   sourceChannels,
   destination,
   targetFrames,
-  chunkSize
-  volume
+  chunkSize
 }) => {
-  const getSourceValues = (
-  const
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  const getSourceValues = (startUnfixed, endUnfixed, channelIndex) => {
+    const start = fixFloatingPoint(startUnfixed);
+    const end = fixFloatingPoint(endUnfixed);
+    const startFloor = Math.floor(start);
+    const startCeil = Math.ceil(start);
+    const startFraction = start - startFloor;
+    const endFraction = end - Math.floor(end);
+    const endFloor = Math.floor(end);
+    let weightedSum = 0;
+    let totalWeight = 0;
+    if (startFraction > 0) {
+      const firstSample = sourceChannels[startFloor * srcNumberOfChannels + channelIndex];
+      weightedSum += firstSample * (1 - startFraction);
+      totalWeight += 1 - startFraction;
+    }
+    for (let k = startCeil;k < endFloor; k++) {
+      const num = sourceChannels[k * srcNumberOfChannels + channelIndex];
+      weightedSum += num;
+      totalWeight += 1;
+    }
+    if (endFraction > 0) {
+      const lastSample = sourceChannels[endFloor * srcNumberOfChannels + channelIndex];
+      weightedSum += lastSample * endFraction;
+      totalWeight += endFraction;
+    }
+    const average = weightedSum / totalWeight;
+    return average;
   };
   for (let newFrameIndex = 0;newFrameIndex < targetFrames; newFrameIndex++) {
-    const start =
-    const end =
+    const start = newFrameIndex * chunkSize;
+    const end = start + chunkSize;
     if (TARGET_NUMBER_OF_CHANNELS === srcNumberOfChannels) {
       for (let i = 0;i < srcNumberOfChannels; i++) {
         destination[newFrameIndex * srcNumberOfChannels + i] = getSourceValues(start, end, i);
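
The rewritten getSourceValues computes an area-weighted average of every source sample overlapping the window [start, end), with fixFloatingPoint snapping values within 1e-7 of an integer so float drift does not shift the window boundaries. Worked numbers, chosen by me for illustration: downsampling a mono source [100, 200, 300] from 3 frames to 2 gives chunkSize = 1.5, so:

// target frame 0 covers [0, 1.5): all of sample 0 plus half of sample 1
//   (100 * 1 + 200 * 0.5) / 1.5 = 133.33...
// target frame 1 covers [1.5, 3): half of sample 1 plus all of sample 2
//   (200 * 0.5 + 300 * 1) / 1.5 = 266.66...
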
@@ -13784,13 +13834,19 @@ var resampleAudioData = ({
 
 // src/convert-audiodata/convert-audiodata.ts
 var FORMAT = "s16";
+var roundButRoundDownZeroPointFive = (value) => {
+  if (value % 1 <= 0.5) {
+    return Math.floor(value);
+  }
+  return Math.ceil(value);
+};
 var convertAudioData = ({
   audioData,
   newSampleRate,
   trimStartInSeconds,
   trimEndInSeconds,
   targetNumberOfChannels,
-
+  playbackRate
 }) => {
   const {
     numberOfChannels: srcNumberOfChannels,
@@ -13798,9 +13854,10 @@ var convertAudioData = ({
     numberOfFrames
   } = audioData;
   const ratio = currentSampleRate / newSampleRate;
-  const frameOffset =
-  const
-  const
+  const frameOffset = roundButRoundDownZeroPointFive(trimStartInSeconds * audioData.sampleRate);
+  const unroundedFrameCount = numberOfFrames - (trimEndInSeconds + trimStartInSeconds) * audioData.sampleRate;
+  const frameCount = Math.round(unroundedFrameCount);
+  const newNumberOfFrames = Math.round(unroundedFrameCount / ratio / playbackRate);
   if (newNumberOfFrames === 0) {
     throw new Error("Cannot resample - the given sample rate would result in less than 1 sample");
   }
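
convertAudioData now divides by playbackRate when sizing its output, so faster playback yields proportionally fewer frames at the target sample rate. Illustrative arithmetic (the numbers are mine, not from the diff):

// 1 s of source audio at 44100 Hz, converted to 48000 Hz at playbackRate = 2:
//   ratio             = 44100 / 48000
//   newNumberOfFrames = Math.round(44100 / ratio / 2) = Math.round(48000 / 2) = 24000
// i.e. half a second of output audio, which is what a 2x-speed render consumes
// per second of source.
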
@@ -13816,7 +13873,7 @@ var convertAudioData = ({
   });
   const data = new Int16Array(newNumberOfFrames * targetNumberOfChannels);
   const chunkSize = frameCount / newNumberOfFrames;
-  if (newNumberOfFrames === frameCount && targetNumberOfChannels === srcNumberOfChannels &&
+  if (newNumberOfFrames === frameCount && targetNumberOfChannels === srcNumberOfChannels && playbackRate === 1) {
     return {
       data: srcChannels,
       numberOfChannels: targetNumberOfChannels,
@@ -13830,8 +13887,7 @@ var convertAudioData = ({
     sourceChannels: srcChannels,
     destination: data,
     targetFrames: newNumberOfFrames,
-    chunkSize
-    volume
+    chunkSize
   });
   const newAudioData = {
     data,
@@ -13856,6 +13912,9 @@ var extractFrame = async ({
     sinkPromises[src] = getSinks(src);
   }
   const { video, getDuration } = await sinkPromises[src];
+  if (video === null) {
+    throw new Error(`No video track found for ${src}`);
+  }
   const timeInSeconds = loop ? unloopedTimeinSeconds % await getDuration() : unloopedTimeinSeconds;
   const keyframeBank = await keyframeManager.requestKeyframeBank({
     packetSink: video.packetSink,
@@ -13873,18 +13932,22 @@ var extractAudio = async ({
   src,
   timeInSeconds: unloopedTimeInSeconds,
   durationInSeconds,
-  volume,
   logLevel,
-  loop
+  loop,
+  playbackRate
 }) => {
   if (!sinkPromises[src]) {
     sinkPromises[src] = getSinks(src);
   }
   const { audio, actualMatroskaTimestamps, isMatroska, getDuration } = await sinkPromises[src];
+  let duration = null;
+  if (loop) {
+    duration = await getDuration();
+  }
   if (audio === null) {
-    return null;
+    return { data: null, durationInSeconds: null };
   }
-  const timeInSeconds = loop ? unloopedTimeInSeconds %
+  const timeInSeconds = loop ? unloopedTimeInSeconds % duration : unloopedTimeInSeconds;
   const sampleIterator = await audioManager.getIterator({
     src,
     timeInSeconds,
@@ -13911,6 +13974,12 @@ var extractAudio = async ({
     let trimEndInSeconds = 0;
     if (isFirstSample) {
       trimStartInSeconds = timeInSeconds - sample.timestamp;
+      if (trimStartInSeconds < 0 && trimStartInSeconds > -0.0000000001) {
+        trimStartInSeconds = 0;
+      }
+      if (trimStartInSeconds < 0) {
+        throw new Error(`trimStartInSeconds is negative: ${trimStartInSeconds}`);
+      }
     }
     if (isLastSample) {
       trimEndInSeconds = Math.max(0, sample.timestamp + sample.duration - (timeInSeconds + durationInSeconds));
@@ -13921,7 +13990,7 @@ var extractAudio = async ({
       trimStartInSeconds,
       trimEndInSeconds,
       targetNumberOfChannels: TARGET_NUMBER_OF_CHANNELS,
-
+      playbackRate
     });
     audioDataRaw.close();
     if (audioData.numberOfFrames === 0) {
@@ -13930,10 +13999,10 @@ var extractAudio = async ({
     audioDataArray.push(audioData);
   }
   if (audioDataArray.length === 0) {
-    return null;
+    return { data: null, durationInSeconds: duration };
   }
   const combined = combineAudioDataAndClosePrevious(audioDataArray);
-  return combined;
+  return { data: combined, durationInSeconds: duration };
 };
 
 // src/extract-frame-and-audio.ts
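
The internal extractAudio now resolves to an object instead of the bare PCM chunk, so the component layer learns the asset duration (resolved only when loop is true) in the same round trip. A hedged sketch of consuming the new shape; the type alias is mine, and the payload is presumably the PcmS16AudioData produced by combineAudioDataAndClosePrevious:

type ExtractAudioResult = {
  data: PcmS16AudioData | null;     // combined, resampled s16 audio, or null if the file has no audio track
  durationInSeconds: number | null; // asset duration, only fetched when loop is true
};

const { data, durationInSeconds }: ExtractAudioResult = await extractAudio({
  src: 'https://example.com/music.mp3', // illustrative URL
  timeInSeconds: 0,
  durationInSeconds: 1 / 30,            // one video frame's worth of audio at 30 fps
  logLevel: 'info',
  loop: true,
  playbackRate: 1,
});
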
@@ -13942,9 +14011,9 @@ var extractFrameAndAudio = async ({
   timeInSeconds,
   logLevel,
   durationInSeconds,
+  playbackRate,
   includeAudio,
   includeVideo,
-  volume,
   loop
 }) => {
   const [frame, audio] = await Promise.all([
@@ -13958,14 +14027,15 @@ var extractFrameAndAudio = async ({
       src,
       timeInSeconds,
       durationInSeconds,
-      volume,
       logLevel,
-      loop
+      loop,
+      playbackRate
     }) : null
   ]);
   return {
     frame: frame?.toVideoFrame() ?? null,
-    audio
+    audio: audio?.data ?? null,
+    durationInSeconds: audio?.durationInSeconds ?? null
   };
 };
 
@@ -13975,14 +14045,14 @@ if (window.remotion_broadcastChannel && window.remotion_isMainTab) {
     const data = event.data;
     if (data.type === "request") {
       try {
-        const { frame, audio } = await extractFrameAndAudio({
+        const { frame, audio, durationInSeconds } = await extractFrameAndAudio({
           src: data.src,
           timeInSeconds: data.timeInSeconds,
           logLevel: data.logLevel,
           durationInSeconds: data.durationInSeconds,
+          playbackRate: data.playbackRate,
           includeAudio: data.includeAudio,
           includeVideo: data.includeVideo,
-          volume: data.volume,
           loop: data.loop
         });
         const videoFrame = frame;
@@ -13994,7 +14064,8 @@ if (window.remotion_broadcastChannel && window.remotion_isMainTab) {
           type: "response-success",
           id: data.id,
           frame: imageBitmap,
-          audio
+          audio,
+          durationInSeconds: durationInSeconds ?? null
         };
         window.remotion_broadcastChannel.postMessage(response);
         videoFrame?.close();
@@ -14016,10 +14087,10 @@ var extractFrameViaBroadcastChannel = ({
   timeInSeconds,
   logLevel,
   durationInSeconds,
+  playbackRate,
   includeAudio,
   includeVideo,
   isClientSideRendering,
-  volume,
   loop
 }) => {
   if (isClientSideRendering || window.remotion_isMainTab) {
@@ -14028,9 +14099,9 @@ var extractFrameViaBroadcastChannel = ({
       src,
       timeInSeconds,
       durationInSeconds,
+      playbackRate,
      includeAudio,
      includeVideo,
-      volume,
      loop
    });
  }
@@ -14044,7 +14115,8 @@ var extractFrameViaBroadcastChannel = ({
     if (data.type === "response-success" && data.id === requestId) {
       resolve({
         frame: data.frame ? data.frame : null,
-        audio: data.audio ? data.audio : null
+        audio: data.audio ? data.audio : null,
+        durationInSeconds: data.durationInSeconds ? data.durationInSeconds : null
       });
       window.remotion_broadcastChannel.removeEventListener("message", onMessage);
     } else if (data.type === "response-error" && data.id === requestId) {
@@ -14061,9 +14133,9 @@ var extractFrameViaBroadcastChannel = ({
     id: requestId,
     logLevel,
     durationInSeconds,
+    playbackRate,
     includeAudio,
     includeVideo,
-    volume,
     loop
   };
   window.remotion_broadcastChannel.postMessage(request);
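
Taken together, these hunks adjust the broadcast-channel protocol between the render tabs: the request now carries playbackRate and no longer carries volume, and the success response gains durationInSeconds. A hedged sketch of the message shapes implied by the diff; the type names are mine, and fields not visible in the hunks are omitted or guessed:

type ExtractRequest = {
  type: 'request';
  id: string;
  src: string;
  timeInSeconds: number;
  durationInSeconds: number;
  playbackRate: number; // new in 4.0.354
  logLevel: string;
  includeAudio: boolean;
  includeVideo: boolean;
  loop: boolean;
  // `volume` was removed: gain is now applied by the requesting component via applyVolume.
};

type ExtractResponseSuccess = {
  type: 'response-success';
  id: string;
  frame: ImageBitmap | null;        // assumed type, matching the `imageBitmap` variable name
  audio: PcmS16AudioData | null;    // assumed type
  durationInSeconds: number | null; // new: lets the caller derive looped volume frames
};
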
@@ -14093,11 +14165,11 @@ var AudioForRendering = ({
   logLevel = window.remotion_logLevel,
   loop
 }) => {
+  const frame = useCurrentFrame();
   const absoluteFrame = Internals5.useTimelinePosition();
   const videoConfig = Internals5.useUnsafeVideoConfig();
   const { registerRenderAsset, unregisterRenderAsset } = useContext4(Internals5.RenderAssetManager);
-  const
-  const volumePropsFrame = Internals5.useFrameForVolumeProp(loopVolumeCurveBehavior ?? "repeat");
+  const startsAt = Internals5.useMediaStartsAt();
   const environment = useRemotionEnvironment2();
   const [id] = useState3(() => `${Math.random()}`.replace("0.", ""));
   if (!videoConfig) {
@@ -14106,24 +14178,6 @@ var AudioForRendering = ({
   if (!src) {
     throw new TypeError("No `src` was passed to <Audio>.");
   }
-  const volume = Internals5.evaluateVolume({
-    volume: volumeProp,
-    frame: volumePropsFrame,
-    mediaVolume: 1
-  });
-  Internals5.warnAboutTooHighVolume(volume);
-  const shouldRenderAudio = useMemo3(() => {
-    if (!window.remotion_audioEnabled) {
-      return false;
-    }
-    if (muted) {
-      return false;
-    }
-    if (volume <= 0) {
-      return false;
-    }
-    return true;
-  }, [muted, volume]);
   const { fps } = videoConfig;
   const { delayRender, continueRender } = useDelayRender();
   useLayoutEffect(() => {
@@ -14134,18 +14188,42 @@ var AudioForRendering = ({
       retries: delayRenderRetries ?? undefined,
       timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined
     });
+    const shouldRenderAudio = (() => {
+      if (!window.remotion_audioEnabled) {
+        return false;
+      }
+      if (muted) {
+        return false;
+      }
+      return true;
+    })();
     extractFrameViaBroadcastChannel({
       src,
       timeInSeconds: timestamp,
       durationInSeconds,
+      playbackRate: playbackRate ?? 1,
       logLevel: logLevel ?? "info",
       includeAudio: shouldRenderAudio,
       includeVideo: false,
       isClientSideRendering: environment.isClientSideRendering,
-      volume,
       loop: loop ?? false
-    }).then(({ audio }) => {
-
+    }).then(({ audio, durationInSeconds: assetDurationInSeconds }) => {
+      const volumePropsFrame = frameForVolumeProp({
+        behavior: loopVolumeCurveBehavior ?? "repeat",
+        loop: loop ?? false,
+        assetDurationInSeconds: assetDurationInSeconds ?? 0,
+        fps,
+        frame,
+        startsAt
+      });
+      const volume = Internals5.evaluateVolume({
+        volume: volumeProp,
+        frame: volumePropsFrame,
+        mediaVolume: 1
+      });
+      Internals5.warnAboutTooHighVolume(volume);
+      if (audio && volume > 0) {
+        applyVolume(audio.data, volume);
         registerRenderAsset({
           type: "inline-audio",
           id,
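
The practical effect in <Audio> during rendering: volume is no longer evaluated before extraction (and can no longer gate includeAudio), because the frame used for a looped volume curve depends on the asset duration that only arrives with the response; the gain is instead baked into the returned PCM. A condensed sketch of the new ordering, illustrative rather than verbatim:

const { audio, durationInSeconds: assetDurationInSeconds } =
  await extractFrameViaBroadcastChannel({ /* request without a volume field */ });
const volume = Internals5.evaluateVolume({
  volume: volumeProp,
  frame: frameForVolumeProp({
    behavior: loopVolumeCurveBehavior ?? 'repeat',
    loop: loop ?? false,
    assetDurationInSeconds: assetDurationInSeconds ?? 0,
    fps,
    frame,
    startsAt,
  }),
  mediaVolume: 1,
});
if (audio && volume > 0) {
  applyVolume(audio.data, volume); // mutate the s16 samples in place before registering the asset
}
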
@@ -14176,13 +14254,15 @@ var AudioForRendering = ({
     frame,
     id,
     logLevel,
+    loop,
+    loopVolumeCurveBehavior,
+    muted,
     playbackRate,
     registerRenderAsset,
-    shouldRenderAudio,
     src,
+    startsAt,
     unregisterRenderAsset,
-
-    loop
+    volumeProp
   ]);
   return null;
 };
@@ -14195,6 +14275,7 @@ var {
   validateMediaProps,
   AudioForPreview
 } = Internals6;
+var onRemotionError = (_e) => {};
 var Audio = (props) => {
   const audioContext = useContext5(SharedAudioContext);
   const {
@@ -14204,7 +14285,6 @@ var Audio = (props) => {
     pauseWhenBuffering,
     stack,
     showInTimeline,
-    onError: onRemotionError,
     loop,
     ...otherProps
   } = props;
@@ -14240,7 +14320,7 @@ var Audio = (props) => {
       onRemotionError?.(new Error(errMessage));
       console.warn(errMessage);
     }
-  }, [
+  }, [loop]);
   if (typeof trimBeforeValue !== "undefined" || typeof trimAfterValue !== "undefined") {
     return /* @__PURE__ */ jsx2(Sequence, {
       layout: "none",
@@ -14261,8 +14341,6 @@ var Audio = (props) => {
     });
   }
   const {
-    onAutoPlayError,
-    crossOrigin,
     delayRenderRetries,
     delayRenderTimeoutInMilliseconds,
     ...propsForPreview
@@ -14287,7 +14365,7 @@ import { Internals as Internals8, Sequence as Sequence2, useRemotionEnvironment
 import {
   useContext as useContext6,
   useLayoutEffect as useLayoutEffect2,
-  useMemo as
+  useMemo as useMemo3,
   useRef as useRef2,
   useState as useState4
 } from "react";
@@ -14314,36 +14392,18 @@ var VideoForRendering = ({
   style,
   className
 }) => {
+  if (!src) {
+    throw new TypeError("No `src` was passed to <Video>.");
+  }
+  const frame = useCurrentFrame2();
   const absoluteFrame = Internals7.useTimelinePosition();
   const { fps } = useVideoConfig();
-  const canvasRef = useRef2(null);
   const { registerRenderAsset, unregisterRenderAsset } = useContext6(Internals7.RenderAssetManager);
-  const
-  const volumePropsFrame = Internals7.useFrameForVolumeProp(loopVolumeCurveBehavior ?? "repeat");
-  const environment = useRemotionEnvironment4();
+  const startsAt = Internals7.useMediaStartsAt();
   const [id] = useState4(() => `${Math.random()}`.replace("0.", ""));
-
-    throw new TypeError("No `src` was passed to <Video>.");
-  }
-  const volume = Internals7.evaluateVolume({
-    volume: volumeProp,
-    frame: volumePropsFrame,
-    mediaVolume: 1
-  });
-  Internals7.warnAboutTooHighVolume(volume);
-  const shouldRenderAudio = useMemo4(() => {
-    if (!window.remotion_audioEnabled) {
-      return false;
-    }
-    if (muted) {
-      return false;
-    }
-    if (volume <= 0) {
-      return false;
-    }
-    return true;
-  }, [muted, volume]);
+  const environment = useRemotionEnvironment4();
   const { delayRender, continueRender } = useDelayRender2();
+  const canvasRef = useRef2(null);
   useLayoutEffect2(() => {
     if (!canvasRef.current) {
       return;
@@ -14355,17 +14415,30 @@ var VideoForRendering = ({
       retries: delayRenderRetries ?? undefined,
       timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined
     });
+    const shouldRenderAudio = (() => {
+      if (!window.remotion_audioEnabled) {
+        return false;
+      }
+      if (muted) {
+        return false;
+      }
+      return true;
+    })();
     extractFrameViaBroadcastChannel({
       src,
       timeInSeconds: timestamp,
       durationInSeconds,
+      playbackRate: playbackRate ?? 1,
       logLevel: logLevel ?? "info",
       includeAudio: shouldRenderAudio,
       includeVideo: window.remotion_videoEnabled,
       isClientSideRendering: environment.isClientSideRendering,
-      volume,
       loop: loop ?? false
-    }).then(({
+    }).then(({
+      frame: imageBitmap,
+      audio,
+      durationInSeconds: assetDurationInSeconds
+    }) => {
       if (imageBitmap) {
         onVideoFrame?.(imageBitmap);
         const context = canvasRef.current?.getContext("2d");
@@ -14380,7 +14453,22 @@ var VideoForRendering = ({
       } else if (window.remotion_videoEnabled) {
         cancelRender4(new Error("No video frame found"));
       }
-
+      const volumePropsFrame = frameForVolumeProp({
+        behavior: loopVolumeCurveBehavior ?? "repeat",
+        loop: loop ?? false,
+        assetDurationInSeconds: assetDurationInSeconds ?? 0,
+        fps,
+        frame,
+        startsAt
+      });
+      const volume = Internals7.evaluateVolume({
+        volume: volumeProp,
+        frame: volumePropsFrame,
+        mediaVolume: 1
+      });
+      Internals7.warnAboutTooHighVolume(volume);
+      if (audio && volume > 0) {
+        applyVolume(audio.data, volume);
         registerRenderAsset({
           type: "inline-audio",
           id,
@@ -14411,16 +14499,18 @@ var VideoForRendering = ({
     frame,
     id,
     logLevel,
+    loop,
+    loopVolumeCurveBehavior,
+    muted,
     onVideoFrame,
     playbackRate,
     registerRenderAsset,
-    shouldRenderAudio,
     src,
+    startsAt,
     unregisterRenderAsset,
-
-    loop
+    volumeProp
   ]);
-  const classNameValue =
+  const classNameValue = useMemo3(() => {
     return [Internals7.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals7.truthy).join(" ");
   }, [className]);
   return /* @__PURE__ */ jsx3("canvas", {
@@ -14487,9 +14577,7 @@ var Video = (props) => {
     });
   }
   const {
-    onAutoPlayError,
     onVideoFrame,
-    crossOrigin,
     delayRenderRetries,
     delayRenderTimeoutInMilliseconds,
     ...propsForPreview
@@ -14501,9 +14589,7 @@ var Video = (props) => {
     onlyWarnForMediaSeekingError: true,
     pauseWhenBuffering: pauseWhenBuffering ?? false,
     showInTimeline: showInTimeline ?? true,
-    onAutoPlayError: onAutoPlayError ?? undefined,
     onVideoFrame: onVideoFrame ?? null,
-    crossOrigin,
     ...propsForPreview
   });
 };

@@ -0,0 +1,7 @@
+import type { PcmS16AudioData } from './convert-audiodata/convert-audiodata';
+export declare const extractAudio: ({ src, timeInSeconds, durationInSeconds, volume, }: {
+    src: string;
+    timeInSeconds: number;
+    durationInSeconds: number;
+    volume: number;
+}) => Promise<PcmS16AudioData | null>;
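
This final 7-line hunk is one of the new extract-audio .d.ts files from the list above; note that it still declares the older volume-based signature rather than the playbackRate-based shape used inside the bundle. Calling the function as declared would look like this (illustrative values, assuming the declaration above is in scope):

const audio = await extractAudio({
  src: 'https://example.com/music.mp3', // illustrative URL
  timeInSeconds: 0,
  durationInSeconds: 1 / 30,
  volume: 1,
}); // resolves to PcmS16AudioData | null per the declaration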