@remotion/media 4.0.351 → 4.0.352
- package/dist/audio-extraction/audio-cache.js +4 -0
- package/dist/audio-extraction/audio-iterator.js +5 -2
- package/dist/audio-extraction/audio-manager.d.ts +2 -1
- package/dist/audio-extraction/audio-manager.js +4 -3
- package/dist/audio-extraction/extract-audio.js +1 -0
- package/dist/caches.d.ts +4 -2
- package/dist/caches.js +38 -1
- package/dist/esm/index.mjs +75 -23
- package/dist/video/media-player.d.ts +64 -0
- package/dist/video/media-player.js +501 -0
- package/dist/video/new-video-for-preview.d.ts +10 -0
- package/dist/video/new-video-for-preview.js +114 -0
- package/dist/video/props.d.ts +1 -0
- package/dist/video/video-for-rendering.js +23 -9
- package/dist/video-extraction/keyframe-manager.js +3 -2
- package/dist/video-extraction/media-player.d.ts +64 -0
- package/dist/video-extraction/media-player.js +501 -0
- package/dist/video-extraction/new-video-for-preview.d.ts +10 -0
- package/dist/video-extraction/new-video-for-preview.js +114 -0
- package/package.json +3 -3
package/dist/audio-extraction/audio-cache.js
CHANGED

@@ -9,6 +9,10 @@ export const makeAudioCache = () => {
         for (const timestamp of timestamps) {
             const endTimestamp = timestamp + samples[timestamp].duration;
             if (endTimestamp < threshold) {
+                const isLast = timestamp === timestamps[timestamps.length - 1];
+                if (isLast) {
+                    continue;
+                }
                 samples[timestamp].close();
                 delete samples[timestamp];
                 timestamps.splice(timestamps.indexOf(timestamp), 1);

package/dist/audio-extraction/audio-iterator.js
CHANGED

@@ -41,10 +41,13 @@ export const makeAudioIterator = ({ audioSampleSink, isMatroska, startTimestamp,
         }
         const samples = cache.getSamples(timestamp, durationInSeconds);
         while (true) {
+            const sample = await getNextSample();
             // Clear all samples before the timestamp
             // Do this in the while loop because samples might start from 0
-            cache.clearBeforeThreshold(timestamp - SAFE_BACK_WINDOW_IN_SECONDS);
-            const sample = await getNextSample();
+            // Also do this after a sample has just been added, if it was the last sample we now have the duration
+            // and can prevent deleting the last sample
+            const deleteBefore = fullDuration === null ? timestamp : Math.min(timestamp, fullDuration);
+            cache.clearBeforeThreshold(deleteBefore - SAFE_BACK_WINDOW_IN_SECONDS);
             if (sample === null) {
                 break;
             }
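
Both audio hunks above serve the same goal: the cache can only learn the media's total duration from the end timestamp of the final decoded sample, so the trimming pass now refuses to evict that last sample, and the clear-before threshold is clamped to the known duration. A minimal TypeScript sketch of the pattern (the `samples`/`timestamps` shapes are simplified stand-ins, not the package's actual cache internals):

// Sketch only - simplified stand-ins for the real cache internals.
type CachedSample = {duration: number; close: () => void};

const clearBeforeThreshold = (
  samples: Record<number, CachedSample>,
  timestamps: number[],
  threshold: number,
) => {
  for (const timestamp of [...timestamps]) {
    const endTimestamp = timestamp + samples[timestamp].duration;
    // Never evict the newest sample: its end timestamp is the only place
    // the total duration can be read from.
    const isLast = timestamp === timestamps[timestamps.length - 1];
    if (endTimestamp < threshold && !isLast) {
      samples[timestamp].close();
      delete samples[timestamp];
      timestamps.splice(timestamps.indexOf(timestamp), 1);
    }
  }
};

// The iterator clamps the threshold to the known duration: seeking to 12s in a
// 10s file yields min(12, 10) - SAFE_BACK_WINDOW_IN_SECONDS, so the tail of the
// file survives the trim instead of being wiped.
const SAFE_BACK_WINDOW_IN_SECONDS = 1;
const deleteBefore = (timestamp: number, fullDuration: number | null): number =>
  (fullDuration === null ? timestamp : Math.min(timestamp, fullDuration)) -
  SAFE_BACK_WINDOW_IN_SECONDS;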

package/dist/audio-extraction/audio-manager.d.ts
CHANGED

@@ -22,12 +22,13 @@ export declare const makeAudioManager: () => {
         prepareForDeletion: () => Promise<void>;
         startTimestamp: number;
     };
-    getIterator: ({ src, timeInSeconds, audioSampleSink, isMatroska, actualMatroskaTimestamps, }: {
+    getIterator: ({ src, timeInSeconds, audioSampleSink, isMatroska, actualMatroskaTimestamps, logLevel, }: {
         src: string;
         timeInSeconds: number;
         audioSampleSink: AudioSampleSink;
         isMatroska: boolean;
         actualMatroskaTimestamps: RememberActualMatroskaTimestamps;
+        logLevel: LogLevel;
     }) => Promise<{
         src: string;
         getSamples: (ts: number, dur: number) => Promise<import("mediabunny").AudioSample[]>;

package/dist/audio-extraction/audio-manager.js
CHANGED

@@ -1,4 +1,4 @@
-import {
+import { getMaxVideoCacheSize, getTotalCacheStats } from '../caches';
 import { makeAudioIterator } from './audio-iterator';
 export const makeAudioManager = () => {
     const iterators = [];
@@ -32,8 +32,9 @@ export const makeAudioManager = () => {
             iterators.splice(iterators.indexOf(iterator), 1);
         }
     };
-    const getIterator = async ({ src, timeInSeconds, audioSampleSink, isMatroska, actualMatroskaTimestamps, }) => {
-
+    const getIterator = async ({ src, timeInSeconds, audioSampleSink, isMatroska, actualMatroskaTimestamps, logLevel, }) => {
+        const maxCacheSize = getMaxVideoCacheSize(logLevel);
+        while ((await getTotalCacheStats()).totalSize > maxCacheSize) {
            await deleteOldestIterator();
        }
        for (const iterator of iterators) {
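
`getIterator` now receives the log level so it can resolve the cache budget, then evicts the oldest iterators until the combined keyframe and audio cache fits. The back-pressure loop reduces to this generic shape (the callback names here are illustrative, not the package's helpers):

// Illustrative shape of the "evict until under budget" loop.
const stayUnderBudget = async (
  getTotalSizeInBytes: () => Promise<number>,
  evictOldest: () => Promise<void>,
  maxBytes: number,
): Promise<void> => {
  while ((await getTotalSizeInBytes()) > maxBytes) {
    await evictOldest();
  }
};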

package/dist/audio-extraction/extract-audio.js
CHANGED

@@ -21,6 +21,7 @@ export const extractAudio = async ({ src, timeInSeconds: unloopedTimeInSeconds,
         audioSampleSink: audio.sampleSink,
         isMatroska,
         actualMatroskaTimestamps,
+        logLevel,
     });
     const samples = await sampleIterator.getSamples(timeInSeconds, durationInSeconds);
     audioManager.logOpenFrames(logLevel);

package/dist/caches.d.ts
CHANGED
@@ -1,4 +1,4 @@
-
+import { type LogLevel } from 'remotion';
 export declare const SAFE_BACK_WINDOW_IN_SECONDS = 1;
 export declare const keyframeManager: {
     requestKeyframeBank: ({ packetSink, timestamp, videoSampleSink, src, logLevel, }: {
@@ -40,12 +40,13 @@ export declare const audioManager: {
         prepareForDeletion: () => Promise<void>;
         startTimestamp: number;
     };
-    getIterator: ({ src, timeInSeconds, audioSampleSink, isMatroska, actualMatroskaTimestamps, }: {
+    getIterator: ({ src, timeInSeconds, audioSampleSink, isMatroska, actualMatroskaTimestamps, logLevel, }: {
         src: string;
         timeInSeconds: number;
         audioSampleSink: import("mediabunny").AudioSampleSink;
         isMatroska: boolean;
         actualMatroskaTimestamps: import("./video-extraction/remember-actual-matroska-timestamps").RememberActualMatroskaTimestamps;
+        logLevel: import("./log").LogLevel;
     }) => Promise<{
         src: string;
         getSamples: (ts: number, dur: number) => Promise<import("mediabunny").AudioSample[]>;
@@ -84,3 +85,4 @@ export declare const getTotalCacheStats: () => Promise<{
     count: number;
     totalSize: number;
 }>;
+export declare const getMaxVideoCacheSize: (logLevel: LogLevel) => number;

package/dist/caches.js
CHANGED
@@ -1,6 +1,7 @@
+import { cancelRender } from 'remotion';
 import { makeAudioManager } from './audio-extraction/audio-manager';
+import { Log } from './log';
 import { makeKeyframeManager } from './video-extraction/keyframe-manager';
-export const MAX_CACHE_SIZE = 1000 * 1000 * 1000; // 1GB
 // TODO: make it dependent on the fps and concurrency
 export const SAFE_BACK_WINDOW_IN_SECONDS = 1;
 export const keyframeManager = makeKeyframeManager();
@@ -13,3 +14,39 @@ export const getTotalCacheStats = async () => {
         totalSize: keyframeManagerCacheStats.totalSize + audioManagerCacheStats.totalSize,
     };
 };
+const getUncachedMaxCacheSize = (logLevel) => {
+    if (window.remotion_mediaCacheSizeInBytes !== undefined &&
+        window.remotion_mediaCacheSizeInBytes !== null) {
+        if (window.remotion_mediaCacheSizeInBytes < 240 * 1024 * 1024) {
+            cancelRender(new Error(`The minimum value for the "mediaCacheSizeInBytes" prop is 240MB (${240 * 1024 * 1024}), got: ${window.remotion_mediaCacheSizeInBytes}`));
+        }
+        if (window.remotion_mediaCacheSizeInBytes > 20000 * 1024 * 1024) {
+            cancelRender(new Error(`The maximum value for the "mediaCacheSizeInBytes" prop is 20GB (${20000 * 1024 * 1024}), got: ${window.remotion_mediaCacheSizeInBytes}`));
+        }
+        Log.verbose(logLevel, `Using @remotion/media cache size set using "mediaCacheSizeInBytes": ${(window.remotion_mediaCacheSizeInBytes / 1024 / 1024).toFixed(1)} MB`);
+        return window.remotion_mediaCacheSizeInBytes;
+    }
+    if (window.remotion_initialMemoryAvailable !== undefined &&
+        window.remotion_initialMemoryAvailable !== null) {
+        const value = window.remotion_initialMemoryAvailable / 2;
+        if (value < 240 * 1024 * 1024) {
+            Log.verbose(logLevel, `Using @remotion/media cache size set based on minimum value of 240MB (which is more than half of the available system memory!)`);
+            return 240 * 1024 * 1024;
+        }
+        if (value > 20000 * 1024 * 1024) {
+            Log.verbose(logLevel, `Using @remotion/media cache size set based on maximum value of 20GB (which is less than half of the available system memory)`);
+            return 20000 * 1024 * 1024;
+        }
+        Log.verbose(logLevel, `Using @remotion/media cache size set based on available memory (50% of available memory): ${(value / 1024 / 1024).toFixed(1)} MB`);
+        return value;
+    }
+    return 1000 * 1000 * 1000; // 1GB
+};
+let cachedMaxCacheSize = null;
+export const getMaxVideoCacheSize = (logLevel) => {
+    if (cachedMaxCacheSize !== null) {
+        return cachedMaxCacheSize;
+    }
+    cachedMaxCacheSize = getUncachedMaxCacheSize(logLevel);
+    return cachedMaxCacheSize;
+};
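
In place of the fixed 1 GB `MAX_CACHE_SIZE`, the budget is now resolved once and memoized by `getMaxVideoCacheSize`: an explicit `window.remotion_mediaCacheSizeInBytes` override wins but must fall within 240 MB–20 GB, otherwise half of `window.remotion_initialMemoryAvailable` is used, clamped to the same range, with the old 1 GB value as the final fallback. A simplified restatement of that resolution order (the shipped code calls `cancelRender` and `Log.verbose` where this sketch throws):

// Simplified restatement of the new cache-budget resolution; not the shipped function.
const MIN_BYTES = 240 * 1024 * 1024; // 240 MB
const MAX_BYTES = 20000 * 1024 * 1024; // 20 GB

const resolveCacheBudget = (
  explicitBytes: number | null | undefined, // window.remotion_mediaCacheSizeInBytes
  availableMemoryBytes: number | null | undefined, // window.remotion_initialMemoryAvailable
): number => {
  if (explicitBytes !== undefined && explicitBytes !== null) {
    if (explicitBytes < MIN_BYTES || explicitBytes > MAX_BYTES) {
      // The real code aborts the render via cancelRender() here.
      throw new Error('"mediaCacheSizeInBytes" must be between 240MB and 20GB');
    }
    return explicitBytes;
  }
  if (availableMemoryBytes !== undefined && availableMemoryBytes !== null) {
    // Half of the available memory, clamped to [240 MB, 20 GB].
    return Math.min(MAX_BYTES, Math.max(MIN_BYTES, availableMemoryBytes / 2));
  }
  return 1000 * 1000 * 1000; // legacy 1 GB default
};
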
package/dist/esm/index.mjs
CHANGED
@@ -1,7 +1,7 @@
 // src/audio/audio.tsx
 import { useCallback as useCallback2, useContext as useContext5 } from "react";
 import {
-  cancelRender as
+  cancelRender as cancelRender3,
   Internals as Internals2,
   Sequence,
   useRemotionEnvironment as useRemotionEnvironment3
@@ -44,13 +44,16 @@ var SharedAudioContext = createContext2(null);
 // src/audio/audio-for-rendering.tsx
 import { useContext as useContext4, useLayoutEffect, useMemo as useMemo3, useState as useState3 } from "react";
 import {
-  cancelRender,
+  cancelRender as cancelRender2,
   Internals,
   useCurrentFrame,
   useDelayRender,
   useRemotionEnvironment as useRemotionEnvironment2
 } from "remotion";

+// src/caches.ts
+import { cancelRender } from "remotion";
+
 // src/log.ts
 var logLevels = ["trace", "verbose", "info", "warn", "error"];
 var getNumberForLogLevel = (level) => {
@@ -97,6 +100,10 @@ var makeAudioCache = () => {
     for (const timestamp of timestamps) {
       const endTimestamp = timestamp + samples[timestamp].duration;
       if (endTimestamp < threshold) {
+        const isLast = timestamp === timestamps[timestamps.length - 1];
+        if (isLast) {
+          continue;
+        }
         samples[timestamp].close();
         delete samples[timestamp];
         timestamps.splice(timestamps.indexOf(timestamp), 1);
@@ -182,8 +189,9 @@ var makeAudioIterator = ({
     }
     const samples = cache.getSamples(timestamp, durationInSeconds);
     while (true) {
-      cache.clearBeforeThreshold(timestamp - SAFE_BACK_WINDOW_IN_SECONDS);
       const sample = await getNextSample();
+      const deleteBefore = fullDuration === null ? timestamp : Math.min(timestamp, fullDuration);
+      cache.clearBeforeThreshold(deleteBefore - SAFE_BACK_WINDOW_IN_SECONDS);
       if (sample === null) {
         break;
       }
@@ -285,9 +293,11 @@ var makeAudioManager = () => {
     timeInSeconds,
     audioSampleSink,
     isMatroska,
-    actualMatroskaTimestamps
+    actualMatroskaTimestamps,
+    logLevel
   }) => {
-
+    const maxCacheSize = getMaxVideoCacheSize(logLevel);
+    while ((await getTotalCacheStats()).totalSize > maxCacheSize) {
       await deleteOldestIterator();
     }
     for (const iterator of iterators) {
@@ -13530,7 +13540,8 @@ var makeKeyframeManager = () => {
   };
   const ensureToStayUnderMaxCacheSize = async (logLevel) => {
     let cacheStats = await getTotalCacheStats();
-
+    const maxCacheSize = getMaxVideoCacheSize(logLevel);
+    while (cacheStats.totalSize > maxCacheSize) {
       await deleteOldestKeyframeBank(logLevel);
       cacheStats = await getTotalCacheStats();
     }
@@ -13642,7 +13653,6 @@ var makeKeyframeManager = () => {
 };

 // src/caches.ts
-var MAX_CACHE_SIZE = 1000 * 1000 * 1000;
 var SAFE_BACK_WINDOW_IN_SECONDS = 1;
 var keyframeManager = makeKeyframeManager();
 var audioManager = makeAudioManager();
@@ -13654,6 +13664,40 @@ var getTotalCacheStats = async () => {
     totalSize: keyframeManagerCacheStats.totalSize + audioManagerCacheStats.totalSize
   };
 };
+var getUncachedMaxCacheSize = (logLevel) => {
+  if (window.remotion_mediaCacheSizeInBytes !== undefined && window.remotion_mediaCacheSizeInBytes !== null) {
+    if (window.remotion_mediaCacheSizeInBytes < 240 * 1024 * 1024) {
+      cancelRender(new Error(`The minimum value for the "mediaCacheSizeInBytes" prop is 240MB (${240 * 1024 * 1024}), got: ${window.remotion_mediaCacheSizeInBytes}`));
+    }
+    if (window.remotion_mediaCacheSizeInBytes > 20000 * 1024 * 1024) {
+      cancelRender(new Error(`The maximum value for the "mediaCacheSizeInBytes" prop is 20GB (${20000 * 1024 * 1024}), got: ${window.remotion_mediaCacheSizeInBytes}`));
+    }
+    Log2.verbose(logLevel, `Using @remotion/media cache size set using "mediaCacheSizeInBytes": ${(window.remotion_mediaCacheSizeInBytes / 1024 / 1024).toFixed(1)} MB`);
+    return window.remotion_mediaCacheSizeInBytes;
+  }
+  if (window.remotion_initialMemoryAvailable !== undefined && window.remotion_initialMemoryAvailable !== null) {
+    const value = window.remotion_initialMemoryAvailable / 2;
+    if (value < 240 * 1024 * 1024) {
+      Log2.verbose(logLevel, `Using @remotion/media cache size set based on minimum value of 240MB (which is more than half of the available system memory!)`);
+      return 240 * 1024 * 1024;
+    }
+    if (value > 20000 * 1024 * 1024) {
+      Log2.verbose(logLevel, `Using @remotion/media cache size set based on maximum value of 20GB (which is less than half of the available system memory)`);
+      return 20000 * 1024 * 1024;
+    }
+    Log2.verbose(logLevel, `Using @remotion/media cache size set based on available memory (50% of available memory): ${(value / 1024 / 1024).toFixed(1)} MB`);
+    return value;
+  }
+  return 1000 * 1000 * 1000;
+};
+var cachedMaxCacheSize = null;
+var getMaxVideoCacheSize = (logLevel) => {
+  if (cachedMaxCacheSize !== null) {
+    return cachedMaxCacheSize;
+  }
+  cachedMaxCacheSize = getUncachedMaxCacheSize(logLevel);
+  return cachedMaxCacheSize;
+};

 // src/convert-audiodata/combine-audiodata.ts
 var combineAudioDataAndClosePrevious = (audioDataArray) => {
@@ -13872,7 +13916,8 @@ var extractAudio = async ({
     timeInSeconds,
     audioSampleSink: audio.sampleSink,
     isMatroska,
-    actualMatroskaTimestamps
+    actualMatroskaTimestamps,
+    logLevel
   });
   const samples = await sampleIterator.getSamples(timeInSeconds, durationInSeconds);
   audioManager.logOpenFrames(logLevel);
@@ -14140,7 +14185,7 @@ var AudioForRendering = ({
       }
       continueRender(newHandle);
     }).catch((error) => {
-
+      cancelRender2(error);
     });
     return () => {
       continueRender(newHandle);
@@ -14216,7 +14261,7 @@ var Audio = (props) => {
       onRemotionError(new Error(errMessage));
       return;
     }
-
+    cancelRender3(new Error(errMessage));
   } else {
     onRemotionError?.(new Error(errMessage));
     console.warn(errMessage);
@@ -14273,11 +14318,12 @@ import {
   useState as useState4
 } from "react";
 import {
-  cancelRender as
+  cancelRender as cancelRender4,
   Internals as Internals3,
   useCurrentFrame as useCurrentFrame2,
   useDelayRender as useDelayRender2,
-  useRemotionEnvironment as useRemotionEnvironment4
+  useRemotionEnvironment as useRemotionEnvironment4,
+  useVideoConfig
 } from "remotion";
 import { jsx as jsx3 } from "react/jsx-runtime";
 var VideoForRendering = ({
@@ -14291,19 +14337,17 @@ var VideoForRendering = ({
   onVideoFrame,
   logLevel = window.remotion_logLevel,
   loop,
-  style
+  style,
+  className
 }) => {
   const absoluteFrame = Internals3.useTimelinePosition();
-  const
+  const { fps } = useVideoConfig();
   const canvasRef = useRef2(null);
   const { registerRenderAsset, unregisterRenderAsset } = useContext6(Internals3.RenderAssetManager);
   const frame = useCurrentFrame2();
   const volumePropsFrame = Internals3.useFrameForVolumeProp(loopVolumeCurveBehavior ?? "repeat");
   const environment = useRemotionEnvironment4();
   const [id] = useState4(() => `${Math.random()}`.replace("0.", ""));
-  if (!videoConfig) {
-    throw new Error("No video config found");
-  }
   if (!src) {
     throw new TypeError("No `src` was passed to <Video>.");
   }
@@ -14325,7 +14369,6 @@ var VideoForRendering = ({
     }
     return true;
   }, [muted, volume]);
-  const { fps } = videoConfig;
   const { delayRender, continueRender } = useDelayRender2();
   useLayoutEffect2(() => {
     if (!canvasRef.current) {
@@ -14351,10 +14394,17 @@ var VideoForRendering = ({
     }).then(({ frame: imageBitmap, audio }) => {
       if (imageBitmap) {
         onVideoFrame?.(imageBitmap);
-        canvasRef.current?.getContext("2d")
+        const context = canvasRef.current?.getContext("2d");
+        if (!context) {
+          return;
+        }
+        context.canvas.width = imageBitmap instanceof ImageBitmap ? imageBitmap.width : imageBitmap.displayWidth;
+        context.canvas.height = imageBitmap instanceof ImageBitmap ? imageBitmap.height : imageBitmap.displayHeight;
+        context.canvas.style.aspectRatio = `${context.canvas.width} / ${context.canvas.height}`;
+        context.drawImage(imageBitmap, 0, 0);
         imageBitmap.close();
       } else if (window.remotion_videoEnabled) {
-
+        cancelRender4(new Error("No video frame found"));
       }
       if (audio) {
         registerRenderAsset({
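
The frame handed to the canvas can be either an `ImageBitmap` (which exposes `width`/`height`) or a WebCodecs `VideoFrame` (which exposes `displayWidth`/`displayHeight`), so the canvas is now resized to the frame before `drawImage`. A standalone sketch of that sizing step, assuming a 2D canvas context and a frame of either type:

// Sketch: size the canvas to the incoming frame before drawing it.
const drawFrameToCanvas = (
  context: CanvasRenderingContext2D,
  frame: ImageBitmap | VideoFrame,
) => {
  const width = frame instanceof ImageBitmap ? frame.width : frame.displayWidth;
  const height = frame instanceof ImageBitmap ? frame.height : frame.displayHeight;
  context.canvas.width = width;
  context.canvas.height = height;
  context.canvas.style.aspectRatio = `${width} / ${height}`;
  context.drawImage(frame, 0, 0);
  frame.close(); // both ImageBitmap and VideoFrame should be closed promptly
};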
@@ -14370,7 +14420,7 @@ var VideoForRendering = ({
       }
       continueRender(newHandle);
     }).catch((error) => {
-
+      cancelRender4(error);
     });
     return () => {
       continueRender(newHandle);
@@ -14396,11 +14446,13 @@ var VideoForRendering = ({
     volume,
     loop
   ]);
+  const classNameValue = useMemo4(() => {
+    return [Internals3.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals3.truthy).join(" ");
+  }, [className]);
   return /* @__PURE__ */ jsx3("canvas", {
     ref: canvasRef,
     style,
-
-    height: videoConfig.height
+    className: classNameValue
   });
 };

@@ -0,0 +1,64 @@
+import { type LogLevel } from '../log';
+export declare class MediaPlayer {
+    private canvas;
+    private context;
+    private src;
+    private logLevel;
+    private canvasSink;
+    private videoFrameIterator;
+    private nextFrame;
+    private audioSink;
+    private audioBufferIterator;
+    private queuedAudioNodes;
+    private gainNode;
+    private expectedAudioTime;
+    private sharedAudioContext;
+    private mediaTimeOffset;
+    private playing;
+    private animationFrameId;
+    private asyncId;
+    private initialized;
+    private totalDuration;
+    private actualFps;
+    private isStalled;
+    private onStalledChangeCallback?;
+    private lastAudioProgressAtMs;
+    private lastNetworkActivityAtMs;
+    private isNetworkActive;
+    private isSeeking;
+    private canStartAudio;
+    constructor({ canvas, src, logLevel, sharedAudioContext, }: {
+        canvas: HTMLCanvasElement;
+        src: string;
+        logLevel: LogLevel;
+        sharedAudioContext?: AudioContext | null;
+    });
+    initialize(startTime?: number): Promise<void>;
+    seekTo(time: number): void;
+    drawInitialFrame(time?: number): Promise<void>;
+    play(): Promise<void>;
+    pause(): void;
+    dispose(): void;
+    get currentTime(): number;
+    private getPlaybackTime;
+    get duration(): number;
+    get isPlaying(): boolean;
+    get stalled(): boolean;
+    onStalledChange(callback: (isStalled: boolean) => void): void;
+    private renderSingleFrame;
+    private startRenderLoop;
+    private stopRenderLoop;
+    private render;
+    private startVideoIterator;
+    private updateNextFrame;
+    private tryStartAudio;
+    private getCurrentTimeMs;
+    private resetAudioProgressStopwatch;
+    private getAudioLookaheadSec;
+    private calculateAudioStallThresholdSec;
+    private isNetworkStalled;
+    private checkVideoStall;
+    private checkIfStalled;
+    private updateStalledState;
+    private runAudioIterator;
+}
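
The new `MediaPlayer` declaration (shipped in both the `video/` and `video-extraction/` build outputs) describes the preview playback engine: it renders into a `<canvas>`, schedules audio through an optional shared `AudioContext`, and reports stalls. It is an internal module, so the deep import path below is shown only for illustration; based purely on the declared surface, usage would look roughly like this:

// Hypothetical usage derived only from the declaration above.
import {MediaPlayer} from '@remotion/media/dist/video/media-player';

const canvas = document.querySelector('canvas') as HTMLCanvasElement;

const player = new MediaPlayer({
  canvas,
  src: 'https://example.com/video.mp4', // placeholder URL
  logLevel: 'info',
  sharedAudioContext: new AudioContext(),
});

player.onStalledChange((isStalled) => {
  console.log(isStalled ? 'buffering' : 'playing');
});

const run = async () => {
  await player.initialize(0); // begin decoding at t = 0s
  await player.play();
  player.seekTo(2.5); // jump to 2.5 seconds
  player.pause();
  player.dispose(); // tear down the player when done
};

void run();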