@remotion/media 4.0.386 → 4.0.388
This diff represents the content of publicly available package versions that have been released to a supported registry. It is provided for informational purposes only and reflects the changes between the two package versions as they appear in the public registry.
- package/dist/audio/audio-for-preview.js +9 -8
- package/dist/esm/index.mjs +72 -50
- package/dist/media-player.d.ts +2 -1
- package/dist/media-player.js +3 -2
- package/dist/video/props.d.ts +1 -0
- package/dist/video/video-for-preview.d.ts +1 -0
- package/dist/video/video-for-preview.js +20 -17
- package/dist/video/video-for-rendering.d.ts +1 -0
- package/dist/video/video-for-rendering.js +13 -9
- package/dist/video/video.js +5 -5
- package/dist/video-iterator-manager.d.ts +2 -2
- package/dist/video-iterator-manager.js +12 -6
- package/package.json +6 -6
- package/dist/audio-for-rendering.d.ts +0 -3
- package/dist/audio-for-rendering.js +0 -94
- package/dist/audio.d.ts +0 -3
- package/dist/audio.js +0 -60
- package/dist/audiodata-to-array.d.ts +0 -0
- package/dist/audiodata-to-array.js +0 -1
- package/dist/convert-audiodata/data-types.d.ts +0 -1
- package/dist/convert-audiodata/data-types.js +0 -22
- package/dist/convert-audiodata/is-planar-format.d.ts +0 -1
- package/dist/convert-audiodata/is-planar-format.js +0 -3
- package/dist/convert-audiodata/log-audiodata.d.ts +0 -1
- package/dist/convert-audiodata/log-audiodata.js +0 -8
- package/dist/convert-audiodata/trim-audiodata.d.ts +0 -0
- package/dist/convert-audiodata/trim-audiodata.js +0 -1
- package/dist/deserialized-audiodata.d.ts +0 -15
- package/dist/deserialized-audiodata.js +0 -26
- package/dist/extract-audio.d.ts +0 -7
- package/dist/extract-audio.js +0 -98
- package/dist/extract-frame-via-broadcast-channel.d.ts +0 -15
- package/dist/extract-frame-via-broadcast-channel.js +0 -104
- package/dist/extract-frame.d.ts +0 -27
- package/dist/extract-frame.js +0 -21
- package/dist/extrct-audio.d.ts +0 -7
- package/dist/extrct-audio.js +0 -94
- package/dist/get-frames-since-keyframe.d.ts +0 -22
- package/dist/get-frames-since-keyframe.js +0 -41
- package/dist/keyframe-bank.d.ts +0 -25
- package/dist/keyframe-bank.js +0 -120
- package/dist/keyframe-manager.d.ts +0 -23
- package/dist/keyframe-manager.js +0 -170
- package/dist/log.d.ts +0 -10
- package/dist/log.js +0 -33
- package/dist/new-video-for-rendering.d.ts +0 -3
- package/dist/new-video-for-rendering.js +0 -108
- package/dist/new-video.d.ts +0 -3
- package/dist/new-video.js +0 -37
- package/dist/props.d.ts +0 -29
- package/dist/props.js +0 -1
- package/dist/remember-actual-matroska-timestamps.d.ts +0 -4
- package/dist/remember-actual-matroska-timestamps.js +0 -19
- package/dist/serialize-videoframe.d.ts +0 -0
- package/dist/serialize-videoframe.js +0 -1
- package/dist/video/media-player.d.ts +0 -62
- package/dist/video/media-player.js +0 -361
- package/dist/video/new-video-for-preview.d.ts +0 -10
- package/dist/video/new-video-for-preview.js +0 -108
- package/dist/video/timeout-utils.d.ts +0 -2
- package/dist/video/timeout-utils.js +0 -18
- package/dist/video-extraction/media-player.d.ts +0 -64
- package/dist/video-extraction/media-player.js +0 -501
- package/dist/video-extraction/new-video-for-preview.d.ts +0 -10
- package/dist/video-extraction/new-video-for-preview.js +0 -114
- package/dist/video-for-rendering.d.ts +0 -3
- package/dist/video-for-rendering.js +0 -108
- package/dist/video.d.ts +0 -3
- package/dist/video.js +0 -37
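The most visible user-facing change in this range is a new optional `headless` prop on `<Video>` (threaded through props.d.ts, video.js, video-for-preview.js and video-for-rendering.js below), plus an `onVideoFrameCallback` option on the internal MediaPlayer. The sketch below is a hypothetical usage example inferred from the diff, not taken from the package's documentation: the component name, the video URL, and the assumption that `onVideoFrame` keeps firing while `headless` suppresses the built-in canvas are illustrative.

import React from 'react';
import {Video} from '@remotion/media';

// Hypothetical composition: keep decoding frames, but skip the built-in
// <canvas> that <Video> would normally render (the component returns null
// when `headless` is set, per the video-for-preview.js hunks below).
export const HeadlessVideoExample: React.FC = () => {
  return (
    <Video
      src="https://example.com/clip.mp4" // placeholder URL
      headless
      onVideoFrame={(frame) => {
        // `frame` is a CanvasImageSource (the decoded frame's canvas);
        // draw it into your own canvas, WebGL texture, etc.
        console.log('got frame', frame);
      }}
    />
  );
};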
package/dist/audio/audio-for-preview.js
CHANGED
@@ -69,6 +69,10 @@ const AudioForPreviewAssertedShowing = ({ src, playbackRate, logLevel, muted, vo
 throw new Error('useMediaPlayback must be used inside a <BufferingContext>');
 }
 const isPlayerBuffering = Internals.useIsPlayerBuffering(bufferingContext);
+const initialIsPremounting = useRef(isPremounting);
+const initialIsPostmounting = useRef(isPostmounting);
+const initialGlobalPlaybackRate = useRef(globalPlaybackRate);
+const initialPlaybackRate = useRef(playbackRate);
 useEffect(() => {
 if (!sharedAudioContext)
 return;
@@ -84,13 +88,14 @@ const AudioForPreviewAssertedShowing = ({ src, playbackRate, logLevel, muted, vo
 trimBefore: initialTrimBeforeRef.current,
 fps: videoConfig.fps,
 canvas: null,
-playbackRate,
+playbackRate: initialPlaybackRate.current,
 audioStreamIndex: audioStreamIndex ?? 0,
 debugOverlay: false,
 bufferState: buffer,
-isPostmounting,
-isPremounting,
-globalPlaybackRate,
+isPostmounting: initialIsPostmounting.current,
+isPremounting: initialIsPremounting.current,
+globalPlaybackRate: initialGlobalPlaybackRate.current,
+onVideoFrameCallback: null,
 });
 mediaPlayerRef.current = player;
 player
@@ -161,14 +166,10 @@ const AudioForPreviewAssertedShowing = ({ src, playbackRate, logLevel, muted, vo
 sharedAudioContext,
 currentTimeRef,
 loop,
-playbackRate,
 videoConfig.fps,
 audioStreamIndex,
 disallowFallbackToHtml5Audio,
 buffer,
-isPremounting,
-isPostmounting,
-globalPlaybackRate,
 ]);
 useLayoutEffect(() => {
 const audioPlayer = mediaPlayerRef.current;
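The pattern in the hunks above (and repeated for the video components further down) is to capture the first-render values of `playbackRate`, `isPremounting`, `isPostmounting` and `globalPlaybackRate` in refs and drop them from the effect's dependency array, so the MediaPlayer is not torn down and recreated whenever those props change. A generic sketch of that React pattern, with invented names and not the package's actual code:

import {useEffect, useRef} from 'react';

// Capture a prop's initial value so an effect keyed on stable inputs does not
// re-run when the prop changes; later changes would have to be applied through
// a separate code path (not shown here).
export const useCreatePlayerOnce = (src: string, playbackRate: number) => {
  const initialPlaybackRate = useRef(playbackRate);

  useEffect(() => {
    console.log('create player for', src, 'at rate', initialPlaybackRate.current);
    return () => {
      console.log('destroy player for', src);
    };
  }, [src]); // `playbackRate` intentionally omitted from the dependencies
};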
package/dist/esm/index.mjs
CHANGED
@@ -705,21 +705,25 @@ var videoIteratorManager = ({
 let videoIteratorsCreated = 0;
 let videoFrameIterator = null;
 let framesRendered = 0;
-canvas
-
+if (canvas) {
+canvas.width = videoTrack.displayWidth;
+canvas.height = videoTrack.displayHeight;
+}
 const canvasSink = new CanvasSink(videoTrack, {
 poolSize: 2,
 fit: "contain",
 alpha: true
 });
 const drawFrame = (frame) => {
-context
-
+if (context && canvas) {
+context.clearRect(0, 0, canvas.width, canvas.height);
+context.drawImage(frame.canvas, 0, 0);
+}
 framesRendered++;
 drawDebugOverlay();
 const callback = getOnVideoFrameCallback();
 if (callback) {
-callback(canvas);
+callback(frame.canvas);
 }
 Internals2.Log.trace({ logLevel, tag: "@remotion/media" }, `[MediaPlayer] Drew frame ${frame.timestamp.toFixed(3)}s`);
 };
@@ -765,7 +769,9 @@ var videoIteratorManager = ({
 seek,
 destroy: () => {
 videoFrameIterator?.destroy();
-context
+if (context && canvas) {
+context.clearRect(0, 0, canvas.width, canvas.height);
+}
 videoFrameIterator = null;
 },
 getVideoFrameIterator: () => videoFrameIterator,
@@ -816,7 +822,8 @@ class MediaPlayer {
 debugOverlay,
 bufferState,
 isPremounting,
-isPostmounting
+isPostmounting,
+onVideoFrameCallback
 }) {
 this.canvas = canvas ?? null;
 this.src = src;
@@ -834,6 +841,7 @@ class MediaPlayer {
 this.isPremounting = isPremounting;
 this.isPostmounting = isPostmounting;
 this.nonceManager = makeNonceManager();
+this.onVideoFrameCallback = onVideoFrameCallback;
 this.input = new Input({
 source: new UrlSource(this.src),
 formats: ALL_FORMATS
@@ -891,7 +899,7 @@
 if (!videoTrack && !audioTrack) {
 return { type: "no-tracks" };
 }
-if (videoTrack
+if (videoTrack) {
 const canDecode = await videoTrack.canDecode();
 if (!canDecode) {
 return { type: "cannot-decode" };
@@ -1429,6 +1437,10 @@ var AudioForPreviewAssertedShowing = ({
 throw new Error("useMediaPlayback must be used inside a <BufferingContext>");
 }
 const isPlayerBuffering = Internals6.useIsPlayerBuffering(bufferingContext);
+const initialIsPremounting = useRef(isPremounting);
+const initialIsPostmounting = useRef(isPostmounting);
+const initialGlobalPlaybackRate = useRef(globalPlaybackRate);
+const initialPlaybackRate = useRef(playbackRate);
 useEffect2(() => {
 if (!sharedAudioContext)
 return;
@@ -1444,13 +1456,14 @@ var AudioForPreviewAssertedShowing = ({
 trimBefore: initialTrimBeforeRef.current,
 fps: videoConfig.fps,
 canvas: null,
-playbackRate,
+playbackRate: initialPlaybackRate.current,
 audioStreamIndex: audioStreamIndex ?? 0,
 debugOverlay: false,
 bufferState: buffer,
-isPostmounting,
-isPremounting,
-globalPlaybackRate
+isPostmounting: initialIsPostmounting.current,
+isPremounting: initialIsPremounting.current,
+globalPlaybackRate: initialGlobalPlaybackRate.current,
+onVideoFrameCallback: null
 });
 mediaPlayerRef.current = player;
 player.initialize(currentTimeRef.current).then((result) => {
@@ -1517,14 +1530,10 @@ var AudioForPreviewAssertedShowing = ({
 sharedAudioContext,
 currentTimeRef,
 loop,
-playbackRate,
 videoConfig.fps,
 audioStreamIndex,
 disallowFallbackToHtml5Audio,
-buffer
-isPremounting,
-isPostmounting,
-globalPlaybackRate
+buffer
 ]);
 useLayoutEffect(() => {
 const audioPlayer = mediaPlayerRef.current;
@@ -3700,7 +3709,8 @@ var VideoForPreviewAssertedShowing = ({
 disallowFallbackToOffthreadVideo,
 fallbackOffthreadVideoProps,
 audioStreamIndex,
-debugOverlay
+debugOverlay,
+headless
 }) => {
 const src = usePreload2(unpreloadedSrc);
 const canvasRef = useRef2(null);
@@ -3709,6 +3719,7 @@ var VideoForPreviewAssertedShowing = ({
 const mediaPlayerRef = useRef2(null);
 const initialTrimBeforeRef = useRef2(trimBefore);
 const initialTrimAfterRef = useRef2(trimAfter);
+const initialOnVideoFrameRef = useRef2(onVideoFrame);
 const [mediaPlayerReady, setMediaPlayerReady] = useState4(false);
 const [shouldFallbackToNativeVideo, setShouldFallbackToNativeVideo] = useState4(false);
 const [playing] = Timeline2.usePlayingState();
@@ -3765,9 +3776,11 @@ var VideoForPreviewAssertedShowing = ({
 throw new Error("useMediaPlayback must be used inside a <BufferingContext>");
 }
 const isPlayerBuffering = Internals15.useIsPlayerBuffering(buffering);
+const initialIsPremounting = useRef2(isPremounting);
+const initialIsPostmounting = useRef2(isPostmounting);
+const initialGlobalPlaybackRate = useRef2(globalPlaybackRate);
+const initialPlaybackRate = useRef2(playbackRate);
 useEffect3(() => {
-if (!canvasRef.current)
-return;
 if (!sharedAudioContext)
 return;
 if (!sharedAudioContext.audioContext)
@@ -3782,13 +3795,14 @@ var VideoForPreviewAssertedShowing = ({
 trimAfter: initialTrimAfterRef.current,
 trimBefore: initialTrimBeforeRef.current,
 fps: videoConfig.fps,
-playbackRate,
+playbackRate: initialPlaybackRate.current,
 audioStreamIndex,
 debugOverlay,
 bufferState: buffer,
-isPremounting,
-isPostmounting,
-globalPlaybackRate
+isPremounting: initialIsPremounting.current,
+isPostmounting: initialIsPostmounting.current,
+globalPlaybackRate: initialGlobalPlaybackRate.current,
+onVideoFrameCallback: initialOnVideoFrameRef.current ?? null
 });
 mediaPlayerRef.current = player;
 player.initialize(currentTimeRef.current).then((result) => {
@@ -3849,19 +3863,15 @@ var VideoForPreviewAssertedShowing = ({
 setShouldFallbackToNativeVideo(false);
 };
 }, [
-preloadedSrc,
-logLevel,
-sharedAudioContext,
-loop,
-videoConfig.fps,
-playbackRate,
-disallowFallbackToOffthreadVideo,
 audioStreamIndex,
-debugOverlay,
 buffer,
-
-
-
+debugOverlay,
+disallowFallbackToOffthreadVideo,
+logLevel,
+loop,
+preloadedSrc,
+sharedAudioContext,
+videoConfig.fps
 ]);
 const classNameValue = useMemo4(() => {
 return [Internals15.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals15.truthy).join(" ");
@@ -3991,6 +4001,9 @@ var VideoForPreviewAssertedShowing = ({
 ...fallbackOffthreadVideoProps
 });
 }
+if (headless) {
+return null;
+}
 return /* @__PURE__ */ jsx4("canvas", {
 ref: canvasRef,
 width: videoConfig.width,
@@ -4070,7 +4083,8 @@ var VideoForRendering = ({
 stack,
 toneFrequency,
 trimAfterValue,
-trimBeforeValue
+trimBeforeValue,
+headless
 }) => {
 if (!src) {
 throw new TypeError("No `src` was passed to <Video>.");
@@ -4099,13 +4113,13 @@ var VideoForRendering = ({
 throw error;
 }
 useLayoutEffect4(() => {
-if (!canvasRef.current) {
+if (!canvasRef.current && !headless) {
 return;
 }
 if (replaceWithOffthreadVideo) {
 return;
 }
-if (!canvasRef.current?.getContext) {
+if (!canvasRef.current?.getContext && !headless) {
 return setError(new Error("Canvas does not have .getContext() method available. This could be because <Video> was mounted inside an <svg> tag."));
 }
 const timestamp = frame / fps;
@@ -4193,13 +4207,12 @@ var VideoForRendering = ({
 const context = canvasRef.current?.getContext("2d", {
 alpha: true
 });
-if (
-
+if (context) {
+context.canvas.width = imageBitmap.width;
+context.canvas.height = imageBitmap.height;
+context.canvas.style.aspectRatio = `${context.canvas.width} / ${context.canvas.height}`;
+context.drawImage(imageBitmap, 0, 0);
 }
-context.canvas.width = imageBitmap.width;
-context.canvas.height = imageBitmap.height;
-context.canvas.style.aspectRatio = `${context.canvas.width} / ${context.canvas.height}`;
-context.drawImage(imageBitmap, 0, 0);
 imageBitmap.close();
 } else if (videoEnabled) {
 const context = canvasRef.current?.getContext("2d", {
@@ -4273,7 +4286,8 @@ var VideoForRendering = ({
 audioEnabled,
 videoEnabled,
 maxCacheSize,
-cancelRender3
+cancelRender3,
+headless
 ]);
 const classNameValue = useMemo5(() => {
 return [Internals16.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals16.truthy).join(" ");
@@ -4332,6 +4346,9 @@ var VideoForRendering = ({
 }
 return fallback;
 }
+if (headless) {
+return null;
+}
 return /* @__PURE__ */ jsx5("canvas", {
 ref: canvasRef,
 style,
@@ -4364,7 +4381,8 @@ var InnerVideo = ({
 stack,
 toneFrequency,
 showInTimeline,
-debugOverlay
+debugOverlay,
+headless
 }) => {
 const environment = useRemotionEnvironment4();
 if (typeof src !== "string") {
@@ -4404,7 +4422,8 @@ var InnerVideo = ({
 volume,
 toneFrequency,
 trimAfterValue,
-trimBeforeValue
+trimBeforeValue,
+headless
 });
 }
 return /* @__PURE__ */ jsx6(VideoForPreview, {
@@ -4426,7 +4445,8 @@ var InnerVideo = ({
 stack: stack ?? null,
 disallowFallbackToOffthreadVideo,
 fallbackOffthreadVideoProps,
-debugOverlay: debugOverlay ?? false
+debugOverlay: debugOverlay ?? false,
+headless: headless ?? false
 });
 };
 var Video = ({
@@ -4451,7 +4471,8 @@ var Video = ({
 volume,
 stack,
 toneFrequency,
-debugOverlay
+debugOverlay,
+headless
 }) => {
 return /* @__PURE__ */ jsx6(InnerVideo, {
 audioStreamIndex: audioStreamIndex ?? 0,
@@ -4475,7 +4496,8 @@ var Video = ({
 volume: volume ?? 1,
 toneFrequency: toneFrequency ?? 1,
 stack,
-debugOverlay: debugOverlay ?? false
+debugOverlay: debugOverlay ?? false,
+headless: headless ?? false
 });
 };
 Internals17.addSequenceStackTraces(Video);
package/dist/media-player.d.ts
CHANGED
@@ -42,7 +42,7 @@ export declare class MediaPlayer {
 private isPremounting;
 private isPostmounting;
 private seekPromiseChain;
-constructor({ canvas, src, logLevel, sharedAudioContext, loop, trimBefore, trimAfter, playbackRate, globalPlaybackRate, audioStreamIndex, fps, debugOverlay, bufferState, isPremounting, isPostmounting, }: {
+constructor({ canvas, src, logLevel, sharedAudioContext, loop, trimBefore, trimAfter, playbackRate, globalPlaybackRate, audioStreamIndex, fps, debugOverlay, bufferState, isPremounting, isPostmounting, onVideoFrameCallback, }: {
 canvas: HTMLCanvasElement | OffscreenCanvas | null;
 src: string;
 logLevel: LogLevel;
@@ -58,6 +58,7 @@ export declare class MediaPlayer {
 bufferState: ReturnType<typeof useBufferState>;
 isPremounting: boolean;
 isPostmounting: boolean;
+onVideoFrameCallback: null | ((frame: CanvasImageSource) => void);
 });
 private input;
 private isDisposalError;
package/dist/media-player.js
CHANGED
@@ -8,7 +8,7 @@ import { isNetworkError } from './is-type-of-error';
 import { makeNonceManager } from './nonce-manager';
 import { videoIteratorManager } from './video-iterator-manager';
 export class MediaPlayer {
-constructor({ canvas, src, logLevel, sharedAudioContext, loop, trimBefore, trimAfter, playbackRate, globalPlaybackRate, audioStreamIndex, fps, debugOverlay, bufferState, isPremounting, isPostmounting, }) {
+constructor({ canvas, src, logLevel, sharedAudioContext, loop, trimBefore, trimAfter, playbackRate, globalPlaybackRate, audioStreamIndex, fps, debugOverlay, bufferState, isPremounting, isPostmounting, onVideoFrameCallback, }) {
 this.audioIteratorManager = null;
 this.videoIteratorManager = null;
 // this is the time difference between Web Audio timeline
@@ -70,6 +70,7 @@ export class MediaPlayer {
 this.isPremounting = isPremounting;
 this.isPostmounting = isPostmounting;
 this.nonceManager = makeNonceManager();
+this.onVideoFrameCallback = onVideoFrameCallback;
 this.input = new Input({
 source: new UrlSource(this.src),
 formats: ALL_FORMATS,
@@ -128,7 +129,7 @@
 if (!videoTrack && !audioTrack) {
 return { type: 'no-tracks' };
 }
-if (videoTrack
+if (videoTrack) {
 const canDecode = await videoTrack.canDecode();
 if (!canDecode) {
 return { type: 'cannot-decode' };
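For reference, the new constructor option added to the internal MediaPlayer above has the shape `null | ((frame: CanvasImageSource) => void)` (see media-player.d.ts). Below is a small TypeScript sketch of a callback matching that shape; `mirrorIntoCanvas` is a hypothetical helper for illustration, not part of the package:

// Matches the option type declared in media-player.d.ts.
type OnVideoFrameCallback = null | ((frame: CanvasImageSource) => void);

// Hypothetical helper: mirror every delivered frame into a user-owned canvas.
export const mirrorIntoCanvas = (target: HTMLCanvasElement): OnVideoFrameCallback => {
  const ctx = target.getContext('2d');
  if (!ctx) {
    return null; // passing null disables the callback
  }
  return (frame) => {
    ctx.drawImage(frame, 0, 0);
  };
};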
package/dist/video/props.d.ts
CHANGED
@@ -38,6 +38,7 @@ type OptionalVideoProps = {
 toneFrequency: number;
 showInTimeline: boolean;
 debugOverlay: boolean;
+headless: boolean;
 };
 export type InnerVideoProps = MandatoryVideoProps & OuterVideoProps & OptionalVideoProps;
 export type VideoProps = MandatoryVideoProps & Partial<OuterVideoProps> & Partial<OptionalVideoProps>;

package/dist/video/video-for-preview.d.ts
CHANGED
@@ -21,6 +21,7 @@ type VideoForPreviewProps = {
 readonly fallbackOffthreadVideoProps: FallbackOffthreadVideoProps;
 readonly audioStreamIndex: number;
 readonly debugOverlay: boolean;
+readonly headless: boolean;
 };
 export declare const VideoForPreview: React.FC<VideoForPreviewProps>;
 export {};

package/dist/video/video-for-preview.js
CHANGED
@@ -6,7 +6,7 @@ import { MediaPlayer } from '../media-player';
 import { useLoopDisplay } from '../show-in-timeline';
 import { useMediaInTimeline } from '../use-media-in-timeline';
 const { useUnsafeVideoConfig, Timeline, SharedAudioContext, useMediaMutedState, useMediaVolumeState, useFrameForVolumeProp, evaluateVolume, warnAboutTooHighVolume, usePreload, SequenceContext, SequenceVisibilityToggleContext, } = Internals;
-const VideoForPreviewAssertedShowing = ({ src: unpreloadedSrc, style, playbackRate, logLevel, className, muted, volume, loopVolumeCurveBehavior, onVideoFrame, showInTimeline, loop, name, trimAfter, trimBefore, stack, disallowFallbackToOffthreadVideo, fallbackOffthreadVideoProps, audioStreamIndex, debugOverlay, }) => {
+const VideoForPreviewAssertedShowing = ({ src: unpreloadedSrc, style, playbackRate, logLevel, className, muted, volume, loopVolumeCurveBehavior, onVideoFrame, showInTimeline, loop, name, trimAfter, trimBefore, stack, disallowFallbackToOffthreadVideo, fallbackOffthreadVideoProps, audioStreamIndex, debugOverlay, headless, }) => {
 const src = usePreload(unpreloadedSrc);
 const canvasRef = useRef(null);
 const videoConfig = useUnsafeVideoConfig();
@@ -14,6 +14,7 @@ const VideoForPreviewAssertedShowing = ({ src: unpreloadedSrc, style, playbackRa
 const mediaPlayerRef = useRef(null);
 const initialTrimBeforeRef = useRef(trimBefore);
 const initialTrimAfterRef = useRef(trimAfter);
+const initialOnVideoFrameRef = useRef(onVideoFrame);
 const [mediaPlayerReady, setMediaPlayerReady] = useState(false);
 const [shouldFallbackToNativeVideo, setShouldFallbackToNativeVideo] = useState(false);
 const [playing] = Timeline.usePlayingState();
@@ -70,9 +71,11 @@ const VideoForPreviewAssertedShowing = ({ src: unpreloadedSrc, style, playbackRa
 throw new Error('useMediaPlayback must be used inside a <BufferingContext>');
 }
 const isPlayerBuffering = Internals.useIsPlayerBuffering(buffering);
+const initialIsPremounting = useRef(isPremounting);
+const initialIsPostmounting = useRef(isPostmounting);
+const initialGlobalPlaybackRate = useRef(globalPlaybackRate);
+const initialPlaybackRate = useRef(playbackRate);
 useEffect(() => {
-if (!canvasRef.current)
-return;
 if (!sharedAudioContext)
 return;
 if (!sharedAudioContext.audioContext)
@@ -87,13 +90,14 @@ const VideoForPreviewAssertedShowing = ({ src: unpreloadedSrc, style, playbackRa
 trimAfter: initialTrimAfterRef.current,
 trimBefore: initialTrimBeforeRef.current,
 fps: videoConfig.fps,
-playbackRate,
+playbackRate: initialPlaybackRate.current,
 audioStreamIndex,
 debugOverlay,
 bufferState: buffer,
-isPremounting,
-isPostmounting,
-globalPlaybackRate,
+isPremounting: initialIsPremounting.current,
+isPostmounting: initialIsPostmounting.current,
+globalPlaybackRate: initialGlobalPlaybackRate.current,
+onVideoFrameCallback: initialOnVideoFrameRef.current ?? null,
 });
 mediaPlayerRef.current = player;
 player
@@ -158,19 +162,15 @@ const VideoForPreviewAssertedShowing = ({ src: unpreloadedSrc, style, playbackRa
 setShouldFallbackToNativeVideo(false);
 };
 }, [
-
+audioStreamIndex,
+buffer,
+debugOverlay,
+disallowFallbackToOffthreadVideo,
 logLevel,
-sharedAudioContext,
 loop,
+preloadedSrc,
+sharedAudioContext,
 videoConfig.fps,
-playbackRate,
-disallowFallbackToOffthreadVideo,
-audioStreamIndex,
-debugOverlay,
-buffer,
-isPremounting,
-isPostmounting,
-globalPlaybackRate,
 ]);
 const classNameValue = useMemo(() => {
 return [Internals.OBJECTFIT_CONTAIN_CLASS_NAME, className]
@@ -292,6 +292,9 @@ const VideoForPreviewAssertedShowing = ({ src: unpreloadedSrc, style, playbackRa
 // not using <OffthreadVideo> because it does not support looping
 return (_jsx(Html5Video, { src: src, style: actualStyle, className: className, muted: muted, volume: volume, trimAfter: trimAfter, trimBefore: trimBefore, playbackRate: playbackRate, loopVolumeCurveBehavior: loopVolumeCurveBehavior, name: name, loop: loop, showInTimeline: showInTimeline, stack: stack ?? undefined, ...fallbackOffthreadVideoProps }));
 }
+if (headless) {
+return null;
+}
 return (_jsx("canvas", { ref: canvasRef, width: videoConfig.width, height: videoConfig.height, style: actualStyle, className: classNameValue }));
 };
 export const VideoForPreview = (props) => {

package/dist/video/video-for-rendering.d.ts
CHANGED
@@ -22,6 +22,7 @@ type InnerVideoProps = {
 readonly toneFrequency: number;
 readonly trimBeforeValue: number | undefined;
 readonly trimAfterValue: number | undefined;
+readonly headless: boolean;
 };
 export declare const VideoForRendering: React.FC<InnerVideoProps>;
 export {};

package/dist/video/video-for-rendering.js
CHANGED
@@ -6,7 +6,7 @@ import { applyVolume } from '../convert-audiodata/apply-volume';
 import { TARGET_SAMPLE_RATE } from '../convert-audiodata/resample-audiodata';
 import { frameForVolumeProp } from '../looped-frame';
 import { extractFrameViaBroadcastChannel } from '../video-extraction/extract-frame-via-broadcast-channel';
-export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted, loopVolumeCurveBehavior, delayRenderRetries, delayRenderTimeoutInMilliseconds, onVideoFrame, logLevel, loop, style, className, fallbackOffthreadVideoProps, audioStreamIndex, name, disallowFallbackToOffthreadVideo, stack, toneFrequency, trimAfterValue, trimBeforeValue, }) => {
+export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted, loopVolumeCurveBehavior, delayRenderRetries, delayRenderTimeoutInMilliseconds, onVideoFrame, logLevel, loop, style, className, fallbackOffthreadVideoProps, audioStreamIndex, name, disallowFallbackToOffthreadVideo, stack, toneFrequency, trimAfterValue, trimBeforeValue, headless, }) => {
 if (!src) {
 throw new TypeError('No `src` was passed to <Video>.');
 }
@@ -36,13 +36,13 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
 throw error;
 }
 useLayoutEffect(() => {
-if (!canvasRef.current) {
+if (!canvasRef.current && !headless) {
 return;
 }
 if (replaceWithOffthreadVideo) {
 return;
 }
-if (!canvasRef.current?.getContext) {
+if (!canvasRef.current?.getContext && !headless) {
 return setError(new Error('Canvas does not have .getContext() method available. This could be because <Video> was mounted inside an <svg> tag.'));
 }
 const timestamp = frame / fps;
@@ -127,13 +127,13 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
 const context = canvasRef.current?.getContext('2d', {
 alpha: true,
 });
-
-
+// Could be in headless mode
+if (context) {
+context.canvas.width = imageBitmap.width;
+context.canvas.height = imageBitmap.height;
+context.canvas.style.aspectRatio = `${context.canvas.width} / ${context.canvas.height}`;
+context.drawImage(imageBitmap, 0, 0);
 }
-context.canvas.width = imageBitmap.width;
-context.canvas.height = imageBitmap.height;
-context.canvas.style.aspectRatio = `${context.canvas.width} / ${context.canvas.height}`;
-context.drawImage(imageBitmap, 0, 0);
 imageBitmap.close();
 }
 else if (videoEnabled) {
@@ -214,6 +214,7 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
 videoEnabled,
 maxCacheSize,
 cancelRender,
+headless,
 ]);
 const classNameValue = useMemo(() => {
 return [Internals.OBJECTFIT_CONTAIN_CLASS_NAME, className]
@@ -239,5 +240,8 @@ export const VideoForRendering = ({ volume: volumeProp, playbackRate, src, muted
 }
 return fallback;
 }
+if (headless) {
+return null;
+}
 return _jsx("canvas", { ref: canvasRef, style: style, className: classNameValue });
 };
package/dist/video/video.js
CHANGED
@@ -3,7 +3,7 @@ import { Internals, useRemotionEnvironment } from 'remotion';
 import { VideoForPreview } from './video-for-preview';
 import { VideoForRendering } from './video-for-rendering';
 const { validateMediaTrimProps, resolveTrimProps, validateMediaProps } = Internals;
-const InnerVideo = ({ src, audioStreamIndex, className, delayRenderRetries, delayRenderTimeoutInMilliseconds, disallowFallbackToOffthreadVideo, fallbackOffthreadVideoProps, logLevel, loop, loopVolumeCurveBehavior, muted, name, onVideoFrame, playbackRate, style, trimAfter, trimBefore, volume, stack, toneFrequency, showInTimeline, debugOverlay, }) => {
+const InnerVideo = ({ src, audioStreamIndex, className, delayRenderRetries, delayRenderTimeoutInMilliseconds, disallowFallbackToOffthreadVideo, fallbackOffthreadVideoProps, logLevel, loop, loopVolumeCurveBehavior, muted, name, onVideoFrame, playbackRate, style, trimAfter, trimBefore, volume, stack, toneFrequency, showInTimeline, debugOverlay, headless, }) => {
 const environment = useRemotionEnvironment();
 if (typeof src !== 'string') {
 throw new TypeError(`The \`<Video>\` tag requires a string for \`src\`, but got ${JSON.stringify(src)} instead.`);
@@ -22,14 +22,14 @@ const InnerVideo = ({ src, audioStreamIndex, className, delayRenderRetries, dela
 });
 validateMediaProps({ playbackRate, volume }, 'Video');
 if (environment.isRendering) {
-return (_jsx(VideoForRendering, { audioStreamIndex: audioStreamIndex ?? 0, className: className, delayRenderRetries: delayRenderRetries ?? null, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? null, disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo ?? false, name: name, fallbackOffthreadVideoProps: fallbackOffthreadVideoProps, logLevel: logLevel, loop: loop, loopVolumeCurveBehavior: loopVolumeCurveBehavior, muted: muted, onVideoFrame: onVideoFrame, playbackRate: playbackRate, src: src, stack: stack, style: style, volume: volume, toneFrequency: toneFrequency, trimAfterValue: trimAfterValue, trimBeforeValue: trimBeforeValue }));
+return (_jsx(VideoForRendering, { audioStreamIndex: audioStreamIndex ?? 0, className: className, delayRenderRetries: delayRenderRetries ?? null, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? null, disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo ?? false, name: name, fallbackOffthreadVideoProps: fallbackOffthreadVideoProps, logLevel: logLevel, loop: loop, loopVolumeCurveBehavior: loopVolumeCurveBehavior, muted: muted, onVideoFrame: onVideoFrame, playbackRate: playbackRate, src: src, stack: stack, style: style, volume: volume, toneFrequency: toneFrequency, trimAfterValue: trimAfterValue, trimBeforeValue: trimBeforeValue, headless: headless }));
 }
-return (_jsx(VideoForPreview, { audioStreamIndex: audioStreamIndex ?? 0, className: className, name: name, logLevel: logLevel, loop: loop, loopVolumeCurveBehavior: loopVolumeCurveBehavior, muted: muted, onVideoFrame: onVideoFrame, playbackRate: playbackRate, src: src, style: style, volume: volume, showInTimeline: showInTimeline, trimAfter: trimAfterValue, trimBefore: trimBeforeValue, stack: stack ?? null, disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo, fallbackOffthreadVideoProps: fallbackOffthreadVideoProps, debugOverlay: debugOverlay ?? false }));
+return (_jsx(VideoForPreview, { audioStreamIndex: audioStreamIndex ?? 0, className: className, name: name, logLevel: logLevel, loop: loop, loopVolumeCurveBehavior: loopVolumeCurveBehavior, muted: muted, onVideoFrame: onVideoFrame, playbackRate: playbackRate, src: src, style: style, volume: volume, showInTimeline: showInTimeline, trimAfter: trimAfterValue, trimBefore: trimBeforeValue, stack: stack ?? null, disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo, fallbackOffthreadVideoProps: fallbackOffthreadVideoProps, debugOverlay: debugOverlay ?? false, headless: headless ?? false }));
 };
-export const Video = ({ src, audioStreamIndex, className, delayRenderRetries, delayRenderTimeoutInMilliseconds, disallowFallbackToOffthreadVideo, fallbackOffthreadVideoProps, logLevel, loop, loopVolumeCurveBehavior, muted, name, onVideoFrame, playbackRate, showInTimeline, style, trimAfter, trimBefore, volume, stack, toneFrequency, debugOverlay, }) => {
+export const Video = ({ src, audioStreamIndex, className, delayRenderRetries, delayRenderTimeoutInMilliseconds, disallowFallbackToOffthreadVideo, fallbackOffthreadVideoProps, logLevel, loop, loopVolumeCurveBehavior, muted, name, onVideoFrame, playbackRate, showInTimeline, style, trimAfter, trimBefore, volume, stack, toneFrequency, debugOverlay, headless, }) => {
 return (_jsx(InnerVideo, { audioStreamIndex: audioStreamIndex ?? 0, className: className, delayRenderRetries: delayRenderRetries ?? null, delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? null, disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo ?? false, fallbackOffthreadVideoProps: fallbackOffthreadVideoProps ?? {}, logLevel: logLevel ??
 (typeof window !== 'undefined'
 ? (window.remotion_logLevel ?? 'info')
-: 'info'), loop: loop ?? false, loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? 'repeat', muted: muted ?? false, name: name, onVideoFrame: onVideoFrame, playbackRate: playbackRate ?? 1, showInTimeline: showInTimeline ?? true, src: src, style: style ?? {}, trimAfter: trimAfter, trimBefore: trimBefore, volume: volume ?? 1, toneFrequency: toneFrequency ?? 1, stack: stack, debugOverlay: debugOverlay ?? false }));
+: 'info'), loop: loop ?? false, loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? 'repeat', muted: muted ?? false, name: name, onVideoFrame: onVideoFrame, playbackRate: playbackRate ?? 1, showInTimeline: showInTimeline ?? true, src: src, style: style ?? {}, trimAfter: trimAfter, trimBefore: trimBefore, volume: volume ?? 1, toneFrequency: toneFrequency ?? 1, stack: stack, debugOverlay: debugOverlay ?? false, headless: headless ?? false }));
 };
 Internals.addSequenceStackTraces(Video);

package/dist/video-iterator-manager.d.ts
CHANGED
@@ -6,8 +6,8 @@ export declare const videoIteratorManager: ({ delayPlaybackHandleIfNotPremountin
 delayPlaybackHandleIfNotPremounting: () => {
 unblock: () => void;
 };
-context: OffscreenCanvasRenderingContext2D | CanvasRenderingContext2D;
-canvas: OffscreenCanvas | HTMLCanvasElement;
+context: OffscreenCanvasRenderingContext2D | CanvasRenderingContext2D | null;
+canvas: OffscreenCanvas | HTMLCanvasElement | null;
 getOnVideoFrameCallback: () => null | ((frame: CanvasImageSource) => void);
 logLevel: LogLevel;
 drawDebugOverlay: () => void;

package/dist/video-iterator-manager.js
CHANGED
@@ -5,21 +5,25 @@ export const videoIteratorManager = ({ delayPlaybackHandleIfNotPremounting, canv
 let videoIteratorsCreated = 0;
 let videoFrameIterator = null;
 let framesRendered = 0;
-canvas
-
+if (canvas) {
+canvas.width = videoTrack.displayWidth;
+canvas.height = videoTrack.displayHeight;
+}
 const canvasSink = new CanvasSink(videoTrack, {
 poolSize: 2,
 fit: 'contain',
 alpha: true,
 });
 const drawFrame = (frame) => {
-context
-
+if (context && canvas) {
+context.clearRect(0, 0, canvas.width, canvas.height);
+context.drawImage(frame.canvas, 0, 0);
+}
 framesRendered++;
 drawDebugOverlay();
 const callback = getOnVideoFrameCallback();
 if (callback) {
-callback(canvas);
+callback(frame.canvas);
 }
 Internals.Log.trace({ logLevel, tag: '@remotion/media' }, `[MediaPlayer] Drew frame ${frame.timestamp.toFixed(3)}s`);
 };
@@ -73,7 +77,9 @@ export const videoIteratorManager = ({ delayPlaybackHandleIfNotPremounting, canv
 seek,
 destroy: () => {
 videoFrameIterator?.destroy();
-context
+if (context && canvas) {
+context.clearRect(0, 0, canvas.width, canvas.height);
+}
 videoFrameIterator = null;
 },
 getVideoFrameIterator: () => videoFrameIterator,