remotion 4.1.0-alpha10 → 4.1.0-alpha12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/CompositionManager.d.ts +1 -1
- package/dist/cjs/CompositionManager.js +2 -2
- package/dist/cjs/audio/Audio.js +10 -2
- package/dist/cjs/audio/AudioForRendering.js +6 -6
- package/dist/cjs/index.d.ts +4 -3
- package/dist/cjs/internals.d.ts +3 -3
- package/dist/cjs/internals.js +2 -2
- package/dist/cjs/resolve-video-config.js +8 -7
- package/dist/cjs/use-media-playback.js +25 -10
- package/dist/cjs/version.d.ts +1 -1
- package/dist/cjs/version.js +1 -1
- package/dist/cjs/video/OffthreadVideoForRendering.js +6 -6
- package/dist/cjs/video/VideoForRendering.js +6 -6
- package/dist/cjs/wrap-remotion-context.d.ts +1 -1
- package/dist/cjs/wrap-remotion-context.js +5 -5
- package/dist/esm/index.mjs +100 -76
- package/dist/esm/version.mjs +1 -1
- package/package.json +1 -1
package/dist/cjs/CompositionManager.js
CHANGED

@@ -26,9 +26,9 @@ Object.defineProperty(exports, "__esModule", { value: true });
exports.CompositionManagerProvider = exports.compositionsRef = void 0;
const jsx_runtime_1 = require("react/jsx-runtime");
const react_1 = __importStar(require("react"));
-const AssetManager_js_1 = require("./AssetManager.js");
const shared_audio_tags_js_1 = require("./audio/shared-audio-tags.js");
const CompositionManagerContext_js_1 = require("./CompositionManagerContext.js");
+const RenderAssetManager_js_1 = require("./RenderAssetManager.js");
const ResolveCompositionConfig_js_1 = require("./ResolveCompositionConfig.js");
const SequenceManager_js_1 = require("./SequenceManager.js");
exports.compositionsRef = react_1.default.createRef();
@@ -112,6 +112,6 @@ const CompositionManagerProvider = ({ children, numberOfAudioTags }) => {
unregisterFolder,
currentCompositionMetadata,
]);
-return ((0, jsx_runtime_1.jsx)(CompositionManagerContext_js_1.CompositionManager.Provider, { value: contextValue, children: (0, jsx_runtime_1.jsx)(SequenceManager_js_1.SequenceManagerProvider, { children: (0, jsx_runtime_1.jsx)(
+return ((0, jsx_runtime_1.jsx)(CompositionManagerContext_js_1.CompositionManager.Provider, { value: contextValue, children: (0, jsx_runtime_1.jsx)(SequenceManager_js_1.SequenceManagerProvider, { children: (0, jsx_runtime_1.jsx)(RenderAssetManager_js_1.RenderAssetManagerProvider, { children: (0, jsx_runtime_1.jsx)(ResolveCompositionConfig_js_1.ResolveCompositionConfig, { children: (0, jsx_runtime_1.jsx)(shared_audio_tags_js_1.SharedAudioContextProvider, { numberOfAudioTags: numberOfAudioTags, component: (_a = composition === null || composition === void 0 ? void 0 : composition.component) !== null && _a !== void 0 ? _a : null, children: children }) }) }) }) }));
};
exports.CompositionManagerProvider = CompositionManagerProvider;
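The compiled one-liner above makes the new provider order hard to read. Expressed as JSX, the tree returned by CompositionManagerProvider now looks roughly like this (an editor's simplified sketch of the compiled expression, not the package's TSX source):

// Rough JSX equivalent: the former AssetManagerProvider slot is now
// occupied by RenderAssetManagerProvider.
return (
  <CompositionManager.Provider value={contextValue}>
    <SequenceManagerProvider>
      <RenderAssetManagerProvider>
        <ResolveCompositionConfig>
          <SharedAudioContextProvider
            numberOfAudioTags={numberOfAudioTags}
            component={composition?.component ?? null}
          >
            {children}
          </SharedAudioContextProvider>
        </ResolveCompositionConfig>
      </RenderAssetManagerProvider>
    </SequenceManagerProvider>
  </CompositionManager.Provider>
);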
package/dist/cjs/audio/Audio.js
CHANGED
@@ -28,8 +28,16 @@ const AudioRefForwardingFunction = (props, ref) => {
}
const onError = (0, react_1.useCallback)((e) => {
console.log(e.currentTarget.error);
-
-
+// If there is no `loop` property, we don't need to get the duration
+// and thsi does not need to be a fatal error
+const errMessage = `Could not play audio with src ${otherProps.src}: ${e.currentTarget.error}. See https://remotion.dev/docs/media-playback-error for help.`;
+if (loop) {
+(0, cancel_render_js_1.cancelRender)(new Error(errMessage));
+}
+else {
+console.warn(errMessage);
+}
+}, [loop, otherProps.src]);
const onDuration = (0, react_1.useCallback)((src, durationInSeconds) => {
setDurations({ type: 'got-duration', durationInSeconds, src });
}, [setDurations]);
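In plain terms: a media error on <Audio> is now fatal only when loop is set (looping needs the element's duration), and is downgraded to a console warning otherwise. A condensed TypeScript sketch of that logic, with src standing in for otherProps.src (an illustration, not the package source):

const onError = useCallback(
  (e: React.SyntheticEvent<HTMLAudioElement>) => {
    const errMessage = `Could not play audio with src ${src}: ${e.currentTarget.error}. See https://remotion.dev/docs/media-playback-error for help.`;
    if (loop) {
      // Looping requires the duration, so a failed load must abort the render.
      cancelRender(new Error(errMessage));
    } else {
      console.warn(errMessage);
    }
  },
  [loop, src],
);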
package/dist/cjs/audio/AudioForRendering.js
CHANGED

@@ -4,10 +4,10 @@ exports.AudioForRendering = void 0;
const jsx_runtime_1 = require("react/jsx-runtime");
const react_1 = require("react");
const absolute_src_js_1 = require("../absolute-src.js");
-const AssetManager_js_1 = require("../AssetManager.js");
const delay_render_js_1 = require("../delay-render.js");
const get_environment_js_1 = require("../get-environment.js");
const random_js_1 = require("../random.js");
+const RenderAssetManager_js_1 = require("../RenderAssetManager.js");
const SequenceContext_js_1 = require("../SequenceContext.js");
const timeline_position_state_js_1 = require("../timeline-position-state.js");
const use_current_frame_js_1 = require("../use-current-frame.js");
@@ -19,7 +19,7 @@ const AudioForRenderingRefForwardingFunction = (props, ref) => {
const volumePropFrame = (0, use_audio_frame_js_1.useFrameForVolumeProp)();
const frame = (0, use_current_frame_js_1.useCurrentFrame)();
const sequenceContext = (0, react_1.useContext)(SequenceContext_js_1.SequenceContext);
-const {
+const { registerRenderAsset, unregisterRenderAsset } = (0, react_1.useContext)(RenderAssetManager_js_1.RenderAssetManager);
const environment = (0, get_environment_js_1.useRemotionEnvironment)();
// Generate a string that's as unique as possible for this asset
// but at the same time the same on all threads
@@ -51,7 +51,7 @@ const AudioForRenderingRefForwardingFunction = (props, ref) => {
if (volume <= 0) {
return;
}
-
+registerRenderAsset({
type: 'audio',
src: (0, absolute_src_js_1.getAbsoluteSrc)(props.src),
id,
@@ -61,14 +61,14 @@ const AudioForRenderingRefForwardingFunction = (props, ref) => {
playbackRate: (_a = props.playbackRate) !== null && _a !== void 0 ? _a : 1,
allowAmplificationDuringRender: allowAmplificationDuringRender !== null && allowAmplificationDuringRender !== void 0 ? allowAmplificationDuringRender : false,
});
-return () =>
+return () => unregisterRenderAsset(id);
}, [
props.muted,
props.src,
-
+registerRenderAsset,
absoluteFrame,
id,
-
+unregisterRenderAsset,
volume,
volumePropFrame,
frame,
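The same pattern recurs in every rendering component below: read registerRenderAsset/unregisterRenderAsset from the new RenderAssetManager context, register the asset inside an effect, and unregister it in the cleanup. A condensed sketch of the audio case (simplified from the compiled output; the wrapping effect hook is assumed and fields not visible in the hunk are elided):

const {registerRenderAsset, unregisterRenderAsset} = useContext(RenderAssetManager);

useEffect(() => {
  if (volume <= 0) {
    return; // silent audio is not registered at all
  }
  registerRenderAsset({
    type: 'audio',
    src: getAbsoluteSrc(props.src),
    id,
    // ...frame/volume fields not shown in the hunk...
    playbackRate: props.playbackRate ?? 1,
    allowAmplificationDuringRender: allowAmplificationDuringRender ?? false,
  });
  // Cleanup removes the asset so only currently mounted media gets collected.
  return () => unregisterRenderAsset(id);
}, [props.muted, props.src, registerRenderAsset, absoluteFrame, id, unregisterRenderAsset, volume, volumePropFrame, frame]);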
package/dist/cjs/index.d.ts
CHANGED
@@ -1,6 +1,6 @@
/// <reference types="node" />
import './asset-types.js';
-import type {
+import type { TRenderAsset } from './CompositionManager.js';
import type { StaticFile } from './get-static-files.js';
import type { ClipRegion } from './NativeLayers.js';
import type { VideoConfig } from './video-config.js';
@@ -23,6 +23,7 @@ declare global {
remotion_setBundleMode: (bundleMode: BundleState) => void;
remotion_staticBase: string;
remotion_staticFiles: StaticFile[];
+remotion_publicFolderExists: string | null;
remotion_editorName: string | null;
remotion_numberOfAudioTags: number;
remotion_projectName: string;
@@ -36,7 +37,7 @@ declare global {
remotion_puppeteerTimeout: number;
remotion_inputProps: string;
remotion_envVariables: string;
-remotion_collectAssets: () =>
+remotion_collectAssets: () => TRenderAsset[];
remotion_getClipRegion: () => ClipRegion | null;
remotion_isPlayer: boolean;
remotion_isBuilding: undefined | (() => void);
@@ -63,7 +64,7 @@ export * from './AbsoluteFill.js';
export * from './audio/index.js';
export { cancelRender } from './cancel-render.js';
export { CalculateMetadataFunction, Composition, CompositionProps, CompProps, StillProps, } from './Composition.js';
-export { AnyCompMetadata, AnyComposition, SmallTCompMetadata,
+export { AnyCompMetadata, AnyComposition, SmallTCompMetadata, TCompMetadata, TRenderAsset, } from './CompositionManager.js';
export { getInputProps } from './config/input-props.js';
export { continueRender, delayRender } from './delay-render.js';
export * from './easing.js';
package/dist/cjs/internals.d.ts
CHANGED
@@ -1,6 +1,6 @@
/// <reference types="react" />
import { type CompProps } from './Composition.js';
-import type {
+import type { TCompMetadata, TComposition, TRenderAsset, TSequence } from './CompositionManager.js';
import type { CompositionManagerContext } from './CompositionManagerContext.js';
import * as CSSUtils from './default-css.js';
import type { RemotionEnvironment } from './get-environment.js';
@@ -126,7 +126,7 @@ export declare const Internals: {
children: import("react").ReactNode;
}>>;
readonly REMOTION_STUDIO_CONTAINER_ELEMENT: "__remotion-studio-container";
-readonly
+readonly RenderAssetManager: import("react").Context<import("./RenderAssetManager.js").RenderAssetManagerContext>;
readonly bundleName: "bundle.js";
readonly bundleMapName: "bundle.js.map";
readonly persistCurrentFrame: (frame: number, composition: string) => void;
@@ -146,4 +146,4 @@ export declare const Internals: {
children?: import("react").ReactNode;
}>;
};
-export type { TComposition, Timeline, TCompMetadata, TSequence, TAsset, TimelineContextValue, SetTimelineContextValue, CompProps, CompositionManagerContext, MediaVolumeContextValue, SetMediaVolumeContextValue, RemotionEnvironment, SerializedJSONWithCustomFields, };
+export type { TComposition, Timeline, TCompMetadata, TSequence, TRenderAsset as TAsset, TimelineContextValue, SetTimelineContextValue, CompProps, CompositionManagerContext, MediaVolumeContextValue, SetMediaVolumeContextValue, RemotionEnvironment, SerializedJSONWithCustomFields, };
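The final line keeps backwards compatibility: the renamed TRenderAsset type is re-exported under its old name, so existing code importing TAsset from these internals keeps compiling. The conceptual effect (an editor's sketch, not the literal declaration):

import type {TRenderAsset} from './CompositionManager.js';

// The old name still resolves; it is now just an alias of the renamed type.
export type TAsset = TRenderAsset;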
package/dist/cjs/internals.js
CHANGED
@@ -24,7 +24,6 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.Internals = void 0;
-const AssetManager_js_1 = require("./AssetManager.js");
const shared_audio_tags_js_1 = require("./audio/shared-audio-tags.js");
const CanUseRemotionHooks_js_1 = require("./CanUseRemotionHooks.js");
const Composition_js_1 = require("./Composition.js");
@@ -45,6 +44,7 @@ const prefetch_state_js_1 = require("./prefetch-state.js");
const prefetch_js_1 = require("./prefetch.js");
const register_root_js_1 = require("./register-root.js");
const RemotionRoot_js_1 = require("./RemotionRoot.js");
+const RenderAssetManager_js_1 = require("./RenderAssetManager.js");
const resolve_video_config_js_1 = require("./resolve-video-config.js");
const ResolveCompositionConfig_js_1 = require("./ResolveCompositionConfig.js");
const SequenceContext_js_1 = require("./SequenceContext.js");
@@ -119,7 +119,7 @@ exports.Internals = {
resolveCompositionsRef: ResolveCompositionConfig_js_1.resolveCompositionsRef,
ResolveCompositionConfig: ResolveCompositionConfig_js_1.ResolveCompositionConfig,
REMOTION_STUDIO_CONTAINER_ELEMENT: get_preview_dom_element_js_1.REMOTION_STUDIO_CONTAINER_ELEMENT,
-
+RenderAssetManager: RenderAssetManager_js_1.RenderAssetManager,
bundleName: 'bundle.js',
bundleMapName: 'bundle.js.map',
persistCurrentFrame: timeline_position_state_js_1.persistCurrentFrame,
package/dist/cjs/resolve-video-config.js
CHANGED

@@ -17,11 +17,15 @@ const resolveVideoConfig = ({ composition, editorProps: editorPropsOrUndefined,
abortSignal: signal,
})
: null;
+const fallbackProps = {
+...((_c = composition.defaultProps) !== null && _c !== void 0 ? _c : {}),
+...(inputProps !== null && inputProps !== void 0 ? inputProps : {}),
+};
if (calculatedProm !== null &&
typeof calculatedProm === 'object' &&
'then' in calculatedProm) {
return calculatedProm.then((c) => {
-var _a, _b
+var _a, _b;
const { height, width, durationInFrames, fps } = validateCalculated({
calculated: c,
composition,
@@ -33,7 +37,7 @@ const resolveVideoConfig = ({ composition, editorProps: editorPropsOrUndefined,
durationInFrames,
id: composition.id,
defaultProps: (_a = composition.defaultProps) !== null && _a !== void 0 ? _a : {},
-props: (
+props: (_b = c.props) !== null && _b !== void 0 ? _b : fallbackProps,
};
});
}
@@ -45,11 +49,8 @@ const resolveVideoConfig = ({ composition, editorProps: editorPropsOrUndefined,
return {
...data,
id: composition.id,
-defaultProps: (
-props:
-...((_d = composition.defaultProps) !== null && _d !== void 0 ? _d : {}),
-...(inputProps !== null && inputProps !== void 0 ? inputProps : {}),
-},
+defaultProps: (_d = composition.defaultProps) !== null && _d !== void 0 ? _d : {},
+props: fallbackProps,
};
}
return {
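The effect of this change is to centralize prop resolution: inputProps spread over defaultProps form a single fallback object, and a result from calculateMetadata() takes priority only when it actually returns props. A simplified sketch of the precedence (fallbackProps, composition.defaultProps and inputProps are the names from the compiled code; calculated stands for the resolved calculateMetadata() result, called c above):

const fallbackProps = {
  ...(composition.defaultProps ?? {}),
  ...(inputProps ?? {}),
};

// calculated?.props wins when calculateMetadata() provided props;
// otherwise the merged fallback applies. The non-calculated branch now also
// returns defaultProps on its own instead of the previously inlined merge.
const resolvedProps = calculated?.props ?? fallbackProps;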
package/dist/cjs/use-media-playback.js
CHANGED

@@ -32,8 +32,18 @@ const useMediaPlayback = ({ mediaRef, src, mediaType, playbackRate: localPlaybac
if (!src) {
throw new Error(`No 'src' attribute was passed to the ${tagName} element.`);
}
-
-
+const playbackRateToSet = Math.max(0, playbackRate);
+if (mediaRef.current.playbackRate !== playbackRateToSet) {
+mediaRef.current.playbackRate = playbackRateToSet;
+}
+// Let's throttle the seeking to only every 10 frames when a video is playing to avoid bottlenecking
+// the video tag.
+if (playing) {
+if (absoluteFrame % 10 !== 0) {
+return;
+}
+}
+const desiredUnclampedTime = (0, get_current_time_js_1.getMediaTime)({
fps,
frame,
src,
@@ -41,15 +51,20 @@ const useMediaPlayback = ({ mediaRef, src, mediaType, playbackRate: localPlaybac
startFrom: -mediaStartsAt,
mediaType,
});
+const { duration } = mediaRef.current;
+const shouldBeTime = !Number.isNaN(duration) && Number.isFinite(duration)
+? Math.min(duration, desiredUnclampedTime)
+: desiredUnclampedTime;
const isTime = mediaRef.current.currentTime;
const timeShift = Math.abs(shouldBeTime - isTime);
-if (timeShift > acceptableTimeshift
+if (timeShift > acceptableTimeshift) {
// If scrubbing around, adjust timing
-// or if time shift is bigger than 0.
+// or if time shift is bigger than 0.45sec
mediaRef.current.currentTime = shouldBeTime;
if (!onlyWarnForMediaSeekingError) {
(0, warn_about_non_seekable_media_js_1.warnAboutNonSeekableMedia)(mediaRef.current, onlyWarnForMediaSeekingError ? 'console-warning' : 'console-error');
}
+return;
}
// Only perform a seek if the time is not already the same.
// Chrome rounds to 6 digits, so 0.033333333 -> 0.033333,
@@ -57,15 +72,15 @@ const useMediaPlayback = ({ mediaRef, src, mediaType, playbackRate: localPlaybac
// Refer to the https://github.com/remotion-dev/video-buffering-example
// which is fixed by only seeking conditionally.
const makesSenseToSeek = Math.abs(mediaRef.current.currentTime - shouldBeTime) > 0.00001;
+if (!makesSenseToSeek) {
+return;
+}
if (!playing || absoluteFrame === 0) {
-
-
-}
+mediaRef.current.currentTime = shouldBeTime;
+return;
}
if (mediaRef.current.paused && !mediaRef.current.ended && playing) {
-
-mediaRef.current.currentTime = shouldBeTime;
-}
+mediaRef.current.currentTime = shouldBeTime;
(0, play_and_handle_not_allowed_error_js_1.playAndHandleNotAllowedError)(mediaRef, mediaType);
}
}, [
package/dist/cjs/version.d.ts
CHANGED
@@ -1 +1 @@
-export declare const VERSION = "4.1.0-alpha10";
+export declare const VERSION = "4.1.0-alpha12";
package/dist/cjs/version.js
CHANGED
package/dist/cjs/video/OffthreadVideoForRendering.js
CHANGED

@@ -4,12 +4,12 @@ exports.OffthreadVideoForRendering = void 0;
const jsx_runtime_1 = require("react/jsx-runtime");
const react_1 = require("react");
const absolute_src_js_1 = require("../absolute-src.js");
-const AssetManager_js_1 = require("../AssetManager.js");
const use_audio_frame_js_1 = require("../audio/use-audio-frame.js");
const cancel_render_js_1 = require("../cancel-render.js");
const default_css_js_1 = require("../default-css.js");
const Img_js_1 = require("../Img.js");
const random_js_1 = require("../random.js");
+const RenderAssetManager_js_1 = require("../RenderAssetManager.js");
const SequenceContext_js_1 = require("../SequenceContext.js");
const timeline_position_state_js_1 = require("../timeline-position-state.js");
const truthy_js_1 = require("../truthy.js");
@@ -24,7 +24,7 @@ const OffthreadVideoForRendering = ({ onError, volume: volumeProp, playbackRate,
const videoConfig = (0, use_unsafe_video_config_js_1.useUnsafeVideoConfig)();
const sequenceContext = (0, react_1.useContext)(SequenceContext_js_1.SequenceContext);
const mediaStartsAt = (0, use_audio_frame_js_1.useMediaStartsAt)();
-const {
+const { registerRenderAsset, unregisterRenderAsset } = (0, react_1.useContext)(RenderAssetManager_js_1.RenderAssetManager);
if (!src) {
throw new TypeError('No `src` was passed to <OffthreadVideo>.');
}
@@ -58,7 +58,7 @@ const OffthreadVideoForRendering = ({ onError, volume: volumeProp, playbackRate,
if (volume <= 0) {
return;
}
-
+registerRenderAsset({
type: 'video',
src: (0, absolute_src_js_1.getAbsoluteSrc)(src),
id,
@@ -68,13 +68,13 @@ const OffthreadVideoForRendering = ({ onError, volume: volumeProp, playbackRate,
playbackRate: playbackRate !== null && playbackRate !== void 0 ? playbackRate : 1,
allowAmplificationDuringRender: allowAmplificationDuringRender !== null && allowAmplificationDuringRender !== void 0 ? allowAmplificationDuringRender : false,
});
-return () =>
+return () => unregisterRenderAsset(id);
}, [
muted,
src,
-
+registerRenderAsset,
id,
-
+unregisterRenderAsset,
volume,
frame,
absoluteFrame,
package/dist/cjs/video/VideoForRendering.js
CHANGED

@@ -4,12 +4,12 @@ exports.VideoForRendering = void 0;
const jsx_runtime_1 = require("react/jsx-runtime");
const react_1 = require("react");
const absolute_src_js_1 = require("../absolute-src.js");
-const AssetManager_js_1 = require("../AssetManager.js");
const use_audio_frame_js_1 = require("../audio/use-audio-frame.js");
const delay_render_js_1 = require("../delay-render.js");
const get_environment_js_1 = require("../get-environment.js");
const is_approximately_the_same_js_1 = require("../is-approximately-the-same.js");
const random_js_1 = require("../random.js");
+const RenderAssetManager_js_1 = require("../RenderAssetManager.js");
const SequenceContext_js_1 = require("../SequenceContext.js");
const timeline_position_state_js_1 = require("../timeline-position-state.js");
const use_current_frame_js_1 = require("../use-current-frame.js");
@@ -26,7 +26,7 @@ const VideoForRenderingForwardFunction = ({ onError, volume: volumeProp, allowAm
const sequenceContext = (0, react_1.useContext)(SequenceContext_js_1.SequenceContext);
const mediaStartsAt = (0, use_audio_frame_js_1.useMediaStartsAt)();
const environment = (0, get_environment_js_1.useRemotionEnvironment)();
-const {
+const { registerRenderAsset, unregisterRenderAsset } = (0, react_1.useContext)(RenderAssetManager_js_1.RenderAssetManager);
// Generate a string that's as unique as possible for this asset
// but at the same time the same on all threads
const id = (0, react_1.useMemo)(() => {
@@ -60,7 +60,7 @@ const VideoForRenderingForwardFunction = ({ onError, volume: volumeProp, allowAm
if (!window.remotion_audioEnabled) {
return;
}
-
+registerRenderAsset({
type: 'video',
src: (0, absolute_src_js_1.getAbsoluteSrc)(props.src),
id,
@@ -70,13 +70,13 @@ const VideoForRenderingForwardFunction = ({ onError, volume: volumeProp, allowAm
playbackRate: playbackRate !== null && playbackRate !== void 0 ? playbackRate : 1,
allowAmplificationDuringRender: allowAmplificationDuringRender !== null && allowAmplificationDuringRender !== void 0 ? allowAmplificationDuringRender : false,
});
-return () =>
+return () => unregisterRenderAsset(id);
}, [
props.muted,
props.src,
-
+registerRenderAsset,
id,
-
+unregisterRenderAsset,
volume,
frame,
absoluteFrame,
package/dist/cjs/wrap-remotion-context.d.ts
CHANGED

@@ -21,7 +21,7 @@ export declare function useRemotionContexts(): {
error: Error;
}) | undefined;
} | null;
-
+renderAssetManagerContext: import("./RenderAssetManager.js").RenderAssetManagerContext;
sequenceManagerContext: import("./SequenceManager.js").SequenceManagerContext;
};
export interface RemotionContextProviderProps {
package/dist/cjs/wrap-remotion-context.js
CHANGED

@@ -29,12 +29,12 @@ const jsx_runtime_1 = require("react/jsx-runtime");
// such as in React Three Fiber. All the contexts need to be passed again
// for them to be useable
const react_1 = __importStar(require("react"));
-const AssetManager_js_1 = require("./AssetManager.js");
const CanUseRemotionHooks_js_1 = require("./CanUseRemotionHooks.js");
const CompositionManagerContext_js_1 = require("./CompositionManagerContext.js");
const NativeLayers_js_1 = require("./NativeLayers.js");
const nonce_js_1 = require("./nonce.js");
const prefetch_state_js_1 = require("./prefetch-state.js");
+const RenderAssetManager_js_1 = require("./RenderAssetManager.js");
const ResolveCompositionConfig_js_1 = require("./ResolveCompositionConfig.js");
const SequenceContext_js_1 = require("./SequenceContext.js");
const SequenceManager_js_1 = require("./SequenceManager.js");
@@ -49,7 +49,7 @@ function useRemotionContexts() {
const nativeLayersContext = react_1.default.useContext(NativeLayers_js_1.NativeLayersContext);
const preloadContext = react_1.default.useContext(prefetch_state_js_1.PreloadContext);
const resolveCompositionContext = react_1.default.useContext(ResolveCompositionConfig_js_1.ResolveCompositionContext);
-const
+const renderAssetManagerContext = react_1.default.useContext(RenderAssetManager_js_1.RenderAssetManager);
const sequenceManagerContext = react_1.default.useContext(SequenceManager_js_1.SequenceManager);
return (0, react_1.useMemo)(() => ({
compositionManagerCtx,
@@ -61,7 +61,7 @@ function useRemotionContexts() {
nativeLayersContext,
preloadContext,
resolveCompositionContext,
-
+renderAssetManagerContext,
sequenceManagerContext,
}), [
compositionManagerCtx,
@@ -73,13 +73,13 @@ function useRemotionContexts() {
nativeLayersContext,
preloadContext,
resolveCompositionContext,
-
+renderAssetManagerContext,
sequenceManagerContext,
]);
}
exports.useRemotionContexts = useRemotionContexts;
const RemotionContextProvider = (props) => {
const { children, contexts } = props;
-return ((0, jsx_runtime_1.jsx)(CanUseRemotionHooks_js_1.CanUseRemotionHooks.Provider, { value: contexts.canUseRemotionHooksContext, children: (0, jsx_runtime_1.jsx)(nonce_js_1.NonceContext.Provider, { value: contexts.nonceContext, children: (0, jsx_runtime_1.jsx)(NativeLayers_js_1.NativeLayersContext.Provider, { value: contexts.nativeLayersContext, children: (0, jsx_runtime_1.jsx)(prefetch_state_js_1.PreloadContext.Provider, { value: contexts.preloadContext, children: (0, jsx_runtime_1.jsx)(CompositionManagerContext_js_1.CompositionManager.Provider, { value: contexts.compositionManagerCtx, children: (0, jsx_runtime_1.jsx)(SequenceManager_js_1.SequenceManager.Provider, { value: contexts.sequenceManagerContext, children: (0, jsx_runtime_1.jsx)(
+return ((0, jsx_runtime_1.jsx)(CanUseRemotionHooks_js_1.CanUseRemotionHooks.Provider, { value: contexts.canUseRemotionHooksContext, children: (0, jsx_runtime_1.jsx)(nonce_js_1.NonceContext.Provider, { value: contexts.nonceContext, children: (0, jsx_runtime_1.jsx)(NativeLayers_js_1.NativeLayersContext.Provider, { value: contexts.nativeLayersContext, children: (0, jsx_runtime_1.jsx)(prefetch_state_js_1.PreloadContext.Provider, { value: contexts.preloadContext, children: (0, jsx_runtime_1.jsx)(CompositionManagerContext_js_1.CompositionManager.Provider, { value: contexts.compositionManagerCtx, children: (0, jsx_runtime_1.jsx)(SequenceManager_js_1.SequenceManager.Provider, { value: contexts.sequenceManagerContext, children: (0, jsx_runtime_1.jsx)(RenderAssetManager_js_1.RenderAssetManager.Provider, { value: contexts.renderAssetManagerContext, children: (0, jsx_runtime_1.jsx)(ResolveCompositionConfig_js_1.ResolveCompositionContext.Provider, { value: contexts.resolveCompositionContext, children: (0, jsx_runtime_1.jsx)(timeline_position_state_js_1.TimelineContext.Provider, { value: contexts.timelineContext, children: (0, jsx_runtime_1.jsx)(timeline_position_state_js_1.SetTimelineContext.Provider, { value: contexts.setTimelineContext, children: (0, jsx_runtime_1.jsx)(SequenceContext_js_1.SequenceContext.Provider, { value: contexts.sequenceContext, children: children }) }) }) }) }) }) }) }) }) }) }));
};
exports.RemotionContextProvider = RemotionContextProvider;
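wrap-remotion-context exists so Remotion's contexts can be carried across a custom React renderer boundary (for example React Three Fiber), and the context bag it collects and re-provides now includes renderAssetManagerContext. A minimal usage sketch of the two exports of this module (the scene component name is illustrative):

// Capture all contexts in the DOM tree...
const contexts = useRemotionContexts();

// ...and re-provide them inside the foreign renderer, so <Audio>/<Video>
// rendered there can still register their render assets.
return (
  <RemotionContextProvider contexts={contexts}>
    <MyThreeFiberScene />
  </RemotionContextProvider>
);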
package/dist/esm/index.mjs
CHANGED
@@ -59,7 +59,7 @@ function truthy(value) {
}

// Automatically generated on publish
-const VERSION = '4.1.0-alpha10';
+const VERSION = '4.1.0-alpha12';

const checkMultipleRemotionVersions = () => {
if (typeof globalThis === 'undefined') {
@@ -549,11 +549,15 @@ const resolveVideoConfig = ({ composition, editorProps: editorPropsOrUndefined,
abortSignal: signal,
})
: null;
+const fallbackProps = {
+...((_c = composition.defaultProps) !== null && _c !== void 0 ? _c : {}),
+...(inputProps !== null && inputProps !== void 0 ? inputProps : {}),
+};
if (calculatedProm !== null &&
typeof calculatedProm === 'object' &&
'then' in calculatedProm) {
return calculatedProm.then((c) => {
-var _a, _b
+var _a, _b;
const { height, width, durationInFrames, fps } = validateCalculated({
calculated: c,
composition,
@@ -565,7 +569,7 @@ const resolveVideoConfig = ({ composition, editorProps: editorPropsOrUndefined,
durationInFrames,
id: composition.id,
defaultProps: (_a = composition.defaultProps) !== null && _a !== void 0 ? _a : {},
-props: (
+props: (_b = c.props) !== null && _b !== void 0 ? _b : fallbackProps,
};
});
}
@@ -577,11 +581,8 @@ const resolveVideoConfig = ({ composition, editorProps: editorPropsOrUndefined,
return {
...data,
id: composition.id,
-defaultProps: (
-props:
-...((_d = composition.defaultProps) !== null && _d !== void 0 ? _d : {}),
-...(inputProps !== null && inputProps !== void 0 ? inputProps : {}),
-},
+defaultProps: (_d = composition.defaultProps) !== null && _d !== void 0 ? _d : {},
+props: fallbackProps,
};
}
return {
@@ -1772,8 +1773,18 @@ const useMediaPlayback = ({ mediaRef, src, mediaType, playbackRate: localPlaybac
if (!src) {
throw new Error(`No 'src' attribute was passed to the ${tagName} element.`);
}
-
-
+const playbackRateToSet = Math.max(0, playbackRate);
+if (mediaRef.current.playbackRate !== playbackRateToSet) {
+mediaRef.current.playbackRate = playbackRateToSet;
+}
+// Let's throttle the seeking to only every 10 frames when a video is playing to avoid bottlenecking
+// the video tag.
+if (playing) {
+if (absoluteFrame % 10 !== 0) {
+return;
+}
+}
+const desiredUnclampedTime = getMediaTime({
fps,
frame,
src,
@@ -1781,15 +1792,20 @@ const useMediaPlayback = ({ mediaRef, src, mediaType, playbackRate: localPlaybac
startFrom: -mediaStartsAt,
mediaType,
});
+const { duration } = mediaRef.current;
+const shouldBeTime = !Number.isNaN(duration) && Number.isFinite(duration)
+? Math.min(duration, desiredUnclampedTime)
+: desiredUnclampedTime;
const isTime = mediaRef.current.currentTime;
const timeShift = Math.abs(shouldBeTime - isTime);
-if (timeShift > acceptableTimeshift
+if (timeShift > acceptableTimeshift) {
// If scrubbing around, adjust timing
-// or if time shift is bigger than 0.
+// or if time shift is bigger than 0.45sec
mediaRef.current.currentTime = shouldBeTime;
if (!onlyWarnForMediaSeekingError) {
warnAboutNonSeekableMedia(mediaRef.current, onlyWarnForMediaSeekingError ? 'console-warning' : 'console-error');
}
+return;
}
// Only perform a seek if the time is not already the same.
// Chrome rounds to 6 digits, so 0.033333333 -> 0.033333,
@@ -1797,15 +1813,15 @@ const useMediaPlayback = ({ mediaRef, src, mediaType, playbackRate: localPlaybac
// Refer to the https://github.com/remotion-dev/video-buffering-example
// which is fixed by only seeking conditionally.
const makesSenseToSeek = Math.abs(mediaRef.current.currentTime - shouldBeTime) > 0.00001;
+if (!makesSenseToSeek) {
+return;
+}
if (!playing || absoluteFrame === 0) {
-
-
-}
+mediaRef.current.currentTime = shouldBeTime;
+return;
}
if (mediaRef.current.paused && !mediaRef.current.ended && playing) {
-
-mediaRef.current.currentTime = shouldBeTime;
-}
+mediaRef.current.currentTime = shouldBeTime;
playAndHandleNotAllowedError(mediaRef, mediaType);
}
}, [
@@ -2201,41 +2217,6 @@ const AudioForDevelopmentForwardRefFunction = (props, ref) => {
};
const AudioForDevelopment = forwardRef(AudioForDevelopmentForwardRefFunction);

-const AssetManager = createContext({
-registerAsset: () => undefined,
-unregisterAsset: () => undefined,
-assets: [],
-});
-const AssetManagerProvider = ({ children }) => {
-const [assets, setAssets] = useState([]);
-const registerAsset = useCallback((asset) => {
-setAssets((assts) => {
-return [...assts, asset];
-});
-}, []);
-const unregisterAsset = useCallback((id) => {
-setAssets((assts) => {
-return assts.filter((a) => a.id !== id);
-});
-}, []);
-useLayoutEffect(() => {
-if (typeof window !== 'undefined') {
-window.remotion_collectAssets = () => {
-setAssets([]); // clear assets at next render
-return assets;
-};
-}
-}, [assets]);
-const contextValue = useMemo(() => {
-return {
-registerAsset,
-unregisterAsset,
-assets,
-};
-}, [assets, registerAsset, unregisterAsset]);
-return (jsx(AssetManager.Provider, { value: contextValue, children: children }));
-};
-
if (typeof window !== 'undefined') {
window.remotion_renderReady = false;
}
@@ -2315,13 +2296,48 @@ const continueRender = (handle) => {
}
};

+const RenderAssetManager = createContext({
+registerRenderAsset: () => undefined,
+unregisterRenderAsset: () => undefined,
+renderAssets: [],
+});
+const RenderAssetManagerProvider = ({ children }) => {
+const [renderAssets, setRenderAssets] = useState([]);
+const registerRenderAsset = useCallback((renderAsset) => {
+setRenderAssets((assets) => {
+return [...assets, renderAsset];
+});
+}, []);
+const unregisterRenderAsset = useCallback((id) => {
+setRenderAssets((assts) => {
+return assts.filter((a) => a.id !== id);
+});
+}, []);
+useLayoutEffect(() => {
+if (typeof window !== 'undefined') {
+window.remotion_collectAssets = () => {
+setRenderAssets([]); // clear assets at next render
+return renderAssets;
+};
+}
+}, [renderAssets]);
+const contextValue = useMemo(() => {
+return {
+registerRenderAsset,
+unregisterRenderAsset,
+renderAssets,
+};
+}, [renderAssets, registerRenderAsset, unregisterRenderAsset]);
+return (jsx(RenderAssetManager.Provider, { value: contextValue, children: children }));
+};
+

const AudioForRenderingRefForwardingFunction = (props, ref) => {
const audioRef = useRef(null);
const absoluteFrame = useTimelinePosition();
const volumePropFrame = useFrameForVolumeProp();
const frame = useCurrentFrame();
const sequenceContext = useContext(SequenceContext);
-const {
+const { registerRenderAsset, unregisterRenderAsset } = useContext(RenderAssetManager);
const environment = useRemotionEnvironment();
// Generate a string that's as unique as possible for this asset
// but at the same time the same on all threads
@@ -2353,7 +2369,7 @@ const AudioForRenderingRefForwardingFunction = (props, ref) => {
if (volume <= 0) {
return;
}
-
+registerRenderAsset({
type: 'audio',
src: getAbsoluteSrc(props.src),
id,
@@ -2363,14 +2379,14 @@ const AudioForRenderingRefForwardingFunction = (props, ref) => {
playbackRate: (_a = props.playbackRate) !== null && _a !== void 0 ? _a : 1,
allowAmplificationDuringRender: allowAmplificationDuringRender !== null && allowAmplificationDuringRender !== void 0 ? allowAmplificationDuringRender : false,
});
-return () =>
+return () => unregisterRenderAsset(id);
}, [
props.muted,
props.src,
-
+registerRenderAsset,
absoluteFrame,
id,
-
+unregisterRenderAsset,
volume,
volumePropFrame,
frame,
@@ -2425,8 +2441,16 @@ const AudioRefForwardingFunction = (props, ref) => {
}
const onError = useCallback((e) => {
console.log(e.currentTarget.error);
-
-
+// If there is no `loop` property, we don't need to get the duration
+// and thsi does not need to be a fatal error
+const errMessage = `Could not play audio with src ${otherProps.src}: ${e.currentTarget.error}. See https://remotion.dev/docs/media-playback-error for help.`;
+if (loop) {
+cancelRender(new Error(errMessage));
+}
+else {
+console.warn(errMessage);
+}
+}, [loop, otherProps.src]);
const onDuration = useCallback((src, durationInSeconds) => {
setDurations({ type: 'got-duration', durationInSeconds, src });
}, [setDurations]);
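The new RenderAssetManagerProvider is what backs window.remotion_collectAssets() during a render: every mounted <Audio>, <Video> or <OffthreadVideo> registers itself, the renderer calls the global once per captured frame, and the provider hands back the current list while clearing it for the next frame. A rough sketch of the consuming side (only the window global and the TRenderAsset type are defined in this diff; the surrounding renderer loop is illustrative):

// In the headless browser, after a frame has been drawn:
const assetsForThisFrame: TRenderAsset[] = window.remotion_collectAssets();
// Each entry describes a piece of media present on this frame
// (e.g. type 'audio' | 'video', src, id, playbackRate, ...) and is
// presumably used later to assemble the audio/video track.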
@@ -3141,7 +3165,7 @@ const CompositionManagerProvider = ({ children, numberOfAudioTags }) => {
unregisterFolder,
currentCompositionMetadata,
]);
-return (jsx(CompositionManager.Provider, { value: contextValue, children: jsx(SequenceManagerProvider, { children: jsx(
+return (jsx(CompositionManager.Provider, { value: contextValue, children: jsx(SequenceManagerProvider, { children: jsx(RenderAssetManagerProvider, { children: jsx(ResolveCompositionConfig, { children: jsx(SharedAudioContextProvider, { numberOfAudioTags: numberOfAudioTags, component: (_a = composition === null || composition === void 0 ? void 0 : composition.component) !== null && _a !== void 0 ? _a : null, children: children }) }) }) }) }));
};

const injected = {};
@@ -3749,7 +3773,7 @@ function useRemotionContexts() {
const nativeLayersContext = React.useContext(NativeLayersContext);
const preloadContext = React.useContext(PreloadContext);
const resolveCompositionContext = React.useContext(ResolveCompositionContext);
-const
+const renderAssetManagerContext = React.useContext(RenderAssetManager);
const sequenceManagerContext = React.useContext(SequenceManager);
return useMemo(() => ({
compositionManagerCtx,
@@ -3761,7 +3785,7 @@ function useRemotionContexts() {
nativeLayersContext,
preloadContext,
resolveCompositionContext,
-
+renderAssetManagerContext,
sequenceManagerContext,
}), [
compositionManagerCtx,
@@ -3773,13 +3797,13 @@ function useRemotionContexts() {
nativeLayersContext,
preloadContext,
resolveCompositionContext,
-
+renderAssetManagerContext,
sequenceManagerContext,
]);
}
const RemotionContextProvider = (props) => {
const { children, contexts } = props;
-return (jsx(CanUseRemotionHooks.Provider, { value: contexts.canUseRemotionHooksContext, children: jsx(NonceContext.Provider, { value: contexts.nonceContext, children: jsx(NativeLayersContext.Provider, { value: contexts.nativeLayersContext, children: jsx(PreloadContext.Provider, { value: contexts.preloadContext, children: jsx(CompositionManager.Provider, { value: contexts.compositionManagerCtx, children: jsx(SequenceManager.Provider, { value: contexts.sequenceManagerContext, children: jsx(
+return (jsx(CanUseRemotionHooks.Provider, { value: contexts.canUseRemotionHooksContext, children: jsx(NonceContext.Provider, { value: contexts.nonceContext, children: jsx(NativeLayersContext.Provider, { value: contexts.nativeLayersContext, children: jsx(PreloadContext.Provider, { value: contexts.preloadContext, children: jsx(CompositionManager.Provider, { value: contexts.compositionManagerCtx, children: jsx(SequenceManager.Provider, { value: contexts.sequenceManagerContext, children: jsx(RenderAssetManager.Provider, { value: contexts.renderAssetManagerContext, children: jsx(ResolveCompositionContext.Provider, { value: contexts.resolveCompositionContext, children: jsx(TimelineContext.Provider, { value: contexts.timelineContext, children: jsx(SetTimelineContext.Provider, { value: contexts.setTimelineContext, children: jsx(SequenceContext.Provider, { value: contexts.sequenceContext, children: children }) }) }) }) }) }) }) }) }) }) }));
};

const Timeline = TimelinePosition;
@@ -3836,7 +3860,7 @@ const Internals = {
resolveCompositionsRef,
ResolveCompositionConfig,
REMOTION_STUDIO_CONTAINER_ELEMENT,
-
+RenderAssetManager,
bundleName: 'bundle.js',
bundleMapName: 'bundle.js.map',
persistCurrentFrame,
@@ -4188,7 +4212,7 @@ const OffthreadVideoForRendering = ({ onError, volume: volumeProp, playbackRate,
const videoConfig = useUnsafeVideoConfig();
const sequenceContext = useContext(SequenceContext);
const mediaStartsAt = useMediaStartsAt();
-const {
+const { registerRenderAsset, unregisterRenderAsset } = useContext(RenderAssetManager);
if (!src) {
throw new TypeError('No `src` was passed to <OffthreadVideo>.');
}
@@ -4222,7 +4246,7 @@ const OffthreadVideoForRendering = ({ onError, volume: volumeProp, playbackRate,
if (volume <= 0) {
return;
}
-
+registerRenderAsset({
type: 'video',
src: getAbsoluteSrc(src),
id,
@@ -4232,13 +4256,13 @@ const OffthreadVideoForRendering = ({ onError, volume: volumeProp, playbackRate,
playbackRate: playbackRate !== null && playbackRate !== void 0 ? playbackRate : 1,
allowAmplificationDuringRender: allowAmplificationDuringRender !== null && allowAmplificationDuringRender !== void 0 ? allowAmplificationDuringRender : false,
});
-return () =>
+return () => unregisterRenderAsset(id);
}, [
muted,
src,
-
+registerRenderAsset,
id,
-
+unregisterRenderAsset,
volume,
frame,
absoluteFrame,
@@ -4468,7 +4492,7 @@ const VideoForRenderingForwardFunction = ({ onError, volume: volumeProp, allowAm
const sequenceContext = useContext(SequenceContext);
const mediaStartsAt = useMediaStartsAt();
const environment = useRemotionEnvironment();
-const {
+const { registerRenderAsset, unregisterRenderAsset } = useContext(RenderAssetManager);
// Generate a string that's as unique as possible for this asset
// but at the same time the same on all threads
const id = useMemo(() => {
@@ -4502,7 +4526,7 @@ const VideoForRenderingForwardFunction = ({ onError, volume: volumeProp, allowAm
if (!window.remotion_audioEnabled) {
return;
}
-
+registerRenderAsset({
type: 'video',
src: getAbsoluteSrc(props.src),
id,
@@ -4512,13 +4536,13 @@ const VideoForRenderingForwardFunction = ({ onError, volume: volumeProp, allowAm
playbackRate: playbackRate !== null && playbackRate !== void 0 ? playbackRate : 1,
allowAmplificationDuringRender: allowAmplificationDuringRender !== null && allowAmplificationDuringRender !== void 0 ? allowAmplificationDuringRender : false,
});
-return () =>
+return () => unregisterRenderAsset(id);
}, [
props.muted,
props.src,
-
+registerRenderAsset,
id,
-
+unregisterRenderAsset,
volume,
frame,
absoluteFrame,
package/dist/esm/version.mjs
CHANGED