remotion 4.0.450 → 4.0.452
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/PremountContext.d.ts +0 -1
- package/dist/cjs/PremountContext.js +0 -1
- package/dist/cjs/RemotionRoot.js +1 -1
- package/dist/cjs/Sequence.js +1 -9
- package/dist/cjs/audio/Audio.js +2 -2
- package/dist/cjs/audio/shared-audio-tags.d.ts +33 -12
- package/dist/cjs/audio/shared-audio-tags.js +152 -75
- package/dist/cjs/audio/use-audio-context.d.ts +4 -1
- package/dist/cjs/audio/use-audio-context.js +8 -3
- package/dist/cjs/audio/wait-until-actually-resumed.d.ts +2 -0
- package/dist/cjs/audio/wait-until-actually-resumed.js +30 -0
- package/dist/cjs/canvas-effects/HtmlInCanvas.d.ts +12 -0
- package/dist/cjs/canvas-effects/HtmlInCanvas.js +91 -0
- package/dist/cjs/canvas-effects/Solid.d.ts +12 -0
- package/dist/cjs/canvas-effects/Solid.js +49 -0
- package/dist/cjs/canvas-effects/canvas-pool.d.ts +11 -0
- package/dist/cjs/canvas-effects/canvas-pool.js +70 -0
- package/dist/cjs/canvas-effects/define-effect.d.ts +3 -0
- package/dist/cjs/canvas-effects/define-effect.js +21 -0
- package/dist/cjs/canvas-effects/effect-internals.d.ts +7 -0
- package/dist/cjs/canvas-effects/effect-internals.js +42 -0
- package/dist/cjs/canvas-effects/effect-types.d.ts +26 -0
- package/dist/cjs/canvas-effects/effect-types.js +13 -0
- package/dist/cjs/canvas-effects/gpu-device.d.ts +2 -0
- package/dist/cjs/canvas-effects/gpu-device.js +36 -0
- package/dist/cjs/canvas-effects/index.d.ts +6 -0
- package/dist/cjs/canvas-effects/index.js +10 -0
- package/dist/cjs/canvas-effects/use-effect-chain.d.ts +13 -0
- package/dist/cjs/canvas-effects/use-effect-chain.js +197 -0
- package/dist/cjs/internals.d.ts +19 -8
- package/dist/cjs/internals.js +2 -0
- package/dist/cjs/sequence-timing-schema.d.ts +19 -0
- package/dist/cjs/sequence-timing-schema.js +22 -0
- package/dist/cjs/use-amplification.js +13 -3
- package/dist/cjs/version.d.ts +1 -1
- package/dist/cjs/version.js +1 -1
- package/dist/esm/index.mjs +212 -106
- package/dist/esm/version.mjs +1 -1
- package/package.json +2 -2
package/dist/cjs/RemotionRoot.js
CHANGED
@@ -23,6 +23,6 @@ const RemotionRootContexts = ({ children, numberOfAudioTags, logLevel, audioLate
     const logging = (0, react_1.useMemo)(() => {
         return { logLevel, mountTime: Date.now() };
     }, [logLevel]);
-    return ((0, jsx_runtime_1.jsx)(log_level_context_js_1.LogLevelContext.Provider, { value: logging, children: (0, jsx_runtime_1.jsx)(nonce_js_1.NonceContext.Provider, { value: nonceContext, children: (0, jsx_runtime_1.jsx)(TimelineContext_js_1.TimelineContextProvider, { frameState: frameState, children: (0, jsx_runtime_1.jsx)(use_media_enabled_js_1.MediaEnabledProvider, { videoEnabled: videoEnabled, audioEnabled: audioEnabled, children: (0, jsx_runtime_1.jsx)(EditorProps_js_1.EditorPropsProvider, { children: (0, jsx_runtime_1.jsx)(prefetch_state_js_1.PrefetchProvider, { children: (0, jsx_runtime_1.jsx)(SequenceManager_js_1.SequenceManagerProvider, { visualModeEnabled: visualModeEnabled, children: (0, jsx_runtime_1.jsx)(
+    return ((0, jsx_runtime_1.jsx)(log_level_context_js_1.LogLevelContext.Provider, { value: logging, children: (0, jsx_runtime_1.jsx)(nonce_js_1.NonceContext.Provider, { value: nonceContext, children: (0, jsx_runtime_1.jsx)(TimelineContext_js_1.TimelineContextProvider, { frameState: frameState, children: (0, jsx_runtime_1.jsx)(use_media_enabled_js_1.MediaEnabledProvider, { videoEnabled: videoEnabled, audioEnabled: audioEnabled, children: (0, jsx_runtime_1.jsx)(EditorProps_js_1.EditorPropsProvider, { children: (0, jsx_runtime_1.jsx)(prefetch_state_js_1.PrefetchProvider, { children: (0, jsx_runtime_1.jsx)(SequenceManager_js_1.SequenceManagerProvider, { visualModeEnabled: visualModeEnabled, children: (0, jsx_runtime_1.jsx)(duration_state_js_1.DurationsContextProvider, { children: (0, jsx_runtime_1.jsx)(buffering_js_1.BufferingProvider, { children: (0, jsx_runtime_1.jsx)(shared_audio_tags_js_1.SharedAudioContextProvider, { audioLatencyHint: audioLatencyHint, audioEnabled: audioEnabled, children: (0, jsx_runtime_1.jsx)(shared_audio_tags_js_1.SharedAudioTagsContextProvider, { numberOfAudioTags: numberOfAudioTags, children: children }) }) }) }) }) }) }) }) }) }) }));
 };
 exports.RemotionRootContexts = RemotionRootContexts;
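For readability, here is the provider tree from the `+` line above, re-indented as TSX. This is a sketch decompiled from the minified line, not the actual source; it only makes the nesting visible. The two audio providers are now separate layers at the innermost position:

<LogLevelContext.Provider value={logging}>
  <NonceContext.Provider value={nonceContext}>
    <TimelineContextProvider frameState={frameState}>
      <MediaEnabledProvider videoEnabled={videoEnabled} audioEnabled={audioEnabled}>
        <EditorPropsProvider>
          <PrefetchProvider>
            <SequenceManagerProvider visualModeEnabled={visualModeEnabled}>
              <DurationsContextProvider>
                <BufferingProvider>
                  <SharedAudioContextProvider audioLatencyHint={audioLatencyHint} audioEnabled={audioEnabled}>
                    <SharedAudioTagsContextProvider numberOfAudioTags={numberOfAudioTags}>
                      {children}
                    </SharedAudioTagsContextProvider>
                  </SharedAudioContextProvider>
                </BufferingProvider>
              </DurationsContextProvider>
            </SequenceManagerProvider>
          </PrefetchProvider>
        </EditorPropsProvider>
      </MediaEnabledProvider>
    </TimelineContextProvider>
  </NonceContext.Provider>
</LogLevelContext.Provider>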
package/dist/cjs/Sequence.js
CHANGED
@@ -202,15 +202,7 @@ const PremountedPostmountedSequenceRefForwardingFunction = (props, ref) => {
         styleWhilePremounted,
         styleWhilePostmounted,
     ]);
-
-    const premountFramesRemaining = premountingActive ? from - frame : 0;
-    const premountContextValue = (0, react_1.useMemo)(() => {
-        return {
-            premountFramesRemaining,
-            playing: parentPremountContext.playing || playing,
-        };
-    }, [premountFramesRemaining, parentPremountContext.playing, playing]);
-    return ((0, jsx_runtime_1.jsx)(PremountContext_js_1.PremountContext.Provider, { value: premountContextValue, children: (0, jsx_runtime_1.jsx)(freeze_js_1.Freeze, { frame: freezeFrame, active: isFreezingActive, children: (0, jsx_runtime_1.jsx)(exports.Sequence, { ref: ref, from: from, durationInFrames: durationInFrames, style: style, _remotionInternalPremountDisplay: premountFor, _remotionInternalPostmountDisplay: postmountFor, _remotionInternalIsPremounting: premountingActive, _remotionInternalIsPostmounting: postmountingActive, ...otherProps }) }) }));
+    return ((0, jsx_runtime_1.jsx)(freeze_js_1.Freeze, { frame: freezeFrame, active: isFreezingActive, children: (0, jsx_runtime_1.jsx)(exports.Sequence, { ref: ref, from: from, durationInFrames: durationInFrames, style: style, _remotionInternalPremountDisplay: premountFor, _remotionInternalPostmountDisplay: postmountFor, _remotionInternalIsPremounting: premountingActive, _remotionInternalIsPostmounting: postmountingActive, ...otherProps }) }));
 };
 const PremountedPostmountedSequence = (0, react_1.forwardRef)(PremountedPostmountedSequenceRefForwardingFunction);
 const SequenceRefForwardingFunction = (props, ref) => {
package/dist/cjs/audio/Audio.js
CHANGED
@@ -21,7 +21,7 @@ const AudioForRendering_js_1 = require("./AudioForRendering.js");
 const shared_audio_tags_js_1 = require("./shared-audio-tags.js");
 const AudioRefForwardingFunction = (props, ref) => {
     var _a, _b, _c;
-    const
+    const audioTagsContext = (0, react_1.useContext)(shared_audio_tags_js_1.SharedAudioTagsContext);
     const { startFrom, endAt, trimBefore, trimAfter, name, stack, pauseWhenBuffering, showInTimeline, onError: onRemotionError, ...otherProps } = props;
     const { loop, ...propsOtherThanLoop } = props;
     const { fps } = (0, use_video_config_js_1.useVideoConfig)();
@@ -84,7 +84,7 @@ const AudioRefForwardingFunction = (props, ref) => {
     if (environment.isRendering) {
         return ((0, jsx_runtime_1.jsx)(AudioForRendering_js_1.AudioForRendering, { onDuration: onDuration, ...props, ref: ref, onNativeError: onError, _remotionInternalNeedsDurationCalculation: Boolean(loop) }));
     }
-    return ((0, jsx_runtime_1.jsx)(AudioForPreview_js_1.AudioForPreview, { _remotionInternalNativeLoopPassed: (_c = props._remotionInternalNativeLoopPassed) !== null && _c !== void 0 ? _c : false, _remotionInternalStack: stack !== null && stack !== void 0 ? stack : null, shouldPreMountAudioTags:
+    return ((0, jsx_runtime_1.jsx)(AudioForPreview_js_1.AudioForPreview, { _remotionInternalNativeLoopPassed: (_c = props._remotionInternalNativeLoopPassed) !== null && _c !== void 0 ? _c : false, _remotionInternalStack: stack !== null && stack !== void 0 ? stack : null, shouldPreMountAudioTags: audioTagsContext !== null && audioTagsContext.numberOfAudioTags > 0, ...props, ref: ref, onNativeError: onError, onDuration: onDuration,
     // Proposal: Make this default to true in v5
     pauseWhenBuffering: pauseWhenBuffering !== null && pauseWhenBuffering !== void 0 ? pauseWhenBuffering : false, _remotionInternalNeedsDurationCalculation: Boolean(loop), showInTimeline: showInTimeline !== null && showInTimeline !== void 0 ? showInTimeline : true }));
 };
package/dist/cjs/audio/shared-audio-tags.d.ts
CHANGED
@@ -28,17 +28,39 @@ export type ScheduleAudioNodeResult = {
     scheduledTime: number;
 } | {
     type: 'not-started';
+    reason: string;
 };
 export type ScheduleAudioNodeOptions = {
     readonly node: AudioBufferSourceNode;
-    readonly targetTime: number;
     readonly mediaTimestamp: number;
     readonly currentTime: number;
-    readonly
-    readonly
-    readonly
+    readonly scheduledTime: number;
+    readonly originalUnloopedMediaTimestamp: number;
+    readonly duration: number;
+    readonly offset: number;
 };
-type
+export type AudioSyncAnchorEvent = 'changed';
+export type AudioSyncAnchorListener = (event: AudioSyncAnchorEvent) => void;
+export type AudioSyncAnchorEmitter = {
+    dispatch: (event: AudioSyncAnchorEvent) => void;
+    subscribe: (listener: AudioSyncAnchorListener) => {
+        remove: () => void;
+    };
+};
+type SharedAudioContextValue = {
+    audioContext: AudioContext | null;
+    gainNode: GainNode | null;
+    audioSyncAnchor: {
+        value: number;
+    };
+    audioSyncAnchorEmitter: AudioSyncAnchorEmitter;
+    scheduleAudioNode: (options: ScheduleAudioNodeOptions) => ScheduleAudioNodeResult;
+    resume: () => Promise<void>;
+    suspend: () => void;
+    getIsResumingAudioContext: () => Promise<void> | null;
+    unscheduleAudioNode: (node: AudioBufferSourceNode) => void;
+};
+type SharedAudioTagsContextValue = {
     registerAudio: (options: {
         aud: AudioHTMLAttributes<HTMLAudioElement>;
         audioId: string;
@@ -55,19 +77,18 @@ type SharedContext = {
     }) => void;
     playAllAudios: () => void;
     numberOfAudioTags: number;
-    audioContext: AudioContext | null;
-    audioSyncAnchor: {
-        value: number;
-    };
-    scheduleAudioNode: (options: ScheduleAudioNodeOptions) => ScheduleAudioNodeResult;
 };
-export declare const SharedAudioContext: React.Context<
+export declare const SharedAudioContext: React.Context<SharedAudioContextValue | null>;
+export declare const SharedAudioTagsContext: React.Context<SharedAudioTagsContextValue | null>;
 export declare const SharedAudioContextProvider: React.FC<{
-    readonly numberOfAudioTags: number;
     readonly children: React.ReactNode;
     readonly audioLatencyHint: AudioContextLatencyCategory;
     readonly audioEnabled: boolean;
 }>;
+export declare const SharedAudioTagsContextProvider: React.FC<{
+    readonly numberOfAudioTags: number;
+    readonly children: React.ReactNode;
+}>;
 export declare const useSharedAudio: ({ aud, audioId, premounting, postmounting, }: {
     aud: AudioHTMLAttributes<HTMLAudioElement>;
     audioId: string;
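The `AudioSyncAnchorEmitter` introduced above is a plain listener registry. A minimal consumer sketch, self-contained by copying the declared types; the `emitter` binding is illustrative and would come from the value on `SharedAudioContext`:

// Types copied from the declarations above for self-containment.
type AudioSyncAnchorEvent = 'changed';
type AudioSyncAnchorListener = (event: AudioSyncAnchorEvent) => void;
type AudioSyncAnchorEmitter = {
  dispatch: (event: AudioSyncAnchorEvent) => void;
  subscribe: (listener: AudioSyncAnchorListener) => {remove: () => void};
};

declare const emitter: AudioSyncAnchorEmitter; // illustrative: read from SharedAudioContext

// subscribe() returns a handle; remove() filters the listener back out,
// mirroring the implementation added in shared-audio-tags.js below.
const subscription = emitter.subscribe((event) => {
  console.log('audio sync anchor', event); // event is always 'changed'
});
subscription.remove();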
package/dist/cjs/audio/shared-audio-tags.js
CHANGED
@@ -33,7 +33,7 @@ var __importStar = (this && this.__importStar) || (function () {
     };
 })();
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.useSharedAudio = exports.SharedAudioContextProvider = exports.SharedAudioContext = void 0;
+exports.useSharedAudio = exports.SharedAudioTagsContextProvider = exports.SharedAudioContextProvider = exports.SharedAudioTagsContext = exports.SharedAudioContext = void 0;
 const jsx_runtime_1 = require("react/jsx-runtime");
 const react_1 = __importStar(require("react"));
 const log_level_context_js_1 = require("../log-level-context.js");
@@ -42,6 +42,7 @@ const play_and_handle_not_allowed_error_js_1 = require("../play-and-handle-not-a
 const use_remotion_environment_js_1 = require("../use-remotion-environment.js");
 const shared_element_source_node_js_1 = require("./shared-element-source-node.js");
 const use_audio_context_js_1 = require("./use-audio-context.js");
+const wait_until_actually_resumed_js_1 = require("./wait-until-actually-resumed.js");
 const EMPTY_AUDIO = 'data:audio/mp3;base64,/+MYxAAJcAV8AAgAABn//////+/gQ5BAMA+D4Pg+BAQBAEAwD4Pg+D4EBAEAQDAPg++hYBH///hUFQVBUFREDQNHmf///////+MYxBUGkAGIMAAAAP/29Xt6lUxBTUUzLjEwMFVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVV/+MYxDUAAANIAAAAAFVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVV';
 const compareProps = (obj1, obj2) => {
     const keysA = Object.keys(obj1).sort();
@@ -75,75 +76,77 @@ const didPropChange = (key, newProp, prevProp) => {
     return true;
 };
 exports.SharedAudioContext = (0, react_1.createContext)(null);
-
-
-    const audios = (0, react_1.useRef)([]);
-    const [initialNumberOfAudioTags] = (0, react_1.useState)(numberOfAudioTags);
-    if (numberOfAudioTags !== initialNumberOfAudioTags) {
-        throw new Error('The number of shared audio tags has changed dynamically. Once you have set this property, you cannot change it afterwards.');
-    }
+exports.SharedAudioTagsContext = (0, react_1.createContext)(null);
+const SharedAudioContextProvider = ({ children, audioLatencyHint, audioEnabled }) => {
     const logLevel = (0, log_level_context_js_1.useLogLevel)();
-    const
+    const ctxAndGain = (0, use_audio_context_js_1.useSingletonAudioContext)({
         logLevel,
         latencyHint: audioLatencyHint,
         audioEnabled,
     });
+    const audioContextIsPlayingEventually = (0, react_1.useRef)(false);
+    const isResuming = (0, react_1.useRef)(null);
     const audioSyncAnchor = (0, react_1.useMemo)(() => ({ value: 0 }), []);
+    const audioSyncAnchorListeners = (0, react_1.useRef)([]);
+    const audioSyncAnchorEmitter = (0, react_1.useMemo)(() => {
+        return {
+            dispatch: (event) => {
+                audioSyncAnchorListeners.current.forEach((l) => l(event));
+            },
+            subscribe: (listener) => {
+                audioSyncAnchorListeners.current.push(listener);
+                return {
+                    remove: () => {
+                        audioSyncAnchorListeners.current =
+                            audioSyncAnchorListeners.current.filter((l) => l !== listener);
+                    },
+                };
+            },
+        };
+    }, []);
     const prevEndTimes = (0, react_1.useRef)({ scheduledEndTime: null, mediaEndTime: null });
+    const nodesToResume = (0, react_1.useRef)(new Map());
+    const unscheduleAudioNode = (0, react_1.useCallback)((node) => {
+        nodesToResume.current.delete(node);
+    }, []);
     const scheduleAudioNode = (0, react_1.useMemo)(() => {
-        return ({ node, mediaTimestamp,
-
-        if (!audioContext) {
+        return ({ node, mediaTimestamp, currentTime, scheduledTime, duration, offset, originalUnloopedMediaTimestamp, }) => {
+            if (!ctxAndGain) {
                 throw new Error('Audio context not found');
             }
-            const bufferDuration = (_b = (_a = node.buffer) === null || _a === void 0 ? void 0 : _a.duration) !== null && _b !== void 0 ? _b : 0;
-            const unclampedMediaEndTime = mediaTimestamp + bufferDuration;
-            const needsTrimEnd = unclampedMediaEndTime > sequenceEndTime;
-            const needsTrimStart = mediaTimestamp < sequenceStartTime;
-            const offsetBecauseOfTrim = needsTrimStart
-                ? sequenceStartTime - mediaTimestamp
-                : 0;
-            const offsetBecauseOfTooLate = targetTime < 0 ? -targetTime : 0;
-            const offset = offsetBecauseOfTrim + offsetBecauseOfTooLate;
-            const duration = needsTrimEnd
-                ? bufferDuration -
-                    Math.max(0, unclampedMediaEndTime - sequenceEndTime) -
-                    offset
-                : bufferDuration - offset;
-            const scheduledTime = targetTime + currentTime + offset;
-            if (offset < 0) {
-                throw new Error('offset < 0: ' +
-                    JSON.stringify({
-                        offset,
-                        targetTime,
-                        currentTime,
-                        offsetBecauseOfTrim,
-                        offsetBecauseOfTooLate,
-                    }));
-            }
             if (duration > 0) {
-
+                if (ctxAndGain.audioContext.state === 'suspended') {
+                    nodesToResume.current.set(node, {
+                        scheduledTime,
+                        offset,
+                        duration,
+                    });
+                }
+                else {
+                    node.start(scheduledTime, offset, duration);
+                }
             }
             const scheduledEndTime = scheduledTime + duration / node.playbackRate.value;
             const mediaTime = mediaTimestamp + offset;
             const mediaEndTime = mediaTime + duration;
-            const latency = audioContext.baseLatency +
-
+            const latency = ctxAndGain.audioContext.baseLatency +
+                ctxAndGain.audioContext.outputLatency;
+            const timeDiff = scheduledTime - ctxAndGain.audioContext.currentTime;
             const prev = prevEndTimes.current;
             const scheduledMismatch = prev.scheduledEndTime !== null &&
                 Math.abs(scheduledTime - prev.scheduledEndTime) > 0.001;
             const mediaMismatch = prev.mediaEndTime !== null &&
                 Math.abs(mediaTime - prev.mediaEndTime) > 0.001;
-
-
+            log_js_1.Log.verbose({ logLevel, tag: 'audio-scheduling' }, 'scheduled %c%s%c %s %c%s%c %s %c%s%c %s %s %s', scheduledMismatch ? 'color: red; font-weight: bold' : '', scheduledTime.toFixed(4), '', scheduledEndTime.toFixed(4), mediaMismatch ? 'color: red; font-weight: bold' : '', mediaTime.toFixed(4), '', mediaEndTime.toFixed(4), duration < 0
+                ? 'color: red; font-weight: bold'
+                : timeDiff < 0
                     ? 'color: red; font-weight: bold'
-                    :
-
-
-                    ? '
-
-
-            }
+                    : 'color: blue; font-weight: bold', duration < 0
+                ? 'missed ' + Math.abs(offset).toFixed(2) + 's'
+                : Math.abs(timeDiff).toFixed(2) +
+                    (timeDiff < 0 ? ' delay' : ' ahead'), '', 'current=' + currentTime.toFixed(4), 'offset=' + offset.toFixed(4), 'latency=' + latency.toFixed(4), 'state=' + ctxAndGain.audioContext.state, originalUnloopedMediaTimestamp !== mediaTime
+                ? 'original_ts=' + originalUnloopedMediaTimestamp.toFixed(4)
+                : '');
             prev.scheduledEndTime = scheduledEndTime;
             prev.mediaEndTime = mediaEndTime;
             return duration > 0
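The rewritten `scheduleAudioNode` above no longer calls `node.start()` while the `AudioContext` is suspended: it parks the start parameters in the `nodesToResume` map, and `resume()` replays them (see the next hunk). The technique in isolation, as a standalone TypeScript sketch rather than Remotion's exact code:

type PendingStart = {scheduledTime: number; offset: number; duration: number};
const pending = new Map<AudioBufferSourceNode, PendingStart>();

const scheduleOrDefer = (
  ctx: AudioContext,
  node: AudioBufferSourceNode,
  p: PendingStart,
) => {
  if (ctx.state === 'suspended') {
    // Defer the start until the context is running again,
    // as the provider above does.
    pending.set(node, p);
  } else {
    node.start(p.scheduledTime, p.offset, p.duration);
  }
};

const resumeAndFlush = async (ctx: AudioContext) => {
  await ctx.resume();
  pending.forEach((p, node) => node.start(p.scheduledTime, p.offset, p.duration));
  pending.clear();
};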
@@ -153,9 +156,84 @@ const SharedAudioContextProvider = ({ children, numberOfAudioTags, audioLatencyH
             }
             : {
                 type: 'not-started',
+                reason: 'missed ' + Math.abs(offset).toFixed(2) + 's',
             };
         };
-    }, [
+    }, [ctxAndGain, logLevel]);
+    const resume = (0, react_1.useCallback)(() => {
+        var _a, _b;
+        if (!ctxAndGain) {
+            return Promise.resolve();
+        }
+        if (audioContextIsPlayingEventually.current) {
+            return Promise.resolve();
+        }
+        audioContextIsPlayingEventually.current = true;
+        isResuming.current = (0, wait_until_actually_resumed_js_1.waitUntilActuallyResumed)(ctxAndGain.audioContext, logLevel)
+            .then(() => { })
+            .finally(() => {
+            isResuming.current = null;
+        });
+        ctxAndGain.gainNode.gain.cancelScheduledValues(ctxAndGain.audioContext.currentTime);
+        (_a = ctxAndGain.gainNode) === null || _a === void 0 ? void 0 : _a.gain.setValueAtTime(0, ctxAndGain.audioContext.currentTime);
+        (_b = ctxAndGain.gainNode) === null || _b === void 0 ? void 0 : _b.gain.linearRampToValueAtTime(1, ctxAndGain.audioContext.currentTime + 0.03);
+        return ctxAndGain.audioContext.resume().then(() => {
+            nodesToResume.current.forEach((r, node) => node.start(r.scheduledTime, r.offset, r.duration));
+            nodesToResume.current.clear();
+        });
+    }, [ctxAndGain, logLevel]);
+    const getIsResumingAudioContext = (0, react_1.useCallback)(() => {
+        return isResuming.current;
+    }, []);
+    const suspend = (0, react_1.useCallback)(() => {
+        if (!ctxAndGain) {
+            return;
+        }
+        if (!audioContextIsPlayingEventually.current) {
+            return;
+        }
+        audioContextIsPlayingEventually.current = false;
+        ctxAndGain.audioContext.suspend();
+    }, [ctxAndGain]);
+    const audioContextValue = (0, react_1.useMemo)(() => {
+        var _a, _b;
+        return {
+            audioContext: (_a = ctxAndGain === null || ctxAndGain === void 0 ? void 0 : ctxAndGain.audioContext) !== null && _a !== void 0 ? _a : null,
+            gainNode: (_b = ctxAndGain === null || ctxAndGain === void 0 ? void 0 : ctxAndGain.gainNode) !== null && _b !== void 0 ? _b : null,
+            audioSyncAnchor,
+            audioSyncAnchorEmitter,
+            scheduleAudioNode,
+            resume,
+            suspend,
+            getIsResumingAudioContext,
+            unscheduleAudioNode,
+        };
+    }, [
+        ctxAndGain,
+        audioSyncAnchor,
+        audioSyncAnchorEmitter,
+        scheduleAudioNode,
+        resume,
+        suspend,
+        getIsResumingAudioContext,
+        unscheduleAudioNode,
+    ]);
+    return ((0, jsx_runtime_1.jsx)(exports.SharedAudioContext.Provider, { value: audioContextValue, children: children }));
+};
+exports.SharedAudioContextProvider = SharedAudioContextProvider;
+const SharedAudioTagsContextProvider = ({ children, numberOfAudioTags }) => {
+    var _a, _b;
+    const audios = (0, react_1.useRef)([]);
+    const [initialNumberOfAudioTags] = (0, react_1.useState)(numberOfAudioTags);
+    if (numberOfAudioTags !== initialNumberOfAudioTags) {
+        throw new Error('The number of shared audio tags has changed dynamically. Once you have set this property, you cannot change it afterwards.');
+    }
+    const logLevel = (0, log_level_context_js_1.useLogLevel)();
+    const mountTime = (0, log_level_context_js_1.useMountTime)();
+    const env = (0, use_remotion_environment_js_1.useRemotionEnvironment)();
+    const audioCtx = (0, react_1.useContext)(exports.SharedAudioContext);
+    const audioContext = (_a = audioCtx === null || audioCtx === void 0 ? void 0 : audioCtx.audioContext) !== null && _a !== void 0 ? _a : null;
+    const resume = audioCtx === null || audioCtx === void 0 ? void 0 : audioCtx.resume;
     const refs = (0, react_1.useMemo)(() => {
         return new Array(numberOfAudioTags).fill(true).map(() => {
             const ref = (0, react_1.createRef)();
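One detail in the `resume` callback added above is worth isolating: the master gain is ramped from 0 to 1 over 30 ms so that restarting output does not produce an audible click. A reduced sketch of that fade-in pattern, using only standard Web Audio API calls:

const fadeInOnResume = (ctx: AudioContext, gain: GainNode): Promise<void> => {
  const now = ctx.currentTime;
  gain.gain.cancelScheduledValues(now); // discard ramps from a previous resume
  gain.gain.setValueAtTime(0, now); // start from silence
  gain.gain.linearRampToValueAtTime(1, now + 0.03); // 30 ms linear fade-in
  return ctx.resume();
};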
@@ -178,7 +256,7 @@ const SharedAudioContextProvider = ({ children, numberOfAudioTags, audioLatencyH
      *
      * Need to import it from React to fix React 17 ESM support.
      */
-    const effectToUse = (
+    const effectToUse = (_b = react_1.default.useInsertionEffect) !== null && _b !== void 0 ? _b : react_1.default.useLayoutEffect;
     // Disconnecting the SharedElementSourceNodes if the Player unmounts to prevent leak.
     // https://github.com/remotion-dev/remotion/issues/6285
     // But useInsertionEffect will fire before other effects, meaning the
@@ -294,8 +372,6 @@ const SharedAudioContextProvider = ({ children, numberOfAudioTags, audioLatencyH
             rerenderAudios();
         }
     }, [rerenderAudios]);
-    const mountTime = (0, log_level_context_js_1.useMountTime)();
-    const env = (0, use_remotion_environment_js_1.useRemotionEnvironment)();
     const playAllAudios = (0, react_1.useCallback)(() => {
         refs.forEach((ref) => {
             const audio = audios.current.find((a) => a.el === ref.ref);
@@ -312,18 +388,15 @@ const SharedAudioContextProvider = ({ children, numberOfAudioTags, audioLatencyH
                 isPlayer: env.isPlayer,
             });
         });
-
-    }, [
-    const
+        resume === null || resume === void 0 ? void 0 : resume();
+    }, [logLevel, mountTime, refs, env.isPlayer, resume]);
+    const audioTagsValue = (0, react_1.useMemo)(() => {
         return {
             registerAudio,
             unregisterAudio,
             updateAudio,
             playAllAudios,
             numberOfAudioTags,
-            audioContext,
-            audioSyncAnchor,
-            scheduleAudioNode,
         };
     }, [
         numberOfAudioTags,
@@ -331,11 +404,8 @@ const SharedAudioContextProvider = ({ children, numberOfAudioTags, audioLatencyH
         registerAudio,
         unregisterAudio,
         updateAudio,
-        audioContext,
-        audioSyncAnchor,
-        scheduleAudioNode,
     ]);
-    return ((0, jsx_runtime_1.jsxs)(exports.
+    return ((0, jsx_runtime_1.jsxs)(exports.SharedAudioTagsContext.Provider, { value: audioTagsValue, children: [refs.map(({ id, ref }) => {
         return (
         // Without preload="metadata", iOS will seek the time internally
         // but not actually with sound. Adding `preload="metadata"` helps here.
@@ -343,22 +413,23 @@ const SharedAudioContextProvider = ({ children, numberOfAudioTags, audioLatencyH
         (0, jsx_runtime_1.jsx)("audio", { ref: ref, preload: "metadata", src: EMPTY_AUDIO }, id));
     }), children] }));
 };
-exports.
+exports.SharedAudioTagsContextProvider = SharedAudioTagsContextProvider;
 const useSharedAudio = ({ aud, audioId, premounting, postmounting, }) => {
     var _a;
-    const
+    const audioCtx = (0, react_1.useContext)(exports.SharedAudioContext);
+    const tagsCtx = (0, react_1.useContext)(exports.SharedAudioTagsContext);
     /**
      * We work around this in React 18 so an audio tag will only register itself once
      */
     const [elem] = (0, react_1.useState)(() => {
-        if (
-        return
+        if (tagsCtx && tagsCtx.numberOfAudioTags > 0) {
+            return tagsCtx.registerAudio({ aud, audioId, premounting, postmounting });
         }
         // numberOfSharedAudioTags is 0
         const el = react_1.default.createRef();
-        const mediaElementSourceNode = (
+        const mediaElementSourceNode = (audioCtx === null || audioCtx === void 0 ? void 0 : audioCtx.audioContext)
             ? (0, shared_element_source_node_js_1.makeSharedElementSourceNode)({
-                audioContext:
+                audioContext: audioCtx.audioContext,
                 ref: el,
             })
             : null;
@@ -386,17 +457,23 @@ const useSharedAudio = ({ aud, audioId, premounting, postmounting, }) => {
     const effectToUse = (_a = react_1.default.useInsertionEffect) !== null && _a !== void 0 ? _a : react_1.default.useLayoutEffect;
     if (typeof document !== 'undefined') {
         effectToUse(() => {
-            if (
-
+            if (tagsCtx && tagsCtx.numberOfAudioTags > 0) {
+                tagsCtx.updateAudio({
+                    id: elem.id,
+                    aud,
+                    audioId,
+                    premounting,
+                    postmounting,
+                });
             }
-        }, [aud,
+        }, [aud, tagsCtx, elem.id, audioId, premounting, postmounting]);
         effectToUse(() => {
             return () => {
-                if (
-
+                if (tagsCtx && tagsCtx.numberOfAudioTags > 0) {
+                    tagsCtx.unregisterAudio(elem.id);
                 }
             };
-        }, [
+        }, [tagsCtx, elem.id]);
     }
     return elem;
 };
package/dist/cjs/audio/use-audio-context.d.ts
CHANGED
@@ -3,4 +3,7 @@ export declare const useSingletonAudioContext: ({ logLevel, latencyHint, audioEn
     logLevel: LogLevel;
     latencyHint: AudioContextLatencyCategory;
     audioEnabled: boolean;
-}) =>
+}) => {
+    audioContext: AudioContext;
+    gainNode: GainNode;
+} | null;
package/dist/cjs/audio/use-audio-context.js
CHANGED
@@ -17,7 +17,7 @@ const warnOnce = (logLevel) => {
 };
 const useSingletonAudioContext = ({ logLevel, latencyHint, audioEnabled, }) => {
     const env = (0, use_remotion_environment_1.useRemotionEnvironment)();
-
+    return (0, react_1.useMemo)(() => {
         if (env.isRendering) {
             return null;
         }
@@ -28,14 +28,19 @@ const useSingletonAudioContext = ({ logLevel, latencyHint, audioEnabled, }) => {
             warnOnce(logLevel);
             return null;
         }
-
+        const audioContext = new AudioContext({
             latencyHint,
             // By default, this can end up being 44100Hz.
             // Playing a 48000Hz file in a 44100Hz context, such as https://remotion.media/video.mp4 in a @remotion/media tag
            // we observe some issues that seem to go away when we set the sample rate to 48000 with Sony LinkBuds Bluetooth headphones.
             sampleRate: 48000,
         });
+        const gainNode = audioContext.createGain();
+        gainNode.connect(audioContext.destination);
+        return {
+            audioContext,
+            gainNode,
+        };
     }, [logLevel, latencyHint, env.isRendering, audioEnabled]);
-    return audioContext;
 };
 exports.useSingletonAudioContext = useSingletonAudioContext;
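`useSingletonAudioContext` now returns the context paired with a master `GainNode` wired into the destination, so all audio routes through a single gain stage (the one `resume` fades in above). The wiring in plain form, minus the React memoization; the function name is illustrative:

const createAudioGraph = (latencyHint: AudioContextLatencyCategory) => {
  const audioContext = new AudioContext({
    latencyHint,
    sampleRate: 48000, // matches the comment above about 44100 Hz issues
  });
  const gainNode = audioContext.createGain();
  gainNode.connect(audioContext.destination); // single master gain stage
  return {audioContext, gainNode};
};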
package/dist/cjs/audio/wait-until-actually-resumed.js
ADDED
@@ -0,0 +1,30 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.waitUntilActuallyResumed = void 0;
+const log_js_1 = require("../log.js");
+const waitUntilActuallyResumed = (audioContext, logLevel) => {
+    return new Promise((resolve) => {
+        const startCurrentTime = audioContext.currentTime;
+        const start = audioContext.getOutputTimestamp();
+        const startOutputPerformanceTime = start.performanceTime;
+        const startWallClock = performance.now();
+        const check = () => {
+            var _a, _b;
+            const { currentTime } = audioContext;
+            const outputTimestamp = audioContext.getOutputTimestamp();
+            const elapsedWallClock = performance.now() - startWallClock;
+            if (startOutputPerformanceTime !== undefined &&
+                outputTimestamp.performanceTime !== undefined &&
+                outputTimestamp.performanceTime > startOutputPerformanceTime &&
+                outputTimestamp.contextTime !== undefined &&
+                outputTimestamp.contextTime > startCurrentTime) {
+                log_js_1.Log.verbose({ logLevel, tag: 'audio' }, `waitUntilActuallyResumed: getOutputTimestamp.performanceTime advanced from ${startOutputPerformanceTime.toFixed(6)} to ${outputTimestamp.performanceTime.toFixed(6)} after ${elapsedWallClock.toFixed(1)}ms. currentTime=${currentTime.toFixed(6)} (advanced by ${(currentTime - startCurrentTime).toFixed(6)}), getOutputTimestamp.performanceTime=${(_b = (_a = outputTimestamp.performanceTime) === null || _a === void 0 ? void 0 : _a.toFixed(1)) !== null && _b !== void 0 ? _b : 'undefined'}`);
+                resolve();
+                return;
+            }
+            requestAnimationFrame(check);
+        };
+        requestAnimationFrame(check);
+    });
+};
+exports.waitUntilActuallyResumed = waitUntilActuallyResumed;
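`waitUntilActuallyResumed` polls `getOutputTimestamp()` on every animation frame and resolves only once both `performanceTime` and `contextTime` have advanced, i.e. once audio output is demonstrably running rather than the context merely reporting `state === 'running'`. A hypothetical caller (the helper's signature is taken from the file above; `'verbose'` stands in for a real LogLevel):

declare const waitUntilActuallyResumed: (
  audioContext: AudioContext,
  logLevel: string, // a LogLevel in the actual module
) => Promise<void>;

const ensureAudible = async (ctx: AudioContext): Promise<void> => {
  const actuallyResumed = waitUntilActuallyResumed(ctx, 'verbose');
  await ctx.resume(); // state flips to 'running' here...
  await actuallyResumed; // ...but only now has the output clock verifiably advanced
};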
package/dist/cjs/canvas-effects/HtmlInCanvas.d.ts
ADDED
@@ -0,0 +1,12 @@
+import React from 'react';
+import type { EffectsProp } from './effect-types.js';
+export type HtmlInCanvasProps = {
+    readonly width: number;
+    readonly height: number;
+    readonly effects?: EffectsProp;
+    readonly children: React.ReactNode;
+    readonly className?: string;
+    readonly style?: React.CSSProperties;
+    readonly pixelRatio?: number;
+};
+export declare const HtmlInCanvas: React.FC<HtmlInCanvasProps>;