@remotion/media 4.0.355 → 4.0.357
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/audio/audio-for-preview.d.ts +30 -0
- package/dist/audio/audio-for-preview.js +213 -0
- package/dist/audio/audio-for-rendering.js +63 -12
- package/dist/audio/audio.js +8 -50
- package/dist/audio/props.d.ts +12 -3
- package/dist/audio-extraction/audio-cache.d.ts +1 -1
- package/dist/audio-extraction/audio-cache.js +5 -1
- package/dist/audio-extraction/audio-iterator.d.ts +7 -3
- package/dist/audio-extraction/audio-iterator.js +35 -12
- package/dist/audio-extraction/audio-manager.d.ts +10 -38
- package/dist/audio-extraction/audio-manager.js +40 -11
- package/dist/audio-extraction/extract-audio.d.ts +11 -3
- package/dist/audio-extraction/extract-audio.js +37 -17
- package/dist/caches.d.ts +11 -45
- package/dist/convert-audiodata/apply-tonefrequency.d.ts +2 -0
- package/dist/convert-audiodata/apply-tonefrequency.js +43 -0
- package/dist/convert-audiodata/combine-audiodata.js +2 -23
- package/dist/convert-audiodata/convert-audiodata.d.ts +1 -5
- package/dist/convert-audiodata/convert-audiodata.js +16 -24
- package/dist/convert-audiodata/wsola.d.ts +13 -0
- package/dist/convert-audiodata/wsola.js +197 -0
- package/dist/esm/index.mjs +2265 -589
- package/dist/extract-frame-and-audio.d.ts +7 -7
- package/dist/extract-frame-and-audio.js +69 -26
- package/dist/get-sink-weak.d.ts +3 -8
- package/dist/get-sink-weak.js +3 -11
- package/dist/get-sink.d.ts +13 -0
- package/dist/get-sink.js +15 -0
- package/dist/get-time-in-seconds.d.ts +10 -0
- package/dist/get-time-in-seconds.js +25 -0
- package/dist/index.d.ts +13 -3
- package/dist/index.js +12 -2
- package/dist/is-network-error.d.ts +6 -0
- package/dist/is-network-error.js +17 -0
- package/dist/render-timestamp-range.d.ts +1 -0
- package/dist/render-timestamp-range.js +9 -0
- package/dist/video/media-player.d.ts +91 -0
- package/dist/video/media-player.js +484 -0
- package/dist/video/props.d.ts +37 -18
- package/dist/video/resolve-playback-time.d.ts +8 -0
- package/dist/video/resolve-playback-time.js +22 -0
- package/dist/video/timeout-utils.d.ts +2 -0
- package/dist/video/timeout-utils.js +18 -0
- package/dist/video/video-for-preview.d.ts +25 -0
- package/dist/video/video-for-preview.js +241 -0
- package/dist/video/video-for-rendering.d.ts +26 -2
- package/dist/video/video-for-rendering.js +95 -19
- package/dist/video/video.js +13 -18
- package/dist/video-extraction/extract-frame-via-broadcast-channel.d.ts +19 -6
- package/dist/video-extraction/extract-frame-via-broadcast-channel.js +67 -4
- package/dist/video-extraction/extract-frame.d.ts +21 -2
- package/dist/video-extraction/extract-frame.js +46 -9
- package/dist/video-extraction/get-frames-since-keyframe.d.ts +17 -10
- package/dist/video-extraction/get-frames-since-keyframe.js +77 -21
- package/dist/video-extraction/keyframe-bank.d.ts +3 -2
- package/dist/video-extraction/keyframe-bank.js +32 -12
- package/dist/video-extraction/keyframe-manager.d.ts +3 -8
- package/dist/video-extraction/keyframe-manager.js +25 -10
- package/package.json +4 -4
package/dist/esm/index.mjs
CHANGED
|
@@ -1,54 +1,855 @@
|
|
|
1
1
|
// src/audio/audio.tsx
|
|
2
|
-
import {
|
|
2
|
+
import { Internals as Internals11, useRemotionEnvironment as useRemotionEnvironment2 } from "remotion";
|
|
3
|
+
|
|
4
|
+
// src/audio/audio-for-preview.tsx
|
|
5
|
+
import { useContext, useEffect, useMemo, useRef, useState } from "react";
|
|
3
6
|
import {
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
7
|
+
Internals as Internals2,
|
|
8
|
+
Audio as RemotionAudio,
|
|
9
|
+
useBufferState,
|
|
10
|
+
useCurrentFrame
|
|
8
11
|
} from "remotion";
|
|
9
12
|
|
|
10
|
-
//
|
|
11
|
-
import
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
useState as useState2
|
|
20
|
-
} from "react";
|
|
13
|
+
// src/video/media-player.ts
|
|
14
|
+
import {
|
|
15
|
+
ALL_FORMATS,
|
|
16
|
+
AudioBufferSink,
|
|
17
|
+
CanvasSink,
|
|
18
|
+
Input,
|
|
19
|
+
UrlSource
|
|
20
|
+
} from "mediabunny";
|
|
21
|
+
import { Internals } from "remotion";
|
|
21
22
|
|
|
22
|
-
//
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
}
|
|
23
|
+
// src/is-network-error.ts
|
|
24
|
+
function isNetworkError(error) {
|
|
25
|
+
if (error.message.includes("Failed to fetch") || error.message.includes("Load failed") || error.message.includes("NetworkError when attempting to fetch resource")) {
|
|
26
|
+
return true;
|
|
27
|
+
}
|
|
28
|
+
return false;
|
|
29
|
+
}
|
|
29
30
|
|
|
30
|
-
//
|
|
31
|
-
|
|
31
|
+
// src/video/resolve-playback-time.ts
|
|
32
|
+
var resolvePlaybackTime = ({
|
|
33
|
+
absolutePlaybackTimeInSeconds,
|
|
34
|
+
playbackRate,
|
|
35
|
+
loop,
|
|
36
|
+
trimBeforeInSeconds,
|
|
37
|
+
trimAfterInSeconds,
|
|
38
|
+
mediaDurationInSeconds
|
|
39
|
+
}) => {
|
|
40
|
+
const loopAfterPreliminary = loop ? Math.min(trimAfterInSeconds ?? Infinity, mediaDurationInSeconds ?? Infinity) : Infinity;
|
|
41
|
+
const loopAfterConsideringTrimBefore = loopAfterPreliminary - (trimBeforeInSeconds ?? 0);
|
|
42
|
+
const loopAfterConsideringPlaybackRate = loopAfterConsideringTrimBefore / playbackRate;
|
|
43
|
+
const timeConsideringLoop = absolutePlaybackTimeInSeconds % loopAfterConsideringPlaybackRate;
|
|
44
|
+
const time = timeConsideringLoop * playbackRate + (trimBeforeInSeconds ?? 0);
|
|
45
|
+
if (Number.isNaN(time)) {
|
|
46
|
+
console.log({
|
|
47
|
+
absolutePlaybackTimeInSeconds,
|
|
48
|
+
playbackRate,
|
|
49
|
+
loop,
|
|
50
|
+
trimBeforeInSeconds,
|
|
51
|
+
trimAfterInSeconds,
|
|
52
|
+
mediaDurationInSeconds
|
|
53
|
+
});
|
|
54
|
+
throw new Error("Time is NaN");
|
|
55
|
+
}
|
|
56
|
+
return time;
|
|
57
|
+
};
|
|
32
58
|
|
|
33
|
-
//
|
|
34
|
-
|
|
35
|
-
|
|
59
|
+
// src/video/timeout-utils.ts
|
|
60
|
+
var sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
|
|
61
|
+
function withTimeout(promise, timeoutMs, errorMessage = "Operation timed out") {
|
|
62
|
+
let timeoutId = null;
|
|
63
|
+
const timeoutPromise = new Promise((_, reject) => {
|
|
64
|
+
timeoutId = window.setTimeout(() => {
|
|
65
|
+
reject(new Error(errorMessage));
|
|
66
|
+
}, timeoutMs);
|
|
67
|
+
});
|
|
68
|
+
return Promise.race([
|
|
69
|
+
promise.finally(() => {
|
|
70
|
+
if (timeoutId) {
|
|
71
|
+
clearTimeout(timeoutId);
|
|
72
|
+
}
|
|
73
|
+
}),
|
|
74
|
+
timeoutPromise
|
|
75
|
+
]);
|
|
76
|
+
}
|
|
36
77
|
|
|
37
|
-
//
|
|
38
|
-
|
|
78
|
+
// src/video/media-player.ts
|
|
79
|
+
var SEEK_THRESHOLD = 0.05;
|
|
80
|
+
var AUDIO_BUFFER_TOLERANCE_THRESHOLD = 0.1;
|
|
39
81
|
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
82
|
+
class MediaPlayer {
|
|
83
|
+
canvas;
|
|
84
|
+
context;
|
|
85
|
+
src;
|
|
86
|
+
logLevel;
|
|
87
|
+
playbackRate;
|
|
88
|
+
audioStreamIndex;
|
|
89
|
+
canvasSink = null;
|
|
90
|
+
videoFrameIterator = null;
|
|
91
|
+
nextFrame = null;
|
|
92
|
+
audioSink = null;
|
|
93
|
+
audioBufferIterator = null;
|
|
94
|
+
queuedAudioNodes = new Set;
|
|
95
|
+
gainNode = null;
|
|
96
|
+
sharedAudioContext;
|
|
97
|
+
audioSyncAnchor = 0;
|
|
98
|
+
playing = false;
|
|
99
|
+
muted = false;
|
|
100
|
+
loop = false;
|
|
101
|
+
trimBeforeSeconds;
|
|
102
|
+
trimAfterSeconds;
|
|
103
|
+
animationFrameId = null;
|
|
104
|
+
videoAsyncId = 0;
|
|
105
|
+
audioAsyncId = 0;
|
|
106
|
+
initialized = false;
|
|
107
|
+
totalDuration;
|
|
108
|
+
isBuffering = false;
|
|
109
|
+
onBufferingChangeCallback;
|
|
110
|
+
audioBufferHealth = 0;
|
|
111
|
+
audioIteratorStarted = false;
|
|
112
|
+
HEALTHY_BUFER_THRESHOLD_SECONDS = 1;
|
|
113
|
+
onVideoFrameCallback;
|
|
114
|
+
constructor({
|
|
115
|
+
canvas,
|
|
116
|
+
src,
|
|
117
|
+
logLevel,
|
|
118
|
+
sharedAudioContext,
|
|
119
|
+
loop,
|
|
120
|
+
trimBeforeSeconds,
|
|
121
|
+
trimAfterSeconds,
|
|
122
|
+
playbackRate,
|
|
123
|
+
audioStreamIndex
|
|
124
|
+
}) {
|
|
125
|
+
this.canvas = canvas ?? null;
|
|
126
|
+
this.src = src;
|
|
127
|
+
this.logLevel = logLevel ?? window.remotion_logLevel;
|
|
128
|
+
this.sharedAudioContext = sharedAudioContext;
|
|
129
|
+
this.playbackRate = playbackRate;
|
|
130
|
+
this.loop = loop;
|
|
131
|
+
this.trimBeforeSeconds = trimBeforeSeconds;
|
|
132
|
+
this.trimAfterSeconds = trimAfterSeconds;
|
|
133
|
+
this.audioStreamIndex = audioStreamIndex ?? 0;
|
|
134
|
+
if (canvas) {
|
|
135
|
+
const context = canvas.getContext("2d", {
|
|
136
|
+
alpha: false,
|
|
137
|
+
desynchronized: true
|
|
138
|
+
});
|
|
139
|
+
if (!context) {
|
|
140
|
+
throw new Error("Could not get 2D context from canvas");
|
|
141
|
+
}
|
|
142
|
+
this.context = context;
|
|
143
|
+
} else {
|
|
144
|
+
this.context = null;
|
|
145
|
+
}
|
|
146
|
+
}
|
|
147
|
+
input = null;
|
|
148
|
+
isReady() {
|
|
149
|
+
return this.initialized && Boolean(this.sharedAudioContext);
|
|
150
|
+
}
|
|
151
|
+
hasAudio() {
|
|
152
|
+
return Boolean(this.audioSink && this.sharedAudioContext && this.gainNode);
|
|
153
|
+
}
|
|
154
|
+
isCurrentlyBuffering() {
|
|
155
|
+
return this.isBuffering && Boolean(this.bufferingStartedAtMs);
|
|
156
|
+
}
|
|
157
|
+
async initialize(startTimeUnresolved) {
|
|
158
|
+
try {
|
|
159
|
+
const urlSource = new UrlSource(this.src);
|
|
160
|
+
const input = new Input({
|
|
161
|
+
source: urlSource,
|
|
162
|
+
formats: ALL_FORMATS
|
|
163
|
+
});
|
|
164
|
+
this.input = input;
|
|
165
|
+
try {
|
|
166
|
+
await this.input.getFormat();
|
|
167
|
+
} catch (error) {
|
|
168
|
+
const err = error;
|
|
169
|
+
if (isNetworkError(err)) {
|
|
170
|
+
throw error;
|
|
171
|
+
}
|
|
172
|
+
Internals.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Failed to recognize format for ${this.src}`, error);
|
|
173
|
+
return { type: "unknown-container-format" };
|
|
174
|
+
}
|
|
175
|
+
const [duration, videoTrack, audioTracks] = await Promise.all([
|
|
176
|
+
input.computeDuration(),
|
|
177
|
+
input.getPrimaryVideoTrack(),
|
|
178
|
+
input.getAudioTracks()
|
|
179
|
+
]);
|
|
180
|
+
this.totalDuration = duration;
|
|
181
|
+
const audioTrack = audioTracks[this.audioStreamIndex] ?? null;
|
|
182
|
+
if (!videoTrack && !audioTrack) {
|
|
183
|
+
return { type: "no-tracks" };
|
|
184
|
+
}
|
|
185
|
+
if (videoTrack && this.canvas && this.context) {
|
|
186
|
+
const canDecode = await videoTrack.canDecode();
|
|
187
|
+
if (!canDecode) {
|
|
188
|
+
return { type: "cannot-decode" };
|
|
189
|
+
}
|
|
190
|
+
this.canvasSink = new CanvasSink(videoTrack, {
|
|
191
|
+
poolSize: 2,
|
|
192
|
+
fit: "contain"
|
|
193
|
+
});
|
|
194
|
+
this.canvas.width = videoTrack.displayWidth;
|
|
195
|
+
this.canvas.height = videoTrack.displayHeight;
|
|
196
|
+
}
|
|
197
|
+
if (audioTrack && this.sharedAudioContext) {
|
|
198
|
+
this.audioSink = new AudioBufferSink(audioTrack);
|
|
199
|
+
this.gainNode = this.sharedAudioContext.createGain();
|
|
200
|
+
this.gainNode.connect(this.sharedAudioContext.destination);
|
|
201
|
+
}
|
|
202
|
+
const startTime = resolvePlaybackTime({
|
|
203
|
+
absolutePlaybackTimeInSeconds: startTimeUnresolved,
|
|
204
|
+
playbackRate: this.playbackRate,
|
|
205
|
+
loop: this.loop,
|
|
206
|
+
trimBeforeInSeconds: this.trimBeforeSeconds,
|
|
207
|
+
trimAfterInSeconds: this.trimAfterSeconds,
|
|
208
|
+
mediaDurationInSeconds: this.totalDuration
|
|
209
|
+
});
|
|
210
|
+
if (this.sharedAudioContext) {
|
|
211
|
+
this.audioSyncAnchor = this.sharedAudioContext.currentTime - startTime;
|
|
212
|
+
}
|
|
213
|
+
this.initialized = true;
|
|
214
|
+
await Promise.all([
|
|
215
|
+
this.startAudioIterator(startTime),
|
|
216
|
+
this.startVideoIterator(startTime)
|
|
217
|
+
]);
|
|
218
|
+
this.startRenderLoop();
|
|
219
|
+
return { type: "success" };
|
|
220
|
+
} catch (error) {
|
|
221
|
+
const err = error;
|
|
222
|
+
if (isNetworkError(err)) {
|
|
223
|
+
Internals.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Network/CORS error for ${this.src}`, err);
|
|
224
|
+
return { type: "network-error" };
|
|
225
|
+
}
|
|
226
|
+
Internals.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to initialize", error);
|
|
227
|
+
throw error;
|
|
228
|
+
}
|
|
229
|
+
}
|
|
230
|
+
cleanupAudioQueue() {
|
|
231
|
+
for (const node of this.queuedAudioNodes) {
|
|
232
|
+
node.stop();
|
|
233
|
+
}
|
|
234
|
+
this.queuedAudioNodes.clear();
|
|
235
|
+
}
|
|
236
|
+
async cleanAudioIteratorAndNodes() {
|
|
237
|
+
await this.audioBufferIterator?.return();
|
|
238
|
+
this.audioBufferIterator = null;
|
|
239
|
+
this.audioIteratorStarted = false;
|
|
240
|
+
this.audioBufferHealth = 0;
|
|
241
|
+
this.cleanupAudioQueue();
|
|
242
|
+
}
|
|
243
|
+
async seekTo(time) {
|
|
244
|
+
if (!this.isReady())
|
|
245
|
+
return;
|
|
246
|
+
const newTime = resolvePlaybackTime({
|
|
247
|
+
absolutePlaybackTimeInSeconds: time,
|
|
248
|
+
playbackRate: this.playbackRate,
|
|
249
|
+
loop: this.loop,
|
|
250
|
+
trimBeforeInSeconds: this.trimBeforeSeconds,
|
|
251
|
+
trimAfterInSeconds: this.trimAfterSeconds,
|
|
252
|
+
mediaDurationInSeconds: this.totalDuration
|
|
253
|
+
});
|
|
254
|
+
const currentPlaybackTime = this.getPlaybackTime();
|
|
255
|
+
const isSignificantSeek = Math.abs(newTime - currentPlaybackTime) > SEEK_THRESHOLD;
|
|
256
|
+
if (isSignificantSeek) {
|
|
257
|
+
this.nextFrame = null;
|
|
258
|
+
this.audioSyncAnchor = this.sharedAudioContext.currentTime - newTime;
|
|
259
|
+
if (this.audioSink) {
|
|
260
|
+
await this.cleanAudioIteratorAndNodes();
|
|
261
|
+
}
|
|
262
|
+
await Promise.all([
|
|
263
|
+
this.startAudioIterator(newTime),
|
|
264
|
+
this.startVideoIterator(newTime)
|
|
265
|
+
]);
|
|
266
|
+
}
|
|
267
|
+
if (!this.playing) {
|
|
268
|
+
this.render();
|
|
269
|
+
}
|
|
270
|
+
}
|
|
271
|
+
async play() {
|
|
272
|
+
if (!this.isReady())
|
|
273
|
+
return;
|
|
274
|
+
if (!this.playing) {
|
|
275
|
+
if (this.sharedAudioContext.state === "suspended") {
|
|
276
|
+
await this.sharedAudioContext.resume();
|
|
277
|
+
}
|
|
278
|
+
this.playing = true;
|
|
279
|
+
this.startRenderLoop();
|
|
280
|
+
}
|
|
281
|
+
}
|
|
282
|
+
pause() {
|
|
283
|
+
this.playing = false;
|
|
284
|
+
this.cleanupAudioQueue();
|
|
285
|
+
this.stopRenderLoop();
|
|
286
|
+
}
|
|
287
|
+
setMuted(muted) {
|
|
288
|
+
this.muted = muted;
|
|
289
|
+
if (muted) {
|
|
290
|
+
this.cleanupAudioQueue();
|
|
291
|
+
}
|
|
292
|
+
}
|
|
293
|
+
setVolume(volume) {
|
|
294
|
+
if (!this.gainNode) {
|
|
295
|
+
return;
|
|
296
|
+
}
|
|
297
|
+
const appliedVolume = Math.max(0, volume);
|
|
298
|
+
this.gainNode.gain.value = appliedVolume;
|
|
299
|
+
}
|
|
300
|
+
setPlaybackRate(rate) {
|
|
301
|
+
this.playbackRate = rate;
|
|
302
|
+
}
|
|
303
|
+
setLoop(loop) {
|
|
304
|
+
this.loop = loop;
|
|
305
|
+
}
|
|
306
|
+
dispose() {
|
|
307
|
+
this.input?.dispose();
|
|
308
|
+
this.stopRenderLoop();
|
|
309
|
+
this.videoFrameIterator?.return();
|
|
310
|
+
this.cleanAudioIteratorAndNodes();
|
|
311
|
+
this.videoAsyncId++;
|
|
312
|
+
}
|
|
313
|
+
getPlaybackTime() {
|
|
314
|
+
const absoluteTime = this.sharedAudioContext.currentTime - this.audioSyncAnchor;
|
|
315
|
+
return resolvePlaybackTime({
|
|
316
|
+
absolutePlaybackTimeInSeconds: absoluteTime,
|
|
317
|
+
playbackRate: this.playbackRate,
|
|
318
|
+
loop: this.loop,
|
|
319
|
+
trimBeforeInSeconds: this.trimBeforeSeconds,
|
|
320
|
+
trimAfterInSeconds: this.trimAfterSeconds,
|
|
321
|
+
mediaDurationInSeconds: this.totalDuration
|
|
322
|
+
});
|
|
323
|
+
}
|
|
324
|
+
scheduleAudioChunk(buffer, mediaTimestamp) {
|
|
325
|
+
const targetTime = mediaTimestamp + this.audioSyncAnchor;
|
|
326
|
+
const delay = targetTime - this.sharedAudioContext.currentTime;
|
|
327
|
+
const node = this.sharedAudioContext.createBufferSource();
|
|
328
|
+
node.buffer = buffer;
|
|
329
|
+
node.playbackRate.value = this.playbackRate;
|
|
330
|
+
node.connect(this.gainNode);
|
|
331
|
+
if (delay >= 0) {
|
|
332
|
+
node.start(targetTime);
|
|
333
|
+
} else {
|
|
334
|
+
node.start(this.sharedAudioContext.currentTime, -delay);
|
|
335
|
+
}
|
|
336
|
+
this.queuedAudioNodes.add(node);
|
|
337
|
+
node.onended = () => this.queuedAudioNodes.delete(node);
|
|
338
|
+
}
|
|
339
|
+
onBufferingChange(callback) {
|
|
340
|
+
this.onBufferingChangeCallback = callback;
|
|
341
|
+
return () => {
|
|
342
|
+
if (this.onBufferingChangeCallback === callback) {
|
|
343
|
+
this.onBufferingChangeCallback = undefined;
|
|
344
|
+
}
|
|
345
|
+
};
|
|
346
|
+
}
|
|
347
|
+
onVideoFrame(callback) {
|
|
348
|
+
this.onVideoFrameCallback = callback;
|
|
349
|
+
if (this.initialized && callback && this.canvas) {
|
|
350
|
+
callback(this.canvas);
|
|
351
|
+
}
|
|
352
|
+
return () => {
|
|
353
|
+
if (this.onVideoFrameCallback === callback) {
|
|
354
|
+
this.onVideoFrameCallback = undefined;
|
|
355
|
+
}
|
|
356
|
+
};
|
|
357
|
+
}
|
|
358
|
+
canRenderVideo() {
|
|
359
|
+
return !this.hasAudio() || this.audioIteratorStarted && this.audioBufferHealth >= this.HEALTHY_BUFER_THRESHOLD_SECONDS;
|
|
360
|
+
}
|
|
361
|
+
startRenderLoop() {
|
|
362
|
+
if (this.animationFrameId !== null) {
|
|
363
|
+
return;
|
|
364
|
+
}
|
|
365
|
+
this.render();
|
|
366
|
+
}
|
|
367
|
+
stopRenderLoop() {
|
|
368
|
+
if (this.animationFrameId !== null) {
|
|
369
|
+
cancelAnimationFrame(this.animationFrameId);
|
|
370
|
+
this.animationFrameId = null;
|
|
371
|
+
}
|
|
372
|
+
}
|
|
373
|
+
render = () => {
|
|
374
|
+
if (this.isBuffering) {
|
|
375
|
+
this.maybeForceResumeFromBuffering();
|
|
376
|
+
}
|
|
377
|
+
if (this.shouldRenderFrame()) {
|
|
378
|
+
this.drawCurrentFrame();
|
|
379
|
+
}
|
|
380
|
+
if (this.playing) {
|
|
381
|
+
this.animationFrameId = requestAnimationFrame(this.render);
|
|
382
|
+
} else {
|
|
383
|
+
this.animationFrameId = null;
|
|
384
|
+
}
|
|
385
|
+
};
|
|
386
|
+
shouldRenderFrame() {
|
|
387
|
+
return !this.isBuffering && this.canRenderVideo() && this.nextFrame !== null && this.nextFrame.timestamp <= this.getPlaybackTime();
|
|
388
|
+
}
|
|
389
|
+
drawCurrentFrame() {
|
|
390
|
+
if (this.context && this.nextFrame) {
|
|
391
|
+
this.context.drawImage(this.nextFrame.canvas, 0, 0);
|
|
392
|
+
}
|
|
393
|
+
if (this.onVideoFrameCallback && this.canvas) {
|
|
394
|
+
this.onVideoFrameCallback(this.canvas);
|
|
395
|
+
}
|
|
396
|
+
this.nextFrame = null;
|
|
397
|
+
this.updateNextFrame();
|
|
398
|
+
}
|
|
399
|
+
startAudioIterator = async (startFromSecond) => {
|
|
400
|
+
if (!this.hasAudio())
|
|
401
|
+
return;
|
|
402
|
+
this.audioAsyncId++;
|
|
403
|
+
const currentAsyncId = this.audioAsyncId;
|
|
404
|
+
await this.audioBufferIterator?.return();
|
|
405
|
+
this.audioIteratorStarted = false;
|
|
406
|
+
this.audioBufferHealth = 0;
|
|
407
|
+
try {
|
|
408
|
+
this.audioBufferIterator = this.audioSink.buffers(startFromSecond);
|
|
409
|
+
this.runAudioIterator(startFromSecond, currentAsyncId);
|
|
410
|
+
} catch (error) {
|
|
411
|
+
Internals.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to start audio iterator", error);
|
|
412
|
+
}
|
|
413
|
+
};
|
|
414
|
+
startVideoIterator = async (timeToSeek) => {
|
|
415
|
+
if (!this.canvasSink) {
|
|
416
|
+
return;
|
|
417
|
+
}
|
|
418
|
+
this.videoAsyncId++;
|
|
419
|
+
const currentAsyncId = this.videoAsyncId;
|
|
420
|
+
this.videoFrameIterator?.return().catch(() => {
|
|
421
|
+
return;
|
|
422
|
+
});
|
|
423
|
+
this.videoFrameIterator = this.canvasSink.canvases(timeToSeek);
|
|
424
|
+
try {
|
|
425
|
+
const firstFrame = (await this.videoFrameIterator.next()).value ?? null;
|
|
426
|
+
const secondFrame = (await this.videoFrameIterator.next()).value ?? null;
|
|
427
|
+
if (currentAsyncId !== this.videoAsyncId) {
|
|
428
|
+
return;
|
|
429
|
+
}
|
|
430
|
+
if (firstFrame && this.context) {
|
|
431
|
+
Internals.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Drew initial frame ${firstFrame.timestamp.toFixed(3)}s`);
|
|
432
|
+
this.context.drawImage(firstFrame.canvas, 0, 0);
|
|
433
|
+
if (this.onVideoFrameCallback && this.canvas) {
|
|
434
|
+
this.onVideoFrameCallback(this.canvas);
|
|
435
|
+
}
|
|
436
|
+
}
|
|
437
|
+
this.nextFrame = secondFrame ?? null;
|
|
438
|
+
if (secondFrame) {
|
|
439
|
+
Internals.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Buffered next frame ${secondFrame.timestamp.toFixed(3)}s`);
|
|
440
|
+
}
|
|
441
|
+
} catch (error) {
|
|
442
|
+
Internals.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to start video iterator", error);
|
|
443
|
+
}
|
|
444
|
+
};
|
|
445
|
+
updateNextFrame = async () => {
|
|
446
|
+
if (!this.videoFrameIterator) {
|
|
447
|
+
return;
|
|
448
|
+
}
|
|
449
|
+
try {
|
|
450
|
+
while (true) {
|
|
451
|
+
const newNextFrame = (await this.videoFrameIterator.next()).value ?? null;
|
|
452
|
+
if (!newNextFrame) {
|
|
453
|
+
break;
|
|
454
|
+
}
|
|
455
|
+
if (newNextFrame.timestamp <= this.getPlaybackTime()) {
|
|
456
|
+
continue;
|
|
457
|
+
} else {
|
|
458
|
+
this.nextFrame = newNextFrame;
|
|
459
|
+
Internals.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Buffered next frame ${newNextFrame.timestamp.toFixed(3)}s`);
|
|
460
|
+
break;
|
|
461
|
+
}
|
|
462
|
+
}
|
|
463
|
+
} catch (error) {
|
|
464
|
+
Internals.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to update next frame", error);
|
|
465
|
+
}
|
|
466
|
+
};
|
|
467
|
+
bufferingStartedAtMs = null;
|
|
468
|
+
minBufferingTimeoutMs = 500;
|
|
469
|
+
setBufferingState(isBuffering) {
|
|
470
|
+
if (this.isBuffering !== isBuffering) {
|
|
471
|
+
this.isBuffering = isBuffering;
|
|
472
|
+
if (isBuffering) {
|
|
473
|
+
this.bufferingStartedAtMs = performance.now();
|
|
474
|
+
this.onBufferingChangeCallback?.(true);
|
|
475
|
+
} else {
|
|
476
|
+
this.bufferingStartedAtMs = null;
|
|
477
|
+
this.onBufferingChangeCallback?.(false);
|
|
478
|
+
}
|
|
479
|
+
}
|
|
480
|
+
}
|
|
481
|
+
maybeResumeFromBuffering(currentBufferDuration) {
|
|
482
|
+
if (!this.isCurrentlyBuffering())
|
|
483
|
+
return;
|
|
484
|
+
const now = performance.now();
|
|
485
|
+
const bufferingDuration = now - this.bufferingStartedAtMs;
|
|
486
|
+
const minTimeElapsed = bufferingDuration >= this.minBufferingTimeoutMs;
|
|
487
|
+
const bufferHealthy = currentBufferDuration >= this.HEALTHY_BUFER_THRESHOLD_SECONDS;
|
|
488
|
+
if (minTimeElapsed && bufferHealthy) {
|
|
489
|
+
Internals.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Resuming from buffering after ${bufferingDuration}ms - buffer recovered`);
|
|
490
|
+
this.setBufferingState(false);
|
|
491
|
+
}
|
|
492
|
+
}
|
|
493
|
+
maybeForceResumeFromBuffering() {
|
|
494
|
+
if (!this.isCurrentlyBuffering())
|
|
495
|
+
return;
|
|
496
|
+
const now = performance.now();
|
|
497
|
+
const bufferingDuration = now - this.bufferingStartedAtMs;
|
|
498
|
+
const forceTimeout = bufferingDuration > this.minBufferingTimeoutMs * 10;
|
|
499
|
+
if (forceTimeout) {
|
|
500
|
+
Internals.Log.trace({ logLevel: this.logLevel, tag: "@remotion/media" }, `[MediaPlayer] Force resuming from buffering after ${bufferingDuration}ms`);
|
|
501
|
+
this.setBufferingState(false);
|
|
502
|
+
}
|
|
503
|
+
}
|
|
504
|
+
runAudioIterator = async (startFromSecond, audioAsyncId) => {
|
|
505
|
+
if (!this.hasAudio() || !this.audioBufferIterator)
|
|
506
|
+
return;
|
|
507
|
+
try {
|
|
508
|
+
let totalBufferDuration = 0;
|
|
509
|
+
let isFirstBuffer = true;
|
|
510
|
+
this.audioIteratorStarted = true;
|
|
511
|
+
while (true) {
|
|
512
|
+
if (audioAsyncId !== this.audioAsyncId) {
|
|
513
|
+
return;
|
|
514
|
+
}
|
|
515
|
+
const BUFFERING_TIMEOUT_MS = 50;
|
|
516
|
+
let result;
|
|
517
|
+
try {
|
|
518
|
+
result = await withTimeout(this.audioBufferIterator.next(), BUFFERING_TIMEOUT_MS, "Iterator timeout");
|
|
519
|
+
} catch {
|
|
520
|
+
this.setBufferingState(true);
|
|
521
|
+
await sleep(10);
|
|
522
|
+
continue;
|
|
523
|
+
}
|
|
524
|
+
if (result.done || !result.value) {
|
|
525
|
+
break;
|
|
526
|
+
}
|
|
527
|
+
const { buffer, timestamp, duration } = result.value;
|
|
528
|
+
totalBufferDuration += duration;
|
|
529
|
+
this.audioBufferHealth = Math.max(0, totalBufferDuration / this.playbackRate);
|
|
530
|
+
this.maybeResumeFromBuffering(totalBufferDuration / this.playbackRate);
|
|
531
|
+
if (this.playing && !this.muted) {
|
|
532
|
+
if (isFirstBuffer) {
|
|
533
|
+
this.audioSyncAnchor = this.sharedAudioContext.currentTime - timestamp;
|
|
534
|
+
isFirstBuffer = false;
|
|
535
|
+
}
|
|
536
|
+
if (timestamp < startFromSecond - AUDIO_BUFFER_TOLERANCE_THRESHOLD) {
|
|
537
|
+
continue;
|
|
538
|
+
}
|
|
539
|
+
this.scheduleAudioChunk(buffer, timestamp);
|
|
540
|
+
}
|
|
541
|
+
if (timestamp - this.getPlaybackTime() >= 1) {
|
|
542
|
+
await new Promise((resolve) => {
|
|
543
|
+
const check = () => {
|
|
544
|
+
if (timestamp - this.getPlaybackTime() < 1) {
|
|
545
|
+
resolve();
|
|
546
|
+
} else {
|
|
547
|
+
requestAnimationFrame(check);
|
|
548
|
+
}
|
|
549
|
+
};
|
|
550
|
+
check();
|
|
551
|
+
});
|
|
552
|
+
}
|
|
553
|
+
}
|
|
554
|
+
} catch (error) {
|
|
555
|
+
Internals.Log.error({ logLevel: this.logLevel, tag: "@remotion/media" }, "[MediaPlayer] Failed to run audio iterator", error);
|
|
556
|
+
}
|
|
557
|
+
};
|
|
558
|
+
}
|
|
559
|
+
|
|
560
|
+
// src/audio/audio-for-preview.tsx
|
|
561
|
+
import { jsx } from "react/jsx-runtime";
|
|
562
|
+
var {
|
|
563
|
+
useUnsafeVideoConfig,
|
|
564
|
+
Timeline,
|
|
565
|
+
SharedAudioContext,
|
|
566
|
+
useMediaMutedState,
|
|
567
|
+
useMediaVolumeState,
|
|
568
|
+
useFrameForVolumeProp,
|
|
569
|
+
evaluateVolume,
|
|
570
|
+
warnAboutTooHighVolume,
|
|
571
|
+
usePreload,
|
|
572
|
+
useMediaInTimeline,
|
|
573
|
+
SequenceContext
|
|
574
|
+
} = Internals2;
|
|
575
|
+
var NewAudioForPreview = ({
|
|
576
|
+
src,
|
|
577
|
+
playbackRate,
|
|
578
|
+
logLevel,
|
|
579
|
+
muted,
|
|
580
|
+
volume,
|
|
581
|
+
loopVolumeCurveBehavior,
|
|
582
|
+
loop,
|
|
583
|
+
trimAfter,
|
|
584
|
+
trimBefore,
|
|
585
|
+
name,
|
|
586
|
+
showInTimeline,
|
|
587
|
+
stack,
|
|
588
|
+
disallowFallbackToHtml5Audio,
|
|
589
|
+
toneFrequency,
|
|
590
|
+
audioStreamIndex,
|
|
591
|
+
fallbackHtml5AudioProps
|
|
592
|
+
}) => {
|
|
593
|
+
const videoConfig = useUnsafeVideoConfig();
|
|
594
|
+
const frame = useCurrentFrame();
|
|
595
|
+
const mediaPlayerRef = useRef(null);
|
|
596
|
+
const [mediaPlayerReady, setMediaPlayerReady] = useState(false);
|
|
597
|
+
const [shouldFallbackToNativeAudio, setShouldFallbackToNativeAudio] = useState(false);
|
|
598
|
+
const [playing] = Timeline.usePlayingState();
|
|
599
|
+
const timelineContext = useContext(Timeline.TimelineContext);
|
|
600
|
+
const globalPlaybackRate = timelineContext.playbackRate;
|
|
601
|
+
const sharedAudioContext = useContext(SharedAudioContext);
|
|
602
|
+
const buffer = useBufferState();
|
|
603
|
+
const delayHandleRef = useRef(null);
|
|
604
|
+
const [mediaMuted] = useMediaMutedState();
|
|
605
|
+
const [mediaVolume] = useMediaVolumeState();
|
|
606
|
+
const volumePropFrame = useFrameForVolumeProp(loopVolumeCurveBehavior ?? "repeat");
|
|
607
|
+
const userPreferredVolume = evaluateVolume({
|
|
608
|
+
frame: volumePropFrame,
|
|
609
|
+
volume,
|
|
610
|
+
mediaVolume
|
|
611
|
+
});
|
|
612
|
+
warnAboutTooHighVolume(userPreferredVolume);
|
|
613
|
+
if (!videoConfig) {
|
|
614
|
+
throw new Error("No video config found");
|
|
615
|
+
}
|
|
616
|
+
if (!src) {
|
|
617
|
+
throw new TypeError("No `src` was passed to <NewAudioForPreview>.");
|
|
618
|
+
}
|
|
619
|
+
const currentTime = frame / videoConfig.fps;
|
|
620
|
+
const currentTimeRef = useRef(currentTime);
|
|
621
|
+
currentTimeRef.current = currentTime;
|
|
622
|
+
const preloadedSrc = usePreload(src);
|
|
623
|
+
const [timelineId] = useState(() => String(Math.random()));
|
|
624
|
+
const parentSequence = useContext(SequenceContext);
|
|
625
|
+
useMediaInTimeline({
|
|
626
|
+
volume,
|
|
627
|
+
mediaVolume,
|
|
628
|
+
mediaType: "audio",
|
|
629
|
+
src,
|
|
630
|
+
playbackRate,
|
|
631
|
+
displayName: name ?? null,
|
|
632
|
+
id: timelineId,
|
|
633
|
+
stack,
|
|
634
|
+
showInTimeline,
|
|
635
|
+
premountDisplay: parentSequence?.premountDisplay ?? null,
|
|
636
|
+
postmountDisplay: parentSequence?.postmountDisplay ?? null
|
|
637
|
+
});
|
|
638
|
+
useEffect(() => {
|
|
639
|
+
if (!sharedAudioContext)
|
|
640
|
+
return;
|
|
641
|
+
if (!sharedAudioContext.audioContext)
|
|
642
|
+
return;
|
|
643
|
+
try {
|
|
644
|
+
const player = new MediaPlayer({
|
|
645
|
+
src: preloadedSrc,
|
|
646
|
+
logLevel,
|
|
647
|
+
sharedAudioContext: sharedAudioContext.audioContext,
|
|
648
|
+
loop,
|
|
649
|
+
trimAfterSeconds: trimAfter ? trimAfter / videoConfig.fps : undefined,
|
|
650
|
+
trimBeforeSeconds: trimBefore ? trimBefore / videoConfig.fps : undefined,
|
|
651
|
+
canvas: null,
|
|
652
|
+
playbackRate,
|
|
653
|
+
audioStreamIndex: audioStreamIndex ?? 0
|
|
654
|
+
});
|
|
655
|
+
mediaPlayerRef.current = player;
|
|
656
|
+
player.initialize(currentTimeRef.current).then((result) => {
|
|
657
|
+
if (result.type === "unknown-container-format") {
|
|
658
|
+
if (disallowFallbackToHtml5Audio) {
|
|
659
|
+
throw new Error(`Unknown container format ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
|
|
660
|
+
}
|
|
661
|
+
Internals2.Log.warn({ logLevel, tag: "@remotion/media" }, `Unknown container format for ${preloadedSrc} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Audio>`);
|
|
662
|
+
setShouldFallbackToNativeAudio(true);
|
|
663
|
+
return;
|
|
664
|
+
}
|
|
665
|
+
if (result.type === "network-error") {
|
|
666
|
+
if (disallowFallbackToHtml5Audio) {
|
|
667
|
+
throw new Error(`Network error fetching ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
|
|
668
|
+
}
|
|
669
|
+
Internals2.Log.warn({ logLevel, tag: "@remotion/media" }, `Network error fetching ${preloadedSrc}, falling back to <Audio>`);
|
|
670
|
+
setShouldFallbackToNativeAudio(true);
|
|
671
|
+
return;
|
|
672
|
+
}
|
|
673
|
+
if (result.type === "cannot-decode") {
|
|
674
|
+
if (disallowFallbackToHtml5Audio) {
|
|
675
|
+
throw new Error(`Cannot decode ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
|
|
676
|
+
}
|
|
677
|
+
Internals2.Log.warn({ logLevel, tag: "@remotion/media" }, `Cannot decode ${preloadedSrc}, falling back to <Audio>`);
|
|
678
|
+
setShouldFallbackToNativeAudio(true);
|
|
679
|
+
return;
|
|
680
|
+
}
|
|
681
|
+
if (result.type === "no-tracks") {
|
|
682
|
+
if (disallowFallbackToHtml5Audio) {
|
|
683
|
+
throw new Error(`No video or audio tracks found for ${preloadedSrc}, and 'disallowFallbackToHtml5Audio' was set.`);
|
|
684
|
+
}
|
|
685
|
+
Internals2.Log.warn({ logLevel, tag: "@remotion/media" }, `No video or audio tracks found for ${preloadedSrc}, falling back to <Audio>`);
|
|
686
|
+
setShouldFallbackToNativeAudio(true);
|
|
687
|
+
return;
|
|
688
|
+
}
|
|
689
|
+
if (result.type === "success") {
|
|
690
|
+
setMediaPlayerReady(true);
|
|
691
|
+
Internals2.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewAudioForPreview] MediaPlayer initialized successfully`);
|
|
692
|
+
}
|
|
693
|
+
}).catch((error) => {
|
|
694
|
+
Internals2.Log.error({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] Failed to initialize MediaPlayer", error);
|
|
695
|
+
setShouldFallbackToNativeAudio(true);
|
|
696
|
+
});
|
|
697
|
+
} catch (error) {
|
|
698
|
+
Internals2.Log.error({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] MediaPlayer initialization failed", error);
|
|
699
|
+
setShouldFallbackToNativeAudio(true);
|
|
700
|
+
}
|
|
701
|
+
return () => {
|
|
702
|
+
if (delayHandleRef.current) {
|
|
703
|
+
delayHandleRef.current.unblock();
|
|
704
|
+
delayHandleRef.current = null;
|
|
705
|
+
}
|
|
706
|
+
if (mediaPlayerRef.current) {
|
|
707
|
+
Internals2.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewAudioForPreview] Disposing MediaPlayer`);
|
|
708
|
+
mediaPlayerRef.current.dispose();
|
|
709
|
+
mediaPlayerRef.current = null;
|
|
710
|
+
}
|
|
711
|
+
setMediaPlayerReady(false);
|
|
712
|
+
setShouldFallbackToNativeAudio(false);
|
|
713
|
+
};
|
|
714
|
+
}, [
|
|
715
|
+
preloadedSrc,
|
|
716
|
+
logLevel,
|
|
717
|
+
sharedAudioContext,
|
|
718
|
+
currentTimeRef,
|
|
719
|
+
loop,
|
|
720
|
+
trimAfter,
|
|
721
|
+
trimBefore,
|
|
722
|
+
playbackRate,
|
|
723
|
+
videoConfig.fps,
|
|
724
|
+
audioStreamIndex,
|
|
725
|
+
disallowFallbackToHtml5Audio
|
|
726
|
+
]);
|
|
727
|
+
useEffect(() => {
|
|
728
|
+
const audioPlayer = mediaPlayerRef.current;
|
|
729
|
+
if (!audioPlayer)
|
|
730
|
+
return;
|
|
731
|
+
if (playing) {
|
|
732
|
+
audioPlayer.play().catch((error) => {
|
|
733
|
+
Internals2.Log.error({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] Failed to play", error);
|
|
734
|
+
});
|
|
735
|
+
} else {
|
|
736
|
+
audioPlayer.pause();
|
|
737
|
+
}
|
|
738
|
+
}, [playing, logLevel, mediaPlayerReady]);
|
|
739
|
+
useEffect(() => {
|
|
740
|
+
const audioPlayer = mediaPlayerRef.current;
|
|
741
|
+
if (!audioPlayer || !mediaPlayerReady)
|
|
742
|
+
return;
|
|
743
|
+
audioPlayer.seekTo(currentTime);
|
|
744
|
+
Internals2.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewAudioForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
|
|
745
|
+
}, [currentTime, logLevel, mediaPlayerReady]);
|
|
746
|
+
useEffect(() => {
|
|
747
|
+
const audioPlayer = mediaPlayerRef.current;
|
|
748
|
+
if (!audioPlayer || !mediaPlayerReady)
|
|
749
|
+
return;
|
|
750
|
+
audioPlayer.onBufferingChange((newBufferingState) => {
|
|
751
|
+
if (newBufferingState && !delayHandleRef.current) {
|
|
752
|
+
delayHandleRef.current = buffer.delayPlayback();
|
|
753
|
+
Internals2.Log.trace({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] MediaPlayer buffering - blocking Remotion playback");
|
|
754
|
+
} else if (!newBufferingState && delayHandleRef.current) {
|
|
755
|
+
delayHandleRef.current.unblock();
|
|
756
|
+
delayHandleRef.current = null;
|
|
757
|
+
Internals2.Log.trace({ logLevel, tag: "@remotion/media" }, "[NewAudioForPreview] MediaPlayer unbuffering - unblocking Remotion playback");
|
|
758
|
+
}
|
|
759
|
+
});
|
|
760
|
+
}, [mediaPlayerReady, buffer, logLevel]);
|
|
761
|
+
const effectiveMuted = muted || mediaMuted || userPreferredVolume <= 0;
|
|
762
|
+
useEffect(() => {
|
|
763
|
+
const audioPlayer = mediaPlayerRef.current;
|
|
764
|
+
if (!audioPlayer || !mediaPlayerReady)
|
|
765
|
+
return;
|
|
766
|
+
audioPlayer.setMuted(effectiveMuted);
|
|
767
|
+
}, [effectiveMuted, mediaPlayerReady]);
|
|
768
|
+
useEffect(() => {
|
|
769
|
+
const audioPlayer = mediaPlayerRef.current;
|
|
770
|
+
if (!audioPlayer || !mediaPlayerReady) {
|
|
771
|
+
return;
|
|
772
|
+
}
|
|
773
|
+
audioPlayer.setVolume(userPreferredVolume);
|
|
774
|
+
}, [userPreferredVolume, mediaPlayerReady, logLevel]);
|
|
775
|
+
const effectivePlaybackRate = useMemo(() => playbackRate * globalPlaybackRate, [playbackRate, globalPlaybackRate]);
|
|
776
|
+
useEffect(() => {
|
|
777
|
+
const audioPlayer = mediaPlayerRef.current;
|
|
778
|
+
if (!audioPlayer || !mediaPlayerReady) {
|
|
779
|
+
return;
|
|
780
|
+
}
|
|
781
|
+
audioPlayer.setPlaybackRate(effectivePlaybackRate);
|
|
782
|
+
}, [effectivePlaybackRate, mediaPlayerReady, logLevel]);
|
|
783
|
+
if (shouldFallbackToNativeAudio && !disallowFallbackToHtml5Audio) {
|
|
784
|
+
return /* @__PURE__ */ jsx(RemotionAudio, {
|
|
785
|
+
src,
|
|
786
|
+
muted,
|
|
787
|
+
volume,
|
|
788
|
+
startFrom: trimBefore,
|
|
789
|
+
endAt: trimAfter,
|
|
790
|
+
playbackRate,
|
|
791
|
+
loopVolumeCurveBehavior,
|
|
792
|
+
name,
|
|
793
|
+
loop,
|
|
794
|
+
showInTimeline,
|
|
795
|
+
stack: stack ?? undefined,
|
|
796
|
+
toneFrequency,
|
|
797
|
+
audioStreamIndex,
|
|
798
|
+
pauseWhenBuffering: fallbackHtml5AudioProps?.pauseWhenBuffering,
|
|
799
|
+
...fallbackHtml5AudioProps
|
|
800
|
+
});
|
|
801
|
+
}
|
|
802
|
+
return null;
|
|
803
|
+
};
|
|
804
|
+
var AudioForPreview = ({
|
|
805
|
+
loop,
|
|
806
|
+
src,
|
|
807
|
+
logLevel,
|
|
808
|
+
muted,
|
|
809
|
+
name,
|
|
810
|
+
volume,
|
|
811
|
+
loopVolumeCurveBehavior,
|
|
812
|
+
playbackRate,
|
|
813
|
+
trimAfter,
|
|
814
|
+
trimBefore,
|
|
815
|
+
showInTimeline,
|
|
816
|
+
stack,
|
|
817
|
+
disallowFallbackToHtml5Audio,
|
|
818
|
+
toneFrequency,
|
|
819
|
+
audioStreamIndex,
|
|
820
|
+
fallbackHtml5AudioProps
|
|
821
|
+
}) => {
|
|
822
|
+
const preloadedSrc = usePreload(src);
|
|
823
|
+
return /* @__PURE__ */ jsx(NewAudioForPreview, {
|
|
824
|
+
audioStreamIndex: audioStreamIndex ?? 0,
|
|
825
|
+
src: preloadedSrc,
|
|
826
|
+
playbackRate: playbackRate ?? 1,
|
|
827
|
+
logLevel: logLevel ?? window.remotion_logLevel,
|
|
828
|
+
muted: muted ?? false,
|
|
829
|
+
volume: volume ?? 1,
|
|
830
|
+
loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? "repeat",
|
|
831
|
+
loop: loop ?? false,
|
|
832
|
+
trimAfter,
|
|
833
|
+
trimBefore,
|
|
834
|
+
name,
|
|
835
|
+
showInTimeline: showInTimeline ?? true,
|
|
836
|
+
stack,
|
|
837
|
+
disallowFallbackToHtml5Audio: disallowFallbackToHtml5Audio ?? false,
|
|
838
|
+
toneFrequency,
|
|
839
|
+
fallbackHtml5AudioProps
|
|
840
|
+
});
|
|
841
|
+
};
|
|
43
842
|
|
|
44
843
|
// src/audio/audio-for-rendering.tsx
|
|
45
|
-
import { useContext as
|
|
844
|
+
import { useContext as useContext2, useLayoutEffect, useMemo as useMemo2, useState as useState2 } from "react";
|
|
46
845
|
import {
|
|
846
|
+
Audio,
|
|
47
847
|
cancelRender as cancelRender2,
|
|
48
|
-
Internals as
|
|
49
|
-
|
|
848
|
+
Internals as Internals10,
|
|
849
|
+
random,
|
|
850
|
+
useCurrentFrame as useCurrentFrame2,
|
|
50
851
|
useDelayRender,
|
|
51
|
-
useRemotionEnvironment
|
|
852
|
+
useRemotionEnvironment
|
|
52
853
|
} from "remotion";
|
|
53
854
|
|
|
54
855
|
// src/convert-audiodata/apply-volume.ts
|
|
@@ -63,7 +864,94 @@ var applyVolume = (array, volume) => {
|
|
|
63
864
|
} else if (newValue > 32767) {
|
|
64
865
|
array[i] = 32767;
|
|
65
866
|
} else {
|
|
66
|
-
array[i] = newValue;
|
|
867
|
+
array[i] = newValue;
|
|
868
|
+
}
|
|
869
|
+
}
|
|
870
|
+
};
|
|
871
|
+
|
|
872
|
+
// src/convert-audiodata/resample-audiodata.ts
|
|
873
|
+
var TARGET_NUMBER_OF_CHANNELS = 2;
|
|
874
|
+
var TARGET_SAMPLE_RATE = 48000;
|
|
875
|
+
var fixFloatingPoint = (value) => {
|
|
876
|
+
if (value % 1 < 0.0000001) {
|
|
877
|
+
return Math.floor(value);
|
|
878
|
+
}
|
|
879
|
+
if (value % 1 > 0.9999999) {
|
|
880
|
+
return Math.ceil(value);
|
|
881
|
+
}
|
|
882
|
+
return value;
|
|
883
|
+
};
|
|
884
|
+
var resampleAudioData = ({
|
|
885
|
+
srcNumberOfChannels,
|
|
886
|
+
sourceChannels,
|
|
887
|
+
destination,
|
|
888
|
+
targetFrames,
|
|
889
|
+
chunkSize
|
|
890
|
+
}) => {
|
|
891
|
+
const getSourceValues = (startUnfixed, endUnfixed, channelIndex) => {
|
|
892
|
+
const start = fixFloatingPoint(startUnfixed);
|
|
893
|
+
const end = fixFloatingPoint(endUnfixed);
|
|
894
|
+
const startFloor = Math.floor(start);
|
|
895
|
+
const startCeil = Math.ceil(start);
|
|
896
|
+
const startFraction = start - startFloor;
|
|
897
|
+
const endFraction = end - Math.floor(end);
|
|
898
|
+
const endFloor = Math.floor(end);
|
|
899
|
+
let weightedSum = 0;
|
|
900
|
+
let totalWeight = 0;
|
|
901
|
+
if (startFraction > 0) {
|
|
902
|
+
const firstSample = sourceChannels[startFloor * srcNumberOfChannels + channelIndex];
|
|
903
|
+
weightedSum += firstSample * (1 - startFraction);
|
|
904
|
+
totalWeight += 1 - startFraction;
|
|
905
|
+
}
|
|
906
|
+
for (let k = startCeil;k < endFloor; k++) {
|
|
907
|
+
const num = sourceChannels[k * srcNumberOfChannels + channelIndex];
|
|
908
|
+
weightedSum += num;
|
|
909
|
+
totalWeight += 1;
|
|
910
|
+
}
|
|
911
|
+
if (endFraction > 0) {
|
|
912
|
+
const lastSample = sourceChannels[endFloor * srcNumberOfChannels + channelIndex];
|
|
913
|
+
weightedSum += lastSample * endFraction;
|
|
914
|
+
totalWeight += endFraction;
|
|
915
|
+
}
|
|
916
|
+
const average = weightedSum / totalWeight;
|
|
917
|
+
return average;
|
|
918
|
+
};
|
|
919
|
+
for (let newFrameIndex = 0;newFrameIndex < targetFrames; newFrameIndex++) {
|
|
920
|
+
const start = newFrameIndex * chunkSize;
|
|
921
|
+
const end = start + chunkSize;
|
|
922
|
+
if (TARGET_NUMBER_OF_CHANNELS === srcNumberOfChannels) {
|
|
923
|
+
for (let i = 0;i < srcNumberOfChannels; i++) {
|
|
924
|
+
destination[newFrameIndex * srcNumberOfChannels + i] = getSourceValues(start, end, i);
|
|
925
|
+
}
|
|
926
|
+
}
|
|
927
|
+
if (srcNumberOfChannels === 1) {
|
|
928
|
+
const m = getSourceValues(start, end, 0);
|
|
929
|
+
destination[newFrameIndex * 2 + 0] = m;
|
|
930
|
+
destination[newFrameIndex * 2 + 1] = m;
|
|
931
|
+
} else if (srcNumberOfChannels === 4) {
|
|
932
|
+
const l = getSourceValues(start, end, 0);
|
|
933
|
+
const r = getSourceValues(start, end, 1);
|
|
934
|
+
const sl = getSourceValues(start, end, 2);
|
|
935
|
+
const sr = getSourceValues(start, end, 3);
|
|
936
|
+
const l2 = 0.5 * (l + sl);
|
|
937
|
+
const r2 = 0.5 * (r + sr);
|
|
938
|
+
destination[newFrameIndex * 2 + 0] = l2;
|
|
939
|
+
destination[newFrameIndex * 2 + 1] = r2;
|
|
940
|
+
} else if (srcNumberOfChannels === 6) {
|
|
941
|
+
const l = getSourceValues(start, end, 0);
|
|
942
|
+
const r = getSourceValues(start, end, 1);
|
|
943
|
+
const c = getSourceValues(start, end, 2);
|
|
944
|
+
const sl = getSourceValues(start, end, 3);
|
|
945
|
+
const sr = getSourceValues(start, end, 4);
|
|
946
|
+
const sq = Math.sqrt(1 / 2);
|
|
947
|
+
const l2 = l + sq * (c + sl);
|
|
948
|
+
const r2 = r + sq * (c + sr);
|
|
949
|
+
destination[newFrameIndex * 2 + 0] = l2;
|
|
950
|
+
destination[newFrameIndex * 2 + 1] = r2;
|
|
951
|
+
} else {
|
|
952
|
+
for (let i = 0;i < srcNumberOfChannels; i++) {
|
|
953
|
+
destination[newFrameIndex * TARGET_NUMBER_OF_CHANNELS + i] = getSourceValues(start, end, i);
|
|
954
|
+
}
|
|
67
955
|
}
|
|
68
956
|
}
|
|
69
957
|
};
|
|
@@ -88,10 +976,13 @@ var frameForVolumeProp = ({
|
|
|
88
976
|
};
|
|
89
977
|
|
|
90
978
|
// src/caches.ts
|
|
91
|
-
import { cancelRender, Internals as
|
|
979
|
+
import { cancelRender, Internals as Internals7 } from "remotion";
|
|
980
|
+
|
|
981
|
+
// src/audio-extraction/audio-manager.ts
|
|
982
|
+
import { Internals as Internals4 } from "remotion";
|
|
92
983
|
|
|
93
984
|
// src/audio-extraction/audio-iterator.ts
|
|
94
|
-
import { Internals } from "remotion";
|
|
985
|
+
import { Internals as Internals3 } from "remotion";
|
|
95
986
|
|
|
96
987
|
// src/audio-extraction/audio-cache.ts
|
|
97
988
|
var makeAudioCache = () => {
|
|
@@ -102,7 +993,7 @@ var makeAudioCache = () => {
|
|
|
102
993
|
samples[sample.timestamp] = sample;
|
|
103
994
|
};
|
|
104
995
|
const clearBeforeThreshold = (threshold) => {
|
|
105
|
-
for (const timestamp of timestamps) {
|
|
996
|
+
for (const timestamp of timestamps.slice()) {
|
|
106
997
|
const endTimestamp = timestamp + samples[timestamp].duration;
|
|
107
998
|
if (endTimestamp < threshold) {
|
|
108
999
|
const isLast = timestamp === timestamps[timestamps.length - 1];
|
|
@@ -117,6 +1008,7 @@ var makeAudioCache = () => {
|
|
|
117
1008
|
};
|
|
118
1009
|
const deleteAll = () => {
|
|
119
1010
|
for (const timestamp of timestamps) {
|
|
1011
|
+
samples[timestamp].close();
|
|
120
1012
|
delete samples[timestamp];
|
|
121
1013
|
}
|
|
122
1014
|
timestamps.length = 0;
|
|
@@ -143,6 +1035,9 @@ var makeAudioCache = () => {
|
|
|
143
1035
|
return timestamps[0];
|
|
144
1036
|
};
|
|
145
1037
|
const getNewestTimestamp = () => {
|
|
1038
|
+
if (timestamps.length === 0) {
|
|
1039
|
+
return null;
|
|
1040
|
+
}
|
|
146
1041
|
const sample = samples[timestamps[timestamps.length - 1]];
|
|
147
1042
|
return sample.timestamp + sample.duration;
|
|
148
1043
|
};
|
|
@@ -159,14 +1054,26 @@ var makeAudioCache = () => {
|
|
|
159
1054
|
|
|
160
1055
|
// src/audio-extraction/audio-iterator.ts
|
|
161
1056
|
var extraThreshold = 1.5;
|
|
1057
|
+
var warned = {};
|
|
1058
|
+
var warnAboutMatroskaOnce = (src, logLevel) => {
|
|
1059
|
+
if (warned[src]) {
|
|
1060
|
+
return;
|
|
1061
|
+
}
|
|
1062
|
+
warned[src] = true;
|
|
1063
|
+
Internals3.Log.warn({ logLevel, tag: "@remotion/media" }, `Audio from ${src} will need to be read from the beginning. https://www.remotion.dev/docs/media/support#matroska-limitation`);
|
|
1064
|
+
};
|
|
162
1065
|
var makeAudioIterator = ({
|
|
163
1066
|
audioSampleSink,
|
|
164
1067
|
isMatroska,
|
|
165
1068
|
startTimestamp,
|
|
166
1069
|
src,
|
|
167
|
-
actualMatroskaTimestamps
|
|
1070
|
+
actualMatroskaTimestamps,
|
|
1071
|
+
logLevel
|
|
168
1072
|
}) => {
|
|
169
1073
|
const sampleIterator = audioSampleSink.samples(isMatroska ? 0 : Math.max(0, startTimestamp - extraThreshold));
|
|
1074
|
+
if (isMatroska) {
|
|
1075
|
+
warnAboutMatroskaOnce(src, logLevel);
|
|
1076
|
+
}
|
|
170
1077
|
let fullDuration = null;
|
|
171
1078
|
const cache = makeAudioCache();
|
|
172
1079
|
let lastUsed = Date.now();
|
|
@@ -174,7 +1081,7 @@ var makeAudioIterator = ({
|
|
|
174
1081
|
lastUsed = Date.now();
|
|
175
1082
|
const { value: sample, done } = await sampleIterator.next();
|
|
176
1083
|
if (done) {
|
|
177
|
-
fullDuration = cache.getNewestTimestamp()
|
|
1084
|
+
fullDuration = cache.getNewestTimestamp();
|
|
178
1085
|
return null;
|
|
179
1086
|
}
|
|
180
1087
|
const realTimestamp = actualMatroskaTimestamps.getRealTimestamp(sample.timestamp);
|
|
@@ -193,6 +1100,12 @@ var makeAudioIterator = ({
|
|
|
193
1100
|
return [];
|
|
194
1101
|
}
|
|
195
1102
|
const samples = cache.getSamples(timestamp, durationInSeconds);
|
|
1103
|
+
const newestTimestamp = cache.getNewestTimestamp();
|
|
1104
|
+
if (newestTimestamp !== null) {
|
|
1105
|
+
if (newestTimestamp >= timestamp + durationInSeconds - 0.0000000001) {
|
|
1106
|
+
return samples;
|
|
1107
|
+
}
|
|
1108
|
+
}
|
|
196
1109
|
while (true) {
|
|
197
1110
|
const sample = await getNextSample();
|
|
198
1111
|
const deleteBefore = fullDuration === null ? timestamp : Math.min(timestamp, fullDuration);
|
|
@@ -210,8 +1123,13 @@ var makeAudioIterator = ({
|
|
|
210
1123
|
}
|
|
211
1124
|
return samples;
|
|
212
1125
|
};
|
|
213
|
-
const logOpenFrames = (
|
|
214
|
-
|
|
1126
|
+
const logOpenFrames = () => {
|
|
1127
|
+
const openTimestamps = cache.getOpenTimestamps();
|
|
1128
|
+
if (openTimestamps.length > 0) {
|
|
1129
|
+
const first = openTimestamps[0];
|
|
1130
|
+
const last = openTimestamps[openTimestamps.length - 1];
|
|
1131
|
+
Internals3.Log.verbose({ logLevel, tag: "@remotion/media" }, "Open audio samples for src", src, `${first.toFixed(3)}...${last.toFixed(3)}`);
|
|
1132
|
+
}
|
|
215
1133
|
};
|
|
216
1134
|
const getCacheStats = () => {
|
|
217
1135
|
return {
|
|
@@ -226,12 +1144,13 @@ var makeAudioIterator = ({
|
|
|
226
1144
|
}
|
|
227
1145
|
return oldestTimestamp < timestamp && Math.abs(oldestTimestamp - timestamp) < 10;
|
|
228
1146
|
};
|
|
229
|
-
const prepareForDeletion =
|
|
1147
|
+
const prepareForDeletion = () => {
|
|
230
1148
|
cache.deleteAll();
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
|
|
234
|
-
|
|
1149
|
+
sampleIterator.return().then((value) => {
|
|
1150
|
+
if (value.value) {
|
|
1151
|
+
value.value.close();
|
|
1152
|
+
}
|
|
1153
|
+
});
|
|
235
1154
|
fullDuration = null;
|
|
236
1155
|
};
|
|
237
1156
|
let op = Promise.resolve([]);
|
|
@@ -250,7 +1169,10 @@ var makeAudioIterator = ({
|
|
|
250
1169
|
getCacheStats,
|
|
251
1170
|
getLastUsed: () => lastUsed,
|
|
252
1171
|
prepareForDeletion,
|
|
253
|
-
startTimestamp
|
|
1172
|
+
startTimestamp,
|
|
1173
|
+
clearBeforeThreshold: cache.clearBeforeThreshold,
|
|
1174
|
+
getOldestTimestamp: cache.getOldestTimestamp,
|
|
1175
|
+
getNewestTimestamp: cache.getNewestTimestamp
|
|
254
1176
|
};
|
|
255
1177
|
};
|
|
256
1178
|
|
|
@@ -262,14 +1184,16 @@ var makeAudioManager = () => {
|
|
|
262
1184
|
src,
|
|
263
1185
|
audioSampleSink,
|
|
264
1186
|
isMatroska,
|
|
265
|
-
actualMatroskaTimestamps
|
|
1187
|
+
actualMatroskaTimestamps,
|
|
1188
|
+
logLevel
|
|
266
1189
|
}) => {
|
|
267
1190
|
const iterator = makeAudioIterator({
|
|
268
1191
|
audioSampleSink,
|
|
269
1192
|
isMatroska,
|
|
270
1193
|
startTimestamp: timeInSeconds,
|
|
271
1194
|
src,
|
|
272
|
-
actualMatroskaTimestamps
|
|
1195
|
+
actualMatroskaTimestamps,
|
|
1196
|
+
logLevel
|
|
273
1197
|
});
|
|
274
1198
|
iterators.push(iterator);
|
|
275
1199
|
return iterator;
|
|
@@ -286,13 +1210,26 @@ var makeAudioManager = () => {
|
|
|
286
1210
|
}
|
|
287
1211
|
return mostInThePastIterator;
|
|
288
1212
|
};
|
|
289
|
-
const deleteOldestIterator =
|
|
1213
|
+
const deleteOldestIterator = () => {
|
|
290
1214
|
const iterator = getIteratorMostInThePast();
|
|
291
1215
|
if (iterator) {
|
|
292
|
-
|
|
1216
|
+
iterator.prepareForDeletion();
|
|
293
1217
|
iterators.splice(iterators.indexOf(iterator), 1);
|
|
294
1218
|
}
|
|
295
1219
|
};
|
|
1220
|
+
const deleteDuplicateIterators = (logLevel) => {
|
|
1221
|
+
const seenKeys = new Set;
|
|
1222
|
+
for (let i = 0;i < iterators.length; i++) {
|
|
1223
|
+
const iterator = iterators[i];
|
|
1224
|
+
const key = `${iterator.src}-${iterator.getOldestTimestamp()}-${iterator.getNewestTimestamp()}`;
|
|
1225
|
+
if (seenKeys.has(key)) {
|
|
1226
|
+
iterator.prepareForDeletion();
|
|
1227
|
+
iterators.splice(iterators.indexOf(iterator), 1);
|
|
1228
|
+
Internals4.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted duplicate iterator for ${iterator.src}`);
|
|
1229
|
+
}
|
|
1230
|
+
seenKeys.add(key);
|
|
1231
|
+
}
|
|
1232
|
+
};
|
|
296
1233
|
const getIterator = async ({
|
|
297
1234
|
src,
|
|
298
1235
|
timeInSeconds,
|
|
@@ -303,25 +1240,28 @@ var makeAudioManager = () => {
|
|
|
303
1240
|
}) => {
|
|
304
1241
|
const maxCacheSize = getMaxVideoCacheSize(logLevel);
|
|
305
1242
|
while ((await getTotalCacheStats()).totalSize > maxCacheSize) {
|
|
306
|
-
|
|
1243
|
+
deleteOldestIterator();
|
|
307
1244
|
}
|
|
308
1245
|
for (const iterator of iterators) {
|
|
309
1246
|
if (iterator.src === src && await iterator.waitForCompletion() && iterator.canSatisfyRequestedTime(timeInSeconds)) {
|
|
310
1247
|
return iterator;
|
|
311
1248
|
}
|
|
312
1249
|
}
|
|
313
|
-
for (
|
|
1250
|
+
for (let i = 0;i < iterators.length; i++) {
|
|
1251
|
+
const iterator = iterators[i];
|
|
314
1252
|
if (iterator.src === src && iterator.startTimestamp === timeInSeconds) {
|
|
315
|
-
|
|
1253
|
+
iterator.prepareForDeletion();
|
|
316
1254
|
iterators.splice(iterators.indexOf(iterator), 1);
|
|
317
1255
|
}
|
|
318
1256
|
}
|
|
1257
|
+
deleteDuplicateIterators(logLevel);
|
|
319
1258
|
return makeIterator({
|
|
320
1259
|
src,
|
|
321
1260
|
timeInSeconds,
|
|
322
1261
|
audioSampleSink,
|
|
323
1262
|
isMatroska,
|
|
324
|
-
actualMatroskaTimestamps
|
|
1263
|
+
actualMatroskaTimestamps,
|
|
1264
|
+
logLevel
|
|
325
1265
|
});
|
|
326
1266
|
};
|
|
327
1267
|
const getCacheStats = () => {
|
|
@@ -334,44 +1274,76 @@ var makeAudioManager = () => {
|
|
|
334
1274
|
}
|
|
335
1275
|
return { count: totalCount, totalSize };
|
|
336
1276
|
};
|
|
337
|
-
const logOpenFrames = (
|
|
1277
|
+
const logOpenFrames = () => {
|
|
338
1278
|
for (const iterator of iterators) {
|
|
339
|
-
iterator.logOpenFrames(
|
|
1279
|
+
iterator.logOpenFrames();
|
|
340
1280
|
}
|
|
341
1281
|
};
|
|
1282
|
+
let queue = Promise.resolve(undefined);
|
|
342
1283
|
return {
|
|
343
|
-
|
|
344
|
-
|
|
1284
|
+
getIterator: ({
|
|
1285
|
+
src,
|
|
1286
|
+
timeInSeconds,
|
|
1287
|
+
audioSampleSink,
|
|
1288
|
+
isMatroska,
|
|
1289
|
+
actualMatroskaTimestamps,
|
|
1290
|
+
logLevel
|
|
1291
|
+
}) => {
|
|
1292
|
+
queue = queue.then(() => getIterator({
|
|
1293
|
+
src,
|
|
1294
|
+
timeInSeconds,
|
|
1295
|
+
audioSampleSink,
|
|
1296
|
+
isMatroska,
|
|
1297
|
+
actualMatroskaTimestamps,
|
|
1298
|
+
logLevel
|
|
1299
|
+
}));
|
|
1300
|
+
return queue;
|
|
1301
|
+
},
|
|
345
1302
|
getCacheStats,
|
|
346
1303
|
getIteratorMostInThePast,
|
|
347
|
-
logOpenFrames
|
|
1304
|
+
logOpenFrames,
|
|
1305
|
+
deleteDuplicateIterators
|
|
348
1306
|
};
|
|
349
1307
|
};
|
|
350
1308
|
|
|
351
1309
|
// src/video-extraction/keyframe-manager.ts
|
|
352
|
-
import { Internals as
|
|
1310
|
+
import { Internals as Internals6 } from "remotion";
|
|
1311
|
+
|
|
1312
|
+
// src/render-timestamp-range.ts
|
|
1313
|
+
var renderTimestampRange = (timestamps) => {
|
|
1314
|
+
if (timestamps.length === 0) {
|
|
1315
|
+
return "(none)";
|
|
1316
|
+
}
|
|
1317
|
+
if (timestamps.length === 1) {
|
|
1318
|
+
return timestamps[0].toFixed(3);
|
|
1319
|
+
}
|
|
1320
|
+
return `${timestamps[0].toFixed(3)}...${timestamps[timestamps.length - 1].toFixed(3)}`;
|
|
1321
|
+
};
|
|
353
1322
|
|
|
354
1323
|
// src/video-extraction/get-frames-since-keyframe.ts
|
|
355
1324
|
import {
|
|
356
|
-
ALL_FORMATS,
|
|
1325
|
+
ALL_FORMATS as ALL_FORMATS2,
|
|
357
1326
|
AudioSampleSink,
|
|
358
1327
|
EncodedPacketSink,
|
|
359
|
-
Input,
|
|
1328
|
+
Input as Input2,
|
|
360
1329
|
MATROSKA,
|
|
361
|
-
UrlSource,
|
|
362
|
-
VideoSampleSink
|
|
1330
|
+
UrlSource as UrlSource2,
|
|
1331
|
+
VideoSampleSink,
|
|
1332
|
+
WEBM
|
|
363
1333
|
} from "mediabunny";
|
|
364
1334
|
|
|
365
1335
|
// src/video-extraction/keyframe-bank.ts
|
|
366
|
-
import { Internals as
|
|
1336
|
+
import { Internals as Internals5 } from "remotion";
|
|
367
1337
|
var roundTo4Digits = (timestamp) => {
|
|
368
1338
|
return Math.round(timestamp * 1000) / 1000;
|
|
369
1339
|
};
|
|
370
1340
|
var makeKeyframeBank = ({
|
|
371
1341
|
startTimestampInSeconds,
|
|
372
1342
|
endTimestampInSeconds,
|
|
373
|
-
sampleIterator
|
|
1343
|
+
sampleIterator,
|
|
1344
|
+
logLevel: parentLogLevel
|
|
374
1345
|
}) => {
|
|
1346
|
+
Internals5.Log.verbose({ logLevel: parentLogLevel, tag: "@remotion/media" }, `Creating keyframe bank from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
|
|
375
1347
|
const frames = {};
|
|
376
1348
|
const frameTimestamps = [];
|
|
377
1349
|
let lastUsed = Date.now();
|
|
@@ -385,7 +1357,7 @@ var makeKeyframeBank = ({
|
|
|
385
1357
|
if (!lastFrame) {
|
|
386
1358
|
return true;
|
|
387
1359
|
}
|
|
388
|
-
return roundTo4Digits(lastFrame.timestamp + lastFrame.duration) > roundTo4Digits(timestamp);
|
|
1360
|
+
return roundTo4Digits(lastFrame.timestamp + lastFrame.duration) > roundTo4Digits(timestamp) + 0.001;
|
|
389
1361
|
};
|
|
390
1362
|
const addFrame = (frame) => {
|
|
391
1363
|
frames[frame.timestamp] = frame;
|
|
@@ -419,7 +1391,7 @@ var makeKeyframeBank = ({
|
|
|
419
1391
|
if (!sample) {
|
|
420
1392
|
return null;
|
|
421
1393
|
}
|
|
422
|
-
if (roundTo4Digits(sample.timestamp) <= roundTo4Digits(timestampInSeconds)) {
|
|
1394
|
+
if (roundTo4Digits(sample.timestamp) <= roundTo4Digits(timestampInSeconds) || Math.abs(sample.timestamp - timestampInSeconds) <= 0.001) {
|
|
423
1395
|
return sample;
|
|
424
1396
|
}
|
|
425
1397
|
}
|
|
@@ -428,11 +1400,14 @@ var makeKeyframeBank = ({
|
|
|
428
1400
|
const hasTimestampInSecond = async (timestamp) => {
|
|
429
1401
|
return await getFrameFromTimestamp(timestamp) !== null;
|
|
430
1402
|
};
|
|
431
|
-
const prepareForDeletion =
|
|
432
|
-
|
|
433
|
-
|
|
434
|
-
value
|
|
435
|
-
|
|
1403
|
+
const prepareForDeletion = (logLevel) => {
|
|
1404
|
+
Internals5.Log.verbose({ logLevel, tag: "@remotion/media" }, `Preparing for deletion of keyframe bank from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
|
|
1405
|
+
sampleIterator.return().then((result) => {
|
|
1406
|
+
if (result.value) {
|
|
1407
|
+
result.value.close();
|
|
1408
|
+
}
|
|
1409
|
+
return null;
|
|
1410
|
+
});
|
|
436
1411
|
for (const frameTimestamp of frameTimestamps) {
|
|
437
1412
|
if (!frames[frameTimestamp]) {
|
|
438
1413
|
continue;
|
|
@@ -448,7 +1423,8 @@ var makeKeyframeBank = ({
|
|
|
448
1423
|
src,
|
|
449
1424
|
timestampInSeconds
|
|
450
1425
|
}) => {
|
|
451
|
-
|
|
1426
|
+
const deletedTimestamps = [];
|
|
1427
|
+
for (const frameTimestamp of frameTimestamps.slice()) {
|
|
452
1428
|
const isLast = frameTimestamp === frameTimestamps[frameTimestamps.length - 1];
|
|
453
1429
|
if (isLast) {
|
|
454
1430
|
continue;
|
|
@@ -461,9 +1437,12 @@ var makeKeyframeBank = ({
|
|
|
461
1437
|
frameTimestamps.splice(frameTimestamps.indexOf(frameTimestamp), 1);
|
|
462
1438
|
frames[frameTimestamp].close();
|
|
463
1439
|
delete frames[frameTimestamp];
|
|
464
|
-
|
|
1440
|
+
deletedTimestamps.push(frameTimestamp);
|
|
465
1441
|
}
|
|
466
1442
|
}
|
|
1443
|
+
if (deletedTimestamps.length > 0) {
|
|
1444
|
+
Internals5.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted ${deletedTimestamps.length} frame${deletedTimestamps.length === 1 ? "" : "s"} ${renderTimestampRange(deletedTimestamps)} for src ${src} because it is lower than ${timestampInSeconds}. Remaining: ${renderTimestampRange(frameTimestamps)}`);
|
|
1445
|
+
}
|
|
467
1446
|
};
|
|
468
1447
|
const getOpenFrameCount = () => {
|
|
469
1448
|
return {
|
|
@@ -474,11 +1453,18 @@ var makeKeyframeBank = ({
|
|
|
474
1453
|
const getLastUsed = () => {
|
|
475
1454
|
return lastUsed;
|
|
476
1455
|
};
|
|
1456
|
+
let queue = Promise.resolve(undefined);
|
|
477
1457
|
const keyframeBank = {
|
|
478
1458
|
startTimestampInSeconds,
|
|
479
1459
|
endTimestampInSeconds,
|
|
480
|
-
getFrameFromTimestamp
|
|
481
|
-
|
|
1460
|
+
getFrameFromTimestamp: (timestamp) => {
|
|
1461
|
+
queue = queue.then(() => getFrameFromTimestamp(timestamp));
|
|
1462
|
+
return queue;
|
|
1463
|
+
},
|
|
1464
|
+
prepareForDeletion: (logLevel) => {
|
|
1465
|
+
queue = queue.then(() => prepareForDeletion(logLevel));
|
|
1466
|
+
return queue;
|
|
1467
|
+
},
|
|
482
1468
|
hasTimestampInSecond,
|
|
483
1469
|
addFrame,
|
|
484
1470
|
deleteFramesBeforeTimestamp,
|
|
@@ -510,32 +1496,90 @@ var rememberActualMatroskaTimestamps = (isMatroska) => {
|
|
|
510
1496
|
};
|
|
511
1497
|
|
|
512
1498
|
// src/video-extraction/get-frames-since-keyframe.ts
|
|
1499
|
+
var getRetryDelay = () => {
|
|
1500
|
+
return null;
|
|
1501
|
+
};
|
|
1502
|
+
var getFormatOrNull = async (input) => {
|
|
1503
|
+
try {
|
|
1504
|
+
return await input.getFormat();
|
|
1505
|
+
} catch {
|
|
1506
|
+
return null;
|
|
1507
|
+
}
|
|
1508
|
+
};
|
|
513
1509
|
var getSinks = async (src) => {
|
|
514
|
-
const input = new
|
|
515
|
-
formats:
|
|
516
|
-
source: new
|
|
1510
|
+
const input = new Input2({
|
|
1511
|
+
formats: ALL_FORMATS2,
|
|
1512
|
+
source: new UrlSource2(src, {
|
|
1513
|
+
getRetryDelay
|
|
1514
|
+
})
|
|
517
1515
|
});
|
|
518
|
-
const format = await input
|
|
519
|
-
const
|
|
520
|
-
const
|
|
521
|
-
|
|
522
|
-
|
|
523
|
-
|
|
1516
|
+
const format = await getFormatOrNull(input);
|
|
1517
|
+
const isMatroska = format === MATROSKA || format === WEBM;
|
|
1518
|
+
const getVideoSinks = async () => {
|
|
1519
|
+
if (format === null) {
|
|
1520
|
+
return "unknown-container-format";
|
|
1521
|
+
}
|
|
1522
|
+
const videoTrack = await input.getPrimaryVideoTrack();
|
|
1523
|
+
if (!videoTrack) {
|
|
1524
|
+
return "no-video-track";
|
|
1525
|
+
}
|
|
1526
|
+
const canDecode = await videoTrack.canDecode();
|
|
1527
|
+
if (!canDecode) {
|
|
1528
|
+
return "cannot-decode";
|
|
1529
|
+
}
|
|
1530
|
+
return {
|
|
524
1531
|
sampleSink: new VideoSampleSink(videoTrack),
|
|
525
1532
|
packetSink: new EncodedPacketSink(videoTrack)
|
|
526
|
-
}
|
|
527
|
-
|
|
1533
|
+
};
|
|
1534
|
+
};
|
|
1535
|
+
let videoSinksPromise = null;
|
|
1536
|
+
const getVideoSinksPromise = () => {
|
|
1537
|
+
if (videoSinksPromise) {
|
|
1538
|
+
return videoSinksPromise;
|
|
1539
|
+
}
|
|
1540
|
+
videoSinksPromise = getVideoSinks();
|
|
1541
|
+
return videoSinksPromise;
|
|
1542
|
+
};
|
|
1543
|
+
const audioSinksPromise = {};
|
|
1544
|
+
const getAudioSinks = async (index) => {
|
|
1545
|
+
if (format === null) {
|
|
1546
|
+
return "unknown-container-format";
|
|
1547
|
+
}
|
|
1548
|
+
const audioTracks = await input.getAudioTracks();
|
|
1549
|
+
const audioTrack = audioTracks[index];
|
|
1550
|
+
if (!audioTrack) {
|
|
1551
|
+
return "no-audio-track";
|
|
1552
|
+
}
|
|
1553
|
+
const canDecode = await audioTrack.canDecode();
|
|
1554
|
+
if (!canDecode) {
|
|
1555
|
+
return "cannot-decode-audio";
|
|
1556
|
+
}
|
|
1557
|
+
return {
|
|
528
1558
|
sampleSink: new AudioSampleSink(audioTrack)
|
|
529
|
-
}
|
|
1559
|
+
};
|
|
1560
|
+
};
|
|
1561
|
+
const getAudioSinksPromise = (index) => {
|
|
1562
|
+
if (audioSinksPromise[index]) {
|
|
1563
|
+
return audioSinksPromise[index];
|
|
1564
|
+
}
|
|
1565
|
+
audioSinksPromise[index] = getAudioSinks(index);
|
|
1566
|
+
return audioSinksPromise[index];
|
|
1567
|
+
};
|
|
1568
|
+
return {
|
|
1569
|
+
getVideo: () => getVideoSinksPromise(),
|
|
1570
|
+
getAudio: (index) => getAudioSinksPromise(index),
|
|
530
1571
|
actualMatroskaTimestamps: rememberActualMatroskaTimestamps(isMatroska),
|
|
531
1572
|
isMatroska,
|
|
532
|
-
getDuration: () =>
|
|
533
|
-
|
|
1573
|
+
getDuration: () => {
|
|
1574
|
+
return input.computeDuration();
|
|
1575
|
+
}
|
|
1576
|
+
};
|
|
534
1577
|
};
|
|
535
1578
|
var getFramesSinceKeyframe = async ({
|
|
536
1579
|
packetSink,
|
|
537
1580
|
videoSampleSink,
|
|
538
|
-
startPacket
|
|
1581
|
+
startPacket,
|
|
1582
|
+
logLevel
|
|
539
1583
|
}) => {
|
|
540
1584
|
const nextKeyPacket = await packetSink.getNextKeyPacket(startPacket, {
|
|
541
1585
|
verifyKeyPackets: true
|
|
@@ -544,7 +1588,8 @@ var getFramesSinceKeyframe = async ({
|
|
|
544
1588
|
const keyframeBank = makeKeyframeBank({
|
|
545
1589
|
startTimestampInSeconds: startPacket.timestamp,
|
|
546
1590
|
endTimestampInSeconds: nextKeyPacket ? nextKeyPacket.timestamp : Infinity,
|
|
547
|
-
sampleIterator
|
|
1591
|
+
sampleIterator,
|
|
1592
|
+
logLevel
|
|
548
1593
|
});
|
|
549
1594
|
return keyframeBank;
|
|
550
1595
|
};
|
|
@@ -572,10 +1617,10 @@ var makeKeyframeManager = () => {
|
|
|
572
1617
|
if (size === 0) {
|
|
573
1618
|
continue;
|
|
574
1619
|
}
|
|
575
|
-
|
|
1620
|
+
Internals6.Log.verbose({ logLevel, tag: "@remotion/media" }, `Open frames for src ${src}: ${renderTimestampRange(timestamps)}`);
|
|
576
1621
|
}
|
|
577
1622
|
}
|
|
578
|
-
|
|
1623
|
+
Internals6.Log.verbose({ logLevel, tag: "@remotion/media" }, `Video cache stats: ${count} open frames, ${totalSize} bytes`);
|
|
579
1624
|
};
|
|
580
1625
|
const getCacheStats = async () => {
|
|
581
1626
|
let count = 0;
|
|
@@ -614,9 +1659,9 @@ var makeKeyframeManager = () => {
|
|
|
614
1659
|
const deleteOldestKeyframeBank = async (logLevel) => {
|
|
615
1660
|
const { bank: mostInThePastBank, src: mostInThePastSrc } = await getTheKeyframeBankMostInThePast();
|
|
616
1661
|
if (mostInThePastBank) {
|
|
617
|
-
await mostInThePastBank.prepareForDeletion();
|
|
1662
|
+
await mostInThePastBank.prepareForDeletion(logLevel);
|
|
618
1663
|
delete sources[mostInThePastSrc][mostInThePastBank.startTimestampInSeconds];
|
|
619
|
-
|
|
1664
|
+
Internals6.Log.verbose({ logLevel, tag: "@remotion/media" }, `Deleted frames for src ${mostInThePastSrc} from ${mostInThePastBank.startTimestampInSeconds}sec to ${mostInThePastBank.endTimestampInSeconds}sec to free up memory.`);
|
|
620
1665
|
}
|
|
621
1666
|
};
|
|
622
1667
|
const ensureToStayUnderMaxCacheSize = async (logLevel) => {
|
|
@@ -641,8 +1686,8 @@ var makeKeyframeManager = () => {
|
|
|
641
1686
|
const bank = await sources[src][startTimeInSeconds];
|
|
642
1687
|
const { endTimestampInSeconds, startTimestampInSeconds } = bank;
|
|
643
1688
|
if (endTimestampInSeconds < threshold) {
|
|
644
|
-
await bank.prepareForDeletion();
|
|
645
|
-
|
|
1689
|
+
await bank.prepareForDeletion(logLevel);
|
|
1690
|
+
Internals6.Log.verbose({ logLevel, tag: "@remotion/media" }, `[Video] Cleared frames for src ${src} from ${startTimestampInSeconds}sec to ${endTimestampInSeconds}sec`);
|
|
646
1691
|
delete sources[src][startTimeInSeconds];
|
|
647
1692
|
} else {
|
|
648
1693
|
bank.deleteFramesBeforeTimestamp({
|
|
@@ -665,7 +1710,7 @@ var makeKeyframeManager = () => {
|
|
|
665
1710
|
verifyKeyPackets: true
|
|
666
1711
|
});
|
|
667
1712
|
if (!startPacket) {
|
|
668
|
-
|
|
1713
|
+
return null;
|
|
669
1714
|
}
|
|
670
1715
|
const startTimestampInSeconds = startPacket.timestamp;
|
|
671
1716
|
const existingBank = sources[src]?.[startTimestampInSeconds];
|
|
@@ -673,7 +1718,8 @@ var makeKeyframeManager = () => {
|
|
|
673
1718
|
const newKeyframeBank = getFramesSinceKeyframe({
|
|
674
1719
|
packetSink,
|
|
675
1720
|
videoSampleSink,
|
|
676
|
-
startPacket
|
|
1721
|
+
startPacket,
|
|
1722
|
+
logLevel
|
|
677
1723
|
});
|
|
678
1724
|
addKeyframeBank({ src, bank: newKeyframeBank, startTimestampInSeconds });
|
|
679
1725
|
return newKeyframeBank;
|
|
@@ -681,13 +1727,14 @@ var makeKeyframeManager = () => {
|
|
|
681
1727
|
if (await (await existingBank).hasTimestampInSecond(timestamp)) {
|
|
682
1728
|
return existingBank;
|
|
683
1729
|
}
|
|
684
|
-
|
|
685
|
-
await (await existingBank).prepareForDeletion();
|
|
1730
|
+
Internals6.Log.verbose({ logLevel, tag: "@remotion/media" }, `Keyframe bank exists but frame at time ${timestamp} does not exist anymore.`);
|
|
1731
|
+
await (await existingBank).prepareForDeletion(logLevel);
|
|
686
1732
|
delete sources[src][startTimestampInSeconds];
|
|
687
1733
|
const replacementKeybank = getFramesSinceKeyframe({
|
|
688
1734
|
packetSink,
|
|
689
1735
|
videoSampleSink,
|
|
690
|
-
startPacket
|
|
1736
|
+
startPacket,
|
|
1737
|
+
logLevel
|
|
691
1738
|
});
|
|
692
1739
|
addKeyframeBank({ src, bank: replacementKeybank, startTimestampInSeconds });
|
|
693
1740
|
return replacementKeybank;
|
|
@@ -714,20 +1761,35 @@ var makeKeyframeManager = () => {
|
|
|
714
1761
|
});
|
|
715
1762
|
return keyframeBank;
|
|
716
1763
|
};
|
|
717
|
-
const clearAll = async () => {
|
|
1764
|
+
const clearAll = async (logLevel) => {
|
|
718
1765
|
const srcs = Object.keys(sources);
|
|
719
1766
|
for (const src of srcs) {
|
|
720
1767
|
const banks = Object.keys(sources[src]);
|
|
721
1768
|
for (const startTimeInSeconds of banks) {
|
|
722
1769
|
const bank = await sources[src][startTimeInSeconds];
|
|
723
|
-
|
|
1770
|
+
bank.prepareForDeletion(logLevel);
|
|
724
1771
|
delete sources[src][startTimeInSeconds];
|
|
725
1772
|
}
|
|
726
1773
|
}
|
|
727
1774
|
};
|
|
1775
|
+
let queue = Promise.resolve(undefined);
|
|
728
1776
|
return {
|
|
729
|
-
requestKeyframeBank
|
|
730
|
-
|
|
1777
|
+
requestKeyframeBank: ({
|
|
1778
|
+
packetSink,
|
|
1779
|
+
timestamp,
|
|
1780
|
+
videoSampleSink,
|
|
1781
|
+
src,
|
|
1782
|
+
logLevel
|
|
1783
|
+
}) => {
|
|
1784
|
+
queue = queue.then(() => requestKeyframeBank({
|
|
1785
|
+
packetSink,
|
|
1786
|
+
timestamp,
|
|
1787
|
+
videoSampleSink,
|
|
1788
|
+
src,
|
|
1789
|
+
logLevel
|
|
1790
|
+
}));
|
|
1791
|
+
return queue;
|
|
1792
|
+
},
|
|
731
1793
|
getCacheStats,
|
|
732
1794
|
clearAll
|
|
733
1795
|
};
|
|
@@ -753,174 +1815,59 @@ var getUncachedMaxCacheSize = (logLevel) => {
|
|
|
753
1815
|
if (window.remotion_mediaCacheSizeInBytes > 20000 * 1024 * 1024) {
|
|
754
1816
|
cancelRender(new Error(`The maximum value for the "mediaCacheSizeInBytes" prop is 20GB (${20000 * 1024 * 1024}), got: ${window.remotion_mediaCacheSizeInBytes}`));
|
|
755
1817
|
}
|
|
756
|
-
|
|
1818
|
+
Internals7.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set using "mediaCacheSizeInBytes": ${(window.remotion_mediaCacheSizeInBytes / 1024 / 1024).toFixed(1)} MB`);
|
|
757
1819
|
return window.remotion_mediaCacheSizeInBytes;
|
|
758
1820
|
}
|
|
759
|
-
if (window.remotion_initialMemoryAvailable !== undefined && window.remotion_initialMemoryAvailable !== null) {
|
|
760
|
-
const value = window.remotion_initialMemoryAvailable / 2;
|
|
761
|
-
if (value < 240 * 1024 * 1024) {
|
|
762
|
-
|
|
763
|
-
return 240 * 1024 * 1024;
|
|
764
|
-
}
|
|
765
|
-
if (value > 20000 * 1024 * 1024) {
|
|
766
|
-
Internals4.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on maximum value of 20GB (which is less than half of the available system memory)`);
|
|
767
|
-
return 20000 * 1024 * 1024;
|
|
768
|
-
}
|
|
769
|
-
Internals4.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on available memory (50% of available memory): ${(value / 1024 / 1024).toFixed(1)} MB`);
|
|
770
|
-
return value;
|
|
771
|
-
}
|
|
772
|
-
return 1000 * 1000 * 1000;
|
|
773
|
-
};
|
|
774
|
-
var cachedMaxCacheSize = null;
|
|
775
|
-
var getMaxVideoCacheSize = (logLevel) => {
|
|
776
|
-
if (cachedMaxCacheSize !== null) {
|
|
777
|
-
return cachedMaxCacheSize;
|
|
778
|
-
}
|
|
779
|
-
cachedMaxCacheSize = getUncachedMaxCacheSize(logLevel);
|
|
780
|
-
return cachedMaxCacheSize;
|
|
781
|
-
};
|
|
782
|
-
|
|
783
|
-
// src/convert-audiodata/combine-audiodata.ts
|
|
784
|
-
var combineAudioDataAndClosePrevious = (audioDataArray) => {
|
|
785
|
-
let numberOfFrames = 0;
|
|
786
|
-
let numberOfChannels = null;
|
|
787
|
-
let sampleRate = null;
|
|
788
|
-
const { timestamp } = audioDataArray[0];
|
|
789
|
-
for (const audioData of audioDataArray) {
|
|
790
|
-
numberOfFrames += audioData.numberOfFrames;
|
|
791
|
-
if (!numberOfChannels) {
|
|
792
|
-
numberOfChannels = audioData.numberOfChannels;
|
|
793
|
-
} else if (numberOfChannels !== audioData.numberOfChannels) {
|
|
794
|
-
throw new Error("Number of channels do not match");
|
|
795
|
-
}
|
|
796
|
-
if (!sampleRate) {
|
|
797
|
-
sampleRate = audioData.sampleRate;
|
|
798
|
-
} else if (sampleRate !== audioData.sampleRate) {
|
|
799
|
-
throw new Error("Sample rates do not match");
|
|
800
|
-
}
|
|
801
|
-
}
|
|
802
|
-
if (!numberOfChannels) {
|
|
803
|
-
throw new Error("Number of channels is not set");
|
|
804
|
-
}
|
|
805
|
-
if (!sampleRate) {
|
|
806
|
-
throw new Error("Sample rate is not set");
|
|
807
|
-
}
|
|
808
|
-
const arr = new Int16Array(numberOfFrames * numberOfChannels);
|
|
809
|
-
let offset = 0;
|
|
810
|
-
for (const audioData of audioDataArray) {
|
|
811
|
-
arr.set(audioData.data, offset);
|
|
812
|
-
offset += audioData.data.length;
|
|
813
|
-
}
|
|
814
|
-
return {
|
|
815
|
-
data: arr,
|
|
816
|
-
numberOfChannels,
|
|
817
|
-
numberOfFrames,
|
|
818
|
-
sampleRate,
|
|
819
|
-
timestamp
|
|
820
|
-
};
|
|
821
|
-
};
|
|
822
|
-
|
|
823
|
-
// src/convert-audiodata/resample-audiodata.ts
|
|
824
|
-
var TARGET_NUMBER_OF_CHANNELS = 2;
|
|
825
|
-
var TARGET_SAMPLE_RATE = 48000;
|
|
826
|
-
var fixFloatingPoint = (value) => {
|
|
827
|
-
if (value % 1 < 0.0000001) {
|
|
828
|
-
return Math.floor(value);
|
|
829
|
-
}
|
|
830
|
-
if (value % 1 > 0.9999999) {
|
|
831
|
-
return Math.ceil(value);
|
|
832
|
-
}
|
|
833
|
-
return value;
|
|
834
|
-
};
|
|
835
|
-
var resampleAudioData = ({
|
|
836
|
-
srcNumberOfChannels,
|
|
837
|
-
sourceChannels,
|
|
838
|
-
destination,
|
|
839
|
-
targetFrames,
|
|
840
|
-
chunkSize
|
|
841
|
-
}) => {
|
|
842
|
-
const getSourceValues = (startUnfixed, endUnfixed, channelIndex) => {
|
|
843
|
-
const start = fixFloatingPoint(startUnfixed);
|
|
844
|
-
const end = fixFloatingPoint(endUnfixed);
|
|
845
|
-
const startFloor = Math.floor(start);
|
|
846
|
-
const startCeil = Math.ceil(start);
|
|
847
|
-
const startFraction = start - startFloor;
|
|
848
|
-
const endFraction = end - Math.floor(end);
|
|
849
|
-
const endFloor = Math.floor(end);
|
|
850
|
-
let weightedSum = 0;
|
|
851
|
-
let totalWeight = 0;
|
|
852
|
-
if (startFraction > 0) {
|
|
853
|
-
const firstSample = sourceChannels[startFloor * srcNumberOfChannels + channelIndex];
|
|
854
|
-
weightedSum += firstSample * (1 - startFraction);
|
|
855
|
-
totalWeight += 1 - startFraction;
|
|
856
|
-
}
|
|
857
|
-
for (let k = startCeil;k < endFloor; k++) {
|
|
858
|
-
const num = sourceChannels[k * srcNumberOfChannels + channelIndex];
|
|
859
|
-
weightedSum += num;
|
|
860
|
-
totalWeight += 1;
|
|
861
|
-
}
|
|
862
|
-
if (endFraction > 0) {
|
|
863
|
-
const lastSample = sourceChannels[endFloor * srcNumberOfChannels + channelIndex];
|
|
864
|
-
weightedSum += lastSample * endFraction;
|
|
865
|
-
totalWeight += endFraction;
|
|
866
|
-
}
|
|
867
|
-
const average = weightedSum / totalWeight;
|
|
868
|
-
return average;
|
|
869
|
-
};
|
|
870
|
-
for (let newFrameIndex = 0;newFrameIndex < targetFrames; newFrameIndex++) {
|
|
871
|
-
const start = newFrameIndex * chunkSize;
|
|
872
|
-
const end = start + chunkSize;
|
|
873
|
-
if (TARGET_NUMBER_OF_CHANNELS === srcNumberOfChannels) {
|
|
874
|
-
for (let i = 0;i < srcNumberOfChannels; i++) {
|
|
875
|
-
destination[newFrameIndex * srcNumberOfChannels + i] = getSourceValues(start, end, i);
|
|
876
|
-
}
|
|
877
|
-
}
|
|
878
|
-
if (srcNumberOfChannels === 1) {
|
|
879
|
-
const m = getSourceValues(start, end, 0);
|
|
880
|
-
destination[newFrameIndex * 2 + 0] = m;
|
|
881
|
-
destination[newFrameIndex * 2 + 1] = m;
|
|
882
|
-
} else if (srcNumberOfChannels === 4) {
|
|
883
|
-
const l = getSourceValues(start, end, 0);
|
|
884
|
-
const r = getSourceValues(start, end, 1);
|
|
885
|
-
const sl = getSourceValues(start, end, 2);
|
|
886
|
-
const sr = getSourceValues(start, end, 3);
|
|
887
|
-
const l2 = 0.5 * (l + sl);
|
|
888
|
-
const r2 = 0.5 * (r + sr);
|
|
889
|
-
destination[newFrameIndex * 2 + 0] = l2;
|
|
890
|
-
destination[newFrameIndex * 2 + 1] = r2;
|
|
891
|
-
} else if (srcNumberOfChannels === 6) {
|
|
892
|
-
const l = getSourceValues(start, end, 0);
|
|
893
|
-
const r = getSourceValues(start, end, 1);
|
|
894
|
-
const c = getSourceValues(start, end, 2);
|
|
895
|
-
const sl = getSourceValues(start, end, 3);
|
|
896
|
-
const sr = getSourceValues(start, end, 4);
|
|
897
|
-
const sq = Math.sqrt(1 / 2);
|
|
898
|
-
const l2 = l + sq * (c + sl);
|
|
899
|
-
const r2 = r + sq * (c + sr);
|
|
900
|
-
destination[newFrameIndex * 2 + 0] = l2;
|
|
901
|
-
destination[newFrameIndex * 2 + 1] = r2;
|
|
902
|
-
} else {
|
|
903
|
-
for (let i = 0;i < srcNumberOfChannels; i++) {
|
|
904
|
-
destination[newFrameIndex * TARGET_NUMBER_OF_CHANNELS + i] = getSourceValues(start, end, i);
|
|
905
|
-
}
|
|
1821
|
+
if (window.remotion_initialMemoryAvailable !== undefined && window.remotion_initialMemoryAvailable !== null) {
|
|
1822
|
+
const value = window.remotion_initialMemoryAvailable / 2;
|
|
1823
|
+
if (value < 240 * 1024 * 1024) {
|
|
1824
|
+
Internals7.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on minimum value of 240MB (which is more than half of the available system memory!)`);
|
|
1825
|
+
return 240 * 1024 * 1024;
|
|
906
1826
|
}
|
|
1827
|
+
if (value > 20000 * 1024 * 1024) {
|
|
1828
|
+
Internals7.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on maximum value of 20GB (which is less than half of the available system memory)`);
|
|
1829
|
+
return 20000 * 1024 * 1024;
|
|
1830
|
+
}
|
|
1831
|
+
Internals7.Log.verbose({ logLevel, tag: "@remotion/media" }, `Using cache size set based on available memory (50% of available memory): ${(value / 1024 / 1024).toFixed(1)} MB`);
|
|
1832
|
+
return value;
|
|
907
1833
|
}
|
|
1834
|
+
return 1000 * 1000 * 1000;
|
|
1835
|
+
};
|
|
1836
|
+
var cachedMaxCacheSize = null;
|
|
1837
|
+
var getMaxVideoCacheSize = (logLevel) => {
|
|
1838
|
+
if (cachedMaxCacheSize !== null) {
|
|
1839
|
+
return cachedMaxCacheSize;
|
|
1840
|
+
}
|
|
1841
|
+
cachedMaxCacheSize = getUncachedMaxCacheSize(logLevel);
|
|
1842
|
+
return cachedMaxCacheSize;
|
|
908
1843
|
};
|
|
909
1844
|
|
|
910
|
-
// src/convert-audiodata/
|
|
911
|
-
var
|
|
912
|
-
|
|
913
|
-
|
|
914
|
-
|
|
1845
|
+
// src/convert-audiodata/combine-audiodata.ts
|
|
1846
|
+
var combineAudioDataAndClosePrevious = (audioDataArray) => {
|
|
1847
|
+
let numberOfFrames = 0;
|
|
1848
|
+
const { timestamp } = audioDataArray[0];
|
|
1849
|
+
for (const audioData of audioDataArray) {
|
|
1850
|
+
numberOfFrames += audioData.numberOfFrames;
|
|
1851
|
+
}
|
|
1852
|
+
const arr = new Int16Array(numberOfFrames * TARGET_NUMBER_OF_CHANNELS);
|
|
1853
|
+
let offset = 0;
|
|
1854
|
+
for (const audioData of audioDataArray) {
|
|
1855
|
+
arr.set(audioData.data, offset);
|
|
1856
|
+
offset += audioData.data.length;
|
|
915
1857
|
}
|
|
916
|
-
return
|
|
1858
|
+
return {
|
|
1859
|
+
data: arr,
|
|
1860
|
+
numberOfFrames,
|
|
1861
|
+
timestamp
|
|
1862
|
+
};
|
|
917
1863
|
};
|
|
1864
|
+
|
|
1865
|
+
// src/convert-audiodata/convert-audiodata.ts
|
|
1866
|
+
var FORMAT = "s16";
|
|
918
1867
|
var convertAudioData = ({
|
|
919
1868
|
audioData,
|
|
920
|
-
newSampleRate,
|
|
921
1869
|
trimStartInSeconds,
|
|
922
1870
|
trimEndInSeconds,
|
|
923
|
-
targetNumberOfChannels,
|
|
924
1871
|
playbackRate
|
|
925
1872
|
}) => {
|
|
926
1873
|
const {
|
|
@@ -928,17 +1875,14 @@ var convertAudioData = ({
|
|
|
928
1875
|
sampleRate: currentSampleRate,
|
|
929
1876
|
numberOfFrames
|
|
930
1877
|
} = audioData;
|
|
931
|
-
const ratio = currentSampleRate /
|
|
932
|
-
const frameOffset =
|
|
1878
|
+
const ratio = currentSampleRate / TARGET_SAMPLE_RATE;
|
|
1879
|
+
const frameOffset = Math.floor(trimStartInSeconds * audioData.sampleRate);
|
|
933
1880
|
const unroundedFrameCount = numberOfFrames - (trimEndInSeconds + trimStartInSeconds) * audioData.sampleRate;
|
|
934
|
-
const frameCount = Math.
|
|
935
|
-
const newNumberOfFrames = Math.
|
|
1881
|
+
const frameCount = Math.ceil(unroundedFrameCount);
|
|
1882
|
+
const newNumberOfFrames = Math.ceil(unroundedFrameCount / ratio / playbackRate);
|
|
936
1883
|
if (newNumberOfFrames === 0) {
|
|
937
1884
|
throw new Error("Cannot resample - the given sample rate would result in less than 1 sample");
|
|
938
1885
|
}
|
|
939
|
-
if (newSampleRate < 3000 || newSampleRate > 768000) {
|
|
940
|
-
throw new Error("newSampleRate must be between 3000 and 768000");
|
|
941
|
-
}
|
|
942
1886
|
const srcChannels = new Int16Array(srcNumberOfChannels * frameCount);
|
|
943
1887
|
audioData.copyTo(srcChannels, {
|
|
944
1888
|
planeIndex: 0,
|
|
@@ -946,15 +1890,13 @@ var convertAudioData = ({
|
|
|
946
1890
|
frameOffset,
|
|
947
1891
|
frameCount
|
|
948
1892
|
});
|
|
949
|
-
const data = new Int16Array(newNumberOfFrames *
|
|
1893
|
+
const data = new Int16Array(newNumberOfFrames * TARGET_NUMBER_OF_CHANNELS);
|
|
950
1894
|
const chunkSize = frameCount / newNumberOfFrames;
|
|
951
|
-
if (newNumberOfFrames === frameCount &&
|
|
1895
|
+
if (newNumberOfFrames === frameCount && TARGET_NUMBER_OF_CHANNELS === srcNumberOfChannels && playbackRate === 1) {
|
|
952
1896
|
return {
|
|
953
1897
|
data: srcChannels,
|
|
954
|
-
numberOfChannels: targetNumberOfChannels,
|
|
955
1898
|
numberOfFrames: newNumberOfFrames,
|
|
956
|
-
|
|
957
|
-
timestamp: audioData.timestamp + trimStartInSeconds * 1e6
|
|
1899
|
+
timestamp: audioData.timestamp + frameOffset / audioData.sampleRate * 1e6
|
|
958
1900
|
};
|
|
959
1901
|
}
|
|
960
1902
|
resampleAudioData({
|
|
@@ -966,57 +1908,102 @@ var convertAudioData = ({
|
|
|
966
1908
|
});
|
|
967
1909
|
const newAudioData = {
|
|
968
1910
|
data,
|
|
969
|
-
format: FORMAT,
|
|
970
|
-
numberOfChannels: targetNumberOfChannels,
|
|
971
1911
|
numberOfFrames: newNumberOfFrames,
|
|
972
|
-
|
|
973
|
-
timestamp: audioData.timestamp + trimStartInSeconds * 1e6
|
|
1912
|
+
timestamp: audioData.timestamp + frameOffset / audioData.sampleRate * 1e6
|
|
974
1913
|
};
|
|
975
1914
|
return newAudioData;
|
|
976
1915
|
};
|
|
977
1916
|
|
|
978
|
-
// src/get-sink
|
|
979
|
-
import { Internals as
|
|
1917
|
+
// src/get-sink.ts
|
|
1918
|
+
import { Internals as Internals8 } from "remotion";
|
|
980
1919
|
var sinkPromises = {};
|
|
981
|
-
var
|
|
1920
|
+
var getSink = (src, logLevel) => {
|
|
982
1921
|
let promise = sinkPromises[src];
|
|
983
1922
|
if (!promise) {
|
|
984
|
-
|
|
985
|
-
sinkPromises[src] = promise;
|
|
986
|
-
}
|
|
987
|
-
let awaited = await promise;
|
|
988
|
-
let deferredValue = awaited.deref();
|
|
989
|
-
if (!deferredValue) {
|
|
990
|
-
Internals5.Log.verbose({
|
|
1923
|
+
Internals8.Log.verbose({
|
|
991
1924
|
logLevel,
|
|
992
1925
|
tag: "@remotion/media"
|
|
993
|
-
}, `Sink for ${src} was
|
|
1926
|
+
}, `Sink for ${src} was not found, creating new sink`);
|
|
994
1927
|
promise = getSinks(src);
|
|
995
1928
|
sinkPromises[src] = promise;
|
|
996
|
-
awaited = await promise;
|
|
997
|
-
deferredValue = awaited.deref();
|
|
998
1929
|
}
|
|
999
|
-
return
|
|
1930
|
+
return promise;
|
|
1931
|
+
};
|
|
1932
|
+
|
|
1933
|
+
// src/get-time-in-seconds.ts
|
|
1934
|
+
import { Internals as Internals9 } from "remotion";
|
|
1935
|
+
var getTimeInSeconds = ({
|
|
1936
|
+
loop,
|
|
1937
|
+
mediaDurationInSeconds,
|
|
1938
|
+
unloopedTimeInSeconds,
|
|
1939
|
+
src,
|
|
1940
|
+
trimAfter,
|
|
1941
|
+
trimBefore,
|
|
1942
|
+
fps,
|
|
1943
|
+
playbackRate
|
|
1944
|
+
}) => {
|
|
1945
|
+
if (mediaDurationInSeconds === null && loop) {
|
|
1946
|
+
throw new Error(`Could not determine duration of ${src}, but "loop" was set.`);
|
|
1947
|
+
}
|
|
1948
|
+
const loopDuration = loop ? Internals9.calculateLoopDuration({
|
|
1949
|
+
trimAfter,
|
|
1950
|
+
mediaDurationInFrames: mediaDurationInSeconds * fps,
|
|
1951
|
+
playbackRate: 1,
|
|
1952
|
+
trimBefore
|
|
1953
|
+
}) / fps : Infinity;
|
|
1954
|
+
const timeInSeconds = unloopedTimeInSeconds * playbackRate % loopDuration;
|
|
1955
|
+
if ((trimAfter ?? null) !== null) {
|
|
1956
|
+
if (!loop) {
|
|
1957
|
+
const time = (trimAfter - (trimBefore ?? 0)) / fps;
|
|
1958
|
+
if (timeInSeconds >= time) {
|
|
1959
|
+
return null;
|
|
1960
|
+
}
|
|
1961
|
+
}
|
|
1962
|
+
}
|
|
1963
|
+
return timeInSeconds + (trimBefore ?? 0) / fps;
|
|
1000
1964
|
};
|
|
1001
1965
|
|
|
1002
1966
|
// src/audio-extraction/extract-audio.ts
|
|
1003
|
-
var
|
|
1967
|
+
var extractAudioInternal = async ({
|
|
1004
1968
|
src,
|
|
1005
1969
|
timeInSeconds: unloopedTimeInSeconds,
|
|
1006
|
-
durationInSeconds,
|
|
1970
|
+
durationInSeconds: durationNotYetApplyingPlaybackRate,
|
|
1007
1971
|
logLevel,
|
|
1008
1972
|
loop,
|
|
1009
|
-
playbackRate
|
|
1973
|
+
playbackRate,
|
|
1974
|
+
audioStreamIndex,
|
|
1975
|
+
trimBefore,
|
|
1976
|
+
trimAfter,
|
|
1977
|
+
fps
|
|
1010
1978
|
}) => {
|
|
1011
|
-
const {
|
|
1012
|
-
let
|
|
1979
|
+
const { getAudio, actualMatroskaTimestamps, isMatroska, getDuration } = await getSink(src, logLevel);
|
|
1980
|
+
let mediaDurationInSeconds = null;
|
|
1013
1981
|
if (loop) {
|
|
1014
|
-
|
|
1982
|
+
mediaDurationInSeconds = await getDuration();
|
|
1015
1983
|
}
|
|
1016
|
-
|
|
1984
|
+
const audio = await getAudio(audioStreamIndex);
|
|
1985
|
+
if (audio === "no-audio-track") {
|
|
1017
1986
|
return { data: null, durationInSeconds: null };
|
|
1018
1987
|
}
|
|
1019
|
-
|
|
1988
|
+
if (audio === "cannot-decode-audio") {
|
|
1989
|
+
return "cannot-decode";
|
|
1990
|
+
}
|
|
1991
|
+
if (audio === "unknown-container-format") {
|
|
1992
|
+
return "unknown-container-format";
|
|
1993
|
+
}
|
|
1994
|
+
const timeInSeconds = getTimeInSeconds({
|
|
1995
|
+
loop,
|
|
1996
|
+
mediaDurationInSeconds,
|
|
1997
|
+
unloopedTimeInSeconds,
|
|
1998
|
+
src,
|
|
1999
|
+
trimAfter,
|
|
2000
|
+
playbackRate,
|
|
2001
|
+
trimBefore,
|
|
2002
|
+
fps
|
|
2003
|
+
});
|
|
2004
|
+
if (timeInSeconds === null) {
|
|
2005
|
+
return { data: null, durationInSeconds: mediaDurationInSeconds };
|
|
2006
|
+
}
|
|
1020
2007
|
const sampleIterator = await audioManager.getIterator({
|
|
1021
2008
|
src,
|
|
1022
2009
|
timeInSeconds,
|
|
@@ -1025,8 +2012,9 @@ var extractAudio = async ({
|
|
|
1025
2012
|
actualMatroskaTimestamps,
|
|
1026
2013
|
logLevel
|
|
1027
2014
|
});
|
|
2015
|
+
const durationInSeconds = durationNotYetApplyingPlaybackRate * playbackRate;
|
|
1028
2016
|
const samples = await sampleIterator.getSamples(timeInSeconds, durationInSeconds);
|
|
1029
|
-
audioManager.logOpenFrames(
|
|
2017
|
+
audioManager.logOpenFrames();
|
|
1030
2018
|
const audioDataArray = [];
|
|
1031
2019
|
for (let i = 0;i < samples.length; i++) {
|
|
1032
2020
|
const sample = samples[i];
|
|
@@ -1047,7 +2035,7 @@ var extractAudio = async ({
|
|
|
1047
2035
|
trimStartInSeconds = 0;
|
|
1048
2036
|
}
|
|
1049
2037
|
if (trimStartInSeconds < 0) {
|
|
1050
|
-
throw new Error(`trimStartInSeconds is negative: ${trimStartInSeconds}`);
|
|
2038
|
+
throw new Error(`trimStartInSeconds is negative: ${trimStartInSeconds}. ${JSON.stringify({ timeInSeconds, ts: sample.timestamp, d: sample.duration, isFirstSample, isLastSample, durationInSeconds, i, st: samples.map((s) => s.timestamp) })}`);
|
|
1051
2039
|
}
|
|
1052
2040
|
}
|
|
1053
2041
|
if (isLastSample) {
|
|
@@ -1055,10 +2043,8 @@ var extractAudio = async ({
|
|
|
1055
2043
|
}
|
|
1056
2044
|
const audioData = convertAudioData({
|
|
1057
2045
|
audioData: audioDataRaw,
|
|
1058
|
-
newSampleRate: TARGET_SAMPLE_RATE,
|
|
1059
2046
|
trimStartInSeconds,
|
|
1060
2047
|
trimEndInSeconds,
|
|
1061
|
-
targetNumberOfChannels: TARGET_NUMBER_OF_CHANNELS,
|
|
1062
2048
|
playbackRate
|
|
1063
2049
|
});
|
|
1064
2050
|
audioDataRaw.close();
|
|
@@ -1068,25 +2054,60 @@ var extractAudio = async ({
|
|
|
1068
2054
|
audioDataArray.push(audioData);
|
|
1069
2055
|
}
|
|
1070
2056
|
if (audioDataArray.length === 0) {
|
|
1071
|
-
return { data: null, durationInSeconds:
|
|
2057
|
+
return { data: null, durationInSeconds: mediaDurationInSeconds };
|
|
1072
2058
|
}
|
|
1073
2059
|
const combined = combineAudioDataAndClosePrevious(audioDataArray);
|
|
1074
|
-
return { data: combined, durationInSeconds:
|
|
2060
|
+
return { data: combined, durationInSeconds: mediaDurationInSeconds };
|
|
2061
|
+
};
|
|
2062
|
+
var queue = Promise.resolve(undefined);
|
|
2063
|
+
var extractAudio = (params) => {
|
|
2064
|
+
queue = queue.then(() => extractAudioInternal(params));
|
|
2065
|
+
return queue;
|
|
1075
2066
|
};
|
|
1076
2067
|
|
|
1077
2068
|
// src/video-extraction/extract-frame.ts
|
|
1078
|
-
var
|
|
2069
|
+
var extractFrameInternal = async ({
|
|
1079
2070
|
src,
|
|
1080
|
-
timeInSeconds:
|
|
2071
|
+
timeInSeconds: unloopedTimeInSeconds,
|
|
1081
2072
|
logLevel,
|
|
1082
|
-
loop
|
|
2073
|
+
loop,
|
|
2074
|
+
trimAfter,
|
|
2075
|
+
trimBefore,
|
|
2076
|
+
playbackRate,
|
|
2077
|
+
fps
|
|
1083
2078
|
}) => {
|
|
1084
|
-
const sink = await
|
|
1085
|
-
const
|
|
1086
|
-
if (video ===
|
|
2079
|
+
const sink = await getSink(src, logLevel);
|
|
2080
|
+
const video = await sink.getVideo();
|
|
2081
|
+
if (video === "no-video-track") {
|
|
1087
2082
|
throw new Error(`No video track found for ${src}`);
|
|
1088
2083
|
}
|
|
1089
|
-
|
|
2084
|
+
if (video === "cannot-decode") {
|
|
2085
|
+
return { type: "cannot-decode", durationInSeconds: await sink.getDuration() };
|
|
2086
|
+
}
|
|
2087
|
+
if (video === "unknown-container-format") {
|
|
2088
|
+
return { type: "unknown-container-format" };
|
|
2089
|
+
}
|
|
2090
|
+
let mediaDurationInSeconds = null;
|
|
2091
|
+
if (loop) {
|
|
2092
|
+
mediaDurationInSeconds = await sink.getDuration();
|
|
2093
|
+
}
|
|
2094
|
+
const timeInSeconds = getTimeInSeconds({
|
|
2095
|
+
loop,
|
|
2096
|
+
mediaDurationInSeconds,
|
|
2097
|
+
unloopedTimeInSeconds,
|
|
2098
|
+
src,
|
|
2099
|
+
trimAfter,
|
|
2100
|
+
playbackRate,
|
|
2101
|
+
trimBefore,
|
|
2102
|
+
fps
|
|
2103
|
+
});
|
|
2104
|
+
if (timeInSeconds === null) {
|
|
2105
|
+
return {
|
|
2106
|
+
type: "success",
|
|
2107
|
+
frame: null,
|
|
2108
|
+
durationInSeconds: await sink.getDuration()
|
|
2109
|
+
};
|
|
2110
|
+
}
|
|
1090
2111
|
const keyframeBank = await keyframeManager.requestKeyframeBank({
|
|
1091
2112
|
packetSink: video.packetSink,
|
|
1092
2113
|
videoSampleSink: video.sampleSink,
|
|
@@ -1094,8 +2115,20 @@ var extractFrame = async ({
|
|
|
1094
2115
|
src,
|
|
1095
2116
|
logLevel
|
|
1096
2117
|
});
|
|
2118
|
+
if (!keyframeBank) {
|
|
2119
|
+
return {
|
|
2120
|
+
type: "success",
|
|
2121
|
+
frame: null,
|
|
2122
|
+
durationInSeconds: await sink.getDuration()
|
|
2123
|
+
};
|
|
2124
|
+
}
|
|
1097
2125
|
const frame = await keyframeBank.getFrameFromTimestamp(timeInSeconds);
|
|
1098
|
-
return frame;
|
|
2126
|
+
return { type: "success", frame, durationInSeconds: await sink.getDuration() };
|
|
2127
|
+
};
|
|
2128
|
+
var queue2 = Promise.resolve(undefined);
|
|
2129
|
+
var extractFrame = (params) => {
|
|
2130
|
+
queue2 = queue2.then(() => extractFrameInternal(params));
|
|
2131
|
+
return queue2;
|
|
1099
2132
|
};
|
|
1100
2133
|
|
|
1101
2134
|
// src/extract-frame-and-audio.ts
|
|
@@ -1107,29 +2140,74 @@ var extractFrameAndAudio = async ({
|
|
|
1107
2140
|
playbackRate,
|
|
1108
2141
|
includeAudio,
|
|
1109
2142
|
includeVideo,
|
|
1110
|
-
loop
|
|
2143
|
+
loop,
|
|
2144
|
+
audioStreamIndex,
|
|
2145
|
+
trimAfter,
|
|
2146
|
+
trimBefore,
|
|
2147
|
+
fps
|
|
1111
2148
|
}) => {
|
|
1112
|
-
|
|
1113
|
-
|
|
1114
|
-
|
|
1115
|
-
|
|
1116
|
-
|
|
1117
|
-
|
|
1118
|
-
|
|
1119
|
-
|
|
1120
|
-
|
|
1121
|
-
|
|
1122
|
-
|
|
1123
|
-
|
|
1124
|
-
|
|
1125
|
-
|
|
1126
|
-
|
|
1127
|
-
|
|
1128
|
-
|
|
1129
|
-
|
|
1130
|
-
|
|
1131
|
-
|
|
1132
|
-
|
|
2149
|
+
try {
|
|
2150
|
+
const [frame, audio] = await Promise.all([
|
|
2151
|
+
includeVideo ? extractFrame({
|
|
2152
|
+
src,
|
|
2153
|
+
timeInSeconds,
|
|
2154
|
+
logLevel,
|
|
2155
|
+
loop,
|
|
2156
|
+
trimAfter,
|
|
2157
|
+
playbackRate,
|
|
2158
|
+
trimBefore,
|
|
2159
|
+
fps
|
|
2160
|
+
}) : null,
|
|
2161
|
+
includeAudio ? extractAudio({
|
|
2162
|
+
src,
|
|
2163
|
+
timeInSeconds,
|
|
2164
|
+
durationInSeconds,
|
|
2165
|
+
logLevel,
|
|
2166
|
+
loop,
|
|
2167
|
+
playbackRate,
|
|
2168
|
+
audioStreamIndex,
|
|
2169
|
+
trimAfter,
|
|
2170
|
+
fps,
|
|
2171
|
+
trimBefore
|
|
2172
|
+
}) : null
|
|
2173
|
+
]);
|
|
2174
|
+
if (frame?.type === "cannot-decode") {
|
|
2175
|
+
return {
|
|
2176
|
+
type: "cannot-decode",
|
|
2177
|
+
durationInSeconds: frame.durationInSeconds
|
|
2178
|
+
};
|
|
2179
|
+
}
|
|
2180
|
+
if (frame?.type === "unknown-container-format") {
|
|
2181
|
+
return { type: "unknown-container-format" };
|
|
2182
|
+
}
|
|
2183
|
+
if (audio === "unknown-container-format") {
|
|
2184
|
+
if (frame !== null) {
|
|
2185
|
+
frame?.frame?.close();
|
|
2186
|
+
}
|
|
2187
|
+
return { type: "unknown-container-format" };
|
|
2188
|
+
}
|
|
2189
|
+
if (audio === "cannot-decode") {
|
|
2190
|
+
if (frame?.type === "success" && frame.frame !== null) {
|
|
2191
|
+
frame?.frame.close();
|
|
2192
|
+
}
|
|
2193
|
+
return {
|
|
2194
|
+
type: "cannot-decode",
|
|
2195
|
+
durationInSeconds: frame?.type === "success" ? frame.durationInSeconds : null
|
|
2196
|
+
};
|
|
2197
|
+
}
|
|
2198
|
+
return {
|
|
2199
|
+
type: "success",
|
|
2200
|
+
frame: frame?.frame?.toVideoFrame() ?? null,
|
|
2201
|
+
audio: audio?.data ?? null,
|
|
2202
|
+
durationInSeconds: audio?.durationInSeconds ?? null
|
|
2203
|
+
};
|
|
2204
|
+
} catch (err) {
|
|
2205
|
+
const error = err;
|
|
2206
|
+
if (isNetworkError(error)) {
|
|
2207
|
+
return { type: "network-error" };
|
|
2208
|
+
}
|
|
2209
|
+
throw err;
|
|
2210
|
+
}
|
|
1133
2211
|
};
|
|
1134
2212
|
|
|
1135
2213
|
// src/video-extraction/extract-frame-via-broadcast-channel.ts
|
|
@@ -1138,7 +2216,7 @@ if (window.remotion_broadcastChannel && window.remotion_isMainTab) {
|
|
|
1138
2216
|
const data = event.data;
|
|
1139
2217
|
if (data.type === "request") {
|
|
1140
2218
|
try {
|
|
1141
|
-
const
|
|
2219
|
+
const result = await extractFrameAndAudio({
|
|
1142
2220
|
src: data.src,
|
|
1143
2221
|
timeInSeconds: data.timeInSeconds,
|
|
1144
2222
|
logLevel: data.logLevel,
|
|
@@ -1146,8 +2224,38 @@ if (window.remotion_broadcastChannel && window.remotion_isMainTab) {
|
|
|
1146
2224
|
playbackRate: data.playbackRate,
|
|
1147
2225
|
includeAudio: data.includeAudio,
|
|
1148
2226
|
includeVideo: data.includeVideo,
|
|
1149
|
-
loop: data.loop
|
|
2227
|
+
loop: data.loop,
|
|
2228
|
+
audioStreamIndex: data.audioStreamIndex,
|
|
2229
|
+
trimAfter: data.trimAfter,
|
|
2230
|
+
trimBefore: data.trimBefore,
|
|
2231
|
+
fps: data.fps
|
|
1150
2232
|
});
|
|
2233
|
+
if (result.type === "cannot-decode") {
|
|
2234
|
+
const cannotDecodeResponse = {
|
|
2235
|
+
type: "response-cannot-decode",
|
|
2236
|
+
id: data.id,
|
|
2237
|
+
durationInSeconds: result.durationInSeconds
|
|
2238
|
+
};
|
|
2239
|
+
window.remotion_broadcastChannel.postMessage(cannotDecodeResponse);
|
|
2240
|
+
return;
|
|
2241
|
+
}
|
|
2242
|
+
if (result.type === "network-error") {
|
|
2243
|
+
const networkErrorResponse = {
|
|
2244
|
+
type: "response-network-error",
|
|
2245
|
+
id: data.id
|
|
2246
|
+
};
|
|
2247
|
+
window.remotion_broadcastChannel.postMessage(networkErrorResponse);
|
|
2248
|
+
return;
|
|
2249
|
+
}
|
|
2250
|
+
if (result.type === "unknown-container-format") {
|
|
2251
|
+
const unknownContainerFormatResponse = {
|
|
2252
|
+
type: "response-unknown-container-format",
|
|
2253
|
+
id: data.id
|
|
2254
|
+
};
|
|
2255
|
+
window.remotion_broadcastChannel.postMessage(unknownContainerFormatResponse);
|
|
2256
|
+
return;
|
|
2257
|
+
}
|
|
2258
|
+
const { frame, audio, durationInSeconds } = result;
|
|
1151
2259
|
const videoFrame = frame;
|
|
1152
2260
|
const imageBitmap = videoFrame ? await createImageBitmap(videoFrame) : null;
|
|
1153
2261
|
if (videoFrame) {
|
|
@@ -1184,7 +2292,11 @@ var extractFrameViaBroadcastChannel = ({
|
|
|
1184
2292
|
includeAudio,
|
|
1185
2293
|
includeVideo,
|
|
1186
2294
|
isClientSideRendering,
|
|
1187
|
-
loop
|
|
2295
|
+
loop,
|
|
2296
|
+
audioStreamIndex,
|
|
2297
|
+
trimAfter,
|
|
2298
|
+
trimBefore,
|
|
2299
|
+
fps
|
|
1188
2300
|
}) => {
|
|
1189
2301
|
if (isClientSideRendering || window.remotion_isMainTab) {
|
|
1190
2302
|
return extractFrameAndAudio({
|
|
@@ -1195,7 +2307,11 @@ var extractFrameViaBroadcastChannel = ({
|
|
|
1195
2307
|
playbackRate,
|
|
1196
2308
|
includeAudio,
|
|
1197
2309
|
includeVideo,
|
|
1198
|
-
loop
|
|
2310
|
+
loop,
|
|
2311
|
+
audioStreamIndex,
|
|
2312
|
+
trimAfter,
|
|
2313
|
+
trimBefore,
|
|
2314
|
+
fps
|
|
1199
2315
|
});
|
|
1200
2316
|
}
|
|
1201
2317
|
const requestId = crypto.randomUUID();
|
|
@@ -1205,17 +2321,43 @@ var extractFrameViaBroadcastChannel = ({
|
|
|
1205
2321
|
if (!data) {
|
|
1206
2322
|
return;
|
|
1207
2323
|
}
|
|
1208
|
-
if (data.
|
|
2324
|
+
if (data.id !== requestId) {
|
|
2325
|
+
return;
|
|
2326
|
+
}
|
|
2327
|
+
if (data.type === "response-success") {
|
|
1209
2328
|
resolve({
|
|
2329
|
+
type: "success",
|
|
1210
2330
|
frame: data.frame ? data.frame : null,
|
|
1211
2331
|
audio: data.audio ? data.audio : null,
|
|
1212
2332
|
durationInSeconds: data.durationInSeconds ? data.durationInSeconds : null
|
|
1213
2333
|
});
|
|
1214
2334
|
window.remotion_broadcastChannel.removeEventListener("message", onMessage);
|
|
1215
|
-
|
|
2335
|
+
return;
|
|
2336
|
+
}
|
|
2337
|
+
if (data.type === "response-error") {
|
|
1216
2338
|
reject(data.errorStack);
|
|
1217
2339
|
window.remotion_broadcastChannel.removeEventListener("message", onMessage);
|
|
2340
|
+
return;
|
|
2341
|
+
}
|
|
2342
|
+
if (data.type === "response-cannot-decode") {
|
|
2343
|
+
resolve({
|
|
2344
|
+
type: "cannot-decode",
|
|
2345
|
+
durationInSeconds: data.durationInSeconds
|
|
2346
|
+
});
|
|
2347
|
+
window.remotion_broadcastChannel.removeEventListener("message", onMessage);
|
|
2348
|
+
return;
|
|
2349
|
+
}
|
|
2350
|
+
if (data.type === "response-network-error") {
|
|
2351
|
+
resolve({ type: "network-error" });
|
|
2352
|
+
window.remotion_broadcastChannel.removeEventListener("message", onMessage);
|
|
2353
|
+
return;
|
|
2354
|
+
}
|
|
2355
|
+
if (data.type === "response-unknown-container-format") {
|
|
2356
|
+
resolve({ type: "unknown-container-format" });
|
|
2357
|
+
window.remotion_broadcastChannel.removeEventListener("message", onMessage);
|
|
2358
|
+
return;
|
|
1218
2359
|
}
|
|
2360
|
+
throw new Error(`Invalid message: ${JSON.stringify(data)}`);
|
|
1219
2361
|
};
|
|
1220
2362
|
window.remotion_broadcastChannel.addEventListener("message", onMessage);
|
|
1221
2363
|
});
|
|
@@ -1229,7 +2371,11 @@ var extractFrameViaBroadcastChannel = ({
|
|
|
1229
2371
|
playbackRate,
|
|
1230
2372
|
includeAudio,
|
|
1231
2373
|
includeVideo,
|
|
1232
|
-
loop
|
|
2374
|
+
loop,
|
|
2375
|
+
audioStreamIndex,
|
|
2376
|
+
trimAfter,
|
|
2377
|
+
trimBefore,
|
|
2378
|
+
fps
|
|
1233
2379
|
};
|
|
1234
2380
|
window.remotion_broadcastChannel.postMessage(request);
|
|
1235
2381
|
let timeoutId;
|
|
@@ -1247,6 +2393,7 @@ var extractFrameViaBroadcastChannel = ({
|
|
|
1247
2393
|
};
|
|
1248
2394
|
|
|
1249
2395
|
// src/audio/audio-for-rendering.tsx
|
|
2396
|
+
import { jsx as jsx2 } from "react/jsx-runtime";
|
|
1250
2397
|
var AudioForRendering = ({
|
|
1251
2398
|
volume: volumeProp,
|
|
1252
2399
|
playbackRate,
|
|
@@ -1256,15 +2403,23 @@ var AudioForRendering = ({
|
|
|
1256
2403
|
delayRenderRetries,
|
|
1257
2404
|
delayRenderTimeoutInMilliseconds,
|
|
1258
2405
|
logLevel = window.remotion_logLevel,
|
|
1259
|
-
loop
|
|
2406
|
+
loop,
|
|
2407
|
+
fallbackHtml5AudioProps,
|
|
2408
|
+
audioStreamIndex,
|
|
2409
|
+
showInTimeline,
|
|
2410
|
+
style,
|
|
2411
|
+
name,
|
|
2412
|
+
disallowFallbackToHtml5Audio,
|
|
2413
|
+
toneFrequency,
|
|
2414
|
+
trimAfter,
|
|
2415
|
+
trimBefore
|
|
1260
2416
|
}) => {
|
|
1261
|
-
const frame =
|
|
1262
|
-
const absoluteFrame =
|
|
1263
|
-
const videoConfig =
|
|
1264
|
-
const { registerRenderAsset, unregisterRenderAsset } =
|
|
1265
|
-
const startsAt =
|
|
1266
|
-
const environment =
|
|
1267
|
-
const [id] = useState3(() => `${Math.random()}`.replace("0.", ""));
|
|
2417
|
+
const frame = useCurrentFrame2();
|
|
2418
|
+
const absoluteFrame = Internals10.useTimelinePosition();
|
|
2419
|
+
const videoConfig = Internals10.useUnsafeVideoConfig();
|
|
2420
|
+
const { registerRenderAsset, unregisterRenderAsset } = useContext2(Internals10.RenderAssetManager);
|
|
2421
|
+
const startsAt = Internals10.useMediaStartsAt();
|
|
2422
|
+
const environment = useRemotionEnvironment();
|
|
1268
2423
|
if (!videoConfig) {
|
|
1269
2424
|
throw new Error("No video config found");
|
|
1270
2425
|
}
|
|
@@ -1273,10 +2428,20 @@ var AudioForRendering = ({
|
|
|
1273
2428
|
}
|
|
1274
2429
|
const { fps } = videoConfig;
|
|
1275
2430
|
const { delayRender, continueRender } = useDelayRender();
|
|
2431
|
+
const [replaceWithHtml5Audio, setReplaceWithHtml5Audio] = useState2(false);
|
|
2432
|
+
const sequenceContext = useContext2(Internals10.SequenceContext);
|
|
2433
|
+
const id = useMemo2(() => `media-video-${random(src)}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [
|
|
2434
|
+
src,
|
|
2435
|
+
sequenceContext?.cumulatedFrom,
|
|
2436
|
+
sequenceContext?.relativeFrom,
|
|
2437
|
+
sequenceContext?.durationInFrames
|
|
2438
|
+
]);
|
|
1276
2439
|
useLayoutEffect(() => {
|
|
1277
|
-
const
|
|
1278
|
-
const
|
|
1279
|
-
|
|
2440
|
+
const timestamp = frame / fps;
|
|
2441
|
+
const durationInSeconds = 1 / fps;
|
|
2442
|
+
if (replaceWithHtml5Audio) {
|
|
2443
|
+
return;
|
|
2444
|
+
}
|
|
1280
2445
|
const newHandle = delayRender(`Extracting audio for frame ${frame}`, {
|
|
1281
2446
|
retries: delayRenderRetries ?? undefined,
|
|
1282
2447
|
timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined
|
|
@@ -1295,12 +2460,41 @@ var AudioForRendering = ({
|
|
|
1295
2460
|
timeInSeconds: timestamp,
|
|
1296
2461
|
durationInSeconds,
|
|
1297
2462
|
playbackRate: playbackRate ?? 1,
|
|
1298
|
-
logLevel
|
|
2463
|
+
logLevel,
|
|
1299
2464
|
includeAudio: shouldRenderAudio,
|
|
1300
2465
|
includeVideo: false,
|
|
1301
2466
|
isClientSideRendering: environment.isClientSideRendering,
|
|
1302
|
-
loop: loop ?? false
|
|
1303
|
-
|
|
2467
|
+
loop: loop ?? false,
|
|
2468
|
+
audioStreamIndex: audioStreamIndex ?? 0,
|
|
2469
|
+
trimAfter,
|
|
2470
|
+
trimBefore,
|
|
2471
|
+
fps
|
|
2472
|
+
}).then((result) => {
|
|
2473
|
+
if (result.type === "unknown-container-format") {
|
|
2474
|
+
if (disallowFallbackToHtml5Audio) {
|
|
2475
|
+
cancelRender2(new Error(`Unknown container format ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
|
|
2476
|
+
}
|
|
2477
|
+
Internals10.Log.warn({ logLevel, tag: "@remotion/media" }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <Audio>`);
|
|
2478
|
+
setReplaceWithHtml5Audio(true);
|
|
2479
|
+
return;
|
|
2480
|
+
}
|
|
2481
|
+
if (result.type === "cannot-decode") {
|
|
2482
|
+
if (disallowFallbackToHtml5Audio) {
|
|
2483
|
+
cancelRender2(new Error(`Cannot decode ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
|
|
2484
|
+
}
|
|
2485
|
+
Internals10.Log.warn({ logLevel, tag: "@remotion/media" }, `Cannot decode ${src}, falling back to <Audio>`);
|
|
2486
|
+
setReplaceWithHtml5Audio(true);
|
|
2487
|
+
return;
|
|
2488
|
+
}
|
|
2489
|
+
if (result.type === "network-error") {
|
|
2490
|
+
if (disallowFallbackToHtml5Audio) {
|
|
2491
|
+
cancelRender2(new Error(`Cannot decode ${src}, and 'disallowFallbackToHtml5Audio' was set. Failing the render.`));
|
|
2492
|
+
}
|
|
2493
|
+
Internals10.Log.warn({ logLevel, tag: "@remotion/media" }, `Network error fetching ${src}, falling back to <Audio>`);
|
|
2494
|
+
setReplaceWithHtml5Audio(true);
|
|
2495
|
+
return;
|
|
2496
|
+
}
|
|
2497
|
+
const { audio, durationInSeconds: assetDurationInSeconds } = result;
|
|
1304
2498
|
const volumePropsFrame = frameForVolumeProp({
|
|
1305
2499
|
behavior: loopVolumeCurveBehavior ?? "repeat",
|
|
1306
2500
|
loop: loop ?? false,
|
|
@@ -1309,168 +2503,467 @@ var AudioForRendering = ({
|
|
|
1309
2503
|
frame,
|
|
1310
2504
|
startsAt
|
|
1311
2505
|
});
|
|
1312
|
-
const volume =
|
|
2506
|
+
const volume = Internals10.evaluateVolume({
|
|
1313
2507
|
volume: volumeProp,
|
|
1314
2508
|
frame: volumePropsFrame,
|
|
1315
2509
|
mediaVolume: 1
|
|
1316
2510
|
});
|
|
1317
|
-
|
|
1318
|
-
if (audio && volume > 0) {
|
|
1319
|
-
applyVolume(audio.data, volume);
|
|
1320
|
-
registerRenderAsset({
|
|
1321
|
-
type: "inline-audio",
|
|
1322
|
-
id,
|
|
1323
|
-
audio: Array.from(audio.data),
|
|
1324
|
-
|
|
1325
|
-
|
|
1326
|
-
|
|
1327
|
-
|
|
1328
|
-
|
|
1329
|
-
|
|
1330
|
-
|
|
1331
|
-
|
|
1332
|
-
|
|
1333
|
-
|
|
1334
|
-
|
|
2511
|
+
Internals10.warnAboutTooHighVolume(volume);
|
|
2512
|
+
if (audio && volume > 0) {
|
|
2513
|
+
applyVolume(audio.data, volume);
|
|
2514
|
+
registerRenderAsset({
|
|
2515
|
+
type: "inline-audio",
|
|
2516
|
+
id,
|
|
2517
|
+
audio: Array.from(audio.data),
|
|
2518
|
+
frame: absoluteFrame,
|
|
2519
|
+
timestamp: audio.timestamp,
|
|
2520
|
+
duration: audio.numberOfFrames / TARGET_SAMPLE_RATE * 1e6,
|
|
2521
|
+
toneFrequency: toneFrequency ?? 1
|
|
2522
|
+
});
|
|
2523
|
+
}
|
|
2524
|
+
continueRender(newHandle);
|
|
2525
|
+
}).catch((error) => {
|
|
2526
|
+
cancelRender2(error);
|
|
2527
|
+
});
|
|
2528
|
+
return () => {
|
|
2529
|
+
continueRender(newHandle);
|
|
2530
|
+
unregisterRenderAsset(id);
|
|
2531
|
+
};
|
|
2532
|
+
}, [
|
|
2533
|
+
absoluteFrame,
|
|
2534
|
+
continueRender,
|
|
2535
|
+
delayRender,
|
|
2536
|
+
delayRenderRetries,
|
|
2537
|
+
delayRenderTimeoutInMilliseconds,
|
|
2538
|
+
disallowFallbackToHtml5Audio,
|
|
2539
|
+
environment.isClientSideRendering,
|
|
2540
|
+
fps,
|
|
2541
|
+
frame,
|
|
2542
|
+
id,
|
|
2543
|
+
logLevel,
|
|
2544
|
+
loop,
|
|
2545
|
+
loopVolumeCurveBehavior,
|
|
2546
|
+
muted,
|
|
2547
|
+
playbackRate,
|
|
2548
|
+
registerRenderAsset,
|
|
2549
|
+
src,
|
|
2550
|
+
startsAt,
|
|
2551
|
+
unregisterRenderAsset,
|
|
2552
|
+
volumeProp,
|
|
2553
|
+
audioStreamIndex,
|
|
2554
|
+
toneFrequency,
|
|
2555
|
+
trimAfter,
|
|
2556
|
+
trimBefore,
|
|
2557
|
+
replaceWithHtml5Audio
|
|
2558
|
+
]);
|
|
2559
|
+
if (replaceWithHtml5Audio) {
|
|
2560
|
+
return /* @__PURE__ */ jsx2(Audio, {
|
|
2561
|
+
src,
|
|
2562
|
+
playbackRate,
|
|
2563
|
+
muted,
|
|
2564
|
+
loop,
|
|
2565
|
+
volume: volumeProp,
|
|
2566
|
+
delayRenderRetries,
|
|
2567
|
+
delayRenderTimeoutInMilliseconds,
|
|
2568
|
+
style,
|
|
2569
|
+
loopVolumeCurveBehavior,
|
|
2570
|
+
audioStreamIndex,
|
|
2571
|
+
useWebAudioApi: fallbackHtml5AudioProps?.useWebAudioApi,
|
|
2572
|
+
onError: fallbackHtml5AudioProps?.onError,
|
|
2573
|
+
toneFrequency,
|
|
2574
|
+
acceptableTimeShiftInSeconds: fallbackHtml5AudioProps?.acceptableTimeShiftInSeconds,
|
|
2575
|
+
name,
|
|
2576
|
+
showInTimeline
|
|
2577
|
+
});
|
|
2578
|
+
}
|
|
2579
|
+
return null;
|
|
2580
|
+
};
|
|
2581
|
+
|
|
2582
|
+
// src/audio/audio.tsx
|
|
2583
|
+
import { jsx as jsx3 } from "react/jsx-runtime";
|
|
2584
|
+
var { validateMediaProps } = Internals11;
|
|
2585
|
+
var Audio2 = (props) => {
|
|
2586
|
+
const { name, stack, showInTimeline, ...otherProps } = props;
|
|
2587
|
+
const environment = useRemotionEnvironment2();
|
|
2588
|
+
if (typeof props.src !== "string") {
|
|
2589
|
+
throw new TypeError(`The \`<Audio>\` tag requires a string for \`src\`, but got ${JSON.stringify(props.src)} instead.`);
|
|
2590
|
+
}
|
|
2591
|
+
validateMediaProps({ playbackRate: props.playbackRate, volume: props.volume }, "Audio");
|
|
2592
|
+
if (environment.isRendering) {
|
|
2593
|
+
return /* @__PURE__ */ jsx3(AudioForRendering, {
|
|
2594
|
+
...otherProps
|
|
2595
|
+
});
|
|
2596
|
+
}
|
|
2597
|
+
return /* @__PURE__ */ jsx3(AudioForPreview, {
|
|
2598
|
+
name,
|
|
2599
|
+
...otherProps,
|
|
2600
|
+
stack: stack ?? null
|
|
2601
|
+
});
|
|
2602
|
+
};
|
|
2603
|
+
Internals11.addSequenceStackTraces(Audio2);
|
|
2604
|
+
|
|
2605
|
+
// src/video/video.tsx
|
|
2606
|
+
import { Internals as Internals14, useRemotionEnvironment as useRemotionEnvironment4 } from "remotion";
|
|
2607
|
+
|
|
2608
|
+
// src/video/video-for-preview.tsx
|
|
2609
|
+
import { useContext as useContext3, useEffect as useEffect2, useMemo as useMemo3, useRef as useRef2, useState as useState3 } from "react";
|
|
2610
|
+
import { Internals as Internals12, useBufferState as useBufferState2, useCurrentFrame as useCurrentFrame3, Video } from "remotion";
|
|
2611
|
+
import { jsx as jsx4 } from "react/jsx-runtime";
|
|
2612
|
+
var {
|
|
2613
|
+
useUnsafeVideoConfig: useUnsafeVideoConfig2,
|
|
2614
|
+
Timeline: Timeline2,
|
|
2615
|
+
SharedAudioContext: SharedAudioContext2,
|
|
2616
|
+
useMediaMutedState: useMediaMutedState2,
|
|
2617
|
+
useMediaVolumeState: useMediaVolumeState2,
|
|
2618
|
+
useFrameForVolumeProp: useFrameForVolumeProp2,
|
|
2619
|
+
evaluateVolume: evaluateVolume2,
|
|
2620
|
+
warnAboutTooHighVolume: warnAboutTooHighVolume2,
|
|
2621
|
+
usePreload: usePreload2,
|
|
2622
|
+
useMediaInTimeline: useMediaInTimeline2,
|
|
2623
|
+
SequenceContext: SequenceContext2
|
|
2624
|
+
} = Internals12;
|
|
2625
|
+
var NewVideoForPreview = ({
|
|
2626
|
+
src,
|
|
2627
|
+
style,
|
|
2628
|
+
playbackRate,
|
|
2629
|
+
logLevel,
|
|
2630
|
+
className,
|
|
2631
|
+
muted,
|
|
2632
|
+
volume,
|
|
2633
|
+
loopVolumeCurveBehavior,
|
|
2634
|
+
onVideoFrame,
|
|
2635
|
+
showInTimeline,
|
|
2636
|
+
loop,
|
|
2637
|
+
name,
|
|
2638
|
+
trimAfter,
|
|
2639
|
+
trimBefore,
|
|
2640
|
+
stack,
|
|
2641
|
+
disallowFallbackToOffthreadVideo,
|
|
2642
|
+
fallbackOffthreadVideoProps,
|
|
2643
|
+
audioStreamIndex
|
|
2644
|
+
}) => {
|
|
2645
|
+
const canvasRef = useRef2(null);
|
|
2646
|
+
const videoConfig = useUnsafeVideoConfig2();
|
|
2647
|
+
const frame = useCurrentFrame3();
|
|
2648
|
+
const mediaPlayerRef = useRef2(null);
|
|
2649
|
+
const [mediaPlayerReady, setMediaPlayerReady] = useState3(false);
|
|
2650
|
+
const [shouldFallbackToNativeVideo, setShouldFallbackToNativeVideo] = useState3(false);
|
|
2651
|
+
const [playing] = Timeline2.usePlayingState();
|
|
2652
|
+
const timelineContext = useContext3(Timeline2.TimelineContext);
|
|
2653
|
+
const globalPlaybackRate = timelineContext.playbackRate;
|
|
2654
|
+
const sharedAudioContext = useContext3(SharedAudioContext2);
|
|
2655
|
+
const buffer = useBufferState2();
|
|
2656
|
+
const [mediaMuted] = useMediaMutedState2();
|
|
2657
|
+
const [mediaVolume] = useMediaVolumeState2();
|
|
2658
|
+
const volumePropFrame = useFrameForVolumeProp2(loopVolumeCurveBehavior);
|
|
2659
|
+
const userPreferredVolume = evaluateVolume2({
|
|
2660
|
+
frame: volumePropFrame,
|
|
2661
|
+
volume,
|
|
2662
|
+
mediaVolume
|
|
2663
|
+
});
|
|
2664
|
+
warnAboutTooHighVolume2(userPreferredVolume);
|
|
2665
|
+
const [timelineId] = useState3(() => String(Math.random()));
|
|
2666
|
+
const parentSequence = useContext3(SequenceContext2);
|
|
2667
|
+
useMediaInTimeline2({
|
|
2668
|
+
volume,
|
|
2669
|
+
mediaVolume,
|
|
2670
|
+
mediaType: "video",
|
|
2671
|
+
src,
|
|
2672
|
+
playbackRate,
|
|
2673
|
+
displayName: name ?? null,
|
|
2674
|
+
id: timelineId,
|
|
2675
|
+
stack,
|
|
2676
|
+
showInTimeline,
|
|
2677
|
+
premountDisplay: parentSequence?.premountDisplay ?? null,
|
|
2678
|
+
postmountDisplay: parentSequence?.postmountDisplay ?? null
|
|
2679
|
+
});
|
|
2680
|
+
if (!videoConfig) {
|
|
2681
|
+
throw new Error("No video config found");
|
|
2682
|
+
}
|
|
2683
|
+
if (!src) {
|
|
2684
|
+
throw new TypeError("No `src` was passed to <NewVideoForPreview>.");
|
|
2685
|
+
}
|
|
2686
|
+
const currentTime = frame / videoConfig.fps;
|
|
2687
|
+
const currentTimeRef = useRef2(currentTime);
|
|
2688
|
+
currentTimeRef.current = currentTime;
|
|
2689
|
+
const preloadedSrc = usePreload2(src);
|
|
2690
|
+
useEffect2(() => {
|
|
2691
|
+
if (!canvasRef.current)
|
|
2692
|
+
return;
|
|
2693
|
+
if (!sharedAudioContext)
|
|
2694
|
+
return;
|
|
2695
|
+
if (!sharedAudioContext.audioContext)
|
|
2696
|
+
return;
|
|
2697
|
+
try {
|
|
2698
|
+
const player = new MediaPlayer({
|
|
2699
|
+
canvas: canvasRef.current,
|
|
2700
|
+
src: preloadedSrc,
|
|
2701
|
+
logLevel,
|
|
2702
|
+
sharedAudioContext: sharedAudioContext.audioContext,
|
|
2703
|
+
loop,
|
|
2704
|
+
trimAfterSeconds: trimAfter ? trimAfter / videoConfig.fps : undefined,
|
|
2705
|
+
trimBeforeSeconds: trimBefore ? trimBefore / videoConfig.fps : undefined,
|
|
2706
|
+
playbackRate,
|
|
2707
|
+
audioStreamIndex
|
|
2708
|
+
});
|
|
2709
|
+
mediaPlayerRef.current = player;
|
|
2710
|
+
player.initialize(currentTimeRef.current).then((result) => {
|
|
2711
|
+
if (result.type === "unknown-container-format") {
|
|
2712
|
+
if (disallowFallbackToOffthreadVideo) {
|
|
2713
|
+
throw new Error(`Unknown container format ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
|
|
2714
|
+
}
|
|
2715
|
+
Internals12.Log.warn({ logLevel, tag: "@remotion/media" }, `Unknown container format for ${preloadedSrc} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <OffthreadVideo>`);
|
|
2716
|
+
setShouldFallbackToNativeVideo(true);
|
|
2717
|
+
return;
|
|
2718
|
+
}
|
|
2719
|
+
if (result.type === "network-error") {
|
|
2720
|
+
if (disallowFallbackToOffthreadVideo) {
|
|
2721
|
+
throw new Error(`Network error fetching ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
|
|
2722
|
+
}
|
|
2723
|
+
Internals12.Log.warn({ logLevel, tag: "@remotion/media" }, `Network error fetching ${preloadedSrc}, falling back to <OffthreadVideo>`);
|
|
2724
|
+
setShouldFallbackToNativeVideo(true);
|
|
2725
|
+
return;
|
|
2726
|
+
}
|
|
2727
|
+
if (result.type === "cannot-decode") {
|
|
2728
|
+
if (disallowFallbackToOffthreadVideo) {
|
|
2729
|
+
throw new Error(`Cannot decode ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
|
|
2730
|
+
}
|
|
2731
|
+
Internals12.Log.warn({ logLevel, tag: "@remotion/media" }, `Cannot decode ${preloadedSrc}, falling back to <OffthreadVideo>`);
|
|
2732
|
+
setShouldFallbackToNativeVideo(true);
|
|
2733
|
+
return;
|
|
2734
|
+
}
|
|
2735
|
+
if (result.type === "no-tracks") {
|
|
2736
|
+
if (disallowFallbackToOffthreadVideo) {
|
|
2737
|
+
throw new Error(`No video or audio tracks found for ${preloadedSrc}, and 'disallowFallbackToOffthreadVideo' was set.`);
|
|
2738
|
+
}
|
|
2739
|
+
Internals12.Log.warn({ logLevel, tag: "@remotion/media" }, `No video or audio tracks found for ${preloadedSrc}, falling back to <OffthreadVideo>`);
|
|
2740
|
+
setShouldFallbackToNativeVideo(true);
|
|
2741
|
+
return;
|
|
2742
|
+
}
|
|
2743
|
+
if (result.type === "success") {
|
|
2744
|
+
setMediaPlayerReady(true);
|
|
2745
|
+
}
|
|
2746
|
+
}).catch((error) => {
|
|
2747
|
+
Internals12.Log.error({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] Failed to initialize MediaPlayer", error);
|
|
2748
|
+
setShouldFallbackToNativeVideo(true);
|
|
2749
|
+
});
|
|
2750
|
+
} catch (error) {
|
|
2751
|
+
Internals12.Log.error({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] MediaPlayer initialization failed", error);
|
|
2752
|
+
setShouldFallbackToNativeVideo(true);
|
|
2753
|
+
}
|
|
1335
2754
|
return () => {
|
|
1336
|
-
|
|
1337
|
-
|
|
2755
|
+
if (mediaPlayerRef.current) {
|
|
2756
|
+
Internals12.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewVideoForPreview] Disposing MediaPlayer`);
|
|
2757
|
+
mediaPlayerRef.current.dispose();
|
|
2758
|
+
mediaPlayerRef.current = null;
|
|
2759
|
+
}
|
|
2760
|
+
setMediaPlayerReady(false);
|
|
2761
|
+
setShouldFallbackToNativeVideo(false);
|
|
1338
2762
|
};
|
|
1339
2763
|
}, [
|
|
1340
|
-
|
|
1341
|
-
continueRender,
|
|
1342
|
-
delayRender,
|
|
1343
|
-
delayRenderRetries,
|
|
1344
|
-
delayRenderTimeoutInMilliseconds,
|
|
1345
|
-
environment.isClientSideRendering,
|
|
1346
|
-
fps,
|
|
1347
|
-
frame,
|
|
1348
|
-
id,
|
|
2764
|
+
preloadedSrc,
|
|
1349
2765
|
logLevel,
|
|
2766
|
+
sharedAudioContext,
|
|
1350
2767
|
loop,
|
|
1351
|
-
loopVolumeCurveBehavior,
|
|
1352
|
-
muted,
|
|
1353
|
-
playbackRate,
|
|
1354
|
-
registerRenderAsset,
|
|
1355
|
-
src,
|
|
1356
|
-
startsAt,
|
|
1357
|
-
unregisterRenderAsset,
|
|
1358
|
-
volumeProp
|
|
1359
|
-
]);
|
|
1360
|
-
return null;
|
|
1361
|
-
};
|
|
1362
|
-
|
|
1363
|
-
// src/audio/audio.tsx
|
|
1364
|
-
import { jsx as jsx2 } from "react/jsx-runtime";
|
|
1365
|
-
var {
|
|
1366
|
-
validateMediaTrimProps,
|
|
1367
|
-
resolveTrimProps,
|
|
1368
|
-
validateMediaProps,
|
|
1369
|
-
AudioForPreview
|
|
1370
|
-
} = Internals7;
|
|
1371
|
-
var onRemotionError = (_e) => {};
|
|
1372
|
-
var Audio = (props) => {
|
|
1373
|
-
const audioContext = useContext5(SharedAudioContext);
|
|
1374
|
-
const {
|
|
1375
|
-
trimBefore,
|
|
1376
2768
|
trimAfter,
|
|
1377
|
-
name,
|
|
1378
|
-
pauseWhenBuffering,
|
|
1379
|
-
stack,
|
|
1380
|
-
showInTimeline,
|
|
1381
|
-
loop,
|
|
1382
|
-
...otherProps
|
|
1383
|
-
} = props;
|
|
1384
|
-
const environment = useRemotionEnvironment3();
|
|
1385
|
-
const onDuration = useCallback2(() => {
|
|
1386
|
-
return;
|
|
1387
|
-
}, []);
|
|
1388
|
-
if (typeof props.src !== "string") {
|
|
1389
|
-
throw new TypeError(`The \`<Audio>\` tag requires a string for \`src\`, but got ${JSON.stringify(props.src)} instead.`);
|
|
1390
|
-
}
|
|
1391
|
-
validateMediaTrimProps({
|
|
1392
|
-
startFrom: undefined,
|
|
1393
|
-
endAt: undefined,
|
|
1394
|
-
trimBefore,
|
|
1395
|
-
trimAfter
|
|
1396
|
-
});
|
|
1397
|
-
const { trimBeforeValue, trimAfterValue } = resolveTrimProps({
|
|
1398
|
-
startFrom: undefined,
|
|
1399
|
-
endAt: undefined,
|
|
1400
2769
|
trimBefore,
|
|
1401
|
-
|
|
1402
|
-
|
|
1403
|
-
|
|
1404
|
-
|
|
1405
|
-
|
|
1406
|
-
|
|
1407
|
-
|
|
1408
|
-
|
|
1409
|
-
|
|
1410
|
-
|
|
1411
|
-
|
|
2770
|
+
videoConfig.fps,
|
|
2771
|
+
playbackRate,
|
|
2772
|
+
disallowFallbackToOffthreadVideo,
|
|
2773
|
+
audioStreamIndex
|
|
2774
|
+
]);
|
|
2775
|
+
const classNameValue = useMemo3(() => {
|
|
2776
|
+
return [Internals12.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals12.truthy).join(" ");
|
|
2777
|
+
}, [className]);
|
|
2778
|
+
useEffect2(() => {
|
|
2779
|
+
const mediaPlayer = mediaPlayerRef.current;
|
|
2780
|
+
if (!mediaPlayer)
|
|
2781
|
+
return;
|
|
2782
|
+
if (playing) {
|
|
2783
|
+
mediaPlayer.play().catch((error) => {
|
|
2784
|
+
Internals12.Log.error({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] Failed to play", error);
|
|
2785
|
+
});
|
|
1412
2786
|
} else {
|
|
1413
|
-
|
|
1414
|
-
|
|
1415
|
-
|
|
1416
|
-
|
|
1417
|
-
|
|
1418
|
-
|
|
1419
|
-
|
|
1420
|
-
|
|
1421
|
-
|
|
1422
|
-
|
|
1423
|
-
|
|
1424
|
-
|
|
1425
|
-
|
|
1426
|
-
|
|
1427
|
-
|
|
2787
|
+
mediaPlayer.pause();
|
|
2788
|
+
}
|
|
2789
|
+
}, [playing, logLevel, mediaPlayerReady]);
|
|
2790
|
+
useEffect2(() => {
|
|
2791
|
+
const mediaPlayer = mediaPlayerRef.current;
|
|
2792
|
+
if (!mediaPlayer || !mediaPlayerReady)
|
|
2793
|
+
return;
|
|
2794
|
+
mediaPlayer.seekTo(currentTime);
|
|
2795
|
+
Internals12.Log.trace({ logLevel, tag: "@remotion/media" }, `[NewVideoForPreview] Updating target time to ${currentTime.toFixed(3)}s`);
|
|
2796
|
+
}, [currentTime, logLevel, mediaPlayerReady]);
|
|
2797
|
+
useEffect2(() => {
|
|
2798
|
+
const mediaPlayer = mediaPlayerRef.current;
|
|
2799
|
+
if (!mediaPlayer || !mediaPlayerReady)
|
|
2800
|
+
return;
|
|
2801
|
+
let currentBlock = null;
|
|
2802
|
+
const unsubscribe = mediaPlayer.onBufferingChange((newBufferingState) => {
|
|
2803
|
+
if (newBufferingState && !currentBlock) {
|
|
2804
|
+
currentBlock = buffer.delayPlayback();
|
|
2805
|
+
Internals12.Log.trace({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] MediaPlayer buffering - blocking Remotion playback");
|
|
2806
|
+
} else if (!newBufferingState && currentBlock) {
|
|
2807
|
+
currentBlock.unblock();
|
|
2808
|
+
currentBlock = null;
|
|
2809
|
+
Internals12.Log.trace({ logLevel, tag: "@remotion/media" }, "[NewVideoForPreview] MediaPlayer unbuffering - unblocking Remotion playback");
|
|
2810
|
+
}
|
|
1428
2811
|
});
|
|
1429
|
-
|
|
1430
|
-
|
|
1431
|
-
|
|
1432
|
-
|
|
1433
|
-
|
|
2812
|
+
return () => {
|
|
2813
|
+
unsubscribe();
|
|
2814
|
+
if (currentBlock) {
|
|
2815
|
+
currentBlock.unblock();
|
|
2816
|
+
currentBlock = null;
|
|
2817
|
+
}
|
|
2818
|
+
};
|
|
2819
|
+
}, [mediaPlayerReady, buffer, logLevel]);
|
|
2820
|
+
const effectiveMuted = muted || mediaMuted || userPreferredVolume <= 0;
|
|
2821
|
+
useEffect2(() => {
|
|
2822
|
+
const mediaPlayer = mediaPlayerRef.current;
|
|
2823
|
+
if (!mediaPlayer || !mediaPlayerReady)
|
|
2824
|
+
return;
|
|
2825
|
+
mediaPlayer.setMuted(effectiveMuted);
|
|
2826
|
+
}, [effectiveMuted, mediaPlayerReady]);
|
|
2827
|
+
useEffect2(() => {
|
|
2828
|
+
const mediaPlayer = mediaPlayerRef.current;
|
|
2829
|
+
if (!mediaPlayer || !mediaPlayerReady) {
|
|
2830
|
+
return;
|
|
2831
|
+
}
|
|
2832
|
+
mediaPlayer.setVolume(userPreferredVolume);
|
|
2833
|
+
}, [userPreferredVolume, mediaPlayerReady, logLevel]);
|
|
2834
|
+
const effectivePlaybackRate = useMemo3(() => playbackRate * globalPlaybackRate, [playbackRate, globalPlaybackRate]);
|
|
2835
|
+
useEffect2(() => {
|
|
2836
|
+
const mediaPlayer = mediaPlayerRef.current;
|
|
2837
|
+
if (!mediaPlayer || !mediaPlayerReady) {
|
|
2838
|
+
return;
|
|
2839
|
+
}
|
|
2840
|
+
mediaPlayer.setPlaybackRate(effectivePlaybackRate);
|
|
2841
|
+
}, [effectivePlaybackRate, mediaPlayerReady, logLevel]);
|
|
2842
|
+
useEffect2(() => {
|
|
2843
|
+
const mediaPlayer = mediaPlayerRef.current;
|
|
2844
|
+
if (!mediaPlayer || !mediaPlayerReady) {
|
|
2845
|
+
return;
|
|
2846
|
+
}
|
|
2847
|
+
mediaPlayer.setLoop(loop);
|
|
2848
|
+
}, [loop, mediaPlayerReady]);
|
|
2849
|
+
useEffect2(() => {
|
|
2850
|
+
const mediaPlayer = mediaPlayerRef.current;
|
|
2851
|
+
if (!mediaPlayer || !mediaPlayerReady || !onVideoFrame) {
|
|
2852
|
+
return;
|
|
2853
|
+
}
|
|
2854
|
+
const unsubscribe = mediaPlayer.onVideoFrame(onVideoFrame);
|
|
2855
|
+
return () => {
|
|
2856
|
+
unsubscribe();
|
|
2857
|
+
};
|
|
2858
|
+
}, [onVideoFrame, mediaPlayerReady]);
|
|
2859
|
+
if (shouldFallbackToNativeVideo && !disallowFallbackToOffthreadVideo) {
|
|
2860
|
+
return /* @__PURE__ */ jsx4(Video, {
|
|
2861
|
+
src,
|
|
2862
|
+
style,
|
|
2863
|
+
className,
|
|
2864
|
+
muted,
|
|
2865
|
+
volume,
|
|
2866
|
+
trimAfter,
|
|
2867
|
+
trimBefore,
|
|
2868
|
+
playbackRate,
|
|
2869
|
+
loopVolumeCurveBehavior,
|
|
2870
|
+
name,
|
|
2871
|
+
loop,
|
|
2872
|
+
showInTimeline,
|
|
2873
|
+
stack: stack ?? undefined,
|
|
2874
|
+
...fallbackOffthreadVideoProps
|
|
1434
2875
|
});
|
|
1435
2876
|
}
|
|
1436
|
-
|
|
1437
|
-
|
|
1438
|
-
|
|
1439
|
-
|
|
1440
|
-
|
|
1441
|
-
|
|
1442
|
-
|
|
1443
|
-
|
|
1444
|
-
|
|
1445
|
-
|
|
1446
|
-
|
|
1447
|
-
|
|
1448
|
-
|
|
1449
|
-
|
|
1450
|
-
|
|
2877
|
+
return /* @__PURE__ */ jsx4("canvas", {
|
|
2878
|
+
ref: canvasRef,
|
|
2879
|
+
width: videoConfig.width,
|
|
2880
|
+
height: videoConfig.height,
|
|
2881
|
+
style,
|
|
2882
|
+
className: classNameValue
|
|
2883
|
+
});
|
|
2884
|
+
};
|
|
2885
|
+
var VideoForPreview = ({
|
|
2886
|
+
className,
|
|
2887
|
+
loop,
|
|
2888
|
+
src,
|
|
2889
|
+
logLevel,
|
|
2890
|
+
muted,
|
|
2891
|
+
name,
|
|
2892
|
+
volume,
|
|
2893
|
+
loopVolumeCurveBehavior,
|
|
2894
|
+
onVideoFrame,
|
|
2895
|
+
playbackRate,
|
|
2896
|
+
style,
|
|
2897
|
+
showInTimeline,
|
|
2898
|
+
trimAfter,
|
|
2899
|
+
trimBefore,
|
|
2900
|
+
stack,
|
|
2901
|
+
disallowFallbackToOffthreadVideo,
|
|
2902
|
+
fallbackOffthreadVideoProps,
|
|
2903
|
+
audioStreamIndex
|
|
2904
|
+
}) => {
|
|
2905
|
+
const preloadedSrc = usePreload2(src);
|
|
2906
|
+
return /* @__PURE__ */ jsx4(NewVideoForPreview, {
|
|
2907
|
+
className,
|
|
2908
|
+
logLevel,
|
|
2909
|
+
muted,
|
|
2910
|
+
onVideoFrame,
|
|
2911
|
+
playbackRate,
|
|
2912
|
+
src: preloadedSrc,
|
|
2913
|
+
style,
|
|
2914
|
+
volume,
|
|
2915
|
+
name,
|
|
2916
|
+
trimAfter,
|
|
2917
|
+
trimBefore,
|
|
2918
|
+
loop,
|
|
2919
|
+
loopVolumeCurveBehavior,
|
|
2920
|
+
showInTimeline,
|
|
2921
|
+
stack,
|
|
2922
|
+
disallowFallbackToOffthreadVideo,
|
|
2923
|
+
fallbackOffthreadVideoProps,
|
|
2924
|
+
audioStreamIndex
|
|
1451
2925
|
});
|
|
1452
2926
|
};
|
|
1453
|
-
// src/video/video.tsx
|
|
1454
|
-
import { useCallback as useCallback3 } from "react";
|
|
1455
|
-
import { Internals as Internals9, Sequence as Sequence2, useRemotionEnvironment as useRemotionEnvironment5 } from "remotion";
|
|
1456
2927
|
|
|
1457
2928
|
// src/video/video-for-rendering.tsx
|
|
1458
2929
|
import {
|
|
1459
|
-
useContext as
|
|
2930
|
+
useContext as useContext4,
|
|
1460
2931
|
useLayoutEffect as useLayoutEffect2,
|
|
1461
|
-
useMemo as
|
|
1462
|
-
useRef as
|
|
2932
|
+
useMemo as useMemo4,
|
|
2933
|
+
useRef as useRef3,
|
|
1463
2934
|
useState as useState4
|
|
1464
2935
|
} from "react";
|
|
1465
2936
|
import {
|
|
1466
|
-
cancelRender as
|
|
1467
|
-
Internals as
|
|
1468
|
-
|
|
2937
|
+
cancelRender as cancelRender3,
|
|
2938
|
+
Internals as Internals13,
|
|
2939
|
+
Loop,
|
|
2940
|
+
random as random2,
|
|
2941
|
+
useCurrentFrame as useCurrentFrame4,
|
|
1469
2942
|
useDelayRender as useDelayRender2,
|
|
1470
|
-
useRemotionEnvironment as
|
|
2943
|
+
useRemotionEnvironment as useRemotionEnvironment3,
|
|
1471
2944
|
useVideoConfig
|
|
1472
2945
|
} from "remotion";
|
|
1473
|
-
|
|
2946
|
+
|
|
2947
|
+
// ../core/src/calculate-loop.ts
|
|
2948
|
+
var calculateLoopDuration = ({
|
|
2949
|
+
trimAfter,
|
|
2950
|
+
mediaDurationInFrames,
|
|
2951
|
+
playbackRate,
|
|
2952
|
+
trimBefore
|
|
2953
|
+
}) => {
|
|
2954
|
+
let duration = mediaDurationInFrames;
|
|
2955
|
+
if (typeof trimAfter !== "undefined") {
|
|
2956
|
+
duration = trimAfter;
|
|
2957
|
+
}
|
|
2958
|
+
if (typeof trimBefore !== "undefined") {
|
|
2959
|
+
duration -= trimBefore;
|
|
2960
|
+
}
|
|
2961
|
+
const actualDuration = duration / playbackRate;
|
|
2962
|
+
return Math.floor(actualDuration);
|
|
2963
|
+
};
|
|
2964
|
+
|
|
2965
|
+
// src/video/video-for-rendering.tsx
|
|
2966
|
+
import { jsx as jsx5 } from "react/jsx-runtime";
|
|
1474
2967
|
var VideoForRendering = ({
|
|
1475
2968
|
volume: volumeProp,
|
|
1476
2969
|
playbackRate,
|
|
@@ -1480,31 +2973,48 @@ var VideoForRendering = ({
|
|
|
1480
2973
|
delayRenderRetries,
|
|
1481
2974
|
delayRenderTimeoutInMilliseconds,
|
|
1482
2975
|
onVideoFrame,
|
|
1483
|
-
logLevel
|
|
2976
|
+
logLevel,
|
|
1484
2977
|
loop,
|
|
1485
2978
|
style,
|
|
1486
|
-
className
|
|
2979
|
+
className,
|
|
2980
|
+
fallbackOffthreadVideoProps,
|
|
2981
|
+
audioStreamIndex,
|
|
2982
|
+
name,
|
|
2983
|
+
disallowFallbackToOffthreadVideo,
|
|
2984
|
+
stack,
|
|
2985
|
+
toneFrequency,
|
|
2986
|
+
trimAfterValue,
|
|
2987
|
+
trimBeforeValue
|
|
1487
2988
|
}) => {
|
|
1488
2989
|
if (!src) {
|
|
1489
2990
|
throw new TypeError("No `src` was passed to <Video>.");
|
|
1490
2991
|
}
|
|
1491
|
-
const frame =
|
|
1492
|
-
const absoluteFrame =
|
|
2992
|
+
const frame = useCurrentFrame4();
|
|
2993
|
+
const absoluteFrame = Internals13.useTimelinePosition();
|
|
1493
2994
|
const { fps } = useVideoConfig();
|
|
1494
|
-
const { registerRenderAsset, unregisterRenderAsset } =
|
|
1495
|
-
const startsAt =
|
|
1496
|
-
const
|
|
1497
|
-
const
|
|
2995
|
+
const { registerRenderAsset, unregisterRenderAsset } = useContext4(Internals13.RenderAssetManager);
|
|
2996
|
+
const startsAt = Internals13.useMediaStartsAt();
|
|
2997
|
+
const sequenceContext = useContext4(Internals13.SequenceContext);
|
|
2998
|
+
const id = useMemo4(() => `media-video-${random2(src)}-${sequenceContext?.cumulatedFrom}-${sequenceContext?.relativeFrom}-${sequenceContext?.durationInFrames}`, [
|
|
2999
|
+
src,
|
|
3000
|
+
sequenceContext?.cumulatedFrom,
|
|
3001
|
+
sequenceContext?.relativeFrom,
|
|
3002
|
+
sequenceContext?.durationInFrames
|
|
3003
|
+
]);
|
|
3004
|
+
const environment = useRemotionEnvironment3();
|
|
1498
3005
|
const { delayRender, continueRender } = useDelayRender2();
|
|
1499
|
-
const canvasRef =
|
|
3006
|
+
const canvasRef = useRef3(null);
|
|
3007
|
+
const [replaceWithOffthreadVideo, setReplaceWithOffthreadVideo] = useState4(false);
|
|
1500
3008
|
useLayoutEffect2(() => {
|
|
1501
3009
|
if (!canvasRef.current) {
|
|
1502
3010
|
return;
|
|
1503
3011
|
}
|
|
1504
|
-
|
|
1505
|
-
|
|
1506
|
-
|
|
1507
|
-
const
|
|
3012
|
+
if (replaceWithOffthreadVideo) {
|
|
3013
|
+
return;
|
|
3014
|
+
}
|
|
3015
|
+
const timestamp = frame / fps;
|
|
3016
|
+
const durationInSeconds = 1 / fps;
|
|
3017
|
+
const newHandle = delayRender(`Extracting frame at time ${timestamp}`, {
|
|
1508
3018
|
retries: delayRenderRetries ?? undefined,
|
|
1509
3019
|
timeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined
|
|
1510
3020
|
});
|
|
@@ -1521,17 +3031,54 @@ var VideoForRendering = ({
|
|
|
1521
3031
|
src,
|
|
1522
3032
|
timeInSeconds: timestamp,
|
|
1523
3033
|
durationInSeconds,
|
|
1524
|
-
playbackRate
|
|
1525
|
-
logLevel
|
|
3034
|
+
playbackRate,
|
|
3035
|
+
logLevel,
|
|
1526
3036
|
includeAudio: shouldRenderAudio,
|
|
1527
3037
|
includeVideo: window.remotion_videoEnabled,
|
|
1528
3038
|
isClientSideRendering: environment.isClientSideRendering,
|
|
1529
|
-
loop
|
|
1530
|
-
|
|
1531
|
-
|
|
1532
|
-
|
|
1533
|
-
|
|
1534
|
-
}) => {
|
|
3039
|
+
loop,
|
|
3040
|
+
audioStreamIndex,
|
|
3041
|
+
trimAfter: trimAfterValue,
|
|
3042
|
+
trimBefore: trimBeforeValue,
|
|
3043
|
+
fps
|
|
3044
|
+
}).then((result) => {
|
|
3045
|
+
if (result.type === "unknown-container-format") {
|
|
3046
|
+
if (disallowFallbackToOffthreadVideo) {
|
|
3047
|
+
cancelRender3(new Error(`Unknown container format ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
|
|
3048
|
+
}
|
|
3049
|
+
if (window.remotion_isMainTab) {
|
|
3050
|
+
Internals13.Log.info({ logLevel, tag: "@remotion/media" }, `Unknown container format for ${src} (Supported formats: https://www.remotion.dev/docs/mediabunny/formats), falling back to <OffthreadVideo>`);
|
|
3051
|
+
}
|
|
3052
|
+
setReplaceWithOffthreadVideo({ durationInSeconds: null });
|
|
3053
|
+
return;
|
|
3054
|
+
}
|
|
3055
|
+
if (result.type === "cannot-decode") {
|
|
3056
|
+
if (disallowFallbackToOffthreadVideo) {
|
|
3057
|
+
cancelRender3(new Error(`Cannot decode ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
|
|
3058
|
+
}
|
|
3059
|
+
if (window.remotion_isMainTab) {
|
|
3060
|
+
Internals13.Log.info({ logLevel, tag: "@remotion/media" }, `Cannot decode ${src}, falling back to <OffthreadVideo>`);
|
|
3061
|
+
}
|
|
3062
|
+
setReplaceWithOffthreadVideo({
|
|
3063
|
+
durationInSeconds: result.durationInSeconds
|
|
3064
|
+
});
|
|
3065
|
+
return;
|
|
3066
|
+
}
|
|
3067
|
+
if (result.type === "network-error") {
|
|
3068
|
+
if (disallowFallbackToOffthreadVideo) {
|
|
3069
|
+
cancelRender3(new Error(`Cannot decode ${src}, and 'disallowFallbackToOffthreadVideo' was set. Failing the render.`));
|
|
3070
|
+
}
|
|
3071
|
+
if (window.remotion_isMainTab) {
|
|
3072
|
+
Internals13.Log.info({ logLevel, tag: "@remotion/media" }, `Network error fetching ${src}, falling back to <OffthreadVideo>`);
|
|
3073
|
+
}
|
|
3074
|
+
setReplaceWithOffthreadVideo({ durationInSeconds: null });
|
|
3075
|
+
return;
|
|
3076
|
+
}
|
|
3077
|
+
const {
|
|
3078
|
+
frame: imageBitmap,
|
|
3079
|
+
audio,
|
|
3080
|
+
durationInSeconds: assetDurationInSeconds
|
|
3081
|
+
} = result;
|
|
1535
3082
|
if (imageBitmap) {
|
|
1536
3083
|
onVideoFrame?.(imageBitmap);
|
|
1537
3084
|
const context = canvasRef.current?.getContext("2d");
|
|
@@ -1544,38 +3091,40 @@ var VideoForRendering = ({
|
|
|
1544
3091
|
context.drawImage(imageBitmap, 0, 0);
|
|
1545
3092
|
imageBitmap.close();
|
|
1546
3093
|
} else if (window.remotion_videoEnabled) {
|
|
1547
|
-
|
|
3094
|
+
const context = canvasRef.current?.getContext("2d");
|
|
3095
|
+
if (context) {
|
|
3096
|
+
context.clearRect(0, 0, context.canvas.width, context.canvas.height);
|
|
3097
|
+
}
|
|
1548
3098
|
}
|
|
1549
3099
|
const volumePropsFrame = frameForVolumeProp({
|
|
1550
|
-
behavior: loopVolumeCurveBehavior
|
|
1551
|
-
loop
|
|
3100
|
+
behavior: loopVolumeCurveBehavior,
|
|
3101
|
+
loop,
|
|
1552
3102
|
assetDurationInSeconds: assetDurationInSeconds ?? 0,
|
|
1553
3103
|
fps,
|
|
1554
3104
|
frame,
|
|
1555
3105
|
startsAt
|
|
1556
3106
|
});
|
|
1557
|
-
const volume =
|
|
3107
|
+
const volume = Internals13.evaluateVolume({
|
|
1558
3108
|
volume: volumeProp,
|
|
1559
3109
|
frame: volumePropsFrame,
|
|
1560
3110
|
mediaVolume: 1
|
|
1561
3111
|
});
|
|
1562
|
-
|
|
3112
|
+
Internals13.warnAboutTooHighVolume(volume);
|
|
1563
3113
|
if (audio && volume > 0) {
|
|
1564
3114
|
applyVolume(audio.data, volume);
|
|
1565
3115
|
registerRenderAsset({
|
|
1566
3116
|
type: "inline-audio",
|
|
1567
3117
|
id,
|
|
1568
3118
|
audio: Array.from(audio.data),
|
|
1569
|
-
sampleRate: audio.sampleRate,
|
|
1570
|
-
numberOfChannels: audio.numberOfChannels,
|
|
1571
3119
|
frame: absoluteFrame,
|
|
1572
3120
|
timestamp: audio.timestamp,
|
|
1573
|
-
duration: audio.numberOfFrames /
|
|
3121
|
+
duration: audio.numberOfFrames / TARGET_SAMPLE_RATE * 1e6,
|
|
3122
|
+
toneFrequency
|
|
1574
3123
|
});
|
|
1575
3124
|
}
|
|
1576
3125
|
continueRender(newHandle);
|
|
1577
3126
|
}).catch((error) => {
|
|
1578
|
-
|
|
3127
|
+
cancelRender3(error);
|
|
1579
3128
|
});
|
|
1580
3129
|
return () => {
|
|
1581
3130
|
continueRender(newHandle);
|
|
@@ -1601,12 +3150,70 @@ var VideoForRendering = ({
|
|
|
1601
3150
|
src,
|
|
1602
3151
|
startsAt,
|
|
1603
3152
|
unregisterRenderAsset,
|
|
1604
|
-
volumeProp
|
|
3153
|
+
volumeProp,
|
|
3154
|
+
replaceWithOffthreadVideo,
|
|
3155
|
+
audioStreamIndex,
|
|
3156
|
+
disallowFallbackToOffthreadVideo,
|
|
3157
|
+
toneFrequency,
|
|
3158
|
+
trimAfterValue,
|
|
3159
|
+
trimBeforeValue
|
|
1605
3160
|
]);
|
|
1606
|
-
const classNameValue =
|
|
1607
|
-
return [
|
|
3161
|
+
const classNameValue = useMemo4(() => {
|
|
3162
|
+
return [Internals13.OBJECTFIT_CONTAIN_CLASS_NAME, className].filter(Internals13.truthy).join(" ");
|
|
1608
3163
|
}, [className]);
|
|
1609
|
-
|
|
3164
|
+
if (replaceWithOffthreadVideo) {
|
|
3165
|
+
const fallback = /* @__PURE__ */ jsx5(Internals13.InnerOffthreadVideo, {
|
|
3166
|
+
src,
|
|
3167
|
+
playbackRate: playbackRate ?? 1,
|
|
3168
|
+
muted: muted ?? false,
|
|
3169
|
+
acceptableTimeShiftInSeconds: fallbackOffthreadVideoProps?.acceptableTimeShiftInSeconds,
|
|
3170
|
+
loopVolumeCurveBehavior: loopVolumeCurveBehavior ?? "repeat",
|
|
3171
|
+
delayRenderRetries: delayRenderRetries ?? undefined,
|
|
3172
|
+
delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? undefined,
|
|
3173
|
+
style,
|
|
3174
|
+
allowAmplificationDuringRender: true,
|
|
3175
|
+
transparent: fallbackOffthreadVideoProps?.transparent ?? false,
|
|
3176
|
+
toneMapped: fallbackOffthreadVideoProps?.toneMapped ?? true,
|
|
3177
|
+
audioStreamIndex: audioStreamIndex ?? 0,
|
|
3178
|
+
name,
|
|
3179
|
+
className,
|
|
3180
|
+
onVideoFrame,
|
|
3181
|
+
volume: volumeProp,
|
|
3182
|
+
id,
|
|
3183
|
+
onError: fallbackOffthreadVideoProps?.onError,
|
|
3184
|
+
toneFrequency: fallbackOffthreadVideoProps?.toneFrequency ?? 1,
|
|
3185
|
+
showInTimeline: false,
|
|
3186
|
+
crossOrigin: undefined,
|
|
3187
|
+
onAutoPlayError: () => {
|
|
3188
|
+
return;
|
|
3189
|
+
},
|
|
3190
|
+
pauseWhenBuffering: false,
|
|
3191
|
+
trimAfter: undefined,
|
|
3192
|
+
trimBefore: undefined,
|
|
3193
|
+
useWebAudioApi: false,
|
|
3194
|
+
startFrom: undefined,
|
|
3195
|
+
endAt: undefined,
|
|
3196
|
+
stack,
|
|
3197
|
+
_remotionInternalNativeLoopPassed: false
|
|
3198
|
+
});
|
|
3199
|
+
if (loop) {
|
|
3200
|
+
if (!replaceWithOffthreadVideo.durationInSeconds) {
|
|
3201
|
+
cancelRender3(new Error(`Cannot render video ${src}: @remotion/media was unable to render, and fell back to <OffthreadVideo>. Also, "loop" was set, but <OffthreadVideo> does not support looping and @remotion/media could also not determine the duration of the video.`));
|
|
3202
|
+
}
|
|
3203
|
+
return /* @__PURE__ */ jsx5(Loop, {
|
|
3204
|
+
layout: "none",
|
|
3205
|
+
durationInFrames: calculateLoopDuration({
|
|
3206
|
+
trimAfter: trimAfterValue,
|
|
3207
|
+
mediaDurationInFrames: replaceWithOffthreadVideo.durationInSeconds * fps,
|
|
3208
|
+
playbackRate,
|
|
3209
|
+
trimBefore: trimBeforeValue
|
|
3210
|
+
}),
|
|
3211
|
+
children: fallback
|
|
3212
|
+
});
|
|
3213
|
+
}
|
|
3214
|
+
return fallback;
|
|
3215
|
+
}
|
|
3216
|
+
return /* @__PURE__ */ jsx5("canvas", {
|
|
1610
3217
|
ref: canvasRef,
|
|
1611
3218
|
style,
|
|
1612
3219
|
className: classNameValue
|
|
@@ -1614,79 +3221,148 @@ var VideoForRendering = ({
|
|
|
1614
3221
|
};
|
|
1615
3222
|
|
|
1616
3223
|
// src/video/video.tsx
|
|
1617
|
-
import { jsx as
|
|
1618
|
-
var {
|
|
1619
|
-
|
|
1620
|
-
|
|
1621
|
-
|
|
1622
|
-
|
|
1623
|
-
|
|
1624
|
-
|
|
1625
|
-
|
|
1626
|
-
|
|
1627
|
-
|
|
1628
|
-
|
|
1629
|
-
|
|
1630
|
-
|
|
1631
|
-
|
|
1632
|
-
|
|
1633
|
-
|
|
1634
|
-
|
|
1635
|
-
|
|
1636
|
-
|
|
1637
|
-
|
|
1638
|
-
|
|
1639
|
-
|
|
3224
|
+
import { jsx as jsx6 } from "react/jsx-runtime";
|
|
3225
|
+
var { validateMediaTrimProps, resolveTrimProps, validateMediaProps: validateMediaProps2 } = Internals14;
// src/video/video.tsx
// Validates trim/media props, then dispatches to the rendering or the
// preview implementation depending on the current Remotion environment.
var InnerVideo = ({
  src,
  audioStreamIndex,
  className,
  delayRenderRetries,
  delayRenderTimeoutInMilliseconds,
  disallowFallbackToOffthreadVideo,
  fallbackOffthreadVideoProps,
  logLevel,
  loop,
  loopVolumeCurveBehavior,
  muted,
  name,
  onVideoFrame,
  playbackRate,
  style,
  trimAfter,
  trimBefore,
  volume,
  stack,
  toneFrequency,
  showInTimeline
}) => {
  // Hook must run unconditionally, before any early throw/return.
  const environment = useRemotionEnvironment4();
  if (typeof src !== "string") {
    throw new TypeError(`The \`<Video>\` tag requires a string for \`src\`, but got ${JSON.stringify(src)} instead.`);
  }
  validateMediaTrimProps({
    startFrom: undefined,
    endAt: undefined,
    trimBefore,
    trimAfter
  });
  const { trimBeforeValue, trimAfterValue } = resolveTrimProps({
    startFrom: undefined,
    endAt: undefined,
    trimBefore,
    trimAfter
  });
  validateMediaProps2({ playbackRate, volume }, "Video");
  if (!environment.isRendering) {
    // Studio / player playback path.
    const previewProps = {
      audioStreamIndex: audioStreamIndex ?? 0,
      className,
      name,
      logLevel,
      loop,
      loopVolumeCurveBehavior,
      muted,
      onVideoFrame,
      playbackRate,
      src,
      style,
      volume,
      showInTimeline,
      trimAfter: trimAfterValue,
      trimBefore: trimBeforeValue,
      stack: stack ?? null,
      disallowFallbackToOffthreadVideo,
      fallbackOffthreadVideoProps
    };
    return /* @__PURE__ */ jsx6(VideoForPreview, previewProps);
  }
  // Server-side rendering path.
  const renderingProps = {
    audioStreamIndex: audioStreamIndex ?? 0,
    className,
    delayRenderRetries: delayRenderRetries ?? null,
    delayRenderTimeoutInMilliseconds: delayRenderTimeoutInMilliseconds ?? null,
    disallowFallbackToOffthreadVideo: disallowFallbackToOffthreadVideo ?? false,
    name,
    fallbackOffthreadVideoProps,
    logLevel,
    loop,
    loopVolumeCurveBehavior,
    muted,
    onVideoFrame,
    playbackRate,
    src,
    stack,
    style,
    volume,
    toneFrequency,
    trimAfterValue,
    trimBeforeValue
  };
  return /* @__PURE__ */ jsx6(VideoForRendering, renderingProps);
};
// Public <Video> component: normalizes every optional prop to its default
// before delegating to InnerVideo. Behavior-identical wrapper.
var Video2 = (props) => {
  const resolved = {
    audioStreamIndex: props.audioStreamIndex ?? 0,
    className: props.className,
    delayRenderRetries: props.delayRenderRetries ?? null,
    delayRenderTimeoutInMilliseconds: props.delayRenderTimeoutInMilliseconds ?? null,
    disallowFallbackToOffthreadVideo: props.disallowFallbackToOffthreadVideo ?? false,
    fallbackOffthreadVideoProps: props.fallbackOffthreadVideoProps ?? {},
    logLevel: props.logLevel ?? window.remotion_logLevel,
    loop: props.loop ?? false,
    loopVolumeCurveBehavior: props.loopVolumeCurveBehavior ?? "repeat",
    muted: props.muted ?? false,
    name: props.name,
    onVideoFrame: props.onVideoFrame,
    playbackRate: props.playbackRate ?? 1,
    showInTimeline: props.showInTimeline ?? true,
    src: props.src,
    style: props.style ?? {},
    trimAfter: props.trimAfter,
    trimBefore: props.trimBefore,
    volume: props.volume ?? 1,
    toneFrequency: props.toneFrequency ?? 1,
    stack: props.stack
  };
  return /* @__PURE__ */ jsx6(InnerVideo, resolved);
};
3358
|
+
Internals14.addSequenceStackTraces(Video2);
|
|
3359
|
+
// src/index.ts
|
|
3360
|
+
var experimental_Audio = Audio2;
|
|
3361
|
+
var experimental_Video = Video2;
|
|
1689
3362
|
export {
|
|
1690
|
-
|
|
1691
|
-
|
|
3363
|
+
experimental_Video,
|
|
3364
|
+
experimental_Audio,
|
|
3365
|
+
Video2 as Video,
|
|
3366
|
+
AudioForPreview,
|
|
3367
|
+
Audio2 as Audio
|
|
1692
3368
|
};
|