@remotion/media-utils 4.0.453 → 4.0.454
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/esm/index.mjs +1252 -0
- package/dist/use-windowed-audio-data.js +10 -1
- package/package.json +16 -5

package/dist/esm/index.mjs
ADDED

@@ -0,0 +1,1252 @@
+// src/audio-buffer/audio-buffer-to-wav.ts
+function interleave(inputL, inputR) {
+  const length = inputL.length + inputR.length;
+  const result = new Float32Array(length);
+  let index = 0;
+  let inputIndex = 0;
+  while (index < length) {
+    result[index++] = inputL[inputIndex];
+    result[index++] = inputR[inputIndex];
+    inputIndex++;
+  }
+  return result;
+}
+function writeFloat32(output, offset, input) {
+  for (let i = 0;i < input.length; i++, offset += 4) {
+    output.setFloat32(offset, input[i], true);
+  }
+}
+function floatTo16BitPCM(output, offset, input) {
+  for (let i = 0;i < input.length; i++, offset += 2) {
+    const s = Math.max(-1, Math.min(1, input[i]));
+    output.setInt16(offset, s < 0 ? s * 32768 : s * 32767, true);
+  }
+}
+function writeString(view, offset, string) {
+  for (let i = 0;i < string.length; i++) {
+    view.setUint8(offset + i, string.charCodeAt(i));
+  }
+}
+function encodeWAV({
+  samples,
+  format,
+  sampleRate,
+  numChannels,
+  bitDepth
+}) {
+  const bytesPerSample = bitDepth / 8;
+  const blockAlign = numChannels * bytesPerSample;
+  const buffer = new ArrayBuffer(44 + samples.length * bytesPerSample);
+  const view = new DataView(buffer);
+  writeString(view, 0, "RIFF");
+  view.setUint32(4, 36 + samples.length * bytesPerSample, true);
+  writeString(view, 8, "WAVE");
+  writeString(view, 12, "fmt ");
+  view.setUint32(16, 16, true);
+  view.setUint16(20, format, true);
+  view.setUint16(22, numChannels, true);
+  view.setUint32(24, sampleRate, true);
+  view.setUint32(28, sampleRate * blockAlign, true);
+  view.setUint16(32, blockAlign, true);
+  view.setUint16(34, bitDepth, true);
+  writeString(view, 36, "data");
+  view.setUint32(40, samples.length * bytesPerSample, true);
+  if (format === 1) {
+    floatTo16BitPCM(view, 44, samples);
+  } else {
+    writeFloat32(view, 44, samples);
+  }
+  return buffer;
+}
+function audioBufferToWav(buffer, opt) {
+  const numChannels = buffer.numberOfChannels;
+  const { sampleRate } = buffer;
+  const format = opt.float32 ? 3 : 1;
+  const bitDepth = format === 3 ? 32 : 16;
+  let result;
+  if (numChannels === 2) {
+    result = interleave(buffer.getChannelData(0), buffer.getChannelData(1));
+  } else {
+    result = buffer.getChannelData(0);
+  }
+  return encodeWAV({
+    samples: result,
+    format,
+    sampleRate,
+    numChannels,
+    bitDepth
+  });
+}
+
+// src/audio-buffer/audio-url-helpers.ts
+var audioBufferToDataUrl = (buffer) => {
+  const wavAsArrayBuffer = audioBufferToWav(buffer, {
+    float32: true
+  });
+  let binary = "";
+  const bytes = new Uint8Array(wavAsArrayBuffer);
+  const len = bytes.byteLength;
+  for (let i = 0;i < len; i++) {
+    binary += String.fromCharCode(bytes[i]);
+  }
+  return "data:audio/wav;base64," + window.btoa(binary);
+};
+// src/create-smooth-svg-path.ts
+var line = (pointA, pointB) => {
+  const lengthX = pointB.x - pointA.x;
+  const lengthY = pointB.y - pointA.y;
+  return {
+    length: Math.sqrt(lengthX ** 2 + lengthY ** 2),
+    angle: Math.atan2(lengthY, lengthX)
+  };
+};
+var controlPoint = ({
+  current,
+  previous,
+  next,
+  reverse
+}) => {
+  const p = previous || current;
+  const n = next || current;
+  const smoothing = 0.2;
+  const o = line(p, n);
+  const angle = o.angle + (reverse ? Math.PI : 0);
+  const length = o.length * smoothing;
+  const x = current.x + Math.cos(angle) * length;
+  const y = current.y + Math.sin(angle) * length;
+  return { x, y };
+};
+var createSmoothSvgPath = ({ points }) => {
+  return points.reduce((acc, current, i, a) => {
+    if (i === 0) {
+      return `M ${current.x},${current.y}`;
+    }
+    const { x, y } = current;
+    const previous = a[i - 1];
+    const twoPrevious = a[i - 2];
+    const next = a[i + 1];
+    const { x: cp1x, y: cp1y } = controlPoint({
+      current: previous,
+      previous: twoPrevious,
+      next: current,
+      reverse: false
+    });
+    const { x: cp2x, y: cp2y } = controlPoint({
+      current,
+      previous,
+      next,
+      reverse: true
+    });
+    return `${acc} C ${cp1x},${cp1y} ${cp2x},${cp2y} ${x},${y}`;
+  }, "");
+};
+// src/fetch-with-cors-catch.ts
+var fetchWithCorsCatch = async (src, init) => {
+  try {
+    const response = await fetch(src, {
+      mode: "cors",
+      referrerPolicy: "no-referrer-when-downgrade",
+      ...init
+    });
+    return response;
+  } catch (err) {
+    const error = err;
+    if (error.message.includes("Failed to fetch") || error.message.includes("Load failed") || error.message.includes("NetworkError when attempting to fetch resource")) {
+      throw new TypeError(`Failed to read from ${src}: ${error.message}. Does the resource support CORS?`);
+    }
+    throw err;
+  }
+};
+
+// src/is-remote-asset.ts
+var isRemoteAsset = (asset) => !asset.startsWith(window.origin) && !asset.startsWith("data");
+
+// src/p-limit.ts
+var pLimit = (concurrency) => {
+  const queue = [];
+  let activeCount = 0;
+  const next = () => {
+    activeCount--;
+    if (queue.length > 0) {
+      queue.shift()?.();
+    }
+  };
+  const run = async (fn, resolve, ...args) => {
+    activeCount++;
+    const result = (async () => fn(...args))();
+    resolve(result);
+    try {
+      await result;
+    } catch {}
+    next();
+  };
+  const enqueue = (fn, resolve, ...args) => {
+    queue.push(() => run(fn, resolve, ...args));
+    (async () => {
+      await Promise.resolve();
+      if (activeCount < concurrency && queue.length > 0) {
+        queue.shift()?.();
+      }
+    })();
+  };
+  const generator = (fn, ...args) => new Promise((resolve) => {
+    enqueue(fn, resolve, ...args);
+  });
+  Object.defineProperties(generator, {
+    activeCount: {
+      get: () => activeCount
+    },
+    pendingCount: {
+      get: () => queue.length
+    },
+    clearQueue: {
+      value: () => {
+        queue.length = 0;
+      }
+    }
+  });
+  return generator;
+};
+
+// src/get-audio-data.ts
+var metadataCache = {};
+var limit = pLimit(3);
+var fn = async (src, options) => {
+  if (metadataCache[src]) {
+    return metadataCache[src];
+  }
+  if (typeof document === "undefined") {
+    throw new Error("getAudioData() is only available in the browser.");
+  }
+  const audioContext = new AudioContext({
+    sampleRate: options?.sampleRate ?? 48000
+  });
+  const response = await fetchWithCorsCatch(src);
+  if (!response.ok) {
+    throw new Error(`Failed to fetch audio data from ${src}: ${response.status} ${response.statusText}`);
+  }
+  const arrayBuffer = await response.arrayBuffer();
+  const wave = await audioContext.decodeAudioData(arrayBuffer);
+  const channelWaveforms = new Array(wave.numberOfChannels).fill(true).map((_, channel) => {
+    return wave.getChannelData(channel);
+  });
+  const metadata = {
+    channelWaveforms,
+    sampleRate: wave.sampleRate,
+    durationInSeconds: wave.duration,
+    numberOfChannels: wave.numberOfChannels,
+    resultId: String(Math.random()),
+    isRemote: isRemoteAsset(src)
+  };
+  metadataCache[src] = metadata;
+  return metadata;
+};
+var getAudioData = (src, options) => {
+  return limit(fn, src, options);
+};
+// src/media-tag-error-handling.ts
+async function fetchWithTimeout(url, options, timeout = 3000) {
+  const controller = new AbortController;
+  const id = setTimeout(() => controller.abort(), timeout);
+  options.signal = controller.signal;
+  try {
+    const response = await fetch(url, options);
+    clearTimeout(id);
+    return response;
+  } catch {
+    clearTimeout(id);
+    throw new Error(`Fetch timed out after ${timeout}ms`);
+  }
+}
+var checkFor404 = (src) => {
+  return fetchWithTimeout(src, {
+    method: "HEAD",
+    mode: "no-cors"
+  }).then((res) => res.status);
+};
+var checkFor404OrSkip = async ({
+  suspecting404,
+  sameOrigin,
+  src
+}) => {
+  if (!suspecting404) {
+    return Promise.resolve(null);
+  }
+  if (!sameOrigin) {
+    return Promise.resolve(null);
+  }
+  try {
+    return await checkFor404(src);
+  } catch {
+    return Promise.resolve(null);
+  }
+};
+var onMediaError = ({
+  error,
+  src,
+  reject,
+  cleanup,
+  api
+}) => {
+  const suspecting404 = error.MEDIA_ERR_SRC_NOT_SUPPORTED === error.code;
+  const isSrcSameOriginAsCurrent = new URL(src, window.location.origin).toString().startsWith(window.location.origin);
+  checkFor404OrSkip({
+    suspecting404,
+    sameOrigin: isSrcSameOriginAsCurrent,
+    src
+  }).then((status) => {
+    const err = status === 404 ? new Error([
+      `Failed to execute ${api}: Received a 404 error loading "${src}".`,
+      "Correct the URL of the file."
+    ].join(" ")) : new Error([
+      `Failed to execute ${api}, Received a MediaError loading "${src}". Consider using parseMedia() instead which supports more codecs: https://www.remotion.dev/docs/miscellaneous/parse-media-vs-get-video-metadata`,
+      status === null ? null : `HTTP Status code of the file: ${status}.`,
+      error.message ? `Browser error message: ${error.message}` : null,
+      "Check the path of the file and if it is a valid video."
+    ].filter(Boolean).join(" "));
+    reject(err);
+    cleanup();
+  }).catch((e) => {
+    reject(e);
+    cleanup();
+  });
+};
+
+// src/get-audio-duration-in-seconds.ts
+var limit2 = pLimit(3);
+var metadataCache2 = {};
+var fn2 = (src) => {
+  if (metadataCache2[src]) {
+    return Promise.resolve(metadataCache2[src]);
+  }
+  if (typeof document === "undefined") {
+    throw new Error("getAudioDuration() is only available in the browser.");
+  }
+  const audio = document.createElement("audio");
+  audio.src = src;
+  return new Promise((resolve, reject) => {
+    const onError = () => {
+      onMediaError({
+        error: audio.error,
+        src,
+        cleanup,
+        reject,
+        api: "getAudioDurationInSeconds()"
+      });
+    };
+    const onLoadedMetadata = () => {
+      metadataCache2[src] = audio.duration;
+      resolve(audio.duration);
+      cleanup();
+    };
+    const cleanup = () => {
+      audio.removeEventListener("loadedmetadata", onLoadedMetadata);
+      audio.removeEventListener("error", onError);
+      audio.remove();
+    };
+    audio.addEventListener("loadedmetadata", onLoadedMetadata, { once: true });
+    audio.addEventListener("error", onError, { once: true });
+  });
+};
+var getAudioDurationInSeconds = (src) => {
+  return limit2(fn2, src);
+};
+var getAudioDuration = (src) => getAudioDurationInSeconds(src);
+// src/get-image-dimensions.ts
+var imageDimensionsCache = {};
+var limit3 = pLimit(3);
+var fn3 = async (src) => {
+  if (imageDimensionsCache[src]) {
+    return imageDimensionsCache[src];
+  }
+  if (typeof document === "undefined") {
+    throw new Error("getImageDimensions() is only available in the browser.");
+  }
+  const imageDimensions = await new Promise((resolved, reject) => {
+    const image = new Image;
+    image.onload = () => {
+      const { width, height } = image;
+      resolved({ width, height });
+    };
+    image.onerror = reject;
+    image.src = src;
+  });
+  imageDimensionsCache[src] = imageDimensions;
+  return imageDimensions;
+};
+function getImageDimensions(src) {
+  return limit3(fn3, src);
+}
+// src/get-video-metadata.ts
+var cache = {};
+var limit4 = pLimit(3);
+var fn4 = (src) => {
+  if (cache[src]) {
+    return Promise.resolve(cache[src]);
+  }
+  if (typeof document === "undefined") {
+    throw new Error("getVideoMetadata() is only available in the browser.");
+  }
+  const video = document.createElement("video");
+  video.src = src;
+  return new Promise((resolve, reject) => {
+    const onError = () => {
+      onMediaError({
+        error: video.error,
+        src,
+        cleanup,
+        reject,
+        api: "getVideoMetadata()"
+      });
+    };
+    const onLoadedMetadata = () => {
+      const pixels = video.videoHeight * video.videoWidth;
+      if (pixels === 0) {
+        reject(new Error(`Unable to determine video metadata for ${src}`));
+        return;
+      }
+      if (!Number.isFinite(video.duration)) {
+        reject(new Error(`Unable to determine video duration for ${src} - got Infinity. Re-encoding this video may fix this issue.`));
+        return;
+      }
+      const metadata = {
+        durationInSeconds: video.duration,
+        width: video.videoWidth,
+        height: video.videoHeight,
+        aspectRatio: video.videoWidth / video.videoHeight,
+        isRemote: isRemoteAsset(src)
+      };
+      resolve(metadata);
+      cache[src] = metadata;
+      cleanup();
+    };
+    const cleanup = () => {
+      video.removeEventListener("loadedmetadata", onLoadedMetadata);
+      video.removeEventListener("error", onError);
+      video.remove();
+    };
+    video.addEventListener("loadedmetadata", onLoadedMetadata, { once: true });
+    video.addEventListener("error", onError, { once: true });
+  });
+};
+var getVideoMetadata = (src) => {
+  return limit4(fn4, src);
+};
+// src/get-waveform-portion.ts
+import { NoReactInternals } from "remotion/no-react";
+
+// src/normalize-data.ts
+var normalizeData = (filteredData) => {
+  const max = Math.max(...filteredData);
+  const multiplier = max === 0 ? 0 : max ** -1;
+  return filteredData.map((n) => n * multiplier);
+};
+
+// src/get-wave-form-samples.ts
+var getWaveformSamples = ({
+  audioBuffer,
+  numberOfSamples,
+  outputRange,
+  normalize
+}) => {
+  const blockSize = Math.floor(audioBuffer.length / numberOfSamples);
+  if (blockSize === 0) {
+    return [];
+  }
+  const filteredData = [];
+  for (let i = 0;i < numberOfSamples; i++) {
+    const blockStart = blockSize * i;
+    let sum = 0;
+    for (let j = 0;j < blockSize; j++) {
+      sum += Math.abs(audioBuffer[blockStart + j]);
+    }
+    filteredData.push(sum / blockSize);
+  }
+  if (normalize) {
+    if (outputRange === "minus-one-to-one") {
+      return normalizeData(filteredData).map((n, i) => {
+        if (i % 2 === 0) {
+          return n * -1;
+        }
+        return n;
+      });
+    }
+    return normalizeData(filteredData);
+  }
+  if (outputRange === "minus-one-to-one") {
+    return filteredData.map((n, i) => {
+      if (i % 2 === 0) {
+        return n * -1;
+      }
+      return n;
+    });
+  }
+  return filteredData;
+};
+
+// src/validate-channel.ts
+var validateChannel = (channel, numberOfChannels) => {
+  if (typeof channel !== "number") {
+    throw new TypeError(`"channel" must be a number`);
+  }
+  if (channel % 1 !== 0) {
+    throw new TypeError(`"channel" must an integer, got ${channel}`);
+  }
+  if (Number.isNaN(channel)) {
+    throw new TypeError(`The channel parameter is NaN.`);
+  }
+  if (channel < 0) {
+    throw new TypeError('"channel" cannot be negative');
+  }
+  if (channel > numberOfChannels - 1) {
+    throw new TypeError(`"channel" must be ${numberOfChannels - 1} or lower. The audio has ${numberOfChannels} channels`);
+  }
+};
+
+// src/get-waveform-portion.ts
+var concatArrays = (arrays) => {
+  const totalLength = arrays.reduce((acc, value) => acc + value.length, 0);
+  const result = new Float32Array(totalLength);
+  let length = 0;
+  for (const array of arrays) {
+    result.set(array, length);
+    length += array.length;
+  }
+  return result;
+};
+var getWaveformPortion = ({
+  audioData,
+  startTimeInSeconds,
+  durationInSeconds,
+  numberOfSamples,
+  channel = 0,
+  outputRange = "zero-to-one",
+  dataOffsetInSeconds,
+  normalize = true
+}) => {
+  validateChannel(channel, audioData.numberOfChannels);
+  const waveform = audioData.channelWaveforms[channel];
+  const startSample = Math.floor((startTimeInSeconds - (dataOffsetInSeconds ?? 0)) * audioData.sampleRate);
+  const endSample = Math.floor((startTimeInSeconds - (dataOffsetInSeconds ?? 0) + durationInSeconds) * audioData.sampleRate);
+  const samplesBeforeStart = 0 - startSample;
+  const samplesAfterEnd = endSample - waveform.length;
+  const clampedStart = Math.max(startSample, 0);
+  const clampedEnd = Math.min(waveform.length, endSample);
+  const padStart = samplesBeforeStart > 0 ? new Float32Array(samplesBeforeStart).fill(0) : null;
+  const padEnd = samplesAfterEnd > 0 ? new Float32Array(samplesAfterEnd).fill(0) : null;
+  const arrs = [
+    padStart,
+    waveform.slice(clampedStart, clampedEnd),
+    padEnd
+  ].filter(NoReactInternals.truthy);
+  const audioBuffer = arrs.length === 1 ? arrs[0] : concatArrays(arrs);
+  return getWaveformSamples({
+    audioBuffer,
+    numberOfSamples,
+    outputRange,
+    normalize
+  }).map((w, i) => {
+    return {
+      index: i,
+      amplitude: w
+    };
+  });
+};
+// src/use-audio-data.ts
+import { useCallback, useEffect, useLayoutEffect, useRef, useState } from "react";
+import { cancelRender, useDelayRender } from "remotion";
+var useAudioData = (src) => {
+  if (!src) {
+    throw new TypeError("useAudioData requires a 'src' parameter");
+  }
+  const mountState = useRef({ isMounted: true });
+  useEffect(() => {
+    const { current } = mountState;
+    current.isMounted = true;
+    return () => {
+      current.isMounted = false;
+    };
+  }, []);
+  const [metadata, setMetadata] = useState(null);
+  const { delayRender, continueRender } = useDelayRender();
+  const fetchMetadata = useCallback(async () => {
+    const handle = delayRender(`Waiting for audio metadata with src="${src}" to be loaded`);
+    try {
+      const data = await getAudioData(src);
+      if (mountState.current.isMounted) {
+        setMetadata(data);
+      }
+    } catch (err) {
+      cancelRender(err);
+    }
+    continueRender(handle);
+  }, [src, delayRender, continueRender]);
+  useLayoutEffect(() => {
+    fetchMetadata();
+  }, [fetchMetadata]);
+  return metadata;
+};
+// src/use-windowed-audio-data.ts
+import {
+  ALL_FORMATS,
+  Input,
+  InputDisposedError,
+  MATROSKA,
+  UrlSource,
+  WEBM
+} from "mediabunny";
+import {
+  useCallback as useCallback2,
+  useEffect as useEffect2,
+  useLayoutEffect as useLayoutEffect2,
+  useMemo,
+  useRef as useRef2,
+  useState as useState2
+} from "react";
+import { cancelRender as cancelRender2, Internals, useDelayRender as useDelayRender2 } from "remotion";
+
+// src/combine-float32-arrays.ts
+var combineFloat32Arrays = (arrays) => {
+  if (arrays.length === 0) {
+    return new Float32Array([]);
+  }
+  if (arrays.length === 1) {
+    return arrays[0];
+  }
+  let totalLength = 0;
+  for (const array of arrays) {
+    totalLength += array.length;
+  }
+  const result = new Float32Array(totalLength);
+  let offset = 0;
+  for (const array of arrays) {
+    result.set(array, offset);
+    offset += array.length;
+  }
+  return result;
+};
+
+// src/get-partial-audio-data.ts
+import { AudioBufferSink } from "mediabunny";
+var EXTRA_THRESHOLD_IN_SECONDS = 1.5;
+var getPartialAudioData = async ({
+  track,
+  fromSeconds,
+  toSeconds,
+  channelIndex,
+  signal,
+  isMatroska = false
+}) => {
+  if (signal.aborted) {
+    throw new Error("Operation was aborted");
+  }
+  const audioSamples = [];
+  const actualFromSeconds = isMatroska ? 0 : Math.max(0, fromSeconds - EXTRA_THRESHOLD_IN_SECONDS);
+  const sink = new AudioBufferSink(track);
+  const iterator = sink.buffers(actualFromSeconds, toSeconds);
+  for await (const { buffer, timestamp, duration } of iterator) {
+    if (signal.aborted) {
+      break;
+    }
+    const channelData = buffer.getChannelData(channelIndex);
+    const bufferStartSeconds = timestamp;
+    const bufferEndSeconds = timestamp + duration;
+    const overlapStartSecond = Math.max(bufferStartSeconds, fromSeconds);
+    const overlapEndSecond = Math.min(bufferEndSeconds, toSeconds);
+    if (overlapStartSecond >= overlapEndSecond) {
+      continue;
+    }
+    const startSampleInBuffer = Math.floor((overlapStartSecond - bufferStartSeconds) * buffer.sampleRate);
+    const endSampleInBuffer = Math.ceil((overlapEndSecond - bufferStartSeconds) * buffer.sampleRate);
+    const trimmedData = channelData.slice(startSampleInBuffer, endSampleInBuffer);
+    audioSamples.push(trimmedData);
+  }
+  await iterator.return();
+  const totalSamples = audioSamples.reduce((sum, sample) => sum + sample.length, 0);
+  const result = new Float32Array(totalSamples);
+  let offset = 0;
+  for (const audioSample of audioSamples) {
+    result.set(audioSample, offset);
+    offset += audioSample.length;
+  }
+  return result;
+};
+
+// src/use-windowed-audio-data.ts
+var warnedMatroska = {};
+var useWindowedAudioData = ({
+  src,
+  frame,
+  fps,
+  windowInSeconds,
+  channelIndex = 0
+}) => {
+  const isMounted = useRef2(true);
+  const [audioUtils, setAudioUtils] = useState2(null);
+  const [waveFormMap, setWaveformMap] = useState2({});
+  const requests = useRef2({});
+  const [initialWindowInSeconds] = useState2(windowInSeconds);
+  if (windowInSeconds !== initialWindowInSeconds) {
+    throw new Error("windowInSeconds cannot be changed dynamically");
+  }
+  useEffect2(() => {
+    isMounted.current = true;
+    return () => {
+      isMounted.current = false;
+      Object.values(requests.current).forEach((controller) => {
+        if (controller) {
+          controller.abort();
+        }
+      });
+      requests.current = {};
+      setWaveformMap({});
+      if (audioUtils) {
+        audioUtils.input.dispose();
+      }
+    };
+  }, [audioUtils]);
+  const { delayRender, continueRender } = useDelayRender2();
+  const fetchMetadata = useCallback2(async (signal) => {
+    const handle = delayRender(`Waiting for audio metadata with src="${src}" to be loaded`);
+    const cont = () => {
+      continueRender(handle);
+    };
+    signal.addEventListener("abort", cont, { once: true });
+    const input = new Input({
+      formats: ALL_FORMATS,
+      source: new UrlSource(src)
+    });
+    const onAbort = () => {
+      input.dispose();
+    };
+    signal.addEventListener("abort", onAbort, { once: true });
+    try {
+      const durationInSeconds = await input.computeDuration();
+      const audioTrack = await input.getPrimaryAudioTrack();
+      if (!audioTrack) {
+        throw new Error("No audio track found");
+      }
+      if (await audioTrack.isLive()) {
+        throw new Error("Live streams are not currently supported by Remotion. Sorry! Source: " + src);
+      }
+      if (await audioTrack.isRelativeToUnixEpoch()) {
+        throw new Error("Streams with UNIX timestamps are not currently supported by Remotion. Sorry! Source: " + src);
+      }
+      const canDecode = await audioTrack.canDecode();
+      if (!canDecode) {
+        throw new Error("Audio track cannot be decoded");
+      }
+      if (channelIndex >= audioTrack.numberOfChannels || channelIndex < 0) {
+        throw new Error(`Invalid channel index ${channelIndex} for audio with ${audioTrack.numberOfChannels} channels`);
+      }
+      const numberOfChannels = await audioTrack.getNumberOfChannels();
+      const sampleRate = await audioTrack.getSampleRate();
+      const format = await input.getFormat();
+      const isMatroska = format === MATROSKA || format === WEBM;
+      if (isMounted.current) {
+        setAudioUtils({
+          input,
+          track: audioTrack,
+          metadata: {
+            durationInSeconds,
+            numberOfChannels,
+            sampleRate
+          },
+          isMatroska
+        });
+      }
+      continueRender(handle);
+    } catch (err) {
+      if (err instanceof InputDisposedError) {
+        continueRender(handle);
+        return;
+      }
+      cancelRender2(err);
+    } finally {
+      signal.removeEventListener("abort", cont);
+      signal.removeEventListener("abort", onAbort);
+    }
+  }, [src, delayRender, continueRender, channelIndex]);
+  useLayoutEffect2(() => {
+    const controller = new AbortController;
+    fetchMetadata(controller.signal);
+    return () => {
+      controller.abort();
+    };
+  }, [fetchMetadata]);
+  const currentTime = frame / fps;
+  const currentWindowIndex = Math.floor(currentTime / windowInSeconds);
+  const windowsToFetch = useMemo(() => {
+    if (!audioUtils?.metadata) {
+      return [];
+    }
+    const maxWindowIndex = Math.floor(audioUtils.metadata.durationInSeconds / windowInSeconds - 0.000000000001);
+    return [
+      currentWindowIndex === 0 ? null : currentWindowIndex - 1,
+      currentWindowIndex,
+      currentWindowIndex + 1 > maxWindowIndex ? null : currentWindowIndex + 1
+    ].filter((i) => i !== null).filter((i) => i >= 0);
+  }, [currentWindowIndex, audioUtils, windowInSeconds]);
+  const fetchAndSetWaveformData = useCallback2(async (windowIndex) => {
+    if (!audioUtils?.metadata || !audioUtils) {
+      throw new Error("MediaBunny context is not loaded yet");
+    }
+    const existingController = requests.current[windowIndex];
+    if (existingController) {
+      existingController.abort();
+    }
+    const controller = new AbortController;
+    requests.current[windowIndex] = controller;
+    if (controller.signal.aborted) {
+      return;
+    }
+    const fromSeconds = windowIndex * windowInSeconds;
+    const toSeconds = (windowIndex + 1) * windowInSeconds;
+    if (fromSeconds >= audioUtils.metadata.durationInSeconds || toSeconds <= 0) {
+      return;
+    }
+    try {
+      const { isMatroska } = audioUtils;
+      if (isMatroska && !warnedMatroska[src]) {
+        warnedMatroska[src] = true;
+        Internals.Log.warn({ logLevel: "info", tag: "@remotion/media-utils" }, `[useWindowedAudioData] Matroska/WebM file detected at "${src}".
+
+Due to format limitation, audio decoding must start from the beginning of the file, which may lead to increased memory usage and slower performance for large files. Consider converting the audio to a more suitable format like MP3 or AAC for better performance.`);
+      }
+      const partialWaveData = await getPartialAudioData({
+        track: audioUtils.track,
+        fromSeconds,
+        toSeconds,
+        channelIndex,
+        signal: controller.signal,
+        isMatroska
+      });
+      if (!controller.signal.aborted) {
+        setWaveformMap((prev) => {
+          const entries = Object.keys(prev);
+          const windowsToClear = entries.filter((entry) => !windowsToFetch.includes(Number(entry)));
+          return {
+            ...prev,
+            ...windowsToClear.reduce((acc, key) => {
+              acc[key] = null;
+              return acc;
+            }, {}),
+            [windowIndex]: partialWaveData
+          };
+        });
+      }
+    } catch (err) {
+      if (controller.signal.aborted) {
+        return;
+      }
+      if (err instanceof InputDisposedError) {
+        return;
+      }
+      throw err;
+    } finally {
+      if (requests.current[windowIndex] === controller) {
+        requests.current[windowIndex] = null;
+      }
+    }
+  }, [channelIndex, audioUtils, windowInSeconds, windowsToFetch, src]);
+  useEffect2(() => {
+    if (!audioUtils?.metadata) {
+      return;
+    }
+    const windowsToClear = Object.keys(requests.current).filter((entry) => !windowsToFetch.includes(Number(entry)));
+    for (const windowIndex of windowsToClear) {
+      const controller = requests.current[windowIndex];
+      if (controller) {
+        controller.abort();
+        requests.current[windowIndex] = null;
+      }
+    }
+    const windowsToActuallyFetch = windowsToFetch.filter((windowIndex) => !waveFormMap[windowIndex] && !requests.current[windowIndex]);
+    if (windowsToActuallyFetch.length === 0) {
+      return;
+    }
+    const currentWindowNeedsFetch = windowsToActuallyFetch.includes(currentWindowIndex);
+    const otherWindowsToFetch = windowsToActuallyFetch.filter((w) => w !== currentWindowIndex);
+    const fetchWindows = async () => {
+      if (currentWindowNeedsFetch) {
+        await fetchAndSetWaveformData(currentWindowIndex);
+      }
+      if (otherWindowsToFetch.length > 0) {
+        await Promise.all(otherWindowsToFetch.map((windowIndex) => {
+          return fetchAndSetWaveformData(windowIndex);
+        }));
+      }
+    };
+    fetchWindows().catch((err) => {
+      if (err.stack?.includes("Cancelled")) {
+        return;
+      }
+      if (err.stack?.toLowerCase()?.includes("aborted")) {
+        return;
+      }
+      if (err.message?.toLowerCase()?.includes("aborted")) {
+        return;
+      }
+      cancelRender2(err);
+    });
+  }, [
+    fetchAndSetWaveformData,
+    audioUtils,
+    windowsToFetch,
+    waveFormMap,
+    currentWindowIndex
+  ]);
+  const availableWindows = useMemo(() => {
+    return windowsToFetch.filter((i) => waveFormMap[i]);
+  }, [windowsToFetch, waveFormMap]);
+  const currentAudioData = useMemo(() => {
+    if (!audioUtils?.metadata) {
+      return null;
+    }
+    if (availableWindows.length === 0) {
+      return null;
+    }
+    const windows = availableWindows.map((i) => waveFormMap[i]);
+    const data = combineFloat32Arrays(windows);
+    return {
+      channelWaveforms: [data],
+      durationInSeconds: audioUtils.metadata.durationInSeconds,
+      isRemote: isRemoteAsset(src),
+      numberOfChannels: 1,
+      resultId: `${src}-windows-${availableWindows.join(",")}`,
+      sampleRate: audioUtils.metadata.sampleRate
+    };
+  }, [src, waveFormMap, audioUtils, availableWindows]);
+  const isBeyondAudioDuration = audioUtils ? currentTime >= audioUtils.metadata.durationInSeconds : false;
+  useLayoutEffect2(() => {
+    if (currentAudioData) {
+      return;
+    }
+    if (isBeyondAudioDuration) {
+      return;
+    }
+    const handle = delayRender(`Waiting for audio data with src="${src}" to be loaded`);
+    return () => {
+      continueRender(handle);
+    };
+  }, [
+    currentAudioData,
+    src,
+    delayRender,
+    continueRender,
+    isBeyondAudioDuration
+  ]);
+  const audioData = isBeyondAudioDuration ? null : currentAudioData;
+  return {
+    audioData,
+    dataOffsetInSeconds: availableWindows.length > 0 ? availableWindows[0] * windowInSeconds : 0
+  };
+};
+// src/visualize-audio.ts
+import { NoReactInternals as NoReactInternals2 } from "remotion/no-react";
+
+// src/fft/complex.ts
+var complexAdd = function(a, b) {
+  return [a[0] + b[0], a[1] + b[1]];
+};
+var complexSubtract = function(a, b) {
+  return [a[0] - b[0], a[1] - b[1]];
+};
+var complexMultiply = function(a, b) {
+  return [a[0] * b[0] - a[1] * b[1], a[0] * b[1] + a[1] * b[0]];
+};
+var complexMagnitude = function(c) {
+  return Math.sqrt(c[0] * c[0] + c[1] * c[1]);
+};
+
+// src/fft/exponent.ts
+var mapExponent = {};
+var exponent = function(k, N) {
+  const x = -2 * Math.PI * (k / N);
+  mapExponent[N] = mapExponent[N] || {};
+  mapExponent[N][k] = mapExponent[N][k] || [Math.cos(x), Math.sin(x)];
+  return mapExponent[N][k];
+};
+
+// src/fft/fft-accurate.ts
+var fftAccurate = function(vector) {
+  const X = [];
+  const N = vector.length;
+  if (N === 1) {
+    if (Array.isArray(vector[0])) {
+      return [[vector[0][0], vector[0][1]]];
+    }
+    return [[vector[0], 0]];
+  }
+  const X_evens = fftAccurate(vector.filter((_, ix) => ix % 2 === 0));
+  const X_odds = fftAccurate(vector.filter((__, ix) => ix % 2 === 1));
+  for (let k = 0;k < N / 2; k++) {
+    const t = X_evens[k];
+    const e = complexMultiply(exponent(k, N), X_odds[k]);
+    X[k] = complexAdd(t, e);
+    X[k + N / 2] = complexSubtract(t, e);
+  }
+  return X;
+};
+
+// src/fft/fft-fast.ts
+function reverseBits(num, numBits) {
+  let result = 0;
+  for (let i = 0;i < numBits; i++) {
+    result = result << 1 | num >> i & 1;
+  }
+  return result;
+}
+function hammingWindow(N) {
+  const win = new Array(N);
+  for (let i = 0;i < N; i++) {
+    win[i] = 0.8 - 0.46 * Math.cos(2 * Math.PI * i / (N - 1));
+  }
+  return win;
+}
+function bitReversePermutation(N) {
+  const bitReversed = new Array(N);
+  for (let i = 0;i < N; i++) {
+    bitReversed[i] = reverseBits(i, Math.log2(N));
+  }
+  return bitReversed;
+}
+var fftFast = function(vector) {
+  const N = vector.length;
+  const X = new Array(N);
+  if (N <= 1) {
+    for (let i = 0;i < vector.length; i++) {
+      const value = vector[i];
+      X[i] = [value * 2, 0];
+    }
+    return X;
+  }
+  const window2 = hammingWindow(N);
+  for (let i = 0;i < N; i++) {
+    X[i] = [vector[i] * window2[i], 0];
+  }
+  const bitReversed = bitReversePermutation(N);
+  for (let i = 0;i < N; i++) {
+    X[i] = [vector[bitReversed[i]], 0];
+  }
+  for (let s = 1;s <= Math.log2(N); s++) {
+    const m = 1 << s;
+    const mHalf = m / 2;
+    const angleIncrement = 2 * Math.PI / m;
+    for (let k = 0;k < N; k += m) {
+      let omegaReal = 1;
+      let omegaImag = 0;
+      for (let j = 0;j < mHalf; j++) {
+        const tReal = omegaReal * X[k + j + mHalf][0] - omegaImag * X[k + j + mHalf][1];
+        const tImag = omegaReal * X[k + j + mHalf][1] + omegaImag * X[k + j + mHalf][0];
+        const uReal = X[k + j][0];
+        const uImag = X[k + j][1];
+        X[k + j] = [uReal + tReal, uImag + tImag];
+        X[k + j + mHalf] = [uReal - tReal, uImag - tImag];
+        const tempReal = omegaReal * Math.cos(angleIncrement) - omegaImag * Math.sin(angleIncrement);
+        omegaImag = omegaReal * Math.sin(angleIncrement) + omegaImag * Math.cos(angleIncrement);
+        omegaReal = tempReal;
+      }
+    }
+  }
+  return X;
+};
+
+// src/fft/mag.ts
+var fftMag = function(fftBins) {
+  const ret = fftBins.map((f) => complexMagnitude(f));
+  return ret.slice(0, ret.length / 2);
+};
+
+// src/fft/smoothing.ts
+var smoothingPasses = 3;
+var smoothingPoints = 3;
+var smoothen = function(array) {
+  let lastArray = array;
+  const newArr = [];
+  for (let pass = 0;pass < smoothingPasses; pass++) {
+    const sidePoints = Math.floor(smoothingPoints / 2);
+    const cn = 1 / (2 * sidePoints + 1);
+    for (let i = 0;i < sidePoints; i++) {
+      newArr[i] = lastArray[i];
+      newArr[lastArray.length - i - 1] = lastArray[lastArray.length - i - 1];
+    }
+    for (let i = sidePoints;i < lastArray.length - sidePoints; i++) {
+      let sum = 0;
+      for (let n = -sidePoints;n <= sidePoints; n++) {
+        sum += cn * lastArray[i + n] + n;
+      }
+      newArr[i] = sum;
+    }
+    lastArray = newArr;
+  }
+  return newArr;
+};
+
+// src/fft/to-int-16.ts
+var toInt16 = (x) => x > 0 ? x * 32767 : x * 32768;
+
+// src/fft/get-visualization.ts
+var getVisualization = ({
+  sampleSize,
+  data,
+  sampleRate,
+  frame,
+  fps,
+  maxInt,
+  optimizeFor,
+  dataOffsetInSeconds
+}) => {
+  const isPowerOfTwo = sampleSize > 0 && (sampleSize & sampleSize - 1) === 0;
+  if (!isPowerOfTwo) {
+    throw new TypeError(`The argument "bars" must be a power of two. For example: 64, 128. Got instead: ${sampleSize}`);
+  }
+  if (!fps) {
+    throw new TypeError('The argument "fps" was not provided');
+  }
+  if (data.length < sampleSize) {
+    throw new TypeError("Audio data is not big enough to provide " + sampleSize + " bars.");
+  }
+  const start = Math.floor((frame / fps - dataOffsetInSeconds) * sampleRate);
+  const actualStart = Math.max(0, start - sampleSize / 2);
+  const ints = new Int16Array({
+    length: sampleSize
+  });
+  ints.set(data.subarray(actualStart, actualStart + sampleSize).map((x) => toInt16(x)));
+  const alg = optimizeFor === "accuracy" ? fftAccurate : fftFast;
+  const phasors = alg(ints);
+  const magnitudes = fftMag(phasors).map((p) => p);
+  return smoothen(magnitudes).map((m) => m / (sampleSize / 2) / maxInt);
+};
+
+// src/fft/max-value-cached.ts
+var getMax = (array) => {
+  let max = 0;
+  for (let i = 0;i < array.length; i++) {
+    const val = array[i];
+    if (val > max) {
+      max = val;
+    }
+  }
+  return max;
+};
+var cache2 = {};
+var getMaxPossibleMagnitude = (metadata) => {
+  if (cache2[metadata.resultId]) {
+    return cache2[metadata.resultId];
+  }
+  const result = toInt16(getMax(metadata.channelWaveforms[0]));
+  cache2[metadata.resultId] = result;
+  return result;
+};
+
+// src/visualize-audio.ts
+var cache3 = {};
+var visualizeAudioFrame = ({
+  audioData,
+  frame,
+  fps,
+  numberOfSamples,
+  optimizeFor,
+  dataOffsetInSeconds
+}) => {
+  const cacheKey = audioData.resultId + frame + fps + numberOfSamples;
+  if (cache3[cacheKey]) {
+    return cache3[cacheKey];
+  }
+  const maxInt = getMaxPossibleMagnitude(audioData);
+  return getVisualization({
+    sampleSize: numberOfSamples * 2,
+    data: audioData.channelWaveforms[0],
+    frame,
+    fps,
+    sampleRate: audioData.sampleRate,
+    maxInt,
+    optimizeFor,
+    dataOffsetInSeconds
+  });
+};
+var visualizeAudio = ({
+  smoothing = true,
+  optimizeFor = NoReactInternals2.ENABLE_V5_BREAKING_CHANGES ? "speed" : "accuracy",
+  dataOffsetInSeconds = 0,
+  ...parameters
+}) => {
+  if (!smoothing) {
+    return visualizeAudioFrame({
+      ...parameters,
+      optimizeFor,
+      dataOffsetInSeconds,
+      smoothing
+    });
+  }
+  const toSmooth = [
+    parameters.frame - 1,
+    parameters.frame,
+    parameters.frame + 1
+  ];
+  const all = toSmooth.map((s) => {
+    return visualizeAudioFrame({
+      ...parameters,
+      frame: s,
+      dataOffsetInSeconds,
+      optimizeFor,
+      smoothing
+    });
+  });
+  return new Array(parameters.numberOfSamples).fill(true).map((_x, i) => {
+    return new Array(toSmooth.length).fill(true).map((_, j) => {
+      return all[j][i];
+    }).reduce((a, b) => a + b, 0) / toSmooth.length;
+  });
+};
+// src/visualize-audio-waveform.ts
+var cache4 = {};
+var visualizeAudioWaveformFrame = ({
+  audioData,
+  frame,
+  fps,
+  numberOfSamples,
+  windowInSeconds,
+  channel,
+  dataOffsetInSeconds,
+  normalize = false
+}) => {
+  if (windowInSeconds * audioData.sampleRate < numberOfSamples) {
+    throw new TypeError(windowInSeconds + "s audiodata does not have " + numberOfSamples + " bars. Increase windowInSeconds or decrease numberOfSamples");
+  }
+  const cacheKey = audioData.resultId + frame + fps + numberOfSamples + "waveform" + dataOffsetInSeconds;
+  if (cache4[cacheKey]) {
+    return cache4[cacheKey];
+  }
+  const time = frame / fps;
+  const startTimeInSeconds = time - windowInSeconds / 2;
+  return getWaveformPortion({
+    audioData,
+    startTimeInSeconds,
+    durationInSeconds: windowInSeconds,
+    numberOfSamples,
+    outputRange: "minus-one-to-one",
+    channel,
+    dataOffsetInSeconds,
+    normalize
+  });
+};
+var visualizeAudioWaveform = (parameters) => {
+  const data = visualizeAudioWaveformFrame(parameters);
+  return data.map((value) => value.amplitude);
+};
+export {
+  visualizeAudioWaveform,
+  visualizeAudio,
+  useWindowedAudioData,
+  useAudioData,
+  getWaveformPortion,
+  getVideoMetadata,
+  getImageDimensions,
+  getAudioDurationInSeconds,
+  getAudioDuration,
+  getAudioData,
+  createSmoothSvgPath,
+  audioBufferToDataUrl
+};
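
Note (not part of the published diff): the bundle above re-exports the package's existing public API in ESM form. As a hedged illustration of how two of those exports are typically combined in a Remotion composition — the asset name, sample count, and styling below are hypothetical, not taken from this release:

import React from 'react';
import {useAudioData, visualizeAudio} from '@remotion/media-utils';
import {staticFile, useCurrentFrame, useVideoConfig} from 'remotion';

// Hypothetical asset bundled with the project.
const src = staticFile('audio.mp3');

export const AudioBars: React.FC = () => {
  const frame = useCurrentFrame();
  const {fps} = useVideoConfig();
  // null until the file has been fetched and decoded; delayRender() is handled by the hook.
  const audioData = useAudioData(src);

  if (!audioData) {
    return null;
  }

  // 16 frequency bins for the current frame, normalized to the 0..1 range.
  const amplitudes = visualizeAudio({audioData, frame, fps, numberOfSamples: 16});

  return (
    <div style={{display: 'flex', alignItems: 'flex-end', height: 200}}>
      {amplitudes.map((a, i) => (
        <div key={i} style={{width: 10, marginRight: 2, background: 'white', height: 200 * a}} />
      ))}
    </div>
  );
};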

package/dist/use-windowed-audio-data.js
CHANGED

@@ -54,6 +54,14 @@ const useWindowedAudioData = ({ src, frame, fps, windowInSeconds, channelIndex =
     if (!audioTrack) {
         throw new Error('No audio track found');
     }
+    if (await audioTrack.isLive()) {
+        throw new Error('Live streams are not currently supported by Remotion. Sorry! Source: ' +
+            src);
+    }
+    if (await audioTrack.isRelativeToUnixEpoch()) {
+        throw new Error('Streams with UNIX timestamps are not currently supported by Remotion. Sorry! Source: ' +
+            src);
+    }
     const canDecode = await audioTrack.canDecode();
     if (!canDecode) {
         throw new Error('Audio track cannot be decoded');
@@ -61,7 +69,8 @@ const useWindowedAudioData = ({ src, frame, fps, windowInSeconds, channelIndex =
     if (channelIndex >= audioTrack.numberOfChannels || channelIndex < 0) {
         throw new Error(`Invalid channel index ${channelIndex} for audio with ${audioTrack.numberOfChannels} channels`);
     }
-    const
+    const numberOfChannels = await audioTrack.getNumberOfChannels();
+    const sampleRate = await audioTrack.getSampleRate();
     const format = await input.getFormat();
     const isMatroska = format === mediabunny_1.MATROSKA || format === mediabunny_1.WEBM;
     if (isMounted.current) {
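
Note (not part of the published diff): the compiled change above adds the same live-stream and UNIX-epoch guards that appear in the ESM bundle. A hedged sketch of how `useWindowedAudioData()` is consumed so that only a few seconds of audio are decoded around the current frame — the asset name and window size are hypothetical:

import React from 'react';
import {useWindowedAudioData, visualizeAudioWaveform} from '@remotion/media-utils';
import {staticFile, useCurrentFrame, useVideoConfig} from 'remotion';

// Hypothetical asset; windowInSeconds must stay constant for the lifetime of the component.
const src = staticFile('podcast.wav');

export const Waveform: React.FC = () => {
  const frame = useCurrentFrame();
  const {fps} = useVideoConfig();

  // Only the previous, current and next window are decoded and kept in memory.
  const {audioData, dataOffsetInSeconds} = useWindowedAudioData({
    src,
    frame,
    fps,
    windowInSeconds: 10,
  });

  if (!audioData) {
    return null;
  }

  // 32 amplitude samples in the range [-1, 1] around the current time.
  const waveform = visualizeAudioWaveform({
    audioData,
    frame,
    fps,
    numberOfSamples: 32,
    windowInSeconds: 1 / fps,
    channel: 0,
    dataOffsetInSeconds,
  });

  return <pre>{waveform.map((w) => w.toFixed(2)).join(' ')}</pre>;
};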
package/package.json
CHANGED

@@ -3,7 +3,7 @@
     "url": "https://github.com/remotion-dev/remotion/tree/main/packages/media-utils"
   },
   "name": "@remotion/media-utils",
-  "version": "4.0.453",
+  "version": "4.0.454",
   "description": "Utilities for working with media files",
   "main": "dist/index.js",
   "sideEffects": false,
@@ -11,23 +11,25 @@
     "formatting": "oxfmt src --check",
     "format": "oxfmt src",
     "lint": "eslint src",
-    "make": "tsgo -d"
+    "make": "tsgo -d && bun --env-file=../.env.bundle bundle.ts"
   },
+  "types": "dist/index.d.ts",
+  "module": "dist/esm/index.mjs",
   "author": "Jonny Burger <jonny@remotion.dev>",
   "license": "MIT",
   "bugs": {
     "url": "https://github.com/remotion-dev/remotion/issues"
   },
   "dependencies": {
-    "remotion": "4.0.
-    "mediabunny": "1.
+    "remotion": "4.0.454",
+    "mediabunny": "1.42.0"
   },
   "peerDependencies": {
     "react": ">=16.8.0",
     "react-dom": ">=16.8.0"
   },
   "devDependencies": {
-    "@remotion/eslint-config-internal": "4.0.
+    "@remotion/eslint-config-internal": "4.0.454",
     "eslint": "9.19.0",
     "@typescript/native-preview": "7.0.0-dev.20260217.1"
   },
@@ -41,5 +43,14 @@
   "publishConfig": {
     "access": "public"
   },
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "types": "./dist/index.d.ts",
+      "module": "./dist/esm/index.mjs",
+      "import": "./dist/esm/index.mjs",
+      "require": "./dist/index.js"
+    }
+  },
   "homepage": "https://www.remotion.dev/docs/media-utils"
 }
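
Note (not part of the published diff): together with the new "module" and "types" fields, the added "exports" map makes entry-point resolution explicit. A hedged consumer-side sketch:

// ESM consumers and bundlers honoring the "import"/"module" conditions
// now resolve to the new dist/esm/index.mjs bundle:
import {getAudioData, visualizeAudio} from '@remotion/media-utils';

// CommonJS consumers keep resolving dist/index.js via the "require" condition:
// const {getAudioData} = require('@remotion/media-utils');

// "./package.json" remains importable, e.g. for tooling that reads the version field.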