@remotion/media-utils 4.0.239 → 4.0.240

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1 @@
+ export declare const combineFloat32Arrays: (arrays: Float32Array[]) => Float32Array;
@@ -0,0 +1,23 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.combineFloat32Arrays = void 0;
+ const combineFloat32Arrays = (arrays) => {
+ if (arrays.length === 0) {
+ return new Float32Array([]);
+ }
+ if (arrays.length === 1) {
+ return arrays[0];
+ }
+ let totalLength = 0;
+ for (const array of arrays) {
+ totalLength += array.length;
+ }
+ const result = new Float32Array(totalLength);
+ let offset = 0;
+ for (const array of arrays) {
+ result.set(array, offset);
+ offset += array.length;
+ }
+ return result;
+ };
+ exports.combineFloat32Arrays = combineFloat32Arrays;
@@ -0,0 +1 @@
+ export declare const fetchWithCorsCatch: (src: string, init?: RequestInit) => Promise<Response>;
@@ -0,0 +1,27 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.fetchWithCorsCatch = void 0;
+ const fetchWithCorsCatch = async (src, init) => {
+ try {
+ const response = await fetch(src, {
+ mode: 'cors',
+ referrerPolicy: 'no-referrer-when-downgrade',
+ ...init,
+ });
+ return response;
+ }
+ catch (err) {
+ const error = err;
+ if (
+ // Chrome
+ error.message.includes('Failed to fetch') ||
+ // Safari
+ error.message.includes('Load failed') ||
+ // Firefox
+ error.message.includes('NetworkError when attempting to fetch resource')) {
+ throw new TypeError(`Failed to read from ${src}: ${error.message}. Does the resource support CORS?`);
+ }
+ throw err;
+ }
+ };
+ exports.fetchWithCorsCatch = fetchWithCorsCatch;
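
The helper above is internal to the package (the rest of this diff consumes it via require("./fetch-with-cors-catch")) and simply forwards any RequestInit to fetch() on top of mode: 'cors', rewriting opaque network failures into a CORS hint. A minimal sketch of how it is used elsewhere in this diff; the function name probeHeader and the byte range are illustrative, not part of the package:

import {fetchWithCorsCatch} from './fetch-with-cors-catch';

// Fetch only the first bytes of a remote file; extra options such as a Range
// header or an AbortSignal are merged into the fetch() call.
const probeHeader = async (src: string): Promise<Uint8Array> => {
  const response = await fetchWithCorsCatch(src, {
    headers: {range: 'bytes=0-256'},
  });
  return new Uint8Array(await response.arrayBuffer());
};
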
@@ -1,5 +1,5 @@
  export type OptimizeFor = 'accuracy' | 'speed';
- export declare const getVisualization: ({ sampleSize, data, sampleRate, frame, fps, maxInt, optimizeFor, }: {
+ export declare const getVisualization: ({ sampleSize, data, sampleRate, frame, fps, maxInt, optimizeFor, dataOffsetInSeconds, }: {
  sampleSize: number;
  data: Float32Array;
  frame: number;
@@ -7,4 +7,5 @@ export declare const getVisualization: ({ sampleSize, data, sampleRate, frame, f
  fps: number;
  maxInt: number;
  optimizeFor: OptimizeFor;
+ dataOffsetInSeconds: number;
  }) => number[];
@@ -8,7 +8,7 @@ const fft_fast_1 = require("./fft-fast");
  const mag_1 = require("./mag");
  const smoothing_1 = require("./smoothing");
  const to_int_16_1 = require("./to-int-16");
- const getVisualization = ({ sampleSize, data, sampleRate, frame, fps, maxInt, optimizeFor = 'accuracy', }) => {
+ const getVisualization = ({ sampleSize, data, sampleRate, frame, fps, maxInt, optimizeFor, dataOffsetInSeconds, }) => {
  const isPowerOfTwo = sampleSize > 0 && (sampleSize & (sampleSize - 1)) === 0;
  if (!isPowerOfTwo) {
  throw new TypeError(`The argument "bars" must be a power of two. For example: 64, 128. Got instead: ${sampleSize}`);
@@ -19,7 +19,7 @@ const getVisualization = ({ sampleSize, data, sampleRate, frame, fps, maxInt, op
  if (data.length < sampleSize) {
  throw new TypeError('Audio data is not big enough to provide ' + sampleSize + ' bars.');
  }
- const start = Math.floor((frame / fps) * sampleRate);
+ const start = Math.floor((frame / fps - dataOffsetInSeconds) * sampleRate);
  const actualStart = Math.max(0, start - sampleSize / 2);
  const ints = new Int16Array({
  length: sampleSize,
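
The behavioral change in this hunk is the start sample: when the Float32Array passed in is only a window of the full audio, dataOffsetInSeconds converts the composition time into the window's local coordinates before sampling. A small worked example, assuming 30 fps and a 48000 Hz sample rate (both values are illustrative):

// Frame 90 at 30 fps is 3.0 s into the composition.
// If the data only covers the window starting at 2.0 s,
// the read position inside that window is:
const sampleRate = 48000;
const dataOffsetInSeconds = 2;
const start = Math.floor((90 / 30 - dataOffsetInSeconds) * sampleRate); // (3.0 - 2.0) * 48000 = 48000
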
@@ -1,32 +1,11 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.getAudioData = void 0;
+ const fetch_with_cors_catch_1 = require("./fetch-with-cors-catch");
  const is_remote_asset_1 = require("./is-remote-asset");
  const p_limit_1 = require("./p-limit");
  const metadataCache = {};
  const limit = (0, p_limit_1.pLimit)(3);
- const fetchWithCorsCatch = async (src) => {
- try {
- const response = await fetch(src, {
- mode: 'cors',
- referrerPolicy: 'no-referrer-when-downgrade',
- });
- return response;
- }
- catch (err) {
- const error = err;
- if (
- // Chrome
- error.message.includes('Failed to fetch') ||
- // Safari
- error.message.includes('Load failed') ||
- // Firefox
- error.message.includes('NetworkError when attempting to fetch resource')) {
- throw new TypeError(`Failed to read from ${src}: ${error.message}. Does the resource support CORS?`);
- }
- throw err;
- }
- };
  const fn = async (src, options) => {
  var _a;
  if (metadataCache[src]) {
@@ -38,7 +17,7 @@ const fn = async (src, options) => {
  const audioContext = new AudioContext({
  sampleRate: (_a = options === null || options === void 0 ? void 0 : options.sampleRate) !== null && _a !== void 0 ? _a : 48000,
  });
- const response = await fetchWithCorsCatch(src);
+ const response = await (0, fetch_with_cors_catch_1.fetchWithCorsCatch)(src);
  const arrayBuffer = await response.arrayBuffer();
  const wave = await audioContext.decodeAudioData(arrayBuffer);
  const channelWaveforms = new Array(wave.numberOfChannels)
@@ -0,0 +1,12 @@
+ export declare const getPartialWaveData: ({ dataOffset, src, bitsPerSample, channelIndex, sampleRate, fromSeconds, toSeconds, blockAlign, fileSize, signal, }: {
+ dataOffset: number;
+ src: string;
+ bitsPerSample: number;
+ channelIndex: number;
+ sampleRate: number;
+ fromSeconds: number;
+ toSeconds: number;
+ blockAlign: number;
+ fileSize: number;
+ signal: AbortSignal;
+ }) => Promise<Float32Array>;
@@ -0,0 +1,37 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.getPartialWaveData = void 0;
+ const fetch_with_cors_catch_1 = require("./fetch-with-cors-catch");
+ const probe_wave_file_1 = require("./probe-wave-file");
+ const getPartialWaveData = async ({ dataOffset, src, bitsPerSample, channelIndex, sampleRate, fromSeconds, toSeconds, blockAlign, fileSize, signal, }) => {
+ const startByte = dataOffset + Math.floor(fromSeconds * sampleRate) * blockAlign;
+ const endByte = Math.min(fileSize - 1, dataOffset + Math.floor(toSeconds * sampleRate - 1) * blockAlign);
+ const response = await (0, fetch_with_cors_catch_1.fetchWithCorsCatch)(src, {
+ headers: {
+ range: `bytes=${startByte}-${endByte}`,
+ },
+ signal,
+ });
+ if (response.status !== 206) {
+ throw new Error(`Tried to read bytes ${startByte}-${endByte} from ${src}, but the response status code was not 206. This means the server might not support returning a partial response.`);
+ }
+ const arrayBuffer = await response.arrayBuffer();
+ const uintArray = new Uint8Array(arrayBuffer);
+ const samples = new Float32Array(uintArray.length / blockAlign);
+ for (let i = 0; i < uintArray.length; i += blockAlign) {
+ const sampleStart = i + channelIndex * (bitsPerSample / 8);
+ let sample;
+ if (bitsPerSample === 16) {
+ sample = (0, probe_wave_file_1.getInt16AsFloat)(uintArray, sampleStart);
+ }
+ else if (bitsPerSample === 8) {
+ sample = (0, probe_wave_file_1.getInt8AsFloat)(uintArray, sampleStart);
+ }
+ else {
+ throw new Error(`Unsupported bits per sample: ${bitsPerSample}`);
+ }
+ samples[i / blockAlign] = sample;
+ }
+ return samples;
+ };
+ exports.getPartialWaveData = getPartialWaveData;
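
A sketch of how this function is meant to be combined with probeWaveFile (added further down in this diff); both are exported from the package index below. The URL handling and the readOneSecond wrapper are illustrative, not part of the package, and the target must be an 8- or 16-bit PCM WAV file served with Range-request support:

import {getPartialWaveData, probeWaveFile} from '@remotion/media-utils';

const readOneSecond = async (src: string): Promise<Float32Array> => {
  const probe = await probeWaveFile(src); // dataOffset, blockAlign, sampleRate, fileSize, ...
  const controller = new AbortController();
  // Reads roughly one second (~probe.sampleRate samples) of channel 0.
  return getPartialWaveData({
    src,
    dataOffset: probe.dataOffset,
    bitsPerSample: probe.bitsPerSample,
    blockAlign: probe.blockAlign,
    sampleRate: probe.sampleRate,
    fileSize: probe.fileSize,
    channelIndex: 0,
    fromSeconds: 0,
    toSeconds: 1,
    signal: controller.signal,
  });
};
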
@@ -0,0 +1 @@
+ export declare const getPartialAudioData: (src: string) => Promise<void>;
@@ -0,0 +1,87 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.getPartialAudioData = void 0;
+ const fetch_with_cors_catch_1 = require("./fetch-with-cors-catch");
+ const toUint32 = (bytes) => {
+ if (bytes.length !== 4) {
+ throw new Error('toUint32() requires exactly 4 bytes');
+ }
+ const val1 = bytes[3];
+ const val2 = bytes[2];
+ const val3 = bytes[1];
+ const val4 = bytes[0];
+ return (val1 << 24) | (val2 << 16) | (val3 << 8) | val4;
+ };
+ const toUint16 = (bytes) => {
+ if (bytes.length !== 2) {
+ throw new Error('toUint16() requires exactly 2 bytes');
+ }
+ const val1 = bytes[1];
+ const val2 = bytes[0];
+ return (val1 << 8) | val2;
+ };
+ const getPartialAudioData = async (src) => {
+ const response = await (0, fetch_with_cors_catch_1.fetchWithCorsCatch)(src, {
+ headers: {
+ range: 'bytes=0-256',
+ },
+ });
+ if (response.status !== 206) {
+ throw new Error(`Tried to read bytes 0-256 from ${src}, but the response status code was not 206. This means the server might not support returning a partial response.`);
+ }
+ const buffer = await response.arrayBuffer();
+ const uintArray = new Uint8Array(buffer);
+ const header = uintArray.slice(0, 44);
+ const shouldBeRiff = new TextDecoder().decode(uintArray.slice(0, 4));
+ if (shouldBeRiff !== 'RIFF') {
+ throw new Error('getPartialAudioData() requires a WAVE file, but the first bytes are not RIFF. ');
+ }
+ const size = toUint32(uintArray.slice(4, 8));
+ const shouldBeWAVE = new TextDecoder().decode(uintArray.slice(8, 12));
+ if (shouldBeWAVE !== 'WAVE') {
+ throw new Error('getPartialAudioData() requires a WAVE file, but the bytes 8-11 are not "WAVE". ');
+ }
+ const shouldBeFmt = new TextDecoder().decode(uintArray.slice(12, 16));
+ if (shouldBeFmt !== 'fmt ') {
+ throw new Error('getPartialAudioData() requires a WAVE file, but the bytes 12-15 are not "fmt ". ');
+ }
+ const chunkSize = toUint32(uintArray.slice(16, 20));
+ const audioFormat = toUint16(uintArray.slice(20, 22));
+ if (audioFormat !== 1) {
+ throw new Error('getPartialAudioData() supports only a WAVE file with PCM audio format, but the audio format is not PCM. ');
+ }
+ const numberOfChannels = toUint16(uintArray.slice(22, 24));
+ const sampleRate = toUint32(uintArray.slice(24, 28));
+ const byteRate = toUint32(uintArray.slice(28, 32));
+ const blockAlign = toUint16(uintArray.slice(32, 34));
+ const bitsPerSample = toUint16(uintArray.slice(34, 36));
+ let offset = 36;
+ const shouldBeDataOrList = new TextDecoder().decode(uintArray.slice(offset, offset + 4));
+ if (shouldBeDataOrList === 'LIST') {
+ const listSize = toUint32(uintArray.slice(40, 44));
+ offset += listSize;
+ offset += 8;
+ }
+ const shouldBeData = new TextDecoder().decode(uintArray.slice(offset, offset + 4));
+ if (shouldBeData !== 'data') {
+ throw new Error('getPartialAudioData() requires a WAVE file, but the bytes 36-39 are not "data". ');
+ }
+ const dataSize = toUint32(uintArray.slice(offset + 4, offset + 8));
+ const numSamples = parseInt((chunkSize / blockAlign), 10);
+ console.log({
+ response,
+ buffer,
+ header,
+ size,
+ chunkSize,
+ audioFormat,
+ numberOfChannels,
+ sampleRate,
+ byteRate,
+ blockAlign,
+ bitsPerSample,
+ dataSize,
+ numSamples,
+ });
+ };
+ exports.getPartialAudioData = getPartialAudioData;
package/dist/index.d.ts CHANGED
@@ -2,9 +2,12 @@ export { audioBufferToDataUrl } from './audio-buffer/audio-url-helpers';
  export { getAudioData } from './get-audio-data';
  export { getAudioDuration, getAudioDurationInSeconds, } from './get-audio-duration-in-seconds';
  export { getImageDimensions } from './get-image-dimensions';
+ export { getPartialWaveData } from './get-partial-wave-data';
  export { getVideoMetadata } from './get-video-metadata';
  export { getWaveformPortion } from './get-waveform-portion';
+ export { WaveProbe, probeWaveFile } from './probe-wave-file';
  export * from './types';
  export type { AudioData, VideoMetadata as VideoData } from './types';
  export { useAudioData } from './use-audio-data';
- export { visualizeAudio } from './visualize-audio';
+ export { UseWindowedAudioDataOptions, UseWindowedAudioDataReturnValue, useWindowedAudioData, } from './use-windowed-audio-data';
+ export { VisualizeAudioOptions, visualizeAudio } from './visualize-audio';
package/dist/index.js CHANGED
@@ -14,7 +14,7 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
  for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
  };
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.visualizeAudio = exports.useAudioData = exports.getWaveformPortion = exports.getVideoMetadata = exports.getImageDimensions = exports.getAudioDurationInSeconds = exports.getAudioDuration = exports.getAudioData = exports.audioBufferToDataUrl = void 0;
+ exports.visualizeAudio = exports.useWindowedAudioData = exports.useAudioData = exports.probeWaveFile = exports.getWaveformPortion = exports.getVideoMetadata = exports.getPartialWaveData = exports.getImageDimensions = exports.getAudioDurationInSeconds = exports.getAudioDuration = exports.getAudioData = exports.audioBufferToDataUrl = void 0;
  var audio_url_helpers_1 = require("./audio-buffer/audio-url-helpers");
  Object.defineProperty(exports, "audioBufferToDataUrl", { enumerable: true, get: function () { return audio_url_helpers_1.audioBufferToDataUrl; } });
  var get_audio_data_1 = require("./get-audio-data");
@@ -24,12 +24,18 @@ Object.defineProperty(exports, "getAudioDuration", { enumerable: true, get: func
  Object.defineProperty(exports, "getAudioDurationInSeconds", { enumerable: true, get: function () { return get_audio_duration_in_seconds_1.getAudioDurationInSeconds; } });
  var get_image_dimensions_1 = require("./get-image-dimensions");
  Object.defineProperty(exports, "getImageDimensions", { enumerable: true, get: function () { return get_image_dimensions_1.getImageDimensions; } });
+ var get_partial_wave_data_1 = require("./get-partial-wave-data");
+ Object.defineProperty(exports, "getPartialWaveData", { enumerable: true, get: function () { return get_partial_wave_data_1.getPartialWaveData; } });
  var get_video_metadata_1 = require("./get-video-metadata");
  Object.defineProperty(exports, "getVideoMetadata", { enumerable: true, get: function () { return get_video_metadata_1.getVideoMetadata; } });
  var get_waveform_portion_1 = require("./get-waveform-portion");
  Object.defineProperty(exports, "getWaveformPortion", { enumerable: true, get: function () { return get_waveform_portion_1.getWaveformPortion; } });
+ var probe_wave_file_1 = require("./probe-wave-file");
+ Object.defineProperty(exports, "probeWaveFile", { enumerable: true, get: function () { return probe_wave_file_1.probeWaveFile; } });
  __exportStar(require("./types"), exports);
  var use_audio_data_1 = require("./use-audio-data");
  Object.defineProperty(exports, "useAudioData", { enumerable: true, get: function () { return use_audio_data_1.useAudioData; } });
+ var use_windowed_audio_data_1 = require("./use-windowed-audio-data");
+ Object.defineProperty(exports, "useWindowedAudioData", { enumerable: true, get: function () { return use_windowed_audio_data_1.useWindowedAudioData; } });
  var visualize_audio_1 = require("./visualize-audio");
  Object.defineProperty(exports, "visualizeAudio", { enumerable: true, get: function () { return visualize_audio_1.visualizeAudio; } });
@@ -0,0 +1,12 @@
+ export declare const getInt16AsFloat: (bytes: Uint8Array, offset: number) => number;
+ export declare const getInt8AsFloat: (bytes: Uint8Array, offset: number) => number;
+ export type WaveProbe = {
+ dataOffset: number;
+ bitsPerSample: number;
+ numberOfChannels: number;
+ sampleRate: number;
+ blockAlign: number;
+ fileSize: number;
+ durationInSeconds: number;
+ };
+ export declare const probeWaveFile: (src: string) => Promise<WaveProbe>;
@@ -0,0 +1,92 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.probeWaveFile = exports.getInt8AsFloat = exports.getInt16AsFloat = void 0;
+ const fetch_with_cors_catch_1 = require("./fetch-with-cors-catch");
+ const getUint32 = (bytes, offset) => {
+ const val1 = bytes[offset + 3];
+ const val2 = bytes[offset + 2];
+ const val3 = bytes[offset + 1];
+ const val4 = bytes[offset];
+ return (val1 << 24) | (val2 << 16) | (val3 << 8) | val4;
+ };
+ const getUint16 = (bytes, offset) => {
+ const val1 = bytes[offset + 1];
+ const val2 = bytes[offset];
+ return (val1 << 8) | val2;
+ };
+ const getInt16AsFloat = (bytes, offset) => {
+ if (offset >= bytes.length) {
+ throw new Error(`Tried to read a 16-bit integer from offset ${offset} but the array length is ${bytes.length}`);
+ }
+ const val1 = bytes[offset + 1];
+ const val2 = bytes[offset];
+ return ((val1 << 8) | val2) / 32768;
+ };
+ exports.getInt16AsFloat = getInt16AsFloat;
+ const getInt8AsFloat = (bytes, offset) => {
+ if (offset >= bytes.length) {
+ throw new Error(`Tried to read an 8-bit integer from offset ${offset} but the array length is ${bytes.length}`);
+ }
+ return (bytes[offset] - 128) / 128;
+ };
+ exports.getInt8AsFloat = getInt8AsFloat;
+ const probeWaveFile = async (src) => {
+ const response = await (0, fetch_with_cors_catch_1.fetchWithCorsCatch)(src, {
+ headers: {
+ range: 'bytes=0-256',
+ },
+ });
+ if (response.status !== 206) {
+ throw new Error(`Tried to read bytes 0-256 from ${src}, but the response status code was not 206. This means the server might not support returning a partial response.`);
+ }
+ const buffer = await response.arrayBuffer();
+ const uintArray = new Uint8Array(buffer);
+ const shouldBeRiff = new TextDecoder().decode(uintArray.slice(0, 4));
+ if (shouldBeRiff !== 'RIFF') {
+ throw new Error('getPartialAudioData() requires a WAVE file, but the first bytes are not RIFF. ');
+ }
+ const size = getUint32(uintArray, 4);
+ const shouldBeWAVE = new TextDecoder().decode(uintArray.slice(8, 12));
+ if (shouldBeWAVE !== 'WAVE') {
+ throw new Error('getPartialAudioData() requires a WAVE file, but the bytes 8-11 are not "WAVE". ');
+ }
+ const shouldBeFmt = new TextDecoder().decode(uintArray.slice(12, 16));
+ if (shouldBeFmt !== 'fmt ') {
+ throw new Error('getPartialAudioData() requires a WAVE file, but the bytes 12-15 are not "fmt ". ');
+ }
+ // const chunkSize = toUint32(uintArray.slice(16, 20));
+ const audioFormat = getUint16(uintArray, 20);
+ if (audioFormat !== 1) {
+ throw new Error('getPartialAudioData() supports only a WAVE file with PCM audio format, but the audio format is not PCM. ');
+ }
+ const numberOfChannels = getUint16(uintArray, 22);
+ const sampleRate = getUint32(uintArray, 24);
+ // const byteRate = toUint32(uintArray.slice(28, 32));
+ const blockAlign = getUint16(uintArray, 32);
+ const bitsPerSample = getUint16(uintArray, 34);
+ let offset = 36;
+ const shouldBeDataOrList = new TextDecoder().decode(uintArray.slice(offset, offset + 4));
+ if (shouldBeDataOrList === 'LIST') {
+ const listSize = getUint32(uintArray, 40);
+ offset += listSize;
+ offset += 8;
+ }
+ const shouldBeData = new TextDecoder().decode(uintArray.slice(offset, offset + 4));
+ if (shouldBeData !== 'data') {
+ throw new Error('getPartialAudioData() requires a WAVE file, but the bytes 36-39 are not "data". ');
+ }
+ const dataSize = getUint32(uintArray, offset + 4);
+ if (dataSize + offset !== size) {
+ throw new Error(`getPartialAudioData() requires a WAVE file, but: Expected ${dataSize + offset}, got ${size}. `);
+ }
+ return {
+ dataOffset: offset + 8,
+ bitsPerSample,
+ numberOfChannels,
+ sampleRate,
+ blockAlign,
+ fileSize: size,
+ durationInSeconds: dataSize / (sampleRate * blockAlign),
+ };
+ };
+ exports.probeWaveFile = probeWaveFile;
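
For orientation, the fields returned by probeWaveFile follow directly from the canonical 44-byte WAV header parsed above. A worked example, assuming a 16-bit stereo PCM file at 44100 Hz with a 17,640,000-byte data chunk and no LIST chunk (all numbers are illustrative):

const sampleRate = 44100;
const blockAlign = 2 * (16 / 8); // 2 channels * 2 bytes per sample = 4 bytes per sample frame
const dataSize = 17640000; // size of the "data" chunk in bytes
const durationInSeconds = dataSize / (sampleRate * blockAlign); // 17640000 / 176400 = 100 s
const dataOffset = 36 + 8; // 44 when there is no LIST chunk, matching `offset + 8` above
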
@@ -0,0 +1,13 @@
+ import type { AudioData } from './types';
+ export type UseWindowedAudioDataOptions = {
+ src: string;
+ frame: number;
+ fps: number;
+ windowInSeconds: number;
+ channelIndex?: number;
+ };
+ export type UseWindowedAudioDataReturnValue = {
+ audioData: AudioData | null;
+ dataOffsetInSeconds: number;
+ };
+ export declare const useWindowedAudioData: ({ src, frame, fps, windowInSeconds, channelIndex, }: UseWindowedAudioDataOptions) => UseWindowedAudioDataReturnValue;
@@ -0,0 +1,152 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.useWindowedAudioData = void 0;
+ const react_1 = require("react");
+ const remotion_1 = require("remotion");
+ const combine_float32_arrays_1 = require("./combine-float32-arrays");
+ const get_partial_wave_data_1 = require("./get-partial-wave-data");
+ const is_remote_asset_1 = require("./is-remote-asset");
+ const probe_wave_file_1 = require("./probe-wave-file");
+ const useWindowedAudioData = ({ src, frame, fps, windowInSeconds, channelIndex = 0, }) => {
+ const isMounted = (0, react_1.useRef)(true);
+ const [waveProbe, setWaveProbe] = (0, react_1.useState)(null);
+ const [waveFormMap, setWaveformMap] = (0, react_1.useState)({});
+ const requests = (0, react_1.useRef)({});
+ const [initialWindowInSeconds] = (0, react_1.useState)(windowInSeconds);
+ if (windowInSeconds !== initialWindowInSeconds) {
+ throw new Error('windowInSeconds cannot be changed dynamically');
+ }
+ (0, react_1.useEffect)(() => {
+ isMounted.current = true;
+ return () => {
+ isMounted.current = false;
+ };
+ }, []);
+ const fetchMetadata = (0, react_1.useCallback)(async () => {
+ const handle = (0, remotion_1.delayRender)(`Waiting for audio metadata with src="${src}" to be loaded`);
+ try {
+ const data = await (0, probe_wave_file_1.probeWaveFile)(src);
+ if (isMounted.current) {
+ setWaveProbe(data);
+ }
+ (0, remotion_1.continueRender)(handle);
+ }
+ catch (err) {
+ (0, remotion_1.cancelRender)(err);
+ }
+ }, [src]);
+ (0, react_1.useLayoutEffect)(() => {
+ fetchMetadata();
+ }, [fetchMetadata]);
+ const currentTime = frame / fps;
+ const currentWindowIndex = Math.floor(currentTime / windowInSeconds);
+ const windowsToFetch = (0, react_1.useMemo)(() => {
+ if (!waveProbe) {
+ return [];
+ }
+ const maxWindowIndex = Math.floor(waveProbe.durationInSeconds / windowInSeconds);
+ // needs to be in order because we rely on the concatenation below
+ return [
+ currentWindowIndex === 0 ? null : currentWindowIndex - 1,
+ currentWindowIndex,
+ currentWindowIndex + 1 > maxWindowIndex ? null : currentWindowIndex + 1,
+ ]
+ .filter((i) => i !== null)
+ .filter((i) => i >= 0);
+ }, [currentWindowIndex, waveProbe, windowInSeconds]);
+ const fetchAndSetWaveformData = (0, react_1.useCallback)(async (windowIndex) => {
+ if (!waveProbe) {
+ throw new Error('Wave probe is not loaded yet');
+ }
+ const controller = new AbortController();
+ requests.current[windowIndex] = controller;
+ const partialWaveData = await (0, get_partial_wave_data_1.getPartialWaveData)({
+ bitsPerSample: waveProbe.bitsPerSample,
+ blockAlign: waveProbe.blockAlign,
+ channelIndex,
+ dataOffset: waveProbe.dataOffset,
+ fileSize: waveProbe.fileSize,
+ fromSeconds: windowIndex * windowInSeconds,
+ sampleRate: waveProbe.sampleRate,
+ src,
+ toSeconds: (windowIndex + 1) * windowInSeconds,
+ signal: controller.signal,
+ });
+ requests.current[windowIndex] = null;
+ setWaveformMap((prev) => {
+ const entries = Object.keys(prev);
+ const windowsToClear = entries.filter((entry) => !windowsToFetch.includes(Number(entry)));
+ return {
+ ...prev,
+ // Delete windows that are not needed anymore
+ ...windowsToClear.reduce((acc, key) => {
+ acc[key] = null;
+ return acc;
+ }, {}),
+ // Add the new window
+ [windowIndex]: partialWaveData,
+ };
+ });
+ }, [channelIndex, src, waveProbe, windowInSeconds, windowsToFetch]);
+ (0, react_1.useEffect)(() => {
+ if (!waveProbe) {
+ return;
+ }
+ const windowsToClear = Object.keys(requests.current).filter((entry) => !windowsToFetch.includes(Number(entry)));
+ for (const windowIndex of windowsToClear) {
+ const controller = requests.current[windowIndex];
+ if (controller) {
+ controller.abort();
+ requests.current[windowIndex] = null;
+ }
+ }
+ Promise.all(windowsToFetch.map((windowIndex) => {
+ return fetchAndSetWaveformData(windowIndex);
+ })).catch((err) => {
+ var _a, _b, _c, _d, _e;
+ if ((_a = err.stack) === null || _a === void 0 ? void 0 : _a.includes('Cancelled')) {
+ return;
+ }
+ if ((_c = (_b = err.stack) === null || _b === void 0 ? void 0 : _b.toLowerCase()) === null || _c === void 0 ? void 0 : _c.includes('aborted')) {
+ return;
+ }
+ // firefox
+ if ((_e = (_d = err.message) === null || _d === void 0 ? void 0 : _d.toLowerCase()) === null || _e === void 0 ? void 0 : _e.includes('aborted')) {
+ return;
+ }
+ (0, remotion_1.cancelRender)(err);
+ });
+ }, [fetchAndSetWaveformData, waveProbe, windowsToFetch]);
+ const currentAudioData = (0, react_1.useMemo)(() => {
+ if (!waveProbe) {
+ return null;
+ }
+ if (windowsToFetch.some((i) => !waveFormMap[i])) {
+ return null;
+ }
+ const windows = windowsToFetch.map((i) => waveFormMap[i]);
+ const data = (0, combine_float32_arrays_1.combineFloat32Arrays)(windows);
+ return {
+ channelWaveforms: [data],
+ durationInSeconds: waveProbe.durationInSeconds,
+ isRemote: (0, is_remote_asset_1.isRemoteAsset)(src),
+ numberOfChannels: 1,
+ resultId: String(Math.random()),
+ sampleRate: waveProbe.sampleRate,
+ };
+ }, [src, waveFormMap, waveProbe, windowsToFetch]);
+ (0, react_1.useLayoutEffect)(() => {
+ if (currentAudioData) {
+ return;
+ }
+ const handle = (0, remotion_1.delayRender)(`Waiting for audio data with src="${src}" to be loaded`);
+ return () => {
+ (0, remotion_1.continueRender)(handle);
+ };
+ }, [currentAudioData, src]);
+ return {
+ audioData: currentAudioData,
+ dataOffsetInSeconds: windowsToFetch[0] * windowInSeconds,
+ };
+ };
+ exports.useWindowedAudioData = useWindowedAudioData;
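
A sketch of how the hook is meant to be consumed inside a Remotion component, based on the declarations above and on the visualizeAudio change further down in this diff: the hook keeps only a few windows of waveform data in memory, and the returned dataOffsetInSeconds has to be forwarded to visualizeAudio so the bars line up with composition time. The src URL, window size, bar count and styling are placeholders:

import React from 'react';
import {useCurrentFrame, useVideoConfig} from 'remotion';
import {useWindowedAudioData, visualizeAudio} from '@remotion/media-utils';

export const Waveform: React.FC = () => {
  const frame = useCurrentFrame();
  const {fps} = useVideoConfig();
  const {audioData, dataOffsetInSeconds} = useWindowedAudioData({
    src: 'https://example.com/audio.wav', // must be a PCM WAV served with Range support
    frame,
    fps,
    windowInSeconds: 10,
  });
  if (!audioData) {
    return null; // windows are still being fetched
  }
  const amplitudes = visualizeAudio({
    audioData,
    frame,
    fps,
    numberOfSamples: 32,
    dataOffsetInSeconds, // audioData only covers the currently loaded windows
  });
  return (
    <div style={{display: 'flex', alignItems: 'flex-end'}}>
      {amplitudes.map((a, i) => (
        <div key={i} style={{width: 4, height: a * 100, background: 'white'}} />
      ))}
    </div>
  );
};
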
@@ -1,12 +1,16 @@
+ import type { OptimizeFor } from './fft/get-visualization';
  import type { AudioData } from './types';
- type FnParameters = {
+ type MandatoryVisualizeAudioOptions = {
  audioData: AudioData;
  frame: number;
  fps: number;
  numberOfSamples: number;
- optimizeFor?: 'accuracy' | 'speed';
  };
- export declare const visualizeAudio: ({ smoothing, ...parameters }: FnParameters & {
- smoothing?: boolean;
- }) => number[];
+ type OptionalVisualizeAudioOptions = {
+ optimizeFor: OptimizeFor;
+ dataOffsetInSeconds: number;
+ smoothing: boolean;
+ };
+ export type VisualizeAudioOptions = MandatoryVisualizeAudioOptions & Partial<OptionalVisualizeAudioOptions>;
+ export declare const visualizeAudio: ({ smoothing, optimizeFor, dataOffsetInSeconds, ...parameters }: MandatoryVisualizeAudioOptions & Partial<OptionalVisualizeAudioOptions> & {}) => number[];
  export {};
@@ -10,25 +10,33 @@ const cache = {};
  * @description part of @remotion/media-utils
  * @see [Documentation](https://www.remotion.dev/docs/visualize-audio)
  */
- const visualizeAudioFrame = ({ audioData: metadata, frame, fps, numberOfSamples, optimizeFor, }) => {
- const cacheKey = metadata.resultId + frame + fps + numberOfSamples;
+ const visualizeAudioFrame = ({ audioData, frame, fps, numberOfSamples, optimizeFor, dataOffsetInSeconds, }) => {
+ const cacheKey = audioData.resultId + frame + fps + numberOfSamples;
  if (cache[cacheKey]) {
  return cache[cacheKey];
  }
- const maxInt = (0, max_value_cached_1.getMaxPossibleMagnitude)(metadata);
+ const maxInt = (0, max_value_cached_1.getMaxPossibleMagnitude)(audioData);
  return (0, get_visualization_1.getVisualization)({
  sampleSize: numberOfSamples * 2,
- data: metadata.channelWaveforms[0],
+ data: audioData.channelWaveforms[0],
  frame,
  fps,
- sampleRate: metadata.sampleRate,
+ sampleRate: audioData.sampleRate,
  maxInt,
- optimizeFor: optimizeFor !== null && optimizeFor !== void 0 ? optimizeFor : (no_react_1.NoReactInternals.ENABLE_V5_BREAKING_CHANGES ? 'speed' : 'accuracy'),
+ optimizeFor,
+ dataOffsetInSeconds,
  });
  };
- const visualizeAudio = ({ smoothing = true, ...parameters }) => {
+ const visualizeAudio = ({ smoothing = true, optimizeFor = no_react_1.NoReactInternals.ENABLE_V5_BREAKING_CHANGES
+ ? 'speed'
+ : 'accuracy', dataOffsetInSeconds = 0, ...parameters }) => {
  if (!smoothing) {
- return visualizeAudioFrame(parameters);
+ return visualizeAudioFrame({
+ ...parameters,
+ optimizeFor,
+ dataOffsetInSeconds,
+ smoothing,
+ });
  }
  const toSmooth = [
  parameters.frame - 1,
@@ -36,7 +44,13 @@ const visualizeAudio = ({ smoothing = true, ...parameters }) => {
  parameters.frame + 1,
  ];
  const all = toSmooth.map((s) => {
- return visualizeAudioFrame({ ...parameters, frame: s });
+ return visualizeAudioFrame({
+ ...parameters,
+ frame: s,
+ dataOffsetInSeconds,
+ optimizeFor,
+ smoothing,
+ });
  });
  return new Array(parameters.numberOfSamples).fill(true).map((_x, i) => {
  return (new Array(toSmooth.length)
package/eslint.config.mjs CHANGED
@@ -1,6 +1,6 @@
  import {remotionFlatConfig} from '@remotion/eslint-config-internal';
 
- const config = remotionFlatConfig({react: false});
+ const config = remotionFlatConfig({react: true});
 
  export default {
  ...config,
package/package.json CHANGED
@@ -3,7 +3,7 @@
  "url": "https://github.com/remotion-dev/remotion/tree/main/packages/media-utils"
  },
  "name": "@remotion/media-utils",
- "version": "4.0.239",
+ "version": "4.0.240",
  "description": "Utilities for working with media files",
  "main": "dist/index.js",
  "sideEffects": false,
@@ -13,7 +13,7 @@
  "url": "https://github.com/remotion-dev/remotion/issues"
  },
  "dependencies": {
- "remotion": "4.0.239"
+ "remotion": "4.0.240"
  },
  "peerDependencies": {
  "react": ">=16.8.0",
@@ -21,7 +21,7 @@
  },
  "devDependencies": {
  "eslint": "9.14.0",
- "@remotion/eslint-config-internal": "4.0.239"
+ "@remotion/eslint-config-internal": "4.0.240"
  },
  "keywords": [
  "remotion",