@remotion/media-utils 4.0.344 → 4.0.346
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/get-partial-media-data-2.d.ts +7 -0
- package/dist/get-partial-media-data-2.js +104 -0
- package/dist/get-partial-media-data.d.ts +7 -0
- package/dist/get-partial-media-data.js +105 -0
- package/dist/use-audio-data.js +4 -3
- package/dist/use-windowed-audio-data.js +8 -7
- package/package.json +3 -3
package/dist/get-partial-media-data-2.js
ADDED
@@ -0,0 +1,104 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getPartialMediaData = void 0;
+const media_parser_1 = require("@remotion/media-parser");
+const webcodecs_1 = require("@remotion/webcodecs");
+const getPartialMediaData = async ({ src, fromSeconds, toSeconds, channelIndex, signal, }) => {
+    const controller = (0, media_parser_1.mediaParserController)();
+    // Collect audio samples
+    const audioSamples = [];
+    // Abort if the signal is already aborted
+    if (signal.aborted) {
+        throw new Error('Operation was aborted');
+    }
+    try {
+        if (fromSeconds > 0) {
+            controller.seek(fromSeconds);
+        }
+        await (0, media_parser_1.parseMedia)({
+            src,
+            controller,
+            onAudioTrack: ({ track }) => {
+                if (!track) {
+                    throw new Error('No audio track found');
+                }
+                const audioDecoder = (0, webcodecs_1.createAudioDecoder)({
+                    track,
+                    onFrame: (sample) => {
+                        if (signal.aborted) {
+                            sample.close();
+                            return;
+                        }
+                        // For multi-channel audio, we need to handle channels properly
+                        const { numberOfChannels } = sample;
+                        const samplesPerChannel = sample.numberOfFrames;
+                        let data;
+                        if (numberOfChannels === 1) {
+                            // Mono audio
+                            data = new Float32Array(sample.allocationSize({ format: 'f32', planeIndex: 0 }));
+                            sample.copyTo(data, { format: 'f32', planeIndex: 0 });
+                        }
+                        else {
+                            // Multi-channel audio: extract specific channel
+                            const allChannelsData = new Float32Array(sample.allocationSize({ format: 'f32', planeIndex: 0 }));
+                            sample.copyTo(allChannelsData, { format: 'f32', planeIndex: 0 });
+                            // Extract the specific channel (interleaved audio)
+                            data = new Float32Array(samplesPerChannel);
+                            for (let i = 0; i < samplesPerChannel; i++) {
+                                data[i] = allChannelsData[i * numberOfChannels + channelIndex];
+                            }
+                        }
+                        audioSamples.push(data);
+                        sample.close();
+                    },
+                    onError(error) {
+                        throw error;
+                    },
+                });
+                // Listen for abort signal
+                const onAbort = () => {
+                    controller.abort();
+                    if (audioDecoder) {
+                        audioDecoder.close();
+                    }
+                };
+                signal.addEventListener('abort', onAbort, { once: true });
+                return async (sample) => {
+                    if (signal.aborted) {
+                        return;
+                    }
+                    // Convert timestamp using the track's timescale
+                    const time = sample.timestamp / track.timescale;
+                    console.log(time);
+                    // Stop immediately when we reach our target time
+                    if (time >= toSeconds) {
+                        // abort media parsing, we reached the point where we want to stop
+                        controller.abort();
+                        return;
+                    }
+                    // Decode the sample using the sample directly
+                    await audioDecoder.waitForQueueToBeLessThan(10);
+                    // we're waiting for the queue above anyway, enqueue in sync mode
+                    audioDecoder.decode(sample);
+                };
+            },
+        });
+    }
+    catch (err) {
+        const isAbortedByTimeCutoff = (0, media_parser_1.hasBeenAborted)(err);
+        // Don't throw if we stopped the parsing ourselves
+        if (!isAbortedByTimeCutoff && !signal.aborted) {
+            throw err;
+        }
+    }
+    // Simply concatenate all audio data since windowing handles the time ranges
+    const totalSamples = audioSamples.reduce((sum, sample) => sum + sample.length, 0);
+    const result = new Float32Array(totalSamples);
+    let offset = 0;
+    for (const audioSample of audioSamples) {
+        result.set(audioSample, offset);
+        offset += audioSample.length;
+    }
+    return result;
+};
+exports.getPartialMediaData = getPartialMediaData;
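Note: the second new file below, get-partial-media-data.js, is almost identical to the variant above. It differs in three ways: it parses on a web worker via parseMediaOnWebWorker, it awaits audioDecoder.flush() after aborting at the cutoff time, and it omits the console.log(time) call that remains in get-partial-media-data-2.js.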
package/dist/get-partial-media-data.js
ADDED
@@ -0,0 +1,105 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getPartialMediaData = void 0;
+const media_parser_1 = require("@remotion/media-parser");
+const worker_1 = require("@remotion/media-parser/worker");
+const webcodecs_1 = require("@remotion/webcodecs");
+const getPartialMediaData = async ({ src, fromSeconds, toSeconds, channelIndex, signal, }) => {
+    const controller = (0, media_parser_1.mediaParserController)();
+    // Collect audio samples
+    const audioSamples = [];
+    // Abort if the signal is already aborted
+    if (signal.aborted) {
+        throw new Error('Operation was aborted');
+    }
+    try {
+        if (fromSeconds > 0) {
+            controller.seek(fromSeconds);
+        }
+        await (0, worker_1.parseMediaOnWebWorker)({
+            src,
+            controller,
+            onAudioTrack: ({ track }) => {
+                if (!track) {
+                    throw new Error('No audio track found');
+                }
+                const audioDecoder = (0, webcodecs_1.createAudioDecoder)({
+                    track,
+                    onFrame: (sample) => {
+                        if (signal.aborted) {
+                            sample.close();
+                            return;
+                        }
+                        // For multi-channel audio, we need to handle channels properly
+                        const { numberOfChannels } = sample;
+                        const samplesPerChannel = sample.numberOfFrames;
+                        let data;
+                        if (numberOfChannels === 1) {
+                            // Mono audio
+                            data = new Float32Array(sample.allocationSize({ format: 'f32', planeIndex: 0 }));
+                            sample.copyTo(data, { format: 'f32', planeIndex: 0 });
+                        }
+                        else {
+                            // Multi-channel audio: extract specific channel
+                            const allChannelsData = new Float32Array(sample.allocationSize({ format: 'f32', planeIndex: 0 }));
+                            sample.copyTo(allChannelsData, { format: 'f32', planeIndex: 0 });
+                            // Extract the specific channel (interleaved audio)
+                            data = new Float32Array(samplesPerChannel);
+                            for (let i = 0; i < samplesPerChannel; i++) {
+                                data[i] = allChannelsData[i * numberOfChannels + channelIndex];
+                            }
+                        }
+                        audioSamples.push(data);
+                        sample.close();
+                    },
+                    onError(error) {
+                        throw error;
+                    },
+                });
+                // Listen for abort signal
+                const onAbort = () => {
+                    controller.abort();
+                    if (audioDecoder) {
+                        audioDecoder.close();
+                    }
+                };
+                signal.addEventListener('abort', onAbort, { once: true });
+                return async (sample) => {
+                    if (signal.aborted) {
+                        return;
+                    }
+                    // Convert timestamp using the track's timescale
+                    const time = sample.timestamp / track.timescale;
+                    // Stop immediately when we reach our target time
+                    if (time >= toSeconds) {
+                        // abort media parsing, we reached the point where we want to stop
+                        controller.abort();
+                        await audioDecoder.flush();
+                        return;
+                    }
+                    // Decode the sample using the sample directly
+                    await audioDecoder.waitForQueueToBeLessThan(10);
+                    // we're waiting for the queue above anyway, enqueue in sync mode
+                    audioDecoder.decode(sample);
+                };
+            },
+        });
+    }
+    catch (err) {
+        const isAbortedByTimeCutoff = (0, media_parser_1.hasBeenAborted)(err);
+        // Don't throw if we stopped the parsing ourselves
+        if (!isAbortedByTimeCutoff && !signal.aborted) {
+            throw err;
+        }
+    }
+    // Simply concatenate all audio data since windowing handles the time ranges
+    const totalSamples = audioSamples.reduce((sum, sample) => sum + sample.length, 0);
+    const result = new Float32Array(totalSamples);
+    let offset = 0;
+    for (const audioSample of audioSamples) {
+        result.set(audioSample, offset);
+        offset += audioSample.length;
+    }
+    return result;
+};
+exports.getPartialMediaData = getPartialMediaData;
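For orientation, a minimal sketch of how the new helper might be called. This is not from the package docs: the import path, URL, and time window are placeholder assumptions; the parameter names and return type (a Float32Array of f32 samples for one channel) are taken from the diff above.

// Minimal sketch, assuming the helper can be imported from the built file;
// the URL and the 10–12s window are placeholders.
import { getPartialMediaData } from '@remotion/media-utils/dist/get-partial-media-data';

const readWindow = async (): Promise<Float32Array> => {
  const controller = new AbortController();
  // Decode channel 0 of seconds 10–12 into a single Float32Array
  const samples = await getPartialMediaData({
    src: 'https://example.com/audio.mp3', // placeholder URL
    fromSeconds: 10,
    toSeconds: 12,
    channelIndex: 0,
    signal: controller.signal,
  });
  return samples;
};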
package/dist/use-audio-data.js
CHANGED
@@ -21,8 +21,9 @@ const useAudioData = (src) => {
         };
     }, []);
     const [metadata, setMetadata] = (0, react_1.useState)(null);
+    const { delayRender, continueRender } = (0, remotion_1.useDelayRender)();
     const fetchMetadata = (0, react_1.useCallback)(async () => {
-        const handle = (0, remotion_1.delayRender)(`Waiting for audio metadata with src="${src}" to be loaded`);
+        const handle = delayRender(`Waiting for audio metadata with src="${src}" to be loaded`);
         try {
             const data = await (0, get_audio_data_1.getAudioData)(src);
             if (mountState.current.isMounted) {
@@ -32,8 +33,8 @@ const useAudioData = (src) => {
         catch (err) {
             (0, remotion_1.cancelRender)(err);
         }
-        (0, remotion_1.continueRender)(handle);
+        continueRender(handle);
-    }, [src]);
+    }, [src, delayRender, continueRender]);
     (0, react_1.useLayoutEffect)(() => {
         fetchMetadata();
     }, [fetchMetadata]);
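The hook changes in this release swap the top-level delayRender()/continueRender() calls for the useDelayRender() hook from remotion, as the compiled output above shows. Below is a sketch of the equivalent source-level pattern; the hook name, message string, and dependency array come from the diff, while the wrapper hook itself is hypothetical and assumes useDelayRender() returns { delayRender, continueRender }.

// Sketch of the hook-based pattern the compiled diff above corresponds to.
import { useCallback, useLayoutEffect, useState } from 'react';
import { cancelRender, useDelayRender } from 'remotion';
import { getAudioData, type AudioData } from '@remotion/media-utils';

export const useMyAudioData = (src: string): AudioData | null => {
  const [metadata, setMetadata] = useState<AudioData | null>(null);
  const { delayRender, continueRender } = useDelayRender();

  const fetchMetadata = useCallback(async () => {
    // Block the render until the metadata has loaded
    const handle = delayRender(`Waiting for audio metadata with src="${src}" to be loaded`);
    try {
      setMetadata(await getAudioData(src));
    } catch (err) {
      cancelRender(err);
    }
    continueRender(handle);
  }, [src, delayRender, continueRender]);

  useLayoutEffect(() => {
    void fetchMetadata();
  }, [fetchMetadata]);

  return metadata;
};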
package/dist/use-windowed-audio-data.js
CHANGED
@@ -22,10 +22,11 @@ const useWindowedAudioData = ({ src, frame, fps, windowInSeconds, channelIndex =
             isMounted.current = false;
         };
     }, []);
+    const { delayRender, continueRender } = (0, remotion_1.useDelayRender)();
     const fetchMetadata = (0, react_1.useCallback)(async (signal) => {
-        const handle = (0, remotion_1.delayRender)(`Waiting for audio metadata with src="${src}" to be loaded`);
+        const handle = delayRender(`Waiting for audio metadata with src="${src}" to be loaded`);
         const cont = () => {
-            (0, remotion_1.continueRender)(handle);
+            continueRender(handle);
         };
         signal.addEventListener('abort', cont, { once: true });
         try {
@@ -33,7 +34,7 @@ const useWindowedAudioData = ({ src, frame, fps, windowInSeconds, channelIndex =
             if (isMounted.current) {
                 setWaveProbe(data);
             }
-            (0, remotion_1.continueRender)(handle);
+            continueRender(handle);
         }
         catch (err) {
             (0, remotion_1.cancelRender)(err);
@@ -41,7 +42,7 @@ const useWindowedAudioData = ({ src, frame, fps, windowInSeconds, channelIndex =
         finally {
             signal.removeEventListener('abort', cont);
         }
-    }, [src]);
+    }, [src, delayRender, continueRender]);
     (0, react_1.useLayoutEffect)(() => {
         const controller = new AbortController();
         fetchMetadata(controller.signal);
@@ -153,11 +154,11 @@ const useWindowedAudioData = ({ src, frame, fps, windowInSeconds, channelIndex =
         if (currentAudioData) {
             return;
         }
-        const handle = (0, remotion_1.delayRender)(`Waiting for audio data with src="${src}" to be loaded`);
+        const handle = delayRender(`Waiting for audio data with src="${src}" to be loaded`);
         return () => {
-            (0, remotion_1.continueRender)(handle);
+            continueRender(handle);
         };
-    }, [currentAudioData, src]);
+    }, [currentAudioData, src, delayRender, continueRender]);
     return {
         audioData: currentAudioData,
         dataOffsetInSeconds: windowsToFetch[0] * windowInSeconds,
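For context, a hedged usage sketch of useWindowedAudioData, based only on the parameters (src, frame, fps, windowInSeconds, channelIndex) and return values (audioData, dataOffsetInSeconds) visible in the hunks above; the component, src, and window length are placeholders.

// Usage sketch; values are placeholders, not recommendations.
import React from 'react';
import { useCurrentFrame, useVideoConfig } from 'remotion';
import { useWindowedAudioData } from '@remotion/media-utils';

export const Waveform: React.FC<{ src: string }> = ({ src }) => {
  const frame = useCurrentFrame();
  const { fps } = useVideoConfig();
  const { audioData, dataOffsetInSeconds } = useWindowedAudioData({
    src,
    frame,
    fps,
    windowInSeconds: 10,
    channelIndex: 0, // optional; the hunk header above shows a default exists
  });
  if (!audioData) {
    // Data for the current window has not loaded yet
    return null;
  }
  // audioData covers a window starting dataOffsetInSeconds into the media
  return <div>Window starts at {dataOffsetInSeconds}s</div>;
};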
package/package.json
CHANGED
@@ -3,7 +3,7 @@
     "url": "https://github.com/remotion-dev/remotion/tree/main/packages/media-utils"
   },
   "name": "@remotion/media-utils",
-  "version": "4.0.344",
+  "version": "4.0.346",
   "description": "Utilities for working with media files",
   "main": "dist/index.js",
   "sideEffects": false,
@@ -13,7 +13,7 @@
     "url": "https://github.com/remotion-dev/remotion/issues"
   },
   "dependencies": {
-    "remotion": "4.0.344"
+    "remotion": "4.0.346"
   },
   "peerDependencies": {
     "react": ">=16.8.0",
@@ -21,7 +21,7 @@
   },
   "devDependencies": {
     "eslint": "9.19.0",
-    "@remotion/eslint-config-internal": "4.0.344"
+    "@remotion/eslint-config-internal": "4.0.346"
   },
   "keywords": [
     "remotion",