@remotion/media-utils 4.0.395 → 4.0.396
ADDED
@@ -0,0 +1,104 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getPartialMediaData = void 0;
+const media_parser_1 = require("@remotion/media-parser");
+const webcodecs_1 = require("@remotion/webcodecs");
+const getPartialMediaData = async ({ src, fromSeconds, toSeconds, channelIndex, signal, }) => {
+    const controller = (0, media_parser_1.mediaParserController)();
+    // Collect audio samples
+    const audioSamples = [];
+    // Abort if the signal is already aborted
+    if (signal.aborted) {
+        throw new Error('Operation was aborted');
+    }
+    try {
+        if (fromSeconds > 0) {
+            controller.seek(fromSeconds);
+        }
+        await (0, media_parser_1.parseMedia)({
+            src,
+            controller,
+            onAudioTrack: ({ track }) => {
+                if (!track) {
+                    throw new Error('No audio track found');
+                }
+                const audioDecoder = (0, webcodecs_1.createAudioDecoder)({
+                    track,
+                    onFrame: (sample) => {
+                        if (signal.aborted) {
+                            sample.close();
+                            return;
+                        }
+                        // For multi-channel audio, we need to handle channels properly
+                        const { numberOfChannels } = sample;
+                        const samplesPerChannel = sample.numberOfFrames;
+                        let data;
+                        if (numberOfChannels === 1) {
+                            // Mono audio
+                            data = new Float32Array(sample.allocationSize({ format: 'f32', planeIndex: 0 }));
+                            sample.copyTo(data, { format: 'f32', planeIndex: 0 });
+                        }
+                        else {
+                            // Multi-channel audio: extract specific channel
+                            const allChannelsData = new Float32Array(sample.allocationSize({ format: 'f32', planeIndex: 0 }));
+                            sample.copyTo(allChannelsData, { format: 'f32', planeIndex: 0 });
+                            // Extract the specific channel (interleaved audio)
+                            data = new Float32Array(samplesPerChannel);
+                            for (let i = 0; i < samplesPerChannel; i++) {
+                                data[i] = allChannelsData[i * numberOfChannels + channelIndex];
+                            }
+                        }
+                        audioSamples.push(data);
+                        sample.close();
+                    },
+                    onError(error) {
+                        throw error;
+                    },
+                });
+                // Listen for abort signal
+                const onAbort = () => {
+                    controller.abort();
+                    if (audioDecoder) {
+                        audioDecoder.close();
+                    }
+                };
+                signal.addEventListener('abort', onAbort, { once: true });
+                return async (sample) => {
+                    if (signal.aborted) {
+                        return;
+                    }
+                    // Convert timestamp using the track's timescale
+                    const time = sample.timestamp / track.timescale;
+                    console.log(time);
+                    // Stop immediately when we reach our target time
+                    if (time >= toSeconds) {
+                        // abort media parsing, we reached the point where we want to stop
+                        controller.abort();
+                        return;
+                    }
+                    // Decode the sample using the sample directly
+                    await audioDecoder.waitForQueueToBeLessThan(10);
+                    // we're waiting for the queue above anyway, enqueue in sync mode
+                    audioDecoder.decode(sample);
+                };
+            },
+        });
+    }
+    catch (err) {
+        const isAbortedByTimeCutoff = (0, media_parser_1.hasBeenAborted)(err);
+        // Don't throw if we stopped the parsing ourselves
+        if (!isAbortedByTimeCutoff && !signal.aborted) {
+            throw err;
+        }
+    }
+    // Simply concatenate all audio data since windowing handles the time ranges
+    const totalSamples = audioSamples.reduce((sum, sample) => sum + sample.length, 0);
+    const result = new Float32Array(totalSamples);
+    let offset = 0;
+    for (const audioSample of audioSamples) {
+        result.set(audioSample, offset);
+        offset += audioSample.length;
+    }
+    return result;
+};
+exports.getPartialMediaData = getPartialMediaData;
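For context, a minimal sketch of how the new export might be called. The parameter names and the Float32Array return value come from the code above; the require path is an assumption, since the diff does not show which entry point re-exports getPartialMediaData.

// Usage sketch -- the '@remotion/media-utils' import path is an assumption.
const { getPartialMediaData } = require('@remotion/media-utils');

const abortController = new AbortController();

// Extract channel 0 of the audio between 2s and 5s as 32-bit float PCM.
getPartialMediaData({
    src: 'https://example.com/video.mp4', // hypothetical media URL
    fromSeconds: 2,
    toSeconds: 5,
    channelIndex: 0,
    signal: abortController.signal,
}).then((samples) => {
    // One Float32Array: all decoded frames for the requested channel,
    // concatenated in decode order.
    console.log(samples.length);
});

// Calling abortController.abort() cancels parsing and closes the decoder mid-flight.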
ADDED
@@ -0,0 +1,105 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getPartialMediaData = void 0;
+const media_parser_1 = require("@remotion/media-parser");
+const worker_1 = require("@remotion/media-parser/worker");
+const webcodecs_1 = require("@remotion/webcodecs");
+const getPartialMediaData = async ({ src, fromSeconds, toSeconds, channelIndex, signal, }) => {
+    const controller = (0, media_parser_1.mediaParserController)();
+    // Collect audio samples
+    const audioSamples = [];
+    // Abort if the signal is already aborted
+    if (signal.aborted) {
+        throw new Error('Operation was aborted');
+    }
+    try {
+        if (fromSeconds > 0) {
+            controller.seek(fromSeconds);
+        }
+        await (0, worker_1.parseMediaOnWebWorker)({
+            src,
+            controller,
+            onAudioTrack: ({ track }) => {
+                if (!track) {
+                    throw new Error('No audio track found');
+                }
+                const audioDecoder = (0, webcodecs_1.createAudioDecoder)({
+                    track,
+                    onFrame: (sample) => {
+                        if (signal.aborted) {
+                            sample.close();
+                            return;
+                        }
+                        // For multi-channel audio, we need to handle channels properly
+                        const { numberOfChannels } = sample;
+                        const samplesPerChannel = sample.numberOfFrames;
+                        let data;
+                        if (numberOfChannels === 1) {
+                            // Mono audio
+                            data = new Float32Array(sample.allocationSize({ format: 'f32', planeIndex: 0 }));
+                            sample.copyTo(data, { format: 'f32', planeIndex: 0 });
+                        }
+                        else {
+                            // Multi-channel audio: extract specific channel
+                            const allChannelsData = new Float32Array(sample.allocationSize({ format: 'f32', planeIndex: 0 }));
+                            sample.copyTo(allChannelsData, { format: 'f32', planeIndex: 0 });
+                            // Extract the specific channel (interleaved audio)
+                            data = new Float32Array(samplesPerChannel);
+                            for (let i = 0; i < samplesPerChannel; i++) {
+                                data[i] = allChannelsData[i * numberOfChannels + channelIndex];
+                            }
+                        }
+                        audioSamples.push(data);
+                        sample.close();
+                    },
+                    onError(error) {
+                        throw error;
+                    },
+                });
+                // Listen for abort signal
+                const onAbort = () => {
+                    controller.abort();
+                    if (audioDecoder) {
+                        audioDecoder.close();
+                    }
+                };
+                signal.addEventListener('abort', onAbort, { once: true });
+                return async (sample) => {
+                    if (signal.aborted) {
+                        return;
+                    }
+                    // Convert timestamp using the track's timescale
+                    const time = sample.timestamp / track.timescale;
+                    // Stop immediately when we reach our target time
+                    if (time >= toSeconds) {
+                        // abort media parsing, we reached the point where we want to stop
+                        controller.abort();
+                        await audioDecoder.flush();
+                        return;
+                    }
+                    // Decode the sample using the sample directly
+                    await audioDecoder.waitForQueueToBeLessThan(10);
+                    // we're waiting for the queue above anyway, enqueue in sync mode
+                    audioDecoder.decode(sample);
+                };
+            },
+        });
+    }
+    catch (err) {
+        const isAbortedByTimeCutoff = (0, media_parser_1.hasBeenAborted)(err);
+        // Don't throw if we stopped the parsing ourselves
+        if (!isAbortedByTimeCutoff && !signal.aborted) {
+            throw err;
+        }
+    }
+    // Simply concatenate all audio data since windowing handles the time ranges
+    const totalSamples = audioSamples.reduce((sum, sample) => sum + sample.length, 0);
+    const result = new Float32Array(totalSamples);
+    let offset = 0;
+    for (const audioSample of audioSamples) {
+        result.set(audioSample, offset);
+        offset += audioSample.length;
+    }
+    return result;
+};
+exports.getPartialMediaData = getPartialMediaData;
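This second file is the web-worker variant of the same function: it parses via parseMediaOnWebWorker from @remotion/media-parser/worker instead of parseMedia, awaits audioDecoder.flush() at the time cutoff so queued frames are emitted before returning, and omits the console.log(time) debug statement present in the first file.

The channel-extraction loop in both files deserves a standalone illustration. A self-contained sketch with made-up numbers (not taken from the package): f32 audio in plane 0 is interleaved, so frame i of channel c sits at index i * numberOfChannels + c.

// De-interleaving sketch: 3 frames of hypothetical stereo audio,
// stored as [L0, R0, L1, R1, L2, R2].
const numberOfChannels = 2;
const channelIndex = 1; // take the right channel
const interleaved = new Float32Array([0.1, 0.9, 0.2, 0.8, 0.3, 0.7]);
const samplesPerChannel = interleaved.length / numberOfChannels;

const channel = new Float32Array(samplesPerChannel);
for (let i = 0; i < samplesPerChannel; i++) {
    // Frame i starts at i * numberOfChannels; channelIndex is the offset within it.
    channel[i] = interleaved[i * numberOfChannels + channelIndex];
}
// channel is now Float32Array [0.9, 0.8, 0.7]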
package/package.json
CHANGED
@@ -3,7 +3,7 @@
     "url": "https://github.com/remotion-dev/remotion/tree/main/packages/media-utils"
   },
   "name": "@remotion/media-utils",
-  "version": "4.0.395",
+  "version": "4.0.396",
   "description": "Utilities for working with media files",
   "main": "dist/index.js",
   "sideEffects": false,
@@ -18,9 +18,9 @@
     "url": "https://github.com/remotion-dev/remotion/issues"
   },
   "dependencies": {
-    "@remotion/media-parser": "4.0.395",
-    "@remotion/webcodecs": "4.0.395",
-    "remotion": "4.0.395",
+    "@remotion/media-parser": "4.0.396",
+    "@remotion/webcodecs": "4.0.396",
+    "remotion": "4.0.396",
     "mediabunny": "1.27.2"
   },
   "peerDependencies": {
@@ -28,7 +28,7 @@
     "react-dom": ">=16.8.0"
   },
   "devDependencies": {
-    "@remotion/eslint-config-internal": "4.0.395",
+    "@remotion/eslint-config-internal": "4.0.396",
     "eslint": "9.19.0"
   },
   "keywords": [