node-av 1.0.3 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +56 -41
- package/dist/api/bitstream-filter.d.ts +180 -123
- package/dist/api/bitstream-filter.js +182 -126
- package/dist/api/bitstream-filter.js.map +1 -1
- package/dist/api/decoder.d.ts +286 -130
- package/dist/api/decoder.js +321 -159
- package/dist/api/decoder.js.map +1 -1
- package/dist/api/encoder.d.ts +254 -158
- package/dist/api/encoder.js +326 -298
- package/dist/api/encoder.js.map +1 -1
- package/dist/api/filter-presets.d.ts +912 -0
- package/dist/api/filter-presets.js +1407 -0
- package/dist/api/filter-presets.js.map +1 -0
- package/dist/api/filter.d.ts +280 -284
- package/dist/api/filter.js +435 -509
- package/dist/api/filter.js.map +1 -1
- package/dist/api/hardware.d.ts +226 -159
- package/dist/api/hardware.js +405 -287
- package/dist/api/hardware.js.map +1 -1
- package/dist/api/index.d.ts +3 -2
- package/dist/api/index.js +1 -0
- package/dist/api/index.js.map +1 -1
- package/dist/api/io-stream.d.ts +65 -61
- package/dist/api/io-stream.js +45 -47
- package/dist/api/io-stream.js.map +1 -1
- package/dist/api/media-input.d.ts +244 -141
- package/dist/api/media-input.js +207 -104
- package/dist/api/media-input.js.map +1 -1
- package/dist/api/media-output.d.ts +206 -128
- package/dist/api/media-output.js +212 -129
- package/dist/api/media-output.js.map +1 -1
- package/dist/api/pipeline.d.ts +168 -38
- package/dist/api/pipeline.js +238 -14
- package/dist/api/pipeline.js.map +1 -1
- package/dist/api/types.d.ts +22 -182
- package/dist/api/utilities/audio-sample.d.ts +1 -1
- package/dist/api/utilities/image.d.ts +1 -1
- package/dist/api/utilities/media-type.d.ts +1 -1
- package/dist/api/utilities/pixel-format.d.ts +1 -1
- package/dist/api/utilities/sample-format.d.ts +1 -1
- package/dist/api/utilities/timestamp.d.ts +1 -1
- package/dist/api/utils.d.ts +1 -2
- package/dist/api/utils.js +9 -0
- package/dist/api/utils.js.map +1 -1
- package/dist/{lib → constants}/channel-layouts.d.ts +1 -1
- package/dist/constants/channel-layouts.js.map +1 -0
- package/dist/{lib → constants}/constants.d.ts +19 -4
- package/dist/{lib → constants}/constants.js +15 -1
- package/dist/constants/constants.js.map +1 -0
- package/dist/constants/decoders.d.ts +609 -0
- package/dist/constants/decoders.js +617 -0
- package/dist/constants/decoders.js.map +1 -0
- package/dist/constants/encoders.d.ts +285 -0
- package/dist/constants/encoders.js +298 -0
- package/dist/constants/encoders.js.map +1 -0
- package/dist/constants/index.d.ts +4 -0
- package/dist/constants/index.js +5 -0
- package/dist/constants/index.js.map +1 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +2 -0
- package/dist/index.js.map +1 -1
- package/dist/lib/audio-fifo.d.ts +128 -171
- package/dist/lib/audio-fifo.js +130 -173
- package/dist/lib/audio-fifo.js.map +1 -1
- package/dist/lib/binding.d.ts +7 -5
- package/dist/lib/binding.js +5 -0
- package/dist/lib/binding.js.map +1 -1
- package/dist/lib/bitstream-filter-context.d.ts +139 -184
- package/dist/lib/bitstream-filter-context.js +139 -188
- package/dist/lib/bitstream-filter-context.js.map +1 -1
- package/dist/lib/bitstream-filter.d.ts +69 -55
- package/dist/lib/bitstream-filter.js +68 -54
- package/dist/lib/bitstream-filter.js.map +1 -1
- package/dist/lib/codec-context.d.ts +317 -381
- package/dist/lib/codec-context.js +316 -381
- package/dist/lib/codec-context.js.map +1 -1
- package/dist/lib/codec-parameters.d.ts +161 -171
- package/dist/lib/codec-parameters.js +162 -172
- package/dist/lib/codec-parameters.js.map +1 -1
- package/dist/lib/codec-parser.d.ts +92 -105
- package/dist/lib/codec-parser.js +92 -103
- package/dist/lib/codec-parser.js.map +1 -1
- package/dist/lib/codec.d.ts +328 -217
- package/dist/lib/codec.js +392 -218
- package/dist/lib/codec.js.map +1 -1
- package/dist/lib/dictionary.d.ts +150 -204
- package/dist/lib/dictionary.js +159 -213
- package/dist/lib/dictionary.js.map +1 -1
- package/dist/lib/error.d.ts +97 -131
- package/dist/lib/error.js +98 -128
- package/dist/lib/error.js.map +1 -1
- package/dist/lib/filter-context.d.ts +317 -194
- package/dist/lib/filter-context.js +335 -200
- package/dist/lib/filter-context.js.map +1 -1
- package/dist/lib/filter-graph.d.ts +252 -293
- package/dist/lib/filter-graph.js +253 -294
- package/dist/lib/filter-graph.js.map +1 -1
- package/dist/lib/filter-inout.d.ts +87 -95
- package/dist/lib/filter-inout.js +87 -95
- package/dist/lib/filter-inout.js.map +1 -1
- package/dist/lib/filter.d.ts +93 -111
- package/dist/lib/filter.js +94 -112
- package/dist/lib/filter.js.map +1 -1
- package/dist/lib/format-context.d.ts +321 -429
- package/dist/lib/format-context.js +314 -386
- package/dist/lib/format-context.js.map +1 -1
- package/dist/lib/frame.d.ts +263 -406
- package/dist/lib/frame.js +263 -408
- package/dist/lib/frame.js.map +1 -1
- package/dist/lib/hardware-device-context.d.ts +150 -204
- package/dist/lib/hardware-device-context.js +149 -203
- package/dist/lib/hardware-device-context.js.map +1 -1
- package/dist/lib/hardware-frames-context.d.ts +171 -181
- package/dist/lib/hardware-frames-context.js +171 -181
- package/dist/lib/hardware-frames-context.js.map +1 -1
- package/dist/lib/index.d.ts +2 -3
- package/dist/lib/index.js +2 -5
- package/dist/lib/index.js.map +1 -1
- package/dist/lib/input-format.d.ts +90 -118
- package/dist/lib/input-format.js +89 -117
- package/dist/lib/input-format.js.map +1 -1
- package/dist/lib/io-context.d.ts +210 -242
- package/dist/lib/io-context.js +221 -253
- package/dist/lib/io-context.js.map +1 -1
- package/dist/lib/log.d.ts +86 -120
- package/dist/lib/log.js +85 -122
- package/dist/lib/log.js.map +1 -1
- package/dist/lib/native-types.d.ts +127 -112
- package/dist/lib/native-types.js +9 -0
- package/dist/lib/native-types.js.map +1 -1
- package/dist/lib/option.d.ts +285 -242
- package/dist/lib/option.js +310 -250
- package/dist/lib/option.js.map +1 -1
- package/dist/lib/output-format.d.ts +78 -102
- package/dist/lib/output-format.js +77 -101
- package/dist/lib/output-format.js.map +1 -1
- package/dist/lib/packet.d.ts +173 -241
- package/dist/lib/packet.js +172 -241
- package/dist/lib/packet.js.map +1 -1
- package/dist/lib/rational.d.ts +0 -2
- package/dist/lib/rational.js +0 -2
- package/dist/lib/rational.js.map +1 -1
- package/dist/lib/software-resample-context.d.ts +242 -326
- package/dist/lib/software-resample-context.js +242 -326
- package/dist/lib/software-resample-context.js.map +1 -1
- package/dist/lib/software-scale-context.d.ts +130 -174
- package/dist/lib/software-scale-context.js +132 -176
- package/dist/lib/software-scale-context.js.map +1 -1
- package/dist/lib/stream.d.ts +88 -198
- package/dist/lib/stream.js +87 -197
- package/dist/lib/stream.js.map +1 -1
- package/dist/lib/types.d.ts +1 -1
- package/dist/lib/utilities.d.ts +372 -181
- package/dist/lib/utilities.js +373 -182
- package/dist/lib/utilities.js.map +1 -1
- package/install/check.js +0 -1
- package/package.json +32 -24
- package/release_notes.md +43 -13
- package/CHANGELOG.md +0 -8
- package/dist/lib/channel-layouts.js.map +0 -1
- package/dist/lib/constants.js.map +0 -1
- /package/dist/{lib → constants}/channel-layouts.js +0 -0
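Most of the churn above is in the high-level API and its docs; the practical entry point is the reworked FilterAPI in dist/api/filter.js, whose updated JSDoc (shown in the diff below) now takes a stream-info object rather than a stream. A minimal sketch of the 1.2.0-style flow, assembled from those JSDoc examples; `videoInfo` and `inputFrame` are placeholders for values obtained from the media-input/decoder APIs, not verified names:

```typescript
import { FilterAPI } from 'node-av/api';

// Placeholders: in a real pipeline these come from MediaInput/Decoder.
declare const videoInfo: any;
declare const inputFrame: any;

// Create a video filter from a filtergraph description (per the new JSDoc).
const filter = await FilterAPI.create('scale=1280:720', videoInfo);

// Process one frame; the filter may buffer internally and return null.
const output = await filter.process(inputFrame);
if (output) {
  console.log(`Filtered frame: ${output.width}x${output.height}`);
  output.free();
}

// Signal end-of-stream and drain whatever is still buffered.
await filter.flush();
let frame;
while ((frame = await filter.receive()) !== null) {
  frame.free();
}

filter.free();
```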
package/dist/api/filter.js
CHANGED
@@ -1,247 +1,189 @@
- import {
+ import { AVERROR_EOF, AVFILTER_FLAG_HWDEVICE, AVMEDIA_TYPE_AUDIO, AVMEDIA_TYPE_VIDEO } from '../constants/constants.js';
+ import { AVERROR_EAGAIN, avGetSampleFmtName, avIsHardwarePixelFormat, FFmpegError, Filter, FilterGraph, FilterInOut, Frame } from '../lib/index.js';
  /**
- * High-level filter API for
+ * High-level filter API for audio and video processing.
  *
- * Provides
- *
- *
+ * Provides simplified interface for applying FFmpeg filters to frames.
+ * Handles filter graph construction, frame buffering, and command control.
+ * Supports both software and hardware-accelerated filtering operations.
+ * Essential component for effects, transformations, and format conversions.
  *
  * @example
  * ```typescript
- * import { FilterAPI
+ * import { FilterAPI } from 'node-av/api';
  *
- * // Create
- * const
- * const filter = await FilterAPI.create('scale=1280:720,format=yuv420p', videoStream);
+ * // Create video filter
+ * const filter = await FilterAPI.create('scale=1280:720', videoInfo);
  *
- * // Process
- * const
+ * // Process frame
+ * const output = await filter.process(inputFrame);
+ * if (output) {
+ *   console.log(`Filtered frame: ${output.width}x${output.height}`);
+ *   output.free();
+ * }
  * ```
  *
  * @example
  * ```typescript
- * //
- * const hw =
- * const filter = await FilterAPI.create(
- *
- *
+ * // Hardware-accelerated filtering
+ * const hw = HardwareContext.auto();
+ * const filter = await FilterAPI.create(
+ *   'hwupload,scale_cuda=1920:1080,hwdownload',
+ *   videoInfo,
+ *   { hardware: hw }
+ * );
  * ```
+ *
+ * @see {@link FilterGraph} For low-level filter graph API
+ * @see {@link HardwareContext} For hardware acceleration
+ * @see {@link Frame} For frame operations
  */
  export class FilterAPI {
- graph;
+ graph = null;
  buffersrcCtx = null;
  buffersinkCtx = null;
  config;
  mediaType;
  initialized = false;
-
-
-
+ hardware;
+ description;
+ options;
  /**
- *
- *
- *
- * Use the static factory methods for easier creation.
- *
- * @param config - Filter configuration
- * @param hardware - Optional hardware context for late framesContext binding
+ * @param config - Stream configuration
+ * @param description - Filter description string
+ * @param options - Filter options
  * @internal
  */
- constructor(config,
+ constructor(config, description, options) {
  this.config = config;
- this.
+ this.description = description;
+ this.options = options;
+ this.hardware = options.hardware;
  this.mediaType = config.type === 'video' ? AVMEDIA_TYPE_VIDEO : AVMEDIA_TYPE_AUDIO;
- this.graph = new LowLevelFilterGraph();
  }
  /**
- * Create a filter
+ * Create a filter with specified description and configuration.
  *
- *
- *
+ * Constructs filter graph from description string.
+ * Configures input/output buffers and threading.
+ * For video filters, uses lazy initialization to detect hardware frames.
  *
- *
- * filter in the chain requires hardware acceleration (e.g., scale_vt in
- * "format=nv12,hwupload,scale_vt=640:480").
+ * Direct mapping to avfilter_graph_parse_ptr() and avfilter_graph_config().
  *
- * @param description - Filter graph description
- * @param input -
- * @param options -
+ * @param description - Filter graph description
+ * @param input - Input stream configuration
+ * @param options - Filter options
+ * @returns Configured filter instance
  *
- * @
- *
- * @throws {FFmpegError} If filter creation or configuration fails
+ * @throws {Error} If filter creation or configuration fails
+ * @throws {FFmpegError} If graph parsing or config fails
  *
  * @example
  * ```typescript
- * // Simple filter
- * const filter = await FilterAPI.create('scale=640:480',
+ * // Simple video filter
+ * const filter = await FilterAPI.create('scale=640:480', videoInfo);
+ * ```
  *
- *
- *
+ * @example
+ * ```typescript
+ * // Complex filter chain
  * const filter = await FilterAPI.create(
- * '
- *
- * { hardware: hw }
+ *   'crop=640:480:0:0,rotate=PI/4',
+ *   videoInfo
  * );
+ * ```
  *
- *
- *
- *
- *
- *
- *
- *
- * });
+ * @example
+ * ```typescript
+ * // Audio filter
+ * const filter = await FilterAPI.create(
+ *   'volume=0.5,aecho=0.8:0.9:1000:0.3',
+ *   audioInfo
+ * );
  * ```
+ *
+ * @see {@link process} For frame processing
+ * @see {@link FilterOptions} For configuration options
  */
  static async create(description, input, options = {}) {
  let config;
- if (input
-
-
-
-
-
-
-
-
-
- };
- }
- else if (input.codecpar.codecType === AVMEDIA_TYPE_AUDIO) {
- config = {
- type: 'audio',
- sampleRate: input.codecpar.sampleRate,
- sampleFormat: input.codecpar.format,
- channelLayout: input.codecpar.channelLayout.mask,
- timeBase: input.timeBase,
- };
- }
- else {
- throw new Error('Unsupported codec type');
- }
- }
- else {
- if (input.type === 'video') {
- config = {
- type: 'video',
- width: input.width,
- height: input.height,
- pixelFormat: input.pixelFormat,
- timeBase: input.timeBase,
- frameRate: input.frameRate,
- sampleAspectRatio: input.sampleAspectRatio,
- };
- }
- else {
- config = {
- type: 'audio',
- sampleRate: input.sampleRate,
- sampleFormat: input.sampleFormat,
- channelLayout: typeof input.channelLayout === 'bigint' ? input.channelLayout : input.channelLayout.mask || 3n,
- timeBase: input.timeBase,
- };
- }
- }
- const filter = new FilterAPI(config, options.hardware);
- // Parse the entire filter chain to check if ANY filter requires hardware
- // Split by comma to get individual filters, handle complex chains like:
- // "format=nv12,hwupload,scale_vt=100:100,hwdownload,format=yuv420p"
- const filterNames = description
- .split(',')
- .map((f) => {
- // Extract filter name (before = or : or whitespace)
- const match = /^([a-zA-Z0-9_]+)/.exec(f.trim());
- return match ? match[1] : null;
- })
- .filter(Boolean);
- // Check if chain contains hwupload (which creates hw frames context)
- const hasHwDownload = filterNames.some((name) => name === 'hwdownload');
- const hasHwUpload = filterNames.some((name) => name === 'hwupload');
- // Check each filter in the chain
- let needsHardwareFramesContext = false;
- let needsHardwareDevice = false;
- for (const filterName of filterNames) {
- if (!filterName)
- continue;
- const lowLevelFilter = LowLevelFilter.getByName(filterName);
- if (lowLevelFilter) {
- // Check if this filter needs hardware
- if ((lowLevelFilter.flags & AVFILTER_FLAG_HWDEVICE) !== 0) {
- needsHardwareDevice = true;
- // Only non-hwupload filters need frames context from decoder
- if (filterName !== 'hwupload' && filterName !== 'hwdownload') {
- needsHardwareFramesContext = true;
- }
- }
- }
- }
- // If we have hwupload, we don't need hardware frames context from decoder
- filter.needsHardware = hasHwDownload || (needsHardwareFramesContext && !hasHwUpload);
- // Validation: Hardware filter MUST have HardwareContext
- if (needsHardwareDevice && !options.hardware) {
- throw new Error('Hardware filter in chain requires a hardware context. ' + 'Please provide one via options.hardware');
- }
- // Check if we can initialize immediately
- // Initialize if: (1) we don't need hardware, OR (2) we need hardware AND have framesContext
- if (!filter.needsHardware || (filter.needsHardware && options.hardware?.framesContext)) {
- // Can initialize now
- if (options.hardware?.framesContext && config.type === 'video') {
- config.hwFramesCtx = options.hardware.framesContext;
- }
- await filter.initialize(description, options);
- filter.initialized = true;
+ if (input.type === 'video') {
+ config = {
+ type: 'video',
+ width: input.width,
+ height: input.height,
+ pixelFormat: input.pixelFormat,
+ timeBase: input.timeBase,
+ frameRate: input.frameRate,
+ sampleAspectRatio: input.sampleAspectRatio,
+ };
  }
  else {
-
-
-
+ config = {
+ type: 'audio',
+ sampleRate: input.sampleRate,
+ sampleFormat: input.sampleFormat,
+ channelLayout: input.channelLayout,
+ timeBase: input.timeBase,
+ };
+ }
+ const filter = new FilterAPI(config, description, options);
+ // Check if any filters in the chain require hardware context
+ if (config.type === 'video') {
+ filter.checkHardwareRequirements(description, options);
+ }
+ // For video filters, always use lazy initialization to properly detect hardware requirements
+ // For audio filters, initialize immediately (no hardware audio processing)
+ if (config.type === 'audio') {
+ await filter.initialize(null);
+ }
+ // For video: wait for first frame to detect if hw_frames_ctx is present
  return filter;
  }
  /**
- * Process a
+ * Process a frame through the filter.
  *
- *
- * May
+ * Applies filter operations to input frame.
+ * May buffer frames internally before producing output.
+ * For video, performs lazy initialization on first frame.
  *
- *
+ * Direct mapping to av_buffersrc_add_frame() and av_buffersink_get_frame().
  *
- * @
+ * @param frame - Input frame to process
+ * @returns Filtered frame or null if buffered
  *
+ * @throws {Error} If filter not ready
  * @throws {FFmpegError} If processing fails
  *
  * @example
  * ```typescript
- * const
- * if (
- *
+ * const output = await filter.process(inputFrame);
+ * if (output) {
+ *   console.log(`Got filtered frame: pts=${output.pts}`);
+ *   output.free();
+ * }
+ * ```
+ *
+ * @example
+ * ```typescript
+ * // Process and drain
+ * const output = await filter.process(frame);
+ * if (output) yield output;
+ *
+ * // Drain buffered frames
+ * let buffered;
+ * while ((buffered = await filter.receive()) !== null) {
+ *   yield buffered;
  * }
  * ```
+ *
+ * @see {@link receive} For draining buffered frames
+ * @see {@link frames} For stream processing
  */
  async process(frame) {
- //
- if (!this.initialized && this.
-
- if (this.hardware?.framesContext && this.config.type === 'video') {
- this.config.hwFramesCtx = this.hardware.framesContext;
- // Update pixel format to match hardware frames if using hardware
- if (this.needsHardware) {
- this.config.pixelFormat = this.hardware.getHardwarePixelFormat();
- }
- // Now we can initialize
- await this.initialize(this.pendingInit.description, this.pendingInit.options);
- this.pendingInit = undefined;
- this.initialized = true;
- }
- else if (this.needsHardware) {
- throw new Error('Hardware filter requires frames context which is not yet available');
- }
- else {
- // Software filter or hardware not required, can initialize now
- await this.initialize(this.pendingInit.description, this.pendingInit.options);
- this.pendingInit = undefined;
- this.initialized = true;
- }
+ // Lazy initialization for video filters (detect hardware from first frame)
+ if (!this.initialized && this.config.type === 'video') {
+ await this.initialize(frame);
  }
  if (!this.initialized || !this.buffersrcCtx || !this.buffersinkCtx) {
  throw new Error('Filter not initialized');
@@ -256,7 +198,7 @@ export class FilterAPI {
  if (getRet >= 0) {
  return outputFrame;
  }
- else if (
+ else if (getRet === AVERROR_EAGAIN) {
  // Need more input
  outputFrame.free();
  return null;
@@ -268,21 +210,27 @@ export class FilterAPI {
  }
  }
  /**
- * Process multiple frames
+ * Process multiple frames at once.
  *
- *
- *
+ * Processes batch of frames and drains all output.
+ * Useful for filters that buffer multiple frames.
  *
  * @param frames - Array of input frames
+ * @returns Array of all output frames
  *
- * @
- *
+ * @throws {Error} If filter not ready
  * @throws {FFmpegError} If processing fails
  *
  * @example
  * ```typescript
- * const
+ * const outputs = await filter.processMultiple([frame1, frame2, frame3]);
+ * for (const output of outputs) {
+ *   console.log(`Output frame: pts=${output.pts}`);
+ *   output.free();
+ * }
  * ```
+ *
+ * @see {@link process} For single frame processing
  */
  async processMultiple(frames) {
  const outputFrames = [];
@@ -302,24 +250,30 @@ export class FilterAPI {
  return outputFrames;
  }
  /**
- * Receive
+ * Receive buffered frame from filter.
  *
- *
- *
+ * Drains frames buffered by the filter.
+ * Call repeatedly until null to get all buffered frames.
  *
- *
+ * Direct mapping to av_buffersink_get_frame().
  *
- * @
+ * @returns Buffered frame or null if none available
+ *
+ * @throws {Error} If filter not ready
+ * @throws {FFmpegError} If receive fails
  *
  * @example
  * ```typescript
- * // Drain
- *
- *
- *
- *
+ * // Drain buffered frames
+ * let frame;
+ * while ((frame = await filter.receive()) !== null) {
+ *   console.log(`Buffered frame: pts=${frame.pts}`);
+ *   frame.free();
  * }
  * ```
+ *
+ * @see {@link process} For input processing
+ * @see {@link flush} For end-of-stream
  */
  async receive() {
  if (!this.initialized || !this.buffersinkCtx) {
@@ -333,7 +287,7 @@ export class FilterAPI {
  }
  else {
  frame.free();
- if (
+ if (ret === AVERROR_EAGAIN || ret === AVERROR_EOF) {
  return null;
  }
  FFmpegError.throwIfError(ret, 'Failed to receive frame from filter');
@@ -341,53 +295,58 @@ export class FilterAPI {
  }
  }
  /**
- * Flush
+ * Flush filter and signal end-of-stream.
  *
- *
- *
+ * Sends null frame to flush buffered data.
+ * Must call receive() to get flushed frames.
  *
- *
+ * Direct mapping to av_buffersrc_add_frame(NULL).
  *
+ * @throws {Error} If filter not ready
  * @throws {FFmpegError} If flush fails
  *
  * @example
  * ```typescript
  * await filter.flush();
  * // Get remaining frames
- *
- *
- *
- * // Process final frames
+ * let frame;
+ * while ((frame = await filter.receive()) !== null) {
+ *   frame.free();
  * }
  * ```
+ *
+ * @see {@link flushFrames} For async iteration
+ * @see {@link receive} For draining frames
  */
  async flush() {
  if (!this.initialized || !this.buffersrcCtx) {
  throw new Error('Filter not initialized');
  }
  const ret = await this.buffersrcCtx.buffersrcAddFrame(null);
- if (ret < 0 &&
+ if (ret < 0 && ret !== AVERROR_EOF) {
  FFmpegError.throwIfError(ret, 'Failed to flush filter');
  }
  }
  /**
- * Flush filter and yield
- *
- * More convenient than calling flush() + receive() in a loop.
- * Automatically sends flush signal and yields all buffered frames.
+ * Flush filter and yield remaining frames.
  *
- *
+ * Convenient async generator for flushing.
+ * Combines flush and receive operations.
  *
- * @
+ * @yields Remaining frames from filter
+ * @throws {Error} If filter not ready
+ * @throws {FFmpegError} If flush fails
  *
  * @example
  * ```typescript
- * // Process all remaining frames with generator
  * for await (const frame of filter.flushFrames()) {
- *
- *
+ *   console.log(`Flushed frame: pts=${frame.pts}`);
+ *   frame.free();
  * }
  * ```
+ *
+ * @see {@link flush} For manual flush
+ * @see {@link frames} For complete pipeline
  */
  async *flushFrames() {
  if (!this.initialized || !this.buffersrcCtx) {
@@ -402,26 +361,40 @@ export class FilterAPI {
  }
  }
  /**
- * Process
+ * Process frame stream through filter.
  *
- *
- * Automatically handles buffering and
- *
+ * High-level async generator for filtering frame streams.
+ * Automatically handles buffering and flushing.
+ * Frees input frames after processing.
  *
- *
- *
- *
- * @
+ * @param frames - Async generator of input frames
+ * @yields Filtered frames
+ * @throws {Error} If filter not ready
+ * @throws {FFmpegError} If processing fails
  *
- * @
+ * @example
+ * ```typescript
+ * // Filter decoded frames
+ * for await (const frame of filter.frames(decoder.frames(packets))) {
+ *   await encoder.encode(frame);
+ *   frame.free();
+ * }
+ * ```
  *
  * @example
  * ```typescript
- *
- *
- *
+ * // Chain filters
+ * const filter1 = await FilterAPI.create('scale=640:480', info);
+ * const filter2 = await FilterAPI.create('rotate=PI/4', info);
+ *
+ * for await (const frame of filter2.frames(filter1.frames(input))) {
+ *   // Process filtered frames
+ *   frame.free();
  * }
  * ```
+ *
+ * @see {@link process} For single frame processing
+ * @see {@link flush} For end-of-stream handling
  */
  async *frames(frames) {
  for await (const frame of frames) {
@@ -454,103 +427,186 @@ export class FilterAPI {
  }
  }
  /**
- *
+ * Send command to filter.
+ *
+ * Sends runtime command to specific filter in graph.
+ * Allows dynamic parameter adjustment.
  *
- *
- *
+ * Direct mapping to avfilter_graph_send_command().
+ *
+ * @param target - Target filter name
+ * @param cmd - Command name
+ * @param arg - Command argument
+ * @param flags - Command flags
+ * @returns Response string from filter
+ *
+ * @throws {Error} If filter not ready
+ * @throws {FFmpegError} If command fails
+ *
+ * @example
+ * ```typescript
+ * // Change volume at runtime
+ * const response = filter.sendCommand('volume', 'volume', '0.5');
+ * console.log(`Volume changed: ${response}`);
+ * ```
+ *
+ * @see {@link queueCommand} For delayed commands
+ */
+ sendCommand(target, cmd, arg, flags) {
+ if (!this.initialized || !this.graph) {
+ throw new Error('Filter not initialized');
+ }
+ const result = this.graph.sendCommand(target, cmd, arg, flags);
+ if (typeof result === 'number') {
+ FFmpegError.throwIfError(result, 'Failed to send filter command');
+ }
+ return result.response;
+ }
+ /**
+ * Queue command for later execution.
+ *
+ * Schedules command to execute at specific timestamp.
+ * Useful for synchronized parameter changes.
+ *
+ * Direct mapping to avfilter_graph_queue_command().
+ *
+ * @param target - Target filter name
+ * @param cmd - Command name
+ * @param arg - Command argument
+ * @param ts - Timestamp for execution
+ * @param flags - Command flags
+ * @throws {Error} If filter not ready
+ * @throws {FFmpegError} If queue fails
+ *
+ * @example
+ * ```typescript
+ * // Queue volume change at 10 seconds
+ * filter.queueCommand('volume', 'volume', '0.8', 10.0);
+ * ```
+ *
+ * @see {@link sendCommand} For immediate commands
+ */
+ queueCommand(target, cmd, arg, ts, flags) {
+ if (!this.initialized || !this.graph) {
+ throw new Error('Filter not initialized');
+ }
+ const ret = this.graph.queueCommand(target, cmd, arg, ts, flags);
+ FFmpegError.throwIfError(ret, 'Failed to queue filter command');
+ }
+ /**
+ * Get filter graph description.
+ *
+ * Returns human-readable graph structure.
+ * Useful for debugging filter chains.
+ *
+ * Direct mapping to avfilter_graph_dump().
  *
  * @returns Graph description or null if not initialized
  *
  * @example
  * ```typescript
  * const description = filter.getGraphDescription();
- * console.log(description);
+ * console.log('Filter graph:', description);
  * ```
  */
  getGraphDescription() {
- if (!this.initialized) {
+ if (!this.initialized || !this.graph) {
  return null;
  }
  return this.graph.dump();
  }
  /**
- * Check if
+ * Check if filter is ready for processing.
  *
- * @returns true if
+ * @returns true if initialized and ready
+ *
+ * @example
+ * ```typescript
+ * if (filter.isReady()) {
+ *   const output = await filter.process(frame);
+ * }
+ * ```
  */
  isReady() {
  return this.initialized && this.buffersrcCtx !== null && this.buffersinkCtx !== null;
  }
  /**
- * Get
+ * Get media type of filter.
  *
- * @returns
+ * @returns AVMEDIA_TYPE_VIDEO or AVMEDIA_TYPE_AUDIO
+ *
+ * @example
+ * ```typescript
+ * if (filter.getMediaType() === AVMEDIA_TYPE_VIDEO) {
+ *   console.log('Video filter');
+ * }
+ * ```
  */
  getMediaType() {
  return this.mediaType;
  }
  /**
- *
+ * Free filter resources.
  *
- *
-
- getConfig() {
- return this.config;
- }
- /**
- * Free all filter resources.
- *
- * Releases the filter graph and all associated filters.
- * The filter instance cannot be used after calling this.
+ * Releases filter graph and contexts.
+ * Safe to call multiple times.
  *
  * @example
  * ```typescript
  * filter.free();
- * // filter is now invalid
  * ```
+ *
+ * @see {@link Symbol.dispose} For automatic cleanup
  */
  free() {
  if (this.graph) {
  this.graph.free();
+ this.graph = null;
  }
  this.buffersrcCtx = null;
  this.buffersinkCtx = null;
  this.initialized = false;
  }
  /**
- * Initialize
+ * Initialize filter graph.
  *
- *
- *
+ * Creates and configures filter graph components.
+ * For video, may use hardware frames context from first frame.
  *
- * @
+ * @param firstFrame - First frame for hardware detection (video only)
+ * @throws {Error} If initialization fails
+ * @throws {FFmpegError} If configuration fails
  */
- async initialize(
- //
+ async initialize(firstFrame) {
+ // Create graph
+ this.graph = new FilterGraph();
  this.graph.alloc();
  // Configure threading
- if (options.threads !== undefined) {
- this.graph.nbThreads = options.threads;
+ if (this.options.threads !== undefined) {
+ this.graph.nbThreads = this.options.threads;
  }
  // Configure scaler options
- if (options.scaleSwsOpts) {
- this.graph.scaleSwsOpts = options.scaleSwsOpts;
+ if (this.options.scaleSwsOpts) {
+ this.graph.scaleSwsOpts = this.options.scaleSwsOpts;
+ }
+ // Create buffer source with hw_frames_ctx if needed
+ if (firstFrame?.hwFramesCtx && this.config.type === 'video') {
+ this.createBufferSourceWithHwFrames(firstFrame);
+ }
+ else {
+ this.createBufferSource();
  }
- // Create buffer source
- this.createBufferSource();
  // Create buffer sink
  this.createBufferSink();
  // Parse filter description
- this.parseFilterDescription(description);
- // Set hw_device_ctx on hardware filters
+ this.parseFilterDescription(this.description);
+ // Set hw_device_ctx on hardware filters
  if (this.hardware?.deviceContext) {
  const filters = this.graph.filters;
  if (filters) {
  for (const filterCtx of filters) {
- // Check if this filter needs hardware device context
  const filter = filterCtx.filter;
  if (filter && (filter.flags & AVFILTER_FLAG_HWDEVICE) !== 0) {
- // Set hardware device context on this filter
  filterCtx.hwDeviceCtx = this.hardware.deviceContext;
  }
  }
@@ -562,90 +618,103 @@ export class FilterAPI {
  this.initialized = true;
  }
  /**
- * Create
+ * Create buffer source with hardware frames context.
  *
- * @
+ * @param frame - Frame with hw_frames_ctx
+ * @throws {Error} If creation fails
+ * @throws {FFmpegError} If configuration fails
+ */
+ createBufferSourceWithHwFrames(frame) {
+ const filterName = 'buffer';
+ const bufferFilter = Filter.getByName(filterName);
+ if (!bufferFilter) {
+ throw new Error(`${filterName} filter not found`);
+ }
+ // Allocate filter without args
+ this.buffersrcCtx = this.graph.allocFilter(bufferFilter, 'in');
+ if (!this.buffersrcCtx) {
+ throw new Error('Failed to allocate buffer source');
+ }
+ // Set parameters including hw_frames_ctx
+ const cfg = this.config;
+ const ret = this.buffersrcCtx.buffersrcParametersSet({
+ width: cfg.width,
+ height: cfg.height,
+ format: cfg.pixelFormat,
+ timeBase: cfg.timeBase,
+ frameRate: cfg.frameRate,
+ sampleAspectRatio: cfg.sampleAspectRatio,
+ hwFramesCtx: frame.hwFramesCtx ?? undefined,
+ });
+ FFmpegError.throwIfError(ret, 'Failed to set buffer source parameters');
+ // Initialize filter
+ const initRet = this.buffersrcCtx.init(null);
+ FFmpegError.throwIfError(initRet, 'Failed to initialize buffer source');
+ }
+ /**
+ * Create standard buffer source.
+ *
+ * @throws {Error} If creation fails
  */
  createBufferSource() {
  const filterName = this.config.type === 'video' ? 'buffer' : 'abuffer';
- const bufferFilter =
+ const bufferFilter = Filter.getByName(filterName);
  if (!bufferFilter) {
  throw new Error(`${filterName} filter not found`);
  }
- //
-
- if (
-
-
- if (
-
+ // Build args string
+ let args;
+ if (this.config.type === 'video') {
+ const cfg = this.config;
+ args = `video_size=${cfg.width}x${cfg.height}:pix_fmt=${cfg.pixelFormat}:time_base=${cfg.timeBase.num}/${cfg.timeBase.den}`;
+ if (cfg.frameRate) {
+ args += `:frame_rate=${cfg.frameRate.num}/${cfg.frameRate.den}`;
+ }
+ if (cfg.sampleAspectRatio) {
+ args += `:pixel_aspect=${cfg.sampleAspectRatio.num}/${cfg.sampleAspectRatio.den}`;
  }
- // Set parameters including hardware frames context (BEFORE init)
- const videoConfig = this.config;
- const ret = this.buffersrcCtx.buffersrcParametersSet({
- width: videoConfig.width,
- height: videoConfig.height,
- format: videoConfig.pixelFormat,
- timeBase: videoConfig.timeBase,
- frameRate: videoConfig.frameRate,
- sampleAspectRatio: videoConfig.sampleAspectRatio,
- hwFramesCtx: videoConfig.hwFramesCtx,
- });
- FFmpegError.throwIfError(ret, 'Failed to set buffer source parameters with hardware frames context');
- // Initialize filter AFTER setting parameters
- const initRet = this.buffersrcCtx.init(null);
- FFmpegError.throwIfError(initRet, 'Failed to initialize buffer source');
  }
  else {
-
-
-
-
-
-
-
-
- if (cfg.sampleAspectRatio) {
- args += `:pixel_aspect=${cfg.sampleAspectRatio.num}/${cfg.sampleAspectRatio.den}`;
- }
- }
- else {
- const cfg = this.config;
- // Use sample format name from utilities
- const sampleFmtName = avGetSampleFmtName(cfg.sampleFormat);
- // Handle invalid channel layout (0) by using stereo as default
- const channelLayout = cfg.channelLayout === 0n ? 'stereo' : cfg.channelLayout.toString();
- args = `sample_rate=${cfg.sampleRate}:sample_fmt=${sampleFmtName}:channel_layout=${channelLayout}:time_base=${cfg.timeBase.num}/${cfg.timeBase.den}`;
- }
- this.buffersrcCtx = this.graph.createFilter(bufferFilter, 'in', args);
- if (!this.buffersrcCtx) {
- throw new Error('Failed to create buffer source');
- }
+ const cfg = this.config;
+ const sampleFmtName = avGetSampleFmtName(cfg.sampleFormat);
+ const channelLayout = cfg.channelLayout.mask === 0n ? 'stereo' : cfg.channelLayout.mask.toString();
+ args = `sample_rate=${cfg.sampleRate}:sample_fmt=${sampleFmtName}:channel_layout=${channelLayout}:time_base=${cfg.timeBase.num}/${cfg.timeBase.den}`;
+ }
+ this.buffersrcCtx = this.graph.createFilter(bufferFilter, 'in', args);
+ if (!this.buffersrcCtx) {
+ throw new Error('Failed to create buffer source');
  }
  }
  /**
- * Create
+ * Create buffer sink.
  *
- * @
+ * @throws {Error} If creation fails
  */
  createBufferSink() {
+ if (!this.graph) {
+ throw new Error('Filter graph not initialized');
+ }
  const filterName = this.config.type === 'video' ? 'buffersink' : 'abuffersink';
- const sinkFilter =
+ const sinkFilter = Filter.getByName(filterName);
  if (!sinkFilter) {
  throw new Error(`${filterName} filter not found`);
  }
- // Create sink filter - no automatic format conversion
  this.buffersinkCtx = this.graph.createFilter(sinkFilter, 'out', null);
  if (!this.buffersinkCtx) {
  throw new Error('Failed to create buffer sink');
  }
  }
  /**
- * Parse
+ * Parse filter description and build graph.
  *
- * @
+ * @param description - Filter description string
+ * @throws {Error} If parsing fails
+ * @throws {FFmpegError} If graph construction fails
  */
  parseFilterDescription(description) {
+ if (!this.graph) {
+ throw new Error('Filter graph not initialized');
+ }
  if (!this.buffersrcCtx || !this.buffersinkCtx) {
  throw new Error('Buffer filters not initialized');
  }
@@ -657,12 +726,12 @@ export class FilterAPI {
  return;
  }
  // Set up inputs and outputs for parsing
- const outputs = new
+ const outputs = new FilterInOut();
  outputs.alloc();
  outputs.name = 'in';
  outputs.filterCtx = this.buffersrcCtx;
  outputs.padIdx = 0;
- const inputs = new
+ const inputs = new FilterInOut();
  inputs.alloc();
  inputs.name = 'out';
  inputs.filterCtx = this.buffersinkCtx;
@@ -670,207 +739,64 @@ export class FilterAPI {
  // Parse the filter graph
  const ret = this.graph.parsePtr(description, inputs, outputs);
  FFmpegError.throwIfError(ret, 'Failed to parse filter description');
- // Clean up
+ // Clean up
  inputs.free();
  outputs.free();
  }
  /**
- *
- *
- * Allows runtime modification of filter parameters without recreating the graph.
- * Not all filters support commands - check filter documentation.
+ * Check hardware requirements for filters.
  *
- * @param
- * @param
- * @
- * @param flags - Optional command flags
- *
- * @returns Command response
- *
- * @example
- * ```typescript
- * // Change volume dynamically
- * const response = filter.sendCommand('volume', 'volume', '0.5');
- * if (response) {
- * console.log('Volume changed successfully');
- * }
- * ```
- *
- * @example
- * ```typescript
- * // Enable/disable all filters at runtime
- * filter.sendCommand('all', 'enable', 'expr=gte(t,10)');
- * ```
+ * @param description - Filter description
+ * @param options - Filter options
+ * @throws {Error} If hardware requirements not met
  */
-
- if (
-
- }
- const result = this.graph.sendCommand(target, cmd, arg, flags);
- if (typeof result === 'number') {
- FFmpegError.throwIfError(result, 'Failed to send filter command');
+ checkHardwareRequirements(description, options) {
+ if (this.config.type !== 'video') {
+ return;
  }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- * ```typescript
- * // Fade effect at specific timestamp
- * filter.queueCommand('fade', 'alpha', '0.5', 30.0);
- * ```
- */
- queueCommand(target, cmd, arg, ts, flags) {
- if (!this.initialized) {
- throw new Error('Filter not initialized');
+ // Parse filter names from description
+ const filterNames = description
+ .split(',')
+ .map((f) => {
+ // Extract filter name (before = or : or whitespace)
+ const match = /^([a-zA-Z0-9_]+)/.exec(f.trim());
+ return match ? match[1] : null;
+ })
+ .filter(Boolean);
+ for (const filterName of filterNames) {
+ const lowLevelFilter = Filter.getByName(filterName);
+ if (!lowLevelFilter) {
+ // Filter will be validated later during graph parsing
+ continue;
+ }
+ if (!options.hardware) {
+ if (filterName === 'hwupload' || filterName === 'hwupload_cuda' || (lowLevelFilter.flags & AVFILTER_FLAG_HWDEVICE) !== 0) {
+ throw new Error(`Filter '${filterName}' requires a hardware context`);
+ }
+ else if (filterName === 'hwdownload' && !avIsHardwarePixelFormat(this.config.pixelFormat)) {
+ throw new Error(`Pixel Format '${this.config.pixelFormat}' is not hardware compatible`);
+ }
+ }
  }
- const ret = this.graph.queueCommand(target, cmd, arg, ts, flags);
- FFmpegError.throwIfError(ret, 'Failed to queue filter command');
  }
  /**
- * Dispose of
+ * Dispose of filter.
  *
- * Implements
+ * Implements Disposable interface for automatic cleanup.
  * Equivalent to calling free().
  *
  * @example
  * ```typescript
  * {
- * using filter = await
- * //
- * } // Automatically freed
+ *   using filter = await FilterAPI.create('scale=640:480', info);
+ *   // Use filter...
+ * } // Automatically freed
  * ```
+ *
+ * @see {@link free} For manual cleanup
  */
  [Symbol.dispose]() {
  this.free();
  }
  }
- /**
- * Common filter presets for convenience.
- *
- * Provides pre-defined filter strings for common operations.
- * Can be used with Filter.create() for quick setup.
- *
- * @example
- * ```typescript
- * const filter = await Filter.create(
- * FilterPresets.scale(1280, 720),
- * config
- * );
- * ```
- */
- export class FilterPresets {
- /**
- * Scale video to specified dimensions.
- */
- static scale(width, height, flags) {
- const base = `scale=${width}:${height}`;
- return flags ? `${base}:flags=${flags}` : base;
- }
- /**
- * Crop video to specified dimensions.
- */
- static crop(width, height, x = 0, y = 0) {
- return `crop=${width}:${height}:${x}:${y}`;
- }
- /**
- * Change frame rate.
- */
- static fps(fps) {
- return `fps=${fps}`;
- }
- /**
- * Convert pixel format.
- * Can accept either format name string or AVPixelFormat enum.
- */
- static format(pixelFormat) {
- const formatName = typeof pixelFormat === 'string' ? pixelFormat : (avGetPixFmtName(pixelFormat) ?? 'yuv420p');
- return `format=${formatName}`;
- }
- /**
- * Rotate video by angle.
- */
- static rotate(angle) {
- return `rotate=${angle}*PI/180`;
- }
- /**
- * Flip video horizontally.
- */
- static hflip() {
- return 'hflip';
- }
- /**
- * Flip video vertically.
- */
- static vflip() {
- return 'vflip';
- }
- /**
- * Apply fade effect.
- */
- static fade(type, start, duration) {
- return `fade=t=${type}:st=${start}:d=${duration}`;
- }
- /**
- * Overlay one video on another.
- */
- static overlay(x = 0, y = 0) {
- return `overlay=${x}:${y}`;
- }
- /**
- * Adjust audio volume.
- */
- static volume(factor) {
- return `volume=${factor}`;
- }
- /**
- * Convert audio sample format.
- * Can accept either format name string or AVSampleFormat enum.
- */
- static aformat(sampleFormat, sampleRate, channelLayout) {
- const formatName = typeof sampleFormat === 'string' ? sampleFormat : (avGetSampleFmtName(sampleFormat) ?? 's16');
- let filter = `aformat=sample_fmts=${formatName}`;
- if (sampleRate)
- filter += `:sample_rates=${sampleRate}`;
- if (channelLayout)
- filter += `:channel_layouts=${channelLayout}`;
- return filter;
- }
- /**
- * Change audio tempo without changing pitch.
- */
- static atempo(factor) {
- return `atempo=${factor}`;
- }
- /**
- * Apply audio fade.
- */
- static afade(type, start, duration) {
- return `afade=t=${type}:st=${start}:d=${duration}`;
- }
- /**
- * Mix multiple audio streams.
- */
- static amix(inputs = 2, duration = 'longest') {
- return `amix=inputs=${inputs}:duration=${duration}`;
- }
- }
  //# sourceMappingURL=filter.js.map
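The FilterPresets helper class removed from filter.js above corresponds to the new dist/api/filter-presets.* files in the file list. A sketch of composing a filtergraph string from those preset helpers, based only on the method signatures visible in the removed code; whether 1.2.0 re-exports FilterPresets from 'node-av/api', and whether the new module changes these signatures, is an assumption:

```typescript
import { FilterAPI, FilterPresets } from 'node-av/api';

declare const videoInfo: any; // placeholder stream info from MediaInput/Decoder

// Build a filtergraph string from preset helpers (signatures as removed from filter.js).
const chain = [
  FilterPresets.scale(1280, 720),   // 'scale=1280:720'
  FilterPresets.fps(30),            // 'fps=30'
  FilterPresets.format('yuv420p'),  // 'format=yuv420p'
].join(',');

const filter = await FilterAPI.create(chain, videoInfo);
```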