node-av 1.3.0 → 2.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +47 -40
- package/binding.gyp +12 -0
- package/dist/api/bitstream-filter.d.ts +134 -2
- package/dist/api/bitstream-filter.js +200 -2
- package/dist/api/bitstream-filter.js.map +1 -1
- package/dist/api/decoder.d.ts +261 -105
- package/dist/api/decoder.js +384 -171
- package/dist/api/decoder.js.map +1 -1
- package/dist/api/encoder.d.ts +338 -74
- package/dist/api/encoder.js +546 -188
- package/dist/api/encoder.js.map +1 -1
- package/dist/api/filter-presets.d.ts +479 -1513
- package/dist/api/filter-presets.js +1044 -2005
- package/dist/api/filter-presets.js.map +1 -1
- package/dist/api/filter.d.ts +370 -150
- package/dist/api/filter.js +647 -364
- package/dist/api/filter.js.map +1 -1
- package/dist/api/hardware.d.ts +25 -31
- package/dist/api/hardware.js +36 -70
- package/dist/api/hardware.js.map +1 -1
- package/dist/api/index.d.ts +1 -1
- package/dist/api/index.js +1 -1
- package/dist/api/index.js.map +1 -1
- package/dist/api/io-stream.d.ts +6 -0
- package/dist/api/io-stream.js +2 -0
- package/dist/api/io-stream.js.map +1 -1
- package/dist/api/media-input.d.ts +208 -2
- package/dist/api/media-input.js +356 -8
- package/dist/api/media-input.js.map +1 -1
- package/dist/api/media-output.d.ts +142 -104
- package/dist/api/media-output.js +446 -179
- package/dist/api/media-output.js.map +1 -1
- package/dist/api/pipeline.d.ts +82 -17
- package/dist/api/pipeline.js +80 -42
- package/dist/api/pipeline.js.map +1 -1
- package/dist/api/types.d.ts +24 -57
- package/dist/api/utils.js +2 -0
- package/dist/api/utils.js.map +1 -1
- package/dist/lib/audio-fifo.d.ts +103 -0
- package/dist/lib/audio-fifo.js +109 -0
- package/dist/lib/audio-fifo.js.map +1 -1
- package/dist/lib/binding.d.ts +1 -0
- package/dist/lib/binding.js.map +1 -1
- package/dist/lib/bitstream-filter-context.d.ts +79 -0
- package/dist/lib/bitstream-filter-context.js +83 -0
- package/dist/lib/bitstream-filter-context.js.map +1 -1
- package/dist/lib/bitstream-filter.d.ts +2 -0
- package/dist/lib/bitstream-filter.js +2 -0
- package/dist/lib/bitstream-filter.js.map +1 -1
- package/dist/lib/codec-context.d.ts +168 -0
- package/dist/lib/codec-context.js +178 -0
- package/dist/lib/codec-context.js.map +1 -1
- package/dist/lib/codec-parameters.d.ts +3 -0
- package/dist/lib/codec-parameters.js +3 -0
- package/dist/lib/codec-parameters.js.map +1 -1
- package/dist/lib/codec-parser.d.ts +6 -0
- package/dist/lib/codec-parser.js +6 -0
- package/dist/lib/codec-parser.js.map +1 -1
- package/dist/lib/codec.d.ts +12 -0
- package/dist/lib/codec.js +12 -0
- package/dist/lib/codec.js.map +1 -1
- package/dist/lib/dictionary.d.ts +18 -2
- package/dist/lib/dictionary.js +18 -2
- package/dist/lib/dictionary.js.map +1 -1
- package/dist/lib/error.d.ts +8 -0
- package/dist/lib/error.js +9 -0
- package/dist/lib/error.js.map +1 -1
- package/dist/lib/filter-context.d.ts +119 -2
- package/dist/lib/filter-context.js +119 -0
- package/dist/lib/filter-context.js.map +1 -1
- package/dist/lib/filter-graph.d.ts +80 -0
- package/dist/lib/filter-graph.js +84 -0
- package/dist/lib/filter-graph.js.map +1 -1
- package/dist/lib/filter-inout.d.ts +1 -0
- package/dist/lib/filter-inout.js +1 -0
- package/dist/lib/filter-inout.js.map +1 -1
- package/dist/lib/filter.d.ts +2 -0
- package/dist/lib/filter.js +2 -0
- package/dist/lib/filter.js.map +1 -1
- package/dist/lib/format-context.d.ts +356 -20
- package/dist/lib/format-context.js +375 -23
- package/dist/lib/format-context.js.map +1 -1
- package/dist/lib/frame.d.ts +84 -1
- package/dist/lib/frame.js +96 -0
- package/dist/lib/frame.js.map +1 -1
- package/dist/lib/hardware-device-context.d.ts +8 -0
- package/dist/lib/hardware-device-context.js +8 -0
- package/dist/lib/hardware-device-context.js.map +1 -1
- package/dist/lib/hardware-frames-context.d.ts +55 -0
- package/dist/lib/hardware-frames-context.js +57 -0
- package/dist/lib/hardware-frames-context.js.map +1 -1
- package/dist/lib/input-format.d.ts +43 -3
- package/dist/lib/input-format.js +48 -0
- package/dist/lib/input-format.js.map +1 -1
- package/dist/lib/io-context.d.ts +212 -0
- package/dist/lib/io-context.js +228 -0
- package/dist/lib/io-context.js.map +1 -1
- package/dist/lib/log.d.ts +2 -0
- package/dist/lib/log.js +2 -0
- package/dist/lib/log.js.map +1 -1
- package/dist/lib/native-types.d.ts +39 -1
- package/dist/lib/option.d.ts +90 -0
- package/dist/lib/option.js +97 -0
- package/dist/lib/option.js.map +1 -1
- package/dist/lib/output-format.d.ts +4 -0
- package/dist/lib/output-format.js +4 -0
- package/dist/lib/output-format.js.map +1 -1
- package/dist/lib/packet.d.ts +7 -0
- package/dist/lib/packet.js +7 -0
- package/dist/lib/packet.js.map +1 -1
- package/dist/lib/rational.d.ts +1 -0
- package/dist/lib/rational.js +1 -0
- package/dist/lib/rational.js.map +1 -1
- package/dist/lib/software-resample-context.d.ts +64 -0
- package/dist/lib/software-resample-context.js +66 -0
- package/dist/lib/software-resample-context.js.map +1 -1
- package/dist/lib/software-scale-context.d.ts +98 -0
- package/dist/lib/software-scale-context.js +102 -0
- package/dist/lib/software-scale-context.js.map +1 -1
- package/dist/lib/stream.d.ts +1 -0
- package/dist/lib/stream.js +1 -0
- package/dist/lib/stream.js.map +1 -1
- package/dist/lib/utilities.d.ts +60 -0
- package/dist/lib/utilities.js +60 -0
- package/dist/lib/utilities.js.map +1 -1
- package/package.json +18 -18
- package/release_notes.md +0 -29
package/dist/api/filter.js
CHANGED
@@ -1,5 +1,6 @@
-import { AVERROR_EAGAIN, AVERROR_EOF, AVFILTER_FLAG_HWDEVICE
-import {
+import { AVERROR_EAGAIN, AVERROR_EOF, AVFILTER_FLAG_HWDEVICE } from '../constants/constants.js';
+import { FFmpegError, Filter, FilterGraph, FilterInOut, Frame } from '../lib/index.js';
+import { avGetSampleFmtName } from '../lib/utilities.js';
 /**
 * High-level filter API for audio and video processing.
 *
@@ -12,10 +13,12 @@ import { avGetSampleFmtName, avIsHardwarePixelFormat, FFmpegError, Filter, Filte
 * ```typescript
 * import { FilterAPI } from 'node-av/api';
 *
-* // Create video filter
-* const filter = await FilterAPI.create('scale=1280:720',
+* // Create video filter - initializes on first frame
+* const filter = await FilterAPI.create('scale=1280:720', {
+* timeBase: video.timeBase,
+* });
 *
-* // Process frame
+* // Process frame - first frame configures filter graph
 * const output = await filter.process(inputFrame);
 * if (output) {
 * console.log(`Filtered frame: ${output.width}x${output.height}`);
@@ -25,54 +28,51 @@
 *
 * @example
 * ```typescript
-* // Hardware-accelerated filtering
-* const
-*
-*
-*
-* { hardware: hw }
-* );
+* // Hardware-accelerated filtering - hw context detected from frame
+* const filter = await FilterAPI.create('hwupload,scale_cuda=1920:1080,hwdownload', {
+* timeBase: video.timeBase,
+* });
+* // Hardware frames context will be automatically detected from first frame
 * ```
 *
 * @see {@link FilterGraph} For low-level filter graph API
-* @see {@link HardwareContext} For hardware acceleration
 * @see {@link Frame} For frame operations
 */
 export class FilterAPI {
-graph
+graph;
+description;
+options;
 buffersrcCtx = null;
 buffersinkCtx = null;
-config;
-mediaType;
 initialized = false;
-
-description;
-options;
+isClosed = false;
 /**
-* @param
+* @param graph - Filter graph instance
+*
 * @param description - Filter description string
+*
 * @param options - Filter options
+*
 * @internal
 */
-constructor(
-this.
+constructor(graph, description, options) {
+this.graph = graph;
 this.description = description;
 this.options = options;
-this.hardware = options.hardware;
-this.mediaType = config.type === 'video' ? AVMEDIA_TYPE_VIDEO : AVMEDIA_TYPE_AUDIO;
 }
 /**
 * Create a filter with specified description and configuration.
 *
-*
-*
-*
+* Creates and allocates filter graph immediately.
+* Filter configuration is completed on first frame with frame properties.
+* Hardware frames context is automatically detected from input frames.
 *
 * Direct mapping to avfilter_graph_parse_ptr() and avfilter_graph_config().
 *
 * @param description - Filter graph description
-*
-* @param options - Filter options
+*
+* @param options - Filter options including required timeBase
+*
 * @returns Configured filter instance
 *
 * @throws {Error} If filter creation or configuration fails
@@ -82,78 +82,125 @@ export class FilterAPI {
 * @example
 * ```typescript
 * // Simple video filter
-* const filter = await FilterAPI.create('scale=640:480',
+* const filter = await FilterAPI.create('scale=640:480', {
+* timeBase: video.timeBase
+* });
 * ```
 *
 * @example
 * ```typescript
 * // Complex filter chain
-* const filter = await FilterAPI.create(
-*
-*
-* );
+* const filter = await FilterAPI.create('crop=640:480:0:0,rotate=PI/4', {
+* timeBase: video.timeBase
+* });
 * ```
 *
 * @example
 * ```typescript
 * // Audio filter
-* const filter = await FilterAPI.create(
-*
-*
-* );
+* const filter = await FilterAPI.create('volume=0.5,aecho=0.8:0.9:1000:0.3', {
+* timeBase: audio.timeBase
+* });
 * ```
 *
 * @see {@link process} For frame processing
 * @see {@link FilterOptions} For configuration options
 */
-static async create(description,
-
-
-
-
-
-
-pixelFormat: input.pixelFormat,
-timeBase: input.timeBase,
-frameRate: input.frameRate,
-sampleAspectRatio: input.sampleAspectRatio,
-};
+static async create(description, options) {
+// Create graph
+const graph = new FilterGraph();
+graph.alloc();
+// Configure threading
+if (options.threads !== undefined) {
+graph.nbThreads = options.threads;
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+// Configure scaler options
+if (options.scaleSwsOpts) {
+graph.scaleSwsOpts = options.scaleSwsOpts;
+}
+return new FilterAPI(graph, description, options);
+}
+/**
+* Check if filter is open.
+*
+* @example
+* ```typescript
+* if (filter.isFilterOpen) {
+* const output = await filter.process(frame);
+* }
+* ```
+*/
+get isFilterOpen() {
+return !this.isClosed;
+}
+/**
+* Check if filter has been initialized.
+*
+* Returns true after first frame has been processed and filter graph configured.
+* Useful for checking if filter has received frame properties.
+*
+* @returns true if filter graph has been built from first frame
+*
+* @example
+* ```typescript
+* if (!filter.isFilterInitialized) {
+* console.log('Filter will initialize on first frame');
+* }
+* ```
+*/
+get isFilterInitialized() {
+return this.initialized;
+}
+/**
+* Check if filter is ready for processing.
+*
+* @returns true if initialized and ready
+*
+* @example
+* ```typescript
+* if (filter.isReady()) {
+* const output = await filter.process(frame);
+* }
+* ```
+*/
+isReady() {
+return this.initialized && this.buffersrcCtx !== null && this.buffersinkCtx !== null && !this.isClosed;
+}
+/**
+* Get filter graph description.
+*
+* Returns human-readable graph structure.
+* Useful for debugging filter chains.
+*
+* Direct mapping to avfilter_graph_dump().
+*
+* @returns Graph description or null if closed
+*
+* @example
+* ```typescript
+* const description = filter.getGraphDescription();
+* console.log('Filter graph:', description);
+* ```
+*/
+getGraphDescription() {
+return !this.isClosed && this.initialized ? this.graph.dump() : null;
 }
 /**
 * Process a frame through the filter.
 *
 * Applies filter operations to input frame.
+* On first frame, automatically builds filter graph with frame properties.
 * May buffer frames internally before producing output.
-*
+* Hardware frames context is automatically detected from frame.
+* Returns null if filter is closed and frame is null.
 *
 * Direct mapping to av_buffersrc_add_frame() and av_buffersink_get_frame().
 *
-* @param frame - Input frame to process
+* @param frame - Input frame to process (or null to flush)
+*
 * @returns Filtered frame or null if buffered
 *
-* @throws {Error} If filter
+* @throws {Error} If filter is closed with non-null frame
 *
 * @throws {FFmpegError} If processing fails
 *
@@ -168,26 +215,33 @@ export class FilterAPI {
 *
 * @example
 * ```typescript
-* // Process
+* // Process frame - may buffer internally
 * const output = await filter.process(frame);
-* if (output)
-*
-*
-* let buffered;
-* while ((buffered = await filter.receive()) !== null) {
-* yield buffered;
+* if (output) {
+* // Got output immediately
+* yield output;
 * }
+* // For buffered frames, use the frames() async generator
 * ```
 *
-* @see {@link
-* @see {@link
+* @see {@link frames} For processing frame streams
+* @see {@link flush} For end-of-stream handling
 */
 async process(frame) {
-
-
+if (this.isClosed) {
+if (!frame) {
+return null;
+}
+throw new Error('Filter is closed');
+}
+// Open filter if not already done
+if (!this.initialized) {
+if (!frame) {
+return null;
+}
 await this.initialize(frame);
 }
-if (!this.
+if (!this.buffersrcCtx || !this.buffersinkCtx) {
 throw new Error('Filter not initialized');
 }
 // Send frame to filter
@@ -211,6 +265,86 @@ export class FilterAPI {
 return null;
 }
 }
+/**
+* Process a frame through the filter synchronously.
+* Synchronous version of process.
+*
+* Applies filter operations to input frame.
+* On first frame, automatically builds filter graph with frame properties.
+* May buffer frames internally before producing output.
+* Hardware frames context is automatically detected from frame.
+* Returns null if filter is closed and frame is null.
+*
+* Direct mapping to av_buffersrc_add_frame() and av_buffersink_get_frame().
+*
+* @param frame - Input frame to process (or null to flush)
+*
+* @returns Filtered frame or null if buffered
+*
+* @throws {Error} If filter is closed with non-null frame
+*
+* @throws {FFmpegError} If processing fails
+*
+* @example
+* ```typescript
+* const output = filter.processSync(inputFrame);
+* if (output) {
+* console.log(`Got filtered frame: pts=${output.pts}`);
+* output.free();
+* }
+* ```
+*
+* @example
+* ```typescript
+* // Process frame - may buffer internally
+* const output = filter.processSync(frame);
+* if (output) {
+* // Got output immediately
+* yield output;
+* }
+* // For buffered frames, use the framesSync() generator
+* ```
+*
+* @see {@link process} For async version
+*/
+processSync(frame) {
+if (this.isClosed) {
+if (!frame) {
+return null;
+}
+throw new Error('Filter is closed');
+}
+// Open filter if not already done
+if (!this.initialized) {
+if (!frame) {
+return null;
+}
+this.initializeSync(frame);
+}
+if (!this.buffersrcCtx || !this.buffersinkCtx) {
+throw new Error('Filter not initialized');
+}
+// Send frame to filter
+const addRet = this.buffersrcCtx.buffersrcAddFrameSync(frame);
+FFmpegError.throwIfError(addRet, 'Failed to add frame to filter');
+// Try to get filtered frame
+const outputFrame = new Frame();
+outputFrame.alloc();
+const getRet = this.buffersinkCtx.buffersinkGetFrameSync(outputFrame);
+if (getRet >= 0) {
+return outputFrame;
+}
+else if (getRet === AVERROR_EAGAIN) {
+// Need more input
+outputFrame.free();
+return null;
+}
+else {
+outputFrame.free();
+FFmpegError.throwIfError(getRet, 'Failed to get frame from filter');
+return null;
+}
+}
 /**
 * Process multiple frames at once.
 *
@@ -218,6 +352,7 @@ export class FilterAPI {
 * Useful for filters that buffer multiple frames.
 *
 * @param frames - Array of input frames
+*
 * @returns Array of all output frames
 *
 * @throws {Error} If filter not ready
@@ -253,81 +388,258 @@ export class FilterAPI {
 return outputFrames;
 }
 /**
-*
+* Process multiple frames at once synchronously.
+* Synchronous version of processMultiple.
 *
-*
-*
+* Processes batch of frames and drains all output.
+* Useful for filters that buffer multiple frames.
 *
-*
+* @param frames - Array of input frames
 *
-* @returns
+* @returns Array of all output frames
+*
+* @throws {Error} If filter not ready
+*
+* @throws {FFmpegError} If processing fails
+*
+* @example
+* ```typescript
+* const outputs = filter.processMultipleSync([frame1, frame2, frame3]);
+* for (const output of outputs) {
+* console.log(`Output frame: pts=${output.pts}`);
+* output.free();
+* }
+* ```
+*
+* @see {@link processMultiple} For async version
+*/
+processMultipleSync(frames) {
+const outputFrames = [];
+for (const frame of frames) {
+const output = this.processSync(frame);
+if (output) {
+outputFrames.push(output);
+}
+// Drain any additional frames
+while (true) {
+const additional = this.receiveSync();
+if (!additional)
+break;
+outputFrames.push(additional);
+}
+}
+return outputFrames;
+}
+/**
+* Process frame stream through filter.
+*
+* High-level async generator for filtering frame streams.
+* Automatically handles buffering and flushing.
+* Frees input frames after processing.
+*
+* @param frames - Async generator of input frames
+*
+* @yields {Frame} Filtered frames
+*
+* @throws {Error} If filter not ready
+*
+* @throws {FFmpegError} If processing fails
+*
+* @example
+* ```typescript
+* // Filter decoded frames
+* for await (const frame of filter.frames(decoder.frames(packets))) {
+* await encoder.encode(frame);
+* frame.free();
+* }
+* ```
+*
+* @example
+* ```typescript
+* // Chain filters
+* const filter1 = await FilterAPI.create('scale=640:480', {
+* timeBase: video.timeBase
+* });
+* const filter2 = await FilterAPI.create('rotate=PI/4', {
+* timeBase: video.timeBase
+* });
+*
+* for await (const frame of filter2.frames(filter1.frames(input))) {
+* // Process filtered frames
+* frame.free();
+* }
+* ```
+*
+* @see {@link process} For single frame processing
+* @see {@link flush} For end-of-stream handling
+*/
+async *frames(frames) {
+for await (const frame of frames) {
+try {
+// Process input frame
+const output = await this.process(frame);
+if (output) {
+yield output;
+}
+// Drain any buffered frames
+while (true) {
+const buffered = await this.receive();
+if (!buffered)
+break;
+yield buffered;
+}
+}
+finally {
+// Free the input frame after processing
+frame.free();
+}
+}
+// Flush and get remaining frames
+await this.flush();
+while (true) {
+const remaining = await this.receive();
+if (!remaining)
+break;
+yield remaining;
+}
+}
+/**
+* Process frame stream through filter synchronously.
+* Synchronous version of frames.
+*
+* High-level sync generator for filtering frame streams.
+* Automatically handles buffering and flushing.
+* Frees input frames after processing.
+*
+* @param frames - Generator of input frames
+*
+* @yields {Frame} Filtered frames
 *
 * @throws {Error} If filter not ready
 *
-* @throws {FFmpegError} If
+* @throws {FFmpegError} If processing fails
+*
+* @example
+* ```typescript
+* // Filter decoded frames
+* for (const frame of filter.framesSync(decoder.framesSync(packets))) {
+* encoder.encodeSync(frame);
+* frame.free();
+* }
+* ```
+*
+* @example
+* ```typescript
+* // Chain filters
+* const filter1 = await FilterAPI.create('scale=640:480', {
+* timeBase: video.timeBase
+* });
+* const filter2 = await FilterAPI.create('rotate=PI/4', {
+* timeBase: video.timeBase
+* });
+*
+* for (const frame of filter2.framesSync(filter1.framesSync(input))) {
+* // Process filtered frames
+* frame.free();
+* }
+* ```
+*
+* @see {@link frames} For async version
+*/
+*framesSync(frames) {
+for (const frame of frames) {
+try {
+// Process input frame
+const output = this.processSync(frame);
+if (output) {
+yield output;
+}
+// Drain any buffered frames
+while (true) {
+const buffered = this.receiveSync();
+if (!buffered)
+break;
+yield buffered;
+}
+}
+finally {
+// Free the input frame after processing
+frame.free();
+}
+}
+// Flush and get remaining frames
+this.flushSync();
+while (true) {
+const remaining = this.receiveSync();
+if (!remaining)
+break;
+yield remaining;
+}
+}
+/**
+* Flush filter and signal end-of-stream.
+*
+* Sends null frame to flush buffered data.
+* Must call receive() to get flushed frames.
+* Does nothing if filter is closed or was never initialized.
+*
+* Direct mapping to av_buffersrc_add_frame(NULL).
+*
+* @throws {FFmpegError} If flush fails
 *
 * @example
 * ```typescript
-*
+* await filter.flush();
+* // Get remaining frames
 * let frame;
 * while ((frame = await filter.receive()) !== null) {
-* console.log(`Buffered frame: pts=${frame.pts}`);
 * frame.free();
 * }
 * ```
 *
-* @see {@link
-* @see {@link
+* @see {@link flushFrames} For async iteration
+* @see {@link frames} For complete pipeline
 */
-async
-if (!this.initialized || !this.
-
-}
-const frame = new Frame();
-frame.alloc();
-const ret = await this.buffersinkCtx.buffersinkGetFrame(frame);
-if (ret >= 0) {
-return frame;
+async flush() {
+if (this.isClosed || !this.initialized || !this.buffersrcCtx) {
+return;
 }
-
-
-
-
-}
-FFmpegError.throwIfError(ret, 'Failed to receive frame from filter');
-return null;
+// Send flush frame (null)
+const ret = await this.buffersrcCtx.buffersrcAddFrame(null);
+if (ret < 0 && ret !== AVERROR_EOF) {
+FFmpegError.throwIfError(ret, 'Failed to flush filter');
 }
 }
 /**
-* Flush filter and signal end-of-stream.
+* Flush filter and signal end-of-stream synchronously.
+* Synchronous version of flush.
 *
 * Sends null frame to flush buffered data.
-* Must call
+* Must call receiveSync() to get flushed frames.
+* Does nothing if filter is closed or was never initialized.
 *
 * Direct mapping to av_buffersrc_add_frame(NULL).
 *
-* @throws {Error} If filter not ready
-*
 * @throws {FFmpegError} If flush fails
 *
 * @example
 * ```typescript
-*
+* filter.flushSync();
 * // Get remaining frames
 * let frame;
-* while ((frame =
+* while ((frame = filter.receiveSync()) !== null) {
 * frame.free();
 * }
 * ```
 *
-* @see {@link
-* @see {@link receive} For draining frames
+* @see {@link flush} For async version
 */
-
-if (!this.initialized || !this.buffersrcCtx) {
-
+flushSync() {
+if (this.isClosed || !this.initialized || !this.buffersrcCtx) {
+return;
 }
-
+// Send flush frame (null)
+const ret = this.buffersrcCtx.buffersrcAddFrameSync(null);
 if (ret < 0 && ret !== AVERROR_EOF) {
 FFmpegError.throwIfError(ret, 'Failed to flush filter');
 }
@@ -337,9 +649,9 @@ export class FilterAPI {
 *
 * Convenient async generator for flushing.
 * Combines flush and receive operations.
+* Returns immediately if filter is closed or was never initialized.
 *
-* @yields Remaining frames from filter
-* @throws {Error} If filter not ready
+* @yields {Frame} Remaining frames from filter
 *
 * @throws {FFmpegError} If flush fails
 *
@@ -355,9 +667,6 @@ export class FilterAPI {
 * @see {@link frames} For complete pipeline
 */
 async *flushFrames() {
-if (!this.initialized || !this.buffersrcCtx) {
-throw new Error('Filter not initialized');
-}
 // Send flush signal
 await this.flush();
 // Yield all remaining frames
@@ -367,70 +676,119 @@ export class FilterAPI {
 }
 }
 /**
-*
+* Flush filter and yield remaining frames synchronously.
+* Synchronous version of flushFrames.
 *
-*
-*
-*
+* Convenient sync generator for flushing.
+* Combines flush and receive operations.
+* Returns immediately if filter is closed or was never initialized.
 *
-* @
-* @yields Filtered frames
-* @throws {Error} If filter not ready
+* @yields {Frame} Remaining frames from filter
 *
-* @throws {FFmpegError} If
+* @throws {FFmpegError} If flush fails
 *
 * @example
 * ```typescript
-*
-*
-* await encoder.encode(frame);
+* for (const frame of filter.flushFramesSync()) {
+* console.log(`Flushed frame: pts=${frame.pts}`);
 * frame.free();
 * }
 * ```
 *
+* @see {@link flushFrames} For async version
+*/
+*flushFramesSync() {
+// Send flush signal
+this.flushSync();
+// Yield all remaining frames
+let frame;
+while ((frame = this.receiveSync()) !== null) {
+yield frame;
+}
+}
+/**
+* Receive buffered frame from filter.
+*
+* Drains frames buffered by the filter.
+* Call repeatedly until null to get all buffered frames.
+* Returns null if filter is closed, not initialized, or no frames available.
+*
+* Direct mapping to av_buffersink_get_frame().
+*
+* @returns Buffered frame or null if none available
+*
+* @throws {FFmpegError} If receiving fails
+*
 * @example
 * ```typescript
-*
-*
-*
+* let frame;
+* while ((frame = await filter.receive()) !== null) {
+* console.log(`Received frame: pts=${frame.pts}`);
+* frame.free();
+* }
+* ```
+*/
+async receive() {
+if (this.isClosed || !this.initialized || !this.buffersinkCtx) {
+return null;
+}
+const frame = new Frame();
+frame.alloc();
+const ret = await this.buffersinkCtx.buffersinkGetFrame(frame);
+if (ret >= 0) {
+return frame;
+}
+else {
+frame.free();
+if (ret === AVERROR_EAGAIN || ret === AVERROR_EOF) {
+return null;
+}
+FFmpegError.throwIfError(ret, 'Failed to receive frame from filter');
+return null;
+}
+}
+/**
+* Receive buffered frame from filter synchronously.
+* Synchronous version of receive.
 *
-*
-*
+* Drains frames buffered by the filter.
+* Call repeatedly until null to get all buffered frames.
+* Returns null if filter is closed, not initialized, or no frames available.
+*
+* Direct mapping to av_buffersink_get_frame().
+*
+* @returns Buffered frame or null if none available
+*
+* @throws {FFmpegError} If receiving fails
+*
+* @example
+* ```typescript
+* let frame;
+* while ((frame = filter.receiveSync()) !== null) {
+* console.log(`Received frame: pts=${frame.pts}`);
 * frame.free();
 * }
 * ```
 *
-* @see {@link
-* @see {@link flush} For end-of-stream handling
+* @see {@link receive} For async version
 */
-
-
-
-// Process input frame
-const output = await this.process(frame);
-if (output) {
-yield output;
-}
-// Drain any buffered frames
-while (true) {
-const buffered = await this.receive();
-if (!buffered)
-break;
-yield buffered;
-}
-}
-finally {
-// Free the input frame after processing
-frame.free();
-}
+receiveSync() {
+if (this.isClosed || !this.initialized || !this.buffersinkCtx) {
+return null;
 }
-
-
-
-
-
-
-
+const frame = new Frame();
+frame.alloc();
+const ret = this.buffersinkCtx.buffersinkGetFrameSync(frame);
+if (ret >= 0) {
+return frame;
+}
+else {
+frame.free();
+if (ret === AVERROR_EAGAIN || ret === AVERROR_EOF) {
+return null;
+}
+FFmpegError.throwIfError(ret, 'Failed to receive frame from filter');
+return null;
 }
 }
 /**
@@ -442,9 +800,13 @@ export class FilterAPI {
 * Direct mapping to avfilter_graph_send_command().
 *
 * @param target - Target filter name
+*
 * @param cmd - Command name
+*
 * @param arg - Command argument
+*
 * @param flags - Command flags
+*
 * @returns Response string from filter
 *
 * @throws {Error} If filter not ready
@@ -461,7 +823,10 @@ export class FilterAPI {
 * @see {@link queueCommand} For delayed commands
 */
 sendCommand(target, cmd, arg, flags) {
-if (
+if (this.isClosed) {
+throw new Error('Filter is closed');
+}
+if (!this.initialized) {
 throw new Error('Filter not initialized');
 }
 const result = this.graph.sendCommand(target, cmd, arg, flags);
@@ -479,10 +844,15 @@ export class FilterAPI {
 * Direct mapping to avfilter_graph_queue_command().
 *
 * @param target - Target filter name
+*
 * @param cmd - Command name
+*
 * @param arg - Command argument
+*
 * @param ts - Timestamp for execution
+*
 * @param flags - Command flags
+*
 * @throws {Error} If filter not ready
 *
 * @throws {FFmpegError} If queue fails
@@ -496,64 +866,15 @@ export class FilterAPI {
 * @see {@link sendCommand} For immediate commands
 */
 queueCommand(target, cmd, arg, ts, flags) {
-if (
+if (this.isClosed) {
+throw new Error('Filter is closed');
+}
+if (!this.initialized) {
 throw new Error('Filter not initialized');
 }
 const ret = this.graph.queueCommand(target, cmd, arg, ts, flags);
 FFmpegError.throwIfError(ret, 'Failed to queue filter command');
 }
-/**
-* Get filter graph description.
-*
-* Returns human-readable graph structure.
-* Useful for debugging filter chains.
-*
-* Direct mapping to avfilter_graph_dump().
-*
-* @returns Graph description or null if not initialized
-*
-* @example
-* ```typescript
-* const description = filter.getGraphDescription();
-* console.log('Filter graph:', description);
-* ```
-*/
-getGraphDescription() {
-if (!this.initialized || !this.graph) {
-return null;
-}
-return this.graph.dump();
-}
-/**
-* Check if filter is ready for processing.
-*
-* @returns true if initialized and ready
-*
-* @example
-* ```typescript
-* if (filter.isReady()) {
-* const output = await filter.process(frame);
-* }
-* ```
-*/
-isReady() {
-return this.initialized && this.buffersrcCtx !== null && this.buffersinkCtx !== null;
-}
-/**
-* Get media type of filter.
-*
-* @returns AVMEDIA_TYPE_VIDEO or AVMEDIA_TYPE_AUDIO
-*
-* @example
-* ```typescript
-* if (filter.getMediaType() === AVMEDIA_TYPE_VIDEO) {
-* console.log('Video filter');
-* }
-* ```
-*/
-getMediaType() {
-return this.mediaType;
-}
 /**
 * Free filter resources.
 *
@@ -562,27 +883,29 @@ export class FilterAPI {
 *
 * @example
 * ```typescript
-* filter.
+* filter.close();
 * ```
 *
 * @see {@link Symbol.dispose} For automatic cleanup
 */
-
-if (this.
-
-this.graph = null;
+close() {
+if (this.isClosed) {
+return;
 }
+this.isClosed = true;
+this.graph.free();
 this.buffersrcCtx = null;
 this.buffersinkCtx = null;
 this.initialized = false;
 }
 /**
-* Initialize filter graph.
+* Initialize filter graph from first frame.
 *
 * Creates and configures filter graph components.
-*
+* Sets buffer source parameters from frame properties.
+* Automatically configures hardware frames context if present.
 *
-* @param
+* @param frame - First frame to process, provides format and hw context
 *
 * @throws {Error} If initialization fails
 *
@@ -590,38 +913,20 @@ export class FilterAPI {
 *
 * @internal
 */
-async initialize(
-// Create
-this.
-this.graph.alloc();
-// Configure threading
-if (this.options.threads !== undefined) {
-this.graph.nbThreads = this.options.threads;
-}
-// Configure scaler options
-if (this.options.scaleSwsOpts) {
-this.graph.scaleSwsOpts = this.options.scaleSwsOpts;
-}
-// Create buffer source with hw_frames_ctx if needed
-if (firstFrame?.hwFramesCtx && this.config.type === 'video') {
-this.createBufferSourceWithHwFrames(firstFrame);
-}
-else {
-this.createBufferSource();
-}
+async initialize(frame) {
+// Create buffer source
+this.createBufferSource(frame);
 // Create buffer sink
-this.createBufferSink();
+this.createBufferSink(frame);
 // Parse filter description
 this.parseFilterDescription(this.description);
 // Set hw_device_ctx on hardware filters
-
-
-
-
-
-
-filterCtx.hwDeviceCtx = this.hardware.deviceContext;
-}
+const filters = this.graph.filters;
+if (filters) {
+for (const filterCtx of filters) {
+const filter = filterCtx.filter;
+if (filter && (filter.flags & AVFILTER_FLAG_HWDEVICE) !== 0) {
+filterCtx.hwDeviceCtx = frame.hwFramesCtx?.deviceRef ?? this.options.hardware?.deviceContext ?? null;
 }
 }
 }
@@ -631,91 +936,111 @@ export class FilterAPI {
 this.initialized = true;
 }
 /**
-*
+* Initialize filter graph from first frame synchronously.
+* Synchronous version of initialize.
 *
-*
+* Creates and configures filter graph components.
+* Sets buffer source parameters from frame properties.
+* Automatically configures hardware frames context if present.
 *
-* @
+* @param frame - First frame to process, provides format and hw context
+*
+* @throws {Error} If initialization fails
 *
 * @throws {FFmpegError} If configuration fails
 *
 * @internal
+*
+* @see {@link initialize} For async version
 */
-
-
-
-
-
+initializeSync(frame) {
+// Create buffer source
+this.createBufferSource(frame);
+// Create buffer sink
+this.createBufferSink(frame);
+// Parse filter description
+this.parseFilterDescription(this.description);
+// Set hw_device_ctx on hardware filters
+const filters = this.graph.filters;
+if (filters) {
+for (const filterCtx of filters) {
+const filter = filterCtx.filter;
+if (filter && (filter.flags & AVFILTER_FLAG_HWDEVICE) !== 0) {
+filterCtx.hwDeviceCtx = frame.hwFramesCtx?.deviceRef ?? this.options.hardware?.deviceContext ?? null;
+}
+}
 }
-//
-
-
-
-}
-// Set parameters including hw_frames_ctx
-const cfg = this.config;
-const ret = this.buffersrcCtx.buffersrcParametersSet({
-width: cfg.width,
-height: cfg.height,
-format: cfg.pixelFormat,
-timeBase: cfg.timeBase,
-frameRate: cfg.frameRate,
-sampleAspectRatio: cfg.sampleAspectRatio,
-hwFramesCtx: frame.hwFramesCtx ?? undefined,
-});
-FFmpegError.throwIfError(ret, 'Failed to set buffer source parameters');
-// Initialize filter
-const initRet = this.buffersrcCtx.init(null);
-FFmpegError.throwIfError(initRet, 'Failed to initialize buffer source');
+// Configure the graph
+const ret = this.graph.configSync();
+FFmpegError.throwIfError(ret, 'Failed to configure filter graph');
+this.initialized = true;
 }
 /**
-* Create
+* Create buffer source with frame parameters.
+*
+* Configures buffer source with frame properties including hardware context.
+* Automatically detects video/audio and sets appropriate parameters.
+*
+* @param frame - Frame providing format, dimensions, and hw_frames_ctx
 *
 * @throws {Error} If creation fails
 *
+* @throws {FFmpegError} If configuration fails
+*
 * @internal
 */
-createBufferSource() {
-const filterName =
+createBufferSource(frame) {
+const filterName = frame.isVideo() ? 'buffer' : 'abuffer';
 const bufferFilter = Filter.getByName(filterName);
 if (!bufferFilter) {
 throw new Error(`${filterName} filter not found`);
 }
-//
-
-
-
-
-
-args += `:frame_rate=${cfg.frameRate.num}/${cfg.frameRate.den}`;
-}
-if (cfg.sampleAspectRatio) {
-args += `:pixel_aspect=${cfg.sampleAspectRatio.num}/${cfg.sampleAspectRatio.den}`;
+// For audio, create with args. For video, use allocFilter + buffersrcParametersSet
+if (frame.isVideo()) {
+// Allocate filter without args
+this.buffersrcCtx = this.graph.allocFilter(bufferFilter, 'in');
+if (!this.buffersrcCtx) {
+throw new Error('Failed to allocate buffer source');
 }
+const ret = this.buffersrcCtx.buffersrcParametersSet({
+width: frame.width,
+height: frame.height,
+format: frame.format,
+timeBase: this.options.timeBase,
+frameRate: this.options.frameRate ?? frame.timeBase,
+sampleAspectRatio: frame.sampleAspectRatio,
+colorRange: frame.colorRange,
+colorSpace: frame.colorSpace,
+hwFramesCtx: frame.hwFramesCtx,
+});
+FFmpegError.throwIfError(ret, 'Failed to set buffer source parameters');
+// Initialize filter
+const initRet = this.buffersrcCtx.init(null);
+FFmpegError.throwIfError(initRet, 'Failed to initialize buffer source');
 }
 else {
-
-const
-const channelLayout =
-
-
-
-
-
+// For audio, create with args string
+const formatName = avGetSampleFmtName(frame.format);
+const channelLayout = frame.channelLayout.mask === 0n ? 'stereo' : frame.channelLayout.mask.toString();
+// eslint-disable-next-line @stylistic/max-len
+const args = `time_base=${this.options.timeBase.num}/${this.options.timeBase.den}:sample_rate=${frame.sampleRate}:sample_fmt=${formatName}:channel_layout=${channelLayout}`;
+this.buffersrcCtx = this.graph.createFilter(bufferFilter, 'in', args);
+if (!this.buffersrcCtx) {
+throw new Error('Failed to create audio buffer source');
+}
 }
 }
 /**
 * Create buffer sink.
 *
+* @param frame - Frame
+*
 * @throws {Error} If creation fails
 *
 * @internal
 */
-createBufferSink() {
-
-throw new Error('Filter graph not initialized');
-}
-const filterName = this.config.type === 'video' ? 'buffersink' : 'abuffersink';
+createBufferSink(frame) {
+const filterName = frame.isVideo() ? 'buffersink' : 'abuffersink';
 const sinkFilter = Filter.getByName(filterName);
 if (!sinkFilter) {
 throw new Error(`${filterName} filter not found`);
@@ -737,9 +1062,6 @@ export class FilterAPI {
 * @internal
 */
 parseFilterDescription(description) {
-if (!this.graph) {
-throw new Error('Filter graph not initialized');
-}
 if (!this.buffersrcCtx || !this.buffersinkCtx) {
 throw new Error('Buffer filters not initialized');
 }
@@ -768,63 +1090,24 @@ export class FilterAPI {
 inputs.free();
 outputs.free();
 }
-/**
-* Check hardware requirements for filters.
-*
-* @param description - Filter description
-* @param options - Filter options
-*
-* @throws {Error} If hardware requirements not met
-*
-* @internal
-*/
-checkHardwareRequirements(description, options) {
-if (this.config.type !== 'video') {
-return;
-}
-// Parse filter names from description
-const filterNames = description
-.split(',')
-.map((f) => {
-// Extract filter name (before = or : or whitespace)
-const match = /^([a-zA-Z0-9_]+)/.exec(f.trim());
-return match ? match[1] : null;
-})
-.filter(Boolean);
-for (const filterName of filterNames) {
-const lowLevelFilter = Filter.getByName(filterName);
-if (!lowLevelFilter) {
-// Filter will be validated later during graph parsing
-continue;
-}
-if (!options.hardware) {
-if (filterName === 'hwupload' || filterName === 'hwupload_cuda' || (lowLevelFilter.flags & AVFILTER_FLAG_HWDEVICE) !== 0) {
-throw new Error(`Filter '${filterName}' requires a hardware context`);
-}
-else if (filterName === 'hwdownload' && !avIsHardwarePixelFormat(this.config.pixelFormat)) {
-throw new Error(`Pixel Format '${this.config.pixelFormat}' is not hardware compatible`);
-}
-}
-}
-}
 /**
 * Dispose of filter.
 *
 * Implements Disposable interface for automatic cleanup.
-* Equivalent to calling
+* Equivalent to calling close().
 *
 * @example
 * ```typescript
 * {
-* using filter = await FilterAPI.create('scale=640:480',
+* using filter = await FilterAPI.create('scale=640:480', { ... });
 * // Use filter...
 * } // Automatically freed
 * ```
 *
-* @see {@link
+* @see {@link close} For manual cleanup
 */
 [Symbol.dispose]() {
-this.
+this.close();
 }
 }
 //# sourceMappingURL=filter.js.map
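The new JSDoc in this diff documents the 2.x FilterAPI workflow: create() takes an options object with a required timeBase, the filter graph is built lazily from the first frame, and buffered or flushed frames are drained via the frames() generator or receive()/flush(). A minimal sketch assembled from those doc examples; the video, decoder, encoder, and packets variables are assumptions and not part of the package:

```typescript
import { FilterAPI } from 'node-av/api';

// Assumed to exist in the surrounding program: `video` (a stream exposing a
// timeBase), plus `decoder`, `encoder`, and `packets` from the decode pipeline.
const filter = await FilterAPI.create('scale=1280:720', {
  timeBase: video.timeBase, // required in 2.x; the graph configures itself on the first frame
});

// frames() handles buffering and end-of-stream flushing internally.
for await (const frame of filter.frames(decoder.frames(packets))) {
  await encoder.encode(frame);
  frame.free();
}

filter.close(); // or rely on `using` / Symbol.dispose for automatic cleanup
```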