node-av 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +8 -0
- package/LICENSE.md +22 -0
- package/README.md +377 -0
- package/binding.gyp +78 -0
- package/dist/api/bitstream-filter.d.ts +246 -0
- package/dist/api/bitstream-filter.js +369 -0
- package/dist/api/bitstream-filter.js.map +1 -0
- package/dist/api/decoder.d.ts +257 -0
- package/dist/api/decoder.js +424 -0
- package/dist/api/decoder.js.map +1 -0
- package/dist/api/encoder.d.ts +298 -0
- package/dist/api/encoder.js +574 -0
- package/dist/api/encoder.js.map +1 -0
- package/dist/api/filter.d.ts +457 -0
- package/dist/api/filter.js +876 -0
- package/dist/api/filter.js.map +1 -0
- package/dist/api/hardware.d.ts +318 -0
- package/dist/api/hardware.js +558 -0
- package/dist/api/hardware.js.map +1 -0
- package/dist/api/index.d.ts +12 -0
- package/dist/api/index.js +20 -0
- package/dist/api/index.js.map +1 -0
- package/dist/api/io-stream.d.ts +109 -0
- package/dist/api/io-stream.js +124 -0
- package/dist/api/io-stream.js.map +1 -0
- package/dist/api/media-input.d.ts +295 -0
- package/dist/api/media-input.js +456 -0
- package/dist/api/media-input.js.map +1 -0
- package/dist/api/media-output.d.ts +274 -0
- package/dist/api/media-output.js +486 -0
- package/dist/api/media-output.js.map +1 -0
- package/dist/api/pipeline.d.ts +117 -0
- package/dist/api/pipeline.js +836 -0
- package/dist/api/pipeline.js.map +1 -0
- package/dist/api/types.d.ts +440 -0
- package/dist/api/types.js +2 -0
- package/dist/api/types.js.map +1 -0
- package/dist/api/utilities/audio-sample.d.ts +115 -0
- package/dist/api/utilities/audio-sample.js +110 -0
- package/dist/api/utilities/audio-sample.js.map +1 -0
- package/dist/api/utilities/channel-layout.d.ts +83 -0
- package/dist/api/utilities/channel-layout.js +87 -0
- package/dist/api/utilities/channel-layout.js.map +1 -0
- package/dist/api/utilities/image.d.ts +177 -0
- package/dist/api/utilities/image.js +183 -0
- package/dist/api/utilities/image.js.map +1 -0
- package/dist/api/utilities/index.d.ts +8 -0
- package/dist/api/utilities/index.js +17 -0
- package/dist/api/utilities/index.js.map +1 -0
- package/dist/api/utilities/media-type.d.ts +56 -0
- package/dist/api/utilities/media-type.js +60 -0
- package/dist/api/utilities/media-type.js.map +1 -0
- package/dist/api/utilities/pixel-format.d.ts +94 -0
- package/dist/api/utilities/pixel-format.js +102 -0
- package/dist/api/utilities/pixel-format.js.map +1 -0
- package/dist/api/utilities/sample-format.d.ts +132 -0
- package/dist/api/utilities/sample-format.js +144 -0
- package/dist/api/utilities/sample-format.js.map +1 -0
- package/dist/api/utilities/streaming.d.ts +104 -0
- package/dist/api/utilities/streaming.js +137 -0
- package/dist/api/utilities/streaming.js.map +1 -0
- package/dist/api/utilities/timestamp.d.ts +187 -0
- package/dist/api/utilities/timestamp.js +200 -0
- package/dist/api/utilities/timestamp.js.map +1 -0
- package/dist/api/utils.d.ts +61 -0
- package/dist/api/utils.js +330 -0
- package/dist/api/utils.js.map +1 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +5 -0
- package/dist/index.js.map +1 -0
- package/dist/lib/audio-fifo.d.ts +339 -0
- package/dist/lib/audio-fifo.js +365 -0
- package/dist/lib/audio-fifo.js.map +1 -0
- package/dist/lib/binding.d.ts +192 -0
- package/dist/lib/binding.js +70 -0
- package/dist/lib/binding.js.map +1 -0
- package/dist/lib/bitstream-filter-context.d.ts +345 -0
- package/dist/lib/bitstream-filter-context.js +407 -0
- package/dist/lib/bitstream-filter-context.js.map +1 -0
- package/dist/lib/bitstream-filter.d.ts +124 -0
- package/dist/lib/bitstream-filter.js +138 -0
- package/dist/lib/bitstream-filter.js.map +1 -0
- package/dist/lib/channel-layouts.d.ts +51 -0
- package/dist/lib/channel-layouts.js +55 -0
- package/dist/lib/channel-layouts.js.map +1 -0
- package/dist/lib/codec-context.d.ts +763 -0
- package/dist/lib/codec-context.js +974 -0
- package/dist/lib/codec-context.js.map +1 -0
- package/dist/lib/codec-parameters.d.ts +362 -0
- package/dist/lib/codec-parameters.js +460 -0
- package/dist/lib/codec-parameters.js.map +1 -0
- package/dist/lib/codec-parser.d.ts +185 -0
- package/dist/lib/codec-parser.js +193 -0
- package/dist/lib/codec-parser.js.map +1 -0
- package/dist/lib/codec.d.ts +432 -0
- package/dist/lib/codec.js +492 -0
- package/dist/lib/codec.js.map +1 -0
- package/dist/lib/constants.d.ts +2037 -0
- package/dist/lib/constants.js +1659 -0
- package/dist/lib/constants.js.map +1 -0
- package/dist/lib/dictionary.d.ts +371 -0
- package/dist/lib/dictionary.js +406 -0
- package/dist/lib/dictionary.js.map +1 -0
- package/dist/lib/error.d.ts +216 -0
- package/dist/lib/error.js +254 -0
- package/dist/lib/error.js.map +1 -0
- package/dist/lib/filter-context.d.ts +445 -0
- package/dist/lib/filter-context.js +505 -0
- package/dist/lib/filter-context.js.map +1 -0
- package/dist/lib/filter-graph.d.ts +556 -0
- package/dist/lib/filter-graph.js +608 -0
- package/dist/lib/filter-graph.js.map +1 -0
- package/dist/lib/filter-inout.d.ts +205 -0
- package/dist/lib/filter-inout.js +264 -0
- package/dist/lib/filter-inout.js.map +1 -0
- package/dist/lib/filter.d.ts +231 -0
- package/dist/lib/filter.js +260 -0
- package/dist/lib/filter.js.map +1 -0
- package/dist/lib/format-context.d.ts +798 -0
- package/dist/lib/format-context.js +845 -0
- package/dist/lib/format-context.js.map +1 -0
- package/dist/lib/frame.d.ts +784 -0
- package/dist/lib/frame.js +933 -0
- package/dist/lib/frame.js.map +1 -0
- package/dist/lib/hardware-device-context.d.ts +407 -0
- package/dist/lib/hardware-device-context.js +429 -0
- package/dist/lib/hardware-device-context.js.map +1 -0
- package/dist/lib/hardware-frames-context.d.ts +374 -0
- package/dist/lib/hardware-frames-context.js +430 -0
- package/dist/lib/hardware-frames-context.js.map +1 -0
- package/dist/lib/index.d.ts +31 -0
- package/dist/lib/index.js +54 -0
- package/dist/lib/index.js.map +1 -0
- package/dist/lib/input-format.d.ts +216 -0
- package/dist/lib/input-format.js +246 -0
- package/dist/lib/input-format.js.map +1 -0
- package/dist/lib/io-context.d.ts +495 -0
- package/dist/lib/io-context.js +550 -0
- package/dist/lib/io-context.js.map +1 -0
- package/dist/lib/log.d.ts +201 -0
- package/dist/lib/log.js +219 -0
- package/dist/lib/log.js.map +1 -0
- package/dist/lib/native-types.d.ts +719 -0
- package/dist/lib/native-types.js +2 -0
- package/dist/lib/native-types.js.map +1 -0
- package/dist/lib/option.d.ts +589 -0
- package/dist/lib/option.js +853 -0
- package/dist/lib/option.js.map +1 -0
- package/dist/lib/output-format.d.ts +179 -0
- package/dist/lib/output-format.js +205 -0
- package/dist/lib/output-format.js.map +1 -0
- package/dist/lib/packet.d.ts +487 -0
- package/dist/lib/packet.js +558 -0
- package/dist/lib/packet.js.map +1 -0
- package/dist/lib/rational.d.ts +210 -0
- package/dist/lib/rational.js +233 -0
- package/dist/lib/rational.js.map +1 -0
- package/dist/lib/software-resample-context.d.ts +572 -0
- package/dist/lib/software-resample-context.js +610 -0
- package/dist/lib/software-resample-context.js.map +1 -0
- package/dist/lib/software-scale-context.d.ts +290 -0
- package/dist/lib/software-scale-context.js +308 -0
- package/dist/lib/software-scale-context.js.map +1 -0
- package/dist/lib/stream.d.ts +322 -0
- package/dist/lib/stream.js +408 -0
- package/dist/lib/stream.js.map +1 -0
- package/dist/lib/types.d.ts +59 -0
- package/dist/lib/types.js +8 -0
- package/dist/lib/types.js.map +1 -0
- package/dist/lib/utilities.d.ts +346 -0
- package/dist/lib/utilities.js +424 -0
- package/dist/lib/utilities.js.map +1 -0
- package/install/check.js +113 -0
- package/install/ffmpeg.js +163 -0
- package/package.json +107 -0
@@ -0,0 +1,876 @@
+import { AVERROR_EAGAIN, AVERROR_EOF, AVFILTER_FLAG_HWDEVICE, avGetPixFmtName, avGetSampleFmtName, AVMEDIA_TYPE_AUDIO, AVMEDIA_TYPE_VIDEO, FFmpegError, Frame, Filter as LowLevelFilter, FilterGraph as LowLevelFilterGraph, FilterInOut as LowLevelFilterInOut, Stream, } from '../lib/index.js';
+/**
+ * High-level filter API for media processing.
+ *
+ * Provides a simplified interface for FFmpeg's filter system.
+ * Supports both simple filter chains and complex filter graphs.
+ * Handles automatic format negotiation and buffer management.
+ *
+ * @example
+ * ```typescript
+ * import { FilterAPI, Frame } from 'node-av/api';
+ *
+ * // Create a simple video filter from a stream
+ * const videoStream = media.video();
+ * const filter = await FilterAPI.create('scale=1280:720,format=yuv420p', videoStream);
+ *
+ * // Process frames
+ * const outputFrame = await filter.process(inputFrame);
+ * ```
+ *
+ * @example
+ * ```typescript
+ * // Create filter with hardware acceleration
+ * const hw = await HardwareContext.auto();
+ * const filter = await FilterAPI.create('scale_vt=640:480', videoStream, {
+ *   hardware: hw
+ * });
+ * ```
+ */
+export class FilterAPI {
+    graph;
+    buffersrcCtx = null;
+    buffersinkCtx = null;
+    config;
+    mediaType;
+    initialized = false;
+    needsHardware = false; // Track if this filter REQUIRES hardware
+    hardware; // Store reference for hardware context
+    pendingInit; // For delayed init
+    /**
+     * Create a new Filter instance.
+     *
+     * The filter is uninitialized until setup with a filter description.
+     * Use the static factory methods for easier creation.
+     *
+     * @param config - Filter configuration
+     * @param hardware - Optional hardware context for late framesContext binding
+     * @internal
+     */
+    constructor(config, hardware) {
+        this.config = config;
+        this.hardware = hardware;
+        this.mediaType = config.type === 'video' ? AVMEDIA_TYPE_VIDEO : AVMEDIA_TYPE_AUDIO;
+        this.graph = new LowLevelFilterGraph();
+    }
+    /**
+     * Create a filter from a filter description string.
+     *
+     * Accepts either a Stream (from MediaInput/Decoder) or StreamInfo (for raw data).
+     * Automatically sets up buffer source and sink filters.
+     *
+     * Handles complex filter chains with multiple filters. Automatically detects if ANY
+     * filter in the chain requires hardware acceleration (e.g., scale_vt in
+     * "format=nv12,hwupload,scale_vt=640:480").
+     *
+     * @param description - Filter graph description (e.g., "scale=1280:720" or complex chains)
+     * @param input - Stream or StreamInfo describing the input
+     * @param options - Optional filter options including hardware context
+     *
+     * @returns Promise resolving to configured Filter instance
+     *
+     * @throws {FFmpegError} If filter creation or configuration fails
+     *
+     * @example
+     * ```typescript
+     * // Simple filter
+     * const filter = await FilterAPI.create('scale=640:480', videoStream);
+     *
+     * // Complex filter chain with hardware
+     * const hw = await HardwareContext.auto();
+     * const filter = await FilterAPI.create(
+     *   'format=nv12,hwupload,scale_vt=640:480,hwdownload,format=yuv420p',
+     *   videoStream,
+     *   { hardware: hw }
+     * );
+     *
+     * // From StreamInfo (for raw data)
+     * const filter = await FilterAPI.create('scale=640:480', {
+     *   type: 'video',
+     *   width: 1920,
+     *   height: 1080,
+     *   pixelFormat: AV_PIX_FMT_YUV420P,
+     *   timeBase: { num: 1, den: 30 }
+     * });
+     * ```
+     */
+    static async create(description, input, options = {}) {
+        let config;
+        if (input instanceof Stream) {
+            if (input.codecpar.codecType === AVMEDIA_TYPE_VIDEO) {
+                config = {
+                    type: 'video',
+                    width: input.codecpar.width,
+                    height: input.codecpar.height,
+                    pixelFormat: input.codecpar.format,
+                    timeBase: input.timeBase,
+                    frameRate: input.rFrameRate,
+                    sampleAspectRatio: input.codecpar.sampleAspectRatio,
+                };
+            }
+            else if (input.codecpar.codecType === AVMEDIA_TYPE_AUDIO) {
+                config = {
+                    type: 'audio',
+                    sampleRate: input.codecpar.sampleRate,
+                    sampleFormat: input.codecpar.format,
+                    channelLayout: input.codecpar.channelLayout.mask,
+                    timeBase: input.timeBase,
+                };
+            }
+            else {
+                throw new Error('Unsupported codec type');
+            }
+        }
+        else {
+            if (input.type === 'video') {
+                config = {
+                    type: 'video',
+                    width: input.width,
+                    height: input.height,
+                    pixelFormat: input.pixelFormat,
+                    timeBase: input.timeBase,
+                    frameRate: input.frameRate,
+                    sampleAspectRatio: input.sampleAspectRatio,
+                };
+            }
+            else {
+                config = {
+                    type: 'audio',
+                    sampleRate: input.sampleRate,
+                    sampleFormat: input.sampleFormat,
+                    channelLayout: typeof input.channelLayout === 'bigint' ? input.channelLayout : input.channelLayout.mask || 3n,
+                    timeBase: input.timeBase,
+                };
+            }
+        }
+        const filter = new FilterAPI(config, options.hardware);
+        // Parse the entire filter chain to check if ANY filter requires hardware
+        // Split by comma to get individual filters, handle complex chains like:
+        // "format=nv12,hwupload,scale_vt=100:100,hwdownload,format=yuv420p"
+        const filterNames = description
+            .split(',')
+            .map((f) => {
+                // Extract filter name (before = or : or whitespace)
+                const match = /^([a-zA-Z0-9_]+)/.exec(f.trim());
+                return match ? match[1] : null;
+            })
+            .filter(Boolean);
+        // Check if chain contains hwupload (which creates hw frames context)
+        const hasHwDownload = filterNames.some((name) => name === 'hwdownload');
+        const hasHwUpload = filterNames.some((name) => name === 'hwupload');
+        // Check each filter in the chain
+        let needsHardwareFramesContext = false;
+        let needsHardwareDevice = false;
+        for (const filterName of filterNames) {
+            if (!filterName)
+                continue;
+            const lowLevelFilter = LowLevelFilter.getByName(filterName);
+            if (lowLevelFilter) {
+                // Check if this filter needs hardware
+                if ((lowLevelFilter.flags & AVFILTER_FLAG_HWDEVICE) !== 0) {
+                    needsHardwareDevice = true;
+                    // Only non-hwupload filters need frames context from decoder
+                    if (filterName !== 'hwupload' && filterName !== 'hwdownload') {
+                        needsHardwareFramesContext = true;
+                    }
+                }
+            }
+        }
+        // If we have hwupload, we don't need hardware frames context from decoder
+        filter.needsHardware = hasHwDownload || (needsHardwareFramesContext && !hasHwUpload);
+        // Validation: Hardware filter MUST have HardwareContext
+        if (needsHardwareDevice && !options.hardware) {
+            throw new Error('Hardware filter in chain requires a hardware context. ' + 'Please provide one via options.hardware');
+        }
+        // Check if we can initialize immediately
+        // Initialize if: (1) we don't need hardware, OR (2) we need hardware AND have framesContext
+        if (!filter.needsHardware || (filter.needsHardware && options.hardware?.framesContext)) {
+            // Can initialize now
+            if (options.hardware?.framesContext && config.type === 'video') {
+                config.hwFramesCtx = options.hardware.framesContext;
+            }
+            await filter.initialize(description, options);
+            filter.initialized = true;
+        }
+        else {
+            // Delay initialization until first frame (hardware needed but no framesContext yet)
+            filter.pendingInit = { description, options };
+        }
+        return filter;
+    }
+    /**
+     * Process a single frame through the filter.
+     *
+     * Sends a frame through the filter graph and returns the filtered result.
+     * May return null if the filter needs more input frames.
+     *
+     * @param frame - Input frame to filter
+     *
+     * @returns Promise resolving to filtered frame or null
+     *
+     * @throws {FFmpegError} If processing fails
+     *
+     * @example
+     * ```typescript
+     * const outputFrame = await filter.process(inputFrame);
+     * if (outputFrame) {
+     *   // Process the filtered frame
+     * }
+     * ```
+     */
+    async process(frame) {
+        // Check for delayed initialization
+        if (!this.initialized && this.pendingInit) {
+            // Check if hardware frames context became available
+            if (this.hardware?.framesContext && this.config.type === 'video') {
+                this.config.hwFramesCtx = this.hardware.framesContext;
+                // Update pixel format to match hardware frames if using hardware
+                if (this.needsHardware) {
+                    this.config.pixelFormat = this.hardware.getHardwarePixelFormat();
+                }
+                // Now we can initialize
+                await this.initialize(this.pendingInit.description, this.pendingInit.options);
+                this.pendingInit = undefined;
+                this.initialized = true;
+            }
+            else if (this.needsHardware) {
+                throw new Error('Hardware filter requires frames context which is not yet available');
+            }
+            else {
+                // Software filter or hardware not required, can initialize now
+                await this.initialize(this.pendingInit.description, this.pendingInit.options);
+                this.pendingInit = undefined;
+                this.initialized = true;
+            }
+        }
+        if (!this.initialized || !this.buffersrcCtx || !this.buffersinkCtx) {
+            throw new Error('Filter not initialized');
+        }
+        // Send frame to filter
+        const addRet = await this.buffersrcCtx.buffersrcAddFrame(frame);
+        FFmpegError.throwIfError(addRet, 'Failed to add frame to filter');
+        // Try to get filtered frame
+        const outputFrame = new Frame();
+        outputFrame.alloc();
+        const getRet = await this.buffersinkCtx.buffersinkGetFrame(outputFrame);
+        if (getRet >= 0) {
+            return outputFrame;
+        }
+        else if (FFmpegError.is(getRet, AVERROR_EAGAIN)) {
+            // Need more input
+            outputFrame.free();
+            return null;
+        }
+        else {
+            outputFrame.free();
+            FFmpegError.throwIfError(getRet, 'Failed to get frame from filter');
+            return null;
+        }
+    }
+    /**
+     * Process multiple frames through the filter.
+     *
+     * Batch processing for better performance.
+     * Returns all available output frames.
+     *
+     * @param frames - Array of input frames
+     *
+     * @returns Promise resolving to array of filtered frames
+     *
+     * @throws {FFmpegError} If processing fails
+     *
+     * @example
+     * ```typescript
+     * const outputFrames = await filter.processMultiple(inputFrames);
+     * ```
+     */
+    async processMultiple(frames) {
+        const outputFrames = [];
+        for (const frame of frames) {
+            const output = await this.process(frame);
+            if (output) {
+                outputFrames.push(output);
+            }
+            // Drain any additional frames
+            while (true) {
+                const additional = await this.receive();
+                if (!additional)
+                    break;
+                outputFrames.push(additional);
+            }
+        }
+        return outputFrames;
+    }
+    /**
+     * Receive a filtered frame without sending input.
+     *
+     * Used to drain buffered frames from the filter.
+     * Returns null when no more frames are available.
+     *
+     * @returns Promise resolving to filtered frame or null
+     *
+     * @throws {FFmpegError} If receiving fails
+     *
+     * @example
+     * ```typescript
+     * // Drain all buffered frames
+     * while (true) {
+     *   const frame = await filter.receive();
+     *   if (!frame) break;
+     *   // Process frame
+     * }
+     * ```
+     */
+    async receive() {
+        if (!this.initialized || !this.buffersinkCtx) {
+            throw new Error('Filter not initialized');
+        }
+        const frame = new Frame();
+        frame.alloc();
+        const ret = await this.buffersinkCtx.buffersinkGetFrame(frame);
+        if (ret >= 0) {
+            return frame;
+        }
+        else {
+            frame.free();
+            if (FFmpegError.is(ret, AVERROR_EAGAIN) || FFmpegError.is(ret, AVERROR_EOF)) {
+                return null;
+            }
+            FFmpegError.throwIfError(ret, 'Failed to receive frame from filter');
+            return null;
+        }
+    }
+    /**
+     * Flush the filter by sending null frame.
+     *
+     * Signals end of stream to the filter.
+     * Use receive() to get any remaining frames.
+     *
+     * @returns Promise resolving when flush is complete
+     *
+     * @throws {FFmpegError} If flush fails
+     *
+     * @example
+     * ```typescript
+     * await filter.flush();
+     * // Get remaining frames
+     * while (true) {
+     *   const frame = await filter.receive();
+     *   if (!frame) break;
+     *   // Process final frames
+     * }
+     * ```
+     */
+    async flush() {
+        if (!this.initialized || !this.buffersrcCtx) {
+            throw new Error('Filter not initialized');
+        }
+        const ret = await this.buffersrcCtx.buffersrcAddFrame(null);
+        if (ret < 0 && !FFmpegError.is(ret, AVERROR_EOF)) {
+            FFmpegError.throwIfError(ret, 'Failed to flush filter');
+        }
+    }
+    /**
+     * Flush filter and yield all remaining frames as a generator.
+     *
+     * More convenient than calling flush() + receive() in a loop.
+     * Automatically sends flush signal and yields all buffered frames.
+     *
+     * @returns Async generator of remaining frames
+     *
+     * @throws {Error} If filter is not initialized
+     *
+     * @example
+     * ```typescript
+     * // Process all remaining frames with generator
+     * for await (const frame of filter.flushFrames()) {
+     *   // Process final frame
+     *   using _ = frame; // Auto cleanup
+     * }
+     * ```
+     */
+    async *flushFrames() {
+        if (!this.initialized || !this.buffersrcCtx) {
+            throw new Error('Filter not initialized');
+        }
+        // Send flush signal
+        await this.flush();
+        // Yield all remaining frames
+        let frame;
+        while ((frame = await this.receive()) !== null) {
+            yield frame;
+        }
+    }
+    /**
+     * Process frames as an async generator.
+     *
+     * Provides a convenient iterator interface for filtering.
+     * Automatically handles buffering and draining.
+     * Input frames are automatically freed after processing.
+     *
+     * IMPORTANT: The yielded frames MUST be freed by the caller!
+     * Input frames are automatically freed after processing.
+     *
+     * @param frames - Async generator of input frames (will be freed automatically)
+     *
+     * @returns Async generator of filtered frames (ownership transferred to caller)
+     *
+     * @example
+     * ```typescript
+     * for await (const filtered of filter.frames(decoder.frames())) {
+     *   // Process filtered frame
+     *   filtered.free(); // Must free output frame
+     * }
+     * ```
+     */
+    async *frames(frames) {
+        for await (const frame of frames) {
+            try {
+                // Process input frame
+                const output = await this.process(frame);
+                if (output) {
+                    yield output;
+                }
+                // Drain any buffered frames
+                while (true) {
+                    const buffered = await this.receive();
+                    if (!buffered)
+                        break;
+                    yield buffered;
+                }
+            }
+            finally {
+                // Free the input frame after processing
+                frame.free();
+            }
+        }
+        // Flush and get remaining frames
+        await this.flush();
+        while (true) {
+            const remaining = await this.receive();
+            if (!remaining)
+                break;
+            yield remaining;
+        }
+    }
+    /**
+     * Get the filter graph description.
+     *
+     * Returns a string representation of the filter graph in DOT format.
+     * Useful for debugging and visualization.
+     *
+     * @returns Graph description or null if not initialized
+     *
+     * @example
+     * ```typescript
+     * const description = filter.getGraphDescription();
+     * console.log(description);
+     * ```
+     */
+    getGraphDescription() {
+        if (!this.initialized) {
+            return null;
+        }
+        return this.graph.dump();
+    }
+    /**
+     * Check if the filter is initialized and ready.
+     *
+     * @returns true if the filter is ready for processing
+     */
+    isReady() {
+        return this.initialized && this.buffersrcCtx !== null && this.buffersinkCtx !== null;
+    }
+    /**
+     * Get the media type of this filter.
+     *
+     * @returns The media type (video or audio)
+     */
+    getMediaType() {
+        return this.mediaType;
+    }
+    /**
+     * Get the filter configuration.
+     *
+     * @returns The filter configuration used to create this instance
+     */
+    getConfig() {
+        return this.config;
+    }
+    /**
+     * Free all filter resources.
+     *
+     * Releases the filter graph and all associated filters.
+     * The filter instance cannot be used after calling this.
+     *
+     * @example
+     * ```typescript
+     * filter.free();
+     * // filter is now invalid
+     * ```
+     */
+    free() {
+        if (this.graph) {
+            this.graph.free();
+        }
+        this.buffersrcCtx = null;
+        this.buffersinkCtx = null;
+        this.initialized = false;
+    }
+    /**
+     * Initialize the filter graph.
+     *
+     * Sets up buffer source, buffer sink, and parses the filter description.
+     * Configures the graph for processing.
+     *
+     * @internal
+     */
+    async initialize(description, options) {
+        // Allocate graph
+        this.graph.alloc();
+        // Configure threading
+        if (options.threads !== undefined) {
+            this.graph.nbThreads = options.threads;
+        }
+        // Configure scaler options
+        if (options.scaleSwsOpts) {
+            this.graph.scaleSwsOpts = options.scaleSwsOpts;
+        }
+        // Create buffer source
+        this.createBufferSource();
+        // Create buffer sink
+        this.createBufferSink();
+        // Parse filter description
+        this.parseFilterDescription(description);
+        // Set hw_device_ctx on hardware filters if we have hardware context
+        if (this.hardware?.deviceContext) {
+            const filters = this.graph.filters;
+            if (filters) {
+                for (const filterCtx of filters) {
+                    // Check if this filter needs hardware device context
+                    const filter = filterCtx.filter;
+                    if (filter && (filter.flags & AVFILTER_FLAG_HWDEVICE) !== 0) {
+                        // Set hardware device context on this filter
+                        filterCtx.hwDeviceCtx = this.hardware.deviceContext;
+                    }
+                }
+            }
+        }
+        // Configure the graph
+        const ret = await this.graph.config();
+        FFmpegError.throwIfError(ret, 'Failed to configure filter graph');
+        this.initialized = true;
+    }
+    /**
+     * Create and configure the buffer source filter.
+     *
+     * @internal
+     */
+    createBufferSource() {
+        const filterName = this.config.type === 'video' ? 'buffer' : 'abuffer';
+        const bufferFilter = LowLevelFilter.getByName(filterName);
+        if (!bufferFilter) {
+            throw new Error(`${filterName} filter not found`);
+        }
+        // Check if we have hardware frames context for video
+        const hasHwFrames = this.config.type === 'video' && this.config.hwFramesCtx;
+        if (hasHwFrames) {
+            // For hardware frames, allocate filter without initialization
+            this.buffersrcCtx = this.graph.allocFilter(bufferFilter, 'in');
+            if (!this.buffersrcCtx) {
+                throw new Error('Failed to allocate buffer source');
+            }
+            // Set parameters including hardware frames context (BEFORE init)
+            const videoConfig = this.config;
+            const ret = this.buffersrcCtx.buffersrcParametersSet({
+                width: videoConfig.width,
+                height: videoConfig.height,
+                format: videoConfig.pixelFormat,
+                timeBase: videoConfig.timeBase,
+                frameRate: videoConfig.frameRate,
+                sampleAspectRatio: videoConfig.sampleAspectRatio,
+                hwFramesCtx: videoConfig.hwFramesCtx,
+            });
+            FFmpegError.throwIfError(ret, 'Failed to set buffer source parameters with hardware frames context');
+            // Initialize filter AFTER setting parameters
+            const initRet = this.buffersrcCtx.init(null);
+            FFmpegError.throwIfError(initRet, 'Failed to initialize buffer source');
+        }
+        else {
+            // Build initialization string based on media type
+            let args;
+            if (this.config.type === 'video') {
+                const cfg = this.config;
+                args = `video_size=${cfg.width}x${cfg.height}:pix_fmt=${cfg.pixelFormat}:time_base=${cfg.timeBase.num}/${cfg.timeBase.den}`;
+                if (cfg.frameRate) {
+                    args += `:frame_rate=${cfg.frameRate.num}/${cfg.frameRate.den}`;
+                }
+                if (cfg.sampleAspectRatio) {
+                    args += `:pixel_aspect=${cfg.sampleAspectRatio.num}/${cfg.sampleAspectRatio.den}`;
+                }
+            }
+            else {
+                const cfg = this.config;
+                // Use sample format name from utilities
+                const sampleFmtName = avGetSampleFmtName(cfg.sampleFormat);
+                // Handle invalid channel layout (0) by using stereo as default
+                const channelLayout = cfg.channelLayout === 0n ? 'stereo' : cfg.channelLayout.toString();
+                args = `sample_rate=${cfg.sampleRate}:sample_fmt=${sampleFmtName}:channel_layout=${channelLayout}:time_base=${cfg.timeBase.num}/${cfg.timeBase.den}`;
+            }
+            this.buffersrcCtx = this.graph.createFilter(bufferFilter, 'in', args);
+            if (!this.buffersrcCtx) {
+                throw new Error('Failed to create buffer source');
+            }
+        }
+    }
+    /**
+     * Create and configure the buffer sink filter.
+     *
+     * @internal
+     */
+    createBufferSink() {
+        const filterName = this.config.type === 'video' ? 'buffersink' : 'abuffersink';
+        const sinkFilter = LowLevelFilter.getByName(filterName);
+        if (!sinkFilter) {
+            throw new Error(`${filterName} filter not found`);
+        }
+        // Create sink filter - no automatic format conversion
+        this.buffersinkCtx = this.graph.createFilter(sinkFilter, 'out', null);
+        if (!this.buffersinkCtx) {
+            throw new Error('Failed to create buffer sink');
+        }
+    }
+    /**
+     * Parse and connect the filter description.
+     *
+     * @internal
+     */
+    parseFilterDescription(description) {
+        if (!this.buffersrcCtx || !this.buffersinkCtx) {
+            throw new Error('Buffer filters not initialized');
+        }
+        // Handle empty or simple passthrough
+        if (!description || description === 'null' || description === 'anull') {
+            // Direct connection for null filters
+            const ret = this.buffersrcCtx.link(0, this.buffersinkCtx, 0);
+            FFmpegError.throwIfError(ret, 'Failed to link buffer filters');
+            return;
+        }
+        // Set up inputs and outputs for parsing
+        const outputs = new LowLevelFilterInOut();
+        outputs.alloc();
+        outputs.name = 'in';
+        outputs.filterCtx = this.buffersrcCtx;
+        outputs.padIdx = 0;
+        const inputs = new LowLevelFilterInOut();
+        inputs.alloc();
+        inputs.name = 'out';
+        inputs.filterCtx = this.buffersinkCtx;
+        inputs.padIdx = 0;
+        // Parse the filter graph
+        const ret = this.graph.parsePtr(description, inputs, outputs);
+        FFmpegError.throwIfError(ret, 'Failed to parse filter description');
+        // Clean up FilterInOut structures
+        inputs.free();
+        outputs.free();
+    }
+    /**
+     * Send a command to a filter in the graph.
+     *
+     * Allows runtime modification of filter parameters without recreating the graph.
+     * Not all filters support commands - check filter documentation.
+     *
+     * @param target - Filter name or "all" to send to all filters
+     * @param cmd - Command name (e.g., "volume", "hue", "brightness")
+     * @param arg - Command argument value
+     * @param flags - Optional command flags
+     *
+     * @returns Command response
+     *
+     * @example
+     * ```typescript
+     * // Change volume dynamically
+     * const response = filter.sendCommand('volume', 'volume', '0.5');
+     * if (response) {
+     *   console.log('Volume changed successfully');
+     * }
+     * ```
+     *
+     * @example
+     * ```typescript
+     * // Enable/disable all filters at runtime
+     * filter.sendCommand('all', 'enable', 'expr=gte(t,10)');
+     * ```
+     */
+    sendCommand(target, cmd, arg, flags) {
+        if (!this.initialized) {
+            throw new Error('Filter not initialized');
+        }
+        const result = this.graph.sendCommand(target, cmd, arg, flags);
+        if (typeof result === 'number') {
+            FFmpegError.throwIfError(result, 'Failed to send filter command');
+        }
+        return result.response;
+    }
+    /**
+     * Queue a command to be executed at a specific time.
+     *
+     * Commands are executed when processing frames with matching timestamps.
+     * Useful for scripted filter changes synchronized with media playback.
+     *
+     * @param target - Filter name or "all" to send to all filters
+     * @param cmd - Command name (e.g., "volume", "hue", "brightness")
+     * @param arg - Command argument value
+     * @param ts - Timestamp when command should execute (in seconds)
+     * @param flags - Optional command flags
+     *
+     * @example
+     * ```typescript
+     * // Schedule volume changes at specific times
+     * filter.queueCommand('volume', 'volume', '0.5', 5.0); // At 5 seconds
+     * filter.queueCommand('volume', 'volume', '0.8', 10.0); // At 10 seconds
+     * filter.queueCommand('volume', 'volume', '0.2', 15.0); // At 15 seconds
+     * ```
+     *
+     * @example
+     * ```typescript
+     * // Fade effect at specific timestamp
+     * filter.queueCommand('fade', 'alpha', '0.5', 30.0);
+     * ```
+     */
+    queueCommand(target, cmd, arg, ts, flags) {
+        if (!this.initialized) {
+            throw new Error('Filter not initialized');
+        }
+        const ret = this.graph.queueCommand(target, cmd, arg, ts, flags);
+        FFmpegError.throwIfError(ret, 'Failed to queue filter command');
+    }
+    /**
+     * Dispose of the filter.
+     *
+     * Implements the Disposable interface for automatic cleanup.
+     * Equivalent to calling free().
+     *
+     * @example
+     * ```typescript
+     * {
+     *   using filter = await Filter.create('scale=1280:720', config);
+     *   // ... use filter
+     * } // Automatically freed when leaving scope
+     * ```
+     */
+    [Symbol.dispose]() {
+        this.free();
+    }
+}
+/**
+ * Common filter presets for convenience.
+ *
+ * Provides pre-defined filter strings for common operations.
+ * Can be used with Filter.create() for quick setup.
+ *
+ * @example
+ * ```typescript
+ * const filter = await Filter.create(
+ *   FilterPresets.scale(1280, 720),
+ *   config
+ * );
+ * ```
+ */
+export class FilterPresets {
+    /**
+     * Scale video to specified dimensions.
+     */
+    static scale(width, height, flags) {
+        const base = `scale=${width}:${height}`;
+        return flags ? `${base}:flags=${flags}` : base;
+    }
+    /**
+     * Crop video to specified dimensions.
+     */
+    static crop(width, height, x = 0, y = 0) {
+        return `crop=${width}:${height}:${x}:${y}`;
+    }
+    /**
+     * Change frame rate.
+     */
+    static fps(fps) {
+        return `fps=${fps}`;
+    }
+    /**
+     * Convert pixel format.
+     * Can accept either format name string or AVPixelFormat enum.
+     */
+    static format(pixelFormat) {
+        const formatName = typeof pixelFormat === 'string' ? pixelFormat : (avGetPixFmtName(pixelFormat) ?? 'yuv420p');
+        return `format=${formatName}`;
+    }
+    /**
+     * Rotate video by angle.
+     */
+    static rotate(angle) {
+        return `rotate=${angle}*PI/180`;
+    }
+    /**
+     * Flip video horizontally.
+     */
+    static hflip() {
+        return 'hflip';
+    }
+    /**
+     * Flip video vertically.
+     */
+    static vflip() {
+        return 'vflip';
+    }
+    /**
+     * Apply fade effect.
+     */
+    static fade(type, start, duration) {
+        return `fade=t=${type}:st=${start}:d=${duration}`;
+    }
+    /**
+     * Overlay one video on another.
+     */
+    static overlay(x = 0, y = 0) {
+        return `overlay=${x}:${y}`;
+    }
+    /**
+     * Adjust audio volume.
+     */
+    static volume(factor) {
+        return `volume=${factor}`;
+    }
+    /**
+     * Convert audio sample format.
+     * Can accept either format name string or AVSampleFormat enum.
+     */
+    static aformat(sampleFormat, sampleRate, channelLayout) {
+        const formatName = typeof sampleFormat === 'string' ? sampleFormat : (avGetSampleFmtName(sampleFormat) ?? 's16');
+        let filter = `aformat=sample_fmts=${formatName}`;
+        if (sampleRate)
+            filter += `:sample_rates=${sampleRate}`;
+        if (channelLayout)
+            filter += `:channel_layouts=${channelLayout}`;
+        return filter;
+    }
+    /**
+     * Change audio tempo without changing pitch.
+     */
+    static atempo(factor) {
+        return `atempo=${factor}`;
+    }
+    /**
+     * Apply audio fade.
+     */
+    static afade(type, start, duration) {
+        return `afade=t=${type}:st=${start}:d=${duration}`;
+    }
+    /**
+     * Mix multiple audio streams.
+     */
+    static amix(inputs = 2, duration = 'longest') {
+        return `amix=inputs=${inputs}:duration=${duration}`;
+    }
+}
+//# sourceMappingURL=filter.js.map
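
For orientation, the sketch below shows how the `FilterAPI` and `FilterPresets` classes added in `package/dist/api/filter.js` are meant to be combined with the rest of the high-level API in this release. Only `media.video()`, `decoder.frames()`, and the `FilterAPI`/`FilterPresets` calls are taken from the JSDoc above; the `MediaInput.open()` and `Decoder.create()` factories are assumed from the `media-input` and `decoder` modules listed in this diff and may not match the actual 0.0.1 signatures.

```typescript
import { Decoder, FilterAPI, FilterPresets, MediaInput } from 'node-av/api';

// Hypothetical setup: factory names assumed from dist/api/media-input.js and
// dist/api/decoder.js; only media.video() and decoder.frames() appear in the
// JSDoc examples of filter.js itself.
const media = await MediaInput.open('input.mp4');
const videoStream = media.video();
const decoder = await Decoder.create(videoStream);

// Compose "scale=640:360,format=yuv420p" from the presets defined in this file.
const description = [
  FilterPresets.scale(640, 360),
  FilterPresets.format('yuv420p'),
].join(',');

// Software-only chain, so no hardware context is needed.
const filter = await FilterAPI.create(description, videoStream);

// frames() frees each input frame it consumes; the yielded frames are owned
// by the caller and must be freed explicitly.
for await (const filtered of filter.frames(decoder.frames())) {
  // ...encode or inspect the filtered frame here...
  filtered.free();
}

filter.free();
```

For hardware chains such as `format=nv12,hwupload,scale_vt=640:480`, the JSDoc on `create()` shows the same call with a third argument of `{ hardware: await HardwareContext.auto() }`.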