node-av 1.0.2 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +5 -3
- package/dist/api/bitstream-filter.js +2 -1
- package/dist/api/bitstream-filter.js.map +1 -1
- package/dist/api/decoder.d.ts +10 -1
- package/dist/api/decoder.js +44 -25
- package/dist/api/decoder.js.map +1 -1
- package/dist/api/encoder.d.ts +19 -7
- package/dist/api/encoder.js +94 -130
- package/dist/api/encoder.js.map +1 -1
- package/dist/api/filter-presets.d.ts +316 -0
- package/dist/api/filter-presets.js +823 -0
- package/dist/api/filter-presets.js.map +1 -0
- package/dist/api/filter.d.ts +133 -173
- package/dist/api/filter.js +309 -393
- package/dist/api/filter.js.map +1 -1
- package/dist/api/hardware.d.ts +33 -73
- package/dist/api/hardware.js +86 -134
- package/dist/api/hardware.js.map +1 -1
- package/dist/api/index.d.ts +2 -1
- package/dist/api/index.js +1 -0
- package/dist/api/index.js.map +1 -1
- package/dist/api/io-stream.js +2 -1
- package/dist/api/io-stream.js.map +1 -1
- package/dist/api/media-input.d.ts +2 -1
- package/dist/api/media-input.js +2 -1
- package/dist/api/media-input.js.map +1 -1
- package/dist/api/media-output.js +2 -1
- package/dist/api/media-output.js.map +1 -1
- package/dist/api/types.d.ts +7 -1
- package/dist/api/utilities/audio-sample.d.ts +1 -1
- package/dist/api/utilities/image.d.ts +1 -1
- package/dist/api/utilities/media-type.d.ts +1 -1
- package/dist/api/utilities/pixel-format.d.ts +1 -1
- package/dist/api/utilities/sample-format.d.ts +1 -1
- package/dist/api/utilities/timestamp.d.ts +1 -1
- package/dist/{lib → constants}/channel-layouts.d.ts +1 -1
- package/dist/constants/channel-layouts.js.map +1 -0
- package/dist/{lib → constants}/constants.d.ts +19 -4
- package/dist/{lib → constants}/constants.js +15 -1
- package/dist/constants/constants.js.map +1 -0
- package/dist/constants/decoders.d.ts +609 -0
- package/dist/constants/decoders.js +617 -0
- package/dist/constants/decoders.js.map +1 -0
- package/dist/constants/encoders.d.ts +285 -0
- package/dist/constants/encoders.js +298 -0
- package/dist/constants/encoders.js.map +1 -0
- package/dist/constants/index.d.ts +4 -0
- package/dist/constants/index.js +5 -0
- package/dist/constants/index.js.map +1 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +2 -0
- package/dist/index.js.map +1 -1
- package/dist/lib/audio-fifo.d.ts +1 -1
- package/dist/lib/binding.d.ts +7 -5
- package/dist/lib/binding.js.map +1 -1
- package/dist/lib/bitstream-filter.d.ts +1 -1
- package/dist/lib/codec-context.d.ts +1 -1
- package/dist/lib/codec-parameters.d.ts +1 -1
- package/dist/lib/codec-parser.d.ts +1 -1
- package/dist/lib/codec.d.ts +131 -3
- package/dist/lib/codec.js +191 -0
- package/dist/lib/codec.js.map +1 -1
- package/dist/lib/dictionary.d.ts +1 -1
- package/dist/lib/dictionary.js +1 -1
- package/dist/lib/dictionary.js.map +1 -1
- package/dist/lib/error.d.ts +1 -1
- package/dist/lib/error.js.map +1 -1
- package/dist/lib/filter-context.d.ts +58 -1
- package/dist/lib/filter-context.js +72 -0
- package/dist/lib/filter-context.js.map +1 -1
- package/dist/lib/filter-graph.d.ts +1 -1
- package/dist/lib/filter.js +1 -1
- package/dist/lib/filter.js.map +1 -1
- package/dist/lib/format-context.d.ts +1 -1
- package/dist/lib/format-context.js +1 -1
- package/dist/lib/format-context.js.map +1 -1
- package/dist/lib/frame.d.ts +36 -1
- package/dist/lib/frame.js +37 -0
- package/dist/lib/frame.js.map +1 -1
- package/dist/lib/hardware-device-context.d.ts +1 -1
- package/dist/lib/hardware-frames-context.d.ts +1 -1
- package/dist/lib/index.d.ts +0 -2
- package/dist/lib/index.js +0 -3
- package/dist/lib/index.js.map +1 -1
- package/dist/lib/input-format.d.ts +1 -1
- package/dist/lib/io-context.d.ts +1 -1
- package/dist/lib/io-context.js +1 -1
- package/dist/lib/io-context.js.map +1 -1
- package/dist/lib/log.d.ts +1 -1
- package/dist/lib/native-types.d.ts +11 -6
- package/dist/lib/native-types.js +16 -0
- package/dist/lib/native-types.js.map +1 -1
- package/dist/lib/option.d.ts +1 -1
- package/dist/lib/option.js +1 -1
- package/dist/lib/option.js.map +1 -1
- package/dist/lib/output-format.d.ts +1 -1
- package/dist/lib/packet.d.ts +1 -1
- package/dist/lib/software-resample-context.d.ts +1 -1
- package/dist/lib/software-scale-context.d.ts +1 -1
- package/dist/lib/software-scale-context.js +1 -1
- package/dist/lib/software-scale-context.js.map +1 -1
- package/dist/lib/stream.d.ts +1 -1
- package/dist/lib/types.d.ts +1 -1
- package/dist/lib/utilities.d.ts +1 -1
- package/package.json +18 -19
- package/release_notes.md +59 -25
- package/dist/lib/channel-layouts.js.map +0 -1
- package/dist/lib/constants.js.map +0 -1
- package/dist/{lib → constants}/channel-layouts.js +0 -0
package/dist/api/filter-presets.js
@@ -0,0 +1,823 @@
+/**
+ * FilterPresets - Pre-defined filter configurations
+ *
+ * Provides convenient filter string builders for common operations.
+ * Includes both software and hardware-accelerated filter presets.
+ *
+ * Simplifies filter creation with type-safe parameter handling.
+ * Supports platform-specific hardware acceleration capabilities.
+ *
+ * @module api/filter-presets
+ */
+import { AV_HWDEVICE_TYPE_CUDA, AV_HWDEVICE_TYPE_D3D11VA, AV_HWDEVICE_TYPE_D3D12VA, AV_HWDEVICE_TYPE_DRM, AV_HWDEVICE_TYPE_DXVA2, AV_HWDEVICE_TYPE_MEDIACODEC, AV_HWDEVICE_TYPE_OPENCL, AV_HWDEVICE_TYPE_QSV, AV_HWDEVICE_TYPE_RKMPP, AV_HWDEVICE_TYPE_VAAPI, AV_HWDEVICE_TYPE_VDPAU, AV_HWDEVICE_TYPE_VIDEOTOOLBOX, AV_HWDEVICE_TYPE_VULKAN, AVFILTER_FLAG_HWDEVICE, } from '../constants/constants.js';
+import { Filter } from '../lib/filter.js';
+import { avGetPixFmtName, avGetSampleFmtName } from '../lib/utilities.js';
+/**
+ * Base class for filter preset implementations.
+ * Provides common filter building methods that can be overridden.
+ */
+export class FilterPresetBase {
+    /**
+     * Scale video to specified dimensions.
+     * @returns Filter string or null if not supported
+     */
+    scale(width, height, options) {
+        const flags = options?.flags;
+        const base = `scale=${width}:${height}`;
+        return flags ? `${base}:flags=${flags}` : base;
+    }
+    /**
+     * Crop video to specified dimensions.
+     */
+    crop(width, height, x = 0, y = 0) {
+        return `crop=${width}:${height}:${x}:${y}`;
+    }
+    /**
+     * Change frame rate.
+     */
+    fps(fps) {
+        return `fps=${fps}`;
+    }
+    /**
+     * Convert pixel format.
+     * Can accept a single format or an array of formats for fallback.
+     * Multiple formats will create a chain: format=fmt1,format=fmt2,...
+     */
+    format(pixelFormat) {
+        if (Array.isArray(pixelFormat)) {
+            // Create a chain of format filters
+            const formats = pixelFormat.map((fmt) => {
+                const formatName = typeof fmt === 'string' ? fmt : (avGetPixFmtName(fmt) ?? 'yuv420p');
+                return `format=${formatName}`;
+            });
+            return formats.join(',');
+        }
+        const formatName = typeof pixelFormat === 'string' ? pixelFormat : (avGetPixFmtName(pixelFormat) ?? 'yuv420p');
+        return `format=${formatName}`;
+    }
+    /**
+     * Rotate video by angle.
+     */
+    rotate(angle) {
+        return `rotate=${angle}*PI/180`;
+    }
+    /**
+     * Flip video horizontally.
+     */
+    hflip() {
+        return 'hflip';
+    }
+    /**
+     * Flip video vertically.
+     */
+    vflip() {
+        return 'vflip';
+    }
+    /**
+     * Apply fade effect.
+     */
+    fade(type, start, duration) {
+        return `fade=t=${type}:st=${start}:d=${duration}`;
+    }
+    /**
+     * Overlay one video on another.
+     */
+    overlay(x = 0, y = 0, options) {
+        let filter = `overlay=${x}:${y}`;
+        if (options) {
+            for (const [key, value] of Object.entries(options)) {
+                filter += `:${key}=${value}`;
+            }
+        }
+        return filter;
+    }
+    /**
+     * Adjust audio volume.
+     */
+    volume(factor) {
+        return `volume=${factor}`;
+    }
+    /**
+     * Convert audio sample format.
+     */
+    aformat(sampleFormat, sampleRate, channelLayout) {
+        const formatName = typeof sampleFormat === 'string' ? sampleFormat : (avGetSampleFmtName(sampleFormat) ?? 's16');
+        let filter = `aformat=sample_fmts=${formatName}`;
+        if (sampleRate)
+            filter += `:sample_rates=${sampleRate}`;
+        if (channelLayout)
+            filter += `:channel_layouts=${channelLayout}`;
+        return filter;
+    }
+    /**
+     * Change audio tempo without changing pitch.
+     */
+    atempo(factor) {
+        return `atempo=${factor}`;
+    }
+    /**
+     * Apply audio fade.
+     */
+    afade(type, start, duration) {
+        return `afade=t=${type}:st=${start}:d=${duration}`;
+    }
+    /**
+     * Mix multiple audio streams.
+     */
+    amix(inputs = 2, duration = 'longest') {
+        return `amix=inputs=${inputs}:duration=${duration}`;
+    }
+}
+/**
+ * Filter chain builder for composing multiple filters.
+ * Allows fluent API for building complex filter graphs.
+ */
+export class FilterChain {
+    filters = [];
+    /**
+     * Add a filter to the chain.
+     * @param filter - Filter string or null/undefined (will be skipped)
+     */
+    add(filter) {
+        if (filter) {
+            this.filters.push(filter);
+        }
+        return this;
+    }
+    /**
+     * Add a custom filter string.
+     */
+    custom(filter) {
+        return this.add(filter);
+    }
+    /**
+     * Build the filter chain string.
+     * @param separator - Separator between filters (default: ',')
+     */
+    build(separator = ',') {
+        return this.filters.join(separator);
+    }
+    /**
+     * Get the filter array.
+     */
+    toArray() {
+        return [...this.filters];
+    }
+}
+/**
+ * Base chain builder with common filter methods.
+ * @template T The preset type this builder uses
+ */
+export class ChainBuilderBase extends FilterChain {
+    presets;
+    constructor(presets) {
+        super();
+        this.presets = presets;
+    }
+    scale(width, height, options) {
+        return this.add(this.presets.scale(width, height, options));
+    }
+    crop(width, height, x = 0, y = 0) {
+        return this.add(this.presets.crop(width, height, x, y));
+    }
+    fps(fps) {
+        return this.add(this.presets.fps(fps));
+    }
+    format(pixelFormat) {
+        return this.add(this.presets.format(pixelFormat));
+    }
+    rotate(angle) {
+        return this.add(this.presets.rotate(angle));
+    }
+    hflip() {
+        return this.add(this.presets.hflip());
+    }
+    vflip() {
+        return this.add(this.presets.vflip());
+    }
+    fade(type, start, duration) {
+        return this.add(this.presets.fade(type, start, duration));
+    }
+    overlay(x = 0, y = 0, options) {
+        return this.add(this.presets.overlay(x, y, options));
+    }
+    volume(factor) {
+        return this.add(this.presets.volume(factor));
+    }
+    aformat(sampleFormat, sampleRate, channelLayout) {
+        return this.add(this.presets.aformat(sampleFormat, sampleRate, channelLayout));
+    }
+    atempo(factor) {
+        return this.add(this.presets.atempo(factor));
+    }
+    afade(type, start, duration) {
+        return this.add(this.presets.afade(type, start, duration));
+    }
+    amix(inputs = 2, duration = 'longest') {
+        return this.add(this.presets.amix(inputs, duration));
+    }
+    // Hardware-specific methods (only available if presets support them)
+    transpose(dir = 0) {
+        if ('transpose' in this.presets) {
+            return this.add(this.presets.transpose(dir));
+        }
+        return this.add(null);
+    }
+    tonemap(options) {
+        if ('tonemap' in this.presets) {
+            return this.add(this.presets.tonemap(options));
+        }
+        return this.add(null);
+    }
+    deinterlace(mode) {
+        if ('deinterlace' in this.presets) {
+            return this.add(this.presets.deinterlace(mode));
+        }
+        return this.add(null);
+    }
+    flip(direction) {
+        if ('flip' in this.presets) {
+            return this.add(this.presets.flip(direction));
+        }
+        // Fallback to hflip/vflip
+        return direction === 'h' ? this.hflip() : this.vflip();
+    }
+    blur(type = 'avg', radius) {
+        if ('blur' in this.presets) {
+            return this.add(this.presets.blur(type, radius));
+        }
+        return this.add(null);
+    }
+    sharpen(amount) {
+        if ('sharpen' in this.presets) {
+            return this.add(this.presets.sharpen(amount));
+        }
+        return this.add(null);
+    }
+    stack(type, inputs = 2) {
+        if ('stack' in this.presets) {
+            return this.add(this.presets.stack(type, inputs));
+        }
+        return this.add(null);
+    }
+    hwupload() {
+        if ('hwupload' in this.presets) {
+            return this.add(this.presets.hwupload());
+        }
+        return this.add('hwupload');
+    }
+    hwdownload() {
+        if ('hwdownload' in this.presets) {
+            return this.add(this.presets.hwdownload());
+        }
+        return this.add('hwdownload');
+    }
+    hwmap(derive) {
+        if ('hwmap' in this.presets) {
+            return this.add(this.presets.hwmap(derive));
+        }
+        return this.add(derive ? `hwmap=derive_device=${derive}` : 'hwmap');
+    }
+}
+/**
+ * Fluent filter chain builder with preset methods.
+ */
+export class FilterChainBuilder extends ChainBuilderBase {
+}
+/**
+ * Common filter presets for convenience.
+ *
+ * Provides pre-defined filter strings for common operations.
+ * Can be used with Filter.create() for quick setup.
+ *
+ * @example
+ * ```typescript
+ * const filter = await Filter.create(
+ *   FilterPresets.scale(1280, 720),
+ *   config
+ * );
+ *
+ * // Using chain builder
+ * const chain = FilterPresets.chain()
+ *   .scale(1920, 1080)
+ *   .format('yuv420p')
+ *   .custom('unsharp=5:5:1.0')
+ *   .build();
+ * ```
+ */
+export class FilterPresets extends FilterPresetBase {
+    static instance = new FilterPresets();
+    /**
+     * Create a new filter chain builder.
+     */
+    static chain() {
+        return new FilterChainBuilder(FilterPresets.instance);
+    }
+    // Static methods that delegate to instance
+    static scale(width, height, flags) {
+        const result = FilterPresets.instance.scale(width, height, { flags });
+        return result ?? '';
+    }
+    static crop(width, height, x = 0, y = 0) {
+        const result = FilterPresets.instance.crop(width, height, x, y);
+        return result ?? '';
+    }
+    static fps(fps) {
+        const result = FilterPresets.instance.fps(fps);
+        return result ?? '';
+    }
+    static format(pixelFormat) {
+        const result = FilterPresets.instance.format(pixelFormat);
+        return result ?? '';
+    }
+    static rotate(angle) {
+        const result = FilterPresets.instance.rotate(angle);
+        return result ?? '';
+    }
+    static hflip() {
+        const result = FilterPresets.instance.hflip();
+        return result ?? '';
+    }
+    static vflip() {
+        const result = FilterPresets.instance.vflip();
+        return result ?? '';
+    }
+    static fade(type, start, duration) {
+        const result = FilterPresets.instance.fade(type, start, duration);
+        return result ?? '';
+    }
+    static overlay(x = 0, y = 0) {
+        const result = FilterPresets.instance.overlay(x, y);
+        return result ?? '';
+    }
+    static volume(factor) {
+        const result = FilterPresets.instance.volume(factor);
+        return result ?? '';
+    }
+    static aformat(sampleFormat, sampleRate, channelLayout) {
+        const result = FilterPresets.instance.aformat(sampleFormat, sampleRate, channelLayout);
+        return result ?? '';
+    }
+    static atempo(factor) {
+        const result = FilterPresets.instance.atempo(factor);
+        return result ?? '';
+    }
+    static afade(type, start, duration) {
+        const result = FilterPresets.instance.afade(type, start, duration);
+        return result ?? '';
+    }
+    static amix(inputs = 2, duration = 'longest') {
+        const result = FilterPresets.instance.amix(inputs, duration);
+        return result ?? '';
+    }
+}
+/**
+ * Hardware-accelerated filter presets.
+ *
+ * Provides hardware-specific filter strings for accelerated processing.
+ * Created and managed by HardwareContext for type-safe hardware operations.
+ *
+ * @example
+ * ```typescript
+ * const hw = await HardwareContext.auto();
+ * if (hw) {
+ *   // Get hardware-specific scale filter (returns null if unsupported)
+ *   const scaleFilter = hw.filterPresets.scale(1920, 1080);
+ *
+ *   // Build a filter chain (unsupported filters are skipped)
+ *   const chain = hw.filterPresets.chain()
+ *     .hwupload()
+ *     .scale(1920, 1080)
+ *     .tonemap() // Skipped if not supported
+ *     .custom('unsharp=5:5:1.0')
+ *     .hwdownload()
+ *     .build();
+ * }
+ * ```
+ */
+export class HardwareFilterPresets extends FilterPresetBase {
+    deviceType;
+    deviceName;
+    support;
+    /**
+     * Create hardware filter presets for a specific device type.
+     * @internal Used by HardwareContext
+     */
+    constructor(deviceType, deviceName = null) {
+        super();
+        this.deviceType = deviceType;
+        this.deviceName = deviceName ?? 'unknown';
+        this.support = this.getSupport();
+    }
+    /**
+     * Check if a filter name is a hardware-accelerated filter.
+     * Uses FFmpeg's AVFILTER_FLAG_HWDEVICE flag to determine if a filter is hardware-accelerated.
+     * @param filterName - The filter name to check
+     * @returns True if it's a hardware filter, false otherwise
+     */
+    static isHardwareFilter(filterName) {
+        const filter = Filter.getByName(filterName);
+        if (!filter) {
+            return false;
+        }
+        // Check if filter has hardware device flag
+        return (filter.flags & AVFILTER_FLAG_HWDEVICE) !== 0;
+    }
+    /**
+     * Create a new hardware filter chain builder.
+     */
+    chain() {
+        return new HardwareFilterChainBuilder(this);
+    }
+    /**
+     * Hardware-accelerated scale filter.
+     * @returns Filter string or null if not supported
+     */
+    scale(width, height, options) {
+        if (!this.support.scale) {
+            return null;
+        }
+        // Special handling for different hardware scalers
+        let filterName;
+        if (this.deviceType === AV_HWDEVICE_TYPE_CUDA && options?.npp) {
+            filterName = 'scale_npp';
+        }
+        else if (this.deviceType === AV_HWDEVICE_TYPE_RKMPP) {
+            filterName = 'scale_rkrga'; // RKMPP uses RGA for scaling
+        }
+        else if (this.deviceType === AV_HWDEVICE_TYPE_VIDEOTOOLBOX) {
+            filterName = 'scale_vt'; // VideoToolbox uses scale_vt
+        }
+        else {
+            filterName = `scale_${this.deviceName}`;
+        }
+        let filter = `${filterName}=${width}:${height}`;
+        if (options) {
+            for (const [key, value] of Object.entries(options)) {
+                if (key !== 'npp') {
+                    // Skip our special npp flag
+                    filter += `:${key}=${value}`;
+                }
+            }
+        }
+        return filter;
+    }
+    /**
+     * Hardware-accelerated overlay filter.
+     * @returns Filter string or null if not supported
+     */
+    overlay(x = 0, y = 0, options) {
+        if (!this.support.overlay) {
+            return null;
+        }
+        // Special handling for RKMPP which uses RGA
+        const filterName = this.deviceType === AV_HWDEVICE_TYPE_RKMPP ? 'overlay_rkrga' : `overlay_${this.deviceName}`;
+        let filter = `${filterName}=${x}:${y}`;
+        if (options) {
+            for (const [key, value] of Object.entries(options)) {
+                filter += `:${key}=${value}`;
+            }
+        }
+        return filter;
+    }
+    /**
+     * Hardware-accelerated transpose filter.
+     * @returns Filter string or null if not supported
+     */
+    transpose(dir = 0) {
+        if (!this.support.transpose) {
+            return null;
+        }
+        // Special handling for different hardware transpose implementations
+        let filterName;
+        if (this.deviceType === AV_HWDEVICE_TYPE_CUDA) {
+            filterName = 'transpose_cuda'; // Uses transpose_cuda from patch, not NPP
+        }
+        else if (this.deviceType === AV_HWDEVICE_TYPE_VIDEOTOOLBOX) {
+            filterName = 'transpose_vt'; // CoreImage-based transpose
+        }
+        else {
+            filterName = `transpose_${this.deviceName}`;
+        }
+        return `${filterName}=dir=${dir}`;
+    }
+    /**
+     * Hardware-accelerated tonemap filter.
+     * @returns Filter string or null if not supported
+     */
+    tonemap(options) {
+        if (!this.support.tonemap) {
+            return null;
+        }
+        // VideoToolbox uses different filter name
+        const filterName = this.deviceType === AV_HWDEVICE_TYPE_VIDEOTOOLBOX ? 'tonemap_videotoolbox' : `tonemap_${this.deviceName}`;
+        let filter = filterName;
+        if (options) {
+            const opts = Object.entries(options)
+                .map(([k, v]) => `${k}=${v}`)
+                .join(':');
+            filter += `=${opts}`;
+        }
+        return filter;
+    }
+    /**
+     * Hardware-accelerated deinterlace filter.
+     * @returns Filter string or null if not supported
+     */
+    deinterlace(mode) {
+        if (!this.support.deinterlace) {
+            return null;
+        }
+        switch (this.deviceType) {
+            case AV_HWDEVICE_TYPE_CUDA:
+                return mode ? `yadif_cuda=mode=${mode}` : 'yadif_cuda';
+            case AV_HWDEVICE_TYPE_VAAPI:
+                return mode ? `deinterlace_vaapi=mode=${mode}` : 'deinterlace_vaapi';
+            case AV_HWDEVICE_TYPE_QSV:
+                return mode ? `deinterlace_qsv=mode=${mode}` : 'deinterlace_qsv';
+            case AV_HWDEVICE_TYPE_VULKAN:
+                return mode ? `bwdif_vulkan=mode=${mode}` : 'bwdif_vulkan';
+            case AV_HWDEVICE_TYPE_VIDEOTOOLBOX:
+                return mode ? `yadif_videotoolbox=mode=${mode}` : 'yadif_videotoolbox';
+            default:
+                return null;
+        }
+    }
+    /**
+     * Hardware-accelerated flip filter.
+     * @returns Filter string or null if not supported
+     */
+    flip(direction) {
+        if (!this.support.flip) {
+            return null;
+        }
+        if (this.deviceType === AV_HWDEVICE_TYPE_VULKAN) {
+            return direction === 'h' ? 'hflip_vulkan' : 'vflip_vulkan';
+        }
+        return null;
+    }
+    /**
+     * Hardware-accelerated blur filter.
+     * @returns Filter string or null if not supported
+     */
+    blur(type = 'avg', radius) {
+        if (!this.support.blur) {
+            return null;
+        }
+        switch (this.deviceType) {
+            case AV_HWDEVICE_TYPE_CUDA:
+                return radius ? `bilateral_cuda=sigmaS=${radius}` : 'bilateral_cuda';
+            case AV_HWDEVICE_TYPE_VULKAN:
+                return type === 'gaussian' ? (radius ? `gblur_vulkan=sigma=${radius}` : 'gblur_vulkan') : radius ? `avgblur_vulkan=sizeX=${radius}` : 'avgblur_vulkan';
+            case AV_HWDEVICE_TYPE_OPENCL:
+                return type === 'box' ? (radius ? `boxblur_opencl=luma_radius=${radius}` : 'boxblur_opencl') : radius ? `avgblur_opencl=sizeX=${radius}` : 'avgblur_opencl';
+            default:
+                return null;
+        }
+    }
+    /**
+     * Hardware-accelerated sharpen filter.
+     * @returns Filter string or null if not supported
+     */
+    sharpen(amount) {
+        if (!this.support.sharpen) {
+            return null;
+        }
+        switch (this.deviceType) {
+            case AV_HWDEVICE_TYPE_VAAPI:
+                return amount ? `sharpness_vaapi=sharpness=${amount}` : 'sharpness_vaapi';
+            case AV_HWDEVICE_TYPE_OPENCL:
+                return amount ? `unsharp_opencl=amount=${amount}` : 'unsharp_opencl';
+            case AV_HWDEVICE_TYPE_CUDA:
+                // CUDA uses NPP for sharpening
+                return 'sharpen_npp';
+            default:
+                return null;
+        }
+    }
+    /**
+     * Hardware-accelerated stack filters (hstack, vstack, xstack).
+     * @returns Filter string or null if not supported
+     */
+    stack(type, inputs = 2) {
+        if (!this.support.stack) {
+            return null;
+        }
+        if (this.deviceType === AV_HWDEVICE_TYPE_VAAPI || this.deviceType === AV_HWDEVICE_TYPE_QSV) {
+            return `${type}stack_${this.deviceName}=inputs=${inputs}`;
+        }
+        return null;
+    }
+    /**
+     * Hardware upload filter to transfer frames to GPU.
+     */
+    hwupload() {
+        if (this.deviceType === AV_HWDEVICE_TYPE_CUDA) {
+            return 'hwupload_cuda';
+        }
+        return 'hwupload';
+    }
+    /**
+     * Hardware download filter to transfer frames from GPU.
+     */
+    hwdownload() {
+        return 'hwdownload';
+    }
+    /**
+     * Format conversion for hardware frames.
+     */
+    hwmap(derive) {
+        return derive ? `hwmap=derive_device=${derive}` : 'hwmap';
+    }
+    /**
+     * Get capabilities for this hardware type.
+     */
+    getCapabilities() {
+        return this.support;
+    }
+    /**
+     * Get supported filters for this hardware type.
+     */
+    getSupport() {
+        switch (this.deviceType) {
+            case AV_HWDEVICE_TYPE_CUDA:
+                return {
+                    scale: true, // scale_cuda
+                    overlay: true, // overlay_cuda
+                    transpose: true, // transpose_cuda (patch 0054)
+                    tonemap: true, // tonemap_cuda (patch 0004)
+                    deinterlace: true, // bwdif_cuda, yadif_cuda
+                    denoise: false,
+                    flip: false,
+                    blur: true, // bilateral_cuda
+                    sharpen: false, // Uses NPP
+                    chromakey: true, // chromakey_cuda
+                    colorspace: true, // colorspace_cuda
+                    pad: false,
+                    stack: false,
+                };
+            case AV_HWDEVICE_TYPE_VAAPI:
+                return {
+                    scale: true, // scale_vaapi
+                    overlay: true, // overlay_vaapi
+                    transpose: true, // transpose_vaapi
+                    tonemap: true, // tonemap_vaapi
+                    deinterlace: true, // deinterlace_vaapi
+                    denoise: true, // denoise_vaapi
+                    flip: false,
+                    blur: false,
+                    sharpen: true, // sharpness_vaapi
+                    chromakey: false,
+                    colorspace: false,
+                    pad: true, // pad_vaapi
+                    stack: true, // hstack_vaapi, vstack_vaapi, xstack_vaapi
+                };
+            case AV_HWDEVICE_TYPE_QSV:
+                return {
+                    scale: true, // scale_qsv
+                    overlay: true, // overlay_qsv
+                    transpose: false,
+                    tonemap: false,
+                    deinterlace: true, // deinterlace_qsv
+                    denoise: false,
+                    flip: false,
+                    blur: false,
+                    sharpen: false,
+                    chromakey: false,
+                    colorspace: false,
+                    pad: false,
+                    stack: true, // hstack_qsv, vstack_qsv, xstack_qsv
+                };
+            case AV_HWDEVICE_TYPE_VULKAN:
+                return {
+                    scale: true, // scale_vulkan
+                    overlay: true, // overlay_vulkan
+                    transpose: true, // transpose_vulkan
+                    tonemap: false,
+                    deinterlace: true, // bwdif_vulkan
+                    denoise: false,
+                    flip: true, // flip_vulkan, hflip_vulkan, vflip_vulkan
+                    blur: true, // avgblur_vulkan, gblur_vulkan
+                    sharpen: false,
+                    chromakey: false,
+                    colorspace: false,
+                    pad: false,
+                    stack: false,
+                };
+            case AV_HWDEVICE_TYPE_OPENCL:
+                return {
+                    scale: true, // scale_opencl (patch 0006)
+                    overlay: true, // overlay_opencl (+ PGS support patch 0008)
+                    transpose: true, // transpose_opencl
+                    tonemap: true, // tonemap_opencl (enhanced in patch 0007)
+                    deinterlace: false,
+                    denoise: false,
+                    flip: false,
+                    blur: true, // avgblur_opencl, boxblur_opencl
+                    sharpen: true, // unsharp_opencl
+                    chromakey: true, // colorkey_opencl
+                    colorspace: false,
+                    pad: true, // pad_opencl
+                    stack: false,
+                };
+            case AV_HWDEVICE_TYPE_VIDEOTOOLBOX:
+                return {
+                    scale: true, // scale_vt (patch 0047 adds format option)
+                    overlay: true, // overlay_videotoolbox (patch 0048)
+                    transpose: true, // transpose_vt (patch 0049, CoreImage based)
+                    tonemap: true, // tonemap_videotoolbox (patch 0050)
+                    deinterlace: true, // yadif_videotoolbox
+                    denoise: false,
+                    flip: false,
+                    blur: false,
+                    sharpen: false,
+                    chromakey: false,
+                    colorspace: false,
+                    pad: false,
+                    stack: false,
+                };
+            case AV_HWDEVICE_TYPE_MEDIACODEC:
+                // MediaCodec is Android's hardware acceleration - mainly for decode/encode
+                return {
+                    scale: false,
+                    overlay: false,
+                    transpose: false,
+                    tonemap: false,
+                    deinterlace: false,
+                    denoise: false,
+                    flip: false,
+                    blur: false,
+                    sharpen: false,
+                    chromakey: false,
+                    colorspace: false,
+                    pad: false,
+                    stack: false,
+                };
+            case AV_HWDEVICE_TYPE_RKMPP: // Rockchip - has RGA filters via patch 0046
+                // Note: RKMPP uses separate RKRGA (Rockchip 2D Raster Graphic Acceleration)
+                // for filtering operations, configured with --enable-rkrga
+                return {
+                    scale: true, // scale_rkrga (patch 0046)
+                    overlay: true, // overlay_rkrga (patch 0046)
+                    transpose: false,
+                    tonemap: false,
+                    deinterlace: false,
+                    denoise: false,
+                    flip: false,
+                    blur: false,
+                    sharpen: false,
+                    chromakey: false,
+                    colorspace: false,
+                    pad: false,
+                    stack: false,
+                };
+            // These hardware types don't have dedicated filters - they're mainly for decode/encode
+            case AV_HWDEVICE_TYPE_VDPAU: // Decode-only, deprecated in favor of VAAPI
+            case AV_HWDEVICE_TYPE_DXVA2: // Windows decode-only
+            case AV_HWDEVICE_TYPE_D3D11VA: // Windows decode-only
+            case AV_HWDEVICE_TYPE_D3D12VA: // Has HEVC encoder but no filters
+            case AV_HWDEVICE_TYPE_DRM: // Linux DRM buffer sharing, not processing
+                return {
+                    scale: false,
+                    overlay: false,
+                    transpose: false,
+                    tonemap: false,
+                    deinterlace: false,
+                    denoise: false,
+                    flip: false,
+                    blur: false,
+                    sharpen: false,
+                    chromakey: false,
+                    colorspace: false,
+                    pad: false,
+                    stack: false,
+                };
+            default:
+                // Unknown hardware - no support
+                // NPP is not a separate hardware type, it's CUDA-based
+                // We handle it through CUDA with special filter names
+                return {
+                    scale: false,
+                    overlay: false,
+                    transpose: false,
+                    tonemap: false,
+                    deinterlace: false,
+                    denoise: false,
+                    flip: false,
+                    blur: false,
+                    sharpen: false,
+                    chromakey: false,
+                    colorspace: false,
+                    pad: false,
+                    stack: false,
+                };
+        }
+    }
+}
+/**
+ * Hardware filter chain builder with fluent API.
+ * Automatically skips unsupported filters (returns null).
+ */
+export class HardwareFilterChainBuilder extends ChainBuilderBase {
+}
+//# sourceMappingURL=filter-presets.js.map
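For orientation, the presets above emit plain FFmpeg filter-graph strings, and the two chain builders differ only in whether unsupported presets return null (which `FilterChain.add()` silently drops). A minimal usage sketch based on the code in this diff; the import path is an assumption, since the diff only shows the dist internals being re-exported:

```typescript
// Assumed entry point; FilterPresets and HardwareContext are exported from dist/api in this release.
import { FilterPresets, HardwareContext } from 'node-av/api';

// Software chain: each preset returns an FFmpeg filter string, build() joins them with ','.
const soft = FilterPresets.chain()
  .scale(1280, 720)
  .format('yuv420p')
  .custom('unsharp=5:5:1.0')
  .build(); // "scale=1280:720,format=yuv420p,unsharp=5:5:1.0"

// Hardware chain: presets the detected device cannot run return null and are skipped.
const hw = await HardwareContext.auto();
if (hw) {
  const accelerated = hw.filterPresets.chain()
    .hwupload()        // 'hwupload_cuda' on CUDA, plain 'hwupload' elsewhere
    .scale(1920, 1080) // e.g. 'scale_cuda=1920:1080' or 'scale_vaapi=1920:1080'
    .tonemap()         // dropped when the device's support table has tonemap: false
    .hwdownload()
    .build();
}
```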