node-av 1.3.0 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. package/README.md +37 -38
  2. package/dist/api/bitstream-filter.d.ts +2 -2
  3. package/dist/api/bitstream-filter.js +2 -2
  4. package/dist/api/decoder.d.ts +131 -120
  5. package/dist/api/decoder.js +191 -203
  6. package/dist/api/decoder.js.map +1 -1
  7. package/dist/api/encoder.d.ts +135 -77
  8. package/dist/api/encoder.js +235 -192
  9. package/dist/api/encoder.js.map +1 -1
  10. package/dist/api/filter-presets.d.ts +408 -1534
  11. package/dist/api/filter-presets.js +1005 -2058
  12. package/dist/api/filter-presets.js.map +1 -1
  13. package/dist/api/filter.d.ts +160 -165
  14. package/dist/api/filter.js +294 -374
  15. package/dist/api/filter.js.map +1 -1
  16. package/dist/api/hardware.d.ts +8 -31
  17. package/dist/api/hardware.js +19 -70
  18. package/dist/api/hardware.js.map +1 -1
  19. package/dist/api/index.d.ts +1 -1
  20. package/dist/api/index.js +1 -1
  21. package/dist/api/index.js.map +1 -1
  22. package/dist/api/media-input.d.ts +1 -1
  23. package/dist/api/media-input.js +3 -8
  24. package/dist/api/media-input.js.map +1 -1
  25. package/dist/api/media-output.d.ts +35 -128
  26. package/dist/api/media-output.js +136 -208
  27. package/dist/api/media-output.js.map +1 -1
  28. package/dist/api/pipeline.d.ts +17 -17
  29. package/dist/api/pipeline.js +19 -42
  30. package/dist/api/pipeline.js.map +1 -1
  31. package/dist/api/types.d.ts +17 -57
  32. package/dist/lib/dictionary.d.ts +2 -2
  33. package/dist/lib/dictionary.js +2 -2
  34. package/dist/lib/dictionary.js.map +1 -1
  35. package/dist/lib/filter-context.d.ts +19 -2
  36. package/dist/lib/filter-context.js +15 -0
  37. package/dist/lib/filter-context.js.map +1 -1
  38. package/dist/lib/format-context.d.ts +18 -18
  39. package/dist/lib/format-context.js +20 -20
  40. package/dist/lib/format-context.js.map +1 -1
  41. package/dist/lib/frame.d.ts +43 -1
  42. package/dist/lib/frame.js +53 -0
  43. package/dist/lib/frame.js.map +1 -1
  44. package/package.json +17 -17
  45. package/release_notes.md +0 -29
@@ -1,5 +1,6 @@
- import { AVERROR_EAGAIN, AVERROR_EOF, AVFILTER_FLAG_HWDEVICE, AVMEDIA_TYPE_AUDIO, AVMEDIA_TYPE_VIDEO } from '../constants/constants.js';
- import { avGetSampleFmtName, avIsHardwarePixelFormat, FFmpegError, Filter, FilterGraph, FilterInOut, Frame } from '../lib/index.js';
+ import { AVERROR_EAGAIN, AVERROR_EOF, AVFILTER_FLAG_HWDEVICE } from '../constants/constants.js';
+ import { FFmpegError, Filter, FilterGraph, FilterInOut, Frame } from '../lib/index.js';
+ import { avGetSampleFmtName } from '../lib/utilities.js';
  /**
  * High-level filter API for audio and video processing.
  *
@@ -12,10 +13,12 @@ import { avGetSampleFmtName, avIsHardwarePixelFormat, FFmpegError, Filter, Filte
  * ```typescript
  * import { FilterAPI } from 'node-av/api';
  *
- * // Create video filter
- * const filter = await FilterAPI.create('scale=1280:720', videoInfo);
+ * // Create video filter - initializes on first frame
+ * const filter = await FilterAPI.create('scale=1280:720', {
+ * timeBase: video.timeBase,
+ * });
  *
- * // Process frame
+ * // Process frame - first frame configures filter graph
  * const output = await filter.process(inputFrame);
  * if (output) {
  * console.log(`Filtered frame: ${output.width}x${output.height}`);
@@ -25,54 +28,46 @@ import { avGetSampleFmtName, avIsHardwarePixelFormat, FFmpegError, Filter, Filte
  *
  * @example
  * ```typescript
- * // Hardware-accelerated filtering
- * const hw = HardwareContext.auto();
- * const filter = await FilterAPI.create(
- * 'hwupload,scale_cuda=1920:1080,hwdownload',
- * videoInfo,
- * { hardware: hw }
- * );
+ * // Hardware-accelerated filtering - hw context detected from frame
+ * const filter = await FilterAPI.create('hwupload,scale_cuda=1920:1080,hwdownload', {
+ * timeBase: video.timeBase,
+ * });
+ * // Hardware frames context will be automatically detected from first frame
  * ```
  *
  * @see {@link FilterGraph} For low-level filter graph API
- * @see {@link HardwareContext} For hardware acceleration
  * @see {@link Frame} For frame operations
  */
  export class FilterAPI {
- graph = null;
+ graph;
+ description;
+ options;
  buffersrcCtx = null;
  buffersinkCtx = null;
- config;
- mediaType;
  initialized = false;
- hardware;
- description;
- options;
+ isClosed = false;
  /**
- * @param config - Stream configuration
+ * @param graph - Filter graph instance
  * @param description - Filter description string
  * @param options - Filter options
  * @internal
  */
- constructor(config, description, options) {
- this.config = config;
+ constructor(graph, description, options) {
+ this.graph = graph;
  this.description = description;
  this.options = options;
- this.hardware = options.hardware;
- this.mediaType = config.type === 'video' ? AVMEDIA_TYPE_VIDEO : AVMEDIA_TYPE_AUDIO;
  }
  /**
  * Create a filter with specified description and configuration.
  *
- * Constructs filter graph from description string.
- * Configures input/output buffers and threading.
- * For video filters, uses lazy initialization to detect hardware frames.
+ * Creates and allocates filter graph immediately.
+ * Filter configuration is completed on first frame with frame properties.
+ * Hardware frames context is automatically detected from input frames.
  *
  * Direct mapping to avfilter_graph_parse_ptr() and avfilter_graph_config().
  *
  * @param description - Filter graph description
- * @param input - Input stream configuration
- * @param options - Filter options
+ * @param options - Filter options including required timeBase
  * @returns Configured filter instance
  *
  * @throws {Error} If filter creation or configuration fails
@@ -82,78 +77,124 @@ export class FilterAPI {
  * @example
  * ```typescript
  * // Simple video filter
- * const filter = await FilterAPI.create('scale=640:480', videoInfo);
+ * const filter = await FilterAPI.create('scale=640:480', {
+ * timeBase: video.timeBase
+ * });
  * ```
  *
  * @example
  * ```typescript
  * // Complex filter chain
- * const filter = await FilterAPI.create(
- * 'crop=640:480:0:0,rotate=PI/4',
- * videoInfo
- * );
+ * const filter = await FilterAPI.create('crop=640:480:0:0,rotate=PI/4', {
+ * timeBase: video.timeBase
+ * });
  * ```
  *
  * @example
  * ```typescript
  * // Audio filter
- * const filter = await FilterAPI.create(
- * 'volume=0.5,aecho=0.8:0.9:1000:0.3',
- * audioInfo
- * );
+ * const filter = await FilterAPI.create('volume=0.5,aecho=0.8:0.9:1000:0.3', {
+ * timeBase: audio.timeBase
+ * });
  * ```
  *
  * @see {@link process} For frame processing
  * @see {@link FilterOptions} For configuration options
  */
- static async create(description, input, options = {}) {
- let config;
- if (input.type === 'video') {
- config = {
- type: 'video',
- width: input.width,
- height: input.height,
- pixelFormat: input.pixelFormat,
- timeBase: input.timeBase,
- frameRate: input.frameRate,
- sampleAspectRatio: input.sampleAspectRatio,
- };
- }
- else {
- config = {
- type: 'audio',
- sampleRate: input.sampleRate,
- sampleFormat: input.sampleFormat,
- channelLayout: input.channelLayout,
- timeBase: input.timeBase,
- };
- }
- const filter = new FilterAPI(config, description, options);
- // Check if any filters in the chain require hardware context
- if (config.type === 'video') {
- filter.checkHardwareRequirements(description, options);
+ static async create(description, options) {
+ // Create graph
+ const graph = new FilterGraph();
+ graph.alloc();
+ // Configure threading
+ if (options.threads !== undefined) {
+ graph.nbThreads = options.threads;
  }
- // For video filters, always use lazy initialization to properly detect hardware requirements
- // For audio filters, initialize immediately (no hardware audio processing)
- if (config.type === 'audio') {
- await filter.initialize(null);
+ // Configure scaler options
+ if (options.scaleSwsOpts) {
+ graph.scaleSwsOpts = options.scaleSwsOpts;
  }
- // For video: wait for first frame to detect if hw_frames_ctx is present
- return filter;
+ return new FilterAPI(graph, description, options);
+ }
+ /**
+ * Check if filter is open.
+ *
+ * @example
+ * ```typescript
+ * if (filter.isFilterOpen) {
+ * const output = await filter.process(frame);
+ * }
+ * ```
+ */
+ get isFilterOpen() {
+ return !this.isClosed;
+ }
+ /**
+ * Check if filter has been initialized.
+ *
+ * Returns true after first frame has been processed and filter graph configured.
+ * Useful for checking if filter has received frame properties.
+ *
+ * @returns true if filter graph has been built from first frame
+ *
+ * @example
+ * ```typescript
+ * if (!filter.isFilterInitialized) {
+ * console.log('Filter will initialize on first frame');
+ * }
+ * ```
+ */
+ get isFilterInitialized() {
+ return this.initialized;
+ }
+ /**
+ * Check if filter is ready for processing.
+ *
+ * @returns true if initialized and ready
+ *
+ * @example
+ * ```typescript
+ * if (filter.isReady()) {
+ * const output = await filter.process(frame);
+ * }
+ * ```
+ */
+ isReady() {
+ return this.initialized && this.buffersrcCtx !== null && this.buffersinkCtx !== null && !this.isClosed;
+ }
+ /**
+ * Get filter graph description.
+ *
+ * Returns human-readable graph structure.
+ * Useful for debugging filter chains.
+ *
+ * Direct mapping to avfilter_graph_dump().
+ *
+ * @returns Graph description or null if closed
+ *
+ * @example
+ * ```typescript
+ * const description = filter.getGraphDescription();
+ * console.log('Filter graph:', description);
+ * ```
+ */
+ getGraphDescription() {
+ return !this.isClosed && this.initialized ? this.graph.dump() : null;
  }
  /**
  * Process a frame through the filter.
  *
  * Applies filter operations to input frame.
+ * On first frame, automatically builds filter graph with frame properties.
  * May buffer frames internally before producing output.
- * For video, performs lazy initialization on first frame.
+ * Hardware frames context is automatically detected from frame.
+ * Returns null if filter is closed and frame is null.
  *
  * Direct mapping to av_buffersrc_add_frame() and av_buffersink_get_frame().
  *
- * @param frame - Input frame to process
+ * @param frame - Input frame to process (or null to flush)
  * @returns Filtered frame or null if buffered
  *
- * @throws {Error} If filter not ready
+ * @throws {Error} If filter is closed with non-null frame
  *
  * @throws {FFmpegError} If processing fails
  *
@@ -168,26 +209,33 @@ export class FilterAPI {
  *
  * @example
  * ```typescript
- * // Process and drain
+ * // Process frame - may buffer internally
  * const output = await filter.process(frame);
- * if (output) yield output;
- *
- * // Drain buffered frames
- * let buffered;
- * while ((buffered = await filter.receive()) !== null) {
- * yield buffered;
+ * if (output) {
+ * // Got output immediately
+ * yield output;
  * }
+ * // For buffered frames, use the frames() async generator
  * ```
  *
- * @see {@link receive} For draining buffered frames
- * @see {@link frames} For stream processing
+ * @see {@link frames} For processing frame streams
+ * @see {@link flush} For end-of-stream handling
  */
  async process(frame) {
- // Lazy initialization for video filters (detect hardware from first frame)
- if (!this.initialized && this.config.type === 'video') {
+ if (this.isClosed) {
+ if (!frame) {
+ return null;
+ }
+ throw new Error('Filter is closed');
+ }
+ // Open filter if not already done
+ if (!this.initialized) {
+ if (!frame) {
+ return null;
+ }
  await this.initialize(frame);
  }
- if (!this.initialized || !this.buffersrcCtx || !this.buffersinkCtx) {
+ if (!this.buffersrcCtx || !this.buffersinkCtx) {
  throw new Error('Filter not initialized');
  }
  // Send frame to filter
@@ -253,49 +301,74 @@ export class FilterAPI {
  return outputFrames;
  }
  /**
- * Receive buffered frame from filter.
- *
- * Drains frames buffered by the filter.
- * Call repeatedly until null to get all buffered frames.
- *
- * Direct mapping to av_buffersink_get_frame().
+ * Process frame stream through filter.
  *
- * @returns Buffered frame or null if none available
+ * High-level async generator for filtering frame streams.
+ * Automatically handles buffering and flushing.
+ * Frees input frames after processing.
  *
+ * @param frames - Async generator of input frames
+ * @yields {Frame} Filtered frames
  * @throws {Error} If filter not ready
  *
- * @throws {FFmpegError} If receive fails
+ * @throws {FFmpegError} If processing fails
  *
  * @example
  * ```typescript
- * // Drain buffered frames
- * let frame;
- * while ((frame = await filter.receive()) !== null) {
- * console.log(`Buffered frame: pts=${frame.pts}`);
+ * // Filter decoded frames
+ * for await (const frame of filter.frames(decoder.frames(packets))) {
+ * await encoder.encode(frame);
  * frame.free();
  * }
  * ```
  *
- * @see {@link process} For input processing
- * @see {@link flush} For end-of-stream
+ * @example
+ * ```typescript
+ * // Chain filters
+ * const filter1 = await FilterAPI.create('scale=640:480', {
+ * timeBase: video.timeBase
+ * });
+ * const filter2 = await FilterAPI.create('rotate=PI/4', {
+ * timeBase: video.timeBase
+ * });
+ *
+ * for await (const frame of filter2.frames(filter1.frames(input))) {
+ * // Process filtered frames
+ * frame.free();
+ * }
+ * ```
+ *
+ * @see {@link process} For single frame processing
+ * @see {@link flush} For end-of-stream handling
  */
- async receive() {
- if (!this.initialized || !this.buffersinkCtx) {
- throw new Error('Filter not initialized');
- }
- const frame = new Frame();
- frame.alloc();
- const ret = await this.buffersinkCtx.buffersinkGetFrame(frame);
- if (ret >= 0) {
- return frame;
- }
- else {
- frame.free();
- if (ret === AVERROR_EAGAIN || ret === AVERROR_EOF) {
- return null;
+ async *frames(frames) {
+ for await (const frame of frames) {
+ try {
+ // Process input frame
+ const output = await this.process(frame);
+ if (output) {
+ yield output;
+ }
+ // Drain any buffered frames
+ while (true) {
+ const buffered = await this.receive();
+ if (!buffered)
+ break;
+ yield buffered;
+ }
  }
- FFmpegError.throwIfError(ret, 'Failed to receive frame from filter');
- return null;
+ finally {
+ // Free the input frame after processing
+ frame.free();
+ }
+ }
+ // Flush and get remaining frames
+ await this.flush();
+ while (true) {
+ const remaining = await this.receive();
+ if (!remaining)
+ break;
+ yield remaining;
  }
  }
  /**
@@ -303,11 +376,10 @@ export class FilterAPI {
  *
  * Sends null frame to flush buffered data.
  * Must call receive() to get flushed frames.
+ * Does nothing if filter is closed or was never initialized.
  *
  * Direct mapping to av_buffersrc_add_frame(NULL).
  *
- * @throws {Error} If filter not ready
- *
  * @throws {FFmpegError} If flush fails
  *
  * @example
@@ -321,12 +393,13 @@ export class FilterAPI {
  * ```
  *
  * @see {@link flushFrames} For async iteration
- * @see {@link receive} For draining frames
+ * @see {@link frames} For complete pipeline
  */
  async flush() {
- if (!this.initialized || !this.buffersrcCtx) {
- throw new Error('Filter not initialized');
+ if (this.isClosed || !this.initialized || !this.buffersrcCtx) {
+ return;
  }
+ // Send flush frame (null)
  const ret = await this.buffersrcCtx.buffersrcAddFrame(null);
  if (ret < 0 && ret !== AVERROR_EOF) {
  FFmpegError.throwIfError(ret, 'Failed to flush filter');
@@ -337,9 +410,9 @@ export class FilterAPI {
  *
  * Convenient async generator for flushing.
  * Combines flush and receive operations.
+ * Returns immediately if filter is closed or was never initialized.
  *
- * @yields Remaining frames from filter
- * @throws {Error} If filter not ready
+ * @yields {Frame} Remaining frames from filter
  *
  * @throws {FFmpegError} If flush fails
  *
@@ -355,9 +428,6 @@ export class FilterAPI {
  * @see {@link frames} For complete pipeline
  */
  async *flushFrames() {
- if (!this.initialized || !this.buffersrcCtx) {
- throw new Error('Filter not initialized');
- }
  // Send flush signal
  await this.flush();
  // Yield all remaining frames
@@ -367,70 +437,44 @@ export class FilterAPI {
  }
  }
  /**
- * Process frame stream through filter.
+ * Receive buffered frame from filter.
  *
- * High-level async generator for filtering frame streams.
- * Automatically handles buffering and flushing.
- * Frees input frames after processing.
+ * Drains frames buffered by the filter.
+ * Call repeatedly until null to get all buffered frames.
+ * Returns null if filter is closed, not initialized, or no frames available.
  *
- * @param frames - Async generator of input frames
- * @yields Filtered frames
- * @throws {Error} If filter not ready
+ * Direct mapping to av_buffersink_get_frame().
  *
- * @throws {FFmpegError} If processing fails
+ * @returns Buffered frame or null if none available
  *
- * @example
- * ```typescript
- * // Filter decoded frames
- * for await (const frame of filter.frames(decoder.frames(packets))) {
- * await encoder.encode(frame);
- * frame.free();
- * }
- * ```
+ * @throws {FFmpegError} If receiving fails
  *
  * @example
  * ```typescript
- * // Chain filters
- * const filter1 = await FilterAPI.create('scale=640:480', info);
- * const filter2 = await FilterAPI.create('rotate=PI/4', info);
- *
- * for await (const frame of filter2.frames(filter1.frames(input))) {
- * // Process filtered frames
+ * let frame;
+ * while ((frame = await filter.receive()) !== null) {
+ * console.log(`Received frame: pts=${frame.pts}`);
  * frame.free();
  * }
  * ```
- *
- * @see {@link process} For single frame processing
- * @see {@link flush} For end-of-stream handling
  */
- async *frames(frames) {
- for await (const frame of frames) {
- try {
- // Process input frame
- const output = await this.process(frame);
- if (output) {
- yield output;
- }
- // Drain any buffered frames
- while (true) {
- const buffered = await this.receive();
- if (!buffered)
- break;
- yield buffered;
- }
- }
- finally {
- // Free the input frame after processing
- frame.free();
- }
+ async receive() {
+ if (this.isClosed || !this.initialized || !this.buffersinkCtx) {
+ return null;
  }
- // Flush and get remaining frames
- await this.flush();
- while (true) {
- const remaining = await this.receive();
- if (!remaining)
- break;
- yield remaining;
+ const frame = new Frame();
+ frame.alloc();
+ const ret = await this.buffersinkCtx.buffersinkGetFrame(frame);
+ if (ret >= 0) {
+ return frame;
+ }
+ else {
+ frame.free();
+ if (ret === AVERROR_EAGAIN || ret === AVERROR_EOF) {
+ return null;
+ }
+ FFmpegError.throwIfError(ret, 'Failed to receive frame from filter');
+ return null;
  }
  }
  /**
@@ -461,7 +505,10 @@ export class FilterAPI {
  * @see {@link queueCommand} For delayed commands
  */
  sendCommand(target, cmd, arg, flags) {
- if (!this.initialized || !this.graph) {
+ if (this.isClosed) {
+ throw new Error('Filter is closed');
+ }
+ if (!this.initialized) {
  throw new Error('Filter not initialized');
  }
  const result = this.graph.sendCommand(target, cmd, arg, flags);
@@ -496,64 +543,15 @@ export class FilterAPI {
  * @see {@link sendCommand} For immediate commands
  */
  queueCommand(target, cmd, arg, ts, flags) {
- if (!this.initialized || !this.graph) {
+ if (this.isClosed) {
+ throw new Error('Filter is closed');
+ }
+ if (!this.initialized) {
  throw new Error('Filter not initialized');
  }
  const ret = this.graph.queueCommand(target, cmd, arg, ts, flags);
  FFmpegError.throwIfError(ret, 'Failed to queue filter command');
  }
- /**
- * Get filter graph description.
- *
- * Returns human-readable graph structure.
- * Useful for debugging filter chains.
- *
- * Direct mapping to avfilter_graph_dump().
- *
- * @returns Graph description or null if not initialized
- *
- * @example
- * ```typescript
- * const description = filter.getGraphDescription();
- * console.log('Filter graph:', description);
- * ```
- */
- getGraphDescription() {
- if (!this.initialized || !this.graph) {
- return null;
- }
- return this.graph.dump();
- }
- /**
- * Check if filter is ready for processing.
- *
- * @returns true if initialized and ready
- *
- * @example
- * ```typescript
- * if (filter.isReady()) {
- * const output = await filter.process(frame);
- * }
- * ```
- */
- isReady() {
- return this.initialized && this.buffersrcCtx !== null && this.buffersinkCtx !== null;
- }
- /**
- * Get media type of filter.
- *
- * @returns AVMEDIA_TYPE_VIDEO or AVMEDIA_TYPE_AUDIO
- *
- * @example
- * ```typescript
- * if (filter.getMediaType() === AVMEDIA_TYPE_VIDEO) {
- * console.log('Video filter');
- * }
- * ```
- */
- getMediaType() {
- return this.mediaType;
- }
  /**
  * Free filter resources.
  *
@@ -562,27 +560,29 @@ export class FilterAPI {
  *
  * @example
  * ```typescript
- * filter.free();
+ * filter.close();
  * ```
  *
  * @see {@link Symbol.dispose} For automatic cleanup
  */
- free() {
- if (this.graph) {
- this.graph.free();
- this.graph = null;
+ close() {
+ if (this.isClosed) {
+ return;
  }
+ this.isClosed = true;
+ this.graph.free();
  this.buffersrcCtx = null;
  this.buffersinkCtx = null;
  this.initialized = false;
  }
  /**
- * Initialize filter graph.
+ * Initialize filter graph from first frame.
  *
  * Creates and configures filter graph components.
- * For video, may use hardware frames context from first frame.
+ * Sets buffer source parameters from frame properties.
+ * Automatically configures hardware frames context if present.
  *
- * @param firstFrame - First frame for hardware detection (video only)
+ * @param frame - First frame to process, provides format and hw context
  *
  * @throws {Error} If initialization fails
  *
@@ -590,38 +590,20 @@ export class FilterAPI {
  *
  * @internal
  */
- async initialize(firstFrame) {
- // Create graph
- this.graph = new FilterGraph();
- this.graph.alloc();
- // Configure threading
- if (this.options.threads !== undefined) {
- this.graph.nbThreads = this.options.threads;
- }
- // Configure scaler options
- if (this.options.scaleSwsOpts) {
- this.graph.scaleSwsOpts = this.options.scaleSwsOpts;
- }
- // Create buffer source with hw_frames_ctx if needed
- if (firstFrame?.hwFramesCtx && this.config.type === 'video') {
- this.createBufferSourceWithHwFrames(firstFrame);
- }
- else {
- this.createBufferSource();
- }
+ async initialize(frame) {
+ // Create buffer source
+ this.createBufferSource(frame);
  // Create buffer sink
- this.createBufferSink();
+ this.createBufferSink(frame);
  // Parse filter description
  this.parseFilterDescription(this.description);
  // Set hw_device_ctx on hardware filters
- if (this.hardware?.deviceContext) {
- const filters = this.graph.filters;
- if (filters) {
- for (const filterCtx of filters) {
- const filter = filterCtx.filter;
- if (filter && (filter.flags & AVFILTER_FLAG_HWDEVICE) !== 0) {
- filterCtx.hwDeviceCtx = this.hardware.deviceContext;
- }
+ const filters = this.graph.filters;
+ if (filters) {
+ for (const filterCtx of filters) {
+ const filter = filterCtx.filter;
+ if (filter && (filter.flags & AVFILTER_FLAG_HWDEVICE) !== 0) {
+ filterCtx.hwDeviceCtx = frame.hwFramesCtx?.deviceRef ?? this.options.hardware?.deviceContext ?? null;
  }
  }
  }
@@ -631,9 +613,12 @@ export class FilterAPI {
  this.initialized = true;
  }
  /**
- * Create buffer source with hardware frames context.
+ * Create buffer source with frame parameters.
+ *
+ * Configures buffer source with frame properties including hardware context.
+ * Automatically detects video/audio and sets appropriate parameters.
  *
- * @param frame - Frame with hw_frames_ctx
+ * @param frame - Frame providing format, dimensions, and hw_frames_ctx
  *
  * @throws {Error} If creation fails
  *
@@ -641,81 +626,58 @@ export class FilterAPI {
  *
  * @internal
  */
- createBufferSourceWithHwFrames(frame) {
- const filterName = 'buffer';
+ createBufferSource(frame) {
+ const filterName = frame.isVideo() ? 'buffer' : 'abuffer';
  const bufferFilter = Filter.getByName(filterName);
  if (!bufferFilter) {
  throw new Error(`${filterName} filter not found`);
  }
- // Allocate filter without args
- this.buffersrcCtx = this.graph.allocFilter(bufferFilter, 'in');
- if (!this.buffersrcCtx) {
- throw new Error('Failed to allocate buffer source');
- }
- // Set parameters including hw_frames_ctx
- const cfg = this.config;
- const ret = this.buffersrcCtx.buffersrcParametersSet({
- width: cfg.width,
- height: cfg.height,
- format: cfg.pixelFormat,
- timeBase: cfg.timeBase,
- frameRate: cfg.frameRate,
- sampleAspectRatio: cfg.sampleAspectRatio,
- hwFramesCtx: frame.hwFramesCtx ?? undefined,
- });
- FFmpegError.throwIfError(ret, 'Failed to set buffer source parameters');
- // Initialize filter
- const initRet = this.buffersrcCtx.init(null);
- FFmpegError.throwIfError(initRet, 'Failed to initialize buffer source');
- }
- /**
- * Create standard buffer source.
- *
- * @throws {Error} If creation fails
- *
- * @internal
- */
- createBufferSource() {
- const filterName = this.config.type === 'video' ? 'buffer' : 'abuffer';
- const bufferFilter = Filter.getByName(filterName);
- if (!bufferFilter) {
- throw new Error(`${filterName} filter not found`);
- }
- // Build args string
- let args;
- if (this.config.type === 'video') {
- const cfg = this.config;
- args = `video_size=${cfg.width}x${cfg.height}:pix_fmt=${cfg.pixelFormat}:time_base=${cfg.timeBase.num}/${cfg.timeBase.den}`;
- if (cfg.frameRate) {
- args += `:frame_rate=${cfg.frameRate.num}/${cfg.frameRate.den}`;
- }
- if (cfg.sampleAspectRatio) {
- args += `:pixel_aspect=${cfg.sampleAspectRatio.num}/${cfg.sampleAspectRatio.den}`;
+ // For audio, create with args. For video, use allocFilter + buffersrcParametersSet
+ if (frame.isVideo()) {
+ // Allocate filter without args
+ this.buffersrcCtx = this.graph.allocFilter(bufferFilter, 'in');
+ if (!this.buffersrcCtx) {
+ throw new Error('Failed to allocate buffer source');
  }
+ const ret = this.buffersrcCtx.buffersrcParametersSet({
+ width: frame.width,
+ height: frame.height,
+ format: frame.format,
+ timeBase: this.options.timeBase,
+ frameRate: this.options.frameRate ?? frame.timeBase,
+ sampleAspectRatio: frame.sampleAspectRatio,
+ colorRange: frame.colorRange,
+ colorSpace: frame.colorSpace,
+ hwFramesCtx: frame.hwFramesCtx,
+ });
+ FFmpegError.throwIfError(ret, 'Failed to set buffer source parameters');
+ // Initialize filter
+ const initRet = this.buffersrcCtx.init(null);
+ FFmpegError.throwIfError(initRet, 'Failed to initialize buffer source');
  }
  else {
- const cfg = this.config;
- const sampleFmtName = avGetSampleFmtName(cfg.sampleFormat);
- const channelLayout = cfg.channelLayout.mask === 0n ? 'stereo' : cfg.channelLayout.mask.toString();
- args = `sample_rate=${cfg.sampleRate}:sample_fmt=${sampleFmtName}:channel_layout=${channelLayout}:time_base=${cfg.timeBase.num}/${cfg.timeBase.den}`;
- }
- this.buffersrcCtx = this.graph.createFilter(bufferFilter, 'in', args);
- if (!this.buffersrcCtx) {
- throw new Error('Failed to create buffer source');
+ // For audio, create with args string
+ const formatName = avGetSampleFmtName(frame.format);
+ const channelLayout = frame.channelLayout.mask === 0n ? 'stereo' : frame.channelLayout.mask.toString();
+ // eslint-disable-next-line @stylistic/max-len
+ const args = `time_base=${this.options.timeBase.num}/${this.options.timeBase.den}:sample_rate=${frame.sampleRate}:sample_fmt=${formatName}:channel_layout=${channelLayout}`;
+ this.buffersrcCtx = this.graph.createFilter(bufferFilter, 'in', args);
+ if (!this.buffersrcCtx) {
+ throw new Error('Failed to create audio buffer source');
+ }
  }
  }
  /**
  * Create buffer sink.
  *
+ * @param frame - Frame
+ *
  * @throws {Error} If creation fails
  *
  * @internal
  */
- createBufferSink() {
- if (!this.graph) {
- throw new Error('Filter graph not initialized');
- }
- const filterName = this.config.type === 'video' ? 'buffersink' : 'abuffersink';
+ createBufferSink(frame) {
+ const filterName = frame.isVideo() ? 'buffersink' : 'abuffersink';
  const sinkFilter = Filter.getByName(filterName);
  if (!sinkFilter) {
  throw new Error(`${filterName} filter not found`);
@@ -737,9 +699,6 @@ export class FilterAPI {
  * @internal
  */
  parseFilterDescription(description) {
- if (!this.graph) {
- throw new Error('Filter graph not initialized');
- }
  if (!this.buffersrcCtx || !this.buffersinkCtx) {
  throw new Error('Buffer filters not initialized');
  }
@@ -768,63 +727,24 @@ export class FilterAPI {
  inputs.free();
  outputs.free();
  }
- /**
- * Check hardware requirements for filters.
- *
- * @param description - Filter description
- * @param options - Filter options
- *
- * @throws {Error} If hardware requirements not met
- *
- * @internal
- */
- checkHardwareRequirements(description, options) {
- if (this.config.type !== 'video') {
- return;
- }
- // Parse filter names from description
- const filterNames = description
- .split(',')
- .map((f) => {
- // Extract filter name (before = or : or whitespace)
- const match = /^([a-zA-Z0-9_]+)/.exec(f.trim());
- return match ? match[1] : null;
- })
- .filter(Boolean);
- for (const filterName of filterNames) {
- const lowLevelFilter = Filter.getByName(filterName);
- if (!lowLevelFilter) {
- // Filter will be validated later during graph parsing
- continue;
- }
- if (!options.hardware) {
- if (filterName === 'hwupload' || filterName === 'hwupload_cuda' || (lowLevelFilter.flags & AVFILTER_FLAG_HWDEVICE) !== 0) {
- throw new Error(`Filter '${filterName}' requires a hardware context`);
- }
- else if (filterName === 'hwdownload' && !avIsHardwarePixelFormat(this.config.pixelFormat)) {
- throw new Error(`Pixel Format '${this.config.pixelFormat}' is not hardware compatible`);
- }
- }
- }
- }
  /**
  * Dispose of filter.
  *
  * Implements Disposable interface for automatic cleanup.
- * Equivalent to calling free().
+ * Equivalent to calling close().
  *
  * @example
  * ```typescript
  * {
- * using filter = await FilterAPI.create('scale=640:480', info);
+ * using filter = await FilterAPI.create('scale=640:480', { ... });
  * // Use filter...
  * } // Automatically freed
  * ```
  *
- * @see {@link free} For manual cleanup
+ * @see {@link close} For manual cleanup
  */
  [Symbol.dispose]() {
- this.free();
+ this.close();
  }
  }
  //# sourceMappingURL=filter.js.map
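Taken together, the hunks above describe the breaking change for FilterAPI consumers in 2.0.0: create() now takes a single options object with a required timeBase instead of a stream-info argument, the filter graph is configured lazily from the first frame (including any hardware frames context), and free() is replaced by close(). The following is a minimal migration sketch assembled from the JSDoc examples in this diff; the `video`, `decoder`, `encoder`, and `packets` objects are placeholders, not part of the diff.

```typescript
import { FilterAPI } from 'node-av/api';

// 1.3.0: FilterAPI.create('scale=1280:720', videoInfo, { hardware: hw })
// 2.0.0: pass options with a required timeBase; the graph is built from the
// first processed frame, and hw_frames_ctx is detected from that frame.
const filter = await FilterAPI.create('scale=1280:720', {
  timeBase: video.timeBase, // placeholder stream object
});

// frames() handles buffering, flushing, and freeing of input frames.
for await (const frame of filter.frames(decoder.frames(packets))) {
  await encoder.encode(frame);
  frame.free();
}

// free() is gone in 2.0.0; call close() or rely on `using` / Symbol.dispose.
filter.close();
```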