node-av 3.0.5 → 3.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,702 @@
1
// tsc-emitted helper for `using`/`await using` declarations (tslib
// __addDisposableResource). Registers `value` on env.stack together with its
// dispose function so __disposeResources can later tear resources down in
// reverse order. Throws TypeError if the value is not disposable.
var __addDisposableResource = (this && this.__addDisposableResource) || function (env, value, async) {
    if (value !== null && value !== void 0) {
        // Only objects/functions can carry Symbol.dispose members.
        if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected.");
        var dispose, inner;
        if (async) {
            if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined.");
            dispose = value[Symbol.asyncDispose];
        }
        if (dispose === void 0) {
            // Fall back to the synchronous dispose method; for `await using`
            // remember it so errors can still surface as rejections.
            if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined.");
            dispose = value[Symbol.dispose];
            if (async) inner = dispose;
        }
        if (typeof dispose !== "function") throw new TypeError("Object not disposable.");
        // Wrap a sync dispose used in async position so a throw becomes a rejection.
        if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } };
        env.stack.push({ value: value, dispose: dispose, async: async });
    }
    else if (async) {
        // null/undefined under `await using` still records an async barrier entry.
        env.stack.push({ async: true });
    }
    return value;
};
23
// tsc-emitted helper (tslib __disposeResources): unwinds env.stack in LIFO
// order, calling each recorded dispose. `s` is a small bit-flag state machine
// (bit 1: a null async barrier was seen; bit 2: at least one async dispose
// ran) so that the loop can suspend on promises and resume via next().
// Errors during disposal are chained with SuppressedError semantics.
var __disposeResources = (this && this.__disposeResources) || (function (SuppressedError) {
    return function (env) {
        function fail(e) {
            // A later error suppresses an earlier one rather than replacing it.
            env.error = env.hasError ? new SuppressedError(e, env.error, "An error was suppressed during disposal.") : e;
            env.hasError = true;
        }
        var r, s = 0;
        function next() {
            while (r = env.stack.pop()) {
                try {
                    // A sync resource after an async barrier must run in a microtask.
                    if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next);
                    if (r.dispose) {
                        var result = r.dispose.call(r.value);
                        if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); });
                    }
                    else s |= 1;
                }
                catch (e) {
                    fail(e);
                }
            }
            if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve();
            if (env.hasError) throw env.error;
        }
        return next();
    };
})(typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) {
    // Polyfill for environments without the native SuppressedError constructor.
    var e = new Error(message);
    return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
});
53
+ import { AV_CODEC_ID_AAC, AV_CODEC_ID_AV1, AV_CODEC_ID_FLAC, AV_CODEC_ID_H264, AV_CODEC_ID_HEVC, AV_CODEC_ID_OPUS, AV_HWDEVICE_TYPE_NONE, AV_SAMPLE_FMT_FLTP, } from '../constants/constants.js';
54
+ import { FF_ENCODER_AAC, FF_ENCODER_LIBX264 } from '../constants/encoders.js';
55
+ import { avGetCodecStringHls } from '../lib/utilities.js';
56
+ import { Decoder } from './decoder.js';
57
+ import { Encoder } from './encoder.js';
58
+ import { FilterPreset } from './filter-presets.js';
59
+ import { FilterAPI } from './filter.js';
60
+ import { HardwareContext } from './hardware.js';
61
+ import { MediaInput } from './media-input.js';
62
+ import { MediaOutput } from './media-output.js';
63
/**
 * RFC 6381 codec identifier strings used when targeting fMP4/MSE clients.
 *
 * These are the values negotiated against the client's `supportedCodecs`
 * list and used as transcode targets when the input codec is unsupported.
 */
export const FMP4_CODECS = {
    H264: 'avc1.640029', // H.264 High profile, level 4.1
    H265: 'hvc1.1.6.L153.B0', // HEVC Main profile
    AV1: 'av01.0.00M.08', // AV1 Main profile, level 2.0
    AAC: 'mp4a.40.2', // AAC-LC
    FLAC: 'flac',
    OPUS: 'opus',
};
74
/**
 * High-level fMP4 streaming with automatic codec detection and transcoding.
 *
 * Provides fragmented MP4 streaming for clients.
 * Automatically transcodes video to H.264 and audio to AAC if not supported by client.
 * Client sends supported codecs, server transcodes accordingly.
 * Essential component for building adaptive streaming servers.
 *
 * @example
 * ```typescript
 * import { FMP4Stream } from 'node-av/api';
 *
 * // Client sends supported codecs
 * const supportedCodecs = 'avc1.640029,hvc1.1.6.L153.B0,mp4a.40.2,flac';
 *
 * // Create stream with codec negotiation
 * const stream = await FMP4Stream.create('rtsp://camera.local/stream', {
 *   supportedCodecs,
 *   onChunk: (chunk) => ws.send(chunk)
 * });
 *
 * // Start streaming (auto-transcodes if needed)
 * await stream.start();
 * ```
 *
 * @example
 * ```typescript
 * // Stream with hardware acceleration
 * const stream = await FMP4Stream.create('input.mp4', {
 *   supportedCodecs: 'avc1.640029,mp4a.40.2',
 *   hardware: 'auto',
 *   fragDuration: 1,
 *   onChunk: (chunk) => sendToClient(chunk)
 * });
 *
 * await stream.start();
 * stream.stop();
 * stream.dispose();
 * ```
 *
 * @see {@link MediaInput} For input media handling
 * @see {@link MediaOutput} For fMP4 generation
 * @see {@link HardwareContext} For GPU acceleration
 */
export class FMP4Stream {
    input; // MediaInput source opened by create()
    options; // Normalized options (all defaults applied in constructor)
    output = null; // MediaOutput, created lazily in start()
    hardwareContext = null; // GPU context when hardware option is set
    videoDecoder = null; // Only set when video needs transcoding
    videoEncoder = null; // Only set when video needs transcoding
    audioDecoder = null; // Only set when audio needs transcoding
    audioFilter = null; // Resample/format filter for the AAC encode path
    audioEncoder = null; // Only set when audio needs transcoding
    streamActive = false; // Loop guard; cleared by stop()
    supportedCodecs; // Set of client codec strings parsed from options
    /**
     * @param input - Media input source
     *
     * @param options - Stream configuration options
     *
     * Use {@link create} factory method
     *
     * @internal
     */
    constructor(input, options) {
        this.input = input;
        this.options = {
            onChunk: options.onChunk ?? (() => { }),
            supportedCodecs: options.supportedCodecs ?? '',
            fragDuration: options.fragDuration ?? 1,
            hardware: options.hardware ?? { deviceType: AV_HWDEVICE_TYPE_NONE },
        };
        // Parse supported codecs (comma-separated list; blanks are dropped)
        this.supportedCodecs = new Set(this.options.supportedCodecs
            .split(',')
            .map((c) => c.trim())
            .filter(Boolean));
    }
    /**
     * Create a fMP4 stream from a media source.
     *
     * Opens the input media, detects video and audio codecs, and prepares
     * transcoding pipelines based on client-supported codecs.
     * Automatically transcodes to H.264 and AAC if necessary.
     *
     * @param inputUrl - Media source URL (RTSP, file path, HTTP, etc.)
     *
     * @param options - Stream configuration options with supported codecs
     *
     * @returns Configured fMP4 stream instance
     *
     * @throws {Error} If no video stream found in input
     *
     * @throws {FFmpegError} If input cannot be opened
     *
     * @example
     * ```typescript
     * // Stream from file with codec negotiation
     * const stream = await FMP4Stream.create('video.mp4', {
     *   supportedCodecs: 'avc1.640029,mp4a.40.2',
     *   onChunk: (chunk) => ws.send(chunk)
     * });
     * ```
     *
     * @example
     * ```typescript
     * // Stream from RTSP with auto hardware acceleration
     * const stream = await FMP4Stream.create('rtsp://camera.local/stream', {
     *   supportedCodecs: 'avc1.640029,hvc1.1.6.L153.B0,mp4a.40.2',
     *   hardware: 'auto',
     *   fragDuration: 0.5
     * });
     * ```
     */
    static async create(inputUrl, options = {}) {
        // RTSP over TCP avoids UDP packet loss for camera streams
        const isRtsp = inputUrl.toLowerCase().startsWith('rtsp://');
        const input = await MediaInput.open(inputUrl, {
            options: isRtsp ? { rtsp_transport: 'tcp' } : undefined,
        });
        // Video is mandatory; methods below rely on input.video() being non-null
        const videoStream = input.video();
        if (!videoStream) {
            throw new Error('No video stream found in input');
        }
        return new FMP4Stream(input, options);
    }
    /**
     * Get the codec string that will be used by client.
     *
     * Returns the MIME type codec string based on input codecs and transcoding decisions.
     * Call this after creating the stream to know what codec string to use for addSourceBuffer().
     *
     * @returns MIME type codec string (e.g., "avc1.640029,mp4a.40.2")
     *
     * @example
     * ```typescript
     * const stream = await FMP4Stream.create('input.mp4', {
     *   supportedCodecs: 'avc1.640029,mp4a.40.2'
     * });
     *
     * const codecString = stream.getCodecString();
     * console.log(codecString); // "avc1.640029,mp4a.40.2"
     * // Use this for: sourceBuffer = mediaSource.addSourceBuffer(`video/mp4; codecs="${codecString}"`);
     * ```
     */
    getCodecString() {
        // create() guarantees a video stream exists; audio may be absent
        const videoStream = this.input.video();
        const audioStream = this.input.audio();
        const videoCodecId = videoStream.codecpar.codecId;
        const audioCodecId = audioStream?.codecpar.codecId;
        // Determine video codec string
        let videoCodec;
        const needsVideoTranscode = !this.isVideoCodecSupported(videoCodecId);
        if (needsVideoTranscode) {
            // Transcoding to H.264
            videoCodec = FMP4_CODECS.H264;
        }
        else if (videoCodecId === AV_CODEC_ID_H264) {
            // H.264 - use HLS codec string from input
            const hlsCodec = avGetCodecStringHls(videoStream.codecpar);
            videoCodec = hlsCodec ?? FMP4_CODECS.H264;
        }
        else if (videoCodecId === AV_CODEC_ID_HEVC) {
            // H.265 - use HLS codec string from input
            const hlsCodec = avGetCodecStringHls(videoStream.codecpar);
            videoCodec = hlsCodec ?? FMP4_CODECS.H265;
        }
        else if (videoCodecId === AV_CODEC_ID_AV1) {
            // AV1 - use HLS codec string from input
            const hlsCodec = avGetCodecStringHls(videoStream.codecpar);
            videoCodec = hlsCodec ?? FMP4_CODECS.AV1;
        }
        else {
            // Fallback to H.264 (should not happen as we transcode unsupported codecs)
            videoCodec = FMP4_CODECS.H264;
        }
        // Determine audio codec string
        let audioCodec = null;
        // NOTE(review): truthiness check — a codec id that is numerically 0 would
        // be treated as "no audio"; confirm codec ids are always non-zero here.
        if (audioCodecId) {
            const needsAudioTranscode = !this.isAudioCodecSupported(audioCodecId);
            if (needsAudioTranscode) {
                // Transcoding to AAC
                audioCodec = FMP4_CODECS.AAC;
            }
            else if (audioCodecId === AV_CODEC_ID_AAC) {
                // AAC - use fixed codec string
                audioCodec = FMP4_CODECS.AAC;
            }
            else if (audioCodecId === AV_CODEC_ID_FLAC) {
                // FLAC
                audioCodec = FMP4_CODECS.FLAC;
            }
            else if (audioCodecId === AV_CODEC_ID_OPUS) {
                // Opus
                audioCodec = FMP4_CODECS.OPUS;
            }
            else {
                // Fallback to AAC (should not happen as we transcode unsupported codecs)
                audioCodec = FMP4_CODECS.AAC;
            }
        }
        // Combine video and audio codec strings
        return audioCodec ? `${videoCodec},${audioCodec}` : videoCodec;
    }
    /**
     * Get the resolution of the input video stream.
     *
     * @returns Object with width and height properties
     *
     * @example
     * ```typescript
     * const stream = await FMP4Stream.create('input.mp4', {
     *   supportedCodecs: 'avc1.640029,mp4a.40.2'
     * });
     *
     * const resolution = stream.getResolution();
     * console.log(`Width: ${resolution.width}, Height: ${resolution.height}`);
     * ```
     */
    getResolution() {
        const videoStream = this.input.video();
        return {
            width: videoStream.codecpar.width,
            height: videoStream.codecpar.height,
        };
    }
    /**
     * Start streaming media to fMP4 chunks.
     *
     * Begins the media processing pipeline, reading packets from input,
     * transcoding based on supported codecs, and generating fMP4 chunks.
     * Video transcodes to H.264 if H.264/H.265 not supported.
     * Audio transcodes to AAC if AAC/FLAC/Opus not supported.
     * This method blocks until streaming completes or {@link stop} is called.
     *
     * @returns Promise that resolves when streaming completes
     *
     * @throws {FFmpegError} If transcoding or muxing fails
     *
     * @example
     * ```typescript
     * const stream = await FMP4Stream.create('input.mp4', {
     *   supportedCodecs: 'avc1.640029,mp4a.40.2',
     *   onChunk: (chunk) => sendToClient(chunk)
     * });
     *
     * // Start streaming (blocks until complete)
     * await stream.start();
     * ```
     *
     * @example
     * ```typescript
     * // Non-blocking start with background promise
     * const stream = await FMP4Stream.create('input.mp4', {
     *   supportedCodecs: 'avc1.640029,mp4a.40.2'
     * });
     * const streamPromise = stream.start();
     *
     * // Later: stop streaming
     * stream.stop();
     * await streamPromise;
     * ```
     */
    async start() {
        // Idempotent: a second concurrent start() is a no-op
        if (this.streamActive) {
            return;
        }
        this.streamActive = true;
        const videoStream = this.input.video();
        const audioStream = this.input.audio();
        // Check if we need hardware acceleration
        if (this.options.hardware === 'auto') {
            this.hardwareContext = HardwareContext.auto();
        }
        else if (this.options.hardware.deviceType !== AV_HWDEVICE_TYPE_NONE) {
            this.hardwareContext = HardwareContext.create(this.options.hardware.deviceType, this.options.hardware.device, this.options.hardware.options);
        }
        // Check if video needs transcoding
        const needsVideoTranscode = !this.isVideoCodecSupported(videoStream.codecpar.codecId);
        if (needsVideoTranscode) {
            // Transcode to H.264
            this.videoDecoder = await Decoder.create(videoStream, {
                hardware: this.hardwareContext ?? undefined,
                exitOnError: false,
            });
            this.videoEncoder = await Encoder.create(FF_ENCODER_LIBX264, {
                timeBase: videoStream.timeBase,
                frameRate: videoStream.avgFrameRate,
            });
        }
        // Check if audio needs transcoding
        const needsAudioTranscode = audioStream && !this.isAudioCodecSupported(audioStream.codecpar.codecId);
        if (needsAudioTranscode) {
            // Transcode to AAC
            this.audioDecoder = await Decoder.create(audioStream, {
                exitOnError: false,
            });
            const targetSampleRate = 48000;
            // aformat normalizes sample format/rate/layout; asetnsamples(1024)
            // matches the AAC encoder's expected frame size
            const filterChain = FilterPreset.chain().aformat(AV_SAMPLE_FMT_FLTP, targetSampleRate, 'stereo').asetnsamples(1024).build();
            // NOTE(review): FilterAPI.create is NOT awaited here, unlike
            // Decoder.create/Encoder.create above — confirm it is synchronous,
            // otherwise audioFilter holds a Promise.
            this.audioFilter = FilterAPI.create(filterChain, {
                timeBase: audioStream.timeBase,
            });
            this.audioEncoder = await Encoder.create(FF_ENCODER_AAC, {
                timeBase: { num: 1, den: targetSampleRate },
            });
        }
        // Setup output with callback
        const cb = {
            write: (buffer) => {
                this.options.onChunk(buffer);
                return buffer.length;
            },
        };
        this.output = await MediaOutput.open(cb, {
            format: 'mp4',
            options: {
                // movflags configure fragmented output suitable for MSE playback
                movflags: '+frag_keyframe+separate_moof+default_base_moof+empty_moov',
                // NOTE(review): FFmpeg's mp4 muxer interprets frag_duration in
                // microseconds; fragDuration is documented here in seconds —
                // confirm a conversion happens downstream.
                frag_duration: this.options.fragDuration,
            },
        });
        // Add streams to output (encoder-backed when transcoding, copy otherwise)
        const videoStreamIndex = this.videoEncoder ? this.output.addStream(this.videoEncoder) : this.output.addStream(videoStream);
        const audioStreamIndex = this.audioEncoder ? this.output.addStream(this.audioEncoder) : audioStream ? this.output.addStream(audioStream) : null;
        const hasAudio = audioStreamIndex !== null && audioStream !== undefined;
        // Start processing loop. The env_*/__addDisposableResource scaffolding is
        // the compiled form of `using` declarations: packets/frames are disposed
        // at the end of each iteration even on error or `continue`.
        for await (const packet_1 of this.input.packets()) {
            const env_1 = { stack: [], error: void 0, hasError: false };
            try {
                const packet = __addDisposableResource(env_1, packet_1, false);
                if (!this.streamActive) {
                    break;
                }
                if (packet.streamIndex === videoStream.index) {
                    if (this.videoDecoder && this.videoEncoder) {
                        const env_2 = { stack: [], error: void 0, hasError: false };
                        try {
                            // Transcode video
                            // NOTE(review): one packet → at most one frame → at most one
                            // packet per iteration; any output buffered inside the
                            // decoder/encoder is only drained by flushVideo() at the end.
                            const decodedFrame = __addDisposableResource(env_2, await this.videoDecoder.decode(packet), false);
                            if (!decodedFrame) {
                                continue;
                            }
                            const encodedPacket = __addDisposableResource(env_2, await this.videoEncoder.encode(decodedFrame), false);
                            if (!encodedPacket) {
                                continue;
                            }
                            await this.output.writePacket(encodedPacket, videoStreamIndex);
                        }
                        catch (e_1) {
                            env_2.error = e_1;
                            env_2.hasError = true;
                        }
                        finally {
                            __disposeResources(env_2);
                        }
                    }
                    else {
                        // Stream copy video
                        await this.output.writePacket(packet, videoStreamIndex);
                    }
                }
                else if (hasAudio && packet.streamIndex === audioStream.index) {
                    if (this.audioDecoder && this.audioFilter && this.audioEncoder) {
                        const env_3 = { stack: [], error: void 0, hasError: false };
                        try {
                            // Transcode audio: decode → resample/format filter → AAC encode
                            const decodedFrame = __addDisposableResource(env_3, await this.audioDecoder.decode(packet), false);
                            if (!decodedFrame) {
                                continue;
                            }
                            const filteredFrame = __addDisposableResource(env_3, await this.audioFilter.process(decodedFrame), false);
                            if (!filteredFrame) {
                                continue;
                            }
                            const encodedPacket = __addDisposableResource(env_3, await this.audioEncoder.encode(filteredFrame), false);
                            if (!encodedPacket) {
                                continue;
                            }
                            await this.output.writePacket(encodedPacket, audioStreamIndex);
                        }
                        catch (e_2) {
                            env_3.error = e_2;
                            env_3.hasError = true;
                        }
                        finally {
                            __disposeResources(env_3);
                        }
                    }
                    else {
                        // Stream copy audio
                        await this.output.writePacket(packet, audioStreamIndex);
                    }
                }
            }
            catch (e_3) {
                env_1.error = e_3;
                env_1.hasError = true;
            }
            finally {
                __disposeResources(env_1);
            }
        }
        // Flush pipelines (allSettled: an audio flush failure doesn't abort video flush)
        await Promise.allSettled([this.flushVideo(videoStreamIndex), this.flushAudio(audioStreamIndex)]);
        // Close output - remaining data will be written via callback
        await this.output.close();
    }
    /**
     * Stop streaming gracefully.
     *
     * Signals the streaming loop to exit after the current packet is processed.
     * Does not immediately close resources - use {@link dispose} for cleanup.
     * Safe to call multiple times.
     *
     * @example
     * ```typescript
     * const stream = await FMP4Stream.create('input.mp4', {
     *   supportedCodecs: 'avc1.640029,mp4a.40.2'
     * });
     * const streamPromise = stream.start();
     *
     * // Stop after 10 seconds
     * setTimeout(() => stream.stop(), 10000);
     *
     * await streamPromise; // Resolves when stopped
     * stream.dispose();
     * ```
     */
    stop() {
        this.streamActive = false;
    }
    /**
     * Clean up all resources and close the stream.
     *
     * Stops streaming if active and releases all FFmpeg resources including
     * decoders, encoders, filters, output, and input. Should be called when
     * done with the stream to prevent memory leaks.
     * Safe to call multiple times.
     *
     * @example
     * ```typescript
     * const stream = await FMP4Stream.create('input.mp4', {
     *   supportedCodecs: 'avc1.640029,mp4a.40.2'
     * });
     * await stream.start();
     * stream.dispose();
     * ```
     *
     * @example
     * ```typescript
     * // Using automatic cleanup
     * {
     *   await using stream = await FMP4Stream.create('input.mp4', {
     *     supportedCodecs: 'avc1.640029,mp4a.40.2'
     *   });
     *   await stream.start();
     * } // Automatically disposed
     * ```
     */
    dispose() {
        this.stop();
        // NOTE(review): output.close() is awaited in start() but its result is
        // ignored here — confirm fire-and-forget close is intended for dispose().
        this.output?.close();
        this.videoDecoder?.close();
        this.videoEncoder?.close();
        this.audioDecoder?.close();
        this.audioFilter?.close();
        this.audioEncoder?.close();
        this.hardwareContext?.dispose();
        this.hardwareContext = null;
        this.input.close();
    }
    /**
     * Check if video codec is supported.
     *
     * Matches either the exact FMP4_CODECS string or a bare codec family
     * prefix (e.g. 'avc1') in the client-supplied set.
     *
     * @param codecId - Codec ID
     *
     * @returns True if H.264, H.265, or AV1 is in supported codecs
     *
     * @internal
     */
    isVideoCodecSupported(codecId) {
        if (codecId === AV_CODEC_ID_H264 && (this.supportedCodecs.has(FMP4_CODECS.H264) || this.supportedCodecs.has('avc1'))) {
            return true;
        }
        if (codecId === AV_CODEC_ID_HEVC && (this.supportedCodecs.has(FMP4_CODECS.H265) || this.supportedCodecs.has('hvc1') || this.supportedCodecs.has('hev1'))) {
            return true;
        }
        if (codecId === AV_CODEC_ID_AV1 && (this.supportedCodecs.has(FMP4_CODECS.AV1) || this.supportedCodecs.has('av01'))) {
            return true;
        }
        return false;
    }
    /**
     * Check if audio codec is supported.
     *
     * @param codecId - Codec ID
     *
     * @returns True if AAC, FLAC, or Opus is in supported codecs
     *
     * @internal
     */
    isAudioCodecSupported(codecId) {
        if (codecId === AV_CODEC_ID_AAC && (this.supportedCodecs.has(FMP4_CODECS.AAC) || this.supportedCodecs.has('mp4a'))) {
            return true;
        }
        if (codecId === AV_CODEC_ID_FLAC && this.supportedCodecs.has(FMP4_CODECS.FLAC)) {
            return true;
        }
        if (codecId === AV_CODEC_ID_OPUS && this.supportedCodecs.has(FMP4_CODECS.OPUS)) {
            return true;
        }
        return false;
    }
    /**
     * Flush video encoder pipeline.
     *
     * Drains frames buffered in the decoder through the encoder, then drains
     * packets buffered in the encoder. No-op unless video is being transcoded.
     *
     * @param videoStreamIndex - Output video stream index
     *
     * @internal
     */
    async flushVideo(videoStreamIndex) {
        if (!this.videoDecoder || !this.videoEncoder || !this.output) {
            return;
        }
        for await (const frame_1 of this.videoDecoder.flushFrames()) {
            const env_4 = { stack: [], error: void 0, hasError: false };
            try {
                const frame = __addDisposableResource(env_4, frame_1, false);
                const encodedPacket = __addDisposableResource(env_4, await this.videoEncoder.encode(frame), false);
                if (encodedPacket) {
                    await this.output.writePacket(encodedPacket, videoStreamIndex);
                }
            }
            catch (e_4) {
                env_4.error = e_4;
                env_4.hasError = true;
            }
            finally {
                __disposeResources(env_4);
            }
        }
        for await (const packet_2 of this.videoEncoder.flushPackets()) {
            const env_5 = { stack: [], error: void 0, hasError: false };
            try {
                const packet = __addDisposableResource(env_5, packet_2, false);
                await this.output.writePacket(packet, videoStreamIndex);
            }
            catch (e_5) {
                env_5.error = e_5;
                env_5.hasError = true;
            }
            finally {
                __disposeResources(env_5);
            }
        }
    }
    /**
     * Flush audio encoder pipeline.
     *
     * Drains the decoder → filter → encoder chain in stage order so no
     * buffered frames or packets are lost at end of stream.
     *
     * @param audioStreamIndex - Output audio stream index
     *
     * @internal
     */
    async flushAudio(audioStreamIndex) {
        if (!this.audioDecoder || !this.audioFilter || !this.audioEncoder || audioStreamIndex === null || !this.output) {
            return;
        }
        for await (const frame_2 of this.audioDecoder.flushFrames()) {
            const env_6 = { stack: [], error: void 0, hasError: false };
            try {
                const frame = __addDisposableResource(env_6, frame_2, false);
                const filteredFrame = __addDisposableResource(env_6, await this.audioFilter.process(frame), false);
                if (!filteredFrame) {
                    continue;
                }
                const encodedPacket = __addDisposableResource(env_6, await this.audioEncoder.encode(filteredFrame), false);
                if (encodedPacket) {
                    await this.output.writePacket(encodedPacket, audioStreamIndex);
                }
            }
            catch (e_6) {
                env_6.error = e_6;
                env_6.hasError = true;
            }
            finally {
                __disposeResources(env_6);
            }
        }
        for await (const frame_3 of this.audioFilter.flushFrames()) {
            const env_7 = { stack: [], error: void 0, hasError: false };
            try {
                const frame = __addDisposableResource(env_7, frame_3, false);
                const encodedPacket = __addDisposableResource(env_7, await this.audioEncoder.encode(frame), false);
                if (encodedPacket) {
                    await this.output.writePacket(encodedPacket, audioStreamIndex);
                }
            }
            catch (e_7) {
                env_7.error = e_7;
                env_7.hasError = true;
            }
            finally {
                __disposeResources(env_7);
            }
        }
        for await (const packet_3 of this.audioEncoder.flushPackets()) {
            const env_8 = { stack: [], error: void 0, hasError: false };
            try {
                const packet = __addDisposableResource(env_8, packet_3, false);
                await this.output.writePacket(packet, audioStreamIndex);
            }
            catch (e_8) {
                env_8.error = e_8;
                env_8.hasError = true;
            }
            finally {
                __disposeResources(env_8);
            }
        }
    }
    /**
     * Symbol.dispose implementation for automatic cleanup.
     *
     * Enables `using stream = ...` declarations; delegates to {@link dispose}.
     *
     * @internal
     */
    [Symbol.dispose]() {
        this.dispose();
    }
}
702
+ //# sourceMappingURL=fmp4.js.map