node-av 3.0.6 → 3.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,710 @@
1
/**
 * TypeScript `using`/`await using` downlevel helper (tslib __addDisposableResource).
 *
 * Registers `value` on `env.stack` together with the dispose method resolved
 * from Symbol.asyncDispose/Symbol.dispose, so __disposeResources can later
 * tear resources down in reverse declaration order. Falls back to an ambient
 * helper of the same name when one is already installed on `this`.
 *
 * @param env   Disposal environment: `{ stack, error, hasError }`.
 * @param value Resource being tracked; `null`/`undefined` are tolerated.
 * @param async Whether this came from `await using` (prefers Symbol.asyncDispose).
 * @returns The original `value`, unchanged.
 * @throws TypeError when the value is a primitive, the required dispose
 *         symbol is missing, or the resolved disposer is not a function.
 */
var __addDisposableResource = (this && this.__addDisposableResource) || function (env, value, async) {
    if (value === null || value === void 0) {
        // Nothing to dispose, but an `await using` of a nullish value still owes
        // one await tick during disposal — record a bare async marker for it.
        if (async) env.stack.push({ async: true });
        return value;
    }
    if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected.");
    var dispose;
    var inner;
    if (async) {
        if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined.");
        dispose = value[Symbol.asyncDispose];
    }
    if (dispose === void 0) {
        if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined.");
        dispose = value[Symbol.dispose];
        // A sync disposer used from `await using` must surface throws as rejections.
        if (async) inner = dispose;
    }
    if (typeof dispose !== "function") throw new TypeError("Object not disposable.");
    if (inner) {
        dispose = function () {
            try {
                inner.call(this);
            }
            catch (e) {
                return Promise.reject(e);
            }
        };
    }
    env.stack.push({ value: value, dispose: dispose, async: async });
    return value;
};
23
/**
 * TypeScript `using`/`await using` downlevel helper (tslib __disposeResources).
 *
 * Pops every entry pushed by __addDisposableResource off `env.stack` (LIFO,
 * mirroring scope exit) and invokes its disposer. Errors thrown while
 * disposing are chained via SuppressedError so no failure is lost; a
 * SuppressedError polyfill is injected on runtimes without the global.
 * Returns a Promise when any async disposal (or async marker) was involved,
 * otherwise completes (or throws) synchronously.
 */
var __disposeResources = (this && this.__disposeResources) || (function (SuppressedError) {
    return function (env) {
        // Record a disposal error, wrapping it around any earlier one.
        function fail(e) {
            env.error = env.hasError ? new SuppressedError(e, env.error, "An error was suppressed during disposal.") : e;
            env.hasError = true;
        }
        // `s` is a bit set: bit 1 = a value-less async marker was popped, so the
        // next sync disposal must be deferred one microtask to preserve `await
        // using` timing; bit 2 = at least one real async disposal has run.
        var r, s = 0;
        function next() {
            while (r = env.stack.pop()) {
                try {
                    // Owe an await tick from a marker: re-push this sync entry and
                    // resume on the next microtask.
                    if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next);
                    if (r.dispose) {
                        var result = r.dispose.call(r.value);
                        // Async disposal: wait for it, fold any rejection into the
                        // error chain, then continue with the rest of the stack.
                        if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); });
                    }
                    else s |= 1; // bare `{ async: true }` marker (nullish `await using`)
                }
                catch (e) {
                    fail(e);
                }
            }
            // Still owe a final await tick: settle through a promise; otherwise
            // propagate synchronously.
            if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve();
            if (env.hasError) throw env.error;
        }
        return next();
    };
})(typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) {
    // Minimal SuppressedError stand-in: same `name`/`error`/`suppressed` shape.
    var e = new Error(message);
    return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
});
53
+ import { AV_CODEC_ID_AAC, AV_CODEC_ID_AV1, AV_CODEC_ID_FLAC, AV_CODEC_ID_H264, AV_CODEC_ID_HEVC, AV_CODEC_ID_OPUS, AV_HWDEVICE_TYPE_NONE, AV_SAMPLE_FMT_FLTP, } from '../constants/constants.js';
54
+ import { FF_ENCODER_AAC, FF_ENCODER_LIBX264 } from '../constants/encoders.js';
55
+ import { avGetCodecStringHls } from '../lib/utilities.js';
56
+ import { Decoder } from './decoder.js';
57
+ import { Encoder } from './encoder.js';
58
+ import { FilterPreset } from './filter-presets.js';
59
+ import { FilterAPI } from './filter.js';
60
+ import { HardwareContext } from './hardware.js';
61
+ import { MediaInput } from './media-input.js';
62
+ import { MediaOutput } from './media-output.js';
63
/**
 * Target codec strings for fMP4 streaming.
 *
 * RFC 6381-style MIME codec identifiers. These are both matched against the
 * client-supplied `supportedCodecs` list and used as fallback values when
 * building the string returned by {@link FMP4Stream#getCodecString}.
 */
export const FMP4_CODECS = {
    H264: 'avc1.640029', // AVC High profile, level 4.1 — the video transcode target
    H265: 'hvc1.1.6.L153.B0', // HEVC fallback string when input codec data is unavailable
    AV1: 'av01.0.00M.08', // AV1 fallback string when input codec data is unavailable
    AAC: 'mp4a.40.2', // AAC-LC — the audio transcode target
    FLAC: 'flac',
    OPUS: 'opus',
};
74
/**
 * High-level fMP4 streaming with automatic codec detection and transcoding.
 *
 * Provides fragmented MP4 streaming for clients.
 * Automatically transcodes video to H.264 and audio to AAC if not supported by client.
 * Client sends supported codecs, server transcodes accordingly.
 * Essential component for building adaptive streaming servers.
 *
 * @example
 * ```typescript
 * import { FMP4Stream } from 'node-av/api';
 *
 * // Client sends supported codecs
 * const supportedCodecs = 'avc1.640029,hvc1.1.6.L153.B0,mp4a.40.2,flac';
 *
 * // Create stream with codec negotiation
 * const stream = await FMP4Stream.create('rtsp://camera.local/stream', {
 *   supportedCodecs,
 *   onChunk: (chunk) => ws.send(chunk)
 * });
 *
 * // Start streaming (auto-transcodes if needed)
 * await stream.start();
 * ```
 *
 * @example
 * ```typescript
 * // Stream with hardware acceleration
 * const stream = await FMP4Stream.create('input.mp4', {
 *   supportedCodecs: 'avc1.640029,mp4a.40.2',
 *   hardware: 'auto',
 *   fragDuration: 1,
 *   onChunk: (chunk) => sendToClient(chunk)
 * });
 *
 * await stream.start();
 * stream.stop();
 * stream.dispose();
 * ```
 *
 * @see {@link MediaInput} For input media handling
 * @see {@link MediaOutput} For fMP4 generation
 * @see {@link HardwareContext} For GPU acceleration
 */
export class FMP4Stream {
    input; // MediaInput opened by create()
    options; // Normalized options (all defaults applied in the constructor)
    output = null; // MediaOutput writing fMP4 via the onChunk callback; set in start()
    hardwareContext = null; // GPU context, only when hardware was requested
    videoDecoder = null; // Set only when video must be transcoded to H.264
    videoEncoder = null;
    audioDecoder = null; // Audio transcode pipeline (decode -> filter -> AAC encode)
    audioFilter = null;
    audioEncoder = null;
    streamActive = false; // Loop flag; stop() clears it to end start() gracefully
    supportedCodecs; // Set of trimmed codec strings parsed from options.supportedCodecs
    /**
     * @param input - Media input source
     *
     * @param options - Stream configuration options
     *
     * Use {@link create} factory method
     *
     * @internal
     */
    constructor(input, options) {
        this.input = input;
        this.options = {
            onChunk: options.onChunk ?? (() => { }),
            supportedCodecs: options.supportedCodecs ?? '',
            fragDuration: options.fragDuration ?? 1,
            hardware: options.hardware ?? { deviceType: AV_HWDEVICE_TYPE_NONE },
            inputOptions: options.inputOptions,
        };
        // Parse supported codecs: comma-separated list -> Set of trimmed,
        // non-empty entries (empty string yields an empty Set, i.e. "transcode all").
        this.supportedCodecs = new Set(this.options.supportedCodecs
            .split(',')
            .map((c) => c.trim())
            .filter(Boolean));
    }
    /**
     * Create a fMP4 stream from a media source.
     *
     * Opens the input media, detects video and audio codecs, and prepares
     * transcoding pipelines based on client-supported codecs.
     * Automatically transcodes to H.264 and AAC if necessary.
     *
     * @param inputUrl - Media source URL (RTSP, file path, HTTP, etc.)
     *
     * @param options - Stream configuration options with supported codecs
     *
     * @returns Configured fMP4 stream instance
     *
     * @throws {Error} If no video stream found in input
     *
     * @throws {FFmpegError} If input cannot be opened
     *
     * @example
     * ```typescript
     * // Stream from file with codec negotiation
     * const stream = await FMP4Stream.create('video.mp4', {
     *   supportedCodecs: 'avc1.640029,mp4a.40.2',
     *   onChunk: (chunk) => ws.send(chunk)
     * });
     * ```
     *
     * @example
     * ```typescript
     * // Stream from RTSP with auto hardware acceleration
     * const stream = await FMP4Stream.create('rtsp://camera.local/stream', {
     *   supportedCodecs: 'avc1.640029,hvc1.1.6.L153.B0,mp4a.40.2',
     *   hardware: 'auto',
     *   fragDuration: 0.5
     * });
     * ```
     */
    static async create(inputUrl, options = {}) {
        const isRtsp = inputUrl.toLowerCase().startsWith('rtsp');
        options.inputOptions = options.inputOptions ?? {};
        // Default to low-latency demuxing, and TCP transport for RTSP sources;
        // caller-supplied inputOptions spread last so they win over defaults.
        options.inputOptions = {
            flags: 'low_delay',
            rtsp_transport: isRtsp ? 'tcp' : undefined,
            ...options.inputOptions,
        };
        const input = await MediaInput.open(inputUrl, {
            options: options.inputOptions,
        });
        // A video stream is mandatory; audio is optional throughout the class.
        const videoStream = input.video();
        if (!videoStream) {
            throw new Error('No video stream found in input');
        }
        return new FMP4Stream(input, options);
    }
    /**
     * Get the codec string that will be used by client.
     *
     * Returns the MIME type codec string based on input codecs and transcoding decisions.
     * Call this after creating the stream to know what codec string to use for addSourceBuffer().
     *
     * @returns MIME type codec string (e.g., "avc1.640029,mp4a.40.2")
     *
     * @example
     * ```typescript
     * const stream = await FMP4Stream.create('input.mp4', {
     *   supportedCodecs: 'avc1.640029,mp4a.40.2'
     * });
     *
     * const codecString = stream.getCodecString();
     * console.log(codecString); // "avc1.640029,mp4a.40.2"
     * // Use this for: sourceBuffer = mediaSource.addSourceBuffer(`video/mp4; codecs="${codecString}"`);
     * ```
     */
    getCodecString() {
        // create() guarantees a video stream exists; audio may be absent.
        const videoStream = this.input.video();
        const audioStream = this.input.audio();
        const videoCodecId = videoStream.codecpar.codecId;
        const audioCodecId = audioStream?.codecpar.codecId;
        // Determine video codec string
        let videoCodec;
        const needsVideoTranscode = !this.isVideoCodecSupported(videoCodecId);
        if (needsVideoTranscode) {
            // Transcoding to H.264
            videoCodec = FMP4_CODECS.H264;
        }
        else if (videoCodecId === AV_CODEC_ID_H264) {
            // H.264 - use HLS codec string from input
            const hlsCodec = avGetCodecStringHls(videoStream.codecpar);
            videoCodec = hlsCodec ?? FMP4_CODECS.H264;
        }
        else if (videoCodecId === AV_CODEC_ID_HEVC) {
            // H.265 - use HLS codec string from input
            const hlsCodec = avGetCodecStringHls(videoStream.codecpar);
            videoCodec = hlsCodec ?? FMP4_CODECS.H265;
        }
        else if (videoCodecId === AV_CODEC_ID_AV1) {
            // AV1 - use HLS codec string from input
            const hlsCodec = avGetCodecStringHls(videoStream.codecpar);
            videoCodec = hlsCodec ?? FMP4_CODECS.AV1;
        }
        else {
            // Fallback to H.264 (should not happen as we transcode unsupported codecs)
            videoCodec = FMP4_CODECS.H264;
        }
        // Determine audio codec string
        let audioCodec = null;
        if (audioCodecId) {
            const needsAudioTranscode = !this.isAudioCodecSupported(audioCodecId);
            if (needsAudioTranscode) {
                // Transcoding to AAC
                audioCodec = FMP4_CODECS.AAC;
            }
            else if (audioCodecId === AV_CODEC_ID_AAC) {
                // AAC - use fixed codec string
                audioCodec = FMP4_CODECS.AAC;
            }
            else if (audioCodecId === AV_CODEC_ID_FLAC) {
                // FLAC
                audioCodec = FMP4_CODECS.FLAC;
            }
            else if (audioCodecId === AV_CODEC_ID_OPUS) {
                // Opus
                audioCodec = FMP4_CODECS.OPUS;
            }
            else {
                // Fallback to AAC (should not happen as we transcode unsupported codecs)
                audioCodec = FMP4_CODECS.AAC;
            }
        }
        // Combine video and audio codec strings
        return audioCodec ? `${videoCodec},${audioCodec}` : videoCodec;
    }
    /**
     * Get the resolution of the input video stream.
     *
     * @returns Object with width and height properties
     *
     * @example
     * ```typescript
     * const stream = await FMP4Stream.create('input.mp4', {
     *   supportedCodecs: 'avc1.640029,mp4a.40.2'
     * });
     *
     * const resolution = stream.getResolution();
     * console.log(`Width: ${resolution.width}, Height: ${resolution.height}`);
     * ```
     */
    getResolution() {
        const videoStream = this.input.video();
        return {
            width: videoStream.codecpar.width,
            height: videoStream.codecpar.height,
        };
    }
    /**
     * Start streaming media to fMP4 chunks.
     *
     * Begins the media processing pipeline, reading packets from input,
     * transcoding based on supported codecs, and generating fMP4 chunks.
     * Video transcodes to H.264 if H.264/H.265 not supported.
     * Audio transcodes to AAC if AAC/FLAC/Opus not supported.
     * This method blocks until streaming completes or {@link stop} is called.
     *
     * @returns Promise that resolves when streaming completes
     *
     * @throws {FFmpegError} If transcoding or muxing fails
     *
     * @example
     * ```typescript
     * const stream = await FMP4Stream.create('input.mp4', {
     *   supportedCodecs: 'avc1.640029,mp4a.40.2',
     *   onChunk: (chunk) => sendToClient(chunk)
     * });
     *
     * // Start streaming (blocks until complete)
     * await stream.start();
     * ```
     *
     * @example
     * ```typescript
     * // Non-blocking start with background promise
     * const stream = await FMP4Stream.create('input.mp4', {
     *   supportedCodecs: 'avc1.640029,mp4a.40.2'
     * });
     * const streamPromise = stream.start();
     *
     * // Later: stop streaming
     * stream.stop();
     * await streamPromise;
     * ```
     */
    async start() {
        // Idempotent: a second concurrent start() is a no-op.
        if (this.streamActive) {
            return;
        }
        this.streamActive = true;
        const videoStream = this.input.video();
        const audioStream = this.input.audio();
        // Check if we need hardware acceleration
        if (this.options.hardware === 'auto') {
            this.hardwareContext = HardwareContext.auto();
        }
        else if (this.options.hardware.deviceType !== AV_HWDEVICE_TYPE_NONE) {
            this.hardwareContext = HardwareContext.create(this.options.hardware.deviceType, this.options.hardware.device, this.options.hardware.options);
        }
        // Check if video needs transcoding
        const needsVideoTranscode = !this.isVideoCodecSupported(videoStream.codecpar.codecId);
        if (needsVideoTranscode) {
            // Transcode to H.264. exitOnError: false keeps the pipeline running
            // past corrupt packets (common on live RTSP feeds).
            this.videoDecoder = await Decoder.create(videoStream, {
                hardware: this.hardwareContext ?? undefined,
                exitOnError: false,
            });
            // maxBFrames: 0 avoids frame reordering, keeping latency low for streaming.
            this.videoEncoder = await Encoder.create(FF_ENCODER_LIBX264, {
                timeBase: videoStream.timeBase,
                frameRate: videoStream.avgFrameRate,
                maxBFrames: 0,
            });
        }
        // Check if audio needs transcoding
        const needsAudioTranscode = audioStream && !this.isAudioCodecSupported(audioStream.codecpar.codecId);
        if (needsAudioTranscode) {
            // Transcode to AAC
            this.audioDecoder = await Decoder.create(audioStream, {
                exitOnError: false,
            });
            // Resample to 48 kHz stereo float planar and regroup into 1024-sample
            // frames, the frame size the AAC encoder expects.
            const targetSampleRate = 48000;
            const filterChain = FilterPreset.chain().aformat(AV_SAMPLE_FMT_FLTP, targetSampleRate, 'stereo').asetnsamples(1024).build();
            // NOTE(review): unlike Decoder/Encoder.create, this result is not
            // awaited — confirm FilterAPI.create is synchronous in this version.
            this.audioFilter = FilterAPI.create(filterChain, {
                timeBase: audioStream.timeBase,
            });
            this.audioEncoder = await Encoder.create(FF_ENCODER_AAC, {
                timeBase: { num: 1, den: targetSampleRate },
            });
        }
        // Setup output with callback: every muxed fMP4 fragment is handed to
        // the consumer through options.onChunk.
        const cb = {
            write: (buffer) => {
                this.options.onChunk(buffer);
                return buffer.length;
            },
        };
        // NOTE(review): FFmpeg's mp4 muxer interprets frag_duration in
        // microseconds — confirm fragDuration (documented in seconds) is
        // converted by MediaOutput, otherwise fragments cut on every keyframe only.
        this.output = await MediaOutput.open(cb, {
            format: 'mp4',
            options: {
                movflags: '+frag_keyframe+separate_moof+default_base_moof+empty_moov',
                frag_duration: this.options.fragDuration,
            },
        });
        // Add streams to output: encoder-backed when transcoding, otherwise
        // stream-copied from the input stream parameters.
        const videoStreamIndex = this.videoEncoder ? this.output.addStream(this.videoEncoder) : this.output.addStream(videoStream);
        const audioStreamIndex = this.audioEncoder ? this.output.addStream(this.audioEncoder) : audioStream ? this.output.addStream(audioStream) : null;
        const hasAudio = audioStreamIndex !== null && audioStream !== undefined;
        // Start processing loop. The env_*/__addDisposableResource scaffolding is
        // tsc's downlevel emit of `using` declarations: each packet/frame is
        // disposed (freed) when its block exits, on success or error.
        for await (const packet_1 of this.input.packets()) {
            const env_1 = { stack: [], error: void 0, hasError: false };
            try {
                const packet = __addDisposableResource(env_1, packet_1, false);
                if (!this.streamActive) {
                    break;
                }
                if (packet.streamIndex === videoStream.index) {
                    if (this.videoDecoder && this.videoEncoder) {
                        const env_2 = { stack: [], error: void 0, hasError: false };
                        try {
                            // Transcode video. decode()/encode() return null while
                            // their internal pipelines are still buffering.
                            const decodedFrame = __addDisposableResource(env_2, await this.videoDecoder.decode(packet), false);
                            if (!decodedFrame) {
                                continue;
                            }
                            const encodedPacket = __addDisposableResource(env_2, await this.videoEncoder.encode(decodedFrame), false);
                            if (!encodedPacket) {
                                continue;
                            }
                            await this.output.writePacket(encodedPacket, videoStreamIndex);
                        }
                        catch (e_1) {
                            env_1.error = e_1;
                            env_2.error = e_1;
                            env_2.hasError = true;
                        }
                        finally {
                            __disposeResources(env_2);
                        }
                    }
                    else {
                        // Stream copy video
                        await this.output.writePacket(packet, videoStreamIndex);
                    }
                }
                else if (hasAudio && packet.streamIndex === audioStream.index) {
                    if (this.audioDecoder && this.audioFilter && this.audioEncoder) {
                        const env_3 = { stack: [], error: void 0, hasError: false };
                        try {
                            // Transcode audio: decode -> resample/reframe -> AAC encode.
                            const decodedFrame = __addDisposableResource(env_3, await this.audioDecoder.decode(packet), false);
                            if (!decodedFrame) {
                                continue;
                            }
                            const filteredFrame = __addDisposableResource(env_3, await this.audioFilter.process(decodedFrame), false);
                            if (!filteredFrame) {
                                continue;
                            }
                            const encodedPacket = __addDisposableResource(env_3, await this.audioEncoder.encode(filteredFrame), false);
                            if (!encodedPacket) {
                                continue;
                            }
                            await this.output.writePacket(encodedPacket, audioStreamIndex);
                        }
                        catch (e_2) {
                            env_3.error = e_2;
                            env_3.hasError = true;
                        }
                        finally {
                            __disposeResources(env_3);
                        }
                    }
                    else {
                        // Stream copy audio
                        await this.output.writePacket(packet, audioStreamIndex);
                    }
                }
            }
            catch (e_3) {
                env_1.error = e_3;
                env_1.hasError = true;
            }
            finally {
                __disposeResources(env_1);
            }
        }
        // Flush pipelines; allSettled so one pipeline's flush failure does not
        // abort the other's.
        await Promise.allSettled([this.flushVideo(videoStreamIndex), this.flushAudio(audioStreamIndex)]);
        // Close output - remaining data will be written via callback
        await this.output.close();
    }
    /**
     * Stop streaming gracefully.
     *
     * Signals the streaming loop to exit after the current packet is processed.
     * Does not immediately close resources - use {@link dispose} for cleanup.
     * Safe to call multiple times.
     *
     * @example
     * ```typescript
     * const stream = await FMP4Stream.create('input.mp4', {
     *   supportedCodecs: 'avc1.640029,mp4a.40.2'
     * });
     * const streamPromise = stream.start();
     *
     * // Stop after 10 seconds
     * setTimeout(() => stream.stop(), 10000);
     *
     * await streamPromise; // Resolves when stopped
     * stream.dispose();
     * ```
     */
    stop() {
        this.streamActive = false;
    }
    /**
     * Clean up all resources and close the stream.
     *
     * Stops streaming if active and releases all FFmpeg resources including
     * decoders, encoders, filters, output, and input. Should be called when
     * done with the stream to prevent memory leaks.
     * Safe to call multiple times.
     *
     * @example
     * ```typescript
     * const stream = await FMP4Stream.create('input.mp4', {
     *   supportedCodecs: 'avc1.640029,mp4a.40.2'
     * });
     * await stream.start();
     * stream.dispose();
     * ```
     *
     * @example
     * ```typescript
     * // Using automatic cleanup
     * {
     *   await using stream = await FMP4Stream.create('input.mp4', {
     *     supportedCodecs: 'avc1.640029,mp4a.40.2'
     *   });
     *   await stream.start();
     * } // Automatically disposed
     * ```
     */
    dispose() {
        this.stop();
        // NOTE(review): start() awaits output.close() but here it is called
        // without await — confirm close() is safe to fire-and-forget when
        // disposing (any returned promise is intentionally not awaited).
        this.output?.close();
        this.videoDecoder?.close();
        this.videoEncoder?.close();
        this.audioDecoder?.close();
        this.audioFilter?.close();
        this.audioEncoder?.close();
        this.hardwareContext?.dispose();
        this.hardwareContext = null;
        this.input.close();
    }
    /**
     * Check if video codec is supported.
     *
     * Matches either the exact default codec string or a bare codec-family
     * prefix (e.g. 'avc1') from the client's supported-codecs set.
     *
     * @param codecId - Codec ID
     *
     * @returns True if H.264, H.265, or AV1 is in supported codecs
     *
     * @internal
     */
    isVideoCodecSupported(codecId) {
        if (codecId === AV_CODEC_ID_H264 && (this.supportedCodecs.has(FMP4_CODECS.H264) || this.supportedCodecs.has('avc1'))) {
            return true;
        }
        if (codecId === AV_CODEC_ID_HEVC && (this.supportedCodecs.has(FMP4_CODECS.H265) || this.supportedCodecs.has('hvc1') || this.supportedCodecs.has('hev1'))) {
            return true;
        }
        if (codecId === AV_CODEC_ID_AV1 && (this.supportedCodecs.has(FMP4_CODECS.AV1) || this.supportedCodecs.has('av01'))) {
            return true;
        }
        return false;
    }
    /**
     * Check if audio codec is supported.
     *
     * @param codecId - Codec ID
     *
     * @returns True if AAC, FLAC, or Opus is in supported codecs
     *
     * @internal
     */
    isAudioCodecSupported(codecId) {
        if (codecId === AV_CODEC_ID_AAC && (this.supportedCodecs.has(FMP4_CODECS.AAC) || this.supportedCodecs.has('mp4a'))) {
            return true;
        }
        if (codecId === AV_CODEC_ID_FLAC && this.supportedCodecs.has(FMP4_CODECS.FLAC)) {
            return true;
        }
        if (codecId === AV_CODEC_ID_OPUS && this.supportedCodecs.has(FMP4_CODECS.OPUS)) {
            return true;
        }
        return false;
    }
    /**
     * Flush video encoder pipeline.
     *
     * Drains buffered frames from the decoder through the encoder, then drains
     * the encoder itself. No-op when video is stream-copied (no transcode pipeline).
     *
     * @param videoStreamIndex - Output video stream index
     *
     * @internal
     */
    async flushVideo(videoStreamIndex) {
        if (!this.videoDecoder || !this.videoEncoder || !this.output) {
            return;
        }
        // Frames still buffered in the decoder.
        for await (const frame_1 of this.videoDecoder.flushFrames()) {
            const env_4 = { stack: [], error: void 0, hasError: false };
            try {
                const frame = __addDisposableResource(env_4, frame_1, false);
                const encodedPacket = __addDisposableResource(env_4, await this.videoEncoder.encode(frame), false);
                if (encodedPacket) {
                    await this.output.writePacket(encodedPacket, videoStreamIndex);
                }
            }
            catch (e_4) {
                env_4.error = e_4;
                env_4.hasError = true;
            }
            finally {
                __disposeResources(env_4);
            }
        }
        // Packets still buffered in the encoder.
        for await (const packet_2 of this.videoEncoder.flushPackets()) {
            const env_5 = { stack: [], error: void 0, hasError: false };
            try {
                const packet = __addDisposableResource(env_5, packet_2, false);
                await this.output.writePacket(packet, videoStreamIndex);
            }
            catch (e_5) {
                env_5.error = e_5;
                env_5.hasError = true;
            }
            finally {
                __disposeResources(env_5);
            }
        }
    }
    /**
     * Flush audio encoder pipeline.
     *
     * Drains decoder, then filter, then encoder, in pipeline order.
     * No-op when audio is absent or stream-copied.
     *
     * @param audioStreamIndex - Output audio stream index
     *
     * @internal
     */
    async flushAudio(audioStreamIndex) {
        if (!this.audioDecoder || !this.audioFilter || !this.audioEncoder || audioStreamIndex === null || !this.output) {
            return;
        }
        // Frames still buffered in the decoder: run them through filter + encoder.
        for await (const frame_2 of this.audioDecoder.flushFrames()) {
            const env_6 = { stack: [], error: void 0, hasError: false };
            try {
                const frame = __addDisposableResource(env_6, frame_2, false);
                const filteredFrame = __addDisposableResource(env_6, await this.audioFilter.process(frame), false);
                if (!filteredFrame) {
                    continue;
                }
                const encodedPacket = __addDisposableResource(env_6, await this.audioEncoder.encode(filteredFrame), false);
                if (encodedPacket) {
                    await this.output.writePacket(encodedPacket, audioStreamIndex);
                }
            }
            catch (e_6) {
                env_6.error = e_6;
                env_6.hasError = true;
            }
            finally {
                __disposeResources(env_6);
            }
        }
        // Frames still buffered in the filter graph.
        for await (const frame_3 of this.audioFilter.flushFrames()) {
            const env_7 = { stack: [], error: void 0, hasError: false };
            try {
                const frame = __addDisposableResource(env_7, frame_3, false);
                const encodedPacket = __addDisposableResource(env_7, await this.audioEncoder.encode(frame), false);
                if (encodedPacket) {
                    await this.output.writePacket(encodedPacket, audioStreamIndex);
                }
            }
            catch (e_7) {
                env_7.error = e_7;
                env_7.hasError = true;
            }
            finally {
                __disposeResources(env_7);
            }
        }
        // Packets still buffered in the encoder.
        for await (const packet_3 of this.audioEncoder.flushPackets()) {
            const env_8 = { stack: [], error: void 0, hasError: false };
            try {
                const packet = __addDisposableResource(env_8, packet_3, false);
                await this.output.writePacket(packet, audioStreamIndex);
            }
            catch (e_8) {
                env_8.error = e_8;
                env_8.hasError = true;
            }
            finally {
                __disposeResources(env_8);
            }
        }
    }
    /**
     * Symbol.dispose implementation for automatic cleanup.
     *
     * Lets the class be used with `using` declarations; delegates to {@link dispose}.
     *
     * @internal
     */
    [Symbol.dispose]() {
        this.dispose();
    }
}
710
+ //# sourceMappingURL=fmp4.js.map