node-av 3.0.6 → 3.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
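
As a quick orientation: this release adds a new `webrtc.js` module exporting two classes, `WebRTCStream` (library-agnostic RTP streaming) and `WebRTCSession` (full werift-based session management). Below is a minimal usage sketch assembled from the JSDoc examples contained in the diff; `sendToClient` and `clientOffer` are placeholders for your signaling layer:

```typescript
import { WebRTCSession } from 'node-av/api';

// Open the source; codecs are detected and transcoding pipelines are
// prepared automatically for non-WebRTC-compatible formats.
const session = await WebRTCSession.create('rtsp://camera.local/stream', {
  mtu: 1200,
  hardware: 'auto',
  iceServers: [{ urls: 'stun:stun.l.google.com:19302' }],
});

// Forward local ICE candidates to the remote peer over signaling.
session.onIceCandidate = (candidate) => {
  sendToClient({ type: 'candidate', value: candidate });
};

// Negotiate SDP, then stream until the input ends or stop() is called.
const answer = await session.setOffer(clientOffer);
sendToClient({ type: 'answer', value: answer });
await session.start();
session.dispose();
```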
@@ -0,0 +1,1119 @@
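+ // TypeScript compiler helpers emitted to downlevel `using` declarations (explicit
+ // resource management): disposables are tracked on env.stack and disposed in reverse order.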
+ var __addDisposableResource = (this && this.__addDisposableResource) || function (env, value, async) {
+     if (value !== null && value !== void 0) {
+         if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected.");
+         var dispose, inner;
+         if (async) {
+             if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined.");
+             dispose = value[Symbol.asyncDispose];
+         }
+         if (dispose === void 0) {
+             if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined.");
+             dispose = value[Symbol.dispose];
+             if (async) inner = dispose;
+         }
+         if (typeof dispose !== "function") throw new TypeError("Object not disposable.");
+         if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } };
+         env.stack.push({ value: value, dispose: dispose, async: async });
+     }
+     else if (async) {
+         env.stack.push({ async: true });
+     }
+     return value;
+ };
+ var __disposeResources = (this && this.__disposeResources) || (function (SuppressedError) {
+     return function (env) {
+         function fail(e) {
+             env.error = env.hasError ? new SuppressedError(e, env.error, "An error was suppressed during disposal.") : e;
+             env.hasError = true;
+         }
+         var r, s = 0;
+         function next() {
+             while (r = env.stack.pop()) {
+                 try {
+                     if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next);
+                     if (r.dispose) {
+                         var result = r.dispose.call(r.value);
+                         if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); });
+                     }
+                     else s |= 1;
+                 }
+                 catch (e) {
+                     fail(e);
+                 }
+             }
+             if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve();
+             if (env.hasError) throw env.error;
+         }
+         return next();
+     };
+ })(typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) {
+     var e = new Error(message);
+     return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
+ });
+ import { MediaStreamTrack, RTCIceCandidate, RTCPeerConnection, RTCRtpCodecParameters, RTCSessionDescription, RtpPacket } from 'werift';
+ import { AV_CODEC_ID_AV1, AV_CODEC_ID_H264, AV_CODEC_ID_HEVC, AV_CODEC_ID_OPUS, AV_CODEC_ID_PCM_ALAW, AV_CODEC_ID_PCM_MULAW, AV_CODEC_ID_VP8, AV_CODEC_ID_VP9, AV_HWDEVICE_TYPE_NONE, AV_SAMPLE_FMT_S16, } from '../constants/constants.js';
+ import { FF_ENCODER_LIBOPUS, FF_ENCODER_LIBX264 } from '../constants/encoders.js';
+ import { Decoder } from './decoder.js';
+ import { Encoder } from './encoder.js';
+ import { FilterPreset } from './filter-presets.js';
+ import { FilterAPI } from './filter.js';
+ import { HardwareContext } from './hardware.js';
+ import { MediaInput } from './media-input.js';
+ import { MediaOutput } from './media-output.js';
+ /**
+  * High-level WebRTC streaming with automatic codec detection and transcoding.
+  *
+  * Provides library-agnostic RTP streaming for WebRTC applications.
+  * Automatically detects input codecs and transcodes non-WebRTC-compatible formats.
+  * Handles video (H.264, H.265, VP8, VP9, AV1) and audio (Opus, PCMA, PCMU) codecs.
+  * Supports hardware acceleration for video transcoding.
+  * Essential component for building WebRTC streaming servers without direct WebRTC library coupling.
+  *
+  * @example
+  * ```typescript
+  * import { WebRTCStream } from 'node-av/api';
+  *
+  * // Create stream with RTP packet callbacks
+  * const stream = await WebRTCStream.create('rtsp://camera.local/stream', {
+  *   mtu: 1200,
+  *   hardware: 'auto',
+  *   onVideoPacket: (rtp) => {
+  *     // Send RTP packet to WebRTC peer connection
+  *     videoTrack.writeRtp(rtp);
+  *   },
+  *   onAudioPacket: (rtp) => {
+  *     audioTrack.writeRtp(rtp);
+  *   }
+  * });
+  *
+  * // Get detected codecs for SDP negotiation
+  * const codecs = stream.getCodecs();
+  * console.log('Video:', codecs.video.mimeType);
+  * console.log('Audio:', codecs.audio?.mimeType);
+  *
+  * // Start streaming
+  * await stream.start();
+  * ```
+  *
+  * @example
+  * ```typescript
+  * // Stream with hardware acceleration
+  * import { AV_HWDEVICE_TYPE_CUDA } from 'node-av/constants';
+  *
+  * const stream = await WebRTCStream.create('video.mp4', {
+  *   hardware: {
+  *     deviceType: AV_HWDEVICE_TYPE_CUDA,
+  *     device: '/dev/nvidia0'
+  *   },
+  *   onVideoPacket: (rtp) => sendToWebRTC(rtp)
+  * });
+  *
+  * await stream.start();
+  * stream.stop();
+  * stream.dispose();
+  * ```
+  *
+  * @see {@link WebRTCSession} For complete WebRTC session management with werift
+  * @see {@link MediaInput} For input media handling
+  * @see {@link HardwareContext} For GPU acceleration
+  */
+ export class WebRTCStream {
+     input;
+     codecInfo;
+     options;
+     videoOutput = null;
+     audioOutput = null;
+     hardwareContext = null;
+     videoDecoder = null;
+     videoEncoder = null;
+     audioDecoder = null;
+     audioFilter = null;
+     audioEncoder = null;
+     streamActive = false;
+     /**
+      * @param input - Media input source
+      *
+      * @param options - Stream configuration options
+      *
+      * Use the {@link create} factory method
+      *
+      * @internal
+      */
+     constructor(input, options) {
+         this.input = input;
+         const videoStream = input.video();
+         const audioStream = input.audio();
+         const videoCodecId = videoStream.codecpar.codecId;
+         const audioCodecId = audioStream?.codecpar.codecId ?? null;
+         const videoConfig = this.getVideoCodecConfig(videoCodecId) ?? this.getVideoCodecConfig(AV_CODEC_ID_H264); // We transcode unsupported codecs to H264
+         this.codecInfo = {
+             video: {
+                 codecId: videoCodecId,
+                 ...videoConfig,
+             },
+         };
+         if (audioCodecId !== null) {
+             const audioConfig = this.getAudioCodecConfig(audioCodecId) ?? this.getAudioCodecConfig(AV_CODEC_ID_OPUS); // We transcode unsupported codecs to OPUS
+             this.codecInfo.audio = {
+                 codecId: audioCodecId,
+                 ...audioConfig,
+             };
+         }
+         this.options = {
+             onVideoPacket: options.onVideoPacket ?? (() => { }),
+             onAudioPacket: options.onAudioPacket ?? (() => { }),
+             mtu: options.mtu ?? 1200,
+             hardware: options.hardware ?? { deviceType: AV_HWDEVICE_TYPE_NONE },
+         };
+     }
+     /**
+      * Create a WebRTC stream from a media source.
+      *
+      * Opens the input media, detects video and audio codecs, and prepares
+      * transcoding pipelines for non-WebRTC-compatible formats.
+      * Automatically configures H.264 encoding for unsupported video codecs
+      * and Opus encoding for unsupported audio codecs.
+      *
+      * @param inputUrl - Media source URL (RTSP, file path, HTTP, etc.)
+      *
+      * @param options - Stream configuration options
+      *
+      * @returns Configured WebRTC stream instance
+      *
+      * @throws {Error} If no video stream found in input
+      *
+      * @throws {FFmpegError} If input cannot be opened
+      *
+      * @example
+      * ```typescript
+      * // Stream from RTSP camera
+      * const stream = await WebRTCStream.create('rtsp://camera.local/stream', {
+      *   mtu: 1200,
+      *   onVideoPacket: (rtp) => videoTrack.writeRtp(rtp),
+      *   onAudioPacket: (rtp) => audioTrack.writeRtp(rtp)
+      * });
+      * ```
+      *
+      * @example
+      * ```typescript
+      * // Stream file with auto hardware acceleration
+      * const stream = await WebRTCStream.create('video.mp4', {
+      *   hardware: 'auto'
+      * });
+      * ```
+      */
+     static async create(inputUrl, options = {}) {
+         const isRtsp = inputUrl.toLowerCase().startsWith('rtsp://');
+         const input = await MediaInput.open(inputUrl, {
+             options: isRtsp ? { rtsp_transport: 'tcp' } : undefined,
+         });
+         const videoStream = input.video();
+         if (!videoStream) {
+             throw new Error('No video stream found in input');
+         }
+         return new WebRTCStream(input, options);
+     }
+     /**
+      * Get detected codec information for SDP negotiation.
+      *
+      * Returns RTP codec parameters and FFmpeg codec IDs for video and audio.
+      * Use this information to configure WebRTC peer connections with matching codecs.
+      *
+      * @returns Codec configuration for video and audio streams
+      *
+      * @example
+      * ```typescript
+      * const stream = await WebRTCStream.create('input.mp4');
+      * const codecs = stream.getCodecs();
+      *
+      * console.log('Video codec:', codecs.video.mimeType);
+      * console.log('Audio codec:', codecs.audio?.mimeType);
+      * ```
+      */
+     getCodecs() {
+         return this.codecInfo;
+     }
+     /**
+      * Start streaming media as RTP packets.
+      *
+      * Begins the media processing pipeline, reading packets from input,
+      * transcoding if necessary, and invoking RTP packet callbacks.
+      * Automatically handles video and audio streams in parallel.
+      * Flushes all buffers at the end of stream.
+      * This method blocks until streaming completes or {@link stop} is called.
+      *
+      * @returns Promise that resolves when streaming completes
+      *
+      * @throws {FFmpegError} If transcoding or muxing fails
+      *
+      * @example
+      * ```typescript
+      * const stream = await WebRTCStream.create('rtsp://camera.local/stream', {
+      *   onVideoPacket: (rtp) => sendRtp(rtp)
+      * });
+      *
+      * // Start streaming (blocks until complete or stopped)
+      * await stream.start();
+      * ```
+      *
+      * @example
+      * ```typescript
+      * // Non-blocking start with background promise
+      * const stream = await WebRTCStream.create('input.mp4');
+      * const streamPromise = stream.start();
+      *
+      * // Later: stop streaming
+      * stream.stop();
+      * await streamPromise;
+      * ```
+      */
+     async start() {
+         if (this.streamActive) {
+             return;
+         }
+         this.streamActive = true;
+         const videoStream = this.input.video();
+         const audioStream = this.input.audio();
+         // Setup video transcoding if needed
+         if (!this.isVideoCodecSupported(videoStream.codecpar.codecId)) {
+             if (this.options.hardware === 'auto') {
+                 this.hardwareContext = HardwareContext.auto();
+             }
+             else if (this.options.hardware.deviceType !== AV_HWDEVICE_TYPE_NONE) {
+                 this.hardwareContext = HardwareContext.create(this.options.hardware.deviceType, this.options.hardware.device, this.options.hardware.options);
+             }
+             this.videoDecoder = await Decoder.create(videoStream, {
+                 exitOnError: false,
+                 hardware: this.hardwareContext,
+             });
+             const encoderCodec = this.hardwareContext?.getEncoderCodec('h264') ?? FF_ENCODER_LIBX264;
+             const encoderOptions = {};
+             if (encoderCodec === FF_ENCODER_LIBX264) {
+                 encoderOptions.preset = 'ultrafast';
+                 encoderOptions.tune = 'zerolatency';
+             }
+             this.videoEncoder = await Encoder.create(encoderCodec, {
+                 timeBase: videoStream.timeBase,
+                 frameRate: videoStream.avgFrameRate,
+                 maxBFrames: 0,
+                 options: encoderOptions,
+             });
+         }
+         // Setup video output
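+         // Custom IO: the RTP muxer hands each serialized packet (capped at mtu bytes via pkt_size) to the write callback, which parses it with werift's RtpPacket.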
+         this.videoOutput = await MediaOutput.open({
+             write: (buffer) => {
+                 this.options.onVideoPacket(RtpPacket.deSerialize(buffer));
+                 return buffer.length;
+             },
+         }, {
+             format: 'rtp',
+             bufferSize: this.options.mtu,
+             options: {
+                 pkt_size: this.options.mtu,
+             },
+         });
+         const videoStreamIndex = this.videoEncoder ? this.videoOutput.addStream(this.videoEncoder) : this.videoOutput.addStream(videoStream);
+         // Setup audio if available
+         let audioStreamIndex = null;
+         if (audioStream) {
+             if (!this.isAudioCodecSupported(audioStream.codecpar.codecId)) {
+                 this.audioDecoder = await Decoder.create(audioStream, {
+                     exitOnError: false,
+                 });
+                 const targetSampleRate = 48000;
+                 const filterChain = FilterPreset.chain().aformat(AV_SAMPLE_FMT_S16, targetSampleRate, 'stereo').asetnsamples(960).build();
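+                 // 960 samples at 48 kHz = 20 ms per frame, matching the Opus encoder's frame_duration below.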
+                 this.audioFilter = FilterAPI.create(filterChain, {
+                     timeBase: audioStream.timeBase,
+                 });
+                 this.audioEncoder = await Encoder.create(FF_ENCODER_LIBOPUS, {
+                     timeBase: { num: 1, den: targetSampleRate },
+                     options: {
+                         application: 'lowdelay',
+                         frame_duration: 20,
+                     },
+                 });
+             }
+             this.audioOutput = await MediaOutput.open({
+                 write: (buffer) => {
+                     this.options.onAudioPacket(RtpPacket.deSerialize(buffer));
+                     return buffer.length;
+                 },
+             }, {
+                 format: 'rtp',
+                 bufferSize: this.options.mtu,
+                 options: {
+                     pkt_size: this.options.mtu,
+                 },
+             });
+             audioStreamIndex = this.audioEncoder ? this.audioOutput.addStream(this.audioEncoder) : this.audioOutput.addStream(audioStream);
+         }
+         const hasAudio = audioStreamIndex !== null && this.audioOutput !== null && audioStream !== undefined;
+         // Start processing loop
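+         // Each env_N/stack block below is the compiled form of a TypeScript "using" declaration (see the helpers at the top of the file).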
+         for await (const packet_1 of this.input.packets()) {
+             const env_1 = { stack: [], error: void 0, hasError: false };
+             try {
+                 const packet = __addDisposableResource(env_1, packet_1, false);
+                 if (!this.streamActive) {
+                     break;
+                 }
+                 if (packet.streamIndex === videoStream.index) {
+                     if (this.videoDecoder === null || this.videoEncoder === null) {
+                         await this.videoOutput.writePacket(packet, videoStreamIndex);
+                     }
+                     else {
+                         const env_2 = { stack: [], error: void 0, hasError: false };
+                         try {
+                             const decodedFrame = __addDisposableResource(env_2, await this.videoDecoder.decode(packet), false);
+                             if (!decodedFrame) {
+                                 continue;
+                             }
+                             const encodedPacket = __addDisposableResource(env_2, await this.videoEncoder.encode(decodedFrame), false);
+                             if (!encodedPacket) {
+                                 continue;
+                             }
+                             await this.videoOutput.writePacket(encodedPacket, videoStreamIndex);
+                         }
+                         catch (e_1) {
+                             env_2.error = e_1;
+                             env_2.hasError = true;
+                         }
+                         finally {
+                             __disposeResources(env_2);
+                         }
+                     }
+                 }
+                 else if (hasAudio && packet.streamIndex === audioStream.index) {
+                     if (this.audioDecoder === null || this.audioFilter === null || this.audioEncoder === null) {
+                         await this.audioOutput?.writePacket(packet, audioStreamIndex);
+                         continue;
+                     }
+                     else {
+                         const env_3 = { stack: [], error: void 0, hasError: false };
+                         try {
+                             const decodedFrame = __addDisposableResource(env_3, await this.audioDecoder.decode(packet), false);
+                             if (!decodedFrame) {
+                                 continue;
+                             }
+                             const filteredFrame = __addDisposableResource(env_3, await this.audioFilter.process(decodedFrame), false);
+                             if (!filteredFrame) {
+                                 continue;
+                             }
+                             const encodedPacket = __addDisposableResource(env_3, await this.audioEncoder.encode(filteredFrame), false);
+                             if (!encodedPacket) {
+                                 continue;
+                             }
+                             await this.audioOutput?.writePacket(encodedPacket, audioStreamIndex);
+                         }
+                         catch (e_2) {
+                             env_3.error = e_2;
+                             env_3.hasError = true;
+                         }
+                         finally {
+                             __disposeResources(env_3);
+                         }
+                     }
+                 }
+             }
+             catch (e_3) {
+                 env_1.error = e_3;
+                 env_1.hasError = true;
+             }
+             finally {
+                 __disposeResources(env_1);
+             }
+         }
+         // Flush pipelines
+         await Promise.allSettled([this.flushVideo(videoStreamIndex), this.flushAudio(audioStreamIndex, hasAudio)]);
+     }
+     /**
+      * Stop streaming gracefully.
+      *
+      * Signals the streaming loop to exit after the current packet is processed.
+      * Does not immediately close resources; use {@link dispose} for cleanup.
+      * Safe to call multiple times.
+      *
+      * @example
+      * ```typescript
+      * const stream = await WebRTCStream.create('input.mp4');
+      * const streamPromise = stream.start();
+      *
+      * // Stop after 10 seconds
+      * setTimeout(() => stream.stop(), 10000);
+      *
+      * await streamPromise; // Resolves when stopped
+      * stream.dispose();
+      * ```
+      */
+     stop() {
+         this.streamActive = false;
+     }
+     /**
+      * Clean up all resources and close the stream.
+      *
+      * Stops streaming if active and releases all FFmpeg resources including
+      * decoders, encoders, filters, outputs, and input. Should be called when
+      * done with the stream to prevent memory leaks.
+      * Safe to call multiple times.
+      *
+      * @example
+      * ```typescript
+      * const stream = await WebRTCStream.create('input.mp4');
+      * await stream.start();
+      * stream.dispose();
+      * ```
+      *
+      * @example
+      * ```typescript
+      * // Using automatic cleanup
+      * {
+      *   await using stream = await WebRTCStream.create('input.mp4');
+      *   await stream.start();
+      * } // Automatically disposed
+      * ```
+      */
+     dispose() {
+         if (!this.streamActive) {
+             return;
+         }
+         this.stop();
+         this.videoOutput?.close();
+         this.audioOutput?.close();
+         this.videoDecoder?.close();
+         this.videoEncoder?.close();
+         this.audioDecoder?.close();
+         this.audioFilter?.close();
+         this.audioEncoder?.close();
+         this.input.close();
+     }
+     /**
+      * Check if the given audio codec is compatible with WebRTC.
+      *
+      * @param codecId - The AVCodecID to check
+      *
+      * @returns True if the codec is WebRTC compatible, false otherwise
+      *
+      * @internal
+      */
+     isAudioCodecSupported(codecId) {
+         switch (codecId) {
+             case AV_CODEC_ID_PCM_ALAW:
+             case AV_CODEC_ID_PCM_MULAW:
+             case AV_CODEC_ID_OPUS:
+                 return true;
+             default:
+                 return false;
+         }
+     }
+     /**
+      * Check if the given video codec is compatible with WebRTC.
+      *
+      * @param codecId - The AVCodecID to check
+      *
+      * @returns True if the codec is WebRTC compatible, false otherwise
+      *
+      * @internal
+      */
+     isVideoCodecSupported(codecId) {
+         switch (codecId) {
+             case AV_CODEC_ID_H264:
+             case AV_CODEC_ID_HEVC:
+             case AV_CODEC_ID_VP8:
+             case AV_CODEC_ID_VP9:
+             case AV_CODEC_ID_AV1:
+                 return true;
+             default:
+                 return false;
+         }
+     }
+     /**
+      * Get the audio codec configuration for WebRTC.
+      *
+      * @param codecId - The AVCodecID of the audio codec
+      *
+      * @returns An object containing MIME type, clock rate, and channels, or null if unsupported
+      *
+      * @internal
+      */
+     getAudioCodecConfig(codecId) {
+         switch (codecId) {
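+             // PCMU (0) and PCMA (8) are static RTP payload types (RFC 3551); Opus (111) uses a dynamic payload type.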
+             case AV_CODEC_ID_OPUS:
+                 return {
+                     mimeType: 'audio/opus',
+                     clockRate: 48000,
+                     channels: 2,
+                     payloadType: 111,
+                 };
+             case AV_CODEC_ID_PCM_MULAW:
+                 return {
+                     mimeType: 'audio/PCMU',
+                     clockRate: 8000,
+                     channels: 1,
+                     payloadType: 0,
+                 };
+             case AV_CODEC_ID_PCM_ALAW:
+                 return {
+                     mimeType: 'audio/PCMA',
+                     clockRate: 8000,
+                     channels: 1,
+                     payloadType: 8,
+                 };
+             default:
+                 return null;
+         }
+     }
+     /**
+      * Get the video codec configuration for WebRTC.
+      *
+      * @param codecId - The AVCodecID of the video codec
+      *
+      * @returns An object containing MIME type and clock rate, or null if unsupported
+      *
+      * @internal
+      */
+     getVideoCodecConfig(codecId) {
+         switch (codecId) {
+             case AV_CODEC_ID_H264:
+                 return {
+                     mimeType: 'video/H264',
+                     clockRate: 90000,
+                     payloadType: 102,
+                 };
+             case AV_CODEC_ID_HEVC:
+                 return {
+                     mimeType: 'video/H265',
+                     clockRate: 90000,
+                     payloadType: 103,
+                 };
+             case AV_CODEC_ID_VP8:
+                 return {
+                     mimeType: 'video/VP8',
+                     clockRate: 90000,
+                     payloadType: 96,
+                 };
+             case AV_CODEC_ID_VP9:
+                 return {
+                     mimeType: 'video/VP9',
+                     clockRate: 90000,
+                     payloadType: 98,
+                 };
+             case AV_CODEC_ID_AV1:
+                 return {
+                     mimeType: 'video/AV1',
+                     clockRate: 90000,
+                     payloadType: 35,
+                 };
+             default:
+                 return null;
+         }
+     }
+     /**
+      * Flush video encoder pipeline.
+      *
+      * @param videoStreamIndex - Output video stream index
+      *
+      * @internal
+      */
+     async flushVideo(videoStreamIndex) {
+         if (!this.videoDecoder || !this.videoEncoder || !this.videoOutput) {
+             return;
+         }
+         for await (const frame_1 of this.videoDecoder.flushFrames()) {
+             const env_4 = { stack: [], error: void 0, hasError: false };
+             try {
+                 const frame = __addDisposableResource(env_4, frame_1, false);
+                 const encodedPacket = __addDisposableResource(env_4, await this.videoEncoder.encode(frame), false);
+                 if (encodedPacket) {
+                     await this.videoOutput.writePacket(encodedPacket, videoStreamIndex);
+                 }
+             }
+             catch (e_4) {
+                 env_4.error = e_4;
+                 env_4.hasError = true;
+             }
+             finally {
+                 __disposeResources(env_4);
+             }
+         }
+         for await (const packet_2 of this.videoEncoder.flushPackets()) {
+             const env_5 = { stack: [], error: void 0, hasError: false };
+             try {
+                 const packet = __addDisposableResource(env_5, packet_2, false);
+                 await this.videoOutput.writePacket(packet, videoStreamIndex);
+             }
+             catch (e_5) {
+                 env_5.error = e_5;
+                 env_5.hasError = true;
+             }
+             finally {
+                 __disposeResources(env_5);
+             }
+         }
+     }
+     /**
+      * Flush audio encoder pipeline.
+      *
+      * @param audioStreamIndex - Output audio stream index
+      *
+      * @param hasAudio - Whether audio stream exists
+      *
+      * @internal
+      */
+     async flushAudio(audioStreamIndex, hasAudio) {
+         if (!this.audioDecoder || !this.audioFilter || !this.audioEncoder || !hasAudio || !this.audioOutput) {
+             return;
+         }
+         for await (const frame_2 of this.audioDecoder.flushFrames()) {
+             const env_6 = { stack: [], error: void 0, hasError: false };
+             try {
+                 const frame = __addDisposableResource(env_6, frame_2, false);
+                 const filteredFrame = __addDisposableResource(env_6, await this.audioFilter.process(frame), false);
+                 if (!filteredFrame) {
+                     continue;
+                 }
+                 const encodedPacket = __addDisposableResource(env_6, await this.audioEncoder.encode(filteredFrame), false);
+                 if (encodedPacket) {
+                     await this.audioOutput.writePacket(encodedPacket, audioStreamIndex);
+                 }
+             }
+             catch (e_6) {
+                 env_6.error = e_6;
+                 env_6.hasError = true;
+             }
+             finally {
+                 __disposeResources(env_6);
+             }
+         }
+         for await (const frame_3 of this.audioFilter.flushFrames()) {
+             const env_7 = { stack: [], error: void 0, hasError: false };
+             try {
+                 const frame = __addDisposableResource(env_7, frame_3, false);
+                 const encodedPacket = __addDisposableResource(env_7, await this.audioEncoder.encode(frame), false);
+                 if (encodedPacket) {
+                     await this.audioOutput.writePacket(encodedPacket, audioStreamIndex);
+                 }
+             }
+             catch (e_7) {
+                 env_7.error = e_7;
+                 env_7.hasError = true;
+             }
+             finally {
+                 __disposeResources(env_7);
+             }
+         }
+         for await (const packet_3 of this.audioEncoder.flushPackets()) {
+             const env_8 = { stack: [], error: void 0, hasError: false };
+             try {
+                 const packet = __addDisposableResource(env_8, packet_3, false);
+                 await this.audioOutput.writePacket(packet, audioStreamIndex);
+             }
+             catch (e_8) {
+                 env_8.error = e_8;
+                 env_8.hasError = true;
+             }
+             finally {
+                 __disposeResources(env_8);
+             }
+         }
+     }
+     /**
+      * Symbol.dispose implementation for automatic cleanup.
+      *
+      * @internal
+      */
+     [Symbol.dispose]() {
+         this.dispose();
+     }
+ }
+ /**
+  * Complete WebRTC session management with werift integration.
+  *
+  * Provides end-to-end WebRTC streaming with automatic SDP negotiation,
+  * ICE candidate handling, and peer connection management.
+  * Built on top of {@link WebRTCStream} but handles all WebRTC protocol details.
+  * Integrates with the werift library for RTCPeerConnection and media track handling.
+  * Ideal for building complete WebRTC streaming applications with minimal code.
+  *
+  * @example
+  * ```typescript
+  * import { WebRTCSession } from 'node-av/api';
+  *
+  * // Create session from media source
+  * const session = await WebRTCSession.create('rtsp://camera.local/stream', {
+  *   mtu: 1200,
+  *   hardware: 'auto',
+  *   iceServers: [{ urls: 'stun:stun.l.google.com:19302' }]
+  * });
+  *
+  * // Setup ICE candidate handler
+  * session.onIceCandidate = (candidate) => {
+  *   sendToClient({ type: 'candidate', value: candidate });
+  * };
+  *
+  * // Process SDP offer from client
+  * const answer = await session.setOffer(clientOffer);
+  * sendToClient({ type: 'answer', value: answer });
+  *
+  * // Start streaming
+  * await session.start();
+  * ```
+  *
+  * @example
+  * ```typescript
+  * // Complete WebSocket signaling server
+  * import { WebSocket } from 'ws';
+  *
+  * let session;
+  * ws.on('message', async (data) => {
+  *   const msg = JSON.parse(data);
+  *
+  *   if (msg.type === 'offer') {
+  *     session = await WebRTCSession.create(msg.url, {
+  *       hardware: 'auto'
+  *     });
+  *
+  *     session.onIceCandidate = (candidate) => {
+  *       ws.send(JSON.stringify({ type: 'candidate', value: candidate }));
+  *     };
+  *
+  *     const answer = await session.setOffer(msg.value);
+  *     ws.send(JSON.stringify({ type: 'answer', value: answer }));
+  *
+  *     await session.start();
+  *   } else if (msg.type === 'candidate') {
+  *     session.addIceCandidate(msg.value);
+  *   }
+  * });
+  * ```
+  *
+  * @see {@link WebRTCStream} For library-agnostic RTP streaming
+  * @see {@link MediaInput} For input media handling
+  * @see {@link HardwareContext} For GPU acceleration
+  */
+ export class WebRTCSession {
+     stream = null;
+     pc = null;
+     videoTrack = null;
+     audioTrack = null;
+     options;
+     /**
+      * Callback invoked when a new ICE candidate is discovered.
+      * Send this candidate to the remote peer via the signaling channel.
+      *
+      * @param candidate - ICE candidate string to send to remote peer
+      *
+      * @example
+      * ```typescript
+      * session.onIceCandidate = (candidate) => {
+      *   ws.send(JSON.stringify({ type: 'candidate', value: candidate }));
+      * };
+      * ```
+      */
+     onIceCandidate = null;
+     /**
+      * @param options - Session configuration options
+      *
+      * Use the {@link create} factory method
+      *
+      * @internal
+      */
+     constructor(options) {
+         this.options = options;
+     }
+     /**
+      * Create a WebRTC session from a media source.
+      *
+      * Opens the input media, creates internal streaming components, and prepares
+      * for WebRTC peer connection negotiation. Does not start streaming yet.
+      * Call {@link setOffer} to negotiate SDP and {@link start} to begin streaming.
+      *
+      * @param inputUrl - Media source URL (RTSP, file path, HTTP, etc.)
+      *
+      * @param options - Session configuration options
+      *
+      * @returns Configured WebRTC session instance
+      *
+      * @throws {Error} If no video stream found in input
+      *
+      * @throws {FFmpegError} If input cannot be opened
+      *
+      * @example
+      * ```typescript
+      * const session = await WebRTCSession.create('rtsp://camera.local/stream', {
+      *   mtu: 1200,
+      *   hardware: 'auto',
+      *   iceServers: [{ urls: 'stun:stun.l.google.com:19302' }]
+      * });
+      * ```
+      *
+      * @example
+      * ```typescript
+      * // Session from file with hardware acceleration
+      * const session = await WebRTCSession.create('video.mp4', {
+      *   hardware: {
+      *     deviceType: AV_HWDEVICE_TYPE_CUDA
+      *   }
+      * });
+      * ```
+      */
+     static async create(inputUrl, options = {}) {
+         const session = new WebRTCSession(options);
+         // Create stream to detect codecs
+         session.stream = await WebRTCStream.create(inputUrl, {
+             mtu: options.mtu,
+             hardware: options.hardware,
+             onVideoPacket: (rtp) => {
+                 session.videoTrack?.writeRtp(rtp);
+             },
+             onAudioPacket: (rtp) => {
+                 session.audioTrack?.writeRtp(rtp);
+             },
+         });
+         return session;
+     }
+     /**
+      * Get detected codec information.
+      *
+      * Returns RTP codec parameters and FFmpeg codec IDs for video and audio.
+      * Useful for inspecting what codecs will be used in the WebRTC session.
+      *
+      * @returns Codec configuration for video and audio streams
+      *
+      * @throws {Error} If stream not initialized
+      *
+      * @example
+      * ```typescript
+      * const session = await WebRTCSession.create('input.mp4');
+      * const codecs = session.getCodecs();
+      *
+      * console.log('Video:', codecs.video.mimeType);
+      * console.log('Audio:', codecs.audio?.mimeType);
+      * ```
+      */
+     getCodecs() {
+         if (!this.stream) {
+             throw new Error('Stream not initialized');
+         }
+         return this.stream.getCodecs();
+     }
+     /**
+      * Process SDP offer from remote peer and generate SDP answer.
+      *
+      * Creates RTCPeerConnection with detected codecs, sets up media tracks,
+      * processes the remote SDP offer, and generates a local SDP answer.
+      * Also configures ICE candidate handling via the {@link onIceCandidate} callback.
+      * Must be called before {@link start}.
+      *
+      * @param offerSdp - SDP offer string from remote WebRTC peer
+      *
+      * @returns SDP answer string to send back to remote peer
+      *
+      * @throws {Error} If stream not initialized
+      *
+      * @example
+      * ```typescript
+      * const session = await WebRTCSession.create('input.mp4');
+      *
+      * // Setup ICE candidate handler first
+      * session.onIceCandidate = (candidate) => {
+      *   sendToRemote({ type: 'candidate', value: candidate });
+      * };
+      *
+      * // Process offer and send answer
+      * const answer = await session.setOffer(remoteOffer);
+      * sendToRemote({ type: 'answer', value: answer });
+      * ```
+      */
+     async setOffer(offerSdp) {
+         if (!this.stream) {
+             throw new Error('Stream not initialized');
+         }
+         const codecs = this.stream.getCodecs();
+         const videoConfig = codecs.video;
+         delete videoConfig.codecId;
+         const audioConfig = codecs.audio ?? {
+             mimeType: 'audio/opus',
+             clockRate: 48000,
+             channels: 2,
+             payloadType: 111,
+         };
+         delete audioConfig.codecId;
+         // Create PeerConnection with detected codecs
+         const codecParams = {
+             video: [
+                 new RTCRtpCodecParameters({
+                     ...videoConfig,
+                 }),
+             ],
+             audio: [
+                 new RTCRtpCodecParameters({
+                     ...audioConfig,
+                 }),
+             ],
+         };
+         this.pc = new RTCPeerConnection({
+             codecs: codecParams,
+             iceServers: this.options.iceServers,
+         });
+         // Setup ICE candidate handling
+         this.pc.onIceCandidate.subscribe((candidate) => {
+             if (candidate?.candidate && this.onIceCandidate) {
+                 this.onIceCandidate(candidate.candidate);
+             }
+         });
+         // Setup tracks
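+         // Tracks are send-only: this end only streams media to the remote peer.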
+         this.pc.onTransceiverAdded.subscribe((transceiver) => {
+             if (transceiver.kind === 'video') {
+                 this.videoTrack = new MediaStreamTrack({ kind: 'video' });
+                 transceiver.sender.replaceTrack(this.videoTrack);
+                 transceiver.setDirection('sendonly');
+             }
+             else if (transceiver.kind === 'audio') {
+                 this.audioTrack = new MediaStreamTrack({ kind: 'audio' });
+                 transceiver.sender.replaceTrack(this.audioTrack);
+                 transceiver.setDirection('sendonly');
+             }
+         });
+         // Set remote description and create answer
+         await this.pc.setRemoteDescription(new RTCSessionDescription(offerSdp, 'offer'));
+         const answer = await this.pc.createAnswer();
+         this.pc.setLocalDescription(answer);
+         return this.pc.localDescription?.sdp ?? '';
+     }
+     /**
+      * Add ICE candidate from remote peer.
+      *
+      * Processes ICE candidates received from the remote peer via the signaling channel.
+      * Should be called whenever a new candidate message arrives from the remote peer.
+      * Can be called multiple times as candidates are discovered.
+      *
+      * @param candidate - ICE candidate string from remote peer
+      *
+      * @throws {Error} If peer connection not initialized (call {@link setOffer} first)
+      *
+      * @example
+      * ```typescript
+      * // In signaling message handler
+      * if (msg.type === 'candidate') {
+      *   session.addIceCandidate(msg.value);
+      * }
+      * ```
+      */
+     addIceCandidate(candidate) {
+         if (!this.pc) {
+             throw new Error('PeerConnection not initialized');
+         }
+         this.pc.addIceCandidate(new RTCIceCandidate({ candidate }));
+     }
+     /**
+      * Start streaming media to WebRTC peer connection.
+      *
+      * Begins the media processing pipeline, reading packets from input,
+      * transcoding if necessary, and sending RTP packets to media tracks.
+      * Must call {@link setOffer} before starting.
+      * This method blocks until streaming completes or {@link stop} is called.
+      *
+      * @returns Promise that resolves when streaming completes
+      *
+      * @throws {Error} If stream not initialized
+      *
+      * @throws {FFmpegError} If transcoding or muxing fails
+      *
+      * @example
+      * ```typescript
+      * const session = await WebRTCSession.create('input.mp4');
+      * session.onIceCandidate = (c) => sendToRemote(c);
+      *
+      * const answer = await session.setOffer(remoteOffer);
+      * sendToRemote(answer);
+      *
+      * // Start streaming (blocks until complete)
+      * await session.start();
+      * ```
+      *
+      * @example
+      * ```typescript
+      * // Non-blocking start
+      * const session = await WebRTCSession.create('input.mp4');
+      * const streamPromise = session.start();
+      *
+      * // Later: stop streaming
+      * session.stop();
+      * await streamPromise;
+      * ```
+      */
+     async start() {
+         if (!this.stream) {
+             throw new Error('Stream not initialized');
+         }
+         await this.stream.start();
+     }
+     /**
+      * Stop streaming gracefully.
+      *
+      * Signals the streaming loop to exit after the current packet is processed.
+      * Does not immediately close resources; use {@link dispose} for cleanup.
+      * Safe to call multiple times.
+      *
+      * @example
+      * ```typescript
+      * const session = await WebRTCSession.create('input.mp4');
+      * const streamPromise = session.start();
+      *
+      * // Stop after 10 seconds
+      * setTimeout(() => session.stop(), 10000);
+      *
+      * await streamPromise;
+      * session.dispose();
+      * ```
+      */
+     stop() {
+         this.stream?.stop();
+     }
+     /**
+      * Clean up all resources and close the session.
+      *
+      * Stops streaming if active, releases all FFmpeg resources, closes the peer connection,
+      * and cleans up media tracks. Should be called when done with the session to prevent
+      * memory leaks. Safe to call multiple times.
+      *
+      * @example
+      * ```typescript
+      * const session = await WebRTCSession.create('input.mp4');
+      * await session.start();
+      * session.dispose();
+      * ```
+      *
+      * @example
+      * ```typescript
+      * // Using automatic cleanup
+      * {
+      *   await using session = await WebRTCSession.create('input.mp4');
+      *   await session.start();
+      * } // Automatically disposed
+      * ```
+      */
+     dispose() {
+         this.stop();
+         this.stream?.dispose();
+         this.pc?.close();
+         this.videoTrack = null;
+         this.audioTrack = null;
+         this.pc = null;
+     }
+     /**
+      * Symbol.dispose implementation for automatic cleanup.
+      *
+      * @internal
+      */
+     [Symbol.dispose]() {
+         this.dispose();
+     }
+ }
+ //# sourceMappingURL=webrtc.js.map