node-av 3.0.6 → 3.1.1

This diff shows the contents of publicly available package versions as published to their respective public registries. It is provided for informational purposes only.
@@ -0,0 +1,1118 @@
+ var __addDisposableResource = (this && this.__addDisposableResource) || function (env, value, async) {
+     if (value !== null && value !== void 0) {
+         if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected.");
+         var dispose, inner;
+         if (async) {
+             if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined.");
+             dispose = value[Symbol.asyncDispose];
+         }
+         if (dispose === void 0) {
+             if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined.");
+             dispose = value[Symbol.dispose];
+             if (async) inner = dispose;
+         }
+         if (typeof dispose !== "function") throw new TypeError("Object not disposable.");
+         if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } };
+         env.stack.push({ value: value, dispose: dispose, async: async });
+     }
+     else if (async) {
+         env.stack.push({ async: true });
+     }
+     return value;
+ };
+ var __disposeResources = (this && this.__disposeResources) || (function (SuppressedError) {
+     return function (env) {
+         function fail(e) {
+             env.error = env.hasError ? new SuppressedError(e, env.error, "An error was suppressed during disposal.") : e;
+             env.hasError = true;
+         }
+         var r, s = 0;
+         function next() {
+             while (r = env.stack.pop()) {
+                 try {
+                     if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next);
+                     if (r.dispose) {
+                         var result = r.dispose.call(r.value);
+                         if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); });
+                     }
+                     else s |= 1;
+                 }
+                 catch (e) {
+                     fail(e);
+                 }
+             }
+             if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve();
+             if (env.hasError) throw env.error;
+         }
+         return next();
+     };
+ })(typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) {
+     var e = new Error(message);
+     return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
+ });
+ import { MediaStreamTrack, RTCIceCandidate, RTCPeerConnection, RTCRtpCodecParameters, RTCSessionDescription, RtpPacket } from 'werift';
+ import { AV_CODEC_ID_AV1, AV_CODEC_ID_H264, AV_CODEC_ID_HEVC, AV_CODEC_ID_OPUS, AV_CODEC_ID_PCM_ALAW, AV_CODEC_ID_PCM_MULAW, AV_CODEC_ID_VP8, AV_CODEC_ID_VP9, AV_HWDEVICE_TYPE_NONE, AV_SAMPLE_FMT_S16, } from '../constants/constants.js';
+ import { FF_ENCODER_LIBOPUS, FF_ENCODER_LIBX264 } from '../constants/encoders.js';
+ import { Decoder } from './decoder.js';
+ import { Encoder } from './encoder.js';
+ import { FilterPreset } from './filter-presets.js';
+ import { FilterAPI } from './filter.js';
+ import { HardwareContext } from './hardware.js';
+ import { MediaInput } from './media-input.js';
+ import { MediaOutput } from './media-output.js';
+ /**
+  * High-level WebRTC streaming with automatic codec detection and transcoding.
+  *
+  * Provides library-agnostic RTP streaming for WebRTC applications.
+  * Automatically detects input codecs and transcodes non-WebRTC-compatible formats.
+  * Handles video (H.264, H.265, VP8, VP9) and audio (Opus, PCMA, PCMU) codecs.
+  * Supports hardware acceleration for video transcoding.
+  * Essential component for building WebRTC streaming servers without direct WebRTC library coupling.
+  *
+  * @example
+  * ```typescript
+  * import { WebRTCStream } from 'node-av/api';
+  *
+  * // Create stream with RTP packet callbacks
+  * const stream = await WebRTCStream.create('rtsp://camera.local/stream', {
+  *   mtu: 1200,
+  *   hardware: 'auto',
+  *   onVideoPacket: (rtp) => {
+  *     // Send RTP packet to WebRTC peer connection
+  *     videoTrack.writeRtp(rtp);
+  *   },
+  *   onAudioPacket: (rtp) => {
+  *     audioTrack.writeRtp(rtp);
+  *   }
+  * });
+  *
+  * // Get detected codecs for SDP negotiation
+  * const codecs = stream.getCodecs();
+  * console.log('Video:', codecs.video.mimeType);
+  * console.log('Audio:', codecs.audio?.mimeType);
+  *
+  * // Start streaming
+  * await stream.start();
+  * ```
+  *
+  * @example
+  * ```typescript
+  * // Stream with hardware acceleration
+  * import { AV_HWDEVICE_TYPE_CUDA } from 'node-av/constants';
+  *
+  * const stream = await WebRTCStream.create('video.mp4', {
+  *   hardware: {
+  *     deviceType: AV_HWDEVICE_TYPE_CUDA,
+  *     device: '/dev/nvidia0'
+  *   },
+  *   onVideoPacket: (rtp) => sendToWebRTC(rtp)
+  * });
+  *
+  * await stream.start();
+  * stream.stop();
+  * stream.dispose();
+  * ```
+  *
+  * @see {@link WebRTCSession} For complete WebRTC session management with werift
+  * @see {@link MediaInput} For input media handling
+  * @see {@link HardwareContext} For GPU acceleration
+  */
+ export class WebRTCStream {
+     input;
+     codecInfo;
+     options;
+     videoOutput = null;
+     audioOutput = null;
+     hardwareContext = null;
+     videoDecoder = null;
+     videoEncoder = null;
+     audioDecoder = null;
+     audioFilter = null;
+     audioEncoder = null;
+     streamActive = false;
+     /**
+      * @param input - Media input source
+      *
+      * @param options - Stream configuration options
+      *
+      * Use {@link create} factory method
+      *
+      * @internal
+      */
+     constructor(input, options) {
+         this.input = input;
+         const videoStream = input.video();
+         const audioStream = input.audio();
+         const videoCodecId = videoStream.codecpar.codecId;
+         const audioCodecId = audioStream?.codecpar.codecId ?? null;
+         const videoConfig = this.getVideoCodecConfig(videoCodecId) ?? this.getVideoCodecConfig(AV_CODEC_ID_H264); // We transcode unsupported codecs to H264
+         this.codecInfo = {
+             video: {
+                 codecId: videoCodecId,
+                 ...videoConfig,
+             },
+         };
+         if (audioCodecId !== null) {
+             const audioConfig = this.getAudioCodecConfig(audioCodecId) ?? this.getAudioCodecConfig(AV_CODEC_ID_OPUS); // We transcode unsupported codecs to OPUS
+             this.codecInfo.audio = {
+                 codecId: audioCodecId,
+                 ...audioConfig,
+             };
+         }
+         this.options = {
+             onVideoPacket: options.onVideoPacket ?? (() => { }),
+             onAudioPacket: options.onAudioPacket ?? (() => { }),
+             mtu: options.mtu ?? 1200,
+             hardware: options.hardware ?? { deviceType: AV_HWDEVICE_TYPE_NONE },
+             inputOptions: options.inputOptions,
+         };
+     }
+     /**
+      * Create a WebRTC stream from a media source.
+      *
+      * Opens the input media, detects video and audio codecs, and prepares
+      * transcoding pipelines for non-WebRTC-compatible formats.
+      * Automatically configures H.264 encoding for unsupported video codecs
+      * and Opus encoding for unsupported audio codecs.
+      *
+      * @param inputUrl - Media source URL (RTSP, file path, HTTP, etc.)
+      *
+      * @param options - Stream configuration options
+      *
+      * @returns Configured WebRTC stream instance
+      *
+      * @throws {Error} If no video stream found in input
+      *
+      * @throws {FFmpegError} If input cannot be opened
+      *
+      * @example
+      * ```typescript
+      * // Stream from RTSP camera
+      * const stream = await WebRTCStream.create('rtsp://camera.local/stream', {
+      *   mtu: 1200,
+      *   onVideoPacket: (rtp) => videoTrack.writeRtp(rtp),
+      *   onAudioPacket: (rtp) => audioTrack.writeRtp(rtp)
+      * });
+      * ```
+      *
+      * @example
+      * ```typescript
+      * // Stream file with auto hardware acceleration
+      * const stream = await WebRTCStream.create('video.mp4', {
+      *   hardware: 'auto'
+      * });
+      * ```
+      */
+     static async create(inputUrl, options = {}) {
+         const isRtsp = inputUrl.toLowerCase().startsWith('rtsp');
+         options.inputOptions = options.inputOptions ?? {};
+         options.inputOptions = {
+             flags: 'low_delay',
+             rtsp_transport: isRtsp ? 'tcp' : undefined,
+             ...options.inputOptions,
+         };
+         const input = await MediaInput.open(inputUrl, {
+             options: options.inputOptions,
+         });
+         const videoStream = input.video();
+         if (!videoStream) {
+             throw new Error('No video stream found in input');
+         }
+         return new WebRTCStream(input, options);
+     }
+     /**
+      * Get detected codec information for SDP negotiation.
+      *
+      * Returns RTP codec parameters and FFmpeg codec IDs for video and audio.
+      * Use this information to configure WebRTC peer connections with matching codecs.
+      *
+      * @returns Codec configuration for video and audio streams
+      *
+      * @example
+      * ```typescript
+      * const stream = await WebRTCStream.create('input.mp4');
+      * const codecs = stream.getCodecs();
+      *
+      * console.log('Video codec:', codecs.video.mimeType);
+      * console.log('Audio codec:', codecs.audio?.mimeType);
+      * ```
+      */
+     getCodecs() {
+         return this.codecInfo;
+     }
+     /**
+      * Start streaming media to RTP packets.
+      *
+      * Begins the media processing pipeline, reading packets from input,
+      * transcoding if necessary, and invoking RTP packet callbacks.
+      * Automatically handles video and audio streams in parallel.
+      * Flushes all buffers at the end of stream.
+      * This method blocks until streaming completes or {@link stop} is called.
+      *
+      * @returns Promise that resolves when streaming completes
+      *
+      * @throws {FFmpegError} If transcoding or muxing fails
+      *
+      * @example
+      * ```typescript
+      * const stream = await WebRTCStream.create('rtsp://camera.local/stream', {
+      *   onVideoPacket: (rtp) => sendRtp(rtp)
+      * });
+      *
+      * // Start streaming (blocks until complete or stopped)
+      * await stream.start();
+      * ```
+      *
+      * @example
+      * ```typescript
+      * // Non-blocking start with background promise
+      * const stream = await WebRTCStream.create('input.mp4');
+      * const streamPromise = stream.start();
+      *
+      * // Later: stop streaming
+      * stream.stop();
+      * await streamPromise;
+      * ```
+      */
+     async start() {
+         if (this.streamActive) {
+             return;
+         }
+         this.streamActive = true;
+         const videoStream = this.input.video();
+         const audioStream = this.input.audio();
+         // Setup video transcoding if needed
+         if (!this.isVideoCodecSupported(videoStream.codecpar.codecId)) {
+             if (this.options.hardware === 'auto') {
+                 this.hardwareContext = HardwareContext.auto();
+             }
+             else if (this.options.hardware.deviceType !== AV_HWDEVICE_TYPE_NONE) {
+                 this.hardwareContext = HardwareContext.create(this.options.hardware.deviceType, this.options.hardware.device, this.options.hardware.options);
+             }
+             this.videoDecoder = await Decoder.create(videoStream, {
+                 exitOnError: false,
+                 hardware: this.hardwareContext,
+             });
+             const encoderCodec = this.hardwareContext?.getEncoderCodec('h264') ?? FF_ENCODER_LIBX264;
+             const encoderOptions = {};
+             if (encoderCodec === FF_ENCODER_LIBX264) {
+                 encoderOptions.preset = 'ultrafast';
+                 encoderOptions.tune = 'zerolatency';
+             }
+             this.videoEncoder = await Encoder.create(encoderCodec, {
+                 timeBase: videoStream.timeBase,
+                 frameRate: videoStream.avgFrameRate,
+                 maxBFrames: 0,
+                 options: encoderOptions,
+             });
+         }
+         // Setup video output
+         this.videoOutput = await MediaOutput.open({
+             write: (buffer) => {
+                 this.options.onVideoPacket(RtpPacket.deSerialize(buffer));
+                 return buffer.length;
+             },
+         }, {
+             format: 'rtp',
+             bufferSize: this.options.mtu,
+             options: {
+                 pkt_size: this.options.mtu,
+             },
+         });
+         const videoStreamIndex = this.videoEncoder ? this.videoOutput.addStream(this.videoEncoder) : this.videoOutput.addStream(videoStream);
+         // Setup audio if available
+         let audioStreamIndex = null;
+         if (audioStream) {
+             if (!this.isAudioCodecSupported(audioStream.codecpar.codecId)) {
+                 this.audioDecoder = await Decoder.create(audioStream, {
+                     exitOnError: false,
+                 });
+                 const targetSampleRate = 48000;
+                 const filterChain = FilterPreset.chain().aformat(AV_SAMPLE_FMT_S16, targetSampleRate, 'stereo').asetnsamples(960).build();
+                 this.audioFilter = FilterAPI.create(filterChain, {
+                     timeBase: audioStream.timeBase,
+                 });
+                 this.audioEncoder = await Encoder.create(FF_ENCODER_LIBOPUS, {
+                     timeBase: { num: 1, den: targetSampleRate },
+                     options: {
+                         application: 'lowdelay',
+                         frame_duration: 20,
+                     },
+                 });
+             }
+             this.audioOutput = await MediaOutput.open({
+                 write: (buffer) => {
+                     this.options.onAudioPacket(RtpPacket.deSerialize(buffer));
+                     return buffer.length;
+                 },
+             }, {
+                 format: 'rtp',
+                 bufferSize: this.options.mtu,
+                 options: {
+                     pkt_size: this.options.mtu,
+                 },
+             });
+             audioStreamIndex = this.audioEncoder ? this.audioOutput.addStream(this.audioEncoder) : this.audioOutput.addStream(audioStream);
+         }
+         const hasAudio = audioStreamIndex !== null && this.audioOutput !== null && audioStream !== undefined;
+         // Start processing loop
+         for await (const packet_1 of this.input.packets()) {
+             const env_1 = { stack: [], error: void 0, hasError: false };
+             try {
+                 const packet = __addDisposableResource(env_1, packet_1, false);
+                 if (!this.streamActive) {
+                     break;
+                 }
+                 if (packet.streamIndex === videoStream.index) {
+                     if (this.videoDecoder === null || this.videoEncoder === null) {
+                         await this.videoOutput.writePacket(packet, videoStreamIndex);
+                     }
+                     else {
+                         const env_2 = { stack: [], error: void 0, hasError: false };
+                         try {
+                             const decodedFrame = __addDisposableResource(env_2, await this.videoDecoder.decode(packet), false);
+                             if (!decodedFrame) {
+                                 continue;
+                             }
+                             const encodedPacket = __addDisposableResource(env_2, await this.videoEncoder.encode(decodedFrame), false);
+                             if (!encodedPacket) {
+                                 continue;
+                             }
+                             await this.videoOutput.writePacket(encodedPacket, videoStreamIndex);
+                         }
+                         catch (e_1) {
+                             env_2.error = e_1;
+                             env_2.hasError = true;
+                         }
+                         finally {
+                             __disposeResources(env_2);
+                         }
+                     }
+                 }
+                 else if (hasAudio && packet.streamIndex === audioStream.index) {
+                     if (this.audioDecoder === null || this.audioFilter === null || this.audioEncoder === null) {
+                         await this.audioOutput?.writePacket(packet, audioStreamIndex);
+                         continue;
+                     }
+                     else {
+                         const env_3 = { stack: [], error: void 0, hasError: false };
+                         try {
+                             const decodedFrame = __addDisposableResource(env_3, await this.audioDecoder.decode(packet), false);
+                             if (!decodedFrame) {
+                                 continue;
+                             }
+                             const filteredFrame = __addDisposableResource(env_3, await this.audioFilter.process(decodedFrame), false);
+                             if (!filteredFrame) {
+                                 continue;
+                             }
+                             const encodedPacket = __addDisposableResource(env_3, await this.audioEncoder.encode(filteredFrame), false);
+                             if (!encodedPacket) {
+                                 continue;
+                             }
+                             await this.audioOutput?.writePacket(encodedPacket, audioStreamIndex);
+                         }
+                         catch (e_2) {
+                             env_3.error = e_2;
+                             env_3.hasError = true;
+                         }
+                         finally {
+                             __disposeResources(env_3);
+                         }
+                     }
+                 }
+             }
+             catch (e_3) {
+                 env_1.error = e_3;
+                 env_1.hasError = true;
+             }
+             finally {
+                 __disposeResources(env_1);
+             }
+         }
+         // Flush pipelines
+         await Promise.allSettled([this.flushVideo(videoStreamIndex), this.flushAudio(audioStreamIndex, hasAudio)]);
+     }
+     /**
+      * Stop streaming gracefully.
+      *
+      * Signals the streaming loop to exit after the current packet is processed.
+      * Does not immediately close resources - use {@link dispose} for cleanup.
+      * Safe to call multiple times.
+      *
+      * @example
+      * ```typescript
+      * const stream = await WebRTCStream.create('input.mp4');
+      * const streamPromise = stream.start();
+      *
+      * // Stop after 10 seconds
+      * setTimeout(() => stream.stop(), 10000);
+      *
+      * await streamPromise; // Resolves when stopped
+      * stream.dispose();
+      * ```
+      */
+     stop() {
+         this.streamActive = false;
+     }
+     /**
+      * Clean up all resources and close the stream.
+      *
+      * Stops streaming if active and releases all FFmpeg resources including
+      * decoders, encoders, filters, outputs, and input. Should be called when
+      * done with the stream to prevent memory leaks.
+      * Safe to call multiple times.
+      *
+      * @example
+      * ```typescript
+      * const stream = await WebRTCStream.create('input.mp4');
+      * await stream.start();
+      * stream.dispose();
+      * ```
+      *
+      * @example
+      * ```typescript
+      * // Using automatic cleanup
+      * {
+      *   await using stream = await WebRTCStream.create('input.mp4');
+      *   await stream.start();
+      * } // Automatically disposed
+      * ```
+      */
+     dispose() {
+         if (!this.streamActive) {
+             return;
+         }
+         this.stop();
+         this.videoOutput?.close();
+         this.audioOutput?.close();
+         this.videoDecoder?.close();
+         this.videoEncoder?.close();
+         this.audioDecoder?.close();
+         this.audioFilter?.close();
+         this.audioEncoder?.close();
+         this.input.close();
+     }
+     /**
+      * Check if the given audio codec is compatible with WebRTC.
+      *
+      * @param codecId - The AVCodecID to check
+      *
+      * @returns True if the codec is WebRTC compatible, false otherwise
+      *
+      * @internal
+      */
+     isAudioCodecSupported(codecId) {
+         switch (codecId) {
+             case AV_CODEC_ID_PCM_ALAW:
+             case AV_CODEC_ID_PCM_MULAW:
+             case AV_CODEC_ID_OPUS:
+                 return true;
+             default:
+                 return false;
+         }
+     }
+     /**
+      * Check if the given video codec is compatible with WebRTC.
+      *
+      * @param codecId - The AVCodecID to check
+      *
+      * @returns True if the codec is WebRTC compatible, false otherwise
+      *
+      * @internal
+      */
+     isVideoCodecSupported(codecId) {
+         switch (codecId) {
+             case AV_CODEC_ID_H264:
+             case AV_CODEC_ID_HEVC:
+             case AV_CODEC_ID_VP8:
+             case AV_CODEC_ID_VP9:
+             case AV_CODEC_ID_AV1:
+                 return true;
+             default:
+                 return false;
+         }
+     }
+     /**
+      * Get the audio codec configuration for WebRTC.
+      *
+      * @param codecId - The AVCodecID of the audio codec
+      *
+      * @returns An object containing MIME type, clock rate, and channels, or null if unsupported
+      *
+      * @internal
+      */
+     getAudioCodecConfig(codecId) {
+         switch (codecId) {
+             case AV_CODEC_ID_OPUS:
+                 return {
+                     mimeType: 'audio/opus',
+                     clockRate: 48000,
+                     channels: 2,
+                     payloadType: 111,
+                 };
+             case AV_CODEC_ID_PCM_MULAW:
+                 return {
+                     mimeType: 'audio/PCMU',
+                     clockRate: 8000,
+                     channels: 1,
+                     payloadType: 0,
+                 };
+             case AV_CODEC_ID_PCM_ALAW:
+                 return {
+                     mimeType: 'audio/PCMA',
+                     clockRate: 8000,
+                     channels: 1,
+                     payloadType: 8,
+                 };
+             default:
+                 return null;
+         }
+     }
+     /**
+      * Get the video codec configuration for WebRTC.
+      *
+      * @param codecId - The AVCodecID of the video codec
+      *
+      * @returns An object containing MIME type and clock rate, or null if unsupported
+      *
+      * @internal
+      */
+     getVideoCodecConfig(codecId) {
+         switch (codecId) {
+             case AV_CODEC_ID_H264:
+                 return {
+                     mimeType: 'video/H264',
+                     clockRate: 90000,
+                     payloadType: 102,
+                 };
+             case AV_CODEC_ID_HEVC:
+                 return {
+                     mimeType: 'video/H265',
+                     clockRate: 90000,
+                     payloadType: 103,
+                 };
+             case AV_CODEC_ID_VP8:
+                 return {
+                     mimeType: 'video/VP8',
+                     clockRate: 90000,
+                     payloadType: 96,
+                 };
+             case AV_CODEC_ID_VP9:
+                 return {
+                     mimeType: 'video/VP9',
+                     clockRate: 90000,
+                     payloadType: 98,
+                 };
+             case AV_CODEC_ID_AV1:
+                 return {
+                     mimeType: 'video/AV1',
+                     clockRate: 90000,
+                     payloadType: 35,
+                 };
+             default:
+                 return null;
+         }
+     }
+     /**
+      * Flush video encoder pipeline.
+      *
+      * @param videoStreamIndex - Output video stream index
+      *
+      * @internal
+      */
+     async flushVideo(videoStreamIndex) {
+         if (!this.videoDecoder || !this.videoEncoder || !this.videoOutput) {
+             return;
+         }
+         for await (const frame_1 of this.videoDecoder.flushFrames()) {
+             const env_4 = { stack: [], error: void 0, hasError: false };
+             try {
+                 const frame = __addDisposableResource(env_4, frame_1, false);
+                 const encodedPacket = __addDisposableResource(env_4, await this.videoEncoder.encode(frame), false);
+                 if (encodedPacket) {
+                     await this.videoOutput.writePacket(encodedPacket, videoStreamIndex);
+                 }
+             }
+             catch (e_4) {
+                 env_4.error = e_4;
+                 env_4.hasError = true;
+             }
+             finally {
+                 __disposeResources(env_4);
+             }
+         }
+         for await (const packet_2 of this.videoEncoder.flushPackets()) {
+             const env_5 = { stack: [], error: void 0, hasError: false };
+             try {
+                 const packet = __addDisposableResource(env_5, packet_2, false);
+                 await this.videoOutput.writePacket(packet, videoStreamIndex);
+             }
+             catch (e_5) {
+                 env_5.error = e_5;
+                 env_5.hasError = true;
+             }
+             finally {
+                 __disposeResources(env_5);
+             }
+         }
+     }
+     /**
+      * Flush audio encoder pipeline.
+      *
+      * @param audioStreamIndex - Output audio stream index
+      *
+      * @param hasAudio - Whether audio stream exists
+      *
+      * @internal
+      */
+     async flushAudio(audioStreamIndex, hasAudio) {
+         if (!this.audioDecoder || !this.audioFilter || !this.audioEncoder || !hasAudio || !this.audioOutput) {
+             return;
+         }
+         for await (const frame_2 of this.audioDecoder.flushFrames()) {
+             const env_6 = { stack: [], error: void 0, hasError: false };
+             try {
+                 const frame = __addDisposableResource(env_6, frame_2, false);
+                 const filteredFrame = __addDisposableResource(env_6, await this.audioFilter.process(frame), false);
+                 if (!filteredFrame) {
+                     continue;
+                 }
+                 const encodedPacket = __addDisposableResource(env_6, await this.audioEncoder.encode(filteredFrame), false);
+                 if (encodedPacket) {
+                     await this.audioOutput.writePacket(encodedPacket, audioStreamIndex);
+                 }
+             }
+             catch (e_6) {
+                 env_6.error = e_6;
+                 env_6.hasError = true;
+             }
+             finally {
+                 __disposeResources(env_6);
+             }
+         }
+         for await (const frame_3 of this.audioFilter.flushFrames()) {
+             const env_7 = { stack: [], error: void 0, hasError: false };
+             try {
+                 const frame = __addDisposableResource(env_7, frame_3, false);
+                 const encodedPacket = __addDisposableResource(env_7, await this.audioEncoder.encode(frame), false);
+                 if (encodedPacket) {
+                     await this.audioOutput.writePacket(encodedPacket, audioStreamIndex);
+                 }
+             }
+             catch (e_7) {
+                 env_7.error = e_7;
+                 env_7.hasError = true;
+             }
+             finally {
+                 __disposeResources(env_7);
+             }
+         }
+         for await (const packet_3 of this.audioEncoder.flushPackets()) {
+             const env_8 = { stack: [], error: void 0, hasError: false };
+             try {
+                 const packet = __addDisposableResource(env_8, packet_3, false);
+                 await this.audioOutput.writePacket(packet, audioStreamIndex);
+             }
+             catch (e_8) {
+                 env_8.error = e_8;
+                 env_8.hasError = true;
+             }
+             finally {
+                 __disposeResources(env_8);
+             }
+         }
+     }
+     /**
+      * Symbol.dispose implementation for automatic cleanup.
+      *
+      * @internal
+      */
+     [Symbol.dispose]() {
+         this.dispose();
+     }
+ }
+ /**
+  * Complete WebRTC session management with werift integration.
+  *
+  * Provides end-to-end WebRTC streaming with automatic SDP negotiation,
+  * ICE candidate handling, and peer connection management.
+  * Built on top of {@link WebRTCStream} but handles all WebRTC protocol details.
+  * Integrates with werift library for RTCPeerConnection and media track handling.
+  * Ideal for building complete WebRTC streaming applications with minimal code.
+  *
+  * @example
+  * ```typescript
+  * import { WebRTCSession } from 'node-av/api';
+  *
+  * // Create session from media source
+  * const session = await WebRTCSession.create('rtsp://camera.local/stream', {
+  *   mtu: 1200,
+  *   hardware: 'auto',
+  *   iceServers: [{ urls: 'stun:stun.l.google.com:19302' }]
+  * });
+  *
+  * // Setup ICE candidate handler
+  * session.onIceCandidate = (candidate) => {
+  *   sendToClient({ type: 'candidate', value: candidate });
+  * };
+  *
+  * // Process SDP offer from client
+  * const answer = await session.setOffer(clientOffer);
+  * sendToClient({ type: 'answer', value: answer });
+  *
+  * // Start streaming
+  * await session.start();
+  * ```
+  *
+  * @example
+  * ```typescript
+  * // Complete WebSocket signaling server
+  * import { WebSocket } from 'ws';
+  *
+  * ws.on('message', async (data) => {
+  *   const msg = JSON.parse(data);
+  *
+  *   if (msg.type === 'offer') {
+  *     const session = await WebRTCSession.create(msg.url, {
+  *       hardware: 'auto'
+  *     });
+  *
+  *     session.onIceCandidate = (candidate) => {
+  *       ws.send(JSON.stringify({ type: 'candidate', value: candidate }));
+  *     };
+  *
+  *     const answer = await session.setOffer(msg.value);
+  *     ws.send(JSON.stringify({ type: 'answer', value: answer }));
+  *
+  *     await session.start();
+  *   } else if (msg.type === 'candidate') {
+  *     session.addIceCandidate(msg.value);
+  *   }
+  * });
+  * ```
+  *
+  * @see {@link WebRTCStream} For library-agnostic RTP streaming
+  * @see {@link MediaInput} For input media handling
+  * @see {@link HardwareContext} For GPU acceleration
+  */
+ export class WebRTCSession {
+     stream = null;
+     pc = null;
+     videoTrack = null;
+     audioTrack = null;
+     options;
+     /**
+      * Callback invoked when a new ICE candidate is discovered.
+      * Send this candidate to the remote peer via signaling channel.
+      *
+      * @param candidate - ICE candidate string to send to remote peer
+      *
+      * @example
+      * ```typescript
+      * session.onIceCandidate = (candidate) => {
+      *   ws.send(JSON.stringify({ type: 'candidate', value: candidate }));
+      * };
+      * ```
+      */
+     onIceCandidate = null;
+     /**
+      * @param options - Session configuration options
+      *
+      * Use {@link create} factory method
+      *
+      * @internal
+      */
+     constructor(options) {
+         this.options = options;
+     }
+     /**
+      * Create a WebRTC session from a media source.
+      *
+      * Opens the input media, creates internal streaming components, and prepares
+      * for WebRTC peer connection negotiation. Does not start streaming yet.
+      * Call {@link setOffer} to negotiate SDP and {@link start} to begin streaming.
+      *
+      * @param inputUrl - Media source URL (RTSP, file path, HTTP, etc.)
+      *
+      * @param options - Session configuration options
+      *
+      * @returns Configured WebRTC session instance
+      *
+      * @throws {Error} If no video stream found in input
+      *
+      * @throws {FFmpegError} If input cannot be opened
+      *
+      * @example
+      * ```typescript
+      * const session = await WebRTCSession.create('rtsp://camera.local/stream', {
+      *   mtu: 1200,
+      *   hardware: 'auto',
+      *   iceServers: [{ urls: 'stun:stun.l.google.com:19302' }]
+      * });
+      * ```
+      *
+      * @example
+      * ```typescript
+      * // Session from file with hardware acceleration
+      * const session = await WebRTCSession.create('video.mp4', {
+      *   hardware: {
+      *     deviceType: AV_HWDEVICE_TYPE_CUDA
+      *   }
+      * });
+      * ```
+      */
+     static async create(inputUrl, options = {}) {
+         const session = new WebRTCSession(options);
+         // Create stream to detect codecs
+         session.stream = await WebRTCStream.create(inputUrl, {
+             mtu: options.mtu,
+             hardware: options.hardware,
+             onVideoPacket: (rtp) => {
+                 session.videoTrack?.writeRtp(rtp);
+             },
+             onAudioPacket: (rtp) => {
+                 session.audioTrack?.writeRtp(rtp);
+             },
+         });
+         return session;
+     }
+     /**
+      * Get detected codec information.
+      *
+      * Returns RTP codec parameters and FFmpeg codec IDs for video and audio.
+      * Useful for inspecting what codecs will be used in the WebRTC session.
+      *
+      * @returns Codec configuration for video and audio streams
+      *
+      * @throws {Error} If stream not initialized
+      *
+      * @example
+      * ```typescript
+      * const session = await WebRTCSession.create('input.mp4');
+      * const codecs = session.getCodecs();
+      *
+      * console.log('Video:', codecs.video.mimeType);
+      * console.log('Audio:', codecs.audio?.mimeType);
+      * ```
+      */
+     getCodecs() {
+         if (!this.stream) {
+             throw new Error('Stream not initialized');
+         }
+         return this.stream.getCodecs();
+     }
+     /**
+      * Process SDP offer from remote peer and generate SDP answer.
+      *
+      * Creates RTCPeerConnection with detected codecs, sets up media tracks,
+      * processes the remote SDP offer, and generates a local SDP answer.
+      * Also configures ICE candidate handling via {@link onIceCandidate} callback.
+      * Must be called before {@link start}.
+      *
+      * @param offerSdp - SDP offer string from remote WebRTC peer
+      *
+      * @returns SDP answer string to send back to remote peer
+      *
+      * @throws {Error} If stream not initialized
+      *
+      * @example
+      * ```typescript
+      * const session = await WebRTCSession.create('input.mp4');
+      *
+      * // Setup ICE candidate handler first
+      * session.onIceCandidate = (candidate) => {
+      *   sendToRemote({ type: 'candidate', value: candidate });
+      * };
+      *
+      * // Process offer and send answer
+      * const answer = await session.setOffer(remoteOffer);
+      * sendToRemote({ type: 'answer', value: answer });
+      * ```
+      */
+     async setOffer(offerSdp) {
+         if (!this.stream) {
+             throw new Error('Stream not initialized');
+         }
+         const codecs = this.stream.getCodecs();
+         const videoConfig = codecs.video;
+         delete videoConfig.codecId;
+         const audioConfig = codecs.audio ?? {
+             mimeType: 'audio/opus',
+             clockRate: 48000,
+             channels: 2,
+             payloadType: 111,
+         };
+         delete audioConfig.codecId;
+         // Create PeerConnection with detected codecs
+         const codecParams = {
+             video: [
+                 new RTCRtpCodecParameters({
+                     ...videoConfig,
+                 }),
+             ],
+             audio: [
+                 new RTCRtpCodecParameters({
+                     ...audioConfig,
+                 }),
+             ],
+         };
+         this.pc = new RTCPeerConnection({
+             codecs: codecParams,
+             iceServers: this.options.iceServers,
+         });
+         // Setup ICE candidate handling
+         this.pc.onIceCandidate.subscribe((candidate) => {
+             if (candidate?.candidate && this.onIceCandidate) {
+                 this.onIceCandidate(candidate.candidate);
+             }
+         });
+         // Setup tracks
+         this.pc.onTransceiverAdded.subscribe((transceiver) => {
+             if (transceiver.kind === 'video') {
+                 this.videoTrack = new MediaStreamTrack({ kind: 'video' });
+                 transceiver.sender.replaceTrack(this.videoTrack);
+                 transceiver.setDirection('sendonly');
+             }
+             else if (transceiver.kind === 'audio') {
+                 this.audioTrack = new MediaStreamTrack({ kind: 'audio' });
+                 transceiver.sender.replaceTrack(this.audioTrack);
+                 transceiver.setDirection('sendonly');
+             }
+         });
+         // Set remote description and create answer
+         await this.pc.setRemoteDescription(new RTCSessionDescription(offerSdp, 'offer'));
+         const answer = await this.pc.createAnswer();
+         this.pc.setLocalDescription(answer);
+         return this.pc.localDescription?.sdp ?? '';
+     }
+     /**
+      * Add ICE candidate from remote peer.
+      *
+      * Processes ICE candidates received from the remote peer via signaling channel.
+      * Should be called whenever a new candidate message arrives from remote peer.
+      * Can be called multiple times as candidates are discovered.
+      *
+      * @param candidate - ICE candidate string from remote peer
+      *
+      * @throws {Error} If peer connection not initialized (call {@link setOffer} first)
+      *
+      * @example
+      * ```typescript
+      * // In signaling message handler
+      * if (msg.type === 'candidate') {
+      *   session.addIceCandidate(msg.value);
+      * }
+      * ```
+      */
+     addIceCandidate(candidate) {
+         if (!this.pc) {
+             throw new Error('PeerConnection not initialized');
+         }
+         this.pc.addIceCandidate(new RTCIceCandidate({ candidate }));
+     }
+     /**
+      * Start streaming media to WebRTC peer connection.
+      *
+      * Begins the media processing pipeline, reading packets from input,
+      * transcoding if necessary, and sending RTP packets to media tracks.
+      * Must call {@link setOffer} before starting.
+      * This method blocks until streaming completes or {@link stop} is called.
+      *
+      * @returns Promise that resolves when streaming completes
+      *
+      * @throws {Error} If stream not initialized
+      *
+      * @throws {FFmpegError} If transcoding or muxing fails
+      *
+      * @example
+      * ```typescript
+      * const session = await WebRTCSession.create('input.mp4');
+      * session.onIceCandidate = (c) => sendToRemote(c);
+      *
+      * const answer = await session.setOffer(remoteOffer);
+      * sendToRemote(answer);
+      *
+      * // Start streaming (blocks until complete)
+      * await session.start();
+      * ```
+      *
+      * @example
+      * ```typescript
+      * // Non-blocking start
+      * const session = await WebRTCSession.create('input.mp4');
+      * const streamPromise = session.start();
+      *
+      * // Later: stop streaming
+      * session.stop();
+      * await streamPromise;
+      * ```
+      */
+     async start() {
+         if (!this.stream) {
+             throw new Error('Stream not initialized');
+         }
+         await this.stream.start();
+     }
+     /**
+      * Stop streaming gracefully.
+      *
+      * Signals the streaming loop to exit after the current packet is processed.
+      * Does not immediately close resources - use {@link dispose} for cleanup.
+      * Safe to call multiple times.
+      *
+      * @example
+      * ```typescript
+      * const session = await WebRTCSession.create('input.mp4');
+      * const streamPromise = session.start();
+      *
+      * // Stop after 10 seconds
+      * setTimeout(() => session.stop(), 10000);
+      *
+      * await streamPromise;
+      * session.dispose();
+      * ```
+      */
+     stop() {
+         this.stream?.stop();
+     }
+     /**
+      * Clean up all resources and close the session.
+      *
+      * Stops streaming if active, releases all FFmpeg resources, closes peer connection,
+      * and cleans up media tracks. Should be called when done with the session to prevent
+      * memory leaks. Safe to call multiple times.
+      *
+      * @example
+      * ```typescript
+      * const session = await WebRTCSession.create('input.mp4');
+      * await session.start();
+      * session.dispose();
+      * ```
+      *
+      * @example
+      * ```typescript
+      * // Using automatic cleanup
+      * {
+      *   await using session = await WebRTCSession.create('input.mp4');
+      *   await session.start();
+      * } // Automatically disposed
+      * ```
+      */
+     dispose() {
+         this.stop();
+         this.stream?.dispose();
+         this.pc?.close();
+         this.videoTrack = null;
+         this.audioTrack = null;
+         this.pc = null;
+     }
+     /**
+      * Symbol.dispose implementation for automatic cleanup.
+      *
+      * @internal
+      */
+     [Symbol.dispose]() {
+         this.dispose();
+     }
+ }
+ //# sourceMappingURL=webrtc.js.map
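
Usage note: the hunk above adds two classes. `WebRTCSession` is the batteries-included path; `WebRTCStream` is the library-agnostic one. For readers wiring `WebRTCStream` into werift by hand, here is a minimal sketch that mirrors the codec setup `WebRTCSession.setOffer` performs internally. It is not part of the package: the offer/answer and ICE exchange is left as a placeholder for your own signaling channel, and it assumes werift's `addTransceiver(track, { direction })` overload.

```typescript
import { MediaStreamTrack, RTCPeerConnection, RTCRtpCodecParameters } from 'werift';
import { WebRTCStream } from 'node-av/api';

// Track that receives the RTP packets produced by WebRTCStream.
const videoTrack = new MediaStreamTrack({ kind: 'video' });

const stream = await WebRTCStream.create('rtsp://camera.local/stream', {
  onVideoPacket: (rtp) => videoTrack.writeRtp(rtp),
});

// Build the peer connection from the detected codec parameters, the same
// way WebRTCSession.setOffer does above. Note that getCodecs() also
// returns a codecId field, which setOffer strips before constructing
// RTCRtpCodecParameters.
const { video } = stream.getCodecs();
const pc = new RTCPeerConnection({
  codecs: {
    video: [
      new RTCRtpCodecParameters({
        mimeType: video.mimeType,
        clockRate: video.clockRate,
        payloadType: video.payloadType,
      }),
    ],
  },
});
pc.addTransceiver(videoTrack, { direction: 'sendonly' }); // assumed werift overload

// Offer/answer and ICE exchange over your own signaling channel goes here.

await stream.start(); // blocks until the input ends or stream.stop() is called
```

Audio follows the same pattern with `codecs.audio` and an `onAudioPacket` callback; when the source codec is not Opus, PCMA, or PCMU, the stream transcodes to Opus as documented in the class header.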