node-av 3.0.6 → 3.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,662 @@
1
+ import { RTCRtpCodecParameters, RtpPacket } from 'werift';
2
+ import type { AVCodecID, AVHWDeviceType } from '../constants/constants.js';
3
+ /**
4
+ * Codec information for WebRTC streaming.
5
+ *
6
+ * Contains RTP codec parameters and FFmpeg codec IDs for video and audio streams.
7
+ * Used for codec negotiation in WebRTC peer connections.
8
+ */
9
+ export interface WebRTCCodecInfo {
10
+ /**
11
+ * Video codec configuration.
12
+ * Combines RTP parameters (mimeType, clockRate, etc.) with FFmpeg codec ID.
13
+ */
14
+ video: Partial<RTCRtpCodecParameters> & {
15
+ codecId: AVCodecID;
16
+ };
17
+ /**
18
+ * Optional audio codec configuration.
19
+ * Combines RTP parameters (mimeType, clockRate, channels) with FFmpeg codec ID.
20
+ */
21
+ audio?: Partial<RTCRtpCodecParameters> & {
22
+ codecId: AVCodecID;
23
+ };
24
+ }
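// A minimal sketch of what a WebRTCCodecInfo value can look like for an H.264 + Opus
// source. The payloadType values and the AV_CODEC_ID_* constant names are assumptions
// for illustration; real values come from WebRTCStream.getCodecs() at runtime.
import type { WebRTCCodecInfo } from 'node-av/api';
import { AV_CODEC_ID_H264, AV_CODEC_ID_OPUS } from 'node-av/constants';

const exampleCodecs: WebRTCCodecInfo = {
  // RTP parameters used for SDP negotiation, plus the FFmpeg codec id
  video: { mimeType: 'video/H264', clockRate: 90000, payloadType: 96, codecId: AV_CODEC_ID_H264 },
  audio: { mimeType: 'audio/opus', clockRate: 48000, channels: 2, payloadType: 111, codecId: AV_CODEC_ID_OPUS },
};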
25
+ /**
26
+ * Options for configuring WebRTC streaming.
27
+ */
28
+ export interface WebRTCStreamOptions {
29
+ /**
30
+ * Callback invoked for each video RTP packet.
31
+ * Use this to send packets to your WebRTC implementation.
32
+ *
33
+ * @param packet - RTP packet ready for transmission
34
+ */
35
+ onVideoPacket?: (packet: RtpPacket) => void;
36
+ /**
37
+ * Callback invoked for each audio RTP packet.
38
+ * Use this to send packets to your WebRTC implementation.
39
+ *
40
+ * @param packet - RTP packet ready for transmission
41
+ */
42
+ onAudioPacket?: (packet: RtpPacket) => void;
43
+ /**
44
+ * Maximum transmission unit (MTU) size in bytes.
45
+ * RTP packets will be fragmented to fit within this size.
46
+ *
47
+ * @default 1200
48
+ */
49
+ mtu?: number;
50
+ /**
51
+ * Hardware acceleration configuration.
52
+ *
53
+ * - `'auto'` - Automatically detect and use available hardware acceleration
54
+ * - Object with deviceType - Manually specify hardware acceleration type
55
+ *
56
+ * @default { deviceType: AV_HWDEVICE_TYPE_NONE }
57
+ */
58
+ hardware?: 'auto' | {
59
+ deviceType: AVHWDeviceType;
60
+ device?: string;
61
+ options?: Record<string, string>;
62
+ };
63
+ /**
64
+ * FFmpeg input options applied when opening the input source.
65
+ *
66
+ * @default { flags: 'low_delay' }
67
+ */
68
+ inputOptions?: Record<string, string | number | boolean | null | undefined>;
69
+ }
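// A sketch of a WebRTCStreamOptions value for a low-latency RTSP camera.
// rtsp_transport and flags are standard FFmpeg input options; which options apply
// depends on the input protocol, and the packet callback body here is only a stub.
import type { WebRTCStreamOptions } from 'node-av/api';

const streamOptions: WebRTCStreamOptions = {
  mtu: 1200,         // keep RTP payloads under a typical path MTU
  hardware: 'auto',  // probe for an available hardware decoder/encoder
  inputOptions: { rtsp_transport: 'tcp', flags: 'low_delay' },
  onVideoPacket: (rtp) => {
    // forward to your WebRTC stack, e.g. a werift track's writeRtp(rtp)
  },
};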
70
+ /**
71
+ * High-level WebRTC streaming with automatic codec detection and transcoding.
72
+ *
73
+ * Provides library-agnostic RTP streaming for WebRTC applications.
74
+ * Automatically detects input codecs and transcodes non-WebRTC-compatible formats.
75
+ * Handles video (H.264, H.265, VP8, VP9) and audio (Opus, PCMA, PCMU) codecs.
76
+ * Supports hardware acceleration for video transcoding.
77
+ * Essential component for building WebRTC streaming servers without direct WebRTC library coupling.
78
+ *
79
+ * @example
80
+ * ```typescript
81
+ * import { WebRTCStream } from 'node-av/api';
82
+ *
83
+ * // Create stream with RTP packet callbacks
84
+ * const stream = await WebRTCStream.create('rtsp://camera.local/stream', {
85
+ * mtu: 1200,
86
+ * hardware: 'auto',
87
+ * onVideoPacket: (rtp) => {
88
+ * // Send RTP packet to WebRTC peer connection
89
+ * videoTrack.writeRtp(rtp);
90
+ * },
91
+ * onAudioPacket: (rtp) => {
92
+ * audioTrack.writeRtp(rtp);
93
+ * }
94
+ * });
95
+ *
96
+ * // Get detected codecs for SDP negotiation
97
+ * const codecs = stream.getCodecs();
98
+ * console.log('Video:', codecs.video.mimeType);
99
+ * console.log('Audio:', codecs.audio?.mimeType);
100
+ *
101
+ * // Start streaming
102
+ * await stream.start();
103
+ * ```
104
+ *
105
+ * @example
106
+ * ```typescript
107
+ * // Stream with hardware acceleration
108
+ * import { AV_HWDEVICE_TYPE_CUDA } from 'node-av/constants';
109
+ *
110
+ * const stream = await WebRTCStream.create('video.mp4', {
111
+ * hardware: {
112
+ * deviceType: AV_HWDEVICE_TYPE_CUDA,
113
+ * device: '/dev/nvidia0'
114
+ * },
115
+ * onVideoPacket: (rtp) => sendToWebRTC(rtp)
116
+ * });
117
+ *
118
+ * await stream.start();
119
+ * stream.stop();
120
+ * stream.dispose();
121
+ * ```
122
+ *
123
+ * @see {@link WebRTCSession} For complete WebRTC session management with werift
124
+ * @see {@link MediaInput} For input media handling
125
+ * @see {@link HardwareContext} For GPU acceleration
126
+ */
127
+ export declare class WebRTCStream implements Disposable {
128
+ private input;
129
+ private codecInfo;
130
+ private options;
131
+ private videoOutput;
132
+ private audioOutput;
133
+ private hardwareContext;
134
+ private videoDecoder;
135
+ private videoEncoder;
136
+ private audioDecoder;
137
+ private audioFilter;
138
+ private audioEncoder;
139
+ private streamActive;
140
+ /**
141
+ * @param input - Media input source
142
+ *
143
+ * @param options - Stream configuration options
144
+ *
145
+ * Use {@link create} factory method
146
+ *
147
+ * @internal
148
+ */
149
+ private constructor();
150
+ /**
151
+ * Create a WebRTC stream from a media source.
152
+ *
153
+ * Opens the input media, detects video and audio codecs, and prepares
154
+ * transcoding pipelines for non-WebRTC-compatible formats.
155
+ * Automatically configures H.264 encoding for unsupported video codecs
156
+ * and Opus encoding for unsupported audio codecs.
157
+ *
158
+ * @param inputUrl - Media source URL (RTSP, file path, HTTP, etc.)
159
+ *
160
+ * @param options - Stream configuration options
161
+ *
162
+ * @returns Configured WebRTC stream instance
163
+ *
164
+ * @throws {Error} If no video stream found in input
165
+ *
166
+ * @throws {FFmpegError} If input cannot be opened
167
+ *
168
+ * @example
169
+ * ```typescript
170
+ * // Stream from RTSP camera
171
+ * const stream = await WebRTCStream.create('rtsp://camera.local/stream', {
172
+ * mtu: 1200,
173
+ * onVideoPacket: (rtp) => videoTrack.writeRtp(rtp),
174
+ * onAudioPacket: (rtp) => audioTrack.writeRtp(rtp)
175
+ * });
176
+ * ```
177
+ *
178
+ * @example
179
+ * ```typescript
180
+ * // Stream file with auto hardware acceleration
181
+ * const stream = await WebRTCStream.create('video.mp4', {
182
+ * hardware: 'auto'
183
+ * });
184
+ * ```
185
+ */
186
+ static create(inputUrl: string, options?: WebRTCStreamOptions): Promise<WebRTCStream>;
187
+ /**
188
+ * Get detected codec information for SDP negotiation.
189
+ *
190
+ * Returns RTP codec parameters and FFmpeg codec IDs for video and audio.
191
+ * Use this information to configure WebRTC peer connections with matching codecs.
192
+ *
193
+ * @returns Codec configuration for video and audio streams
194
+ *
195
+ * @example
196
+ * ```typescript
197
+ * const stream = await WebRTCStream.create('input.mp4');
198
+ * const codecs = stream.getCodecs();
199
+ *
200
+ * console.log('Video codec:', codecs.video.mimeType);
201
+ * console.log('Audio codec:', codecs.audio?.mimeType);
202
+ * ```
203
+ */
204
+ getCodecs(): WebRTCCodecInfo;
205
+ /**
206
+ * Start streaming media as RTP packets.
207
+ *
208
+ * Begins the media processing pipeline, reading packets from input,
209
+ * transcoding if necessary, and invoking RTP packet callbacks.
210
+ * Automatically handles video and audio streams in parallel.
211
+ * Flushes all buffers at the end of stream.
212
+ * This method blocks until streaming completes or {@link stop} is called.
213
+ *
214
+ * @returns Promise that resolves when streaming completes
215
+ *
216
+ * @throws {FFmpegError} If transcoding or muxing fails
217
+ *
218
+ * @example
219
+ * ```typescript
220
+ * const stream = await WebRTCStream.create('rtsp://camera.local/stream', {
221
+ * onVideoPacket: (rtp) => sendRtp(rtp)
222
+ * });
223
+ *
224
+ * // Start streaming (blocks until complete or stopped)
225
+ * await stream.start();
226
+ * ```
227
+ *
228
+ * @example
229
+ * ```typescript
230
+ * // Non-blocking start with background promise
231
+ * const stream = await WebRTCStream.create('input.mp4');
232
+ * const streamPromise = stream.start();
233
+ *
234
+ * // Later: stop streaming
235
+ * stream.stop();
236
+ * await streamPromise;
237
+ * ```
238
+ */
239
+ start(): Promise<void>;
240
+ /**
241
+ * Stop streaming gracefully.
242
+ *
243
+ * Signals the streaming loop to exit after the current packet is processed.
244
+ * Does not immediately close resources - use {@link dispose} for cleanup.
245
+ * Safe to call multiple times.
246
+ *
247
+ * @example
248
+ * ```typescript
249
+ * const stream = await WebRTCStream.create('input.mp4');
250
+ * const streamPromise = stream.start();
251
+ *
252
+ * // Stop after 10 seconds
253
+ * setTimeout(() => stream.stop(), 10000);
254
+ *
255
+ * await streamPromise; // Resolves when stopped
256
+ * stream.dispose();
257
+ * ```
258
+ */
259
+ stop(): void;
260
+ /**
261
+ * Clean up all resources and close the stream.
262
+ *
263
+ * Stops streaming if active and releases all FFmpeg resources including
264
+ * decoders, encoders, filters, outputs, and input. Should be called when
265
+ * done with the stream to prevent memory leaks.
266
+ * Safe to call multiple times.
267
+ *
268
+ * @example
269
+ * ```typescript
270
+ * const stream = await WebRTCStream.create('input.mp4');
271
+ * await stream.start();
272
+ * stream.dispose();
273
+ * ```
274
+ *
275
+ * @example
276
+ * ```typescript
277
+ * // Using automatic cleanup
278
+ * {
279
+ * await using stream = await WebRTCStream.create('input.mp4');
280
+ * await stream.start();
281
+ * } // Automatically disposed
282
+ * ```
283
+ */
284
+ dispose(): void;
285
+ /**
286
+ * Check if the given audio codec is compatible with WebRTC.
287
+ *
288
+ * @param codecId - The AVCodecID to check
289
+ *
290
+ * @returns True if the codec is WebRTC compatible, false otherwise
291
+ *
292
+ * @internal
293
+ */
294
+ private isAudioCodecSupported;
295
+ /**
296
+ * Check if the given video codec is compatible with WebRTC.
297
+ *
298
+ * @param codecId - The AVCodecID to check
299
+ *
300
+ * @returns True if the codec is WebRTC compatible, false otherwise
301
+ *
302
+ * @internal
303
+ */
304
+ private isVideoCodecSupported;
305
+ /**
306
+ * Get the audio codec configuration for WebRTC.
307
+ *
308
+ * @param codecId - The AVCodecID of the audio codec
309
+ *
310
+ * @returns An object containing MIME type, clock rate, and channels, or null if unsupported
311
+ *
312
+ * @internal
313
+ */
314
+ private getAudioCodecConfig;
315
+ /**
316
+ * Get the video codec configuration for WebRTC.
317
+ *
318
+ * @param codecId - The AVCodecID of the video codec
319
+ *
320
+ * @returns An object containing MIME type and clock rate, or null if unsupported
321
+ *
322
+ * @internal
323
+ */
324
+ private getVideoCodecConfig;
325
+ /**
326
+ * Flush video encoder pipeline.
327
+ *
328
+ * @param videoStreamIndex - Output video stream index
329
+ *
330
+ * @internal
331
+ */
332
+ private flushVideo;
333
+ /**
334
+ * Flush audio encoder pipeline.
335
+ *
336
+ * @param audioStreamIndex - Output audio stream index
337
+ *
338
+ * @param hasAudio - Whether audio stream exists
339
+ *
340
+ * @internal
341
+ */
342
+ private flushAudio;
343
+ /**
344
+ * Symbol.dispose implementation for automatic cleanup.
345
+ *
346
+ * @internal
347
+ */
348
+ [Symbol.dispose](): void;
349
+ }
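// A sketch of wiring WebRTCStream into werift by hand, i.e. roughly what WebRTCSession
// automates. Assumes werift's RTCPeerConnection, MediaStreamTrack and RTCRtpCodecParameters
// APIs; the SDP offer/answer and ICE exchange over a signaling channel is elided.
import { MediaStreamTrack, RTCPeerConnection, RTCRtpCodecParameters } from 'werift';
import { WebRTCStream } from 'node-av/api';

const videoTrack = new MediaStreamTrack({ kind: 'video' });
const audioTrack = new MediaStreamTrack({ kind: 'audio' });

const stream = await WebRTCStream.create('rtsp://camera.local/stream', {
  hardware: 'auto',
  onVideoPacket: (rtp) => videoTrack.writeRtp(rtp),
  onAudioPacket: (rtp) => audioTrack.writeRtp(rtp),
});

// Build the peer connection with the codecs detected (or selected for transcoding).
const codecs = stream.getCodecs();
const pc = new RTCPeerConnection({
  codecs: {
    video: [new RTCRtpCodecParameters({
      mimeType: codecs.video.mimeType ?? 'video/H264',
      clockRate: codecs.video.clockRate ?? 90000,
    })],
    audio: codecs.audio
      ? [new RTCRtpCodecParameters({
          mimeType: codecs.audio.mimeType ?? 'audio/opus',
          clockRate: codecs.audio.clockRate ?? 48000,
          channels: codecs.audio.channels,
        })]
      : [],
  },
});
pc.addTrack(videoTrack);
if (codecs.audio) pc.addTrack(audioTrack);

// ...negotiate the offer/answer and ICE with the remote peer, then:
await stream.start();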
350
+ /**
351
+ * Options for configuring a WebRTC session with werift integration.
352
+ *
353
+ * Extends WebRTCStreamOptions but excludes RTP packet callbacks since
354
+ * they are automatically handled by the session's media tracks.
355
+ */
356
+ export interface WebRTCSessionOptions extends Omit<WebRTCStreamOptions, 'onVideoPacket' | 'onAudioPacket'> {
357
+ /**
358
+ * ICE servers for NAT traversal and STUN/TURN configuration.
359
+ *
360
+ * @default []
361
+ *
362
+ * @example
363
+ * ```typescript
364
+ * const session = await WebRTCSession.create('input.mp4', {
365
+ * iceServers: [
366
+ * { urls: 'stun:stun.l.google.com:19302' },
367
+ * { urls: 'turn:turn.example.com:3478' }
368
+ * ]
369
+ * });
370
+ * ```
371
+ */
372
+ iceServers?: {
373
+ urls: string;
374
+ }[];
375
+ }
376
+ /**
377
+ * Complete WebRTC session management with werift integration.
378
+ *
379
+ * Provides end-to-end WebRTC streaming with automatic SDP negotiation,
380
+ * ICE candidate handling, and peer connection management.
381
+ * Built on top of {@link WebRTCStream} but handles all WebRTC protocol details.
382
+ * Integrates with werift library for RTCPeerConnection and media track handling.
383
+ * Ideal for building complete WebRTC streaming applications with minimal code.
384
+ *
385
+ * @example
386
+ * ```typescript
387
+ * import { WebRTCSession } from 'node-av/api';
388
+ *
389
+ * // Create session from media source
390
+ * const session = await WebRTCSession.create('rtsp://camera.local/stream', {
391
+ * mtu: 1200,
392
+ * hardware: 'auto',
393
+ * iceServers: [{ urls: 'stun:stun.l.google.com:19302' }]
394
+ * });
395
+ *
396
+ * // Setup ICE candidate handler
397
+ * session.onIceCandidate = (candidate) => {
398
+ * sendToClient({ type: 'candidate', value: candidate });
399
+ * };
400
+ *
401
+ * // Process SDP offer from client
402
+ * const answer = await session.setOffer(clientOffer);
403
+ * sendToClient({ type: 'answer', value: answer });
404
+ *
405
+ * // Start streaming
406
+ * await session.start();
407
+ * ```
408
+ *
409
+ * @example
410
+ * ```typescript
411
+ * // Complete WebSocket signaling server
412
+ * import { WebSocket } from 'ws';
413
+ *
414
+ * let session: WebRTCSession | undefined;
+ * ws.on('message', async (data) => {
415
+ * const msg = JSON.parse(data);
416
+ *
417
+ * if (msg.type === 'offer') {
418
+ * session = await WebRTCSession.create(msg.url, {
419
+ * hardware: 'auto'
420
+ * });
421
+ *
422
+ * session.onIceCandidate = (candidate) => {
423
+ * ws.send(JSON.stringify({ type: 'candidate', value: candidate }));
424
+ * };
425
+ *
426
+ * const answer = await session.setOffer(msg.value);
427
+ * ws.send(JSON.stringify({ type: 'answer', value: answer }));
428
+ *
429
+ * await session.start();
430
+ * } else if (msg.type === 'candidate') {
431
+ * session?.addIceCandidate(msg.value);
432
+ * }
433
+ * });
434
+ * ```
435
+ *
436
+ * @see {@link WebRTCStream} For library-agnostic RTP streaming
437
+ * @see {@link MediaInput} For input media handling
438
+ * @see {@link HardwareContext} For GPU acceleration
439
+ */
440
+ export declare class WebRTCSession implements Disposable {
441
+ private stream;
442
+ private pc;
443
+ private videoTrack;
444
+ private audioTrack;
445
+ private options;
446
+ /**
447
+ * Callback invoked when a new ICE candidate is discovered.
448
+ * Send this candidate to the remote peer via signaling channel.
449
+ *
450
+ * @param candidate - ICE candidate string to send to remote peer
451
+ *
452
+ * @example
453
+ * ```typescript
454
+ * session.onIceCandidate = (candidate) => {
455
+ * ws.send(JSON.stringify({ type: 'candidate', value: candidate }));
456
+ * };
457
+ * ```
458
+ */
459
+ onIceCandidate: ((candidate: string) => void) | null;
460
+ /**
461
+ * @param options - Session configuration options
462
+ *
463
+ * Use {@link create} factory method
464
+ *
465
+ * @internal
466
+ */
467
+ private constructor();
468
+ /**
469
+ * Create a WebRTC session from a media source.
470
+ *
471
+ * Opens the input media, creates internal streaming components, and prepares
472
+ * for WebRTC peer connection negotiation. Does not start streaming yet.
473
+ * Call {@link setOffer} to negotiate SDP and {@link start} to begin streaming.
474
+ *
475
+ * @param inputUrl - Media source URL (RTSP, file path, HTTP, etc.)
476
+ *
477
+ * @param options - Session configuration options
478
+ *
479
+ * @returns Configured WebRTC session instance
480
+ *
481
+ * @throws {Error} If no video stream found in input
482
+ *
483
+ * @throws {FFmpegError} If input cannot be opened
484
+ *
485
+ * @example
486
+ * ```typescript
487
+ * const session = await WebRTCSession.create('rtsp://camera.local/stream', {
488
+ * mtu: 1200,
489
+ * hardware: 'auto',
490
+ * iceServers: [{ urls: 'stun:stun.l.google.com:19302' }]
491
+ * });
492
+ * ```
493
+ *
494
+ * @example
495
+ * ```typescript
496
+ * // Session from file with hardware acceleration
497
+ * const session = await WebRTCSession.create('video.mp4', {
498
+ * hardware: {
499
+ * deviceType: AV_HWDEVICE_TYPE_CUDA
500
+ * }
501
+ * });
502
+ * ```
503
+ */
504
+ static create(inputUrl: string, options?: WebRTCSessionOptions): Promise<WebRTCSession>;
505
+ /**
506
+ * Get detected codec information.
507
+ *
508
+ * Returns RTP codec parameters and FFmpeg codec IDs for video and audio.
509
+ * Useful for inspecting what codecs will be used in the WebRTC session.
510
+ *
511
+ * @returns Codec configuration for video and audio streams
512
+ *
513
+ * @throws {Error} If stream not initialized
514
+ *
515
+ * @example
516
+ * ```typescript
517
+ * const session = await WebRTCSession.create('input.mp4');
518
+ * const codecs = session.getCodecs();
519
+ *
520
+ * console.log('Video:', codecs.video.mimeType);
521
+ * console.log('Audio:', codecs.audio?.mimeType);
522
+ * ```
523
+ */
524
+ getCodecs(): WebRTCCodecInfo;
525
+ /**
526
+ * Process SDP offer from remote peer and generate SDP answer.
527
+ *
528
+ * Creates RTCPeerConnection with detected codecs, sets up media tracks,
529
+ * processes the remote SDP offer, and generates a local SDP answer.
530
+ * Also configures ICE candidate handling via {@link onIceCandidate} callback.
531
+ * Must be called before {@link start}.
532
+ *
533
+ * @param offerSdp - SDP offer string from remote WebRTC peer
534
+ *
535
+ * @returns SDP answer string to send back to remote peer
536
+ *
537
+ * @throws {Error} If stream not initialized
538
+ *
539
+ * @example
540
+ * ```typescript
541
+ * const session = await WebRTCSession.create('input.mp4');
542
+ *
543
+ * // Setup ICE candidate handler first
544
+ * session.onIceCandidate = (candidate) => {
545
+ * sendToRemote({ type: 'candidate', value: candidate });
546
+ * };
547
+ *
548
+ * // Process offer and send answer
549
+ * const answer = await session.setOffer(remoteOffer);
550
+ * sendToRemote({ type: 'answer', value: answer });
551
+ * ```
552
+ */
553
+ setOffer(offerSdp: string): Promise<string>;
554
+ /**
555
+ * Add ICE candidate from remote peer.
556
+ *
557
+ * Processes ICE candidates received from the remote peer via signaling channel.
558
+ * Should be called whenever a new candidate message arrives from remote peer.
559
+ * Can be called multiple times as candidates are discovered.
560
+ *
561
+ * @param candidate - ICE candidate string from remote peer
562
+ *
563
+ * @throws {Error} If peer connection not initialized (call {@link setOffer} first)
564
+ *
565
+ * @example
566
+ * ```typescript
567
+ * // In signaling message handler
568
+ * if (msg.type === 'candidate') {
569
+ * session.addIceCandidate(msg.value);
570
+ * }
571
+ * ```
572
+ */
573
+ addIceCandidate(candidate: string): void;
574
+ /**
575
+ * Start streaming media to the WebRTC peer connection.
576
+ *
577
+ * Begins the media processing pipeline, reading packets from input,
578
+ * transcoding if necessary, and sending RTP packets to media tracks.
579
+ * Must call {@link setOffer} before starting.
580
+ * This method blocks until streaming completes or {@link stop} is called.
581
+ *
582
+ * @returns Promise that resolves when streaming completes
583
+ *
584
+ * @throws {Error} If stream not initialized
585
+ *
586
+ * @throws {FFmpegError} If transcoding or muxing fails
587
+ *
588
+ * @example
589
+ * ```typescript
590
+ * const session = await WebRTCSession.create('input.mp4');
591
+ * session.onIceCandidate = (c) => sendToRemote(c);
592
+ *
593
+ * const answer = await session.setOffer(remoteOffer);
594
+ * sendToRemote(answer);
595
+ *
596
+ * // Start streaming (blocks until complete)
597
+ * await session.start();
598
+ * ```
599
+ *
600
+ * @example
601
+ * ```typescript
602
+ * // Non-blocking start
603
+ * const session = await WebRTCSession.create('input.mp4');
604
+ * const streamPromise = session.start();
605
+ *
606
+ * // Later: stop streaming
607
+ * session.stop();
608
+ * await streamPromise;
609
+ * ```
610
+ */
611
+ start(): Promise<void>;
612
+ /**
613
+ * Stop streaming gracefully.
614
+ *
615
+ * Signals the streaming loop to exit after the current packet is processed.
616
+ * Does not immediately close resources - use {@link dispose} for cleanup.
617
+ * Safe to call multiple times.
618
+ *
619
+ * @example
620
+ * ```typescript
621
+ * const session = await WebRTCSession.create('input.mp4');
622
+ * const streamPromise = session.start();
623
+ *
624
+ * // Stop after 10 seconds
625
+ * setTimeout(() => session.stop(), 10000);
626
+ *
627
+ * await streamPromise;
628
+ * session.dispose();
629
+ * ```
630
+ */
631
+ stop(): void;
632
+ /**
633
+ * Clean up all resources and close the session.
634
+ *
635
+ * Stops streaming if active, releases all FFmpeg resources, closes peer connection,
636
+ * and cleans up media tracks. Should be called when done with the session to prevent
637
+ * memory leaks. Safe to call multiple times.
638
+ *
639
+ * @example
640
+ * ```typescript
641
+ * const session = await WebRTCSession.create('input.mp4');
642
+ * await session.start();
643
+ * session.dispose();
644
+ * ```
645
+ *
646
+ * @example
647
+ * ```typescript
648
+ * // Using automatic cleanup
649
+ * {
650
+ * await using session = await WebRTCSession.create('input.mp4');
651
+ * await session.start();
652
+ * } // Automatically disposed
653
+ * ```
654
+ */
655
+ dispose(): void;
656
+ /**
657
+ * Symbol.dispose implementation for automatic cleanup.
658
+ *
659
+ * @internal
660
+ */
661
+ [Symbol.dispose](): void;
662
+ }
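// A minimal shutdown sketch built on the documented stop()/dispose() semantics:
// stop() lets the pending start() promise settle, dispose() then releases FFmpeg and
// WebRTC resources. Assumes the offer/answer and ICE exchange already happened.
import { WebRTCSession } from 'node-av/api';

const session = await WebRTCSession.create('rtsp://camera.local/stream', { hardware: 'auto' });
// ...session.setOffer()/session.addIceCandidate() driven by your signaling channel...
const running = session.start();

process.once('SIGINT', () => {
  session.stop();   // ask the streaming loop to exit after the current packet
});

await running;      // resolves once streaming has stopped or the input ended
session.dispose();  // safe to call more than once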