node-av 3.1.2 → 4.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (157)
  1. package/README.md +65 -52
  2. package/binding.gyp +4 -0
  3. package/dist/api/audio-frame-buffer.d.ts +201 -0
  4. package/dist/api/audio-frame-buffer.js +275 -0
  5. package/dist/api/audio-frame-buffer.js.map +1 -0
  6. package/dist/api/bitstream-filter.d.ts +319 -78
  7. package/dist/api/bitstream-filter.js +680 -151
  8. package/dist/api/bitstream-filter.js.map +1 -1
  9. package/dist/api/constants.d.ts +44 -0
  10. package/dist/api/constants.js +45 -0
  11. package/dist/api/constants.js.map +1 -0
  12. package/dist/api/data/test_av1.ivf +0 -0
  13. package/dist/api/data/test_mjpeg.mjpeg +0 -0
  14. package/dist/api/data/test_vp8.ivf +0 -0
  15. package/dist/api/data/test_vp9.ivf +0 -0
  16. package/dist/api/decoder.d.ts +279 -17
  17. package/dist/api/decoder.js +998 -209
  18. package/dist/api/decoder.js.map +1 -1
  19. package/dist/api/{media-input.d.ts → demuxer.d.ts} +294 -44
  20. package/dist/api/demuxer.js +1968 -0
  21. package/dist/api/demuxer.js.map +1 -0
  22. package/dist/api/encoder.d.ts +308 -50
  23. package/dist/api/encoder.js +1133 -111
  24. package/dist/api/encoder.js.map +1 -1
  25. package/dist/api/filter-presets.d.ts +12 -5
  26. package/dist/api/filter-presets.js +21 -7
  27. package/dist/api/filter-presets.js.map +1 -1
  28. package/dist/api/filter.d.ts +406 -40
  29. package/dist/api/filter.js +966 -139
  30. package/dist/api/filter.js.map +1 -1
  31. package/dist/api/{fmp4.d.ts → fmp4-stream.d.ts} +141 -140
  32. package/dist/api/fmp4-stream.js +539 -0
  33. package/dist/api/fmp4-stream.js.map +1 -0
  34. package/dist/api/hardware.d.ts +58 -6
  35. package/dist/api/hardware.js +127 -11
  36. package/dist/api/hardware.js.map +1 -1
  37. package/dist/api/index.d.ts +6 -4
  38. package/dist/api/index.js +14 -8
  39. package/dist/api/index.js.map +1 -1
  40. package/dist/api/io-stream.d.ts +3 -3
  41. package/dist/api/io-stream.js +5 -4
  42. package/dist/api/io-stream.js.map +1 -1
  43. package/dist/api/{media-output.d.ts → muxer.d.ts} +274 -60
  44. package/dist/api/muxer.js +1934 -0
  45. package/dist/api/muxer.js.map +1 -0
  46. package/dist/api/pipeline.d.ts +77 -29
  47. package/dist/api/pipeline.js +435 -425
  48. package/dist/api/pipeline.js.map +1 -1
  49. package/dist/api/rtp-stream.d.ts +312 -0
  50. package/dist/api/rtp-stream.js +630 -0
  51. package/dist/api/rtp-stream.js.map +1 -0
  52. package/dist/api/types.d.ts +476 -55
  53. package/dist/api/utilities/async-queue.d.ts +91 -0
  54. package/dist/api/utilities/async-queue.js +162 -0
  55. package/dist/api/utilities/async-queue.js.map +1 -0
  56. package/dist/api/utilities/audio-sample.d.ts +1 -1
  57. package/dist/api/utilities/image.d.ts +1 -1
  58. package/dist/api/utilities/index.d.ts +2 -0
  59. package/dist/api/utilities/index.js +4 -0
  60. package/dist/api/utilities/index.js.map +1 -1
  61. package/dist/api/utilities/media-type.d.ts +1 -1
  62. package/dist/api/utilities/pixel-format.d.ts +1 -1
  63. package/dist/api/utilities/sample-format.d.ts +1 -1
  64. package/dist/api/utilities/scheduler.d.ts +169 -0
  65. package/dist/api/utilities/scheduler.js +136 -0
  66. package/dist/api/utilities/scheduler.js.map +1 -0
  67. package/dist/api/utilities/streaming.d.ts +74 -15
  68. package/dist/api/utilities/streaming.js +170 -12
  69. package/dist/api/utilities/streaming.js.map +1 -1
  70. package/dist/api/utilities/timestamp.d.ts +1 -1
  71. package/dist/api/webrtc-stream.d.ts +288 -0
  72. package/dist/api/webrtc-stream.js +440 -0
  73. package/dist/api/webrtc-stream.js.map +1 -0
  74. package/dist/constants/constants.d.ts +51 -1
  75. package/dist/constants/constants.js +47 -1
  76. package/dist/constants/constants.js.map +1 -1
  77. package/dist/constants/encoders.d.ts +2 -1
  78. package/dist/constants/encoders.js +4 -3
  79. package/dist/constants/encoders.js.map +1 -1
  80. package/dist/constants/hardware.d.ts +26 -0
  81. package/dist/constants/hardware.js +27 -0
  82. package/dist/constants/hardware.js.map +1 -0
  83. package/dist/constants/index.d.ts +1 -0
  84. package/dist/constants/index.js +1 -0
  85. package/dist/constants/index.js.map +1 -1
  86. package/dist/lib/binding.d.ts +19 -8
  87. package/dist/lib/binding.js.map +1 -1
  88. package/dist/lib/codec-context.d.ts +87 -0
  89. package/dist/lib/codec-context.js +125 -4
  90. package/dist/lib/codec-context.js.map +1 -1
  91. package/dist/lib/codec-parameters.d.ts +183 -1
  92. package/dist/lib/codec-parameters.js +209 -0
  93. package/dist/lib/codec-parameters.js.map +1 -1
  94. package/dist/lib/codec-parser.d.ts +23 -0
  95. package/dist/lib/codec-parser.js +25 -0
  96. package/dist/lib/codec-parser.js.map +1 -1
  97. package/dist/lib/codec.d.ts +26 -4
  98. package/dist/lib/codec.js +35 -0
  99. package/dist/lib/codec.js.map +1 -1
  100. package/dist/lib/dictionary.js +1 -0
  101. package/dist/lib/dictionary.js.map +1 -1
  102. package/dist/lib/error.js +1 -1
  103. package/dist/lib/error.js.map +1 -1
  104. package/dist/lib/filter-context.d.ts +52 -11
  105. package/dist/lib/filter-context.js +56 -12
  106. package/dist/lib/filter-context.js.map +1 -1
  107. package/dist/lib/filter-graph.d.ts +9 -0
  108. package/dist/lib/filter-graph.js +13 -0
  109. package/dist/lib/filter-graph.js.map +1 -1
  110. package/dist/lib/filter.d.ts +21 -0
  111. package/dist/lib/filter.js +28 -0
  112. package/dist/lib/filter.js.map +1 -1
  113. package/dist/lib/format-context.d.ts +48 -14
  114. package/dist/lib/format-context.js +76 -7
  115. package/dist/lib/format-context.js.map +1 -1
  116. package/dist/lib/frame.d.ts +168 -0
  117. package/dist/lib/frame.js +212 -0
  118. package/dist/lib/frame.js.map +1 -1
  119. package/dist/lib/hardware-device-context.d.ts +3 -2
  120. package/dist/lib/hardware-device-context.js.map +1 -1
  121. package/dist/lib/index.d.ts +1 -0
  122. package/dist/lib/index.js +2 -0
  123. package/dist/lib/index.js.map +1 -1
  124. package/dist/lib/input-format.d.ts +21 -0
  125. package/dist/lib/input-format.js +42 -2
  126. package/dist/lib/input-format.js.map +1 -1
  127. package/dist/lib/native-types.d.ts +48 -26
  128. package/dist/lib/option.d.ts +25 -13
  129. package/dist/lib/option.js +28 -0
  130. package/dist/lib/option.js.map +1 -1
  131. package/dist/lib/output-format.d.ts +22 -1
  132. package/dist/lib/output-format.js +28 -0
  133. package/dist/lib/output-format.js.map +1 -1
  134. package/dist/lib/packet.d.ts +35 -0
  135. package/dist/lib/packet.js +52 -2
  136. package/dist/lib/packet.js.map +1 -1
  137. package/dist/lib/stream.d.ts +126 -0
  138. package/dist/lib/stream.js +188 -5
  139. package/dist/lib/stream.js.map +1 -1
  140. package/dist/lib/sync-queue.d.ts +179 -0
  141. package/dist/lib/sync-queue.js +197 -0
  142. package/dist/lib/sync-queue.js.map +1 -0
  143. package/dist/lib/types.d.ts +27 -1
  144. package/dist/lib/utilities.d.ts +281 -53
  145. package/dist/lib/utilities.js +298 -55
  146. package/dist/lib/utilities.js.map +1 -1
  147. package/install/check.js +18 -7
  148. package/package.json +20 -19
  149. package/dist/api/fmp4.js +0 -710
  150. package/dist/api/fmp4.js.map +0 -1
  151. package/dist/api/media-input.js +0 -1075
  152. package/dist/api/media-input.js.map +0 -1
  153. package/dist/api/media-output.js +0 -1040
  154. package/dist/api/media-output.js.map +0 -1
  155. package/dist/api/webrtc.d.ts +0 -664
  156. package/dist/api/webrtc.js +0 -1132
  157. package/dist/api/webrtc.js.map +0 -1
@@ -1,1132 +0,0 @@
1
- var __addDisposableResource = (this && this.__addDisposableResource) || function (env, value, async) {
2
- if (value !== null && value !== void 0) {
3
- if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected.");
4
- var dispose, inner;
5
- if (async) {
6
- if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined.");
7
- dispose = value[Symbol.asyncDispose];
8
- }
9
- if (dispose === void 0) {
10
- if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined.");
11
- dispose = value[Symbol.dispose];
12
- if (async) inner = dispose;
13
- }
14
- if (typeof dispose !== "function") throw new TypeError("Object not disposable.");
15
- if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } };
16
- env.stack.push({ value: value, dispose: dispose, async: async });
17
- }
18
- else if (async) {
19
- env.stack.push({ async: true });
20
- }
21
- return value;
22
- };
23
- var __disposeResources = (this && this.__disposeResources) || (function (SuppressedError) {
24
- return function (env) {
25
- function fail(e) {
26
- env.error = env.hasError ? new SuppressedError(e, env.error, "An error was suppressed during disposal.") : e;
27
- env.hasError = true;
28
- }
29
- var r, s = 0;
30
- function next() {
31
- while (r = env.stack.pop()) {
32
- try {
33
- if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next);
34
- if (r.dispose) {
35
- var result = r.dispose.call(r.value);
36
- if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); });
37
- }
38
- else s |= 1;
39
- }
40
- catch (e) {
41
- fail(e);
42
- }
43
- }
44
- if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve();
45
- if (env.hasError) throw env.error;
46
- }
47
- return next();
48
- };
49
- })(typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) {
50
- var e = new Error(message);
51
- return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
52
- });
53
- import { MediaStreamTrack, RTCIceCandidate, RTCPeerConnection, RTCRtpCodecParameters, RTCSessionDescription, RtpPacket } from 'werift';
54
- import { AV_CODEC_ID_AV1, AV_CODEC_ID_H264, AV_CODEC_ID_HEVC, AV_CODEC_ID_OPUS, AV_CODEC_ID_PCM_ALAW, AV_CODEC_ID_PCM_MULAW, AV_CODEC_ID_VP8, AV_CODEC_ID_VP9, AV_HWDEVICE_TYPE_NONE, AV_SAMPLE_FMT_S16, } from '../constants/constants.js';
55
- import { FF_ENCODER_LIBOPUS, FF_ENCODER_LIBX264 } from '../constants/encoders.js';
56
- import { Decoder } from './decoder.js';
57
- import { Encoder } from './encoder.js';
58
- import { FilterPreset } from './filter-presets.js';
59
- import { FilterAPI } from './filter.js';
60
- import { HardwareContext } from './hardware.js';
61
- import { MediaInput } from './media-input.js';
62
- import { MediaOutput } from './media-output.js';
63
- /**
64
- * High-level WebRTC streaming with automatic codec detection and transcoding.
65
- *
66
- * Provides library-agnostic RTP streaming for WebRTC applications.
67
- * Automatically detects input codecs and transcodes non-WebRTC-compatible formats.
68
- * Handles video (H.264, H.265, VP8, VP9) and audio (Opus, PCMA, PCMU) codecs.
69
- * Supports hardware acceleration for video transcoding.
70
- * Essential component for building WebRTC streaming servers without direct WebRTC library coupling.
71
- *
72
- * @example
73
- * ```typescript
74
- * import { WebRTCStream } from 'node-av/api';
75
- *
76
- * // Create stream with RTP packet callbacks
77
- * const stream = await WebRTCStream.create('rtsp://camera.local/stream', {
78
- * mtu: 1200,
79
- * hardware: 'auto',
80
- * onVideoPacket: (rtp) => {
81
- * // Send RTP packet to WebRTC peer connection
82
- * videoTrack.writeRtp(rtp);
83
- * },
84
- * onAudioPacket: (rtp) => {
85
- * audioTrack.writeRtp(rtp);
86
- * }
87
- * });
88
- *
89
- * // Get detected codecs for SDP negotiation
90
- * const codecs = stream.getCodecs();
91
- * console.log('Video:', codecs.video.mimeType);
92
- * console.log('Audio:', codecs.audio?.mimeType);
93
- *
94
- * // Start streaming
95
- * await stream.start();
96
- * ```
97
- *
98
- * @example
99
- * ```typescript
100
- * // Stream with hardware acceleration
101
- * import { AV_HWDEVICE_TYPE_CUDA } from 'node-av/constants';
102
- *
103
- * const stream = await WebRTCStream.create('video.mp4', {
104
- * hardware: {
105
- * deviceType: AV_HWDEVICE_TYPE_CUDA,
106
- * device: '/dev/nvidia0'
107
- * },
108
- * onVideoPacket: (rtp) => sendToWebRTC(rtp)
109
- * });
110
- *
111
- * await stream.start();
112
- * stream.stop();
113
- * stream.dispose();
114
- * ```
115
- *
116
- * @see {@link WebRTCSession} For complete WebRTC session management with werift
117
- * @see {@link MediaInput} For input media handling
118
- * @see {@link HardwareContext} For GPU acceleration
119
- */
120
- export class WebRTCStream {
121
- input;
122
- codecInfo;
123
- options;
124
- videoOutput = null;
125
- audioOutput = null;
126
- hardwareContext = null;
127
- videoDecoder = null;
128
- videoEncoder = null;
129
- audioDecoder = null;
130
- audioFilter = null;
131
- audioEncoder = null;
132
- streamActive = false;
133
- disposed = false;
134
- /**
135
- * @param input - Media input source
136
- *
137
- * @param options - Stream configuration options
138
- *
139
- * Use {@link create} factory method
140
- *
141
- * @internal
142
- */
143
- constructor(input, options) {
144
- this.input = input;
145
- const videoStream = input.video();
146
- const audioStream = input.audio();
147
- const videoCodecId = videoStream.codecpar.codecId;
148
- const audioCodecId = audioStream?.codecpar.codecId ?? null;
149
- // We transcode unsupported codecs to H264
150
- const videoConfig = this.getVideoCodecConfig(videoCodecId) ?? this.getVideoCodecConfig(AV_CODEC_ID_H264);
151
- this.codecInfo = {
152
- video: {
153
- codecId: videoCodecId,
154
- ...videoConfig,
155
- },
156
- };
157
- if (audioCodecId !== null) {
158
- // We transcode unsupported codecs to OPUS
159
- const audioConfig = this.getAudioCodecConfig(audioCodecId) ?? this.getAudioCodecConfig(AV_CODEC_ID_OPUS);
160
- this.codecInfo.audio = {
161
- codecId: audioCodecId,
162
- ...audioConfig,
163
- };
164
- }
165
- this.options = {
166
- onVideoPacket: options.onVideoPacket ?? (() => { }),
167
- onAudioPacket: options.onAudioPacket ?? (() => { }),
168
- mtu: options.mtu ?? 1200,
169
- hardware: options.hardware ?? { deviceType: AV_HWDEVICE_TYPE_NONE },
170
- inputOptions: options.inputOptions,
171
- };
172
- }
173
- /**
174
- * Create a WebRTC stream from a media source.
175
- *
176
- * Opens the input media, detects video and audio codecs, and prepares
177
- * transcoding pipelines for non-WebRTC-compatible formats.
178
- * Automatically configures H.264 encoding for unsupported video codecs
179
- * and Opus encoding for unsupported audio codecs.
180
- *
181
- * @param inputUrl - Media source URL (RTSP, file path, HTTP, etc.)
182
- *
183
- * @param options - Stream configuration options
184
- *
185
- * @returns Configured WebRTC stream instance
186
- *
187
- * @throws {Error} If no video stream found in input
188
- *
189
- * @throws {FFmpegError} If input cannot be opened
190
- *
191
- * @example
192
- * ```typescript
193
- * // Stream from RTSP camera
194
- * const stream = await WebRTCStream.create('rtsp://camera.local/stream', {
195
- * mtu: 1200,
196
- * onVideoPacket: (rtp) => videoTrack.writeRtp(rtp),
197
- * onAudioPacket: (rtp) => audioTrack.writeRtp(rtp)
198
- * });
199
- * ```
200
- *
201
- * @example
202
- * ```typescript
203
- * // Stream file with auto hardware acceleration
204
- * const stream = await WebRTCStream.create('video.mp4', {
205
- * hardware: 'auto'
206
- * });
207
- * ```
208
- */
209
- static async create(inputUrl, options = {}) {
210
- const isRtsp = inputUrl.toLowerCase().startsWith('rtsp');
211
- options.inputOptions = options.inputOptions ?? {};
212
- options.inputOptions = {
213
- flags: 'low_delay',
214
- rtsp_transport: isRtsp ? 'tcp' : undefined,
215
- ...options.inputOptions,
216
- };
217
- const input = await MediaInput.open(inputUrl, {
218
- options: options.inputOptions,
219
- });
220
- const videoStream = input.video();
221
- if (!videoStream) {
222
- throw new Error('No video stream found in input');
223
- }
224
- return new WebRTCStream(input, options);
225
- }
226
- /**
227
- * Check if the stream is active.
228
- *
229
- * @returns True if the stream is active, false otherwise
230
- */
231
- get isStreamActive() {
232
- return this.streamActive;
233
- }
234
- /**
235
- * Get detected codec information for SDP negotiation.
236
- *
237
- * Returns RTP codec parameters and FFmpeg codec IDs for video and audio.
238
- * Use this information to configure WebRTC peer connections with matching codecs.
239
- *
240
- * @returns Codec configuration for video and audio streams
241
- *
242
- * @example
243
- * ```typescript
244
- * const stream = await WebRTCStream.create('input.mp4');
245
- * const codecs = stream.getCodecs();
246
- *
247
- * console.log('Video codec:', codecs.video.mimeType);
248
- * console.log('Audio codec:', codecs.audio?.mimeType);
249
- * ```
250
- */
251
- getCodecs() {
252
- return this.codecInfo;
253
- }
254
- /**
255
- * Start streaming media to RTP packets.
256
- *
257
- * Begins the media processing pipeline, reading packets from input,
258
- * transcoding if necessary, and invoking RTP packet callbacks.
259
- * Automatically handles video and audio streams in parallel.
260
- * Flushes all buffers at the end of stream.
261
- * This method blocks until streaming completes or {@link stop} is called.
262
- *
263
- * @returns Promise that resolves when streaming completes
264
- *
265
- * @throws {FFmpegError} If transcoding or muxing fails
266
- *
267
- * @example
268
- * ```typescript
269
- * const stream = await WebRTCStream.create('rtsp://camera.local/stream', {
270
- * onVideoPacket: (rtp) => sendRtp(rtp)
271
- * });
272
- *
273
- * // Start streaming (blocks until complete or stopped)
274
- * await stream.start();
275
- * ```
276
- *
277
- * @example
278
- * ```typescript
279
- * // Non-blocking start with background promise
280
- * const stream = await WebRTCStream.create('input.mp4');
281
- * const streamPromise = stream.start();
282
- *
283
- * // Later: stop streaming
284
- * stream.stop();
285
- * await streamPromise;
286
- * ```
287
- */
288
- async start() {
289
- if (this.streamActive) {
290
- return;
291
- }
292
- this.streamActive = true;
293
- const videoStream = this.input.video();
294
- const audioStream = this.input.audio();
295
- // Setup video transcoding if needed
296
- if (!this.isVideoCodecSupported(videoStream.codecpar.codecId)) {
297
- if (this.options.hardware === 'auto') {
298
- this.hardwareContext = HardwareContext.auto();
299
- }
300
- else if (this.options.hardware.deviceType !== AV_HWDEVICE_TYPE_NONE) {
301
- this.hardwareContext = HardwareContext.create(this.options.hardware.deviceType, this.options.hardware.device, this.options.hardware.options);
302
- }
303
- this.videoDecoder = await Decoder.create(videoStream, {
304
- exitOnError: false,
305
- hardware: this.hardwareContext,
306
- });
307
- const encoderCodec = this.hardwareContext?.getEncoderCodec('h264') ?? FF_ENCODER_LIBX264;
308
- const encoderOptions = {};
309
- if (encoderCodec === FF_ENCODER_LIBX264) {
310
- encoderOptions.preset = 'ultrafast';
311
- encoderOptions.tune = 'zerolatency';
312
- }
313
- this.videoEncoder = await Encoder.create(encoderCodec, {
314
- timeBase: videoStream.timeBase,
315
- frameRate: videoStream.avgFrameRate,
316
- maxBFrames: 0,
317
- options: encoderOptions,
318
- });
319
- }
320
- // Setup video output
321
- this.videoOutput = await MediaOutput.open({
322
- write: (buffer) => {
323
- this.options.onVideoPacket(RtpPacket.deSerialize(buffer));
324
- return buffer.length;
325
- },
326
- }, {
327
- format: 'rtp',
328
- bufferSize: this.options.mtu,
329
- options: {
330
- pkt_size: this.options.mtu,
331
- },
332
- });
333
- const videoStreamIndex = this.videoEncoder ? this.videoOutput.addStream(this.videoEncoder) : this.videoOutput.addStream(videoStream);
334
- // Setup audio if available
335
- let audioStreamIndex = null;
336
- if (audioStream) {
337
- if (!this.isAudioCodecSupported(audioStream.codecpar.codecId)) {
338
- this.audioDecoder = await Decoder.create(audioStream, {
339
- exitOnError: false,
340
- });
341
- const targetSampleRate = 48000;
342
- const filterChain = FilterPreset.chain().aformat(AV_SAMPLE_FMT_S16, targetSampleRate, 'stereo').asetnsamples(960).build();
343
- this.audioFilter = FilterAPI.create(filterChain, {
344
- timeBase: audioStream.timeBase,
345
- });
346
- this.audioEncoder = await Encoder.create(FF_ENCODER_LIBOPUS, {
347
- timeBase: { num: 1, den: targetSampleRate },
348
- options: {
349
- application: 'lowdelay',
350
- frame_duration: 20,
351
- },
352
- });
353
- }
354
- this.audioOutput = await MediaOutput.open({
355
- write: (buffer) => {
356
- this.options.onAudioPacket(RtpPacket.deSerialize(buffer));
357
- return buffer.length;
358
- },
359
- }, {
360
- format: 'rtp',
361
- bufferSize: this.options.mtu,
362
- options: {
363
- pkt_size: this.options.mtu,
364
- },
365
- });
366
- audioStreamIndex = this.audioEncoder ? this.audioOutput.addStream(this.audioEncoder) : this.audioOutput.addStream(audioStream);
367
- }
368
- const hasAudio = audioStreamIndex !== null && this.audioOutput !== null && audioStream !== undefined;
369
- // Start processing loop
370
- for await (const packet_1 of this.input.packets()) {
371
- const env_1 = { stack: [], error: void 0, hasError: false };
372
- try {
373
- const packet = __addDisposableResource(env_1, packet_1, false);
374
- if (!this.streamActive) {
375
- break;
376
- }
377
- if (packet.streamIndex === videoStream.index) {
378
- if (this.videoDecoder === null || this.videoEncoder === null) {
379
- await this.videoOutput.writePacket(packet, videoStreamIndex);
380
- }
381
- else {
382
- const env_2 = { stack: [], error: void 0, hasError: false };
383
- try {
384
- const decodedFrame = __addDisposableResource(env_2, await this.videoDecoder.decode(packet), false);
385
- if (!decodedFrame) {
386
- continue;
387
- }
388
- const encodedPacket = __addDisposableResource(env_2, await this.videoEncoder.encode(decodedFrame), false);
389
- if (!encodedPacket) {
390
- continue;
391
- }
392
- await this.videoOutput.writePacket(encodedPacket, videoStreamIndex);
393
- }
394
- catch (e_1) {
395
- env_2.error = e_1;
396
- env_2.hasError = true;
397
- }
398
- finally {
399
- __disposeResources(env_2);
400
- }
401
- }
402
- }
403
- else if (hasAudio && packet.streamIndex === audioStream.index) {
404
- if (this.audioDecoder === null || this.audioFilter === null || this.audioEncoder === null) {
405
- await this.audioOutput?.writePacket(packet, audioStreamIndex);
406
- continue;
407
- }
408
- else {
409
- const env_3 = { stack: [], error: void 0, hasError: false };
410
- try {
411
- const decodedFrame = __addDisposableResource(env_3, await this.audioDecoder.decode(packet), false);
412
- if (!decodedFrame) {
413
- continue;
414
- }
415
- const filteredFrame = __addDisposableResource(env_3, await this.audioFilter.process(decodedFrame), false);
416
- if (!filteredFrame) {
417
- continue;
418
- }
419
- const encodedPacket = __addDisposableResource(env_3, await this.audioEncoder.encode(filteredFrame), false);
420
- if (!encodedPacket) {
421
- continue;
422
- }
423
- await this.audioOutput?.writePacket(encodedPacket, audioStreamIndex);
424
- }
425
- catch (e_2) {
426
- env_3.error = e_2;
427
- env_3.hasError = true;
428
- }
429
- finally {
430
- __disposeResources(env_3);
431
- }
432
- }
433
- }
434
- }
435
- catch (e_3) {
436
- env_1.error = e_3;
437
- env_1.hasError = true;
438
- }
439
- finally {
440
- __disposeResources(env_1);
441
- }
442
- }
443
- // Flush pipelines
444
- await Promise.allSettled([this.flushVideo(videoStreamIndex), this.flushAudio(audioStreamIndex, hasAudio)]);
445
- }
446
- /**
447
- * Stop streaming gracefully.
448
- *
449
- * Signals the streaming loop to exit after the current packet is processed.
450
- * Does not immediately close resources - use {@link dispose} for cleanup.
451
- * Safe to call multiple times.
452
- *
453
- * @example
454
- * ```typescript
455
- * const stream = await WebRTCStream.create('input.mp4');
456
- * const streamPromise = stream.start();
457
- *
458
- * // Stop after 10 seconds
459
- * setTimeout(() => stream.stop(), 10000);
460
- *
461
- * await streamPromise; // Resolves when stopped
462
- * stream.dispose();
463
- * ```
464
- */
465
- stop() {
466
- this.streamActive = false;
467
- }
468
- /**
469
- * Clean up all resources and close the stream.
470
- *
471
- * Stops streaming if active and releases all FFmpeg resources including
472
- * decoders, encoders, filters, outputs, and input. Should be called when
473
- * done with the stream to prevent memory leaks.
474
- * Safe to call multiple times.
475
- *
476
- * @example
477
- * ```typescript
478
- * const stream = await WebRTCStream.create('input.mp4');
479
- * await stream.start();
480
- * stream.dispose();
481
- * ```
482
- *
483
- * @example
484
- * ```typescript
485
- * // Using automatic cleanup
486
- * {
487
- * await using stream = await WebRTCStream.create('input.mp4');
488
- * await stream.start();
489
- * } // Automatically disposed
490
- * ```
491
- */
492
- dispose() {
493
- if (this.disposed) {
494
- return;
495
- }
496
- this.disposed = true;
497
- this.stop();
498
- this.videoOutput?.close();
499
- this.audioOutput?.close();
500
- this.videoDecoder?.close();
501
- this.videoEncoder?.close();
502
- this.audioDecoder?.close();
503
- this.audioFilter?.close();
504
- this.audioEncoder?.close();
505
- this.input.close();
506
- }
507
- /**
508
- * Check if the given audio codec is compatible with WebRTC.
509
- *
510
- * @param codecId - The AVCodecID to check
511
- *
512
- * @returns True if the codec is WebRTC compatible, false otherwise
513
- *
514
- * @internal
515
- */
516
- isAudioCodecSupported(codecId) {
517
- switch (codecId) {
518
- case AV_CODEC_ID_PCM_ALAW:
519
- case AV_CODEC_ID_PCM_MULAW:
520
- case AV_CODEC_ID_OPUS:
521
- return true;
522
- default:
523
- return false;
524
- }
525
- }
526
- /**
527
- * Check if the given video codec is compatible with WebRTC.
528
- *
529
- * @param codecId - The AVCodecID to check
530
- *
531
- * @returns True if the codec is WebRTC compatible, false otherwise
532
- *
533
- * @internal
534
- */
535
- isVideoCodecSupported(codecId) {
536
- switch (codecId) {
537
- case AV_CODEC_ID_H264:
538
- case AV_CODEC_ID_HEVC:
539
- case AV_CODEC_ID_VP8:
540
- case AV_CODEC_ID_VP9:
541
- case AV_CODEC_ID_AV1:
542
- return true;
543
- default:
544
- return false;
545
- }
546
- }
547
- /**
548
- * Get the audio codec configuration for WebRTC.
549
- *
550
- * @param codecId - The AVCodecID of the audio codec
551
- *
552
- * @returns An object containing MIME type, clock rate, and channels, or null if unsupported
553
- *
554
- * @internal
555
- */
556
- getAudioCodecConfig(codecId) {
557
- switch (codecId) {
558
- case AV_CODEC_ID_OPUS:
559
- return {
560
- mimeType: 'audio/opus',
561
- clockRate: 48000,
562
- channels: 2,
563
- payloadType: 111,
564
- };
565
- case AV_CODEC_ID_PCM_MULAW:
566
- return {
567
- mimeType: 'audio/PCMU',
568
- clockRate: 8000,
569
- channels: 1,
570
- payloadType: 0,
571
- };
572
- case AV_CODEC_ID_PCM_ALAW:
573
- return {
574
- mimeType: 'audio/PCMA',
575
- clockRate: 8000,
576
- channels: 1,
577
- payloadType: 8,
578
- };
579
- default:
580
- return null;
581
- }
582
- }
583
- /**
584
- * Get the video codec configuration for WebRTC.
585
- *
586
- * @param codecId - The AVCodecID of the video codec
587
- *
588
- * @returns An object containing MIME type and clock rate, or null if unsupported
589
- *
590
- * @internal
591
- */
592
- getVideoCodecConfig(codecId) {
593
- switch (codecId) {
594
- case AV_CODEC_ID_H264:
595
- return {
596
- mimeType: 'video/H264',
597
- clockRate: 90000,
598
- payloadType: 102,
599
- };
600
- case AV_CODEC_ID_HEVC:
601
- return {
602
- mimeType: 'video/H265',
603
- clockRate: 90000,
604
- payloadType: 103,
605
- };
606
- case AV_CODEC_ID_VP8:
607
- return {
608
- mimeType: 'video/VP8',
609
- clockRate: 90000,
610
- payloadType: 96,
611
- };
612
- case AV_CODEC_ID_VP9:
613
- return {
614
- mimeType: 'video/VP9',
615
- clockRate: 90000,
616
- payloadType: 98,
617
- };
618
- case AV_CODEC_ID_AV1:
619
- return {
620
- mimeType: 'video/AV1',
621
- clockRate: 90000,
622
- payloadType: 35,
623
- };
624
- default:
625
- return null;
626
- }
627
- }
628
- /**
629
- * Flush video encoder pipeline.
630
- *
631
- * @param videoStreamIndex - Output video stream index
632
- *
633
- * @internal
634
- */
635
- async flushVideo(videoStreamIndex) {
636
- if (!this.videoDecoder || !this.videoEncoder || !this.videoOutput) {
637
- return;
638
- }
639
- for await (const frame_1 of this.videoDecoder.flushFrames()) {
640
- const env_4 = { stack: [], error: void 0, hasError: false };
641
- try {
642
- const frame = __addDisposableResource(env_4, frame_1, false);
643
- const encodedPacket = __addDisposableResource(env_4, await this.videoEncoder.encode(frame), false);
644
- if (encodedPacket) {
645
- await this.videoOutput.writePacket(encodedPacket, videoStreamIndex);
646
- }
647
- }
648
- catch (e_4) {
649
- env_4.error = e_4;
650
- env_4.hasError = true;
651
- }
652
- finally {
653
- __disposeResources(env_4);
654
- }
655
- }
656
- for await (const packet_2 of this.videoEncoder.flushPackets()) {
657
- const env_5 = { stack: [], error: void 0, hasError: false };
658
- try {
659
- const packet = __addDisposableResource(env_5, packet_2, false);
660
- await this.videoOutput.writePacket(packet, videoStreamIndex);
661
- }
662
- catch (e_5) {
663
- env_5.error = e_5;
664
- env_5.hasError = true;
665
- }
666
- finally {
667
- __disposeResources(env_5);
668
- }
669
- }
670
- }
671
- /**
672
- * Flush audio encoder pipeline.
673
- *
674
- * @param audioStreamIndex - Output audio stream index
675
- *
676
- * @param hasAudio - Whether audio stream exists
677
- *
678
- * @internal
679
- */
680
- async flushAudio(audioStreamIndex, hasAudio) {
681
- if (!this.audioDecoder || !this.audioFilter || !this.audioEncoder || !hasAudio || !this.audioOutput) {
682
- return;
683
- }
684
- for await (const frame_2 of this.audioDecoder.flushFrames()) {
685
- const env_6 = { stack: [], error: void 0, hasError: false };
686
- try {
687
- const frame = __addDisposableResource(env_6, frame_2, false);
688
- const filteredFrame = __addDisposableResource(env_6, await this.audioFilter.process(frame), false);
689
- if (!filteredFrame) {
690
- continue;
691
- }
692
- const encodedPacket = __addDisposableResource(env_6, await this.audioEncoder.encode(filteredFrame), false);
693
- if (encodedPacket) {
694
- await this.audioOutput.writePacket(encodedPacket, audioStreamIndex);
695
- }
696
- }
697
- catch (e_6) {
698
- env_6.error = e_6;
699
- env_6.hasError = true;
700
- }
701
- finally {
702
- __disposeResources(env_6);
703
- }
704
- }
705
- for await (const frame_3 of this.audioFilter.flushFrames()) {
706
- const env_7 = { stack: [], error: void 0, hasError: false };
707
- try {
708
- const frame = __addDisposableResource(env_7, frame_3, false);
709
- const encodedPacket = __addDisposableResource(env_7, await this.audioEncoder.encode(frame), false);
710
- if (encodedPacket) {
711
- await this.audioOutput.writePacket(encodedPacket, audioStreamIndex);
712
- }
713
- }
714
- catch (e_7) {
715
- env_7.error = e_7;
716
- env_7.hasError = true;
717
- }
718
- finally {
719
- __disposeResources(env_7);
720
- }
721
- }
722
- for await (const packet_3 of this.audioEncoder.flushPackets()) {
723
- const env_8 = { stack: [], error: void 0, hasError: false };
724
- try {
725
- const packet = __addDisposableResource(env_8, packet_3, false);
726
- await this.audioOutput.writePacket(packet, audioStreamIndex);
727
- }
728
- catch (e_8) {
729
- env_8.error = e_8;
730
- env_8.hasError = true;
731
- }
732
- finally {
733
- __disposeResources(env_8);
734
- }
735
- }
736
- }
737
- /**
738
- * Symbol.dispose implementation for automatic cleanup.
739
- *
740
- * @internal
741
- */
742
- [Symbol.dispose]() {
743
- this.dispose();
744
- }
745
- }
746
- /**
747
- * Complete WebRTC session management with werift integration.
748
- *
749
- * Provides end-to-end WebRTC streaming with automatic SDP negotiation,
750
- * ICE candidate handling, and peer connection management.
751
- * Built on top of {@link WebRTCStream} but handles all WebRTC protocol details.
752
- * Integrates with werift library for RTCPeerConnection and media track handling.
753
- * Ideal for building complete WebRTC streaming applications with minimal code.
754
- *
755
- * @example
756
- * ```typescript
757
- * import { WebRTCSession } from 'node-av/api';
758
- *
759
- * // Create session from media source
760
- * const session = await WebRTCSession.create('rtsp://camera.local/stream', {
761
- * mtu: 1200,
762
- * hardware: 'auto',
763
- * iceServers: [{ urls: 'stun:stun.l.google.com:19302' }]
764
- * });
765
- *
766
- * // Setup ICE candidate handler
767
- * session.onIceCandidate = (candidate) => {
768
- * sendToClient({ type: 'candidate', value: candidate });
769
- * };
770
- *
771
- * // Process SDP offer from client
772
- * const answer = await session.setOffer(clientOffer);
773
- * sendToClient({ type: 'answer', value: answer });
774
- *
775
- * // Start streaming
776
- * await session.start();
777
- * ```
778
- *
779
- * @example
780
- * ```typescript
781
- * // Complete WebSocket signaling server
782
- * import { WebSocket } from 'ws';
783
- *
784
- * ws.on('message', async (data) => {
785
- * const msg = JSON.parse(data);
786
- *
787
- * if (msg.type === 'offer') {
788
- * const session = await WebRTCSession.create(msg.url, {
789
- * hardware: 'auto'
790
- * });
791
- *
792
- * session.onIceCandidate = (candidate) => {
793
- * ws.send(JSON.stringify({ type: 'candidate', value: candidate }));
794
- * };
795
- *
796
- * const answer = await session.setOffer(msg.value);
797
- * ws.send(JSON.stringify({ type: 'answer', value: answer }));
798
- *
799
- * await session.start();
800
- * } else if (msg.type === 'candidate') {
801
- * session.addIceCandidate(msg.value);
802
- * }
803
- * });
804
- * ```
805
- *
806
- * @see {@link WebRTCStream} For library-agnostic RTP streaming
807
- * @see {@link MediaInput} For input media handling
808
- * @see {@link HardwareContext} For GPU acceleration
809
- */
810
- export class WebRTCSession {
811
- stream;
812
- pc = null;
813
- videoTrack = null;
814
- audioTrack = null;
815
- options;
816
- /**
817
- * Callback invoked when a new ICE candidate is discovered.
818
- * Send this candidate to the remote peer via signaling channel.
819
- *
820
- * @param candidate - ICE candidate string to send to remote peer
821
- *
822
- * @example
823
- * ```typescript
824
- * session.onIceCandidate = (candidate) => {
825
- * ws.send(JSON.stringify({ type: 'candidate', value: candidate }));
826
- * };
827
- * ```
828
- */
829
- onIceCandidate = null;
830
- /**
831
- * @param options - Session configuration options
832
- *
833
- * Use {@link create} factory method
834
- *
835
- * @internal
836
- */
837
- constructor(options) {
838
- this.options = options;
839
- }
840
- /**
841
- * Create a WebRTC session from a media source.
842
- *
843
- * Opens the input media, creates internal streaming components, and prepares
844
- * for WebRTC peer connection negotiation. Does not start streaming yet.
845
- * Call {@link setOffer} to negotiate SDP and {@link start} to begin streaming.
846
- *
847
- * @param inputUrl - Media source URL (RTSP, file path, HTTP, etc.)
848
- *
849
- * @param options - Session configuration options
850
- *
851
- * @returns Configured WebRTC session instance
852
- *
853
- * @throws {Error} If no video stream found in input
854
- *
855
- * @throws {FFmpegError} If input cannot be opened
856
- *
857
- * @example
858
- * ```typescript
859
- * const session = await WebRTCSession.create('rtsp://camera.local/stream', {
860
- * mtu: 1200,
861
- * hardware: 'auto',
862
- * iceServers: [{ urls: 'stun:stun.l.google.com:19302' }]
863
- * });
864
- * ```
865
- *
866
- * @example
867
- * ```typescript
868
- * // Session from file with hardware acceleration
869
- * const session = await WebRTCSession.create('video.mp4', {
870
- * hardware: {
871
- * deviceType: AV_HWDEVICE_TYPE_CUDA
872
- * }
873
- * });
874
- * ```
875
- */
876
- static async create(inputUrl, options = {}) {
877
- const session = new WebRTCSession(options);
878
- // Create stream to detect codecs
879
- session.stream = await WebRTCStream.create(inputUrl, {
880
- mtu: options.mtu,
881
- hardware: options.hardware,
882
- onVideoPacket: (rtp) => {
883
- session.videoTrack?.writeRtp(rtp);
884
- },
885
- onAudioPacket: (rtp) => {
886
- session.audioTrack?.writeRtp(rtp);
887
- },
888
- });
889
- return session;
890
- }
891
- /**
892
- * Get detected codec information.
893
- *
894
- * Returns RTP codec parameters and FFmpeg codec IDs for video and audio.
895
- * Useful for inspecting what codecs will be used in the WebRTC session.
896
- *
897
- * @returns Codec configuration for video and audio streams
898
- *
899
- * @example
900
- * ```typescript
901
- * const session = await WebRTCSession.create('input.mp4');
902
- * const codecs = session.getCodecs();
903
- *
904
- * console.log('Video:', codecs.video.mimeType);
905
- * console.log('Audio:', codecs.audio?.mimeType);
906
- * ```
907
- */
908
- getCodecs() {
909
- return this.stream.getCodecs();
910
- }
911
- /**
912
- * Process SDP offer from remote peer and generate SDP answer.
913
- *
914
- * Creates RTCPeerConnection with detected codecs, sets up media tracks,
915
- * processes the remote SDP offer, and generates a local SDP answer.
916
- * Also configures ICE candidate handling via {@link onIceCandidate} callback.
917
- * Must be called before {@link start}.
918
- *
919
- * @param offerSdp - SDP offer string from remote WebRTC peer
920
- *
921
- * @returns SDP answer string to send back to remote peer
922
- *
923
- * @example
924
- * ```typescript
925
- * const session = await WebRTCSession.create('input.mp4');
926
- *
927
- * // Setup ICE candidate handler first
928
- * session.onIceCandidate = (candidate) => {
929
- * sendToRemote({ type: 'candidate', value: candidate });
930
- * };
931
- *
932
- * // Process offer and send answer
933
- * const answer = await session.setOffer(remoteOffer);
934
- * sendToRemote({ type: 'answer', value: answer });
935
- * ```
936
- */
937
- async setOffer(offerSdp) {
938
- const codecs = this.stream.getCodecs();
939
- const videoConfig = codecs.video;
940
- delete videoConfig.codecId;
941
- const audioConfig = codecs.audio ?? {
942
- mimeType: 'audio/opus',
943
- clockRate: 48000,
944
- channels: 2,
945
- payloadType: 111,
946
- };
947
- delete audioConfig.codecId;
948
- // Create PeerConnection with detected codecs
949
- const codecParams = {
950
- video: [
951
- new RTCRtpCodecParameters({
952
- ...videoConfig,
953
- }),
954
- ],
955
- audio: [
956
- new RTCRtpCodecParameters({
957
- ...audioConfig,
958
- }),
959
- ],
960
- };
961
- this.pc = new RTCPeerConnection({
962
- codecs: codecParams,
963
- iceServers: this.options.iceServers,
964
- });
965
- // Setup ICE candidate handling
966
- this.pc.onIceCandidate.subscribe((candidate) => {
967
- if (candidate?.candidate && this.onIceCandidate) {
968
- this.onIceCandidate(candidate.candidate);
969
- }
970
- });
971
- // Setup tracks
972
- this.pc.onRemoteTransceiverAdded.subscribe(async (transceiver) => {
973
- if (transceiver.kind === 'video') {
974
- this.videoTrack = new MediaStreamTrack({ kind: 'video' });
975
- transceiver.sender.replaceTrack(this.videoTrack);
976
- transceiver.setDirection('sendonly');
977
- }
978
- else if (transceiver.kind === 'audio' && this.audioTrack === null) {
979
- this.audioTrack = new MediaStreamTrack({ kind: 'audio' });
980
- transceiver.sender.replaceTrack(this.audioTrack);
981
- transceiver.setDirection('sendonly');
982
- }
983
- else if (transceiver.kind === 'audio') {
984
- // Backchannel
985
- const [track] = await transceiver.onTrack.asPromise();
986
- const ctx = this.stream.input.getFormatContext();
987
- const streams = ctx?.getRTSPStreamInfo();
988
- const backchannel = streams?.find((s) => s.direction === 'sendonly');
989
- track.onReceiveRtp.subscribe(async (rtp) => {
990
- if (backchannel && this.stream.isStreamActive) {
991
- try {
992
- await ctx?.sendRTSPPacket(backchannel.streamIndex, rtp.serialize());
993
- }
994
- catch {
995
- // Ignore send errors
996
- }
997
- }
998
- });
999
- }
1000
- });
1001
- // Set remote description and create answer
1002
- await this.pc.setRemoteDescription(new RTCSessionDescription(offerSdp, 'offer'));
1003
- const answer = await this.pc.createAnswer();
1004
- this.pc.setLocalDescription(answer);
1005
- return this.pc.localDescription?.sdp ?? '';
1006
- }
1007
- /**
1008
- * Add ICE candidate from remote peer.
1009
- *
1010
- * Processes ICE candidates received from the remote peer via signaling channel.
1011
- * Should be called whenever a new candidate message arrives from remote peer.
1012
- * Can be called multiple times as candidates are discovered.
1013
- *
1014
- * @param candidate - ICE candidate string from remote peer
1015
- *
1016
- * @throws {Error} If peer connection not initialized (call {@link setOffer} first)
1017
- *
1018
- * @example
1019
- * ```typescript
1020
- * // In signaling message handler
1021
- * if (msg.type === 'candidate') {
1022
- * session.addIceCandidate(msg.value);
1023
- * }
1024
- * ```
1025
- */
1026
- addIceCandidate(candidate) {
1027
- if (!this.pc) {
1028
- throw new Error('PeerConnection not initialized');
1029
- }
1030
- this.pc.addIceCandidate(new RTCIceCandidate({ candidate }));
1031
- }
1032
- /**
1033
- * Start streaming media to WebRTC peer connection.
1034
- *
1035
- * Begins the media processing pipeline, reading packets from input,
1036
- * transcoding if necessary, and sending RTP packets to media tracks.
1037
- * Must call {@link setOffer} before starting.
1038
- * This method blocks until streaming completes or {@link stop} is called.
1039
- *
1040
- * @returns Promise that resolves when streaming completes
1041
- *
1042
- * @throws {FFmpegError} If transcoding or muxing fails
1043
- *
1044
- * @example
1045
- * ```typescript
1046
- * const session = await WebRTCSession.create('input.mp4');
1047
- * session.onIceCandidate = (c) => sendToRemote(c);
1048
- *
1049
- * const answer = await session.setOffer(remoteOffer);
1050
- * sendToRemote(answer);
1051
- *
1052
- * // Start streaming (blocks until complete)
1053
- * await session.start();
1054
- * ```
1055
- *
1056
- * @example
1057
- * ```typescript
1058
- * // Non-blocking start
1059
- * const session = await WebRTCSession.create('input.mp4');
1060
- * const streamPromise = session.start();
1061
- *
1062
- * // Later: stop streaming
1063
- * session.stop();
1064
- * await streamPromise;
1065
- * ```
1066
- */
1067
- async start() {
1068
- await this.stream.start();
1069
- }
1070
- /**
1071
- * Stop streaming gracefully.
1072
- *
1073
- * Signals the streaming loop to exit after the current packet is processed.
1074
- * Does not immediately close resources - use {@link dispose} for cleanup.
1075
- * Safe to call multiple times.
1076
- *
1077
- * @example
1078
- * ```typescript
1079
- * const session = await WebRTCSession.create('input.mp4');
1080
- * const streamPromise = session.start();
1081
- *
1082
- * // Stop after 10 seconds
1083
- * setTimeout(() => session.stop(), 10000);
1084
- *
1085
- * await streamPromise;
1086
- * session.dispose();
1087
- * ```
1088
- */
1089
- stop() {
1090
- this.stream.stop();
1091
- }
1092
- /**
1093
- * Clean up all resources and close the session.
1094
- *
1095
- * Stops streaming if active, releases all FFmpeg resources, closes peer connection,
1096
- * and cleans up media tracks. Should be called when done with the session to prevent
1097
- * memory leaks. Safe to call multiple times.
1098
- *
1099
- * @example
1100
- * ```typescript
1101
- * const session = await WebRTCSession.create('input.mp4');
1102
- * await session.start();
1103
- * session.dispose();
1104
- * ```
1105
- *
1106
- * @example
1107
- * ```typescript
1108
- * // Using automatic cleanup
1109
- * {
1110
- * await using session = await WebRTCSession.create('input.mp4');
1111
- * await session.start();
1112
- * } // Automatically disposed
1113
- * ```
1114
- */
1115
- dispose() {
1116
- this.stop();
1117
- this.stream.dispose();
1118
- this.pc?.close();
1119
- this.videoTrack = null;
1120
- this.audioTrack = null;
1121
- this.pc = null;
1122
- }
1123
- /**
1124
- * Symbol.dispose implementation for automatic cleanup.
1125
- *
1126
- * @internal
1127
- */
1128
- [Symbol.dispose]() {
1129
- this.dispose();
1130
- }
1131
- }
1132
- //# sourceMappingURL=webrtc.js.map