@camstack/addon-webrtc-adaptive 0.1.7 → 0.1.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,378 @@
1
+ import { V as VideoCodec, L as Logger, F as FrameSource, a as VideoFrame, A as AudioFrame, E as EncodingParams } from './addon--i9xjbhN.cjs';
2
+ export { b as AdaptiveController, c as AdaptiveControllerOptions, d as AdaptiveFfmpegSource, e as AdaptiveFfmpegSourceOptions, f as AdaptiveSession, g as AdaptiveSessionOptions, h as AdaptiveStreamServer, i as AdaptiveStreamServerOptions, j as AudioCodec, k as AudioMode, C as CameraConfig, l as FrameSourceFactory, m as LoggerLike, M as MediaFrame, Q as QualityProfile, n as QualityTier, S as SessionInfo, o as SessionStats, p as StreamStats, W as WebrtcAdaptiveAddon, q as asLogger, r as createDefaultProfiles, s as createNullLogger } from './addon--i9xjbhN.cjs';
3
+ import { ChildProcess } from 'node:child_process';
4
+ import { EventEmitter } from 'node:events';
5
+ import '@camstack/types';
6
+
7
/**
 * NAL unit parsing utilities.
 * Protocol-agnostic functions for working with Annex-B NAL units.
 */

/**
 * Returns true if the buffer starts with an Annex-B start code (0x00000001 or 0x000001).
 *
 * @param data - Raw bitstream bytes; only the leading bytes at offset 0 are examined.
 * @returns true when the buffer begins with either start-code form.
 */
declare function hasStartCodes(data: Buffer): boolean;
16
/**
 * Split Annex-B data into individual NAL unit payloads (without start codes).
 * Works for both H.264 and H.265, since start-code framing is codec-agnostic.
 *
 * @param annexB - Start-code-delimited bitstream.
 * @returns One Buffer per NAL unit payload, start codes stripped.
 */
declare function splitAnnexBToNals(annexB: Buffer): Buffer[];
21
/**
 * Prepend a 4-byte Annex-B start code (0x00 0x00 0x00 0x01) to a NAL payload.
 *
 * @param nal - NAL payload without a start code.
 * @returns A Buffer beginning with the 4-byte start code followed by the payload.
 */
declare function prependStartCode(nal: Buffer): Buffer;
25
/**
 * Join multiple NAL payloads (without start codes) into an Annex-B access unit.
 * Null/undefined entries are accepted in the argument list (convenient for
 * optional parameter sets such as SPS/PPS).
 *
 * @returns The joined Annex-B access unit, or undefined — presumably when no
 *          usable NAL payload was supplied; verify against the implementation.
 */
declare function joinNalsToAnnexB(...nals: Array<Buffer | null | undefined>): Buffer | undefined;
29
/**
 * Detect the actual video codec from raw NAL data.
 * Some cameras report the wrong codec (e.g. "H264" but send H.265 data), so
 * this analyzes the NAL header bytes to determine the real codec.
 *
 * @param data - Raw video data (either Annex-B or length-prefixed)
 * @returns Detected codec type, or null if detection fails
 */
declare function detectVideoCodecFromNal(data: Buffer): VideoCodec | null;
38
+
39
/**
 * H.264/AVC utilities.
 * AVCC to Annex-B conversion, SPS/PPS extraction, keyframe detection, RTP depacketization.
 */
/**
 * Convert H.264 data from length-prefixed (AVCC) to Annex-B (start codes).
 * If the input is already Annex-B, it is returned as-is.
 *
 * @param data - H.264 access unit in either framing.
 * @returns The access unit with Annex-B start-code framing.
 */
declare function convertH264ToAnnexB(data: Buffer): Buffer;
48
/**
 * Check if an H.264 Annex-B access unit is a self-contained keyframe,
 * i.e. it carries an IDR slice together with SPS and PPS.
 */
declare function isH264KeyframeAnnexB(annexB: Buffer): boolean;
52
/**
 * Check if an H.264 Annex-B access unit contains an IDR slice
 * (parameter sets not required — contrast with isH264KeyframeAnnexB).
 */
declare function isH264IdrAccessUnit(annexB: Buffer): boolean;
56
/**
 * Extract SPS and PPS from an H.264 Annex-B access unit.
 *
 * @returns The parameter sets found; absent fields mean the corresponding NAL
 *          was not present. profileLevelId is the SDP-style profile-level-id
 *          string — presumably hex-encoded from the SPS bytes; verify against
 *          the implementation.
 */
declare function extractH264ParamSets(annexB: Buffer): {
    sps?: Buffer;
    pps?: Buffer;
    profileLevelId?: string;
};
64
/**
 * H.264 RTP depacketizer (RFC 6184).
 * Handles single NAL units, STAP-A aggregation, and FU-A/FU-B fragmentation.
 * Returns complete NAL units in Annex-B format (with start codes).
 *
 * Stateful: fragmented (FU) NALs are accumulated across push() calls.
 */
declare class H264RtpDepacketizer {
    private fuNalHeader;   // NAL header for the FU currently being reassembled
    private fuParts;       // accumulated FU fragments awaiting the end marker
    private static parseRtpPayload;
    /** Discard any partially reassembled fragmented NAL. */
    reset(): void;
    /**
     * Feed one RTP payload; returns zero or more completed NAL units in
     * Annex-B format. Incomplete fragments yield an empty array.
     */
    push(payload: Buffer): Buffer[];
}
76
+
77
/**
 * H.265/HEVC utilities.
 * HVCC to Annex-B conversion, VPS/SPS/PPS extraction, keyframe detection, RTP depacketization.
 */
/**
 * Convert H.265 data from length-prefixed (HVCC) to Annex-B (start codes).
 * If the input is already Annex-B, it is returned as-is.
 *
 * @param data - H.265 access unit in either framing.
 * @returns The access unit with Annex-B start-code framing.
 */
declare function convertH265ToAnnexB(data: Buffer): Buffer;
86
/**
 * Get the H.265 NAL unit type from a NAL payload (without start code).
 *
 * @param nalPayload - NAL unit bytes starting at the 2-byte HEVC NAL header.
 * @returns The nal_unit_type, or null — presumably when the payload is too
 *          short to carry a header; verify against the implementation.
 */
declare function getH265NalType(nalPayload: Buffer): number | null;
90
/**
 * Check if an H.265 NAL unit type is an IRAP (Intra Random Access Point) picture.
 * IRAP types: BLA (16-18), IDR (19-20), CRA (21).
 *
 * @param nalType - Value as returned by getH265NalType.
 */
declare function isH265Irap(nalType: number): boolean;
95
/**
 * Check if an H.265 Annex-B access unit is a self-contained keyframe,
 * i.e. an IRAP picture accompanied by VPS, SPS, and PPS.
 */
declare function isH265KeyframeAnnexB(annexB: Buffer): boolean;
99
/**
 * Check if an H.265 Annex-B access unit contains an IRAP picture
 * (parameter sets not required — contrast with isH265KeyframeAnnexB).
 */
declare function isH265IrapAccessUnit(annexB: Buffer): boolean;
103
/**
 * Extract VPS, SPS, and PPS from an H.265 Annex-B access unit.
 *
 * @returns The parameter sets found; absent fields mean the corresponding
 *          NAL was not present in the input.
 */
declare function extractH265ParamSets(annexB: Buffer): {
    vps?: Buffer;
    sps?: Buffer;
    pps?: Buffer;
};
111
/**
 * H.265 RTP depacketizer (RFC 7798).
 * Handles single NAL units, AP aggregation, and FU fragmentation.
 * Returns complete NAL units in Annex-B format (with start codes).
 *
 * Stateful: FU fragments are accumulated across push() calls.
 */
declare class H265RtpDepacketizer {
    private fuParts;   // accumulated fragments of the FU being reassembled
    private static parseRtpPayload;
    /** Discard any partially reassembled fragmented NAL. */
    reset(): void;
    /**
     * Feed one RTP payload; returns zero or more completed NAL units in
     * Annex-B format. Incomplete fragments yield an empty array.
     */
    push(payload: Buffer): Buffer[];
}
122
+
123
/**
 * Stream fan-out utilities.
 * Distribute a single source stream to multiple consumers with bounded queues.
 */
/**
 * Async bounded queue with push/pull interface.
 * When the queue is full, the oldest items are dropped.
 *
 * NOTE(review): the original doc called this "(tail-drop)", but tail-drop
 * conventionally means discarding the NEWEST (incoming) item, which
 * contradicts "oldest items are dropped" — confirm against the
 * implementation which end is actually discarded.
 */
declare class AsyncBoundedQueue<T> {
    private readonly maxItems;   // capacity bound set at construction
    private readonly queue;
    private waiting;             // presumably resolvers for consumers blocked in next()
    private closed;
    constructor(maxItems: number);
    /** Enqueue an item; at capacity, an existing item is evicted (see class note). */
    push(item: T): void;
    /** Close the queue; presumably pending/future next() calls then report done. */
    close(): void;
    /** Pull the next item, async-iterator style ({ value, done }). */
    next(): Promise<IteratorResult<T>>;
    /** True once close() has been called. */
    isClosed(): boolean;
    /** Number of items currently buffered. */
    size(): number;
}
143
interface StreamFanoutOptions<T> {
    /** Maximum items per subscriber queue before the overflow drop policy applies. */
    maxQueueItems: number;
    /** Factory that creates the shared source stream. */
    createSource: () => AsyncGenerator<T, void, unknown>;
    /** Optional callback invoked for each frame (e.g. for extracting metadata). */
    onFrame?: (frame: T) => void;
    /** Optional handler for errors raised while consuming the source. */
    onError?: (error: unknown) => void;
}
153
/**
 * Fan-out a single async generator source to multiple subscribers.
 * The source stream is started when start() is called and frames are
 * distributed to all active subscriber queues (bounded per subscriber by
 * StreamFanoutOptions.maxQueueItems).
 */
declare class StreamFanout<T> {
    private readonly opts;
    private readonly queues;     // per-subscriber queues — presumably keyed by subscribe() id
    private source;
    private running;
    private pumpPromise;         // completion handle of the background pump loop
    constructor(opts: StreamFanoutOptions<T>);
    /** Start pumping frames from the source to all subscribers. */
    start(): void;
    /**
     * Create a subscriber async generator.
     * Returns an async generator that yields frames from the shared source.
     * The generator terminates when the source ends or unsubscribe is called.
     */
    subscribe(id: string): AsyncGenerator<T, void, unknown>;
    /** Unsubscribe a specific subscriber. */
    unsubscribe(id: string): void;
    /** Stop the source and close all subscriber queues. */
    stop(): Promise<void>;
    /** Returns true if the fan-out is running. */
    isRunning(): boolean;
    /** Returns the number of active subscribers. */
    subscriberCount(): number;
}
182
+
183
/**
 * FFmpeg process lifecycle management.
 * Spawn and manage FFmpeg processes with proper cleanup.
 */

interface FfmpegProcessOptions {
    /** FFmpeg binary path (default: "ffmpeg") */
    ffmpegPath?: string;
    /** FFmpeg command-line arguments */
    args: string[];
    /** Logger instance */
    logger?: Logger;
    /** Label used to tag log messages */
    label?: string;
    /**
     * Additional stdio configuration beyond the standard three descriptors
     * (e.g. ["pipe"] to expose fd 3 as pipe:3 — see FfmpegProcess.getStdio).
     */
    extraStdio?: Array<"pipe" | "ignore" | "inherit">;
    /** Callback invoked when the process exits. */
    onExit?: (code: number | null, signal: string | null) => void;
    /** Callback for stderr output (where FFmpeg writes its log). */
    onStderr?: (data: string) => void;
}
204
/**
 * Managed FFmpeg process with proper lifecycle handling.
 */
declare class FfmpegProcess {
    private readonly options;
    private process;             // underlying ChildProcess (nullable — see getProcess())
    private killed;
    private readonly logger;
    private readonly label;
    constructor(options: FfmpegProcessOptions);
    /** Spawn the FFmpeg process. Returns its stdin writable stream, or null. */
    start(): NodeJS.WritableStream | null;
    /** Get a specific stdio stream by fd index (e.g. 3 for pipe:3). */
    getStdio(fd: number): NodeJS.ReadableStream | NodeJS.WritableStream | null;
    /** Get the underlying ChildProcess, or null if unavailable. */
    getProcess(): ChildProcess | null;
    /** Kill the FFmpeg process gracefully (SIGTERM, then SIGKILL after the timeout). */
    kill(timeoutMs?: number): Promise<void>;
    /** Check if the process is running. */
    isRunning(): boolean;
}
225
+
226
/**
 * Frame source adapters.
 * Convert various input patterns (EventEmitter, push callbacks) into FrameSource async generators.
 */

/**
 * Convert an EventEmitter-based source into a FrameSource async generator.
 *
 * @param emitter - The event emitter that fires video/audio frames
 * @param videoEvent - Event name for video frames (default: "videoFrame")
 * @param audioEvent - Event name for audio frames (default: "audioFrame")
 */
declare function fromEventEmitter(emitter: EventEmitter, videoEvent?: string, audioEvent?: string): FrameSource;
239
/**
 * Create a push-based frame source.
 *
 * @returns A FrameSource async generator, push functions to deliver video and
 *          audio frames into it, and a close() that ends the stream.
 */
declare function fromPushCallback(): {
    source: FrameSource;
    pushVideo: (frame: VideoFrame) => void;
    pushAudio: (frame: AudioFrame) => void;
    close: () => void;
};
249
/**
 * Create a FrameSource from a raw async generator of { audio, data, codec,
 * videoType, ... } frames. This is the frame shape produced by
 * reolink-baichuan-js's createNativeStream().
 *
 * @param native - Source generator; the `audio` flag presumably discriminates
 *                 audio frames from video frames — verify against the caller.
 */
declare function fromNativeStream(native: AsyncGenerator<{
    audio: boolean;
    data: Buffer;
    codec: string | null;          // codec name as reported by the camera, or null
    sampleRate: number | null;     // audio sample rate — presumably null for video frames
    microseconds: number | null;   // timestamp in microseconds, when available
    videoType?: string;            // NOTE(review): semantics not visible here — verify
    isKeyframe?: boolean;
}, void, unknown>): FrameSource;
262
+
263
/**
 * Adaptive RTSP relay — transcodes an RTSP source via ffmpeg and outputs
 * a local RTSP stream that can be registered in go2rtc for WebRTC delivery.
 *
 * This avoids werift's SRTP/DTLS issues by letting go2rtc handle the
 * browser-facing WebRTC connection while we control the transcoding.
 *
 * Pipeline: Camera RTSP → ffmpeg (adaptive params) → RTSP localhost → go2rtc → WHEP → Browser
 */

interface AdaptiveRtspRelayOptions {
    /** RTSP source URL (camera or upstream go2rtc). */
    rtspUrl: string;
    /** Local RTSP output URL (e.g. rtsp://127.0.0.1:8554/adaptive_ingresso_main). */
    rtspOutputUrl: string;
    /** Initial encoding parameters. */
    initialParams: EncodingParams;
    /** FFmpeg binary path (default: "ffmpeg"). */
    ffmpegPath?: string;
    /** Logger. */
    logger?: Logger;
    /** Label used to tag log messages. */
    label?: string;
}
287
/**
 * Runs one ffmpeg process that transcodes the source RTSP stream into a
 * local RTSP output with adaptive (hot-swappable) encoding parameters.
 */
declare class AdaptiveRtspRelay {
    private readonly rtspUrl;
    private readonly rtspOutputUrl;
    private readonly ffmpegPath;
    private readonly logger;
    private readonly label;
    private currentParams;       // most recently applied EncodingParams
    private proc;                // handle to the running ffmpeg process
    private closed;
    constructor(options: AdaptiveRtspRelayOptions);
    /** Current encoding parameters (read-only view). */
    getParams(): Readonly<EncodingParams>;
    /** Start the ffmpeg relay. */
    start(): void;
    /** Hot-swap encoding parameters by restarting ffmpeg. */
    updateParams(params: Partial<EncodingParams>): Promise<void>;
    /** Stop the relay. */
    stop(): Promise<void>;
    /** Check if ffmpeg is running. */
    isRunning(): boolean;
    private spawnFfmpeg;
    private killFfmpeg;
}
309
+
310
/**
 * SharedSession — single RTCPeerConnection with multiple dynamic video tracks.
 *
 * One connection per client. Tracks are added/removed via data channel messages.
 * Each track corresponds to one camera's ffmpeg source.
 *
 * Protocol (data channel JSON messages):
 *
 * Client → Server:
 *   { type: "addTrack", cameraName: string, trackId: string }
 *   { type: "removeTrack", trackId: string }
 *   { type: "answer", sdp: string }
 *
 * Server → Client:
 *   { type: "offer", sdp: string }
 *   { type: "trackReady", trackId: string, mid: string }
 *   { type: "trackRemoved", trackId: string }
 *   { type: "error", message: string }
 */

interface SharedSessionOptions {
    /** ICE configuration for the peer connection (STUN/TURN, ports, host addresses). */
    iceConfig?: {
        stunServers?: string[];
        turnServers?: Array<{
            urls: string;
            username?: string;
            credential?: string;
        }>;
        /** [min, max] UDP port range for ICE candidates. */
        portRange?: [number, number];
        additionalHostAddresses?: string[];
    };
    /** Called when the server needs to attach a camera source for a track. */
    onTrackRequested: (cameraName: string) => FrameSource | null;
    /** Called when a track is removed. */
    onTrackReleased?: (cameraName: string) => void;
    /** Called when intercom audio is received from the client for a camera. */
    onIntercomAudio?: (cameraName: string, data: Buffer) => void;
    logger: Logger;
}
349
/**
 * One WebRTC peer connection per client; video tracks are added and removed
 * dynamically through a data-channel JSON protocol (addTrack / removeTrack /
 * answer from the client; offer / trackReady / trackRemoved / error from the
 * server).
 */
declare class SharedSession {
    private readonly options;
    private readonly logger;
    private readonly iceConfig;
    private readonly onTrackRequested;
    private readonly onTrackReleased;
    private pc;                     // the underlying RTCPeerConnection
    private dataChannel;
    private readonly activeTracks;
    private closed;
    private negotiating;            // guards against overlapping renegotiations
    constructor(options: SharedSessionOptions);
    /** Create the initial SDP offer (with data channel, no media tracks yet). */
    createOffer(): Promise<string>;
    /** Handle the client's SDP answer. */
    handleAnswer(sdpAnswer: string): Promise<void>;
    /** Close the shared session and all tracks. */
    close(): Promise<void>;
    private handleDataChannelMessage;
    private handleAddTrack;
    private handleRemoveTrack;
    private handleRenegotiationAnswer;
    private renegotiate;
    private startFeeding;           // pumps a camera FrameSource into a track
    private sendDC;                 // send a JSON message over the data channel
    /** True while the peer connection is usable. */
    get isConnected(): boolean;
    /** Number of currently active tracks. */
    get trackCount(): number;
}
377
+
378
// Public API surface of the adaptive WebRTC addon: NAL/codec utilities,
// RTP depacketizers, fan-out plumbing, ffmpeg lifecycle, relay, and sessions.
export { AdaptiveRtspRelay, type AdaptiveRtspRelayOptions, AsyncBoundedQueue, AudioFrame, EncodingParams, FfmpegProcess, type FfmpegProcessOptions, FrameSource, H264RtpDepacketizer, H265RtpDepacketizer, SharedSession, type SharedSessionOptions, StreamFanout, type StreamFanoutOptions, VideoCodec, Logger as WebRtcLogger, VideoFrame as WebRtcVideoFrame, convertH264ToAnnexB, convertH265ToAnnexB, detectVideoCodecFromNal, extractH264ParamSets, extractH265ParamSets, fromEventEmitter, fromNativeStream, fromPushCallback, getH265NalType, hasStartCodes, isH264IdrAccessUnit, isH264KeyframeAnnexB, isH265Irap, isH265IrapAccessUnit, isH265KeyframeAnnexB, joinNalsToAnnexB, prependStartCode, splitAnnexBToNals };