@apocaliss92/nodelink-js 0.1.7 → 0.1.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries; it is provided for informational purposes only and reflects the packages as they appear in those public registries.
package/dist/index.js CHANGED
@@ -37,7 +37,7 @@ import {
37
37
  normalizeUid,
38
38
  parseSupportXml,
39
39
  setGlobalLogger
40
- } from "./chunk-JMT75JNG.js";
40
+ } from "./chunk-YUBYINJF.js";
41
41
  import {
42
42
  AesStreamDecryptor,
43
43
  BC_AES_IV,
@@ -203,7 +203,365 @@ import {
203
203
  testChannelStreams,
204
204
  xmlEscape,
205
205
  zipDirectory
206
- } from "./chunk-MC2BRLLE.js";
206
+ } from "./chunk-TZFZ5WJX.js";
207
+
208
+ // src/reolink/baichuan/HlsSessionManager.ts
209
+ var withTimeout = async (p, ms, label) => {
210
+ let t;
211
+ try {
212
+ return await Promise.race([
213
+ p,
214
+ new Promise((_, reject) => {
215
+ t = setTimeout(
216
+ () => reject(new Error(`${label} timed out after ${ms}ms`)),
217
+ ms
218
+ );
219
+ })
220
+ ]);
221
+ } finally {
222
+ if (t) clearTimeout(t);
223
+ }
224
+ };
225
+ var HlsSessionManager = class {
226
+ constructor(api, options) {
227
+ this.api = api;
228
+ this.logger = options?.logger;
229
+ this.sessionTtlMs = options?.sessionTtlMs ?? 5 * 60 * 1e3;
230
+ const cleanupIntervalMs = options?.cleanupIntervalMs ?? 3e4;
231
+ this.cleanupTimer = setInterval(() => {
232
+ void this.cleanupExpiredSessions();
233
+ }, cleanupIntervalMs);
234
+ }
235
+ sessions = /* @__PURE__ */ new Map();
236
+ logger;
237
+ sessionTtlMs;
238
+ cleanupTimer;
239
+ creationLocks = /* @__PURE__ */ new Map();
240
+ /**
241
+ * Handle an HLS request and return the HTTP response.
242
+ *
243
+ * @param params - Request parameters
244
+ * @returns HTTP response ready to be sent
245
+ */
246
+ async handleRequest(params) {
247
+ const {
248
+ sessionKey,
249
+ hlsPath,
250
+ requestUrl,
251
+ createSession,
252
+ exclusiveKeyPrefix
253
+ } = params;
254
+ try {
255
+ let entry = this.sessions.get(sessionKey);
256
+ const isPlaylist = hlsPath === "playlist.m3u8" || hlsPath === "";
257
+ const isSegment = hlsPath.endsWith(".ts");
258
+ if (!entry && isSegment) {
259
+ this.logger?.debug?.(
260
+ `[HlsSessionManager] Segment request without session (likely stale after clip switch): ${sessionKey} ${hlsPath}`
261
+ );
262
+ return {
263
+ statusCode: 404,
264
+ headers: {
265
+ "Content-Type": "text/plain",
266
+ "Cache-Control": "no-store, no-cache, must-revalidate, max-age=0",
267
+ Pragma: "no-cache",
268
+ "Retry-After": "1"
269
+ },
270
+ body: "Segment not found"
271
+ };
272
+ }
273
+ if (!entry) {
274
+ if (!isPlaylist) {
275
+ return {
276
+ statusCode: 400,
277
+ headers: { "Content-Type": "text/plain" },
278
+ body: "Invalid HLS path"
279
+ };
280
+ }
281
+ const lockKey = exclusiveKeyPrefix ?? sessionKey;
282
+ await this.withCreationLock(lockKey, async () => {
283
+ entry = this.sessions.get(sessionKey);
284
+ if (entry) return;
285
+ if (exclusiveKeyPrefix) {
286
+ await this.stopOtherSessionsWithPrefix(
287
+ exclusiveKeyPrefix,
288
+ sessionKey
289
+ );
290
+ }
291
+ this.logger?.log?.(
292
+ `[HlsSessionManager] Creating new session: ${sessionKey}`
293
+ );
294
+ this.logger?.debug?.(
295
+ `[HlsSessionManager] createSession(): ${sessionKey}`
296
+ );
297
+ const sessionParams = await createSession();
298
+ this.logger?.debug?.(
299
+ `[HlsSessionManager] Starting createRecordingReplayHlsSession: ${sessionKey}`
300
+ );
301
+ const session = await withTimeout(
302
+ this.api.createRecordingReplayHlsSession({
303
+ channel: sessionParams.channel,
304
+ fileName: sessionParams.fileName,
305
+ ...sessionParams.isNvr !== void 0 && {
306
+ isNvr: sessionParams.isNvr
307
+ },
308
+ ...this.logger && { logger: this.logger },
309
+ ...sessionParams.deviceId && {
310
+ deviceId: sessionParams.deviceId
311
+ },
312
+ transcodeH265ToH264: sessionParams.transcodeH265ToH264 ?? true,
313
+ hlsSegmentDuration: sessionParams.hlsSegmentDuration ?? 4
314
+ }),
315
+ 2e4,
316
+ "createRecordingReplayHlsSession"
317
+ );
318
+ try {
319
+ await withTimeout(
320
+ session.waitForReady(),
321
+ 12e3,
322
+ "hls waitForReady"
323
+ );
324
+ } catch (e) {
325
+ this.logger?.warn?.(
326
+ `[HlsSessionManager] waitForReady did not complete in time for ${sessionKey}: ${e instanceof Error ? e.message : String(e)}`
327
+ );
328
+ }
329
+ entry = {
330
+ session,
331
+ createdAt: Date.now(),
332
+ lastAccessAt: Date.now()
333
+ };
334
+ this.sessions.set(sessionKey, entry);
335
+ this.logger?.log?.(
336
+ `[HlsSessionManager] Session ready: ${sessionKey}`
337
+ );
338
+ });
339
+ entry = this.sessions.get(sessionKey);
340
+ if (!entry) {
341
+ return {
342
+ statusCode: 500,
343
+ headers: {
344
+ "Content-Type": "text/plain",
345
+ "Cache-Control": "no-store, no-cache, must-revalidate, max-age=0",
346
+ Pragma: "no-cache"
347
+ },
348
+ body: "HLS session was not created"
349
+ };
350
+ }
351
+ }
352
+ entry.lastAccessAt = Date.now();
353
+ if (isPlaylist) {
354
+ return this.servePlaylist(entry.session, requestUrl, sessionKey);
355
+ }
356
+ if (isSegment) {
357
+ return this.serveSegment(entry.session, hlsPath, sessionKey);
358
+ }
359
+ return {
360
+ statusCode: 400,
361
+ headers: { "Content-Type": "text/plain" },
362
+ body: "Invalid HLS path"
363
+ };
364
+ } catch (error) {
365
+ const message = error instanceof Error ? error.message : String(error);
366
+ this.logger?.error?.(
367
+ `[HlsSessionManager] Error handling request: ${message}`
368
+ );
369
+ return {
370
+ statusCode: 500,
371
+ headers: { "Content-Type": "text/plain" },
372
+ body: `HLS error: ${message}`
373
+ };
374
+ }
375
+ }
376
+ async withCreationLock(lockKey, fn) {
377
+ const prev = this.creationLocks.get(lockKey) ?? Promise.resolve();
378
+ let release;
379
+ const current = new Promise((resolve) => {
380
+ release = resolve;
381
+ });
382
+ const chained = prev.then(
383
+ () => current,
384
+ () => current
385
+ );
386
+ this.creationLocks.set(lockKey, chained);
387
+ await prev.catch(() => {
388
+ });
389
+ try {
390
+ await fn();
391
+ } finally {
392
+ release();
393
+ if (this.creationLocks.get(lockKey) === chained) {
394
+ this.creationLocks.delete(lockKey);
395
+ }
396
+ }
397
+ }
398
+ /**
399
+ * Check if a session exists for the given key.
400
+ */
401
+ hasSession(sessionKey) {
402
+ return this.sessions.has(sessionKey);
403
+ }
404
+ /**
405
+ * Stop a specific session.
406
+ */
407
+ async stopSession(sessionKey) {
408
+ const entry = this.sessions.get(sessionKey);
409
+ if (entry) {
410
+ this.logger?.debug?.(
411
+ `[HlsSessionManager] Stopping session: ${sessionKey}`
412
+ );
413
+ this.sessions.delete(sessionKey);
414
+ await entry.session.stop().catch(() => {
415
+ });
416
+ }
417
+ }
418
+ /**
419
+ * Stop all sessions and cleanup.
420
+ */
421
+ async stopAll() {
422
+ this.logger?.debug?.(`[HlsSessionManager] Stopping all sessions`);
423
+ if (this.cleanupTimer) {
424
+ clearInterval(this.cleanupTimer);
425
+ this.cleanupTimer = void 0;
426
+ }
427
+ const stopPromises = Array.from(this.sessions.values()).map(
428
+ (entry) => entry.session.stop().catch(() => {
429
+ })
430
+ );
431
+ this.sessions.clear();
432
+ await Promise.all(stopPromises);
433
+ }
434
+ /**
435
+ * Get the number of active sessions.
436
+ */
437
+ get sessionCount() {
438
+ return this.sessions.size;
439
+ }
440
+ /**
441
+ * Serve the HLS playlist with rewritten segment URLs.
442
+ */
443
+ servePlaylist(session, requestUrl, sessionKey) {
444
+ let playlist = session.getPlaylist();
445
+ try {
446
+ const url = new URL(requestUrl, "http://localhost");
447
+ const basePath = url.pathname;
448
+ const baseParams = new URLSearchParams(url.searchParams);
449
+ baseParams.delete("hls");
450
+ playlist = playlist.replace(/^(segment_\d+\.ts)$/gm, (match) => {
451
+ const params = new URLSearchParams(baseParams);
452
+ params.set("hls", match);
453
+ return `${basePath}?${params.toString()}`;
454
+ });
455
+ } catch {
456
+ }
457
+ this.logger?.debug?.(
458
+ `[HlsSessionManager] Serving playlist: ${sessionKey}, length=${playlist.length}`
459
+ );
460
+ return {
461
+ statusCode: 200,
462
+ headers: {
463
+ "Content-Type": "application/vnd.apple.mpegurl",
464
+ "Cache-Control": "no-store, no-cache, must-revalidate, max-age=0",
465
+ Pragma: "no-cache"
466
+ },
467
+ body: playlist
468
+ };
469
+ }
470
+ /**
471
+ * Serve an HLS segment.
472
+ */
473
+ serveSegment(session, segmentName, sessionKey) {
474
+ const segment = session.getSegment(segmentName);
475
+ if (!segment) {
476
+ this.logger?.warn?.(
477
+ `[HlsSessionManager] Segment not found: ${segmentName}`
478
+ );
479
+ return {
480
+ statusCode: 404,
481
+ headers: {
482
+ "Content-Type": "text/plain",
483
+ "Cache-Control": "no-store, no-cache, must-revalidate, max-age=0",
484
+ Pragma: "no-cache",
485
+ "Retry-After": "1"
486
+ },
487
+ body: "Segment not found"
488
+ };
489
+ }
490
+ this.logger?.debug?.(
491
+ `[HlsSessionManager] Serving segment: ${segmentName} for ${sessionKey}, size=${segment.length}`
492
+ );
493
+ return {
494
+ statusCode: 200,
495
+ headers: {
496
+ "Content-Type": "video/mp2t",
497
+ "Cache-Control": "no-store, no-cache, must-revalidate, max-age=0",
498
+ Pragma: "no-cache",
499
+ "Content-Length": String(segment.length)
500
+ },
501
+ body: segment
502
+ };
503
+ }
504
+ /**
505
+ * Cleanup expired sessions.
506
+ */
507
+ async cleanupExpiredSessions() {
508
+ const now = Date.now();
509
+ const expiredKeys = [];
510
+ for (const [key, entry] of this.sessions) {
511
+ if (now - entry.lastAccessAt > this.sessionTtlMs) {
512
+ expiredKeys.push(key);
513
+ }
514
+ }
515
+ if (!expiredKeys.length) return;
516
+ await Promise.allSettled(
517
+ expiredKeys.map(async (key) => {
518
+ const entry = this.sessions.get(key);
519
+ if (!entry) return;
520
+ this.logger?.log?.(
521
+ `[HlsSessionManager] TTL expired: stopping session ${key}`
522
+ );
523
+ this.sessions.delete(key);
524
+ try {
525
+ await entry.session.stop();
526
+ } catch {
527
+ }
528
+ })
529
+ );
530
+ }
531
+ async stopOtherSessionsWithPrefix(prefix, exceptKey) {
532
+ const toStop = [];
533
+ for (const key of this.sessions.keys()) {
534
+ if (key !== exceptKey && key.startsWith(prefix)) toStop.push(key);
535
+ }
536
+ if (!toStop.length) return;
537
+ this.logger?.log?.(
538
+ `[HlsSessionManager] Switch: stopping ${toStop.length} session(s) for prefix=${prefix}`
539
+ );
540
+ await Promise.all(
541
+ toStop.map(async (key) => {
542
+ const entry = this.sessions.get(key);
543
+ if (!entry) return;
544
+ this.sessions.delete(key);
545
+ await entry.session.stop().catch(() => {
546
+ });
547
+ })
548
+ );
549
+ }
550
+ };
551
+ function detectIosClient(userAgent) {
552
+ const ua = (userAgent ?? "").toLowerCase();
553
+ const isIos = /iphone|ipad|ipod/.test(ua);
554
+ const isIosInstalledApp = ua.includes("installedapp");
555
+ return {
556
+ isIos,
557
+ isIosInstalledApp,
558
+ // iOS InstalledApp needs HLS for video playback
559
+ needsHls: isIos && isIosInstalledApp
560
+ };
561
+ }
562
+ function buildHlsRedirectUrl(originalUrl) {
563
+ return `${originalUrl}${originalUrl.includes("?") ? "&" : "?"}hls=playlist.m3u8`;
564
+ }
207
565
 
208
566
  // src/reolink/AutodiscoveryClient.ts
209
567
  var AutodiscoveryClient = class {
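A minimal wiring sketch for the HlsSessionManager added in the hunk above, assuming a configured ReolinkBaichuanApi instance named `api` and a plain Node http server (both assumptions; how the class is exported from the package is not shown in this diff). Option and parameter names follow the code above.

import http from "http";

const manager = new HlsSessionManager(api, {
  logger: console,
  sessionTtlMs: 60000,      // idle sessions are stopped after this TTL
  cleanupIntervalMs: 5000   // how often expired sessions are swept
});

http.createServer(async (req, res) => {
  const u = new URL(req.url ?? "/", "http://localhost");
  const result = await manager.handleRequest({
    sessionKey: "hls:cam1:ch0:clip.mp4",           // hypothetical key: device/channel/clip
    hlsPath: (u.searchParams.get("hls") ?? "playlist.m3u8").trim(),
    requestUrl: `http://localhost${req.url}`,      // used to rewrite segment URIs in the playlist
    exclusiveKeyPrefix: "hls:cam1:ch0:",           // stops other clips playing on the same channel
    createSession: () => ({ channel: 0, fileName: "clip.mp4" })
  });
  res.writeHead(result.statusCode, result.headers);
  res.end(result.body);
}).listen(8000);

Playlist requests create the session (creation is serialized per exclusiveKeyPrefix, and other sessions with the same prefix are stopped first); segment requests that arrive without a live session get a 404 with Retry-After: 1, as handled above.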
@@ -495,6 +853,79 @@ var AutodiscoveryClient = class {
495
853
  }
496
854
  };
497
855
 
856
+ // src/reolink/baichuan/types.ts
857
+ function getVideoclipClientInfo(headers) {
858
+ const getHeader = (key) => {
859
+ const val = headers[key] ?? headers[key.toLowerCase()] ?? headers[key.toUpperCase()];
860
+ return Array.isArray(val) ? val[0] : val;
861
+ };
862
+ return {
863
+ userAgent: getHeader("user-agent") ?? getHeader("User-Agent"),
864
+ accept: getHeader("accept") ?? getHeader("Accept"),
865
+ range: getHeader("range") ?? getHeader("Range"),
866
+ secChUa: getHeader("sec-ch-ua") ?? getHeader("Sec-CH-UA"),
867
+ secChUaMobile: getHeader("sec-ch-ua-mobile") ?? getHeader("Sec-CH-UA-Mobile"),
868
+ secChUaPlatform: getHeader("sec-ch-ua-platform") ?? getHeader("Sec-CH-UA-Platform")
869
+ };
870
+ }
871
+ function decideVideoclipTranscodeMode(headers, forceMode) {
872
+ const clientInfo = getVideoclipClientInfo(headers);
873
+ if (forceMode) {
874
+ return {
875
+ mode: forceMode,
876
+ reason: `forced: ${forceMode}`,
877
+ clientInfo
878
+ };
879
+ }
880
+ const ua = (clientInfo.userAgent ?? "").toLowerCase();
881
+ const platform = (clientInfo.secChUaPlatform ?? "").toLowerCase().replace(/"/g, "");
882
+ const isIos = /iphone|ipad|ipod/.test(ua);
883
+ if (isIos) {
884
+ return {
885
+ mode: "transcode-h264",
886
+ reason: "iOS device detected - no native H.265 support in <video>",
887
+ clientInfo
888
+ };
889
+ }
890
+ const isFirefox = ua.includes("firefox");
891
+ if (isFirefox) {
892
+ return {
893
+ mode: "transcode-h264",
894
+ reason: "Firefox detected - no H.265 support",
895
+ clientInfo
896
+ };
897
+ }
898
+ const isAndroid = ua.includes("android") || platform === "android";
899
+ if (isAndroid) {
900
+ return {
901
+ mode: "transcode-h264",
902
+ reason: "Android device detected - variable H.265 support",
903
+ clientInfo
904
+ };
905
+ }
906
+ const isChromium = ua.includes("chrome") || ua.includes("edg");
907
+ const isMac = ua.includes("mac os") || platform === "macos";
908
+ if (isChromium && !isMac) {
909
+ return {
910
+ mode: "transcode-h264",
911
+ reason: "Chrome/Edge on non-Mac detected - limited H.265 support",
912
+ clientInfo
913
+ };
914
+ }
915
+ if (isMac) {
916
+ return {
917
+ mode: "passthrough",
918
+ reason: "macOS detected - native H.265 hardware decoding available",
919
+ clientInfo
920
+ };
921
+ }
922
+ return {
923
+ mode: "transcode-h264",
924
+ reason: "Unknown client - transcoding for compatibility",
925
+ clientInfo
926
+ };
927
+ }
928
+
498
929
  // src/reolink/baichuan/endpoints-server.ts
499
930
  import http from "http";
500
931
  import { spawn } from "child_process";
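A short illustration of the decideVideoclipTranscodeMode helper added in the hunk above; the headers object mimics what Node's http.IncomingMessage would carry, and the user-agent string is just an example.

const decision = decideVideoclipTranscodeMode({
  "user-agent": "Mozilla/5.0 (iPhone; CPU iPhone OS 17_0 like Mac OS X) AppleWebKit/605.1.15",
  accept: "*/*"
});
// decision.mode   === "transcode-h264"
// decision.reason === "iOS device detected - no native H.265 support in <video>"
// Passing a second argument ("passthrough" or "transcode-h264") forces the mode regardless of client.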
@@ -503,6 +934,13 @@ function parseIntParam(v, def) {
503
934
  const n = Number.parseInt(v, 10);
504
935
  return Number.isFinite(n) ? n : def;
505
936
  }
937
+ function parseBoolParam(v, def) {
938
+ if (v == null) return def;
939
+ const s = v.trim().toLowerCase();
940
+ if (s === "1" || s === "true" || s === "yes" || s === "y") return true;
941
+ if (s === "0" || s === "false" || s === "no" || s === "n") return false;
942
+ return def;
943
+ }
506
944
  function parseProfile(v) {
507
945
  const p = (v ?? "sub").trim();
508
946
  if (p === "main" || p === "sub" || p === "ext") return p;
@@ -533,6 +971,11 @@ function createBaichuanEndpointsServer(opts) {
533
971
  const api = new ReolinkBaichuanApi({
534
972
  ...opts.baichuan
535
973
  });
974
+ const hlsManager = new HlsSessionManager(api, {
975
+ logger: console,
976
+ sessionTtlMs: 6e4,
977
+ cleanupIntervalMs: 5e3
978
+ });
536
979
  const listenHost = opts.listenHost ?? "127.0.0.1";
537
980
  const rtspListenHost = opts.rtspListenHost ?? "127.0.0.1";
538
981
  const rtspServers = /* @__PURE__ */ new Map();
@@ -578,6 +1021,46 @@ function createBaichuanEndpointsServer(opts) {
578
1021
  res.end(JSON.stringify({ rtspUrl }));
579
1022
  return;
580
1023
  }
1024
+ if (u.pathname === "/hls") {
1025
+ const channel = parseIntParam(u.searchParams.get("channel"), 0);
1026
+ const fileName = (u.searchParams.get("fileName") ?? "").trim();
1027
+ const deviceId = (u.searchParams.get("deviceId") ?? "anon").trim();
1028
+ const isNvr = parseBoolParam(u.searchParams.get("isNvr"), false);
1029
+ const transcode = parseBoolParam(u.searchParams.get("transcode"), true);
1030
+ const hlsSegmentDuration = parseIntParam(
1031
+ u.searchParams.get("hlsSegmentDuration"),
1032
+ 2
1033
+ );
1034
+ const hlsPath = (u.searchParams.get("hls") ?? "playlist.m3u8").trim();
1035
+ if (!fileName) {
1036
+ res.statusCode = 400;
1037
+ res.end("Missing fileName");
1038
+ return;
1039
+ }
1040
+ const sessionKey = `hls:${deviceId}:ch${channel}:${fileName}`;
1041
+ const exclusiveKeyPrefix = `hls:${deviceId}:ch${channel}:`;
1042
+ const requestUrl = `http://${listenHost}:${opts.listenPort}${u.pathname}${u.search}`;
1043
+ const result = await hlsManager.handleRequest({
1044
+ sessionKey,
1045
+ hlsPath,
1046
+ requestUrl,
1047
+ exclusiveKeyPrefix,
1048
+ createSession: () => ({
1049
+ channel,
1050
+ fileName,
1051
+ isNvr,
1052
+ deviceId,
1053
+ transcodeH265ToH264: transcode,
1054
+ hlsSegmentDuration
1055
+ })
1056
+ });
1057
+ res.statusCode = result.statusCode;
1058
+ for (const [k, v] of Object.entries(result.headers)) {
1059
+ res.setHeader(k, v);
1060
+ }
1061
+ res.end(result.body);
1062
+ return;
1063
+ }
581
1064
  if (u.pathname === "/download") {
582
1065
  const channel = parseIntParam(u.searchParams.get("channel"), 0);
583
1066
  const uid = (u.searchParams.get("uid") ?? "").trim();
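An example request against the /hls endpoint added in the hunk above, assuming the endpoints server listens on 127.0.0.1:8765 and that a recording named Rec_2024.mp4 exists on channel 0 (host, port, and file name are illustrative; the real values come from createBaichuanEndpointsServer's options and the camera's recordings).

const base = "http://127.0.0.1:8765/hls";
const query = new URLSearchParams({
  channel: "0",
  fileName: "Rec_2024.mp4",   // required; the endpoint answers 400 "Missing fileName" otherwise
  deviceId: "cam1",           // optional, defaults to "anon"
  transcode: "true",          // optional, H.265 -> H.264 transcoding, defaults to true
  hlsSegmentDuration: "2"     // optional, seconds per segment, defaults to 2
});
const res = await fetch(`${base}?${query}`);   // the first playlist request builds the session
const playlist = await res.text();             // served as application/vnd.apple.mpegurl
// Segment URIs in the playlist point back to this same path with an added
// hls=segment_N.ts parameter, so an HLS player can be pointed directly at the URL above.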
@@ -4308,11 +4791,1870 @@ var BaichuanHttpStreamServer = class extends EventEmitter2 {
4308
4791
  }
4309
4792
  };
4310
4793
 
4311
- // src/multifocal/compositeRtspServer.ts
4794
+ // src/baichuan/stream/BaichuanMjpegServer.ts
4795
+ import { EventEmitter as EventEmitter4 } from "events";
4796
+ import * as http5 from "http";
4797
+
4798
+ // src/baichuan/stream/MjpegTransformer.ts
4312
4799
  import { EventEmitter as EventEmitter3 } from "events";
4313
4800
  import { spawn as spawn6 } from "child_process";
4801
+ var JPEG_SOI = Buffer.from([255, 216]);
4802
+ var JPEG_EOI = Buffer.from([255, 217]);
4803
+ var MjpegTransformer = class extends EventEmitter3 {
4804
+ options;
4805
+ ffmpeg = null;
4806
+ started = false;
4807
+ closed = false;
4808
+ jpegBuffer = Buffer.alloc(0);
4809
+ frameCount = 0;
4810
+ lastTimestamp = 0;
4811
+ constructor(options) {
4812
+ super();
4813
+ this.options = {
4814
+ codec: options.codec,
4815
+ quality: options.quality ?? 5,
4816
+ width: options.width,
4817
+ height: options.height,
4818
+ maxFps: options.maxFps,
4819
+ logger: options.logger
4820
+ };
4821
+ }
4822
+ /**
4823
+ * Start the transformer (spawns FFmpeg process)
4824
+ */
4825
+ start() {
4826
+ if (this.started || this.closed) return;
4827
+ this.started = true;
4828
+ const { codec, quality, width, height, maxFps } = this.options;
4829
+ const args = [
4830
+ "-hide_banner",
4831
+ "-loglevel",
4832
+ "error",
4833
+ // Input: raw video from stdin
4834
+ "-f",
4835
+ codec === "h265" ? "hevc" : "h264",
4836
+ "-i",
4837
+ "pipe:0"
4838
+ ];
4839
+ const filters = [];
4840
+ if (width || height) {
4841
+ const w = width ?? -1;
4842
+ const h = height ?? -1;
4843
+ filters.push(`scale=${w}:${h}`);
4844
+ }
4845
+ if (maxFps) {
4846
+ filters.push(`fps=${maxFps}`);
4847
+ }
4848
+ if (filters.length > 0) {
4849
+ args.push("-vf", filters.join(","));
4850
+ }
4851
+ args.push(
4852
+ "-c:v",
4853
+ "mjpeg",
4854
+ "-q:v",
4855
+ String(quality),
4856
+ "-f",
4857
+ "mjpeg",
4858
+ "pipe:1"
4859
+ );
4860
+ this.log("debug", `Starting FFmpeg with args: ${args.join(" ")}`);
4861
+ this.ffmpeg = spawn6("ffmpeg", args, {
4862
+ stdio: ["pipe", "pipe", "pipe"]
4863
+ });
4864
+ this.ffmpeg.stdout.on("data", (data) => {
4865
+ this.handleJpegData(data);
4866
+ });
4867
+ this.ffmpeg.stderr.on("data", (data) => {
4868
+ const msg = data.toString().trim();
4869
+ if (msg) {
4870
+ this.log("debug", `FFmpeg: ${msg}`);
4871
+ }
4872
+ });
4873
+ this.ffmpeg.on("close", (code) => {
4874
+ this.log("debug", `FFmpeg closed with code ${code}`);
4875
+ this.ffmpeg = null;
4876
+ if (!this.closed) {
4877
+ this.emit("close", code);
4878
+ }
4879
+ });
4880
+ this.ffmpeg.on("error", (err) => {
4881
+ this.log("error", `FFmpeg error: ${err.message}`);
4882
+ this.emit("error", err);
4883
+ });
4884
+ }
4885
+ /**
4886
+ * Push an H.264/H.265 access unit (Annex-B format with start codes)
4887
+ */
4888
+ push(accessUnit, timestamp) {
4889
+ if (!this.started || this.closed || !this.ffmpeg) {
4890
+ return;
4891
+ }
4892
+ this.lastTimestamp = timestamp ?? Date.now() * 1e3;
4893
+ try {
4894
+ this.ffmpeg.stdin.write(accessUnit);
4895
+ } catch (err) {
4896
+ this.log("error", `Failed to write to FFmpeg: ${err}`);
4897
+ }
4898
+ }
4899
+ /**
4900
+ * Handle JPEG data from FFmpeg stdout
4901
+ * FFmpeg outputs complete JPEG images, each starting with SOI (0xFFD8)
4902
+ * and ending with EOI (0xFFD9)
4903
+ */
4904
+ handleJpegData(data) {
4905
+ this.jpegBuffer = Buffer.concat([this.jpegBuffer, data]);
4906
+ while (true) {
4907
+ const soiIndex = this.jpegBuffer.indexOf(JPEG_SOI);
4908
+ if (soiIndex < 0) {
4909
+ this.jpegBuffer = Buffer.alloc(0);
4910
+ break;
4911
+ }
4912
+ if (soiIndex > 0) {
4913
+ this.jpegBuffer = this.jpegBuffer.subarray(soiIndex);
4914
+ }
4915
+ const eoiIndex = this.jpegBuffer.indexOf(JPEG_EOI, 2);
4916
+ if (eoiIndex < 0) {
4917
+ break;
4918
+ }
4919
+ const frameEnd = eoiIndex + 2;
4920
+ const jpegFrame = this.jpegBuffer.subarray(0, frameEnd);
4921
+ this.jpegBuffer = this.jpegBuffer.subarray(frameEnd);
4922
+ this.frameCount++;
4923
+ const frame = {
4924
+ data: jpegFrame,
4925
+ timestamp: this.lastTimestamp
4926
+ };
4927
+ this.emit("frame", frame);
4928
+ }
4929
+ }
4930
+ /**
4931
+ * Stop the transformer
4932
+ */
4933
+ async stop() {
4934
+ if (this.closed) return;
4935
+ this.closed = true;
4936
+ if (this.ffmpeg) {
4937
+ try {
4938
+ this.ffmpeg.stdin.end();
4939
+ } catch {
4940
+ }
4941
+ await new Promise((resolve) => {
4942
+ const ff = this.ffmpeg;
4943
+ if (!ff) {
4944
+ resolve();
4945
+ return;
4946
+ }
4947
+ const timeout = setTimeout(() => {
4948
+ ff.kill("SIGKILL");
4949
+ resolve();
4950
+ }, 1e3);
4951
+ ff.once("close", () => {
4952
+ clearTimeout(timeout);
4953
+ resolve();
4954
+ });
4955
+ try {
4956
+ ff.kill("SIGTERM");
4957
+ } catch {
4958
+ clearTimeout(timeout);
4959
+ resolve();
4960
+ }
4961
+ });
4962
+ this.ffmpeg = null;
4963
+ }
4964
+ this.emit("close", 0);
4965
+ }
4966
+ /**
4967
+ * Get frame count
4968
+ */
4969
+ getFrameCount() {
4970
+ return this.frameCount;
4971
+ }
4972
+ /**
4973
+ * Check if running
4974
+ */
4975
+ isRunning() {
4976
+ return this.started && !this.closed && this.ffmpeg !== null;
4977
+ }
4978
+ log(level, message) {
4979
+ this.options.logger?.(level, `[MjpegTransformer] ${message}`);
4980
+ }
4981
+ };
4982
+ function createMjpegBoundary() {
4983
+ return `mjpegboundary${Date.now()}`;
4984
+ }
4985
+ function getMjpegContentType(boundary) {
4986
+ return `multipart/x-mixed-replace; boundary=${boundary}`;
4987
+ }
4988
+ function formatMjpegFrame(frame, boundary) {
4989
+ const header = Buffer.from(
4990
+ `--${boundary}\r
4991
+ Content-Type: image/jpeg\r
4992
+ Content-Length: ${frame.length}\r
4993
+ \r
4994
+ `
4995
+ );
4996
+ return Buffer.concat([header, frame, Buffer.from("\r\n")]);
4997
+ }
4998
+
4999
+ // src/baichuan/stream/BaichuanMjpegServer.ts
5000
+ var BaichuanMjpegServer = class extends EventEmitter4 {
5001
+ options;
5002
+ clients = /* @__PURE__ */ new Map();
5003
+ httpServer = null;
5004
+ transformer = null;
5005
+ nativeStream = null;
5006
+ streamPump = null;
5007
+ detectedCodec = null;
5008
+ started = false;
5009
+ clientIdCounter = 0;
5010
+ constructor(options) {
5011
+ super();
5012
+ this.options = options;
5013
+ }
5014
+ /**
5015
+ * Start the MJPEG server
5016
+ */
5017
+ async start() {
5018
+ if (this.started) return;
5019
+ this.started = true;
5020
+ const port = this.options.port ?? 8080;
5021
+ const host = this.options.host ?? "0.0.0.0";
5022
+ const path2 = this.options.path ?? "/mjpeg";
5023
+ this.httpServer = http5.createServer((req, res) => {
5024
+ this.handleRequest(req, res, path2);
5025
+ });
5026
+ return new Promise((resolve, reject) => {
5027
+ this.httpServer.on("error", (err) => {
5028
+ this.log("error", `HTTP server error: ${err.message}`);
5029
+ reject(err);
5030
+ });
5031
+ this.httpServer.listen(port, host, () => {
5032
+ this.log(
5033
+ "info",
5034
+ `MJPEG server started on http://${host}:${port}${path2}`
5035
+ );
5036
+ this.emit("started", { host, port, path: path2 });
5037
+ resolve();
5038
+ });
5039
+ });
5040
+ }
5041
+ /**
5042
+ * Stop the MJPEG server
5043
+ */
5044
+ async stop() {
5045
+ if (!this.started) return;
5046
+ this.started = false;
5047
+ for (const [id, client] of this.clients) {
5048
+ try {
5049
+ client.response.end();
5050
+ } catch {
5051
+ }
5052
+ this.clients.delete(id);
5053
+ }
5054
+ await this.stopStream();
5055
+ if (this.httpServer) {
5056
+ await new Promise((resolve) => {
5057
+ this.httpServer.close(() => resolve());
5058
+ });
5059
+ this.httpServer = null;
5060
+ }
5061
+ this.log("info", "MJPEG server stopped");
5062
+ this.emit("stopped");
5063
+ }
5064
+ /**
5065
+ * Handle HTTP request
5066
+ */
5067
+ handleRequest(req, res, expectedPath) {
5068
+ const url = new URL(req.url ?? "/", `http://${req.headers.host}`);
5069
+ if (url.pathname !== expectedPath) {
5070
+ res.statusCode = 404;
5071
+ res.end("Not Found");
5072
+ return;
5073
+ }
5074
+ if (req.method !== "GET") {
5075
+ res.statusCode = 405;
5076
+ res.end("Method Not Allowed");
5077
+ return;
5078
+ }
5079
+ this.handleMjpegClient(req, res);
5080
+ }
5081
+ /**
5082
+ * Handle new MJPEG client
5083
+ */
5084
+ handleMjpegClient(req, res) {
5085
+ const clientId = `client-${++this.clientIdCounter}`;
5086
+ const boundary = createMjpegBoundary();
5087
+ const client = {
5088
+ id: clientId,
5089
+ response: res,
5090
+ boundary,
5091
+ connectedAt: Date.now()
5092
+ };
5093
+ this.clients.set(clientId, client);
5094
+ this.log(
5095
+ "info",
5096
+ `MJPEG client connected: ${clientId} (total: ${this.clients.size})`
5097
+ );
5098
+ this.emit("client-connected", { id: clientId, total: this.clients.size });
5099
+ res.writeHead(200, {
5100
+ "Content-Type": getMjpegContentType(boundary),
5101
+ "Cache-Control": "no-cache, no-store, must-revalidate",
5102
+ Pragma: "no-cache",
5103
+ Expires: "0",
5104
+ Connection: "close"
5105
+ });
5106
+ const cleanup = () => {
5107
+ this.clients.delete(clientId);
5108
+ this.log(
5109
+ "info",
5110
+ `MJPEG client disconnected: ${clientId} (remaining: ${this.clients.size})`
5111
+ );
5112
+ this.emit("client-disconnected", {
5113
+ id: clientId,
5114
+ total: this.clients.size
5115
+ });
5116
+ if (this.clients.size === 0) {
5117
+ this.stopStream();
5118
+ }
5119
+ };
5120
+ req.on("close", cleanup);
5121
+ res.on("close", cleanup);
5122
+ res.on("error", cleanup);
5123
+ if (!this.transformer) {
5124
+ this.startStream();
5125
+ }
5126
+ }
5127
+ /**
5128
+ * Start the native video stream and MJPEG transformer
5129
+ */
5130
+ async startStream() {
5131
+ if (this.transformer) return;
5132
+ this.log("info", "Starting native video stream...");
5133
+ const { api, channel, profile, variant, quality, width, height, maxFps } = this.options;
5134
+ try {
5135
+ this.nativeStream = createNativeStream(
5136
+ api,
5137
+ channel,
5138
+ profile,
5139
+ variant ? { variant } : void 0
5140
+ );
5141
+ this.streamPump = this.pumpStream();
5142
+ } catch (err) {
5143
+ this.log("error", `Failed to start stream: ${err}`);
5144
+ this.emit("error", err);
5145
+ }
5146
+ }
5147
+ /**
5148
+ * Pump native stream and feed to transformer
5149
+ */
5150
+ async pumpStream() {
5151
+ if (!this.nativeStream) return;
5152
+ let frameBuffer = [];
5153
+ let waitingForKeyframe = true;
5154
+ try {
5155
+ for await (const frame of this.nativeStream) {
5156
+ if (!this.started || this.clients.size === 0) break;
5157
+ const { type, data, microseconds, videoType } = frame;
5158
+ if (type !== "Iframe" && type !== "Pframe") continue;
5159
+ if (!data || data.length === 0) continue;
5160
+ let annexB;
5161
+ if (videoType === "H265") {
5162
+ annexB = convertToAnnexB2(data);
5163
+ if (!this.detectedCodec) {
5164
+ this.detectedCodec = "h265";
5165
+ this.initTransformer();
5166
+ }
5167
+ } else {
5168
+ annexB = convertToAnnexB(data);
5169
+ if (!this.detectedCodec) {
5170
+ this.detectedCodec = "h264";
5171
+ this.initTransformer();
5172
+ }
5173
+ }
5174
+ if (waitingForKeyframe) {
5175
+ if (type === "Iframe") {
5176
+ waitingForKeyframe = false;
5177
+ } else {
5178
+ continue;
5179
+ }
5180
+ }
5181
+ if (this.transformer) {
5182
+ this.transformer.push(annexB, microseconds);
5183
+ }
5184
+ }
5185
+ } catch (err) {
5186
+ if (this.started) {
5187
+ this.log("error", `Stream error: ${err}`);
5188
+ this.emit("error", err);
5189
+ }
5190
+ }
5191
+ }
5192
+ /**
5193
+ * Initialize MJPEG transformer once codec is detected
5194
+ */
5195
+ initTransformer() {
5196
+ if (this.transformer || !this.detectedCodec) return;
5197
+ const { quality, width, height, maxFps } = this.options;
5198
+ this.transformer = new MjpegTransformer({
5199
+ codec: this.detectedCodec,
5200
+ quality,
5201
+ width,
5202
+ height,
5203
+ maxFps,
5204
+ logger: this.options.logger
5205
+ });
5206
+ this.transformer.on("frame", (frame) => {
5207
+ this.broadcastFrame(frame);
5208
+ });
5209
+ this.transformer.on("error", (err) => {
5210
+ this.log("error", `Transformer error: ${err}`);
5211
+ });
5212
+ this.transformer.on("close", () => {
5213
+ this.log("debug", "Transformer closed");
5214
+ });
5215
+ this.transformer.start();
5216
+ this.log(
5217
+ "info",
5218
+ `MJPEG transformer started (codec: ${this.detectedCodec})`
5219
+ );
5220
+ }
5221
+ /**
5222
+ * Broadcast JPEG frame to all connected clients
5223
+ */
5224
+ broadcastFrame(frame) {
5225
+ for (const client of this.clients.values()) {
5226
+ try {
5227
+ const mjpegData = formatMjpegFrame(frame.data, client.boundary);
5228
+ client.response.write(mjpegData);
5229
+ } catch {
5230
+ }
5231
+ }
5232
+ }
5233
+ /**
5234
+ * Stop the stream and transformer
5235
+ */
5236
+ async stopStream() {
5237
+ if (this.transformer) {
5238
+ await this.transformer.stop();
5239
+ this.transformer = null;
5240
+ }
5241
+ if (this.nativeStream) {
5242
+ try {
5243
+ await this.nativeStream.return(void 0);
5244
+ } catch {
5245
+ }
5246
+ this.nativeStream = null;
5247
+ }
5248
+ if (this.streamPump) {
5249
+ try {
5250
+ await this.streamPump;
5251
+ } catch {
5252
+ }
5253
+ this.streamPump = null;
5254
+ }
5255
+ this.detectedCodec = null;
5256
+ this.log("debug", "Stream stopped");
5257
+ }
5258
+ /**
5259
+ * Get current number of connected clients
5260
+ */
5261
+ getClientCount() {
5262
+ return this.clients.size;
5263
+ }
5264
+ /**
5265
+ * Get server status
5266
+ */
5267
+ getStatus() {
5268
+ return {
5269
+ running: this.started,
5270
+ clients: this.clients.size,
5271
+ codec: this.detectedCodec,
5272
+ frames: this.transformer?.getFrameCount() ?? 0
5273
+ };
5274
+ }
5275
+ log(level, message) {
5276
+ this.options.logger?.(level, `[BaichuanMjpegServer] ${message}`);
5277
+ }
5278
+ };
5279
+
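A minimal start-up sketch for the BaichuanMjpegServer defined above, assuming a configured api instance and that the class is reachable from the package's exports (not shown in this diff); option names and defaults follow the constructor and start() above.

const mjpeg = new BaichuanMjpegServer({
  api,                 // ReolinkBaichuanApi instance (assumed to exist)
  channel: 0,
  profile: "sub",
  quality: 5,          // FFmpeg -q:v, lower means better quality
  maxFps: 10,
  port: 8080,          // defaults shown explicitly: 8080, "0.0.0.0", "/mjpeg"
  host: "0.0.0.0",
  path: "/mjpeg",
  logger: (level, msg) => console.log(level, msg)
});
await mjpeg.start();
// Browsers render multipart/x-mixed-replace directly, so the stream can be viewed with
// <img src="http://<host>:8080/mjpeg">; the native stream and the FFmpeg MJPEG transcode
// start with the first connected client and stop when the last client disconnects.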
5280
+ // src/baichuan/stream/BaichuanWebRTCServer.ts
5281
+ import { EventEmitter as EventEmitter5 } from "events";
5282
+ function parseAnnexBNalUnits(annexB) {
5283
+ const nalUnits = [];
5284
+ let offset = 0;
5285
+ while (offset < annexB.length) {
5286
+ let startCodeLen = 0;
5287
+ if (offset + 4 <= annexB.length && annexB[offset] === 0 && annexB[offset + 1] === 0 && annexB[offset + 2] === 0 && annexB[offset + 3] === 1) {
5288
+ startCodeLen = 4;
5289
+ } else if (offset + 3 <= annexB.length && annexB[offset] === 0 && annexB[offset + 1] === 0 && annexB[offset + 2] === 1) {
5290
+ startCodeLen = 3;
5291
+ } else {
5292
+ offset++;
5293
+ continue;
5294
+ }
5295
+ const naluStart = offset + startCodeLen;
5296
+ let naluEnd = annexB.length;
5297
+ for (let i = naluStart; i < annexB.length - 2; i++) {
5298
+ if (annexB[i] === 0 && annexB[i + 1] === 0 && (annexB[i + 2] === 1 || i + 3 < annexB.length && annexB[i + 2] === 0 && annexB[i + 3] === 1)) {
5299
+ naluEnd = i;
5300
+ break;
5301
+ }
5302
+ }
5303
+ if (naluEnd > naluStart) {
5304
+ nalUnits.push(annexB.subarray(naluStart, naluEnd));
5305
+ }
5306
+ offset = naluEnd;
5307
+ }
5308
+ return nalUnits;
5309
+ }
5310
+ function getH264NalType(nalUnit) {
5311
+ return nalUnit[0] & 31;
5312
+ }
5313
+ function getH265NalType2(nalUnit) {
5314
+ return nalUnit[0] >> 1 & 63;
5315
+ }
5316
+ var BaichuanWebRTCServer = class extends EventEmitter5 {
5317
+ options;
5318
+ sessions = /* @__PURE__ */ new Map();
5319
+ sessionIdCounter = 0;
5320
+ weriftModule = null;
5321
+ constructor(options) {
5322
+ super();
5323
+ this.options = options;
5324
+ }
5325
+ /**
5326
+ * Initialize werift module (lazy load to avoid requiring it if not used)
5327
+ */
5328
+ async loadWerift() {
5329
+ if (this.weriftModule) return this.weriftModule;
5330
+ try {
5331
+ this.weriftModule = await import("werift");
5332
+ return this.weriftModule;
5333
+ } catch (err) {
5334
+ throw new Error(
5335
+ `Failed to load werift module. Make sure it's installed: npm install werift
5336
+ Error: ${err}`
5337
+ );
5338
+ }
5339
+ }
5340
+ /**
5341
+ * Create a new WebRTC session
5342
+ * Returns a session ID and SDP offer to send to the browser
5343
+ */
5344
+ async createSession() {
5345
+ const werift = await this.loadWerift();
5346
+ const { RTCPeerConnection, MediaStreamTrack, RTCRtpCodecParameters } = werift;
5347
+ const sessionId = `webrtc-${++this.sessionIdCounter}-${Date.now()}`;
5348
+ this.log("info", `Creating WebRTC session ${sessionId}`);
5349
+ const iceServers = [];
5350
+ const stunServers = this.options.stunServers ?? [
5351
+ "stun:stun.l.google.com:19302"
5352
+ ];
5353
+ for (const urls of stunServers) {
5354
+ iceServers.push({ urls });
5355
+ }
5356
+ if (this.options.turnServers) {
5357
+ iceServers.push(...this.options.turnServers);
5358
+ }
5359
+ const peerConnection = new RTCPeerConnection({
5360
+ iceServers,
5361
+ codecs: {
5362
+ video: [
5363
+ new RTCRtpCodecParameters({
5364
+ mimeType: "video/H264",
5365
+ clockRate: 9e4,
5366
+ rtcpFeedback: [
5367
+ { type: "nack" },
5368
+ { type: "nack", parameter: "pli" },
5369
+ { type: "goog-remb" }
5370
+ ],
5371
+ parameters: "packetization-mode=1;profile-level-id=42e01f;level-asymmetry-allowed=1"
5372
+ })
5373
+ ],
5374
+ audio: [
5375
+ new RTCRtpCodecParameters({
5376
+ mimeType: "audio/opus",
5377
+ clockRate: 48e3,
5378
+ channels: 2
5379
+ })
5380
+ ]
5381
+ }
5382
+ });
5383
+ const session = {
5384
+ id: sessionId,
5385
+ peerConnection,
5386
+ videoTrack: null,
5387
+ audioTrack: null,
5388
+ videoDataChannel: null,
5389
+ nativeStream: null,
5390
+ intercom: null,
5391
+ dataChannel: null,
5392
+ videoCodec: null,
5393
+ createdAt: /* @__PURE__ */ new Date(),
5394
+ state: "connecting",
5395
+ stats: {
5396
+ videoFrames: 0,
5397
+ audioFrames: 0,
5398
+ bytesSent: 0,
5399
+ intercomBytesSent: 0
5400
+ }
5401
+ };
5402
+ this.sessions.set(sessionId, session);
5403
+ const videoSsrc = Math.random() * 4294967295 >>> 0;
5404
+ const videoTrack = new MediaStreamTrack({ kind: "video", ssrc: videoSsrc });
5405
+ const videoSender = peerConnection.addTrack(videoTrack);
5406
+ session.videoTrack = videoTrack;
5407
+ this.log(
5408
+ "info",
5409
+ `Video track created: ssrc=${videoTrack.ssrc}, sender params=${JSON.stringify(videoSender?.getParameters?.() ?? {})}`
5410
+ );
5411
+ const audioSsrc = Math.random() * 4294967295 >>> 0;
5412
+ const audioTrack = new MediaStreamTrack({ kind: "audio", ssrc: audioSsrc });
5413
+ peerConnection.addTrack(audioTrack);
5414
+ session.audioTrack = audioTrack;
5415
+ const videoDataChannel = peerConnection.createDataChannel("video", {
5416
+ ordered: true,
5417
+ maxRetransmits: 0
5418
+ // Unreliable for real-time video
5419
+ });
5420
+ session.videoDataChannel = videoDataChannel;
5421
+ videoDataChannel.onopen = () => {
5422
+ this.log("info", `Video data channel opened for session ${sessionId}`);
5423
+ };
5424
+ if (this.options.enableIntercom) {
5425
+ const dataChannel = peerConnection.createDataChannel("intercom", {
5426
+ ordered: true
5427
+ });
5428
+ session.dataChannel = dataChannel;
5429
+ dataChannel.onopen = () => {
5430
+ this.log(
5431
+ "info",
5432
+ `Intercom data channel opened for session ${sessionId}`
5433
+ );
5434
+ this.emit("intercom-started", { sessionId });
5435
+ };
5436
+ dataChannel.onmessage = async (event) => {
5437
+ if (session.intercom && event.data instanceof ArrayBuffer) {
5438
+ try {
5439
+ const audioData = Buffer.from(event.data);
5440
+ await session.intercom.sendAudio(audioData);
5441
+ session.stats.intercomBytesSent += audioData.length;
5442
+ } catch (err) {
5443
+ this.log("error", `Failed to send intercom audio: ${err}`);
5444
+ }
5445
+ }
5446
+ };
5447
+ dataChannel.onclose = () => {
5448
+ this.log(
5449
+ "info",
5450
+ `Intercom data channel closed for session ${sessionId}`
5451
+ );
5452
+ this.emit("intercom-stopped", { sessionId });
5453
+ };
5454
+ }
5455
+ peerConnection.iceConnectionStateChange.subscribe((state) => {
5456
+ this.log("info", `ICE connection state for ${sessionId}: ${state}`);
5457
+ if (state === "connected") {
5458
+ session.state = "connected";
5459
+ this.emit("session-connected", { sessionId });
5460
+ } else if (state === "failed") {
5461
+ session.state = state;
5462
+ this.closeSession(sessionId).catch((err) => {
5463
+ this.log("error", `Error closing session on ICE ${state}: ${err}`);
5464
+ });
5465
+ }
5466
+ });
5467
+ peerConnection.connectionStateChange.subscribe((state) => {
5468
+ this.log("debug", `Connection state for ${sessionId}: ${state}`);
5469
+ if (state === "closed" || state === "failed") {
5470
+ this.closeSession(sessionId).catch((err) => {
5471
+ this.log(
5472
+ "error",
5473
+ `Error closing session on connection ${state}: ${err}`
5474
+ );
5475
+ });
5476
+ }
5477
+ });
5478
+ const offer = await peerConnection.createOffer();
5479
+ await peerConnection.setLocalDescription(offer);
5480
+ await this.waitForIceGathering(peerConnection, 3e3);
5481
+ const localDescription = peerConnection.localDescription;
5482
+ if (!localDescription) {
5483
+ throw new Error("Failed to create local description");
5484
+ }
5485
+ this.emit("session-created", { sessionId });
5486
+ return {
5487
+ sessionId,
5488
+ offer: {
5489
+ sdp: localDescription.sdp,
5490
+ type: "offer"
5491
+ }
5492
+ };
5493
+ }
5494
+ /**
5495
+ * Handle WebRTC answer from browser and start streaming
5496
+ */
5497
+ async handleAnswer(sessionId, answer) {
5498
+ const session = this.sessions.get(sessionId);
5499
+ if (!session) {
5500
+ throw new Error(`Session ${sessionId} not found`);
5501
+ }
5502
+ const werift = await this.loadWerift();
5503
+ const { RTCSessionDescription } = werift;
5504
+ this.log("info", `Handling WebRTC answer for session ${sessionId}`);
5505
+ await session.peerConnection.setRemoteDescription(
5506
+ new RTCSessionDescription(answer.sdp, answer.type)
5507
+ );
5508
+ await this.startNativeStream(session);
5509
+ if (this.options.enableIntercom && session.dataChannel) {
5510
+ await this.startIntercom(session);
5511
+ }
5512
+ }
5513
+ /**
5514
+ * Add ICE candidate from browser
5515
+ */
5516
+ async addIceCandidate(sessionId, candidate) {
5517
+ const session = this.sessions.get(sessionId);
5518
+ if (!session) {
5519
+ throw new Error(`Session ${sessionId} not found`);
5520
+ }
5521
+ const werift = await this.loadWerift();
5522
+ const { RTCIceCandidate } = werift;
5523
+ await session.peerConnection.addIceCandidate(
5524
+ new RTCIceCandidate(candidate.candidate, candidate.sdpMid ?? "0")
5525
+ );
5526
+ }
5527
+ /**
5528
+ * Close a WebRTC session
5529
+ */
5530
+ async closeSession(sessionId) {
5531
+ const session = this.sessions.get(sessionId);
5532
+ if (!session) return;
5533
+ this.log("info", `Closing WebRTC session ${sessionId}`);
5534
+ session.state = "disconnected";
5535
+ if (session.intercom) {
5536
+ try {
5537
+ await session.intercom.stop();
5538
+ } catch (err) {
5539
+ this.log("debug", `Error stopping intercom: ${err}`);
5540
+ }
5541
+ session.intercom = null;
5542
+ }
5543
+ if (session.dataChannel) {
5544
+ try {
5545
+ session.dataChannel.close();
5546
+ } catch (err) {
5547
+ this.log("debug", `Error closing data channel: ${err}`);
5548
+ }
5549
+ session.dataChannel = null;
5550
+ }
5551
+ if (session.cleanup) {
5552
+ session.cleanup();
5553
+ }
5554
+ try {
5555
+ await session.peerConnection.close();
5556
+ } catch (err) {
5557
+ this.log("debug", `Error closing peer connection: ${err}`);
5558
+ }
5559
+ this.sessions.delete(sessionId);
5560
+ this.emit("session-closed", { sessionId });
5561
+ this.log(
5562
+ "info",
5563
+ `WebRTC session ${sessionId} closed (active sessions: ${this.sessions.size})`
5564
+ );
5565
+ }
5566
+ /**
5567
+ * Get information about all active sessions
5568
+ */
5569
+ getSessions() {
5570
+ return Array.from(this.sessions.values()).map((s) => ({
5571
+ id: s.id,
5572
+ state: s.state,
5573
+ createdAt: s.createdAt,
5574
+ stats: { ...s.stats }
5575
+ }));
5576
+ }
5577
+ /**
5578
+ * Get information about a specific session
5579
+ */
5580
+ getSession(sessionId) {
5581
+ const session = this.sessions.get(sessionId);
5582
+ if (!session) return null;
5583
+ return {
5584
+ id: session.id,
5585
+ state: session.state,
5586
+ createdAt: session.createdAt,
5587
+ stats: { ...session.stats }
5588
+ };
5589
+ }
5590
+ /**
5591
+ * Close all sessions and stop the server
5592
+ */
5593
+ async stop() {
5594
+ this.log("info", "Stopping WebRTC server");
5595
+ const sessionIds = Array.from(this.sessions.keys());
5596
+ await Promise.all(sessionIds.map((id) => this.closeSession(id)));
5597
+ this.log("info", "WebRTC server stopped");
5598
+ }
5599
+ /**
5600
+ * Get the number of active sessions
5601
+ */
5602
+ get sessionCount() {
5603
+ return this.sessions.size;
5604
+ }
5605
+ // ============================================================================
5606
+ // Private Methods
5607
+ // ============================================================================
5608
+ /**
5609
+ * Wait for ICE gathering to complete
5610
+ */
5611
+ async waitForIceGathering(pc, timeoutMs) {
5612
+ if (pc.iceGatheringState === "complete") return;
5613
+ return new Promise((resolve) => {
5614
+ const timeout = setTimeout(() => {
5615
+ resolve();
5616
+ }, timeoutMs);
5617
+ pc.iceGatheringStateChange.subscribe((state) => {
5618
+ if (state === "complete") {
5619
+ clearTimeout(timeout);
5620
+ resolve();
5621
+ }
5622
+ });
5623
+ });
5624
+ }
5625
+ /**
5626
+ * Start native Baichuan stream and pump frames to WebRTC
5627
+ */
5628
+ async startNativeStream(session) {
5629
+ this.log(
5630
+ "info",
5631
+ `Starting native stream for session ${session.id} (channel=${this.options.channel}, profile=${this.options.profile})`
5632
+ );
5633
+ session.nativeStream = createNativeStream(
5634
+ this.options.api,
5635
+ this.options.channel,
5636
+ this.options.profile,
5637
+ this.options.variant !== void 0 ? { variant: this.options.variant } : void 0
5638
+ );
5639
+ this.pumpFramesToWebRTC(session).catch((err) => {
5640
+ this.log("error", `Frame pump error for session ${session.id}: ${err}`);
5641
+ this.closeSession(session.id).catch(() => {
5642
+ });
5643
+ });
5644
+ }
5645
+ /**
5646
+ * Pump frames from native stream to WebRTC tracks
5647
+ * H.264 → RTP media track (standard WebRTC)
5648
+ * H.265 → DataChannel with raw Annex-B frames (decoded by WebCodecs in browser)
5649
+ */
5650
+ async pumpFramesToWebRTC(session) {
5651
+ if (!session.nativeStream) {
5652
+ this.log("warn", `No native stream for session ${session.id}`);
5653
+ return;
5654
+ }
5655
+ this.log("info", `Starting frame pump for session ${session.id}`);
5656
+ const werift = await this.loadWerift();
5657
+ const { RtpPacket, RtpHeader } = werift;
5658
+ let sequenceNumber = Math.floor(Math.random() * 65535);
5659
+ let timestamp = Math.floor(Math.random() * 4294967295);
5660
+ const videoClockRate = 9e4;
5661
+ let lastTimeMicros = 0;
5662
+ let lastLogTime = Date.now();
5663
+ let packetsSentSinceLastLog = 0;
5664
+ let frameNumber = 0;
5665
+ try {
5666
+ this.log("info", `Entering frame loop for session ${session.id}`);
5667
+ for await (const frame of session.nativeStream) {
5668
+ if (session.state === "disconnected" || session.state === "failed") {
5669
+ this.log(
5670
+ "debug",
5671
+ `Session ${session.id} state is ${session.state}, breaking frame loop`
5672
+ );
5673
+ break;
5674
+ }
5675
+ if (frame.audio) {
5676
+ session.stats.audioFrames++;
5677
+ } else {
5678
+ if (frame.data) {
5679
+ if (!session.videoCodec && frame.videoType) {
5680
+ const detected = detectVideoCodecFromNal(frame.data);
5681
+ session.videoCodec = detected ?? frame.videoType;
5682
+ this.log("info", `Detected video codec: ${session.videoCodec}`);
5683
+ if (session.videoDataChannel && session.videoDataChannel.readyState === "open") {
5684
+ const codecInfo = JSON.stringify({
5685
+ type: "codec",
5686
+ codec: session.videoCodec,
5687
+ width: frame.width || 0,
5688
+ height: frame.height || 0
5689
+ });
5690
+ session.videoDataChannel.send(codecInfo);
5691
+ }
5692
+ }
5693
+ if (frame.microseconds && lastTimeMicros > 0) {
5694
+ const deltaMicros = frame.microseconds - lastTimeMicros;
5695
+ const deltaTicks = Math.floor(
5696
+ deltaMicros / 1e6 * videoClockRate
5697
+ );
5698
+ timestamp = timestamp + deltaTicks >>> 0;
5699
+ } else {
5700
+ timestamp = timestamp + 3e3 >>> 0;
5701
+ }
5702
+ lastTimeMicros = frame.microseconds || 0;
5703
+ if (session.videoCodec === "H264") {
5704
+ const connState = session.peerConnection.connectionState;
5705
+ const iceState = session.peerConnection.iceConnectionState;
5706
+ const isConnected = connState === "connected" || iceState === "connected" || iceState === "completed";
5707
+ if (!isConnected) {
5708
+ if (frameNumber < 10) {
5709
+ this.log(
5710
+ "debug",
5711
+ `Waiting for connection, dropping H.264 frame ${frameNumber}`
5712
+ );
5713
+ }
5714
+ frameNumber++;
5715
+ continue;
5716
+ }
5717
+ const packetsSent = await this.sendH264Frame(
5718
+ session,
5719
+ werift,
5720
+ frame.data,
5721
+ sequenceNumber,
5722
+ timestamp
5723
+ );
5724
+ sequenceNumber = sequenceNumber + packetsSent & 65535;
5725
+ packetsSentSinceLastLog += packetsSent;
5726
+ frameNumber++;
5727
+ session.stats.videoFrames++;
5728
+ session.stats.bytesSent += frame.data.length;
5729
+ } else if (session.videoCodec === "H265") {
5730
+ const sent = await this.sendVideoFrameViaDataChannel(
5731
+ session,
5732
+ frame,
5733
+ frameNumber,
5734
+ "H265"
5735
+ );
5736
+ if (sent) {
5737
+ packetsSentSinceLastLog++;
5738
+ frameNumber++;
5739
+ session.stats.videoFrames++;
5740
+ session.stats.bytesSent += frame.data.length;
5741
+ }
5742
+ }
5743
+ const now = Date.now();
5744
+ if (now - lastLogTime >= 5e3) {
5745
+ this.log(
5746
+ "debug",
5747
+ `WebRTC session ${session.id} [${session.videoCodec}]: sent ${session.stats.videoFrames} frames, ${packetsSentSinceLastLog} packets, ${Math.round(session.stats.bytesSent / 1024)} KB`
5748
+ );
5749
+ lastLogTime = now;
5750
+ packetsSentSinceLastLog = 0;
5751
+ }
5752
+ }
5753
+ }
5754
+ }
5755
+ } catch (err) {
5756
+ this.log(
5757
+ "error",
5758
+ `Error pumping frames for session ${session.id}: ${err}`
5759
+ );
5760
+ }
5761
+ this.log("info", `Native stream ended for session ${session.id}`);
5762
+ }
5763
+ /**
5764
+ * Send H.264 frame via RTP media track
5765
+ * Returns the number of RTP packets sent
5766
+ */
5767
+ async sendH264Frame(session, werift, frameData, sequenceNumber, timestamp) {
5768
+ const annexB = convertToAnnexB(frameData);
5769
+ const nalUnits = splitAnnexBToNalPayloads(annexB);
5770
+ let hasSps = false;
5771
+ let hasPps = false;
5772
+ let hasIdr = false;
5773
+ const nalTypes = [];
5774
+ for (const nal of nalUnits) {
5775
+ const t = (nal[0] ?? 0) & 31;
5776
+ nalTypes.push(t);
5777
+ if (t === 7) {
5778
+ hasSps = true;
5779
+ session.lastH264Sps = nal;
5780
+ }
5781
+ if (t === 8) {
5782
+ hasPps = true;
5783
+ session.lastH264Pps = nal;
5784
+ }
5785
+ if (t === 5) hasIdr = true;
5786
+ }
5787
+ if (session.stats.videoFrames < 10) {
5788
+ this.log(
5789
+ "debug",
5790
+ `H.264 frame NAL types: [${nalTypes.join(",")}] (5=IDR, 7=SPS, 8=PPS, 1=P-slice)`
5791
+ );
5792
+ }
5793
+ const isKeyframe = hasIdr;
5794
+ let nalList = nalUnits;
5795
+ if (hasIdr && (!hasSps || !hasPps)) {
5796
+ const prepend = [];
5797
+ if (!hasSps && session.lastH264Sps) {
5798
+ prepend.push(session.lastH264Sps);
5799
+ this.log("debug", `Prepending cached SPS to IDR frame`);
5800
+ }
5801
+ if (!hasPps && session.lastH264Pps) {
5802
+ prepend.push(session.lastH264Pps);
5803
+ this.log("debug", `Prepending cached PPS to IDR frame`);
5804
+ }
5805
+ if (prepend.length > 0) {
5806
+ nalList = [...prepend, ...nalUnits];
5807
+ } else if (!session.lastH264Sps || !session.lastH264Pps) {
5808
+ this.log(
5809
+ "warn",
5810
+ `IDR frame without SPS/PPS and no cached parameters - frame may not decode`
5811
+ );
5812
+ }
5813
+ }
5814
+ if (!session.hasReceivedKeyframe) {
5815
+ if (hasIdr && session.lastH264Sps && session.lastH264Pps) {
5816
+ session.hasReceivedKeyframe = true;
5817
+ this.log(
5818
+ "info",
5819
+ `First H.264 keyframe received with SPS/PPS - starting video stream`
5820
+ );
5821
+ } else if (hasIdr) {
5822
+ this.log(
5823
+ "debug",
5824
+ `IDR received but waiting for SPS/PPS before starting stream`
5825
+ );
5826
+ return 0;
5827
+ } else {
5828
+ if (session.stats.videoFrames < 5) {
5829
+ this.log(
5830
+ "debug",
5831
+ `Dropping P-frame ${session.stats.videoFrames} while waiting for keyframe`
5832
+ );
5833
+ }
5834
+ return 0;
5835
+ }
5836
+ }
5837
+ let totalPacketsSent = 0;
5838
+ let currentSeqNum = sequenceNumber;
5839
+ const ssrc = session.videoTrack.ssrc || 0;
5840
+ for (let i = 0; i < nalList.length; i++) {
5841
+ const nalUnit = nalList[i];
5842
+ if (nalUnit.length === 0) continue;
5843
+ const isLastNalu = i === nalList.length - 1;
5844
+ const nalType = getH264NalType(nalUnit);
5845
+ if (nalType === 9) continue;
5846
+ const rtpPackets = this.createH264RtpPackets(
5847
+ werift,
5848
+ nalUnit,
5849
+ currentSeqNum,
5850
+ timestamp,
5851
+ isLastNalu,
5852
+ ssrc
5853
+ );
5854
+ if (session.stats.videoFrames < 3) {
5855
+ this.log(
5856
+ "info",
5857
+ `NAL ${i}: type=${nalType}, size=${nalUnit.length}, rtpPackets=${rtpPackets.length}`
5858
+ );
5859
+ }
5860
+ for (const rtpPacket of rtpPackets) {
5861
+ try {
5862
+ session.videoTrack.writeRtp(rtpPacket);
5863
+ currentSeqNum = currentSeqNum + 1 & 65535;
5864
+ totalPacketsSent++;
5865
+ } catch (err) {
5866
+ this.log(
5867
+ "error",
5868
+ `Error writing RTP packet for session ${session.id}: ${err}`
5869
+ );
5870
+ }
5871
+ }
5872
+ }
5873
+ if (session.stats.videoFrames < 3) {
5874
+ this.log(
5875
+ "info",
5876
+ `H.264 frame sent: nalCount=${nalList.length} packets=${totalPacketsSent} seq=${sequenceNumber}->${currentSeqNum} ts=${timestamp} keyframe=${isKeyframe}`
5877
+ );
5878
+ }
5879
+ return totalPacketsSent;
5880
+ }
5881
+ /**
5882
+ * Send video frame via DataChannel (works for both H.264 and H.265)
5883
+ * Format: 12-byte header + Annex-B data
5884
+ * Header: [frameNum (4)] [timestamp (4)] [flags (1)] [keyframe (1)] [reserved (2)]
5885
+ * Flags: 0x01 = H.265, 0x02 = H.264
5886
+ */
5887
+ async sendVideoFrameViaDataChannel(session, frame, frameNumber, codec) {
5888
+ if (!session.videoDataChannel) {
5889
+ if (frameNumber === 0) {
5890
+ this.log("warn", `No video data channel for session ${session.id}`);
5891
+ }
5892
+ return false;
5893
+ }
5894
+ if (session.videoDataChannel.readyState !== "open") {
5895
+ if (frameNumber === 0) {
5896
+ this.log(
5897
+ "warn",
5898
+ `Video data channel not open for session ${session.id}: ${session.videoDataChannel.readyState}`
5899
+ );
5900
+ }
5901
+ return false;
5902
+ }
5903
+ const nalUnits = parseAnnexBNalUnits(frame.data);
5904
+ let isKeyframe = frame.isKeyframe === true;
5905
+ let hasIdr = false;
5906
+ let hasSps = false;
5907
+ let hasPps = false;
5908
+ let hasVps = false;
5909
+ const nalTypes = [];
5910
+ for (const nalUnit of nalUnits) {
5911
+ if (nalUnit.length === 0) continue;
5912
+ if (codec === "H265") {
5913
+ const nalType = getH265NalType2(nalUnit);
5914
+ nalTypes.push(nalType);
5915
+ if (nalType === 32) {
5916
+ hasVps = true;
5917
+ session.lastH265Vps = nalUnit;
5918
+ }
5919
+ if (nalType === 33) {
5920
+ hasSps = true;
5921
+ session.lastH265Sps = nalUnit;
5922
+ }
5923
+ if (nalType === 34) {
5924
+ hasPps = true;
5925
+ session.lastH265Pps = nalUnit;
5926
+ }
5927
+ if (nalType === 19 || nalType === 20) {
5928
+ hasIdr = true;
5929
+ isKeyframe = true;
5930
+ }
5931
+ } else {
5932
+ const nalType = getH264NalType(nalUnit);
5933
+ nalTypes.push(nalType);
5934
+ if (nalType === 7) {
5935
+ hasSps = true;
5936
+ session.lastH264Sps = nalUnit;
5937
+ }
5938
+ if (nalType === 8) {
5939
+ hasPps = true;
5940
+ session.lastH264Pps = nalUnit;
5941
+ }
5942
+ if (nalType === 5) {
5943
+ hasIdr = true;
5944
+ isKeyframe = true;
5945
+ }
5946
+ }
5947
+ }
5948
+ if (frameNumber < 5) {
5949
+ this.log(
5950
+ "debug",
5951
+ `${codec} frame ${frameNumber} NAL types: [${nalTypes.join(",")}] hasIdr=${hasIdr} hasSps=${hasSps} hasPps=${hasPps}`
5952
+ );
5953
+ }
5954
+ if (!session.hasReceivedKeyframe) {
5955
+ if (codec === "H264") {
5956
+ if (hasIdr && session.lastH264Sps && session.lastH264Pps) {
5957
+ session.hasReceivedKeyframe = true;
5958
+ this.log(
5959
+ "info",
5960
+ `First H.264 keyframe received with SPS/PPS - starting video stream`
5961
+ );
5962
+ } else if (hasSps || hasPps) {
5963
+ this.log("debug", `Received H.264 parameter sets, waiting for IDR`);
5964
+ return false;
5965
+ } else if (hasIdr) {
5966
+ this.log("debug", `IDR received but waiting for SPS/PPS`);
5967
+ return false;
5968
+ } else {
5969
+ if (frameNumber < 10) {
5970
+ this.log(
5971
+ "debug",
5972
+ `Dropping H.264 P-frame ${frameNumber} while waiting for keyframe`
5973
+ );
5974
+ }
5975
+ return false;
5976
+ }
5977
+ } else {
5978
+ if (hasIdr && session.lastH265Vps && session.lastH265Sps && session.lastH265Pps) {
5979
+ session.hasReceivedKeyframe = true;
5980
+ this.log(
5981
+ "info",
5982
+ `First H.265 keyframe received with VPS/SPS/PPS - starting video stream`
5983
+ );
5984
+ } else if (hasVps || hasSps || hasPps) {
5985
+ this.log("debug", `Received H.265 parameter sets, waiting for IDR`);
5986
+ return false;
5987
+ } else if (hasIdr) {
5988
+ this.log("debug", `H.265 IDR received but waiting for VPS/SPS/PPS`);
5989
+ return false;
5990
+ } else {
5991
+ if (frameNumber < 10) {
5992
+ this.log(
5993
+ "debug",
5994
+ `Dropping H.265 P-frame ${frameNumber} while waiting for keyframe`
5995
+ );
5996
+ }
5997
+ return false;
5998
+ }
5999
+ }
6000
+ }
6001
+ let frameData = frame.data;
6002
+ if (hasIdr) {
6003
+ if (codec === "H264" && (!hasSps || !hasPps)) {
6004
+ const parts = [];
6005
+ if (!hasSps && session.lastH264Sps) {
6006
+ parts.push(Buffer.from([0, 0, 0, 1]));
6007
+ parts.push(session.lastH264Sps);
6008
+ }
6009
+ if (!hasPps && session.lastH264Pps) {
6010
+ parts.push(Buffer.from([0, 0, 0, 1]));
6011
+ parts.push(session.lastH264Pps);
6012
+ }
6013
+ if (parts.length > 0) {
6014
+ frameData = Buffer.concat([...parts, frame.data]);
6015
+ this.log("debug", `Prepended cached SPS/PPS to H.264 IDR frame`);
6016
+ }
6017
+ } else if (codec === "H265" && (!hasVps || !hasSps || !hasPps)) {
6018
+ const parts = [];
6019
+ if (!hasVps && session.lastH265Vps) {
6020
+ parts.push(Buffer.from([0, 0, 0, 1]));
6021
+ parts.push(session.lastH265Vps);
6022
+ }
6023
+ if (!hasSps && session.lastH265Sps) {
6024
+ parts.push(Buffer.from([0, 0, 0, 1]));
6025
+ parts.push(session.lastH265Sps);
6026
+ }
6027
+ if (!hasPps && session.lastH265Pps) {
6028
+ parts.push(Buffer.from([0, 0, 0, 1]));
6029
+ parts.push(session.lastH265Pps);
6030
+ }
6031
+ if (parts.length > 0) {
6032
+ frameData = Buffer.concat([...parts, frame.data]);
6033
+ this.log("debug", `Prepended cached VPS/SPS/PPS to H.265 IDR frame`);
6034
+ }
6035
+ }
6036
+ }
6037
+ const header = Buffer.alloc(12);
6038
+ header.writeUInt32BE(frameNumber, 0);
6039
+ header.writeUInt32BE(frame.microseconds ? frame.microseconds / 1e3 : 0, 4);
6040
+ header.writeUInt8(codec === "H265" ? 1 : 2, 8);
6041
+ header.writeUInt8(isKeyframe ? 1 : 0, 9);
6042
+ header.writeUInt16BE(0, 10);
6043
+ const packet = Buffer.concat([header, frameData]);
6044
+ if (frameNumber < 3) {
6045
+ this.log(
6046
+ "info",
6047
+ `Sending ${codec} frame ${frameNumber}: ${packet.length} bytes, keyframe=${isKeyframe}`
6048
+ );
6049
+ }
6050
+ const MAX_CHUNK_SIZE = 16e3;
6051
+ try {
6052
+ if (packet.length <= MAX_CHUNK_SIZE) {
6053
+ session.videoDataChannel.send(packet);
6054
+ } else {
6055
+ const totalChunks = Math.ceil(packet.length / MAX_CHUNK_SIZE);
6056
+ for (let i = 0; i < totalChunks; i++) {
6057
+ const start = i * MAX_CHUNK_SIZE;
6058
+ const end = Math.min(start + MAX_CHUNK_SIZE, packet.length);
6059
+ const chunk = packet.subarray(start, end);
6060
+ const chunkHeader = Buffer.alloc(2);
6061
+ chunkHeader.writeUInt8(i, 0);
6062
+ chunkHeader.writeUInt8(totalChunks, 1);
6063
+ session.videoDataChannel.send(Buffer.concat([chunkHeader, chunk]));
6064
+ }
6065
+ }
6066
+ return true;
6067
+ } catch (err) {
6068
+ this.log("error", `Error sending ${codec} frame ${frameNumber}: ${err}`);
6069
+ return false;
6070
+ }
6071
+ }
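
The method above frames each video packet as a 12-byte big-endian header (frame number, timestamp in milliseconds, codec byte, keyframe flag, two reserved bytes) followed by Annex-B data, splitting packets larger than 16000 bytes into chunks prefixed with a 2-byte [chunkIndex, totalChunks] header. A minimal receiver-side sketch in TypeScript (hypothetical; parseVideoPacket and the VideoPacket shape are not part of this package), assuming chunked packets have already been reassembled:

interface VideoPacket {
  frameNumber: number;
  timestampMs: number;
  codec: "H264" | "H265";
  isKeyframe: boolean;
  annexB: Uint8Array;
}

function parseVideoPacket(packet: ArrayBuffer): VideoPacket {
  const view = new DataView(packet);
  return {
    frameNumber: view.getUint32(0),                   // bytes 0-3, big-endian
    timestampMs: view.getUint32(4),                   // bytes 4-7, big-endian
    codec: view.getUint8(8) === 1 ? "H265" : "H264",  // byte 8: 1 = H.265, 2 = H.264
    isKeyframe: view.getUint8(9) === 1,               // byte 9: keyframe flag
    annexB: new Uint8Array(packet, 12),               // bytes 10-11 reserved; payload starts at offset 12
  };
}
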
6072
+ /**
6073
+ * Send H.265 frame via DataChannel
6074
+ * Format: 12-byte header + Annex-B data
6075
+ * Header: [frameNum (4)] [timestamp (4)] [flags (1)] [keyframe (1)] [reserved (2)]
6076
+ */
6077
+ async sendH265Frame(session, frame, frameNumber) {
6078
+ if (!session.videoDataChannel) {
6079
+ if (frameNumber === 0) {
6080
+ this.log("warn", `No video data channel for session ${session.id}`);
6081
+ }
6082
+ return;
6083
+ }
6084
+ if (session.videoDataChannel.readyState !== "open") {
6085
+ if (frameNumber === 0) {
6086
+ this.log(
6087
+ "warn",
6088
+ `Video data channel not open for session ${session.id}: ${session.videoDataChannel.readyState}`
6089
+ );
6090
+ }
6091
+ return;
6092
+ }
6093
+ let isKeyframe = frame.isKeyframe === true;
6094
+ if (!isKeyframe && frame.isKeyframe === void 0) {
6095
+ const nalUnits = parseAnnexBNalUnits(frame.data);
6096
+ for (const nalUnit of nalUnits) {
6097
+ if (nalUnit.length === 0) continue;
6098
+ const nalType = getH265NalType2(nalUnit);
6099
+ if (nalType === 32 || nalType === 33 || nalType === 34 || nalType === 19 || nalType === 20) {
6100
+ isKeyframe = true;
6101
+ break;
6102
+ }
6103
+ }
6104
+ }
6105
+ const header = Buffer.alloc(12);
6106
+ header.writeUInt32BE(frameNumber, 0);
6107
+ header.writeUInt32BE(frame.microseconds ? frame.microseconds / 1e3 : 0, 4);
6108
+ header.writeUInt8(1, 8);
6109
+ header.writeUInt8(isKeyframe ? 1 : 0, 9);
6110
+ header.writeUInt16BE(0, 10);
6111
+ const packet = Buffer.concat([header, frame.data]);
6112
+ if (frameNumber < 3) {
6113
+ this.log(
6114
+ "info",
6115
+ `Sending H.265 frame ${frameNumber}: ${packet.length} bytes, keyframe=${isKeyframe}`
6116
+ );
6117
+ }
6118
+ const MAX_CHUNK_SIZE = 16e3;
6119
+ try {
6120
+ if (packet.length <= MAX_CHUNK_SIZE) {
6121
+ session.videoDataChannel.send(packet);
6122
+ } else {
6123
+ const totalChunks = Math.ceil(packet.length / MAX_CHUNK_SIZE);
6124
+ for (let i = 0; i < totalChunks; i++) {
6125
+ const start = i * MAX_CHUNK_SIZE;
6126
+ const end = Math.min(start + MAX_CHUNK_SIZE, packet.length);
6127
+ const chunk = packet.subarray(start, end);
6128
+ const chunkHeader = Buffer.alloc(2);
6129
+ chunkHeader.writeUInt8(i, 0);
6130
+ chunkHeader.writeUInt8(totalChunks, 1);
6131
+ session.videoDataChannel.send(Buffer.concat([chunkHeader, chunk]));
6132
+ }
6133
+ }
6134
+ } catch (err) {
6135
+ this.log("error", `Error sending H.265 frame ${frameNumber}: ${err}`);
6136
+ }
6137
+ }
6138
+ /**
6139
+ * Create RTP packets for H.264 NAL unit
6140
+ * Handles single NAL, STAP-A aggregation, and FU-A fragmentation
6141
+ */
6142
+ createH264RtpPackets(werift, nalUnit, sequenceNumber, timestamp, marker, ssrc) {
6143
+ const { RtpPacket, RtpHeader } = werift;
6144
+ const MTU = 1200;
6145
+ const packets = [];
6146
+ if (nalUnit.length <= MTU) {
6147
+ const header = new RtpHeader();
6148
+ header.payloadType = 96;
6149
+ header.sequenceNumber = sequenceNumber;
6150
+ header.timestamp = timestamp;
6151
+ header.marker = marker;
6152
+ header.ssrc = ssrc;
6153
+ packets.push(new RtpPacket(header, nalUnit));
6154
+ } else {
6155
+ const nalHeader = nalUnit[0];
6156
+ const nalType = nalHeader & 31;
6157
+ const nri = nalHeader & 96;
6158
+ const fuIndicator = (nri | 28) & 255;
6159
+ let offset = 1;
6160
+ let isFirst = true;
6161
+ while (offset < nalUnit.length) {
6162
+ const remaining = nalUnit.length - offset;
6163
+ const chunkSize = Math.min(remaining, MTU - 2);
6164
+ const isLast = offset + chunkSize >= nalUnit.length;
6165
+ let fuHeader = nalType;
6166
+ if (isFirst) fuHeader |= 128;
6167
+ if (isLast) fuHeader |= 64;
6168
+ const fuPayload = Buffer.alloc(2 + chunkSize);
6169
+ fuPayload[0] = fuIndicator;
6170
+ fuPayload[1] = fuHeader;
6171
+ nalUnit.copy(fuPayload, 2, offset, offset + chunkSize);
6172
+ const header = new RtpHeader();
6173
+ header.payloadType = 96;
6174
+ header.sequenceNumber = sequenceNumber + packets.length & 65535;
6175
+ header.timestamp = timestamp;
6176
+ header.marker = isLast && marker;
6177
+ header.ssrc = ssrc;
6178
+ packets.push(new RtpPacket(header, fuPayload));
6179
+ offset += chunkSize;
6180
+ isFirst = false;
6181
+ }
6182
+ }
6183
+ return packets;
6184
+ }
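
The fragmentation branch above implements RFC 6184 FU-A packetization: the FU indicator keeps the original NAL's NRI bits and sets type 28, while the FU header carries the original NAL type plus start (S) and end (E) bits. A worked example of the byte math, assuming an H.264 IDR NAL whose first byte is 0x65 (F=0, NRI=3, type=5):

// Illustrative only; mirrors the bit operations in createH264RtpPackets above.
const nalHeader = 0x65;
const nri = nalHeader & 0x60;                    // 0x60: NRI bits preserved
const fuIndicator = nri | 28;                    // 0x7c: F=0, NRI=3, type 28 (FU-A)
const firstFuHeader = (nalHeader & 0x1f) | 0x80; // 0x85: S bit + original type 5
const lastFuHeader = (nalHeader & 0x1f) | 0x40;  // 0x45: E bit + original type 5
// Middle fragments carry just the original type byte: 0x05.
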
6185
+ /**
6186
+ * Start intercom (two-way audio)
6187
+ */
6188
+ async startIntercom(session) {
6189
+ try {
6190
+ session.intercom = new Intercom({
6191
+ api: this.options.api,
6192
+ channel: this.options.channel
6193
+ });
6194
+ await session.intercom.start();
6195
+ this.log("info", `Intercom started for session ${session.id}`);
6196
+ } catch (err) {
6197
+ this.log(
6198
+ "error",
6199
+ `Failed to start intercom for session ${session.id}: ${err}`
6200
+ );
6201
+ session.intercom = null;
6202
+ }
6203
+ }
6204
+ /**
6205
+ * Log helper
6206
+ */
6207
+ log(level, message) {
6208
+ if (this.options.logger) {
6209
+ this.options.logger(level, message);
6210
+ }
6211
+ }
6212
+ };
6213
+
6214
+ // src/baichuan/stream/BaichuanHlsServer.ts
6215
+ import { EventEmitter as EventEmitter6 } from "events";
6216
+ import fs from "fs";
6217
+ import fsp from "fs/promises";
6218
+ import os from "os";
6219
+ import path from "path";
6220
+ import { spawn as spawn7 } from "child_process";
6221
+ function parseAnnexBNalUnits2(data) {
6222
+ const units = [];
6223
+ const len = data.length;
6224
+ const findStart = (from) => {
6225
+ for (let i = from; i + 3 < len; i++) {
6226
+ if (data[i] === 0 && data[i + 1] === 0) {
6227
+ if (data[i + 2] === 1) return i;
6228
+ if (i + 4 < len && data[i + 2] === 0 && data[i + 3] === 1)
6229
+ return i;
6230
+ }
6231
+ }
6232
+ return -1;
6233
+ };
6234
+ const startCodeLenAt = (i) => {
6235
+ if (i + 3 < len && data[i] === 0 && data[i + 1] === 0) {
6236
+ if (data[i + 2] === 1) return 3;
6237
+ if (i + 4 < len && data[i + 2] === 0 && data[i + 3] === 1) return 4;
6238
+ }
6239
+ return 0;
6240
+ };
6241
+ let start = findStart(0);
6242
+ if (start < 0) return units;
6243
+ while (start >= 0) {
6244
+ const scLen = startCodeLenAt(start);
6245
+ if (!scLen) break;
6246
+ const nalStart = start + scLen;
6247
+ let next = findStart(nalStart);
6248
+ if (next < 0) next = len;
6249
+ if (nalStart < next) units.push(data.subarray(nalStart, next));
6250
+ start = next < len ? next : -1;
6251
+ }
6252
+ return units;
6253
+ }
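
parseAnnexBNalUnits2 above scans for both 3-byte (00 00 01) and 4-byte (00 00 00 01) start codes and returns each NAL payload with the start code stripped. An illustrative input/output pair (hypothetical test data, not part of the package):

import { Buffer } from "buffer";

const sample = Buffer.from([
  0, 0, 0, 1, 0x67, 0xaa,        // SPS behind a 4-byte start code
  0, 0, 1, 0x68, 0xbb,           // PPS behind a 3-byte start code
  0, 0, 0, 1, 0x65, 0xcc, 0xdd,  // IDR slice, runs to the end of the buffer
]);
// parseAnnexBNalUnits2(sample) yields three buffers:
//   [0x67, 0xaa], [0x68, 0xbb], [0x65, 0xcc, 0xdd]
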
6254
+ function isKeyframeAnnexB(codec, annexB) {
6255
+ const nals = parseAnnexBNalUnits2(annexB);
6256
+ for (const nal of nals) {
6257
+ if (!nal || nal.length === 0) continue;
6258
+ if (codec === "h264") {
6259
+ const nalType = nal[0] & 31;
6260
+ if (nalType === 5) return true;
6261
+ } else {
6262
+ const nalType = nal[0] >> 1 & 63;
6263
+ if (nalType >= 16 && nalType <= 21) return true;
6264
+ }
6265
+ }
6266
+ return false;
6267
+ }
6268
+ function hasParamSets(codec, annexB) {
6269
+ const nals = parseAnnexBNalUnits2(annexB);
6270
+ for (const nal of nals) {
6271
+ if (!nal || nal.length === 0) continue;
6272
+ if (codec === "h264") {
6273
+ const nalType = nal[0] & 31;
6274
+ if (nalType === 7 || nalType === 8) return true;
6275
+ } else {
6276
+ const nalType = nal[0] >> 1 & 63;
6277
+ if (nalType === 32 || nalType === 33 || nalType === 34) return true;
6278
+ }
6279
+ }
6280
+ return false;
6281
+ }
6282
+ function getNalTypes(codec, annexB) {
6283
+ const nals = parseAnnexBNalUnits2(annexB);
6284
+ return nals.map((nal) => {
6285
+ if (codec === "h265") {
6286
+ return nal[0] >> 1 & 63;
6287
+ } else {
6288
+ return nal[0] & 31;
6289
+ }
6290
+ });
6291
+ }
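
The helpers above key off standard NAL unit type values. For reference, the constants they rely on, per the H.264/H.265 specifications (documentation-only sketch, not exported by the package):

const NAL_TYPE_REFERENCE = {
  h264: {
    typeOf: (firstByte: number) => firstByte & 0x1f,
    idr: 5,                           // keyframe slice
    sps: 7,
    pps: 8,
  },
  h265: {
    typeOf: (firstByte: number) => (firstByte >> 1) & 0x3f,
    irap: [16, 17, 18, 19, 20, 21],   // BLA/IDR/CRA keyframe pictures
    vps: 32,
    sps: 33,
    pps: 34,
  },
};
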
6292
+ var BaichuanHlsServer = class extends EventEmitter6 {
6293
+ api;
6294
+ channel;
6295
+ profile;
6296
+ variant;
6297
+ segmentDuration;
6298
+ playlistSize;
6299
+ ffmpegPath;
6300
+ log;
6301
+ outputDir = null;
6302
+ createdTempDir = false;
6303
+ playlistPath = null;
6304
+ segmentPattern = null;
6305
+ state = "idle";
6306
+ codec = null;
6307
+ framesReceived = 0;
6308
+ ffmpeg = null;
6309
+ nativeStream = null;
6310
+ pumpPromise = null;
6311
+ startedAt = null;
6312
+ lastError = null;
6313
+ constructor(options) {
6314
+ super();
6315
+ this.api = options.api;
6316
+ this.channel = options.channel;
6317
+ this.profile = options.profile;
6318
+ this.variant = options.variant ?? void 0;
6319
+ this.segmentDuration = options.segmentDuration ?? 2;
6320
+ this.playlistSize = options.playlistSize ?? 5;
6321
+ this.ffmpegPath = options.ffmpegPath ?? "ffmpeg";
6322
+ if (options.outputDir) {
6323
+ this.outputDir = options.outputDir;
6324
+ this.createdTempDir = false;
6325
+ }
6326
+ this.log = options.logger ?? (() => {
6327
+ });
6328
+ }
6329
+ /**
6330
+ * Start HLS streaming
6331
+ */
6332
+ async start() {
6333
+ if (this.state === "running" || this.state === "starting") {
6334
+ return;
6335
+ }
6336
+ this.state = "starting";
6337
+ this.lastError = null;
6338
+ try {
6339
+ if (!this.outputDir) {
6340
+ this.outputDir = await fsp.mkdtemp(
6341
+ path.join(os.tmpdir(), `nodelink-hls-${this.profile}-`)
6342
+ );
6343
+ this.createdTempDir = true;
6344
+ } else {
6345
+ await fsp.mkdir(this.outputDir, { recursive: true });
6346
+ }
6347
+ this.playlistPath = path.join(this.outputDir, "playlist.m3u8");
6348
+ this.segmentPattern = path.join(this.outputDir, "segment_%05d.ts");
6349
+ this.log("info", `Starting HLS stream to ${this.outputDir}`);
6350
+ this.nativeStream = createNativeStream(
6351
+ this.api,
6352
+ this.channel,
6353
+ this.profile,
6354
+ this.variant ? { variant: this.variant } : void 0
6355
+ );
6356
+ this.pumpPromise = this.pumpNativeToFfmpeg();
6357
+ this.startedAt = /* @__PURE__ */ new Date();
6358
+ this.state = "running";
6359
+ this.emit("started", { outputDir: this.outputDir });
6360
+ } catch (err) {
6361
+ this.state = "error";
6362
+ this.lastError = String(err);
6363
+ this.log("error", `Failed to start HLS: ${err}`);
6364
+ throw err;
6365
+ }
6366
+ }
6367
+ /**
6368
+ * Stop HLS streaming
6369
+ */
6370
+ async stop() {
6371
+ if (this.state === "idle" || this.state === "stopped") {
6372
+ return;
6373
+ }
6374
+ this.state = "stopping";
6375
+ this.log("info", "Stopping HLS stream");
6376
+ try {
6377
+ this.ffmpeg?.stdin?.end();
6378
+ } catch {
6379
+ }
6380
+ try {
6381
+ this.ffmpeg?.kill("SIGKILL");
6382
+ } catch {
6383
+ }
6384
+ this.ffmpeg = null;
6385
+ if (this.nativeStream) {
6386
+ try {
6387
+ await this.nativeStream.return(void 0);
6388
+ } catch {
6389
+ }
6390
+ this.nativeStream = null;
6391
+ }
6392
+ if (this.pumpPromise) {
6393
+ try {
6394
+ await this.pumpPromise;
6395
+ } catch {
6396
+ }
6397
+ this.pumpPromise = null;
6398
+ }
6399
+ if (this.createdTempDir && this.outputDir) {
6400
+ try {
6401
+ await fsp.rm(this.outputDir, { recursive: true, force: true });
6402
+ } catch {
6403
+ }
6404
+ }
6405
+ this.state = "stopped";
6406
+ this.emit("stopped");
6407
+ }
6408
+ /**
6409
+ * Get current status
6410
+ */
6411
+ getStatus() {
6412
+ return {
6413
+ state: this.state,
6414
+ codec: this.codec,
6415
+ framesReceived: this.framesReceived,
6416
+ ffmpegRunning: this.ffmpeg !== null && !this.ffmpeg.killed,
6417
+ playlistPath: this.playlistPath,
6418
+ outputDir: this.outputDir,
6419
+ startedAt: this.startedAt,
6420
+ error: this.lastError
6421
+ };
6422
+ }
6423
+ /**
6424
+ * Get playlist file path
6425
+ */
6426
+ getPlaylistPath() {
6427
+ return this.playlistPath;
6428
+ }
6429
+ /**
6430
+ * Get output directory
6431
+ */
6432
+ getOutputDir() {
6433
+ return this.outputDir;
6434
+ }
6435
+ /**
6436
+ * Check if playlist file exists
6437
+ */
6438
+ async waitForPlaylist(timeoutMs = 2e4) {
6439
+ if (!this.playlistPath) return false;
6440
+ const deadline = Date.now() + timeoutMs;
6441
+ while (Date.now() < deadline) {
6442
+ if (fs.existsSync(this.playlistPath)) {
6443
+ return true;
6444
+ }
6445
+ await new Promise((r) => setTimeout(r, 150));
6446
+ }
6447
+ return false;
6448
+ }
6449
+ /**
6450
+ * Read an HLS asset (playlist or segment)
6451
+ */
6452
+ async readAsset(assetName) {
6453
+ if (!this.outputDir) return null;
6454
+ const safe = assetName.replace(/^\/+/, "");
6455
+ if (safe.includes("..") || safe.includes("/")) {
6456
+ return null;
6457
+ }
6458
+ const filePath = path.join(this.outputDir, safe);
6459
+ if (!fs.existsSync(filePath)) {
6460
+ return null;
6461
+ }
6462
+ const data = await fsp.readFile(filePath);
6463
+ let contentType = "application/octet-stream";
6464
+ if (safe.endsWith(".m3u8")) {
6465
+ contentType = "application/vnd.apple.mpegurl";
6466
+ } else if (safe.endsWith(".ts")) {
6467
+ contentType = "video/mp2t";
6468
+ }
6469
+ return { data, contentType };
6470
+ }
6471
+ // ============================================================================
6472
+ // Private Methods
6473
+ // ============================================================================
6474
+ async pumpNativeToFfmpeg() {
6475
+ if (!this.nativeStream || !this.playlistPath || !this.segmentPattern) {
6476
+ return;
6477
+ }
6478
+ let startedFfmpeg = false;
6479
+ let pendingParamSets = [];
6480
+ const MAX_FRAMES_WAIT_FOR_KEYFRAME = 180;
6481
+ const collectParamSets = (codec, annexB) => {
6482
+ const nals = parseAnnexBNalUnits2(annexB);
6483
+ for (const nal of nals) {
6484
+ if (!nal || nal.length === 0) continue;
6485
+ if (codec === "h264") {
6486
+ const t = nal[0] & 31;
6487
+ if (t === 7 || t === 8) {
6488
+ pendingParamSets.push(
6489
+ Buffer.concat([Buffer.from([0, 0, 0, 1]), nal])
6490
+ );
6491
+ }
6492
+ } else {
6493
+ const t = nal[0] >> 1 & 63;
6494
+ if (t === 32 || t === 33 || t === 34) {
6495
+ pendingParamSets.push(
6496
+ Buffer.concat([Buffer.from([0, 0, 0, 1]), nal])
6497
+ );
6498
+ }
6499
+ }
6500
+ }
6501
+ if (pendingParamSets.length > 12) {
6502
+ pendingParamSets = pendingParamSets.slice(-12);
6503
+ }
6504
+ };
6505
+ try {
6506
+ for await (const frame of this.nativeStream) {
6507
+ if (this.state !== "running") break;
6508
+ if (frame.audio) continue;
6509
+ if (!frame.data || frame.data.length === 0) continue;
6510
+ if (!this.codec) {
6511
+ const detected = detectVideoCodecFromNal(frame.data);
6512
+ const fromMeta = frame.videoType === "H265" ? "h265" : "h264";
6513
+ this.codec = detected ? detected.toLowerCase() : fromMeta;
6514
+ this.log(
6515
+ "info",
6516
+ `HLS codec detected: meta=${fromMeta} detected=${detected} (using ${this.codec})`
6517
+ );
6518
+ this.emit("codec-detected", { codec: this.codec });
6519
+ }
6520
+ const annexB = this.codec === "h265" ? convertToAnnexB2(frame.data) : convertToAnnexB(frame.data);
6521
+ this.framesReceived++;
6522
+ const shouldLog = this.framesReceived <= 5 || this.framesReceived <= 60 && this.framesReceived % 10 === 0;
6523
+ if (shouldLog) {
6524
+ const nalTypes = getNalTypes(this.codec, annexB);
6525
+ const hasIdr = isKeyframeAnnexB(this.codec, annexB);
6526
+ const hasParams = hasParamSets(this.codec, annexB);
6527
+ this.log(
6528
+ "debug",
6529
+ `HLS frame#${this.framesReceived}: bytes=${annexB.length} nalTypes=[${nalTypes.join(",")}] hasIDR=${hasIdr} hasParams=${hasParams}`
6530
+ );
6531
+ }
6532
+ collectParamSets(this.codec, annexB);
6533
+ const isKeyframe = isKeyframeAnnexB(this.codec, annexB);
6534
+ if (!isKeyframe && !startedFfmpeg) {
6535
+ if (this.framesReceived < MAX_FRAMES_WAIT_FOR_KEYFRAME) {
6536
+ continue;
6537
+ }
6538
+ this.log(
6539
+ "warn",
6540
+ `No keyframe after ${this.framesReceived} frames, starting ffmpeg anyway`
6541
+ );
6542
+ }
6543
+ if (!startedFfmpeg) {
6544
+ this.log(
6545
+ "info",
6546
+ `Starting ffmpeg: codec=${this.codec} framesSeen=${this.framesReceived} isKeyframe=${isKeyframe} paramSets=${pendingParamSets.length}`
6547
+ );
6548
+ this.ffmpeg = this.spawnFfmpeg();
6549
+ startedFfmpeg = true;
6550
+ this.emit("ffmpeg-started");
6551
+ try {
6552
+ if (this.ffmpeg?.stdin && !this.ffmpeg.stdin.destroyed) {
6553
+ for (const ps of pendingParamSets) {
6554
+ this.ffmpeg.stdin.write(ps);
6555
+ }
6556
+ }
6557
+ } catch {
6558
+ }
6559
+ }
6560
+ if (!this.ffmpeg || !this.ffmpeg.stdin || this.ffmpeg.stdin.destroyed) {
6561
+ this.log("warn", "ffmpeg stdin not available, stopping pump");
6562
+ break;
6563
+ }
6564
+ try {
6565
+ this.ffmpeg.stdin.write(annexB);
6566
+ if (this.framesReceived % 100 === 0 || this.framesReceived <= 5 || this.framesReceived <= 50 && this.framesReceived % 10 === 0) {
6567
+ this.log(
6568
+ "debug",
6569
+ `HLS fed frame #${this.framesReceived} to ffmpeg (${annexB.length} bytes)`
6570
+ );
6571
+ }
6572
+ } catch (err) {
6573
+ this.log("error", `Failed to write to ffmpeg: ${err}`);
6574
+ break;
6575
+ }
6576
+ }
6577
+ } catch (e) {
6578
+ this.log("error", `HLS pump error: ${e}`);
6579
+ this.lastError = String(e);
6580
+ this.state = "error";
6581
+ this.emit("error", e);
6582
+ }
6583
+ }
6584
+ spawnFfmpeg() {
6585
+ if (!this.playlistPath || !this.segmentPattern) {
6586
+ throw new Error("Playlist path not set");
6587
+ }
6588
+ const codec = this.codec ?? "h264";
6589
+ const args = [
6590
+ "-hide_banner",
6591
+ "-loglevel",
6592
+ "warning",
6593
+ "-fflags",
6594
+ "+genpts",
6595
+ "-use_wallclock_as_timestamps",
6596
+ "1",
6597
+ "-r",
6598
+ "25",
6599
+ "-f",
6600
+ codec === "h265" ? "hevc" : "h264",
6601
+ "-i",
6602
+ "pipe:0"
6603
+ ];
6604
+ if (codec === "h265") {
6605
+ args.push(
6606
+ "-c:v",
6607
+ "libx264",
6608
+ "-preset",
6609
+ "veryfast",
6610
+ "-tune",
6611
+ "zerolatency",
6612
+ "-pix_fmt",
6613
+ "yuv420p"
6614
+ );
6615
+ } else {
6616
+ args.push("-c:v", "copy");
6617
+ }
6618
+ args.push(
6619
+ "-f",
6620
+ "hls",
6621
+ "-hls_time",
6622
+ String(this.segmentDuration),
6623
+ "-hls_list_size",
6624
+ String(this.playlistSize),
6625
+ "-hls_flags",
6626
+ "delete_segments+append_list+omit_endlist",
6627
+ "-hls_segment_filename",
6628
+ this.segmentPattern,
6629
+ this.playlistPath
6630
+ );
6631
+ const p = spawn7(this.ffmpegPath, args, {
6632
+ stdio: ["pipe", "ignore", "pipe"]
6633
+ });
6634
+ p.on("error", (err) => {
6635
+ this.log("error", `ffmpeg spawn error: ${err}`);
6636
+ this.emit("ffmpeg-error", err);
6637
+ });
6638
+ p.stderr?.on("data", (d) => {
6639
+ const s = String(d ?? "").trim();
6640
+ if (s) this.log("warn", `[ffmpeg] ${s}`);
6641
+ });
6642
+ p.on("exit", (code, signal) => {
6643
+ this.log(
6644
+ "warn",
6645
+ `ffmpeg exited (code=${code ?? "?"} signal=${signal ?? "?"})`
6646
+ );
6647
+ this.emit("ffmpeg-exited", { code, signal });
6648
+ });
6649
+ return p;
6650
+ }
6651
+ };
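
Taken together, BaichuanHlsServer pulls raw frames from createNativeStream, gates on a keyframe (or gives up after 180 frames), pipes Annex-B data into ffmpeg, and exposes the resulting playlist and segments via readAsset. A minimal usage sketch (assumptions: api is an already-connected ReolinkBaichuanApi instance, the "main" profile string and the port are illustrative, and the HTTP wiring below is not part of the package):

import { createServer } from "http";
import { BaichuanHlsServer } from "@apocaliss92/nodelink-js";

async function serveHls(api: any): Promise<void> {
  const hls = new BaichuanHlsServer({
    api,
    channel: 0,
    profile: "main",          // assumption: valid profile names depend on the camera
    segmentDuration: 2,
    playlistSize: 5,
    logger: (level: string, msg: string) => console.log(`[hls:${level}] ${msg}`),
  });

  await hls.start();
  await hls.waitForPlaylist(20_000);  // resolves true once ffmpeg has written playlist.m3u8

  createServer(async (req, res) => {
    // e.g. GET /playlist.m3u8 or GET /segment_00001.ts
    const asset = await hls.readAsset(req.url ?? "playlist.m3u8");
    if (!asset) {
      res.writeHead(404);
      res.end();
      return;
    }
    res.writeHead(200, { "Content-Type": asset.contentType });
    res.end(asset.data);
  }).listen(8080);
}
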
6652
+
6653
+ // src/multifocal/compositeRtspServer.ts
6654
+ import { EventEmitter as EventEmitter7 } from "events";
6655
+ import { spawn as spawn8 } from "child_process";
4314
6656
  import * as net from "net";
4315
- var CompositeRtspServer = class extends EventEmitter3 {
6657
+ var CompositeRtspServer = class extends EventEmitter7 {
4316
6658
  options;
4317
6659
  compositeStream = null;
4318
6660
  rtspServer = null;
@@ -4417,7 +6759,7 @@ var CompositeRtspServer = class extends EventEmitter3 {
4417
6759
  this.logger.log?.(
4418
6760
  `[CompositeRtspServer] Starting ffmpeg RTSP server: ${ffmpegArgs.join(" ")}`
4419
6761
  );
4420
- this.ffmpegProcess = spawn6("ffmpeg", ffmpegArgs, {
6762
+ this.ffmpegProcess = spawn8("ffmpeg", ffmpegArgs, {
4421
6763
  stdio: ["pipe", "pipe", "pipe"]
4422
6764
  });
4423
6765
  this.ffmpegProcess.on("error", (error) => {
@@ -4629,9 +6971,12 @@ export {
4629
6971
  BaichuanClient,
4630
6972
  BaichuanEventEmitter,
4631
6973
  BaichuanFrameParser,
6974
+ BaichuanHlsServer,
4632
6975
  BaichuanHttpStreamServer,
6976
+ BaichuanMjpegServer,
4633
6977
  BaichuanRtspServer,
4634
6978
  BaichuanVideoStream,
6979
+ BaichuanWebRTCServer,
4635
6980
  BcMediaAnnexBDecoder,
4636
6981
  BcMediaCodec,
4637
6982
  BcUdpStream,
@@ -4642,7 +6987,9 @@ export {
4642
6987
  DUAL_LENS_SINGLE_MOTION_MODELS,
4643
6988
  H264RtpDepacketizer,
4644
6989
  H265RtpDepacketizer,
6990
+ HlsSessionManager,
4645
6991
  Intercom,
6992
+ MjpegTransformer,
4646
6993
  NVR_HUB_EXACT_TYPES,
4647
6994
  NVR_HUB_MODEL_PATTERNS,
4648
6995
  ReolinkBaichuanApi,
@@ -4662,6 +7009,7 @@ export {
4662
7009
  buildBinaryExtensionXml,
4663
7010
  buildChannelExtensionXml,
4664
7011
  buildFloodlightManualXml,
7012
+ buildHlsRedirectUrl,
4665
7013
  buildLoginXml,
4666
7014
  buildPreviewStopXml,
4667
7015
  buildPreviewStopXmlV11,
@@ -4689,6 +7037,7 @@ export {
4689
7037
  createDebugGateLogger,
4690
7038
  createDiagnosticsBundle,
4691
7039
  createLogger,
7040
+ createMjpegBoundary,
4692
7041
  createNativeStream,
4693
7042
  createNullLogger,
4694
7043
  createReplayHttpServer,
@@ -4696,8 +7045,10 @@ export {
4696
7045
  createRfc4571TcpServerForReplay,
4697
7046
  createRtspProxyServer,
4698
7047
  createTaggedLogger,
7048
+ decideVideoclipTranscodeMode,
4699
7049
  decodeHeader,
4700
7050
  deriveAesKey,
7051
+ detectIosClient,
4701
7052
  detectVideoCodecFromNal,
4702
7053
  discoverReolinkDevices,
4703
7054
  discoverViaHttpScan,
@@ -4710,10 +7061,13 @@ export {
4710
7061
  extractSpsFromAnnexB,
4711
7062
  extractVpsFromAnnexB,
4712
7063
  flattenAbilitiesForChannel,
7064
+ formatMjpegFrame,
4713
7065
  getConstructedVideoStreamOptions,
4714
7066
  getGlobalLogger,
4715
7067
  getH265NalType,
7068
+ getMjpegContentType,
4716
7069
  getVideoStream,
7070
+ getVideoclipClientInfo,
4717
7071
  getXmlText,
4718
7072
  hasStartCodes2 as hasH265StartCodes,
4719
7073
  hasStartCodes,