@apocaliss92/nodelink-js 0.1.7 → 0.1.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -6627,12 +6627,12 @@ var init_ReolinkCgiApi = __esm({
6627
6627
  "getVideoclipThumbnailJpeg",
6628
6628
  `Extracting thumbnail from VOD URL (FLV): ${vodUrl.substring(0, 100)}... (seek=${seekSeconds}s)`
6629
6629
  );
6630
- const { spawn: spawn10 } = await import("child_process");
6630
+ const { spawn: spawn11 } = await import("child_process");
6631
6631
  return new Promise((resolve, reject) => {
6632
6632
  const chunks = [];
6633
6633
  let stderr = "";
6634
6634
  let timedOut = false;
6635
- const ffmpeg = spawn10(ffmpegPath, [
6635
+ const ffmpeg = spawn11(ffmpegPath, [
6636
6636
  "-y",
6637
6637
  "-analyzeduration",
6638
6638
  "10000000",
@@ -7316,8 +7316,10 @@ __export(index_exports, {
7316
7316
  BaichuanEventEmitter: () => BaichuanEventEmitter,
7317
7317
  BaichuanFrameParser: () => BaichuanFrameParser,
7318
7318
  BaichuanHttpStreamServer: () => BaichuanHttpStreamServer,
7319
+ BaichuanMjpegServer: () => BaichuanMjpegServer,
7319
7320
  BaichuanRtspServer: () => BaichuanRtspServer,
7320
7321
  BaichuanVideoStream: () => BaichuanVideoStream,
7322
+ BaichuanWebRTCServer: () => BaichuanWebRTCServer,
7321
7323
  BcMediaAnnexBDecoder: () => BcMediaAnnexBDecoder,
7322
7324
  BcMediaCodec: () => BcMediaCodec,
7323
7325
  BcUdpStream: () => BcUdpStream,
@@ -7329,6 +7331,7 @@ __export(index_exports, {
7329
7331
  H264RtpDepacketizer: () => H264RtpDepacketizer,
7330
7332
  H265RtpDepacketizer: () => H265RtpDepacketizer,
7331
7333
  Intercom: () => Intercom,
7334
+ MjpegTransformer: () => MjpegTransformer,
7332
7335
  NVR_HUB_EXACT_TYPES: () => NVR_HUB_EXACT_TYPES,
7333
7336
  NVR_HUB_MODEL_PATTERNS: () => NVR_HUB_MODEL_PATTERNS,
7334
7337
  ReolinkBaichuanApi: () => ReolinkBaichuanApi,
@@ -7375,6 +7378,7 @@ __export(index_exports, {
7375
7378
  createDebugGateLogger: () => createDebugGateLogger,
7376
7379
  createDiagnosticsBundle: () => createDiagnosticsBundle,
7377
7380
  createLogger: () => createLogger,
7381
+ createMjpegBoundary: () => createMjpegBoundary,
7378
7382
  createNativeStream: () => createNativeStream,
7379
7383
  createNullLogger: () => createNullLogger,
7380
7384
  createReplayHttpServer: () => createReplayHttpServer,
@@ -7382,6 +7386,7 @@ __export(index_exports, {
7382
7386
  createRfc4571TcpServerForReplay: () => createRfc4571TcpServerForReplay,
7383
7387
  createRtspProxyServer: () => createRtspProxyServer,
7384
7388
  createTaggedLogger: () => createTaggedLogger,
7389
+ decideVideoclipTranscodeMode: () => decideVideoclipTranscodeMode,
7385
7390
  decodeHeader: () => decodeHeader,
7386
7391
  deriveAesKey: () => deriveAesKey,
7387
7392
  detectVideoCodecFromNal: () => detectVideoCodecFromNal,
@@ -7396,10 +7401,13 @@ __export(index_exports, {
7396
7401
  extractSpsFromAnnexB: () => extractSpsFromAnnexB,
7397
7402
  extractVpsFromAnnexB: () => extractVpsFromAnnexB,
7398
7403
  flattenAbilitiesForChannel: () => flattenAbilitiesForChannel,
7404
+ formatMjpegFrame: () => formatMjpegFrame,
7399
7405
  getConstructedVideoStreamOptions: () => getConstructedVideoStreamOptions,
7400
7406
  getGlobalLogger: () => getGlobalLogger,
7401
7407
  getH265NalType: () => getH265NalType,
7408
+ getMjpegContentType: () => getMjpegContentType,
7402
7409
  getVideoStream: () => getVideoStream,
7410
+ getVideoclipClientInfo: () => getVideoclipClientInfo,
7403
7411
  getXmlText: () => getXmlText,
7404
7412
  hasH265StartCodes: () => hasStartCodes2,
7405
7413
  hasStartCodes: () => hasStartCodes,
@@ -20110,7 +20118,7 @@ ${stderr}`)
20110
20118
  * Convert a raw video keyframe to JPEG using ffmpeg.
20111
20119
  */
20112
20120
  async convertFrameToJpeg(params) {
20113
- const { spawn: spawn10 } = await import("child_process");
20121
+ const { spawn: spawn11 } = await import("child_process");
20114
20122
  const ffmpeg = params.ffmpegPath ?? "ffmpeg";
20115
20123
  const inputFormat = params.videoCodec === "H265" ? "hevc" : "h264";
20116
20124
  return new Promise((resolve, reject) => {
@@ -20132,7 +20140,7 @@ ${stderr}`)
20132
20140
  "2",
20133
20141
  "pipe:1"
20134
20142
  ];
20135
- const proc = spawn10(ffmpeg, args, {
20143
+ const proc = spawn11(ffmpeg, args, {
20136
20144
  stdio: ["pipe", "pipe", "pipe"]
20137
20145
  });
20138
20146
  const chunks = [];
@@ -20275,7 +20283,7 @@ ${stderr}`)
20275
20283
  * Internal helper to mux video+audio into MP4 using ffmpeg.
20276
20284
  */
20277
20285
  async muxToMp4(params) {
20278
- const { spawn: spawn10 } = await import("child_process");
20286
+ const { spawn: spawn11 } = await import("child_process");
20279
20287
  const { randomUUID: randomUUID2 } = await import("crypto");
20280
20288
  const fs5 = await import("fs/promises");
20281
20289
  const os = await import("os");
@@ -20327,7 +20335,7 @@ ${stderr}`)
20327
20335
  outputPath
20328
20336
  );
20329
20337
  await new Promise((resolve, reject) => {
20330
- const p = spawn10(ffmpeg, args, { stdio: ["ignore", "ignore", "pipe"] });
20338
+ const p = spawn11(ffmpeg, args, { stdio: ["ignore", "ignore", "pipe"] });
20331
20339
  let stderr = "";
20332
20340
  p.stderr.on("data", (d) => {
20333
20341
  stderr += d.toString();
@@ -24058,8 +24066,9 @@ ${scheduleItems}
24058
24066
  let frameCount = 0;
24059
24067
  const startFfmpeg = (videoType) => {
24060
24068
  if (ff) return;
24069
+ const needsTranscode = videoType === "H265" && params.transcodeH265ToH264 === true;
24061
24070
  logger?.debug?.(
24062
- `[createRecordingReplayMp4Stream] Starting ffmpeg with videoType=${videoType}`
24071
+ `[createRecordingReplayMp4Stream] Starting ffmpeg with videoType=${videoType}, transcode=${needsTranscode}`
24063
24072
  );
24064
24073
  MpegTsMuxer.resetCounters();
24065
24074
  tsMuxer = new MpegTsMuxer({ videoType });
@@ -24071,8 +24080,8 @@ ${scheduleItems}
24071
24080
  "mpegts",
24072
24081
  "-i",
24073
24082
  "pipe:0",
24074
- "-c",
24075
- "copy",
24083
+ // Video codec: transcode H.265→H.264 if requested, otherwise copy
24084
+ ...needsTranscode ? ["-c:v", "libx264", "-preset", "ultrafast", "-crf", "23"] : ["-c", "copy"],
24076
24085
  "-movflags",
24077
24086
  "frag_keyframe+empty_moov",
24078
24087
  "-f",
@@ -25221,6 +25230,79 @@ var AutodiscoveryClient = class {
25221
25230
  }
25222
25231
  };
25223
25232
 
25233
+ // src/reolink/baichuan/types.ts
25234
/**
 * Extract browser/client identification fields from an incoming HTTP
 * request's headers.
 *
 * Header names are matched case-insensitively. Node lowercases incoming
 * header keys, but callers may hand us objects with arbitrary casing; the
 * previous exact/lowercase/UPPERCASE triple lookup still missed mixed-case
 * keys such as "User-agent", so a case-insensitive fallback scan is used.
 *
 * @param headers - request headers (values may be string or string[])
 * @returns userAgent/accept/range/secChUa/secChUaMobile/secChUaPlatform;
 *          each is `undefined` when absent, first value wins for arrays
 */
function getVideoclipClientInfo(headers) {
  const getHeader = (key) => {
    // Fast path: exact key, then the canonical lowercase form.
    let val = headers[key] ?? headers[key.toLowerCase()];
    if (val === void 0) {
      // Slow path: case-insensitive scan covers any remaining casing.
      const lower = key.toLowerCase();
      for (const k of Object.keys(headers)) {
        if (k.toLowerCase() === lower) {
          val = headers[k];
          break;
        }
      }
    }
    return Array.isArray(val) ? val[0] : val;
  };
  return {
    userAgent: getHeader("user-agent"),
    accept: getHeader("accept"),
    range: getHeader("range"),
    secChUa: getHeader("sec-ch-ua"),
    secChUaMobile: getHeader("sec-ch-ua-mobile"),
    secChUaPlatform: getHeader("sec-ch-ua-platform")
  };
}
25248
/**
 * Decide whether a videoclip should be transcoded to H.264 or passed
 * through unchanged, based on the requesting client's headers (or an
 * explicit override).
 *
 * Policy: only macOS clients are assumed to decode H.265 natively; iOS,
 * Firefox, Android, non-Mac Chrome/Edge and unknown clients all receive
 * H.264 for compatibility.
 *
 * @param headers   incoming HTTP request headers
 * @param forceMode optional mode override, returned verbatim when set
 * @returns { mode, reason, clientInfo }
 */
function decideVideoclipTranscodeMode(headers, forceMode) {
  const clientInfo = getVideoclipClientInfo(headers);
  if (forceMode) {
    return { mode: forceMode, reason: `forced: ${forceMode}`, clientInfo };
  }
  // Small helper: every branch below except macOS resolves to transcoding.
  const transcode = (reason) => ({ mode: "transcode-h264", reason, clientInfo });
  const ua = (clientInfo.userAgent ?? "").toLowerCase();
  const platform = (clientInfo.secChUaPlatform ?? "").toLowerCase().replace(/"/g, "");
  if (/iphone|ipad|ipod/.test(ua)) {
    return transcode("iOS device detected - no native H.265 support in <video>");
  }
  if (ua.includes("firefox")) {
    return transcode("Firefox detected - no H.265 support");
  }
  if (ua.includes("android") || platform === "android") {
    return transcode("Android device detected - variable H.265 support");
  }
  const isMac = ua.includes("mac os") || platform === "macos";
  const isChromium = ua.includes("chrome") || ua.includes("edg");
  if (isChromium && !isMac) {
    return transcode("Chrome/Edge on non-Mac detected - limited H.265 support");
  }
  if (isMac) {
    return {
      mode: "passthrough",
      reason: "macOS detected - native H.265 hardware decoding available",
      clientInfo
    };
  }
  return transcode("Unknown client - transcoding for compatibility");
}
25305
+
25224
25306
  // src/index.ts
25225
25307
  init_recordingFileName();
25226
25308
 
@@ -29045,6 +29127,1111 @@ var BaichuanHttpStreamServer = class extends import_node_events6.EventEmitter {
29045
29127
  }
29046
29128
  };
29047
29129
 
29130
+ // src/baichuan/stream/BaichuanMjpegServer.ts
29131
+ var import_node_events8 = require("events");
29132
+ var http5 = __toESM(require("http"), 1);
29133
+
29134
+ // src/baichuan/stream/MjpegTransformer.ts
29135
+ var import_node_events7 = require("events");
29136
+ var import_node_child_process9 = require("child_process");
29137
+ var JPEG_SOI = Buffer.from([255, 216]);
29138
+ var JPEG_EOI = Buffer.from([255, 217]);
29139
// Converts a raw H.264/H.265 Annex-B elementary stream into individual JPEG
// frames by piping it through an external `ffmpeg` child process.
// Events emitted: "frame" ({ data, timestamp }), "close" (exit code), "error".
var MjpegTransformer = class extends import_node_events7.EventEmitter {
  options; // normalized constructor options (quality defaulted below)
  ffmpeg = null; // ffmpeg child process handle; null when not running
  started = false; // start() has been called
  closed = false; // stop() has been called; terminal (instance is not restartable)
  jpegBuffer = Buffer.alloc(0); // accumulator for partial JPEG data from ffmpeg stdout
  frameCount = 0; // number of complete JPEG frames emitted so far
  lastTimestamp = 0; // timestamp attached to frames emitted next
  constructor(options) {
    super();
    this.options = {
      codec: options.codec,
      // ffmpeg -q:v scale: lower value means higher JPEG quality
      quality: options.quality ?? 5,
      width: options.width,
      height: options.height,
      maxFps: options.maxFps,
      logger: options.logger
    };
  }
  /**
   * Start the transformer (spawns FFmpeg process).
   * No-op when already started or permanently closed.
   */
  start() {
    if (this.started || this.closed) return;
    this.started = true;
    const { codec, quality, width, height, maxFps } = this.options;
    const args = [
      "-hide_banner",
      "-loglevel",
      "error",
      // Input: raw video from stdin
      "-f",
      codec === "h265" ? "hevc" : "h264",
      "-i",
      "pipe:0"
    ];
    // Optional scale/fps filters; -1 lets ffmpeg keep the aspect ratio
    // for the unspecified dimension.
    const filters = [];
    if (width || height) {
      const w = width ?? -1;
      const h = height ?? -1;
      filters.push(`scale=${w}:${h}`);
    }
    if (maxFps) {
      filters.push(`fps=${maxFps}`);
    }
    if (filters.length > 0) {
      args.push("-vf", filters.join(","));
    }
    // Output: an MJPEG byte stream on stdout, parsed by handleJpegData().
    args.push(
      "-c:v",
      "mjpeg",
      "-q:v",
      String(quality),
      "-f",
      "mjpeg",
      "pipe:1"
    );
    this.log("debug", `Starting FFmpeg with args: ${args.join(" ")}`);
    this.ffmpeg = (0, import_node_child_process9.spawn)("ffmpeg", args, {
      stdio: ["pipe", "pipe", "pipe"]
    });
    this.ffmpeg.stdout.on("data", (data) => {
      this.handleJpegData(data);
    });
    this.ffmpeg.stderr.on("data", (data) => {
      const msg = data.toString().trim();
      if (msg) {
        this.log("debug", `FFmpeg: ${msg}`);
      }
    });
    this.ffmpeg.on("close", (code) => {
      this.log("debug", `FFmpeg closed with code ${code}`);
      this.ffmpeg = null;
      // stop() emits its own "close"; suppress here to avoid a double emit.
      if (!this.closed) {
        this.emit("close", code);
      }
    });
    this.ffmpeg.on("error", (err) => {
      this.log("error", `FFmpeg error: ${err.message}`);
      this.emit("error", err);
    });
  }
  /**
   * Push an H.264/H.265 access unit (Annex-B format with start codes).
   * Silently dropped when the transformer is not running.
   *
   * @param accessUnit - Annex-B bytes written to ffmpeg stdin
   * @param timestamp  - optional timestamp; defaults to Date.now()*1e3
   *                     (presumably microseconds — TODO confirm with callers)
   */
  push(accessUnit, timestamp) {
    if (!this.started || this.closed || !this.ffmpeg) {
      return;
    }
    this.lastTimestamp = timestamp ?? Date.now() * 1e3;
    try {
      // NOTE(review): write() backpressure is ignored; acceptable for a
      // best-effort live preview, frames are simply buffered by the pipe.
      this.ffmpeg.stdin.write(accessUnit);
    } catch (err) {
      this.log("error", `Failed to write to FFmpeg: ${err}`);
    }
  }
  /**
   * Handle JPEG data from FFmpeg stdout
   * FFmpeg outputs complete JPEG images, each starting with SOI (0xFFD8)
   * and ending with EOI (0xFFD9)
   */
  handleJpegData(data) {
    this.jpegBuffer = Buffer.concat([this.jpegBuffer, data]);
    // Extract every complete SOI..EOI frame currently in the buffer.
    while (true) {
      const soiIndex = this.jpegBuffer.indexOf(JPEG_SOI);
      if (soiIndex < 0) {
        // No frame start in sight: discard garbage bytes.
        this.jpegBuffer = Buffer.alloc(0);
        break;
      }
      if (soiIndex > 0) {
        this.jpegBuffer = this.jpegBuffer.subarray(soiIndex);
      }
      // Search from offset 2 so the SOI marker itself cannot match as EOI.
      const eoiIndex = this.jpegBuffer.indexOf(JPEG_EOI, 2);
      if (eoiIndex < 0) {
        break;
      }
      const frameEnd = eoiIndex + 2;
      const jpegFrame = this.jpegBuffer.subarray(0, frameEnd);
      this.jpegBuffer = this.jpegBuffer.subarray(frameEnd);
      this.frameCount++;
      const frame = {
        data: jpegFrame,
        timestamp: this.lastTimestamp
      };
      this.emit("frame", frame);
    }
  }
  /**
   * Stop the transformer.
   * Closes stdin, asks ffmpeg to exit (SIGTERM), escalates to SIGKILL after
   * 1s, then emits "close" exactly once.
   */
  async stop() {
    if (this.closed) return;
    this.closed = true;
    if (this.ffmpeg) {
      try {
        this.ffmpeg.stdin.end();
      } catch {
      }
      await new Promise((resolve) => {
        const ff = this.ffmpeg;
        if (!ff) {
          resolve();
          return;
        }
        const timeout = setTimeout(() => {
          ff.kill("SIGKILL");
          resolve();
        }, 1e3);
        ff.once("close", () => {
          clearTimeout(timeout);
          resolve();
        });
        try {
          ff.kill("SIGTERM");
        } catch {
          clearTimeout(timeout);
          resolve();
        }
      });
      this.ffmpeg = null;
    }
    this.emit("close", 0);
  }
  /**
   * Get frame count
   */
  getFrameCount() {
    return this.frameCount;
  }
  /**
   * Check if running
   */
  isRunning() {
    return this.started && !this.closed && this.ffmpeg !== null;
  }
  // Forward a log line to the injected logger, if any.
  log(level, message) {
    this.options.logger?.(level, `[MjpegTransformer] ${message}`);
  }
};
29318
/**
 * Build a timestamp-based boundary token for a
 * multipart/x-mixed-replace (MJPEG) response.
 */
function createMjpegBoundary() {
  return "mjpegboundary" + Date.now().toString();
}
29321
/**
 * Content-Type header value browsers recognize as a motion-JPEG push stream.
 * @param boundary - part separator produced by createMjpegBoundary()
 */
function getMjpegContentType(boundary) {
  return "multipart/x-mixed-replace; boundary=" + boundary;
}
29324
/**
 * Wrap one JPEG image as a multipart/x-mixed-replace part:
 * boundary line, part headers, blank line, payload, trailing CRLF.
 *
 * @param frame    - complete JPEG bytes
 * @param boundary - boundary token used by the response
 * @returns Buffer ready to be written to the HTTP response
 */
function formatMjpegFrame(frame, boundary) {
  const partHeader =
    `--${boundary}\r\n` +
    `Content-Type: image/jpeg\r\n` +
    `Content-Length: ${frame.length}\r\n` +
    `\r\n`;
  return Buffer.concat([Buffer.from(partHeader), frame, Buffer.from("\r\n")]);
}
29334
+
29335
+ // src/baichuan/stream/BaichuanMjpegServer.ts
29336
+ init_H264Converter();
29337
+ init_H265Converter();
29338
// HTTP server that exposes a camera's native stream as an MJPEG
// (multipart/x-mixed-replace) endpoint. The native stream is started lazily
// on the first client and torn down when the last client disconnects.
// Events: "started", "stopped", "client-connected", "client-disconnected", "error".
var BaichuanMjpegServer = class extends import_node_events8.EventEmitter {
  options; // constructor options (api, channel, port, path, encode settings, ...)
  clients = /* @__PURE__ */ new Map(); // clientId -> { id, response, boundary, connectedAt }
  httpServer = null; // node http.Server, null when stopped
  transformer = null; // MjpegTransformer, created after codec detection
  nativeStream = null; // async iterator of camera frames
  streamPump = null; // promise of the pumpStream() loop
  detectedCodec = null; // "h264" | "h265", set from the first frame seen
  started = false;
  clientIdCounter = 0; // monotonically increasing id for client labels
  constructor(options) {
    super();
    this.options = options;
  }
  /**
   * Start the MJPEG server.
   * Resolves once the HTTP server is listening; rejects on listen errors.
   * Defaults: port 8080, host 0.0.0.0, path /mjpeg.
   */
  async start() {
    if (this.started) return;
    this.started = true;
    const port = this.options.port ?? 8080;
    const host = this.options.host ?? "0.0.0.0";
    const path5 = this.options.path ?? "/mjpeg";
    this.httpServer = http5.createServer((req, res) => {
      this.handleRequest(req, res, path5);
    });
    return new Promise((resolve, reject) => {
      this.httpServer.on("error", (err) => {
        this.log("error", `HTTP server error: ${err.message}`);
        reject(err);
      });
      this.httpServer.listen(port, host, () => {
        this.log(
          "info",
          `MJPEG server started on http://${host}:${port}${path5}`
        );
        this.emit("started", { host, port, path: path5 });
        resolve();
      });
    });
  }
  /**
   * Stop the MJPEG server: end all client responses, tear down the stream
   * pipeline, then close the HTTP listener.
   */
  async stop() {
    if (!this.started) return;
    this.started = false;
    for (const [id, client] of this.clients) {
      try {
        client.response.end();
      } catch {
      }
      this.clients.delete(id);
    }
    await this.stopStream();
    if (this.httpServer) {
      await new Promise((resolve) => {
        this.httpServer.close(() => resolve());
      });
      this.httpServer = null;
    }
    this.log("info", "MJPEG server stopped");
    this.emit("stopped");
  }
  /**
   * Handle HTTP request: only GET on the configured path is accepted.
   */
  handleRequest(req, res, expectedPath) {
    const url = new URL(req.url ?? "/", `http://${req.headers.host}`);
    if (url.pathname !== expectedPath) {
      res.statusCode = 404;
      res.end("Not Found");
      return;
    }
    if (req.method !== "GET") {
      res.statusCode = 405;
      res.end("Method Not Allowed");
      return;
    }
    this.handleMjpegClient(req, res);
  }
  /**
   * Handle a new MJPEG client: register it, send the multipart response
   * header, and lazily start the native stream on the first client.
   */
  handleMjpegClient(req, res) {
    const clientId = `client-${++this.clientIdCounter}`;
    const boundary = createMjpegBoundary();
    const client = {
      id: clientId,
      response: res,
      boundary,
      connectedAt: Date.now()
    };
    this.clients.set(clientId, client);
    this.log(
      "info",
      `MJPEG client connected: ${clientId} (total: ${this.clients.size})`
    );
    this.emit("client-connected", { id: clientId, total: this.clients.size });
    res.writeHead(200, {
      "Content-Type": getMjpegContentType(boundary),
      "Cache-Control": "no-cache, no-store, must-revalidate",
      Pragma: "no-cache",
      Expires: "0",
      Connection: "close"
    });
    // NOTE(review): cleanup is bound to three events and can fire more than
    // once per client; Map.delete makes repeats harmless, but the disconnect
    // log/event may be duplicated.
    const cleanup = () => {
      this.clients.delete(clientId);
      this.log(
        "info",
        `MJPEG client disconnected: ${clientId} (remaining: ${this.clients.size})`
      );
      this.emit("client-disconnected", {
        id: clientId,
        total: this.clients.size
      });
      if (this.clients.size === 0) {
        this.stopStream();
      }
    };
    req.on("close", cleanup);
    res.on("close", cleanup);
    res.on("error", cleanup);
    if (!this.transformer) {
      // Fire-and-forget: startStream() catches its own errors internally.
      this.startStream();
    }
  }
  /**
   * Start the native video stream and MJPEG transformer.
   * The transformer itself is created later, once the codec is detected
   * from the first frame (see pumpStream/initTransformer).
   */
  async startStream() {
    if (this.transformer) return;
    this.log("info", "Starting native video stream...");
    // NOTE(review): quality/width/height/maxFps are unused here — they are
    // read again from this.options in initTransformer().
    const { api, channel, profile, variant, quality, width, height, maxFps } = this.options;
    try {
      this.nativeStream = createNativeStream(
        api,
        channel,
        profile,
        variant ? { variant } : void 0
      );
      this.streamPump = this.pumpStream();
    } catch (err) {
      this.log("error", `Failed to start stream: ${err}`);
      this.emit("error", err);
    }
  }
  /**
   * Pump native stream and feed to transformer.
   * Converts each frame to Annex-B, detects the codec on the first frame,
   * and drops everything until the first I-frame so decoding starts clean.
   */
  async pumpStream() {
    if (!this.nativeStream) return;
    // NOTE(review): frameBuffer is never used in this method.
    let frameBuffer = [];
    let waitingForKeyframe = true;
    try {
      for await (const frame of this.nativeStream) {
        // Stop pumping when the server stops or the last client leaves.
        if (!this.started || this.clients.size === 0) break;
        const { type, data, microseconds, videoType } = frame;
        if (type !== "Iframe" && type !== "Pframe") continue;
        if (!data || data.length === 0) continue;
        let annexB;
        if (videoType === "H265") {
          annexB = convertToAnnexB2(data);
          if (!this.detectedCodec) {
            this.detectedCodec = "h265";
            this.initTransformer();
          }
        } else {
          annexB = convertToAnnexB(data);
          if (!this.detectedCodec) {
            this.detectedCodec = "h264";
            this.initTransformer();
          }
        }
        if (waitingForKeyframe) {
          if (type === "Iframe") {
            waitingForKeyframe = false;
          } else {
            continue;
          }
        }
        if (this.transformer) {
          this.transformer.push(annexB, microseconds);
        }
      }
    } catch (err) {
      // Errors after stop() are expected teardown noise; ignore them.
      if (this.started) {
        this.log("error", `Stream error: ${err}`);
        this.emit("error", err);
      }
    }
  }
  /**
   * Initialize MJPEG transformer once codec is detected.
   * Wires transformer events and starts the ffmpeg pipeline.
   */
  initTransformer() {
    if (this.transformer || !this.detectedCodec) return;
    const { quality, width, height, maxFps } = this.options;
    this.transformer = new MjpegTransformer({
      codec: this.detectedCodec,
      quality,
      width,
      height,
      maxFps,
      logger: this.options.logger
    });
    this.transformer.on("frame", (frame) => {
      this.broadcastFrame(frame);
    });
    this.transformer.on("error", (err) => {
      this.log("error", `Transformer error: ${err}`);
    });
    this.transformer.on("close", () => {
      this.log("debug", "Transformer closed");
    });
    this.transformer.start();
    this.log(
      "info",
      `MJPEG transformer started (codec: ${this.detectedCodec})`
    );
  }
  /**
   * Broadcast JPEG frame to all connected clients.
   * Per-client write failures are swallowed; the client's own close/error
   * handlers take care of removal.
   */
  broadcastFrame(frame) {
    for (const client of this.clients.values()) {
      try {
        const mjpegData = formatMjpegFrame(frame.data, client.boundary);
        client.response.write(mjpegData);
      } catch {
      }
    }
  }
  /**
   * Stop the stream and transformer, closing the native async iterator and
   * waiting for the pump loop to finish. Resets codec detection so the next
   * client triggers a fresh start.
   */
  async stopStream() {
    if (this.transformer) {
      await this.transformer.stop();
      this.transformer = null;
    }
    if (this.nativeStream) {
      try {
        await this.nativeStream.return(void 0);
      } catch {
      }
      this.nativeStream = null;
    }
    if (this.streamPump) {
      try {
        await this.streamPump;
      } catch {
      }
      this.streamPump = null;
    }
    this.detectedCodec = null;
    this.log("debug", "Stream stopped");
  }
  /**
   * Get current number of connected clients
   */
  getClientCount() {
    return this.clients.size;
  }
  /**
   * Get server status
   */
  getStatus() {
    return {
      running: this.started,
      clients: this.clients.size,
      codec: this.detectedCodec,
      frames: this.transformer?.getFrameCount() ?? 0
    };
  }
  // Forward a log line to the injected logger, if any.
  log(level, message) {
    this.options.logger?.(level, `[BaichuanMjpegServer] ${message}`);
  }
};
29617
+
29618
+ // src/baichuan/stream/BaichuanWebRTCServer.ts
29619
+ var import_node_events9 = require("events");
29620
/**
 * Split an Annex-B elementary stream into raw NAL unit payloads (start
 * codes removed). Both 3-byte (00 00 01) and 4-byte (00 00 00 01) start
 * codes are recognized; bytes before the first start code are skipped and
 * empty NAL units are dropped.
 *
 * @param annexB - Buffer containing Annex-B formatted bytes
 * @returns array of Buffer views (subarrays) into annexB, one per NAL unit
 */
function parseAnnexBNalUnits(annexB) {
  const units = [];
  const total = annexB.length;
  let cursor = 0;
  while (cursor < total) {
    // Detect a start code at the cursor (4-byte form takes precedence).
    let scLen = 0;
    if (cursor + 4 <= total && annexB[cursor] === 0 && annexB[cursor + 1] === 0 && annexB[cursor + 2] === 0 && annexB[cursor + 3] === 1) {
      scLen = 4;
    } else if (cursor + 3 <= total && annexB[cursor] === 0 && annexB[cursor + 1] === 0 && annexB[cursor + 2] === 1) {
      scLen = 3;
    }
    if (scLen === 0) {
      cursor += 1;
      continue;
    }
    const begin = cursor + scLen;
    // Scan forward for the next start code; the NAL unit ends there
    // (or at the end of the buffer).
    let end = total;
    for (let j = begin; j < total - 2; j++) {
      if (annexB[j] !== 0 || annexB[j + 1] !== 0) continue;
      const threeByte = annexB[j + 2] === 1;
      const fourByte = j + 3 < total && annexB[j + 2] === 0 && annexB[j + 3] === 1;
      if (threeByte || fourByte) {
        end = j;
        break;
      }
    }
    if (end > begin) {
      units.push(annexB.subarray(begin, end));
    }
    cursor = end;
  }
  return units;
}
29648
/**
 * H.264 nal_unit_type: the low 5 bits of the first NAL header byte.
 */
function getH264NalType(nalUnit) {
  return nalUnit[0] % 32;
}
29651
/**
 * H.265 nal_unit_type: bits 6..1 of the first NAL header byte.
 */
function getH265NalType2(nalUnit) {
  return (nalUnit[0] & 0x7e) >> 1;
}
29654
+ var BaichuanWebRTCServer = class extends import_node_events9.EventEmitter {
29655
+ options;
29656
+ sessions = /* @__PURE__ */ new Map();
29657
+ sessionIdCounter = 0;
29658
+ weriftModule = null;
29659
+ constructor(options) {
29660
+ super();
29661
+ this.options = options;
29662
+ }
29663
+ /**
29664
+ * Initialize werift module (lazy load to avoid requiring it if not used)
29665
+ */
29666
+ async loadWerift() {
29667
+ if (this.weriftModule) return this.weriftModule;
29668
+ try {
29669
+ this.weriftModule = await import("werift");
29670
+ return this.weriftModule;
29671
+ } catch (err) {
29672
+ throw new Error(
29673
+ `Failed to load werift module. Make sure it's installed: npm install werift
29674
+ Error: ${err}`
29675
+ );
29676
+ }
29677
+ }
29678
+ /**
29679
+ * Create a new WebRTC session
29680
+ * Returns a session ID and SDP offer to send to the browser
29681
+ */
29682
+ async createSession() {
29683
+ const werift = await this.loadWerift();
29684
+ const { RTCPeerConnection, MediaStreamTrack, RTCRtpCodecParameters } = werift;
29685
+ const sessionId = `webrtc-${++this.sessionIdCounter}-${Date.now()}`;
29686
+ this.log("info", `Creating WebRTC session ${sessionId}`);
29687
+ const iceServers = [];
29688
+ const stunServers = this.options.stunServers ?? [
29689
+ "stun:stun.l.google.com:19302"
29690
+ ];
29691
+ for (const urls of stunServers) {
29692
+ iceServers.push({ urls });
29693
+ }
29694
+ if (this.options.turnServers) {
29695
+ iceServers.push(...this.options.turnServers);
29696
+ }
29697
+ const peerConnection = new RTCPeerConnection({
29698
+ iceServers,
29699
+ codecs: {
29700
+ video: [
29701
+ new RTCRtpCodecParameters({
29702
+ mimeType: "video/H264",
29703
+ clockRate: 9e4,
29704
+ rtcpFeedback: [
29705
+ { type: "nack" },
29706
+ { type: "nack", parameter: "pli" },
29707
+ { type: "goog-remb" }
29708
+ ],
29709
+ parameters: "packetization-mode=1;profile-level-id=42e01f;level-asymmetry-allowed=1"
29710
+ })
29711
+ ],
29712
+ audio: [
29713
+ new RTCRtpCodecParameters({
29714
+ mimeType: "audio/opus",
29715
+ clockRate: 48e3,
29716
+ channels: 2
29717
+ })
29718
+ ]
29719
+ }
29720
+ });
29721
+ const session = {
29722
+ id: sessionId,
29723
+ peerConnection,
29724
+ videoTrack: null,
29725
+ audioTrack: null,
29726
+ videoDataChannel: null,
29727
+ nativeStream: null,
29728
+ intercom: null,
29729
+ dataChannel: null,
29730
+ videoCodec: null,
29731
+ createdAt: /* @__PURE__ */ new Date(),
29732
+ state: "connecting",
29733
+ stats: {
29734
+ videoFrames: 0,
29735
+ audioFrames: 0,
29736
+ bytesSent: 0,
29737
+ intercomBytesSent: 0
29738
+ }
29739
+ };
29740
+ this.sessions.set(sessionId, session);
29741
+ const videoTrack = new MediaStreamTrack({ kind: "video" });
29742
+ peerConnection.addTrack(videoTrack);
29743
+ session.videoTrack = videoTrack;
29744
+ const audioTrack = new MediaStreamTrack({ kind: "audio" });
29745
+ peerConnection.addTrack(audioTrack);
29746
+ session.audioTrack = audioTrack;
29747
+ const videoDataChannel = peerConnection.createDataChannel("video", {
29748
+ ordered: true,
29749
+ maxRetransmits: 0
29750
+ // Unreliable for real-time video
29751
+ });
29752
+ session.videoDataChannel = videoDataChannel;
29753
+ videoDataChannel.onopen = () => {
29754
+ this.log("info", `Video data channel opened for session ${sessionId}`);
29755
+ };
29756
+ if (this.options.enableIntercom) {
29757
+ const dataChannel = peerConnection.createDataChannel("intercom", {
29758
+ ordered: true
29759
+ });
29760
+ session.dataChannel = dataChannel;
29761
+ dataChannel.onopen = () => {
29762
+ this.log(
29763
+ "info",
29764
+ `Intercom data channel opened for session ${sessionId}`
29765
+ );
29766
+ this.emit("intercom-started", { sessionId });
29767
+ };
29768
+ dataChannel.onmessage = async (event) => {
29769
+ if (session.intercom && event.data instanceof ArrayBuffer) {
29770
+ try {
29771
+ const audioData = Buffer.from(event.data);
29772
+ await session.intercom.sendAudio(audioData);
29773
+ session.stats.intercomBytesSent += audioData.length;
29774
+ } catch (err) {
29775
+ this.log("error", `Failed to send intercom audio: ${err}`);
29776
+ }
29777
+ }
29778
+ };
29779
+ dataChannel.onclose = () => {
29780
+ this.log(
29781
+ "info",
29782
+ `Intercom data channel closed for session ${sessionId}`
29783
+ );
29784
+ this.emit("intercom-stopped", { sessionId });
29785
+ };
29786
+ }
29787
+ peerConnection.iceConnectionStateChange.subscribe((state) => {
29788
+ this.log("debug", `ICE connection state for ${sessionId}: ${state}`);
29789
+ if (state === "connected") {
29790
+ session.state = "connected";
29791
+ this.emit("session-connected", { sessionId });
29792
+ } else if (state === "disconnected" || state === "failed") {
29793
+ session.state = state;
29794
+ this.closeSession(sessionId).catch((err) => {
29795
+ this.log("error", `Error closing session on ICE ${state}: ${err}`);
29796
+ });
29797
+ }
29798
+ });
29799
+ peerConnection.connectionStateChange.subscribe((state) => {
29800
+ this.log("debug", `Connection state for ${sessionId}: ${state}`);
29801
+ if (state === "closed" || state === "failed") {
29802
+ this.closeSession(sessionId).catch((err) => {
29803
+ this.log(
29804
+ "error",
29805
+ `Error closing session on connection ${state}: ${err}`
29806
+ );
29807
+ });
29808
+ }
29809
+ });
29810
+ const offer = await peerConnection.createOffer();
29811
+ await peerConnection.setLocalDescription(offer);
29812
+ await this.waitForIceGathering(peerConnection, 3e3);
29813
+ const localDescription = peerConnection.localDescription;
29814
+ if (!localDescription) {
29815
+ throw new Error("Failed to create local description");
29816
+ }
29817
+ this.emit("session-created", { sessionId });
29818
+ return {
29819
+ sessionId,
29820
+ offer: {
29821
+ sdp: localDescription.sdp,
29822
+ type: "offer"
29823
+ }
29824
+ };
29825
+ }
29826
+ /**
29827
+ * Handle WebRTC answer from browser and start streaming
29828
+ */
29829
+ async handleAnswer(sessionId, answer) {
29830
+ const session = this.sessions.get(sessionId);
29831
+ if (!session) {
29832
+ throw new Error(`Session ${sessionId} not found`);
29833
+ }
29834
+ const werift = await this.loadWerift();
29835
+ const { RTCSessionDescription } = werift;
29836
+ this.log("info", `Handling WebRTC answer for session ${sessionId}`);
29837
+ await session.peerConnection.setRemoteDescription(
29838
+ new RTCSessionDescription(answer.sdp, answer.type)
29839
+ );
29840
+ await this.startNativeStream(session);
29841
+ if (this.options.enableIntercom && session.dataChannel) {
29842
+ await this.startIntercom(session);
29843
+ }
29844
+ }
29845
+ /**
29846
+ * Add ICE candidate from browser
29847
+ */
29848
+ async addIceCandidate(sessionId, candidate) {
29849
+ const session = this.sessions.get(sessionId);
29850
+ if (!session) {
29851
+ throw new Error(`Session ${sessionId} not found`);
29852
+ }
29853
+ const werift = await this.loadWerift();
29854
+ const { RTCIceCandidate } = werift;
29855
+ await session.peerConnection.addIceCandidate(
29856
+ new RTCIceCandidate(candidate.candidate, candidate.sdpMid ?? "0")
29857
+ );
29858
+ }
29859
+ /**
29860
+ * Close a WebRTC session
29861
+ */
29862
+ async closeSession(sessionId) {
29863
+ const session = this.sessions.get(sessionId);
29864
+ if (!session) return;
29865
+ this.log("info", `Closing WebRTC session ${sessionId}`);
29866
+ session.state = "disconnected";
29867
+ if (session.intercom) {
29868
+ try {
29869
+ await session.intercom.stop();
29870
+ } catch (err) {
29871
+ this.log("debug", `Error stopping intercom: ${err}`);
29872
+ }
29873
+ session.intercom = null;
29874
+ }
29875
+ if (session.dataChannel) {
29876
+ try {
29877
+ session.dataChannel.close();
29878
+ } catch (err) {
29879
+ this.log("debug", `Error closing data channel: ${err}`);
29880
+ }
29881
+ session.dataChannel = null;
29882
+ }
29883
+ if (session.cleanup) {
29884
+ session.cleanup();
29885
+ }
29886
+ try {
29887
+ await session.peerConnection.close();
29888
+ } catch (err) {
29889
+ this.log("debug", `Error closing peer connection: ${err}`);
29890
+ }
29891
+ this.sessions.delete(sessionId);
29892
+ this.emit("session-closed", { sessionId });
29893
+ this.log(
29894
+ "info",
29895
+ `WebRTC session ${sessionId} closed (active sessions: ${this.sessions.size})`
29896
+ );
29897
+ }
29898
+ /**
29899
+ * Get information about all active sessions
29900
+ */
29901
+ getSessions() {
29902
+ return Array.from(this.sessions.values()).map((s) => ({
29903
+ id: s.id,
29904
+ state: s.state,
29905
+ createdAt: s.createdAt,
29906
+ stats: { ...s.stats }
29907
+ }));
29908
+ }
29909
+ /**
29910
+ * Get information about a specific session
29911
+ */
29912
+ getSession(sessionId) {
29913
+ const session = this.sessions.get(sessionId);
29914
+ if (!session) return null;
29915
+ return {
29916
+ id: session.id,
29917
+ state: session.state,
29918
+ createdAt: session.createdAt,
29919
+ stats: { ...session.stats }
29920
+ };
29921
+ }
29922
+ /**
29923
+ * Close all sessions and stop the server
29924
+ */
29925
+ async stop() {
29926
+ this.log("info", "Stopping WebRTC server");
29927
+ const sessionIds = Array.from(this.sessions.keys());
29928
+ await Promise.all(sessionIds.map((id) => this.closeSession(id)));
29929
+ this.log("info", "WebRTC server stopped");
29930
+ }
29931
+ /**
29932
+ * Get the number of active sessions
29933
+ */
29934
+ get sessionCount() {
29935
+ return this.sessions.size;
29936
+ }
29937
+ // ============================================================================
29938
+ // Private Methods
29939
+ // ============================================================================
29940
+ /**
29941
+ * Wait for ICE gathering to complete
29942
+ */
29943
+ async waitForIceGathering(pc, timeoutMs) {
29944
+ if (pc.iceGatheringState === "complete") return;
29945
+ return new Promise((resolve) => {
29946
+ const timeout = setTimeout(() => {
29947
+ resolve();
29948
+ }, timeoutMs);
29949
+ pc.iceGatheringStateChange.subscribe((state) => {
29950
+ if (state === "complete") {
29951
+ clearTimeout(timeout);
29952
+ resolve();
29953
+ }
29954
+ });
29955
+ });
29956
+ }
29957
+ /**
29958
+ * Start native Baichuan stream and pump frames to WebRTC
29959
+ */
29960
+ async startNativeStream(session) {
29961
+ this.log(
29962
+ "info",
29963
+ `Starting native stream for session ${session.id} (channel=${this.options.channel}, profile=${this.options.profile})`
29964
+ );
29965
+ session.nativeStream = createNativeStream(
29966
+ this.options.api,
29967
+ this.options.channel,
29968
+ this.options.profile,
29969
+ this.options.variant !== void 0 ? { variant: this.options.variant } : void 0
29970
+ );
29971
+ this.pumpFramesToWebRTC(session).catch((err) => {
29972
+ this.log("error", `Frame pump error for session ${session.id}: ${err}`);
29973
+ this.closeSession(session.id).catch(() => {
29974
+ });
29975
+ });
29976
+ }
29977
+ /**
29978
+ * Pump frames from native stream to WebRTC tracks
29979
+ * H.264 → RTP media track (standard WebRTC)
29980
+ * H.265 → DataChannel with raw Annex-B frames (decoded by WebCodecs in browser)
29981
+ */
29982
+ async pumpFramesToWebRTC(session) {
29983
+ if (!session.nativeStream) {
29984
+ this.log("warn", `No native stream for session ${session.id}`);
29985
+ return;
29986
+ }
29987
+ this.log("info", `Starting frame pump for session ${session.id}`);
29988
+ const werift = await this.loadWerift();
29989
+ const { RtpPacket, RtpHeader } = werift;
29990
+ let sequenceNumber = Math.floor(Math.random() * 65535);
29991
+ let timestamp = Math.floor(Math.random() * 4294967295);
29992
+ const videoClockRate = 9e4;
29993
+ let lastTimeMicros = 0;
29994
+ let lastLogTime = Date.now();
29995
+ let packetsSentSinceLastLog = 0;
29996
+ let frameNumber = 0;
29997
+ try {
29998
+ this.log("info", `Entering frame loop for session ${session.id}`);
29999
+ for await (const frame of session.nativeStream) {
30000
+ if (session.state === "disconnected" || session.state === "failed") {
30001
+ this.log(
30002
+ "debug",
30003
+ `Session ${session.id} state is ${session.state}, breaking frame loop`
30004
+ );
30005
+ break;
30006
+ }
30007
+ if (frame.audio) {
30008
+ session.stats.audioFrames++;
30009
+ } else {
30010
+ if (frame.data) {
30011
+ if (!session.videoCodec && frame.videoType) {
30012
+ session.videoCodec = frame.videoType;
30013
+ this.log("info", `Detected video codec: ${session.videoCodec}`);
30014
+ if (session.videoDataChannel && session.videoDataChannel.readyState === "open") {
30015
+ const codecInfo = JSON.stringify({
30016
+ type: "codec",
30017
+ codec: session.videoCodec,
30018
+ width: frame.width || 0,
30019
+ height: frame.height || 0
30020
+ });
30021
+ session.videoDataChannel.send(codecInfo);
30022
+ }
30023
+ }
30024
+ if (frame.microseconds && lastTimeMicros > 0) {
30025
+ const deltaMicros = frame.microseconds - lastTimeMicros;
30026
+ const deltaTicks = Math.floor(
30027
+ deltaMicros / 1e6 * videoClockRate
30028
+ );
30029
+ timestamp = timestamp + deltaTicks >>> 0;
30030
+ } else {
30031
+ timestamp = timestamp + 3e3 >>> 0;
30032
+ }
30033
+ lastTimeMicros = frame.microseconds || 0;
30034
+ if (session.videoCodec === "H264") {
30035
+ await this.sendH264Frame(
30036
+ session,
30037
+ werift,
30038
+ frame.data,
30039
+ sequenceNumber,
30040
+ timestamp
30041
+ );
30042
+ sequenceNumber = sequenceNumber + Math.ceil(frame.data.length / 1200) & 65535;
30043
+ packetsSentSinceLastLog++;
30044
+ } else if (session.videoCodec === "H265") {
30045
+ await this.sendH265Frame(session, frame, frameNumber);
30046
+ packetsSentSinceLastLog++;
30047
+ }
30048
+ frameNumber++;
30049
+ session.stats.videoFrames++;
30050
+ session.stats.bytesSent += frame.data.length;
30051
+ const now = Date.now();
30052
+ if (now - lastLogTime >= 5e3) {
30053
+ this.log(
30054
+ "debug",
30055
+ `WebRTC session ${session.id} [${session.videoCodec}]: sent ${session.stats.videoFrames} frames, ${packetsSentSinceLastLog} packets, ${Math.round(session.stats.bytesSent / 1024)} KB`
30056
+ );
30057
+ lastLogTime = now;
30058
+ packetsSentSinceLastLog = 0;
30059
+ }
30060
+ }
30061
+ }
30062
+ }
30063
+ } catch (err) {
30064
+ this.log(
30065
+ "error",
30066
+ `Error pumping frames for session ${session.id}: ${err}`
30067
+ );
30068
+ }
30069
+ this.log("info", `Native stream ended for session ${session.id}`);
30070
+ }
30071
+ /**
30072
+ * Send H.264 frame via RTP media track
30073
+ */
30074
+ async sendH264Frame(session, werift, frameData, sequenceNumber, timestamp) {
30075
+ const nalUnits = parseAnnexBNalUnits(frameData);
30076
+ for (let i = 0; i < nalUnits.length; i++) {
30077
+ const nalUnit = nalUnits[i];
30078
+ if (nalUnit.length === 0) continue;
30079
+ const isLastNalu = i === nalUnits.length - 1;
30080
+ const nalType = getH264NalType(nalUnit);
30081
+ if (nalType === 9) continue;
30082
+ const rtpPackets = this.createH264RtpPackets(
30083
+ werift,
30084
+ nalUnit,
30085
+ sequenceNumber,
30086
+ timestamp,
30087
+ isLastNalu
30088
+ );
30089
+ for (const rtpPacket of rtpPackets) {
30090
+ session.videoTrack.writeRtp(rtpPacket);
30091
+ sequenceNumber = sequenceNumber + 1 & 65535;
30092
+ }
30093
+ }
30094
+ }
30095
+ /**
30096
+ * Send H.265 frame via DataChannel
30097
+ * Format: 12-byte header + Annex-B data
30098
+ * Header: [frameNum (4)] [timestamp (4)] [flags (1)] [keyframe (1)] [reserved (2)]
30099
+ */
30100
+ async sendH265Frame(session, frame, frameNumber) {
30101
+ if (!session.videoDataChannel) {
30102
+ if (frameNumber === 0) {
30103
+ this.log("warn", `No video data channel for session ${session.id}`);
30104
+ }
30105
+ return;
30106
+ }
30107
+ if (session.videoDataChannel.readyState !== "open") {
30108
+ if (frameNumber === 0) {
30109
+ this.log(
30110
+ "warn",
30111
+ `Video data channel not open for session ${session.id}: ${session.videoDataChannel.readyState}`
30112
+ );
30113
+ }
30114
+ return;
30115
+ }
30116
+ let isKeyframe = frame.isKeyframe === true;
30117
+ if (!isKeyframe && frame.isKeyframe === void 0) {
30118
+ const nalUnits = parseAnnexBNalUnits(frame.data);
30119
+ for (const nalUnit of nalUnits) {
30120
+ if (nalUnit.length === 0) continue;
30121
+ const nalType = getH265NalType2(nalUnit);
30122
+ if (nalType === 32 || nalType === 33 || nalType === 34 || nalType === 19 || nalType === 20) {
30123
+ isKeyframe = true;
30124
+ break;
30125
+ }
30126
+ }
30127
+ }
30128
+ const header = Buffer.alloc(12);
30129
+ header.writeUInt32BE(frameNumber, 0);
30130
+ header.writeUInt32BE(frame.microseconds ? frame.microseconds / 1e3 : 0, 4);
30131
+ header.writeUInt8(1, 8);
30132
+ header.writeUInt8(isKeyframe ? 1 : 0, 9);
30133
+ header.writeUInt16BE(0, 10);
30134
+ const packet = Buffer.concat([header, frame.data]);
30135
+ if (frameNumber < 3) {
30136
+ this.log(
30137
+ "info",
30138
+ `Sending H.265 frame ${frameNumber}: ${packet.length} bytes, keyframe=${isKeyframe}`
30139
+ );
30140
+ }
30141
+ const MAX_CHUNK_SIZE = 16e3;
30142
+ try {
30143
+ if (packet.length <= MAX_CHUNK_SIZE) {
30144
+ session.videoDataChannel.send(packet);
30145
+ } else {
30146
+ const totalChunks = Math.ceil(packet.length / MAX_CHUNK_SIZE);
30147
+ for (let i = 0; i < totalChunks; i++) {
30148
+ const start = i * MAX_CHUNK_SIZE;
30149
+ const end = Math.min(start + MAX_CHUNK_SIZE, packet.length);
30150
+ const chunk = packet.subarray(start, end);
30151
+ const chunkHeader = Buffer.alloc(2);
30152
+ chunkHeader.writeUInt8(i, 0);
30153
+ chunkHeader.writeUInt8(totalChunks, 1);
30154
+ session.videoDataChannel.send(Buffer.concat([chunkHeader, chunk]));
30155
+ }
30156
+ }
30157
+ } catch (err) {
30158
+ this.log("error", `Error sending H.265 frame ${frameNumber}: ${err}`);
30159
+ }
30160
+ }
30161
+ /**
30162
+ * Create RTP packets for H.264 NAL unit
30163
+ * Handles single NAL, STAP-A aggregation, and FU-A fragmentation
30164
+ */
30165
+ createH264RtpPackets(werift, nalUnit, sequenceNumber, timestamp, marker) {
30166
+ const { RtpPacket, RtpHeader } = werift;
30167
+ const MTU = 1200;
30168
+ const packets = [];
30169
+ if (nalUnit.length <= MTU) {
30170
+ const header = new RtpHeader();
30171
+ header.payloadType = 96;
30172
+ header.sequenceNumber = sequenceNumber;
30173
+ header.timestamp = timestamp;
30174
+ header.marker = marker;
30175
+ packets.push(new RtpPacket(header, nalUnit));
30176
+ } else {
30177
+ const nalHeader = nalUnit[0];
30178
+ const nalType = nalHeader & 31;
30179
+ const nri = nalHeader & 96;
30180
+ const fuIndicator = (nri | 28) & 255;
30181
+ let offset = 1;
30182
+ let isFirst = true;
30183
+ while (offset < nalUnit.length) {
30184
+ const remaining = nalUnit.length - offset;
30185
+ const chunkSize = Math.min(remaining, MTU - 2);
30186
+ const isLast = offset + chunkSize >= nalUnit.length;
30187
+ let fuHeader = nalType;
30188
+ if (isFirst) fuHeader |= 128;
30189
+ if (isLast) fuHeader |= 64;
30190
+ const fuPayload = Buffer.alloc(2 + chunkSize);
30191
+ fuPayload[0] = fuIndicator;
30192
+ fuPayload[1] = fuHeader;
30193
+ nalUnit.copy(fuPayload, 2, offset, offset + chunkSize);
30194
+ const header = new RtpHeader();
30195
+ header.payloadType = 96;
30196
+ header.sequenceNumber = sequenceNumber + packets.length & 65535;
30197
+ header.timestamp = timestamp;
30198
+ header.marker = isLast && marker;
30199
+ packets.push(new RtpPacket(header, fuPayload));
30200
+ offset += chunkSize;
30201
+ isFirst = false;
30202
+ }
30203
+ }
30204
+ return packets;
30205
+ }
30206
+ /**
30207
+ * Start intercom (two-way audio)
30208
+ */
30209
+ async startIntercom(session) {
30210
+ try {
30211
+ session.intercom = new Intercom({
30212
+ api: this.options.api,
30213
+ channel: this.options.channel
30214
+ });
30215
+ await session.intercom.start();
30216
+ this.log("info", `Intercom started for session ${session.id}`);
30217
+ } catch (err) {
30218
+ this.log(
30219
+ "error",
30220
+ `Failed to start intercom for session ${session.id}: ${err}`
30221
+ );
30222
+ session.intercom = null;
30223
+ }
30224
+ }
30225
+ /**
30226
+ * Log helper
30227
+ */
30228
+ log(level, message) {
30229
+ if (this.options.logger) {
30230
+ this.options.logger(level, message);
30231
+ }
30232
+ }
30233
+ };
30234
+
29048
30235
  // src/index.ts
29049
30236
  init_BcMediaParser();
29050
30237
  init_BcMediaCodec();
@@ -29519,10 +30706,10 @@ async function autoDetectDeviceType(inputs) {
29519
30706
  }
29520
30707
 
29521
30708
  // src/multifocal/compositeRtspServer.ts
29522
- var import_node_events7 = require("events");
29523
- var import_node_child_process9 = require("child_process");
30709
+ var import_node_events10 = require("events");
30710
+ var import_node_child_process10 = require("child_process");
29524
30711
  var net3 = __toESM(require("net"), 1);
29525
- var CompositeRtspServer = class extends import_node_events7.EventEmitter {
30712
+ var CompositeRtspServer = class extends import_node_events10.EventEmitter {
29526
30713
  options;
29527
30714
  compositeStream = null;
29528
30715
  rtspServer = null;
@@ -29627,7 +30814,7 @@ var CompositeRtspServer = class extends import_node_events7.EventEmitter {
29627
30814
  this.logger.log?.(
29628
30815
  `[CompositeRtspServer] Starting ffmpeg RTSP server: ${ffmpegArgs.join(" ")}`
29629
30816
  );
29630
- this.ffmpegProcess = (0, import_node_child_process9.spawn)("ffmpeg", ffmpegArgs, {
30817
+ this.ffmpegProcess = (0, import_node_child_process10.spawn)("ffmpeg", ffmpegArgs, {
29631
30818
  stdio: ["pipe", "pipe", "pipe"]
29632
30819
  });
29633
30820
  this.ffmpegProcess.on("error", (error) => {
@@ -29841,8 +31028,10 @@ var CompositeRtspServer = class extends import_node_events7.EventEmitter {
29841
31028
  BaichuanEventEmitter,
29842
31029
  BaichuanFrameParser,
29843
31030
  BaichuanHttpStreamServer,
31031
+ BaichuanMjpegServer,
29844
31032
  BaichuanRtspServer,
29845
31033
  BaichuanVideoStream,
31034
+ BaichuanWebRTCServer,
29846
31035
  BcMediaAnnexBDecoder,
29847
31036
  BcMediaCodec,
29848
31037
  BcUdpStream,
@@ -29854,6 +31043,7 @@ var CompositeRtspServer = class extends import_node_events7.EventEmitter {
29854
31043
  H264RtpDepacketizer,
29855
31044
  H265RtpDepacketizer,
29856
31045
  Intercom,
31046
+ MjpegTransformer,
29857
31047
  NVR_HUB_EXACT_TYPES,
29858
31048
  NVR_HUB_MODEL_PATTERNS,
29859
31049
  ReolinkBaichuanApi,
@@ -29900,6 +31090,7 @@ var CompositeRtspServer = class extends import_node_events7.EventEmitter {
29900
31090
  createDebugGateLogger,
29901
31091
  createDiagnosticsBundle,
29902
31092
  createLogger,
31093
+ createMjpegBoundary,
29903
31094
  createNativeStream,
29904
31095
  createNullLogger,
29905
31096
  createReplayHttpServer,
@@ -29907,6 +31098,7 @@ var CompositeRtspServer = class extends import_node_events7.EventEmitter {
29907
31098
  createRfc4571TcpServerForReplay,
29908
31099
  createRtspProxyServer,
29909
31100
  createTaggedLogger,
31101
+ decideVideoclipTranscodeMode,
29910
31102
  decodeHeader,
29911
31103
  deriveAesKey,
29912
31104
  detectVideoCodecFromNal,
@@ -29921,10 +31113,13 @@ var CompositeRtspServer = class extends import_node_events7.EventEmitter {
29921
31113
  extractSpsFromAnnexB,
29922
31114
  extractVpsFromAnnexB,
29923
31115
  flattenAbilitiesForChannel,
31116
+ formatMjpegFrame,
29924
31117
  getConstructedVideoStreamOptions,
29925
31118
  getGlobalLogger,
29926
31119
  getH265NalType,
31120
+ getMjpegContentType,
29927
31121
  getVideoStream,
31122
+ getVideoclipClientInfo,
29928
31123
  getXmlText,
29929
31124
  hasH265StartCodes,
29930
31125
  hasStartCodes,