@apocaliss92/nodelink-js 0.1.7 → 0.1.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{chunk-JMT75JNG.js → chunk-AFUYHWWQ.js} +5 -4
- package/dist/{chunk-JMT75JNG.js.map → chunk-AFUYHWWQ.js.map} +1 -1
- package/dist/cli/rtsp-server.cjs +4 -3
- package/dist/cli/rtsp-server.cjs.map +1 -1
- package/dist/cli/rtsp-server.d.cts +1 -0
- package/dist/cli/rtsp-server.d.ts +1 -0
- package/dist/cli/rtsp-server.js +1 -1
- package/dist/index.cjs +1208 -13
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +7773 -0
- package/dist/index.d.ts +398 -0
- package/dist/index.js +1188 -4
- package/dist/index.js.map +1 -1
- package/package.json +2 -1
package/dist/index.js CHANGED
@@ -37,7 +37,7 @@ import {
 normalizeUid,
 parseSupportXml,
 setGlobalLogger
-} from "./chunk-
+} from "./chunk-AFUYHWWQ.js";
 import {
 AesStreamDecryptor,
 BC_AES_IV,
@@ -495,6 +495,79 @@ var AutodiscoveryClient = class {
 }
 };

+// src/reolink/baichuan/types.ts
+function getVideoclipClientInfo(headers) {
+const getHeader = (key) => {
+const val = headers[key] ?? headers[key.toLowerCase()] ?? headers[key.toUpperCase()];
+return Array.isArray(val) ? val[0] : val;
+};
+return {
+userAgent: getHeader("user-agent") ?? getHeader("User-Agent"),
+accept: getHeader("accept") ?? getHeader("Accept"),
+range: getHeader("range") ?? getHeader("Range"),
+secChUa: getHeader("sec-ch-ua") ?? getHeader("Sec-CH-UA"),
+secChUaMobile: getHeader("sec-ch-ua-mobile") ?? getHeader("Sec-CH-UA-Mobile"),
+secChUaPlatform: getHeader("sec-ch-ua-platform") ?? getHeader("Sec-CH-UA-Platform")
+};
+}
+function decideVideoclipTranscodeMode(headers, forceMode) {
+const clientInfo = getVideoclipClientInfo(headers);
+if (forceMode) {
+return {
+mode: forceMode,
+reason: `forced: ${forceMode}`,
+clientInfo
+};
+}
+const ua = (clientInfo.userAgent ?? "").toLowerCase();
+const platform = (clientInfo.secChUaPlatform ?? "").toLowerCase().replace(/"/g, "");
+const isIos = /iphone|ipad|ipod/.test(ua);
+if (isIos) {
+return {
+mode: "transcode-h264",
+reason: "iOS device detected - no native H.265 support in <video>",
+clientInfo
+};
+}
+const isFirefox = ua.includes("firefox");
+if (isFirefox) {
+return {
+mode: "transcode-h264",
+reason: "Firefox detected - no H.265 support",
+clientInfo
+};
+}
+const isAndroid = ua.includes("android") || platform === "android";
+if (isAndroid) {
+return {
+mode: "transcode-h264",
+reason: "Android device detected - variable H.265 support",
+clientInfo
+};
+}
+const isChromium = ua.includes("chrome") || ua.includes("edg");
+const isMac = ua.includes("mac os") || platform === "macos";
+if (isChromium && !isMac) {
+return {
+mode: "transcode-h264",
+reason: "Chrome/Edge on non-Mac detected - limited H.265 support",
+clientInfo
+};
+}
+if (isMac) {
+return {
+mode: "passthrough",
+reason: "macOS detected - native H.265 hardware decoding available",
+clientInfo
+};
+}
+return {
+mode: "transcode-h264",
+reason: "Unknown client - transcoding for compatibility",
+clientInfo
+};
+}
+
 // src/reolink/baichuan/endpoints-server.ts
 import http from "http";
 import { spawn } from "child_process";
@@ -4308,11 +4381,1114 @@ var BaichuanHttpStreamServer = class extends EventEmitter2 {
 }
 };

-// src/
+// src/baichuan/stream/BaichuanMjpegServer.ts
+import { EventEmitter as EventEmitter4 } from "events";
+import * as http5 from "http";
+
+// src/baichuan/stream/MjpegTransformer.ts
 import { EventEmitter as EventEmitter3 } from "events";
 import { spawn as spawn6 } from "child_process";
+var JPEG_SOI = Buffer.from([255, 216]);
+var JPEG_EOI = Buffer.from([255, 217]);
+var MjpegTransformer = class extends EventEmitter3 {
+options;
+ffmpeg = null;
+started = false;
+closed = false;
+jpegBuffer = Buffer.alloc(0);
+frameCount = 0;
+lastTimestamp = 0;
+constructor(options) {
+super();
+this.options = {
+codec: options.codec,
+quality: options.quality ?? 5,
+width: options.width,
+height: options.height,
+maxFps: options.maxFps,
+logger: options.logger
+};
+}
+/**
+ * Start the transformer (spawns FFmpeg process)
+ */
+start() {
+if (this.started || this.closed) return;
+this.started = true;
+const { codec, quality, width, height, maxFps } = this.options;
+const args = [
+"-hide_banner",
+"-loglevel",
+"error",
+// Input: raw video from stdin
+"-f",
+codec === "h265" ? "hevc" : "h264",
+"-i",
+"pipe:0"
+];
+const filters = [];
+if (width || height) {
+const w = width ?? -1;
+const h = height ?? -1;
+filters.push(`scale=${w}:${h}`);
+}
+if (maxFps) {
+filters.push(`fps=${maxFps}`);
+}
+if (filters.length > 0) {
+args.push("-vf", filters.join(","));
+}
+args.push(
+"-c:v",
+"mjpeg",
+"-q:v",
+String(quality),
+"-f",
+"mjpeg",
+"pipe:1"
+);
+this.log("debug", `Starting FFmpeg with args: ${args.join(" ")}`);
+this.ffmpeg = spawn6("ffmpeg", args, {
+stdio: ["pipe", "pipe", "pipe"]
+});
+this.ffmpeg.stdout.on("data", (data) => {
+this.handleJpegData(data);
+});
+this.ffmpeg.stderr.on("data", (data) => {
+const msg = data.toString().trim();
+if (msg) {
+this.log("debug", `FFmpeg: ${msg}`);
+}
+});
+this.ffmpeg.on("close", (code) => {
+this.log("debug", `FFmpeg closed with code ${code}`);
+this.ffmpeg = null;
+if (!this.closed) {
+this.emit("close", code);
+}
+});
+this.ffmpeg.on("error", (err) => {
+this.log("error", `FFmpeg error: ${err.message}`);
+this.emit("error", err);
+});
+}
+/**
+ * Push an H.264/H.265 access unit (Annex-B format with start codes)
+ */
+push(accessUnit, timestamp) {
+if (!this.started || this.closed || !this.ffmpeg) {
+return;
+}
+this.lastTimestamp = timestamp ?? Date.now() * 1e3;
+try {
+this.ffmpeg.stdin.write(accessUnit);
+} catch (err) {
+this.log("error", `Failed to write to FFmpeg: ${err}`);
+}
+}
+/**
+ * Handle JPEG data from FFmpeg stdout
+ * FFmpeg outputs complete JPEG images, each starting with SOI (0xFFD8)
+ * and ending with EOI (0xFFD9)
+ */
+handleJpegData(data) {
+this.jpegBuffer = Buffer.concat([this.jpegBuffer, data]);
+while (true) {
+const soiIndex = this.jpegBuffer.indexOf(JPEG_SOI);
+if (soiIndex < 0) {
+this.jpegBuffer = Buffer.alloc(0);
+break;
+}
+if (soiIndex > 0) {
+this.jpegBuffer = this.jpegBuffer.subarray(soiIndex);
+}
+const eoiIndex = this.jpegBuffer.indexOf(JPEG_EOI, 2);
+if (eoiIndex < 0) {
+break;
+}
+const frameEnd = eoiIndex + 2;
+const jpegFrame = this.jpegBuffer.subarray(0, frameEnd);
+this.jpegBuffer = this.jpegBuffer.subarray(frameEnd);
+this.frameCount++;
+const frame = {
+data: jpegFrame,
+timestamp: this.lastTimestamp
+};
+this.emit("frame", frame);
+}
+}
+/**
+ * Stop the transformer
+ */
+async stop() {
+if (this.closed) return;
+this.closed = true;
+if (this.ffmpeg) {
+try {
+this.ffmpeg.stdin.end();
+} catch {
+}
+await new Promise((resolve) => {
+const ff = this.ffmpeg;
+if (!ff) {
+resolve();
+return;
+}
+const timeout = setTimeout(() => {
+ff.kill("SIGKILL");
+resolve();
+}, 1e3);
+ff.once("close", () => {
+clearTimeout(timeout);
+resolve();
+});
+try {
+ff.kill("SIGTERM");
+} catch {
+clearTimeout(timeout);
+resolve();
+}
+});
+this.ffmpeg = null;
+}
+this.emit("close", 0);
+}
+/**
+ * Get frame count
+ */
+getFrameCount() {
+return this.frameCount;
+}
+/**
+ * Check if running
+ */
+isRunning() {
+return this.started && !this.closed && this.ffmpeg !== null;
+}
+log(level, message) {
+this.options.logger?.(level, `[MjpegTransformer] ${message}`);
+}
+};
+function createMjpegBoundary() {
+return `mjpegboundary${Date.now()}`;
+}
+function getMjpegContentType(boundary) {
+return `multipart/x-mixed-replace; boundary=${boundary}`;
+}
+function formatMjpegFrame(frame, boundary) {
+const header = Buffer.from(
+`--${boundary}\r
+Content-Type: image/jpeg\r
+Content-Length: ${frame.length}\r
+\r
+`
+);
+return Buffer.concat([header, frame, Buffer.from("\r\n")]);
+}
+
+// src/baichuan/stream/BaichuanMjpegServer.ts
+var BaichuanMjpegServer = class extends EventEmitter4 {
+options;
+clients = /* @__PURE__ */ new Map();
+httpServer = null;
+transformer = null;
+nativeStream = null;
+streamPump = null;
+detectedCodec = null;
+started = false;
+clientIdCounter = 0;
+constructor(options) {
+super();
+this.options = options;
+}
+/**
+ * Start the MJPEG server
+ */
+async start() {
+if (this.started) return;
+this.started = true;
+const port = this.options.port ?? 8080;
+const host = this.options.host ?? "0.0.0.0";
+const path = this.options.path ?? "/mjpeg";
+this.httpServer = http5.createServer((req, res) => {
+this.handleRequest(req, res, path);
+});
+return new Promise((resolve, reject) => {
+this.httpServer.on("error", (err) => {
+this.log("error", `HTTP server error: ${err.message}`);
+reject(err);
+});
+this.httpServer.listen(port, host, () => {
+this.log(
+"info",
+`MJPEG server started on http://${host}:${port}${path}`
+);
+this.emit("started", { host, port, path });
+resolve();
+});
+});
+}
+/**
+ * Stop the MJPEG server
+ */
+async stop() {
+if (!this.started) return;
+this.started = false;
+for (const [id, client] of this.clients) {
+try {
+client.response.end();
+} catch {
+}
+this.clients.delete(id);
+}
+await this.stopStream();
+if (this.httpServer) {
+await new Promise((resolve) => {
+this.httpServer.close(() => resolve());
+});
+this.httpServer = null;
+}
+this.log("info", "MJPEG server stopped");
+this.emit("stopped");
+}
+/**
+ * Handle HTTP request
+ */
+handleRequest(req, res, expectedPath) {
+const url = new URL(req.url ?? "/", `http://${req.headers.host}`);
+if (url.pathname !== expectedPath) {
+res.statusCode = 404;
+res.end("Not Found");
+return;
+}
+if (req.method !== "GET") {
+res.statusCode = 405;
+res.end("Method Not Allowed");
+return;
+}
+this.handleMjpegClient(req, res);
+}
+/**
+ * Handle new MJPEG client
+ */
+handleMjpegClient(req, res) {
+const clientId = `client-${++this.clientIdCounter}`;
+const boundary = createMjpegBoundary();
+const client = {
+id: clientId,
+response: res,
+boundary,
+connectedAt: Date.now()
+};
+this.clients.set(clientId, client);
+this.log(
+"info",
+`MJPEG client connected: ${clientId} (total: ${this.clients.size})`
+);
+this.emit("client-connected", { id: clientId, total: this.clients.size });
+res.writeHead(200, {
+"Content-Type": getMjpegContentType(boundary),
+"Cache-Control": "no-cache, no-store, must-revalidate",
+Pragma: "no-cache",
+Expires: "0",
+Connection: "close"
+});
+const cleanup = () => {
+this.clients.delete(clientId);
+this.log(
+"info",
+`MJPEG client disconnected: ${clientId} (remaining: ${this.clients.size})`
+);
+this.emit("client-disconnected", {
+id: clientId,
+total: this.clients.size
+});
+if (this.clients.size === 0) {
+this.stopStream();
+}
+};
+req.on("close", cleanup);
+res.on("close", cleanup);
+res.on("error", cleanup);
+if (!this.transformer) {
+this.startStream();
+}
+}
+/**
+ * Start the native video stream and MJPEG transformer
+ */
+async startStream() {
+if (this.transformer) return;
+this.log("info", "Starting native video stream...");
+const { api, channel, profile, variant, quality, width, height, maxFps } = this.options;
+try {
+this.nativeStream = createNativeStream(
+api,
+channel,
+profile,
+variant ? { variant } : void 0
+);
+this.streamPump = this.pumpStream();
+} catch (err) {
+this.log("error", `Failed to start stream: ${err}`);
+this.emit("error", err);
+}
+}
+/**
+ * Pump native stream and feed to transformer
+ */
+async pumpStream() {
+if (!this.nativeStream) return;
+let frameBuffer = [];
+let waitingForKeyframe = true;
+try {
+for await (const frame of this.nativeStream) {
+if (!this.started || this.clients.size === 0) break;
+const { type, data, microseconds, videoType } = frame;
+if (type !== "Iframe" && type !== "Pframe") continue;
+if (!data || data.length === 0) continue;
+let annexB;
+if (videoType === "H265") {
+annexB = convertToAnnexB2(data);
+if (!this.detectedCodec) {
+this.detectedCodec = "h265";
+this.initTransformer();
+}
+} else {
+annexB = convertToAnnexB(data);
+if (!this.detectedCodec) {
+this.detectedCodec = "h264";
+this.initTransformer();
+}
+}
+if (waitingForKeyframe) {
+if (type === "Iframe") {
+waitingForKeyframe = false;
+} else {
+continue;
+}
+}
+if (this.transformer) {
+this.transformer.push(annexB, microseconds);
+}
+}
+} catch (err) {
+if (this.started) {
+this.log("error", `Stream error: ${err}`);
+this.emit("error", err);
+}
+}
+}
+/**
+ * Initialize MJPEG transformer once codec is detected
+ */
+initTransformer() {
+if (this.transformer || !this.detectedCodec) return;
+const { quality, width, height, maxFps } = this.options;
+this.transformer = new MjpegTransformer({
+codec: this.detectedCodec,
+quality,
+width,
+height,
+maxFps,
+logger: this.options.logger
+});
+this.transformer.on("frame", (frame) => {
+this.broadcastFrame(frame);
+});
+this.transformer.on("error", (err) => {
+this.log("error", `Transformer error: ${err}`);
+});
+this.transformer.on("close", () => {
+this.log("debug", "Transformer closed");
+});
+this.transformer.start();
+this.log(
+"info",
+`MJPEG transformer started (codec: ${this.detectedCodec})`
+);
+}
+/**
+ * Broadcast JPEG frame to all connected clients
+ */
+broadcastFrame(frame) {
+for (const client of this.clients.values()) {
+try {
+const mjpegData = formatMjpegFrame(frame.data, client.boundary);
+client.response.write(mjpegData);
+} catch {
+}
+}
+}
+/**
+ * Stop the stream and transformer
+ */
+async stopStream() {
+if (this.transformer) {
+await this.transformer.stop();
+this.transformer = null;
+}
+if (this.nativeStream) {
+try {
+await this.nativeStream.return(void 0);
+} catch {
+}
+this.nativeStream = null;
+}
+if (this.streamPump) {
+try {
+await this.streamPump;
+} catch {
+}
+this.streamPump = null;
+}
+this.detectedCodec = null;
+this.log("debug", "Stream stopped");
+}
+/**
+ * Get current number of connected clients
+ */
+getClientCount() {
+return this.clients.size;
+}
+/**
+ * Get server status
+ */
+getStatus() {
+return {
+running: this.started,
+clients: this.clients.size,
+codec: this.detectedCodec,
+frames: this.transformer?.getFrameCount() ?? 0
+};
+}
+log(level, message) {
+this.options.logger?.(level, `[BaichuanMjpegServer] ${message}`);
+}
+};
+
+// src/baichuan/stream/BaichuanWebRTCServer.ts
+import { EventEmitter as EventEmitter5 } from "events";
+function parseAnnexBNalUnits(annexB) {
+const nalUnits = [];
+let offset = 0;
+while (offset < annexB.length) {
+let startCodeLen = 0;
+if (offset + 4 <= annexB.length && annexB[offset] === 0 && annexB[offset + 1] === 0 && annexB[offset + 2] === 0 && annexB[offset + 3] === 1) {
+startCodeLen = 4;
+} else if (offset + 3 <= annexB.length && annexB[offset] === 0 && annexB[offset + 1] === 0 && annexB[offset + 2] === 1) {
+startCodeLen = 3;
+} else {
+offset++;
+continue;
+}
+const naluStart = offset + startCodeLen;
+let naluEnd = annexB.length;
+for (let i = naluStart; i < annexB.length - 2; i++) {
+if (annexB[i] === 0 && annexB[i + 1] === 0 && (annexB[i + 2] === 1 || i + 3 < annexB.length && annexB[i + 2] === 0 && annexB[i + 3] === 1)) {
+naluEnd = i;
+break;
+}
+}
+if (naluEnd > naluStart) {
+nalUnits.push(annexB.subarray(naluStart, naluEnd));
+}
+offset = naluEnd;
+}
+return nalUnits;
+}
+function getH264NalType(nalUnit) {
+return nalUnit[0] & 31;
+}
+function getH265NalType2(nalUnit) {
+return nalUnit[0] >> 1 & 63;
+}
+var BaichuanWebRTCServer = class extends EventEmitter5 {
+options;
+sessions = /* @__PURE__ */ new Map();
+sessionIdCounter = 0;
+weriftModule = null;
+constructor(options) {
+super();
+this.options = options;
+}
+/**
+ * Initialize werift module (lazy load to avoid requiring it if not used)
+ */
+async loadWerift() {
+if (this.weriftModule) return this.weriftModule;
+try {
+this.weriftModule = await import("werift");
+return this.weriftModule;
+} catch (err) {
+throw new Error(
+`Failed to load werift module. Make sure it's installed: npm install werift
+Error: ${err}`
+);
+}
+}
+/**
+ * Create a new WebRTC session
+ * Returns a session ID and SDP offer to send to the browser
+ */
+async createSession() {
+const werift = await this.loadWerift();
+const { RTCPeerConnection, MediaStreamTrack, RTCRtpCodecParameters } = werift;
+const sessionId = `webrtc-${++this.sessionIdCounter}-${Date.now()}`;
+this.log("info", `Creating WebRTC session ${sessionId}`);
+const iceServers = [];
+const stunServers = this.options.stunServers ?? [
+"stun:stun.l.google.com:19302"
+];
+for (const urls of stunServers) {
+iceServers.push({ urls });
+}
+if (this.options.turnServers) {
+iceServers.push(...this.options.turnServers);
+}
+const peerConnection = new RTCPeerConnection({
+iceServers,
+codecs: {
+video: [
+new RTCRtpCodecParameters({
+mimeType: "video/H264",
+clockRate: 9e4,
+rtcpFeedback: [
+{ type: "nack" },
+{ type: "nack", parameter: "pli" },
+{ type: "goog-remb" }
+],
+parameters: "packetization-mode=1;profile-level-id=42e01f;level-asymmetry-allowed=1"
+})
+],
+audio: [
+new RTCRtpCodecParameters({
+mimeType: "audio/opus",
+clockRate: 48e3,
+channels: 2
+})
+]
+}
+});
+const session = {
+id: sessionId,
+peerConnection,
+videoTrack: null,
+audioTrack: null,
+videoDataChannel: null,
+nativeStream: null,
+intercom: null,
+dataChannel: null,
+videoCodec: null,
+createdAt: /* @__PURE__ */ new Date(),
+state: "connecting",
+stats: {
+videoFrames: 0,
+audioFrames: 0,
+bytesSent: 0,
+intercomBytesSent: 0
+}
+};
+this.sessions.set(sessionId, session);
+const videoTrack = new MediaStreamTrack({ kind: "video" });
+peerConnection.addTrack(videoTrack);
+session.videoTrack = videoTrack;
+const audioTrack = new MediaStreamTrack({ kind: "audio" });
+peerConnection.addTrack(audioTrack);
+session.audioTrack = audioTrack;
+const videoDataChannel = peerConnection.createDataChannel("video", {
+ordered: true,
+maxRetransmits: 0
+// Unreliable for real-time video
+});
+session.videoDataChannel = videoDataChannel;
+videoDataChannel.onopen = () => {
+this.log("info", `Video data channel opened for session ${sessionId}`);
+};
+if (this.options.enableIntercom) {
+const dataChannel = peerConnection.createDataChannel("intercom", {
+ordered: true
+});
+session.dataChannel = dataChannel;
+dataChannel.onopen = () => {
+this.log(
+"info",
+`Intercom data channel opened for session ${sessionId}`
+);
+this.emit("intercom-started", { sessionId });
+};
+dataChannel.onmessage = async (event) => {
+if (session.intercom && event.data instanceof ArrayBuffer) {
+try {
+const audioData = Buffer.from(event.data);
+await session.intercom.sendAudio(audioData);
+session.stats.intercomBytesSent += audioData.length;
+} catch (err) {
+this.log("error", `Failed to send intercom audio: ${err}`);
+}
+}
+};
+dataChannel.onclose = () => {
+this.log(
+"info",
+`Intercom data channel closed for session ${sessionId}`
+);
+this.emit("intercom-stopped", { sessionId });
+};
+}
+peerConnection.iceConnectionStateChange.subscribe((state) => {
+this.log("debug", `ICE connection state for ${sessionId}: ${state}`);
+if (state === "connected") {
+session.state = "connected";
+this.emit("session-connected", { sessionId });
+} else if (state === "disconnected" || state === "failed") {
+session.state = state;
+this.closeSession(sessionId).catch((err) => {
+this.log("error", `Error closing session on ICE ${state}: ${err}`);
+});
+}
+});
+peerConnection.connectionStateChange.subscribe((state) => {
+this.log("debug", `Connection state for ${sessionId}: ${state}`);
+if (state === "closed" || state === "failed") {
+this.closeSession(sessionId).catch((err) => {
+this.log(
+"error",
+`Error closing session on connection ${state}: ${err}`
+);
+});
+}
+});
+const offer = await peerConnection.createOffer();
+await peerConnection.setLocalDescription(offer);
+await this.waitForIceGathering(peerConnection, 3e3);
+const localDescription = peerConnection.localDescription;
+if (!localDescription) {
+throw new Error("Failed to create local description");
+}
+this.emit("session-created", { sessionId });
+return {
+sessionId,
+offer: {
+sdp: localDescription.sdp,
+type: "offer"
+}
+};
+}
+/**
+ * Handle WebRTC answer from browser and start streaming
+ */
+async handleAnswer(sessionId, answer) {
+const session = this.sessions.get(sessionId);
+if (!session) {
+throw new Error(`Session ${sessionId} not found`);
+}
+const werift = await this.loadWerift();
+const { RTCSessionDescription } = werift;
+this.log("info", `Handling WebRTC answer for session ${sessionId}`);
+await session.peerConnection.setRemoteDescription(
+new RTCSessionDescription(answer.sdp, answer.type)
+);
+await this.startNativeStream(session);
+if (this.options.enableIntercom && session.dataChannel) {
+await this.startIntercom(session);
+}
+}
+/**
+ * Add ICE candidate from browser
+ */
+async addIceCandidate(sessionId, candidate) {
+const session = this.sessions.get(sessionId);
+if (!session) {
+throw new Error(`Session ${sessionId} not found`);
+}
+const werift = await this.loadWerift();
+const { RTCIceCandidate } = werift;
+await session.peerConnection.addIceCandidate(
+new RTCIceCandidate(candidate.candidate, candidate.sdpMid ?? "0")
+);
+}
+/**
+ * Close a WebRTC session
+ */
+async closeSession(sessionId) {
+const session = this.sessions.get(sessionId);
+if (!session) return;
+this.log("info", `Closing WebRTC session ${sessionId}`);
+session.state = "disconnected";
+if (session.intercom) {
+try {
+await session.intercom.stop();
+} catch (err) {
+this.log("debug", `Error stopping intercom: ${err}`);
+}
+session.intercom = null;
+}
+if (session.dataChannel) {
+try {
+session.dataChannel.close();
+} catch (err) {
+this.log("debug", `Error closing data channel: ${err}`);
+}
+session.dataChannel = null;
+}
+if (session.cleanup) {
+session.cleanup();
+}
+try {
+await session.peerConnection.close();
+} catch (err) {
+this.log("debug", `Error closing peer connection: ${err}`);
+}
+this.sessions.delete(sessionId);
+this.emit("session-closed", { sessionId });
+this.log(
+"info",
+`WebRTC session ${sessionId} closed (active sessions: ${this.sessions.size})`
+);
+}
+/**
+ * Get information about all active sessions
+ */
+getSessions() {
+return Array.from(this.sessions.values()).map((s) => ({
+id: s.id,
+state: s.state,
+createdAt: s.createdAt,
+stats: { ...s.stats }
+}));
+}
+/**
+ * Get information about a specific session
+ */
+getSession(sessionId) {
+const session = this.sessions.get(sessionId);
+if (!session) return null;
+return {
+id: session.id,
+state: session.state,
+createdAt: session.createdAt,
+stats: { ...session.stats }
+};
+}
+/**
+ * Close all sessions and stop the server
+ */
+async stop() {
+this.log("info", "Stopping WebRTC server");
+const sessionIds = Array.from(this.sessions.keys());
+await Promise.all(sessionIds.map((id) => this.closeSession(id)));
+this.log("info", "WebRTC server stopped");
+}
+/**
+ * Get the number of active sessions
+ */
+get sessionCount() {
+return this.sessions.size;
+}
+// ============================================================================
+// Private Methods
+// ============================================================================
+/**
+ * Wait for ICE gathering to complete
+ */
+async waitForIceGathering(pc, timeoutMs) {
+if (pc.iceGatheringState === "complete") return;
+return new Promise((resolve) => {
+const timeout = setTimeout(() => {
+resolve();
+}, timeoutMs);
+pc.iceGatheringStateChange.subscribe((state) => {
+if (state === "complete") {
+clearTimeout(timeout);
+resolve();
+}
+});
+});
+}
+/**
+ * Start native Baichuan stream and pump frames to WebRTC
+ */
+async startNativeStream(session) {
+this.log(
+"info",
+`Starting native stream for session ${session.id} (channel=${this.options.channel}, profile=${this.options.profile})`
+);
+session.nativeStream = createNativeStream(
+this.options.api,
+this.options.channel,
+this.options.profile,
+this.options.variant !== void 0 ? { variant: this.options.variant } : void 0
+);
+this.pumpFramesToWebRTC(session).catch((err) => {
+this.log("error", `Frame pump error for session ${session.id}: ${err}`);
+this.closeSession(session.id).catch(() => {
+});
+});
+}
+/**
+ * Pump frames from native stream to WebRTC tracks
+ * H.264 → RTP media track (standard WebRTC)
+ * H.265 → DataChannel with raw Annex-B frames (decoded by WebCodecs in browser)
+ */
+async pumpFramesToWebRTC(session) {
+if (!session.nativeStream) {
+this.log("warn", `No native stream for session ${session.id}`);
+return;
+}
+this.log("info", `Starting frame pump for session ${session.id}`);
+const werift = await this.loadWerift();
+const { RtpPacket, RtpHeader } = werift;
+let sequenceNumber = Math.floor(Math.random() * 65535);
+let timestamp = Math.floor(Math.random() * 4294967295);
+const videoClockRate = 9e4;
+let lastTimeMicros = 0;
+let lastLogTime = Date.now();
+let packetsSentSinceLastLog = 0;
+let frameNumber = 0;
+try {
+this.log("info", `Entering frame loop for session ${session.id}`);
+for await (const frame of session.nativeStream) {
+if (session.state === "disconnected" || session.state === "failed") {
+this.log(
+"debug",
+`Session ${session.id} state is ${session.state}, breaking frame loop`
+);
+break;
+}
+if (frame.audio) {
+session.stats.audioFrames++;
+} else {
+if (frame.data) {
+if (!session.videoCodec && frame.videoType) {
+session.videoCodec = frame.videoType;
+this.log("info", `Detected video codec: ${session.videoCodec}`);
+if (session.videoDataChannel && session.videoDataChannel.readyState === "open") {
+const codecInfo = JSON.stringify({
+type: "codec",
+codec: session.videoCodec,
+width: frame.width || 0,
+height: frame.height || 0
+});
+session.videoDataChannel.send(codecInfo);
+}
+}
+if (frame.microseconds && lastTimeMicros > 0) {
+const deltaMicros = frame.microseconds - lastTimeMicros;
+const deltaTicks = Math.floor(
+deltaMicros / 1e6 * videoClockRate
+);
+timestamp = timestamp + deltaTicks >>> 0;
+} else {
+timestamp = timestamp + 3e3 >>> 0;
+}
+lastTimeMicros = frame.microseconds || 0;
+if (session.videoCodec === "H264") {
+await this.sendH264Frame(
+session,
+werift,
+frame.data,
+sequenceNumber,
+timestamp
+);
+sequenceNumber = sequenceNumber + Math.ceil(frame.data.length / 1200) & 65535;
+packetsSentSinceLastLog++;
+} else if (session.videoCodec === "H265") {
+await this.sendH265Frame(session, frame, frameNumber);
+packetsSentSinceLastLog++;
+}
+frameNumber++;
+session.stats.videoFrames++;
+session.stats.bytesSent += frame.data.length;
+const now = Date.now();
+if (now - lastLogTime >= 5e3) {
+this.log(
+"debug",
+`WebRTC session ${session.id} [${session.videoCodec}]: sent ${session.stats.videoFrames} frames, ${packetsSentSinceLastLog} packets, ${Math.round(session.stats.bytesSent / 1024)} KB`
+);
+lastLogTime = now;
+packetsSentSinceLastLog = 0;
+}
+}
+}
+}
+} catch (err) {
+this.log(
+"error",
+`Error pumping frames for session ${session.id}: ${err}`
+);
+}
+this.log("info", `Native stream ended for session ${session.id}`);
+}
+/**
+ * Send H.264 frame via RTP media track
+ */
+async sendH264Frame(session, werift, frameData, sequenceNumber, timestamp) {
+const nalUnits = parseAnnexBNalUnits(frameData);
+for (let i = 0; i < nalUnits.length; i++) {
+const nalUnit = nalUnits[i];
+if (nalUnit.length === 0) continue;
+const isLastNalu = i === nalUnits.length - 1;
+const nalType = getH264NalType(nalUnit);
+if (nalType === 9) continue;
+const rtpPackets = this.createH264RtpPackets(
+werift,
+nalUnit,
+sequenceNumber,
+timestamp,
+isLastNalu
+);
+for (const rtpPacket of rtpPackets) {
+session.videoTrack.writeRtp(rtpPacket);
+sequenceNumber = sequenceNumber + 1 & 65535;
+}
+}
+}
+/**
+ * Send H.265 frame via DataChannel
+ * Format: 12-byte header + Annex-B data
+ * Header: [frameNum (4)] [timestamp (4)] [flags (1)] [keyframe (1)] [reserved (2)]
+ */
+async sendH265Frame(session, frame, frameNumber) {
+if (!session.videoDataChannel) {
+if (frameNumber === 0) {
+this.log("warn", `No video data channel for session ${session.id}`);
+}
+return;
+}
+if (session.videoDataChannel.readyState !== "open") {
+if (frameNumber === 0) {
+this.log(
+"warn",
+`Video data channel not open for session ${session.id}: ${session.videoDataChannel.readyState}`
+);
+}
+return;
+}
+let isKeyframe = frame.isKeyframe === true;
+if (!isKeyframe && frame.isKeyframe === void 0) {
+const nalUnits = parseAnnexBNalUnits(frame.data);
+for (const nalUnit of nalUnits) {
+if (nalUnit.length === 0) continue;
+const nalType = getH265NalType2(nalUnit);
+if (nalType === 32 || nalType === 33 || nalType === 34 || nalType === 19 || nalType === 20) {
+isKeyframe = true;
+break;
+}
+}
+}
+const header = Buffer.alloc(12);
+header.writeUInt32BE(frameNumber, 0);
+header.writeUInt32BE(frame.microseconds ? frame.microseconds / 1e3 : 0, 4);
+header.writeUInt8(1, 8);
+header.writeUInt8(isKeyframe ? 1 : 0, 9);
+header.writeUInt16BE(0, 10);
+const packet = Buffer.concat([header, frame.data]);
+if (frameNumber < 3) {
+this.log(
+"info",
+`Sending H.265 frame ${frameNumber}: ${packet.length} bytes, keyframe=${isKeyframe}`
+);
+}
+const MAX_CHUNK_SIZE = 16e3;
+try {
+if (packet.length <= MAX_CHUNK_SIZE) {
+session.videoDataChannel.send(packet);
+} else {
+const totalChunks = Math.ceil(packet.length / MAX_CHUNK_SIZE);
+for (let i = 0; i < totalChunks; i++) {
+const start = i * MAX_CHUNK_SIZE;
+const end = Math.min(start + MAX_CHUNK_SIZE, packet.length);
+const chunk = packet.subarray(start, end);
+const chunkHeader = Buffer.alloc(2);
+chunkHeader.writeUInt8(i, 0);
+chunkHeader.writeUInt8(totalChunks, 1);
+session.videoDataChannel.send(Buffer.concat([chunkHeader, chunk]));
+}
+}
+} catch (err) {
+this.log("error", `Error sending H.265 frame ${frameNumber}: ${err}`);
+}
+}
+/**
+ * Create RTP packets for H.264 NAL unit
+ * Handles single NAL, STAP-A aggregation, and FU-A fragmentation
+ */
+createH264RtpPackets(werift, nalUnit, sequenceNumber, timestamp, marker) {
+const { RtpPacket, RtpHeader } = werift;
+const MTU = 1200;
+const packets = [];
+if (nalUnit.length <= MTU) {
+const header = new RtpHeader();
+header.payloadType = 96;
+header.sequenceNumber = sequenceNumber;
+header.timestamp = timestamp;
+header.marker = marker;
+packets.push(new RtpPacket(header, nalUnit));
+} else {
+const nalHeader = nalUnit[0];
+const nalType = nalHeader & 31;
+const nri = nalHeader & 96;
+const fuIndicator = (nri | 28) & 255;
+let offset = 1;
+let isFirst = true;
+while (offset < nalUnit.length) {
+const remaining = nalUnit.length - offset;
+const chunkSize = Math.min(remaining, MTU - 2);
+const isLast = offset + chunkSize >= nalUnit.length;
+let fuHeader = nalType;
+if (isFirst) fuHeader |= 128;
+if (isLast) fuHeader |= 64;
+const fuPayload = Buffer.alloc(2 + chunkSize);
+fuPayload[0] = fuIndicator;
+fuPayload[1] = fuHeader;
+nalUnit.copy(fuPayload, 2, offset, offset + chunkSize);
+const header = new RtpHeader();
+header.payloadType = 96;
+header.sequenceNumber = sequenceNumber + packets.length & 65535;
+header.timestamp = timestamp;
+header.marker = isLast && marker;
+packets.push(new RtpPacket(header, fuPayload));
+offset += chunkSize;
+isFirst = false;
+}
+}
+return packets;
+}
+/**
+ * Start intercom (two-way audio)
+ */
+async startIntercom(session) {
+try {
+session.intercom = new Intercom({
+api: this.options.api,
+channel: this.options.channel
+});
+await session.intercom.start();
+this.log("info", `Intercom started for session ${session.id}`);
+} catch (err) {
+this.log(
+"error",
+`Failed to start intercom for session ${session.id}: ${err}`
+);
+session.intercom = null;
+}
+}
+/**
+ * Log helper
+ */
+log(level, message) {
+if (this.options.logger) {
+this.options.logger(level, message);
+}
+}
+};
+
+// src/multifocal/compositeRtspServer.ts
+import { EventEmitter as EventEmitter6 } from "events";
+import { spawn as spawn7 } from "child_process";
 import * as net from "net";
-var CompositeRtspServer = class extends
+var CompositeRtspServer = class extends EventEmitter6 {
 options;
 compositeStream = null;
 rtspServer = null;
@@ -4417,7 +5593,7 @@ var CompositeRtspServer = class extends EventEmitter3 {
 this.logger.log?.(
 `[CompositeRtspServer] Starting ffmpeg RTSP server: ${ffmpegArgs.join(" ")}`
 );
-this.ffmpegProcess =
+this.ffmpegProcess = spawn7("ffmpeg", ffmpegArgs, {
 stdio: ["pipe", "pipe", "pipe"]
 });
 this.ffmpegProcess.on("error", (error) => {
@@ -4630,8 +5806,10 @@ export {
 BaichuanEventEmitter,
 BaichuanFrameParser,
 BaichuanHttpStreamServer,
+BaichuanMjpegServer,
 BaichuanRtspServer,
 BaichuanVideoStream,
+BaichuanWebRTCServer,
 BcMediaAnnexBDecoder,
 BcMediaCodec,
 BcUdpStream,
@@ -4643,6 +5821,7 @@ export {
 H264RtpDepacketizer,
 H265RtpDepacketizer,
 Intercom,
+MjpegTransformer,
 NVR_HUB_EXACT_TYPES,
 NVR_HUB_MODEL_PATTERNS,
 ReolinkBaichuanApi,
@@ -4689,6 +5868,7 @@ export {
 createDebugGateLogger,
 createDiagnosticsBundle,
 createLogger,
+createMjpegBoundary,
 createNativeStream,
 createNullLogger,
 createReplayHttpServer,
@@ -4696,6 +5876,7 @@ export {
 createRfc4571TcpServerForReplay,
 createRtspProxyServer,
 createTaggedLogger,
+decideVideoclipTranscodeMode,
 decodeHeader,
 deriveAesKey,
 detectVideoCodecFromNal,
@@ -4710,10 +5891,13 @@ export {
 extractSpsFromAnnexB,
 extractVpsFromAnnexB,
 flattenAbilitiesForChannel,
+formatMjpegFrame,
 getConstructedVideoStreamOptions,
 getGlobalLogger,
 getH265NalType,
+getMjpegContentType,
 getVideoStream,
+getVideoclipClientInfo,
 getXmlText,
 hasStartCodes2 as hasH265StartCodes,
 hasStartCodes,