@apocaliss92/nodelink-js 0.1.7 → 0.1.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +9 -6
- package/dist/{DiagnosticsTools-MTXG65O3.js → DiagnosticsTools-EC7DADEQ.js} +2 -2
- package/dist/{chunk-MC2BRLLE.js → chunk-TZFZ5WJX.js} +71 -9
- package/dist/chunk-TZFZ5WJX.js.map +1 -0
- package/dist/{chunk-JMT75JNG.js → chunk-YUBYINJF.js} +674 -64
- package/dist/chunk-YUBYINJF.js.map +1 -0
- package/dist/cli/rtsp-server.cjs +740 -68
- package/dist/cli/rtsp-server.cjs.map +1 -1
- package/dist/cli/rtsp-server.d.cts +1 -0
- package/dist/cli/rtsp-server.d.ts +1 -0
- package/dist/cli/rtsp-server.js +2 -2
- package/dist/index.cjs +3293 -248
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +8187 -0
- package/dist/index.d.ts +761 -1
- package/dist/index.js +2359 -5
- package/dist/index.js.map +1 -1
- package/package.json +14 -3
- package/dist/chunk-JMT75JNG.js.map +0 -1
- package/dist/chunk-MC2BRLLE.js.map +0 -1
- /package/dist/{DiagnosticsTools-MTXG65O3.js.map → DiagnosticsTools-EC7DADEQ.js.map} +0 -0
package/dist/index.cjs
CHANGED
|
@@ -2468,13 +2468,31 @@ var init_BaichuanVideoStream = __esm({
|
|
|
2468
2468
|
searchStart = bodyEnd + Buffer.from("</body>").length;
|
|
2469
2469
|
dataToParse = rawCandidate.subarray(searchStart);
|
|
2470
2470
|
}
|
|
2471
|
+
let encryptLen;
|
|
2472
|
+
if (frame.extension && frame.extension.length > 0) {
|
|
2473
|
+
try {
|
|
2474
|
+
const extDec = this.client.tryDecryptXml(
|
|
2475
|
+
frame.extension,
|
|
2476
|
+
frame.header.channelId,
|
|
2477
|
+
enc
|
|
2478
|
+
);
|
|
2479
|
+
const encryptLenMatch = extDec.match(
|
|
2480
|
+
/<encryptLen>(\d+)<\/encryptLen>/i
|
|
2481
|
+
);
|
|
2482
|
+
if (encryptLenMatch && encryptLenMatch[1]) {
|
|
2483
|
+
encryptLen = parseInt(encryptLenMatch[1], 10);
|
|
2484
|
+
}
|
|
2485
|
+
} catch {
|
|
2486
|
+
}
|
|
2487
|
+
}
|
|
2471
2488
|
const dataAfterXml = this.chooseDecryptedOrRawCandidate({
|
|
2472
2489
|
raw: dataToParse,
|
|
2473
2490
|
enc,
|
|
2474
2491
|
channelId: frame.header.channelId,
|
|
2475
2492
|
// Some NVR/Hub streams appear to include non-media bytes even when payloadOffset is present.
|
|
2476
2493
|
// Allow a one-time resync at startup to avoid delaying the first keyframe.
|
|
2477
|
-
allowResync: frame.payload.length === 0 || totalFramesReceived <= 10 && totalMediaPackets === 0
|
|
2494
|
+
allowResync: frame.payload.length === 0 || totalFramesReceived <= 10 && totalMediaPackets === 0,
|
|
2495
|
+
...encryptLen !== void 0 ? { encryptLen } : {}
|
|
2478
2496
|
});
|
|
2479
2497
|
if (this.bcMediaCodec.getRemainingBuffer().length === 0 && dataAfterXml.length <= 600) {
|
|
2480
2498
|
const s = _BaichuanVideoStream.scoreBcMediaLike(dataAfterXml);
|
|
@@ -2587,15 +2605,38 @@ var init_BaichuanVideoStream = __esm({
|
|
|
2587
2605
|
}
|
|
2588
2606
|
}
|
|
2589
2607
|
};
|
|
2590
|
-
const prependParamSetsIfNeeded = (annexB, videoType) => {
|
|
2608
|
+
const prependParamSetsIfNeeded = (annexB, videoType, isPframe = false) => {
|
|
2591
2609
|
if (videoType === "H264") {
|
|
2592
2610
|
const nals = splitAnnexBToNalPayloads(annexB);
|
|
2593
2611
|
if (nals.length === 0) return annexB;
|
|
2594
2612
|
const types = nals.map((n) => (n[0] ?? 0) & 31);
|
|
2595
|
-
if (types.includes(7) && types.includes(8)) return annexB;
|
|
2596
2613
|
const hasVcl = types.some(
|
|
2597
2614
|
(t) => t === 1 || t === 5 || t === 19 || t === 20
|
|
2598
2615
|
);
|
|
2616
|
+
if (isPframe && !hasVcl) {
|
|
2617
|
+
if (dbg.traceNativeStream) {
|
|
2618
|
+
this.logger?.warn(
|
|
2619
|
+
`[BaichuanVideoStream] Dropping P-frame without VCL (only param sets): types=${types.join(",")}`
|
|
2620
|
+
);
|
|
2621
|
+
}
|
|
2622
|
+
return Buffer.alloc(0);
|
|
2623
|
+
}
|
|
2624
|
+
if (types.includes(7) && types.includes(8)) {
|
|
2625
|
+
let ppsIdFromSlice = null;
|
|
2626
|
+
for (const nal of nals) {
|
|
2627
|
+
const t = (nal[0] ?? 0) & 31;
|
|
2628
|
+
if (t === 1 || t === 5) {
|
|
2629
|
+
ppsIdFromSlice = parseSlicePpsIdFromNal(nal);
|
|
2630
|
+
break;
|
|
2631
|
+
}
|
|
2632
|
+
}
|
|
2633
|
+
if (ppsIdFromSlice != null && ppsIdFromSlice <= 255) {
|
|
2634
|
+
this.lastPrependedPpsId = ppsIdFromSlice;
|
|
2635
|
+
} else {
|
|
2636
|
+
this.lastPrependedPpsId = -1;
|
|
2637
|
+
}
|
|
2638
|
+
return annexB;
|
|
2639
|
+
}
|
|
2599
2640
|
if (!hasVcl) return annexB;
|
|
2600
2641
|
let ppsId = null;
|
|
2601
2642
|
for (const nal of nals) {
|
|
@@ -2642,11 +2683,19 @@ var init_BaichuanVideoStream = __esm({
|
|
|
2642
2683
|
const nals = splitAnnexBToNalPayloads2(annexB);
|
|
2643
2684
|
if (nals.length === 0) return annexB;
|
|
2644
2685
|
const types = nals.map((n) => getH265NalType(n)).filter((t) => t !== null);
|
|
2645
|
-
if (types.includes(32) && types.includes(33) && types.includes(34))
|
|
2646
|
-
return annexB;
|
|
2647
2686
|
const hasVcl = types.some(
|
|
2648
2687
|
(t) => t >= 0 && t <= 9 || t >= 16 && t <= 23
|
|
2649
2688
|
);
|
|
2689
|
+
if (isPframe && !hasVcl) {
|
|
2690
|
+
if (dbg.traceNativeStream) {
|
|
2691
|
+
this.logger?.warn(
|
|
2692
|
+
`[BaichuanVideoStream] Dropping H.265 P-frame without VCL (only param sets): types=${types.join(",")}`
|
|
2693
|
+
);
|
|
2694
|
+
}
|
|
2695
|
+
return Buffer.alloc(0);
|
|
2696
|
+
}
|
|
2697
|
+
if (types.includes(32) && types.includes(33) && types.includes(34))
|
|
2698
|
+
return annexB;
|
|
2650
2699
|
if (!hasVcl) return annexB;
|
|
2651
2700
|
if (this.lastPrependedParamSetsH265) return annexB;
|
|
2652
2701
|
if (!this.lastVps || !this.lastSpsH265 || !this.lastPpsH265)
|
|
@@ -2828,7 +2877,7 @@ var init_BaichuanVideoStream = __esm({
|
|
|
2828
2877
|
}
|
|
2829
2878
|
for (const p of parts) {
|
|
2830
2879
|
maybeCacheParamSets(p, "Pframe", videoType);
|
|
2831
|
-
const outP0 = prependParamSetsIfNeeded(p, videoType);
|
|
2880
|
+
const outP0 = prependParamSetsIfNeeded(p, videoType, true);
|
|
2832
2881
|
if (outP0.length === 0) continue;
|
|
2833
2882
|
const outP = outP0;
|
|
2834
2883
|
dumpNalSummary(outP, "Pframe", media.microseconds);
|
|
@@ -2997,10 +3046,10 @@ function buildRtspPath(channel, stream) {
|
|
|
2997
3046
|
}
|
|
2998
3047
|
function buildRtspUrl(params) {
|
|
2999
3048
|
const port = params.port ?? 554;
|
|
3000
|
-
const
|
|
3049
|
+
const path6 = buildRtspPath(params.channel, params.stream);
|
|
3001
3050
|
const user = encodeURIComponent(params.username);
|
|
3002
3051
|
const pass = encodeURIComponent(params.password);
|
|
3003
|
-
return `rtsp://${user}:${pass}@${params.host}:${port}${
|
|
3052
|
+
return `rtsp://${user}:${pass}@${params.host}:${port}${path6}`;
|
|
3004
3053
|
}
|
|
3005
3054
|
var init_urls = __esm({
|
|
3006
3055
|
"src/rtsp/urls.ts"() {
|
|
@@ -3162,7 +3211,7 @@ async function createDiagnosticsBundle(params) {
|
|
|
3162
3211
|
}
|
|
3163
3212
|
function sanitizeFfmpegError(error) {
|
|
3164
3213
|
return error.replace(
|
|
3165
|
-
/([a-z]+:\/\/)([
|
|
3214
|
+
/([a-z]+:\/\/)([^:@/\s]+):([^@/\s]+)@/gi,
|
|
3166
3215
|
(match, protocol, username, password) => {
|
|
3167
3216
|
return `${protocol}***:***@`;
|
|
3168
3217
|
}
|
|
@@ -4542,8 +4591,8 @@ async function runMultifocalDiagnosticsConsecutively(params) {
|
|
|
4542
4591
|
for (const app of rtmpApps) {
|
|
4543
4592
|
for (const streamName of streams) {
|
|
4544
4593
|
const streamType = streamName.includes("sub") || streamName === "sub" || streamName === "mobile" ? 1 : 0;
|
|
4545
|
-
const
|
|
4546
|
-
const u = new URL(`rtmp://${params.host}:1935${
|
|
4594
|
+
const path6 = `/${app}/channel${params.channel}_${streamName}.bcs`;
|
|
4595
|
+
const u = new URL(`rtmp://${params.host}:1935${path6}`);
|
|
4547
4596
|
u.searchParams.set("channel", params.channel.toString());
|
|
4548
4597
|
u.searchParams.set("stream", streamType.toString());
|
|
4549
4598
|
u.searchParams.set("user", params.username);
|
|
@@ -5453,17 +5502,20 @@ function parseRecordingFileName(fileName) {
|
|
|
5453
5502
|
let widthRaw;
|
|
5454
5503
|
let heightRaw;
|
|
5455
5504
|
let hexValue = "";
|
|
5505
|
+
let sizeHex;
|
|
5456
5506
|
if (parts.length === 6) {
|
|
5457
5507
|
startDate = parts[1] ?? "";
|
|
5458
5508
|
startTime = parts[2] ?? "";
|
|
5459
5509
|
endTime = parts[3] ?? "";
|
|
5460
5510
|
hexValue = parts[4] ?? "";
|
|
5511
|
+
sizeHex = parts[5];
|
|
5461
5512
|
} else if (parts.length === 7) {
|
|
5462
5513
|
startDate = parts[1] ?? "";
|
|
5463
5514
|
startTime = parts[2] ?? "";
|
|
5464
5515
|
endTime = parts[3] ?? "";
|
|
5465
5516
|
animalTypeRaw = parts[4];
|
|
5466
5517
|
hexValue = parts[5] ?? "";
|
|
5518
|
+
sizeHex = parts[6];
|
|
5467
5519
|
} else if (parts.length === 9) {
|
|
5468
5520
|
devType = "hub";
|
|
5469
5521
|
startDate = parts[1] ?? "";
|
|
@@ -5473,6 +5525,7 @@ function parseRecordingFileName(fileName) {
|
|
|
5473
5525
|
widthRaw = parts[5];
|
|
5474
5526
|
heightRaw = parts[6];
|
|
5475
5527
|
hexValue = parts[7] ?? "";
|
|
5528
|
+
sizeHex = parts[8];
|
|
5476
5529
|
} else {
|
|
5477
5530
|
return void 0;
|
|
5478
5531
|
}
|
|
@@ -5503,6 +5556,12 @@ function parseRecordingFileName(fileName) {
|
|
|
5503
5556
|
if (animalTypeRaw != null) parsed.animalTypeRaw = animalTypeRaw;
|
|
5504
5557
|
if (widthRaw != null) parsed.widthRaw = widthRaw;
|
|
5505
5558
|
if (heightRaw != null) parsed.heightRaw = heightRaw;
|
|
5559
|
+
if (sizeHex && /^[0-9a-fA-F]+$/.test(sizeHex)) {
|
|
5560
|
+
const sizeBytes = parseInt(sizeHex, 16);
|
|
5561
|
+
if (Number.isFinite(sizeBytes) && sizeBytes > 0) {
|
|
5562
|
+
parsed.sizeBytes = sizeBytes;
|
|
5563
|
+
}
|
|
5564
|
+
}
|
|
5506
5565
|
return parsed;
|
|
5507
5566
|
}
|
|
5508
5567
|
var FLAGS_CAM_V2, FLAGS_HUB_V0, FLAGS_HUB_V1, FLAGS_HUB_V2, FLAGS_MAPPING;
|
|
@@ -6627,12 +6686,12 @@ var init_ReolinkCgiApi = __esm({
|
|
|
6627
6686
|
"getVideoclipThumbnailJpeg",
|
|
6628
6687
|
`Extracting thumbnail from VOD URL (FLV): ${vodUrl.substring(0, 100)}... (seek=${seekSeconds}s)`
|
|
6629
6688
|
);
|
|
6630
|
-
const { spawn:
|
|
6689
|
+
const { spawn: spawn12 } = await import("child_process");
|
|
6631
6690
|
return new Promise((resolve, reject) => {
|
|
6632
6691
|
const chunks = [];
|
|
6633
6692
|
let stderr = "";
|
|
6634
6693
|
let timedOut = false;
|
|
6635
|
-
const ffmpeg =
|
|
6694
|
+
const ffmpeg = spawn12(ffmpegPath, [
|
|
6636
6695
|
"-y",
|
|
6637
6696
|
"-analyzeduration",
|
|
6638
6697
|
"10000000",
|
|
@@ -7191,15 +7250,18 @@ var init_ReolinkCgiApi = __esm({
|
|
|
7191
7250
|
if (detectionClasses.length === 0) {
|
|
7192
7251
|
detectionClasses.push("motion");
|
|
7193
7252
|
}
|
|
7253
|
+
const sizeBytes = typeof vodFile.size === "string" ? parseInt(vodFile.size, 10) : vodFile.size;
|
|
7194
7254
|
const result = {
|
|
7195
7255
|
fileName: vodFile.name,
|
|
7196
7256
|
id: vodFile.name,
|
|
7197
|
-
sizeBytes: vodFile.size,
|
|
7198
7257
|
startTime,
|
|
7199
7258
|
endTime,
|
|
7200
7259
|
recordType: vodFile.type,
|
|
7201
7260
|
detectionClasses
|
|
7202
7261
|
};
|
|
7262
|
+
if (Number.isFinite(sizeBytes)) {
|
|
7263
|
+
result.sizeBytes = sizeBytes;
|
|
7264
|
+
}
|
|
7203
7265
|
if (parsed) {
|
|
7204
7266
|
result.parsedFileName = parsed;
|
|
7205
7267
|
}
|
|
@@ -7315,9 +7377,12 @@ __export(index_exports, {
|
|
|
7315
7377
|
BaichuanClient: () => BaichuanClient,
|
|
7316
7378
|
BaichuanEventEmitter: () => BaichuanEventEmitter,
|
|
7317
7379
|
BaichuanFrameParser: () => BaichuanFrameParser,
|
|
7380
|
+
BaichuanHlsServer: () => BaichuanHlsServer,
|
|
7318
7381
|
BaichuanHttpStreamServer: () => BaichuanHttpStreamServer,
|
|
7382
|
+
BaichuanMjpegServer: () => BaichuanMjpegServer,
|
|
7319
7383
|
BaichuanRtspServer: () => BaichuanRtspServer,
|
|
7320
7384
|
BaichuanVideoStream: () => BaichuanVideoStream,
|
|
7385
|
+
BaichuanWebRTCServer: () => BaichuanWebRTCServer,
|
|
7321
7386
|
BcMediaAnnexBDecoder: () => BcMediaAnnexBDecoder,
|
|
7322
7387
|
BcMediaCodec: () => BcMediaCodec,
|
|
7323
7388
|
BcUdpStream: () => BcUdpStream,
|
|
@@ -7328,7 +7393,9 @@ __export(index_exports, {
|
|
|
7328
7393
|
DUAL_LENS_SINGLE_MOTION_MODELS: () => DUAL_LENS_SINGLE_MOTION_MODELS,
|
|
7329
7394
|
H264RtpDepacketizer: () => H264RtpDepacketizer,
|
|
7330
7395
|
H265RtpDepacketizer: () => H265RtpDepacketizer,
|
|
7396
|
+
HlsSessionManager: () => HlsSessionManager,
|
|
7331
7397
|
Intercom: () => Intercom,
|
|
7398
|
+
MjpegTransformer: () => MjpegTransformer,
|
|
7332
7399
|
NVR_HUB_EXACT_TYPES: () => NVR_HUB_EXACT_TYPES,
|
|
7333
7400
|
NVR_HUB_MODEL_PATTERNS: () => NVR_HUB_MODEL_PATTERNS,
|
|
7334
7401
|
ReolinkBaichuanApi: () => ReolinkBaichuanApi,
|
|
@@ -7348,6 +7415,7 @@ __export(index_exports, {
|
|
|
7348
7415
|
buildBinaryExtensionXml: () => buildBinaryExtensionXml,
|
|
7349
7416
|
buildChannelExtensionXml: () => buildChannelExtensionXml,
|
|
7350
7417
|
buildFloodlightManualXml: () => buildFloodlightManualXml,
|
|
7418
|
+
buildHlsRedirectUrl: () => buildHlsRedirectUrl,
|
|
7351
7419
|
buildLoginXml: () => buildLoginXml,
|
|
7352
7420
|
buildPreviewStopXml: () => buildPreviewStopXml,
|
|
7353
7421
|
buildPreviewStopXmlV11: () => buildPreviewStopXmlV11,
|
|
@@ -7375,6 +7443,7 @@ __export(index_exports, {
|
|
|
7375
7443
|
createDebugGateLogger: () => createDebugGateLogger,
|
|
7376
7444
|
createDiagnosticsBundle: () => createDiagnosticsBundle,
|
|
7377
7445
|
createLogger: () => createLogger,
|
|
7446
|
+
createMjpegBoundary: () => createMjpegBoundary,
|
|
7378
7447
|
createNativeStream: () => createNativeStream,
|
|
7379
7448
|
createNullLogger: () => createNullLogger,
|
|
7380
7449
|
createReplayHttpServer: () => createReplayHttpServer,
|
|
@@ -7382,8 +7451,10 @@ __export(index_exports, {
|
|
|
7382
7451
|
createRfc4571TcpServerForReplay: () => createRfc4571TcpServerForReplay,
|
|
7383
7452
|
createRtspProxyServer: () => createRtspProxyServer,
|
|
7384
7453
|
createTaggedLogger: () => createTaggedLogger,
|
|
7454
|
+
decideVideoclipTranscodeMode: () => decideVideoclipTranscodeMode,
|
|
7385
7455
|
decodeHeader: () => decodeHeader,
|
|
7386
7456
|
deriveAesKey: () => deriveAesKey,
|
|
7457
|
+
detectIosClient: () => detectIosClient,
|
|
7387
7458
|
detectVideoCodecFromNal: () => detectVideoCodecFromNal,
|
|
7388
7459
|
discoverReolinkDevices: () => discoverReolinkDevices,
|
|
7389
7460
|
discoverViaHttpScan: () => discoverViaHttpScan,
|
|
@@ -7396,10 +7467,13 @@ __export(index_exports, {
|
|
|
7396
7467
|
extractSpsFromAnnexB: () => extractSpsFromAnnexB,
|
|
7397
7468
|
extractVpsFromAnnexB: () => extractVpsFromAnnexB,
|
|
7398
7469
|
flattenAbilitiesForChannel: () => flattenAbilitiesForChannel,
|
|
7470
|
+
formatMjpegFrame: () => formatMjpegFrame,
|
|
7399
7471
|
getConstructedVideoStreamOptions: () => getConstructedVideoStreamOptions,
|
|
7400
7472
|
getGlobalLogger: () => getGlobalLogger,
|
|
7401
7473
|
getH265NalType: () => getH265NalType,
|
|
7474
|
+
getMjpegContentType: () => getMjpegContentType,
|
|
7402
7475
|
getVideoStream: () => getVideoStream,
|
|
7476
|
+
getVideoclipClientInfo: () => getVideoclipClientInfo,
|
|
7403
7477
|
getXmlText: () => getXmlText,
|
|
7404
7478
|
hasH265StartCodes: () => hasStartCodes2,
|
|
7405
7479
|
hasStartCodes: () => hasStartCodes,
|
|
@@ -7711,9 +7785,9 @@ function buildC2dS(params) {
|
|
|
7711
7785
|
return buildP2pXml(`<C2D_S><to><port>${params.clientPort}</port></to></C2D_S>`);
|
|
7712
7786
|
}
|
|
7713
7787
|
function buildC2mQ(params) {
|
|
7714
|
-
const
|
|
7788
|
+
const os2 = params.os ?? "MAC";
|
|
7715
7789
|
return buildP2pXml(
|
|
7716
|
-
`<C2M_Q><uid>${xmlEscape(params.uid)}</uid><p>${xmlEscape(
|
|
7790
|
+
`<C2M_Q><uid>${xmlEscape(params.uid)}</uid><p>${xmlEscape(os2)}</p></C2M_Q>`
|
|
7717
7791
|
);
|
|
7718
7792
|
}
|
|
7719
7793
|
function parseIpPortBlock(tag, body) {
|
|
@@ -7739,11 +7813,11 @@ function parseM2cQr(xml) {
|
|
|
7739
7813
|
return { ...reg ? { reg } : {}, ...relay ? { relay } : {}, ...log ? { log } : {}, ...t ? { t } : {} };
|
|
7740
7814
|
}
|
|
7741
7815
|
function buildC2rC(params) {
|
|
7742
|
-
const
|
|
7816
|
+
const os2 = params.os ?? "MAC";
|
|
7743
7817
|
const debug = params.debug ?? false;
|
|
7744
7818
|
const rev = params.revision != null ? `<r>${params.revision}</r>` : "";
|
|
7745
7819
|
return buildP2pXml(
|
|
7746
|
-
`<C2R_C><uid>${xmlEscape(params.uid)}</uid><cli><ip>${xmlEscape(params.cli.ip)}</ip><port>${params.cli.port}</port></cli><relay><ip>${xmlEscape(params.relay.ip)}</ip><port>${params.relay.port}</port></relay><cid>${params.cid}</cid><debug>${debug ? "true" : "false"}</debug><family>${params.family}</family><p>${xmlEscape(
|
|
7820
|
+
`<C2R_C><uid>${xmlEscape(params.uid)}</uid><cli><ip>${xmlEscape(params.cli.ip)}</ip><port>${params.cli.port}</port></cli><relay><ip>${xmlEscape(params.relay.ip)}</ip><port>${params.relay.port}</port></relay><cid>${params.cid}</cid><debug>${debug ? "true" : "false"}</debug><family>${params.family}</family><p>${xmlEscape(os2)}</p>` + rev + `</C2R_C>`
|
|
7747
7821
|
);
|
|
7748
7822
|
}
|
|
7749
7823
|
function parseR2cCr(xml) {
|
|
@@ -7787,9 +7861,9 @@ function buildC2rCfm(params) {
|
|
|
7787
7861
|
);
|
|
7788
7862
|
}
|
|
7789
7863
|
function buildC2dC(params) {
|
|
7790
|
-
const
|
|
7864
|
+
const os2 = params.os ?? "MAC";
|
|
7791
7865
|
return buildP2pXml(
|
|
7792
|
-
`<C2D_C><uid>${xmlEscape(params.uid)}</uid><cli><port>${params.clientPort}</port></cli><cid>${params.cid}</cid><mtu>${params.mtu}</mtu><debug>false</debug><p>${xmlEscape(
|
|
7866
|
+
`<C2D_C><uid>${xmlEscape(params.uid)}</uid><cli><port>${params.clientPort}</port></cli><cid>${params.cid}</cid><mtu>${params.mtu}</mtu><debug>false</debug><p>${xmlEscape(os2)}</p></C2D_C>`
|
|
7793
7867
|
);
|
|
7794
7868
|
}
|
|
7795
7869
|
function buildC2dHb(params) {
|
|
@@ -12712,8 +12786,8 @@ var BaichuanRtspServer = class _BaichuanRtspServer extends import_node_events4.E
|
|
|
12712
12786
|
`[BaichuanRtspServer] Failed to start native stream for SDP priming: ${error}`
|
|
12713
12787
|
);
|
|
12714
12788
|
}
|
|
12715
|
-
const { hasParamSets } = this.flow.getFmtp();
|
|
12716
|
-
if (!
|
|
12789
|
+
const { hasParamSets: hasParamSets2 } = this.flow.getFmtp();
|
|
12790
|
+
if (!hasParamSets2) {
|
|
12717
12791
|
const primingMs = this.api.client.getTransport() === "udp" ? 4e3 : 1500;
|
|
12718
12792
|
try {
|
|
12719
12793
|
await Promise.race([
|
|
@@ -12725,12 +12799,12 @@ var BaichuanRtspServer = class _BaichuanRtspServer extends import_node_events4.E
|
|
|
12725
12799
|
}
|
|
12726
12800
|
}
|
|
12727
12801
|
{
|
|
12728
|
-
const { fmtp, hasParamSets } = this.flow.getFmtp();
|
|
12802
|
+
const { fmtp, hasParamSets: hasParamSets2 } = this.flow.getFmtp();
|
|
12729
12803
|
const fmtpPreview = fmtp.length > 160 ? `${fmtp.slice(0, 160)}...` : fmtp;
|
|
12730
12804
|
this.logger.info(
|
|
12731
|
-
`[BaichuanRtspServer] DESCRIBE SDP for ${clientId} path=${this.path} codec=${this.flow.sdpCodec} hasParamSets=${
|
|
12805
|
+
`[BaichuanRtspServer] DESCRIBE SDP for ${clientId} path=${this.path} codec=${this.flow.sdpCodec} hasParamSets=${hasParamSets2} fmtp=${fmtpPreview}`
|
|
12732
12806
|
);
|
|
12733
|
-
if (!
|
|
12807
|
+
if (!hasParamSets2) {
|
|
12734
12808
|
this.rtspDebugLog(
|
|
12735
12809
|
`DESCRIBE responding without parameter sets yet (client=${clientId}, path=${this.path}, flow=${this.flow.key})`
|
|
12736
12810
|
);
|
|
@@ -12906,8 +12980,8 @@ var BaichuanRtspServer = class _BaichuanRtspServer extends import_node_events4.E
|
|
|
12906
12980
|
}
|
|
12907
12981
|
sdp += `a=control:track0\r
|
|
12908
12982
|
`;
|
|
12909
|
-
const { fmtp, hasParamSets } = this.flow.getFmtp();
|
|
12910
|
-
if (!
|
|
12983
|
+
const { fmtp, hasParamSets: hasParamSets2 } = this.flow.getFmtp();
|
|
12984
|
+
if (!hasParamSets2) {
|
|
12911
12985
|
this.logger.warn(
|
|
12912
12986
|
`[BaichuanRtspServer] SDP missing parameter sets for flow ${this.flow.key}`
|
|
12913
12987
|
);
|
|
@@ -13563,8 +13637,8 @@ var BaichuanRtspServer = class _BaichuanRtspServer extends import_node_events4.E
|
|
|
13563
13637
|
const normalizedVideoData = videoType === "H264" ? convertToAnnexB(frame.data) : convertToAnnexB2(frame.data);
|
|
13564
13638
|
if (!resources?.seenFirstVideoKeyframe) {
|
|
13565
13639
|
if (videoType === "H265") {
|
|
13566
|
-
const { hasParamSets } = this.flow.getFmtp();
|
|
13567
|
-
if (!
|
|
13640
|
+
const { hasParamSets: hasParamSets2 } = this.flow.getFmtp();
|
|
13641
|
+
if (!hasParamSets2) {
|
|
13568
13642
|
if (rtspDebug && !h265WaitParamSetsLogged) {
|
|
13569
13643
|
h265WaitParamSetsLogged = true;
|
|
13570
13644
|
rtspDebugLog(
|
|
@@ -13595,8 +13669,8 @@ var BaichuanRtspServer = class _BaichuanRtspServer extends import_node_events4.E
|
|
|
13595
13669
|
}
|
|
13596
13670
|
resources.seenFirstVideoKeyframe = true;
|
|
13597
13671
|
} else {
|
|
13598
|
-
const { hasParamSets } = this.flow.getFmtp();
|
|
13599
|
-
if (!
|
|
13672
|
+
const { hasParamSets: hasParamSets2 } = this.flow.getFmtp();
|
|
13673
|
+
if (!hasParamSets2) {
|
|
13600
13674
|
if (rtspDebug && !h265WaitParamSetsLogged) {
|
|
13601
13675
|
h265WaitParamSetsLogged = true;
|
|
13602
13676
|
rtspDebugLog(
|
|
@@ -13756,8 +13830,8 @@ var BaichuanRtspServer = class _BaichuanRtspServer extends import_node_events4.E
|
|
|
13756
13830
|
this.setFlowVideoType(frame.videoType, "native stream");
|
|
13757
13831
|
}
|
|
13758
13832
|
this.flow.extractParameterSets(frame.data);
|
|
13759
|
-
const { hasParamSets } = this.flow.getFmtp();
|
|
13760
|
-
if (
|
|
13833
|
+
const { hasParamSets: hasParamSets2 } = this.flow.getFmtp();
|
|
13834
|
+
if (hasParamSets2) {
|
|
13761
13835
|
this.markFirstFrameReceived();
|
|
13762
13836
|
}
|
|
13763
13837
|
},
|
|
@@ -15051,10 +15125,15 @@ var parseRecordingFilesFromXml = (xml) => {
|
|
|
15051
15125
|
if (startDt) item.startTime = startDt;
|
|
15052
15126
|
if (endDt) item.endTime = endDt;
|
|
15053
15127
|
const parsed = parseRecordingFileName(item.name ?? item.fileName);
|
|
15054
|
-
|
|
15055
|
-
|
|
15056
|
-
|
|
15057
|
-
|
|
15128
|
+
const parsedFromPath = item.fileName !== item.name ? parseRecordingFileName(item.fileName) : void 0;
|
|
15129
|
+
const bestParsed = parsedFromPath?.sizeBytes != null ? parsedFromPath : parsed;
|
|
15130
|
+
if (bestParsed) {
|
|
15131
|
+
item.parsedFileName = bestParsed;
|
|
15132
|
+
if (!item.startTime) item.startTime = bestParsed.start;
|
|
15133
|
+
if (!item.endTime) item.endTime = bestParsed.end;
|
|
15134
|
+
if (item.sizeBytes == null && bestParsed.sizeBytes != null) {
|
|
15135
|
+
item.sizeBytes = bestParsed.sizeBytes;
|
|
15136
|
+
}
|
|
15058
15137
|
}
|
|
15059
15138
|
item.detectionClasses = buildDetectionClasses(parsed, item.recordType);
|
|
15060
15139
|
out.push(item);
|
|
@@ -15081,6 +15160,9 @@ var parseRecordingFilesFromXml = (xml) => {
|
|
|
15081
15160
|
item.parsedFileName = parsed;
|
|
15082
15161
|
if (!item.startTime) item.startTime = parsed.start;
|
|
15083
15162
|
if (!item.endTime) item.endTime = parsed.end;
|
|
15163
|
+
if (item.sizeBytes == null && parsed.sizeBytes != null) {
|
|
15164
|
+
item.sizeBytes = parsed.sizeBytes;
|
|
15165
|
+
}
|
|
15084
15166
|
}
|
|
15085
15167
|
item.detectionClasses = buildDetectionClasses(parsed, item.recordType);
|
|
15086
15168
|
out.push(item);
|
|
@@ -16395,6 +16477,10 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
|
|
|
16395
16477
|
* Value: client, refCount, createdAt
|
|
16396
16478
|
*/
|
|
16397
16479
|
dedicatedClients = /* @__PURE__ */ new Map();
|
|
16480
|
+
/** Keep replay dedicated sockets warm briefly to reduce clip switch latency. */
|
|
16481
|
+
// Keep replay sockets warm briefly for fast clip switches, but tear down quickly
|
|
16482
|
+
// when clients stop requesting HLS segments (avoids looking like a stuck session).
|
|
16483
|
+
static REPLAY_DEDICATED_KEEPALIVE_MS = 1e4;
|
|
16398
16484
|
/**
|
|
16399
16485
|
* Get a summary of currently active dedicated sessions.
|
|
16400
16486
|
* Useful for debugging/logging to see how many sockets are open.
|
|
@@ -16504,22 +16590,35 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
|
|
|
16504
16590
|
* Process the replay queue - executes operations one at a time.
|
|
16505
16591
|
*/
|
|
16506
16592
|
async processReplayQueue() {
|
|
16507
|
-
if (this.replayQueueProcessing)
|
|
16593
|
+
if (this.replayQueueProcessing) {
|
|
16594
|
+
this.logger?.debug?.(
|
|
16595
|
+
`[ReplayQueue] Already processing, queue length: ${this.replayQueue.length}`
|
|
16596
|
+
);
|
|
16597
|
+
return;
|
|
16598
|
+
}
|
|
16508
16599
|
this.replayQueueProcessing = true;
|
|
16600
|
+
this.logger?.debug?.(
|
|
16601
|
+
`[ReplayQueue] Starting queue processing, items: ${this.replayQueue.length}`
|
|
16602
|
+
);
|
|
16509
16603
|
while (this.replayQueue.length > 0) {
|
|
16510
16604
|
const item = this.replayQueue.shift();
|
|
16511
16605
|
if (item) {
|
|
16512
16606
|
const timeSinceLastReplay = Date.now() - this.lastReplayEndTime;
|
|
16513
16607
|
if (timeSinceLastReplay < this.REPLAY_COOLDOWN_MS) {
|
|
16514
|
-
|
|
16515
|
-
|
|
16516
|
-
);
|
|
16608
|
+
const waitTime = this.REPLAY_COOLDOWN_MS - timeSinceLastReplay;
|
|
16609
|
+
this.logger?.debug?.(`[ReplayQueue] Waiting ${waitTime}ms cooldown`);
|
|
16610
|
+
await new Promise((r) => setTimeout(r, waitTime));
|
|
16517
16611
|
}
|
|
16612
|
+
this.logger?.debug?.(
|
|
16613
|
+
`[ReplayQueue] Executing item, remaining: ${this.replayQueue.length}`
|
|
16614
|
+
);
|
|
16518
16615
|
await item.execute();
|
|
16519
16616
|
this.lastReplayEndTime = Date.now();
|
|
16617
|
+
this.logger?.debug?.(`[ReplayQueue] Item completed`);
|
|
16520
16618
|
}
|
|
16521
16619
|
}
|
|
16522
16620
|
this.replayQueueProcessing = false;
|
|
16621
|
+
this.logger?.debug?.(`[ReplayQueue] Queue processing complete`);
|
|
16523
16622
|
}
|
|
16524
16623
|
/**
|
|
16525
16624
|
* Enqueue a replay operation with optional de-duplication.
|
|
@@ -16582,14 +16681,35 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
|
|
|
16582
16681
|
this.replayQueue.push({
|
|
16583
16682
|
execute: () => {
|
|
16584
16683
|
return new Promise((releaseSlot) => {
|
|
16684
|
+
let released = false;
|
|
16685
|
+
const safeRelease = () => {
|
|
16686
|
+
if (released) return;
|
|
16687
|
+
released = true;
|
|
16688
|
+
releaseSlot();
|
|
16689
|
+
};
|
|
16690
|
+
const safetyTimeout = setTimeout(
|
|
16691
|
+
() => {
|
|
16692
|
+
if (!released) {
|
|
16693
|
+
this.logger?.warn?.(
|
|
16694
|
+
"[ReplayQueue] Safety timeout: releasing queue slot after 10 minutes"
|
|
16695
|
+
);
|
|
16696
|
+
safeRelease();
|
|
16697
|
+
}
|
|
16698
|
+
},
|
|
16699
|
+
10 * 60 * 1e3
|
|
16700
|
+
);
|
|
16585
16701
|
setup().then((result) => {
|
|
16586
16702
|
resolvePromise({
|
|
16587
16703
|
result,
|
|
16588
|
-
release: () =>
|
|
16704
|
+
release: () => {
|
|
16705
|
+
clearTimeout(safetyTimeout);
|
|
16706
|
+
safeRelease();
|
|
16707
|
+
}
|
|
16589
16708
|
});
|
|
16590
16709
|
}).catch((e) => {
|
|
16710
|
+
clearTimeout(safetyTimeout);
|
|
16591
16711
|
rejectPromise(e);
|
|
16592
|
-
|
|
16712
|
+
safeRelease();
|
|
16593
16713
|
});
|
|
16594
16714
|
});
|
|
16595
16715
|
}
|
|
@@ -16659,30 +16779,68 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
|
|
|
16659
16779
|
* immediately and create a new one. This ensures clean state for each clip.
|
|
16660
16780
|
*/
|
|
16661
16781
|
async acquireDedicatedClient(sessionKey, logger) {
|
|
16782
|
+
const log = logger ?? this.logger;
|
|
16783
|
+
const isReplayKey = sessionKey.startsWith("replay:");
|
|
16662
16784
|
const existing = this.dedicatedClients.get(sessionKey);
|
|
16663
16785
|
if (existing) {
|
|
16664
|
-
|
|
16665
|
-
|
|
16786
|
+
if (existing.idleCloseTimer) {
|
|
16787
|
+
clearTimeout(existing.idleCloseTimer);
|
|
16788
|
+
existing.idleCloseTimer = void 0;
|
|
16789
|
+
}
|
|
16790
|
+
if (existing.refCount === 0) {
|
|
16791
|
+
existing.refCount = 1;
|
|
16792
|
+
existing.lastUsedAt = Date.now();
|
|
16793
|
+
log?.debug?.(
|
|
16794
|
+
`[DedicatedClient] Reusing existing dedicated socket for sessionKey=${sessionKey}`
|
|
16795
|
+
);
|
|
16796
|
+
try {
|
|
16797
|
+
if (!existing.client.loggedIn) {
|
|
16798
|
+
await existing.client.login();
|
|
16799
|
+
}
|
|
16800
|
+
} catch {
|
|
16801
|
+
}
|
|
16802
|
+
if (existing.client.loggedIn) {
|
|
16803
|
+
return {
|
|
16804
|
+
client: existing.client,
|
|
16805
|
+
release: () => this.releaseDedicatedClient(sessionKey, logger)
|
|
16806
|
+
};
|
|
16807
|
+
}
|
|
16808
|
+
}
|
|
16809
|
+
log?.log?.(
|
|
16810
|
+
`[DedicatedClient] Closing existing socket for sessionKey=${sessionKey} (preempting active session)`
|
|
16666
16811
|
);
|
|
16667
16812
|
this.dedicatedClients.delete(sessionKey);
|
|
16668
|
-
|
|
16669
|
-
|
|
16670
|
-
|
|
16813
|
+
try {
|
|
16814
|
+
await existing.client.close({ reason: "preempted by new session" });
|
|
16815
|
+
log?.log?.(
|
|
16816
|
+
`[DedicatedClient] Old socket closed successfully for sessionKey=${sessionKey}`
|
|
16817
|
+
);
|
|
16818
|
+
} catch (e) {
|
|
16819
|
+
log?.warn?.(
|
|
16820
|
+
`[DedicatedClient] Error closing old socket for sessionKey=${sessionKey}: ${e}`
|
|
16821
|
+
);
|
|
16822
|
+
}
|
|
16671
16823
|
}
|
|
16672
|
-
|
|
16824
|
+
log?.log?.(
|
|
16825
|
+
`[DedicatedClient] Opening new dedicated socket for sessionKey=${sessionKey}`
|
|
16826
|
+
);
|
|
16673
16827
|
const dedicatedClient = new BaichuanClient({
|
|
16674
16828
|
host: this.host,
|
|
16675
16829
|
username: this.username,
|
|
16676
16830
|
password: this.password,
|
|
16677
|
-
logger:
|
|
16831
|
+
logger: log,
|
|
16678
16832
|
debugOptions: this.client.getDebugConfig?.()
|
|
16679
16833
|
});
|
|
16680
16834
|
await dedicatedClient.login();
|
|
16835
|
+
log?.log?.(
|
|
16836
|
+
`[DedicatedClient] Dedicated socket logged in for sessionKey=${sessionKey}`
|
|
16837
|
+
);
|
|
16681
16838
|
this.dedicatedClients.set(sessionKey, {
|
|
16682
16839
|
client: dedicatedClient,
|
|
16683
16840
|
refCount: 1,
|
|
16684
|
-
|
|
16685
|
-
|
|
16841
|
+
createdAt: Date.now(),
|
|
16842
|
+
lastUsedAt: Date.now(),
|
|
16843
|
+
idleCloseTimer: void 0
|
|
16686
16844
|
});
|
|
16687
16845
|
return {
|
|
16688
16846
|
client: dedicatedClient,
|
|
@@ -16694,15 +16852,81 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
|
|
|
16694
16852
|
* This ensures clean teardown at the end of each clip.
|
|
16695
16853
|
*/
|
|
16696
16854
|
async releaseDedicatedClient(sessionKey, logger) {
|
|
16855
|
+
const log = logger ?? this.logger;
|
|
16697
16856
|
const entry = this.dedicatedClients.get(sessionKey);
|
|
16698
16857
|
if (!entry) return;
|
|
16858
|
+
entry.refCount = Math.max(0, entry.refCount - 1);
|
|
16859
|
+
entry.lastUsedAt = Date.now();
|
|
16860
|
+
if (entry.refCount > 0) return;
|
|
16861
|
+
const isReplayKey = sessionKey.startsWith("replay:");
|
|
16862
|
+
const allowReplayKeepAlive = /^replay:[^:]+$/.test(sessionKey);
|
|
16863
|
+
if (isReplayKey && allowReplayKeepAlive) {
|
|
16864
|
+
if (entry.idleCloseTimer) return;
|
|
16865
|
+
entry.idleCloseTimer = setTimeout(async () => {
|
|
16866
|
+
const current = this.dedicatedClients.get(sessionKey);
|
|
16867
|
+
if (!current) return;
|
|
16868
|
+
if (current.refCount > 0) return;
|
|
16869
|
+
this.dedicatedClients.delete(sessionKey);
|
|
16870
|
+
log?.debug?.(
|
|
16871
|
+
`[DedicatedClient] Closing idle replay socket for sessionKey=${sessionKey} (keepalive expired)`
|
|
16872
|
+
);
|
|
16873
|
+
try {
|
|
16874
|
+
await current.client.close({
|
|
16875
|
+
reason: "replay idle keepalive expired"
|
|
16876
|
+
});
|
|
16877
|
+
} catch {
|
|
16878
|
+
}
|
|
16879
|
+
}, _ReolinkBaichuanApi.REPLAY_DEDICATED_KEEPALIVE_MS);
|
|
16880
|
+
return;
|
|
16881
|
+
}
|
|
16699
16882
|
this.dedicatedClients.delete(sessionKey);
|
|
16700
|
-
|
|
16883
|
+
log?.log?.(
|
|
16884
|
+
`[DedicatedClient] Closing socket for sessionKey=${sessionKey} (session ended)`
|
|
16885
|
+
);
|
|
16701
16886
|
try {
|
|
16702
16887
|
await entry.client.close({ reason: "dedicated session ended" });
|
|
16888
|
+
log?.log?.(
|
|
16889
|
+
`[DedicatedClient] Socket closed successfully for sessionKey=${sessionKey}`
|
|
16890
|
+
);
|
|
16891
|
+
} catch (e) {
|
|
16892
|
+
log?.warn?.(
|
|
16893
|
+
`[DedicatedClient] Error closing socket for sessionKey=${sessionKey}: ${e}`
|
|
16894
|
+
);
|
|
16895
|
+
}
|
|
16896
|
+
}
|
|
16897
|
+
/**
|
|
16898
|
+
* Force-close a dedicated client if it exists.
|
|
16899
|
+
* This is called BEFORE entering the queue to immediately terminate any existing stream
|
|
16900
|
+
* for the same sessionKey. The existing stream will receive an error, release its queue slot,
|
|
16901
|
+
* and the new request can then proceed.
|
|
16902
|
+
*
|
|
16903
|
+
* @param sessionKey - The session key to force-close (e.g., `replay:${deviceId}`)
|
|
16904
|
+
* @param logger - Optional logger
|
|
16905
|
+
* @returns true if a client was closed, false if no client existed
|
|
16906
|
+
*/
|
|
16907
|
+
async forceCloseDedicatedClient(sessionKey, logger) {
|
|
16908
|
+
const log = logger ?? this.logger;
|
|
16909
|
+
const entry = this.dedicatedClients.get(sessionKey);
|
|
16910
|
+
if (!entry) return false;
|
|
16911
|
+
if (entry.idleCloseTimer) {
|
|
16912
|
+
clearTimeout(entry.idleCloseTimer);
|
|
16913
|
+
entry.idleCloseTimer = void 0;
|
|
16914
|
+
}
|
|
16915
|
+
log?.log?.(
|
|
16916
|
+
`[DedicatedClient] Force-closing existing socket for sessionKey=${sessionKey} (new request preempting)`
|
|
16917
|
+
);
|
|
16918
|
+
this.dedicatedClients.delete(sessionKey);
|
|
16919
|
+
try {
|
|
16920
|
+
await entry.client.close({ reason: "preempted by new request" });
|
|
16921
|
+
log?.log?.(
|
|
16922
|
+
`[DedicatedClient] Force-close complete for sessionKey=${sessionKey}`
|
|
16923
|
+
);
|
|
16703
16924
|
} catch (e) {
|
|
16704
|
-
|
|
16925
|
+
log?.warn?.(
|
|
16926
|
+
`[DedicatedClient] Error during force-close for sessionKey=${sessionKey}: ${e}`
|
|
16927
|
+
);
|
|
16705
16928
|
}
|
|
16929
|
+
return true;
|
|
16706
16930
|
}
|
|
16707
16931
|
/**
|
|
16708
16932
|
* Create a dedicated Baichuan client session for streaming.
|
|
@@ -16739,6 +16963,9 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
|
|
|
16739
16963
|
await Promise.allSettled(
|
|
16740
16964
|
entries.map(async ([key, entry]) => {
|
|
16741
16965
|
try {
|
|
16966
|
+
if (entry.idleCloseTimer) {
|
|
16967
|
+
clearTimeout(entry.idleCloseTimer);
|
|
16968
|
+
}
|
|
16742
16969
|
this.logger?.debug?.(`[DedicatedClient] Cleanup: closing ${key}`);
|
|
16743
16970
|
await entry.client.close({ reason: "API cleanup" });
|
|
16744
16971
|
} catch {
|
|
@@ -18594,7 +18821,8 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
|
|
|
18594
18821
|
channel,
|
|
18595
18822
|
payloadXml: stopXml,
|
|
18596
18823
|
messageClass: BC_CLASS_MODERN_24,
|
|
18597
|
-
timeoutMs:
|
|
18824
|
+
timeoutMs: 2e3,
|
|
18825
|
+
// Short timeout - if socket is closed, fail fast
|
|
18598
18826
|
internal: true
|
|
18599
18827
|
});
|
|
18600
18828
|
} catch {
|
|
@@ -18730,7 +18958,8 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
|
|
|
18730
18958
|
channel,
|
|
18731
18959
|
payloadXml: stopXml,
|
|
18732
18960
|
messageClass: BC_CLASS_MODERN_24,
|
|
18733
|
-
timeoutMs:
|
|
18961
|
+
timeoutMs: 2e3,
|
|
18962
|
+
// Short timeout - if socket is closed, fail fast
|
|
18734
18963
|
internal: true
|
|
18735
18964
|
});
|
|
18736
18965
|
} catch {
|
|
@@ -19777,11 +20006,20 @@ ${stderr}`)
|
|
|
19777
20006
|
}
|
|
19778
20007
|
}
|
|
19779
20008
|
async downloadRecording(params) {
|
|
20009
|
+
this.logger?.debug?.(
|
|
20010
|
+
`[downloadRecording] Queuing download for: ${params.fileName}, channel=${params.channel}`
|
|
20011
|
+
);
|
|
19780
20012
|
return this.enqueueReplayOperation(async () => {
|
|
20013
|
+
this.logger?.debug?.(
|
|
20014
|
+
`[downloadRecording] Starting download for: ${params.fileName}`
|
|
20015
|
+
);
|
|
19781
20016
|
await this.client.login();
|
|
19782
20017
|
const channel = this.normalizeChannel(params.channel);
|
|
19783
20018
|
const uid = await this.ensureUidForRecordings(channel, params.uid);
|
|
19784
20019
|
const fileName = params.fileName;
|
|
20020
|
+
this.logger?.debug?.(
|
|
20021
|
+
`[downloadRecording] Trying fileInfoListReplayBinaryDownload for: ${fileName}`
|
|
20022
|
+
);
|
|
19785
20023
|
let replayErr;
|
|
19786
20024
|
try {
|
|
19787
20025
|
return await this.fileInfoListReplayBinaryDownload({
|
|
@@ -19792,7 +20030,13 @@ ${stderr}`)
|
|
|
19792
20030
|
});
|
|
19793
20031
|
} catch (e) {
|
|
19794
20032
|
replayErr = e;
|
|
20033
|
+
this.logger?.debug?.(
|
|
20034
|
+
`[downloadRecording] fileInfoListReplayBinaryDownload failed: ${e instanceof Error ? e.message : String(e)}`
|
|
20035
|
+
);
|
|
19795
20036
|
}
|
|
20037
|
+
this.logger?.debug?.(
|
|
20038
|
+
`[downloadRecording] Trying fileInfoListDownload for: ${fileName}`
|
|
20039
|
+
);
|
|
19796
20040
|
let downloadErr;
|
|
19797
20041
|
try {
|
|
19798
20042
|
return await this.fileInfoListDownload({
|
|
@@ -19803,7 +20047,13 @@ ${stderr}`)
|
|
|
19803
20047
|
});
|
|
19804
20048
|
} catch (e) {
|
|
19805
20049
|
downloadErr = e;
|
|
20050
|
+
this.logger?.debug?.(
|
|
20051
|
+
`[downloadRecording] fileInfoListDownload failed: ${e instanceof Error ? e.message : String(e)}`
|
|
20052
|
+
);
|
|
19806
20053
|
}
|
|
20054
|
+
this.logger?.debug?.(
|
|
20055
|
+
`[downloadRecording] Trying fileInfoListPagedDownload for: ${fileName}`
|
|
20056
|
+
);
|
|
19807
20057
|
try {
|
|
19808
20058
|
const result = await this.fileInfoListPagedDownload({
|
|
19809
20059
|
channel,
|
|
@@ -19815,6 +20065,9 @@ ${stderr}`)
|
|
|
19815
20065
|
return result;
|
|
19816
20066
|
}
|
|
19817
20067
|
} catch (e) {
|
|
20068
|
+
this.logger?.debug?.(
|
|
20069
|
+
`[downloadRecording] fileInfoListPagedDownload failed: ${e instanceof Error ? e.message : String(e)}`
|
|
20070
|
+
);
|
|
19818
20071
|
}
|
|
19819
20072
|
const replayMsg = replayErr instanceof Error ? replayErr.message : replayErr != null ? String(replayErr) : "";
|
|
19820
20073
|
const dlMsg = downloadErr instanceof Error ? downloadErr.message : downloadErr != null ? String(downloadErr) : "";
|
|
@@ -20110,7 +20363,7 @@ ${stderr}`)
|
|
|
20110
20363
|
* Convert a raw video keyframe to JPEG using ffmpeg.
|
|
20111
20364
|
*/
|
|
20112
20365
|
async convertFrameToJpeg(params) {
|
|
20113
|
-
const { spawn:
|
|
20366
|
+
const { spawn: spawn12 } = await import("child_process");
|
|
20114
20367
|
const ffmpeg = params.ffmpegPath ?? "ffmpeg";
|
|
20115
20368
|
const inputFormat = params.videoCodec === "H265" ? "hevc" : "h264";
|
|
20116
20369
|
return new Promise((resolve, reject) => {
|
|
@@ -20132,7 +20385,7 @@ ${stderr}`)
|
|
|
20132
20385
|
"2",
|
|
20133
20386
|
"pipe:1"
|
|
20134
20387
|
];
|
|
20135
|
-
const proc =
|
|
20388
|
+
const proc = spawn12(ffmpeg, args, {
|
|
20136
20389
|
stdio: ["pipe", "pipe", "pipe"]
|
|
20137
20390
|
});
|
|
20138
20391
|
const chunks = [];
|
|
@@ -20275,26 +20528,26 @@ ${stderr}`)
|
|
|
20275
20528
|
* Internal helper to mux video+audio into MP4 using ffmpeg.
|
|
20276
20529
|
*/
|
|
20277
20530
|
async muxToMp4(params) {
|
|
20278
|
-
const { spawn:
|
|
20531
|
+
const { spawn: spawn12 } = await import("child_process");
|
|
20279
20532
|
const { randomUUID: randomUUID2 } = await import("crypto");
|
|
20280
|
-
const
|
|
20281
|
-
const
|
|
20282
|
-
const
|
|
20533
|
+
const fs6 = await import("fs/promises");
|
|
20534
|
+
const os2 = await import("os");
|
|
20535
|
+
const path6 = await import("path");
|
|
20283
20536
|
const ffmpeg = params.ffmpegPath ?? "ffmpeg";
|
|
20284
|
-
const tmpDir =
|
|
20537
|
+
const tmpDir = os2.tmpdir();
|
|
20285
20538
|
const id = randomUUID2();
|
|
20286
20539
|
const videoFormat = params.videoCodec === "H265" ? "hevc" : "h264";
|
|
20287
|
-
const videoPath =
|
|
20288
|
-
const outputPath =
|
|
20540
|
+
const videoPath = path6.join(tmpDir, `reolink-${id}.${videoFormat}`);
|
|
20541
|
+
const outputPath = path6.join(tmpDir, `reolink-${id}.mp4`);
|
|
20289
20542
|
let audioPath = null;
|
|
20290
20543
|
if (params.audioData && params.audioData.length > 0 && params.audioCodec) {
|
|
20291
20544
|
const audioExt = params.audioCodec === "Aac" ? "aac" : "raw";
|
|
20292
|
-
audioPath =
|
|
20545
|
+
audioPath = path6.join(tmpDir, `reolink-${id}.${audioExt}`);
|
|
20293
20546
|
}
|
|
20294
20547
|
try {
|
|
20295
|
-
await
|
|
20548
|
+
await fs6.writeFile(videoPath, params.videoData);
|
|
20296
20549
|
if (audioPath && params.audioData) {
|
|
20297
|
-
await
|
|
20550
|
+
await fs6.writeFile(audioPath, params.audioData);
|
|
20298
20551
|
}
|
|
20299
20552
|
const args = ["-hide_banner", "-loglevel", "error", "-y"];
|
|
20300
20553
|
if (params.fps > 0) {
|
|
@@ -20327,7 +20580,7 @@ ${stderr}`)
|
|
|
20327
20580
|
outputPath
|
|
20328
20581
|
);
|
|
20329
20582
|
await new Promise((resolve, reject) => {
|
|
20330
|
-
const p =
|
|
20583
|
+
const p = spawn12(ffmpeg, args, { stdio: ["ignore", "ignore", "pipe"] });
|
|
20331
20584
|
let stderr = "";
|
|
20332
20585
|
p.stderr.on("data", (d) => {
|
|
20333
20586
|
stderr += d.toString();
|
|
@@ -20347,13 +20600,13 @@ ${stderr}`)
|
|
|
20347
20600
|
}
|
|
20348
20601
|
});
|
|
20349
20602
|
});
|
|
20350
|
-
return await
|
|
20603
|
+
return await fs6.readFile(outputPath);
|
|
20351
20604
|
} finally {
|
|
20352
|
-
await
|
|
20605
|
+
await fs6.unlink(videoPath).catch(() => {
|
|
20353
20606
|
});
|
|
20354
|
-
if (audioPath) await
|
|
20607
|
+
if (audioPath) await fs6.unlink(audioPath).catch(() => {
|
|
20355
20608
|
});
|
|
20356
|
-
await
|
|
20609
|
+
await fs6.unlink(outputPath).catch(() => {
|
|
20357
20610
|
});
|
|
20358
20611
|
}
|
|
20359
20612
|
}
|
|
@@ -21887,11 +22140,13 @@ ${stderr}`)
|
|
|
21887
22140
|
* @param settings - Floodlight settings to apply
|
|
21888
22141
|
*
|
|
21889
22142
|
* @example
|
|
22143
|
+
* ```typescript
|
|
21890
22144
|
* await api.setFloodlightSettings(0, {
|
|
21891
22145
|
* duration: 300, // 5 minutes
|
|
21892
22146
|
* detectType: 'people,vehicle',
|
|
21893
22147
|
* brightness: 80,
|
|
21894
22148
|
* });
|
|
22149
|
+
* ```
|
|
21895
22150
|
*/
|
|
21896
22151
|
async setFloodlightSettings(channel, settings) {
|
|
21897
22152
|
const ch = this.normalizeChannel(channel);
|
|
@@ -24033,11 +24288,13 @@ ${scheduleItems}
|
|
|
24033
24288
|
*/
|
|
24034
24289
|
async createRecordingReplayMp4Stream(params) {
|
|
24035
24290
|
const logger = params.logger ?? this.logger;
|
|
24291
|
+
const useMpegTsMuxer = params.useMpegTsMuxer ?? true;
|
|
24036
24292
|
const parsed = parseRecordingFileName(params.fileName);
|
|
24037
24293
|
const durationMs = parsed?.durationMs ?? 3e5;
|
|
24294
|
+
const fps = parsed?.framerate && parsed.framerate > 0 ? parsed.framerate : 15;
|
|
24038
24295
|
const seconds = Math.ceil(durationMs / 1e3 * 1.1);
|
|
24039
24296
|
logger?.debug?.(
|
|
24040
|
-
`[createRecordingReplayMp4Stream] Starting: channel=${params.channel}, fileName=${params.fileName}, durationMs=${durationMs}, timeoutSec=${seconds}, deviceId=${params.deviceId ?? "auto"}`
|
|
24297
|
+
`[createRecordingReplayMp4Stream] Starting: channel=${params.channel}, fileName=${params.fileName}, durationMs=${durationMs}, fps=${fps}, timeoutSec=${seconds}, deviceId=${params.deviceId ?? "auto"}, useMpegTsMuxer=${useMpegTsMuxer}`
|
|
24041
24298
|
);
|
|
24042
24299
|
const startParams = {
|
|
24043
24300
|
channel: params.channel,
|
|
@@ -24046,39 +24303,84 @@ ${scheduleItems}
|
|
|
24046
24303
|
...params.isNvr != null ? { isNvr: params.isNvr } : {},
|
|
24047
24304
|
...params.deviceId != null ? { deviceId: params.deviceId } : {}
|
|
24048
24305
|
};
|
|
24049
|
-
const { result: replayResult, release: releaseQueueSlot } = await this.enqueueStreamingReplayOperation(
|
|
24050
|
-
|
|
24051
|
-
|
|
24306
|
+
const { result: replayResult, release: releaseQueueSlot } = await this.enqueueStreamingReplayOperation(async () => {
|
|
24307
|
+
try {
|
|
24308
|
+
return await this.startRecordingReplayStream(startParams);
|
|
24309
|
+
} catch (e) {
|
|
24310
|
+
if (!params.deviceId) throw e;
|
|
24311
|
+
const sessionKey = `replay:${params.deviceId}`;
|
|
24312
|
+
logger?.debug?.(
|
|
24313
|
+
`[createRecordingReplayMp4Stream] startRecordingReplayStream failed; force-closing dedicated client and retrying once`
|
|
24314
|
+
);
|
|
24315
|
+
await this.forceCloseDedicatedClient(sessionKey, logger);
|
|
24316
|
+
return await this.startRecordingReplayStream(startParams);
|
|
24317
|
+
}
|
|
24318
|
+
});
|
|
24052
24319
|
const { stream, stop: stopReplay } = replayResult;
|
|
24053
24320
|
const input = new import_node_stream.PassThrough();
|
|
24054
24321
|
const output = new import_node_stream.PassThrough();
|
|
24322
|
+
const H264_AUD = Buffer.from([0, 0, 0, 1, 9, 240]);
|
|
24055
24323
|
let tsMuxer = null;
|
|
24056
24324
|
let ff = null;
|
|
24057
24325
|
let ended = false;
|
|
24058
24326
|
let frameCount = 0;
|
|
24059
24327
|
const startFfmpeg = (videoType) => {
|
|
24060
24328
|
if (ff) return;
|
|
24329
|
+
const needsTranscode = videoType === "H265" && params.transcodeH265ToH264 === true;
|
|
24061
24330
|
logger?.debug?.(
|
|
24062
|
-
`[createRecordingReplayMp4Stream] Starting ffmpeg with videoType=${videoType}`
|
|
24331
|
+
`[createRecordingReplayMp4Stream] Starting ffmpeg with videoType=${videoType}, transcode=${needsTranscode}, useMpegTsMuxer=${useMpegTsMuxer}, fps=${fps}`
|
|
24063
24332
|
);
|
|
24064
|
-
|
|
24065
|
-
|
|
24066
|
-
|
|
24067
|
-
|
|
24068
|
-
|
|
24069
|
-
|
|
24070
|
-
|
|
24071
|
-
|
|
24072
|
-
|
|
24073
|
-
|
|
24074
|
-
|
|
24075
|
-
|
|
24076
|
-
|
|
24077
|
-
|
|
24078
|
-
|
|
24079
|
-
|
|
24080
|
-
|
|
24081
|
-
|
|
24333
|
+
let args;
|
|
24334
|
+
if (useMpegTsMuxer) {
|
|
24335
|
+
MpegTsMuxer.resetCounters();
|
|
24336
|
+
tsMuxer = new MpegTsMuxer({ videoType });
|
|
24337
|
+
args = [
|
|
24338
|
+
"-hide_banner",
|
|
24339
|
+
"-loglevel",
|
|
24340
|
+
"error",
|
|
24341
|
+
"-f",
|
|
24342
|
+
"mpegts",
|
|
24343
|
+
"-i",
|
|
24344
|
+
"pipe:0",
|
|
24345
|
+
// Video codec: transcode H.265→H.264 if requested, otherwise copy
|
|
24346
|
+
...needsTranscode ? ["-c:v", "libx264", "-preset", "ultrafast", "-crf", "23"] : ["-c", "copy"],
|
|
24347
|
+
// frag_keyframe: create new fragment at each keyframe
|
|
24348
|
+
// empty_moov: write ftyp/moov immediately (required for streaming)
|
|
24349
|
+
// default_base_moof: required for iOS Media Source Extensions
|
|
24350
|
+
// negative_cts_offsets: fixes some iOS playback issues
|
|
24351
|
+
"-movflags",
|
|
24352
|
+
"frag_keyframe+empty_moov+default_base_moof+negative_cts_offsets",
|
|
24353
|
+
"-f",
|
|
24354
|
+
"mp4",
|
|
24355
|
+
"pipe:1"
|
|
24356
|
+
];
|
|
24357
|
+
} else {
|
|
24358
|
+
const inputFormat = videoType === "H265" ? "hevc" : "h264";
|
|
24359
|
+
args = [
|
|
24360
|
+
"-hide_banner",
|
|
24361
|
+
"-loglevel",
|
|
24362
|
+
"error",
|
|
24363
|
+
"-fflags",
|
|
24364
|
+
"+genpts",
|
|
24365
|
+
"-r",
|
|
24366
|
+
String(fps),
|
|
24367
|
+
"-f",
|
|
24368
|
+
inputFormat,
|
|
24369
|
+
"-i",
|
|
24370
|
+
"pipe:0",
|
|
24371
|
+
// Video codec: transcode H.265→H.264 if requested, otherwise copy
|
|
24372
|
+
...needsTranscode ? ["-c:v", "libx264", "-preset", "ultrafast", "-crf", "23"] : ["-c", "copy"],
|
|
24373
|
+
// frag_keyframe: create new fragment at each keyframe
|
|
24374
|
+
// empty_moov: write ftyp/moov immediately (required for streaming)
|
|
24375
|
+
// default_base_moof: required for iOS Media Source Extensions
|
|
24376
|
+
// negative_cts_offsets: fixes some iOS playback issues
|
|
24377
|
+
"-movflags",
|
|
24378
|
+
"frag_keyframe+empty_moov+default_base_moof+negative_cts_offsets",
|
|
24379
|
+
"-f",
|
|
24380
|
+
"mp4",
|
|
24381
|
+
"pipe:1"
|
|
24382
|
+
];
|
|
24383
|
+
}
|
|
24082
24384
|
ff = (0, import_node_child_process3.spawn)("ffmpeg", args, { stdio: ["pipe", "pipe", "pipe"] });
|
|
24083
24385
|
if (!ff.stdin || !ff.stdout || !ff.stderr) {
|
|
24084
24386
|
throw new Error("ffmpeg stdio streams not available");
|
|
@@ -24116,17 +24418,19 @@ ${scheduleItems}
|
|
|
24116
24418
|
const stopAll = async () => {
|
|
24117
24419
|
if (ended) return;
|
|
24118
24420
|
ended = true;
|
|
24421
|
+
releaseQueueSlot();
|
|
24119
24422
|
logger?.debug?.(
|
|
24120
24423
|
`[createRecordingReplayMp4Stream] Stopping stream, frames=${frameCount}`
|
|
24121
24424
|
);
|
|
24122
|
-
|
|
24123
|
-
|
|
24124
|
-
|
|
24125
|
-
|
|
24126
|
-
|
|
24127
|
-
|
|
24128
|
-
|
|
24129
|
-
|
|
24425
|
+
const cleanupPromises = [];
|
|
24426
|
+
cleanupPromises.push(
|
|
24427
|
+
stopReplay().catch(() => {
|
|
24428
|
+
})
|
|
24429
|
+
);
|
|
24430
|
+
cleanupPromises.push(
|
|
24431
|
+
stream.stop().catch(() => {
|
|
24432
|
+
})
|
|
24433
|
+
);
|
|
24130
24434
|
try {
|
|
24131
24435
|
input.end();
|
|
24132
24436
|
} catch {
|
|
@@ -24139,7 +24443,11 @@ ${scheduleItems}
|
|
|
24139
24443
|
output.end();
|
|
24140
24444
|
} catch {
|
|
24141
24445
|
}
|
|
24142
|
-
|
|
24446
|
+
await Promise.race([
|
|
24447
|
+
Promise.all(cleanupPromises),
|
|
24448
|
+
new Promise((resolve) => setTimeout(resolve, 2e3))
|
|
24449
|
+
// Max 2s for cleanup
|
|
24450
|
+
]);
|
|
24143
24451
|
};
|
|
24144
24452
|
const timer = setTimeout(
|
|
24145
24453
|
() => {
|
|
@@ -24161,15 +24469,25 @@ ${scheduleItems}
|
|
|
24161
24469
|
output.destroy(e);
|
|
24162
24470
|
void stopAll();
|
|
24163
24471
|
});
|
|
24472
|
+
stream.on("close", () => {
|
|
24473
|
+
logger?.debug?.(
|
|
24474
|
+
`[createRecordingReplayMp4Stream] Stream closed, frames=${frameCount}`
|
|
24475
|
+
);
|
|
24476
|
+
clearTimeout(timer);
|
|
24477
|
+
void stopAll();
|
|
24478
|
+
});
|
|
24164
24479
|
stream.on(
|
|
24165
24480
|
"videoAccessUnit",
|
|
24166
24481
|
({ data, videoType, isKeyframe, microseconds }) => {
|
|
24167
24482
|
if (ended) return;
|
|
24168
24483
|
startFfmpeg(videoType);
|
|
24169
24484
|
frameCount++;
|
|
24170
|
-
if (tsMuxer) {
|
|
24485
|
+
if (useMpegTsMuxer && tsMuxer) {
|
|
24171
24486
|
const tsData = tsMuxer.mux(data, microseconds, isKeyframe);
|
|
24172
24487
|
input.write(tsData);
|
|
24488
|
+
} else {
|
|
24489
|
+
if (videoType === "H264") input.write(H264_AUD);
|
|
24490
|
+
input.write(data);
|
|
24173
24491
|
}
|
|
24174
24492
|
}
|
|
24175
24493
|
);
|
|
@@ -24208,19 +24526,53 @@ ${scheduleItems}
|
|
|
24208
24526
|
*/
|
|
24209
24527
|
async createRecordingDownloadMp4Stream(params) {
|
|
24210
24528
|
const timeoutMs = params.timeoutMs ?? 12e4;
|
|
24211
|
-
const parsed = parseRecordingFileName(params.fileName);
|
|
24212
|
-
const fps = parsed?.framerate && parsed.framerate > 0 ? parsed.framerate : 15;
|
|
24213
24529
|
const channel = this.normalizeChannel(params.channel);
|
|
24214
24530
|
const uid = await this.ensureUidForRecordings(channel);
|
|
24215
|
-
const
|
|
24531
|
+
const raw = await this.downloadRecording({
|
|
24216
24532
|
channel,
|
|
24217
24533
|
uid,
|
|
24218
24534
|
fileName: params.fileName,
|
|
24219
24535
|
timeoutMs
|
|
24220
24536
|
});
|
|
24221
|
-
if (
|
|
24537
|
+
if (raw.length === 0) {
|
|
24222
24538
|
throw new Error("Downloaded recording is empty");
|
|
24223
24539
|
}
|
|
24540
|
+
const videoFrames = [];
|
|
24541
|
+
let videoType = null;
|
|
24542
|
+
const decoder = new BcMediaAnnexBDecoder({
|
|
24543
|
+
strict: false,
|
|
24544
|
+
logger: this.logger,
|
|
24545
|
+
onVideoAccessUnit: ({ annexB: annexB2, microseconds }) => {
|
|
24546
|
+
videoFrames.push({ annexB: annexB2, microseconds });
|
|
24547
|
+
}
|
|
24548
|
+
});
|
|
24549
|
+
decoder.push(raw);
|
|
24550
|
+
const stats = decoder.getStats();
|
|
24551
|
+
videoType = stats.videoType;
|
|
24552
|
+
if (videoFrames.length === 0) {
|
|
24553
|
+
throw new Error("Downloaded recording has no video frames");
|
|
24554
|
+
}
|
|
24555
|
+
let fps;
|
|
24556
|
+
if (videoFrames.length >= 2) {
|
|
24557
|
+
const firstTs = videoFrames[0].microseconds;
|
|
24558
|
+
const lastTs = videoFrames[videoFrames.length - 1].microseconds;
|
|
24559
|
+
const durationUs = lastTs - firstTs;
|
|
24560
|
+
if (durationUs > 0) {
|
|
24561
|
+
const durationSeconds = durationUs / 1e6;
|
|
24562
|
+
fps = (videoFrames.length - 1) / durationSeconds;
|
|
24563
|
+
} else {
|
|
24564
|
+
const infoFps = stats.infos[0]?.fps;
|
|
24565
|
+
fps = infoFps && infoFps > 0 ? infoFps : 15;
|
|
24566
|
+
}
|
|
24567
|
+
} else {
|
|
24568
|
+
const infoFps = stats.infos[0]?.fps;
|
|
24569
|
+
fps = infoFps && infoFps > 0 ? infoFps : 15;
|
|
24570
|
+
}
|
|
24571
|
+
if (fps > 14 && fps < 16) fps = 15;
|
|
24572
|
+
else if (fps > 23 && fps < 26) fps = 25;
|
|
24573
|
+
else if (fps > 29 && fps < 31) fps = 30;
|
|
24574
|
+
else fps = Math.round(fps * 100) / 100;
|
|
24575
|
+
const annexB = Buffer.concat(videoFrames.map((f) => f.annexB));
|
|
24224
24576
|
const input = new import_node_stream.PassThrough();
|
|
24225
24577
|
const output = new import_node_stream.PassThrough();
|
|
24226
24578
|
let ff = null;
|
|
@@ -24292,48 +24644,380 @@ ${scheduleItems}
|
|
|
24292
24644
|
stop: stopAll
|
|
24293
24645
|
};
|
|
24294
24646
|
}
|
|
24295
|
-
// ============================================================
|
|
24296
|
-
// STANDALONE CAMERA METHODS
|
|
24297
|
-
// ============================================================
|
|
24298
|
-
// These methods are specifically designed for standalone cameras
|
|
24299
|
-
// (non-NVR) connected via TCP. They provide a simplified interface
|
|
24300
|
-
// for common operations like listing recordings, streaming playback,
|
|
24301
|
-
// downloading clips, and getting thumbnails.
|
|
24302
|
-
// ============================================================
|
|
24303
24647
|
/**
|
|
24304
|
-
*
|
|
24648
|
+
* Create an HLS (HTTP Live Streaming) session for a recording.
|
|
24305
24649
|
*
|
|
24306
|
-
* This method
|
|
24307
|
-
*
|
|
24650
|
+
* This method creates HLS segments on-the-fly from a recording replay stream.
|
|
24651
|
+
* HLS is required for iOS devices (Safari, Home app) which don't support
|
|
24652
|
+
* fragmented MP4 streaming well and require Range request support.
|
|
24653
|
+
*
|
|
24654
|
+
* The session writes HLS segments (.ts files) and playlist (.m3u8) to a
|
|
24655
|
+
* temporary directory. You must serve these files via HTTP to the client.
|
|
24308
24656
|
*
|
|
24309
24657
|
* @example
|
|
24310
24658
|
* ```ts
|
|
24311
|
-
* const
|
|
24312
|
-
*
|
|
24659
|
+
* const session = await api.createRecordingReplayHlsSession({
|
|
24660
|
+
* channel: 0,
|
|
24661
|
+
* fileName: "/mnt/sda/Mp4Record/2026-01-25/RecS03.mp4",
|
|
24662
|
+
* });
|
|
24313
24663
|
*
|
|
24314
|
-
*
|
|
24315
|
-
*
|
|
24316
|
-
*
|
|
24664
|
+
* // Serve playlist
|
|
24665
|
+
* app.get('/clip.m3u8', (req, res) => {
|
|
24666
|
+
* res.type('application/vnd.apple.mpegurl');
|
|
24667
|
+
* res.send(session.getPlaylist());
|
|
24317
24668
|
* });
|
|
24318
24669
|
*
|
|
24319
|
-
*
|
|
24320
|
-
*
|
|
24321
|
-
*
|
|
24670
|
+
* // Serve segments
|
|
24671
|
+
* app.get('/segment/:name', (req, res) => {
|
|
24672
|
+
* const data = session.getSegment(req.params.name);
|
|
24673
|
+
* if (data) {
|
|
24674
|
+
* res.type('video/mp2t');
|
|
24675
|
+
* res.send(data);
|
|
24676
|
+
* } else {
|
|
24677
|
+
* res.status(404).end();
|
|
24678
|
+
* }
|
|
24679
|
+
* });
|
|
24680
|
+
*
|
|
24681
|
+
* // Cleanup when done
|
|
24682
|
+
* await session.stop();
|
|
24322
24683
|
* ```
|
|
24323
24684
|
*/
|
|
24324
|
-
async
|
|
24325
|
-
const
|
|
24326
|
-
const
|
|
24327
|
-
const
|
|
24328
|
-
|
|
24329
|
-
|
|
24330
|
-
|
|
24331
|
-
|
|
24332
|
-
|
|
24333
|
-
|
|
24334
|
-
|
|
24335
|
-
|
|
24336
|
-
|
|
24685
|
+
async createRecordingReplayHlsSession(params) {
|
|
24686
|
+
const logger = params.logger ?? this.logger;
|
|
24687
|
+
const hlsSegmentDuration = params.hlsSegmentDuration ?? 4;
|
|
24688
|
+
const os2 = await import("os");
|
|
24689
|
+
const path6 = await import("path");
|
|
24690
|
+
const fs6 = await import("fs/promises");
|
|
24691
|
+
const crypto3 = await import("crypto");
|
|
24692
|
+
const tempDir = path6.join(
|
|
24693
|
+
os2.tmpdir(),
|
|
24694
|
+
`reolink-hls-${crypto3.randomBytes(8).toString("hex")}`
|
|
24695
|
+
);
|
|
24696
|
+
await fs6.mkdir(tempDir, { recursive: true });
|
|
24697
|
+
const playlistPath = path6.join(tempDir, "playlist.m3u8");
|
|
24698
|
+
const segmentPattern = path6.join(tempDir, "segment_%03d.ts");
|
|
24699
|
+
const parsed = parseRecordingFileName(params.fileName);
|
|
24700
|
+
const durationMs = parsed?.durationMs ?? 3e5;
|
|
24701
|
+
const fps = parsed?.framerate && parsed.framerate > 0 ? parsed.framerate : 15;
|
|
24702
|
+
const seconds = Math.ceil(durationMs / 1e3 * 1.1);
|
|
24703
|
+
logger?.debug?.(
|
|
24704
|
+
`[createRecordingReplayHlsSession] Starting: channel=${params.channel}, fileName=${params.fileName}, durationMs=${durationMs}, hlsSegmentDuration=${hlsSegmentDuration}`
|
|
24705
|
+
);
|
|
24706
|
+
const startParams = {
|
|
24707
|
+
channel: params.channel,
|
|
24708
|
+
fileName: params.fileName,
|
|
24709
|
+
logger,
|
|
24710
|
+
...params.isNvr != null ? { isNvr: params.isNvr } : {},
|
|
24711
|
+
...params.deviceId != null ? { deviceId: params.deviceId } : {}
|
|
24712
|
+
};
|
|
24713
|
+
const { result: replayResult, release: releaseQueueSlot } = await this.enqueueStreamingReplayOperation(async () => {
|
|
24714
|
+
try {
|
|
24715
|
+
return await this.startRecordingReplayStream(startParams);
|
|
24716
|
+
} catch (e) {
|
|
24717
|
+
if (!params.deviceId) throw e;
|
|
24718
|
+
const sessionKey = `replay:${params.deviceId}`;
|
|
24719
|
+
logger?.debug?.(
|
|
24720
|
+
`[createRecordingReplayHlsSession] startRecordingReplayStream failed; force-closing dedicated client and retrying once`
|
|
24721
|
+
);
|
|
24722
|
+
await this.forceCloseDedicatedClient(sessionKey, logger);
|
|
24723
|
+
return await this.startRecordingReplayStream(startParams);
|
|
24724
|
+
}
|
|
24725
|
+
});
|
|
24726
|
+
const { stream, stop: stopReplay } = replayResult;
|
|
24727
|
+
const input = new import_node_stream.PassThrough();
|
|
24728
|
+
const H264_AUD = Buffer.from([0, 0, 0, 1, 9, 240]);
|
|
24729
|
+
let tsMuxer = null;
|
|
24730
|
+
let ff = null;
|
|
24731
|
+
let ended = false;
|
|
24732
|
+
let frameCount = 0;
|
|
24733
|
+
let readyResolve = null;
|
|
24734
|
+
let segmentWatcher = null;
|
|
24735
|
+
const readyPromise = new Promise((resolve) => {
|
|
24736
|
+
readyResolve = resolve;
|
|
24737
|
+
});
|
|
24738
|
+
const segments = /* @__PURE__ */ new Map();
|
|
24739
|
+
const startSegmentWatcher = () => {
|
|
24740
|
+
if (segmentWatcher || !readyResolve) return;
|
|
24741
|
+
const firstSegmentPath = path6.join(tempDir, "segment_000.ts");
|
|
24742
|
+
let checkCount = 0;
|
|
24743
|
+
const maxChecks = Math.ceil((hlsSegmentDuration + 2) * 10);
|
|
24744
|
+
segmentWatcher = setInterval(async () => {
|
|
24745
|
+
checkCount++;
|
|
24746
|
+
try {
|
|
24747
|
+
const stats = await fs6.stat(firstSegmentPath);
|
|
24748
|
+
if (stats.size > 256) {
|
|
24749
|
+
if (segmentWatcher) {
|
|
24750
|
+
clearInterval(segmentWatcher);
|
|
24751
|
+
segmentWatcher = null;
|
|
24752
|
+
}
|
|
24753
|
+
logger?.debug?.(
|
|
24754
|
+
`[createRecordingReplayHlsSession] First segment ready after ${checkCount * 100}ms, size=${stats.size}`
|
|
24755
|
+
);
|
|
24756
|
+
readyResolve?.();
|
|
24757
|
+
readyResolve = null;
|
|
24758
|
+
}
|
|
24759
|
+
} catch {
|
|
24760
|
+
}
|
|
24761
|
+
if (checkCount >= maxChecks && readyResolve) {
|
|
24762
|
+
if (segmentWatcher) {
|
|
24763
|
+
clearInterval(segmentWatcher);
|
|
24764
|
+
segmentWatcher = null;
|
|
24765
|
+
}
|
|
24766
|
+
logger?.debug?.(
|
|
24767
|
+
`[createRecordingReplayHlsSession] Segment watcher timeout, resolving anyway`
|
|
24768
|
+
);
|
|
24769
|
+
readyResolve?.();
|
|
24770
|
+
readyResolve = null;
|
|
24771
|
+
}
|
|
24772
|
+
}, 100);
|
|
24773
|
+
};
|
|
24774
|
+
const startFfmpeg = (videoType) => {
|
|
24775
|
+
if (ff) return;
|
|
24776
|
+
const needsTranscode = videoType === "H265" && params.transcodeH265ToH264 === true;
|
|
24777
|
+
const gop = Math.max(1, Math.round(fps * hlsSegmentDuration));
|
|
24778
|
+
logger?.log?.(
|
|
24779
|
+
`[createRecordingReplayHlsSession] Starting ffmpeg HLS with videoType=${videoType}, transcode=${needsTranscode}, hlsTime=${hlsSegmentDuration}s, fileName=${params.fileName}`
|
|
24780
|
+
);
|
|
24781
|
+
MpegTsMuxer.resetCounters();
|
|
24782
|
+
tsMuxer = new MpegTsMuxer({ videoType });
|
|
24783
|
+
const args = [
|
|
24784
|
+
"-hide_banner",
|
|
24785
|
+
"-loglevel",
|
|
24786
|
+
"error",
|
|
24787
|
+
"-f",
|
|
24788
|
+
"mpegts",
|
|
24789
|
+
"-i",
|
|
24790
|
+
"pipe:0",
|
|
24791
|
+
// Video codec
|
|
24792
|
+
...needsTranscode ? [
|
|
24793
|
+
"-c:v",
|
|
24794
|
+
"libx264",
|
|
24795
|
+
"-preset",
|
|
24796
|
+
"ultrafast",
|
|
24797
|
+
"-tune",
|
|
24798
|
+
"zerolatency",
|
|
24799
|
+
"-crf",
|
|
24800
|
+
"23",
|
|
24801
|
+
"-pix_fmt",
|
|
24802
|
+
"yuv420p",
|
|
24803
|
+
// Ensure regular GOP for consistent HLS cutting.
|
|
24804
|
+
"-g",
|
|
24805
|
+
String(gop),
|
|
24806
|
+
"-keyint_min",
|
|
24807
|
+
String(gop),
|
|
24808
|
+
"-sc_threshold",
|
|
24809
|
+
"0",
|
|
24810
|
+
// Force frequent keyframes so HLS can cut segments reliably.
|
|
24811
|
+
// Without this, ffmpeg will only cut on keyframes and segments can become huge.
|
|
24812
|
+
"-force_key_frames",
|
|
24813
|
+
`expr:gte(t,n_forced*${hlsSegmentDuration})`
|
|
24814
|
+
] : ["-c", "copy"],
|
|
24815
|
+
// HLS output options
|
|
24816
|
+
"-f",
|
|
24817
|
+
"hls",
|
|
24818
|
+
"-hls_time",
|
|
24819
|
+
String(hlsSegmentDuration),
|
|
24820
|
+
"-hls_list_size",
|
|
24821
|
+
"0",
|
|
24822
|
+
// Keep all segments in playlist
|
|
24823
|
+
"-hls_playlist_type",
|
|
24824
|
+
"event",
|
|
24825
|
+
// Growing playlist (not VOD until end)
|
|
24826
|
+
"-hls_segment_filename",
|
|
24827
|
+
segmentPattern,
|
|
24828
|
+
"-hls_flags",
|
|
24829
|
+
"independent_segments+temp_file",
|
|
24830
|
+
playlistPath
|
|
24831
|
+
];
|
|
24832
|
+
ff = (0, import_node_child_process3.spawn)("ffmpeg", args, { stdio: ["pipe", "pipe", "pipe"] });
|
|
24833
|
+
if (!ff.stdin || !ff.stderr) {
|
|
24834
|
+
throw new Error("ffmpeg stdio streams not available");
|
|
24835
|
+
}
|
|
24836
|
+
input.pipe(ff.stdin);
|
|
24837
|
+
ff.stdin.on("error", () => {
|
|
24838
|
+
});
|
|
24839
|
+
ff.stderr.on("error", () => {
|
|
24840
|
+
});
|
|
24841
|
+
input.on("error", () => {
|
|
24842
|
+
});
|
|
24843
|
+
let stderr = "";
|
|
24844
|
+
ff.stderr.on("data", (d) => stderr += String(d));
|
|
24845
|
+
ff.on("close", (code) => {
|
|
24846
|
+
if (ended) return;
|
|
24847
|
+
ended = true;
|
|
24848
|
+
if ((code ?? 0) !== 0 && stderr.trim()) {
|
|
24849
|
+
logger?.error?.(
|
|
24850
|
+
`[createRecordingReplayHlsSession] ffmpeg exited with code ${code}: ${stderr}`
|
|
24851
|
+
);
|
|
24852
|
+
} else {
|
|
24853
|
+
logger?.debug?.(
|
|
24854
|
+
`[createRecordingReplayHlsSession] ffmpeg closed normally, frames=${frameCount}`
|
|
24855
|
+
);
|
|
24856
|
+
}
|
|
24857
|
+
});
|
|
24858
|
+
};
|
|
24859
|
+
const stopAll = async () => {
|
|
24860
|
+
if (ended) return;
|
|
24861
|
+
ended = true;
|
|
24862
|
+
releaseQueueSlot();
|
|
24863
|
+
if (segmentWatcher) {
|
|
24864
|
+
clearInterval(segmentWatcher);
|
|
24865
|
+
segmentWatcher = null;
|
|
24866
|
+
}
|
|
24867
|
+
logger?.debug?.(
|
|
24868
|
+
`[createRecordingReplayHlsSession] Stopping, frames=${frameCount}`
|
|
24869
|
+
);
|
|
24870
|
+
const cleanupPromises = [];
|
|
24871
|
+
cleanupPromises.push(stopReplay().catch(() => {
|
|
24872
|
+
}));
|
|
24873
|
+
cleanupPromises.push(stream.stop().catch(() => {
|
|
24874
|
+
}));
|
|
24875
|
+
try {
|
|
24876
|
+
input.end();
|
|
24877
|
+
} catch {
|
|
24878
|
+
}
|
|
24879
|
+
try {
|
|
24880
|
+
ff?.kill("SIGKILL");
|
|
24881
|
+
} catch {
|
|
24882
|
+
}
|
|
24883
|
+
await Promise.race([
|
|
24884
|
+
Promise.all(cleanupPromises),
|
|
24885
|
+
new Promise((resolve) => setTimeout(resolve, 2e3))
|
|
24886
|
+
]);
|
|
24887
|
+
setTimeout(async () => {
|
|
24888
|
+
try {
|
|
24889
|
+
const files = await fs6.readdir(tempDir);
|
|
24890
|
+
for (const file of files) {
|
|
24891
|
+
await fs6.unlink(path6.join(tempDir, file)).catch(() => {
|
|
24892
|
+
});
|
|
24893
|
+
}
|
|
24894
|
+
await fs6.rmdir(tempDir).catch(() => {
|
|
24895
|
+
});
|
|
24896
|
+
} catch {
|
|
24897
|
+
}
|
|
24898
|
+
}, 6e4);
|
|
24899
|
+
};
|
|
24900
|
+
const timer = setTimeout(
|
|
24901
|
+
() => {
|
|
24902
|
+
logger?.debug?.(
|
|
24903
|
+
`[createRecordingReplayHlsSession] Timeout reached (${seconds}s), stopping`
|
|
24904
|
+
);
|
|
24905
|
+
void stopAll();
|
|
24906
|
+
},
|
|
24907
|
+
Math.max(1, seconds) * 1e3
|
|
24908
|
+
);
|
|
24909
|
+
stream.on("error", (e) => {
|
|
24910
|
+
logger?.error?.(
|
|
24911
|
+
`[createRecordingReplayHlsSession] Stream error: ${e.message}`
|
|
24912
|
+
);
|
|
24913
|
+
clearTimeout(timer);
|
|
24914
|
+
void stopAll();
|
|
24915
|
+
});
|
|
24916
|
+
stream.on("close", () => {
|
|
24917
|
+
logger?.debug?.(
|
|
24918
|
+
`[createRecordingReplayHlsSession] Stream closed, frames=${frameCount}`
|
|
24919
|
+
);
|
|
24920
|
+
clearTimeout(timer);
|
|
24921
|
+
try {
|
|
24922
|
+
input.end();
|
|
24923
|
+
} catch {
|
|
24924
|
+
}
|
|
24925
|
+
});
|
|
24926
|
+
stream.on(
|
|
24927
|
+
"videoAccessUnit",
|
|
24928
|
+
({ data, videoType, isKeyframe, microseconds }) => {
|
|
24929
|
+
if (ended) return;
|
|
24930
|
+
startFfmpeg(videoType);
|
|
24931
|
+
frameCount++;
|
|
24932
|
+
if (tsMuxer) {
|
|
24933
|
+
const tsData = tsMuxer.mux(data, microseconds, isKeyframe);
|
|
24934
|
+
input.write(tsData);
|
|
24935
|
+
}
|
|
24936
|
+
if (frameCount === 1) {
|
|
24937
|
+
startSegmentWatcher();
|
|
24938
|
+
}
|
|
24939
|
+
}
|
|
24940
|
+
);
|
|
24941
|
+
return {
|
|
24942
|
+
getPlaylist: () => {
|
|
24943
|
+
try {
|
|
24944
|
+
const { readFileSync } = require("fs");
|
|
24945
|
+
return readFileSync(playlistPath, "utf8");
|
|
24946
|
+
} catch {
|
|
24947
|
+
return "#EXTM3U\n#EXT-X-VERSION:3\n#EXT-X-TARGETDURATION:4\n";
|
|
24948
|
+
}
|
|
24949
|
+
},
|
|
24950
|
+
getSegment: (name) => {
|
|
24951
|
+
if (segments.has(name)) {
|
|
24952
|
+
return segments.get(name);
|
|
24953
|
+
}
|
|
24954
|
+
try {
|
|
24955
|
+
const { readFileSync } = require("fs");
|
|
24956
|
+
const segmentPath = path6.join(tempDir, name);
|
|
24957
|
+
const data = readFileSync(segmentPath);
|
|
24958
|
+
segments.set(name, data);
|
|
24959
|
+
return data;
|
|
24960
|
+
} catch {
|
|
24961
|
+
return void 0;
|
|
24962
|
+
}
|
|
24963
|
+
},
|
|
24964
|
+
listSegments: () => {
|
|
24965
|
+
try {
|
|
24966
|
+
const { readdirSync } = require("fs");
|
|
24967
|
+
return readdirSync(tempDir).filter(
|
|
24968
|
+
(f) => f.endsWith(".ts")
|
|
24969
|
+
);
|
|
24970
|
+
} catch {
|
|
24971
|
+
return [];
|
|
24972
|
+
}
|
|
24973
|
+
},
|
|
24974
|
+
waitForReady: () => readyPromise,
|
|
24975
|
+
stop: stopAll,
|
|
24976
|
+
tempDir
|
|
24977
|
+
};
|
|
24978
|
+
}
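The returned session object above also exposes waitForReady(), listSegments() and tempDir, which the JSDoc example does not show. A minimal sketch of waiting for the first segment before handing the playlist to a client, assuming `api` is an already-logged-in ReolinkBaichuanApi and reusing the recording path from the example above:

```ts
// Sketch only: `api` is assumed to be a logged-in ReolinkBaichuanApi instance
// and the recording path is the one from the JSDoc example above.
const session = await api.createRecordingReplayHlsSession({
  channel: 0,
  fileName: "/mnt/sda/Mp4Record/2026-01-25/RecS03.mp4",
  transcodeH265ToH264: true, // read by startFfmpeg() above for H.265 recordings
  hlsSegmentDuration: 4,
});

// Resolves once segment_000.ts has been written (or the internal watcher times out).
await session.waitForReady();

console.log("segments so far:", session.listSegments()); // e.g. ["segment_000.ts"]
console.log("serving from temp dir:", session.tempDir);

// Always stop the session when the client goes away so the replay stream,
// the ffmpeg process and the temp directory get cleaned up.
await session.stop();
```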
|
|
24979
|
+
// ============================================================
|
|
24980
|
+
// STANDALONE CAMERA METHODS
|
|
24981
|
+
// ============================================================
|
|
24982
|
+
// These methods are specifically designed for standalone cameras
|
|
24983
|
+
// (non-NVR) connected via TCP. They provide a simplified interface
|
|
24984
|
+
// for common operations like listing recordings, streaming playback,
|
|
24985
|
+
// downloading clips, and getting thumbnails.
|
|
24986
|
+
// ============================================================
|
|
24987
|
+
/**
|
|
24988
|
+
* List recordings from a standalone camera.
|
|
24989
|
+
*
|
|
24990
|
+
* This method is optimized for standalone cameras (non-NVR) and uses
|
|
24991
|
+
* the native Baichuan protocol to list recorded files.
|
|
24992
|
+
*
|
|
24993
|
+
* @example
|
|
24994
|
+
* ```ts
|
|
24995
|
+
* const api = new ReolinkBaichuanApi({ host: '192.168.1.100', ... });
|
|
24996
|
+
* await api.login();
|
|
24997
|
+
*
|
|
24998
|
+
* const recordings = await api.standaloneListRecordings({
|
|
24999
|
+
* start: new Date('2024-01-20T00:00:00'),
|
|
25000
|
+
* end: new Date('2024-01-21T23:59:59'),
|
|
25001
|
+
* });
|
|
25002
|
+
*
|
|
25003
|
+
* for (const file of recordings) {
|
|
25004
|
+
* console.log(file.fileName, file.startTime, file.endTime);
|
|
25005
|
+
* }
|
|
25006
|
+
* ```
|
|
25007
|
+
*/
|
|
25008
|
+
async standaloneListRecordings(params) {
|
|
25009
|
+
const channel = 0;
|
|
25010
|
+
const streamType = params.streamType === "mainStream" ? "mainStream" : "subStream";
|
|
25011
|
+
const timeoutMs = params.timeoutMs ?? 15e3;
|
|
25012
|
+
return await this.getVideoclips({
|
|
25013
|
+
channel,
|
|
25014
|
+
start: params.start,
|
|
25015
|
+
end: params.end,
|
|
25016
|
+
streamType,
|
|
25017
|
+
timeoutMs
|
|
25018
|
+
});
|
|
25019
|
+
}
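As the body above shows, standaloneListRecordings is a thin wrapper over getVideoclips pinned to channel 0. A short sketch of the optional parameters it accepts beyond the time window (streamType and timeoutMs), again assuming a logged-in `api`:

```ts
// Sketch: `api` is assumed to be a logged-in ReolinkBaichuanApi instance.
const recordings = await api.standaloneListRecordings({
  start: new Date("2024-01-20T00:00:00"),
  end: new Date("2024-01-21T23:59:59"),
  streamType: "mainStream", // any other value falls back to "subStream"
  timeoutMs: 15_000,        // the default applied above when omitted
});

// Equivalent to the delegated call in the method body:
// api.getVideoclips({ channel: 0, start, end, streamType, timeoutMs })
console.log(`found ${recordings.length} recordings`);
```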
|
|
25020
|
+
/**
|
|
24337
25021
|
* Start a streaming replay of a recorded file from a standalone camera.
|
|
24338
25022
|
*
|
|
24339
25023
|
* Returns a video stream that emits frames in real-time. The stream can be
|
|
@@ -24530,46 +25214,404 @@ ${scheduleItems}
|
|
|
24530
25214
|
}
|
|
24531
25215
|
};
|
|
24532
25216
|
|
|
24533
|
-
// src/reolink/
|
|
24534
|
-
var
|
|
24535
|
-
|
|
24536
|
-
|
|
24537
|
-
|
|
24538
|
-
|
|
24539
|
-
|
|
24540
|
-
|
|
24541
|
-
|
|
24542
|
-
|
|
24543
|
-
|
|
24544
|
-
|
|
24545
|
-
|
|
24546
|
-
|
|
24547
|
-
|
|
24548
|
-
|
|
24549
|
-
|
|
24550
|
-
|
|
24551
|
-
|
|
24552
|
-
|
|
24553
|
-
|
|
24554
|
-
|
|
24555
|
-
|
|
24556
|
-
|
|
24557
|
-
|
|
24558
|
-
|
|
24559
|
-
|
|
24560
|
-
|
|
24561
|
-
|
|
24562
|
-
|
|
24563
|
-
|
|
24564
|
-
|
|
24565
|
-
|
|
24566
|
-
|
|
24567
|
-
|
|
24568
|
-
|
|
25217
|
+
// src/reolink/baichuan/HlsSessionManager.ts
|
|
25218
|
+
var withTimeout = async (p, ms, label) => {
|
|
25219
|
+
let t;
|
|
25220
|
+
try {
|
|
25221
|
+
return await Promise.race([
|
|
25222
|
+
p,
|
|
25223
|
+
new Promise((_, reject) => {
|
|
25224
|
+
t = setTimeout(
|
|
25225
|
+
() => reject(new Error(`${label} timed out after ${ms}ms`)),
|
|
25226
|
+
ms
|
|
25227
|
+
);
|
|
25228
|
+
})
|
|
25229
|
+
]);
|
|
25230
|
+
} finally {
|
|
25231
|
+
if (t) clearTimeout(t);
|
|
25232
|
+
}
|
|
25233
|
+
};
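withTimeout simply races the supplied promise against a timer and clears the timer either way; a small usage sketch with an illustrative label and delay:

```ts
// Usage sketch for the withTimeout helper above; the label only appears
// in the rejection message.
async function fetchWithDeadline(): Promise<string> {
  const slow = new Promise<string>((resolve) =>
    setTimeout(() => resolve("done"), 5_000)
  );
  // Rejects with "example operation timed out after 1000ms" if `slow`
  // has not settled within one second.
  return withTimeout(slow, 1_000, "example operation");
}
```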
|
|
25234
|
+
var HlsSessionManager = class {
|
|
25235
|
+
constructor(api, options) {
|
|
25236
|
+
this.api = api;
|
|
25237
|
+
this.logger = options?.logger;
|
|
25238
|
+
this.sessionTtlMs = options?.sessionTtlMs ?? 5 * 60 * 1e3;
|
|
25239
|
+
const cleanupIntervalMs = options?.cleanupIntervalMs ?? 3e4;
|
|
25240
|
+
this.cleanupTimer = setInterval(() => {
|
|
25241
|
+
void this.cleanupExpiredSessions();
|
|
25242
|
+
}, cleanupIntervalMs);
|
|
25243
|
+
}
|
|
25244
|
+
sessions = /* @__PURE__ */ new Map();
|
|
25245
|
+
logger;
|
|
25246
|
+
sessionTtlMs;
|
|
25247
|
+
cleanupTimer;
|
|
25248
|
+
creationLocks = /* @__PURE__ */ new Map();
|
|
25249
|
+
/**
|
|
25250
|
+
* Handle an HLS request and return the HTTP response.
|
|
25251
|
+
*
|
|
25252
|
+
* @param params - Request parameters
|
|
25253
|
+
* @returns HTTP response ready to be sent
|
|
25254
|
+
*/
|
|
25255
|
+
async handleRequest(params) {
|
|
25256
|
+
const {
|
|
25257
|
+
sessionKey,
|
|
25258
|
+
hlsPath,
|
|
25259
|
+
requestUrl,
|
|
25260
|
+
createSession,
|
|
25261
|
+
exclusiveKeyPrefix
|
|
25262
|
+
} = params;
|
|
25263
|
+
try {
|
|
25264
|
+
let entry = this.sessions.get(sessionKey);
|
|
25265
|
+
const isPlaylist = hlsPath === "playlist.m3u8" || hlsPath === "";
|
|
25266
|
+
const isSegment = hlsPath.endsWith(".ts");
|
|
25267
|
+
if (!entry && isSegment) {
|
|
25268
|
+
this.logger?.debug?.(
|
|
25269
|
+
`[HlsSessionManager] Segment request without session (likely stale after clip switch): ${sessionKey} ${hlsPath}`
|
|
25270
|
+
);
|
|
25271
|
+
return {
|
|
25272
|
+
statusCode: 404,
|
|
25273
|
+
headers: {
|
|
25274
|
+
"Content-Type": "text/plain",
|
|
25275
|
+
"Cache-Control": "no-store, no-cache, must-revalidate, max-age=0",
|
|
25276
|
+
Pragma: "no-cache",
|
|
25277
|
+
"Retry-After": "1"
|
|
25278
|
+
},
|
|
25279
|
+
body: "Segment not found"
|
|
24569
25280
|
};
|
|
24570
|
-
|
|
24571
|
-
|
|
24572
|
-
|
|
25281
|
+
}
|
|
25282
|
+
if (!entry) {
|
|
25283
|
+
if (!isPlaylist) {
|
|
25284
|
+
return {
|
|
25285
|
+
statusCode: 400,
|
|
25286
|
+
headers: { "Content-Type": "text/plain" },
|
|
25287
|
+
body: "Invalid HLS path"
|
|
25288
|
+
};
|
|
25289
|
+
}
|
|
25290
|
+
const lockKey = exclusiveKeyPrefix ?? sessionKey;
|
|
25291
|
+
await this.withCreationLock(lockKey, async () => {
|
|
25292
|
+
entry = this.sessions.get(sessionKey);
|
|
25293
|
+
if (entry) return;
|
|
25294
|
+
if (exclusiveKeyPrefix) {
|
|
25295
|
+
await this.stopOtherSessionsWithPrefix(
|
|
25296
|
+
exclusiveKeyPrefix,
|
|
25297
|
+
sessionKey
|
|
25298
|
+
);
|
|
25299
|
+
}
|
|
25300
|
+
this.logger?.log?.(
|
|
25301
|
+
`[HlsSessionManager] Creating new session: ${sessionKey}`
|
|
25302
|
+
);
|
|
25303
|
+
this.logger?.debug?.(
|
|
25304
|
+
`[HlsSessionManager] createSession(): ${sessionKey}`
|
|
25305
|
+
);
|
|
25306
|
+
const sessionParams = await createSession();
|
|
25307
|
+
this.logger?.debug?.(
|
|
25308
|
+
`[HlsSessionManager] Starting createRecordingReplayHlsSession: ${sessionKey}`
|
|
25309
|
+
);
|
|
25310
|
+
const session = await withTimeout(
|
|
25311
|
+
this.api.createRecordingReplayHlsSession({
|
|
25312
|
+
channel: sessionParams.channel,
|
|
25313
|
+
fileName: sessionParams.fileName,
|
|
25314
|
+
...sessionParams.isNvr !== void 0 && {
|
|
25315
|
+
isNvr: sessionParams.isNvr
|
|
25316
|
+
},
|
|
25317
|
+
...this.logger && { logger: this.logger },
|
|
25318
|
+
...sessionParams.deviceId && {
|
|
25319
|
+
deviceId: sessionParams.deviceId
|
|
25320
|
+
},
|
|
25321
|
+
transcodeH265ToH264: sessionParams.transcodeH265ToH264 ?? true,
|
|
25322
|
+
hlsSegmentDuration: sessionParams.hlsSegmentDuration ?? 4
|
|
25323
|
+
}),
|
|
25324
|
+
2e4,
|
|
25325
|
+
"createRecordingReplayHlsSession"
|
|
25326
|
+
);
|
|
25327
|
+
try {
|
|
25328
|
+
await withTimeout(
|
|
25329
|
+
session.waitForReady(),
|
|
25330
|
+
12e3,
|
|
25331
|
+
"hls waitForReady"
|
|
25332
|
+
);
|
|
25333
|
+
} catch (e) {
|
|
25334
|
+
this.logger?.warn?.(
|
|
25335
|
+
`[HlsSessionManager] waitForReady did not complete in time for ${sessionKey}: ${e instanceof Error ? e.message : String(e)}`
|
|
25336
|
+
);
|
|
25337
|
+
}
|
|
25338
|
+
entry = {
|
|
25339
|
+
session,
|
|
25340
|
+
createdAt: Date.now(),
|
|
25341
|
+
lastAccessAt: Date.now()
|
|
25342
|
+
};
|
|
25343
|
+
this.sessions.set(sessionKey, entry);
|
|
25344
|
+
this.logger?.log?.(
|
|
25345
|
+
`[HlsSessionManager] Session ready: ${sessionKey}`
|
|
25346
|
+
);
|
|
25347
|
+
});
|
|
25348
|
+
entry = this.sessions.get(sessionKey);
|
|
25349
|
+
if (!entry) {
|
|
25350
|
+
return {
|
|
25351
|
+
statusCode: 500,
|
|
25352
|
+
headers: {
|
|
25353
|
+
"Content-Type": "text/plain",
|
|
25354
|
+
"Cache-Control": "no-store, no-cache, must-revalidate, max-age=0",
|
|
25355
|
+
Pragma: "no-cache"
|
|
25356
|
+
},
|
|
25357
|
+
body: "HLS session was not created"
|
|
25358
|
+
};
|
|
25359
|
+
}
|
|
25360
|
+
}
|
|
25361
|
+
entry.lastAccessAt = Date.now();
|
|
25362
|
+
if (isPlaylist) {
|
|
25363
|
+
return this.servePlaylist(entry.session, requestUrl, sessionKey);
|
|
25364
|
+
}
|
|
25365
|
+
if (isSegment) {
|
|
25366
|
+
return this.serveSegment(entry.session, hlsPath, sessionKey);
|
|
25367
|
+
}
|
|
25368
|
+
return {
|
|
25369
|
+
statusCode: 400,
|
|
25370
|
+
headers: { "Content-Type": "text/plain" },
|
|
25371
|
+
body: "Invalid HLS path"
|
|
25372
|
+
};
|
|
25373
|
+
} catch (error) {
|
|
25374
|
+
const message = error instanceof Error ? error.message : String(error);
|
|
25375
|
+
this.logger?.error?.(
|
|
25376
|
+
`[HlsSessionManager] Error handling request: ${message}`
|
|
25377
|
+
);
|
|
25378
|
+
return {
|
|
25379
|
+
statusCode: 500,
|
|
25380
|
+
headers: { "Content-Type": "text/plain" },
|
|
25381
|
+
body: `HLS error: ${message}`
|
|
25382
|
+
};
|
|
25383
|
+
}
|
|
25384
|
+
}
|
|
25385
|
+
async withCreationLock(lockKey, fn) {
|
|
25386
|
+
const prev = this.creationLocks.get(lockKey) ?? Promise.resolve();
|
|
25387
|
+
let release;
|
|
25388
|
+
const current = new Promise((resolve) => {
|
|
25389
|
+
release = resolve;
|
|
25390
|
+
});
|
|
25391
|
+
const chained = prev.then(
|
|
25392
|
+
() => current,
|
|
25393
|
+
() => current
|
|
25394
|
+
);
|
|
25395
|
+
this.creationLocks.set(lockKey, chained);
|
|
25396
|
+
await prev.catch(() => {
|
|
25397
|
+
});
|
|
25398
|
+
try {
|
|
25399
|
+
await fn();
|
|
25400
|
+
} finally {
|
|
25401
|
+
release();
|
|
25402
|
+
if (this.creationLocks.get(lockKey) === chained) {
|
|
25403
|
+
this.creationLocks.delete(lockKey);
|
|
25404
|
+
}
|
|
25405
|
+
}
|
|
25406
|
+
}
|
|
25407
|
+
/**
|
|
25408
|
+
* Check if a session exists for the given key.
|
|
25409
|
+
*/
|
|
25410
|
+
hasSession(sessionKey) {
|
|
25411
|
+
return this.sessions.has(sessionKey);
|
|
25412
|
+
}
|
|
25413
|
+
/**
|
|
25414
|
+
* Stop a specific session.
|
|
25415
|
+
*/
|
|
25416
|
+
async stopSession(sessionKey) {
|
|
25417
|
+
const entry = this.sessions.get(sessionKey);
|
|
25418
|
+
if (entry) {
|
|
25419
|
+
this.logger?.debug?.(
|
|
25420
|
+
`[HlsSessionManager] Stopping session: ${sessionKey}`
|
|
25421
|
+
);
|
|
25422
|
+
this.sessions.delete(sessionKey);
|
|
25423
|
+
await entry.session.stop().catch(() => {
|
|
25424
|
+
});
|
|
25425
|
+
}
|
|
25426
|
+
}
|
|
25427
|
+
/**
|
|
25428
|
+
* Stop all sessions and cleanup.
|
|
25429
|
+
*/
|
|
25430
|
+
async stopAll() {
|
|
25431
|
+
this.logger?.debug?.(`[HlsSessionManager] Stopping all sessions`);
|
|
25432
|
+
if (this.cleanupTimer) {
|
|
25433
|
+
clearInterval(this.cleanupTimer);
|
|
25434
|
+
this.cleanupTimer = void 0;
|
|
25435
|
+
}
|
|
25436
|
+
const stopPromises = Array.from(this.sessions.values()).map(
|
|
25437
|
+
(entry) => entry.session.stop().catch(() => {
|
|
25438
|
+
})
|
|
25439
|
+
);
|
|
25440
|
+
this.sessions.clear();
|
|
25441
|
+
await Promise.all(stopPromises);
|
|
25442
|
+
}
|
|
25443
|
+
/**
|
|
25444
|
+
* Get the number of active sessions.
|
|
25445
|
+
*/
|
|
25446
|
+
get sessionCount() {
|
|
25447
|
+
return this.sessions.size;
|
|
25448
|
+
}
|
|
25449
|
+
/**
|
|
25450
|
+
* Serve the HLS playlist with rewritten segment URLs.
|
|
25451
|
+
*/
|
|
25452
|
+
servePlaylist(session, requestUrl, sessionKey) {
|
|
25453
|
+
let playlist = session.getPlaylist();
|
|
25454
|
+
try {
|
|
25455
|
+
const url = new URL(requestUrl, "http://localhost");
|
|
25456
|
+
const basePath = url.pathname;
|
|
25457
|
+
const baseParams = new URLSearchParams(url.searchParams);
|
|
25458
|
+
baseParams.delete("hls");
|
|
25459
|
+
playlist = playlist.replace(/^(segment_\d+\.ts)$/gm, (match) => {
|
|
25460
|
+
const params = new URLSearchParams(baseParams);
|
|
25461
|
+
params.set("hls", match);
|
|
25462
|
+
return `${basePath}?${params.toString()}`;
|
|
25463
|
+
});
|
|
25464
|
+
} catch {
|
|
25465
|
+
}
|
|
25466
|
+
this.logger?.debug?.(
|
|
25467
|
+
`[HlsSessionManager] Serving playlist: ${sessionKey}, length=${playlist.length}`
|
|
25468
|
+
);
|
|
25469
|
+
return {
|
|
25470
|
+
statusCode: 200,
|
|
25471
|
+
headers: {
|
|
25472
|
+
"Content-Type": "application/vnd.apple.mpegurl",
|
|
25473
|
+
"Cache-Control": "no-store, no-cache, must-revalidate, max-age=0",
|
|
25474
|
+
Pragma: "no-cache"
|
|
25475
|
+
},
|
|
25476
|
+
body: playlist
|
|
25477
|
+
};
|
|
25478
|
+
}
|
|
25479
|
+
/**
|
|
25480
|
+
* Serve an HLS segment.
|
|
25481
|
+
*/
|
|
25482
|
+
serveSegment(session, segmentName, sessionKey) {
|
|
25483
|
+
const segment = session.getSegment(segmentName);
|
|
25484
|
+
if (!segment) {
|
|
25485
|
+
this.logger?.warn?.(
|
|
25486
|
+
`[HlsSessionManager] Segment not found: ${segmentName}`
|
|
25487
|
+
);
|
|
25488
|
+
return {
|
|
25489
|
+
statusCode: 404,
|
|
25490
|
+
headers: {
|
|
25491
|
+
"Content-Type": "text/plain",
|
|
25492
|
+
"Cache-Control": "no-store, no-cache, must-revalidate, max-age=0",
|
|
25493
|
+
Pragma: "no-cache",
|
|
25494
|
+
"Retry-After": "1"
|
|
25495
|
+
},
|
|
25496
|
+
body: "Segment not found"
|
|
25497
|
+
};
|
|
25498
|
+
}
|
|
25499
|
+
this.logger?.debug?.(
|
|
25500
|
+
`[HlsSessionManager] Serving segment: ${segmentName} for ${sessionKey}, size=${segment.length}`
|
|
25501
|
+
);
|
|
25502
|
+
return {
|
|
25503
|
+
statusCode: 200,
|
|
25504
|
+
headers: {
|
|
25505
|
+
"Content-Type": "video/mp2t",
|
|
25506
|
+
"Cache-Control": "no-store, no-cache, must-revalidate, max-age=0",
|
|
25507
|
+
Pragma: "no-cache",
|
|
25508
|
+
"Content-Length": String(segment.length)
|
|
25509
|
+
},
|
|
25510
|
+
body: segment
|
|
25511
|
+
};
|
|
25512
|
+
}
|
|
25513
|
+
/**
|
|
25514
|
+
* Cleanup expired sessions.
|
|
25515
|
+
*/
|
|
25516
|
+
async cleanupExpiredSessions() {
|
|
25517
|
+
const now = Date.now();
|
|
25518
|
+
const expiredKeys = [];
|
|
25519
|
+
for (const [key, entry] of this.sessions) {
|
|
25520
|
+
if (now - entry.lastAccessAt > this.sessionTtlMs) {
|
|
25521
|
+
expiredKeys.push(key);
|
|
25522
|
+
}
|
|
25523
|
+
}
|
|
25524
|
+
if (!expiredKeys.length) return;
|
|
25525
|
+
await Promise.allSettled(
|
|
25526
|
+
expiredKeys.map(async (key) => {
|
|
25527
|
+
const entry = this.sessions.get(key);
|
|
25528
|
+
if (!entry) return;
|
|
25529
|
+
this.logger?.log?.(
|
|
25530
|
+
`[HlsSessionManager] TTL expired: stopping session ${key}`
|
|
25531
|
+
);
|
|
25532
|
+
this.sessions.delete(key);
|
|
25533
|
+
try {
|
|
25534
|
+
await entry.session.stop();
|
|
25535
|
+
} catch {
|
|
25536
|
+
}
|
|
25537
|
+
})
|
|
25538
|
+
);
|
|
25539
|
+
}
|
|
25540
|
+
async stopOtherSessionsWithPrefix(prefix, exceptKey) {
|
|
25541
|
+
const toStop = [];
|
|
25542
|
+
for (const key of this.sessions.keys()) {
|
|
25543
|
+
if (key !== exceptKey && key.startsWith(prefix)) toStop.push(key);
|
|
25544
|
+
}
|
|
25545
|
+
if (!toStop.length) return;
|
|
25546
|
+
this.logger?.log?.(
|
|
25547
|
+
`[HlsSessionManager] Switch: stopping ${toStop.length} session(s) for prefix=${prefix}`
|
|
25548
|
+
);
|
|
25549
|
+
await Promise.all(
|
|
25550
|
+
toStop.map(async (key) => {
|
|
25551
|
+
const entry = this.sessions.get(key);
|
|
25552
|
+
if (!entry) return;
|
|
25553
|
+
this.sessions.delete(key);
|
|
25554
|
+
await entry.session.stop().catch(() => {
|
|
25555
|
+
});
|
|
25556
|
+
})
|
|
25557
|
+
);
|
|
25558
|
+
}
|
|
25559
|
+
};
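A minimal sketch of wiring HlsSessionManager.handleRequest into a plain Node HTTP server, mirroring how the /hls endpoint further down consumes it. The route name, session key and port are illustrative, and `api` is assumed to be an already-constructed ReolinkBaichuanApi:

```ts
import * as http from "http";

// Sketch only: `api` is assumed to be a constructed ReolinkBaichuanApi.
const hls = new HlsSessionManager(api, { logger: console, sessionTtlMs: 60_000 });

http.createServer(async (req, res) => {
  const u = new URL(req.url ?? "/", "http://localhost");
  if (u.pathname !== "/replay") { // illustrative route
    res.statusCode = 404;
    res.end();
    return;
  }
  const fileName = (u.searchParams.get("fileName") ?? "").trim();
  const result = await hls.handleRequest({
    sessionKey: `hls:demo:ch0:${fileName}`,
    hlsPath: (u.searchParams.get("hls") ?? "playlist.m3u8").trim(),
    requestUrl: `http://localhost${req.url}`,
    exclusiveKeyPrefix: "hls:demo:ch0:", // stop other clips on the same channel first
    createSession: () => ({ channel: 0, fileName }),
  });
  res.writeHead(result.statusCode, result.headers);
  res.end(result.body);
}).listen(8787);
```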
|
|
25560
|
+
function detectIosClient(userAgent) {
|
|
25561
|
+
const ua = (userAgent ?? "").toLowerCase();
|
|
25562
|
+
const isIos = /iphone|ipad|ipod/.test(ua);
|
|
25563
|
+
const isIosInstalledApp = ua.includes("installedapp");
|
|
25564
|
+
return {
|
|
25565
|
+
isIos,
|
|
25566
|
+
isIosInstalledApp,
|
|
25567
|
+
// iOS InstalledApp needs HLS for video playback
|
|
25568
|
+
needsHls: isIos && isIosInstalledApp
|
|
25569
|
+
};
|
|
25570
|
+
}
|
|
25571
|
+
function buildHlsRedirectUrl(originalUrl) {
|
|
25572
|
+
return `${originalUrl}${originalUrl.includes("?") ? "&" : "?"}hls=playlist.m3u8`;
|
|
25573
|
+
}
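These two helpers are meant to be combined: detect an iOS in-app client from its User-Agent and, when needsHls is set, send it to the same URL with hls=playlist.m3u8 appended. A sketch wrapped in a hypothetical handler helper:

```ts
import type { IncomingMessage, ServerResponse } from "http";

// Sketch: redirect iOS in-app clients to the HLS playlist variant of the same URL.
function maybeRedirectToHls(req: IncomingMessage, res: ServerResponse): boolean {
  const { needsHls } = detectIosClient(req.headers["user-agent"]);
  if (!needsHls) return false;
  // e.g. "/clip?fileName=RecS03.mp4" -> "/clip?fileName=RecS03.mp4&hls=playlist.m3u8"
  res.writeHead(302, { Location: buildHlsRedirectUrl(req.url ?? "/") });
  res.end();
  return true;
}
```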
|
|
25574
|
+
|
|
25575
|
+
// src/reolink/discovery.ts
|
|
25576
|
+
var import_node_dgram2 = __toESM(require("dgram"), 1);
|
|
25577
|
+
var import_node_os2 = require("os");
|
|
25578
|
+
init_ReolinkCgiApi();
|
|
25579
|
+
async function discoverViaUdpDirect(host, options) {
|
|
25580
|
+
if (!options.enableUdpDiscovery) return [];
|
|
25581
|
+
const logger = options.logger;
|
|
25582
|
+
const timeoutMs = options.udpBroadcastTimeoutMs ?? 1500;
|
|
25583
|
+
const discovered = [];
|
|
25584
|
+
const targetHost = host?.trim();
|
|
25585
|
+
if (!targetHost) return [];
|
|
25586
|
+
logger?.log?.(`[Discovery] Starting UDP direct discovery to ${targetHost} on ports ${BCUDP_DISCOVERY_PORT_LOCAL_ANY} and ${BCUDP_DISCOVERY_PORT_LOCAL_UID}...`);
|
|
25587
|
+
return new Promise((resolve) => {
|
|
25588
|
+
const socket = import_node_dgram2.default.createSocket("udp4");
|
|
25589
|
+
let timeout;
|
|
25590
|
+
socket.on("message", (msg, rinfo) => {
|
|
25591
|
+
try {
|
|
25592
|
+
if (rinfo.address !== targetHost) return;
|
|
25593
|
+
const packet = decodeBcUdpPacket(msg);
|
|
25594
|
+
if (packet.kind !== "discovery") return;
|
|
25595
|
+
const cr = parseD2cCr(packet.xml);
|
|
25596
|
+
const disc = cr ? void 0 : parseD2cDisc(packet.xml);
|
|
25597
|
+
if (!cr && !disc) return;
|
|
25598
|
+
const uidMatch = /<uid>([^<]+)<\/uid>/i.exec(packet.xml);
|
|
25599
|
+
const modelMatch = /<model>([^<]+)<\/model>/i.exec(packet.xml);
|
|
25600
|
+
const nameMatch = /<name>([^<]+)<\/name>/i.exec(packet.xml);
|
|
25601
|
+
const deviceIdMatch = /<deviceId>([^<]+)<\/deviceId>/i.exec(packet.xml);
|
|
25602
|
+
const uid = (uidMatch?.[1] ?? deviceIdMatch?.[1])?.trim();
|
|
25603
|
+
const model = modelMatch?.[1]?.trim();
|
|
25604
|
+
const name = nameMatch?.[1]?.trim();
|
|
25605
|
+
const result = {
|
|
25606
|
+
host: rinfo.address,
|
|
25607
|
+
discoveryMethod: "udp_direct",
|
|
25608
|
+
...uid ? { uid } : {},
|
|
25609
|
+
...model ? { model } : {},
|
|
25610
|
+
...name ? { name } : {}
|
|
25611
|
+
};
|
|
25612
|
+
discovered.push(result);
|
|
25613
|
+
try {
|
|
25614
|
+
socket.close();
|
|
24573
25615
|
} catch {
|
|
24574
25616
|
}
|
|
24575
25617
|
} catch {
|
|
@@ -25221,6 +26263,79 @@ var AutodiscoveryClient = class {
|
|
|
25221
26263
|
}
|
|
25222
26264
|
};
|
|
25223
26265
|
|
|
26266
|
+
// src/reolink/baichuan/types.ts
|
|
26267
|
+
function getVideoclipClientInfo(headers) {
|
|
26268
|
+
const getHeader = (key) => {
|
|
26269
|
+
const val = headers[key] ?? headers[key.toLowerCase()] ?? headers[key.toUpperCase()];
|
|
26270
|
+
return Array.isArray(val) ? val[0] : val;
|
|
26271
|
+
};
|
|
26272
|
+
return {
|
|
26273
|
+
userAgent: getHeader("user-agent") ?? getHeader("User-Agent"),
|
|
26274
|
+
accept: getHeader("accept") ?? getHeader("Accept"),
|
|
26275
|
+
range: getHeader("range") ?? getHeader("Range"),
|
|
26276
|
+
secChUa: getHeader("sec-ch-ua") ?? getHeader("Sec-CH-UA"),
|
|
26277
|
+
secChUaMobile: getHeader("sec-ch-ua-mobile") ?? getHeader("Sec-CH-UA-Mobile"),
|
|
26278
|
+
secChUaPlatform: getHeader("sec-ch-ua-platform") ?? getHeader("Sec-CH-UA-Platform")
|
|
26279
|
+
};
|
|
26280
|
+
}
|
|
26281
|
+
function decideVideoclipTranscodeMode(headers, forceMode) {
|
|
26282
|
+
const clientInfo = getVideoclipClientInfo(headers);
|
|
26283
|
+
if (forceMode) {
|
|
26284
|
+
return {
|
|
26285
|
+
mode: forceMode,
|
|
26286
|
+
reason: `forced: ${forceMode}`,
|
|
26287
|
+
clientInfo
|
|
26288
|
+
};
|
|
26289
|
+
}
|
|
26290
|
+
const ua = (clientInfo.userAgent ?? "").toLowerCase();
|
|
26291
|
+
const platform = (clientInfo.secChUaPlatform ?? "").toLowerCase().replace(/"/g, "");
|
|
26292
|
+
const isIos = /iphone|ipad|ipod/.test(ua);
|
|
26293
|
+
if (isIos) {
|
|
26294
|
+
return {
|
|
26295
|
+
mode: "transcode-h264",
|
|
26296
|
+
reason: "iOS device detected - no native H.265 support in <video>",
|
|
26297
|
+
clientInfo
|
|
26298
|
+
};
|
|
26299
|
+
}
|
|
26300
|
+
const isFirefox = ua.includes("firefox");
|
|
26301
|
+
if (isFirefox) {
|
|
26302
|
+
return {
|
|
26303
|
+
mode: "transcode-h264",
|
|
26304
|
+
reason: "Firefox detected - no H.265 support",
|
|
26305
|
+
clientInfo
|
|
26306
|
+
};
|
|
26307
|
+
}
|
|
26308
|
+
const isAndroid = ua.includes("android") || platform === "android";
|
|
26309
|
+
if (isAndroid) {
|
|
26310
|
+
return {
|
|
26311
|
+
mode: "transcode-h264",
|
|
26312
|
+
reason: "Android device detected - variable H.265 support",
|
|
26313
|
+
clientInfo
|
|
26314
|
+
};
|
|
26315
|
+
}
|
|
26316
|
+
const isChromium = ua.includes("chrome") || ua.includes("edg");
|
|
26317
|
+
const isMac = ua.includes("mac os") || platform === "macos";
|
|
26318
|
+
if (isChromium && !isMac) {
|
|
26319
|
+
return {
|
|
26320
|
+
mode: "transcode-h264",
|
|
26321
|
+
reason: "Chrome/Edge on non-Mac detected - limited H.265 support",
|
|
26322
|
+
clientInfo
|
|
26323
|
+
};
|
|
26324
|
+
}
|
|
26325
|
+
if (isMac) {
|
|
26326
|
+
return {
|
|
26327
|
+
mode: "passthrough",
|
|
26328
|
+
reason: "macOS detected - native H.265 hardware decoding available",
|
|
26329
|
+
clientInfo
|
|
26330
|
+
};
|
|
26331
|
+
}
|
|
26332
|
+
return {
|
|
26333
|
+
mode: "transcode-h264",
|
|
26334
|
+
reason: "Unknown client - transcoding for compatibility",
|
|
26335
|
+
clientInfo
|
|
26336
|
+
};
|
|
26337
|
+
}
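decideVideoclipTranscodeMode only needs the raw request headers (it normalizes header-name casing itself) plus an optional override; a short sketch of both paths inside a hypothetical helper:

```ts
import type { IncomingMessage } from "http";

// Sketch: decide per request whether a clip should be transcoded to H.264.
function pickTranscodeMode(req: IncomingMessage) {
  const decision = decideVideoclipTranscodeMode(req.headers);
  // e.g. "transcode-h264 - Firefox detected - no H.265 support"
  console.log(decision.mode, "-", decision.reason);

  // Forcing a mode skips the user-agent sniffing entirely:
  const forced = decideVideoclipTranscodeMode(req.headers, "passthrough");
  // forced.reason === "forced: passthrough"
  return decision.mode;
}
```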
|
|
26338
|
+
|
|
25224
26339
|
// src/index.ts
|
|
25225
26340
|
init_recordingFileName();
|
|
25226
26341
|
|
|
@@ -25232,6 +26347,13 @@ function parseIntParam(v, def) {
|
|
|
25232
26347
|
const n = Number.parseInt(v, 10);
|
|
25233
26348
|
return Number.isFinite(n) ? n : def;
|
|
25234
26349
|
}
|
|
26350
|
+
function parseBoolParam(v, def) {
|
|
26351
|
+
if (v == null) return def;
|
|
26352
|
+
const s = v.trim().toLowerCase();
|
|
26353
|
+
if (s === "1" || s === "true" || s === "yes" || s === "y") return true;
|
|
26354
|
+
if (s === "0" || s === "false" || s === "no" || s === "n") return false;
|
|
26355
|
+
return def;
|
|
26356
|
+
}
|
|
25235
26357
|
function parseProfile(v) {
|
|
25236
26358
|
const p = (v ?? "sub").trim();
|
|
25237
26359
|
if (p === "main" || p === "sub" || p === "ext") return p;
|
|
@@ -25262,6 +26384,11 @@ function createBaichuanEndpointsServer(opts) {
|
|
|
25262
26384
|
const api = new ReolinkBaichuanApi({
|
|
25263
26385
|
...opts.baichuan
|
|
25264
26386
|
});
|
|
26387
|
+
const hlsManager = new HlsSessionManager(api, {
|
|
26388
|
+
logger: console,
|
|
26389
|
+
sessionTtlMs: 6e4,
|
|
26390
|
+
cleanupIntervalMs: 5e3
|
|
26391
|
+
});
|
|
25265
26392
|
const listenHost = opts.listenHost ?? "127.0.0.1";
|
|
25266
26393
|
const rtspListenHost = opts.rtspListenHost ?? "127.0.0.1";
|
|
25267
26394
|
const rtspServers = /* @__PURE__ */ new Map();
|
|
@@ -25307,6 +26434,46 @@ function createBaichuanEndpointsServer(opts) {
|
|
|
25307
26434
|
res.end(JSON.stringify({ rtspUrl }));
|
|
25308
26435
|
return;
|
|
25309
26436
|
}
|
|
26437
|
+
if (u.pathname === "/hls") {
|
|
26438
|
+
const channel = parseIntParam(u.searchParams.get("channel"), 0);
|
|
26439
|
+
const fileName = (u.searchParams.get("fileName") ?? "").trim();
|
|
26440
|
+
const deviceId = (u.searchParams.get("deviceId") ?? "anon").trim();
|
|
26441
|
+
const isNvr = parseBoolParam(u.searchParams.get("isNvr"), false);
|
|
26442
|
+
const transcode = parseBoolParam(u.searchParams.get("transcode"), true);
|
|
26443
|
+
const hlsSegmentDuration = parseIntParam(
|
|
26444
|
+
u.searchParams.get("hlsSegmentDuration"),
|
|
26445
|
+
2
|
|
26446
|
+
);
|
|
26447
|
+
const hlsPath = (u.searchParams.get("hls") ?? "playlist.m3u8").trim();
|
|
26448
|
+
if (!fileName) {
|
|
26449
|
+
res.statusCode = 400;
|
|
26450
|
+
res.end("Missing fileName");
|
|
26451
|
+
return;
|
|
26452
|
+
}
|
|
26453
|
+
const sessionKey = `hls:${deviceId}:ch${channel}:${fileName}`;
|
|
26454
|
+
const exclusiveKeyPrefix = `hls:${deviceId}:ch${channel}:`;
|
|
26455
|
+
const requestUrl = `http://${listenHost}:${opts.listenPort}${u.pathname}${u.search}`;
|
|
26456
|
+
const result = await hlsManager.handleRequest({
|
|
26457
|
+
sessionKey,
|
|
26458
|
+
hlsPath,
|
|
26459
|
+
requestUrl,
|
|
26460
|
+
exclusiveKeyPrefix,
|
|
26461
|
+
createSession: () => ({
|
|
26462
|
+
channel,
|
|
26463
|
+
fileName,
|
|
26464
|
+
isNvr,
|
|
26465
|
+
deviceId,
|
|
26466
|
+
transcodeH265ToH264: transcode,
|
|
26467
|
+
hlsSegmentDuration
|
|
26468
|
+
})
|
|
26469
|
+
});
|
|
26470
|
+
res.statusCode = result.statusCode;
|
|
26471
|
+
for (const [k, v] of Object.entries(result.headers)) {
|
|
26472
|
+
res.setHeader(k, v);
|
|
26473
|
+
}
|
|
26474
|
+
res.end(result.body);
|
|
26475
|
+
return;
|
|
26476
|
+
}
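Given the query parameters read above, the /hls endpoint can be exercised with any HTTP client; a sketch using global fetch (a Node 18+ runtime is assumed), with placeholder host, port and recording path:

```ts
// Sketch: host/port and the recording path are placeholders for a running endpoints server.
const base = "http://127.0.0.1:8000/hls";
const params = new URLSearchParams({
  channel: "0",
  fileName: "/mnt/sda/Mp4Record/2026-01-25/RecS03.mp4",
  deviceId: "cam-kitchen",   // defaults to "anon" when omitted
  transcode: "true",         // parsed by parseBoolParam above
  hlsSegmentDuration: "2",
  hls: "playlist.m3u8",      // or e.g. "segment_000.ts" for a single segment
});
const res = await fetch(`${base}?${params.toString()}`);
console.log(res.status, res.headers.get("content-type")); // 200 application/vnd.apple.mpegurl
```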
|
|
25310
26477
|
if (u.pathname === "/download") {
|
|
25311
26478
|
const channel = parseIntParam(u.searchParams.get("channel"), 0);
|
|
25312
26479
|
const uid = (u.searchParams.get("uid") ?? "").trim();
|
|
@@ -28955,93 +30122,1959 @@ var BaichuanHttpStreamServer = class extends import_node_events6.EventEmitter {
|
|
|
28955
30122
|
}
|
|
28956
30123
|
}
|
|
28957
30124
|
}
|
|
28958
|
-
});
|
|
28959
|
-
let ffmpegOutput = "";
|
|
28960
|
-
ffmpeg.stderr.on("data", (data) => {
|
|
28961
|
-
const output = data.toString();
|
|
28962
|
-
ffmpegOutput += output;
|
|
28963
|
-
const isKnownNonFatal = output.includes("top block unavailable") || output.includes("error while decoding") || output.includes("decode_slice_header error") || output.includes("no frame") || output.includes("concealing") || output.includes("left block unavailable") || output.includes("bottom block unavailable");
|
|
28964
|
-
if (isKnownNonFatal) {
|
|
28965
|
-
this.logger.warn(`[BaichuanHttpStreamServer] FFmpeg decode warning: ${output.trim()}`);
|
|
28966
|
-
return;
|
|
28967
|
-
}
|
|
28968
|
-
const isCriticalError = output.includes("Invalid data found") || output.includes("Error opening") || output.includes("Could not write header") || output.includes("Broken pipe") || output.includes("Connection refused") || output.includes("Immediate exit") || output.includes("Conversion failed");
|
|
28969
|
-
if (isCriticalError) {
|
|
28970
|
-
this.logger.error(`[BaichuanHttpStreamServer] FFmpeg critical error: ${output.trim()}`);
|
|
28971
|
-
this.emit("error", new Error(`FFmpeg error: ${output}`));
|
|
30125
|
+
});
|
|
30126
|
+
let ffmpegOutput = "";
|
|
30127
|
+
ffmpeg.stderr.on("data", (data) => {
|
|
30128
|
+
const output = data.toString();
|
|
30129
|
+
ffmpegOutput += output;
|
|
30130
|
+
const isKnownNonFatal = output.includes("top block unavailable") || output.includes("error while decoding") || output.includes("decode_slice_header error") || output.includes("no frame") || output.includes("concealing") || output.includes("left block unavailable") || output.includes("bottom block unavailable");
|
|
30131
|
+
if (isKnownNonFatal) {
|
|
30132
|
+
this.logger.warn(`[BaichuanHttpStreamServer] FFmpeg decode warning: ${output.trim()}`);
|
|
30133
|
+
return;
|
|
30134
|
+
}
|
|
30135
|
+
const isCriticalError = output.includes("Invalid data found") || output.includes("Error opening") || output.includes("Could not write header") || output.includes("Broken pipe") || output.includes("Connection refused") || output.includes("Immediate exit") || output.includes("Conversion failed");
|
|
30136
|
+
if (isCriticalError) {
|
|
30137
|
+
this.logger.error(`[BaichuanHttpStreamServer] FFmpeg critical error: ${output.trim()}`);
|
|
30138
|
+
this.emit("error", new Error(`FFmpeg error: ${output}`));
|
|
30139
|
+
} else {
|
|
30140
|
+
this.logger.warn(`[BaichuanHttpStreamServer] FFmpeg stderr: ${output.trim()}`);
|
|
30141
|
+
}
|
|
30142
|
+
});
|
|
30143
|
+
ffmpeg.on("close", (code) => {
|
|
30144
|
+
if (code !== 0) {
|
|
30145
|
+
this.logger.error(`[BaichuanHttpStreamServer] FFmpeg exited with code ${code}`);
|
|
30146
|
+
this.emit("error", new Error(`FFmpeg exited with code ${code}`));
|
|
30147
|
+
}
|
|
30148
|
+
this.active = false;
|
|
30149
|
+
this.emit("close");
|
|
30150
|
+
});
|
|
30151
|
+
this.active = true;
|
|
30152
|
+
}
|
|
30153
|
+
/**
|
|
30154
|
+
* Get HTTP URL for this stream.
|
|
30155
|
+
*/
|
|
30156
|
+
getStreamUrl() {
|
|
30157
|
+
return `http://127.0.0.1:${this.listenPort}${this.path}.ts`;
|
|
30158
|
+
}
|
|
30159
|
+
/**
|
|
30160
|
+
* Stop HTTP stream server.
|
|
30161
|
+
*/
|
|
30162
|
+
async stop() {
|
|
30163
|
+
for (const client of this.clients) {
|
|
30164
|
+
if (!client.destroyed) {
|
|
30165
|
+
client.end();
|
|
30166
|
+
}
|
|
30167
|
+
}
|
|
30168
|
+
this.clients.clear();
|
|
30169
|
+
try {
|
|
30170
|
+
await this.videoStream.stop();
|
|
30171
|
+
} catch {
|
|
30172
|
+
}
|
|
30173
|
+
if (this.videoListener) {
|
|
30174
|
+
this.videoStream.removeListener("videoAccessUnit", this.videoListener);
|
|
30175
|
+
this.videoStream.removeListener("videoFrame", this.videoListener);
|
|
30176
|
+
}
|
|
30177
|
+
this.videoListener = void 0;
|
|
30178
|
+
if (this.ffmpegProcess) {
|
|
30179
|
+
const proc = this.ffmpegProcess;
|
|
30180
|
+
try {
|
|
30181
|
+
proc.kill("SIGTERM");
|
|
30182
|
+
} catch {
|
|
30183
|
+
}
|
|
30184
|
+
await new Promise((resolve) => {
|
|
30185
|
+
const t = setTimeout(() => {
|
|
30186
|
+
try {
|
|
30187
|
+
proc.kill("SIGKILL");
|
|
30188
|
+
} catch {
|
|
30189
|
+
}
|
|
30190
|
+
resolve();
|
|
30191
|
+
}, 1500);
|
|
30192
|
+
t?.unref?.();
|
|
30193
|
+
proc.once("close", () => {
|
|
30194
|
+
clearTimeout(t);
|
|
30195
|
+
resolve();
|
|
30196
|
+
});
|
|
30197
|
+
});
|
|
30198
|
+
}
|
|
30199
|
+
this.ffmpegProcess = void 0;
|
|
30200
|
+
if (this.httpServer) {
|
|
30201
|
+
await new Promise((resolve) => {
|
|
30202
|
+
this.httpServer?.closeAllConnections?.();
|
|
30203
|
+
this.httpServer?.closeIdleConnections?.();
|
|
30204
|
+
this.httpServer.close(() => resolve());
|
|
30205
|
+
});
|
|
30206
|
+
this.httpServer = void 0;
|
|
30207
|
+
}
|
|
30208
|
+
this.active = false;
|
|
30209
|
+
}
|
|
30210
|
+
isActive() {
|
|
30211
|
+
return this.active;
|
|
30212
|
+
}
|
|
30213
|
+
};
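Only the tail of BaichuanHttpStreamServer falls inside this hunk, but the visible getStreamUrl() / stop() / isActive() surface is enough for a small consumption sketch; `server` is assumed to have been constructed and started elsewhere:

```ts
// Sketch: `server` is assumed to be an already-started BaichuanHttpStreamServer.
if (server.isActive()) {
  // getStreamUrl() returns "http://127.0.0.1:<listenPort><path>.ts"
  console.log("MPEG-TS stream available at", server.getStreamUrl());
}
// stop() ends connected client sockets, the ffmpeg process and the HTTP listener.
await server.stop();
```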
|
|
30214
|
+
|
|
30215
|
+
// src/baichuan/stream/BaichuanMjpegServer.ts
|
|
30216
|
+
var import_node_events8 = require("events");
|
|
30217
|
+
var http5 = __toESM(require("http"), 1);
|
|
30218
|
+
|
|
30219
|
+
// src/baichuan/stream/MjpegTransformer.ts
|
|
30220
|
+
var import_node_events7 = require("events");
|
|
30221
|
+
var import_node_child_process9 = require("child_process");
|
|
30222
|
+
var JPEG_SOI = Buffer.from([255, 216]);
|
|
30223
|
+
var JPEG_EOI = Buffer.from([255, 217]);
|
|
30224
|
+
var MjpegTransformer = class extends import_node_events7.EventEmitter {
|
|
30225
|
+
options;
|
|
30226
|
+
ffmpeg = null;
|
|
30227
|
+
started = false;
|
|
30228
|
+
closed = false;
|
|
30229
|
+
jpegBuffer = Buffer.alloc(0);
|
|
30230
|
+
frameCount = 0;
|
|
30231
|
+
lastTimestamp = 0;
|
|
30232
|
+
constructor(options) {
|
|
30233
|
+
super();
|
|
30234
|
+
this.options = {
|
|
30235
|
+
codec: options.codec,
|
|
30236
|
+
quality: options.quality ?? 5,
|
|
30237
|
+
width: options.width,
|
|
30238
|
+
height: options.height,
|
|
30239
|
+
maxFps: options.maxFps,
|
|
30240
|
+
logger: options.logger
|
|
30241
|
+
};
|
|
30242
|
+
}
|
|
30243
|
+
/**
|
|
30244
|
+
* Start the transformer (spawns FFmpeg process)
|
|
30245
|
+
*/
|
|
30246
|
+
start() {
|
|
30247
|
+
if (this.started || this.closed) return;
|
|
30248
|
+
this.started = true;
|
|
30249
|
+
const { codec, quality, width, height, maxFps } = this.options;
|
|
30250
|
+
const args = [
|
|
30251
|
+
"-hide_banner",
|
|
30252
|
+
"-loglevel",
|
|
30253
|
+
"error",
|
|
30254
|
+
// Input: raw video from stdin
|
|
30255
|
+
"-f",
|
|
30256
|
+
codec === "h265" ? "hevc" : "h264",
|
|
30257
|
+
"-i",
|
|
30258
|
+
"pipe:0"
|
|
30259
|
+
];
|
|
30260
|
+
const filters = [];
|
|
30261
|
+
if (width || height) {
|
|
30262
|
+
const w = width ?? -1;
|
|
30263
|
+
const h = height ?? -1;
|
|
30264
|
+
filters.push(`scale=${w}:${h}`);
|
|
30265
|
+
}
|
|
30266
|
+
if (maxFps) {
|
|
30267
|
+
filters.push(`fps=${maxFps}`);
|
|
30268
|
+
}
|
|
30269
|
+
if (filters.length > 0) {
|
|
30270
|
+
args.push("-vf", filters.join(","));
|
|
30271
|
+
}
|
|
30272
|
+
args.push(
|
|
30273
|
+
"-c:v",
|
|
30274
|
+
"mjpeg",
|
|
30275
|
+
"-q:v",
|
|
30276
|
+
String(quality),
|
|
30277
|
+
"-f",
|
|
30278
|
+
"mjpeg",
|
|
30279
|
+
"pipe:1"
|
|
30280
|
+
);
|
|
30281
|
+
this.log("debug", `Starting FFmpeg with args: ${args.join(" ")}`);
|
|
30282
|
+
this.ffmpeg = (0, import_node_child_process9.spawn)("ffmpeg", args, {
|
|
30283
|
+
stdio: ["pipe", "pipe", "pipe"]
|
|
30284
|
+
});
|
|
30285
|
+
this.ffmpeg.stdout.on("data", (data) => {
|
|
30286
|
+
this.handleJpegData(data);
|
|
30287
|
+
});
|
|
30288
|
+
this.ffmpeg.stderr.on("data", (data) => {
|
|
30289
|
+
const msg = data.toString().trim();
|
|
30290
|
+
if (msg) {
|
|
30291
|
+
this.log("debug", `FFmpeg: ${msg}`);
|
|
30292
|
+
}
|
|
30293
|
+
});
|
|
30294
|
+
this.ffmpeg.on("close", (code) => {
|
|
30295
|
+
this.log("debug", `FFmpeg closed with code ${code}`);
|
|
30296
|
+
this.ffmpeg = null;
|
|
30297
|
+
if (!this.closed) {
|
|
30298
|
+
this.emit("close", code);
|
|
30299
|
+
}
|
|
30300
|
+
});
|
|
30301
|
+
this.ffmpeg.on("error", (err) => {
|
|
30302
|
+
this.log("error", `FFmpeg error: ${err.message}`);
|
|
30303
|
+
this.emit("error", err);
|
|
30304
|
+
});
|
|
30305
|
+
}
|
|
30306
|
+
/**
|
|
30307
|
+
* Push an H.264/H.265 access unit (Annex-B format with start codes)
|
|
30308
|
+
*/
|
|
30309
|
+
push(accessUnit, timestamp) {
|
|
30310
|
+
if (!this.started || this.closed || !this.ffmpeg) {
|
|
30311
|
+
return;
|
|
30312
|
+
}
|
|
30313
|
+
this.lastTimestamp = timestamp ?? Date.now() * 1e3;
|
|
30314
|
+
try {
|
|
30315
|
+
this.ffmpeg.stdin.write(accessUnit);
|
|
30316
|
+
} catch (err) {
|
|
30317
|
+
this.log("error", `Failed to write to FFmpeg: ${err}`);
|
|
30318
|
+
}
|
|
30319
|
+
}
|
|
30320
|
+
/**
|
|
30321
|
+
* Handle JPEG data from FFmpeg stdout
|
|
30322
|
+
* FFmpeg outputs complete JPEG images, each starting with SOI (0xFFD8)
|
|
30323
|
+
* and ending with EOI (0xFFD9)
|
|
30324
|
+
*/
|
|
30325
|
+
handleJpegData(data) {
|
|
30326
|
+
this.jpegBuffer = Buffer.concat([this.jpegBuffer, data]);
|
|
30327
|
+
while (true) {
|
|
30328
|
+
const soiIndex = this.jpegBuffer.indexOf(JPEG_SOI);
|
|
30329
|
+
if (soiIndex < 0) {
|
|
30330
|
+
this.jpegBuffer = Buffer.alloc(0);
|
|
30331
|
+
break;
|
|
30332
|
+
}
|
|
30333
|
+
if (soiIndex > 0) {
|
|
30334
|
+
this.jpegBuffer = this.jpegBuffer.subarray(soiIndex);
|
|
30335
|
+
}
|
|
30336
|
+
const eoiIndex = this.jpegBuffer.indexOf(JPEG_EOI, 2);
|
|
30337
|
+
if (eoiIndex < 0) {
|
|
30338
|
+
break;
|
|
30339
|
+
}
|
|
30340
|
+
const frameEnd = eoiIndex + 2;
|
|
30341
|
+
const jpegFrame = this.jpegBuffer.subarray(0, frameEnd);
|
|
30342
|
+
this.jpegBuffer = this.jpegBuffer.subarray(frameEnd);
|
|
30343
|
+
this.frameCount++;
|
|
30344
|
+
const frame = {
|
|
30345
|
+
data: jpegFrame,
|
|
30346
|
+
timestamp: this.lastTimestamp
|
|
30347
|
+
};
|
|
30348
|
+
this.emit("frame", frame);
|
|
30349
|
+
}
|
|
30350
|
+
}
|
|
30351
|
+
/**
|
|
30352
|
+
* Stop the transformer
|
|
30353
|
+
*/
|
|
30354
|
+
async stop() {
|
|
30355
|
+
if (this.closed) return;
|
|
30356
|
+
this.closed = true;
|
|
30357
|
+
if (this.ffmpeg) {
|
|
30358
|
+
try {
|
|
30359
|
+
this.ffmpeg.stdin.end();
|
|
30360
|
+
} catch {
|
|
30361
|
+
}
|
|
30362
|
+
await new Promise((resolve) => {
|
|
30363
|
+
const ff = this.ffmpeg;
|
|
30364
|
+
if (!ff) {
|
|
30365
|
+
resolve();
|
|
30366
|
+
return;
|
|
30367
|
+
}
|
|
30368
|
+
const timeout = setTimeout(() => {
|
|
30369
|
+
ff.kill("SIGKILL");
|
|
30370
|
+
resolve();
|
|
30371
|
+
}, 1e3);
|
|
30372
|
+
ff.once("close", () => {
|
|
30373
|
+
clearTimeout(timeout);
|
|
30374
|
+
resolve();
|
|
30375
|
+
});
|
|
30376
|
+
try {
|
|
30377
|
+
ff.kill("SIGTERM");
|
|
30378
|
+
} catch {
|
|
30379
|
+
clearTimeout(timeout);
|
|
30380
|
+
resolve();
|
|
30381
|
+
}
|
|
30382
|
+
});
|
|
30383
|
+
this.ffmpeg = null;
|
|
30384
|
+
}
|
|
30385
|
+
this.emit("close", 0);
|
|
30386
|
+
}
|
|
30387
|
+
/**
|
|
30388
|
+
* Get frame count
|
|
30389
|
+
*/
|
|
30390
|
+
getFrameCount() {
|
|
30391
|
+
return this.frameCount;
|
|
30392
|
+
}
|
|
30393
|
+
/**
|
|
30394
|
+
* Check if running
|
|
30395
|
+
*/
|
|
30396
|
+
isRunning() {
|
|
30397
|
+
return this.started && !this.closed && this.ffmpeg !== null;
|
|
30398
|
+
}
|
|
30399
|
+
log(level, message) {
|
|
30400
|
+
this.options.logger?.(level, `[MjpegTransformer] ${message}`);
|
|
30401
|
+
}
|
|
30402
|
+
};
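A minimal sketch of driving MjpegTransformer directly: start it, push Annex-B access units, and collect the JPEG frames it emits. The annexBFrames generator below is hypothetical; in this bundle the frames come from the native Baichuan stream (see BaichuanMjpegServer.pumpStream below):

```ts
// Hypothetical source of H.264 access units in Annex-B form with timestamps.
async function* annexBFrames(): AsyncGenerator<{ data: Buffer; microseconds: number }> {
  // yield { data, microseconds } pairs here
}

const transformer = new MjpegTransformer({
  codec: "h264",
  quality: 5, // the class default
  maxFps: 5,  // optional cap, applied via the ffmpeg "fps" filter
  logger: (level, msg) => console.log(level, msg),
});

transformer.on("frame", ({ data, timestamp }) => {
  console.log(`JPEG #${transformer.getFrameCount()} (${data.length} bytes) @ ${timestamp}`);
});

transformer.start(); // spawns the ffmpeg decode-to-MJPEG pipeline
for await (const frame of annexBFrames()) {
  transformer.push(frame.data, frame.microseconds);
}
await transformer.stop();
```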
|
|
30403
|
+
function createMjpegBoundary() {
|
|
30404
|
+
return `mjpegboundary${Date.now()}`;
|
|
30405
|
+
}
|
|
30406
|
+
function getMjpegContentType(boundary) {
|
|
30407
|
+
return `multipart/x-mixed-replace; boundary=${boundary}`;
|
|
30408
|
+
}
|
|
30409
|
+
function formatMjpegFrame(frame, boundary) {
|
|
30410
|
+
const header = Buffer.from(
|
|
30411
|
+
`--${boundary}\r
|
|
30412
|
+
Content-Type: image/jpeg\r
|
|
30413
|
+
Content-Length: ${frame.length}\r
|
|
30414
|
+
\r
|
|
30415
|
+
`
|
|
30416
|
+
);
|
|
30417
|
+
return Buffer.concat([header, frame, Buffer.from("\r\n")]);
|
|
30418
|
+
}
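createMjpegBoundary, getMjpegContentType and formatMjpegFrame encode the multipart/x-mixed-replace framing used for MJPEG over HTTP; a tiny sketch of serving frames with them, where the `frames` emitter stands in for the MjpegTransformer "frame" event above:

```ts
import * as http from "http";
import { EventEmitter } from "events";

// Sketch: `frames` is a stand-in for the MjpegTransformer above,
// assumed to emit "jpeg" events carrying complete JPEG buffers.
const frames = new EventEmitter();

http.createServer((req, res) => {
  const boundary = createMjpegBoundary();
  res.writeHead(200, { "Content-Type": getMjpegContentType(boundary) });
  // Each frame is written as: --boundary, part headers, JPEG bytes, CRLF.
  const onFrame = (jpeg: Buffer) => res.write(formatMjpegFrame(jpeg, boundary));
  frames.on("jpeg", onFrame);
  req.on("close", () => frames.off("jpeg", onFrame));
}).listen(8081);
```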
|
|
30419
|
+
|
|
30420
|
+
// src/baichuan/stream/BaichuanMjpegServer.ts
|
|
30421
|
+
init_H264Converter();
|
|
30422
|
+
init_H265Converter();
|
|
30423
|
+
var BaichuanMjpegServer = class extends import_node_events8.EventEmitter {
|
|
30424
|
+
options;
|
|
30425
|
+
clients = /* @__PURE__ */ new Map();
|
|
30426
|
+
httpServer = null;
|
|
30427
|
+
transformer = null;
|
|
30428
|
+
nativeStream = null;
|
|
30429
|
+
streamPump = null;
|
|
30430
|
+
detectedCodec = null;
|
|
30431
|
+
started = false;
|
|
30432
|
+
clientIdCounter = 0;
|
|
30433
|
+
constructor(options) {
|
|
30434
|
+
super();
|
|
30435
|
+
this.options = options;
|
|
30436
|
+
}
|
|
30437
|
+
/**
|
|
30438
|
+
* Start the MJPEG server
|
|
30439
|
+
*/
|
|
30440
|
+
async start() {
|
|
30441
|
+
if (this.started) return;
|
|
30442
|
+
this.started = true;
|
|
30443
|
+
const port = this.options.port ?? 8080;
|
|
30444
|
+
const host = this.options.host ?? "0.0.0.0";
|
|
30445
|
+
const path6 = this.options.path ?? "/mjpeg";
|
|
30446
|
+
this.httpServer = http5.createServer((req, res) => {
|
|
30447
|
+
this.handleRequest(req, res, path6);
|
|
30448
|
+
});
|
|
30449
|
+
return new Promise((resolve, reject) => {
|
|
30450
|
+
this.httpServer.on("error", (err) => {
|
|
30451
|
+
this.log("error", `HTTP server error: ${err.message}`);
|
|
30452
|
+
reject(err);
|
|
30453
|
+
});
|
|
30454
|
+
this.httpServer.listen(port, host, () => {
|
|
30455
|
+
this.log(
|
|
30456
|
+
"info",
|
|
30457
|
+
`MJPEG server started on http://${host}:${port}${path6}`
|
|
30458
|
+
);
|
|
30459
|
+
this.emit("started", { host, port, path: path6 });
|
|
30460
|
+
resolve();
|
|
30461
|
+
});
|
|
30462
|
+
});
|
|
30463
|
+
}
|
|
30464
|
+
/**
|
|
30465
|
+
* Stop the MJPEG server
|
|
30466
|
+
*/
|
|
30467
|
+
async stop() {
|
|
30468
|
+
if (!this.started) return;
|
|
30469
|
+
this.started = false;
|
|
30470
|
+
for (const [id, client] of this.clients) {
|
|
30471
|
+
try {
|
|
30472
|
+
client.response.end();
|
|
30473
|
+
} catch {
|
|
30474
|
+
}
|
|
30475
|
+
this.clients.delete(id);
|
|
30476
|
+
}
|
|
30477
|
+
await this.stopStream();
|
|
30478
|
+
if (this.httpServer) {
|
|
30479
|
+
await new Promise((resolve) => {
|
|
30480
|
+
this.httpServer.close(() => resolve());
|
|
30481
|
+
});
|
|
30482
|
+
this.httpServer = null;
|
|
30483
|
+
}
|
|
30484
|
+
this.log("info", "MJPEG server stopped");
|
|
30485
|
+
this.emit("stopped");
|
|
30486
|
+
}
|
|
30487
|
+
/**
|
|
30488
|
+
* Handle HTTP request
|
|
30489
|
+
*/
|
|
30490
|
+
handleRequest(req, res, expectedPath) {
|
|
30491
|
+
const url = new URL(req.url ?? "/", `http://${req.headers.host}`);
|
|
30492
|
+
if (url.pathname !== expectedPath) {
|
|
30493
|
+
res.statusCode = 404;
|
|
30494
|
+
res.end("Not Found");
|
|
30495
|
+
return;
|
|
30496
|
+
}
|
|
30497
|
+
if (req.method !== "GET") {
|
|
30498
|
+
res.statusCode = 405;
|
|
30499
|
+
res.end("Method Not Allowed");
|
|
30500
|
+
return;
|
|
30501
|
+
}
|
|
30502
|
+
this.handleMjpegClient(req, res);
|
|
30503
|
+
}
|
|
30504
|
+
/**
|
|
30505
|
+
* Handle new MJPEG client
|
|
30506
|
+
*/
|
|
30507
|
+
handleMjpegClient(req, res) {
|
|
30508
|
+
const clientId = `client-${++this.clientIdCounter}`;
|
|
30509
|
+
const boundary = createMjpegBoundary();
|
|
30510
|
+
const client = {
|
|
30511
|
+
id: clientId,
|
|
30512
|
+
response: res,
|
|
30513
|
+
boundary,
|
|
30514
|
+
connectedAt: Date.now()
|
|
30515
|
+
};
|
|
30516
|
+
this.clients.set(clientId, client);
|
|
30517
|
+
this.log(
|
|
30518
|
+
"info",
|
|
30519
|
+
`MJPEG client connected: ${clientId} (total: ${this.clients.size})`
|
|
30520
|
+
);
|
|
30521
|
+
this.emit("client-connected", { id: clientId, total: this.clients.size });
|
|
30522
|
+
res.writeHead(200, {
|
|
30523
|
+
"Content-Type": getMjpegContentType(boundary),
|
|
30524
|
+
"Cache-Control": "no-cache, no-store, must-revalidate",
|
|
30525
|
+
Pragma: "no-cache",
|
|
30526
|
+
Expires: "0",
|
|
30527
|
+
Connection: "close"
|
|
30528
|
+
});
|
|
30529
|
+
const cleanup = () => {
|
|
30530
|
+
this.clients.delete(clientId);
|
|
30531
|
+
this.log(
|
|
30532
|
+
"info",
|
|
30533
|
+
`MJPEG client disconnected: ${clientId} (remaining: ${this.clients.size})`
|
|
30534
|
+
);
|
|
30535
|
+
this.emit("client-disconnected", {
|
|
30536
|
+
id: clientId,
|
|
30537
|
+
total: this.clients.size
|
|
30538
|
+
});
|
|
30539
|
+
if (this.clients.size === 0) {
|
|
30540
|
+
this.stopStream();
|
|
30541
|
+
}
|
|
30542
|
+
};
|
|
30543
|
+
req.on("close", cleanup);
|
|
30544
|
+
res.on("close", cleanup);
|
|
30545
|
+
res.on("error", cleanup);
|
|
30546
|
+
if (!this.transformer) {
|
|
30547
|
+
this.startStream();
|
|
30548
|
+
}
|
|
30549
|
+
}
|
|
30550
|
+
/**
|
|
30551
|
+
* Start the native video stream and MJPEG transformer
|
|
30552
|
+
*/
|
|
30553
|
+
async startStream() {
|
|
30554
|
+
if (this.transformer) return;
|
|
30555
|
+
this.log("info", "Starting native video stream...");
|
|
30556
|
+
const { api, channel, profile, variant, quality, width, height, maxFps } = this.options;
|
|
30557
|
+
try {
|
|
30558
|
+
this.nativeStream = createNativeStream(
|
|
30559
|
+
api,
|
|
30560
|
+
channel,
|
|
30561
|
+
profile,
|
|
30562
|
+
variant ? { variant } : void 0
|
|
30563
|
+
);
|
|
30564
|
+
this.streamPump = this.pumpStream();
|
|
30565
|
+
} catch (err) {
|
|
30566
|
+
this.log("error", `Failed to start stream: ${err}`);
|
|
30567
|
+
this.emit("error", err);
|
|
30568
|
+
}
|
|
30569
|
+
}
|
|
30570
|
+
/**
|
|
30571
|
+
* Pump native stream and feed to transformer
|
|
30572
|
+
*/
|
|
30573
|
+
async pumpStream() {
|
|
30574
|
+
if (!this.nativeStream) return;
|
|
30575
|
+
let frameBuffer = [];
|
|
30576
|
+
let waitingForKeyframe = true;
|
|
30577
|
+
try {
|
|
30578
|
+
for await (const frame of this.nativeStream) {
|
|
30579
|
+
if (!this.started || this.clients.size === 0) break;
|
|
30580
|
+
const { type, data, microseconds, videoType } = frame;
|
|
30581
|
+
if (type !== "Iframe" && type !== "Pframe") continue;
|
|
30582
|
+
if (!data || data.length === 0) continue;
|
|
30583
|
+
let annexB;
|
|
30584
|
+
if (videoType === "H265") {
|
|
30585
|
+
annexB = convertToAnnexB2(data);
|
|
30586
|
+
if (!this.detectedCodec) {
|
|
30587
|
+
this.detectedCodec = "h265";
|
|
30588
|
+
this.initTransformer();
|
|
30589
|
+
}
|
|
30590
|
+
} else {
|
|
30591
|
+
annexB = convertToAnnexB(data);
|
|
30592
|
+
if (!this.detectedCodec) {
|
|
30593
|
+
this.detectedCodec = "h264";
|
|
30594
|
+
this.initTransformer();
|
|
30595
|
+
}
|
|
30596
|
+
}
|
|
30597
|
+
if (waitingForKeyframe) {
|
|
30598
|
+
if (type === "Iframe") {
|
|
30599
|
+
waitingForKeyframe = false;
|
|
30600
|
+
} else {
|
|
30601
|
+
continue;
|
|
30602
|
+
}
|
|
30603
|
+
}
|
|
30604
|
+
if (this.transformer) {
|
|
30605
|
+
this.transformer.push(annexB, microseconds);
|
|
30606
|
+
}
|
|
30607
|
+
}
|
|
30608
|
+
} catch (err) {
|
|
30609
|
+
if (this.started) {
|
|
30610
|
+
this.log("error", `Stream error: ${err}`);
|
|
30611
|
+
this.emit("error", err);
|
|
30612
|
+
}
|
|
30613
|
+
}
|
|
30614
|
+
}
|
|
30615
|
+
/**
|
|
30616
|
+
* Initialize MJPEG transformer once codec is detected
|
|
30617
|
+
*/
|
|
30618
|
+
initTransformer() {
|
|
30619
|
+
if (this.transformer || !this.detectedCodec) return;
|
|
30620
|
+
const { quality, width, height, maxFps } = this.options;
|
|
30621
|
+
this.transformer = new MjpegTransformer({
|
|
30622
|
+
codec: this.detectedCodec,
|
|
30623
|
+
quality,
|
|
30624
|
+
width,
|
|
30625
|
+
height,
|
|
30626
|
+
maxFps,
|
|
30627
|
+
logger: this.options.logger
|
|
30628
|
+
});
|
|
30629
|
+
this.transformer.on("frame", (frame) => {
|
|
30630
|
+
this.broadcastFrame(frame);
|
|
30631
|
+
});
|
|
30632
|
+
this.transformer.on("error", (err) => {
|
|
30633
|
+
this.log("error", `Transformer error: ${err}`);
|
|
30634
|
+
});
|
|
30635
|
+
this.transformer.on("close", () => {
|
|
30636
|
+
this.log("debug", "Transformer closed");
|
|
30637
|
+
});
|
|
30638
|
+
this.transformer.start();
|
|
30639
|
+
this.log(
|
|
30640
|
+
"info",
|
|
30641
|
+
`MJPEG transformer started (codec: ${this.detectedCodec})`
|
|
30642
|
+
);
|
|
30643
|
+
}
|
|
30644
|
+
/**
|
|
30645
|
+
* Broadcast JPEG frame to all connected clients
|
|
30646
|
+
*/
|
|
30647
|
+
broadcastFrame(frame) {
|
|
30648
|
+
for (const client of this.clients.values()) {
|
|
30649
|
+
try {
|
|
30650
|
+
const mjpegData = formatMjpegFrame(frame.data, client.boundary);
|
|
30651
|
+
client.response.write(mjpegData);
|
|
30652
|
+
} catch {
|
|
30653
|
+
}
|
|
30654
|
+
}
|
|
30655
|
+
}
|
|
30656
|
+
/**
|
|
30657
|
+
* Stop the stream and transformer
|
|
30658
|
+
*/
|
|
30659
|
+
async stopStream() {
|
|
30660
|
+
if (this.transformer) {
|
|
30661
|
+
await this.transformer.stop();
|
|
30662
|
+
this.transformer = null;
|
|
30663
|
+
}
|
|
30664
|
+
if (this.nativeStream) {
|
|
30665
|
+
try {
|
|
30666
|
+
await this.nativeStream.return(void 0);
|
|
30667
|
+
} catch {
|
|
30668
|
+
}
|
|
30669
|
+
this.nativeStream = null;
|
|
30670
|
+
}
|
|
30671
|
+
if (this.streamPump) {
|
|
30672
|
+
try {
|
|
30673
|
+
await this.streamPump;
|
|
30674
|
+
} catch {
|
|
30675
|
+
}
|
|
30676
|
+
this.streamPump = null;
|
|
30677
|
+
}
|
|
30678
|
+
this.detectedCodec = null;
|
|
30679
|
+
this.log("debug", "Stream stopped");
|
|
30680
|
+
}
|
|
30681
|
+
/**
|
|
30682
|
+
* Get current number of connected clients
|
|
30683
|
+
*/
|
|
30684
|
+
getClientCount() {
|
|
30685
|
+
return this.clients.size;
|
|
30686
|
+
}
|
|
30687
|
+
/**
|
|
30688
|
+
* Get server status
|
|
30689
|
+
*/
|
|
30690
|
+
getStatus() {
|
|
30691
|
+
return {
|
|
30692
|
+
running: this.started,
|
|
30693
|
+
clients: this.clients.size,
|
|
30694
|
+
codec: this.detectedCodec,
|
|
30695
|
+
frames: this.transformer?.getFrameCount() ?? 0
|
|
30696
|
+
};
|
|
30697
|
+
}
|
|
30698
|
+
log(level, message) {
|
|
30699
|
+
this.options.logger?.(level, `[BaichuanMjpegServer] ${message}`);
|
|
30700
|
+
}
|
|
30701
|
+
};
|
|
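Note on broadcastFrame above: each JPEG produced by the MjpegTransformer is written to every connected HTTP client via formatMjpegFrame(frame.data, client.boundary). The exact bytes formatMjpegFrame emits are not part of this diff; the sketch below only illustrates the conventional multipart/x-mixed-replace framing such a helper typically produces, so treat the header set and ordering as assumptions.

// Minimal sketch (assumption): conventional framing for one MJPEG part.
// The real formatMjpegFrame / createMjpegBoundary implementation is not shown in this diff.
function formatMjpegPartSketch(jpeg: Buffer, boundary: string): Buffer {
  const head =
    `--${boundary}\r\n` +
    `Content-Type: image/jpeg\r\n` +
    `Content-Length: ${jpeg.length}\r\n\r\n`;
  return Buffer.concat([Buffer.from(head, "ascii"), jpeg, Buffer.from("\r\n")]);
}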
30702
|
+
|
|
30703
|
+
// src/baichuan/stream/BaichuanWebRTCServer.ts
|
|
30704
|
+
var import_node_events9 = require("events");
|
|
30705
|
+
init_BcMediaAnnexBDecoder();
|
|
30706
|
+
init_H264Converter();
|
|
30707
|
+
function parseAnnexBNalUnits(annexB) {
|
|
30708
|
+
const nalUnits = [];
|
|
30709
|
+
let offset = 0;
|
|
30710
|
+
while (offset < annexB.length) {
|
|
30711
|
+
let startCodeLen = 0;
|
|
30712
|
+
if (offset + 4 <= annexB.length && annexB[offset] === 0 && annexB[offset + 1] === 0 && annexB[offset + 2] === 0 && annexB[offset + 3] === 1) {
|
|
30713
|
+
startCodeLen = 4;
|
|
30714
|
+
} else if (offset + 3 <= annexB.length && annexB[offset] === 0 && annexB[offset + 1] === 0 && annexB[offset + 2] === 1) {
|
|
30715
|
+
startCodeLen = 3;
|
|
30716
|
+
} else {
|
|
30717
|
+
offset++;
|
|
30718
|
+
continue;
|
|
30719
|
+
}
|
|
30720
|
+
const naluStart = offset + startCodeLen;
|
|
30721
|
+
let naluEnd = annexB.length;
|
|
30722
|
+
for (let i = naluStart; i < annexB.length - 2; i++) {
|
|
30723
|
+
if (annexB[i] === 0 && annexB[i + 1] === 0 && (annexB[i + 2] === 1 || i + 3 < annexB.length && annexB[i + 2] === 0 && annexB[i + 3] === 1)) {
|
|
30724
|
+
naluEnd = i;
|
|
30725
|
+
break;
|
|
30726
|
+
}
|
|
30727
|
+
}
|
|
30728
|
+
if (naluEnd > naluStart) {
|
|
30729
|
+
nalUnits.push(annexB.subarray(naluStart, naluEnd));
|
|
30730
|
+
}
|
|
30731
|
+
offset = naluEnd;
|
|
30732
|
+
}
|
|
30733
|
+
return nalUnits;
|
|
30734
|
+
}
|
|
30735
|
+
function getH264NalType(nalUnit) {
|
|
30736
|
+
return nalUnit[0] & 31;
|
|
30737
|
+
}
|
|
30738
|
+
function getH265NalType2(nalUnit) {
|
|
30739
|
+
return nalUnit[0] >> 1 & 63;
|
|
30740
|
+
}
|
|
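The two helpers above read the NAL unit type straight from the first byte of the NAL header: H.264 keeps the type in the low 5 bits, H.265 in bits 1-6. A minimal worked example, with header bytes chosen purely for illustration:

// H.264: header byte 0x65 -> 0x65 & 31 = 5  (IDR slice)
// H.265: header byte 0x40 -> (0x40 >> 1) & 63 = 32 (VPS)
const h264IdrType = 0x65 & 31;        // 5
const h265VpsType = (0x40 >> 1) & 63; // 32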
30741
|
+
var BaichuanWebRTCServer = class extends import_node_events9.EventEmitter {
|
|
30742
|
+
options;
|
|
30743
|
+
sessions = /* @__PURE__ */ new Map();
|
|
30744
|
+
sessionIdCounter = 0;
|
|
30745
|
+
weriftModule = null;
|
|
30746
|
+
constructor(options) {
|
|
30747
|
+
super();
|
|
30748
|
+
this.options = options;
|
|
30749
|
+
}
|
|
30750
|
+
/**
|
|
30751
|
+
* Initialize werift module (lazy load to avoid requiring it if not used)
|
|
30752
|
+
*/
|
|
30753
|
+
async loadWerift() {
|
|
30754
|
+
if (this.weriftModule) return this.weriftModule;
|
|
30755
|
+
try {
|
|
30756
|
+
this.weriftModule = await import("werift");
|
|
30757
|
+
return this.weriftModule;
|
|
30758
|
+
} catch (err) {
|
|
30759
|
+
throw new Error(
|
|
30760
|
+
`Failed to load werift module. Make sure it's installed: npm install werift
|
|
30761
|
+
Error: ${err}`
|
|
30762
|
+
);
|
|
30763
|
+
}
|
|
30764
|
+
}
|
|
30765
|
+
/**
|
|
30766
|
+
* Create a new WebRTC session
|
|
30767
|
+
* Returns a session ID and SDP offer to send to the browser
|
|
30768
|
+
*/
|
|
30769
|
+
async createSession() {
|
|
30770
|
+
const werift = await this.loadWerift();
|
|
30771
|
+
const { RTCPeerConnection, MediaStreamTrack, RTCRtpCodecParameters } = werift;
|
|
30772
|
+
const sessionId = `webrtc-${++this.sessionIdCounter}-${Date.now()}`;
|
|
30773
|
+
this.log("info", `Creating WebRTC session ${sessionId}`);
|
|
30774
|
+
const iceServers = [];
|
|
30775
|
+
const stunServers = this.options.stunServers ?? [
|
|
30776
|
+
"stun:stun.l.google.com:19302"
|
|
30777
|
+
];
|
|
30778
|
+
for (const urls of stunServers) {
|
|
30779
|
+
iceServers.push({ urls });
|
|
30780
|
+
}
|
|
30781
|
+
if (this.options.turnServers) {
|
|
30782
|
+
iceServers.push(...this.options.turnServers);
|
|
30783
|
+
}
|
|
30784
|
+
const peerConnection = new RTCPeerConnection({
|
|
30785
|
+
iceServers,
|
|
30786
|
+
codecs: {
|
|
30787
|
+
video: [
|
|
30788
|
+
new RTCRtpCodecParameters({
|
|
30789
|
+
mimeType: "video/H264",
|
|
30790
|
+
clockRate: 9e4,
|
|
30791
|
+
rtcpFeedback: [
|
|
30792
|
+
{ type: "nack" },
|
|
30793
|
+
{ type: "nack", parameter: "pli" },
|
|
30794
|
+
{ type: "goog-remb" }
|
|
30795
|
+
],
|
|
30796
|
+
parameters: "packetization-mode=1;profile-level-id=42e01f;level-asymmetry-allowed=1"
|
|
30797
|
+
})
|
|
30798
|
+
],
|
|
30799
|
+
audio: [
|
|
30800
|
+
new RTCRtpCodecParameters({
|
|
30801
|
+
mimeType: "audio/opus",
|
|
30802
|
+
clockRate: 48e3,
|
|
30803
|
+
channels: 2
|
|
30804
|
+
})
|
|
30805
|
+
]
|
|
30806
|
+
}
|
|
30807
|
+
});
|
|
30808
|
+
const session = {
|
|
30809
|
+
id: sessionId,
|
|
30810
|
+
peerConnection,
|
|
30811
|
+
videoTrack: null,
|
|
30812
|
+
audioTrack: null,
|
|
30813
|
+
videoDataChannel: null,
|
|
30814
|
+
nativeStream: null,
|
|
30815
|
+
intercom: null,
|
|
30816
|
+
dataChannel: null,
|
|
30817
|
+
videoCodec: null,
|
|
30818
|
+
createdAt: /* @__PURE__ */ new Date(),
|
|
30819
|
+
state: "connecting",
|
|
30820
|
+
stats: {
|
|
30821
|
+
videoFrames: 0,
|
|
30822
|
+
audioFrames: 0,
|
|
30823
|
+
bytesSent: 0,
|
|
30824
|
+
intercomBytesSent: 0
|
|
30825
|
+
}
|
|
30826
|
+
};
|
|
30827
|
+
this.sessions.set(sessionId, session);
|
|
30828
|
+
const videoSsrc = Math.random() * 4294967295 >>> 0;
|
|
30829
|
+
const videoTrack = new MediaStreamTrack({ kind: "video", ssrc: videoSsrc });
|
|
30830
|
+
const videoSender = peerConnection.addTrack(videoTrack);
|
|
30831
|
+
session.videoTrack = videoTrack;
|
|
30832
|
+
this.log(
|
|
30833
|
+
"info",
|
|
30834
|
+
`Video track created: ssrc=${videoTrack.ssrc}, sender params=${JSON.stringify(videoSender?.getParameters?.() ?? {})}`
|
|
30835
|
+
);
|
|
30836
|
+
const audioSsrc = Math.random() * 4294967295 >>> 0;
|
|
30837
|
+
const audioTrack = new MediaStreamTrack({ kind: "audio", ssrc: audioSsrc });
|
|
30838
|
+
peerConnection.addTrack(audioTrack);
|
|
30839
|
+
session.audioTrack = audioTrack;
|
|
30840
|
+
const videoDataChannel = peerConnection.createDataChannel("video", {
|
|
30841
|
+
ordered: true,
|
|
30842
|
+
maxRetransmits: 0
|
|
30843
|
+
// Unreliable for real-time video
|
|
30844
|
+
});
|
|
30845
|
+
session.videoDataChannel = videoDataChannel;
|
|
30846
|
+
videoDataChannel.onopen = () => {
|
|
30847
|
+
this.log("info", `Video data channel opened for session ${sessionId}`);
|
|
30848
|
+
};
|
|
30849
|
+
if (this.options.enableIntercom) {
|
|
30850
|
+
const dataChannel = peerConnection.createDataChannel("intercom", {
|
|
30851
|
+
ordered: true
|
|
30852
|
+
});
|
|
30853
|
+
session.dataChannel = dataChannel;
|
|
30854
|
+
dataChannel.onopen = () => {
|
|
30855
|
+
this.log(
|
|
30856
|
+
"info",
|
|
30857
|
+
`Intercom data channel opened for session ${sessionId}`
|
|
30858
|
+
);
|
|
30859
|
+
this.emit("intercom-started", { sessionId });
|
|
30860
|
+
};
|
|
30861
|
+
dataChannel.onmessage = async (event) => {
|
|
30862
|
+
if (session.intercom && event.data instanceof ArrayBuffer) {
|
|
30863
|
+
try {
|
|
30864
|
+
const audioData = Buffer.from(event.data);
|
|
30865
|
+
await session.intercom.sendAudio(audioData);
|
|
30866
|
+
session.stats.intercomBytesSent += audioData.length;
|
|
30867
|
+
} catch (err) {
|
|
30868
|
+
this.log("error", `Failed to send intercom audio: ${err}`);
|
|
30869
|
+
}
|
|
30870
|
+
}
|
|
30871
|
+
};
|
|
30872
|
+
dataChannel.onclose = () => {
|
|
30873
|
+
this.log(
|
|
30874
|
+
"info",
|
|
30875
|
+
`Intercom data channel closed for session ${sessionId}`
|
|
30876
|
+
);
|
|
30877
|
+
this.emit("intercom-stopped", { sessionId });
|
|
30878
|
+
};
|
|
30879
|
+
}
|
|
30880
|
+
peerConnection.iceConnectionStateChange.subscribe((state) => {
|
|
30881
|
+
this.log("info", `ICE connection state for ${sessionId}: ${state}`);
|
|
30882
|
+
if (state === "connected") {
|
|
30883
|
+
session.state = "connected";
|
|
30884
|
+
this.emit("session-connected", { sessionId });
|
|
30885
|
+
} else if (state === "failed") {
|
|
30886
|
+
session.state = state;
|
|
30887
|
+
this.closeSession(sessionId).catch((err) => {
|
|
30888
|
+
this.log("error", `Error closing session on ICE ${state}: ${err}`);
|
|
30889
|
+
});
|
|
30890
|
+
}
|
|
30891
|
+
});
|
|
30892
|
+
peerConnection.connectionStateChange.subscribe((state) => {
|
|
30893
|
+
this.log("debug", `Connection state for ${sessionId}: ${state}`);
|
|
30894
|
+
if (state === "closed" || state === "failed") {
|
|
30895
|
+
this.closeSession(sessionId).catch((err) => {
|
|
30896
|
+
this.log(
|
|
30897
|
+
"error",
|
|
30898
|
+
`Error closing session on connection ${state}: ${err}`
|
|
30899
|
+
);
|
|
30900
|
+
});
|
|
30901
|
+
}
|
|
30902
|
+
});
|
|
30903
|
+
const offer = await peerConnection.createOffer();
|
|
30904
|
+
await peerConnection.setLocalDescription(offer);
|
|
30905
|
+
await this.waitForIceGathering(peerConnection, 3e3);
|
|
30906
|
+
const localDescription = peerConnection.localDescription;
|
|
30907
|
+
if (!localDescription) {
|
|
30908
|
+
throw new Error("Failed to create local description");
|
|
30909
|
+
}
|
|
30910
|
+
this.emit("session-created", { sessionId });
|
|
30911
|
+
return {
|
|
30912
|
+
sessionId,
|
|
30913
|
+
offer: {
|
|
30914
|
+
sdp: localDescription.sdp,
|
|
30915
|
+
type: "offer"
|
|
30916
|
+
}
|
|
30917
|
+
};
|
|
30918
|
+
}
|
|
30919
|
+
/**
|
|
30920
|
+
* Handle WebRTC answer from browser and start streaming
|
|
30921
|
+
*/
|
|
30922
|
+
async handleAnswer(sessionId, answer) {
|
|
30923
|
+
const session = this.sessions.get(sessionId);
|
|
30924
|
+
if (!session) {
|
|
30925
|
+
throw new Error(`Session ${sessionId} not found`);
|
|
30926
|
+
}
|
|
30927
|
+
const werift = await this.loadWerift();
|
|
30928
|
+
const { RTCSessionDescription } = werift;
|
|
30929
|
+
this.log("info", `Handling WebRTC answer for session ${sessionId}`);
|
|
30930
|
+
await session.peerConnection.setRemoteDescription(
|
|
30931
|
+
new RTCSessionDescription(answer.sdp, answer.type)
|
|
30932
|
+
);
|
|
30933
|
+
await this.startNativeStream(session);
|
|
30934
|
+
if (this.options.enableIntercom && session.dataChannel) {
|
|
30935
|
+
await this.startIntercom(session);
|
|
30936
|
+
}
|
|
30937
|
+
}
|
|
30938
|
+
/**
|
|
30939
|
+
* Add ICE candidate from browser
|
|
30940
|
+
*/
|
|
30941
|
+
async addIceCandidate(sessionId, candidate) {
|
|
30942
|
+
const session = this.sessions.get(sessionId);
|
|
30943
|
+
if (!session) {
|
|
30944
|
+
throw new Error(`Session ${sessionId} not found`);
|
|
30945
|
+
}
|
|
30946
|
+
const werift = await this.loadWerift();
|
|
30947
|
+
const { RTCIceCandidate } = werift;
|
|
30948
|
+
await session.peerConnection.addIceCandidate(
|
|
30949
|
+
new RTCIceCandidate(candidate.candidate, candidate.sdpMid ?? "0")
|
|
30950
|
+
);
|
|
30951
|
+
}
|
|
30952
|
+
/**
|
|
30953
|
+
* Close a WebRTC session
|
|
30954
|
+
*/
|
|
30955
|
+
async closeSession(sessionId) {
|
|
30956
|
+
const session = this.sessions.get(sessionId);
|
|
30957
|
+
if (!session) return;
|
|
30958
|
+
this.log("info", `Closing WebRTC session ${sessionId}`);
|
|
30959
|
+
session.state = "disconnected";
|
|
30960
|
+
if (session.intercom) {
|
|
30961
|
+
try {
|
|
30962
|
+
await session.intercom.stop();
|
|
30963
|
+
} catch (err) {
|
|
30964
|
+
this.log("debug", `Error stopping intercom: ${err}`);
|
|
30965
|
+
}
|
|
30966
|
+
session.intercom = null;
|
|
30967
|
+
}
|
|
30968
|
+
if (session.dataChannel) {
|
|
30969
|
+
try {
|
|
30970
|
+
session.dataChannel.close();
|
|
30971
|
+
} catch (err) {
|
|
30972
|
+
this.log("debug", `Error closing data channel: ${err}`);
|
|
30973
|
+
}
|
|
30974
|
+
session.dataChannel = null;
|
|
30975
|
+
}
|
|
30976
|
+
if (session.cleanup) {
|
|
30977
|
+
session.cleanup();
|
|
30978
|
+
}
|
|
30979
|
+
try {
|
|
30980
|
+
await session.peerConnection.close();
|
|
30981
|
+
} catch (err) {
|
|
30982
|
+
this.log("debug", `Error closing peer connection: ${err}`);
|
|
30983
|
+
}
|
|
30984
|
+
this.sessions.delete(sessionId);
|
|
30985
|
+
this.emit("session-closed", { sessionId });
|
|
30986
|
+
this.log(
|
|
30987
|
+
"info",
|
|
30988
|
+
`WebRTC session ${sessionId} closed (active sessions: ${this.sessions.size})`
|
|
30989
|
+
);
|
|
30990
|
+
}
|
|
30991
|
+
/**
|
|
30992
|
+
* Get information about all active sessions
|
|
30993
|
+
*/
|
|
30994
|
+
getSessions() {
|
|
30995
|
+
return Array.from(this.sessions.values()).map((s) => ({
|
|
30996
|
+
id: s.id,
|
|
30997
|
+
state: s.state,
|
|
30998
|
+
createdAt: s.createdAt,
|
|
30999
|
+
stats: { ...s.stats }
|
|
31000
|
+
}));
|
|
31001
|
+
}
|
|
31002
|
+
/**
|
|
31003
|
+
* Get information about a specific session
|
|
31004
|
+
*/
|
|
31005
|
+
getSession(sessionId) {
|
|
31006
|
+
const session = this.sessions.get(sessionId);
|
|
31007
|
+
if (!session) return null;
|
|
31008
|
+
return {
|
|
31009
|
+
id: session.id,
|
|
31010
|
+
state: session.state,
|
|
31011
|
+
createdAt: session.createdAt,
|
|
31012
|
+
stats: { ...session.stats }
|
|
31013
|
+
};
|
|
31014
|
+
}
|
|
31015
|
+
/**
|
|
31016
|
+
* Close all sessions and stop the server
|
|
31017
|
+
*/
|
|
31018
|
+
async stop() {
|
|
31019
|
+
this.log("info", "Stopping WebRTC server");
|
|
31020
|
+
const sessionIds = Array.from(this.sessions.keys());
|
|
31021
|
+
await Promise.all(sessionIds.map((id) => this.closeSession(id)));
|
|
31022
|
+
this.log("info", "WebRTC server stopped");
|
|
31023
|
+
}
|
|
31024
|
+
/**
|
|
31025
|
+
* Get the number of active sessions
|
|
31026
|
+
*/
|
|
31027
|
+
get sessionCount() {
|
|
31028
|
+
return this.sessions.size;
|
|
31029
|
+
}
|
|
31030
|
+
// ============================================================================
|
|
31031
|
+
// Private Methods
|
|
31032
|
+
// ============================================================================
|
|
31033
|
+
/**
|
|
31034
|
+
* Wait for ICE gathering to complete
|
|
31035
|
+
*/
|
|
31036
|
+
async waitForIceGathering(pc, timeoutMs) {
|
|
31037
|
+
if (pc.iceGatheringState === "complete") return;
|
|
31038
|
+
return new Promise((resolve) => {
|
|
31039
|
+
const timeout = setTimeout(() => {
|
|
31040
|
+
resolve();
|
|
31041
|
+
}, timeoutMs);
|
|
31042
|
+
pc.iceGatheringStateChange.subscribe((state) => {
|
|
31043
|
+
if (state === "complete") {
|
|
31044
|
+
clearTimeout(timeout);
|
|
31045
|
+
resolve();
|
|
31046
|
+
}
|
|
31047
|
+
});
|
|
31048
|
+
});
|
|
31049
|
+
}
|
|
31050
|
+
/**
|
|
31051
|
+
* Start native Baichuan stream and pump frames to WebRTC
|
|
31052
|
+
*/
|
|
31053
|
+
async startNativeStream(session) {
|
|
31054
|
+
this.log(
|
|
31055
|
+
"info",
|
|
31056
|
+
`Starting native stream for session ${session.id} (channel=${this.options.channel}, profile=${this.options.profile})`
|
|
31057
|
+
);
|
|
31058
|
+
session.nativeStream = createNativeStream(
|
|
31059
|
+
this.options.api,
|
|
31060
|
+
this.options.channel,
|
|
31061
|
+
this.options.profile,
|
|
31062
|
+
this.options.variant !== void 0 ? { variant: this.options.variant } : void 0
|
|
31063
|
+
);
|
|
31064
|
+
this.pumpFramesToWebRTC(session).catch((err) => {
|
|
31065
|
+
this.log("error", `Frame pump error for session ${session.id}: ${err}`);
|
|
31066
|
+
this.closeSession(session.id).catch(() => {
|
|
31067
|
+
});
|
|
31068
|
+
});
|
|
31069
|
+
}
|
|
31070
|
+
/**
|
|
31071
|
+
* Pump frames from native stream to WebRTC tracks
|
|
31072
|
+
* H.264 → RTP media track (standard WebRTC)
|
|
31073
|
+
* H.265 → DataChannel with raw Annex-B frames (decoded by WebCodecs in browser)
|
|
31074
|
+
*/
|
|
31075
|
+
async pumpFramesToWebRTC(session) {
|
|
31076
|
+
if (!session.nativeStream) {
|
|
31077
|
+
this.log("warn", `No native stream for session ${session.id}`);
|
|
31078
|
+
return;
|
|
31079
|
+
}
|
|
31080
|
+
this.log("info", `Starting frame pump for session ${session.id}`);
|
|
31081
|
+
const werift = await this.loadWerift();
|
|
31082
|
+
const { RtpPacket, RtpHeader } = werift;
|
|
31083
|
+
let sequenceNumber = Math.floor(Math.random() * 65535);
|
|
31084
|
+
let timestamp = Math.floor(Math.random() * 4294967295);
|
|
31085
|
+
const videoClockRate = 9e4;
|
|
31086
|
+
let lastTimeMicros = 0;
|
|
31087
|
+
let lastLogTime = Date.now();
|
|
31088
|
+
let packetsSentSinceLastLog = 0;
|
|
31089
|
+
let frameNumber = 0;
|
|
31090
|
+
try {
|
|
31091
|
+
this.log("info", `Entering frame loop for session ${session.id}`);
|
|
31092
|
+
for await (const frame of session.nativeStream) {
|
|
31093
|
+
if (session.state === "disconnected" || session.state === "failed") {
|
|
31094
|
+
this.log(
|
|
31095
|
+
"debug",
|
|
31096
|
+
`Session ${session.id} state is ${session.state}, breaking frame loop`
|
|
31097
|
+
);
|
|
31098
|
+
break;
|
|
31099
|
+
}
|
|
31100
|
+
if (frame.audio) {
|
|
31101
|
+
session.stats.audioFrames++;
|
|
31102
|
+
} else {
|
|
31103
|
+
if (frame.data) {
|
|
31104
|
+
if (!session.videoCodec && frame.videoType) {
|
|
31105
|
+
const detected = detectVideoCodecFromNal(frame.data);
|
|
31106
|
+
session.videoCodec = detected ?? frame.videoType;
|
|
31107
|
+
this.log("info", `Detected video codec: ${session.videoCodec}`);
|
|
31108
|
+
if (session.videoDataChannel && session.videoDataChannel.readyState === "open") {
|
|
31109
|
+
const codecInfo = JSON.stringify({
|
|
31110
|
+
type: "codec",
|
|
31111
|
+
codec: session.videoCodec,
|
|
31112
|
+
width: frame.width || 0,
|
|
31113
|
+
height: frame.height || 0
|
|
31114
|
+
});
|
|
31115
|
+
session.videoDataChannel.send(codecInfo);
|
|
31116
|
+
}
|
|
31117
|
+
}
|
|
31118
|
+
if (frame.microseconds && lastTimeMicros > 0) {
|
|
31119
|
+
const deltaMicros = frame.microseconds - lastTimeMicros;
|
|
31120
|
+
const deltaTicks = Math.floor(
|
|
31121
|
+
deltaMicros / 1e6 * videoClockRate
|
|
31122
|
+
);
|
|
31123
|
+
timestamp = timestamp + deltaTicks >>> 0;
|
|
31124
|
+
} else {
|
|
31125
|
+
timestamp = timestamp + 3e3 >>> 0;
|
|
31126
|
+
}
|
|
31127
|
+
lastTimeMicros = frame.microseconds || 0;
|
|
31128
|
+
if (session.videoCodec === "H264") {
|
|
31129
|
+
const connState = session.peerConnection.connectionState;
|
|
31130
|
+
const iceState = session.peerConnection.iceConnectionState;
|
|
31131
|
+
const isConnected = connState === "connected" || iceState === "connected" || iceState === "completed";
|
|
31132
|
+
if (!isConnected) {
|
|
31133
|
+
if (frameNumber < 10) {
|
|
31134
|
+
this.log(
|
|
31135
|
+
"debug",
|
|
31136
|
+
`Waiting for connection, dropping H.264 frame ${frameNumber}`
|
|
31137
|
+
);
|
|
31138
|
+
}
|
|
31139
|
+
frameNumber++;
|
|
31140
|
+
continue;
|
|
31141
|
+
}
|
|
31142
|
+
const packetsSent = await this.sendH264Frame(
|
|
31143
|
+
session,
|
|
31144
|
+
werift,
|
|
31145
|
+
frame.data,
|
|
31146
|
+
sequenceNumber,
|
|
31147
|
+
timestamp
|
|
31148
|
+
);
|
|
31149
|
+
sequenceNumber = sequenceNumber + packetsSent & 65535;
|
|
31150
|
+
packetsSentSinceLastLog += packetsSent;
|
|
31151
|
+
frameNumber++;
|
|
31152
|
+
session.stats.videoFrames++;
|
|
31153
|
+
session.stats.bytesSent += frame.data.length;
|
|
31154
|
+
} else if (session.videoCodec === "H265") {
|
|
31155
|
+
const sent = await this.sendVideoFrameViaDataChannel(
|
|
31156
|
+
session,
|
|
31157
|
+
frame,
|
|
31158
|
+
frameNumber,
|
|
31159
|
+
"H265"
|
|
31160
|
+
);
|
|
31161
|
+
if (sent) {
|
|
31162
|
+
packetsSentSinceLastLog++;
|
|
31163
|
+
frameNumber++;
|
|
31164
|
+
session.stats.videoFrames++;
|
|
31165
|
+
session.stats.bytesSent += frame.data.length;
|
|
31166
|
+
}
|
|
31167
|
+
}
|
|
31168
|
+
const now = Date.now();
|
|
31169
|
+
if (now - lastLogTime >= 5e3) {
|
|
31170
|
+
this.log(
|
|
31171
|
+
"debug",
|
|
31172
|
+
`WebRTC session ${session.id} [${session.videoCodec}]: sent ${session.stats.videoFrames} frames, ${packetsSentSinceLastLog} packets, ${Math.round(session.stats.bytesSent / 1024)} KB`
|
|
31173
|
+
);
|
|
31174
|
+
lastLogTime = now;
|
|
31175
|
+
packetsSentSinceLastLog = 0;
|
|
31176
|
+
}
|
|
31177
|
+
}
|
|
31178
|
+
}
|
|
31179
|
+
}
|
|
31180
|
+
} catch (err) {
|
|
31181
|
+
this.log(
|
|
31182
|
+
"error",
|
|
31183
|
+
`Error pumping frames for session ${session.id}: ${err}`
|
|
31184
|
+
);
|
|
31185
|
+
}
|
|
31186
|
+
this.log("info", `Native stream ended for session ${session.id}`);
|
|
31187
|
+
}
|
|
31188
|
+
/**
|
|
31189
|
+
* Send H.264 frame via RTP media track
|
|
31190
|
+
* Returns the number of RTP packets sent
|
|
31191
|
+
*/
|
|
31192
|
+
async sendH264Frame(session, werift, frameData, sequenceNumber, timestamp) {
|
|
31193
|
+
const annexB = convertToAnnexB(frameData);
|
|
31194
|
+
const nalUnits = splitAnnexBToNalPayloads(annexB);
|
|
31195
|
+
let hasSps = false;
|
|
31196
|
+
let hasPps = false;
|
|
31197
|
+
let hasIdr = false;
|
|
31198
|
+
const nalTypes = [];
|
|
31199
|
+
for (const nal of nalUnits) {
|
|
31200
|
+
const t = (nal[0] ?? 0) & 31;
|
|
31201
|
+
nalTypes.push(t);
|
|
31202
|
+
if (t === 7) {
|
|
31203
|
+
hasSps = true;
|
|
31204
|
+
session.lastH264Sps = nal;
|
|
31205
|
+
}
|
|
31206
|
+
if (t === 8) {
|
|
31207
|
+
hasPps = true;
|
|
31208
|
+
session.lastH264Pps = nal;
|
|
31209
|
+
}
|
|
31210
|
+
if (t === 5) hasIdr = true;
|
|
31211
|
+
}
|
|
31212
|
+
if (session.stats.videoFrames < 10) {
|
|
31213
|
+
this.log(
|
|
31214
|
+
"debug",
|
|
31215
|
+
`H.264 frame NAL types: [${nalTypes.join(",")}] (5=IDR, 7=SPS, 8=PPS, 1=P-slice)`
|
|
31216
|
+
);
|
|
31217
|
+
}
|
|
31218
|
+
const isKeyframe = hasIdr;
|
|
31219
|
+
let nalList = nalUnits;
|
|
31220
|
+
if (hasIdr && (!hasSps || !hasPps)) {
|
|
31221
|
+
const prepend = [];
|
|
31222
|
+
if (!hasSps && session.lastH264Sps) {
|
|
31223
|
+
prepend.push(session.lastH264Sps);
|
|
31224
|
+
this.log("debug", `Prepending cached SPS to IDR frame`);
|
|
31225
|
+
}
|
|
31226
|
+
if (!hasPps && session.lastH264Pps) {
|
|
31227
|
+
prepend.push(session.lastH264Pps);
|
|
31228
|
+
this.log("debug", `Prepending cached PPS to IDR frame`);
|
|
31229
|
+
}
|
|
31230
|
+
if (prepend.length > 0) {
|
|
31231
|
+
nalList = [...prepend, ...nalUnits];
|
|
31232
|
+
} else if (!session.lastH264Sps || !session.lastH264Pps) {
|
|
31233
|
+
this.log(
|
|
31234
|
+
"warn",
|
|
31235
|
+
`IDR frame without SPS/PPS and no cached parameters - frame may not decode`
|
|
31236
|
+
);
|
|
31237
|
+
}
|
|
31238
|
+
}
|
|
31239
|
+
if (!session.hasReceivedKeyframe) {
|
|
31240
|
+
if (hasIdr && session.lastH264Sps && session.lastH264Pps) {
|
|
31241
|
+
session.hasReceivedKeyframe = true;
|
|
31242
|
+
this.log(
|
|
31243
|
+
"info",
|
|
31244
|
+
`First H.264 keyframe received with SPS/PPS - starting video stream`
|
|
31245
|
+
);
|
|
31246
|
+
} else if (hasIdr) {
|
|
31247
|
+
this.log(
|
|
31248
|
+
"debug",
|
|
31249
|
+
`IDR received but waiting for SPS/PPS before starting stream`
|
|
31250
|
+
);
|
|
31251
|
+
return 0;
|
|
31252
|
+
} else {
|
|
31253
|
+
if (session.stats.videoFrames < 5) {
|
|
31254
|
+
this.log(
|
|
31255
|
+
"debug",
|
|
31256
|
+
`Dropping P-frame ${session.stats.videoFrames} while waiting for keyframe`
|
|
31257
|
+
);
|
|
31258
|
+
}
|
|
31259
|
+
return 0;
|
|
31260
|
+
}
|
|
31261
|
+
}
|
|
31262
|
+
let totalPacketsSent = 0;
|
|
31263
|
+
let currentSeqNum = sequenceNumber;
|
|
31264
|
+
const ssrc = session.videoTrack.ssrc || 0;
|
|
31265
|
+
for (let i = 0; i < nalList.length; i++) {
|
|
31266
|
+
const nalUnit = nalList[i];
|
|
31267
|
+
if (nalUnit.length === 0) continue;
|
|
31268
|
+
const isLastNalu = i === nalList.length - 1;
|
|
31269
|
+
const nalType = getH264NalType(nalUnit);
|
|
31270
|
+
if (nalType === 9) continue;
|
|
31271
|
+
const rtpPackets = this.createH264RtpPackets(
|
|
31272
|
+
werift,
|
|
31273
|
+
nalUnit,
|
|
31274
|
+
currentSeqNum,
|
|
31275
|
+
timestamp,
|
|
31276
|
+
isLastNalu,
|
|
31277
|
+
ssrc
|
|
31278
|
+
);
|
|
31279
|
+
if (session.stats.videoFrames < 3) {
|
|
31280
|
+
this.log(
|
|
31281
|
+
"info",
|
|
31282
|
+
`NAL ${i}: type=${nalType}, size=${nalUnit.length}, rtpPackets=${rtpPackets.length}`
|
|
31283
|
+
);
|
|
31284
|
+
}
|
|
31285
|
+
for (const rtpPacket of rtpPackets) {
|
|
31286
|
+
try {
|
|
31287
|
+
session.videoTrack.writeRtp(rtpPacket);
|
|
31288
|
+
currentSeqNum = currentSeqNum + 1 & 65535;
|
|
31289
|
+
totalPacketsSent++;
|
|
31290
|
+
} catch (err) {
|
|
31291
|
+
this.log(
|
|
31292
|
+
"error",
|
|
31293
|
+
`Error writing RTP packet for session ${session.id}: ${err}`
|
|
31294
|
+
);
|
|
31295
|
+
}
|
|
31296
|
+
}
|
|
31297
|
+
}
|
|
31298
|
+
if (session.stats.videoFrames < 3) {
|
|
31299
|
+
this.log(
|
|
31300
|
+
"info",
|
|
31301
|
+
`H.264 frame sent: nalCount=${nalList.length} packets=${totalPacketsSent} seq=${sequenceNumber}->${currentSeqNum} ts=${timestamp} keyframe=${isKeyframe}`
|
|
31302
|
+
);
|
|
31303
|
+
}
|
|
31304
|
+
return totalPacketsSent;
|
|
31305
|
+
}
|
|
31306
|
+
/**
|
|
31307
|
+
* Send video frame via DataChannel (works for both H.264 and H.265)
|
|
31308
|
+
* Format: 12-byte header + Annex-B data
|
|
31309
|
+
* Header: [frameNum (4)] [timestamp (4)] [flags (1)] [keyframe (1)] [reserved (2)]
|
|
31310
|
+
* Flags: 0x01 = H.265, 0x02 = H.264
|
|
31311
|
+
*/
|
|
31312
|
+
async sendVideoFrameViaDataChannel(session, frame, frameNumber, codec) {
|
|
31313
|
+
if (!session.videoDataChannel) {
|
|
31314
|
+
if (frameNumber === 0) {
|
|
31315
|
+
this.log("warn", `No video data channel for session ${session.id}`);
|
|
31316
|
+
}
|
|
31317
|
+
return false;
|
|
31318
|
+
}
|
|
31319
|
+
if (session.videoDataChannel.readyState !== "open") {
|
|
31320
|
+
if (frameNumber === 0) {
|
|
31321
|
+
this.log(
|
|
31322
|
+
"warn",
|
|
31323
|
+
`Video data channel not open for session ${session.id}: ${session.videoDataChannel.readyState}`
|
|
31324
|
+
);
|
|
31325
|
+
}
|
|
31326
|
+
return false;
|
|
31327
|
+
}
|
|
31328
|
+
const nalUnits = parseAnnexBNalUnits(frame.data);
|
|
31329
|
+
let isKeyframe = frame.isKeyframe === true;
|
|
31330
|
+
let hasIdr = false;
|
|
31331
|
+
let hasSps = false;
|
|
31332
|
+
let hasPps = false;
|
|
31333
|
+
let hasVps = false;
|
|
31334
|
+
const nalTypes = [];
|
|
31335
|
+
for (const nalUnit of nalUnits) {
|
|
31336
|
+
if (nalUnit.length === 0) continue;
|
|
31337
|
+
if (codec === "H265") {
|
|
31338
|
+
const nalType = getH265NalType2(nalUnit);
|
|
31339
|
+
nalTypes.push(nalType);
|
|
31340
|
+
if (nalType === 32) {
|
|
31341
|
+
hasVps = true;
|
|
31342
|
+
session.lastH265Vps = nalUnit;
|
|
31343
|
+
}
|
|
31344
|
+
if (nalType === 33) {
|
|
31345
|
+
hasSps = true;
|
|
31346
|
+
session.lastH265Sps = nalUnit;
|
|
31347
|
+
}
|
|
31348
|
+
if (nalType === 34) {
|
|
31349
|
+
hasPps = true;
|
|
31350
|
+
session.lastH265Pps = nalUnit;
|
|
31351
|
+
}
|
|
31352
|
+
if (nalType === 19 || nalType === 20) {
|
|
31353
|
+
hasIdr = true;
|
|
31354
|
+
isKeyframe = true;
|
|
31355
|
+
}
|
|
31356
|
+
} else {
|
|
31357
|
+
const nalType = getH264NalType(nalUnit);
|
|
31358
|
+
nalTypes.push(nalType);
|
|
31359
|
+
if (nalType === 7) {
|
|
31360
|
+
hasSps = true;
|
|
31361
|
+
session.lastH264Sps = nalUnit;
|
|
31362
|
+
}
|
|
31363
|
+
if (nalType === 8) {
|
|
31364
|
+
hasPps = true;
|
|
31365
|
+
session.lastH264Pps = nalUnit;
|
|
31366
|
+
}
|
|
31367
|
+
if (nalType === 5) {
|
|
31368
|
+
hasIdr = true;
|
|
31369
|
+
isKeyframe = true;
|
|
31370
|
+
}
|
|
31371
|
+
}
|
|
31372
|
+
}
|
|
31373
|
+
if (frameNumber < 5) {
|
|
31374
|
+
this.log(
|
|
31375
|
+
"debug",
|
|
31376
|
+
`${codec} frame ${frameNumber} NAL types: [${nalTypes.join(",")}] hasIdr=${hasIdr} hasSps=${hasSps} hasPps=${hasPps}`
|
|
31377
|
+
);
|
|
31378
|
+
}
|
|
31379
|
+
if (!session.hasReceivedKeyframe) {
|
|
31380
|
+
if (codec === "H264") {
|
|
31381
|
+
if (hasIdr && session.lastH264Sps && session.lastH264Pps) {
|
|
31382
|
+
session.hasReceivedKeyframe = true;
|
|
31383
|
+
this.log(
|
|
31384
|
+
"info",
|
|
31385
|
+
`First H.264 keyframe received with SPS/PPS - starting video stream`
|
|
31386
|
+
);
|
|
31387
|
+
} else if (hasSps || hasPps) {
|
|
31388
|
+
this.log("debug", `Received H.264 parameter sets, waiting for IDR`);
|
|
31389
|
+
return false;
|
|
31390
|
+
} else if (hasIdr) {
|
|
31391
|
+
this.log("debug", `IDR received but waiting for SPS/PPS`);
|
|
31392
|
+
return false;
|
|
31393
|
+
} else {
|
|
31394
|
+
if (frameNumber < 10) {
|
|
31395
|
+
this.log(
|
|
31396
|
+
"debug",
|
|
31397
|
+
`Dropping H.264 P-frame ${frameNumber} while waiting for keyframe`
|
|
31398
|
+
);
|
|
31399
|
+
}
|
|
31400
|
+
return false;
|
|
31401
|
+
}
|
|
31402
|
+
} else {
|
|
31403
|
+
if (hasIdr && session.lastH265Vps && session.lastH265Sps && session.lastH265Pps) {
|
|
31404
|
+
session.hasReceivedKeyframe = true;
|
|
31405
|
+
this.log(
|
|
31406
|
+
"info",
|
|
31407
|
+
`First H.265 keyframe received with VPS/SPS/PPS - starting video stream`
|
|
31408
|
+
);
|
|
31409
|
+
} else if (hasVps || hasSps || hasPps) {
|
|
31410
|
+
this.log("debug", `Received H.265 parameter sets, waiting for IDR`);
|
|
31411
|
+
return false;
|
|
31412
|
+
} else if (hasIdr) {
|
|
31413
|
+
this.log("debug", `H.265 IDR received but waiting for VPS/SPS/PPS`);
|
|
31414
|
+
return false;
|
|
31415
|
+
} else {
|
|
31416
|
+
if (frameNumber < 10) {
|
|
31417
|
+
this.log(
|
|
31418
|
+
"debug",
|
|
31419
|
+
`Dropping H.265 P-frame ${frameNumber} while waiting for keyframe`
|
|
31420
|
+
);
|
|
31421
|
+
}
|
|
31422
|
+
return false;
|
|
31423
|
+
}
|
|
31424
|
+
}
|
|
31425
|
+
}
|
|
31426
|
+
let frameData = frame.data;
|
|
31427
|
+
if (hasIdr) {
|
|
31428
|
+
if (codec === "H264" && (!hasSps || !hasPps)) {
|
|
31429
|
+
const parts = [];
|
|
31430
|
+
if (!hasSps && session.lastH264Sps) {
|
|
31431
|
+
parts.push(Buffer.from([0, 0, 0, 1]));
|
|
31432
|
+
parts.push(session.lastH264Sps);
|
|
31433
|
+
}
|
|
31434
|
+
if (!hasPps && session.lastH264Pps) {
|
|
31435
|
+
parts.push(Buffer.from([0, 0, 0, 1]));
|
|
31436
|
+
parts.push(session.lastH264Pps);
|
|
31437
|
+
}
|
|
31438
|
+
if (parts.length > 0) {
|
|
31439
|
+
frameData = Buffer.concat([...parts, frame.data]);
|
|
31440
|
+
this.log("debug", `Prepended cached SPS/PPS to H.264 IDR frame`);
|
|
31441
|
+
}
|
|
31442
|
+
} else if (codec === "H265" && (!hasVps || !hasSps || !hasPps)) {
|
|
31443
|
+
const parts = [];
|
|
31444
|
+
if (!hasVps && session.lastH265Vps) {
|
|
31445
|
+
parts.push(Buffer.from([0, 0, 0, 1]));
|
|
31446
|
+
parts.push(session.lastH265Vps);
|
|
31447
|
+
}
|
|
31448
|
+
if (!hasSps && session.lastH265Sps) {
|
|
31449
|
+
parts.push(Buffer.from([0, 0, 0, 1]));
|
|
31450
|
+
parts.push(session.lastH265Sps);
|
|
31451
|
+
}
|
|
31452
|
+
if (!hasPps && session.lastH265Pps) {
|
|
31453
|
+
parts.push(Buffer.from([0, 0, 0, 1]));
|
|
31454
|
+
parts.push(session.lastH265Pps);
|
|
31455
|
+
}
|
|
31456
|
+
if (parts.length > 0) {
|
|
31457
|
+
frameData = Buffer.concat([...parts, frame.data]);
|
|
31458
|
+
this.log("debug", `Prepended cached VPS/SPS/PPS to H.265 IDR frame`);
|
|
31459
|
+
}
|
|
31460
|
+
}
|
|
31461
|
+
}
|
|
31462
|
+
const header = Buffer.alloc(12);
|
|
31463
|
+
header.writeUInt32BE(frameNumber, 0);
|
|
31464
|
+
header.writeUInt32BE(frame.microseconds ? frame.microseconds / 1e3 : 0, 4);
|
|
31465
|
+
header.writeUInt8(codec === "H265" ? 1 : 2, 8);
|
|
31466
|
+
header.writeUInt8(isKeyframe ? 1 : 0, 9);
|
|
31467
|
+
header.writeUInt16BE(0, 10);
|
|
31468
|
+
const packet = Buffer.concat([header, frameData]);
|
|
31469
|
+
if (frameNumber < 3) {
|
|
31470
|
+
this.log(
|
|
31471
|
+
"info",
|
|
31472
|
+
`Sending ${codec} frame ${frameNumber}: ${packet.length} bytes, keyframe=${isKeyframe}`
|
|
31473
|
+
);
|
|
31474
|
+
}
|
|
31475
|
+
const MAX_CHUNK_SIZE = 16e3;
|
|
31476
|
+
try {
|
|
31477
|
+
if (packet.length <= MAX_CHUNK_SIZE) {
|
|
31478
|
+
session.videoDataChannel.send(packet);
|
|
28972
31479
|
} else {
|
|
28973
|
-
|
|
31480
|
+
const totalChunks = Math.ceil(packet.length / MAX_CHUNK_SIZE);
|
|
31481
|
+
for (let i = 0; i < totalChunks; i++) {
|
|
31482
|
+
const start = i * MAX_CHUNK_SIZE;
|
|
31483
|
+
const end = Math.min(start + MAX_CHUNK_SIZE, packet.length);
|
|
31484
|
+
const chunk = packet.subarray(start, end);
|
|
31485
|
+
const chunkHeader = Buffer.alloc(2);
|
|
31486
|
+
chunkHeader.writeUInt8(i, 0);
|
|
31487
|
+
chunkHeader.writeUInt8(totalChunks, 1);
|
|
31488
|
+
session.videoDataChannel.send(Buffer.concat([chunkHeader, chunk]));
|
|
31489
|
+
}
|
|
28974
31490
|
}
|
|
28975
|
-
|
|
28976
|
-
|
|
28977
|
-
|
|
28978
|
-
|
|
28979
|
-
|
|
31491
|
+
return true;
|
|
31492
|
+
} catch (err) {
|
|
31493
|
+
this.log("error", `Error sending ${codec} frame ${frameNumber}: ${err}`);
|
|
31494
|
+
return false;
|
|
31495
|
+
}
|
|
31496
|
+
}
|
|
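The 12-byte header documented above (frame number, timestamp in milliseconds, flags 0x01 = H.265 / 0x02 = H.264, keyframe, reserved) plus the 2-byte [chunkIndex, totalChunks] prefix used when a packet exceeds the 16000-byte chunk limit imply a receiver roughly like the sketch below. The browser side is not part of this diff, so the reassembly strategy and the WebCodecs hand-off are assumptions.

// Hedged browser-side sketch: parse one reassembled "video" DataChannel packet.
// Assumes chunked messages (2-byte [chunkIndex, totalChunks] prefix) have already
// been concatenated in order into `buf`.
function parseVideoDataChannelPacket(buf: ArrayBuffer) {
  const view = new DataView(buf);
  return {
    frameNumber: view.getUint32(0),        // big-endian u32 at offset 0
    timestampMs: view.getUint32(4),        // sender writes microseconds / 1000
    codec: view.getUint8(8) === 1 ? "H265" : "H264",
    keyframe: view.getUint8(9) === 1,
    annexB: new Uint8Array(buf, 12),       // Annex-B NAL units follow the header
  };
}

On the browser side the annexB payload would typically be wrapped in an EncodedVideoChunk (type "key" for keyframes, "delta" otherwise) and fed to a WebCodecs VideoDecoder configured for the announced codec.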
31497
|
+
/**
|
|
31498
|
+
* Send H.265 frame via DataChannel
|
|
31499
|
+
* Format: 12-byte header + Annex-B data
|
|
31500
|
+
* Header: [frameNum (4)] [timestamp (4)] [flags (1)] [keyframe (1)] [reserved (2)]
|
|
31501
|
+
*/
|
|
31502
|
+
async sendH265Frame(session, frame, frameNumber) {
|
|
31503
|
+
if (!session.videoDataChannel) {
|
|
31504
|
+
if (frameNumber === 0) {
|
|
31505
|
+
this.log("warn", `No video data channel for session ${session.id}`);
|
|
28980
31506
|
}
|
|
28981
|
-
|
|
28982
|
-
|
|
31507
|
+
return;
|
|
31508
|
+
}
|
|
31509
|
+
if (session.videoDataChannel.readyState !== "open") {
|
|
31510
|
+
if (frameNumber === 0) {
|
|
31511
|
+
this.log(
|
|
31512
|
+
"warn",
|
|
31513
|
+
`Video data channel not open for session ${session.id}: ${session.videoDataChannel.readyState}`
|
|
31514
|
+
);
|
|
31515
|
+
}
|
|
31516
|
+
return;
|
|
31517
|
+
}
|
|
31518
|
+
let isKeyframe = frame.isKeyframe === true;
|
|
31519
|
+
if (!isKeyframe && frame.isKeyframe === void 0) {
|
|
31520
|
+
const nalUnits = parseAnnexBNalUnits(frame.data);
|
|
31521
|
+
for (const nalUnit of nalUnits) {
|
|
31522
|
+
if (nalUnit.length === 0) continue;
|
|
31523
|
+
const nalType = getH265NalType2(nalUnit);
|
|
31524
|
+
if (nalType === 32 || nalType === 33 || nalType === 34 || nalType === 19 || nalType === 20) {
|
|
31525
|
+
isKeyframe = true;
|
|
31526
|
+
break;
|
|
31527
|
+
}
|
|
31528
|
+
}
|
|
31529
|
+
}
|
|
31530
|
+
const header = Buffer.alloc(12);
|
|
31531
|
+
header.writeUInt32BE(frameNumber, 0);
|
|
31532
|
+
header.writeUInt32BE(frame.microseconds ? frame.microseconds / 1e3 : 0, 4);
|
|
31533
|
+
header.writeUInt8(1, 8);
|
|
31534
|
+
header.writeUInt8(isKeyframe ? 1 : 0, 9);
|
|
31535
|
+
header.writeUInt16BE(0, 10);
|
|
31536
|
+
const packet = Buffer.concat([header, frame.data]);
|
|
31537
|
+
if (frameNumber < 3) {
|
|
31538
|
+
this.log(
|
|
31539
|
+
"info",
|
|
31540
|
+
`Sending H.265 frame ${frameNumber}: ${packet.length} bytes, keyframe=${isKeyframe}`
|
|
31541
|
+
);
|
|
31542
|
+
}
|
|
31543
|
+
const MAX_CHUNK_SIZE = 16e3;
|
|
31544
|
+
try {
|
|
31545
|
+
if (packet.length <= MAX_CHUNK_SIZE) {
|
|
31546
|
+
session.videoDataChannel.send(packet);
|
|
31547
|
+
} else {
|
|
31548
|
+
const totalChunks = Math.ceil(packet.length / MAX_CHUNK_SIZE);
|
|
31549
|
+
for (let i = 0; i < totalChunks; i++) {
|
|
31550
|
+
const start = i * MAX_CHUNK_SIZE;
|
|
31551
|
+
const end = Math.min(start + MAX_CHUNK_SIZE, packet.length);
|
|
31552
|
+
const chunk = packet.subarray(start, end);
|
|
31553
|
+
const chunkHeader = Buffer.alloc(2);
|
|
31554
|
+
chunkHeader.writeUInt8(i, 0);
|
|
31555
|
+
chunkHeader.writeUInt8(totalChunks, 1);
|
|
31556
|
+
session.videoDataChannel.send(Buffer.concat([chunkHeader, chunk]));
|
|
31557
|
+
}
|
|
31558
|
+
}
|
|
31559
|
+
} catch (err) {
|
|
31560
|
+
this.log("error", `Error sending H.265 frame ${frameNumber}: ${err}`);
|
|
31561
|
+
}
|
|
31562
|
+
}
|
|
31563
|
+
/**
|
|
31564
|
+
* Create RTP packets for H.264 NAL unit
|
|
31565
|
+
* Handles single NAL, STAP-A aggregation, and FU-A fragmentation
|
|
31566
|
+
*/
|
|
31567
|
+
createH264RtpPackets(werift, nalUnit, sequenceNumber, timestamp, marker, ssrc) {
|
|
31568
|
+
const { RtpPacket, RtpHeader } = werift;
|
|
31569
|
+
const MTU = 1200;
|
|
31570
|
+
const packets = [];
|
|
31571
|
+
if (nalUnit.length <= MTU) {
|
|
31572
|
+
const header = new RtpHeader();
|
|
31573
|
+
header.payloadType = 96;
|
|
31574
|
+
header.sequenceNumber = sequenceNumber;
|
|
31575
|
+
header.timestamp = timestamp;
|
|
31576
|
+
header.marker = marker;
|
|
31577
|
+
header.ssrc = ssrc;
|
|
31578
|
+
packets.push(new RtpPacket(header, nalUnit));
|
|
31579
|
+
} else {
|
|
31580
|
+
const nalHeader = nalUnit[0];
|
|
31581
|
+
const nalType = nalHeader & 31;
|
|
31582
|
+
const nri = nalHeader & 96;
|
|
31583
|
+
const fuIndicator = (nri | 28) & 255;
|
|
31584
|
+
let offset = 1;
|
|
31585
|
+
let isFirst = true;
|
|
31586
|
+
while (offset < nalUnit.length) {
|
|
31587
|
+
const remaining = nalUnit.length - offset;
|
|
31588
|
+
const chunkSize = Math.min(remaining, MTU - 2);
|
|
31589
|
+
const isLast = offset + chunkSize >= nalUnit.length;
|
|
31590
|
+
let fuHeader = nalType;
|
|
31591
|
+
if (isFirst) fuHeader |= 128;
|
|
31592
|
+
if (isLast) fuHeader |= 64;
|
|
31593
|
+
const fuPayload = Buffer.alloc(2 + chunkSize);
|
|
31594
|
+
fuPayload[0] = fuIndicator;
|
|
31595
|
+
fuPayload[1] = fuHeader;
|
|
31596
|
+
nalUnit.copy(fuPayload, 2, offset, offset + chunkSize);
|
|
31597
|
+
const header = new RtpHeader();
|
|
31598
|
+
header.payloadType = 96;
|
|
31599
|
+
header.sequenceNumber = sequenceNumber + packets.length & 65535;
|
|
31600
|
+
header.timestamp = timestamp;
|
|
31601
|
+
header.marker = isLast && marker;
|
|
31602
|
+
header.ssrc = ssrc;
|
|
31603
|
+
packets.push(new RtpPacket(header, fuPayload));
|
|
31604
|
+
offset += chunkSize;
|
|
31605
|
+
isFirst = false;
|
|
31606
|
+
}
|
|
31607
|
+
}
|
|
31608
|
+
return packets;
|
|
31609
|
+
}
|
|
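createH264RtpPackets above fragments NAL units larger than the 1200-byte MTU into FU-A units (RFC 6184): the FU indicator keeps the original NRI bits and sets type 28, and the FU header carries the start/end flags plus the original NAL type. A worked example for a large IDR NAL whose header byte is 0x65:

// Original NAL header 0x65: forbidden_zero_bit = 0, NRI = 3, type = 5 (IDR).
const nalHeader = 0x65;
const nri = nalHeader & 96;                    // 0x60
const fuIndicator = (nri | 28) & 255;          // 0x7c -> FU-A indicator byte
const firstFuHeader = (nalHeader & 31) | 128;  // 0x85: S=1, E=0, type=5
const lastFuHeader = (nalHeader & 31) | 64;    // 0x45: S=0, E=1, type=5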
31610
|
+
/**
|
|
31611
|
+
* Start intercom (two-way audio)
|
|
31612
|
+
*/
|
|
31613
|
+
async startIntercom(session) {
|
|
31614
|
+
try {
|
|
31615
|
+
session.intercom = new Intercom({
|
|
31616
|
+
api: this.options.api,
|
|
31617
|
+
channel: this.options.channel
|
|
31618
|
+
});
|
|
31619
|
+
await session.intercom.start();
|
|
31620
|
+
this.log("info", `Intercom started for session ${session.id}`);
|
|
31621
|
+
} catch (err) {
|
|
31622
|
+
this.log(
|
|
31623
|
+
"error",
|
|
31624
|
+
`Failed to start intercom for session ${session.id}: ${err}`
|
|
31625
|
+
);
|
|
31626
|
+
session.intercom = null;
|
|
31627
|
+
}
|
|
31628
|
+
}
|
|
31629
|
+
/**
|
|
31630
|
+
* Log helper
|
|
31631
|
+
*/
|
|
31632
|
+
log(level, message) {
|
|
31633
|
+
if (this.options.logger) {
|
|
31634
|
+
this.options.logger(level, message);
|
|
31635
|
+
}
|
|
31636
|
+
}
|
|
31637
|
+
};
|
|
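Taken together, createSession / handleAnswer / addIceCandidate / closeSession form the signaling surface of BaichuanWebRTCServer: the server produces the SDP offer and the browser answers. A minimal sketch of the expected call order; the options shape is inferred from this diff (api, channel, profile, stunServers, turnServers, enableIntercom, logger) and how the SDP and ICE payloads travel between browser and server (HTTP, WebSocket, and so on) is left to the application and assumed here.

// Hedged sketch: expected call order for the signaling methods shown above.
async function runWebRtcSession(rtc, getAnswerFromBrowser, onBrowserIceCandidate) {
  const { sessionId, offer } = await rtc.createSession();          // server-side SDP offer
  const answer = await getAnswerFromBrowser(offer);                // app-defined signaling hop
  onBrowserIceCandidate((c) => rtc.addIceCandidate(sessionId, c)); // trickle ICE from the browser
  await rtc.handleAnswer(sessionId, answer);                       // starts the native stream pump
  return () => rtc.closeSession(sessionId);                        // tear-down handle
}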
31638
|
+
|
|
31639
|
+
// src/baichuan/stream/BaichuanHlsServer.ts
|
|
31640
|
+
var import_node_events10 = require("events");
|
|
31641
|
+
var import_node_fs = __toESM(require("fs"), 1);
|
|
31642
|
+
var import_promises3 = __toESM(require("fs/promises"), 1);
|
|
31643
|
+
var import_node_os3 = __toESM(require("os"), 1);
|
|
31644
|
+
var import_node_path3 = __toESM(require("path"), 1);
|
|
31645
|
+
var import_node_child_process10 = require("child_process");
|
|
31646
|
+
init_BcMediaAnnexBDecoder();
|
|
31647
|
+
init_H264Converter();
|
|
31648
|
+
init_H265Converter();
|
|
31649
|
+
function parseAnnexBNalUnits2(data) {
|
|
31650
|
+
const units = [];
|
|
31651
|
+
const len = data.length;
|
|
31652
|
+
const findStart = (from) => {
|
|
31653
|
+
for (let i = from; i + 3 < len; i++) {
|
|
31654
|
+
if (data[i] === 0 && data[i + 1] === 0) {
|
|
31655
|
+
if (data[i + 2] === 1) return i;
|
|
31656
|
+
if (i + 4 < len && data[i + 2] === 0 && data[i + 3] === 1)
|
|
31657
|
+
return i;
|
|
31658
|
+
}
|
|
31659
|
+
}
|
|
31660
|
+
return -1;
|
|
31661
|
+
};
|
|
31662
|
+
const startCodeLenAt = (i) => {
|
|
31663
|
+
if (i + 3 < len && data[i] === 0 && data[i + 1] === 0) {
|
|
31664
|
+
if (data[i + 2] === 1) return 3;
|
|
31665
|
+
if (i + 4 < len && data[i + 2] === 0 && data[i + 3] === 1) return 4;
|
|
31666
|
+
}
|
|
31667
|
+
return 0;
|
|
31668
|
+
};
|
|
31669
|
+
let start = findStart(0);
|
|
31670
|
+
if (start < 0) return units;
|
|
31671
|
+
while (start >= 0) {
|
|
31672
|
+
const scLen = startCodeLenAt(start);
|
|
31673
|
+
if (!scLen) break;
|
|
31674
|
+
const nalStart = start + scLen;
|
|
31675
|
+
let next = findStart(nalStart);
|
|
31676
|
+
if (next < 0) next = len;
|
|
31677
|
+
if (nalStart < next) units.push(data.subarray(nalStart, next));
|
|
31678
|
+
start = next < len ? next : -1;
|
|
31679
|
+
}
|
|
31680
|
+
return units;
|
|
31681
|
+
}
|
|
31682
|
+
function isKeyframeAnnexB(codec, annexB) {
|
|
31683
|
+
const nals = parseAnnexBNalUnits2(annexB);
|
|
31684
|
+
for (const nal of nals) {
|
|
31685
|
+
if (!nal || nal.length === 0) continue;
|
|
31686
|
+
if (codec === "h264") {
|
|
31687
|
+
const nalType = nal[0] & 31;
|
|
31688
|
+
if (nalType === 5) return true;
|
|
31689
|
+
} else {
|
|
31690
|
+
const nalType = nal[0] >> 1 & 63;
|
|
31691
|
+
if (nalType >= 16 && nalType <= 21) return true;
|
|
31692
|
+
}
|
|
31693
|
+
}
|
|
31694
|
+
return false;
|
|
31695
|
+
}
|
|
31696
|
+
function hasParamSets(codec, annexB) {
|
|
31697
|
+
const nals = parseAnnexBNalUnits2(annexB);
|
|
31698
|
+
for (const nal of nals) {
|
|
31699
|
+
if (!nal || nal.length === 0) continue;
|
|
31700
|
+
if (codec === "h264") {
|
|
31701
|
+
const nalType = nal[0] & 31;
|
|
31702
|
+
if (nalType === 7 || nalType === 8) return true;
|
|
31703
|
+
} else {
|
|
31704
|
+
const nalType = nal[0] >> 1 & 63;
|
|
31705
|
+
if (nalType === 32 || nalType === 33 || nalType === 34) return true;
|
|
31706
|
+
}
|
|
31707
|
+
}
|
|
31708
|
+
return false;
|
|
31709
|
+
}
|
|
31710
|
+
function getNalTypes(codec, annexB) {
|
|
31711
|
+
const nals = parseAnnexBNalUnits2(annexB);
|
|
31712
|
+
return nals.map((nal) => {
|
|
31713
|
+
if (codec === "h265") {
|
|
31714
|
+
return nal[0] >> 1 & 63;
|
|
31715
|
+
} else {
|
|
31716
|
+
return nal[0] & 31;
|
|
31717
|
+
}
|
|
31718
|
+
});
|
|
31719
|
+
}
|
|
31720
|
+
var BaichuanHlsServer = class extends import_node_events10.EventEmitter {
|
|
31721
|
+
api;
|
|
31722
|
+
channel;
|
|
31723
|
+
profile;
|
|
31724
|
+
variant;
|
|
31725
|
+
segmentDuration;
|
|
31726
|
+
playlistSize;
|
|
31727
|
+
ffmpegPath;
|
|
31728
|
+
log;
|
|
31729
|
+
outputDir = null;
|
|
31730
|
+
createdTempDir = false;
|
|
31731
|
+
playlistPath = null;
|
|
31732
|
+
segmentPattern = null;
|
|
31733
|
+
state = "idle";
|
|
31734
|
+
codec = null;
|
|
31735
|
+
framesReceived = 0;
|
|
31736
|
+
ffmpeg = null;
|
|
31737
|
+
nativeStream = null;
|
|
31738
|
+
pumpPromise = null;
|
|
31739
|
+
startedAt = null;
|
|
31740
|
+
lastError = null;
|
|
31741
|
+
constructor(options) {
|
|
31742
|
+
super();
|
|
31743
|
+
this.api = options.api;
|
|
31744
|
+
this.channel = options.channel;
|
|
31745
|
+
this.profile = options.profile;
|
|
31746
|
+
this.variant = options.variant ?? void 0;
|
|
31747
|
+
this.segmentDuration = options.segmentDuration ?? 2;
|
|
31748
|
+
this.playlistSize = options.playlistSize ?? 5;
|
|
31749
|
+
this.ffmpegPath = options.ffmpegPath ?? "ffmpeg";
|
|
31750
|
+
if (options.outputDir) {
|
|
31751
|
+
this.outputDir = options.outputDir;
|
|
31752
|
+
this.createdTempDir = false;
|
|
31753
|
+
}
|
|
31754
|
+
this.log = options.logger ?? (() => {
|
|
28983
31755
|
});
|
|
28984
|
-
this.active = true;
|
|
28985
31756
|
}
|
|
28986
31757
|
/**
|
|
28987
|
-
*
|
|
31758
|
+
* Start HLS streaming
|
|
28988
31759
|
*/
|
|
28989
|
-
|
|
28990
|
-
|
|
31760
|
+
async start() {
|
|
31761
|
+
if (this.state === "running" || this.state === "starting") {
|
|
31762
|
+
return;
|
|
31763
|
+
}
|
|
31764
|
+
this.state = "starting";
|
|
31765
|
+
this.lastError = null;
|
|
31766
|
+
try {
|
|
31767
|
+
if (!this.outputDir) {
|
|
31768
|
+
this.outputDir = await import_promises3.default.mkdtemp(
|
|
31769
|
+
import_node_path3.default.join(import_node_os3.default.tmpdir(), `nodelink-hls-${this.profile}-`)
|
|
31770
|
+
);
|
|
31771
|
+
this.createdTempDir = true;
|
|
31772
|
+
} else {
|
|
31773
|
+
await import_promises3.default.mkdir(this.outputDir, { recursive: true });
|
|
31774
|
+
}
|
|
31775
|
+
this.playlistPath = import_node_path3.default.join(this.outputDir, "playlist.m3u8");
|
|
31776
|
+
this.segmentPattern = import_node_path3.default.join(this.outputDir, "segment_%05d.ts");
|
|
31777
|
+
this.log("info", `Starting HLS stream to ${this.outputDir}`);
|
|
31778
|
+
this.nativeStream = createNativeStream(
|
|
31779
|
+
this.api,
|
|
31780
|
+
this.channel,
|
|
31781
|
+
this.profile,
|
|
31782
|
+
this.variant ? { variant: this.variant } : void 0
|
|
31783
|
+
);
|
|
31784
|
+
this.pumpPromise = this.pumpNativeToFfmpeg();
|
|
31785
|
+
this.startedAt = /* @__PURE__ */ new Date();
|
|
31786
|
+
this.state = "running";
|
|
31787
|
+
this.emit("started", { outputDir: this.outputDir });
|
|
31788
|
+
} catch (err) {
|
|
31789
|
+
this.state = "error";
|
|
31790
|
+
this.lastError = String(err);
|
|
31791
|
+
this.log("error", `Failed to start HLS: ${err}`);
|
|
31792
|
+
throw err;
|
|
31793
|
+
}
|
|
28991
31794
|
}
|
|
28992
31795
|
/**
|
|
28993
|
-
* Stop
|
|
31796
|
+
* Stop HLS streaming
|
|
28994
31797
|
*/
|
|
28995
31798
|
async stop() {
|
|
28996
|
-
|
|
28997
|
-
|
|
28998
|
-
client.end();
|
|
28999
|
-
}
|
|
31799
|
+
if (this.state === "idle" || this.state === "stopped") {
|
|
31800
|
+
return;
|
|
29000
31801
|
}
|
|
29001
|
-
this.
|
|
31802
|
+
this.state = "stopping";
|
|
31803
|
+
this.log("info", "Stopping HLS stream");
|
|
29002
31804
|
try {
|
|
29003
|
-
|
|
31805
|
+
this.ffmpeg?.stdin?.end();
|
|
29004
31806
|
} catch {
|
|
29005
31807
|
}
|
|
29006
|
-
|
|
29007
|
-
this.
|
|
29008
|
-
|
|
31808
|
+
try {
|
|
31809
|
+
this.ffmpeg?.kill("SIGKILL");
|
|
31810
|
+
} catch {
|
|
29009
31811
|
}
|
|
29010
|
-
this.
|
|
29011
|
-
if (this.
|
|
29012
|
-
const proc = this.ffmpegProcess;
|
|
31812
|
+
this.ffmpeg = null;
|
|
31813
|
+
if (this.nativeStream) {
|
|
29013
31814
|
try {
|
|
29014
|
-
|
|
31815
|
+
await this.nativeStream.return(void 0);
|
|
29015
31816
|
} catch {
|
|
29016
31817
|
}
|
|
29017
|
-
|
|
29018
|
-
|
|
31818
|
+
this.nativeStream = null;
|
|
31819
|
+
}
|
|
31820
|
+
if (this.pumpPromise) {
|
|
31821
|
+
try {
|
|
31822
|
+
await this.pumpPromise;
|
|
31823
|
+
} catch {
|
|
31824
|
+
}
|
|
31825
|
+
this.pumpPromise = null;
|
|
31826
|
+
}
|
|
31827
|
+
if (this.createdTempDir && this.outputDir) {
|
|
31828
|
+
try {
|
|
31829
|
+
await import_promises3.default.rm(this.outputDir, { recursive: true, force: true });
|
|
31830
|
+
} catch {
|
|
31831
|
+
}
|
|
31832
|
+
}
|
|
31833
|
+
this.state = "stopped";
|
|
31834
|
+
this.emit("stopped");
|
|
31835
|
+
}
|
|
31836
|
+
/**
|
|
31837
|
+
* Get current status
|
|
31838
|
+
*/
|
|
31839
|
+
getStatus() {
|
|
31840
|
+
return {
|
|
31841
|
+
state: this.state,
|
|
31842
|
+
codec: this.codec,
|
|
31843
|
+
framesReceived: this.framesReceived,
|
|
31844
|
+
ffmpegRunning: this.ffmpeg !== null && !this.ffmpeg.killed,
|
|
31845
|
+
playlistPath: this.playlistPath,
|
|
31846
|
+
outputDir: this.outputDir,
|
|
31847
|
+
startedAt: this.startedAt,
|
|
31848
|
+
error: this.lastError
|
|
31849
|
+
};
|
|
31850
|
+
}
|
|
31851
|
+
/**
|
|
31852
|
+
* Get playlist file path
|
|
31853
|
+
*/
|
|
31854
|
+
getPlaylistPath() {
|
|
31855
|
+
return this.playlistPath;
|
|
31856
|
+
}
|
|
31857
|
+
/**
|
|
31858
|
+
* Get output directory
|
|
31859
|
+
*/
|
|
31860
|
+
getOutputDir() {
|
|
31861
|
+
return this.outputDir;
|
|
31862
|
+
}
|
|
31863
|
+
/**
|
|
31864
|
+
* Check if playlist file exists
|
|
31865
|
+
*/
|
|
31866
|
+
async waitForPlaylist(timeoutMs = 2e4) {
|
|
31867
|
+
if (!this.playlistPath) return false;
|
|
31868
|
+
const deadline = Date.now() + timeoutMs;
|
|
31869
|
+
while (Date.now() < deadline) {
|
|
31870
|
+
if (import_node_fs.default.existsSync(this.playlistPath)) {
|
|
31871
|
+
return true;
|
|
31872
|
+
}
|
|
31873
|
+
await new Promise((r) => setTimeout(r, 150));
|
|
31874
|
+
}
|
|
31875
|
+
return false;
|
|
31876
|
+
}
|
|
31877
|
+
/**
|
|
31878
|
+
* Read an HLS asset (playlist or segment)
|
|
31879
|
+
*/
|
|
31880
|
+
async readAsset(assetName) {
|
|
31881
|
+
if (!this.outputDir) return null;
|
|
31882
|
+
const safe = assetName.replace(/^\/+/, "");
|
|
31883
|
+
if (safe.includes("..") || safe.includes("/")) {
|
|
31884
|
+
return null;
|
|
31885
|
+
}
|
|
31886
|
+
const filePath = import_node_path3.default.join(this.outputDir, safe);
|
|
31887
|
+
if (!import_node_fs.default.existsSync(filePath)) {
|
|
31888
|
+
return null;
|
|
31889
|
+
}
|
|
31890
|
+
const data = await import_promises3.default.readFile(filePath);
|
|
31891
|
+
let contentType = "application/octet-stream";
|
|
31892
|
+
if (safe.endsWith(".m3u8")) {
|
|
31893
|
+
contentType = "application/vnd.apple.mpegurl";
|
|
31894
|
+
} else if (safe.endsWith(".ts")) {
|
|
31895
|
+
contentType = "video/mp2t";
|
|
31896
|
+
}
|
|
31897
|
+
return { data, contentType };
|
|
31898
|
+
}
|
|
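readAsset above resolves playlist.m3u8 and segment_*.ts files with basic path sanitization and the matching content types, so exposing the HLS output over HTTP only needs a thin handler. A minimal sketch with Node's built-in http module; the /hls/ route shape is an assumption.

// Hedged sketch: serve BaichuanHlsServer output over plain HTTP.
const http = require("http");

function createHlsHttpServer(hls) {
  return http.createServer(async (req, res) => {
    const asset =
      (req.url ?? "/").replace(/^\/hls\//, "").replace(/^\//, "") || "playlist.m3u8";
    const found = await hls.readAsset(asset); // null on traversal attempts or missing files
    if (!found) {
      res.writeHead(404).end();
      return;
    }
    res.writeHead(200, { "Content-Type": found.contentType }).end(found.data);
  });
}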
31899
|
+
// ============================================================================
|
|
31900
|
+
// Private Methods
|
|
31901
|
+
// ============================================================================
|
|
31902
|
+
async pumpNativeToFfmpeg() {
|
|
31903
|
+
if (!this.nativeStream || !this.playlistPath || !this.segmentPattern) {
|
|
31904
|
+
return;
|
|
31905
|
+
}
|
|
31906
|
+
let startedFfmpeg = false;
|
|
31907
|
+
let pendingParamSets = [];
|
|
31908
|
+
const MAX_FRAMES_WAIT_FOR_KEYFRAME = 180;
|
|
31909
|
+
const collectParamSets = (codec, annexB) => {
|
|
31910
|
+
const nals = parseAnnexBNalUnits2(annexB);
|
|
31911
|
+
for (const nal of nals) {
|
|
31912
|
+
if (!nal || nal.length === 0) continue;
|
|
31913
|
+
if (codec === "h264") {
|
|
31914
|
+
const t = nal[0] & 31;
|
|
31915
|
+
if (t === 7 || t === 8) {
|
|
31916
|
+
pendingParamSets.push(
|
|
31917
|
+
Buffer.concat([Buffer.from([0, 0, 0, 1]), nal])
|
|
31918
|
+
);
|
|
31919
|
+
}
|
|
31920
|
+
} else {
|
|
31921
|
+
const t = nal[0] >> 1 & 63;
|
|
31922
|
+
if (t === 32 || t === 33 || t === 34) {
|
|
31923
|
+
pendingParamSets.push(
|
|
31924
|
+
Buffer.concat([Buffer.from([0, 0, 0, 1]), nal])
|
|
31925
|
+
);
|
|
31926
|
+
}
|
|
31927
|
+
}
|
|
31928
|
+
}
|
|
31929
|
+
if (pendingParamSets.length > 12) {
|
|
31930
|
+
pendingParamSets = pendingParamSets.slice(-12);
|
|
31931
|
+
}
|
|
31932
|
+
};
|
|
31933
|
+
try {
|
|
31934
|
+
for await (const frame of this.nativeStream) {
|
|
31935
|
+
if (this.state !== "running") break;
|
|
31936
|
+
if (frame.audio) continue;
|
|
31937
|
+
if (!frame.data || frame.data.length === 0) continue;
|
|
31938
|
+
if (!this.codec) {
|
|
31939
|
+
const detected = detectVideoCodecFromNal(frame.data);
|
|
31940
|
+
const fromMeta = frame.videoType === "H265" ? "h265" : "h264";
|
|
31941
|
+
this.codec = detected ? detected.toLowerCase() : fromMeta;
|
|
31942
|
+
this.log(
|
|
31943
|
+
"info",
|
|
31944
|
+
`HLS codec detected: meta=${fromMeta} detected=${detected} (using ${this.codec})`
|
|
31945
|
+
);
|
|
31946
|
+
this.emit("codec-detected", { codec: this.codec });
|
|
31947
|
+
}
|
|
31948
|
+
const annexB = this.codec === "h265" ? convertToAnnexB2(frame.data) : convertToAnnexB(frame.data);
|
|
31949
|
+
this.framesReceived++;
|
|
31950
|
+
const shouldLog = this.framesReceived <= 5 || this.framesReceived <= 60 && this.framesReceived % 10 === 0;
|
|
31951
|
+
if (shouldLog) {
|
|
31952
|
+
const nalTypes = getNalTypes(this.codec, annexB);
|
|
31953
|
+
const hasIdr = isKeyframeAnnexB(this.codec, annexB);
|
|
31954
|
+
const hasParams = hasParamSets(this.codec, annexB);
|
|
31955
|
+
this.log(
|
|
31956
|
+
"debug",
|
|
31957
|
+
`HLS frame#${this.framesReceived}: bytes=${annexB.length} nalTypes=[${nalTypes.join(",")}] hasIDR=${hasIdr} hasParams=${hasParams}`
|
|
31958
|
+
);
|
|
31959
|
+
}
|
|
31960
|
+
collectParamSets(this.codec, annexB);
|
|
31961
|
+
const isKeyframe = isKeyframeAnnexB(this.codec, annexB);
|
|
31962
|
+
if (!isKeyframe && !startedFfmpeg) {
|
|
31963
|
+
if (this.framesReceived < MAX_FRAMES_WAIT_FOR_KEYFRAME) {
|
|
31964
|
+
continue;
|
|
31965
|
+
}
|
|
31966
|
+
this.log(
|
|
31967
|
+
"warn",
|
|
31968
|
+
`No keyframe after ${this.framesReceived} frames, starting ffmpeg anyway`
|
|
31969
|
+
);
|
|
31970
|
+
}
|
|
31971
|
+
if (!startedFfmpeg) {
|
|
31972
|
+
this.log(
|
|
31973
|
+
"info",
|
|
31974
|
+
`Starting ffmpeg: codec=${this.codec} framesSeen=${this.framesReceived} isKeyframe=${isKeyframe} paramSets=${pendingParamSets.length}`
|
|
31975
|
+
);
|
|
31976
|
+
this.ffmpeg = this.spawnFfmpeg();
|
|
31977
|
+
startedFfmpeg = true;
|
|
31978
|
+
this.emit("ffmpeg-started");
|
|
29019
31979
|
try {
|
|
29020
|
-
|
|
31980
|
+
+ if (this.ffmpeg?.stdin && !this.ffmpeg.stdin.destroyed) {
+ for (const ps of pendingParamSets) {
+ this.ffmpeg.stdin.write(ps);
+ }
+ }
  } catch {
  }
- [17 lines removed here (old lines 29023-29039); their content is not shown in this rendering]
+ }
+ if (!this.ffmpeg || !this.ffmpeg.stdin || this.ffmpeg.stdin.destroyed) {
+ this.log("warn", "ffmpeg stdin not available, stopping pump");
+ break;
+ }
+ try {
+ this.ffmpeg.stdin.write(annexB);
+ if (this.framesReceived % 100 === 0 || this.framesReceived <= 5 || this.framesReceived <= 50 && this.framesReceived % 10 === 0) {
+ this.log(
+ "debug",
+ `HLS fed frame #${this.framesReceived} to ffmpeg (${annexB.length} bytes)`
+ );
+ }
+ } catch (err) {
+ this.log("error", `Failed to write to ffmpeg: ${err}`);
+ break;
+ }
+ }
+ } catch (e) {
+ this.log("error", `HLS pump error: ${e}`);
+ this.lastError = String(e);
+ this.state = "error";
+ this.emit("error", e);
  }
- this.active = false;
  }
- [2 lines removed here (old lines 29043-29044); their content is not shown in this rendering]
+ spawnFfmpeg() {
+ if (!this.playlistPath || !this.segmentPattern) {
+ throw new Error("Playlist path not set");
+ }
+ const codec = this.codec ?? "h264";
+ const args = [
+ "-hide_banner",
+ "-loglevel",
+ "warning",
+ "-fflags",
+ "+genpts",
+ "-use_wallclock_as_timestamps",
+ "1",
+ "-r",
+ "25",
+ "-f",
+ codec === "h265" ? "hevc" : "h264",
+ "-i",
+ "pipe:0"
+ ];
+ if (codec === "h265") {
+ args.push(
+ "-c:v",
+ "libx264",
+ "-preset",
+ "veryfast",
+ "-tune",
+ "zerolatency",
+ "-pix_fmt",
+ "yuv420p"
+ );
+ } else {
+ args.push("-c:v", "copy");
+ }
+ args.push(
+ "-f",
+ "hls",
+ "-hls_time",
+ String(this.segmentDuration),
+ "-hls_list_size",
+ String(this.playlistSize),
+ "-hls_flags",
+ "delete_segments+append_list+omit_endlist",
+ "-hls_segment_filename",
+ this.segmentPattern,
+ this.playlistPath
+ );
+ const p = (0, import_node_child_process10.spawn)(this.ffmpegPath, args, {
+ stdio: ["pipe", "ignore", "pipe"]
+ });
+ p.on("error", (err) => {
+ this.log("error", `ffmpeg spawn error: ${err}`);
+ this.emit("ffmpeg-error", err);
+ });
+ p.stderr?.on("data", (d) => {
+ const s = String(d ?? "").trim();
+ if (s) this.log("warn", `[ffmpeg] ${s}`);
+ });
+ p.on("exit", (code, signal) => {
+ this.log(
+ "warn",
+ `ffmpeg exited (code=${code ?? "?"} signal=${signal ?? "?"})`
+ );
+ this.emit("ffmpeg-exited", { code, signal });
+ });
+ return p;
  }
  };

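Note on the added spawnFfmpeg() above: the argument list it builds corresponds to an ffmpeg pipeline that reads raw Annex-B video from stdin (pipe:0), copies H.264 as-is or transcodes H.265 to H.264 via libx264, and repackages the result into a rolling HLS playlist. A standalone sketch of the same invocation, for reference only; the "ffmpeg" binary name, segment duration, playlist size, and output paths below are illustrative placeholders rather than values taken from the package:

    // Sketch only: mirrors the shape of the args assembled by spawnFfmpeg().
    // All concrete values here (paths, "2", "6", the "ffmpeg" binary name) are placeholders.
    const { spawn } = require("child_process");

    const args = [
      "-hide_banner", "-loglevel", "warning",
      "-fflags", "+genpts",
      "-use_wallclock_as_timestamps", "1",
      "-r", "25",
      "-f", "h264",   // "hevc" when the incoming stream is H.265
      "-i", "pipe:0",
      "-c:v", "copy", // the H.265 branch uses libx264 with veryfast/zerolatency instead
      "-f", "hls",
      "-hls_time", "2",
      "-hls_list_size", "6",
      "-hls_flags", "delete_segments+append_list+omit_endlist",
      "-hls_segment_filename", "/tmp/hls/seg_%05d.ts",
      "/tmp/hls/index.m3u8"
    ];

    const ff = spawn("ffmpeg", args, { stdio: ["pipe", "ignore", "pipe"] });
    ff.stderr.on("data", (chunk) => process.stderr.write(chunk));
    // The pump loop above then writes raw Annex-B access units to ff.stdin.

Using -c:v copy keeps the H.264 path remux-only, so only the H.265 branch pays a transcode cost.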
@@ -29519,10 +32552,10 @@ async function autoDetectDeviceType(inputs) {
  }

  // src/multifocal/compositeRtspServer.ts
- var
- var
+ var import_node_events11 = require("events");
+ var import_node_child_process11 = require("child_process");
  var net3 = __toESM(require("net"), 1);
- var CompositeRtspServer = class extends
+ var CompositeRtspServer = class extends import_node_events11.EventEmitter {
  options;
  compositeStream = null;
  rtspServer = null;
@@ -29627,7 +32660,7 @@ var CompositeRtspServer = class extends import_node_events7.EventEmitter {
  this.logger.log?.(
  `[CompositeRtspServer] Starting ffmpeg RTSP server: ${ffmpegArgs.join(" ")}`
  );
- this.ffmpegProcess = (0,
+ this.ffmpegProcess = (0, import_node_child_process11.spawn)("ffmpeg", ffmpegArgs, {
  stdio: ["pipe", "pipe", "pipe"]
  });
  this.ffmpegProcess.on("error", (error) => {
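The (0, import_node_child_process11.spawn)(...) call form in this hunk is standard esbuild CommonJS interop: the comma operator detaches spawn from its namespace object so the call carries no `this` binding. Outside the bundle it is equivalent to the plain call sketched below (the "-version" invocation is just an illustrative placeholder):

    // Equivalent un-bundled form of (0, import_node_child_process11.spawn)(...):
    const child_process = require("child_process");
    const spawn = child_process.spawn;  // detached reference; `this` is not bound at the call site
    const proc = spawn("ffmpeg", ["-version"], { stdio: ["pipe", "pipe", "pipe"] });
    proc.on("exit", (code) => console.log(`ffmpeg exited with code ${code}`));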
@@ -29840,9 +32873,12 @@ var CompositeRtspServer = class extends import_node_events7.EventEmitter {
  BaichuanClient,
  BaichuanEventEmitter,
  BaichuanFrameParser,
+ BaichuanHlsServer,
  BaichuanHttpStreamServer,
+ BaichuanMjpegServer,
  BaichuanRtspServer,
  BaichuanVideoStream,
+ BaichuanWebRTCServer,
  BcMediaAnnexBDecoder,
  BcMediaCodec,
  BcUdpStream,
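This and the following hunks extend the package's public export list (BaichuanHlsServer, BaichuanMjpegServer, BaichuanWebRTCServer here, with HLS and MJPEG helpers further down). Only the export names are confirmed by this diff; a minimal consumer-side sketch that assumes nothing beyond the CommonJS surface shown:

    // Sketch: only the export names come from this diff; constructor options
    // and methods of these classes are not shown here, so none are called.
    const {
      BaichuanHlsServer,
      BaichuanMjpegServer,
      BaichuanWebRTCServer
    } = require("@apocaliss92/nodelink-js");

    console.log(typeof BaichuanHlsServer);  // expected: "function"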
@@ -29853,7 +32889,9 @@ var CompositeRtspServer = class extends import_node_events7.EventEmitter {
  DUAL_LENS_SINGLE_MOTION_MODELS,
  H264RtpDepacketizer,
  H265RtpDepacketizer,
+ HlsSessionManager,
  Intercom,
+ MjpegTransformer,
  NVR_HUB_EXACT_TYPES,
  NVR_HUB_MODEL_PATTERNS,
  ReolinkBaichuanApi,
@@ -29873,6 +32911,7 @@ var CompositeRtspServer = class extends import_node_events7.EventEmitter {
  buildBinaryExtensionXml,
  buildChannelExtensionXml,
  buildFloodlightManualXml,
+ buildHlsRedirectUrl,
  buildLoginXml,
  buildPreviewStopXml,
  buildPreviewStopXmlV11,
@@ -29900,6 +32939,7 @@ var CompositeRtspServer = class extends import_node_events7.EventEmitter {
  createDebugGateLogger,
  createDiagnosticsBundle,
  createLogger,
+ createMjpegBoundary,
  createNativeStream,
  createNullLogger,
  createReplayHttpServer,
@@ -29907,8 +32947,10 @@ var CompositeRtspServer = class extends import_node_events7.EventEmitter {
  createRfc4571TcpServerForReplay,
  createRtspProxyServer,
  createTaggedLogger,
+ decideVideoclipTranscodeMode,
  decodeHeader,
  deriveAesKey,
+ detectIosClient,
  detectVideoCodecFromNal,
  discoverReolinkDevices,
  discoverViaHttpScan,
@@ -29921,10 +32963,13 @@ var CompositeRtspServer = class extends import_node_events7.EventEmitter {
  extractSpsFromAnnexB,
  extractVpsFromAnnexB,
  flattenAbilitiesForChannel,
+ formatMjpegFrame,
  getConstructedVideoStreamOptions,
  getGlobalLogger,
  getH265NalType,
+ getMjpegContentType,
  getVideoStream,
+ getVideoclipClientInfo,
  getXmlText,
  hasH265StartCodes,
  hasStartCodes,