@apocaliss92/nodelink-js 0.1.8 → 0.1.9

This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
package/dist/index.cjs CHANGED
@@ -2468,13 +2468,31 @@ var init_BaichuanVideoStream = __esm({
2468
2468
  searchStart = bodyEnd + Buffer.from("</body>").length;
2469
2469
  dataToParse = rawCandidate.subarray(searchStart);
2470
2470
  }
2471
+ let encryptLen;
2472
+ if (frame.extension && frame.extension.length > 0) {
2473
+ try {
2474
+ const extDec = this.client.tryDecryptXml(
2475
+ frame.extension,
2476
+ frame.header.channelId,
2477
+ enc
2478
+ );
2479
+ const encryptLenMatch = extDec.match(
2480
+ /<encryptLen>(\d+)<\/encryptLen>/i
2481
+ );
2482
+ if (encryptLenMatch && encryptLenMatch[1]) {
2483
+ encryptLen = parseInt(encryptLenMatch[1], 10);
2484
+ }
2485
+ } catch {
2486
+ }
2487
+ }
2471
2488
  const dataAfterXml = this.chooseDecryptedOrRawCandidate({
2472
2489
  raw: dataToParse,
2473
2490
  enc,
2474
2491
  channelId: frame.header.channelId,
2475
2492
  // Some NVR/Hub streams appear to include non-media bytes even when payloadOffset is present.
2476
2493
  // Allow a one-time resync at startup to avoid delaying the first keyframe.
2477
- allowResync: frame.payload.length === 0 || totalFramesReceived <= 10 && totalMediaPackets === 0
2494
+ allowResync: frame.payload.length === 0 || totalFramesReceived <= 10 && totalMediaPackets === 0,
2495
+ ...encryptLen !== void 0 ? { encryptLen } : {}
2478
2496
  });
2479
2497
  if (this.bcMediaCodec.getRemainingBuffer().length === 0 && dataAfterXml.length <= 600) {
2480
2498
  const s = _BaichuanVideoStream.scoreBcMediaLike(dataAfterXml);
@@ -2587,15 +2605,38 @@ var init_BaichuanVideoStream = __esm({
2587
2605
  }
2588
2606
  }
2589
2607
  };
2590
- const prependParamSetsIfNeeded = (annexB, videoType) => {
2608
+ const prependParamSetsIfNeeded = (annexB, videoType, isPframe = false) => {
2591
2609
  if (videoType === "H264") {
2592
2610
  const nals = splitAnnexBToNalPayloads(annexB);
2593
2611
  if (nals.length === 0) return annexB;
2594
2612
  const types = nals.map((n) => (n[0] ?? 0) & 31);
2595
- if (types.includes(7) && types.includes(8)) return annexB;
2596
2613
  const hasVcl = types.some(
2597
2614
  (t) => t === 1 || t === 5 || t === 19 || t === 20
2598
2615
  );
2616
+ if (isPframe && !hasVcl) {
2617
+ if (dbg.traceNativeStream) {
2618
+ this.logger?.warn(
2619
+ `[BaichuanVideoStream] Dropping P-frame without VCL (only param sets): types=${types.join(",")}`
2620
+ );
2621
+ }
2622
+ return Buffer.alloc(0);
2623
+ }
2624
+ if (types.includes(7) && types.includes(8)) {
2625
+ let ppsIdFromSlice = null;
2626
+ for (const nal of nals) {
2627
+ const t = (nal[0] ?? 0) & 31;
2628
+ if (t === 1 || t === 5) {
2629
+ ppsIdFromSlice = parseSlicePpsIdFromNal(nal);
2630
+ break;
2631
+ }
2632
+ }
2633
+ if (ppsIdFromSlice != null && ppsIdFromSlice <= 255) {
2634
+ this.lastPrependedPpsId = ppsIdFromSlice;
2635
+ } else {
2636
+ this.lastPrependedPpsId = -1;
2637
+ }
2638
+ return annexB;
2639
+ }
2599
2640
  if (!hasVcl) return annexB;
2600
2641
  let ppsId = null;
2601
2642
  for (const nal of nals) {
@@ -2642,11 +2683,19 @@ var init_BaichuanVideoStream = __esm({
2642
2683
  const nals = splitAnnexBToNalPayloads2(annexB);
2643
2684
  if (nals.length === 0) return annexB;
2644
2685
  const types = nals.map((n) => getH265NalType(n)).filter((t) => t !== null);
2645
- if (types.includes(32) && types.includes(33) && types.includes(34))
2646
- return annexB;
2647
2686
  const hasVcl = types.some(
2648
2687
  (t) => t >= 0 && t <= 9 || t >= 16 && t <= 23
2649
2688
  );
2689
+ if (isPframe && !hasVcl) {
2690
+ if (dbg.traceNativeStream) {
2691
+ this.logger?.warn(
2692
+ `[BaichuanVideoStream] Dropping H.265 P-frame without VCL (only param sets): types=${types.join(",")}`
2693
+ );
2694
+ }
2695
+ return Buffer.alloc(0);
2696
+ }
2697
+ if (types.includes(32) && types.includes(33) && types.includes(34))
2698
+ return annexB;
2650
2699
  if (!hasVcl) return annexB;
2651
2700
  if (this.lastPrependedParamSetsH265) return annexB;
2652
2701
  if (!this.lastVps || !this.lastSpsH265 || !this.lastPpsH265)
@@ -2828,7 +2877,7 @@ var init_BaichuanVideoStream = __esm({
2828
2877
  }
2829
2878
  for (const p of parts) {
2830
2879
  maybeCacheParamSets(p, "Pframe", videoType);
2831
- const outP0 = prependParamSetsIfNeeded(p, videoType);
2880
+ const outP0 = prependParamSetsIfNeeded(p, videoType, true);
2832
2881
  if (outP0.length === 0) continue;
2833
2882
  const outP = outP0;
2834
2883
  dumpNalSummary(outP, "Pframe", media.microseconds);
@@ -2997,10 +3046,10 @@ function buildRtspPath(channel, stream) {
2997
3046
  }
2998
3047
  function buildRtspUrl(params) {
2999
3048
  const port = params.port ?? 554;
3000
- const path5 = buildRtspPath(params.channel, params.stream);
3049
+ const path6 = buildRtspPath(params.channel, params.stream);
3001
3050
  const user = encodeURIComponent(params.username);
3002
3051
  const pass = encodeURIComponent(params.password);
3003
- return `rtsp://${user}:${pass}@${params.host}:${port}${path5}`;
3052
+ return `rtsp://${user}:${pass}@${params.host}:${port}${path6}`;
3004
3053
  }
3005
3054
  var init_urls = __esm({
3006
3055
  "src/rtsp/urls.ts"() {
@@ -3162,7 +3211,7 @@ async function createDiagnosticsBundle(params) {
3162
3211
  }
3163
3212
  function sanitizeFfmpegError(error) {
3164
3213
  return error.replace(
3165
- /([a-z]+:\/\/)([^:@\/\s]+):([^@\/\s]+)@/gi,
3214
+ /([a-z]+:\/\/)([^:@/\s]+):([^@/\s]+)@/gi,
3166
3215
  (match, protocol, username, password) => {
3167
3216
  return `${protocol}***:***@`;
3168
3217
  }
@@ -4542,8 +4591,8 @@ async function runMultifocalDiagnosticsConsecutively(params) {
4542
4591
  for (const app of rtmpApps) {
4543
4592
  for (const streamName of streams) {
4544
4593
  const streamType = streamName.includes("sub") || streamName === "sub" || streamName === "mobile" ? 1 : 0;
4545
- const path5 = `/${app}/channel${params.channel}_${streamName}.bcs`;
4546
- const u = new URL(`rtmp://${params.host}:1935${path5}`);
4594
+ const path6 = `/${app}/channel${params.channel}_${streamName}.bcs`;
4595
+ const u = new URL(`rtmp://${params.host}:1935${path6}`);
4547
4596
  u.searchParams.set("channel", params.channel.toString());
4548
4597
  u.searchParams.set("stream", streamType.toString());
4549
4598
  u.searchParams.set("user", params.username);
@@ -5453,17 +5502,20 @@ function parseRecordingFileName(fileName) {
5453
5502
  let widthRaw;
5454
5503
  let heightRaw;
5455
5504
  let hexValue = "";
5505
+ let sizeHex;
5456
5506
  if (parts.length === 6) {
5457
5507
  startDate = parts[1] ?? "";
5458
5508
  startTime = parts[2] ?? "";
5459
5509
  endTime = parts[3] ?? "";
5460
5510
  hexValue = parts[4] ?? "";
5511
+ sizeHex = parts[5];
5461
5512
  } else if (parts.length === 7) {
5462
5513
  startDate = parts[1] ?? "";
5463
5514
  startTime = parts[2] ?? "";
5464
5515
  endTime = parts[3] ?? "";
5465
5516
  animalTypeRaw = parts[4];
5466
5517
  hexValue = parts[5] ?? "";
5518
+ sizeHex = parts[6];
5467
5519
  } else if (parts.length === 9) {
5468
5520
  devType = "hub";
5469
5521
  startDate = parts[1] ?? "";
@@ -5473,6 +5525,7 @@ function parseRecordingFileName(fileName) {
5473
5525
  widthRaw = parts[5];
5474
5526
  heightRaw = parts[6];
5475
5527
  hexValue = parts[7] ?? "";
5528
+ sizeHex = parts[8];
5476
5529
  } else {
5477
5530
  return void 0;
5478
5531
  }
@@ -5503,6 +5556,12 @@ function parseRecordingFileName(fileName) {
5503
5556
  if (animalTypeRaw != null) parsed.animalTypeRaw = animalTypeRaw;
5504
5557
  if (widthRaw != null) parsed.widthRaw = widthRaw;
5505
5558
  if (heightRaw != null) parsed.heightRaw = heightRaw;
5559
+ if (sizeHex && /^[0-9a-fA-F]+$/.test(sizeHex)) {
5560
+ const sizeBytes = parseInt(sizeHex, 16);
5561
+ if (Number.isFinite(sizeBytes) && sizeBytes > 0) {
5562
+ parsed.sizeBytes = sizeBytes;
5563
+ }
5564
+ }
5506
5565
  return parsed;
5507
5566
  }
5508
5567
  var FLAGS_CAM_V2, FLAGS_HUB_V0, FLAGS_HUB_V1, FLAGS_HUB_V2, FLAGS_MAPPING;
@@ -6627,12 +6686,12 @@ var init_ReolinkCgiApi = __esm({
6627
6686
  "getVideoclipThumbnailJpeg",
6628
6687
  `Extracting thumbnail from VOD URL (FLV): ${vodUrl.substring(0, 100)}... (seek=${seekSeconds}s)`
6629
6688
  );
6630
- const { spawn: spawn11 } = await import("child_process");
6689
+ const { spawn: spawn12 } = await import("child_process");
6631
6690
  return new Promise((resolve, reject) => {
6632
6691
  const chunks = [];
6633
6692
  let stderr = "";
6634
6693
  let timedOut = false;
6635
- const ffmpeg = spawn11(ffmpegPath, [
6694
+ const ffmpeg = spawn12(ffmpegPath, [
6636
6695
  "-y",
6637
6696
  "-analyzeduration",
6638
6697
  "10000000",
@@ -7191,15 +7250,18 @@ var init_ReolinkCgiApi = __esm({
7191
7250
  if (detectionClasses.length === 0) {
7192
7251
  detectionClasses.push("motion");
7193
7252
  }
7253
+ const sizeBytes = typeof vodFile.size === "string" ? parseInt(vodFile.size, 10) : vodFile.size;
7194
7254
  const result = {
7195
7255
  fileName: vodFile.name,
7196
7256
  id: vodFile.name,
7197
- sizeBytes: vodFile.size,
7198
7257
  startTime,
7199
7258
  endTime,
7200
7259
  recordType: vodFile.type,
7201
7260
  detectionClasses
7202
7261
  };
7262
+ if (Number.isFinite(sizeBytes)) {
7263
+ result.sizeBytes = sizeBytes;
7264
+ }
7203
7265
  if (parsed) {
7204
7266
  result.parsedFileName = parsed;
7205
7267
  }
@@ -7315,6 +7377,7 @@ __export(index_exports, {
7315
7377
  BaichuanClient: () => BaichuanClient,
7316
7378
  BaichuanEventEmitter: () => BaichuanEventEmitter,
7317
7379
  BaichuanFrameParser: () => BaichuanFrameParser,
7380
+ BaichuanHlsServer: () => BaichuanHlsServer,
7318
7381
  BaichuanHttpStreamServer: () => BaichuanHttpStreamServer,
7319
7382
  BaichuanMjpegServer: () => BaichuanMjpegServer,
7320
7383
  BaichuanRtspServer: () => BaichuanRtspServer,
@@ -7330,6 +7393,7 @@ __export(index_exports, {
7330
7393
  DUAL_LENS_SINGLE_MOTION_MODELS: () => DUAL_LENS_SINGLE_MOTION_MODELS,
7331
7394
  H264RtpDepacketizer: () => H264RtpDepacketizer,
7332
7395
  H265RtpDepacketizer: () => H265RtpDepacketizer,
7396
+ HlsSessionManager: () => HlsSessionManager,
7333
7397
  Intercom: () => Intercom,
7334
7398
  MjpegTransformer: () => MjpegTransformer,
7335
7399
  NVR_HUB_EXACT_TYPES: () => NVR_HUB_EXACT_TYPES,
@@ -7351,6 +7415,7 @@ __export(index_exports, {
7351
7415
  buildBinaryExtensionXml: () => buildBinaryExtensionXml,
7352
7416
  buildChannelExtensionXml: () => buildChannelExtensionXml,
7353
7417
  buildFloodlightManualXml: () => buildFloodlightManualXml,
7418
+ buildHlsRedirectUrl: () => buildHlsRedirectUrl,
7354
7419
  buildLoginXml: () => buildLoginXml,
7355
7420
  buildPreviewStopXml: () => buildPreviewStopXml,
7356
7421
  buildPreviewStopXmlV11: () => buildPreviewStopXmlV11,
@@ -7389,6 +7454,7 @@ __export(index_exports, {
7389
7454
  decideVideoclipTranscodeMode: () => decideVideoclipTranscodeMode,
7390
7455
  decodeHeader: () => decodeHeader,
7391
7456
  deriveAesKey: () => deriveAesKey,
7457
+ detectIosClient: () => detectIosClient,
7392
7458
  detectVideoCodecFromNal: () => detectVideoCodecFromNal,
7393
7459
  discoverReolinkDevices: () => discoverReolinkDevices,
7394
7460
  discoverViaHttpScan: () => discoverViaHttpScan,
@@ -7719,9 +7785,9 @@ function buildC2dS(params) {
7719
7785
  return buildP2pXml(`<C2D_S><to><port>${params.clientPort}</port></to></C2D_S>`);
7720
7786
  }
7721
7787
  function buildC2mQ(params) {
7722
- const os = params.os ?? "MAC";
7788
+ const os2 = params.os ?? "MAC";
7723
7789
  return buildP2pXml(
7724
- `<C2M_Q><uid>${xmlEscape(params.uid)}</uid><p>${xmlEscape(os)}</p></C2M_Q>`
7790
+ `<C2M_Q><uid>${xmlEscape(params.uid)}</uid><p>${xmlEscape(os2)}</p></C2M_Q>`
7725
7791
  );
7726
7792
  }
7727
7793
  function parseIpPortBlock(tag, body) {
@@ -7747,11 +7813,11 @@ function parseM2cQr(xml) {
7747
7813
  return { ...reg ? { reg } : {}, ...relay ? { relay } : {}, ...log ? { log } : {}, ...t ? { t } : {} };
7748
7814
  }
7749
7815
  function buildC2rC(params) {
7750
- const os = params.os ?? "MAC";
7816
+ const os2 = params.os ?? "MAC";
7751
7817
  const debug = params.debug ?? false;
7752
7818
  const rev = params.revision != null ? `<r>${params.revision}</r>` : "";
7753
7819
  return buildP2pXml(
7754
- `<C2R_C><uid>${xmlEscape(params.uid)}</uid><cli><ip>${xmlEscape(params.cli.ip)}</ip><port>${params.cli.port}</port></cli><relay><ip>${xmlEscape(params.relay.ip)}</ip><port>${params.relay.port}</port></relay><cid>${params.cid}</cid><debug>${debug ? "true" : "false"}</debug><family>${params.family}</family><p>${xmlEscape(os)}</p>` + rev + `</C2R_C>`
7820
+ `<C2R_C><uid>${xmlEscape(params.uid)}</uid><cli><ip>${xmlEscape(params.cli.ip)}</ip><port>${params.cli.port}</port></cli><relay><ip>${xmlEscape(params.relay.ip)}</ip><port>${params.relay.port}</port></relay><cid>${params.cid}</cid><debug>${debug ? "true" : "false"}</debug><family>${params.family}</family><p>${xmlEscape(os2)}</p>` + rev + `</C2R_C>`
7755
7821
  );
7756
7822
  }
7757
7823
  function parseR2cCr(xml) {
@@ -7795,9 +7861,9 @@ function buildC2rCfm(params) {
7795
7861
  );
7796
7862
  }
7797
7863
  function buildC2dC(params) {
7798
- const os = params.os ?? "MAC";
7864
+ const os2 = params.os ?? "MAC";
7799
7865
  return buildP2pXml(
7800
- `<C2D_C><uid>${xmlEscape(params.uid)}</uid><cli><port>${params.clientPort}</port></cli><cid>${params.cid}</cid><mtu>${params.mtu}</mtu><debug>false</debug><p>${xmlEscape(os)}</p></C2D_C>`
7866
+ `<C2D_C><uid>${xmlEscape(params.uid)}</uid><cli><port>${params.clientPort}</port></cli><cid>${params.cid}</cid><mtu>${params.mtu}</mtu><debug>false</debug><p>${xmlEscape(os2)}</p></C2D_C>`
7801
7867
  );
7802
7868
  }
7803
7869
  function buildC2dHb(params) {
@@ -12720,8 +12786,8 @@ var BaichuanRtspServer = class _BaichuanRtspServer extends import_node_events4.E
12720
12786
  `[BaichuanRtspServer] Failed to start native stream for SDP priming: ${error}`
12721
12787
  );
12722
12788
  }
12723
- const { hasParamSets } = this.flow.getFmtp();
12724
- if (!hasParamSets) {
12789
+ const { hasParamSets: hasParamSets2 } = this.flow.getFmtp();
12790
+ if (!hasParamSets2) {
12725
12791
  const primingMs = this.api.client.getTransport() === "udp" ? 4e3 : 1500;
12726
12792
  try {
12727
12793
  await Promise.race([
@@ -12733,12 +12799,12 @@ var BaichuanRtspServer = class _BaichuanRtspServer extends import_node_events4.E
12733
12799
  }
12734
12800
  }
12735
12801
  {
12736
- const { fmtp, hasParamSets } = this.flow.getFmtp();
12802
+ const { fmtp, hasParamSets: hasParamSets2 } = this.flow.getFmtp();
12737
12803
  const fmtpPreview = fmtp.length > 160 ? `${fmtp.slice(0, 160)}...` : fmtp;
12738
12804
  this.logger.info(
12739
- `[BaichuanRtspServer] DESCRIBE SDP for ${clientId} path=${this.path} codec=${this.flow.sdpCodec} hasParamSets=${hasParamSets} fmtp=${fmtpPreview}`
12805
+ `[BaichuanRtspServer] DESCRIBE SDP for ${clientId} path=${this.path} codec=${this.flow.sdpCodec} hasParamSets=${hasParamSets2} fmtp=${fmtpPreview}`
12740
12806
  );
12741
- if (!hasParamSets) {
12807
+ if (!hasParamSets2) {
12742
12808
  this.rtspDebugLog(
12743
12809
  `DESCRIBE responding without parameter sets yet (client=${clientId}, path=${this.path}, flow=${this.flow.key})`
12744
12810
  );
@@ -12914,8 +12980,8 @@ var BaichuanRtspServer = class _BaichuanRtspServer extends import_node_events4.E
12914
12980
  }
12915
12981
  sdp += `a=control:track0\r
12916
12982
  `;
12917
- const { fmtp, hasParamSets } = this.flow.getFmtp();
12918
- if (!hasParamSets) {
12983
+ const { fmtp, hasParamSets: hasParamSets2 } = this.flow.getFmtp();
12984
+ if (!hasParamSets2) {
12919
12985
  this.logger.warn(
12920
12986
  `[BaichuanRtspServer] SDP missing parameter sets for flow ${this.flow.key}`
12921
12987
  );
@@ -13571,8 +13637,8 @@ var BaichuanRtspServer = class _BaichuanRtspServer extends import_node_events4.E
13571
13637
  const normalizedVideoData = videoType === "H264" ? convertToAnnexB(frame.data) : convertToAnnexB2(frame.data);
13572
13638
  if (!resources?.seenFirstVideoKeyframe) {
13573
13639
  if (videoType === "H265") {
13574
- const { hasParamSets } = this.flow.getFmtp();
13575
- if (!hasParamSets) {
13640
+ const { hasParamSets: hasParamSets2 } = this.flow.getFmtp();
13641
+ if (!hasParamSets2) {
13576
13642
  if (rtspDebug && !h265WaitParamSetsLogged) {
13577
13643
  h265WaitParamSetsLogged = true;
13578
13644
  rtspDebugLog(
@@ -13603,8 +13669,8 @@ var BaichuanRtspServer = class _BaichuanRtspServer extends import_node_events4.E
13603
13669
  }
13604
13670
  resources.seenFirstVideoKeyframe = true;
13605
13671
  } else {
13606
- const { hasParamSets } = this.flow.getFmtp();
13607
- if (!hasParamSets) {
13672
+ const { hasParamSets: hasParamSets2 } = this.flow.getFmtp();
13673
+ if (!hasParamSets2) {
13608
13674
  if (rtspDebug && !h265WaitParamSetsLogged) {
13609
13675
  h265WaitParamSetsLogged = true;
13610
13676
  rtspDebugLog(
@@ -13764,8 +13830,8 @@ var BaichuanRtspServer = class _BaichuanRtspServer extends import_node_events4.E
13764
13830
  this.setFlowVideoType(frame.videoType, "native stream");
13765
13831
  }
13766
13832
  this.flow.extractParameterSets(frame.data);
13767
- const { hasParamSets } = this.flow.getFmtp();
13768
- if (hasParamSets) {
13833
+ const { hasParamSets: hasParamSets2 } = this.flow.getFmtp();
13834
+ if (hasParamSets2) {
13769
13835
  this.markFirstFrameReceived();
13770
13836
  }
13771
13837
  },
@@ -15059,10 +15125,15 @@ var parseRecordingFilesFromXml = (xml) => {
15059
15125
  if (startDt) item.startTime = startDt;
15060
15126
  if (endDt) item.endTime = endDt;
15061
15127
  const parsed = parseRecordingFileName(item.name ?? item.fileName);
15062
- if (parsed) {
15063
- item.parsedFileName = parsed;
15064
- if (!item.startTime) item.startTime = parsed.start;
15065
- if (!item.endTime) item.endTime = parsed.end;
15128
+ const parsedFromPath = item.fileName !== item.name ? parseRecordingFileName(item.fileName) : void 0;
15129
+ const bestParsed = parsedFromPath?.sizeBytes != null ? parsedFromPath : parsed;
15130
+ if (bestParsed) {
15131
+ item.parsedFileName = bestParsed;
15132
+ if (!item.startTime) item.startTime = bestParsed.start;
15133
+ if (!item.endTime) item.endTime = bestParsed.end;
15134
+ if (item.sizeBytes == null && bestParsed.sizeBytes != null) {
15135
+ item.sizeBytes = bestParsed.sizeBytes;
15136
+ }
15066
15137
  }
15067
15138
  item.detectionClasses = buildDetectionClasses(parsed, item.recordType);
15068
15139
  out.push(item);
@@ -15089,6 +15160,9 @@ var parseRecordingFilesFromXml = (xml) => {
15089
15160
  item.parsedFileName = parsed;
15090
15161
  if (!item.startTime) item.startTime = parsed.start;
15091
15162
  if (!item.endTime) item.endTime = parsed.end;
15163
+ if (item.sizeBytes == null && parsed.sizeBytes != null) {
15164
+ item.sizeBytes = parsed.sizeBytes;
15165
+ }
15092
15166
  }
15093
15167
  item.detectionClasses = buildDetectionClasses(parsed, item.recordType);
15094
15168
  out.push(item);
@@ -16403,6 +16477,10 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
16403
16477
  * Value: client, refCount, createdAt
16404
16478
  */
16405
16479
  dedicatedClients = /* @__PURE__ */ new Map();
16480
+ /** Keep replay dedicated sockets warm briefly to reduce clip switch latency. */
16481
+ // Keep replay sockets warm briefly for fast clip switches, but tear down quickly
16482
+ // when clients stop requesting HLS segments (avoids looking like a stuck session).
16483
+ static REPLAY_DEDICATED_KEEPALIVE_MS = 1e4;
16406
16484
  /**
16407
16485
  * Get a summary of currently active dedicated sessions.
16408
16486
  * Useful for debugging/logging to see how many sockets are open.
@@ -16512,22 +16590,35 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
16512
16590
  * Process the replay queue - executes operations one at a time.
16513
16591
  */
16514
16592
  async processReplayQueue() {
16515
- if (this.replayQueueProcessing) return;
16593
+ if (this.replayQueueProcessing) {
16594
+ this.logger?.debug?.(
16595
+ `[ReplayQueue] Already processing, queue length: ${this.replayQueue.length}`
16596
+ );
16597
+ return;
16598
+ }
16516
16599
  this.replayQueueProcessing = true;
16600
+ this.logger?.debug?.(
16601
+ `[ReplayQueue] Starting queue processing, items: ${this.replayQueue.length}`
16602
+ );
16517
16603
  while (this.replayQueue.length > 0) {
16518
16604
  const item = this.replayQueue.shift();
16519
16605
  if (item) {
16520
16606
  const timeSinceLastReplay = Date.now() - this.lastReplayEndTime;
16521
16607
  if (timeSinceLastReplay < this.REPLAY_COOLDOWN_MS) {
16522
- await new Promise(
16523
- (r) => setTimeout(r, this.REPLAY_COOLDOWN_MS - timeSinceLastReplay)
16524
- );
16608
+ const waitTime = this.REPLAY_COOLDOWN_MS - timeSinceLastReplay;
16609
+ this.logger?.debug?.(`[ReplayQueue] Waiting ${waitTime}ms cooldown`);
16610
+ await new Promise((r) => setTimeout(r, waitTime));
16525
16611
  }
16612
+ this.logger?.debug?.(
16613
+ `[ReplayQueue] Executing item, remaining: ${this.replayQueue.length}`
16614
+ );
16526
16615
  await item.execute();
16527
16616
  this.lastReplayEndTime = Date.now();
16617
+ this.logger?.debug?.(`[ReplayQueue] Item completed`);
16528
16618
  }
16529
16619
  }
16530
16620
  this.replayQueueProcessing = false;
16621
+ this.logger?.debug?.(`[ReplayQueue] Queue processing complete`);
16531
16622
  }
16532
16623
  /**
16533
16624
  * Enqueue a replay operation with optional de-duplication.
@@ -16590,14 +16681,35 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
16590
16681
  this.replayQueue.push({
16591
16682
  execute: () => {
16592
16683
  return new Promise((releaseSlot) => {
16684
+ let released = false;
16685
+ const safeRelease = () => {
16686
+ if (released) return;
16687
+ released = true;
16688
+ releaseSlot();
16689
+ };
16690
+ const safetyTimeout = setTimeout(
16691
+ () => {
16692
+ if (!released) {
16693
+ this.logger?.warn?.(
16694
+ "[ReplayQueue] Safety timeout: releasing queue slot after 10 minutes"
16695
+ );
16696
+ safeRelease();
16697
+ }
16698
+ },
16699
+ 10 * 60 * 1e3
16700
+ );
16593
16701
  setup().then((result) => {
16594
16702
  resolvePromise({
16595
16703
  result,
16596
- release: () => releaseSlot()
16704
+ release: () => {
16705
+ clearTimeout(safetyTimeout);
16706
+ safeRelease();
16707
+ }
16597
16708
  });
16598
16709
  }).catch((e) => {
16710
+ clearTimeout(safetyTimeout);
16599
16711
  rejectPromise(e);
16600
- releaseSlot();
16712
+ safeRelease();
16601
16713
  });
16602
16714
  });
16603
16715
  }
@@ -16667,30 +16779,68 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
16667
16779
  * immediately and create a new one. This ensures clean state for each clip.
16668
16780
  */
16669
16781
  async acquireDedicatedClient(sessionKey, logger) {
16782
+ const log = logger ?? this.logger;
16783
+ const isReplayKey = sessionKey.startsWith("replay:");
16670
16784
  const existing = this.dedicatedClients.get(sessionKey);
16671
16785
  if (existing) {
16672
- logger?.debug?.(
16673
- `[DedicatedClient] Closing existing client for ${sessionKey} (new stream requested)`
16786
+ if (existing.idleCloseTimer) {
16787
+ clearTimeout(existing.idleCloseTimer);
16788
+ existing.idleCloseTimer = void 0;
16789
+ }
16790
+ if (existing.refCount === 0) {
16791
+ existing.refCount = 1;
16792
+ existing.lastUsedAt = Date.now();
16793
+ log?.debug?.(
16794
+ `[DedicatedClient] Reusing existing dedicated socket for sessionKey=${sessionKey}`
16795
+ );
16796
+ try {
16797
+ if (!existing.client.loggedIn) {
16798
+ await existing.client.login();
16799
+ }
16800
+ } catch {
16801
+ }
16802
+ if (existing.client.loggedIn) {
16803
+ return {
16804
+ client: existing.client,
16805
+ release: () => this.releaseDedicatedClient(sessionKey, logger)
16806
+ };
16807
+ }
16808
+ }
16809
+ log?.log?.(
16810
+ `[DedicatedClient] Closing existing socket for sessionKey=${sessionKey} (preempting active session)`
16674
16811
  );
16675
16812
  this.dedicatedClients.delete(sessionKey);
16676
- existing.client.close({ reason: "new stream for same device" }).catch((e) => {
16677
- logger?.debug?.(`[DedicatedClient] Error closing old socket: ${e}`);
16678
- });
16813
+ try {
16814
+ await existing.client.close({ reason: "preempted by new session" });
16815
+ log?.log?.(
16816
+ `[DedicatedClient] Old socket closed successfully for sessionKey=${sessionKey}`
16817
+ );
16818
+ } catch (e) {
16819
+ log?.warn?.(
16820
+ `[DedicatedClient] Error closing old socket for sessionKey=${sessionKey}: ${e}`
16821
+ );
16822
+ }
16679
16823
  }
16680
- logger?.debug?.(`[DedicatedClient] Creating new client for ${sessionKey}`);
16824
+ log?.log?.(
16825
+ `[DedicatedClient] Opening new dedicated socket for sessionKey=${sessionKey}`
16826
+ );
16681
16827
  const dedicatedClient = new BaichuanClient({
16682
16828
  host: this.host,
16683
16829
  username: this.username,
16684
16830
  password: this.password,
16685
- logger: logger ?? this.logger,
16831
+ logger: log,
16686
16832
  debugOptions: this.client.getDebugConfig?.()
16687
16833
  });
16688
16834
  await dedicatedClient.login();
16835
+ log?.log?.(
16836
+ `[DedicatedClient] Dedicated socket logged in for sessionKey=${sessionKey}`
16837
+ );
16689
16838
  this.dedicatedClients.set(sessionKey, {
16690
16839
  client: dedicatedClient,
16691
16840
  refCount: 1,
16692
- // Keep for compatibility, but not used for reuse logic
16693
- createdAt: Date.now()
16841
+ createdAt: Date.now(),
16842
+ lastUsedAt: Date.now(),
16843
+ idleCloseTimer: void 0
16694
16844
  });
16695
16845
  return {
16696
16846
  client: dedicatedClient,
@@ -16702,15 +16852,81 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
16702
16852
  * This ensures clean teardown at the end of each clip.
16703
16853
  */
16704
16854
  async releaseDedicatedClient(sessionKey, logger) {
16855
+ const log = logger ?? this.logger;
16705
16856
  const entry = this.dedicatedClients.get(sessionKey);
16706
16857
  if (!entry) return;
16858
+ entry.refCount = Math.max(0, entry.refCount - 1);
16859
+ entry.lastUsedAt = Date.now();
16860
+ if (entry.refCount > 0) return;
16861
+ const isReplayKey = sessionKey.startsWith("replay:");
16862
+ const allowReplayKeepAlive = /^replay:[^:]+$/.test(sessionKey);
16863
+ if (isReplayKey && allowReplayKeepAlive) {
16864
+ if (entry.idleCloseTimer) return;
16865
+ entry.idleCloseTimer = setTimeout(async () => {
16866
+ const current = this.dedicatedClients.get(sessionKey);
16867
+ if (!current) return;
16868
+ if (current.refCount > 0) return;
16869
+ this.dedicatedClients.delete(sessionKey);
16870
+ log?.debug?.(
16871
+ `[DedicatedClient] Closing idle replay socket for sessionKey=${sessionKey} (keepalive expired)`
16872
+ );
16873
+ try {
16874
+ await current.client.close({
16875
+ reason: "replay idle keepalive expired"
16876
+ });
16877
+ } catch {
16878
+ }
16879
+ }, _ReolinkBaichuanApi.REPLAY_DEDICATED_KEEPALIVE_MS);
16880
+ return;
16881
+ }
16707
16882
  this.dedicatedClients.delete(sessionKey);
16708
- logger?.debug?.(`[DedicatedClient] Releasing and closing ${sessionKey}`);
16883
+ log?.log?.(
16884
+ `[DedicatedClient] Closing socket for sessionKey=${sessionKey} (session ended)`
16885
+ );
16709
16886
  try {
16710
16887
  await entry.client.close({ reason: "dedicated session ended" });
16888
+ log?.log?.(
16889
+ `[DedicatedClient] Socket closed successfully for sessionKey=${sessionKey}`
16890
+ );
16891
+ } catch (e) {
16892
+ log?.warn?.(
16893
+ `[DedicatedClient] Error closing socket for sessionKey=${sessionKey}: ${e}`
16894
+ );
16895
+ }
16896
+ }
16897
+ /**
16898
+ * Force-close a dedicated client if it exists.
16899
+ * This is called BEFORE entering the queue to immediately terminate any existing stream
16900
+ * for the same sessionKey. The existing stream will receive an error, release its queue slot,
16901
+ * and the new request can then proceed.
16902
+ *
16903
+ * @param sessionKey - The session key to force-close (e.g., `replay:${deviceId}`)
16904
+ * @param logger - Optional logger
16905
+ * @returns true if a client was closed, false if no client existed
16906
+ */
16907
+ async forceCloseDedicatedClient(sessionKey, logger) {
16908
+ const log = logger ?? this.logger;
16909
+ const entry = this.dedicatedClients.get(sessionKey);
16910
+ if (!entry) return false;
16911
+ if (entry.idleCloseTimer) {
16912
+ clearTimeout(entry.idleCloseTimer);
16913
+ entry.idleCloseTimer = void 0;
16914
+ }
16915
+ log?.log?.(
16916
+ `[DedicatedClient] Force-closing existing socket for sessionKey=${sessionKey} (new request preempting)`
16917
+ );
16918
+ this.dedicatedClients.delete(sessionKey);
16919
+ try {
16920
+ await entry.client.close({ reason: "preempted by new request" });
16921
+ log?.log?.(
16922
+ `[DedicatedClient] Force-close complete for sessionKey=${sessionKey}`
16923
+ );
16711
16924
  } catch (e) {
16712
- logger?.debug?.(`[DedicatedClient] Error closing socket: ${e}`);
16925
+ log?.warn?.(
16926
+ `[DedicatedClient] Error during force-close for sessionKey=${sessionKey}: ${e}`
16927
+ );
16713
16928
  }
16929
+ return true;
16714
16930
  }
16715
16931
  /**
16716
16932
  * Create a dedicated Baichuan client session for streaming.
@@ -16747,6 +16963,9 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
16747
16963
  await Promise.allSettled(
16748
16964
  entries.map(async ([key, entry]) => {
16749
16965
  try {
16966
+ if (entry.idleCloseTimer) {
16967
+ clearTimeout(entry.idleCloseTimer);
16968
+ }
16750
16969
  this.logger?.debug?.(`[DedicatedClient] Cleanup: closing ${key}`);
16751
16970
  await entry.client.close({ reason: "API cleanup" });
16752
16971
  } catch {
@@ -18602,7 +18821,8 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
18602
18821
  channel,
18603
18822
  payloadXml: stopXml,
18604
18823
  messageClass: BC_CLASS_MODERN_24,
18605
- timeoutMs: 1e4,
18824
+ timeoutMs: 2e3,
18825
+ // Short timeout - if socket is closed, fail fast
18606
18826
  internal: true
18607
18827
  });
18608
18828
  } catch {
@@ -18738,7 +18958,8 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
18738
18958
  channel,
18739
18959
  payloadXml: stopXml,
18740
18960
  messageClass: BC_CLASS_MODERN_24,
18741
- timeoutMs: 1e4,
18961
+ timeoutMs: 2e3,
18962
+ // Short timeout - if socket is closed, fail fast
18742
18963
  internal: true
18743
18964
  });
18744
18965
  } catch {
@@ -19785,11 +20006,20 @@ ${stderr}`)
19785
20006
  }
19786
20007
  }
19787
20008
  async downloadRecording(params) {
20009
+ this.logger?.debug?.(
20010
+ `[downloadRecording] Queuing download for: ${params.fileName}, channel=${params.channel}`
20011
+ );
19788
20012
  return this.enqueueReplayOperation(async () => {
20013
+ this.logger?.debug?.(
20014
+ `[downloadRecording] Starting download for: ${params.fileName}`
20015
+ );
19789
20016
  await this.client.login();
19790
20017
  const channel = this.normalizeChannel(params.channel);
19791
20018
  const uid = await this.ensureUidForRecordings(channel, params.uid);
19792
20019
  const fileName = params.fileName;
20020
+ this.logger?.debug?.(
20021
+ `[downloadRecording] Trying fileInfoListReplayBinaryDownload for: ${fileName}`
20022
+ );
19793
20023
  let replayErr;
19794
20024
  try {
19795
20025
  return await this.fileInfoListReplayBinaryDownload({
@@ -19800,7 +20030,13 @@ ${stderr}`)
19800
20030
  });
19801
20031
  } catch (e) {
19802
20032
  replayErr = e;
20033
+ this.logger?.debug?.(
20034
+ `[downloadRecording] fileInfoListReplayBinaryDownload failed: ${e instanceof Error ? e.message : String(e)}`
20035
+ );
19803
20036
  }
20037
+ this.logger?.debug?.(
20038
+ `[downloadRecording] Trying fileInfoListDownload for: ${fileName}`
20039
+ );
19804
20040
  let downloadErr;
19805
20041
  try {
19806
20042
  return await this.fileInfoListDownload({
@@ -19811,7 +20047,13 @@ ${stderr}`)
19811
20047
  });
19812
20048
  } catch (e) {
19813
20049
  downloadErr = e;
20050
+ this.logger?.debug?.(
20051
+ `[downloadRecording] fileInfoListDownload failed: ${e instanceof Error ? e.message : String(e)}`
20052
+ );
19814
20053
  }
20054
+ this.logger?.debug?.(
20055
+ `[downloadRecording] Trying fileInfoListPagedDownload for: ${fileName}`
20056
+ );
19815
20057
  try {
19816
20058
  const result = await this.fileInfoListPagedDownload({
19817
20059
  channel,
@@ -19823,6 +20065,9 @@ ${stderr}`)
19823
20065
  return result;
19824
20066
  }
19825
20067
  } catch (e) {
20068
+ this.logger?.debug?.(
20069
+ `[downloadRecording] fileInfoListPagedDownload failed: ${e instanceof Error ? e.message : String(e)}`
20070
+ );
19826
20071
  }
19827
20072
  const replayMsg = replayErr instanceof Error ? replayErr.message : replayErr != null ? String(replayErr) : "";
19828
20073
  const dlMsg = downloadErr instanceof Error ? downloadErr.message : downloadErr != null ? String(downloadErr) : "";
@@ -20118,7 +20363,7 @@ ${stderr}`)
20118
20363
  * Convert a raw video keyframe to JPEG using ffmpeg.
20119
20364
  */
20120
20365
  async convertFrameToJpeg(params) {
20121
- const { spawn: spawn11 } = await import("child_process");
20366
+ const { spawn: spawn12 } = await import("child_process");
20122
20367
  const ffmpeg = params.ffmpegPath ?? "ffmpeg";
20123
20368
  const inputFormat = params.videoCodec === "H265" ? "hevc" : "h264";
20124
20369
  return new Promise((resolve, reject) => {
@@ -20140,7 +20385,7 @@ ${stderr}`)
20140
20385
  "2",
20141
20386
  "pipe:1"
20142
20387
  ];
20143
- const proc = spawn11(ffmpeg, args, {
20388
+ const proc = spawn12(ffmpeg, args, {
20144
20389
  stdio: ["pipe", "pipe", "pipe"]
20145
20390
  });
20146
20391
  const chunks = [];
@@ -20283,26 +20528,26 @@ ${stderr}`)
20283
20528
  * Internal helper to mux video+audio into MP4 using ffmpeg.
20284
20529
  */
20285
20530
  async muxToMp4(params) {
20286
- const { spawn: spawn11 } = await import("child_process");
20531
+ const { spawn: spawn12 } = await import("child_process");
20287
20532
  const { randomUUID: randomUUID2 } = await import("crypto");
20288
- const fs5 = await import("fs/promises");
20289
- const os = await import("os");
20290
- const path5 = await import("path");
20533
+ const fs6 = await import("fs/promises");
20534
+ const os2 = await import("os");
20535
+ const path6 = await import("path");
20291
20536
  const ffmpeg = params.ffmpegPath ?? "ffmpeg";
20292
- const tmpDir = os.tmpdir();
20537
+ const tmpDir = os2.tmpdir();
20293
20538
  const id = randomUUID2();
20294
20539
  const videoFormat = params.videoCodec === "H265" ? "hevc" : "h264";
20295
- const videoPath = path5.join(tmpDir, `reolink-${id}.${videoFormat}`);
20296
- const outputPath = path5.join(tmpDir, `reolink-${id}.mp4`);
20540
+ const videoPath = path6.join(tmpDir, `reolink-${id}.${videoFormat}`);
20541
+ const outputPath = path6.join(tmpDir, `reolink-${id}.mp4`);
20297
20542
  let audioPath = null;
20298
20543
  if (params.audioData && params.audioData.length > 0 && params.audioCodec) {
20299
20544
  const audioExt = params.audioCodec === "Aac" ? "aac" : "raw";
20300
- audioPath = path5.join(tmpDir, `reolink-${id}.${audioExt}`);
20545
+ audioPath = path6.join(tmpDir, `reolink-${id}.${audioExt}`);
20301
20546
  }
20302
20547
  try {
20303
- await fs5.writeFile(videoPath, params.videoData);
20548
+ await fs6.writeFile(videoPath, params.videoData);
20304
20549
  if (audioPath && params.audioData) {
20305
- await fs5.writeFile(audioPath, params.audioData);
20550
+ await fs6.writeFile(audioPath, params.audioData);
20306
20551
  }
20307
20552
  const args = ["-hide_banner", "-loglevel", "error", "-y"];
20308
20553
  if (params.fps > 0) {
@@ -20335,7 +20580,7 @@ ${stderr}`)
20335
20580
  outputPath
20336
20581
  );
20337
20582
  await new Promise((resolve, reject) => {
20338
- const p = spawn11(ffmpeg, args, { stdio: ["ignore", "ignore", "pipe"] });
20583
+ const p = spawn12(ffmpeg, args, { stdio: ["ignore", "ignore", "pipe"] });
20339
20584
  let stderr = "";
20340
20585
  p.stderr.on("data", (d) => {
20341
20586
  stderr += d.toString();
@@ -20355,13 +20600,13 @@ ${stderr}`)
20355
20600
  }
20356
20601
  });
20357
20602
  });
20358
- return await fs5.readFile(outputPath);
20603
+ return await fs6.readFile(outputPath);
20359
20604
  } finally {
20360
- await fs5.unlink(videoPath).catch(() => {
20605
+ await fs6.unlink(videoPath).catch(() => {
20361
20606
  });
20362
- if (audioPath) await fs5.unlink(audioPath).catch(() => {
20607
+ if (audioPath) await fs6.unlink(audioPath).catch(() => {
20363
20608
  });
20364
- await fs5.unlink(outputPath).catch(() => {
20609
+ await fs6.unlink(outputPath).catch(() => {
20365
20610
  });
20366
20611
  }
20367
20612
  }
@@ -21895,11 +22140,13 @@ ${stderr}`)
21895
22140
  * @param settings - Floodlight settings to apply
21896
22141
  *
21897
22142
  * @example
22143
+ * ```typescript
21898
22144
  * await api.setFloodlightSettings(0, {
21899
22145
  * duration: 300, // 5 minutes
21900
22146
  * detectType: 'people,vehicle',
21901
22147
  * brightness: 80,
21902
22148
  * });
22149
+ * ```
21903
22150
  */
21904
22151
  async setFloodlightSettings(channel, settings) {
21905
22152
  const ch = this.normalizeChannel(channel);
@@ -24041,11 +24288,13 @@ ${scheduleItems}
24041
24288
  */
24042
24289
  async createRecordingReplayMp4Stream(params) {
24043
24290
  const logger = params.logger ?? this.logger;
24291
+ const useMpegTsMuxer = params.useMpegTsMuxer ?? true;
24044
24292
  const parsed = parseRecordingFileName(params.fileName);
24045
24293
  const durationMs = parsed?.durationMs ?? 3e5;
24294
+ const fps = parsed?.framerate && parsed.framerate > 0 ? parsed.framerate : 15;
24046
24295
  const seconds = Math.ceil(durationMs / 1e3 * 1.1);
24047
24296
  logger?.debug?.(
24048
- `[createRecordingReplayMp4Stream] Starting: channel=${params.channel}, fileName=${params.fileName}, durationMs=${durationMs}, timeoutSec=${seconds}, deviceId=${params.deviceId ?? "auto"}`
24297
+ `[createRecordingReplayMp4Stream] Starting: channel=${params.channel}, fileName=${params.fileName}, durationMs=${durationMs}, fps=${fps}, timeoutSec=${seconds}, deviceId=${params.deviceId ?? "auto"}, useMpegTsMuxer=${useMpegTsMuxer}`
24049
24298
  );
24050
24299
  const startParams = {
24051
24300
  channel: params.channel,
@@ -24054,12 +24303,23 @@ ${scheduleItems}
24054
24303
  ...params.isNvr != null ? { isNvr: params.isNvr } : {},
24055
24304
  ...params.deviceId != null ? { deviceId: params.deviceId } : {}
24056
24305
  };
24057
- const { result: replayResult, release: releaseQueueSlot } = await this.enqueueStreamingReplayOperation(
24058
- () => this.startRecordingReplayStream(startParams)
24059
- );
24306
+ const { result: replayResult, release: releaseQueueSlot } = await this.enqueueStreamingReplayOperation(async () => {
24307
+ try {
24308
+ return await this.startRecordingReplayStream(startParams);
24309
+ } catch (e) {
24310
+ if (!params.deviceId) throw e;
24311
+ const sessionKey = `replay:${params.deviceId}`;
24312
+ logger?.debug?.(
24313
+ `[createRecordingReplayMp4Stream] startRecordingReplayStream failed; force-closing dedicated client and retrying once`
24314
+ );
24315
+ await this.forceCloseDedicatedClient(sessionKey, logger);
24316
+ return await this.startRecordingReplayStream(startParams);
24317
+ }
24318
+ });
24060
24319
  const { stream, stop: stopReplay } = replayResult;
24061
24320
  const input = new import_node_stream.PassThrough();
24062
24321
  const output = new import_node_stream.PassThrough();
24322
+ const H264_AUD = Buffer.from([0, 0, 0, 1, 9, 240]);
24063
24323
  let tsMuxer = null;
24064
24324
  let ff = null;
24065
24325
  let ended = false;
@@ -24068,26 +24328,59 @@ ${scheduleItems}
24068
24328
  if (ff) return;
24069
24329
  const needsTranscode = videoType === "H265" && params.transcodeH265ToH264 === true;
24070
24330
  logger?.debug?.(
24071
- `[createRecordingReplayMp4Stream] Starting ffmpeg with videoType=${videoType}, transcode=${needsTranscode}`
24331
+ `[createRecordingReplayMp4Stream] Starting ffmpeg with videoType=${videoType}, transcode=${needsTranscode}, useMpegTsMuxer=${useMpegTsMuxer}, fps=${fps}`
24072
24332
  );
24073
- MpegTsMuxer.resetCounters();
24074
- tsMuxer = new MpegTsMuxer({ videoType });
24075
- const args = [
24076
- "-hide_banner",
24077
- "-loglevel",
24078
- "error",
24079
- "-f",
24080
- "mpegts",
24081
- "-i",
24082
- "pipe:0",
24083
- // Video codec: transcode H.265→H.264 if requested, otherwise copy
24084
- ...needsTranscode ? ["-c:v", "libx264", "-preset", "ultrafast", "-crf", "23"] : ["-c", "copy"],
24085
- "-movflags",
24086
- "frag_keyframe+empty_moov",
24087
- "-f",
24088
- "mp4",
24089
- "pipe:1"
24090
- ];
24333
+ let args;
24334
+ if (useMpegTsMuxer) {
24335
+ MpegTsMuxer.resetCounters();
24336
+ tsMuxer = new MpegTsMuxer({ videoType });
24337
+ args = [
24338
+ "-hide_banner",
24339
+ "-loglevel",
24340
+ "error",
24341
+ "-f",
24342
+ "mpegts",
24343
+ "-i",
24344
+ "pipe:0",
24345
+ // Video codec: transcode H.265→H.264 if requested, otherwise copy
24346
+ ...needsTranscode ? ["-c:v", "libx264", "-preset", "ultrafast", "-crf", "23"] : ["-c", "copy"],
24347
+ // frag_keyframe: create new fragment at each keyframe
24348
+ // empty_moov: write ftyp/moov immediately (required for streaming)
24349
+ // default_base_moof: required for iOS Media Source Extensions
24350
+ // negative_cts_offsets: fixes some iOS playback issues
24351
+ "-movflags",
24352
+ "frag_keyframe+empty_moov+default_base_moof+negative_cts_offsets",
24353
+ "-f",
24354
+ "mp4",
24355
+ "pipe:1"
24356
+ ];
24357
+ } else {
24358
+ const inputFormat = videoType === "H265" ? "hevc" : "h264";
24359
+ args = [
24360
+ "-hide_banner",
24361
+ "-loglevel",
24362
+ "error",
24363
+ "-fflags",
24364
+ "+genpts",
24365
+ "-r",
24366
+ String(fps),
24367
+ "-f",
24368
+ inputFormat,
24369
+ "-i",
24370
+ "pipe:0",
24371
+ // Video codec: transcode H.265→H.264 if requested, otherwise copy
24372
+ ...needsTranscode ? ["-c:v", "libx264", "-preset", "ultrafast", "-crf", "23"] : ["-c", "copy"],
24373
+ // frag_keyframe: create new fragment at each keyframe
24374
+ // empty_moov: write ftyp/moov immediately (required for streaming)
24375
+ // default_base_moof: required for iOS Media Source Extensions
24376
+ // negative_cts_offsets: fixes some iOS playback issues
24377
+ "-movflags",
24378
+ "frag_keyframe+empty_moov+default_base_moof+negative_cts_offsets",
24379
+ "-f",
24380
+ "mp4",
24381
+ "pipe:1"
24382
+ ];
24383
+ }
24091
24384
  ff = (0, import_node_child_process3.spawn)("ffmpeg", args, { stdio: ["pipe", "pipe", "pipe"] });
24092
24385
  if (!ff.stdin || !ff.stdout || !ff.stderr) {
24093
24386
  throw new Error("ffmpeg stdio streams not available");
@@ -24125,17 +24418,19 @@ ${scheduleItems}
24125
24418
  const stopAll = async () => {
24126
24419
  if (ended) return;
24127
24420
  ended = true;
24421
+ releaseQueueSlot();
24128
24422
  logger?.debug?.(
24129
24423
  `[createRecordingReplayMp4Stream] Stopping stream, frames=${frameCount}`
24130
24424
  );
24131
- try {
24132
- await stopReplay();
24133
- } catch {
24134
- }
24135
- try {
24136
- await stream.stop();
24137
- } catch {
24138
- }
24425
+ const cleanupPromises = [];
24426
+ cleanupPromises.push(
24427
+ stopReplay().catch(() => {
24428
+ })
24429
+ );
24430
+ cleanupPromises.push(
24431
+ stream.stop().catch(() => {
24432
+ })
24433
+ );
24139
24434
  try {
24140
24435
  input.end();
24141
24436
  } catch {
@@ -24148,7 +24443,11 @@ ${scheduleItems}
24148
24443
  output.end();
24149
24444
  } catch {
24150
24445
  }
24151
- releaseQueueSlot();
24446
+ await Promise.race([
24447
+ Promise.all(cleanupPromises),
24448
+ new Promise((resolve) => setTimeout(resolve, 2e3))
24449
+ // Max 2s for cleanup
24450
+ ]);
24152
24451
  };
24153
24452
  const timer = setTimeout(
24154
24453
  () => {
@@ -24170,15 +24469,25 @@ ${scheduleItems}
24170
24469
  output.destroy(e);
24171
24470
  void stopAll();
24172
24471
  });
24472
+ stream.on("close", () => {
24473
+ logger?.debug?.(
24474
+ `[createRecordingReplayMp4Stream] Stream closed, frames=${frameCount}`
24475
+ );
24476
+ clearTimeout(timer);
24477
+ void stopAll();
24478
+ });
24173
24479
  stream.on(
24174
24480
  "videoAccessUnit",
24175
24481
  ({ data, videoType, isKeyframe, microseconds }) => {
24176
24482
  if (ended) return;
24177
24483
  startFfmpeg(videoType);
24178
24484
  frameCount++;
24179
- if (tsMuxer) {
24485
+ if (useMpegTsMuxer && tsMuxer) {
24180
24486
  const tsData = tsMuxer.mux(data, microseconds, isKeyframe);
24181
24487
  input.write(tsData);
24488
+ } else {
24489
+ if (videoType === "H264") input.write(H264_AUD);
24490
+ input.write(data);
24182
24491
  }
24183
24492
  }
24184
24493
  );
@@ -24217,19 +24526,53 @@ ${scheduleItems}
24217
24526
  */
24218
24527
  async createRecordingDownloadMp4Stream(params) {
24219
24528
  const timeoutMs = params.timeoutMs ?? 12e4;
24220
- const parsed = parseRecordingFileName(params.fileName);
24221
- const fps = parsed?.framerate && parsed.framerate > 0 ? parsed.framerate : 15;
24222
24529
  const channel = this.normalizeChannel(params.channel);
24223
24530
  const uid = await this.ensureUidForRecordings(channel);
24224
- const { annexB, videoType } = await this.downloadRecordingDemuxed({
24531
+ const raw = await this.downloadRecording({
24225
24532
  channel,
24226
24533
  uid,
24227
24534
  fileName: params.fileName,
24228
24535
  timeoutMs
24229
24536
  });
24230
- if (annexB.length === 0) {
24537
+ if (raw.length === 0) {
24231
24538
  throw new Error("Downloaded recording is empty");
24232
24539
  }
24540
+ const videoFrames = [];
24541
+ let videoType = null;
24542
+ const decoder = new BcMediaAnnexBDecoder({
24543
+ strict: false,
24544
+ logger: this.logger,
24545
+ onVideoAccessUnit: ({ annexB: annexB2, microseconds }) => {
24546
+ videoFrames.push({ annexB: annexB2, microseconds });
24547
+ }
24548
+ });
24549
+ decoder.push(raw);
24550
+ const stats = decoder.getStats();
24551
+ videoType = stats.videoType;
24552
+ if (videoFrames.length === 0) {
24553
+ throw new Error("Downloaded recording has no video frames");
24554
+ }
24555
+ let fps;
24556
+ if (videoFrames.length >= 2) {
24557
+ const firstTs = videoFrames[0].microseconds;
24558
+ const lastTs = videoFrames[videoFrames.length - 1].microseconds;
24559
+ const durationUs = lastTs - firstTs;
24560
+ if (durationUs > 0) {
24561
+ const durationSeconds = durationUs / 1e6;
24562
+ fps = (videoFrames.length - 1) / durationSeconds;
24563
+ } else {
24564
+ const infoFps = stats.infos[0]?.fps;
24565
+ fps = infoFps && infoFps > 0 ? infoFps : 15;
24566
+ }
24567
+ } else {
24568
+ const infoFps = stats.infos[0]?.fps;
24569
+ fps = infoFps && infoFps > 0 ? infoFps : 15;
24570
+ }
24571
+ if (fps > 14 && fps < 16) fps = 15;
24572
+ else if (fps > 23 && fps < 26) fps = 25;
24573
+ else if (fps > 29 && fps < 31) fps = 30;
24574
+ else fps = Math.round(fps * 100) / 100;
24575
+ const annexB = Buffer.concat(videoFrames.map((f) => f.annexB));
24233
24576
  const input = new import_node_stream.PassThrough();
24234
24577
  const output = new import_node_stream.PassThrough();
24235
24578
  let ff = null;
@@ -24301,48 +24644,380 @@ ${scheduleItems}
24301
24644
  stop: stopAll
24302
24645
  };
24303
24646
  }
24304
- // ============================================================
24305
- // STANDALONE CAMERA METHODS
24306
- // ============================================================
24307
- // These methods are specifically designed for standalone cameras
24308
- // (non-NVR) connected via TCP. They provide a simplified interface
24309
- // for common operations like listing recordings, streaming playback,
24310
- // downloading clips, and getting thumbnails.
24311
- // ============================================================
24312
24647
  /**
24313
- * List recordings from a standalone camera.
24648
+ * Create an HLS (HTTP Live Streaming) session for a recording.
24314
24649
  *
24315
- * This method is optimized for standalone cameras (non-NVR) and uses
24316
- * the native Baichuan protocol to list recorded files.
24650
+ * This method creates HLS segments on-the-fly from a recording replay stream.
24651
+ * HLS is required for iOS devices (Safari, Home app) which don't support
24652
+ * fragmented MP4 streaming well and require Range request support.
24653
+ *
24654
+ * The session writes HLS segments (.ts files) and playlist (.m3u8) to a
24655
+ * temporary directory. You must serve these files via HTTP to the client.
24317
24656
  *
24318
24657
  * @example
24319
24658
  * ```ts
24320
- * const api = new ReolinkBaichuanApi({ host: '192.168.1.100', ... });
24321
- * await api.login();
24659
+ * const session = await api.createRecordingReplayHlsSession({
24660
+ * channel: 0,
24661
+ * fileName: "/mnt/sda/Mp4Record/2026-01-25/RecS03.mp4",
24662
+ * });
24322
24663
  *
24323
- * const recordings = await api.standaloneListRecordings({
24324
- * start: new Date('2024-01-20T00:00:00'),
24325
- * end: new Date('2024-01-21T23:59:59'),
24664
+ * // Serve playlist
24665
+ * app.get('/clip.m3u8', (req, res) => {
24666
+ * res.type('application/vnd.apple.mpegurl');
24667
+ * res.send(session.getPlaylist());
24326
24668
  * });
24327
24669
  *
24328
- * for (const file of recordings) {
24329
- * console.log(file.fileName, file.startTime, file.endTime);
24330
- * }
24670
+ * // Serve segments
24671
+ * app.get('/segment/:name', (req, res) => {
24672
+ * const data = session.getSegment(req.params.name);
24673
+ * if (data) {
24674
+ * res.type('video/mp2t');
24675
+ * res.send(data);
24676
+ * } else {
24677
+ * res.status(404).end();
24678
+ * }
24679
+ * });
24680
+ *
24681
+ * // Cleanup when done
24682
+ * await session.stop();
24331
24683
  * ```
24332
24684
  */
24333
- async standaloneListRecordings(params) {
24334
- const channel = 0;
24335
- const streamType = params.streamType === "mainStream" ? "mainStream" : "subStream";
24336
- const timeoutMs = params.timeoutMs ?? 15e3;
24337
- return await this.getVideoclips({
24338
- channel,
24339
- start: params.start,
24340
- end: params.end,
24341
- streamType,
24342
- timeoutMs
24343
- });
24344
- }
24345
- /**
24685
+ async createRecordingReplayHlsSession(params) {
24686
+ const logger = params.logger ?? this.logger;
24687
+ const hlsSegmentDuration = params.hlsSegmentDuration ?? 4;
24688
+ const os2 = await import("os");
24689
+ const path6 = await import("path");
24690
+ const fs6 = await import("fs/promises");
24691
+ const crypto3 = await import("crypto");
24692
+ const tempDir = path6.join(
24693
+ os2.tmpdir(),
24694
+ `reolink-hls-${crypto3.randomBytes(8).toString("hex")}`
24695
+ );
24696
+ await fs6.mkdir(tempDir, { recursive: true });
24697
+ const playlistPath = path6.join(tempDir, "playlist.m3u8");
24698
+ const segmentPattern = path6.join(tempDir, "segment_%03d.ts");
24699
+ const parsed = parseRecordingFileName(params.fileName);
24700
+ const durationMs = parsed?.durationMs ?? 3e5;
24701
+ const fps = parsed?.framerate && parsed.framerate > 0 ? parsed.framerate : 15;
24702
+ const seconds = Math.ceil(durationMs / 1e3 * 1.1);
24703
+ logger?.debug?.(
24704
+ `[createRecordingReplayHlsSession] Starting: channel=${params.channel}, fileName=${params.fileName}, durationMs=${durationMs}, hlsSegmentDuration=${hlsSegmentDuration}`
24705
+ );
24706
+ const startParams = {
24707
+ channel: params.channel,
24708
+ fileName: params.fileName,
24709
+ logger,
24710
+ ...params.isNvr != null ? { isNvr: params.isNvr } : {},
24711
+ ...params.deviceId != null ? { deviceId: params.deviceId } : {}
24712
+ };
24713
+ const { result: replayResult, release: releaseQueueSlot } = await this.enqueueStreamingReplayOperation(async () => {
24714
+ try {
24715
+ return await this.startRecordingReplayStream(startParams);
24716
+ } catch (e) {
24717
+ if (!params.deviceId) throw e;
24718
+ const sessionKey = `replay:${params.deviceId}`;
24719
+ logger?.debug?.(
24720
+ `[createRecordingReplayHlsSession] startRecordingReplayStream failed; force-closing dedicated client and retrying once`
24721
+ );
24722
+ await this.forceCloseDedicatedClient(sessionKey, logger);
24723
+ return await this.startRecordingReplayStream(startParams);
24724
+ }
24725
+ });
24726
+ const { stream, stop: stopReplay } = replayResult;
24727
+ const input = new import_node_stream.PassThrough();
24728
+ const H264_AUD = Buffer.from([0, 0, 0, 1, 9, 240]);
24729
+ let tsMuxer = null;
24730
+ let ff = null;
24731
+ let ended = false;
24732
+ let frameCount = 0;
24733
+ let readyResolve = null;
24734
+ let segmentWatcher = null;
24735
+ const readyPromise = new Promise((resolve) => {
24736
+ readyResolve = resolve;
24737
+ });
24738
+ const segments = /* @__PURE__ */ new Map();
24739
+ const startSegmentWatcher = () => {
24740
+ if (segmentWatcher || !readyResolve) return;
24741
+ const firstSegmentPath = path6.join(tempDir, "segment_000.ts");
24742
+ let checkCount = 0;
24743
+ const maxChecks = Math.ceil((hlsSegmentDuration + 2) * 10);
24744
+ segmentWatcher = setInterval(async () => {
24745
+ checkCount++;
24746
+ try {
24747
+ const stats = await fs6.stat(firstSegmentPath);
24748
+ if (stats.size > 256) {
24749
+ if (segmentWatcher) {
24750
+ clearInterval(segmentWatcher);
24751
+ segmentWatcher = null;
24752
+ }
24753
+ logger?.debug?.(
24754
+ `[createRecordingReplayHlsSession] First segment ready after ${checkCount * 100}ms, size=${stats.size}`
24755
+ );
24756
+ readyResolve?.();
24757
+ readyResolve = null;
24758
+ }
24759
+ } catch {
24760
+ }
24761
+ if (checkCount >= maxChecks && readyResolve) {
24762
+ if (segmentWatcher) {
24763
+ clearInterval(segmentWatcher);
24764
+ segmentWatcher = null;
24765
+ }
24766
+ logger?.debug?.(
24767
+ `[createRecordingReplayHlsSession] Segment watcher timeout, resolving anyway`
24768
+ );
24769
+ readyResolve?.();
24770
+ readyResolve = null;
24771
+ }
24772
+ }, 100);
24773
+ };
24774
+ const startFfmpeg = (videoType) => {
24775
+ if (ff) return;
24776
+ const needsTranscode = videoType === "H265" && params.transcodeH265ToH264 === true;
24777
+ const gop = Math.max(1, Math.round(fps * hlsSegmentDuration));
24778
+ logger?.log?.(
24779
+ `[createRecordingReplayHlsSession] Starting ffmpeg HLS with videoType=${videoType}, transcode=${needsTranscode}, hlsTime=${hlsSegmentDuration}s, fileName=${params.fileName}`
24780
+ );
24781
+ MpegTsMuxer.resetCounters();
24782
+ tsMuxer = new MpegTsMuxer({ videoType });
24783
+ const args = [
24784
+ "-hide_banner",
24785
+ "-loglevel",
24786
+ "error",
24787
+ "-f",
24788
+ "mpegts",
24789
+ "-i",
24790
+ "pipe:0",
24791
+ // Video codec
24792
+ ...needsTranscode ? [
24793
+ "-c:v",
24794
+ "libx264",
24795
+ "-preset",
24796
+ "ultrafast",
24797
+ "-tune",
24798
+ "zerolatency",
24799
+ "-crf",
24800
+ "23",
24801
+ "-pix_fmt",
24802
+ "yuv420p",
24803
+ // Ensure regular GOP for consistent HLS cutting.
24804
+ "-g",
24805
+ String(gop),
24806
+ "-keyint_min",
24807
+ String(gop),
24808
+ "-sc_threshold",
24809
+ "0",
24810
+ // Force frequent keyframes so HLS can cut segments reliably.
24811
+ // Without this, ffmpeg will only cut on keyframes and segments can become huge.
24812
+ "-force_key_frames",
24813
+ `expr:gte(t,n_forced*${hlsSegmentDuration})`
24814
+ ] : ["-c", "copy"],
24815
+ // HLS output options
24816
+ "-f",
24817
+ "hls",
24818
+ "-hls_time",
24819
+ String(hlsSegmentDuration),
24820
+ "-hls_list_size",
24821
+ "0",
24822
+ // Keep all segments in playlist
24823
+ "-hls_playlist_type",
24824
+ "event",
24825
+ // Growing playlist (not VOD until end)
24826
+ "-hls_segment_filename",
24827
+ segmentPattern,
24828
+ "-hls_flags",
24829
+ "independent_segments+temp_file",
24830
+ playlistPath
24831
+ ];
24832
+ ff = (0, import_node_child_process3.spawn)("ffmpeg", args, { stdio: ["pipe", "pipe", "pipe"] });
24833
+ if (!ff.stdin || !ff.stderr) {
24834
+ throw new Error("ffmpeg stdio streams not available");
24835
+ }
24836
+ input.pipe(ff.stdin);
24837
+ ff.stdin.on("error", () => {
24838
+ });
24839
+ ff.stderr.on("error", () => {
24840
+ });
24841
+ input.on("error", () => {
24842
+ });
24843
+ let stderr = "";
24844
+ ff.stderr.on("data", (d) => stderr += String(d));
24845
+ ff.on("close", (code) => {
24846
+ if (ended) return;
24847
+ ended = true;
24848
+ if ((code ?? 0) !== 0 && stderr.trim()) {
24849
+ logger?.error?.(
24850
+ `[createRecordingReplayHlsSession] ffmpeg exited with code ${code}: ${stderr}`
24851
+ );
24852
+ } else {
24853
+ logger?.debug?.(
24854
+ `[createRecordingReplayHlsSession] ffmpeg closed normally, frames=${frameCount}`
24855
+ );
24856
+ }
24857
+ });
24858
+ };
24859
+ const stopAll = async () => {
24860
+ if (ended) return;
24861
+ ended = true;
24862
+ releaseQueueSlot();
24863
+ if (segmentWatcher) {
24864
+ clearInterval(segmentWatcher);
24865
+ segmentWatcher = null;
24866
+ }
24867
+ logger?.debug?.(
24868
+ `[createRecordingReplayHlsSession] Stopping, frames=${frameCount}`
24869
+ );
24870
+ const cleanupPromises = [];
24871
+ cleanupPromises.push(stopReplay().catch(() => {
24872
+ }));
24873
+ cleanupPromises.push(stream.stop().catch(() => {
24874
+ }));
24875
+ try {
24876
+ input.end();
24877
+ } catch {
24878
+ }
24879
+ try {
24880
+ ff?.kill("SIGKILL");
24881
+ } catch {
24882
+ }
24883
+ await Promise.race([
24884
+ Promise.all(cleanupPromises),
24885
+ new Promise((resolve) => setTimeout(resolve, 2e3))
24886
+ ]);
24887
+ setTimeout(async () => {
24888
+ try {
24889
+ const files = await fs6.readdir(tempDir);
24890
+ for (const file of files) {
24891
+ await fs6.unlink(path6.join(tempDir, file)).catch(() => {
24892
+ });
24893
+ }
24894
+ await fs6.rmdir(tempDir).catch(() => {
24895
+ });
24896
+ } catch {
24897
+ }
24898
+ }, 6e4);
24899
+ };
24900
+ const timer = setTimeout(
24901
+ () => {
24902
+ logger?.debug?.(
24903
+ `[createRecordingReplayHlsSession] Timeout reached (${seconds}s), stopping`
24904
+ );
24905
+ void stopAll();
24906
+ },
24907
+ Math.max(1, seconds) * 1e3
24908
+ );
24909
+ stream.on("error", (e) => {
24910
+ logger?.error?.(
24911
+ `[createRecordingReplayHlsSession] Stream error: ${e.message}`
24912
+ );
24913
+ clearTimeout(timer);
24914
+ void stopAll();
24915
+ });
24916
+ stream.on("close", () => {
24917
+ logger?.debug?.(
24918
+ `[createRecordingReplayHlsSession] Stream closed, frames=${frameCount}`
24919
+ );
24920
+ clearTimeout(timer);
24921
+ try {
24922
+ input.end();
24923
+ } catch {
24924
+ }
24925
+ });
24926
+ stream.on(
24927
+ "videoAccessUnit",
24928
+ ({ data, videoType, isKeyframe, microseconds }) => {
24929
+ if (ended) return;
24930
+ startFfmpeg(videoType);
24931
+ frameCount++;
24932
+ if (tsMuxer) {
24933
+ const tsData = tsMuxer.mux(data, microseconds, isKeyframe);
24934
+ input.write(tsData);
24935
+ }
24936
+ if (frameCount === 1) {
24937
+ startSegmentWatcher();
24938
+ }
24939
+ }
24940
+ );
24941
+ return {
24942
+ getPlaylist: () => {
24943
+ try {
24944
+ const { readFileSync } = require("fs");
24945
+ return readFileSync(playlistPath, "utf8");
24946
+ } catch {
24947
+ return "#EXTM3U\n#EXT-X-VERSION:3\n#EXT-X-TARGETDURATION:4\n";
24948
+ }
24949
+ },
24950
+ getSegment: (name) => {
24951
+ if (segments.has(name)) {
24952
+ return segments.get(name);
24953
+ }
24954
+ try {
24955
+ const { readFileSync } = require("fs");
24956
+ const segmentPath = path6.join(tempDir, name);
24957
+ const data = readFileSync(segmentPath);
24958
+ segments.set(name, data);
24959
+ return data;
24960
+ } catch {
24961
+ return void 0;
24962
+ }
24963
+ },
24964
+ listSegments: () => {
24965
+ try {
24966
+ const { readdirSync } = require("fs");
24967
+ return readdirSync(tempDir).filter(
24968
+ (f) => f.endsWith(".ts")
24969
+ );
24970
+ } catch {
24971
+ return [];
24972
+ }
24973
+ },
24974
+ waitForReady: () => readyPromise,
24975
+ stop: stopAll,
24976
+ tempDir
24977
+ };
24978
+ }
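For orientation, a hedged usage sketch of the handle returned above. The file name is a placeholder, `api` is assumed to be an already logged-in ReolinkBaichuanApi, and the option names mirror the call made by HlsSessionManager further down.

```ts
// Minimal sketch, assuming `api` is logged in and the fileName came from a recordings listing.
const session = await api.createRecordingReplayHlsSession({
  channel: 0,
  fileName: "Rec_example.mp4", // placeholder
  transcodeH265ToH264: true,
  hlsSegmentDuration: 4,
});
await session.waitForReady();           // resolves once the first segment is written (or on its watchdog timeout)
const playlist = session.getPlaylist(); // m3u8 text, or a minimal stub playlist if not ready yet
const names = session.listSegments();   // e.g. ["segment_00000.ts", ...]
const seg = names[0] ? session.getSegment(names[0]) : undefined;
await session.stop();                   // stops replay/ffmpeg and schedules temp-dir cleanup
```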
24979
+ // ============================================================
24980
+ // STANDALONE CAMERA METHODS
24981
+ // ============================================================
24982
+ // These methods are specifically designed for standalone cameras
24983
+ // (non-NVR) connected via TCP. They provide a simplified interface
24984
+ // for common operations like listing recordings, streaming playback,
24985
+ // downloading clips, and getting thumbnails.
24986
+ // ============================================================
24987
+ /**
24988
+ * List recordings from a standalone camera.
24989
+ *
24990
+ * This method is optimized for standalone cameras (non-NVR) and uses
24991
+ * the native Baichuan protocol to list recorded files.
24992
+ *
24993
+ * @example
24994
+ * ```ts
24995
+ * const api = new ReolinkBaichuanApi({ host: '192.168.1.100', ... });
24996
+ * await api.login();
24997
+ *
24998
+ * const recordings = await api.standaloneListRecordings({
24999
+ * start: new Date('2024-01-20T00:00:00'),
25000
+ * end: new Date('2024-01-21T23:59:59'),
25001
+ * });
25002
+ *
25003
+ * for (const file of recordings) {
25004
+ * console.log(file.fileName, file.startTime, file.endTime);
25005
+ * }
25006
+ * ```
25007
+ */
25008
+ async standaloneListRecordings(params) {
25009
+ const channel = 0;
25010
+ const streamType = params.streamType === "mainStream" ? "mainStream" : "subStream";
25011
+ const timeoutMs = params.timeoutMs ?? 15e3;
25012
+ return await this.getVideoclips({
25013
+ channel,
25014
+ start: params.start,
25015
+ end: params.end,
25016
+ streamType,
25017
+ timeoutMs
25018
+ });
25019
+ }
25020
+ /**
24346
25021
  * Start a streaming replay of a recorded file from a standalone camera.
24347
25022
  *
24348
25023
  * Returns a video stream that emits frames in real-time. The stream can be
@@ -24539,6 +25214,364 @@ ${scheduleItems}
24539
25214
  }
24540
25215
  };
24541
25216
 
25217
+ // src/reolink/baichuan/HlsSessionManager.ts
25218
+ var withTimeout = async (p, ms, label) => {
25219
+ let t;
25220
+ try {
25221
+ return await Promise.race([
25222
+ p,
25223
+ new Promise((_, reject) => {
25224
+ t = setTimeout(
25225
+ () => reject(new Error(`${label} timed out after ${ms}ms`)),
25226
+ ms
25227
+ );
25228
+ })
25229
+ ]);
25230
+ } finally {
25231
+ if (t) clearTimeout(t);
25232
+ }
25233
+ };
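withTimeout is a plain Promise.race wrapper that always clears its timer; a one-line usage sketch (the label is arbitrary, and `api` is the same assumed logged-in instance as in the examples above):

```ts
// Sketch: rejects with "login timed out after 10000ms" if the call hangs.
const result = await withTimeout(api.login(), 10_000, "login");
```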
25234
+ var HlsSessionManager = class {
25235
+ constructor(api, options) {
25236
+ this.api = api;
25237
+ this.logger = options?.logger;
25238
+ this.sessionTtlMs = options?.sessionTtlMs ?? 5 * 60 * 1e3;
25239
+ const cleanupIntervalMs = options?.cleanupIntervalMs ?? 3e4;
25240
+ this.cleanupTimer = setInterval(() => {
25241
+ void this.cleanupExpiredSessions();
25242
+ }, cleanupIntervalMs);
25243
+ }
25244
+ sessions = /* @__PURE__ */ new Map();
25245
+ logger;
25246
+ sessionTtlMs;
25247
+ cleanupTimer;
25248
+ creationLocks = /* @__PURE__ */ new Map();
25249
+ /**
25250
+ * Handle an HLS request and return the HTTP response.
25251
+ *
25252
+ * @param params - Request parameters
25253
+ * @returns HTTP response ready to be sent
25254
+ */
25255
+ async handleRequest(params) {
25256
+ const {
25257
+ sessionKey,
25258
+ hlsPath,
25259
+ requestUrl,
25260
+ createSession,
25261
+ exclusiveKeyPrefix
25262
+ } = params;
25263
+ try {
25264
+ let entry = this.sessions.get(sessionKey);
25265
+ const isPlaylist = hlsPath === "playlist.m3u8" || hlsPath === "";
25266
+ const isSegment = hlsPath.endsWith(".ts");
25267
+ if (!entry && isSegment) {
25268
+ this.logger?.debug?.(
25269
+ `[HlsSessionManager] Segment request without session (likely stale after clip switch): ${sessionKey} ${hlsPath}`
25270
+ );
25271
+ return {
25272
+ statusCode: 404,
25273
+ headers: {
25274
+ "Content-Type": "text/plain",
25275
+ "Cache-Control": "no-store, no-cache, must-revalidate, max-age=0",
25276
+ Pragma: "no-cache",
25277
+ "Retry-After": "1"
25278
+ },
25279
+ body: "Segment not found"
25280
+ };
25281
+ }
25282
+ if (!entry) {
25283
+ if (!isPlaylist) {
25284
+ return {
25285
+ statusCode: 400,
25286
+ headers: { "Content-Type": "text/plain" },
25287
+ body: "Invalid HLS path"
25288
+ };
25289
+ }
25290
+ const lockKey = exclusiveKeyPrefix ?? sessionKey;
25291
+ await this.withCreationLock(lockKey, async () => {
25292
+ entry = this.sessions.get(sessionKey);
25293
+ if (entry) return;
25294
+ if (exclusiveKeyPrefix) {
25295
+ await this.stopOtherSessionsWithPrefix(
25296
+ exclusiveKeyPrefix,
25297
+ sessionKey
25298
+ );
25299
+ }
25300
+ this.logger?.log?.(
25301
+ `[HlsSessionManager] Creating new session: ${sessionKey}`
25302
+ );
25303
+ this.logger?.debug?.(
25304
+ `[HlsSessionManager] createSession(): ${sessionKey}`
25305
+ );
25306
+ const sessionParams = await createSession();
25307
+ this.logger?.debug?.(
25308
+ `[HlsSessionManager] Starting createRecordingReplayHlsSession: ${sessionKey}`
25309
+ );
25310
+ const session = await withTimeout(
25311
+ this.api.createRecordingReplayHlsSession({
25312
+ channel: sessionParams.channel,
25313
+ fileName: sessionParams.fileName,
25314
+ ...sessionParams.isNvr !== void 0 && {
25315
+ isNvr: sessionParams.isNvr
25316
+ },
25317
+ ...this.logger && { logger: this.logger },
25318
+ ...sessionParams.deviceId && {
25319
+ deviceId: sessionParams.deviceId
25320
+ },
25321
+ transcodeH265ToH264: sessionParams.transcodeH265ToH264 ?? true,
25322
+ hlsSegmentDuration: sessionParams.hlsSegmentDuration ?? 4
25323
+ }),
25324
+ 2e4,
25325
+ "createRecordingReplayHlsSession"
25326
+ );
25327
+ try {
25328
+ await withTimeout(
25329
+ session.waitForReady(),
25330
+ 12e3,
25331
+ "hls waitForReady"
25332
+ );
25333
+ } catch (e) {
25334
+ this.logger?.warn?.(
25335
+ `[HlsSessionManager] waitForReady did not complete in time for ${sessionKey}: ${e instanceof Error ? e.message : String(e)}`
25336
+ );
25337
+ }
25338
+ entry = {
25339
+ session,
25340
+ createdAt: Date.now(),
25341
+ lastAccessAt: Date.now()
25342
+ };
25343
+ this.sessions.set(sessionKey, entry);
25344
+ this.logger?.log?.(
25345
+ `[HlsSessionManager] Session ready: ${sessionKey}`
25346
+ );
25347
+ });
25348
+ entry = this.sessions.get(sessionKey);
25349
+ if (!entry) {
25350
+ return {
25351
+ statusCode: 500,
25352
+ headers: {
25353
+ "Content-Type": "text/plain",
25354
+ "Cache-Control": "no-store, no-cache, must-revalidate, max-age=0",
25355
+ Pragma: "no-cache"
25356
+ },
25357
+ body: "HLS session was not created"
25358
+ };
25359
+ }
25360
+ }
25361
+ entry.lastAccessAt = Date.now();
25362
+ if (isPlaylist) {
25363
+ return this.servePlaylist(entry.session, requestUrl, sessionKey);
25364
+ }
25365
+ if (isSegment) {
25366
+ return this.serveSegment(entry.session, hlsPath, sessionKey);
25367
+ }
25368
+ return {
25369
+ statusCode: 400,
25370
+ headers: { "Content-Type": "text/plain" },
25371
+ body: "Invalid HLS path"
25372
+ };
25373
+ } catch (error) {
25374
+ const message = error instanceof Error ? error.message : String(error);
25375
+ this.logger?.error?.(
25376
+ `[HlsSessionManager] Error handling request: ${message}`
25377
+ );
25378
+ return {
25379
+ statusCode: 500,
25380
+ headers: { "Content-Type": "text/plain" },
25381
+ body: `HLS error: ${message}`
25382
+ };
25383
+ }
25384
+ }
25385
+ async withCreationLock(lockKey, fn) {
25386
+ const prev = this.creationLocks.get(lockKey) ?? Promise.resolve();
25387
+ let release;
25388
+ const current = new Promise((resolve) => {
25389
+ release = resolve;
25390
+ });
25391
+ const chained = prev.then(
25392
+ () => current,
25393
+ () => current
25394
+ );
25395
+ this.creationLocks.set(lockKey, chained);
25396
+ await prev.catch(() => {
25397
+ });
25398
+ try {
25399
+ await fn();
25400
+ } finally {
25401
+ release();
25402
+ if (this.creationLocks.get(lockKey) === chained) {
25403
+ this.creationLocks.delete(lockKey);
25404
+ }
25405
+ }
25406
+ }
25407
+ /**
25408
+ * Check if a session exists for the given key.
25409
+ */
25410
+ hasSession(sessionKey) {
25411
+ return this.sessions.has(sessionKey);
25412
+ }
25413
+ /**
25414
+ * Stop a specific session.
25415
+ */
25416
+ async stopSession(sessionKey) {
25417
+ const entry = this.sessions.get(sessionKey);
25418
+ if (entry) {
25419
+ this.logger?.debug?.(
25420
+ `[HlsSessionManager] Stopping session: ${sessionKey}`
25421
+ );
25422
+ this.sessions.delete(sessionKey);
25423
+ await entry.session.stop().catch(() => {
25424
+ });
25425
+ }
25426
+ }
25427
+ /**
25428
+ * Stop all sessions and cleanup.
25429
+ */
25430
+ async stopAll() {
25431
+ this.logger?.debug?.(`[HlsSessionManager] Stopping all sessions`);
25432
+ if (this.cleanupTimer) {
25433
+ clearInterval(this.cleanupTimer);
25434
+ this.cleanupTimer = void 0;
25435
+ }
25436
+ const stopPromises = Array.from(this.sessions.values()).map(
25437
+ (entry) => entry.session.stop().catch(() => {
25438
+ })
25439
+ );
25440
+ this.sessions.clear();
25441
+ await Promise.all(stopPromises);
25442
+ }
25443
+ /**
25444
+ * Get the number of active sessions.
25445
+ */
25446
+ get sessionCount() {
25447
+ return this.sessions.size;
25448
+ }
25449
+ /**
25450
+ * Serve the HLS playlist with rewritten segment URLs.
25451
+ */
25452
+ servePlaylist(session, requestUrl, sessionKey) {
25453
+ let playlist = session.getPlaylist();
25454
+ try {
25455
+ const url = new URL(requestUrl, "http://localhost");
25456
+ const basePath = url.pathname;
25457
+ const baseParams = new URLSearchParams(url.searchParams);
25458
+ baseParams.delete("hls");
25459
+ playlist = playlist.replace(/^(segment_\d+\.ts)$/gm, (match) => {
25460
+ const params = new URLSearchParams(baseParams);
25461
+ params.set("hls", match);
25462
+ return `${basePath}?${params.toString()}`;
25463
+ });
25464
+ } catch {
25465
+ }
25466
+ this.logger?.debug?.(
25467
+ `[HlsSessionManager] Serving playlist: ${sessionKey}, length=${playlist.length}`
25468
+ );
25469
+ return {
25470
+ statusCode: 200,
25471
+ headers: {
25472
+ "Content-Type": "application/vnd.apple.mpegurl",
25473
+ "Cache-Control": "no-store, no-cache, must-revalidate, max-age=0",
25474
+ Pragma: "no-cache"
25475
+ },
25476
+ body: playlist
25477
+ };
25478
+ }
25479
+ /**
25480
+ * Serve an HLS segment.
25481
+ */
25482
+ serveSegment(session, segmentName, sessionKey) {
25483
+ const segment = session.getSegment(segmentName);
25484
+ if (!segment) {
25485
+ this.logger?.warn?.(
25486
+ `[HlsSessionManager] Segment not found: ${segmentName}`
25487
+ );
25488
+ return {
25489
+ statusCode: 404,
25490
+ headers: {
25491
+ "Content-Type": "text/plain",
25492
+ "Cache-Control": "no-store, no-cache, must-revalidate, max-age=0",
25493
+ Pragma: "no-cache",
25494
+ "Retry-After": "1"
25495
+ },
25496
+ body: "Segment not found"
25497
+ };
25498
+ }
25499
+ this.logger?.debug?.(
25500
+ `[HlsSessionManager] Serving segment: ${segmentName} for ${sessionKey}, size=${segment.length}`
25501
+ );
25502
+ return {
25503
+ statusCode: 200,
25504
+ headers: {
25505
+ "Content-Type": "video/mp2t",
25506
+ "Cache-Control": "no-store, no-cache, must-revalidate, max-age=0",
25507
+ Pragma: "no-cache",
25508
+ "Content-Length": String(segment.length)
25509
+ },
25510
+ body: segment
25511
+ };
25512
+ }
25513
+ /**
25514
+ * Cleanup expired sessions.
25515
+ */
25516
+ async cleanupExpiredSessions() {
25517
+ const now = Date.now();
25518
+ const expiredKeys = [];
25519
+ for (const [key, entry] of this.sessions) {
25520
+ if (now - entry.lastAccessAt > this.sessionTtlMs) {
25521
+ expiredKeys.push(key);
25522
+ }
25523
+ }
25524
+ if (!expiredKeys.length) return;
25525
+ await Promise.allSettled(
25526
+ expiredKeys.map(async (key) => {
25527
+ const entry = this.sessions.get(key);
25528
+ if (!entry) return;
25529
+ this.logger?.log?.(
25530
+ `[HlsSessionManager] TTL expired: stopping session ${key}`
25531
+ );
25532
+ this.sessions.delete(key);
25533
+ try {
25534
+ await entry.session.stop();
25535
+ } catch {
25536
+ }
25537
+ })
25538
+ );
25539
+ }
25540
+ async stopOtherSessionsWithPrefix(prefix, exceptKey) {
25541
+ const toStop = [];
25542
+ for (const key of this.sessions.keys()) {
25543
+ if (key !== exceptKey && key.startsWith(prefix)) toStop.push(key);
25544
+ }
25545
+ if (!toStop.length) return;
25546
+ this.logger?.log?.(
25547
+ `[HlsSessionManager] Switch: stopping ${toStop.length} session(s) for prefix=${prefix}`
25548
+ );
25549
+ await Promise.all(
25550
+ toStop.map(async (key) => {
25551
+ const entry = this.sessions.get(key);
25552
+ if (!entry) return;
25553
+ this.sessions.delete(key);
25554
+ await entry.session.stop().catch(() => {
25555
+ });
25556
+ })
25557
+ );
25558
+ }
25559
+ };
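A hedged sketch of wiring HlsSessionManager.handleRequest into a plain node:http server; it mirrors the /hls endpoint registered further below. The port, device id, and file name are placeholders, and `api` is an assumed ReolinkBaichuanApi instance.

```ts
import http from "http";

// Sketch only; `api` is assumed to be a connected ReolinkBaichuanApi,
// and the session-key scheme copies the /hls endpoint below.
const manager = new HlsSessionManager(api, { logger: console, sessionTtlMs: 60_000 });

http.createServer(async (req, res) => {
  const u = new URL(req.url ?? "/", "http://localhost");
  const fileName = u.searchParams.get("fileName") ?? "";
  const result = await manager.handleRequest({
    sessionKey: `hls:myDevice:ch0:${fileName}`,    // placeholder key
    exclusiveKeyPrefix: "hls:myDevice:ch0:",       // stop other clips on the same channel
    hlsPath: u.searchParams.get("hls") ?? "playlist.m3u8",
    requestUrl: `http://127.0.0.1:8000${req.url}`, // used to rewrite segment URIs
    createSession: () => ({ channel: 0, fileName }),
  });
  res.writeHead(result.statusCode, result.headers);
  res.end(result.body);
}).listen(8000);
```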
25560
+ function detectIosClient(userAgent) {
25561
+ const ua = (userAgent ?? "").toLowerCase();
25562
+ const isIos = /iphone|ipad|ipod/.test(ua);
25563
+ const isIosInstalledApp = ua.includes("installedapp");
25564
+ return {
25565
+ isIos,
25566
+ isIosInstalledApp,
25567
+ // iOS InstalledApp needs HLS for video playback
25568
+ needsHls: isIos && isIosInstalledApp
25569
+ };
25570
+ }
25571
+ function buildHlsRedirectUrl(originalUrl) {
25572
+ return `${originalUrl}${originalUrl.includes("?") ? "&" : "?"}hls=playlist.m3u8`;
25573
+ }
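detectIosClient and buildHlsRedirectUrl are meant to be used together: when an iOS installed-app user agent is detected, the caller can redirect the same URL to its HLS playlist form. A small sketch with an illustrative user-agent string:

```ts
// Sketch: the UA string is illustrative, not a real client string.
const { needsHls } = detectIosClient("Mozilla/5.0 (iPhone; ...) InstalledApp");
if (needsHls) {
  const redirectUrl = buildHlsRedirectUrl("/clip?fileName=Rec_example.mp4");
  // -> "/clip?fileName=Rec_example.mp4&hls=playlist.m3u8" (respond with a 302 to this URL)
}
```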
25574
+
24542
25575
  // src/reolink/discovery.ts
24543
25576
  var import_node_dgram2 = __toESM(require("dgram"), 1);
24544
25577
  var import_node_os2 = require("os");
@@ -25314,6 +26347,13 @@ function parseIntParam(v, def) {
25314
26347
  const n = Number.parseInt(v, 10);
25315
26348
  return Number.isFinite(n) ? n : def;
25316
26349
  }
26350
+ function parseBoolParam(v, def) {
26351
+ if (v == null) return def;
26352
+ const s = v.trim().toLowerCase();
26353
+ if (s === "1" || s === "true" || s === "yes" || s === "y") return true;
26354
+ if (s === "0" || s === "false" || s === "no" || s === "n") return false;
26355
+ return def;
26356
+ }
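parseBoolParam accepts the usual truthy/falsy spellings and otherwise returns the supplied default; a couple of illustrative calls:

```ts
parseBoolParam("YES", false);  // true   ("1"/"true"/"yes"/"y")
parseBoolParam("0", true);     // false  ("0"/"false"/"no"/"n")
parseBoolParam("maybe", true); // true   (unrecognised -> default)
```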
25317
26357
  function parseProfile(v) {
25318
26358
  const p = (v ?? "sub").trim();
25319
26359
  if (p === "main" || p === "sub" || p === "ext") return p;
@@ -25344,6 +26384,11 @@ function createBaichuanEndpointsServer(opts) {
25344
26384
  const api = new ReolinkBaichuanApi({
25345
26385
  ...opts.baichuan
25346
26386
  });
26387
+ const hlsManager = new HlsSessionManager(api, {
26388
+ logger: console,
26389
+ sessionTtlMs: 6e4,
26390
+ cleanupIntervalMs: 5e3
26391
+ });
25347
26392
  const listenHost = opts.listenHost ?? "127.0.0.1";
25348
26393
  const rtspListenHost = opts.rtspListenHost ?? "127.0.0.1";
25349
26394
  const rtspServers = /* @__PURE__ */ new Map();
@@ -25389,6 +26434,46 @@ function createBaichuanEndpointsServer(opts) {
25389
26434
  res.end(JSON.stringify({ rtspUrl }));
25390
26435
  return;
25391
26436
  }
26437
+ if (u.pathname === "/hls") {
26438
+ const channel = parseIntParam(u.searchParams.get("channel"), 0);
26439
+ const fileName = (u.searchParams.get("fileName") ?? "").trim();
26440
+ const deviceId = (u.searchParams.get("deviceId") ?? "anon").trim();
26441
+ const isNvr = parseBoolParam(u.searchParams.get("isNvr"), false);
26442
+ const transcode = parseBoolParam(u.searchParams.get("transcode"), true);
26443
+ const hlsSegmentDuration = parseIntParam(
26444
+ u.searchParams.get("hlsSegmentDuration"),
26445
+ 2
26446
+ );
26447
+ const hlsPath = (u.searchParams.get("hls") ?? "playlist.m3u8").trim();
26448
+ if (!fileName) {
26449
+ res.statusCode = 400;
26450
+ res.end("Missing fileName");
26451
+ return;
26452
+ }
26453
+ const sessionKey = `hls:${deviceId}:ch${channel}:${fileName}`;
26454
+ const exclusiveKeyPrefix = `hls:${deviceId}:ch${channel}:`;
26455
+ const requestUrl = `http://${listenHost}:${opts.listenPort}${u.pathname}${u.search}`;
26456
+ const result = await hlsManager.handleRequest({
26457
+ sessionKey,
26458
+ hlsPath,
26459
+ requestUrl,
26460
+ exclusiveKeyPrefix,
26461
+ createSession: () => ({
26462
+ channel,
26463
+ fileName,
26464
+ isNvr,
26465
+ deviceId,
26466
+ transcodeH265ToH264: transcode,
26467
+ hlsSegmentDuration
26468
+ })
26469
+ });
26470
+ res.statusCode = result.statusCode;
26471
+ for (const [k, v] of Object.entries(result.headers)) {
26472
+ res.setHeader(k, v);
26473
+ }
26474
+ res.end(result.body);
26475
+ return;
26476
+ }
25392
26477
  if (u.pathname === "/download") {
25393
26478
  const channel = parseIntParam(u.searchParams.get("channel"), 0);
25394
26479
  const uid = (u.searchParams.get("uid") ?? "").trim();
@@ -29357,9 +30442,9 @@ var BaichuanMjpegServer = class extends import_node_events8.EventEmitter {
29357
30442
  this.started = true;
29358
30443
  const port = this.options.port ?? 8080;
29359
30444
  const host = this.options.host ?? "0.0.0.0";
29360
- const path5 = this.options.path ?? "/mjpeg";
30445
+ const path6 = this.options.path ?? "/mjpeg";
29361
30446
  this.httpServer = http5.createServer((req, res) => {
29362
- this.handleRequest(req, res, path5);
30447
+ this.handleRequest(req, res, path6);
29363
30448
  });
29364
30449
  return new Promise((resolve, reject) => {
29365
30450
  this.httpServer.on("error", (err) => {
@@ -29369,9 +30454,9 @@ var BaichuanMjpegServer = class extends import_node_events8.EventEmitter {
29369
30454
  this.httpServer.listen(port, host, () => {
29370
30455
  this.log(
29371
30456
  "info",
29372
- `MJPEG server started on http://${host}:${port}${path5}`
30457
+ `MJPEG server started on http://${host}:${port}${path6}`
29373
30458
  );
29374
- this.emit("started", { host, port, path: path5 });
30459
+ this.emit("started", { host, port, path: path6 });
29375
30460
  resolve();
29376
30461
  });
29377
30462
  });
@@ -29617,6 +30702,8 @@ var BaichuanMjpegServer = class extends import_node_events8.EventEmitter {
29617
30702
 
29618
30703
  // src/baichuan/stream/BaichuanWebRTCServer.ts
29619
30704
  var import_node_events9 = require("events");
30705
+ init_BcMediaAnnexBDecoder();
30706
+ init_H264Converter();
29620
30707
  function parseAnnexBNalUnits(annexB) {
29621
30708
  const nalUnits = [];
29622
30709
  let offset = 0;
@@ -29738,10 +30825,16 @@ Error: ${err}`
29738
30825
  }
29739
30826
  };
29740
30827
  this.sessions.set(sessionId, session);
29741
- const videoTrack = new MediaStreamTrack({ kind: "video" });
29742
- peerConnection.addTrack(videoTrack);
30828
+ const videoSsrc = Math.random() * 4294967295 >>> 0;
30829
+ const videoTrack = new MediaStreamTrack({ kind: "video", ssrc: videoSsrc });
30830
+ const videoSender = peerConnection.addTrack(videoTrack);
29743
30831
  session.videoTrack = videoTrack;
29744
- const audioTrack = new MediaStreamTrack({ kind: "audio" });
30832
+ this.log(
30833
+ "info",
30834
+ `Video track created: ssrc=${videoTrack.ssrc}, sender params=${JSON.stringify(videoSender?.getParameters?.() ?? {})}`
30835
+ );
30836
+ const audioSsrc = Math.random() * 4294967295 >>> 0;
30837
+ const audioTrack = new MediaStreamTrack({ kind: "audio", ssrc: audioSsrc });
29745
30838
  peerConnection.addTrack(audioTrack);
29746
30839
  session.audioTrack = audioTrack;
29747
30840
  const videoDataChannel = peerConnection.createDataChannel("video", {
@@ -29785,11 +30878,11 @@ Error: ${err}`
29785
30878
  };
29786
30879
  }
29787
30880
  peerConnection.iceConnectionStateChange.subscribe((state) => {
29788
- this.log("debug", `ICE connection state for ${sessionId}: ${state}`);
30881
+ this.log("info", `ICE connection state for ${sessionId}: ${state}`);
29789
30882
  if (state === "connected") {
29790
30883
  session.state = "connected";
29791
30884
  this.emit("session-connected", { sessionId });
29792
- } else if (state === "disconnected" || state === "failed") {
30885
+ } else if (state === "failed") {
29793
30886
  session.state = state;
29794
30887
  this.closeSession(sessionId).catch((err) => {
29795
30888
  this.log("error", `Error closing session on ICE ${state}: ${err}`);
@@ -30009,7 +31102,8 @@ Error: ${err}`
30009
31102
  } else {
30010
31103
  if (frame.data) {
30011
31104
  if (!session.videoCodec && frame.videoType) {
30012
- session.videoCodec = frame.videoType;
31105
+ const detected = detectVideoCodecFromNal(frame.data);
31106
+ session.videoCodec = detected ?? frame.videoType;
30013
31107
  this.log("info", `Detected video codec: ${session.videoCodec}`);
30014
31108
  if (session.videoDataChannel && session.videoDataChannel.readyState === "open") {
30015
31109
  const codecInfo = JSON.stringify({
@@ -30032,22 +31126,45 @@ Error: ${err}`
30032
31126
  }
30033
31127
  lastTimeMicros = frame.microseconds || 0;
30034
31128
  if (session.videoCodec === "H264") {
30035
- await this.sendH264Frame(
31129
+ const connState = session.peerConnection.connectionState;
31130
+ const iceState = session.peerConnection.iceConnectionState;
31131
+ const isConnected = connState === "connected" || iceState === "connected" || iceState === "completed";
31132
+ if (!isConnected) {
31133
+ if (frameNumber < 10) {
31134
+ this.log(
31135
+ "debug",
31136
+ `Waiting for connection, dropping H.264 frame ${frameNumber}`
31137
+ );
31138
+ }
31139
+ frameNumber++;
31140
+ continue;
31141
+ }
31142
+ const packetsSent = await this.sendH264Frame(
30036
31143
  session,
30037
31144
  werift,
30038
31145
  frame.data,
30039
31146
  sequenceNumber,
30040
31147
  timestamp
30041
31148
  );
30042
- sequenceNumber = sequenceNumber + Math.ceil(frame.data.length / 1200) & 65535;
30043
- packetsSentSinceLastLog++;
31149
+ sequenceNumber = sequenceNumber + packetsSent & 65535;
31150
+ packetsSentSinceLastLog += packetsSent;
31151
+ frameNumber++;
31152
+ session.stats.videoFrames++;
31153
+ session.stats.bytesSent += frame.data.length;
30044
31154
  } else if (session.videoCodec === "H265") {
30045
- await this.sendH265Frame(session, frame, frameNumber);
30046
- packetsSentSinceLastLog++;
31155
+ const sent = await this.sendVideoFrameViaDataChannel(
31156
+ session,
31157
+ frame,
31158
+ frameNumber,
31159
+ "H265"
31160
+ );
31161
+ if (sent) {
31162
+ packetsSentSinceLastLog++;
31163
+ frameNumber++;
31164
+ session.stats.videoFrames++;
31165
+ session.stats.bytesSent += frame.data.length;
31166
+ }
30047
31167
  }
30048
- frameNumber++;
30049
- session.stats.videoFrames++;
30050
- session.stats.bytesSent += frame.data.length;
30051
31168
  const now = Date.now();
30052
31169
  if (now - lastLogTime >= 5e3) {
30053
31170
  this.log(
@@ -30060,36 +31177,321 @@ Error: ${err}`
30060
31177
  }
30061
31178
  }
30062
31179
  }
30063
- } catch (err) {
31180
+ } catch (err) {
31181
+ this.log(
31182
+ "error",
31183
+ `Error pumping frames for session ${session.id}: ${err}`
31184
+ );
31185
+ }
31186
+ this.log("info", `Native stream ended for session ${session.id}`);
31187
+ }
31188
+ /**
31189
+ * Send H.264 frame via RTP media track
31190
+ * Returns the number of RTP packets sent
31191
+ */
31192
+ async sendH264Frame(session, werift, frameData, sequenceNumber, timestamp) {
31193
+ const annexB = convertToAnnexB(frameData);
31194
+ const nalUnits = splitAnnexBToNalPayloads(annexB);
31195
+ let hasSps = false;
31196
+ let hasPps = false;
31197
+ let hasIdr = false;
31198
+ const nalTypes = [];
31199
+ for (const nal of nalUnits) {
31200
+ const t = (nal[0] ?? 0) & 31;
31201
+ nalTypes.push(t);
31202
+ if (t === 7) {
31203
+ hasSps = true;
31204
+ session.lastH264Sps = nal;
31205
+ }
31206
+ if (t === 8) {
31207
+ hasPps = true;
31208
+ session.lastH264Pps = nal;
31209
+ }
31210
+ if (t === 5) hasIdr = true;
31211
+ }
31212
+ if (session.stats.videoFrames < 10) {
31213
+ this.log(
31214
+ "debug",
31215
+ `H.264 frame NAL types: [${nalTypes.join(",")}] (5=IDR, 7=SPS, 8=PPS, 1=P-slice)`
31216
+ );
31217
+ }
31218
+ const isKeyframe = hasIdr;
31219
+ let nalList = nalUnits;
31220
+ if (hasIdr && (!hasSps || !hasPps)) {
31221
+ const prepend = [];
31222
+ if (!hasSps && session.lastH264Sps) {
31223
+ prepend.push(session.lastH264Sps);
31224
+ this.log("debug", `Prepending cached SPS to IDR frame`);
31225
+ }
31226
+ if (!hasPps && session.lastH264Pps) {
31227
+ prepend.push(session.lastH264Pps);
31228
+ this.log("debug", `Prepending cached PPS to IDR frame`);
31229
+ }
31230
+ if (prepend.length > 0) {
31231
+ nalList = [...prepend, ...nalUnits];
31232
+ } else if (!session.lastH264Sps || !session.lastH264Pps) {
31233
+ this.log(
31234
+ "warn",
31235
+ `IDR frame without SPS/PPS and no cached parameters - frame may not decode`
31236
+ );
31237
+ }
31238
+ }
31239
+ if (!session.hasReceivedKeyframe) {
31240
+ if (hasIdr && session.lastH264Sps && session.lastH264Pps) {
31241
+ session.hasReceivedKeyframe = true;
31242
+ this.log(
31243
+ "info",
31244
+ `First H.264 keyframe received with SPS/PPS - starting video stream`
31245
+ );
31246
+ } else if (hasIdr) {
31247
+ this.log(
31248
+ "debug",
31249
+ `IDR received but waiting for SPS/PPS before starting stream`
31250
+ );
31251
+ return 0;
31252
+ } else {
31253
+ if (session.stats.videoFrames < 5) {
31254
+ this.log(
31255
+ "debug",
31256
+ `Dropping P-frame ${session.stats.videoFrames} while waiting for keyframe`
31257
+ );
31258
+ }
31259
+ return 0;
31260
+ }
31261
+ }
31262
+ let totalPacketsSent = 0;
31263
+ let currentSeqNum = sequenceNumber;
31264
+ const ssrc = session.videoTrack.ssrc || 0;
31265
+ for (let i = 0; i < nalList.length; i++) {
31266
+ const nalUnit = nalList[i];
31267
+ if (nalUnit.length === 0) continue;
31268
+ const isLastNalu = i === nalList.length - 1;
31269
+ const nalType = getH264NalType(nalUnit);
31270
+ if (nalType === 9) continue;
31271
+ const rtpPackets = this.createH264RtpPackets(
31272
+ werift,
31273
+ nalUnit,
31274
+ currentSeqNum,
31275
+ timestamp,
31276
+ isLastNalu,
31277
+ ssrc
31278
+ );
31279
+ if (session.stats.videoFrames < 3) {
31280
+ this.log(
31281
+ "info",
31282
+ `NAL ${i}: type=${nalType}, size=${nalUnit.length}, rtpPackets=${rtpPackets.length}`
31283
+ );
31284
+ }
31285
+ for (const rtpPacket of rtpPackets) {
31286
+ try {
31287
+ session.videoTrack.writeRtp(rtpPacket);
31288
+ currentSeqNum = currentSeqNum + 1 & 65535;
31289
+ totalPacketsSent++;
31290
+ } catch (err) {
31291
+ this.log(
31292
+ "error",
31293
+ `Error writing RTP packet for session ${session.id}: ${err}`
31294
+ );
31295
+ }
31296
+ }
31297
+ }
31298
+ if (session.stats.videoFrames < 3) {
31299
+ this.log(
31300
+ "info",
31301
+ `H.264 frame sent: nalCount=${nalList.length} packets=${totalPacketsSent} seq=${sequenceNumber}->${currentSeqNum} ts=${timestamp} keyframe=${isKeyframe}`
31302
+ );
31303
+ }
31304
+ return totalPacketsSent;
31305
+ }
31306
+ /**
31307
+ * Send video frame via DataChannel (works for both H.264 and H.265)
31308
+ * Format: 12-byte header + Annex-B data
31309
+ * Header: [frameNum (4)] [timestamp (4)] [flags (1)] [keyframe (1)] [reserved (2)]
31310
+ * Flags: 0x01 = H.265, 0x02 = H.264
31311
+ */
31312
+ async sendVideoFrameViaDataChannel(session, frame, frameNumber, codec) {
31313
+ if (!session.videoDataChannel) {
31314
+ if (frameNumber === 0) {
31315
+ this.log("warn", `No video data channel for session ${session.id}`);
31316
+ }
31317
+ return false;
31318
+ }
31319
+ if (session.videoDataChannel.readyState !== "open") {
31320
+ if (frameNumber === 0) {
31321
+ this.log(
31322
+ "warn",
31323
+ `Video data channel not open for session ${session.id}: ${session.videoDataChannel.readyState}`
31324
+ );
31325
+ }
31326
+ return false;
31327
+ }
31328
+ const nalUnits = parseAnnexBNalUnits(frame.data);
31329
+ let isKeyframe = frame.isKeyframe === true;
31330
+ let hasIdr = false;
31331
+ let hasSps = false;
31332
+ let hasPps = false;
31333
+ let hasVps = false;
31334
+ const nalTypes = [];
31335
+ for (const nalUnit of nalUnits) {
31336
+ if (nalUnit.length === 0) continue;
31337
+ if (codec === "H265") {
31338
+ const nalType = getH265NalType2(nalUnit);
31339
+ nalTypes.push(nalType);
31340
+ if (nalType === 32) {
31341
+ hasVps = true;
31342
+ session.lastH265Vps = nalUnit;
31343
+ }
31344
+ if (nalType === 33) {
31345
+ hasSps = true;
31346
+ session.lastH265Sps = nalUnit;
31347
+ }
31348
+ if (nalType === 34) {
31349
+ hasPps = true;
31350
+ session.lastH265Pps = nalUnit;
31351
+ }
31352
+ if (nalType === 19 || nalType === 20) {
31353
+ hasIdr = true;
31354
+ isKeyframe = true;
31355
+ }
31356
+ } else {
31357
+ const nalType = getH264NalType(nalUnit);
31358
+ nalTypes.push(nalType);
31359
+ if (nalType === 7) {
31360
+ hasSps = true;
31361
+ session.lastH264Sps = nalUnit;
31362
+ }
31363
+ if (nalType === 8) {
31364
+ hasPps = true;
31365
+ session.lastH264Pps = nalUnit;
31366
+ }
31367
+ if (nalType === 5) {
31368
+ hasIdr = true;
31369
+ isKeyframe = true;
31370
+ }
31371
+ }
31372
+ }
31373
+ if (frameNumber < 5) {
31374
+ this.log(
31375
+ "debug",
31376
+ `${codec} frame ${frameNumber} NAL types: [${nalTypes.join(",")}] hasIdr=${hasIdr} hasSps=${hasSps} hasPps=${hasPps}`
31377
+ );
31378
+ }
31379
+ if (!session.hasReceivedKeyframe) {
31380
+ if (codec === "H264") {
31381
+ if (hasIdr && session.lastH264Sps && session.lastH264Pps) {
31382
+ session.hasReceivedKeyframe = true;
31383
+ this.log(
31384
+ "info",
31385
+ `First H.264 keyframe received with SPS/PPS - starting video stream`
31386
+ );
31387
+ } else if (hasSps || hasPps) {
31388
+ this.log("debug", `Received H.264 parameter sets, waiting for IDR`);
31389
+ return false;
31390
+ } else if (hasIdr) {
31391
+ this.log("debug", `IDR received but waiting for SPS/PPS`);
31392
+ return false;
31393
+ } else {
31394
+ if (frameNumber < 10) {
31395
+ this.log(
31396
+ "debug",
31397
+ `Dropping H.264 P-frame ${frameNumber} while waiting for keyframe`
31398
+ );
31399
+ }
31400
+ return false;
31401
+ }
31402
+ } else {
31403
+ if (hasIdr && session.lastH265Vps && session.lastH265Sps && session.lastH265Pps) {
31404
+ session.hasReceivedKeyframe = true;
31405
+ this.log(
31406
+ "info",
31407
+ `First H.265 keyframe received with VPS/SPS/PPS - starting video stream`
31408
+ );
31409
+ } else if (hasVps || hasSps || hasPps) {
31410
+ this.log("debug", `Received H.265 parameter sets, waiting for IDR`);
31411
+ return false;
31412
+ } else if (hasIdr) {
31413
+ this.log("debug", `H.265 IDR received but waiting for VPS/SPS/PPS`);
31414
+ return false;
31415
+ } else {
31416
+ if (frameNumber < 10) {
31417
+ this.log(
31418
+ "debug",
31419
+ `Dropping H.265 P-frame ${frameNumber} while waiting for keyframe`
31420
+ );
31421
+ }
31422
+ return false;
31423
+ }
31424
+ }
31425
+ }
31426
+ let frameData = frame.data;
31427
+ if (hasIdr) {
31428
+ if (codec === "H264" && (!hasSps || !hasPps)) {
31429
+ const parts = [];
31430
+ if (!hasSps && session.lastH264Sps) {
31431
+ parts.push(Buffer.from([0, 0, 0, 1]));
31432
+ parts.push(session.lastH264Sps);
31433
+ }
31434
+ if (!hasPps && session.lastH264Pps) {
31435
+ parts.push(Buffer.from([0, 0, 0, 1]));
31436
+ parts.push(session.lastH264Pps);
31437
+ }
31438
+ if (parts.length > 0) {
31439
+ frameData = Buffer.concat([...parts, frame.data]);
31440
+ this.log("debug", `Prepended cached SPS/PPS to H.264 IDR frame`);
31441
+ }
31442
+ } else if (codec === "H265" && (!hasVps || !hasSps || !hasPps)) {
31443
+ const parts = [];
31444
+ if (!hasVps && session.lastH265Vps) {
31445
+ parts.push(Buffer.from([0, 0, 0, 1]));
31446
+ parts.push(session.lastH265Vps);
31447
+ }
31448
+ if (!hasSps && session.lastH265Sps) {
31449
+ parts.push(Buffer.from([0, 0, 0, 1]));
31450
+ parts.push(session.lastH265Sps);
31451
+ }
31452
+ if (!hasPps && session.lastH265Pps) {
31453
+ parts.push(Buffer.from([0, 0, 0, 1]));
31454
+ parts.push(session.lastH265Pps);
31455
+ }
31456
+ if (parts.length > 0) {
31457
+ frameData = Buffer.concat([...parts, frame.data]);
31458
+ this.log("debug", `Prepended cached VPS/SPS/PPS to H.265 IDR frame`);
31459
+ }
31460
+ }
31461
+ }
31462
+ const header = Buffer.alloc(12);
31463
+ header.writeUInt32BE(frameNumber, 0);
31464
+ header.writeUInt32BE(frame.microseconds ? frame.microseconds / 1e3 : 0, 4);
31465
+ header.writeUInt8(codec === "H265" ? 1 : 2, 8);
31466
+ header.writeUInt8(isKeyframe ? 1 : 0, 9);
31467
+ header.writeUInt16BE(0, 10);
31468
+ const packet = Buffer.concat([header, frameData]);
31469
+ if (frameNumber < 3) {
30064
31470
  this.log(
30065
- "error",
30066
- `Error pumping frames for session ${session.id}: ${err}`
31471
+ "info",
31472
+ `Sending ${codec} frame ${frameNumber}: ${packet.length} bytes, keyframe=${isKeyframe}`
30067
31473
  );
30068
31474
  }
30069
- this.log("info", `Native stream ended for session ${session.id}`);
30070
- }
30071
- /**
30072
- * Send H.264 frame via RTP media track
30073
- */
30074
- async sendH264Frame(session, werift, frameData, sequenceNumber, timestamp) {
30075
- const nalUnits = parseAnnexBNalUnits(frameData);
30076
- for (let i = 0; i < nalUnits.length; i++) {
30077
- const nalUnit = nalUnits[i];
30078
- if (nalUnit.length === 0) continue;
30079
- const isLastNalu = i === nalUnits.length - 1;
30080
- const nalType = getH264NalType(nalUnit);
30081
- if (nalType === 9) continue;
30082
- const rtpPackets = this.createH264RtpPackets(
30083
- werift,
30084
- nalUnit,
30085
- sequenceNumber,
30086
- timestamp,
30087
- isLastNalu
30088
- );
30089
- for (const rtpPacket of rtpPackets) {
30090
- session.videoTrack.writeRtp(rtpPacket);
30091
- sequenceNumber = sequenceNumber + 1 & 65535;
31475
+ const MAX_CHUNK_SIZE = 16e3;
31476
+ try {
31477
+ if (packet.length <= MAX_CHUNK_SIZE) {
31478
+ session.videoDataChannel.send(packet);
31479
+ } else {
31480
+ const totalChunks = Math.ceil(packet.length / MAX_CHUNK_SIZE);
31481
+ for (let i = 0; i < totalChunks; i++) {
31482
+ const start = i * MAX_CHUNK_SIZE;
31483
+ const end = Math.min(start + MAX_CHUNK_SIZE, packet.length);
31484
+ const chunk = packet.subarray(start, end);
31485
+ const chunkHeader = Buffer.alloc(2);
31486
+ chunkHeader.writeUInt8(i, 0);
31487
+ chunkHeader.writeUInt8(totalChunks, 1);
31488
+ session.videoDataChannel.send(Buffer.concat([chunkHeader, chunk]));
31489
+ }
30092
31490
  }
31491
+ return true;
31492
+ } catch (err) {
31493
+ this.log("error", `Error sending ${codec} frame ${frameNumber}: ${err}`);
31494
+ return false;
30093
31495
  }
30094
31496
  }
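The DataChannel payload documented above is a 12-byte header followed by Annex-B data; when a packet exceeds the 16000-byte chunk size it is additionally split into messages carrying a 2-byte [chunkIndex, totalChunks] prefix. A hedged sketch of a receiving-side parser for the un-chunked case (the function name is made up for illustration):

```ts
// Hypothetical client-side helper; mirrors the header written by sendVideoFrameViaDataChannel.
function parseVideoFramePacket(packet: Buffer) {
  const frameNumber = packet.readUInt32BE(0);
  const timestampMs = packet.readUInt32BE(4);
  const codec = packet.readUInt8(8) === 1 ? "H265" : "H264"; // 0x01 = H.265, 0x02 = H.264
  const isKeyframe = packet.readUInt8(9) === 1;              // bytes 10-11 are reserved
  const annexB = packet.subarray(12);                        // raw Annex-B NAL units
  return { frameNumber, timestampMs, codec, isKeyframe, annexB };
}
```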
30095
31497
  /**
@@ -30162,7 +31564,7 @@ Error: ${err}`
30162
31564
  * Create RTP packets for H.264 NAL unit
30163
31565
  * Handles single NAL, STAP-A aggregation, and FU-A fragmentation
30164
31566
  */
30165
- createH264RtpPackets(werift, nalUnit, sequenceNumber, timestamp, marker) {
31567
+ createH264RtpPackets(werift, nalUnit, sequenceNumber, timestamp, marker, ssrc) {
30166
31568
  const { RtpPacket, RtpHeader } = werift;
30167
31569
  const MTU = 1200;
30168
31570
  const packets = [];
@@ -30172,6 +31574,7 @@ Error: ${err}`
30172
31574
  header.sequenceNumber = sequenceNumber;
30173
31575
  header.timestamp = timestamp;
30174
31576
  header.marker = marker;
31577
+ header.ssrc = ssrc;
30175
31578
  packets.push(new RtpPacket(header, nalUnit));
30176
31579
  } else {
30177
31580
  const nalHeader = nalUnit[0];
@@ -30196,6 +31599,7 @@ Error: ${err}`
30196
31599
  header.sequenceNumber = sequenceNumber + packets.length & 65535;
30197
31600
  header.timestamp = timestamp;
30198
31601
  header.marker = isLast && marker;
31602
+ header.ssrc = ssrc;
30199
31603
  packets.push(new RtpPacket(header, fuPayload));
30200
31604
  offset += chunkSize;
30201
31605
  isFirst = false;
@@ -30232,6 +31636,448 @@ Error: ${err}`
30232
31636
  }
30233
31637
  };
30234
31638
 
31639
+ // src/baichuan/stream/BaichuanHlsServer.ts
31640
+ var import_node_events10 = require("events");
31641
+ var import_node_fs = __toESM(require("fs"), 1);
31642
+ var import_promises3 = __toESM(require("fs/promises"), 1);
31643
+ var import_node_os3 = __toESM(require("os"), 1);
31644
+ var import_node_path3 = __toESM(require("path"), 1);
31645
+ var import_node_child_process10 = require("child_process");
31646
+ init_BcMediaAnnexBDecoder();
31647
+ init_H264Converter();
31648
+ init_H265Converter();
31649
+ function parseAnnexBNalUnits2(data) {
31650
+ const units = [];
31651
+ const len = data.length;
31652
+ const findStart = (from) => {
31653
+ for (let i = from; i + 3 < len; i++) {
31654
+ if (data[i] === 0 && data[i + 1] === 0) {
31655
+ if (data[i + 2] === 1) return i;
31656
+ if (i + 4 < len && data[i + 2] === 0 && data[i + 3] === 1)
31657
+ return i;
31658
+ }
31659
+ }
31660
+ return -1;
31661
+ };
31662
+ const startCodeLenAt = (i) => {
31663
+ if (i + 3 < len && data[i] === 0 && data[i + 1] === 0) {
31664
+ if (data[i + 2] === 1) return 3;
31665
+ if (i + 4 < len && data[i + 2] === 0 && data[i + 3] === 1) return 4;
31666
+ }
31667
+ return 0;
31668
+ };
31669
+ let start = findStart(0);
31670
+ if (start < 0) return units;
31671
+ while (start >= 0) {
31672
+ const scLen = startCodeLenAt(start);
31673
+ if (!scLen) break;
31674
+ const nalStart = start + scLen;
31675
+ let next = findStart(nalStart);
31676
+ if (next < 0) next = len;
31677
+ if (nalStart < next) units.push(data.subarray(nalStart, next));
31678
+ start = next < len ? next : -1;
31679
+ }
31680
+ return units;
31681
+ }
31682
+ function isKeyframeAnnexB(codec, annexB) {
31683
+ const nals = parseAnnexBNalUnits2(annexB);
31684
+ for (const nal of nals) {
31685
+ if (!nal || nal.length === 0) continue;
31686
+ if (codec === "h264") {
31687
+ const nalType = nal[0] & 31;
31688
+ if (nalType === 5) return true;
31689
+ } else {
31690
+ const nalType = nal[0] >> 1 & 63;
31691
+ if (nalType >= 16 && nalType <= 21) return true;
31692
+ }
31693
+ }
31694
+ return false;
31695
+ }
31696
+ function hasParamSets(codec, annexB) {
31697
+ const nals = parseAnnexBNalUnits2(annexB);
31698
+ for (const nal of nals) {
31699
+ if (!nal || nal.length === 0) continue;
31700
+ if (codec === "h264") {
31701
+ const nalType = nal[0] & 31;
31702
+ if (nalType === 7 || nalType === 8) return true;
31703
+ } else {
31704
+ const nalType = nal[0] >> 1 & 63;
31705
+ if (nalType === 32 || nalType === 33 || nalType === 34) return true;
31706
+ }
31707
+ }
31708
+ return false;
31709
+ }
31710
+ function getNalTypes(codec, annexB) {
31711
+ const nals = parseAnnexBNalUnits2(annexB);
31712
+ return nals.map((nal) => {
31713
+ if (codec === "h265") {
31714
+ return nal[0] >> 1 & 63;
31715
+ } else {
31716
+ return nal[0] & 31;
31717
+ }
31718
+ });
31719
+ }
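The three helpers above all use the same NAL-header arithmetic: for H.264 the type is the low five bits of the first payload byte, for H.265 it is bits 1-6; keyframes are detected as IDR (type 5) for H.264 and the 16-21 IRAP range for H.265. A tiny sketch of that bit math on hand-written header bytes:

```ts
// Sketch: the same extraction used by isKeyframeAnnexB / hasParamSets / getNalTypes.
const h264IdrHeader = 0x65;             // 0x65 & 0x1f === 5  -> IDR slice (keyframe)
const h265IdrHeader = 0x26;             // (0x26 >> 1) & 0x3f === 19 -> IDR_W_RADL
console.log(h264IdrHeader & 31);        // 5
console.log((h265IdrHeader >> 1) & 63); // 19, inside the 16..21 keyframe range
```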
31720
+ var BaichuanHlsServer = class extends import_node_events10.EventEmitter {
31721
+ api;
31722
+ channel;
31723
+ profile;
31724
+ variant;
31725
+ segmentDuration;
31726
+ playlistSize;
31727
+ ffmpegPath;
31728
+ log;
31729
+ outputDir = null;
31730
+ createdTempDir = false;
31731
+ playlistPath = null;
31732
+ segmentPattern = null;
31733
+ state = "idle";
31734
+ codec = null;
31735
+ framesReceived = 0;
31736
+ ffmpeg = null;
31737
+ nativeStream = null;
31738
+ pumpPromise = null;
31739
+ startedAt = null;
31740
+ lastError = null;
31741
+ constructor(options) {
31742
+ super();
31743
+ this.api = options.api;
31744
+ this.channel = options.channel;
31745
+ this.profile = options.profile;
31746
+ this.variant = options.variant ?? void 0;
31747
+ this.segmentDuration = options.segmentDuration ?? 2;
31748
+ this.playlistSize = options.playlistSize ?? 5;
31749
+ this.ffmpegPath = options.ffmpegPath ?? "ffmpeg";
31750
+ if (options.outputDir) {
31751
+ this.outputDir = options.outputDir;
31752
+ this.createdTempDir = false;
31753
+ }
31754
+ this.log = options.logger ?? (() => {
31755
+ });
31756
+ }
31757
+ /**
31758
+ * Start HLS streaming
31759
+ */
31760
+ async start() {
31761
+ if (this.state === "running" || this.state === "starting") {
31762
+ return;
31763
+ }
31764
+ this.state = "starting";
31765
+ this.lastError = null;
31766
+ try {
31767
+ if (!this.outputDir) {
31768
+ this.outputDir = await import_promises3.default.mkdtemp(
31769
+ import_node_path3.default.join(import_node_os3.default.tmpdir(), `nodelink-hls-${this.profile}-`)
31770
+ );
31771
+ this.createdTempDir = true;
31772
+ } else {
31773
+ await import_promises3.default.mkdir(this.outputDir, { recursive: true });
31774
+ }
31775
+ this.playlistPath = import_node_path3.default.join(this.outputDir, "playlist.m3u8");
31776
+ this.segmentPattern = import_node_path3.default.join(this.outputDir, "segment_%05d.ts");
31777
+ this.log("info", `Starting HLS stream to ${this.outputDir}`);
31778
+ this.nativeStream = createNativeStream(
31779
+ this.api,
31780
+ this.channel,
31781
+ this.profile,
31782
+ this.variant ? { variant: this.variant } : void 0
31783
+ );
31784
+ this.pumpPromise = this.pumpNativeToFfmpeg();
31785
+ this.startedAt = /* @__PURE__ */ new Date();
31786
+ this.state = "running";
31787
+ this.emit("started", { outputDir: this.outputDir });
31788
+ } catch (err) {
31789
+ this.state = "error";
31790
+ this.lastError = String(err);
31791
+ this.log("error", `Failed to start HLS: ${err}`);
31792
+ throw err;
31793
+ }
31794
+ }
31795
+ /**
31796
+ * Stop HLS streaming
31797
+ */
31798
+ async stop() {
31799
+ if (this.state === "idle" || this.state === "stopped") {
31800
+ return;
31801
+ }
31802
+ this.state = "stopping";
31803
+ this.log("info", "Stopping HLS stream");
31804
+ try {
31805
+ this.ffmpeg?.stdin?.end();
31806
+ } catch {
31807
+ }
31808
+ try {
31809
+ this.ffmpeg?.kill("SIGKILL");
31810
+ } catch {
31811
+ }
31812
+ this.ffmpeg = null;
31813
+ if (this.nativeStream) {
31814
+ try {
31815
+ await this.nativeStream.return(void 0);
31816
+ } catch {
31817
+ }
31818
+ this.nativeStream = null;
31819
+ }
31820
+ if (this.pumpPromise) {
31821
+ try {
31822
+ await this.pumpPromise;
31823
+ } catch {
31824
+ }
31825
+ this.pumpPromise = null;
31826
+ }
31827
+ if (this.createdTempDir && this.outputDir) {
31828
+ try {
31829
+ await import_promises3.default.rm(this.outputDir, { recursive: true, force: true });
31830
+ } catch {
31831
+ }
31832
+ }
31833
+ this.state = "stopped";
31834
+ this.emit("stopped");
31835
+ }
31836
+ /**
31837
+ * Get current status
31838
+ */
31839
+ getStatus() {
31840
+ return {
31841
+ state: this.state,
31842
+ codec: this.codec,
31843
+ framesReceived: this.framesReceived,
31844
+ ffmpegRunning: this.ffmpeg !== null && !this.ffmpeg.killed,
31845
+ playlistPath: this.playlistPath,
31846
+ outputDir: this.outputDir,
31847
+ startedAt: this.startedAt,
31848
+ error: this.lastError
31849
+ };
31850
+ }
31851
+ /**
31852
+ * Get playlist file path
31853
+ */
31854
+ getPlaylistPath() {
31855
+ return this.playlistPath;
31856
+ }
31857
+ /**
31858
+ * Get output directory
31859
+ */
31860
+ getOutputDir() {
31861
+ return this.outputDir;
31862
+ }
31863
+ /**
31864
+ * Wait for the playlist file to appear on disk, polling until the timeout elapses
31865
+ */
31866
+ async waitForPlaylist(timeoutMs = 2e4) {
31867
+ if (!this.playlistPath) return false;
31868
+ const deadline = Date.now() + timeoutMs;
31869
+ while (Date.now() < deadline) {
31870
+ if (import_node_fs.default.existsSync(this.playlistPath)) {
31871
+ return true;
31872
+ }
31873
+ await new Promise((r) => setTimeout(r, 150));
31874
+ }
31875
+ return false;
31876
+ }
31877
+ /**
31878
+ * Read an HLS asset (playlist or segment)
31879
+ */
31880
+ async readAsset(assetName) {
31881
+ if (!this.outputDir) return null;
31882
+ const safe = assetName.replace(/^\/+/, "");
31883
+ if (safe.includes("..") || safe.includes("/")) {
31884
+ return null;
31885
+ }
31886
+ const filePath = import_node_path3.default.join(this.outputDir, safe);
31887
+ if (!import_node_fs.default.existsSync(filePath)) {
31888
+ return null;
31889
+ }
31890
+ const data = await import_promises3.default.readFile(filePath);
31891
+ let contentType = "application/octet-stream";
31892
+ if (safe.endsWith(".m3u8")) {
31893
+ contentType = "application/vnd.apple.mpegurl";
31894
+ } else if (safe.endsWith(".ts")) {
31895
+ contentType = "video/mp2t";
31896
+ }
31897
+ return { data, contentType };
31898
+ }
31899
+ // ============================================================================
31900
+ // Private Methods
31901
+ // ============================================================================
31902
+ async pumpNativeToFfmpeg() {
31903
+ if (!this.nativeStream || !this.playlistPath || !this.segmentPattern) {
31904
+ return;
31905
+ }
31906
+ let startedFfmpeg = false;
31907
+ let pendingParamSets = [];
31908
+ const MAX_FRAMES_WAIT_FOR_KEYFRAME = 180;
31909
+ const collectParamSets = (codec, annexB) => {
31910
+ const nals = parseAnnexBNalUnits2(annexB);
31911
+ for (const nal of nals) {
31912
+ if (!nal || nal.length === 0) continue;
31913
+ if (codec === "h264") {
31914
+ const t = nal[0] & 31;
31915
+ if (t === 7 || t === 8) {
31916
+ pendingParamSets.push(
31917
+ Buffer.concat([Buffer.from([0, 0, 0, 1]), nal])
31918
+ );
31919
+ }
31920
+ } else {
31921
+ const t = nal[0] >> 1 & 63;
31922
+ if (t === 32 || t === 33 || t === 34) {
31923
+ pendingParamSets.push(
31924
+ Buffer.concat([Buffer.from([0, 0, 0, 1]), nal])
31925
+ );
31926
+ }
31927
+ }
31928
+ }
31929
+ if (pendingParamSets.length > 12) {
31930
+ pendingParamSets = pendingParamSets.slice(-12);
31931
+ }
31932
+ };
31933
+ try {
31934
+ for await (const frame of this.nativeStream) {
31935
+ if (this.state !== "running") break;
31936
+ if (frame.audio) continue;
31937
+ if (!frame.data || frame.data.length === 0) continue;
31938
+ if (!this.codec) {
31939
+ const detected = detectVideoCodecFromNal(frame.data);
31940
+ const fromMeta = frame.videoType === "H265" ? "h265" : "h264";
31941
+ this.codec = detected ? detected.toLowerCase() : fromMeta;
31942
+ this.log(
31943
+ "info",
31944
+ `HLS codec detected: meta=${fromMeta} detected=${detected} (using ${this.codec})`
31945
+ );
31946
+ this.emit("codec-detected", { codec: this.codec });
31947
+ }
31948
+ const annexB = this.codec === "h265" ? convertToAnnexB2(frame.data) : convertToAnnexB(frame.data);
31949
+ this.framesReceived++;
31950
+ const shouldLog = this.framesReceived <= 5 || this.framesReceived <= 60 && this.framesReceived % 10 === 0;
31951
+ if (shouldLog) {
31952
+ const nalTypes = getNalTypes(this.codec, annexB);
31953
+ const hasIdr = isKeyframeAnnexB(this.codec, annexB);
31954
+ const hasParams = hasParamSets(this.codec, annexB);
31955
+ this.log(
31956
+ "debug",
31957
+ `HLS frame#${this.framesReceived}: bytes=${annexB.length} nalTypes=[${nalTypes.join(",")}] hasIDR=${hasIdr} hasParams=${hasParams}`
31958
+ );
31959
+ }
31960
+ collectParamSets(this.codec, annexB);
31961
+ const isKeyframe = isKeyframeAnnexB(this.codec, annexB);
31962
+ if (!isKeyframe && !startedFfmpeg) {
31963
+ if (this.framesReceived < MAX_FRAMES_WAIT_FOR_KEYFRAME) {
31964
+ continue;
31965
+ }
31966
+ this.log(
31967
+ "warn",
31968
+ `No keyframe after ${this.framesReceived} frames, starting ffmpeg anyway`
31969
+ );
31970
+ }
31971
+ if (!startedFfmpeg) {
31972
+ this.log(
31973
+ "info",
31974
+ `Starting ffmpeg: codec=${this.codec} framesSeen=${this.framesReceived} isKeyframe=${isKeyframe} paramSets=${pendingParamSets.length}`
31975
+ );
31976
+ this.ffmpeg = this.spawnFfmpeg();
31977
+ startedFfmpeg = true;
31978
+ this.emit("ffmpeg-started");
31979
+ try {
31980
+ if (this.ffmpeg?.stdin && !this.ffmpeg.stdin.destroyed) {
31981
+ for (const ps of pendingParamSets) {
31982
+ this.ffmpeg.stdin.write(ps);
31983
+ }
31984
+ }
31985
+ } catch {
31986
+ }
31987
+ }
31988
+ if (!this.ffmpeg || !this.ffmpeg.stdin || this.ffmpeg.stdin.destroyed) {
31989
+ this.log("warn", "ffmpeg stdin not available, stopping pump");
31990
+ break;
31991
+ }
31992
+ try {
31993
+ this.ffmpeg.stdin.write(annexB);
31994
+ if (this.framesReceived % 100 === 0 || this.framesReceived <= 5 || this.framesReceived <= 50 && this.framesReceived % 10 === 0) {
31995
+ this.log(
31996
+ "debug",
31997
+ `HLS fed frame #${this.framesReceived} to ffmpeg (${annexB.length} bytes)`
31998
+ );
31999
+ }
32000
+ } catch (err) {
32001
+ this.log("error", `Failed to write to ffmpeg: ${err}`);
32002
+ break;
32003
+ }
32004
+ }
32005
+ } catch (e) {
32006
+ this.log("error", `HLS pump error: ${e}`);
32007
+ this.lastError = String(e);
32008
+ this.state = "error";
32009
+ this.emit("error", e);
32010
+ }
32011
+ }
32012
+ spawnFfmpeg() {
32013
+ if (!this.playlistPath || !this.segmentPattern) {
32014
+ throw new Error("Playlist path not set");
32015
+ }
32016
+ const codec = this.codec ?? "h264";
32017
+ const args = [
32018
+ "-hide_banner",
32019
+ "-loglevel",
32020
+ "warning",
32021
+ "-fflags",
32022
+ "+genpts",
32023
+ "-use_wallclock_as_timestamps",
32024
+ "1",
32025
+ "-r",
32026
+ "25",
32027
+ "-f",
32028
+ codec === "h265" ? "hevc" : "h264",
32029
+ "-i",
32030
+ "pipe:0"
32031
+ ];
32032
+ if (codec === "h265") {
32033
+ args.push(
32034
+ "-c:v",
32035
+ "libx264",
32036
+ "-preset",
32037
+ "veryfast",
32038
+ "-tune",
32039
+ "zerolatency",
32040
+ "-pix_fmt",
32041
+ "yuv420p"
32042
+ );
32043
+ } else {
32044
+ args.push("-c:v", "copy");
32045
+ }
32046
+ args.push(
32047
+ "-f",
32048
+ "hls",
32049
+ "-hls_time",
32050
+ String(this.segmentDuration),
32051
+ "-hls_list_size",
32052
+ String(this.playlistSize),
32053
+ "-hls_flags",
32054
+ "delete_segments+append_list+omit_endlist",
32055
+ "-hls_segment_filename",
32056
+ this.segmentPattern,
32057
+ this.playlistPath
32058
+ );
32059
+ const p = (0, import_node_child_process10.spawn)(this.ffmpegPath, args, {
32060
+ stdio: ["pipe", "ignore", "pipe"]
32061
+ });
32062
+ p.on("error", (err) => {
32063
+ this.log("error", `ffmpeg spawn error: ${err}`);
32064
+ this.emit("ffmpeg-error", err);
32065
+ });
32066
+ p.stderr?.on("data", (d) => {
32067
+ const s = String(d ?? "").trim();
32068
+ if (s) this.log("warn", `[ffmpeg] ${s}`);
32069
+ });
32070
+ p.on("exit", (code, signal) => {
32071
+ this.log(
32072
+ "warn",
32073
+ `ffmpeg exited (code=${code ?? "?"} signal=${signal ?? "?"})`
32074
+ );
32075
+ this.emit("ffmpeg-exited", { code, signal });
32076
+ });
32077
+ return p;
32078
+ }
32079
+ };
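A hedged usage sketch of BaichuanHlsServer's public surface (start, waitForPlaylist, readAsset, getStatus, stop); the api, channel, and profile values are placeholders and the option names mirror the constructor above.

```ts
// Minimal sketch, assuming `api` is a connected ReolinkBaichuanApi instance.
const hls = new BaichuanHlsServer({
  api,
  channel: 0,
  profile: "sub",
  segmentDuration: 2,
  playlistSize: 5,
  logger: (level, msg) => console.log(level, msg),
});
await hls.start();                                    // pumps the native stream into ffmpeg
if (await hls.waitForPlaylist(20_000)) {
  const asset = await hls.readAsset("playlist.m3u8"); // { data, contentType } or null
}
console.log(hls.getStatus());                         // state, codec, framesReceived, ...
await hls.stop();                                     // kills ffmpeg; removes its own temp dir
```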
32080
+
30235
32081
  // src/index.ts
30236
32082
  init_BcMediaParser();
30237
32083
  init_BcMediaCodec();
@@ -30706,10 +32552,10 @@ async function autoDetectDeviceType(inputs) {
30706
32552
  }
30707
32553
 
30708
32554
  // src/multifocal/compositeRtspServer.ts
30709
- var import_node_events10 = require("events");
30710
- var import_node_child_process10 = require("child_process");
32555
+ var import_node_events11 = require("events");
32556
+ var import_node_child_process11 = require("child_process");
30711
32557
  var net3 = __toESM(require("net"), 1);
30712
- var CompositeRtspServer = class extends import_node_events10.EventEmitter {
32558
+ var CompositeRtspServer = class extends import_node_events11.EventEmitter {
30713
32559
  options;
30714
32560
  compositeStream = null;
30715
32561
  rtspServer = null;
@@ -30814,7 +32660,7 @@ var CompositeRtspServer = class extends import_node_events10.EventEmitter {
30814
32660
  this.logger.log?.(
30815
32661
  `[CompositeRtspServer] Starting ffmpeg RTSP server: ${ffmpegArgs.join(" ")}`
30816
32662
  );
30817
- this.ffmpegProcess = (0, import_node_child_process10.spawn)("ffmpeg", ffmpegArgs, {
32663
+ this.ffmpegProcess = (0, import_node_child_process11.spawn)("ffmpeg", ffmpegArgs, {
30818
32664
  stdio: ["pipe", "pipe", "pipe"]
30819
32665
  });
30820
32666
  this.ffmpegProcess.on("error", (error) => {
@@ -31027,6 +32873,7 @@ var CompositeRtspServer = class extends import_node_events10.EventEmitter {
31027
32873
  BaichuanClient,
31028
32874
  BaichuanEventEmitter,
31029
32875
  BaichuanFrameParser,
32876
+ BaichuanHlsServer,
31030
32877
  BaichuanHttpStreamServer,
31031
32878
  BaichuanMjpegServer,
31032
32879
  BaichuanRtspServer,
@@ -31042,6 +32889,7 @@ var CompositeRtspServer = class extends import_node_events10.EventEmitter {
31042
32889
  DUAL_LENS_SINGLE_MOTION_MODELS,
31043
32890
  H264RtpDepacketizer,
31044
32891
  H265RtpDepacketizer,
32892
+ HlsSessionManager,
31045
32893
  Intercom,
31046
32894
  MjpegTransformer,
31047
32895
  NVR_HUB_EXACT_TYPES,
@@ -31063,6 +32911,7 @@ var CompositeRtspServer = class extends import_node_events10.EventEmitter {
31063
32911
  buildBinaryExtensionXml,
31064
32912
  buildChannelExtensionXml,
31065
32913
  buildFloodlightManualXml,
32914
+ buildHlsRedirectUrl,
31066
32915
  buildLoginXml,
31067
32916
  buildPreviewStopXml,
31068
32917
  buildPreviewStopXmlV11,
@@ -31101,6 +32950,7 @@ var CompositeRtspServer = class extends import_node_events10.EventEmitter {
31101
32950
  decideVideoclipTranscodeMode,
31102
32951
  decodeHeader,
31103
32952
  deriveAesKey,
32953
+ detectIosClient,
31104
32954
  detectVideoCodecFromNal,
31105
32955
  discoverReolinkDevices,
31106
32956
  discoverViaHttpScan,