@apocaliss92/nodelink-js 0.1.7 → 0.1.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1884,13 +1884,31 @@ var init_BaichuanVideoStream = __esm({
  searchStart = bodyEnd + Buffer.from("</body>").length;
  dataToParse = rawCandidate.subarray(searchStart);
  }
+ let encryptLen;
+ if (frame.extension && frame.extension.length > 0) {
+ try {
+ const extDec = this.client.tryDecryptXml(
+ frame.extension,
+ frame.header.channelId,
+ enc
+ );
+ const encryptLenMatch = extDec.match(
+ /<encryptLen>(\d+)<\/encryptLen>/i
+ );
+ if (encryptLenMatch && encryptLenMatch[1]) {
+ encryptLen = parseInt(encryptLenMatch[1], 10);
+ }
+ } catch {
+ }
+ }
  const dataAfterXml = this.chooseDecryptedOrRawCandidate({
  raw: dataToParse,
  enc,
  channelId: frame.header.channelId,
  // Some NVR/Hub streams appear to include non-media bytes even when payloadOffset is present.
  // Allow a one-time resync at startup to avoid delaying the first keyframe.
- allowResync: frame.payload.length === 0 || totalFramesReceived <= 10 && totalMediaPackets === 0
+ allowResync: frame.payload.length === 0 || totalFramesReceived <= 10 && totalMediaPackets === 0,
+ ...encryptLen !== void 0 ? { encryptLen } : {}
  });
  if (this.bcMediaCodec.getRemainingBuffer().length === 0 && dataAfterXml.length <= 600) {
  const s = _BaichuanVideoStream.scoreBcMediaLike(dataAfterXml);
@@ -2003,15 +2021,38 @@ var init_BaichuanVideoStream = __esm({
  }
  }
  };
- const prependParamSetsIfNeeded = (annexB, videoType) => {
+ const prependParamSetsIfNeeded = (annexB, videoType, isPframe = false) => {
  if (videoType === "H264") {
  const nals = splitAnnexBToNalPayloads(annexB);
  if (nals.length === 0) return annexB;
  const types = nals.map((n) => (n[0] ?? 0) & 31);
- if (types.includes(7) && types.includes(8)) return annexB;
  const hasVcl = types.some(
  (t) => t === 1 || t === 5 || t === 19 || t === 20
  );
+ if (isPframe && !hasVcl) {
+ if (dbg.traceNativeStream) {
+ this.logger?.warn(
+ `[BaichuanVideoStream] Dropping P-frame without VCL (only param sets): types=${types.join(",")}`
+ );
+ }
+ return Buffer.alloc(0);
+ }
+ if (types.includes(7) && types.includes(8)) {
+ let ppsIdFromSlice = null;
+ for (const nal of nals) {
+ const t = (nal[0] ?? 0) & 31;
+ if (t === 1 || t === 5) {
+ ppsIdFromSlice = parseSlicePpsIdFromNal(nal);
+ break;
+ }
+ }
+ if (ppsIdFromSlice != null && ppsIdFromSlice <= 255) {
+ this.lastPrependedPpsId = ppsIdFromSlice;
+ } else {
+ this.lastPrependedPpsId = -1;
+ }
+ return annexB;
+ }
  if (!hasVcl) return annexB;
  let ppsId = null;
  for (const nal of nals) {
@@ -2058,11 +2099,19 @@ var init_BaichuanVideoStream = __esm({
  const nals = splitAnnexBToNalPayloads2(annexB);
  if (nals.length === 0) return annexB;
  const types = nals.map((n) => getH265NalType(n)).filter((t) => t !== null);
- if (types.includes(32) && types.includes(33) && types.includes(34))
- return annexB;
  const hasVcl = types.some(
  (t) => t >= 0 && t <= 9 || t >= 16 && t <= 23
  );
+ if (isPframe && !hasVcl) {
+ if (dbg.traceNativeStream) {
+ this.logger?.warn(
+ `[BaichuanVideoStream] Dropping H.265 P-frame without VCL (only param sets): types=${types.join(",")}`
+ );
+ }
+ return Buffer.alloc(0);
+ }
+ if (types.includes(32) && types.includes(33) && types.includes(34))
+ return annexB;
  if (!hasVcl) return annexB;
  if (this.lastPrependedParamSetsH265) return annexB;
  if (!this.lastVps || !this.lastSpsH265 || !this.lastPpsH265)
@@ -2244,7 +2293,7 @@ var init_BaichuanVideoStream = __esm({
  }
  for (const p of parts) {
  maybeCacheParamSets(p, "Pframe", videoType);
- const outP0 = prependParamSetsIfNeeded(p, videoType);
+ const outP0 = prependParamSetsIfNeeded(p, videoType, true);
  if (outP0.length === 0) continue;
  const outP = outP0;
  dumpNalSummary(outP, "Pframe", media.microseconds);
@@ -2726,17 +2775,20 @@ function parseRecordingFileName(fileName) {
  let widthRaw;
  let heightRaw;
  let hexValue = "";
+ let sizeHex;
  if (parts.length === 6) {
  startDate = parts[1] ?? "";
  startTime = parts[2] ?? "";
  endTime = parts[3] ?? "";
  hexValue = parts[4] ?? "";
+ sizeHex = parts[5];
  } else if (parts.length === 7) {
  startDate = parts[1] ?? "";
  startTime = parts[2] ?? "";
  endTime = parts[3] ?? "";
  animalTypeRaw = parts[4];
  hexValue = parts[5] ?? "";
+ sizeHex = parts[6];
  } else if (parts.length === 9) {
  devType = "hub";
  startDate = parts[1] ?? "";
@@ -2746,6 +2798,7 @@ function parseRecordingFileName(fileName) {
  widthRaw = parts[5];
  heightRaw = parts[6];
  hexValue = parts[7] ?? "";
+ sizeHex = parts[8];
  } else {
  return void 0;
  }
@@ -2776,6 +2829,12 @@ function parseRecordingFileName(fileName) {
  if (animalTypeRaw != null) parsed.animalTypeRaw = animalTypeRaw;
  if (widthRaw != null) parsed.widthRaw = widthRaw;
  if (heightRaw != null) parsed.heightRaw = heightRaw;
+ if (sizeHex && /^[0-9a-fA-F]+$/.test(sizeHex)) {
+ const sizeBytes = parseInt(sizeHex, 16);
+ if (Number.isFinite(sizeBytes) && sizeBytes > 0) {
+ parsed.sizeBytes = sizeBytes;
+ }
+ }
  return parsed;
  }
  var FLAGS_CAM_V2, FLAGS_HUB_V0, FLAGS_HUB_V1, FLAGS_HUB_V2, FLAGS_MAPPING;
@@ -4735,15 +4794,18 @@ var init_ReolinkCgiApi = __esm({
  if (detectionClasses.length === 0) {
  detectionClasses.push("motion");
  }
+ const sizeBytes = typeof vodFile.size === "string" ? parseInt(vodFile.size, 10) : vodFile.size;
  const result = {
  fileName: vodFile.name,
  id: vodFile.name,
- sizeBytes: vodFile.size,
  startTime,
  endTime,
  recordType: vodFile.type,
  detectionClasses
  };
+ if (Number.isFinite(sizeBytes)) {
+ result.sizeBytes = sizeBytes;
+ }
  if (parsed) {
  result.parsedFileName = parsed;
  }
@@ -4959,7 +5021,7 @@ async function createDiagnosticsBundle(params) {
  }
  function sanitizeFfmpegError(error) {
  return error.replace(
- /([a-z]+:\/\/)([^:@\/\s]+):([^@\/\s]+)@/gi,
+ /([a-z]+:\/\/)([^:@/\s]+):([^@/\s]+)@/gi,
  (match, protocol, username, password) => {
  return `${protocol}***:***@`;
  }
@@ -14508,10 +14570,15 @@ var parseRecordingFilesFromXml = (xml) => {
  if (startDt) item.startTime = startDt;
  if (endDt) item.endTime = endDt;
  const parsed = parseRecordingFileName(item.name ?? item.fileName);
- if (parsed) {
- item.parsedFileName = parsed;
- if (!item.startTime) item.startTime = parsed.start;
- if (!item.endTime) item.endTime = parsed.end;
+ const parsedFromPath = item.fileName !== item.name ? parseRecordingFileName(item.fileName) : void 0;
+ const bestParsed = parsedFromPath?.sizeBytes != null ? parsedFromPath : parsed;
+ if (bestParsed) {
+ item.parsedFileName = bestParsed;
+ if (!item.startTime) item.startTime = bestParsed.start;
+ if (!item.endTime) item.endTime = bestParsed.end;
+ if (item.sizeBytes == null && bestParsed.sizeBytes != null) {
+ item.sizeBytes = bestParsed.sizeBytes;
+ }
  }
  item.detectionClasses = buildDetectionClasses(parsed, item.recordType);
  out.push(item);
@@ -14538,6 +14605,9 @@ var parseRecordingFilesFromXml = (xml) => {
  item.parsedFileName = parsed;
  if (!item.startTime) item.startTime = parsed.start;
  if (!item.endTime) item.endTime = parsed.end;
+ if (item.sizeBytes == null && parsed.sizeBytes != null) {
+ item.sizeBytes = parsed.sizeBytes;
+ }
  }
  item.detectionClasses = buildDetectionClasses(parsed, item.recordType);
  out.push(item);
@@ -15852,6 +15922,10 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
  * Value: client, refCount, createdAt
  */
  dedicatedClients = /* @__PURE__ */ new Map();
+ /** Keep replay dedicated sockets warm briefly to reduce clip switch latency. */
+ // Keep replay sockets warm briefly for fast clip switches, but tear down quickly
+ // when clients stop requesting HLS segments (avoids looking like a stuck session).
+ static REPLAY_DEDICATED_KEEPALIVE_MS = 1e4;
  /**
  * Get a summary of currently active dedicated sessions.
  * Useful for debugging/logging to see how many sockets are open.
@@ -15961,22 +16035,35 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
  * Process the replay queue - executes operations one at a time.
  */
  async processReplayQueue() {
- if (this.replayQueueProcessing) return;
+ if (this.replayQueueProcessing) {
+ this.logger?.debug?.(
+ `[ReplayQueue] Already processing, queue length: ${this.replayQueue.length}`
+ );
+ return;
+ }
  this.replayQueueProcessing = true;
+ this.logger?.debug?.(
+ `[ReplayQueue] Starting queue processing, items: ${this.replayQueue.length}`
+ );
  while (this.replayQueue.length > 0) {
  const item = this.replayQueue.shift();
  if (item) {
  const timeSinceLastReplay = Date.now() - this.lastReplayEndTime;
  if (timeSinceLastReplay < this.REPLAY_COOLDOWN_MS) {
- await new Promise(
- (r) => setTimeout(r, this.REPLAY_COOLDOWN_MS - timeSinceLastReplay)
- );
+ const waitTime = this.REPLAY_COOLDOWN_MS - timeSinceLastReplay;
+ this.logger?.debug?.(`[ReplayQueue] Waiting ${waitTime}ms cooldown`);
+ await new Promise((r) => setTimeout(r, waitTime));
  }
+ this.logger?.debug?.(
+ `[ReplayQueue] Executing item, remaining: ${this.replayQueue.length}`
+ );
  await item.execute();
  this.lastReplayEndTime = Date.now();
+ this.logger?.debug?.(`[ReplayQueue] Item completed`);
  }
  }
  this.replayQueueProcessing = false;
+ this.logger?.debug?.(`[ReplayQueue] Queue processing complete`);
  }
  /**
  * Enqueue a replay operation with optional de-duplication.
@@ -16039,14 +16126,35 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
  this.replayQueue.push({
  execute: () => {
  return new Promise((releaseSlot) => {
+ let released = false;
+ const safeRelease = () => {
+ if (released) return;
+ released = true;
+ releaseSlot();
+ };
+ const safetyTimeout = setTimeout(
+ () => {
+ if (!released) {
+ this.logger?.warn?.(
+ "[ReplayQueue] Safety timeout: releasing queue slot after 10 minutes"
+ );
+ safeRelease();
+ }
+ },
+ 10 * 60 * 1e3
+ );
  setup().then((result) => {
  resolvePromise({
  result,
- release: () => releaseSlot()
+ release: () => {
+ clearTimeout(safetyTimeout);
+ safeRelease();
+ }
  });
  }).catch((e) => {
+ clearTimeout(safetyTimeout);
  rejectPromise(e);
- releaseSlot();
+ safeRelease();
  });
  });
  }
@@ -16116,30 +16224,68 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
  * immediately and create a new one. This ensures clean state for each clip.
  */
  async acquireDedicatedClient(sessionKey, logger) {
+ const log = logger ?? this.logger;
+ const isReplayKey = sessionKey.startsWith("replay:");
  const existing = this.dedicatedClients.get(sessionKey);
  if (existing) {
- logger?.debug?.(
- `[DedicatedClient] Closing existing client for ${sessionKey} (new stream requested)`
+ if (existing.idleCloseTimer) {
+ clearTimeout(existing.idleCloseTimer);
+ existing.idleCloseTimer = void 0;
+ }
+ if (existing.refCount === 0) {
+ existing.refCount = 1;
+ existing.lastUsedAt = Date.now();
+ log?.debug?.(
+ `[DedicatedClient] Reusing existing dedicated socket for sessionKey=${sessionKey}`
+ );
+ try {
+ if (!existing.client.loggedIn) {
+ await existing.client.login();
+ }
+ } catch {
+ }
+ if (existing.client.loggedIn) {
+ return {
+ client: existing.client,
+ release: () => this.releaseDedicatedClient(sessionKey, logger)
+ };
+ }
+ }
+ log?.log?.(
+ `[DedicatedClient] Closing existing socket for sessionKey=${sessionKey} (preempting active session)`
  );
  this.dedicatedClients.delete(sessionKey);
- existing.client.close({ reason: "new stream for same device" }).catch((e) => {
- logger?.debug?.(`[DedicatedClient] Error closing old socket: ${e}`);
- });
+ try {
+ await existing.client.close({ reason: "preempted by new session" });
+ log?.log?.(
+ `[DedicatedClient] Old socket closed successfully for sessionKey=${sessionKey}`
+ );
+ } catch (e) {
+ log?.warn?.(
+ `[DedicatedClient] Error closing old socket for sessionKey=${sessionKey}: ${e}`
+ );
+ }
  }
- logger?.debug?.(`[DedicatedClient] Creating new client for ${sessionKey}`);
+ log?.log?.(
+ `[DedicatedClient] Opening new dedicated socket for sessionKey=${sessionKey}`
+ );
  const dedicatedClient = new BaichuanClient({
  host: this.host,
  username: this.username,
  password: this.password,
- logger: logger ?? this.logger,
+ logger: log,
  debugOptions: this.client.getDebugConfig?.()
  });
  await dedicatedClient.login();
+ log?.log?.(
+ `[DedicatedClient] Dedicated socket logged in for sessionKey=${sessionKey}`
+ );
  this.dedicatedClients.set(sessionKey, {
  client: dedicatedClient,
  refCount: 1,
- // Keep for compatibility, but not used for reuse logic
- createdAt: Date.now()
+ createdAt: Date.now(),
+ lastUsedAt: Date.now(),
+ idleCloseTimer: void 0
  });
  return {
  client: dedicatedClient,
@@ -16151,15 +16297,81 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
  * This ensures clean teardown at the end of each clip.
  */
  async releaseDedicatedClient(sessionKey, logger) {
+ const log = logger ?? this.logger;
  const entry = this.dedicatedClients.get(sessionKey);
  if (!entry) return;
+ entry.refCount = Math.max(0, entry.refCount - 1);
+ entry.lastUsedAt = Date.now();
+ if (entry.refCount > 0) return;
+ const isReplayKey = sessionKey.startsWith("replay:");
+ const allowReplayKeepAlive = /^replay:[^:]+$/.test(sessionKey);
+ if (isReplayKey && allowReplayKeepAlive) {
+ if (entry.idleCloseTimer) return;
+ entry.idleCloseTimer = setTimeout(async () => {
+ const current = this.dedicatedClients.get(sessionKey);
+ if (!current) return;
+ if (current.refCount > 0) return;
+ this.dedicatedClients.delete(sessionKey);
+ log?.debug?.(
+ `[DedicatedClient] Closing idle replay socket for sessionKey=${sessionKey} (keepalive expired)`
+ );
+ try {
+ await current.client.close({
+ reason: "replay idle keepalive expired"
+ });
+ } catch {
+ }
+ }, _ReolinkBaichuanApi.REPLAY_DEDICATED_KEEPALIVE_MS);
+ return;
+ }
  this.dedicatedClients.delete(sessionKey);
- logger?.debug?.(`[DedicatedClient] Releasing and closing ${sessionKey}`);
+ log?.log?.(
+ `[DedicatedClient] Closing socket for sessionKey=${sessionKey} (session ended)`
+ );
  try {
  await entry.client.close({ reason: "dedicated session ended" });
+ log?.log?.(
+ `[DedicatedClient] Socket closed successfully for sessionKey=${sessionKey}`
+ );
+ } catch (e) {
+ log?.warn?.(
+ `[DedicatedClient] Error closing socket for sessionKey=${sessionKey}: ${e}`
+ );
+ }
+ }
+ /**
+ * Force-close a dedicated client if it exists.
+ * This is called BEFORE entering the queue to immediately terminate any existing stream
+ * for the same sessionKey. The existing stream will receive an error, release its queue slot,
+ * and the new request can then proceed.
+ *
+ * @param sessionKey - The session key to force-close (e.g., `replay:${deviceId}`)
+ * @param logger - Optional logger
+ * @returns true if a client was closed, false if no client existed
+ */
+ async forceCloseDedicatedClient(sessionKey, logger) {
+ const log = logger ?? this.logger;
+ const entry = this.dedicatedClients.get(sessionKey);
+ if (!entry) return false;
+ if (entry.idleCloseTimer) {
+ clearTimeout(entry.idleCloseTimer);
+ entry.idleCloseTimer = void 0;
+ }
+ log?.log?.(
+ `[DedicatedClient] Force-closing existing socket for sessionKey=${sessionKey} (new request preempting)`
+ );
+ this.dedicatedClients.delete(sessionKey);
+ try {
+ await entry.client.close({ reason: "preempted by new request" });
+ log?.log?.(
+ `[DedicatedClient] Force-close complete for sessionKey=${sessionKey}`
+ );
  } catch (e) {
- logger?.debug?.(`[DedicatedClient] Error closing socket: ${e}`);
+ log?.warn?.(
+ `[DedicatedClient] Error during force-close for sessionKey=${sessionKey}: ${e}`
+ );
  }
+ return true;
  }
  /**
  * Create a dedicated Baichuan client session for streaming.
@@ -16196,6 +16408,9 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
  await Promise.allSettled(
  entries.map(async ([key, entry]) => {
  try {
+ if (entry.idleCloseTimer) {
+ clearTimeout(entry.idleCloseTimer);
+ }
  this.logger?.debug?.(`[DedicatedClient] Cleanup: closing ${key}`);
  await entry.client.close({ reason: "API cleanup" });
  } catch {
@@ -18051,7 +18266,8 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
  channel,
  payloadXml: stopXml,
  messageClass: BC_CLASS_MODERN_24,
- timeoutMs: 1e4,
+ timeoutMs: 2e3,
+ // Short timeout - if socket is closed, fail fast
  internal: true
  });
  } catch {
@@ -18187,7 +18403,8 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
  channel,
  payloadXml: stopXml,
  messageClass: BC_CLASS_MODERN_24,
- timeoutMs: 1e4,
+ timeoutMs: 2e3,
+ // Short timeout - if socket is closed, fail fast
  internal: true
  });
  } catch {
@@ -19234,11 +19451,20 @@ ${stderr}`)
  }
  }
  async downloadRecording(params) {
+ this.logger?.debug?.(
+ `[downloadRecording] Queuing download for: ${params.fileName}, channel=${params.channel}`
+ );
  return this.enqueueReplayOperation(async () => {
+ this.logger?.debug?.(
+ `[downloadRecording] Starting download for: ${params.fileName}`
+ );
  await this.client.login();
  const channel = this.normalizeChannel(params.channel);
  const uid = await this.ensureUidForRecordings(channel, params.uid);
  const fileName = params.fileName;
+ this.logger?.debug?.(
+ `[downloadRecording] Trying fileInfoListReplayBinaryDownload for: ${fileName}`
+ );
  let replayErr;
  try {
  return await this.fileInfoListReplayBinaryDownload({
@@ -19249,7 +19475,13 @@ ${stderr}`)
  });
  } catch (e) {
  replayErr = e;
+ this.logger?.debug?.(
+ `[downloadRecording] fileInfoListReplayBinaryDownload failed: ${e instanceof Error ? e.message : String(e)}`
+ );
  }
+ this.logger?.debug?.(
+ `[downloadRecording] Trying fileInfoListDownload for: ${fileName}`
+ );
  let downloadErr;
  try {
  return await this.fileInfoListDownload({
@@ -19260,7 +19492,13 @@ ${stderr}`)
  });
  } catch (e) {
  downloadErr = e;
+ this.logger?.debug?.(
+ `[downloadRecording] fileInfoListDownload failed: ${e instanceof Error ? e.message : String(e)}`
+ );
  }
+ this.logger?.debug?.(
+ `[downloadRecording] Trying fileInfoListPagedDownload for: ${fileName}`
+ );
  try {
  const result = await this.fileInfoListPagedDownload({
  channel,
@@ -19272,6 +19510,9 @@ ${stderr}`)
  return result;
  }
  } catch (e) {
+ this.logger?.debug?.(
+ `[downloadRecording] fileInfoListPagedDownload failed: ${e instanceof Error ? e.message : String(e)}`
+ );
  }
  const replayMsg = replayErr instanceof Error ? replayErr.message : replayErr != null ? String(replayErr) : "";
  const dlMsg = downloadErr instanceof Error ? downloadErr.message : downloadErr != null ? String(downloadErr) : "";
@@ -21344,11 +21585,13 @@ ${stderr}`)
  * @param settings - Floodlight settings to apply
  *
  * @example
+ * ```typescript
  * await api.setFloodlightSettings(0, {
  * duration: 300, // 5 minutes
  * detectType: 'people,vehicle',
  * brightness: 80,
  * });
+ * ```
  */
  async setFloodlightSettings(channel, settings) {
  const ch = this.normalizeChannel(channel);
@@ -23490,11 +23733,13 @@ ${scheduleItems}
  */
  async createRecordingReplayMp4Stream(params) {
  const logger = params.logger ?? this.logger;
+ const useMpegTsMuxer = params.useMpegTsMuxer ?? true;
  const parsed = parseRecordingFileName(params.fileName);
  const durationMs = parsed?.durationMs ?? 3e5;
+ const fps = parsed?.framerate && parsed.framerate > 0 ? parsed.framerate : 15;
  const seconds = Math.ceil(durationMs / 1e3 * 1.1);
  logger?.debug?.(
- `[createRecordingReplayMp4Stream] Starting: channel=${params.channel}, fileName=${params.fileName}, durationMs=${durationMs}, timeoutSec=${seconds}, deviceId=${params.deviceId ?? "auto"}`
+ `[createRecordingReplayMp4Stream] Starting: channel=${params.channel}, fileName=${params.fileName}, durationMs=${durationMs}, fps=${fps}, timeoutSec=${seconds}, deviceId=${params.deviceId ?? "auto"}, useMpegTsMuxer=${useMpegTsMuxer}`
  );
  const startParams = {
  channel: params.channel,
@@ -23503,39 +23748,84 @@ ${scheduleItems}
  ...params.isNvr != null ? { isNvr: params.isNvr } : {},
  ...params.deviceId != null ? { deviceId: params.deviceId } : {}
  };
- const { result: replayResult, release: releaseQueueSlot } = await this.enqueueStreamingReplayOperation(
- () => this.startRecordingReplayStream(startParams)
- );
+ const { result: replayResult, release: releaseQueueSlot } = await this.enqueueStreamingReplayOperation(async () => {
+ try {
+ return await this.startRecordingReplayStream(startParams);
+ } catch (e) {
+ if (!params.deviceId) throw e;
+ const sessionKey = `replay:${params.deviceId}`;
+ logger?.debug?.(
+ `[createRecordingReplayMp4Stream] startRecordingReplayStream failed; force-closing dedicated client and retrying once`
+ );
+ await this.forceCloseDedicatedClient(sessionKey, logger);
+ return await this.startRecordingReplayStream(startParams);
+ }
+ });
  const { stream, stop: stopReplay } = replayResult;
  const input = new import_node_stream.PassThrough();
  const output = new import_node_stream.PassThrough();
+ const H264_AUD = Buffer.from([0, 0, 0, 1, 9, 240]);
  let tsMuxer = null;
  let ff = null;
  let ended = false;
  let frameCount = 0;
  const startFfmpeg = (videoType) => {
  if (ff) return;
+ const needsTranscode = videoType === "H265" && params.transcodeH265ToH264 === true;
  logger?.debug?.(
- `[createRecordingReplayMp4Stream] Starting ffmpeg with videoType=${videoType}`
+ `[createRecordingReplayMp4Stream] Starting ffmpeg with videoType=${videoType}, transcode=${needsTranscode}, useMpegTsMuxer=${useMpegTsMuxer}, fps=${fps}`
  );
- MpegTsMuxer.resetCounters();
- tsMuxer = new MpegTsMuxer({ videoType });
- const args = [
- "-hide_banner",
- "-loglevel",
- "error",
- "-f",
- "mpegts",
- "-i",
- "pipe:0",
- "-c",
- "copy",
- "-movflags",
- "frag_keyframe+empty_moov",
- "-f",
- "mp4",
- "pipe:1"
- ];
+ let args;
+ if (useMpegTsMuxer) {
+ MpegTsMuxer.resetCounters();
+ tsMuxer = new MpegTsMuxer({ videoType });
+ args = [
+ "-hide_banner",
+ "-loglevel",
+ "error",
+ "-f",
+ "mpegts",
+ "-i",
+ "pipe:0",
+ // Video codec: transcode H.265→H.264 if requested, otherwise copy
+ ...needsTranscode ? ["-c:v", "libx264", "-preset", "ultrafast", "-crf", "23"] : ["-c", "copy"],
+ // frag_keyframe: create new fragment at each keyframe
+ // empty_moov: write ftyp/moov immediately (required for streaming)
+ // default_base_moof: required for iOS Media Source Extensions
+ // negative_cts_offsets: fixes some iOS playback issues
+ "-movflags",
+ "frag_keyframe+empty_moov+default_base_moof+negative_cts_offsets",
+ "-f",
+ "mp4",
+ "pipe:1"
+ ];
+ } else {
+ const inputFormat = videoType === "H265" ? "hevc" : "h264";
+ args = [
+ "-hide_banner",
+ "-loglevel",
+ "error",
+ "-fflags",
+ "+genpts",
+ "-r",
+ String(fps),
+ "-f",
+ inputFormat,
+ "-i",
+ "pipe:0",
+ // Video codec: transcode H.265→H.264 if requested, otherwise copy
+ ...needsTranscode ? ["-c:v", "libx264", "-preset", "ultrafast", "-crf", "23"] : ["-c", "copy"],
+ // frag_keyframe: create new fragment at each keyframe
+ // empty_moov: write ftyp/moov immediately (required for streaming)
+ // default_base_moof: required for iOS Media Source Extensions
+ // negative_cts_offsets: fixes some iOS playback issues
+ "-movflags",
+ "frag_keyframe+empty_moov+default_base_moof+negative_cts_offsets",
+ "-f",
+ "mp4",
+ "pipe:1"
+ ];
+ }
  ff = (0, import_node_child_process3.spawn)("ffmpeg", args, { stdio: ["pipe", "pipe", "pipe"] });
  if (!ff.stdin || !ff.stdout || !ff.stderr) {
  throw new Error("ffmpeg stdio streams not available");
@@ -23573,17 +23863,19 @@ ${scheduleItems}
  const stopAll = async () => {
  if (ended) return;
  ended = true;
+ releaseQueueSlot();
  logger?.debug?.(
  `[createRecordingReplayMp4Stream] Stopping stream, frames=${frameCount}`
  );
- try {
- await stopReplay();
- } catch {
- }
- try {
- await stream.stop();
- } catch {
- }
+ const cleanupPromises = [];
+ cleanupPromises.push(
+ stopReplay().catch(() => {
+ })
+ );
+ cleanupPromises.push(
+ stream.stop().catch(() => {
+ })
+ );
  try {
  input.end();
  } catch {
@@ -23596,7 +23888,11 @@ ${scheduleItems}
  output.end();
  } catch {
  }
- releaseQueueSlot();
+ await Promise.race([
+ Promise.all(cleanupPromises),
+ new Promise((resolve) => setTimeout(resolve, 2e3))
+ // Max 2s for cleanup
+ ]);
  };
  const timer = setTimeout(
  () => {
@@ -23618,15 +23914,25 @@ ${scheduleItems}
  output.destroy(e);
  void stopAll();
  });
+ stream.on("close", () => {
+ logger?.debug?.(
+ `[createRecordingReplayMp4Stream] Stream closed, frames=${frameCount}`
+ );
+ clearTimeout(timer);
+ void stopAll();
+ });
  stream.on(
  "videoAccessUnit",
  ({ data, videoType, isKeyframe, microseconds }) => {
  if (ended) return;
  startFfmpeg(videoType);
  frameCount++;
- if (tsMuxer) {
+ if (useMpegTsMuxer && tsMuxer) {
  const tsData = tsMuxer.mux(data, microseconds, isKeyframe);
  input.write(tsData);
+ } else {
+ if (videoType === "H264") input.write(H264_AUD);
+ input.write(data);
  }
  }
  );
@@ -23665,19 +23971,53 @@ ${scheduleItems}
  */
  async createRecordingDownloadMp4Stream(params) {
  const timeoutMs = params.timeoutMs ?? 12e4;
- const parsed = parseRecordingFileName(params.fileName);
- const fps = parsed?.framerate && parsed.framerate > 0 ? parsed.framerate : 15;
  const channel = this.normalizeChannel(params.channel);
  const uid = await this.ensureUidForRecordings(channel);
- const { annexB, videoType } = await this.downloadRecordingDemuxed({
+ const raw = await this.downloadRecording({
  channel,
  uid,
  fileName: params.fileName,
  timeoutMs
  });
- if (annexB.length === 0) {
+ if (raw.length === 0) {
  throw new Error("Downloaded recording is empty");
  }
+ const videoFrames = [];
+ let videoType = null;
+ const decoder = new BcMediaAnnexBDecoder({
+ strict: false,
+ logger: this.logger,
+ onVideoAccessUnit: ({ annexB: annexB2, microseconds }) => {
+ videoFrames.push({ annexB: annexB2, microseconds });
+ }
+ });
+ decoder.push(raw);
+ const stats = decoder.getStats();
+ videoType = stats.videoType;
+ if (videoFrames.length === 0) {
+ throw new Error("Downloaded recording has no video frames");
+ }
+ let fps;
+ if (videoFrames.length >= 2) {
+ const firstTs = videoFrames[0].microseconds;
+ const lastTs = videoFrames[videoFrames.length - 1].microseconds;
+ const durationUs = lastTs - firstTs;
+ if (durationUs > 0) {
+ const durationSeconds = durationUs / 1e6;
+ fps = (videoFrames.length - 1) / durationSeconds;
+ } else {
+ const infoFps = stats.infos[0]?.fps;
+ fps = infoFps && infoFps > 0 ? infoFps : 15;
+ }
+ } else {
+ const infoFps = stats.infos[0]?.fps;
+ fps = infoFps && infoFps > 0 ? infoFps : 15;
+ }
+ if (fps > 14 && fps < 16) fps = 15;
+ else if (fps > 23 && fps < 26) fps = 25;
+ else if (fps > 29 && fps < 31) fps = 30;
+ else fps = Math.round(fps * 100) / 100;
+ const annexB = Buffer.concat(videoFrames.map((f) => f.annexB));
  const input = new import_node_stream.PassThrough();
  const output = new import_node_stream.PassThrough();
  let ff = null;
@@ -23749,6 +24089,338 @@ ${scheduleItems}
  stop: stopAll
  };
  }
+ /**
+ * Create an HLS (HTTP Live Streaming) session for a recording.
+ *
+ * This method creates HLS segments on-the-fly from a recording replay stream.
+ * HLS is required for iOS devices (Safari, Home app) which don't support
+ * fragmented MP4 streaming well and require Range request support.
+ *
+ * The session writes HLS segments (.ts files) and playlist (.m3u8) to a
+ * temporary directory. You must serve these files via HTTP to the client.
+ *
+ * @example
+ * ```ts
+ * const session = await api.createRecordingReplayHlsSession({
+ * channel: 0,
+ * fileName: "/mnt/sda/Mp4Record/2026-01-25/RecS03.mp4",
+ * });
+ *
+ * // Serve playlist
+ * app.get('/clip.m3u8', (req, res) => {
+ * res.type('application/vnd.apple.mpegurl');
+ * res.send(session.getPlaylist());
+ * });
+ *
+ * // Serve segments
+ * app.get('/segment/:name', (req, res) => {
+ * const data = session.getSegment(req.params.name);
+ * if (data) {
+ * res.type('video/mp2t');
+ * res.send(data);
+ * } else {
+ * res.status(404).end();
+ * }
+ * });
+ *
+ * // Cleanup when done
+ * await session.stop();
+ * ```
+ */
+ async createRecordingReplayHlsSession(params) {
+ const logger = params.logger ?? this.logger;
+ const hlsSegmentDuration = params.hlsSegmentDuration ?? 4;
+ const os = await import("os");
+ const path5 = await import("path");
+ const fs5 = await import("fs/promises");
+ const crypto2 = await import("crypto");
+ const tempDir = path5.join(
+ os.tmpdir(),
+ `reolink-hls-${crypto2.randomBytes(8).toString("hex")}`
+ );
+ await fs5.mkdir(tempDir, { recursive: true });
+ const playlistPath = path5.join(tempDir, "playlist.m3u8");
+ const segmentPattern = path5.join(tempDir, "segment_%03d.ts");
+ const parsed = parseRecordingFileName(params.fileName);
+ const durationMs = parsed?.durationMs ?? 3e5;
+ const fps = parsed?.framerate && parsed.framerate > 0 ? parsed.framerate : 15;
+ const seconds = Math.ceil(durationMs / 1e3 * 1.1);
+ logger?.debug?.(
+ `[createRecordingReplayHlsSession] Starting: channel=${params.channel}, fileName=${params.fileName}, durationMs=${durationMs}, hlsSegmentDuration=${hlsSegmentDuration}`
+ );
+ const startParams = {
+ channel: params.channel,
+ fileName: params.fileName,
+ logger,
+ ...params.isNvr != null ? { isNvr: params.isNvr } : {},
+ ...params.deviceId != null ? { deviceId: params.deviceId } : {}
+ };
+ const { result: replayResult, release: releaseQueueSlot } = await this.enqueueStreamingReplayOperation(async () => {
+ try {
+ return await this.startRecordingReplayStream(startParams);
+ } catch (e) {
+ if (!params.deviceId) throw e;
+ const sessionKey = `replay:${params.deviceId}`;
+ logger?.debug?.(
+ `[createRecordingReplayHlsSession] startRecordingReplayStream failed; force-closing dedicated client and retrying once`
+ );
+ await this.forceCloseDedicatedClient(sessionKey, logger);
+ return await this.startRecordingReplayStream(startParams);
+ }
+ });
+ const { stream, stop: stopReplay } = replayResult;
+ const input = new import_node_stream.PassThrough();
+ const H264_AUD = Buffer.from([0, 0, 0, 1, 9, 240]);
+ let tsMuxer = null;
+ let ff = null;
+ let ended = false;
+ let frameCount = 0;
+ let readyResolve = null;
+ let segmentWatcher = null;
+ const readyPromise = new Promise((resolve) => {
+ readyResolve = resolve;
+ });
+ const segments = /* @__PURE__ */ new Map();
+ const startSegmentWatcher = () => {
+ if (segmentWatcher || !readyResolve) return;
+ const firstSegmentPath = path5.join(tempDir, "segment_000.ts");
+ let checkCount = 0;
+ const maxChecks = Math.ceil((hlsSegmentDuration + 2) * 10);
+ segmentWatcher = setInterval(async () => {
+ checkCount++;
+ try {
+ const stats = await fs5.stat(firstSegmentPath);
+ if (stats.size > 256) {
+ if (segmentWatcher) {
+ clearInterval(segmentWatcher);
+ segmentWatcher = null;
+ }
+ logger?.debug?.(
+ `[createRecordingReplayHlsSession] First segment ready after ${checkCount * 100}ms, size=${stats.size}`
+ );
+ readyResolve?.();
+ readyResolve = null;
+ }
+ } catch {
+ }
+ if (checkCount >= maxChecks && readyResolve) {
+ if (segmentWatcher) {
+ clearInterval(segmentWatcher);
+ segmentWatcher = null;
+ }
+ logger?.debug?.(
+ `[createRecordingReplayHlsSession] Segment watcher timeout, resolving anyway`
+ );
+ readyResolve?.();
+ readyResolve = null;
+ }
+ }, 100);
+ };
+ const startFfmpeg = (videoType) => {
+ if (ff) return;
+ const needsTranscode = videoType === "H265" && params.transcodeH265ToH264 === true;
+ const gop = Math.max(1, Math.round(fps * hlsSegmentDuration));
+ logger?.log?.(
+ `[createRecordingReplayHlsSession] Starting ffmpeg HLS with videoType=${videoType}, transcode=${needsTranscode}, hlsTime=${hlsSegmentDuration}s, fileName=${params.fileName}`
+ );
+ MpegTsMuxer.resetCounters();
+ tsMuxer = new MpegTsMuxer({ videoType });
+ const args = [
+ "-hide_banner",
+ "-loglevel",
+ "error",
+ "-f",
+ "mpegts",
+ "-i",
+ "pipe:0",
+ // Video codec
+ ...needsTranscode ? [
+ "-c:v",
+ "libx264",
+ "-preset",
+ "ultrafast",
+ "-tune",
+ "zerolatency",
+ "-crf",
+ "23",
+ "-pix_fmt",
+ "yuv420p",
+ // Ensure regular GOP for consistent HLS cutting.
+ "-g",
+ String(gop),
+ "-keyint_min",
+ String(gop),
+ "-sc_threshold",
+ "0",
+ // Force frequent keyframes so HLS can cut segments reliably.
+ // Without this, ffmpeg will only cut on keyframes and segments can become huge.
+ "-force_key_frames",
+ `expr:gte(t,n_forced*${hlsSegmentDuration})`
+ ] : ["-c", "copy"],
+ // HLS output options
+ "-f",
+ "hls",
+ "-hls_time",
+ String(hlsSegmentDuration),
+ "-hls_list_size",
+ "0",
+ // Keep all segments in playlist
+ "-hls_playlist_type",
+ "event",
+ // Growing playlist (not VOD until end)
+ "-hls_segment_filename",
+ segmentPattern,
+ "-hls_flags",
+ "independent_segments+temp_file",
+ playlistPath
+ ];
+ ff = (0, import_node_child_process3.spawn)("ffmpeg", args, { stdio: ["pipe", "pipe", "pipe"] });
+ if (!ff.stdin || !ff.stderr) {
+ throw new Error("ffmpeg stdio streams not available");
+ }
+ input.pipe(ff.stdin);
+ ff.stdin.on("error", () => {
+ });
+ ff.stderr.on("error", () => {
+ });
+ input.on("error", () => {
+ });
+ let stderr = "";
+ ff.stderr.on("data", (d) => stderr += String(d));
+ ff.on("close", (code) => {
+ if (ended) return;
+ ended = true;
+ if ((code ?? 0) !== 0 && stderr.trim()) {
+ logger?.error?.(
+ `[createRecordingReplayHlsSession] ffmpeg exited with code ${code}: ${stderr}`
+ );
+ } else {
+ logger?.debug?.(
+ `[createRecordingReplayHlsSession] ffmpeg closed normally, frames=${frameCount}`
+ );
+ }
+ });
+ };
+ const stopAll = async () => {
+ if (ended) return;
+ ended = true;
+ releaseQueueSlot();
+ if (segmentWatcher) {
+ clearInterval(segmentWatcher);
+ segmentWatcher = null;
+ }
+ logger?.debug?.(
+ `[createRecordingReplayHlsSession] Stopping, frames=${frameCount}`
+ );
+ const cleanupPromises = [];
+ cleanupPromises.push(stopReplay().catch(() => {
+ }));
+ cleanupPromises.push(stream.stop().catch(() => {
+ }));
+ try {
+ input.end();
+ } catch {
+ }
+ try {
+ ff?.kill("SIGKILL");
+ } catch {
+ }
+ await Promise.race([
+ Promise.all(cleanupPromises),
+ new Promise((resolve) => setTimeout(resolve, 2e3))
+ ]);
+ setTimeout(async () => {
+ try {
+ const files = await fs5.readdir(tempDir);
+ for (const file of files) {
+ await fs5.unlink(path5.join(tempDir, file)).catch(() => {
+ });
+ }
+ await fs5.rmdir(tempDir).catch(() => {
+ });
+ } catch {
+ }
+ }, 6e4);
+ };
+ const timer = setTimeout(
+ () => {
+ logger?.debug?.(
+ `[createRecordingReplayHlsSession] Timeout reached (${seconds}s), stopping`
+ );
+ void stopAll();
+ },
+ Math.max(1, seconds) * 1e3
+ );
+ stream.on("error", (e) => {
+ logger?.error?.(
+ `[createRecordingReplayHlsSession] Stream error: ${e.message}`
+ );
+ clearTimeout(timer);
+ void stopAll();
+ });
+ stream.on("close", () => {
+ logger?.debug?.(
+ `[createRecordingReplayHlsSession] Stream closed, frames=${frameCount}`
+ );
+ clearTimeout(timer);
+ try {
+ input.end();
+ } catch {
+ }
+ });
+ stream.on(
+ "videoAccessUnit",
+ ({ data, videoType, isKeyframe, microseconds }) => {
+ if (ended) return;
+ startFfmpeg(videoType);
+ frameCount++;
+ if (tsMuxer) {
+ const tsData = tsMuxer.mux(data, microseconds, isKeyframe);
+ input.write(tsData);
+ }
+ if (frameCount === 1) {
+ startSegmentWatcher();
+ }
+ }
+ );
+ return {
+ getPlaylist: () => {
+ try {
+ const { readFileSync } = require("fs");
+ return readFileSync(playlistPath, "utf8");
+ } catch {
+ return "#EXTM3U\n#EXT-X-VERSION:3\n#EXT-X-TARGETDURATION:4\n";
+ }
+ },
+ getSegment: (name) => {
+ if (segments.has(name)) {
+ return segments.get(name);
+ }
+ try {
+ const { readFileSync } = require("fs");
+ const segmentPath = path5.join(tempDir, name);
+ const data = readFileSync(segmentPath);
+ segments.set(name, data);
+ return data;
+ } catch {
+ return void 0;
+ }
+ },
+ listSegments: () => {
+ try {
+ const { readdirSync } = require("fs");
+ return readdirSync(tempDir).filter(
+ (f) => f.endsWith(".ts")
+ );
+ } catch {
+ return [];
+ }
+ },
+ waitForReady: () => readyPromise,
+ stop: stopAll,
+ tempDir
+ };
+ }
  // ============================================================
  // STANDALONE CAMERA METHODS
  // ============================================================