@apocaliss92/nodelink-js 0.1.7 → 0.1.9
This diff shows the contents of publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
- package/README.md +9 -6
- package/dist/{DiagnosticsTools-MTXG65O3.js → DiagnosticsTools-EC7DADEQ.js} +2 -2
- package/dist/{chunk-MC2BRLLE.js → chunk-TZFZ5WJX.js} +71 -9
- package/dist/chunk-TZFZ5WJX.js.map +1 -0
- package/dist/{chunk-JMT75JNG.js → chunk-YUBYINJF.js} +674 -64
- package/dist/chunk-YUBYINJF.js.map +1 -0
- package/dist/cli/rtsp-server.cjs +740 -68
- package/dist/cli/rtsp-server.cjs.map +1 -1
- package/dist/cli/rtsp-server.d.cts +1 -0
- package/dist/cli/rtsp-server.d.ts +1 -0
- package/dist/cli/rtsp-server.js +2 -2
- package/dist/index.cjs +3293 -248
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +8187 -0
- package/dist/index.d.ts +761 -1
- package/dist/index.js +2359 -5
- package/dist/index.js.map +1 -1
- package/package.json +14 -3
- package/dist/chunk-JMT75JNG.js.map +0 -1
- package/dist/chunk-MC2BRLLE.js.map +0 -1
- package/dist/{DiagnosticsTools-MTXG65O3.js.map → DiagnosticsTools-EC7DADEQ.js.map} +0 -0
The hunks below are from package/dist/{chunk-JMT75JNG.js → chunk-YUBYINJF.js}:

```diff
@@ -134,7 +134,7 @@ import {
   talkTraceLog,
   traceLog,
   xmlEscape
-} from "./chunk-
+} from "./chunk-TZFZ5WJX.js";
 
 // src/protocol/framing.ts
 function encodeHeader(h) {
```
```diff
@@ -7701,10 +7701,15 @@ var parseRecordingFilesFromXml = (xml) => {
       if (startDt) item.startTime = startDt;
       if (endDt) item.endTime = endDt;
       const parsed = parseRecordingFileName(item.name ?? item.fileName);
-
-
-
-
+      const parsedFromPath = item.fileName !== item.name ? parseRecordingFileName(item.fileName) : void 0;
+      const bestParsed = parsedFromPath?.sizeBytes != null ? parsedFromPath : parsed;
+      if (bestParsed) {
+        item.parsedFileName = bestParsed;
+        if (!item.startTime) item.startTime = bestParsed.start;
+        if (!item.endTime) item.endTime = bestParsed.end;
+        if (item.sizeBytes == null && bestParsed.sizeBytes != null) {
+          item.sizeBytes = bestParsed.sizeBytes;
+        }
       }
       item.detectionClasses = buildDetectionClasses(parsed, item.recordType);
       out.push(item);
```
```diff
@@ -7731,6 +7736,9 @@ var parseRecordingFilesFromXml = (xml) => {
         item.parsedFileName = parsed;
         if (!item.startTime) item.startTime = parsed.start;
         if (!item.endTime) item.endTime = parsed.end;
+        if (item.sizeBytes == null && parsed.sizeBytes != null) {
+          item.sizeBytes = parsed.sizeBytes;
+        }
       }
       item.detectionClasses = buildDetectionClasses(parsed, item.recordType);
       out.push(item);
```
```diff
@@ -9028,6 +9036,10 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
    * Value: client, refCount, createdAt
    */
   dedicatedClients = /* @__PURE__ */ new Map();
+  /** Keep replay dedicated sockets warm briefly to reduce clip switch latency. */
+  // Keep replay sockets warm briefly for fast clip switches, but tear down quickly
+  // when clients stop requesting HLS segments (avoids looking like a stuck session).
+  static REPLAY_DEDICATED_KEEPALIVE_MS = 1e4;
   /**
    * Get a summary of currently active dedicated sessions.
    * Useful for debugging/logging to see how many sockets are open.
```
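The new `REPLAY_DEDICATED_KEEPALIVE_MS = 1e4` (10 000 ms) drives the idle-keepalive behavior described in the comments above: when a replay session releases its dedicated socket, the socket is not closed immediately but on a timer that is cancelled if the same session key is re-acquired. A minimal TypeScript sketch of that pattern, assuming placeholder names; the package's actual logic is in the `releaseDedicatedClient`/`acquireDedicatedClient` hunks further down:

```ts
// Illustrative sketch of a timer-based idle keepalive for pooled connections.
// REPLAY_DEDICATED_KEEPALIVE_MS mirrors the constant added in this diff (10 s);
// Connection, Entry and entries are placeholders, not the package's real types.
const REPLAY_DEDICATED_KEEPALIVE_MS = 10_000;

interface Connection { close(): Promise<void>; }
interface Entry { conn: Connection; refCount: number; idleCloseTimer?: NodeJS.Timeout; }

const entries = new Map<string, Entry>();

function release(key: string): void {
  const entry = entries.get(key);
  if (!entry) return;
  entry.refCount = Math.max(0, entry.refCount - 1);
  if (entry.refCount > 0 || entry.idleCloseTimer) return;
  // Defer the close so an immediate re-acquire (e.g. the next HLS segment) reuses the socket.
  entry.idleCloseTimer = setTimeout(() => {
    if (entry.refCount === 0) {
      entries.delete(key);
      void entry.conn.close();
    }
  }, REPLAY_DEDICATED_KEEPALIVE_MS);
}

function acquire(key: string): Connection | undefined {
  const entry = entries.get(key);
  if (!entry) return undefined;
  if (entry.idleCloseTimer) {
    clearTimeout(entry.idleCloseTimer); // cancel the pending close on reuse
    entry.idleCloseTimer = undefined;
  }
  entry.refCount += 1;
  return entry.conn;
}
```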
```diff
@@ -9137,22 +9149,35 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
    * Process the replay queue - executes operations one at a time.
    */
   async processReplayQueue() {
-    if (this.replayQueueProcessing)
+    if (this.replayQueueProcessing) {
+      this.logger?.debug?.(
+        `[ReplayQueue] Already processing, queue length: ${this.replayQueue.length}`
+      );
+      return;
+    }
     this.replayQueueProcessing = true;
+    this.logger?.debug?.(
+      `[ReplayQueue] Starting queue processing, items: ${this.replayQueue.length}`
+    );
     while (this.replayQueue.length > 0) {
       const item = this.replayQueue.shift();
       if (item) {
         const timeSinceLastReplay = Date.now() - this.lastReplayEndTime;
         if (timeSinceLastReplay < this.REPLAY_COOLDOWN_MS) {
-
-
-          );
+          const waitTime = this.REPLAY_COOLDOWN_MS - timeSinceLastReplay;
+          this.logger?.debug?.(`[ReplayQueue] Waiting ${waitTime}ms cooldown`);
+          await new Promise((r) => setTimeout(r, waitTime));
         }
+        this.logger?.debug?.(
+          `[ReplayQueue] Executing item, remaining: ${this.replayQueue.length}`
+        );
         await item.execute();
         this.lastReplayEndTime = Date.now();
+        this.logger?.debug?.(`[ReplayQueue] Item completed`);
       }
     }
     this.replayQueueProcessing = false;
+    this.logger?.debug?.(`[ReplayQueue] Queue processing complete`);
   }
   /**
    * Enqueue a replay operation with optional de-duplication.
```
```diff
@@ -9215,14 +9240,35 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
     this.replayQueue.push({
       execute: () => {
         return new Promise((releaseSlot) => {
+          let released = false;
+          const safeRelease = () => {
+            if (released) return;
+            released = true;
+            releaseSlot();
+          };
+          const safetyTimeout = setTimeout(
+            () => {
+              if (!released) {
+                this.logger?.warn?.(
+                  "[ReplayQueue] Safety timeout: releasing queue slot after 10 minutes"
+                );
+                safeRelease();
+              }
+            },
+            10 * 60 * 1e3
+          );
           setup().then((result) => {
             resolvePromise({
               result,
-              release: () =>
+              release: () => {
+                clearTimeout(safetyTimeout);
+                safeRelease();
+              }
             });
           }).catch((e) => {
+            clearTimeout(safetyTimeout);
             rejectPromise(e);
-
+            safeRelease();
           });
         });
       }
```
```diff
@@ -9292,30 +9338,68 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
    * immediately and create a new one. This ensures clean state for each clip.
    */
   async acquireDedicatedClient(sessionKey, logger) {
+    const log = logger ?? this.logger;
+    const isReplayKey = sessionKey.startsWith("replay:");
     const existing = this.dedicatedClients.get(sessionKey);
     if (existing) {
-
-
+      if (existing.idleCloseTimer) {
+        clearTimeout(existing.idleCloseTimer);
+        existing.idleCloseTimer = void 0;
+      }
+      if (existing.refCount === 0) {
+        existing.refCount = 1;
+        existing.lastUsedAt = Date.now();
+        log?.debug?.(
+          `[DedicatedClient] Reusing existing dedicated socket for sessionKey=${sessionKey}`
+        );
+        try {
+          if (!existing.client.loggedIn) {
+            await existing.client.login();
+          }
+        } catch {
+        }
+        if (existing.client.loggedIn) {
+          return {
+            client: existing.client,
+            release: () => this.releaseDedicatedClient(sessionKey, logger)
+          };
+        }
+      }
+      log?.log?.(
+        `[DedicatedClient] Closing existing socket for sessionKey=${sessionKey} (preempting active session)`
       );
       this.dedicatedClients.delete(sessionKey);
-
-
-
+      try {
+        await existing.client.close({ reason: "preempted by new session" });
+        log?.log?.(
+          `[DedicatedClient] Old socket closed successfully for sessionKey=${sessionKey}`
+        );
+      } catch (e) {
+        log?.warn?.(
+          `[DedicatedClient] Error closing old socket for sessionKey=${sessionKey}: ${e}`
+        );
+      }
     }
-
+    log?.log?.(
+      `[DedicatedClient] Opening new dedicated socket for sessionKey=${sessionKey}`
+    );
     const dedicatedClient = new BaichuanClient({
       host: this.host,
       username: this.username,
       password: this.password,
-      logger:
+      logger: log,
       debugOptions: this.client.getDebugConfig?.()
     });
     await dedicatedClient.login();
+    log?.log?.(
+      `[DedicatedClient] Dedicated socket logged in for sessionKey=${sessionKey}`
+    );
     this.dedicatedClients.set(sessionKey, {
       client: dedicatedClient,
       refCount: 1,
-
-
+      createdAt: Date.now(),
+      lastUsedAt: Date.now(),
+      idleCloseTimer: void 0
     });
     return {
       client: dedicatedClient,
```
```diff
@@ -9327,15 +9411,81 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
    * This ensures clean teardown at the end of each clip.
    */
   async releaseDedicatedClient(sessionKey, logger) {
+    const log = logger ?? this.logger;
     const entry = this.dedicatedClients.get(sessionKey);
     if (!entry) return;
+    entry.refCount = Math.max(0, entry.refCount - 1);
+    entry.lastUsedAt = Date.now();
+    if (entry.refCount > 0) return;
+    const isReplayKey = sessionKey.startsWith("replay:");
+    const allowReplayKeepAlive = /^replay:[^:]+$/.test(sessionKey);
+    if (isReplayKey && allowReplayKeepAlive) {
+      if (entry.idleCloseTimer) return;
+      entry.idleCloseTimer = setTimeout(async () => {
+        const current = this.dedicatedClients.get(sessionKey);
+        if (!current) return;
+        if (current.refCount > 0) return;
+        this.dedicatedClients.delete(sessionKey);
+        log?.debug?.(
+          `[DedicatedClient] Closing idle replay socket for sessionKey=${sessionKey} (keepalive expired)`
+        );
+        try {
+          await current.client.close({
+            reason: "replay idle keepalive expired"
+          });
+        } catch {
+        }
+      }, _ReolinkBaichuanApi.REPLAY_DEDICATED_KEEPALIVE_MS);
+      return;
+    }
     this.dedicatedClients.delete(sessionKey);
-
+    log?.log?.(
+      `[DedicatedClient] Closing socket for sessionKey=${sessionKey} (session ended)`
+    );
     try {
       await entry.client.close({ reason: "dedicated session ended" });
+      log?.log?.(
+        `[DedicatedClient] Socket closed successfully for sessionKey=${sessionKey}`
+      );
+    } catch (e) {
+      log?.warn?.(
+        `[DedicatedClient] Error closing socket for sessionKey=${sessionKey}: ${e}`
+      );
+    }
+  }
+  /**
+   * Force-close a dedicated client if it exists.
+   * This is called BEFORE entering the queue to immediately terminate any existing stream
+   * for the same sessionKey. The existing stream will receive an error, release its queue slot,
+   * and the new request can then proceed.
+   *
+   * @param sessionKey - The session key to force-close (e.g., `replay:${deviceId}`)
+   * @param logger - Optional logger
+   * @returns true if a client was closed, false if no client existed
+   */
+  async forceCloseDedicatedClient(sessionKey, logger) {
+    const log = logger ?? this.logger;
+    const entry = this.dedicatedClients.get(sessionKey);
+    if (!entry) return false;
+    if (entry.idleCloseTimer) {
+      clearTimeout(entry.idleCloseTimer);
+      entry.idleCloseTimer = void 0;
+    }
+    log?.log?.(
+      `[DedicatedClient] Force-closing existing socket for sessionKey=${sessionKey} (new request preempting)`
+    );
+    this.dedicatedClients.delete(sessionKey);
+    try {
+      await entry.client.close({ reason: "preempted by new request" });
+      log?.log?.(
+        `[DedicatedClient] Force-close complete for sessionKey=${sessionKey}`
+      );
     } catch (e) {
-
+      log?.warn?.(
+        `[DedicatedClient] Error during force-close for sessionKey=${sessionKey}: ${e}`
+      );
     }
+    return true;
   }
   /**
    * Create a dedicated Baichuan client session for streaming.
```
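The new public `forceCloseDedicatedClient(sessionKey, logger?)` tears down any dedicated socket for a session key before a new request enters the queue. A hedged usage sketch; the structural type below only names the method shown in this diff, and `deviceId` is an illustrative value (the `replay:${deviceId}` key format and boolean return come from the JSDoc above):

```ts
// Sketch: preempt a possibly stuck replay session before starting a new one.
type HasForceClose = {
  forceCloseDedicatedClient(sessionKey: string, logger?: unknown): Promise<boolean>;
};

async function preemptReplay(api: HasForceClose, deviceId: string): Promise<void> {
  const sessionKey = `replay:${deviceId}`;
  // true if an existing dedicated socket was closed, false if none existed
  const closed = await api.forceCloseDedicatedClient(sessionKey);
  if (closed) console.debug(`Preempted previous replay session for ${deviceId}`);
}
```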
```diff
@@ -9372,6 +9522,9 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
     await Promise.allSettled(
       entries.map(async ([key, entry]) => {
         try {
+          if (entry.idleCloseTimer) {
+            clearTimeout(entry.idleCloseTimer);
+          }
           this.logger?.debug?.(`[DedicatedClient] Cleanup: closing ${key}`);
           await entry.client.close({ reason: "API cleanup" });
         } catch {
```
```diff
@@ -11227,7 +11380,8 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
         channel,
         payloadXml: stopXml,
         messageClass: BC_CLASS_MODERN_24,
-        timeoutMs:
+        timeoutMs: 2e3,
+        // Short timeout - if socket is closed, fail fast
         internal: true
       });
     } catch {
```
```diff
@@ -11363,7 +11517,8 @@ var ReolinkBaichuanApi = class _ReolinkBaichuanApi {
         channel,
         payloadXml: stopXml,
         messageClass: BC_CLASS_MODERN_24,
-        timeoutMs:
+        timeoutMs: 2e3,
+        // Short timeout - if socket is closed, fail fast
         internal: true
       });
     } catch {
```
```diff
@@ -12410,11 +12565,20 @@ ${stderr}`)
     }
   }
   async downloadRecording(params) {
+    this.logger?.debug?.(
+      `[downloadRecording] Queuing download for: ${params.fileName}, channel=${params.channel}`
+    );
     return this.enqueueReplayOperation(async () => {
+      this.logger?.debug?.(
+        `[downloadRecording] Starting download for: ${params.fileName}`
+      );
       await this.client.login();
       const channel = this.normalizeChannel(params.channel);
       const uid = await this.ensureUidForRecordings(channel, params.uid);
       const fileName = params.fileName;
+      this.logger?.debug?.(
+        `[downloadRecording] Trying fileInfoListReplayBinaryDownload for: ${fileName}`
+      );
       let replayErr;
       try {
         return await this.fileInfoListReplayBinaryDownload({
```
```diff
@@ -12425,7 +12589,13 @@ ${stderr}`)
         });
       } catch (e) {
         replayErr = e;
+        this.logger?.debug?.(
+          `[downloadRecording] fileInfoListReplayBinaryDownload failed: ${e instanceof Error ? e.message : String(e)}`
+        );
       }
+      this.logger?.debug?.(
+        `[downloadRecording] Trying fileInfoListDownload for: ${fileName}`
+      );
       let downloadErr;
       try {
         return await this.fileInfoListDownload({
```
```diff
@@ -12436,7 +12606,13 @@ ${stderr}`)
         });
       } catch (e) {
         downloadErr = e;
+        this.logger?.debug?.(
+          `[downloadRecording] fileInfoListDownload failed: ${e instanceof Error ? e.message : String(e)}`
+        );
       }
+      this.logger?.debug?.(
+        `[downloadRecording] Trying fileInfoListPagedDownload for: ${fileName}`
+      );
       try {
         const result = await this.fileInfoListPagedDownload({
           channel,
```
```diff
@@ -12448,6 +12624,9 @@ ${stderr}`)
           return result;
         }
       } catch (e) {
+        this.logger?.debug?.(
+          `[downloadRecording] fileInfoListPagedDownload failed: ${e instanceof Error ? e.message : String(e)}`
+        );
       }
       const replayMsg = replayErr instanceof Error ? replayErr.message : replayErr != null ? String(replayErr) : "";
       const dlMsg = downloadErr instanceof Error ? downloadErr.message : downloadErr != null ? String(downloadErr) : "";
```
```diff
@@ -14520,11 +14699,13 @@ ${stderr}`)
    * @param settings - Floodlight settings to apply
    *
    * @example
+   * ```typescript
    * await api.setFloodlightSettings(0, {
    *   duration: 300, // 5 minutes
    *   detectType: 'people,vehicle',
    *   brightness: 80,
    * });
+   * ```
    */
   async setFloodlightSettings(channel, settings) {
     const ch = this.normalizeChannel(channel);
```
```diff
@@ -15630,7 +15811,7 @@ ${xml}`
    * @returns Test results for all stream types and profiles
    */
   async testChannelStreams(channel, logger) {
-    const { testChannelStreams } = await import("./DiagnosticsTools-
+    const { testChannelStreams } = await import("./DiagnosticsTools-EC7DADEQ.js");
     return await testChannelStreams({
       api: this,
       channel: this.normalizeChannel(channel),
```
```diff
@@ -15646,7 +15827,7 @@ ${xml}`
    * @returns Complete diagnostics for all channels and streams
    */
   async collectMultifocalDiagnostics(logger) {
-    const { collectMultifocalDiagnostics } = await import("./DiagnosticsTools-
+    const { collectMultifocalDiagnostics } = await import("./DiagnosticsTools-EC7DADEQ.js");
     return await collectMultifocalDiagnostics({
       api: this,
       logger
```
```diff
@@ -16666,11 +16847,13 @@ ${scheduleItems}
    */
   async createRecordingReplayMp4Stream(params) {
     const logger = params.logger ?? this.logger;
+    const useMpegTsMuxer = params.useMpegTsMuxer ?? true;
     const parsed = parseRecordingFileName(params.fileName);
     const durationMs = parsed?.durationMs ?? 3e5;
+    const fps = parsed?.framerate && parsed.framerate > 0 ? parsed.framerate : 15;
     const seconds = Math.ceil(durationMs / 1e3 * 1.1);
     logger?.debug?.(
-      `[createRecordingReplayMp4Stream] Starting: channel=${params.channel}, fileName=${params.fileName}, durationMs=${durationMs}, timeoutSec=${seconds}, deviceId=${params.deviceId ?? "auto"}`
+      `[createRecordingReplayMp4Stream] Starting: channel=${params.channel}, fileName=${params.fileName}, durationMs=${durationMs}, fps=${fps}, timeoutSec=${seconds}, deviceId=${params.deviceId ?? "auto"}, useMpegTsMuxer=${useMpegTsMuxer}`
     );
     const startParams = {
       channel: params.channel,
```
```diff
@@ -16679,39 +16862,84 @@ ${scheduleItems}
       ...params.isNvr != null ? { isNvr: params.isNvr } : {},
       ...params.deviceId != null ? { deviceId: params.deviceId } : {}
     };
-    const { result: replayResult, release: releaseQueueSlot } = await this.enqueueStreamingReplayOperation(
-
-
+    const { result: replayResult, release: releaseQueueSlot } = await this.enqueueStreamingReplayOperation(async () => {
+      try {
+        return await this.startRecordingReplayStream(startParams);
+      } catch (e) {
+        if (!params.deviceId) throw e;
+        const sessionKey = `replay:${params.deviceId}`;
+        logger?.debug?.(
+          `[createRecordingReplayMp4Stream] startRecordingReplayStream failed; force-closing dedicated client and retrying once`
+        );
+        await this.forceCloseDedicatedClient(sessionKey, logger);
+        return await this.startRecordingReplayStream(startParams);
+      }
+    });
     const { stream, stop: stopReplay } = replayResult;
     const input = new PassThrough();
     const output = new PassThrough();
+    const H264_AUD = Buffer.from([0, 0, 0, 1, 9, 240]);
     let tsMuxer = null;
     let ff = null;
     let ended = false;
     let frameCount = 0;
     const startFfmpeg = (videoType) => {
       if (ff) return;
+      const needsTranscode = videoType === "H265" && params.transcodeH265ToH264 === true;
       logger?.debug?.(
-        `[createRecordingReplayMp4Stream] Starting ffmpeg with videoType=${videoType}`
+        `[createRecordingReplayMp4Stream] Starting ffmpeg with videoType=${videoType}, transcode=${needsTranscode}, useMpegTsMuxer=${useMpegTsMuxer}, fps=${fps}`
       );
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+      let args;
+      if (useMpegTsMuxer) {
+        MpegTsMuxer.resetCounters();
+        tsMuxer = new MpegTsMuxer({ videoType });
+        args = [
+          "-hide_banner",
+          "-loglevel",
+          "error",
+          "-f",
+          "mpegts",
+          "-i",
+          "pipe:0",
+          // Video codec: transcode H.265→H.264 if requested, otherwise copy
+          ...needsTranscode ? ["-c:v", "libx264", "-preset", "ultrafast", "-crf", "23"] : ["-c", "copy"],
+          // frag_keyframe: create new fragment at each keyframe
+          // empty_moov: write ftyp/moov immediately (required for streaming)
+          // default_base_moof: required for iOS Media Source Extensions
+          // negative_cts_offsets: fixes some iOS playback issues
+          "-movflags",
+          "frag_keyframe+empty_moov+default_base_moof+negative_cts_offsets",
+          "-f",
+          "mp4",
+          "pipe:1"
+        ];
+      } else {
+        const inputFormat = videoType === "H265" ? "hevc" : "h264";
+        args = [
+          "-hide_banner",
+          "-loglevel",
+          "error",
+          "-fflags",
+          "+genpts",
+          "-r",
+          String(fps),
+          "-f",
+          inputFormat,
+          "-i",
+          "pipe:0",
+          // Video codec: transcode H.265→H.264 if requested, otherwise copy
+          ...needsTranscode ? ["-c:v", "libx264", "-preset", "ultrafast", "-crf", "23"] : ["-c", "copy"],
+          // frag_keyframe: create new fragment at each keyframe
+          // empty_moov: write ftyp/moov immediately (required for streaming)
+          // default_base_moof: required for iOS Media Source Extensions
+          // negative_cts_offsets: fixes some iOS playback issues
+          "-movflags",
+          "frag_keyframe+empty_moov+default_base_moof+negative_cts_offsets",
+          "-f",
+          "mp4",
+          "pipe:1"
+        ];
+      }
       ff = spawn2("ffmpeg", args, { stdio: ["pipe", "pipe", "pipe"] });
       if (!ff.stdin || !ff.stdout || !ff.stderr) {
         throw new Error("ffmpeg stdio streams not available");
```
```diff
@@ -16749,17 +16977,19 @@ ${scheduleItems}
     const stopAll = async () => {
       if (ended) return;
       ended = true;
+      releaseQueueSlot();
       logger?.debug?.(
         `[createRecordingReplayMp4Stream] Stopping stream, frames=${frameCount}`
       );
-
-
-
-
-
-
-
-
+      const cleanupPromises = [];
+      cleanupPromises.push(
+        stopReplay().catch(() => {
+        })
+      );
+      cleanupPromises.push(
+        stream.stop().catch(() => {
+        })
+      );
       try {
         input.end();
       } catch {
```
```diff
@@ -16772,7 +17002,11 @@ ${scheduleItems}
         output.end();
       } catch {
       }
-
+      await Promise.race([
+        Promise.all(cleanupPromises),
+        new Promise((resolve) => setTimeout(resolve, 2e3))
+        // Max 2s for cleanup
+      ]);
     };
     const timer = setTimeout(
       () => {
```
```diff
@@ -16794,15 +17028,25 @@ ${scheduleItems}
       output.destroy(e);
       void stopAll();
     });
+    stream.on("close", () => {
+      logger?.debug?.(
+        `[createRecordingReplayMp4Stream] Stream closed, frames=${frameCount}`
+      );
+      clearTimeout(timer);
+      void stopAll();
+    });
     stream.on(
       "videoAccessUnit",
       ({ data, videoType, isKeyframe, microseconds }) => {
         if (ended) return;
         startFfmpeg(videoType);
         frameCount++;
-        if (tsMuxer) {
+        if (useMpegTsMuxer && tsMuxer) {
           const tsData = tsMuxer.mux(data, microseconds, isKeyframe);
           input.write(tsData);
+        } else {
+          if (videoType === "H264") input.write(H264_AUD);
+          input.write(data);
         }
       }
     );
```
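Taken together, the hunks above give `createRecordingReplayMp4Stream` a `useMpegTsMuxer` option (default `true`) and an automatic force-close-and-retry when `startRecordingReplayStream` fails. A hedged call-site sketch: the option names come from this diff, while the handling of the returned stream/stop handle is an assumption, since the method's return statement is not shown here:

```ts
// Sketch only: `api` is an existing ReolinkBaichuanApi instance; the shape of `replay`
// (an output stream plus a stop handle) is assumed, not confirmed by this diff.
import { createWriteStream } from "node:fs";

async function saveClip(api: any, fileName: string): Promise<void> {
  const replay = await api.createRecordingReplayMp4Stream({
    channel: 0,
    fileName,                  // e.g. a path returned by the recording search APIs
    useMpegTsMuxer: false,     // new in 0.1.9: bypass the internal MPEG-TS muxer
    transcodeH265ToH264: true, // optional H.265 -> H.264 transcode via libx264
  });
  replay.output?.pipe(createWriteStream("clip.mp4")); // assumed field name
  // ... later: await replay.stop?.();
}
```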
```diff
@@ -16841,19 +17085,53 @@ ${scheduleItems}
    */
   async createRecordingDownloadMp4Stream(params) {
     const timeoutMs = params.timeoutMs ?? 12e4;
-    const parsed = parseRecordingFileName(params.fileName);
-    const fps = parsed?.framerate && parsed.framerate > 0 ? parsed.framerate : 15;
     const channel = this.normalizeChannel(params.channel);
     const uid = await this.ensureUidForRecordings(channel);
-    const
+    const raw = await this.downloadRecording({
       channel,
       uid,
       fileName: params.fileName,
       timeoutMs
     });
-    if (
+    if (raw.length === 0) {
       throw new Error("Downloaded recording is empty");
     }
+    const videoFrames = [];
+    let videoType = null;
+    const decoder = new BcMediaAnnexBDecoder({
+      strict: false,
+      logger: this.logger,
+      onVideoAccessUnit: ({ annexB: annexB2, microseconds }) => {
+        videoFrames.push({ annexB: annexB2, microseconds });
+      }
+    });
+    decoder.push(raw);
+    const stats = decoder.getStats();
+    videoType = stats.videoType;
+    if (videoFrames.length === 0) {
+      throw new Error("Downloaded recording has no video frames");
+    }
+    let fps;
+    if (videoFrames.length >= 2) {
+      const firstTs = videoFrames[0].microseconds;
+      const lastTs = videoFrames[videoFrames.length - 1].microseconds;
+      const durationUs = lastTs - firstTs;
+      if (durationUs > 0) {
+        const durationSeconds = durationUs / 1e6;
+        fps = (videoFrames.length - 1) / durationSeconds;
+      } else {
+        const infoFps = stats.infos[0]?.fps;
+        fps = infoFps && infoFps > 0 ? infoFps : 15;
+      }
+    } else {
+      const infoFps = stats.infos[0]?.fps;
+      fps = infoFps && infoFps > 0 ? infoFps : 15;
+    }
+    if (fps > 14 && fps < 16) fps = 15;
+    else if (fps > 23 && fps < 26) fps = 25;
+    else if (fps > 29 && fps < 31) fps = 30;
+    else fps = Math.round(fps * 100) / 100;
+    const annexB = Buffer.concat(videoFrames.map((f) => f.annexB));
     const input = new PassThrough();
     const output = new PassThrough();
     let ff = null;
```
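The download path now derives the frame rate from the decoded access-unit timestamps instead of the file name, then snaps near-standard values to 15/25/30. A self-contained TypeScript sketch of that estimation with a worked example; the formula mirrors the diffed code, but the `estimateFps` helper name is ours:

```ts
// Estimate fps from per-frame timestamps in microseconds, then snap common rates.
// Mirrors the logic added in createRecordingDownloadMp4Stream; `estimateFps` is illustrative.
function estimateFps(timestampsUs: number[], fallback = 15): number {
  if (timestampsUs.length < 2) return fallback;
  const durationUs = timestampsUs[timestampsUs.length - 1] - timestampsUs[0];
  if (durationUs <= 0) return fallback;
  let fps = (timestampsUs.length - 1) / (durationUs / 1e6);
  if (fps > 14 && fps < 16) fps = 15;
  else if (fps > 23 && fps < 26) fps = 25;
  else if (fps > 29 && fps < 31) fps = 30;
  else fps = Math.round(fps * 100) / 100;
  return fps;
}

// 150 frames spanning 10 s -> 149 intervals / 10 s = 14.9 fps -> snapped to 15.
const ts = Array.from({ length: 150 }, (_, i) => i * (10_000_000 / 149));
console.log(estimateFps(ts)); // 15
```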
```diff
@@ -16925,6 +17203,338 @@ ${scheduleItems}
       stop: stopAll
     };
   }
+  /**
+   * Create an HLS (HTTP Live Streaming) session for a recording.
+   *
+   * This method creates HLS segments on-the-fly from a recording replay stream.
+   * HLS is required for iOS devices (Safari, Home app) which don't support
+   * fragmented MP4 streaming well and require Range request support.
+   *
+   * The session writes HLS segments (.ts files) and playlist (.m3u8) to a
+   * temporary directory. You must serve these files via HTTP to the client.
+   *
+   * @example
+   * ```ts
+   * const session = await api.createRecordingReplayHlsSession({
+   *   channel: 0,
+   *   fileName: "/mnt/sda/Mp4Record/2026-01-25/RecS03.mp4",
+   * });
+   *
+   * // Serve playlist
+   * app.get('/clip.m3u8', (req, res) => {
+   *   res.type('application/vnd.apple.mpegurl');
+   *   res.send(session.getPlaylist());
+   * });
+   *
+   * // Serve segments
+   * app.get('/segment/:name', (req, res) => {
+   *   const data = session.getSegment(req.params.name);
+   *   if (data) {
+   *     res.type('video/mp2t');
+   *     res.send(data);
+   *   } else {
+   *     res.status(404).end();
+   *   }
+   * });
+   *
+   * // Cleanup when done
+   * await session.stop();
+   * ```
+   */
+  async createRecordingReplayHlsSession(params) {
+    const logger = params.logger ?? this.logger;
+    const hlsSegmentDuration = params.hlsSegmentDuration ?? 4;
+    const os = await import("os");
+    const path = await import("path");
+    const fs = await import("fs/promises");
+    const crypto2 = await import("crypto");
+    const tempDir = path.join(
+      os.tmpdir(),
+      `reolink-hls-${crypto2.randomBytes(8).toString("hex")}`
+    );
+    await fs.mkdir(tempDir, { recursive: true });
+    const playlistPath = path.join(tempDir, "playlist.m3u8");
+    const segmentPattern = path.join(tempDir, "segment_%03d.ts");
+    const parsed = parseRecordingFileName(params.fileName);
+    const durationMs = parsed?.durationMs ?? 3e5;
+    const fps = parsed?.framerate && parsed.framerate > 0 ? parsed.framerate : 15;
+    const seconds = Math.ceil(durationMs / 1e3 * 1.1);
+    logger?.debug?.(
+      `[createRecordingReplayHlsSession] Starting: channel=${params.channel}, fileName=${params.fileName}, durationMs=${durationMs}, hlsSegmentDuration=${hlsSegmentDuration}`
+    );
+    const startParams = {
+      channel: params.channel,
+      fileName: params.fileName,
+      logger,
+      ...params.isNvr != null ? { isNvr: params.isNvr } : {},
+      ...params.deviceId != null ? { deviceId: params.deviceId } : {}
+    };
+    const { result: replayResult, release: releaseQueueSlot } = await this.enqueueStreamingReplayOperation(async () => {
+      try {
+        return await this.startRecordingReplayStream(startParams);
+      } catch (e) {
+        if (!params.deviceId) throw e;
+        const sessionKey = `replay:${params.deviceId}`;
+        logger?.debug?.(
+          `[createRecordingReplayHlsSession] startRecordingReplayStream failed; force-closing dedicated client and retrying once`
+        );
+        await this.forceCloseDedicatedClient(sessionKey, logger);
+        return await this.startRecordingReplayStream(startParams);
+      }
+    });
+    const { stream, stop: stopReplay } = replayResult;
+    const input = new PassThrough();
+    const H264_AUD = Buffer.from([0, 0, 0, 1, 9, 240]);
+    let tsMuxer = null;
+    let ff = null;
+    let ended = false;
+    let frameCount = 0;
+    let readyResolve = null;
+    let segmentWatcher = null;
+    const readyPromise = new Promise((resolve) => {
+      readyResolve = resolve;
+    });
+    const segments = /* @__PURE__ */ new Map();
+    const startSegmentWatcher = () => {
+      if (segmentWatcher || !readyResolve) return;
+      const firstSegmentPath = path.join(tempDir, "segment_000.ts");
+      let checkCount = 0;
+      const maxChecks = Math.ceil((hlsSegmentDuration + 2) * 10);
+      segmentWatcher = setInterval(async () => {
+        checkCount++;
+        try {
+          const stats = await fs.stat(firstSegmentPath);
+          if (stats.size > 256) {
+            if (segmentWatcher) {
+              clearInterval(segmentWatcher);
+              segmentWatcher = null;
+            }
+            logger?.debug?.(
+              `[createRecordingReplayHlsSession] First segment ready after ${checkCount * 100}ms, size=${stats.size}`
+            );
+            readyResolve?.();
+            readyResolve = null;
+          }
+        } catch {
+        }
+        if (checkCount >= maxChecks && readyResolve) {
+          if (segmentWatcher) {
+            clearInterval(segmentWatcher);
+            segmentWatcher = null;
+          }
+          logger?.debug?.(
+            `[createRecordingReplayHlsSession] Segment watcher timeout, resolving anyway`
+          );
+          readyResolve?.();
+          readyResolve = null;
+        }
+      }, 100);
+    };
+    const startFfmpeg = (videoType) => {
+      if (ff) return;
+      const needsTranscode = videoType === "H265" && params.transcodeH265ToH264 === true;
+      const gop = Math.max(1, Math.round(fps * hlsSegmentDuration));
+      logger?.log?.(
+        `[createRecordingReplayHlsSession] Starting ffmpeg HLS with videoType=${videoType}, transcode=${needsTranscode}, hlsTime=${hlsSegmentDuration}s, fileName=${params.fileName}`
+      );
+      MpegTsMuxer.resetCounters();
+      tsMuxer = new MpegTsMuxer({ videoType });
+      const args = [
+        "-hide_banner",
+        "-loglevel",
+        "error",
+        "-f",
+        "mpegts",
+        "-i",
+        "pipe:0",
+        // Video codec
+        ...needsTranscode ? [
+          "-c:v",
+          "libx264",
+          "-preset",
+          "ultrafast",
+          "-tune",
+          "zerolatency",
+          "-crf",
+          "23",
+          "-pix_fmt",
+          "yuv420p",
+          // Ensure regular GOP for consistent HLS cutting.
+          "-g",
+          String(gop),
+          "-keyint_min",
+          String(gop),
+          "-sc_threshold",
+          "0",
+          // Force frequent keyframes so HLS can cut segments reliably.
+          // Without this, ffmpeg will only cut on keyframes and segments can become huge.
+          "-force_key_frames",
+          `expr:gte(t,n_forced*${hlsSegmentDuration})`
+        ] : ["-c", "copy"],
+        // HLS output options
+        "-f",
+        "hls",
+        "-hls_time",
+        String(hlsSegmentDuration),
+        "-hls_list_size",
+        "0",
+        // Keep all segments in playlist
+        "-hls_playlist_type",
+        "event",
+        // Growing playlist (not VOD until end)
+        "-hls_segment_filename",
+        segmentPattern,
+        "-hls_flags",
+        "independent_segments+temp_file",
+        playlistPath
+      ];
+      ff = spawn2("ffmpeg", args, { stdio: ["pipe", "pipe", "pipe"] });
+      if (!ff.stdin || !ff.stderr) {
+        throw new Error("ffmpeg stdio streams not available");
+      }
+      input.pipe(ff.stdin);
+      ff.stdin.on("error", () => {
+      });
+      ff.stderr.on("error", () => {
+      });
+      input.on("error", () => {
+      });
+      let stderr = "";
+      ff.stderr.on("data", (d) => stderr += String(d));
+      ff.on("close", (code) => {
+        if (ended) return;
+        ended = true;
+        if ((code ?? 0) !== 0 && stderr.trim()) {
+          logger?.error?.(
+            `[createRecordingReplayHlsSession] ffmpeg exited with code ${code}: ${stderr}`
+          );
+        } else {
+          logger?.debug?.(
+            `[createRecordingReplayHlsSession] ffmpeg closed normally, frames=${frameCount}`
+          );
+        }
+      });
+    };
+    const stopAll = async () => {
+      if (ended) return;
+      ended = true;
+      releaseQueueSlot();
+      if (segmentWatcher) {
+        clearInterval(segmentWatcher);
+        segmentWatcher = null;
+      }
+      logger?.debug?.(
+        `[createRecordingReplayHlsSession] Stopping, frames=${frameCount}`
+      );
+      const cleanupPromises = [];
+      cleanupPromises.push(stopReplay().catch(() => {
+      }));
+      cleanupPromises.push(stream.stop().catch(() => {
+      }));
+      try {
+        input.end();
+      } catch {
+      }
+      try {
+        ff?.kill("SIGKILL");
+      } catch {
+      }
+      await Promise.race([
+        Promise.all(cleanupPromises),
+        new Promise((resolve) => setTimeout(resolve, 2e3))
+      ]);
+      setTimeout(async () => {
+        try {
+          const files = await fs.readdir(tempDir);
+          for (const file of files) {
+            await fs.unlink(path.join(tempDir, file)).catch(() => {
+            });
+          }
+          await fs.rmdir(tempDir).catch(() => {
+          });
+        } catch {
+        }
+      }, 6e4);
+    };
+    const timer = setTimeout(
+      () => {
+        logger?.debug?.(
+          `[createRecordingReplayHlsSession] Timeout reached (${seconds}s), stopping`
+        );
+        void stopAll();
+      },
+      Math.max(1, seconds) * 1e3
+    );
+    stream.on("error", (e) => {
+      logger?.error?.(
+        `[createRecordingReplayHlsSession] Stream error: ${e.message}`
+      );
+      clearTimeout(timer);
+      void stopAll();
+    });
+    stream.on("close", () => {
+      logger?.debug?.(
+        `[createRecordingReplayHlsSession] Stream closed, frames=${frameCount}`
+      );
+      clearTimeout(timer);
+      try {
+        input.end();
+      } catch {
+      }
+    });
+    stream.on(
+      "videoAccessUnit",
+      ({ data, videoType, isKeyframe, microseconds }) => {
+        if (ended) return;
+        startFfmpeg(videoType);
+        frameCount++;
+        if (tsMuxer) {
+          const tsData = tsMuxer.mux(data, microseconds, isKeyframe);
+          input.write(tsData);
+        }
+        if (frameCount === 1) {
+          startSegmentWatcher();
+        }
+      }
+    );
+    return {
+      getPlaylist: () => {
+        try {
+          const { readFileSync } = __require("fs");
+          return readFileSync(playlistPath, "utf8");
+        } catch {
+          return "#EXTM3U\n#EXT-X-VERSION:3\n#EXT-X-TARGETDURATION:4\n";
+        }
+      },
+      getSegment: (name) => {
+        if (segments.has(name)) {
+          return segments.get(name);
+        }
+        try {
+          const { readFileSync } = __require("fs");
+          const segmentPath = path.join(tempDir, name);
+          const data = readFileSync(segmentPath);
+          segments.set(name, data);
+          return data;
+        } catch {
+          return void 0;
+        }
+      },
+      listSegments: () => {
+        try {
+          const { readdirSync } = __require("fs");
+          return readdirSync(tempDir).filter(
+            (f) => f.endsWith(".ts")
+          );
+        } catch {
+          return [];
+        }
+      },
+      waitForReady: () => readyPromise,
+      stop: stopAll,
+      tempDir
+    };
+  }
   // ============================================================
   // STANDALONE CAMERA METHODS
   // ============================================================
```
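Beyond the `getPlaylist`/`getSegment` pair shown in the JSDoc example, the returned session also exposes `waitForReady()`, `listSegments()`, `tempDir`, and `stop()` (all visible in the return object above). A hedged sketch of waiting for the first segment before answering the first playlist request; the `session` value is assumed to come from `createRecordingReplayHlsSession` as documented:

```ts
// Sketch: block the first playlist response until ffmpeg has produced segment_000.ts,
// then log what is available. `session` comes from createRecordingReplayHlsSession.
async function servePlaylistOnce(session: {
  waitForReady(): Promise<void>;
  getPlaylist(): string;
  listSegments(): string[];
  tempDir: string;
  stop(): Promise<void>;
}): Promise<string> {
  await session.waitForReady(); // resolves once the first .ts segment is non-trivial (or on timeout)
  console.debug(`HLS dir: ${session.tempDir}, segments so far:`, session.listSegments());
  return session.getPlaylist(); // falls back to a minimal #EXTM3U header if the playlist is missing
}
```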
```diff
@@ -18066,4 +18676,4 @@ export {
   isTcpFailureThatShouldFallbackToUdp,
   autoDetectDeviceType
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-YUBYINJF.js.map
```