@apocaliss92/nodelink-js 0.1.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +445 -0
- package/dist/DiagnosticsTools-MTXG65O3.js +25 -0
- package/dist/DiagnosticsTools-MTXG65O3.js.map +1 -0
- package/dist/chunk-JMT75JNG.js +18069 -0
- package/dist/chunk-JMT75JNG.js.map +1 -0
- package/dist/chunk-MC2BRLLE.js +7238 -0
- package/dist/chunk-MC2BRLLE.js.map +1 -0
- package/dist/cli/rtsp-server.cjs +24900 -0
- package/dist/cli/rtsp-server.cjs.map +1 -0
- package/dist/cli/rtsp-server.js +277 -0
- package/dist/cli/rtsp-server.js.map +1 -0
- package/dist/index.cjs +29963 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.ts +7737 -0
- package/dist/index.js +4752 -0
- package/dist/index.js.map +1 -0
- package/package.json +68 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,4752 @@
|
|
|
1
|
+
import {
|
|
2
|
+
BaichuanClient,
|
|
3
|
+
BaichuanEventEmitter,
|
|
4
|
+
BaichuanFrameParser,
|
|
5
|
+
BaichuanRtspServer,
|
|
6
|
+
BcUdpStream,
|
|
7
|
+
DUAL_LENS_DUAL_MOTION_MODELS,
|
|
8
|
+
DUAL_LENS_MODELS,
|
|
9
|
+
DUAL_LENS_SINGLE_MOTION_MODELS,
|
|
10
|
+
Intercom,
|
|
11
|
+
NVR_HUB_EXACT_TYPES,
|
|
12
|
+
NVR_HUB_MODEL_PATTERNS,
|
|
13
|
+
ReolinkBaichuanApi,
|
|
14
|
+
abilitiesHasAny,
|
|
15
|
+
asLogger,
|
|
16
|
+
autoDetectDeviceType,
|
|
17
|
+
computeDeviceCapabilities,
|
|
18
|
+
createDebugGateLogger,
|
|
19
|
+
createLogger,
|
|
20
|
+
createNativeStream,
|
|
21
|
+
createNullLogger,
|
|
22
|
+
createTaggedLogger,
|
|
23
|
+
decodeHeader,
|
|
24
|
+
discoverReolinkDevices,
|
|
25
|
+
discoverViaHttpScan,
|
|
26
|
+
discoverViaUdpBroadcast,
|
|
27
|
+
discoverViaUdpDirect,
|
|
28
|
+
encodeHeader,
|
|
29
|
+
flattenAbilitiesForChannel,
|
|
30
|
+
getConstructedVideoStreamOptions,
|
|
31
|
+
getGlobalLogger,
|
|
32
|
+
getVideoStream,
|
|
33
|
+
isDualLenseModel,
|
|
34
|
+
isNvrHubModel,
|
|
35
|
+
isTcpFailureThatShouldFallbackToUdp,
|
|
36
|
+
maskUid,
|
|
37
|
+
normalizeUid,
|
|
38
|
+
parseSupportXml,
|
|
39
|
+
setGlobalLogger
|
|
40
|
+
} from "./chunk-JMT75JNG.js";
|
|
41
|
+
import {
|
|
42
|
+
AesStreamDecryptor,
|
|
43
|
+
BC_AES_IV,
|
|
44
|
+
BC_CLASS_FILE_DOWNLOAD,
|
|
45
|
+
BC_CLASS_LEGACY,
|
|
46
|
+
BC_CLASS_MODERN_20,
|
|
47
|
+
BC_CLASS_MODERN_24,
|
|
48
|
+
BC_CLASS_MODERN_24_ALT,
|
|
49
|
+
BC_CMD_ID_ABILITY_INFO,
|
|
50
|
+
BC_CMD_ID_ALARM_EVENT_LIST,
|
|
51
|
+
BC_CMD_ID_AUDIO_ALARM_PLAY,
|
|
52
|
+
BC_CMD_ID_CHANNEL_INFO_ALL,
|
|
53
|
+
BC_CMD_ID_CMD_123,
|
|
54
|
+
BC_CMD_ID_CMD_209,
|
|
55
|
+
BC_CMD_ID_CMD_265,
|
|
56
|
+
BC_CMD_ID_CMD_440,
|
|
57
|
+
BC_CMD_ID_COVER_PREVIEW,
|
|
58
|
+
BC_CMD_ID_COVER_RESPONSE,
|
|
59
|
+
BC_CMD_ID_COVER_STANDALONE_458,
|
|
60
|
+
BC_CMD_ID_COVER_STANDALONE_459,
|
|
61
|
+
BC_CMD_ID_COVER_STANDALONE_460,
|
|
62
|
+
BC_CMD_ID_COVER_STANDALONE_461,
|
|
63
|
+
BC_CMD_ID_COVER_STANDALONE_462,
|
|
64
|
+
BC_CMD_ID_FILE_INFO_LIST_CLOSE,
|
|
65
|
+
BC_CMD_ID_FILE_INFO_LIST_DL_VIDEO,
|
|
66
|
+
BC_CMD_ID_FILE_INFO_LIST_DOWNLOAD,
|
|
67
|
+
BC_CMD_ID_FILE_INFO_LIST_GET,
|
|
68
|
+
BC_CMD_ID_FILE_INFO_LIST_OPEN,
|
|
69
|
+
BC_CMD_ID_FILE_INFO_LIST_REPLAY,
|
|
70
|
+
BC_CMD_ID_FILE_INFO_LIST_STOP,
|
|
71
|
+
BC_CMD_ID_FIND_REC_VIDEO_CLOSE,
|
|
72
|
+
BC_CMD_ID_FIND_REC_VIDEO_GET,
|
|
73
|
+
BC_CMD_ID_FIND_REC_VIDEO_OPEN,
|
|
74
|
+
BC_CMD_ID_FLOODLIGHT_STATUS_LIST,
|
|
75
|
+
BC_CMD_ID_GET_ABILITY_SUPPORT,
|
|
76
|
+
BC_CMD_ID_GET_ACCESS_USER_LIST,
|
|
77
|
+
BC_CMD_ID_GET_AI_ALARM,
|
|
78
|
+
BC_CMD_ID_GET_AI_CFG,
|
|
79
|
+
BC_CMD_ID_GET_AI_DENOISE,
|
|
80
|
+
BC_CMD_ID_GET_AUDIO_ALARM,
|
|
81
|
+
BC_CMD_ID_GET_AUDIO_CFG,
|
|
82
|
+
BC_CMD_ID_GET_AUDIO_TASK,
|
|
83
|
+
BC_CMD_ID_GET_BATTERY_INFO,
|
|
84
|
+
BC_CMD_ID_GET_BATTERY_INFO_LIST,
|
|
85
|
+
BC_CMD_ID_GET_DAY_NIGHT_THRESHOLD,
|
|
86
|
+
BC_CMD_ID_GET_DAY_RECORDS,
|
|
87
|
+
BC_CMD_ID_GET_EMAIL_TASK,
|
|
88
|
+
BC_CMD_ID_GET_FTP_TASK,
|
|
89
|
+
BC_CMD_ID_GET_HDD_INFO_LIST,
|
|
90
|
+
BC_CMD_ID_GET_KIT_AP_CFG,
|
|
91
|
+
BC_CMD_ID_GET_LED_STATE,
|
|
92
|
+
BC_CMD_ID_GET_MOTION_ALARM,
|
|
93
|
+
BC_CMD_ID_GET_ONLINE_USER_LIST,
|
|
94
|
+
BC_CMD_ID_GET_OSD_DATETIME,
|
|
95
|
+
BC_CMD_ID_GET_PIR_INFO,
|
|
96
|
+
BC_CMD_ID_GET_PTZ_POSITION,
|
|
97
|
+
BC_CMD_ID_GET_PTZ_PRESET,
|
|
98
|
+
BC_CMD_ID_GET_RECORD,
|
|
99
|
+
BC_CMD_ID_GET_RECORD_CFG,
|
|
100
|
+
BC_CMD_ID_GET_REC_ENC_CFG,
|
|
101
|
+
BC_CMD_ID_GET_SIREN_STATUS,
|
|
102
|
+
BC_CMD_ID_GET_SLEEP_STATE,
|
|
103
|
+
BC_CMD_ID_GET_STREAM_INFO_LIST,
|
|
104
|
+
BC_CMD_ID_GET_SUPPORT,
|
|
105
|
+
BC_CMD_ID_GET_SYSTEM_GENERAL,
|
|
106
|
+
BC_CMD_ID_GET_TIMELAPSE_CFG,
|
|
107
|
+
BC_CMD_ID_GET_VIDEO_INPUT,
|
|
108
|
+
BC_CMD_ID_GET_WHITE_LED,
|
|
109
|
+
BC_CMD_ID_GET_WIFI,
|
|
110
|
+
BC_CMD_ID_GET_WIFI_SIGNAL,
|
|
111
|
+
BC_CMD_ID_GET_ZOOM_FOCUS,
|
|
112
|
+
BC_CMD_ID_PING,
|
|
113
|
+
BC_CMD_ID_PTZ_CONTROL,
|
|
114
|
+
BC_CMD_ID_PTZ_CONTROL_PRESET,
|
|
115
|
+
BC_CMD_ID_PUSH_COORDINATE_POINT_LIST,
|
|
116
|
+
BC_CMD_ID_PUSH_DINGDONG_LIST,
|
|
117
|
+
BC_CMD_ID_PUSH_NET_INFO,
|
|
118
|
+
BC_CMD_ID_PUSH_SERIAL,
|
|
119
|
+
BC_CMD_ID_PUSH_SLEEP_STATUS,
|
|
120
|
+
BC_CMD_ID_PUSH_VIDEO_INPUT,
|
|
121
|
+
BC_CMD_ID_SET_AI_ALARM,
|
|
122
|
+
BC_CMD_ID_SET_AI_CFG,
|
|
123
|
+
BC_CMD_ID_SET_AUDIO_TASK,
|
|
124
|
+
BC_CMD_ID_SET_MOTION_ALARM,
|
|
125
|
+
BC_CMD_ID_SET_PIR_INFO,
|
|
126
|
+
BC_CMD_ID_SET_WHITE_LED_STATE,
|
|
127
|
+
BC_CMD_ID_SET_WHITE_LED_TASK,
|
|
128
|
+
BC_CMD_ID_SET_ZOOM_FOCUS,
|
|
129
|
+
BC_CMD_ID_SUPPORT,
|
|
130
|
+
BC_CMD_ID_TALK,
|
|
131
|
+
BC_CMD_ID_TALK_ABILITY,
|
|
132
|
+
BC_CMD_ID_TALK_CONFIG,
|
|
133
|
+
BC_CMD_ID_TALK_RESET,
|
|
134
|
+
BC_CMD_ID_UDP_KEEP_ALIVE,
|
|
135
|
+
BC_CMD_ID_VIDEO,
|
|
136
|
+
BC_CMD_ID_VIDEO_STOP,
|
|
137
|
+
BC_MAGIC,
|
|
138
|
+
BC_MAGIC_REV,
|
|
139
|
+
BC_TCP_DEFAULT_PORT,
|
|
140
|
+
BC_XML_KEY,
|
|
141
|
+
BaichuanVideoStream,
|
|
142
|
+
BcMediaAnnexBDecoder,
|
|
143
|
+
BcMediaCodec,
|
|
144
|
+
H264RtpDepacketizer,
|
|
145
|
+
H265RtpDepacketizer,
|
|
146
|
+
ReolinkCgiApi,
|
|
147
|
+
ReolinkHttpClient,
|
|
148
|
+
aesDecrypt,
|
|
149
|
+
aesEncrypt,
|
|
150
|
+
bcDecrypt,
|
|
151
|
+
bcEncrypt,
|
|
152
|
+
bcHeaderHasPayloadOffset,
|
|
153
|
+
buildAbilityInfoExtensionXml,
|
|
154
|
+
buildBinaryExtensionXml,
|
|
155
|
+
buildChannelExtensionXml,
|
|
156
|
+
buildFloodlightManualXml,
|
|
157
|
+
buildLoginXml,
|
|
158
|
+
buildPreviewStopXml,
|
|
159
|
+
buildPreviewStopXmlV11,
|
|
160
|
+
buildPreviewXml,
|
|
161
|
+
buildPreviewXmlV11,
|
|
162
|
+
buildPtzControlXml,
|
|
163
|
+
buildPtzPresetXml,
|
|
164
|
+
buildPtzPresetXmlV2,
|
|
165
|
+
buildRtspPath,
|
|
166
|
+
buildRtspUrl,
|
|
167
|
+
buildSirenManualXml,
|
|
168
|
+
buildSirenTimesXml,
|
|
169
|
+
buildStartZoomFocusXml,
|
|
170
|
+
buildWhiteLedStateXml,
|
|
171
|
+
collectCgiDiagnostics,
|
|
172
|
+
collectMultifocalDiagnostics,
|
|
173
|
+
collectNativeDiagnostics,
|
|
174
|
+
collectNvrDiagnostics,
|
|
175
|
+
convertToAnnexB,
|
|
176
|
+
convertToAnnexB2,
|
|
177
|
+
convertToLengthPrefixed,
|
|
178
|
+
createDiagnosticsBundle,
|
|
179
|
+
deriveAesKey,
|
|
180
|
+
detectVideoCodecFromNal,
|
|
181
|
+
extractPpsFromAnnexB,
|
|
182
|
+
extractSpsFromAnnexB,
|
|
183
|
+
extractVpsFromAnnexB,
|
|
184
|
+
getH265NalType,
|
|
185
|
+
getXmlText,
|
|
186
|
+
hasStartCodes,
|
|
187
|
+
hasStartCodes2,
|
|
188
|
+
isH264KeyframeAnnexB,
|
|
189
|
+
isH265Irap,
|
|
190
|
+
isH265KeyframeAnnexB,
|
|
191
|
+
isValidH264AnnexBAccessUnit,
|
|
192
|
+
isValidH265AnnexBAccessUnit,
|
|
193
|
+
md5HexUpper,
|
|
194
|
+
md5StrModern,
|
|
195
|
+
parseBcMedia,
|
|
196
|
+
parseRecordingFileName,
|
|
197
|
+
printNvrDiagnostics,
|
|
198
|
+
runAllDiagnosticsConsecutively,
|
|
199
|
+
runMultifocalDiagnosticsConsecutively,
|
|
200
|
+
sampleStreams,
|
|
201
|
+
splitAnnexBToNalPayloads,
|
|
202
|
+
splitAnnexBToNalPayloads2,
|
|
203
|
+
testChannelStreams,
|
|
204
|
+
xmlEscape,
|
|
205
|
+
zipDirectory
|
|
206
|
+
} from "./chunk-MC2BRLLE.js";
|
|
207
|
+
|
|
208
|
+
// src/reolink/AutodiscoveryClient.ts
|
|
209
|
+
/**
 * Continuous autodiscovery client for Reolink devices.
 *
 * Periodically scans the network (HTTP scan, UDP broadcast, or both) via the
 * imported `discoverViaHttpScan` / `discoverViaUdpBroadcast` helpers and keeps
 * a Map of discovered devices keyed by host. Only one scan runs at a time;
 * the next scan is scheduled after the previous one finishes.
 */
var AutodiscoveryClient = class {
  // Normalized configuration (optional keys are only set when provided).
  options;
  // Discovered devices keyed by host (IP address).
  discoveredDevices = /* @__PURE__ */ new Map();
  // Handle of the pending setTimeout for the next scheduled scan, or null.
  scanTimer = null;
  // True between start() and stop().
  isRunning = false;
  // Promise of the scan currently in flight, or null when idle.
  currentScanPromise = null;
  /**
   * Constructor for the autodiscovery client.
   *
   * @param options - Discovery configuration options. Optional keys are copied
   *   onto `this.options` only when explicitly defined, so absent keys stay
   *   absent (rather than being set to `undefined`).
   */
  constructor(options = {}) {
    this.options = {
      scanIntervalMs: options.scanIntervalMs ?? 6e4,
      // Default: 60 seconds
      autoStart: options.autoStart ?? false
    };
    if (options.networkCidr !== void 0) {
      this.options.networkCidr = options.networkCidr;
    }
    if (options.username !== void 0) {
      this.options.username = options.username;
    }
    if (options.password !== void 0) {
      this.options.password = options.password;
    }
    if (options.httpProbeTimeoutMs !== void 0) {
      this.options.httpProbeTimeoutMs = options.httpProbeTimeoutMs;
    }
    if (options.maxConcurrentProbes !== void 0) {
      this.options.maxConcurrentProbes = options.maxConcurrentProbes;
    }
    if (options.logger !== void 0) {
      this.options.logger = options.logger;
    }
    if (options.httpPorts !== void 0) {
      this.options.httpPorts = options.httpPorts;
    }
    if (options.discoveryMethod !== void 0) {
      this.options.discoveryMethod = options.discoveryMethod;
    }
    if (options.udpBroadcastTimeoutMs !== void 0) {
      this.options.udpBroadcastTimeoutMs = options.udpBroadcastTimeoutMs;
    }
    if (this.options.autoStart) {
      this.start();
    }
  }
  /**
   * Starts continuous discovery.
   * No-op (with a warning) if discovery is already running.
   * Kicks off an immediate scan (fire-and-forget; errors are handled inside
   * performScan) and schedules the next one.
   */
  start() {
    if (this.isRunning) {
      this.options.logger?.warn?.("[Autodiscovery] Discovery already running");
      return;
    }
    this.isRunning = true;
    this.options.logger?.log?.(
      `[Autodiscovery] Starting continuous discovery (interval: ${this.options.scanIntervalMs}ms)`
    );
    this.performScan();
    this.scheduleNextScan();
  }
  /**
   * Stops continuous discovery.
   * No-op (with a warning) if discovery is not running.
   * Clears any pending scan timer; a scan already in flight is not aborted.
   */
  stop() {
    if (!this.isRunning) {
      this.options.logger?.warn?.("[Autodiscovery] Discovery not running");
      return;
    }
    this.isRunning = false;
    if (this.scanTimer) {
      clearTimeout(this.scanTimer);
      this.scanTimer = null;
    }
    this.options.logger?.log?.("[Autodiscovery] Discovery stopped");
  }
  /**
   * Returns the current list of discovered devices.
   *
   * @returns Array of discovered devices, sorted by host (string compare)
   */
  getDiscoveredDevices() {
    return Array.from(this.discoveredDevices.values()).sort((a, b) => {
      return a.host.localeCompare(b.host);
    });
  }
  /**
   * Returns the number of devices discovered so far.
   *
   * @returns Count of discovered devices
   */
  getDeviceCount() {
    return this.discoveredDevices.size;
  }
  /**
   * Checks whether discovery is currently running.
   *
   * @returns `true` if discovery is running, `false` otherwise
   */
  isActive() {
    return this.isRunning;
  }
  /**
   * Forces an immediate scan (does not wait for the scheduled interval).
   * If a scan is already in progress, waits for it to complete instead of
   * starting a new one.
   *
   * @returns Promise that resolves when the scan has completed
   */
  async scanNow() {
    if (this.currentScanPromise) {
      this.options.logger?.log?.("[Autodiscovery] Scan already in progress, waiting for completion...");
      await this.currentScanPromise;
      return;
    }
    await this.performScan();
  }
  /**
   * Removes a device from the list (useful when it is known to be gone).
   *
   * @param host - IP address of the device to remove
   * @returns `true` if the device was removed, `false` if it was not present
   */
  removeDevice(host) {
    const removed = this.discoveredDevices.delete(host);
    if (removed) {
      this.options.logger?.log?.(`[Autodiscovery] Device removed: ${host}`);
    }
    return removed;
  }
  /**
   * Clears all discovered devices from the list.
   */
  clearDevices() {
    const count = this.discoveredDevices.size;
    this.discoveredDevices.clear();
    this.options.logger?.log?.(`[Autodiscovery] Removed ${count} device(s) from list`);
  }
  /**
   * Performs a single network scan.
   * Builds discovery options from `this.options`, runs the selected discovery
   * method(s), then merges the results into `discoveredDevices`. All errors are
   * caught and logged, so the returned promise never rejects.
   * `currentScanPromise` is set for the duration of the scan so scanNow() can
   * await an in-flight scan.
   */
  async performScan() {
    const scanPromise = (async () => {
      try {
        this.options.logger?.log?.("[Autodiscovery] Starting scan...");
        const discoveryMethod = this.options.discoveryMethod ?? "http";
        const discoveryOptions = {
          enableHttpScanning: discoveryMethod === "http" || discoveryMethod === "both",
          enableUdpDiscovery: discoveryMethod === "udp" || discoveryMethod === "both"
        };
        // Forward only the options that were explicitly configured.
        if (this.options.networkCidr !== void 0) {
          discoveryOptions.networkCidr = this.options.networkCidr;
        }
        if (this.options.username !== void 0) {
          discoveryOptions.username = this.options.username;
        }
        if (this.options.password !== void 0) {
          discoveryOptions.password = this.options.password;
        }
        if (this.options.httpProbeTimeoutMs !== void 0) {
          discoveryOptions.httpProbeTimeoutMs = this.options.httpProbeTimeoutMs;
        }
        if (this.options.maxConcurrentProbes !== void 0) {
          discoveryOptions.maxConcurrentProbes = this.options.maxConcurrentProbes;
        }
        if (this.options.logger !== void 0) {
          discoveryOptions.logger = this.options.logger;
        }
        if (this.options.httpPorts !== void 0) {
          discoveryOptions.httpPorts = this.options.httpPorts;
        }
        if (this.options.udpBroadcastTimeoutMs !== void 0) {
          discoveryOptions.udpBroadcastTimeoutMs = this.options.udpBroadcastTimeoutMs;
        }
        // Run the enabled discovery methods sequentially and collect results.
        let discovered = [];
        if (discoveryMethod === "http" || discoveryMethod === "both") {
          const httpDevices = await discoverViaHttpScan(discoveryOptions);
          discovered.push(...httpDevices);
        }
        if (discoveryMethod === "udp" || discoveryMethod === "both") {
          const udpDevices = await discoverViaUdpBroadcast(discoveryOptions);
          discovered.push(...udpDevices);
        }
        // NOTE(review): beforeCount is computed but never read afterwards.
        const beforeCount = this.discoveredDevices.size;
        const newDevices = [];
        const updatedDevices = [];
        for (const device of discovered) {
          const existing = this.discoveredDevices.get(device.host);
          if (existing) {
            // Known host: merge any newly available fields in place.
            const updated = this.mergeDeviceInfo(existing, device);
            if (updated) {
              updatedDevices.push(updated);
            }
          } else {
            // New host: store a shallow copy so later mutations of `device`
            // by callers do not affect the stored entry.
            this.discoveredDevices.set(device.host, { ...device });
            newDevices.push(device);
          }
        }
        const afterCount = this.discoveredDevices.size;
        this.options.logger?.log?.(
          `[Autodiscovery] Scan completed: ${newDevices.length} new, ${updatedDevices.length} updated, total: ${afterCount}`
        );
        // Log a detail line for each newly discovered device.
        for (const device of newDevices) {
          const details = [];
          if (device.model) details.push(`Model: ${device.model}`);
          if (device.name) details.push(`Name: ${device.name}`);
          if (device.uid) details.push(`UID: ${device.uid}`);
          if (device.firmwareVersion) details.push(`Firmware: ${device.firmwareVersion}`);
          if (device.httpPort) details.push(`HTTP Port: ${device.httpPort}`);
          if (device.httpsPort) details.push(`HTTPS Port: ${device.httpsPort}`);
          details.push(`Discovery Method: ${device.discoveryMethod}`);
          if (device.supportsHttps !== void 0) details.push(`HTTPS: ${device.supportsHttps}`);
          if (device.httpAccessible !== void 0) details.push(`HTTP Accessible: ${device.httpAccessible}`);
          this.options.logger?.log?.(
            `[Autodiscovery] \u{1F195} NEW DEVICE DISCOVERED - Host: ${device.host}${details.length > 0 ? ` | ${details.join(" | ")}` : ""}`
          );
        }
      } catch (error) {
        // Scan failures are logged, never thrown: the scan loop must survive.
        const msg = error instanceof Error ? error.message : String(error);
        this.options.logger?.error?.(`[Autodiscovery] Error during scan: ${msg}`);
      }
    })();
    this.currentScanPromise = scanPromise;
    await scanPromise;
    this.currentScanPromise = null;
  }
  /**
   * Merges information from a fresh scan into an existing device entry.
   * Identity-like fields (model, uid, name, firmwareVersion, ports) are only
   * filled in when previously missing; the boolean reachability flags
   * (supportsHttps, httpAccessible) are updated whenever they change.
   * Returns the updated device if anything changed, otherwise `null`.
   */
  mergeDeviceInfo(existing, updated) {
    let hasChanges = false;
    if (!existing.model && updated.model) {
      existing.model = updated.model;
      hasChanges = true;
    }
    if (!existing.uid && updated.uid) {
      existing.uid = updated.uid;
      hasChanges = true;
    }
    if (!existing.name && updated.name) {
      existing.name = updated.name;
      hasChanges = true;
    }
    if (!existing.firmwareVersion && updated.firmwareVersion) {
      existing.firmwareVersion = updated.firmwareVersion;
      hasChanges = true;
    }
    if (updated.httpPort && !existing.httpPort) {
      existing.httpPort = updated.httpPort;
      hasChanges = true;
    }
    if (updated.httpsPort && !existing.httpsPort) {
      existing.httpsPort = updated.httpsPort;
      hasChanges = true;
    }
    if (updated.supportsHttps !== void 0 && existing.supportsHttps !== updated.supportsHttps) {
      existing.supportsHttps = updated.supportsHttps;
      hasChanges = true;
    }
    if (updated.httpAccessible !== void 0 && existing.httpAccessible !== updated.httpAccessible) {
      existing.httpAccessible = updated.httpAccessible;
      hasChanges = true;
    }
    return hasChanges ? existing : null;
  }
  /**
   * Schedules the next scan after `scanIntervalMs`.
   * Each completed scan re-schedules the following one (no overlapping scans),
   * and every step re-checks `isRunning` so stop() takes effect promptly.
   */
  scheduleNextScan() {
    if (!this.isRunning) {
      return;
    }
    this.scanTimer = setTimeout(() => {
      this.scanTimer = null;
      if (this.isRunning) {
        this.performScan().finally(() => {
          if (this.isRunning) {
            this.scheduleNextScan();
          }
        });
      }
    }, this.options.scanIntervalMs);
  }
};
|
|
497
|
+
|
|
498
|
+
// src/reolink/baichuan/endpoints-server.ts
|
|
499
|
+
import http from "http";
|
|
500
|
+
import { spawn } from "child_process";
|
|
501
|
+
/**
 * Parses an integer query-string parameter.
 *
 * @param v - Raw parameter value (string, or null/undefined when absent)
 * @param def - Fallback returned when the value is absent or not a number
 * @returns The parsed base-10 integer, or `def`
 */
function parseIntParam(v, def) {
  if (v == null) {
    return def;
  }
  const parsed = Number.parseInt(v, 10);
  if (Number.isFinite(parsed)) {
    return parsed;
  }
  return def;
}
|
|
506
|
+
/**
 * Parses a stream-profile query parameter.
 *
 * @param v - Raw value; null/undefined defaults to "sub"
 * @returns One of "main", "sub", or "ext" (trimmed)
 * @throws Error when the trimmed value is not a recognized profile
 */
function parseProfile(v) {
  const normalized = (v ?? "sub").trim();
  if (["main", "sub", "ext"].includes(normalized)) {
    return normalized;
  }
  throw new Error("Invalid profile (must be main, sub, or ext)");
}
|
|
511
|
+
/**
 * Parses a native-stream variant query parameter (case-insensitive).
 *
 * @param v - Raw value; null/undefined or empty string defaults to "default"
 * @returns "default", "autotrack", or "telephoto"
 * @throws Error when the value is not a recognized variant
 */
function parseNativeVariant(v) {
  const normalized = (v ?? "default").trim().toLowerCase();
  switch (normalized) {
    case "":
    case "default":
      return "default";
    case "autotrack":
      return "autotrack";
    case "telephoto":
      return "telephoto";
    default:
      throw new Error("Invalid variant (must be default, autotrack, or telephoto)");
  }
}
|
|
518
|
+
/**
 * Parses a required `time` query parameter into a Date.
 *
 * @param v - Raw value (expected to be an ISO date-time string)
 * @returns The parsed Date
 * @throws Error when the value is missing or does not parse to a valid date
 */
function parseDateParam(v) {
  if (!v) {
    throw new Error("Missing time");
  }
  const parsed = new Date(v);
  if (Number.isNaN(parsed.getTime())) {
    throw new Error("Invalid time (expected ISO string)");
  }
  return parsed;
}
|
|
525
|
+
/**
 * Parses a required date query parameter into a Date, reporting the parameter
 * name in error messages.
 *
 * @param name - Parameter name used in error messages (e.g. "start", "end")
 * @param v - Raw value (expected to be an ISO date-time string)
 * @returns The parsed Date
 * @throws Error when the value is missing or does not parse to a valid date
 */
function parseDateParamNamed(name, v) {
  if (!v) {
    throw new Error(`Missing ${name}`);
  }
  const parsed = new Date(v);
  if (Number.isNaN(parsed.getTime())) {
    throw new Error(`Invalid ${name} (expected ISO string)`);
  }
  return parsed;
}
|
|
532
|
+
/**
 * Creates an HTTP server exposing Baichuan camera endpoints:
 *   GET /stream             -> JSON { rtspUrl } for a (channel, profile, variant) RTSP proxy
 *   GET /download           -> raw recording bytes as an attachment
 *   GET /recordings         -> JSON list of recordings in a time range
 *   GET /vod/stream         -> live MPEG-TS remux of an RTMP VOD url (via ffmpeg)
 *   GET /vod/download       -> fragmented MP4 remux of an RTMP VOD url (via ffmpeg)
 *   GET /replay/cover.jpg   -> JPEG thumbnail for a recording at a point in time
 *   GET /replay/stream.mp4  -> MP4 replay stream of a recording
 * Only GET is accepted; any handler error is returned as a 500 text response.
 *
 * @param opts - { baichuan, listenHost?, listenPort, rtspListenHost?, ... }
 * @returns The configured (not yet listening) http.Server; closing it also
 *   closes the underlying Baichuan API client.
 */
function createBaichuanEndpointsServer(opts) {
  const api = new ReolinkBaichuanApi({
    ...opts.baichuan
  });
  const listenHost = opts.listenHost ?? "127.0.0.1";
  const rtspListenHost = opts.rtspListenHost ?? "127.0.0.1";
  // Cache of RTSP proxy URLs keyed by "channel:profile:variant".
  const rtspServers = /* @__PURE__ */ new Map();
  const server = http.createServer(async (req, res) => {
    try {
      if (!req.url) {
        res.statusCode = 400;
        res.end("Bad Request");
        return;
      }
      // Base URL is only needed so `new URL` can parse the relative req.url.
      const u = new URL(req.url, `http://${listenHost}:${opts.listenPort}`);
      if (req.method !== "GET") {
        res.statusCode = 405;
        res.setHeader("Allow", "GET");
        res.end("Method Not Allowed");
        return;
      }
      if (u.pathname === "/stream") {
        const channel = parseIntParam(u.searchParams.get("channel"), 0);
        const profile = parseProfile(u.searchParams.get("profile"));
        const variant = parseNativeVariant(u.searchParams.get("variant"));
        if (!Number.isFinite(channel) || channel < 0) {
          res.statusCode = 400;
          res.end("Invalid channel");
          return;
        }
        // Reuse a previously created RTSP proxy for the same tuple.
        const key = `${channel}:${profile}:${variant}`;
        const cached = rtspServers.get(key);
        if (cached) {
          res.setHeader("Content-Type", "application/json");
          res.end(JSON.stringify({ rtspUrl: cached.url }));
          return;
        }
        // listenPort 0 lets the OS pick an ephemeral port for the RTSP server.
        const rtsp = await api.createRtspStream(channel, profile, {
          listenHost: rtspListenHost,
          listenPort: 0,
          path: `/stream/${channel}/${profile}${variant === "default" ? "" : `/${variant}`}`,
          ...variant === "default" ? {} : { variant }
        });
        const rtspUrl = rtsp.getRtspUrl();
        rtspServers.set(key, { url: rtspUrl });
        res.setHeader("Content-Type", "application/json");
        res.end(JSON.stringify({ rtspUrl }));
        return;
      }
      if (u.pathname === "/download") {
        const channel = parseIntParam(u.searchParams.get("channel"), 0);
        const uid = (u.searchParams.get("uid") ?? "").trim();
        const fileName = (u.searchParams.get("fileName") ?? "").trim();
        // Default download timeout: 120 s.
        const timeoutMs = parseIntParam(
          u.searchParams.get("timeoutMs"),
          12e4
        );
        if (!uid) {
          res.statusCode = 400;
          res.end("Missing uid");
          return;
        }
        if (!fileName) {
          res.statusCode = 400;
          res.end("Missing fileName");
          return;
        }
        const buf = await api.downloadRecording({
          channel,
          uid,
          fileName,
          timeoutMs
        });
        // Attachment name: last non-empty path segment of fileName.
        const outName = fileName.split("/").filter(Boolean).at(-1) ?? "recording.bin";
        res.statusCode = 200;
        res.setHeader("Content-Type", "application/octet-stream");
        res.setHeader(
          "Content-Disposition",
          `attachment; filename="${outName}"`
        );
        res.setHeader("Content-Length", String(buf.length));
        res.end(buf);
        return;
      }
      if (u.pathname === "/recordings") {
        const channel = parseIntParam(u.searchParams.get("channel"), 0);
        const streamType = (u.searchParams.get("streamType") ?? "subStream").trim();
        const start = parseDateParamNamed("start", u.searchParams.get("start"));
        const end = parseDateParamNamed("end", u.searchParams.get("end"));
        const recordType = (u.searchParams.get("recordType") ?? "").trim();
        // count > 0 limits the response to the most recent `count` entries.
        const count = parseIntParam(u.searchParams.get("count"), 0);
        // Default listing timeout: 30 s.
        const timeoutMs = parseIntParam(
          u.searchParams.get("timeoutMs"),
          3e4
        );
        if (streamType !== "mainStream" && streamType !== "subStream") {
          res.statusCode = 400;
          res.end("Invalid streamType (must be mainStream or subStream)");
          return;
        }
        let recordings = await api.getVideoclips({
          channel,
          start,
          end,
          streamType,
          ...recordType ? { recordType } : {},
          timeoutMs
        });
        if (count > 0) recordings = recordings.slice(-count);
        res.statusCode = 200;
        res.setHeader("Content-Type", "application/json");
        res.end(
          JSON.stringify({
            channel,
            streamType,
            start: start.toISOString(),
            end: end.toISOString(),
            recordings
          })
        );
        return;
      }
      if (u.pathname === "/vod/stream" || u.pathname === "/vod/download") {
        const channel = parseIntParam(u.searchParams.get("channel"), 0);
        const fileName = (u.searchParams.get("fileName") ?? "").trim();
        const streamType = (u.searchParams.get("streamType") ?? "mainStream").trim();
        // NOTE(review): parsed from the "rtmpTransport" query parameter and
        // validated below, but never passed to getVodRtmpUrl or ffmpeg.
        const rtspTransport = (u.searchParams.get("rtmpTransport") ?? "tcp").trim();
        if (!fileName) {
          res.statusCode = 400;
          res.end("Missing fileName");
          return;
        }
        if (streamType !== "mainStream" && streamType !== "subStream") {
          res.statusCode = 400;
          res.end("Invalid streamType (must be mainStream or subStream)");
          return;
        }
        if (rtspTransport !== "tcp" && rtspTransport !== "udp") {
          res.statusCode = 400;
          res.end("Invalid rtmpTransport (must be tcp or udp)");
          return;
        }
        const rtmpUrl = await api.getVodRtmpUrl({
          channel,
          fileName,
          streamType,
          ensureEnabled: true
        });
        const outName = fileName.split("/").filter(Boolean).at(-1) ?? "recording.mp4";
        if (u.pathname === "/vod/stream") {
          // Live remux: RTMP in -> MPEG-TS out, streamed to the client.
          res.writeHead(200, {
            "Content-Type": "video/mp2t",
            "Cache-Control": "no-cache",
            Connection: "close"
          });
          const ff2 = spawn("ffmpeg", [
            "-hide_banner",
            "-loglevel",
            "error",
            "-rtmp_live",
            "live",
            "-i",
            rtmpUrl,
            "-c",
            "copy",
            "-f",
            "mpegts",
            "pipe:1"
          ]);
          ff2.stdout.pipe(res);
          // Kill ffmpeg as soon as the client disconnects.
          const cleanup2 = () => {
            ff2.kill("SIGKILL");
          };
          req.on("close", cleanup2);
          res.on("close", cleanup2);
          ff2.on("close", () => {
            res.end();
          });
          return;
        }
        // /vod/download: remux to fragmented MP4 (streamable to a pipe).
        res.statusCode = 200;
        res.setHeader("Content-Type", "video/mp4");
        res.setHeader(
          "Content-Disposition",
          `attachment; filename="${outName}"`
        );
        res.setHeader("Cache-Control", "no-cache");
        res.setHeader("Connection", "close");
        const ff = spawn("ffmpeg", [
          "-hide_banner",
          "-loglevel",
          "error",
          "-rtmp_live",
          "live",
          "-i",
          rtmpUrl,
          "-c",
          "copy",
          "-movflags",
          // frag_keyframe+empty_moov makes MP4 writable without seeking.
          "frag_keyframe+empty_moov",
          "-f",
          "mp4",
          "pipe:1"
        ]);
        ff.stdout.pipe(res);
        const cleanup = () => {
          ff.kill("SIGKILL");
        };
        req.on("close", cleanup);
        res.on("close", cleanup);
        ff.on("close", () => {
          res.end();
        });
        return;
      }
      if (u.pathname === "/replay/cover.jpg") {
        const channel = parseIntParam(u.searchParams.get("channel"), 0);
        const time = parseDateParam(u.searchParams.get("time"));
        const snapType = (u.searchParams.get("snapType") ?? "sub").trim();
        // Default thumbnail timeout: 30 s.
        const timeoutMs = parseIntParam(
          u.searchParams.get("timeoutMs"),
          3e4
        );
        const jpegQuality = parseIntParam(u.searchParams.get("jpegQuality"), 2);
        if (snapType !== "main" && snapType !== "sub") {
          res.statusCode = 400;
          res.end("Invalid snapType (must be main or sub)");
          return;
        }
        const { jpeg, snapshot } = await api.getVideoclipThumbnailJpegRaw({
          channel,
          time,
          snapType,
          timeoutMs,
          jpegQuality
        });
        res.statusCode = 200;
        res.setHeader("Content-Type", "image/jpeg");
        res.setHeader("Cache-Control", "no-cache");
        // Expose source-frame metadata as custom headers for debugging.
        res.setHeader("X-Reolink-Encoding", snapshot.encoding);
        res.setHeader("X-Reolink-Frame-Length", String(snapshot.frameLength));
        res.setHeader("Content-Length", String(jpeg.length));
        res.end(jpeg);
        return;
      }
      if (u.pathname === "/replay/stream.mp4") {
        const channel = parseIntParam(u.searchParams.get("channel"), 0);
        const fileName = (u.searchParams.get("fileName") ?? "").trim();
        if (!fileName) {
          res.statusCode = 400;
          res.end("Missing fileName");
          return;
        }
        const { mp4, stop } = await api.createRecordingReplayMp4Stream({
          channel,
          fileName
        });
        res.writeHead(200, {
          "Content-Type": "video/mp4",
          "Cache-Control": "no-cache",
          Connection: "close"
        });
        mp4.pipe(res);
        // Stop the replay and tear down the stream when the client goes away.
        const cleanup = () => {
          void stop();
          try {
            mp4.destroy();
          } catch {
          }
        };
        req.on("close", cleanup);
        res.on("close", cleanup);
        mp4.on("end", () => {
          res.end();
        });
        mp4.on("error", () => {
          res.end();
        });
        return;
      }
      res.statusCode = 404;
      res.end("Not Found");
    } catch (e) {
      // Any handler failure becomes a plain-text 500.
      res.statusCode = 500;
      res.setHeader("Content-Type", "text/plain");
      res.end(e instanceof Error ? e.message : String(e));
    }
  });
  // Closing the HTTP server also closes the Baichuan API client (best-effort).
  server.on("close", () => {
    void api.close().catch(() => void 0);
  });
  return server;
}
|
|
825
|
+
|
|
826
|
+
// src/rtsp/server.ts
|
|
827
|
+
import http2 from "http";
|
|
828
|
+
import { spawn as spawn2 } from "child_process";
|
|
829
|
+
/**
 * Build an HTTP server that exposes a live camera stream over plain HTTP.
 *
 * Single endpoint: GET /stream?channel=<n>&profile=<main|sub|ext>.
 * Each request spawns an ffmpeg child process that pulls the camera's RTSP
 * stream (URL built via buildRtspUrl) and remuxes the video track — codec
 * copied, audio dropped ("-an") — into an MPEG-TS byte stream piped to the
 * HTTP response.
 *
 * opts fields read here: listenPort, host, username, password,
 * rtspPort (optional), rtspTransport (optional, defaults to "tcp").
 *
 * NOTE: the returned server is not yet listening; the caller binds it.
 * NOTE(review): assumes an `ffmpeg` binary is available on PATH — confirm.
 */
function createRtspProxyServer(opts) {
  // `http2` is the bundler's alias for node:http (see the import above),
  // not the node:http2 module.
  return http2.createServer((req, res) => {
    if (!req.url) {
      res.statusCode = 400;
      res.end("Bad Request");
      return;
    }
    // The base URL only exists so WHATWG URL parsing of the request path works.
    const u = new URL(req.url, `http://127.0.0.1:${opts.listenPort}`);
    if (u.pathname !== "/stream") {
      res.statusCode = 404;
      res.end("Not Found");
      return;
    }
    const channel = Number(u.searchParams.get("channel") ?? "0");
    const profile = u.searchParams.get("profile") ?? "sub";
    if (!Number.isFinite(channel) || channel < 0) {
      res.statusCode = 400;
      res.end("Invalid channel");
      return;
    }
    if (profile !== "main" && profile !== "sub" && profile !== "ext") {
      res.statusCode = 400;
      res.end("Invalid profile (must be main, sub, or ext)");
      return;
    }
    const rtspUrl = buildRtspUrl({
      host: opts.host,
      username: opts.username,
      password: opts.password,
      channel,
      stream: profile,
      // Only forward the port when explicitly configured.
      ...opts.rtspPort === void 0 ? {} : { port: opts.rtspPort }
    });
    res.writeHead(200, {
      "Content-Type": "video/mp2t",
      "Cache-Control": "no-cache",
      Connection: "close"
    });
    const rtspTransport = opts.rtspTransport ?? "tcp";
    const ff = spawn2("ffmpeg", [
      "-hide_banner",
      "-loglevel",
      "error",
      "-rtsp_transport",
      rtspTransport,
      "-i",
      rtspUrl,
      "-an",
      "-c:v",
      "copy",
      "-f",
      "mpegts",
      "pipe:1"
    ]);
    ff.stdout.pipe(res);
    // Drain stderr so the child never blocks on a full pipe buffer.
    ff.stderr.on("data", () => {
    });
    // Kill ffmpeg as soon as either side of the HTTP exchange goes away.
    const cleanup = () => {
      ff.kill("SIGKILL");
    };
    req.on("close", cleanup);
    res.on("close", cleanup);
    ff.on("close", () => {
      res.end();
    });
  });
}
|
|
896
|
+
|
|
897
|
+
// src/rfc/rfc4571.ts
|
|
898
|
+
import crypto from "crypto";
|
|
899
|
+
/**
 * Build an SDP description for an RFC 4571 (RTP-over-TCP) session.
 *
 * Always emits one video media section (clock rate 90000) and, when
 * `audio` is provided, one audio section for AAC (RFC 3640 AAC-hbr) or
 * Opus. H.264/H.265 parameter sets are advertised via a=fmtp when given.
 * Every line is terminated with CRLF as SDP requires.
 */
function buildRfc4571Sdp(video, audio) {
  const lines = [
    "v=0",
    "o=- 0 0 IN IP4 0.0.0.0",
    "s=No Name",
    "t=0 0",
    `m=video 0 RTP/AVP ${video.payloadType}`,
    "c=IN IP4 0.0.0.0",
    `a=rtpmap:${video.payloadType} ${video.videoType}/90000`
  ];
  if (video.videoType === "H264" && video.h264) {
    const spsB64 = video.h264.sps.toString("base64");
    const ppsB64 = video.h264.pps.toString("base64");
    const pli = video.h264.profileLevelId ? `profile-level-id=${video.h264.profileLevelId};` : "";
    lines.push(
      `a=fmtp:${video.payloadType} packetization-mode=1;${pli}sprop-parameter-sets=${spsB64},${ppsB64}`
    );
  }
  if (video.videoType === "H265" && video.h265) {
    const vpsB64 = video.h265.vps.toString("base64");
    const spsB64 = video.h265.sps.toString("base64");
    const ppsB64 = video.h265.pps.toString("base64");
    lines.push(
      `a=fmtp:${video.payloadType} sprop-vps=${vpsB64};sprop-sps=${spsB64};sprop-pps=${ppsB64}`
    );
  }
  if (audio?.codec === "aac") {
    lines.push(
      `m=audio 0 RTP/AVP ${audio.payloadType}`,
      "c=IN IP4 0.0.0.0",
      "b=AS:128",
      `a=rtpmap:${audio.payloadType} MPEG4-GENERIC/${audio.sampleRate}/${audio.channels}`,
      `a=fmtp:${audio.payloadType} profile-level-id=1; mode=AAC-hbr; sizelength=13; indexlength=3; indexdeltalength=3; config=${audio.configHex}`
    );
  }
  if (audio?.codec === "opus") {
    lines.push(
      `m=audio 0 RTP/AVP ${audio.payloadType}`,
      "c=IN IP4 0.0.0.0",
      "b=AS:128",
      `a=rtpmap:${audio.payloadType} opus/${audio.sampleRate}/${audio.channels}`
    );
  }
  // CRLF between lines and after the final line.
  return lines.join("\r\n") + "\r\n";
}
|
|
943
|
+
/**
 * Split an Annex B elementary stream into its NAL units.
 * Start codes (00 00 01 or 00 00 00 01) delimit units; any bytes before
 * the first start code are discarded, and empty units are dropped.
 */
function splitAnnexBToNals(annexB) {
  const result = [];
  const total = annexB.length;
  // Length (3 or 4) of a start code at position p, or 0 when absent.
  const startCodeLen = (p) => {
    if (p + 3 > total || annexB[p] !== 0 || annexB[p + 1] !== 0) return 0;
    if (annexB[p + 2] === 1) return 3;
    return p + 4 <= total && annexB[p + 2] === 0 && annexB[p + 3] === 1 ? 4 : 0;
  };
  // Skip any garbage preceding the first start code.
  let pos = 0;
  while (pos < total && startCodeLen(pos) === 0) pos++;
  while (pos < total) {
    const sc = startCodeLen(pos);
    if (sc === 0) {
      pos++;
      continue;
    }
    const begin = pos + sc;
    let end = begin;
    while (end < total && startCodeLen(end) === 0) end++;
    if (begin < end) {
      const unit = annexB.subarray(begin, end);
      if (unit.length > 0) result.push(unit);
    }
    pos = end;
  }
  return result;
}
|
|
981
|
+
/**
 * Scan the first `maxScan` bytes of `data` for an Annex B start code
 * (00 00 01 or 00 00 00 01).
 *
 * @param data   byte buffer (may be null/undefined/empty)
 * @param maxScan maximum number of leading bytes to inspect (default 64)
 * @returns offset of the first start code, or -1 when none is found
 *
 * Fix: the original loop bound (i + 3 < len) required a 4th byte even for
 * the 3-byte start code, so a code ending exactly at the scan-window edge
 * (e.g. a bare [0,0,1]) was missed. Only the 4-byte pattern needs i + 3.
 */
function findAnnexBStartCodeOffset(data, maxScan = 64) {
  if (!data?.length) return -1;
  const len = Math.min(data.length, Math.max(0, maxScan));
  for (let i = 0; i + 2 < len; i++) {
    if (data[i] !== 0 || data[i + 1] !== 0) continue;
    if (data[i + 2] === 1) return i;
    if (i + 3 < len && data[i + 2] === 0 && data[i + 3] === 1) return i;
  }
  return -1;
}
|
|
991
|
+
/**
 * Drop a single leading Annex B start code (00 00 01 or 00 00 00 01)
 * from `data`, returning a subarray view; returns `data` unchanged when
 * no start code leads it.
 *
 * Fix: the original required >= 4 bytes even for the 3-byte start code,
 * so a bare trailing "00 00 01" (3 bytes total) was not stripped. The
 * 3-byte pattern only needs 3 bytes; the 4-byte check still requires 4.
 */
function stripLeadingAnnexBStartCode(data) {
  if (data.length >= 3 && data[0] === 0 && data[1] === 0) {
    if (data[2] === 1) return data.subarray(3);
    if (data.length >= 4 && data[2] === 0 && data[3] === 1) return data.subarray(4);
  }
  return data;
}
|
|
998
|
+
/**
 * Split a length-prefixed (AVCC/HVCC-style) buffer into NAL units.
 * Each unit is preceded by a `lengthSize`-byte size field in the given
 * endianness. Returns [] whenever the layout is implausible: a zero
 * length, a length overrunning the buffer, or trailing leftover bytes.
 */
function splitLengthPrefixedToNals(data, lengthSize, endian) {
  const big = endian === "be";
  // Decode one size field at `at` honouring width and endianness.
  const decodeLength = (buf, at) => {
    switch (lengthSize) {
      case 4:
        return big ? buf.readUInt32BE(at) : buf.readUInt32LE(at);
      case 3: {
        const a = buf[at];
        const b = buf[at + 1];
        const c = buf[at + 2];
        return big ? (a << 16 | b << 8 | c) >>> 0 : (c << 16 | b << 8 | a) >>> 0;
      }
      case 2:
        return big ? buf.readUInt16BE(at) : buf.readUInt16LE(at);
      default:
        return buf.readUInt8(at);
    }
  };
  const result = [];
  let cursor = 0;
  while (cursor + lengthSize <= data.length) {
    const size = decodeLength(data, cursor);
    cursor += lengthSize;
    // Zero or overrunning sizes disqualify this layout entirely.
    if (!size) return [];
    if (cursor + size > data.length) return [];
    const unit = data.subarray(cursor, cursor + size);
    cursor += size;
    if (unit.length) result.push(unit);
  }
  // Leftover bytes after the last unit also disqualify the layout.
  return cursor === data.length ? result : [];
}
|
|
1026
|
+
/**
 * Score a candidate NAL-unit split: lower is more plausible.
 * Empty candidate lists are worst (Infinity). Each empty unit or
 * unit with an out-of-range NAL type adds 1; a set forbidden_zero_bit
 * (invalid in both H.264 and H.265 headers) adds a heavy 10.
 */
function scoreNals(videoType, nals) {
  if (nals.length === 0) return Number.POSITIVE_INFINITY;
  let penalty = 0;
  for (const nal of nals) {
    if (nal.length === 0) {
      penalty += 1;
      continue;
    }
    // forbidden_zero_bit must be clear in any valid NAL header.
    if ((nal[0] & 0x80) !== 0) {
      penalty += 10;
      continue;
    }
    if (videoType === "H264") {
      const nalType = nal[0] & 0x1f;
      if (nalType < 1 || nalType > 23) penalty += 1;
    } else if (videoType === "H265") {
      if (nal.length < 2) {
        penalty += 1;
        continue;
      }
      const nalType = (nal[0] >> 1) & 0x3f;
      const known = nalType <= 29 || (nalType >= 32 && nalType <= 40) || (nalType >= 48 && nalType <= 50);
      if (!known) penalty += 1;
    }
  }
  return penalty;
}
|
|
1053
|
+
/**
 * Split an access unit whose framing is unknown into NAL units.
 * Tries Annex B (allowing up to 64 junk bytes before the first start
 * code), then every common length-prefixed layout, and finally the
 * whole unit as a single NAL; the candidate with the lowest
 * `scoreNals` penalty wins (ties go to the earliest attempt).
 */
function splitAccessUnitToNalsBestEffort(accessUnit, videoType) {
  if (!accessUnit?.length) return [];
  const attempts = [];
  // Attempt 1: Annex B, skipping a small junk prefix if present.
  const scAt = findAnnexBStartCodeOffset(accessUnit, 64);
  if (scAt >= 0) {
    const annexB = scAt === 0 ? accessUnit : accessUnit.subarray(scAt);
    const split = splitAnnexBToNals(annexB);
    if (split.length) attempts.push(split);
  }
  // Attempt 2: length-prefixed layouts, widest/most common first.
  for (const [size, endian] of [[4, "be"], [4, "le"], [3, "be"], [3, "le"], [2, "be"], [2, "le"], [1, "be"]]) {
    attempts.push(splitLengthPrefixedToNals(accessUnit, size, endian));
  }
  // Attempt 3: treat the whole unit as one bare NAL.
  const bare = stripLeadingAnnexBStartCode(accessUnit);
  if (bare.length) attempts.push([bare]);
  // Keep the candidate with the fewest implausible NAL headers.
  let winner = [];
  let winnerScore = Number.POSITIVE_INFINITY;
  for (const attempt of attempts) {
    if (!attempt.length) continue;
    const score = scoreNals(videoType, attempt);
    if (score < winnerScore) {
      winnerScore = score;
      winner = attempt;
      if (winnerScore === 0) break;
    }
  }
  return winner;
}
|
|
1084
|
+
/**
 * Pull SPS (type 7) and PPS (type 8) NAL units out of an H.264 access
 * unit. When the SPS is long enough, its bytes 1..3 (profile_idc,
 * constraint flags, level_idc) are also returned as a hex
 * profile-level-id string. Later occurrences overwrite earlier ones.
 */
function extractH264ParamSetsFromAccessUnit(annexB) {
  let sps;
  let pps;
  let profileLevelId;
  for (const nal of splitAccessUnitToNalsBestEffort(annexB, "H264")) {
    if (!nal.length) continue;
    const nalType = nal[0] & 0x1f;
    if (nalType === 7) {
      sps = nal;
      if (nal.length >= 4) {
        profileLevelId = Buffer.from([nal[1], nal[2], nal[3]]).toString("hex");
      }
    } else if (nalType === 8) {
      pps = nal;
    }
  }
  const result = {};
  if (sps) result.sps = sps;
  if (pps) result.pps = pps;
  if (profileLevelId) result.profileLevelId = profileLevelId;
  return result;
}
|
|
1108
|
+
/**
 * Pull VPS (type 32), SPS (type 33) and PPS (type 34) NAL units out of
 * an H.265 access unit. Units shorter than the 2-byte NAL header are
 * ignored; later occurrences overwrite earlier ones.
 */
function extractH265ParamSetsFromAccessUnit(annexB) {
  let vps;
  let sps;
  let pps;
  for (const nal of splitAccessUnitToNalsBestEffort(annexB, "H265")) {
    if (nal.length < 2) continue;
    switch ((nal[0] >> 1) & 0x3f) {
      case 32:
        vps = nal;
        break;
      case 33:
        sps = nal;
        break;
      case 34:
        pps = nal;
        break;
      default:
        break;
    }
  }
  const result = {};
  if (vps) result.vps = vps;
  if (sps) result.sps = sps;
  if (pps) result.pps = pps;
  return result;
}
|
|
1126
|
+
// AAC sampling-rate table indexed by the 4-bit sampling_frequency_index
// carried in ADTS headers and in the AudioSpecificConfig (ISO/IEC 14496-3).
// Only indices 0..12 are defined; the remaining indices are reserved /
// escape values and are intentionally absent here.
var aacSampleRates = [
  96e3,
  88200,
  64e3,
  48e3,
  44100,
  32e3,
  24e3,
  22050,
  16e3,
  12e3,
  11025,
  8e3,
  7350
];
|
|
1141
|
+
/**
 * Encode a 2-byte AAC AudioSpecificConfig as a 4-digit hex string.
 * Layout: 5-bit audio object type | 4-bit sampling frequency index |
 * 4-bit channel configuration | 3 zero bits.
 * Returns undefined when any field does not fit its bit range or the
 * sample rate is not in the standard table.
 */
function buildAacAudioSpecificConfigHex(params) {
  const { sampleRate, channels } = params;
  const aot = params.audioObjectType ?? 2;
  const freqIndex = aacSampleRates.indexOf(sampleRate);
  if (freqIndex < 0) return;
  if (!Number.isFinite(channels) || channels <= 0 || channels > 15) return;
  if (!Number.isFinite(aot) || aot <= 0 || aot > 31) return;
  const config = (aot & 31) << 11 | (freqIndex & 15) << 7 | (channels & 15) << 3;
  return config.toString(16).padStart(4, "0");
}
|
|
1152
|
+
/**
 * Parse the fixed part of an ADTS frame header.
 * Validates the 12-bit 0xFFF syncword, then extracts the audio object
 * type (profile + 1), sampling frequency index, and channel
 * configuration, and derives the matching 2-byte AudioSpecificConfig.
 * Returns null for anything that does not look like a valid header.
 */
function parseAdtsHeader(adtsFrame) {
  // Minimum ADTS header is 7 bytes (9 when a CRC is present).
  if (adtsFrame.length < 7) return null;
  const syncOk = adtsFrame[0] === 0xff && (adtsFrame[1] & 0xf0) === 0xf0;
  if (!syncOk) return null;
  const protectionAbsent = (adtsFrame[1] & 1) === 1;
  const audioObjectType = ((adtsFrame[2] & 0xc0) >> 6) + 1;
  const samplingFreqIndex = (adtsFrame[2] & 0x3c) >> 2;
  const sampleRate = aacSampleRates[samplingFreqIndex] ?? 0;
  const channels = (adtsFrame[2] & 1) << 2 | (adtsFrame[3] & 0xc0) >> 6;
  if (!sampleRate || !channels) return null;
  const headerLength = protectionAbsent ? 7 : 9;
  if (adtsFrame.length < headerLength) return null;
  // Same bit layout as buildAacAudioSpecificConfigHex produces.
  const asc = (audioObjectType & 31) << 11 | (samplingFreqIndex & 15) << 7 | (channels & 15) << 3;
  const configHex = asc.toString(16).padStart(4, "0");
  return { headerLength, sampleRate, channels, configHex };
}
|
|
1168
|
+
/**
 * Minimal RTP (RFC 3550) packet writer for a single stream.
 * Sequence number and timestamp start at random values and the SSRC is
 * random, as the RTP spec recommends; all arithmetic stays in u32/u16.
 */
var RtpWriter = class {
  constructor(payloadType) {
    this.payloadType = payloadType;
    // Randomise the initial sequence number and timestamp.
    this.seq = crypto.randomBytes(2).readUInt16BE(0);
    this.timestamp = crypto.randomBytes(4).readUInt32BE(0);
  }
  seq = 0;
  timestamp = 0;
  ssrc = crypto.randomBytes(4).readUInt32BE(0);
  // Force the timestamp to an absolute u32 value.
  setTimestamp(ts) {
    this.timestamp = ts >>> 0;
  }
  getTimestamp() {
    return this.timestamp >>> 0;
  }
  // Advance the timestamp by `delta` clock ticks, wrapping at 2^32.
  advanceTimestamp(delta) {
    this.timestamp = this.timestamp + (delta >>> 0) >>> 0;
  }
  /**
   * Frame `payload` with a 12-byte RTP header (V=2, no padding/extension/
   * CSRC) and bump the sequence number. `marker` sets the M bit.
   */
  writePacket(payload, marker) {
    const pkt = Buffer.alloc(12 + payload.length);
    pkt[0] = 0x80; // V=2, P=0, X=0, CC=0
    pkt[1] = (marker ? 0x80 : 0) | this.payloadType & 0x7f;
    pkt.writeUInt16BE(this.seq & 0xffff, 2);
    pkt.writeUInt32BE(this.timestamp >>> 0, 4);
    pkt.writeUInt32BE(this.ssrc >>> 0, 8);
    payload.copy(pkt, 12);
    this.seq = this.seq + 1 & 0xffff;
    return pkt;
  }
};
|
|
1197
|
+
/**
 * Packetize one H.264 NAL unit into RTP packets (RFC 6184).
 * NALs that fit within opts.maxRtpPayload go out as a single NAL unit
 * packet; larger ones are fragmented as FU-A (type 28) with 2 bytes of
 * FU overhead per fragment. The RTP marker bit is set only on the final
 * packet when both markerOnLast and isLastNal are true.
 */
function packetizeH264(nal, rtp, opts, markerOnLast, isLastNal) {
  const limit = opts.maxRtpPayload;
  if (nal.length <= limit) {
    return [rtp.writePacket(nal, markerOnLast && isLastNal)];
  }
  const first = nal[0];
  // FU indicator keeps the original F bit and NRI, substituting type 28.
  const fuIndicator = first & 0x80 | first & 0x60 | 28;
  const nalType = first & 0x1f;
  const body = nal.subarray(1);
  const packets = [];
  for (let at = 0; at < body.length; ) {
    const take = Math.min(body.length - at, limit - 2);
    const isFirst = at === 0;
    const isFinal = at + take >= body.length;
    const fuHeader = (isFirst ? 0x80 : 0) | (isFinal ? 0x40 : 0) | nalType & 0x1f;
    const payload = Buffer.concat([
      Buffer.from([fuIndicator, fuHeader]),
      body.subarray(at, at + take)
    ]);
    packets.push(rtp.writePacket(payload, markerOnLast && isLastNal && isFinal));
    at += take;
  }
  return packets;
}
|
|
1226
|
+
/**
 * Packetize one H.265 NAL unit into RTP packets (RFC 7798).
 * NALs that fit within opts.maxRtpPayload go out as a single NAL unit
 * packet; larger ones are fragmented as FU packets (type 49) with 3
 * bytes of overhead per fragment (2-byte payload header + FU header).
 * The marker bit is set only on the final packet when both markerOnLast
 * and isLastNal are true. Oversized NALs shorter than the 3 bytes
 * needed to fragment produce no packets.
 */
function packetizeH265(nal, rtp, opts, markerOnLast, isLastNal) {
  const limit = opts.maxRtpPayload;
  if (nal.length <= limit) {
    return [rtp.writePacket(nal, markerOnLast && isLastNal)];
  }
  if (nal.length < 3) return [];
  const h0 = nal[0];
  const h1 = nal[1];
  const nalType = h0 >> 1 & 0x3f;
  // FU payload header keeps the F bit and layer-id MSB, type becomes 49.
  const payloadHeader0 = h0 & 0x81 | 49 << 1;
  const body = nal.subarray(2);
  const packets = [];
  for (let at = 0; at < body.length; ) {
    const take = Math.min(body.length - at, limit - 3);
    const isFirst = at === 0;
    const isFinal = at + take >= body.length;
    const fuHeader = (isFirst ? 0x80 : 0) | (isFinal ? 0x40 : 0) | nalType & 0x3f;
    const payload = Buffer.concat([
      Buffer.from([payloadHeader0, h1, fuHeader]),
      body.subarray(at, at + take)
    ]);
    packets.push(rtp.writePacket(payload, markerOnLast && isLastNal && isFinal));
    at += take;
  }
  return packets;
}
|
|
1256
|
+
/**
 * Packetize a single ADTS-framed AAC frame as one RTP packet using the
 * RFC 3640 AAC-hbr layout: a 16-bit AU-headers-length field (always 16)
 * followed by one AU header carrying a 13-bit AU size and 3-bit index 0,
 * then the raw AAC data with the ADTS header stripped. Also reports the
 * stream parameters parsed from the ADTS header. Invalid or empty
 * frames yield no packets.
 */
function packetizeAacAdtsFrame(adts, rtp) {
  const header = parseAdtsHeader(adts);
  if (!header) return { packets: [] };
  const au = adts.subarray(header.headerLength);
  if (!au.length) return { packets: [] };
  const size = au.length & 0x1fff;
  const payload = Buffer.concat([
    Buffer.from([0, 16]),
    Buffer.from([size >> 5 & 0xff, (size & 0x1f) << 3]),
    au
  ]);
  return {
    packets: [rtp.writePacket(payload, true)],
    config: {
      sampleRate: header.sampleRate,
      channels: header.channels,
      configHex: header.configHex
    }
  };
}
|
|
1276
|
+
/**
 * Packetize one raw (headerless) AAC frame as a single RTP packet using
 * the RFC 3640 AAC-hbr layout: 16-bit AU-headers-length (always 16),
 * one AU header with a 13-bit size and 3-bit index 0, then the frame.
 * Empty or missing input yields no packets.
 */
function packetizeAacRawFrame(raw, rtp) {
  if (!raw?.length) return { packets: [] };
  const size = raw.length & 0x1fff;
  const payload = Buffer.concat([
    Buffer.from([0, 16]),
    Buffer.from([size >> 5 & 0xff, (size & 0x1f) << 3]),
    raw
  ]);
  return {
    packets: [rtp.writePacket(payload, true)]
  };
}
|
|
1288
|
+
var Rfc4571Muxer = class _Rfc4571Muxer {
|
|
1289
|
+
constructor(logger, videoPayloadType, audioPayloadType, videoFpsFallback = 25, maxRtpPayload = 1200) {
|
|
1290
|
+
this.logger = logger;
|
|
1291
|
+
this.videoPayloadType = videoPayloadType;
|
|
1292
|
+
this.maxRtpPayload = maxRtpPayload;
|
|
1293
|
+
this.videoRtp = new RtpWriter(videoPayloadType);
|
|
1294
|
+
if (audioPayloadType !== void 0) {
|
|
1295
|
+
this.audioRtp = new RtpWriter(audioPayloadType);
|
|
1296
|
+
}
|
|
1297
|
+
this.fallbackVideoIncrement = Math.max(
|
|
1298
|
+
1,
|
|
1299
|
+
Math.round(this.videoClockRate / Math.max(1, videoFpsFallback))
|
|
1300
|
+
);
|
|
1301
|
+
this.fallbackVideoDeltaUs = Math.max(
|
|
1302
|
+
1,
|
|
1303
|
+
Math.round(
|
|
1304
|
+
this.fallbackVideoIncrement * 1e6 / this.videoClockRate
|
|
1305
|
+
)
|
|
1306
|
+
);
|
|
1307
|
+
}
|
|
1308
|
+
clients = /* @__PURE__ */ new Set();
|
|
1309
|
+
closed = false;
|
|
1310
|
+
videoRtp;
|
|
1311
|
+
audioRtp;
|
|
1312
|
+
// Timestamp tracking
|
|
1313
|
+
// bcmedia microseconds is a u32 clock that may wrap (2^32) and may reset on stream restarts.
|
|
1314
|
+
// Additionally, it may jump forward unexpectedly. Since we do not buffer, large forward jumps
|
|
1315
|
+
// cause downstream schedulers to detect discontinuities.
|
|
1316
|
+
//
|
|
1317
|
+
// Strategy:
|
|
1318
|
+
// - unwrap u32 wraps when likely
|
|
1319
|
+
// - treat backwards jumps as restarts
|
|
1320
|
+
// - use microseconds deltas only when "reasonable"; otherwise advance using a smoothed/fallback delta
|
|
1321
|
+
videoLastUsRaw;
|
|
1322
|
+
videoUsWrapOffset = 0;
|
|
1323
|
+
videoLastAbsUs;
|
|
1324
|
+
videoAvgDeltaUs;
|
|
1325
|
+
videoClockRate = 9e4;
|
|
1326
|
+
fallbackVideoIncrement;
|
|
1327
|
+
fallbackVideoDeltaUs;
|
|
1328
|
+
maxTrustedDeltaUs = 5e5;
|
|
1329
|
+
// 0.5s
|
|
1330
|
+
emaAlpha = 0.1;
|
|
1331
|
+
// Throttled logging to avoid spamming in hot paths.
|
|
1332
|
+
videoTimingLogState = {
|
|
1333
|
+
lastUntrustedDeltaLogMs: 0,
|
|
1334
|
+
lastWrapLogMs: 0,
|
|
1335
|
+
lastResetLogMs: 0,
|
|
1336
|
+
untrustedDeltaCount: 0,
|
|
1337
|
+
wrapCount: 0,
|
|
1338
|
+
resetCount: 0
|
|
1339
|
+
};
|
|
1340
|
+
cachedH264ParamSetsAnnexB;
|
|
1341
|
+
cachedH265ParamSetsAnnexB;
|
|
1342
|
+
addClient(socket) {
|
|
1343
|
+
if (this.closed) {
|
|
1344
|
+
socket.destroy();
|
|
1345
|
+
return;
|
|
1346
|
+
}
|
|
1347
|
+
const client = {
|
|
1348
|
+
socket,
|
|
1349
|
+
needsKeyframe: true,
|
|
1350
|
+
sentVideoConfig: false
|
|
1351
|
+
};
|
|
1352
|
+
this.clients.add(client);
|
|
1353
|
+
const cleanup = () => {
|
|
1354
|
+
this.clients.delete(client);
|
|
1355
|
+
try {
|
|
1356
|
+
socket.destroy();
|
|
1357
|
+
} catch {
|
|
1358
|
+
}
|
|
1359
|
+
};
|
|
1360
|
+
socket.on("error", cleanup);
|
|
1361
|
+
socket.on("close", cleanup);
|
|
1362
|
+
}
|
|
1363
|
+
static ANNEXB_START_CODE = Buffer.from([
|
|
1364
|
+
0,
|
|
1365
|
+
0,
|
|
1366
|
+
0,
|
|
1367
|
+
1
|
|
1368
|
+
]);
|
|
1369
|
+
static joinAnnexBNals(...nals) {
|
|
1370
|
+
const present = nals.filter((n) => !!n && n.length > 0);
|
|
1371
|
+
if (!present.length) return;
|
|
1372
|
+
const parts = [];
|
|
1373
|
+
for (const nal of present) {
|
|
1374
|
+
parts.push(_Rfc4571Muxer.ANNEXB_START_CODE, nal);
|
|
1375
|
+
}
|
|
1376
|
+
return Buffer.concat(parts);
|
|
1377
|
+
}
|
|
1378
|
+
updateVideoParamSetsFromAccessUnit(videoType, accessUnit) {
|
|
1379
|
+
if (!accessUnit?.length) return;
|
|
1380
|
+
if (videoType === "H264") {
|
|
1381
|
+
const { sps: sps2, pps: pps2 } = extractH264ParamSetsFromAccessUnit(accessUnit);
|
|
1382
|
+
if (sps2 && pps2) {
|
|
1383
|
+
this.cachedH264ParamSetsAnnexB = _Rfc4571Muxer.joinAnnexBNals(sps2, pps2);
|
|
1384
|
+
}
|
|
1385
|
+
return;
|
|
1386
|
+
}
|
|
1387
|
+
const { vps, sps, pps } = extractH265ParamSetsFromAccessUnit(accessUnit);
|
|
1388
|
+
if (vps && sps && pps) {
|
|
1389
|
+
this.cachedH265ParamSetsAnnexB = _Rfc4571Muxer.joinAnnexBNals(
|
|
1390
|
+
vps,
|
|
1391
|
+
sps,
|
|
1392
|
+
pps
|
|
1393
|
+
);
|
|
1394
|
+
}
|
|
1395
|
+
}
|
|
1396
|
+
sendCachedVideoConfigToClient(videoType, client) {
|
|
1397
|
+
if (this.closed) return;
|
|
1398
|
+
if (client.sentVideoConfig) return;
|
|
1399
|
+
const paramSetsAnnexB = videoType === "H265" ? this.cachedH265ParamSetsAnnexB : this.cachedH264ParamSetsAnnexB;
|
|
1400
|
+
if (!paramSetsAnnexB?.length) return;
|
|
1401
|
+
const nals = splitAccessUnitToNalsBestEffort(paramSetsAnnexB, videoType);
|
|
1402
|
+
if (!nals.length) return;
|
|
1403
|
+
const opts = { maxRtpPayload: this.maxRtpPayload };
|
|
1404
|
+
for (let i = 0; i < nals.length; i++) {
|
|
1405
|
+
const nal = nals[i];
|
|
1406
|
+
const isLastNal = i === nals.length - 1;
|
|
1407
|
+
const packets = videoType === "H265" ? packetizeH265(nal, this.videoRtp, opts, false, isLastNal) : packetizeH264(nal, this.videoRtp, opts, false, isLastNal);
|
|
1408
|
+
for (const pkt of packets) {
|
|
1409
|
+
this.writeRtpPacketToClient(client, pkt);
|
|
1410
|
+
}
|
|
1411
|
+
}
|
|
1412
|
+
client.sentVideoConfig = true;
|
|
1413
|
+
}
|
|
1414
|
+
close() {
|
|
1415
|
+
if (this.closed) return;
|
|
1416
|
+
this.closed = true;
|
|
1417
|
+
for (const c of Array.from(this.clients)) {
|
|
1418
|
+
try {
|
|
1419
|
+
c.socket.destroy();
|
|
1420
|
+
} catch {
|
|
1421
|
+
}
|
|
1422
|
+
}
|
|
1423
|
+
this.clients.clear();
|
|
1424
|
+
}
|
|
1425
|
+
writeRtpPacketToClient(client, pkt) {
|
|
1426
|
+
if (client.socket.destroyed || !client.socket.writable) return;
|
|
1427
|
+
const header = Buffer.alloc(2);
|
|
1428
|
+
header.writeUInt16BE(pkt.length & 65535, 0);
|
|
1429
|
+
const framed = Buffer.concat([header, pkt]);
|
|
1430
|
+
try {
|
|
1431
|
+
client.socket.write(framed);
|
|
1432
|
+
} catch {
|
|
1433
|
+
try {
|
|
1434
|
+
client.socket.destroy();
|
|
1435
|
+
} catch {
|
|
1436
|
+
}
|
|
1437
|
+
}
|
|
1438
|
+
}
|
|
1439
|
+
writeRtpPacketToClients(pkt, predicate) {
|
|
1440
|
+
for (const c of this.clients) {
|
|
1441
|
+
if (!predicate(c)) continue;
|
|
1442
|
+
this.writeRtpPacketToClient(c, pkt);
|
|
1443
|
+
}
|
|
1444
|
+
}
|
|
1445
|
+
resetVideoTimestampMapping() {
|
|
1446
|
+
this.videoLastUsRaw = void 0;
|
|
1447
|
+
this.videoUsWrapOffset = 0;
|
|
1448
|
+
this.videoLastAbsUs = void 0;
|
|
1449
|
+
this.videoAvgDeltaUs = void 0;
|
|
1450
|
+
}
|
|
1451
|
+
logVideoTiming(kind, message) {
|
|
1452
|
+
const now = Date.now();
|
|
1453
|
+
const intervalMs = 5e3;
|
|
1454
|
+
const state = this.videoTimingLogState;
|
|
1455
|
+
if (kind === "untrusted-delta") {
|
|
1456
|
+
state.untrustedDeltaCount++;
|
|
1457
|
+
if (now - state.lastUntrustedDeltaLogMs < intervalMs) return;
|
|
1458
|
+
state.lastUntrustedDeltaLogMs = now;
|
|
1459
|
+
this.logger.warn(
|
|
1460
|
+
`[rfc4571] video timing: ${message} (untrustedDeltaCount=${state.untrustedDeltaCount})`
|
|
1461
|
+
);
|
|
1462
|
+
return;
|
|
1463
|
+
}
|
|
1464
|
+
if (kind === "wrap") {
|
|
1465
|
+
state.wrapCount++;
|
|
1466
|
+
if (now - state.lastWrapLogMs < intervalMs) return;
|
|
1467
|
+
state.lastWrapLogMs = now;
|
|
1468
|
+
this.logger.warn(
|
|
1469
|
+
`[rfc4571] video timing: ${message} (wrapCount=${state.wrapCount})`
|
|
1470
|
+
);
|
|
1471
|
+
return;
|
|
1472
|
+
}
|
|
1473
|
+
state.resetCount++;
|
|
1474
|
+
if (now - state.lastResetLogMs < intervalMs) return;
|
|
1475
|
+
state.lastResetLogMs = now;
|
|
1476
|
+
this.logger.warn(
|
|
1477
|
+
`[rfc4571] video timing: ${message} (resetCount=${state.resetCount})`
|
|
1478
|
+
);
|
|
1479
|
+
}
|
|
1480
|
+
advanceVideoTimestampFallback() {
|
|
1481
|
+
this.videoRtp.advanceTimestamp(this.fallbackVideoIncrement);
|
|
1482
|
+
}
|
|
1483
|
+
setVideoTimestampFromMicroseconds(frameMicroseconds) {
|
|
1484
|
+
if (frameMicroseconds === null || frameMicroseconds === void 0) {
|
|
1485
|
+
this.advanceVideoTimestampFallback();
|
|
1486
|
+
return;
|
|
1487
|
+
}
|
|
1488
|
+
if (!Number.isFinite(frameMicroseconds)) {
|
|
1489
|
+
this.advanceVideoTimestampFallback();
|
|
1490
|
+
return;
|
|
1491
|
+
}
|
|
1492
|
+
const curUsRaw = frameMicroseconds >>> 0;
|
|
1493
|
+
if (this.videoLastUsRaw !== void 0) {
|
|
1494
|
+
const lastUsRaw = this.videoLastUsRaw;
|
|
1495
|
+
if (curUsRaw < lastUsRaw) {
|
|
1496
|
+
const wrapLikely = lastUsRaw > 4026531840 && curUsRaw < 268435455;
|
|
1497
|
+
if (wrapLikely) {
|
|
1498
|
+
this.videoUsWrapOffset += 4294967296;
|
|
1499
|
+
this.logVideoTiming(
|
|
1500
|
+
"wrap",
|
|
1501
|
+
`detected u32 wrap (lastUsRaw=${lastUsRaw} curUsRaw=${curUsRaw} wrapOffset=${this.videoUsWrapOffset})`
|
|
1502
|
+
);
|
|
1503
|
+
} else {
|
|
1504
|
+
this.logVideoTiming(
|
|
1505
|
+
"reset",
|
|
1506
|
+
`detected backwards jump; resetting mapping (lastUsRaw=${lastUsRaw} curUsRaw=${curUsRaw})`
|
|
1507
|
+
);
|
|
1508
|
+
this.resetVideoTimestampMapping();
|
|
1509
|
+
}
|
|
1510
|
+
}
|
|
1511
|
+
}
|
|
1512
|
+
this.videoLastUsRaw = curUsRaw;
|
|
1513
|
+
const absUs = this.videoUsWrapOffset + curUsRaw;
|
|
1514
|
+
if (this.videoLastAbsUs === void 0) {
|
|
1515
|
+
this.videoLastAbsUs = absUs;
|
|
1516
|
+
if (this.videoAvgDeltaUs === void 0)
|
|
1517
|
+
this.videoAvgDeltaUs = this.fallbackVideoDeltaUs;
|
|
1518
|
+
return;
|
|
1519
|
+
}
|
|
1520
|
+
const deltaUs = absUs - this.videoLastAbsUs;
|
|
1521
|
+
this.videoLastAbsUs = absUs;
|
|
1522
|
+
const trusted = Number.isFinite(deltaUs) && deltaUs > 0 && deltaUs <= this.maxTrustedDeltaUs;
|
|
1523
|
+
let effectiveDeltaUs;
|
|
1524
|
+
if (trusted) {
|
|
1525
|
+
const prevAvg = this.videoAvgDeltaUs ?? deltaUs;
|
|
1526
|
+
this.videoAvgDeltaUs = prevAvg + (deltaUs - prevAvg) * this.emaAlpha;
|
|
1527
|
+
effectiveDeltaUs = deltaUs;
|
|
1528
|
+
} else {
|
|
1529
|
+
this.logVideoTiming(
|
|
1530
|
+
"untrusted-delta",
|
|
1531
|
+
`discarded deltaUs=${deltaUs} (absUs=${absUs} lastAbsUs=${this.videoLastAbsUs} avgDeltaUs=${this.videoAvgDeltaUs ?? "n/a"}); using fallback`
|
|
1532
|
+
);
|
|
1533
|
+
effectiveDeltaUs = this.videoAvgDeltaUs ?? this.fallbackVideoDeltaUs;
|
|
1534
|
+
}
|
|
1535
|
+
const inc = Math.max(
|
|
1536
|
+
1,
|
|
1537
|
+
Math.round(effectiveDeltaUs * this.videoClockRate / 1e6)
|
|
1538
|
+
);
|
|
1539
|
+
this.videoRtp.advanceTimestamp(inc);
|
|
1540
|
+
}
|
|
1541
|
+
  // Packetize one Annex-B video access unit into RTP and fan it out to connected
  // clients. Clients flagged `needsKeyframe` are withheld until a keyframe (caller
  // flag or derived from NAL types) arrives, so decoders never join mid-GOP.
  sendVideoAccessUnit(videoType, accessUnitAnnexB, isKeyframe, microseconds) {
    if (this.closed) return;
    // Cache in-band parameter sets so waiting clients can be re-primed below.
    this.updateVideoParamSetsFromAccessUnit(videoType, accessUnitAnnexB);
    const nals = splitAccessUnitToNalsBestEffort(accessUnitAnnexB, videoType);
    if (!nals.length) return;
    // Derive keyframe-ness from NAL types in case the caller's flag is absent/wrong.
    let derivedKeyframe = false;
    if (videoType === "H264") {
      for (const nal of nals) {
        const t = nal[0] & 31; // H.264 nal_unit_type
        if (t === 5) {
          // Type 5 = IDR slice (coded slice of an IDR picture).
          derivedKeyframe = true;
          break;
        }
      }
    } else {
      // H.265: first header byte carries the 6-bit NAL type; 16..23 is the
      // random-access (IRAP-ish) range this muxer treats as keyframes.
      for (const nal of nals) {
        if (nal.length < 2) continue;
        const t = nal[0] >> 1 & 63;
        if (t >= 16 && t <= 23) {
          derivedKeyframe = true;
          break;
        }
      }
    }
    const effectiveKeyframe = isKeyframe || derivedKeyframe;
    // On a keyframe everyone receives; otherwise only already-primed clients.
    const shouldSendTo = (c) => effectiveKeyframe ? true : !c.needsKeyframe;
    let hasAnyTarget = false;
    for (const c of this.clients) {
      if (shouldSendTo(c)) {
        hasAnyTarget = true;
        break;
      }
    }
    if (!hasAnyTarget) {
      // Nobody can take this AU; still push cached SPS/PPS config to waiting
      // clients so they can decode immediately once a keyframe lands.
      for (const c of this.clients) {
        if (!c.needsKeyframe) continue;
        this.sendCachedVideoConfigToClient(videoType, c);
      }
      return;
    }
    // Only advance the RTP clock when we actually emit packets.
    this.setVideoTimestampFromMicroseconds(microseconds);
    for (const c of this.clients) {
      if (!c.needsKeyframe) continue;
      this.sendCachedVideoConfigToClient(videoType, c);
    }
    const opts = { maxRtpPayload: this.maxRtpPayload };
    for (let i = 0; i < nals.length; i++) {
      const nal = nals[i];
      // Marker bit handling: only the last NAL of the AU ends the RTP frame.
      const isLastNal = i === nals.length - 1;
      const packets = videoType === "H265" ? packetizeH265(nal, this.videoRtp, opts, true, isLastNal) : packetizeH264(nal, this.videoRtp, opts, true, isLastNal);
      for (const pkt of packets)
        this.writeRtpPacketToClients(pkt, shouldSendTo);
    }
    if (effectiveKeyframe) {
      // Everyone has now seen a sync point; unblock all waiting clients.
      for (const c of this.clients) c.needsKeyframe = false;
    }
  }
|
|
1598
|
+
sendAudioAdtsFrame(adts) {
|
|
1599
|
+
if (this.closed) return {};
|
|
1600
|
+
if (!this.audioRtp) return {};
|
|
1601
|
+
const { packets, config } = packetizeAacAdtsFrame(adts, this.audioRtp);
|
|
1602
|
+
for (const pkt of packets)
|
|
1603
|
+
this.writeRtpPacketToClients(pkt, (c) => !c.needsKeyframe);
|
|
1604
|
+
if (packets.length) this.audioRtp.advanceTimestamp(1024);
|
|
1605
|
+
return config ? { parsed: config } : {};
|
|
1606
|
+
}
|
|
1607
|
+
sendAudioAacRawFrame(raw) {
|
|
1608
|
+
if (this.closed) return;
|
|
1609
|
+
if (!this.audioRtp) return;
|
|
1610
|
+
const { packets } = packetizeAacRawFrame(raw, this.audioRtp);
|
|
1611
|
+
for (const pkt of packets)
|
|
1612
|
+
this.writeRtpPacketToClients(pkt, (c) => !c.needsKeyframe);
|
|
1613
|
+
if (packets.length) this.audioRtp.advanceTimestamp(1024);
|
|
1614
|
+
}
|
|
1615
|
+
sendAudioRtpPacket(rtpPacket) {
|
|
1616
|
+
if (this.closed) return;
|
|
1617
|
+
if (!rtpPacket || rtpPacket.length < 12) return;
|
|
1618
|
+
const version = rtpPacket[0] >> 6 & 3;
|
|
1619
|
+
if (version !== 2) return;
|
|
1620
|
+
this.writeRtpPacketToClients(rtpPacket, (c) => !c.needsKeyframe);
|
|
1621
|
+
}
|
|
1622
|
+
static parseRtpPayload(packet) {
|
|
1623
|
+
if (!packet || packet.length < 12) return;
|
|
1624
|
+
const version = packet[0] >> 6 & 3;
|
|
1625
|
+
if (version !== 2) return;
|
|
1626
|
+
const padding = (packet[0] & 32) !== 0;
|
|
1627
|
+
const extension = (packet[0] & 16) !== 0;
|
|
1628
|
+
const csrcCount = packet[0] & 15;
|
|
1629
|
+
let offset = 12 + csrcCount * 4;
|
|
1630
|
+
if (offset > packet.length) return;
|
|
1631
|
+
if (extension) {
|
|
1632
|
+
if (offset + 4 > packet.length) return;
|
|
1633
|
+
const extLenWords = packet.readUInt16BE(offset + 2);
|
|
1634
|
+
offset += 4 + extLenWords * 4;
|
|
1635
|
+
if (offset > packet.length) return;
|
|
1636
|
+
}
|
|
1637
|
+
let end = packet.length;
|
|
1638
|
+
if (padding) {
|
|
1639
|
+
const padLen = packet[packet.length - 1];
|
|
1640
|
+
if (padLen <= 0 || padLen > packet.length) return;
|
|
1641
|
+
end = packet.length - padLen;
|
|
1642
|
+
if (end < offset) return;
|
|
1643
|
+
}
|
|
1644
|
+
if (end <= offset) return;
|
|
1645
|
+
return packet.subarray(offset, end);
|
|
1646
|
+
}
|
|
1647
|
+
static isH264KeyframeLikeRtpPacket(packet) {
|
|
1648
|
+
const payload = _Rfc4571Muxer.parseRtpPayload(packet);
|
|
1649
|
+
if (!payload || payload.length < 1) return false;
|
|
1650
|
+
const nalType = payload[0] & 31;
|
|
1651
|
+
if (nalType >= 1 && nalType <= 23) {
|
|
1652
|
+
return nalType === 5 || nalType === 7 || nalType === 8;
|
|
1653
|
+
}
|
|
1654
|
+
if (nalType === 24) {
|
|
1655
|
+
let offset = 1;
|
|
1656
|
+
while (offset + 2 <= payload.length) {
|
|
1657
|
+
const size = payload.readUInt16BE(offset);
|
|
1658
|
+
offset += 2;
|
|
1659
|
+
if (!size || offset + size > payload.length) break;
|
|
1660
|
+
const t = payload[offset] & 31;
|
|
1661
|
+
if (t === 5 || t === 7 || t === 8) return true;
|
|
1662
|
+
offset += size;
|
|
1663
|
+
}
|
|
1664
|
+
return false;
|
|
1665
|
+
}
|
|
1666
|
+
if (nalType === 28 && payload.length >= 2) {
|
|
1667
|
+
const fuHeader = payload[1];
|
|
1668
|
+
const start = (fuHeader & 128) !== 0;
|
|
1669
|
+
const origType = fuHeader & 31;
|
|
1670
|
+
if (!start) return false;
|
|
1671
|
+
return origType === 5 || origType === 7 || origType === 8;
|
|
1672
|
+
}
|
|
1673
|
+
return false;
|
|
1674
|
+
}
|
|
1675
|
+
static isH265KeyframeLikeRtpPacket(packet) {
|
|
1676
|
+
const payload = _Rfc4571Muxer.parseRtpPayload(packet);
|
|
1677
|
+
if (!payload || payload.length < 2) return false;
|
|
1678
|
+
const nalType = payload[0] >> 1 & 63;
|
|
1679
|
+
const isKeyNalType = (t) => {
|
|
1680
|
+
if (t >= 16 && t <= 21) return true;
|
|
1681
|
+
if (t === 32 || t === 33 || t === 34) return true;
|
|
1682
|
+
return false;
|
|
1683
|
+
};
|
|
1684
|
+
if (nalType === 49 && payload.length >= 3) {
|
|
1685
|
+
const fuHeader = payload[2];
|
|
1686
|
+
const start = (fuHeader & 128) !== 0;
|
|
1687
|
+
const origType = fuHeader & 63;
|
|
1688
|
+
if (!start) return false;
|
|
1689
|
+
return isKeyNalType(origType);
|
|
1690
|
+
}
|
|
1691
|
+
return isKeyNalType(nalType);
|
|
1692
|
+
}
|
|
1693
|
+
sendVideoRtpPacket(videoType, rtpPacket) {
|
|
1694
|
+
if (this.closed) return;
|
|
1695
|
+
if (!rtpPacket || rtpPacket.length < 12) return;
|
|
1696
|
+
const version = rtpPacket[0] >> 6 & 3;
|
|
1697
|
+
if (version !== 2) return;
|
|
1698
|
+
const isKeyframeLike = videoType === "H265" ? _Rfc4571Muxer.isH265KeyframeLikeRtpPacket(rtpPacket) : _Rfc4571Muxer.isH264KeyframeLikeRtpPacket(rtpPacket);
|
|
1699
|
+
const shouldSendTo = (c) => isKeyframeLike ? true : !c.needsKeyframe;
|
|
1700
|
+
this.writeRtpPacketToClients(rtpPacket, shouldSendTo);
|
|
1701
|
+
if (isKeyframeLike) {
|
|
1702
|
+
for (const c of this.clients) c.needsKeyframe = false;
|
|
1703
|
+
}
|
|
1704
|
+
}
|
|
1705
|
+
};
|
|
1706
|
+
|
|
1707
|
+
// src/rfc/rfc4571-server.ts
|
|
1708
|
+
import netImpl from "net";
|
|
1709
|
+
|
|
1710
|
+
// src/multifocal/compositeStream.ts
|
|
1711
|
+
import { spawn as spawn3 } from "child_process";
|
|
1712
|
+
import { createHash } from "crypto";
|
|
1713
|
+
import { EventEmitter } from "events";
|
|
1714
|
+
// Compute the top-left (x, y) of a picture-in-picture overlay inside the main
// frame for a named anchor `position`, keeping `margin` pixels of gap from the
// chosen edges. Unknown positions fall back to the top-left anchor.
function calculateOverlayPosition(position, mainWidth, mainHeight, pipWidth, pipHeight, margin) {
  const w = Math.floor(pipWidth);
  const h = Math.floor(pipHeight);
  const centerX = Math.floor((mainWidth - w) / 2);
  const centerY = Math.floor((mainHeight - h) / 2);
  const rightX = mainWidth - w - margin;
  const bottomY = mainHeight - h - margin;
  const anchors = {
    "top-left": { x: margin, y: margin },
    "top-right": { x: rightX, y: margin },
    "bottom-left": { x: margin, y: bottomY },
    "bottom-right": { x: rightX, y: bottomY },
    "center": { x: centerX, y: centerY },
    "top-center": { x: centerX, y: margin },
    "bottom-center": { x: centerX, y: bottomY },
    "left-center": { x: margin, y: centerY },
    "right-center": { x: rightX, y: centerY }
  };
  // Object.hasOwn guards against prototype keys (e.g. "toString") matching.
  return Object.hasOwn(anchors, position) ? anchors[position] : { x: margin, y: margin };
}
|
|
1741
|
+
// Composes two camera streams (wide + telephoto) into one picture-in-picture
// H.264 output via ffmpeg. Emits "error" and "close" events.
var CompositeStream = class extends EventEmitter {
  // Caller options merged over PIP defaults in the constructor.
  options;
  // Async generators yielding input frames in native mode (null until start()).
  widerStream = null;
  teleStream = null;
  // Spawned ffmpeg child process, or null when not running.
  ffmpegProcess = null;
  // True between a successful start() and teardown.
  active = false;
  // Logger with optional log/warn/error methods (defaults to console).
  logger;
  // "native" (frames piped into ffmpeg) or "rtsp" (ffmpeg pulls RTSP itself).
  inputMode = "native";
  // ffmpeg stdout is an H.264 Annex-B byte stream; chunk boundaries are arbitrary.
  // We need to reassemble access units (frames) for RFC4571 packetization and keyframe priming.
  ffmpegOutBuf = Buffer.alloc(0);
  // Prime frames read before ffmpeg starts (used to infer codec + seed decoder).
  widerPrimeFrame;
  telePrimeFrame;
  // Optional per-input -itsoffset (seconds) derived from prime-frame timestamps.
  ffmpegInputOffsetSec;
  // Rate limiting for ffmpeg stderr error logging (window start + burst count).
  ffmpegStderrLastLogAtMs = 0;
  ffmpegStderrLogBurst = 0;
  // Cleared on start(); presumably set later by audio wiring — not shown here.
  audioSource;
  // Profiles actually used after optional forceH264 remapping in start().
  effectiveWiderProfile;
  effectiveTeleProfile;
|
|
1761
|
+
pickH264Profile(metadata, preferred) {
|
|
1762
|
+
try {
|
|
1763
|
+
const isH264 = (enc) => (enc ?? "").toLowerCase().includes("264");
|
|
1764
|
+
const streams = Array.isArray(metadata?.streams) ? metadata.streams : [];
|
|
1765
|
+
const byProfile = /* @__PURE__ */ new Map();
|
|
1766
|
+
for (const s of streams) byProfile.set(s.profile, s);
|
|
1767
|
+
if (isH264(byProfile.get(preferred)?.videoEncType)) return preferred;
|
|
1768
|
+
const preferredOrder = ["sub", "main", "ext"];
|
|
1769
|
+
for (const p of preferredOrder) {
|
|
1770
|
+
const si = byProfile.get(p);
|
|
1771
|
+
if (si && isH264(si.videoEncType)) return p;
|
|
1772
|
+
}
|
|
1773
|
+
} catch {
|
|
1774
|
+
}
|
|
1775
|
+
return preferred;
|
|
1776
|
+
}
|
|
1777
|
+
closeGenerator(gen) {
|
|
1778
|
+
if (!gen) return Promise.resolve();
|
|
1779
|
+
const r = gen.return;
|
|
1780
|
+
if (typeof r !== "function") return Promise.resolve();
|
|
1781
|
+
try {
|
|
1782
|
+
return Promise.resolve(r.call(gen)).then(() => void 0).catch(() => void 0);
|
|
1783
|
+
} catch {
|
|
1784
|
+
return Promise.resolve();
|
|
1785
|
+
}
|
|
1786
|
+
}
|
|
1787
|
+
constructor(options) {
|
|
1788
|
+
super();
|
|
1789
|
+
this.options = {
|
|
1790
|
+
pipPosition: "bottom-right",
|
|
1791
|
+
pipSize: 0.25,
|
|
1792
|
+
pipMargin: 10,
|
|
1793
|
+
...options
|
|
1794
|
+
};
|
|
1795
|
+
this.logger = options.logger ?? console;
|
|
1796
|
+
}
|
|
1797
|
+
resolvePipMarginPx(mainWidth, mainHeight) {
|
|
1798
|
+
const raw = this.options.pipMargin;
|
|
1799
|
+
if (raw === void 0 || raw === null) return 10;
|
|
1800
|
+
const v = Number(raw);
|
|
1801
|
+
if (!Number.isFinite(v) || v < 0) return 10;
|
|
1802
|
+
if (v > 1) return Math.floor(v);
|
|
1803
|
+
const base = Math.min(mainWidth, mainHeight);
|
|
1804
|
+
return Math.max(0, Math.floor(base * v));
|
|
1805
|
+
}
|
|
1806
|
+
describeApiClient(api) {
|
|
1807
|
+
const c = api ? api.client : void 0;
|
|
1808
|
+
if (!c) return {};
|
|
1809
|
+
try {
|
|
1810
|
+
const transport = typeof c.getTransport === "function" ? c.getTransport() : void 0;
|
|
1811
|
+
const host = typeof c.getHost === "function" ? c.getHost() : void 0;
|
|
1812
|
+
const sid = typeof c.getSocketSessionId === "function" ? c.getSocketSessionId() : void 0;
|
|
1813
|
+
const uid = typeof c.getUidShort === "function" ? c.getUidShort() : void 0;
|
|
1814
|
+
return { transport, host, sid, uid };
|
|
1815
|
+
} catch {
|
|
1816
|
+
return {};
|
|
1817
|
+
}
|
|
1818
|
+
}
|
|
1819
|
+
fingerprintFrame(buf) {
|
|
1820
|
+
if (!buf?.length) return;
|
|
1821
|
+
const head = buf.subarray(0, Math.min(12, buf.length)).toString("hex");
|
|
1822
|
+
const slice = buf.subarray(0, Math.min(256, buf.length));
|
|
1823
|
+
const sha1 = createHash("sha1").update(slice).digest("hex");
|
|
1824
|
+
return { len: buf.length, headHex: head, sha1_256: sha1 };
|
|
1825
|
+
}
|
|
1826
|
+
  // Pull frames from the input generator until a video keyframe arrives or
  // `timeoutMs` elapses. Returns the keyframe when found; otherwise the first
  // video frame seen (or undefined when `requireKeyframe` is true).
  // NOTE(review): each 250ms race abandons the pending gen.next() promise and
  // leaves its timer running; async generators queue next() calls, so a frame
  // resolved by an abandoned call appears to be dropped — confirm intended.
  async primeForFfmpeg(gen, timeoutMs, requireKeyframe) {
    const start = Date.now();
    let first;
    while (Date.now() - start < timeoutMs) {
      // Poll with a 250ms cap so the outer deadline is re-checked regularly.
      const r = await Promise.race([
        gen.next(),
        new Promise((resolve) => setTimeout(() => resolve({ value: void 0, done: false }), 250))
      ]);
      if (!r || r.done) return first;
      const v = r.value;
      // Skip timeouts (undefined value) and audio frames; video only.
      if (!v || v.audio) continue;
      if (!first) first = { data: v.data, videoType: v.videoType, isKeyframe: v.isKeyframe, microseconds: v.microseconds };
      if (v.isKeyframe) return { data: v.data, videoType: v.videoType, isKeyframe: v.isKeyframe, microseconds: v.microseconds };
    }
    return requireKeyframe ? void 0 : first;
  }
|
|
1842
|
+
  /**
   * Start the composite stream.
   *
   * Resolves stream metadata for both inputs, computes the PIP geometry, then
   * launches ffmpeg either on a pair of RTSP URLs (when both are configured)
   * or on native frame generators piped into ffmpeg. On any failure the
   * stream is marked inactive, an "error" event is emitted, and the error is
   * rethrown.
   */
  async start() {
    if (this.active) {
      throw new Error("Composite stream already active");
    }
    this.active = true;
    this.audioSource = void 0;
    this.ffmpegInputOffsetSec = void 0;
    this.logger.log?.("[CompositeStream] Starting composite stream...");
    try {
      // Each input may use its own API client; default to the shared one.
      const widerApi = this.options.widerApi ?? this.options.api;
      const teleApi = this.options.teleApi ?? this.options.api;
      const widerClientInfo = this.describeApiClient(widerApi);
      const teleClientInfo = this.describeApiClient(teleApi);
      const sameClient = widerApi?.client && teleApi?.client ? widerApi.client === teleApi.client : false;
      this.logger.log?.(
        `[CompositeStream] Inputs: wider(ch=${this.options.widerChannel}, profile=${this.options.widerProfile}) tele(ch=${this.options.teleChannel}, profile=${this.options.teleProfile}) onNvr=${Boolean(this.options.onNvr)} forceH264=${Boolean(this.options.forceH264)} sameClient=${sameClient} widerClient=${JSON.stringify(widerClientInfo)} teleClient=${JSON.stringify(teleClientInfo)}`
      );
      const widerMetadata = await widerApi.getStreamMetadata(this.options.widerChannel);
      const teleMetadata = await teleApi.getStreamMetadata(this.options.teleChannel);
      // forceH264 remaps each requested profile to an H.264-encoded one.
      const forceH264 = this.options.forceH264 === true;
      const widerProfile = forceH264 ? this.pickH264Profile(widerMetadata, this.options.widerProfile) : this.options.widerProfile;
      const teleProfile = forceH264 ? this.pickH264Profile(teleMetadata, this.options.teleProfile) : this.options.teleProfile;
      this.effectiveWiderProfile = widerProfile;
      this.effectiveTeleProfile = teleProfile;
      const widerStreamInfo = widerMetadata.streams.find((s) => s.profile === widerProfile);
      const teleStreamInfo = teleMetadata.streams.find((s) => s.profile === teleProfile);
      if (!widerStreamInfo || !teleStreamInfo) {
        throw new Error("Stream metadata not found");
      }
      // The wider stream defines the output canvas; the tele stream becomes
      // the PIP, clamped to pipSize in both dimensions while keeping aspect.
      const mainWidth = widerStreamInfo.width;
      const mainHeight = widerStreamInfo.height;
      const requestedPipSizeRaw = this.options.pipSize ?? 0.25;
      const pipSize = Math.min(0.9, Math.max(0.05, Number(requestedPipSizeRaw)));
      const teleAspect = teleStreamInfo.width > 0 && teleStreamInfo.height > 0 ? teleStreamInfo.width / teleStreamInfo.height : 16 / 9;
      let pipWidth = Math.floor(mainWidth * pipSize);
      let pipHeight = Math.floor(pipWidth / teleAspect);
      const maxPipHeight = Math.floor(mainHeight * pipSize);
      if (pipHeight > maxPipHeight) {
        pipHeight = maxPipHeight;
        pipWidth = Math.floor(pipHeight * teleAspect);
      }
      // Encoders generally want even dimensions.
      pipWidth = Math.max(2, pipWidth - pipWidth % 2);
      pipHeight = Math.max(2, pipHeight - pipHeight % 2);
      const position = calculateOverlayPosition(
        this.options.pipPosition ?? "bottom-right",
        mainWidth,
        mainHeight,
        pipWidth,
        pipHeight,
        this.resolvePipMarginPx(mainWidth, mainHeight)
      );
      this.logger.log?.(
        `[CompositeStream] Main: ${mainWidth}x${mainHeight}, PIP: ${pipWidth}x${pipHeight} (pipSize=${pipSize}, position=${this.options.pipPosition ?? "bottom-right"}, margin=${this.options.pipMargin ?? 10}) at (${position.x}, ${position.y})`
      );
      // RTSP mode: ffmpeg pulls both inputs itself; no native generators.
      const widerRtspUrl = this.options.widerRtspUrl;
      const teleRtspUrl = this.options.teleRtspUrl;
      if (widerRtspUrl && teleRtspUrl) {
        this.inputMode = "rtsp";
        if (!this.options.assumeH264Inputs) {
          const isH264 = (enc) => (enc ?? "").toLowerCase().includes("264");
          const widerEnc = widerStreamInfo?.videoEncType;
          const teleEnc = teleStreamInfo?.videoEncType;
          if (!isH264(widerEnc) || !isH264(teleEnc)) {
            throw new Error(
              `[CompositeStream] RTSP pair requires H.264 inputs. Detected wider=${widerEnc ?? "unknown"} tele=${teleEnc ?? "unknown"}. Provide RTSP URLs that are H.264 (often the substream), or set assumeH264Inputs=true if you know they are H.264.`
            );
          }
        }
        await this.startFfmpegCompositionFromRtspUrls(
          mainWidth,
          mainHeight,
          pipWidth,
          pipHeight,
          position,
          widerRtspUrl,
          teleRtspUrl,
          this.options.rtspTransport ?? "tcp"
        );
        this.logger.log?.("[CompositeStream] Composite stream started (rtsp inputs)");
        return;
      }
      // Native mode: open both frame generators. On an NVR (or when both
      // inputs share a channel) the tele input is the "telephoto" variant.
      this.inputMode = "native";
      const teleIsVariantOnSameChannel = !!this.options.onNvr || this.options.teleChannel === this.options.widerChannel;
      this.widerStream = createNativeStream(widerApi, this.options.widerChannel, widerProfile);
      this.teleStream = teleIsVariantOnSameChannel ? createNativeStream(teleApi, this.options.teleChannel, teleProfile, { variant: "telephoto" }) : createNativeStream(teleApi, this.options.teleChannel, teleProfile);
      // Prime both inputs before spawning ffmpeg (keyframe wait configurable).
      const inputStartupTimeoutMs = Math.max(1e3, this.options.inputStartupTimeoutMs ?? 2e4);
      const requireKeyframeOnStartup = this.options.assumeH264Inputs ? false : this.options.requireKeyframeOnStartup ?? true;
      const primeKeyframeMs = inputStartupTimeoutMs;
      const primeAnyFrameMs = Math.min(5e3, Math.max(1e3, Math.floor(inputStartupTimeoutMs / 3)));
      if (requireKeyframeOnStartup) {
        const [widerPrime, telePrime] = await Promise.all([
          this.primeForFfmpeg(this.widerStream, primeKeyframeMs, true),
          this.primeForFfmpeg(this.teleStream, primeKeyframeMs, true)
        ]);
        this.widerPrimeFrame = widerPrime;
        this.telePrimeFrame = telePrime;
      } else {
        const [widerPrime, telePrime] = await Promise.all([
          this.primeForFfmpeg(this.widerStream, primeAnyFrameMs, false),
          this.primeForFfmpeg(this.teleStream, primeAnyFrameMs, false)
        ]);
        this.widerPrimeFrame = widerPrime;
        this.telePrimeFrame = telePrime;
      }
      // Fingerprint both prime frames to detect the same stream feeding both.
      const widerFp = this.fingerprintFrame(this.widerPrimeFrame?.data);
      const teleFp = this.fingerprintFrame(this.telePrimeFrame?.data);
      if (widerFp && teleFp) {
        const same = widerFp.sha1_256 === teleFp.sha1_256;
        this.logger.log?.(
          `[CompositeStream] Prime fingerprints: wider=${JSON.stringify(widerFp)} tele=${JSON.stringify(teleFp)} same=${same}`
        );
        if (same) {
          this.logger.warn?.(
            `[CompositeStream] WARNING: wider+tele prime frames look identical. This usually means the same underlying stream is feeding both inputs (shared socket mixup, wrong channels, or device mapping).`
          );
        }
      }
      this.logger.log?.(
        `[CompositeStream] Prime: wider=${this.widerPrimeFrame?.isKeyframe ? "keyframe" : this.widerPrimeFrame ? "frame" : "none"}, tele=${this.telePrimeFrame?.isKeyframe ? "keyframe" : this.telePrimeFrame ? "frame" : "none"}`
      );
      if (!this.widerPrimeFrame || !this.telePrimeFrame) {
        const missing = [
          !this.widerPrimeFrame ? "wider" : null,
          !this.telePrimeFrame ? "tele" : null
        ].filter(Boolean).join(", ");
        throw new Error(
          `[CompositeStream] Missing input frames (${missing}) within ${inputStartupTimeoutMs}ms. If your camera has a very long GOP/keyframe interval, startup will be slow. Consider increasing inputStartupTimeoutMs or enabling forceH264 (often shorter GOP on substream).`
        );
      }
      // Derive a per-input -itsoffset from the prime-frame timestamps so the
      // two inputs start roughly in sync (only for plausible 0.2..60s skews).
      if (!this.options.assumeH264Inputs) {
        try {
          const wUs = this.widerPrimeFrame.microseconds;
          const tUs = this.telePrimeFrame.microseconds;
          if (typeof wUs === "number" && typeof tUs === "number" && Number.isFinite(wUs) && Number.isFinite(tUs)) {
            const deltaSec = (tUs - wUs) / 1e6;
            const abs = Math.abs(deltaSec);
            if (abs >= 0.2 && abs <= 60) {
              // Delay whichever input is ahead.
              if (deltaSec > 0) {
                this.ffmpegInputOffsetSec = { wider: abs };
              } else {
                this.ffmpegInputOffsetSec = { tele: abs };
              }
              this.logger.log?.(
                `[CompositeStream] Input timestamp delta: widerUs=${wUs} teleUs=${tUs} deltaSec=${deltaSec.toFixed(3)} applying itoffset=${abs.toFixed(3)}s to ${deltaSec > 0 ? "wider" : "tele"}`
              );
            }
          }
        } catch {
        }
      } else {
        this.ffmpegInputOffsetSec = void 0;
      }
      // Prefer codec info observed in the prime frames over metadata.
      const widerCodecFromFrames = this.widerPrimeFrame?.videoType === "H265" ? "hevc" : this.widerPrimeFrame?.videoType === "H264" ? "h264" : void 0;
      const teleCodecFromFrames = this.telePrimeFrame?.videoType === "H265" ? "hevc" : this.telePrimeFrame?.videoType === "H264" ? "h264" : void 0;
      await this.startFfmpegComposition(mainWidth, mainHeight, pipWidth, pipHeight, position, widerCodecFromFrames, teleCodecFromFrames, this.ffmpegInputOffsetSec);
      this.logger.log?.("[CompositeStream] Composite stream started");
    } catch (error) {
      this.active = false;
      this.emit("error", error);
      throw error;
    }
  }
|
|
2007
|
+
  // Spawn ffmpeg with two RTSP inputs, overlay the tele PIP onto the wider
  // frame and emit H.264 Annex-B on stdout (consumed by onFfmpegStdoutData).
  // Audio is dropped (-an). Short GOP + in-band headers + AUD make the output
  // joinable mid-stream. stderr error lines are rate-limited to 3 per 2s.
  async startFfmpegCompositionFromRtspUrls(mainWidth, mainHeight, pipWidth, pipHeight, position, widerRtspUrl, teleRtspUrl, rtspTransport) {
    const ffmpegArgs = [
      "-hide_banner",
      "-loglevel",
      "error",
      "-fflags",
      "+genpts",
      // Input 0: wider
      "-rtsp_transport",
      rtspTransport,
      "-i",
      widerRtspUrl,
      // Input 1: tele
      "-rtsp_transport",
      rtspTransport,
      "-i",
      teleRtspUrl,
      // Filter to scale and position PIP
      "-filter_complex",
      `[0:v]scale=${mainWidth}:${mainHeight}[main];[1:v]scale=${pipWidth}:${pipHeight}[pip];[main][pip]overlay=${position.x}:${position.y}[out]`,
      "-map",
      "[out]",
      // Output: always H.264 Annex-B
      "-an",
      "-c:v",
      "libx264",
      // Fixed 30-frame GOP, no scene-cut keyframes, AUD + repeated headers so
      // new clients can join without waiting long for a sync point.
      "-g",
      "30",
      "-keyint_min",
      "30",
      "-sc_threshold",
      "0",
      "-x264-params",
      "aud=1:repeat-headers=1:keyint=30:min-keyint=30:scenecut=0",
      "-preset",
      "ultrafast",
      "-tune",
      "zerolatency",
      "-crf",
      "23",
      "-f",
      "h264",
      "pipe:1"
    ];
    this.logger.log?.(`[CompositeStream] Starting ffmpeg (rtsp inputs): ${ffmpegArgs.join(" ")}`);
    // stdin unused in RTSP mode; ffmpeg pulls both inputs itself.
    this.ffmpegProcess = spawn3("ffmpeg", ffmpegArgs, {
      stdio: ["ignore", "pipe", "pipe"]
    });
    this.ffmpegProcess.on("error", (error) => {
      this.logger.error?.("[CompositeStream] FFmpeg error:", error);
      this.emit("error", error);
    });
    this.ffmpegProcess.on("close", (code) => {
      if (code !== 0 && code !== null) {
        this.logger.warn?.(`[CompositeStream] FFmpeg exited with code ${code}`);
      }
      this.emit("close");
    });
    // Annex-B output is reassembled into access units by onFfmpegStdoutData.
    this.ffmpegProcess.stdout?.on("data", (data) => {
      this.onFfmpegStdoutData(data);
    });
    this.ffmpegProcess.stderr?.on("data", (data) => {
      const output = data.toString();
      if (output.includes("error") || output.includes("Error")) {
        const now = Date.now();
        // Reset the burst counter every 2s; log at most 3 lines per window.
        if (now - this.ffmpegStderrLastLogAtMs > 2e3) {
          this.ffmpegStderrLastLogAtMs = now;
          this.ffmpegStderrLogBurst = 0;
        }
        if (this.ffmpegStderrLogBurst++ < 3) {
          this.logger.error?.("[CompositeStream] FFmpeg stderr:", output);
        }
      }
    });
  }
|
|
2082
|
+
  /**
   * Start ffmpeg for composition with overlay.
   *
   * Native-frame variant: ffmpeg reads two elementary streams over pipes
   * (fd 0 = wider, fd 3 = tele), overlays the tele PIP onto the wider frame
   * and re-encodes to H.264 Annex-B on stdout. Per-input codecs come from
   * overrides (prime frames) when given, else from stream metadata; optional
   * per-input -itsoffset compensates timestamp skew. Frame feeding is kicked
   * off at the end via feedFramesToFfmpeg().
   */
  async startFfmpegComposition(mainWidth, mainHeight, pipWidth, pipHeight, position, widerCodecOverride, teleCodecOverride, inputOffsetSec) {
    const widerApi = this.options.widerApi ?? this.options.api;
    const teleApi = this.options.teleApi ?? this.options.api;
    const widerMetadata = await widerApi.getStreamMetadata(this.options.widerChannel);
    const teleMetadata = await teleApi.getStreamMetadata(this.options.teleChannel);
    const widerProfile = this.effectiveWiderProfile ?? this.options.widerProfile;
    const teleProfile = this.effectiveTeleProfile ?? this.options.teleProfile;
    const widerStreamInfo = widerMetadata.streams.find((s) => s.profile === widerProfile);
    const teleStreamInfo = teleMetadata.streams.find((s) => s.profile === teleProfile);
    // Codec resolution order: assumeH264Inputs > override (prime frame) > metadata.
    const assumeH264Inputs = this.options.assumeH264Inputs === true;
    const widerCodec = assumeH264Inputs ? "h264" : widerCodecOverride ?? (widerStreamInfo?.videoEncType?.toLowerCase().includes("265") ? "hevc" : "h264");
    const teleCodec = assumeH264Inputs ? "h264" : teleCodecOverride ?? (teleStreamInfo?.videoEncType?.toLowerCase().includes("265") ? "hevc" : "h264");
    this.logger.log?.(
      `[CompositeStream] Codec detection: wider=${widerCodec} (from metadata: ${widerStreamInfo?.videoEncType || "unknown"}), tele=${teleCodec} (from metadata: ${teleStreamInfo?.videoEncType || "unknown"})`
    );
    if (this.options.disableTranscode) {
      // Overlay compositing always requires a re-encode; the flag cannot be honored.
      this.logger.warn?.(
        `[CompositeStream] disableTranscode requested, but overlay requires re-encode in ffmpeg; proceeding with libx264 output.`
      );
    }
    const widerInputArgs = [
      ...inputOffsetSec?.wider ? ["-itsoffset", String(inputOffsetSec.wider)] : [],
      "-f",
      widerCodec,
      "-i",
      "pipe:0"
    ];
    const teleInputArgs = [
      ...inputOffsetSec?.tele ? ["-itsoffset", String(inputOffsetSec.tele)] : [],
      "-f",
      teleCodec,
      "-i",
      "pipe:3"
    ];
    const ffmpegArgs = [
      "-hide_banner",
      "-loglevel",
      "error",
      "-fflags",
      "+genpts",
      "-probesize",
      "32",
      // Small probe size for faster detection
      "-analyzeduration",
      "500000",
      // 0.5 seconds to analyze stream
      // Input 0: wider stream (main)
      ...widerInputArgs,
      // Input 1: tele stream (PIP)
      ...teleInputArgs,
      // Filter to scale and position PIP
      "-filter_complex",
      `[0:v]scale=${mainWidth}:${mainHeight}[main];[1:v]scale=${pipWidth}:${pipHeight}[pip];[main][pip]overlay=${position.x}:${position.y}[out]`,
      "-map",
      "[out]",
      "-c:v",
      "libx264",
      // Re-encode for compatibility
      // Make the stream easy to join mid-flight: frequent IDRs + in-band headers + AUD.
      // Without this, a new client may wait many seconds for the next keyframe.
      "-g",
      "30",
      "-keyint_min",
      "30",
      "-sc_threshold",
      "0",
      "-x264-params",
      "aud=1:repeat-headers=1:keyint=30:min-keyint=30:scenecut=0",
      "-preset",
      "ultrafast",
      "-tune",
      "zerolatency",
      "-crf",
      "23",
      "-f",
      "h264",
      "pipe:1"
      // Output (stdout)
    ];
    this.logger.log?.(
      `[CompositeStream] Starting ffmpeg: ${ffmpegArgs.join(" ")}`
    );
    // fd layout: 0 = wider input, 1 = stdout, 2 = stderr, 3 = tele input.
    this.ffmpegProcess = spawn3("ffmpeg", ffmpegArgs, {
      stdio: ["pipe", "pipe", "pipe", "pipe"]
    });
    this.ffmpegProcess.on("error", (error) => {
      this.logger.error?.("[CompositeStream] FFmpeg error:", error);
      this.emit("error", error);
    });
    this.ffmpegProcess.on("close", (code) => {
      if (code !== 0 && code !== null) {
        this.logger.warn?.(`[CompositeStream] FFmpeg exited with code ${code}`);
      }
      this.emit("close");
    });
    // Annex-B output is reassembled into access units by onFfmpegStdoutData.
    this.ffmpegProcess.stdout?.on("data", (data) => {
      this.onFfmpegStdoutData(data);
    });
    this.ffmpegProcess.stderr?.on("data", (data) => {
      const output = data.toString();
      if (output.includes("error") || output.includes("Error")) {
        const now = Date.now();
        // Rate-limit: reset the burst window every 2s, log at most 3 lines.
        if (now - this.ffmpegStderrLastLogAtMs > 2e3) {
          this.ffmpegStderrLastLogAtMs = now;
          this.ffmpegStderrLogBurst = 0;
        }
        if (this.ffmpegStderrLogBurst++ < 3) {
          this.logger.error?.("[CompositeStream] FFmpeg stderr:", output);
        }
      }
    });
    this.feedFramesToFfmpeg();
  }
|
|
2198
|
+
onFfmpegStdoutData(data) {
|
|
2199
|
+
if (!data?.length) return;
|
|
2200
|
+
this.ffmpegOutBuf = this.ffmpegOutBuf.length ? Buffer.concat([this.ffmpegOutBuf, data], this.ffmpegOutBuf.length + data.length) : data;
|
|
2201
|
+
const MAX_BUF = 8 * 1024 * 1024;
|
|
2202
|
+
if (this.ffmpegOutBuf.length > MAX_BUF) {
|
|
2203
|
+
this.logger.warn?.(`[CompositeStream] ffmpeg stdout buffer grew too large (${this.ffmpegOutBuf.length} bytes); resyncing`);
|
|
2204
|
+
this.ffmpegOutBuf = this.ffmpegOutBuf.subarray(this.ffmpegOutBuf.length - 1024 * 1024);
|
|
2205
|
+
}
|
|
2206
|
+
const emittedUpTo = this.emitH264AccessUnitsFromBuffer();
|
|
2207
|
+
if (emittedUpTo > 0) {
|
|
2208
|
+
this.ffmpegOutBuf = this.ffmpegOutBuf.subarray(emittedUpTo);
|
|
2209
|
+
}
|
|
2210
|
+
}
|
|
2211
|
+
// Scan this.ffmpegOutBuf (Annex B H.264 elementary stream from ffmpeg stdout)
// and emit complete access units as "videoFrame" events.
// Returns the byte offset the caller may safely trim from the front of the
// buffer (0 when nothing was emitted). NOTE: when the buffer does not begin
// with a start code, this method trims this.ffmpegOutBuf itself and returns 0.
emitH264AccessUnitsFromBuffer() {
  const buf = this.ffmpegOutBuf;
  if (!buf?.length) return 0;
  const len = buf.length;
  // Need at least a start code (3-4 bytes) plus one NAL header byte.
  if (len < 5) return 0;
  // Length of the Annex B start code at offset i: 3 (00 00 01), 4 (00 00 00 01), or 0.
  const startCodeLenAt = (i) => {
    if (i + 3 <= len && buf[i] === 0 && buf[i + 1] === 0) {
      if (buf[i + 2] === 1) return 3;
      if (i + 4 <= len && buf[i + 2] === 0 && buf[i + 3] === 1) return 4;
    }
    return 0;
  };
  // Resync: look for the first start code within the first 64 bytes only
  // (anything further means the head of the buffer is garbage we can't trim cheaply).
  let firstSc = -1;
  for (let i = 0; i < Math.min(len - 3, 64); i++) {
    if (startCodeLenAt(i)) {
      firstSc = i;
      break;
    }
  }
  if (firstSc < 0) return 0;
  if (firstSc > 0) {
    // Drop leading junk before the first start code and wait for more data.
    this.ffmpegOutBuf = buf.subarray(firstSc);
    return 0;
  }
  // Collect all start-code positions; we need at least two to delimit one NAL.
  const startCodes = [];
  for (let i = 0; i < len - 3; i++) {
    if (startCodeLenAt(i)) startCodes.push(i);
  }
  if (startCodes.length < 2) return 0;
  // Strip H.264 emulation-prevention bytes (00 00 03 -> 00 00) to get RBSP.
  const removeEmulationPrevention = (rbsp) => {
    const out = [];
    for (let i = 0; i < rbsp.length; i++) {
      const b = rbsp[i];
      if (i >= 2 && rbsp[i - 1] === 0 && rbsp[i - 2] === 0 && b === 3) {
        continue;
      }
      out.push(b);
    }
    return Buffer.from(out);
  };
  // Read one Exp-Golomb ue(v) value starting at bitOffset.
  // Returns { value, next } or undefined when the bitstream is truncated.
  const readUE = (rbsp, bitOffset) => {
    const totalBits = rbsp.length * 8;
    let i = bitOffset;
    let zeros = 0;
    // Count leading zero bits.
    while (i < totalBits) {
      const byte = rbsp[i >> 3];
      const bit = byte >> 7 - (i & 7) & 1;
      if (bit === 0) {
        zeros++;
        i++;
        continue;
      }
      break;
    }
    if (i >= totalBits) return;
    i++; // consume the terminating 1 bit
    if (zeros === 0) return { value: 0, next: i };
    if (i + zeros > totalBits) return;
    // Read `zeros` suffix bits.
    let info = 0;
    for (let k = 0; k < zeros; k++, i++) {
      const byte = rbsp[i >> 3];
      const bit = byte >> 7 - (i & 7) & 1;
      info = info << 1 | bit;
    }
    const value = (1 << zeros) - 1 + info;
    return { value, next: i };
  };
  // A VCL NAL starts a new picture when its slice header's first_mb_in_slice
  // (the first ue(v) of the slice header) is 0.
  const isFirstSliceOfPicture = (nal) => {
    if (nal.length < 2) return false;
    const nalType = nal[0] & 31;
    if (nalType !== 1 && nalType !== 5) return false; // non-IDR / IDR slice only
    const rbsp = removeEmulationPrevention(nal.subarray(1));
    const ue = readUE(rbsp, 0);
    if (!ue) return false;
    return ue.value === 0;
  };
  // Walk NALs and cut access units at AUD NALs or at the first slice of a
  // new picture. The last (possibly incomplete) AU is kept in the buffer.
  let currentAuStart = 0;
  let sawVcl = false;
  let emittedThrough = 0;
  for (let idx = 0; idx < startCodes.length - 1; idx++) {
    const scPos = startCodes[idx];
    const scLen = startCodeLenAt(scPos);
    if (!scLen) continue;
    const nalStart = scPos + scLen;
    const nalEnd = startCodes[idx + 1];
    if (nalStart >= nalEnd || nalStart >= len) continue;
    const nal = buf.subarray(nalStart, nalEnd);
    if (!nal.length) continue;
    const nalType = nal[0] & 31;
    const isAud = nalType === 9; // access unit delimiter
    const isVcl = nalType === 1 || nalType === 5;
    if (isAud) {
      // An AUD always terminates the AU in progress (if it had any slices).
      if (sawVcl && scPos > currentAuStart) {
        const au = buf.subarray(currentAuStart, scPos);
        if (au.length) this.emit("videoFrame", au);
        emittedThrough = scPos;
      }
      currentAuStart = scPos;
      sawVcl = false;
      continue;
    }
    if (isVcl) {
      const firstSlice = isFirstSliceOfPicture(nal);
      if (firstSlice) {
        // A new picture's first slice closes the previous AU.
        if (sawVcl && scPos > currentAuStart) {
          const au = buf.subarray(currentAuStart, scPos);
          if (au.length) this.emit("videoFrame", au);
          emittedThrough = scPos;
          currentAuStart = scPos;
        }
        sawVcl = true;
      } else {
        sawVcl = true;
      }
    }
  }
  return Math.max(0, emittedThrough);
}
|
|
2329
|
+
/**
|
|
2330
|
+
* Feed frames from native streams to ffmpeg
|
|
2331
|
+
* Uses two separate loops to write frames continuously
|
|
2332
|
+
*/
|
|
2333
|
+
async feedFramesToFfmpeg() {
|
|
2334
|
+
if (!this.ffmpegProcess || !this.widerStream || !this.teleStream) {
|
|
2335
|
+
return;
|
|
2336
|
+
}
|
|
2337
|
+
const requireKeyframeOnStartup = this.options.assumeH264Inputs ? false : this.options.requireKeyframeOnStartup ?? true;
|
|
2338
|
+
const widerStdin = this.ffmpegProcess.stdio[0];
|
|
2339
|
+
const teleStdin = this.ffmpegProcess.stdio[3];
|
|
2340
|
+
if (!widerStdin || !teleStdin) {
|
|
2341
|
+
this.logger.error?.("[CompositeStream] FFmpeg stdin not available");
|
|
2342
|
+
return;
|
|
2343
|
+
}
|
|
2344
|
+
const feedWider = async () => {
|
|
2345
|
+
try {
|
|
2346
|
+
let widerSynced = !requireKeyframeOnStartup;
|
|
2347
|
+
if (this.widerPrimeFrame?.data) {
|
|
2348
|
+
try {
|
|
2349
|
+
if (!requireKeyframeOnStartup || this.widerPrimeFrame.isKeyframe) {
|
|
2350
|
+
const written = widerStdin.write(this.toAnnexB(this.widerPrimeFrame.data));
|
|
2351
|
+
widerSynced = widerSynced || Boolean(this.widerPrimeFrame.isKeyframe);
|
|
2352
|
+
if (!written) {
|
|
2353
|
+
await new Promise((resolve) => {
|
|
2354
|
+
widerStdin.once("drain", () => resolve());
|
|
2355
|
+
});
|
|
2356
|
+
}
|
|
2357
|
+
}
|
|
2358
|
+
this.widerPrimeFrame = void 0;
|
|
2359
|
+
} catch {
|
|
2360
|
+
}
|
|
2361
|
+
}
|
|
2362
|
+
for await (const frame of this.widerStream) {
|
|
2363
|
+
if (!this.active) break;
|
|
2364
|
+
if (frame.audio) {
|
|
2365
|
+
if (!this.audioSource) this.audioSource = "wider";
|
|
2366
|
+
if (this.audioSource === "wider") this.emit("audioFrame", frame.data);
|
|
2367
|
+
continue;
|
|
2368
|
+
}
|
|
2369
|
+
if (!widerSynced) {
|
|
2370
|
+
if (!frame.isKeyframe) continue;
|
|
2371
|
+
widerSynced = true;
|
|
2372
|
+
}
|
|
2373
|
+
try {
|
|
2374
|
+
const written = widerStdin.write(this.toAnnexB(frame.data));
|
|
2375
|
+
if (!written) {
|
|
2376
|
+
await new Promise((resolve) => {
|
|
2377
|
+
widerStdin.once("drain", () => resolve());
|
|
2378
|
+
});
|
|
2379
|
+
}
|
|
2380
|
+
} catch (error) {
|
|
2381
|
+
const code = error?.code;
|
|
2382
|
+
if (code === "EPIPE" || code === "ERR_STREAM_WRITE_AFTER_END") {
|
|
2383
|
+
this.logger.log?.("[CompositeStream] FFmpeg wider stdin closed");
|
|
2384
|
+
break;
|
|
2385
|
+
}
|
|
2386
|
+
this.logger.error?.("[CompositeStream] Error writing wider frame:", error);
|
|
2387
|
+
}
|
|
2388
|
+
}
|
|
2389
|
+
} catch (error) {
|
|
2390
|
+
if (this.active) {
|
|
2391
|
+
this.logger.error?.("[CompositeStream] Error in wider stream:", error);
|
|
2392
|
+
}
|
|
2393
|
+
} finally {
|
|
2394
|
+
try {
|
|
2395
|
+
widerStdin.end();
|
|
2396
|
+
} catch {
|
|
2397
|
+
}
|
|
2398
|
+
}
|
|
2399
|
+
};
|
|
2400
|
+
const feedTele = async () => {
|
|
2401
|
+
try {
|
|
2402
|
+
let teleSynced = !requireKeyframeOnStartup;
|
|
2403
|
+
if (this.telePrimeFrame?.data) {
|
|
2404
|
+
try {
|
|
2405
|
+
if (!requireKeyframeOnStartup || this.telePrimeFrame.isKeyframe) {
|
|
2406
|
+
const written = teleStdin.write(this.toAnnexB(this.telePrimeFrame.data));
|
|
2407
|
+
teleSynced = teleSynced || Boolean(this.telePrimeFrame.isKeyframe);
|
|
2408
|
+
if (!written) {
|
|
2409
|
+
await new Promise((resolve) => {
|
|
2410
|
+
teleStdin.once("drain", () => resolve());
|
|
2411
|
+
});
|
|
2412
|
+
}
|
|
2413
|
+
}
|
|
2414
|
+
this.telePrimeFrame = void 0;
|
|
2415
|
+
} catch {
|
|
2416
|
+
}
|
|
2417
|
+
}
|
|
2418
|
+
for await (const frame of this.teleStream) {
|
|
2419
|
+
if (!this.active) break;
|
|
2420
|
+
if (frame.audio) {
|
|
2421
|
+
if (!this.audioSource) this.audioSource = "tele";
|
|
2422
|
+
if (this.audioSource === "tele") this.emit("audioFrame", frame.data);
|
|
2423
|
+
continue;
|
|
2424
|
+
}
|
|
2425
|
+
if (!teleSynced) {
|
|
2426
|
+
if (!frame.isKeyframe) continue;
|
|
2427
|
+
teleSynced = true;
|
|
2428
|
+
}
|
|
2429
|
+
try {
|
|
2430
|
+
const written = teleStdin.write(this.toAnnexB(frame.data));
|
|
2431
|
+
if (!written) {
|
|
2432
|
+
await new Promise((resolve) => {
|
|
2433
|
+
teleStdin.once("drain", () => resolve());
|
|
2434
|
+
});
|
|
2435
|
+
}
|
|
2436
|
+
} catch (error) {
|
|
2437
|
+
const code = error?.code;
|
|
2438
|
+
if (code === "EPIPE" || code === "ERR_STREAM_WRITE_AFTER_END") {
|
|
2439
|
+
this.logger.log?.("[CompositeStream] FFmpeg tele stdin closed");
|
|
2440
|
+
break;
|
|
2441
|
+
}
|
|
2442
|
+
this.logger.error?.("[CompositeStream] Error writing tele frame:", error);
|
|
2443
|
+
}
|
|
2444
|
+
}
|
|
2445
|
+
} catch (error) {
|
|
2446
|
+
if (this.active) {
|
|
2447
|
+
this.logger.error?.("[CompositeStream] Error in tele stream:", error);
|
|
2448
|
+
}
|
|
2449
|
+
} finally {
|
|
2450
|
+
try {
|
|
2451
|
+
teleStdin.end();
|
|
2452
|
+
} catch {
|
|
2453
|
+
}
|
|
2454
|
+
}
|
|
2455
|
+
};
|
|
2456
|
+
Promise.all([feedWider(), feedTele()]).catch((error) => {
|
|
2457
|
+
if (this.active) {
|
|
2458
|
+
this.logger.error?.("[CompositeStream] Error in frame processing:", error);
|
|
2459
|
+
this.emit("error", error);
|
|
2460
|
+
}
|
|
2461
|
+
});
|
|
2462
|
+
}
|
|
2463
|
+
toAnnexB(accessUnit) {
|
|
2464
|
+
if (!accessUnit?.length) return accessUnit;
|
|
2465
|
+
if (accessUnit.length >= 3 && accessUnit[0] === 0 && accessUnit[1] === 0) {
|
|
2466
|
+
if (accessUnit[2] === 1) return accessUnit;
|
|
2467
|
+
if (accessUnit.length >= 4 && accessUnit[2] === 0 && accessUnit[3] === 1) return accessUnit;
|
|
2468
|
+
}
|
|
2469
|
+
try {
|
|
2470
|
+
let off = 0;
|
|
2471
|
+
const parts = [];
|
|
2472
|
+
while (off + 4 <= accessUnit.length) {
|
|
2473
|
+
const n = accessUnit.readUInt32BE(off);
|
|
2474
|
+
off += 4;
|
|
2475
|
+
if (!n || off + n > accessUnit.length) {
|
|
2476
|
+
return accessUnit;
|
|
2477
|
+
}
|
|
2478
|
+
parts.push(Buffer.from([0, 0, 0, 1]));
|
|
2479
|
+
parts.push(accessUnit.subarray(off, off + n));
|
|
2480
|
+
off += n;
|
|
2481
|
+
}
|
|
2482
|
+
if (!parts.length) return accessUnit;
|
|
2483
|
+
return Buffer.concat(parts);
|
|
2484
|
+
} catch {
|
|
2485
|
+
return accessUnit;
|
|
2486
|
+
}
|
|
2487
|
+
}
|
|
2488
|
+
/**
|
|
2489
|
+
* Stop the composite stream
|
|
2490
|
+
*/
|
|
2491
|
+
async stop() {
|
|
2492
|
+
if (!this.active) {
|
|
2493
|
+
return;
|
|
2494
|
+
}
|
|
2495
|
+
this.active = false;
|
|
2496
|
+
this.logger.log?.("[CompositeStream] Stopping composite stream...");
|
|
2497
|
+
if (this.ffmpegProcess) {
|
|
2498
|
+
try {
|
|
2499
|
+
this.ffmpegProcess.stdin?.end();
|
|
2500
|
+
this.ffmpegProcess.kill("SIGTERM");
|
|
2501
|
+
setTimeout(() => {
|
|
2502
|
+
try {
|
|
2503
|
+
this.ffmpegProcess?.kill("SIGKILL");
|
|
2504
|
+
} catch {
|
|
2505
|
+
}
|
|
2506
|
+
}, 1e3);
|
|
2507
|
+
} catch {
|
|
2508
|
+
}
|
|
2509
|
+
this.ffmpegProcess = null;
|
|
2510
|
+
}
|
|
2511
|
+
if (this.inputMode === "native") {
|
|
2512
|
+
await Promise.all([
|
|
2513
|
+
this.closeGenerator(this.widerStream),
|
|
2514
|
+
this.closeGenerator(this.teleStream)
|
|
2515
|
+
]);
|
|
2516
|
+
this.widerStream = null;
|
|
2517
|
+
this.teleStream = null;
|
|
2518
|
+
} else {
|
|
2519
|
+
this.widerStream = null;
|
|
2520
|
+
this.teleStream = null;
|
|
2521
|
+
}
|
|
2522
|
+
this.logger.log?.("[CompositeStream] Composite stream stopped");
|
|
2523
|
+
}
|
|
2524
|
+
/**
|
|
2525
|
+
* Check if stream is active
|
|
2526
|
+
*/
|
|
2527
|
+
isActive() {
|
|
2528
|
+
return this.active;
|
|
2529
|
+
}
|
|
2530
|
+
};
|
|
2531
|
+
|
|
2532
|
+
// src/rfc/rfc4571-server.ts
|
|
2533
|
+
async function createRfc4571TcpServer(options) {
|
|
2534
|
+
const isComposite = options.channel === void 0;
|
|
2535
|
+
const parseCompositeFromRequestedId = (requestedId2) => {
|
|
2536
|
+
if (!requestedId2) return;
|
|
2537
|
+
const id = String(requestedId2);
|
|
2538
|
+
const asProfile = (v) => v === "main" || v === "sub" || v === "ext" ? v : void 0;
|
|
2539
|
+
if (id.startsWith("composite-native-") || id.startsWith("composite-rtsp-")) {
|
|
2540
|
+
const source = id.startsWith("composite-rtsp-") ? "rtsp" : "native";
|
|
2541
|
+
const parts = id.split("-").filter(Boolean);
|
|
2542
|
+
if (parts.length >= 4) {
|
|
2543
|
+
const wider = parts.length >= 5 ? parts[3] : parts[2];
|
|
2544
|
+
const tele = parts.length >= 5 ? parts[4] : parts[3];
|
|
2545
|
+
const widerProfile = asProfile(wider);
|
|
2546
|
+
const teleProfile = asProfile(tele);
|
|
2547
|
+
return {
|
|
2548
|
+
source,
|
|
2549
|
+
...widerProfile ? { widerProfile } : {},
|
|
2550
|
+
...teleProfile ? { teleProfile } : {}
|
|
2551
|
+
};
|
|
2552
|
+
}
|
|
2553
|
+
return { source };
|
|
2554
|
+
}
|
|
2555
|
+
return;
|
|
2556
|
+
};
|
|
2557
|
+
const apiFactoryCtx = {
|
|
2558
|
+
profile: options.profile,
|
|
2559
|
+
composite: isComposite,
|
|
2560
|
+
...options.channel !== void 0 ? { channel: options.channel } : {},
|
|
2561
|
+
...options.variant !== void 0 ? { variant: options.variant } : {}
|
|
2562
|
+
};
|
|
2563
|
+
const baseApi = options.api ?? await options.getApi?.(apiFactoryCtx);
|
|
2564
|
+
if (!baseApi) {
|
|
2565
|
+
throw new Error("createRfc4571TcpServer: missing api/getApi");
|
|
2566
|
+
}
|
|
2567
|
+
const resolvedCompositeApis = options.compositeApis ?? await options.getCompositeApis?.();
|
|
2568
|
+
const {
|
|
2569
|
+
channel,
|
|
2570
|
+
profile,
|
|
2571
|
+
variant,
|
|
2572
|
+
logger,
|
|
2573
|
+
expectedVideoType,
|
|
2574
|
+
host = "127.0.0.1",
|
|
2575
|
+
videoPayloadType = 96,
|
|
2576
|
+
audioPayloadType = 97,
|
|
2577
|
+
keyframeTimeoutMs = 5e3,
|
|
2578
|
+
uptimeRestartMs: uptimeRestartMsOpt,
|
|
2579
|
+
idleTeardownMs = 2500,
|
|
2580
|
+
closeApiOnTeardown = true,
|
|
2581
|
+
username,
|
|
2582
|
+
password,
|
|
2583
|
+
requireAuth = false,
|
|
2584
|
+
compositeOptions,
|
|
2585
|
+
requestedId,
|
|
2586
|
+
aacAudioHint
|
|
2587
|
+
} = options;
|
|
2588
|
+
let dedicatedSession;
|
|
2589
|
+
const apisToClose = /* @__PURE__ */ new Set();
|
|
2590
|
+
apisToClose.add(baseApi);
|
|
2591
|
+
if (resolvedCompositeApis?.widerApi)
|
|
2592
|
+
apisToClose.add(resolvedCompositeApis.widerApi);
|
|
2593
|
+
if (resolvedCompositeApis?.teleApi)
|
|
2594
|
+
apisToClose.add(resolvedCompositeApis.teleApi);
|
|
2595
|
+
const uptimeRestartMs = uptimeRestartMsOpt ?? (isComposite ? 6e4 : 1e4);
|
|
2596
|
+
const variantSuffix = variant && variant !== "default" ? ` variant=${variant}` : "";
|
|
2597
|
+
const logPrefix = isComposite ? `[native-rfc4571 composite profile=${profile}${variantSuffix}${requestedId ? ` id=${requestedId}` : ""}]` : `[native-rfc4571 ch=${channel} profile=${profile}${variantSuffix}]`;
|
|
2598
|
+
const log = (message) => {
|
|
2599
|
+
try {
|
|
2600
|
+
if (logger?.info) {
|
|
2601
|
+
logger.info(`${logPrefix} ${message}`);
|
|
2602
|
+
} else if (logger?.log) {
|
|
2603
|
+
logger.log(`${logPrefix} ${message}`);
|
|
2604
|
+
}
|
|
2605
|
+
} catch {
|
|
2606
|
+
}
|
|
2607
|
+
};
|
|
2608
|
+
const logSessionsSummary = (action) => {
|
|
2609
|
+
const summary = baseApi.getDedicatedSessionsSummary();
|
|
2610
|
+
log(
|
|
2611
|
+
`${action} [sessions: ${summary.count} active${summary.count > 0 ? ` (${summary.keys.join(", ")})` : ""}]`
|
|
2612
|
+
);
|
|
2613
|
+
};
|
|
2614
|
+
log(
|
|
2615
|
+
`starting (host=${host} videoPT=${videoPayloadType} audioPT=${audioPayloadType} expectedVideoType=${expectedVideoType ?? "n/a"} keyframeTimeoutMs=${keyframeTimeoutMs} uptimeRestartMs=${uptimeRestartMs} idleTeardownMs=${idleTeardownMs} composite=${isComposite})`
|
|
2616
|
+
);
|
|
2617
|
+
let videoStream;
|
|
2618
|
+
let isCompositeStream = false;
|
|
2619
|
+
if (isComposite) {
|
|
2620
|
+
const widerChannel = compositeOptions?.widerChannel ?? 0;
|
|
2621
|
+
const teleChannel = compositeOptions?.teleChannel ?? 1;
|
|
2622
|
+
const requested = parseCompositeFromRequestedId(requestedId);
|
|
2623
|
+
const outputProfile = profile;
|
|
2624
|
+
let teleInputProfile = requested?.teleProfile ?? outputProfile;
|
|
2625
|
+
let widerMainIsH264 = false;
|
|
2626
|
+
try {
|
|
2627
|
+
const widerApiForProbe = resolvedCompositeApis?.widerApi ?? baseApi;
|
|
2628
|
+
const metadata = await widerApiForProbe.getStreamMetadata(widerChannel);
|
|
2629
|
+
const streams = Array.isArray(metadata) ? metadata : Array.isArray(metadata?.streams) ? metadata.streams : [];
|
|
2630
|
+
const main = streams.find((s) => s?.profile === "main");
|
|
2631
|
+
const enc = typeof main?.videoEncType === "string" ? main.videoEncType.toLowerCase() : "";
|
|
2632
|
+
widerMainIsH264 = enc.includes("264");
|
|
2633
|
+
} catch {
|
|
2634
|
+
}
|
|
2635
|
+
let widerInputProfile = requested?.widerProfile ?? (outputProfile === "ext" ? "ext" : outputProfile === "main" && widerMainIsH264 ? "main" : "sub");
|
|
2636
|
+
if (requested?.teleProfile && requested.teleProfile !== outputProfile) {
|
|
2637
|
+
log(
|
|
2638
|
+
`requestedId overrides tele INPUT profile (requested=${requested.teleProfile} output=${outputProfile})`
|
|
2639
|
+
);
|
|
2640
|
+
}
|
|
2641
|
+
log(
|
|
2642
|
+
`creating composite stream: outputProfile=${outputProfile} wider(ch=${widerChannel}, profile=${widerInputProfile}), tele(ch=${teleChannel}, profile=${teleInputProfile}) source=${requested?.source ?? "native"} widerMainIsH264=${widerMainIsH264}`
|
|
2643
|
+
);
|
|
2644
|
+
const widerApi = resolvedCompositeApis?.widerApi ?? baseApi;
|
|
2645
|
+
const teleApi = resolvedCompositeApis?.teleApi ?? baseApi;
|
|
2646
|
+
const forceH264 = compositeOptions?.forceH264;
|
|
2647
|
+
const defaultForceH264 = outputProfile === "sub";
|
|
2648
|
+
let widerRtspUrl;
|
|
2649
|
+
let teleRtspUrl;
|
|
2650
|
+
if (requested?.source === "rtsp") {
|
|
2651
|
+
const onNvr = Boolean(compositeOptions?.onNvr);
|
|
2652
|
+
const resolveLensRtspUrl = async (params) => {
|
|
2653
|
+
const { rtspStreams } = await baseApi.buildVideoStreamOptions({
|
|
2654
|
+
channel: params.channel,
|
|
2655
|
+
onNvr,
|
|
2656
|
+
compositeOnly: false,
|
|
2657
|
+
lens: params.lens
|
|
2658
|
+
});
|
|
2659
|
+
const candidates = rtspStreams.filter(
|
|
2660
|
+
(s) => s.container === "rtsp" && s.lens === params.desiredLens && Boolean(s.urlWithAuth)
|
|
2661
|
+
);
|
|
2662
|
+
const exact = candidates.find((s) => s.profile === params.profile);
|
|
2663
|
+
if (exact?.urlWithAuth) {
|
|
2664
|
+
return {
|
|
2665
|
+
urlWithAuth: exact.urlWithAuth,
|
|
2666
|
+
actualProfile: exact.profile
|
|
2667
|
+
};
|
|
2668
|
+
}
|
|
2669
|
+
if (params.profile === "sub") {
|
|
2670
|
+
const fallbackMain = candidates.find((s) => s.profile === "main");
|
|
2671
|
+
if (fallbackMain?.urlWithAuth) {
|
|
2672
|
+
return {
|
|
2673
|
+
urlWithAuth: fallbackMain.urlWithAuth,
|
|
2674
|
+
actualProfile: fallbackMain.profile
|
|
2675
|
+
};
|
|
2676
|
+
}
|
|
2677
|
+
}
|
|
2678
|
+
const available = rtspStreams.filter(
|
|
2679
|
+
(s) => s.container === "rtsp" && s.lens === params.desiredLens
|
|
2680
|
+
).map((s) => `${s.profile}:${s.id}`).join(", ");
|
|
2681
|
+
throw new Error(
|
|
2682
|
+
`Requested composite RTSP inputs, but no RTSP ${params.desiredLens} stream found for channel=${params.channel} profile=${params.profile} (onNvr=${onNvr}). Available: [${available || "none"}]. Use composite-native-... or choose a different profile.`
|
|
2683
|
+
);
|
|
2684
|
+
};
|
|
2685
|
+
const widerResolved = await resolveLensRtspUrl({
|
|
2686
|
+
channel: widerChannel,
|
|
2687
|
+
lens: "default",
|
|
2688
|
+
desiredLens: "wide",
|
|
2689
|
+
profile: widerInputProfile
|
|
2690
|
+
});
|
|
2691
|
+
widerRtspUrl = widerResolved.urlWithAuth;
|
|
2692
|
+
widerInputProfile = widerResolved.actualProfile;
|
|
2693
|
+
const teleResolved = await resolveLensRtspUrl({
|
|
2694
|
+
channel: teleChannel,
|
|
2695
|
+
lens: "telephoto",
|
|
2696
|
+
desiredLens: "telephoto",
|
|
2697
|
+
profile: teleInputProfile
|
|
2698
|
+
});
|
|
2699
|
+
teleRtspUrl = teleResolved.urlWithAuth;
|
|
2700
|
+
if (teleResolved.actualProfile !== teleInputProfile) {
|
|
2701
|
+
log(
|
|
2702
|
+
`tele RTSP profile fallback applied (requested=${teleInputProfile} actual=${teleResolved.actualProfile})`
|
|
2703
|
+
);
|
|
2704
|
+
}
|
|
2705
|
+
teleInputProfile = teleResolved.actualProfile;
|
|
2706
|
+
}
|
|
2707
|
+
videoStream = new CompositeStream({
|
|
2708
|
+
api: baseApi,
|
|
2709
|
+
widerApi,
|
|
2710
|
+
teleApi,
|
|
2711
|
+
widerChannel,
|
|
2712
|
+
teleChannel,
|
|
2713
|
+
widerProfile: widerInputProfile,
|
|
2714
|
+
teleProfile: teleInputProfile,
|
|
2715
|
+
...widerRtspUrl && teleRtspUrl ? { widerRtspUrl, teleRtspUrl } : {},
|
|
2716
|
+
...compositeOptions?.rtspTransport ? { rtspTransport: compositeOptions.rtspTransport } : {},
|
|
2717
|
+
pipPosition: compositeOptions?.pipPosition ?? "bottom-right",
|
|
2718
|
+
pipSize: compositeOptions?.pipSize ?? 0.25,
|
|
2719
|
+
// New default is percent-friendly (1%). Values > 1 are still treated as pixels.
|
|
2720
|
+
pipMargin: compositeOptions?.pipMargin ?? 0.01,
|
|
2721
|
+
...compositeOptions?.onNvr !== void 0 ? { onNvr: compositeOptions.onNvr } : {},
|
|
2722
|
+
...forceH264 !== void 0 ? { forceH264 } : defaultForceH264 ? { forceH264: true } : {},
|
|
2723
|
+
...compositeOptions?.assumeH264Inputs !== void 0 ? { assumeH264Inputs: compositeOptions.assumeH264Inputs } : {},
|
|
2724
|
+
...compositeOptions?.disableTranscode !== void 0 ? { disableTranscode: compositeOptions.disableTranscode } : {},
|
|
2725
|
+
logger
|
|
2726
|
+
});
|
|
2727
|
+
isCompositeStream = true;
|
|
2728
|
+
await videoStream.start();
|
|
2729
|
+
log(
|
|
2730
|
+
"composite stream started; waiting for keyframe to extract parameter sets"
|
|
2731
|
+
);
|
|
2732
|
+
} else {
|
|
2733
|
+
const ch = channel;
|
|
2734
|
+
const deviceId = options.deviceId;
|
|
2735
|
+
let streamClient;
|
|
2736
|
+
if (deviceId) {
|
|
2737
|
+
const sessionKey = `live:${deviceId}:ch${ch}:${profile}${variant && variant !== "default" ? `:${variant}` : ""}`;
|
|
2738
|
+
dedicatedSession = await baseApi.createDedicatedSession(
|
|
2739
|
+
sessionKey,
|
|
2740
|
+
logger
|
|
2741
|
+
);
|
|
2742
|
+
streamClient = dedicatedSession.client;
|
|
2743
|
+
logSessionsSummary(`dedicated session created: ${sessionKey}`);
|
|
2744
|
+
} else {
|
|
2745
|
+
streamClient = baseApi.client;
|
|
2746
|
+
}
|
|
2747
|
+
videoStream = new BaichuanVideoStream({
|
|
2748
|
+
client: streamClient,
|
|
2749
|
+
api: baseApi,
|
|
2750
|
+
channel: ch,
|
|
2751
|
+
profile,
|
|
2752
|
+
variant,
|
|
2753
|
+
logger
|
|
2754
|
+
});
|
|
2755
|
+
await videoStream.start();
|
|
2756
|
+
log(
|
|
2757
|
+
`stream started (ch=${ch} profile=${profile}${deviceId ? ` dedicated=${deviceId}` : ""})`
|
|
2758
|
+
);
|
|
2759
|
+
}
|
|
2760
|
+
const waitForKeyframe = async () => {
|
|
2761
|
+
if (isCompositeStream) {
|
|
2762
|
+
return await new Promise((resolve, reject) => {
|
|
2763
|
+
const timeout = setTimeout(() => {
|
|
2764
|
+
cleanup();
|
|
2765
|
+
reject(
|
|
2766
|
+
new Error(
|
|
2767
|
+
`Timeout waiting for keyframe on composite stream profile=${profile}`
|
|
2768
|
+
)
|
|
2769
|
+
);
|
|
2770
|
+
}, keyframeTimeoutMs);
|
|
2771
|
+
const onError = (e) => {
|
|
2772
|
+
cleanup();
|
|
2773
|
+
reject(e instanceof Error ? e : new Error(String(e)));
|
|
2774
|
+
};
|
|
2775
|
+
const onFrame = (frame) => {
|
|
2776
|
+
const videoType = "H264";
|
|
2777
|
+
try {
|
|
2778
|
+
const { sps, pps, profileLevelId } = extractH264ParamSetsFromAccessUnit(frame);
|
|
2779
|
+
if (!sps || !pps) {
|
|
2780
|
+
return;
|
|
2781
|
+
}
|
|
2782
|
+
cleanup();
|
|
2783
|
+
resolve({
|
|
2784
|
+
videoType,
|
|
2785
|
+
accessUnit: frame,
|
|
2786
|
+
...profileLevelId ? { profileLevelId } : {},
|
|
2787
|
+
h264: { sps, pps }
|
|
2788
|
+
});
|
|
2789
|
+
} catch (e) {
|
|
2790
|
+
return;
|
|
2791
|
+
}
|
|
2792
|
+
};
|
|
2793
|
+
const onClose = () => {
|
|
2794
|
+
cleanup();
|
|
2795
|
+
reject(
|
|
2796
|
+
new Error(
|
|
2797
|
+
`Composite stream closed before keyframe (profile=${profile})`
|
|
2798
|
+
)
|
|
2799
|
+
);
|
|
2800
|
+
};
|
|
2801
|
+
const cleanup = () => {
|
|
2802
|
+
clearTimeout(timeout);
|
|
2803
|
+
videoStream.removeListener(
|
|
2804
|
+
"error",
|
|
2805
|
+
onError
|
|
2806
|
+
);
|
|
2807
|
+
videoStream.removeListener(
|
|
2808
|
+
"videoFrame",
|
|
2809
|
+
onFrame
|
|
2810
|
+
);
|
|
2811
|
+
videoStream.removeListener(
|
|
2812
|
+
"close",
|
|
2813
|
+
onClose
|
|
2814
|
+
);
|
|
2815
|
+
};
|
|
2816
|
+
videoStream.on("error", onError);
|
|
2817
|
+
videoStream.on(
|
|
2818
|
+
"videoFrame",
|
|
2819
|
+
onFrame
|
|
2820
|
+
);
|
|
2821
|
+
videoStream.on("close", onClose);
|
|
2822
|
+
});
|
|
2823
|
+
} else {
|
|
2824
|
+
return await new Promise((resolve, reject) => {
|
|
2825
|
+
const timeout = setTimeout(() => {
|
|
2826
|
+
cleanup();
|
|
2827
|
+
reject(
|
|
2828
|
+
new Error(
|
|
2829
|
+
`Timeout waiting for keyframe on native stream channel=${channel} profile=${profile}`
|
|
2830
|
+
)
|
|
2831
|
+
);
|
|
2832
|
+
}, keyframeTimeoutMs);
|
|
2833
|
+
const onError = (e) => {
|
|
2834
|
+
cleanup();
|
|
2835
|
+
reject(e instanceof Error ? e : new Error(String(e)));
|
|
2836
|
+
};
|
|
2837
|
+
const onAu = (au) => {
|
|
2838
|
+
if (!au?.isKeyframe) return;
|
|
2839
|
+
const videoType = au.videoType;
|
|
2840
|
+
const accessUnit = au.data;
|
|
2841
|
+
if (videoType === "H264") {
|
|
2842
|
+
const { sps: sps2, pps: pps2, profileLevelId } = extractH264ParamSetsFromAccessUnit(accessUnit);
|
|
2843
|
+
if (!sps2 || !pps2) return;
|
|
2844
|
+
cleanup();
|
|
2845
|
+
resolve({
|
|
2846
|
+
videoType,
|
|
2847
|
+
accessUnit,
|
|
2848
|
+
...profileLevelId ? { profileLevelId } : {},
|
|
2849
|
+
h264: { sps: sps2, pps: pps2 }
|
|
2850
|
+
});
|
|
2851
|
+
return;
|
|
2852
|
+
}
|
|
2853
|
+
const { vps, sps, pps } = extractH265ParamSetsFromAccessUnit(accessUnit);
|
|
2854
|
+
if (!vps || !sps || !pps) return;
|
|
2855
|
+
cleanup();
|
|
2856
|
+
resolve({ videoType, accessUnit, h265: { vps, sps, pps } });
|
|
2857
|
+
};
|
|
2858
|
+
const cleanup = () => {
|
|
2859
|
+
clearTimeout(timeout);
|
|
2860
|
+
videoStream.removeListener(
|
|
2861
|
+
"error",
|
|
2862
|
+
onError
|
|
2863
|
+
);
|
|
2864
|
+
videoStream.removeListener(
|
|
2865
|
+
"videoAccessUnit",
|
|
2866
|
+
onAu
|
|
2867
|
+
);
|
|
2868
|
+
};
|
|
2869
|
+
videoStream.on("error", onError);
|
|
2870
|
+
videoStream.on(
|
|
2871
|
+
"videoAccessUnit",
|
|
2872
|
+
onAu
|
|
2873
|
+
);
|
|
2874
|
+
});
|
|
2875
|
+
}
|
|
2876
|
+
};
|
|
2877
|
+
let keyframe;
|
|
2878
|
+
try {
|
|
2879
|
+
keyframe = await waitForKeyframe();
|
|
2880
|
+
} catch (e) {
|
|
2881
|
+
try {
|
|
2882
|
+
await videoStream.stop();
|
|
2883
|
+
} catch {
|
|
2884
|
+
}
|
|
2885
|
+
if (closeApiOnTeardown) {
|
|
2886
|
+
await Promise.allSettled(
|
|
2887
|
+
Array.from(apisToClose).map(async (a) => {
|
|
2888
|
+
try {
|
|
2889
|
+
await a.close();
|
|
2890
|
+
} catch {
|
|
2891
|
+
}
|
|
2892
|
+
})
|
|
2893
|
+
);
|
|
2894
|
+
} else {
|
|
2895
|
+
const graceMs = isComposite ? 5e3 : 0;
|
|
2896
|
+
for (const a of Array.from(apisToClose)) {
|
|
2897
|
+
try {
|
|
2898
|
+
a?.client?.requestIdleDisconnectSoon?.(
|
|
2899
|
+
"rfc4571_teardown",
|
|
2900
|
+
graceMs
|
|
2901
|
+
);
|
|
2902
|
+
} catch {
|
|
2903
|
+
}
|
|
2904
|
+
}
|
|
2905
|
+
}
|
|
2906
|
+
throw e;
|
|
2907
|
+
}
|
|
2908
|
+
if (expectedVideoType && keyframe.videoType !== expectedVideoType) {
|
|
2909
|
+
log(
|
|
2910
|
+
`expectedVideoType mismatch (expected=${expectedVideoType} actual=${keyframe.videoType})`
|
|
2911
|
+
);
|
|
2912
|
+
}
|
|
2913
|
+
let fps = 25;
|
|
2914
|
+
try {
|
|
2915
|
+
if (isComposite) {
|
|
2916
|
+
const widerChannel = compositeOptions?.widerChannel ?? 0;
|
|
2917
|
+
const widerApi = resolvedCompositeApis?.widerApi ?? baseApi;
|
|
2918
|
+
const metadata = await widerApi.getStreamMetadata(widerChannel);
|
|
2919
|
+
const streams = Array.isArray(metadata) ? metadata : Array.isArray(metadata?.streams) ? metadata.streams : [];
|
|
2920
|
+
const requested = parseCompositeFromRequestedId(requestedId);
|
|
2921
|
+
const effectiveTeleProfile = requested?.teleProfile ?? profile;
|
|
2922
|
+
let widerMainIsH264 = false;
|
|
2923
|
+
try {
|
|
2924
|
+
const main = streams.find((s) => s?.profile === "main");
|
|
2925
|
+
const enc = typeof main?.videoEncType === "string" ? main.videoEncType.toLowerCase() : "";
|
|
2926
|
+
widerMainIsH264 = enc.includes("264");
|
|
2927
|
+
} catch {
|
|
2928
|
+
}
|
|
2929
|
+
const widerProfile = requested?.widerProfile ?? (effectiveTeleProfile === "ext" ? "ext" : effectiveTeleProfile === "main" && widerMainIsH264 ? "main" : "sub");
|
|
2930
|
+
const stream = streams.find((s) => s?.profile === widerProfile);
|
|
2931
|
+
const fr = Number(stream?.frameRate);
|
|
2932
|
+
if (Number.isFinite(fr) && fr > 0) fps = fr;
|
|
2933
|
+
} else {
|
|
2934
|
+
const metadata = await baseApi.getStreamMetadata(channel);
|
|
2935
|
+
const streams = Array.isArray(metadata) ? metadata : Array.isArray(metadata?.streams) ? metadata.streams : [];
|
|
2936
|
+
const stream = streams.find((s) => s?.profile === profile);
|
|
2937
|
+
const fr = Number(stream?.frameRate);
|
|
2938
|
+
if (Number.isFinite(fr) && fr > 0) fps = fr;
|
|
2939
|
+
}
|
|
2940
|
+
} catch {
|
|
2941
|
+
}
|
|
2942
|
+
let audio;
|
|
2943
|
+
const tryPrimeAudio = async () => {
|
|
2944
|
+
return await new Promise((resolve) => {
|
|
2945
|
+
let sawAnyAudio = false;
|
|
2946
|
+
let debugLogsLeft = 3;
|
|
2947
|
+
const audioPrimeTimeoutMs = isCompositeStream ? Math.min(1e4, keyframeTimeoutMs) : 5e3;
|
|
2948
|
+
const timeout = setTimeout(() => {
|
|
2949
|
+
cleanup();
|
|
2950
|
+
if (!sawAnyAudio) {
|
|
2951
|
+
resolve(void 0);
|
|
2952
|
+
return;
|
|
2953
|
+
}
|
|
2954
|
+
const hint = aacAudioHint ?? { sampleRate: 8e3, channels: 1 };
|
|
2955
|
+
const configHex = buildAacAudioSpecificConfigHex({
|
|
2956
|
+
sampleRate: hint.sampleRate,
|
|
2957
|
+
channels: hint.channels
|
|
2958
|
+
});
|
|
2959
|
+
if (!configHex) {
|
|
2960
|
+
logger.warn(
|
|
2961
|
+
`Native audio frames seen but could not derive AAC config (hint sampleRate=${hint.sampleRate} channels=${hint.channels}); cannot advertise audio track.`
|
|
2962
|
+
);
|
|
2963
|
+
resolve(void 0);
|
|
2964
|
+
return;
|
|
2965
|
+
}
|
|
2966
|
+
logger.warn(
|
|
2967
|
+
`Native audio frames appear to be raw AAC (no ADTS); advertising AAC using hint sampleRate=${hint.sampleRate} channels=${hint.channels}.`
|
|
2968
|
+
);
|
|
2969
|
+
resolve({
|
|
2970
|
+
sampleRate: hint.sampleRate,
|
|
2971
|
+
channels: hint.channels,
|
|
2972
|
+
configHex,
|
|
2973
|
+
mode: "raw"
|
|
2974
|
+
});
|
|
2975
|
+
}, audioPrimeTimeoutMs);
|
|
2976
|
+
const onAudio = (frame) => {
|
|
2977
|
+
sawAnyAudio = true;
|
|
2978
|
+
const parsed = parseAdtsHeader(frame);
|
|
2979
|
+
if (!parsed) {
|
|
2980
|
+
if (debugLogsLeft-- > 0) {
|
|
2981
|
+
const head = frame.subarray(0, Math.min(16, frame.length)).toString("hex");
|
|
2982
|
+
logger.warn(
|
|
2983
|
+
`Native audioFrame not ADTS: len=${frame.length} head=${head}`
|
|
2984
|
+
);
|
|
2985
|
+
}
|
|
2986
|
+
return;
|
|
2987
|
+
}
|
|
2988
|
+
cleanup();
|
|
2989
|
+
resolve({
|
|
2990
|
+
sampleRate: parsed.sampleRate,
|
|
2991
|
+
channels: parsed.channels,
|
|
2992
|
+
configHex: parsed.configHex,
|
|
2993
|
+
mode: "adts"
|
|
2994
|
+
});
|
|
2995
|
+
};
|
|
2996
|
+
const cleanup = () => {
|
|
2997
|
+
clearTimeout(timeout);
|
|
2998
|
+
videoStream?.removeListener?.(
|
|
2999
|
+
"audioFrame",
|
|
3000
|
+
onAudio
|
|
3001
|
+
);
|
|
3002
|
+
};
|
|
3003
|
+
videoStream?.on?.("audioFrame", onAudio);
|
|
3004
|
+
});
|
|
3005
|
+
};
|
|
3006
|
+
audio = await tryPrimeAudio();
|
|
3007
|
+
const video = {
|
|
3008
|
+
videoType: keyframe.videoType,
|
|
3009
|
+
payloadType: videoPayloadType,
|
|
3010
|
+
...keyframe.videoType === "H264" ? {
|
|
3011
|
+
h264: {
|
|
3012
|
+
sps: keyframe.h264.sps,
|
|
3013
|
+
pps: keyframe.h264.pps,
|
|
3014
|
+
...keyframe.profileLevelId ? { profileLevelId: keyframe.profileLevelId } : {}
|
|
3015
|
+
}
|
|
3016
|
+
} : {
|
|
3017
|
+
h265: {
|
|
3018
|
+
vps: keyframe.h265.vps,
|
|
3019
|
+
sps: keyframe.h265.sps,
|
|
3020
|
+
pps: keyframe.h265.pps
|
|
3021
|
+
}
|
|
3022
|
+
}
|
|
3023
|
+
};
|
|
3024
|
+
const aacAudio = audio ? {
|
|
3025
|
+
codec: "aac",
|
|
3026
|
+
payloadType: audioPayloadType,
|
|
3027
|
+
sampleRate: audio.sampleRate,
|
|
3028
|
+
channels: audio.channels,
|
|
3029
|
+
configHex: audio.configHex
|
|
3030
|
+
} : void 0;
|
|
3031
|
+
const sdp = buildRfc4571Sdp(video, aacAudio);
|
|
3032
|
+
const makeMuxer = () => new Rfc4571Muxer(
|
|
3033
|
+
logger,
|
|
3034
|
+
videoPayloadType,
|
|
3035
|
+
aacAudio ? audioPayloadType : void 0,
|
|
3036
|
+
fps
|
|
3037
|
+
);
|
|
3038
|
+
let muxer = makeMuxer();
|
|
3039
|
+
log(
|
|
3040
|
+
`ready (video=${keyframe.videoType} fps=${fps}${aacAudio ? ` audio=aac/${aacAudio.sampleRate}/${aacAudio.channels}` : " audio=none"})`
|
|
3041
|
+
);
|
|
3042
|
+
let rfcClients = 0;
|
|
3043
|
+
const sockets = /* @__PURE__ */ new Set();
|
|
3044
|
+
let idleTeardownTimer;
|
|
3045
|
+
let tearingDown = false;
|
|
3046
|
+
let restarting = false;
|
|
3047
|
+
let lastActivityMs = Date.now();
|
|
3048
|
+
const touchActivity = () => {
|
|
3049
|
+
lastActivityMs = Date.now();
|
|
3050
|
+
};
|
|
3051
|
+
const cancelIdleTeardown = () => {
|
|
3052
|
+
if (!idleTeardownTimer) return;
|
|
3053
|
+
clearTimeout(idleTeardownTimer);
|
|
3054
|
+
idleTeardownTimer = void 0;
|
|
3055
|
+
};
|
|
3056
|
+
let uptimeTimer;
|
|
3057
|
+
const stopUptimeMonitor = () => {
|
|
3058
|
+
if (!uptimeTimer) return;
|
|
3059
|
+
clearInterval(uptimeTimer);
|
|
3060
|
+
uptimeTimer = void 0;
|
|
3061
|
+
};
|
|
3062
|
+
const startUptimeMonitor = () => {
|
|
3063
|
+
if (!uptimeRestartMs || uptimeRestartMs <= 0) return;
|
|
3064
|
+
if (uptimeTimer) return;
|
|
3065
|
+
const tickMs = Math.max(
|
|
3066
|
+
250,
|
|
3067
|
+
Math.min(1e3, Math.floor(uptimeRestartMs / 2))
|
|
3068
|
+
);
|
|
3069
|
+
uptimeTimer = setInterval(() => {
|
|
3070
|
+
if (tearingDown || restarting) return;
|
|
3071
|
+
const idleFor = Date.now() - lastActivityMs;
|
|
3072
|
+
if (idleFor < uptimeRestartMs) return;
|
|
3073
|
+
restart(
|
|
3074
|
+
new Error(
|
|
3075
|
+
`No stream activity for ${idleFor}ms (threshold=${uptimeRestartMs}ms)`
|
|
3076
|
+
)
|
|
3077
|
+
).catch(() => {
|
|
3078
|
+
});
|
|
3079
|
+
}, tickMs);
|
|
3080
|
+
};
|
|
3081
|
+
const scheduleIdleTeardown = (closeFn) => {
|
|
3082
|
+
if (!idleTeardownMs) return;
|
|
3083
|
+
if (idleTeardownTimer) return;
|
|
3084
|
+
idleTeardownTimer = setTimeout(() => {
|
|
3085
|
+
idleTeardownTimer = void 0;
|
|
3086
|
+
if (rfcClients === 0)
|
|
3087
|
+
closeFn(new Error("No RFC4571 clients (idle)")).catch(() => {
|
|
3088
|
+
});
|
|
3089
|
+
}, idleTeardownMs);
|
|
3090
|
+
};
|
|
3091
|
+
const server = netImpl.createServer();
|
|
3092
|
+
const restart = async (reason) => {
|
|
3093
|
+
if (tearingDown) return;
|
|
3094
|
+
if (restarting) return;
|
|
3095
|
+
restarting = true;
|
|
3096
|
+
touchActivity();
|
|
3097
|
+
cancelIdleTeardown();
|
|
3098
|
+
const message = reason?.message || reason?.toString?.() || reason;
|
|
3099
|
+
const address2 = server.address();
|
|
3100
|
+
const addrStr = address2 && typeof address2 !== "string" ? `${address2.address}:${address2.port}` : "unbound";
|
|
3101
|
+
if (message)
|
|
3102
|
+
log(
|
|
3103
|
+
`uptime watchdog: restarting (addr=${addrStr} clients=${rfcClients} reason=${message})`
|
|
3104
|
+
);
|
|
3105
|
+
else
|
|
3106
|
+
log(
|
|
3107
|
+
`uptime watchdog: restarting (addr=${addrStr} clients=${rfcClients})`
|
|
3108
|
+
);
|
|
3109
|
+
for (const s of Array.from(sockets)) {
|
|
3110
|
+
try {
|
|
3111
|
+
s.destroy();
|
|
3112
|
+
} catch {
|
|
3113
|
+
}
|
|
3114
|
+
}
|
|
3115
|
+
sockets.clear();
|
|
3116
|
+
try {
|
|
3117
|
+
muxer.close();
|
|
3118
|
+
} catch {
|
|
3119
|
+
}
|
|
3120
|
+
muxer = makeMuxer();
|
|
3121
|
+
try {
|
|
3122
|
+
await videoStream.stop();
|
|
3123
|
+
} catch {
|
|
3124
|
+
}
|
|
3125
|
+
try {
|
|
3126
|
+
await videoStream.start();
|
|
3127
|
+
} catch (e) {
|
|
3128
|
+
restarting = false;
|
|
3129
|
+
close(e).catch(() => {
|
|
3130
|
+
});
|
|
3131
|
+
return;
|
|
3132
|
+
}
|
|
3133
|
+
if (isCompositeStream) {
|
|
3134
|
+
}
|
|
3135
|
+
restarting = false;
|
|
3136
|
+
touchActivity();
|
|
3137
|
+
log("uptime watchdog: restart complete");
|
|
3138
|
+
if (rfcClients === 0) scheduleIdleTeardown(close);
|
|
3139
|
+
};
|
|
3140
|
+
const close = async (reason) => {
|
|
3141
|
+
if (tearingDown) return;
|
|
3142
|
+
tearingDown = true;
|
|
3143
|
+
stopUptimeMonitor();
|
|
3144
|
+
cancelIdleTeardown();
|
|
3145
|
+
const reasonStr = reason?.message || reason?.toString?.() || reason || "requested";
|
|
3146
|
+
muxer.close();
|
|
3147
|
+
try {
|
|
3148
|
+
await videoStream.stop();
|
|
3149
|
+
} catch {
|
|
3150
|
+
}
|
|
3151
|
+
if (dedicatedSession) {
|
|
3152
|
+
try {
|
|
3153
|
+
await dedicatedSession.release();
|
|
3154
|
+
logSessionsSummary("dedicated session released");
|
|
3155
|
+
} catch {
|
|
3156
|
+
}
|
|
3157
|
+
}
|
|
3158
|
+
if (closeApiOnTeardown) {
|
|
3159
|
+
await Promise.allSettled(
|
|
3160
|
+
Array.from(apisToClose).map(async (a) => {
|
|
3161
|
+
try {
|
|
3162
|
+
await a.close();
|
|
3163
|
+
} catch {
|
|
3164
|
+
}
|
|
3165
|
+
})
|
|
3166
|
+
);
|
|
3167
|
+
}
|
|
3168
|
+
try {
|
|
3169
|
+
server.close();
|
|
3170
|
+
} catch {
|
|
3171
|
+
}
|
|
3172
|
+
log(`teardown (${reasonStr})`);
|
|
3173
|
+
};
|
|
3174
|
+
server.on("connection", (socket) => {
|
|
3175
|
+
touchActivity();
|
|
3176
|
+
const remote = `${socket.remoteAddress ?? "unknown"}:${socket.remotePort ?? "unknown"}`;
|
|
3177
|
+
const setupClient = () => {
|
|
3178
|
+
rfcClients++;
|
|
3179
|
+
cancelIdleTeardown();
|
|
3180
|
+
sockets.add(socket);
|
|
3181
|
+
socket.on("data", () => touchActivity());
|
|
3182
|
+
try {
|
|
3183
|
+
const origWrite = socket.write.bind(socket);
|
|
3184
|
+
socket.write = (...args) => {
|
|
3185
|
+
touchActivity();
|
|
3186
|
+
return origWrite(...args);
|
|
3187
|
+
};
|
|
3188
|
+
} catch {
|
|
3189
|
+
}
|
|
3190
|
+
muxer.addClient(socket);
|
|
3191
|
+
log(`client connected (${remote} total=${rfcClients})`);
|
|
3192
|
+
};
|
|
3193
|
+
if (!requireAuth) {
|
|
3194
|
+
setupClient();
|
|
3195
|
+
} else {
|
|
3196
|
+
let authenticated = false;
|
|
3197
|
+
let authBuffer = Buffer.alloc(0);
|
|
3198
|
+
const authTimeout = setTimeout(() => {
|
|
3199
|
+
if (!authenticated) {
|
|
3200
|
+
log(`client authentication timeout (remote=${remote})`);
|
|
3201
|
+
socket.destroy();
|
|
3202
|
+
}
|
|
3203
|
+
}, 5e3);
|
|
3204
|
+
const onData = (data) => {
|
|
3205
|
+
touchActivity();
|
|
3206
|
+
if (!authenticated) {
|
|
3207
|
+
authBuffer = Buffer.concat([authBuffer, data]);
|
|
3208
|
+
const authString = authBuffer.toString("utf8");
|
|
3209
|
+
const authMatch = authString.match(/^([^:]+):([^\n]+)\n/);
|
|
3210
|
+
if (authMatch) {
|
|
3211
|
+
const [, clientUsername, clientPassword] = authMatch;
|
|
3212
|
+
if (clientUsername === username && clientPassword === password) {
|
|
3213
|
+
authenticated = true;
|
|
3214
|
+
clearTimeout(authTimeout);
|
|
3215
|
+
setupClient();
|
|
3216
|
+
const authLineLength = authMatch[0].length;
|
|
3217
|
+
const remainingData = authBuffer.subarray(authLineLength);
|
|
3218
|
+
socket.removeListener("data", onData);
|
|
3219
|
+
socket.on("data", () => touchActivity());
|
|
3220
|
+
if (remainingData.length > 0) {
|
|
3221
|
+
socket.emit("data", remainingData);
|
|
3222
|
+
}
|
|
3223
|
+
} else {
|
|
3224
|
+
log(`client authentication failed (remote=${remote})`);
|
|
3225
|
+
socket.destroy();
|
|
3226
|
+
return;
|
|
3227
|
+
}
|
|
3228
|
+
} else if (authBuffer.length > 1024) {
|
|
3229
|
+
log(`client authentication buffer overflow (remote=${remote})`);
|
|
3230
|
+
socket.destroy();
|
|
3231
|
+
return;
|
|
3232
|
+
}
|
|
3233
|
+
}
|
|
3234
|
+
};
|
|
3235
|
+
socket.on("data", onData);
|
|
3236
|
+
}
|
|
3237
|
+
let counted = true;
|
|
3238
|
+
const dec = () => {
|
|
3239
|
+
if (!counted) return;
|
|
3240
|
+
counted = false;
|
|
3241
|
+
rfcClients = Math.max(0, rfcClients - 1);
|
|
3242
|
+
sockets.delete(socket);
|
|
3243
|
+
log(`client disconnected (${remote} total=${rfcClients})`);
|
|
3244
|
+
if (rfcClients === 0) scheduleIdleTeardown(close);
|
|
3245
|
+
};
|
|
3246
|
+
socket.once("close", dec);
|
|
3247
|
+
socket.once("error", dec);
|
|
3248
|
+
});
|
|
3249
|
+
if (isCompositeStream) {
|
|
3250
|
+
videoStream.on(
|
|
3251
|
+
"videoFrame",
|
|
3252
|
+
(frame) => {
|
|
3253
|
+
touchActivity();
|
|
3254
|
+
try {
|
|
3255
|
+
let isKeyframe = false;
|
|
3256
|
+
try {
|
|
3257
|
+
for (let i = 0; i < frame.length - 4; i++) {
|
|
3258
|
+
if (frame[i] === 0 && frame[i + 1] === 0) {
|
|
3259
|
+
let nalStart = -1;
|
|
3260
|
+
if (frame[i + 2] === 1) {
|
|
3261
|
+
nalStart = i + 3;
|
|
3262
|
+
} else if (frame[i + 2] === 0 && frame[i + 3] === 1) {
|
|
3263
|
+
nalStart = i + 4;
|
|
3264
|
+
}
|
|
3265
|
+
if (nalStart >= 0 && nalStart < frame.length) {
|
|
3266
|
+
const nalType = (frame[nalStart] ?? 0) & 31;
|
|
3267
|
+
if (nalType === 5) {
|
|
3268
|
+
isKeyframe = true;
|
|
3269
|
+
break;
|
|
3270
|
+
}
|
|
3271
|
+
}
|
|
3272
|
+
}
|
|
3273
|
+
}
|
|
3274
|
+
} catch {
|
|
3275
|
+
}
|
|
3276
|
+
muxer.sendVideoAccessUnit("H264", frame, isKeyframe, void 0);
|
|
3277
|
+
} catch (e) {
|
|
3278
|
+
close(e).catch(() => {
|
|
3279
|
+
});
|
|
3280
|
+
}
|
|
3281
|
+
}
|
|
3282
|
+
);
|
|
3283
|
+
if (aacAudio) {
|
|
3284
|
+
videoStream.on(
|
|
3285
|
+
"audioFrame",
|
|
3286
|
+
(frame) => {
|
|
3287
|
+
touchActivity();
|
|
3288
|
+
try {
|
|
3289
|
+
if (audio?.mode === "adts") {
|
|
3290
|
+
muxer.sendAudioAdtsFrame(frame);
|
|
3291
|
+
} else {
|
|
3292
|
+
muxer.sendAudioAacRawFrame(frame);
|
|
3293
|
+
}
|
|
3294
|
+
} catch (e) {
|
|
3295
|
+
close(e).catch(() => {
|
|
3296
|
+
});
|
|
3297
|
+
}
|
|
3298
|
+
}
|
|
3299
|
+
);
|
|
3300
|
+
}
|
|
3301
|
+
} else {
|
|
3302
|
+
videoStream.on(
|
|
3303
|
+
"videoAccessUnit",
|
|
3304
|
+
(au) => {
|
|
3305
|
+
touchActivity();
|
|
3306
|
+
try {
|
|
3307
|
+
muxer.sendVideoAccessUnit(
|
|
3308
|
+
au.videoType,
|
|
3309
|
+
au.data,
|
|
3310
|
+
au.isKeyframe,
|
|
3311
|
+
au.microseconds
|
|
3312
|
+
);
|
|
3313
|
+
} catch (e) {
|
|
3314
|
+
close(e).catch(() => {
|
|
3315
|
+
});
|
|
3316
|
+
}
|
|
3317
|
+
}
|
|
3318
|
+
);
|
|
3319
|
+
if (aacAudio) {
|
|
3320
|
+
videoStream.on(
|
|
3321
|
+
"audioFrame",
|
|
3322
|
+
(frame) => {
|
|
3323
|
+
touchActivity();
|
|
3324
|
+
try {
|
|
3325
|
+
if (audio?.mode === "adts") {
|
|
3326
|
+
muxer.sendAudioAdtsFrame(frame);
|
|
3327
|
+
} else {
|
|
3328
|
+
muxer.sendAudioAacRawFrame(frame);
|
|
3329
|
+
}
|
|
3330
|
+
} catch (e) {
|
|
3331
|
+
close(e).catch(() => {
|
|
3332
|
+
});
|
|
3333
|
+
}
|
|
3334
|
+
}
|
|
3335
|
+
);
|
|
3336
|
+
}
|
|
3337
|
+
}
|
|
3338
|
+
videoStream.on("error", (e) => {
|
|
3339
|
+
if (restarting) return;
|
|
3340
|
+
close(e).catch(() => {
|
|
3341
|
+
});
|
|
3342
|
+
});
|
|
3343
|
+
videoStream.on("close", (e) => {
|
|
3344
|
+
if (restarting) return;
|
|
3345
|
+
close(e).catch(() => {
|
|
3346
|
+
});
|
|
3347
|
+
});
|
|
3348
|
+
await new Promise((resolve, reject) => {
|
|
3349
|
+
server.once("error", reject);
|
|
3350
|
+
server.listen(0, host, () => resolve());
|
|
3351
|
+
});
|
|
3352
|
+
const address = server.address();
|
|
3353
|
+
if (!address || typeof address === "string") {
|
|
3354
|
+
throw new Error("Failed to bind RFC TCP server");
|
|
3355
|
+
}
|
|
3356
|
+
const port = address.port;
|
|
3357
|
+
if (!port) throw new Error("Failed to bind RFC TCP server");
|
|
3358
|
+
log(`listening (addr=${host}:${port})`);
|
|
3359
|
+
const audioInfo = aacAudio ? {
|
|
3360
|
+
codec: "aac",
|
|
3361
|
+
sampleRate: aacAudio.sampleRate,
|
|
3362
|
+
channels: aacAudio.channels
|
|
3363
|
+
} : void 0;
|
|
3364
|
+
scheduleIdleTeardown(close);
|
|
3365
|
+
startUptimeMonitor();
|
|
3366
|
+
return {
|
|
3367
|
+
host,
|
|
3368
|
+
port,
|
|
3369
|
+
sdp,
|
|
3370
|
+
videoType: keyframe.videoType,
|
|
3371
|
+
...audioInfo ? { audio: audioInfo } : {},
|
|
3372
|
+
username,
|
|
3373
|
+
password,
|
|
3374
|
+
server,
|
|
3375
|
+
videoStream,
|
|
3376
|
+
close
|
|
3377
|
+
};
|
|
3378
|
+
}
|
|
3379
|
+
/**
 * Starts a recording replay on the camera/NVR and re-exposes it as an
 * RFC 4571 (RTP-over-TCP) server on an ephemeral local port.
 *
 * Flow: open the replay stream via `api.startRecordingReplayStream`, wait for
 * the first keyframe (to extract H.264 SPS/PPS or H.265 VPS/SPS/PPS for the
 * SDP), then mux all subsequent access units / audio frames to every
 * connected TCP client. The server tears itself down when the replay ends
 * and the last client disconnects.
 *
 * @param options - replay source, logging, bind host, payload types,
 *   optional `username`/`password` auth (newline-terminated "user:pass"
 *   preamble on the TCP connection), and teardown behavior.
 * @returns `{ host, port, sdp, videoType, audio?, username, password,
 *   server, videoStream, close }`; an `onReplayEnd` accessor property is
 *   also defined on the result.
 * @throws if no keyframe arrives within `keyframeTimeoutMs` or the TCP
 *   server fails to bind.
 */
async function createRfc4571TcpServerForReplay(options) {
  const {
    api,
    channel = 0,
    fileName,
    logger,
    host = "127.0.0.1",
    videoPayloadType = 96,
    audioPayloadType = 97,
    keyframeTimeoutMs = 15e3,
    closeApiOnTeardown = false,
    username,
    password,
    requireAuth = false,
    aacAudioHint,
    isNvr
  } = options;
  // Heuristic: recording file names containing "RecS03_" appear to denote
  // sub-stream recordings — TODO confirm against recorder naming scheme.
  const streamType = options.streamType ?? (fileName.includes("RecS03_") ? "subStream" : "mainStream");
  const deviceId = options.deviceId;
  // Tagged loggers so every line identifies channel + file being replayed.
  const log = (msg, ...args) => logger.log(
    `[RFC4571-Replay ch=${channel} file=${fileName}] ${msg}`,
    ...args
  );
  const warn = (msg, ...args) => logger.warn(
    `[RFC4571-Replay ch=${channel} file=${fileName}] ${msg}`,
    ...args
  );
  log(
    `starting replay: streamType=${streamType}${deviceId ? ` deviceId=${deviceId}` : ""}`
  );
  const replayParams = {
    channel,
    fileName,
    streamType,
    timeoutMs: keyframeTimeoutMs,
    logger
  };
  // Only pass optional fields through when explicitly provided, so the
  // downstream API can distinguish "unset" from a value.
  if (isNvr !== void 0) {
    replayParams.isNvr = isNvr;
  }
  if (deviceId !== void 0) {
    replayParams.deviceId = deviceId;
  }
  const { stream: videoStream, stop: stopReplay } = await api.startRecordingReplayStream(replayParams);
  // Audio parameters discovered opportunistically while waiting for the
  // keyframe; stays undefined if no usable audio is seen.
  let audio;
  // Resolves with the first keyframe whose parameter sets (SPS/PPS or
  // VPS/SPS/PPS) can be extracted; rejects on stream error or timeout.
  const waitForKeyframe = async () => {
    return await new Promise((resolve, reject) => {
      const timeout = setTimeout(() => {
        cleanup();
        reject(
          new Error(
            `Timeout waiting for keyframe in replay (file=${fileName})`
          )
        );
      }, keyframeTimeoutMs);
      const onError = (e) => {
        cleanup();
        reject(e instanceof Error ? e : new Error(String(e)));
      };
      const onAu = (au) => {
        if (!au?.isKeyframe) return;
        const videoType = au.videoType;
        const accessUnit = au.data;
        if (videoType === "H264") {
          // Keep waiting if this keyframe lacks in-band SPS/PPS.
          const { sps: sps2, pps: pps2, profileLevelId } = extractH264ParamSetsFromAccessUnit(accessUnit);
          if (!sps2 || !pps2) return;
          cleanup();
          resolve({
            videoType,
            accessUnit,
            ...profileLevelId ? { profileLevelId } : {},
            h264: { sps: sps2, pps: pps2 }
          });
          return;
        }
        // Non-H264 path: treated as H.265, which needs VPS as well.
        const { vps, sps, pps } = extractH265ParamSetsFromAccessUnit(accessUnit);
        if (!vps || !sps || !pps) return;
        cleanup();
        resolve({ videoType, accessUnit, h265: { vps, sps, pps } });
      };
      // Sniff audio parameters from the first parseable frame. ADTS headers
      // are preferred; if parsing throws and a hint was supplied, fall back
      // to a synthesized AudioSpecificConfig and treat frames as raw AAC.
      const onAudioFrame = (frame) => {
        if (audio) return;
        try {
          const adts = parseAdtsHeader(frame);
          if (adts) {
            audio = {
              sampleRate: adts.sampleRate,
              channels: adts.channels,
              configHex: adts.configHex,
              mode: "adts"
            };
            log(
              `detected audio via ADTS: sr=${audio.sampleRate} ch=${audio.channels}`
            );
          }
        } catch {
          if (aacAudioHint) {
            const configHex = buildAacAudioSpecificConfigHex({
              sampleRate: aacAudioHint.sampleRate,
              channels: aacAudioHint.channels
            });
            if (configHex) {
              audio = {
                sampleRate: aacAudioHint.sampleRate,
                channels: aacAudioHint.channels,
                configHex,
                mode: "raw"
              };
              log(
                `using AAC hint: sr=${audio.sampleRate} ch=${audio.channels}`
              );
            }
          }
        }
      };
      // Detach all temporary listeners; called on every exit path.
      const cleanup = () => {
        clearTimeout(timeout);
        videoStream.removeListener("error", onError);
        videoStream.removeListener("videoAccessUnit", onAu);
        videoStream.removeListener("audioFrame", onAudioFrame);
      };
      videoStream.on("error", onError);
      videoStream.on("videoAccessUnit", onAu);
      videoStream.on("audioFrame", onAudioFrame);
    });
  };
  let keyframe;
  try {
    keyframe = await waitForKeyframe();
  } catch (e) {
    // Keyframe never arrived: best-effort teardown, then propagate.
    try {
      await stopReplay();
    } catch {
    }
    if (closeApiOnTeardown) {
      try {
        await api.close();
      } catch {
      }
    }
    throw e;
  }
  log(`video detected: codec=${keyframe.videoType}`);
  // Video description fed to the SDP builder: codec-specific parameter sets
  // plus the RTP payload type chosen by the caller.
  const video = {
    videoType: keyframe.videoType,
    payloadType: videoPayloadType,
    ...keyframe.videoType === "H264" ? {
      h264: {
        sps: keyframe.h264.sps,
        pps: keyframe.h264.pps,
        ...keyframe.profileLevelId ? { profileLevelId: keyframe.profileLevelId } : {}
      }
    } : {
      h265: {
        vps: keyframe.h265.vps,
        sps: keyframe.h265.sps,
        pps: keyframe.h265.pps
      }
    }
  };
  const aacAudio = audio ? {
    codec: "aac",
    payloadType: audioPayloadType,
    sampleRate: audio.sampleRate,
    channels: audio.channels,
    configHex: audio.configHex
  } : void 0;
  const sdp = buildRfc4571Sdp(video, aacAudio);
  // Fixed nominal frame rate for the muxer's timing — assumed 25 fps for
  // replayed recordings; TODO confirm against actual recording metadata.
  const fps = 25;
  const muxer = new Rfc4571Muxer(
    logger,
    videoPayloadType,
    aacAudio ? audioPayloadType : void 0,
    fps
  );
  log(
    `SDP ready (video=${keyframe.videoType}/90000 pt=${videoPayloadType}${aacAudio ? `, audio=aac/${aacAudio.sampleRate}/${aacAudio.channels} pt=${audioPayloadType}` : ", audio=none"})`
  );
  // Server lifecycle state shared by all handlers below.
  let rfcClients = 0;
  const sockets = /* @__PURE__ */ new Set();
  let tearingDown = false;
  let replayEnded = false;
  let onReplayEndCallback;
  // Idempotent teardown: stop replay, destroy clients, close server, and
  // optionally close the owning API connection.
  const close = async (reason) => {
    if (tearingDown) return;
    tearingDown = true;
    const message = reason?.message || reason?.toString?.() || reason;
    if (message) log(`closing: ${message}`);
    else log("closing");
    try {
      await stopReplay();
    } catch {
    }
    for (const s of sockets) {
      try {
        s.destroy();
      } catch {
      }
    }
    sockets.clear();
    try {
      server.close();
    } catch {
    }
    if (closeApiOnTeardown) {
      try {
        await api.close();
      } catch {
      }
    }
  };
  const server = netImpl.createServer();
  server.on("connection", (socket) => {
    const remote = `${socket.remoteAddress}:${socket.remotePort}`;
    log(`client connected: ${remote}`);
    sockets.add(socket);
    rfcClients++;
    // Attaching the socket to the muxer starts the RFC4571 framing to it.
    const setupClient = () => {
      muxer.addClient(socket);
    };
    if (!requireAuth) {
      setupClient();
    } else {
      // Auth protocol: the client must send "username:password\n" within
      // 5 seconds, before any stream data flows.
      let authenticated = false;
      let authBuffer = Buffer.alloc(0);
      const authTimeout = setTimeout(() => {
        if (!authenticated) {
          log(`client authentication timeout (remote=${remote})`);
          socket.destroy();
        }
      }, 5e3);
      const onData = (data) => {
        if (!authenticated) {
          authBuffer = Buffer.concat([authBuffer, data]);
          const authString = authBuffer.toString("utf8");
          const authMatch = authString.match(/^([^:]+):([^\n]+)\n/);
          if (authMatch) {
            const [, clientUsername, clientPassword] = authMatch;
            if (clientUsername === username && clientPassword === password) {
              authenticated = true;
              clearTimeout(authTimeout);
              setupClient();
              socket.removeListener("data", onData);
            } else {
              log(`client authentication failed (remote=${remote})`);
              socket.destroy();
              return;
            }
          } else if (authBuffer.length > 1024) {
            // Cap the buffered preamble so a misbehaving client cannot
            // grow memory indefinitely.
            log(`client authentication buffer overflow (remote=${remote})`);
            socket.destroy();
            return;
          }
        }
      };
      socket.on("data", onData);
    }
    // Decrement the client count exactly once even if both "close" and
    // "error" fire for the same socket.
    let counted = true;
    const dec = () => {
      if (!counted) return;
      counted = false;
      rfcClients = Math.max(0, rfcClients - 1);
      sockets.delete(socket);
      log(`client disconnected (remote=${remote} clients=${rfcClients})`);
      if (rfcClients === 0 && replayEnded) {
        close("replay ended and no clients").catch(() => {
        });
      }
    };
    socket.once("close", dec);
    socket.once("error", (e) => {
      warn(`client socket error: ${remote}`, e?.message || String(e));
      dec();
    });
  });
  server.on("error", (e) => {
    warn("server error", e?.message || String(e));
    close(e).catch(() => {
    });
  });
  // Forward every access unit to the muxer; any muxer failure tears the
  // whole server down.
  videoStream.on("videoAccessUnit", (au) => {
    if (tearingDown) return;
    try {
      muxer.sendVideoAccessUnit(
        au.videoType,
        au.data,
        au.isKeyframe,
        au.microseconds
      );
    } catch (e) {
      close(e).catch(() => {
      });
    }
  });
  if (aacAudio) {
    videoStream.on("audioFrame", (frame) => {
      if (tearingDown) return;
      try {
        // Frames carry ADTS headers only when that is how audio was
        // detected; otherwise they are passed through as raw AAC.
        if (audio?.mode === "adts") {
          muxer.sendAudioAdtsFrame(frame);
        } else {
          muxer.sendAudioAacRawFrame(frame);
        }
      } catch (e) {
        close(e).catch(() => {
        });
      }
    });
  }
  videoStream.on("end", () => {
    log("replay ended naturally");
    replayEnded = true;
    onReplayEndCallback?.();
    // Keep serving connected clients; only tear down when none remain.
    if (rfcClients === 0) {
      close("replay ended").catch(() => {
      });
    }
  });
  videoStream.on("error", (e) => {
    close(e).catch(() => {
    });
  });
  videoStream.on("close", (e) => {
    // A close after a natural "end" is expected and not an error.
    if (!replayEnded) {
      close(e).catch(() => {
      });
    }
  });
  // Bind to an OS-assigned ephemeral port on the requested host.
  await new Promise((resolve, reject) => {
    server.once("error", reject);
    server.listen(0, host, () => resolve());
  });
  const address = server.address();
  if (!address || typeof address === "string") {
    throw new Error("Failed to bind RFC TCP server for replay");
  }
  const port = address.port;
  if (!port) throw new Error("Failed to bind RFC TCP server for replay");
  log(`listening (addr=${host}:${port})`);
  const audioInfo = aacAudio ? {
    codec: "aac",
    sampleRate: aacAudio.sampleRate,
    channels: aacAudio.channels
  } : void 0;
  const result = {
    host,
    port,
    sdp,
    videoType: keyframe.videoType,
    ...audioInfo ? { audio: audioInfo } : {},
    username,
    password,
    server,
    videoStream,
    close
  };
  // Expose the replay-end callback as a live accessor so callers can
  // install it after this function returns and still catch the event.
  Object.defineProperty(result, "onReplayEnd", {
    get: () => onReplayEndCallback,
    set: (fn) => {
      onReplayEndCallback = fn;
    }
  });
  return result;
}
|
|
3743
|
+
|
|
3744
|
+
// src/rfc/replay-http-server.ts
|
|
3745
|
+
import http3 from "http";
|
|
3746
|
+
import { spawn as spawn4 } from "child_process";
|
|
3747
|
+
import { PassThrough } from "stream";
|
|
3748
|
+
/**
 * Starts a recording replay and exposes it as a chunked HTTP endpoint
 * serving fragmented MP4 (`/replay.mp4`).
 *
 * All replay frames are buffered into `frameCache` from the moment the
 * stream opens; on each HTTP request an ffmpeg process is spawned
 * (`-c:v copy`, fragmented MP4) — cached frames are written to its stdin
 * first, followed by live frames until the replay ends.
 *
 * Note: the whole replay is held in memory, and `ffmpegProcess` is a
 * single shared variable — concurrent HTTP requests would overwrite each
 * other's process handle. Presumably intended for a single viewer at a
 * time; TODO confirm.
 *
 * @param options - `{ api, channel, fileName, logger, ffmpegPath, host,
 *   isNvr, streamType? }`.
 * @returns `{ url, host, port, videoCodec, hasAudio, close }`.
 * @throws if no keyframe arrives within 30 s or the HTTP server fails to
 *   bind.
 */
async function createReplayHttpServer(options) {
  const {
    api,
    channel = 0,
    fileName,
    logger,
    ffmpegPath = "ffmpeg",
    host = "127.0.0.1",
    isNvr
  } = options;
  // Same file-name heuristic used by the RFC4571 replay server:
  // "RecS03_" appears to mark sub-stream recordings — TODO confirm.
  const streamType = options.streamType ?? (fileName.includes("RecS03_") ? "subStream" : "mainStream");
  // Log tags keep only the last 30 chars of the file name for brevity.
  const log = (msg) => logger.log(`[ReplayHTTP ch=${channel} file=${fileName.slice(-30)}] ${msg}`);
  const warn = (msg) => logger.warn(
    `[ReplayHTTP ch=${channel} file=${fileName.slice(-30)}] ${msg}`
  );
  log(`starting replay: streamType=${streamType}`);
  // Every frame of the replay is cached so late-connecting HTTP clients
  // can be served from the beginning.
  const frameCache = [];
  let replayEnded = false;
  // Assume H.264 until an H.265 access unit proves otherwise.
  let videoCodec = "h264";
  let hasAudio = false;
  let firstKeyframeReceived = false;
  const replayParams = {
    channel,
    fileName,
    streamType,
    timeoutMs: 3e4,
    logger
  };
  // Pass isNvr through only when the caller set it explicitly.
  if (isNvr !== void 0) {
    replayParams.isNvr = isNvr;
  }
  const { stream: videoStream, stop: stopReplay } = await api.startRecordingReplayStream(replayParams);
  const onVideoFrame = (au) => {
    if (!au?.data) return;
    const frame = {
      type: "video",
      data: au.data,
      isKeyframe: au.isKeyframe,
      videoType: au.videoType,
      timestamp: au.microseconds
    };
    frameCache.push(frame);
    if (au.videoType === "H265") {
      videoCodec = "h265";
    }
    if (au.isKeyframe && !firstKeyframeReceived) {
      firstKeyframeReceived = true;
      log(
        `first keyframe received, codec=${videoCodec}, cached=${frameCache.length} frames`
      );
    }
  };
  // Audio frames are cached too, but feedFrames below only ever writes
  // video frames to ffmpeg; hasAudio is purely informational in the result.
  const onAudioFrame = (frame) => {
    if (!frame?.length) return;
    hasAudio = true;
    frameCache.push({
      type: "audio",
      data: frame
    });
  };
  const onReplayEnd = () => {
    log(`replay ended, total frames cached: ${frameCache.length}`);
    replayEnded = true;
  };
  videoStream.on("videoAccessUnit", onVideoFrame);
  videoStream.on("audioFrame", onAudioFrame);
  videoStream.on("end", onReplayEnd);
  videoStream.on("close", () => {
    if (!replayEnded) {
      replayEnded = true;
    }
  });
  // Block until the first keyframe (polling every 50 ms, 30 s timeout),
  // so the codec is known before the HTTP server starts.
  await new Promise((resolve, reject) => {
    const timeout = setTimeout(() => {
      reject(new Error("Timeout waiting for first keyframe"));
    }, 3e4);
    const check = () => {
      if (firstKeyframeReceived) {
        clearTimeout(timeout);
        resolve();
      } else if (replayEnded) {
        clearTimeout(timeout);
        reject(new Error("Replay ended before first keyframe"));
      } else {
        setTimeout(check, 50);
      }
    };
    check();
  });
  log(
    `ready to serve: codec=${videoCodec}, hasAudio=${hasAudio}, cached=${frameCache.length} frames`
  );
  let httpServer = null;
  let ffmpegProcess = null;
  let closed = false;
  // Idempotent teardown: detach listeners, stop replay, kill ffmpeg,
  // close the HTTP server.
  const close = async () => {
    if (closed) return;
    closed = true;
    log("closing...");
    videoStream.removeListener("videoAccessUnit", onVideoFrame);
    videoStream.removeListener("audioFrame", onAudioFrame);
    videoStream.removeListener("end", onReplayEnd);
    try {
      await stopReplay();
    } catch {
    }
    if (ffmpegProcess) {
      try {
        ffmpegProcess.kill("SIGKILL");
      } catch {
      }
      ffmpegProcess = null;
    }
    if (httpServer) {
      httpServer.close();
      httpServer = null;
    }
  };
  httpServer = http3.createServer((req, res) => {
    if (closed) {
      res.writeHead(503);
      res.end("Server closed");
      return;
    }
    log(`HTTP request: ${req.method} ${req.url}`);
    // Chunked transfer: total length is unknown while ffmpeg remuxes.
    res.writeHead(200, {
      "Content-Type": "video/mp4",
      "Transfer-Encoding": "chunked",
      "Cache-Control": "no-cache, no-store",
      Connection: "keep-alive"
    });
    const outputStream = new PassThrough();
    // ffmpeg demuxer name differs from our internal codec label.
    const inputCodec = videoCodec === "h265" ? "hevc" : "h264";
    const ffmpegArgs = [
      "-hide_banner",
      "-loglevel",
      "error",
      // Video input
      "-f",
      inputCodec,
      "-i",
      "pipe:0",
      // Output settings - fragmented MP4 for streaming
      "-c:v",
      "copy",
      "-movflags",
      "frag_keyframe+empty_moov+default_base_moof",
      "-f",
      "mp4",
      "pipe:1"
    ];
    log(`spawning ffmpeg: ${ffmpegPath} ${ffmpegArgs.join(" ")}`);
    ffmpegProcess = spawn4(ffmpegPath, ffmpegArgs, {
      stdio: ["pipe", "pipe", "pipe"]
    });
    // ffmpeg stdout -> PassThrough -> HTTP response.
    ffmpegProcess.stdout?.pipe(outputStream).pipe(res);
    ffmpegProcess.stderr?.on("data", (data) => {
      const msg = data.toString().trim();
      if (msg) {
        warn(`ffmpeg: ${msg}`);
      }
    });
    ffmpegProcess.on("error", (err) => {
      warn(`ffmpeg error: ${err.message}`);
      res.end();
    });
    ffmpegProcess.on("close", (code) => {
      log(`ffmpeg exited with code ${code}`);
      res.end();
    });
    req.on("close", () => {
      log("client disconnected");
      if (ffmpegProcess) {
        try {
          ffmpegProcess.stdin?.end();
          ffmpegProcess.kill("SIGTERM");
        } catch {
        }
      }
    });
    // Drain the frame cache into ffmpeg stdin (respecting backpressure via
    // "drain"), then switch to forwarding live frames until replay end.
    const feedFrames = async () => {
      const stdin = ffmpegProcess?.stdin;
      if (!stdin) return;
      let frameIndex = 0;
      for (const frame of frameCache) {
        if (frame.type === "video" && frame.data) {
          try {
            const canWrite = stdin.write(frame.data);
            if (!canWrite) {
              await new Promise(
                (resolve) => stdin.once("drain", resolve)
              );
            }
          } catch (e) {
            warn(`error writing cached frame ${frameIndex}: ${e}`);
            break;
          }
        }
        frameIndex++;
      }
      log(`fed ${frameIndex} cached frames to ffmpeg`);
      const liveFrameHandler = (au) => {
        if (!au?.data || !stdin.writable) return;
        try {
          stdin.write(au.data);
        } catch {
        }
      };
      videoStream.on("videoAccessUnit", liveFrameHandler);
      // Closing stdin lets ffmpeg flush and exit, which ends the response.
      const endHandler = () => {
        log("replay finished, closing ffmpeg stdin");
        videoStream.removeListener("videoAccessUnit", liveFrameHandler);
        try {
          stdin.end();
        } catch {
        }
      };
      if (replayEnded) {
        endHandler();
      } else {
        videoStream.once("end", endHandler);
        videoStream.once("close", endHandler);
      }
    };
    feedFrames().catch((e) => {
      warn(`error feeding frames: ${e}`);
    });
  });
  // Bind to an OS-assigned ephemeral port on the requested host.
  await new Promise((resolve, reject) => {
    httpServer.once("error", reject);
    httpServer.listen(0, host, () => resolve());
  });
  const address = httpServer.address();
  if (!address || typeof address === "string") {
    throw new Error("Failed to bind HTTP server");
  }
  const port = address.port;
  const url = `http://${host}:${port}/replay.mp4`;
  log(`HTTP server listening: ${url}`);
  return {
    url,
    host,
    port,
    videoCodec,
    hasAudio,
    close
  };
}
|
|
3996
|
+
|
|
3997
|
+
// src/baichuan/stream/BaichuanHttpStreamServer.ts
|
|
3998
|
+
import { EventEmitter as EventEmitter2 } from "events";
|
|
3999
|
+
import { spawn as spawn5 } from "child_process";
|
|
4000
|
+
import * as http4 from "http";
|
|
4001
|
+
// Annex-B start-code prefixes that delimit H.264 NAL units in a raw bitstream.
var NAL_START_CODE_4B = Buffer.from([0, 0, 0, 1]);
var NAL_START_CODE_3B = Buffer.from([0, 0, 1]);
/**
 * Check whether a buffer begins with an Annex-B start code
 * (`00 00 00 01` or `00 00 01`). Buffers shorter than 4 bytes are
 * rejected outright, since a start code alone carries no payload.
 *
 * @param {Buffer} data - Candidate H.264 bitstream chunk.
 * @returns {boolean} true when the buffer starts with a start-code prefix.
 */
function hasAnnexBStart(data) {
  if (data.length < 4) {
    return false;
  }
  const startsWithFourByteCode = data.subarray(0, 4).equals(NAL_START_CODE_4B);
  const startsWithThreeByteCode = data.subarray(0, 3).equals(NAL_START_CODE_3B);
  return startsWithFourByteCode || startsWithThreeByteCode;
}
|
|
4007
|
+
/**
 * Split an Annex-B H.264 buffer into NAL unit payloads.
 *
 * Scans for every 3- or 4-byte start-code prefix and returns the byte
 * ranges between consecutive prefixes. Start codes themselves are NOT
 * included in the returned slices, and empty payloads (two adjacent
 * start codes) are dropped.
 *
 * @param {Buffer} annexB - Raw Annex-B bitstream.
 * @returns {Buffer[]} Zero-copy subarrays of `annexB`, one per NAL payload.
 */
function splitAnnexBNals(annexB) {
  // Pass 1: locate every start-code prefix (offset + prefix length).
  const markers = [];
  let i = 0;
  while (i < annexB.length - 3) {
    if (annexB[i] !== 0 || annexB[i + 1] !== 0) {
      i += 1;
      continue;
    }
    if (annexB[i + 2] === 1) {
      markers.push({ idx: i, len: 3 });
      i += 3;
    } else if (annexB[i + 2] === 0 && annexB[i + 3] === 1) {
      markers.push({ idx: i, len: 4 });
      i += 4;
    } else {
      i += 1;
    }
  }
  if (markers.length === 0) return [];
  // Pass 2: slice each payload, ending at the next start code (or EOF).
  const payloads = [];
  markers.forEach((marker, k) => {
    const from = marker.idx + marker.len;
    const next = markers[k + 1];
    const to = next ? next.idx : annexB.length;
    if (to > from) {
      payloads.push(annexB.subarray(from, to));
    }
  });
  return payloads;
}
|
|
4031
|
+
/**
 * Extract the H.264 NAL unit type from a payload (low 5 bits of the
 * first byte, per the NAL header layout).
 *
 * @param {Buffer} nalPayload - NAL payload WITHOUT its start code.
 * @returns {number|null} Type in 0..31, or null for an empty payload.
 */
function h264NalType(nalPayload) {
  if (nalPayload.length < 1) {
    return null;
  }
  const headerByte = nalPayload[0];
  // Defensive undefined check retained for index-access safety.
  return headerByte === void 0 ? null : headerByte & 0x1f;
}
|
|
4037
|
+
/**
 * Detect whether an Annex-B access unit contains an IDR slice
 * (NAL type 5), i.e. whether it is an H.264 keyframe.
 *
 * @param {Buffer} annexB - Raw Annex-B access unit.
 * @returns {boolean} true when any contained NAL is an IDR slice.
 */
function isH264KeyframeFromAnnexB(annexB) {
  // `some` short-circuits on the first IDR NAL, matching the original loop.
  return splitAnnexBNals(annexB).some((nal) => h264NalType(nal) === 5);
}
|
|
4045
|
+
/**
 * Serves a Baichuan camera video stream over plain HTTP as MPEG-TS.
 *
 * Pipeline: `videoStream` emits Annex-B H.264 frames -> frames are gated
 * (wait for first keyframe, inject cached SPS/PPS when missing) and piped
 * into an ffmpeg child process (`-c:v copy -f mpegts`) -> ffmpeg's stdout
 * is fanned out to every connected HTTP client.
 *
 * Events emitted: "client" (new HTTP client), "error", "close".
 */
var BaichuanHttpStreamServer = class extends EventEmitter2 {
  // Source of H.264 frames; must expose start()/stop() and emit
  // "videoAccessUnit" and/or "videoFrame".
  videoStream;
  // TCP port the HTTP server binds on (default 8080, see constructor).
  listenPort;
  // URL path clients request (default "/stream"; "/stream.ts" also accepted).
  path;
  logger;
  // Frame rate forced on the raw H.264 input via ffmpeg `-r` (default 25).
  inputFps;
  httpServer;
  ffmpegProcess;
  active = false;
  // Currently connected HTTP responses receiving the MPEG-TS output.
  clients = /* @__PURE__ */ new Set();
  // Listener attached to both "videoAccessUnit" and "videoFrame" while active.
  videoListener;
  // Once an access-unit object has been seen, raw-Buffer "videoFrame"
  // events are ignored to avoid double-feeding the same video.
  usingAccessUnit = false;
  // Frames are dropped until the first keyframe so ffmpeg starts cleanly.
  seenKeyframe = false;
  cachedSps = null;
  // payload without start code
  cachedPps = null;
  // payload without start code
  /**
   * @param {object} options
   * @param {object} options.videoStream - Baichuan video stream source.
   * @param {number} [options.listenPort=8080]
   * @param {string} [options.path="/stream"]
   * @param {number} [options.inputFps=25]
   * @param {object} [options.logger=console]
   */
  constructor(options) {
    super();
    this.videoStream = options.videoStream;
    this.listenPort = options.listenPort ?? 8080;
    this.path = options.path ?? "/stream";
    this.inputFps = options.inputFps ?? 25;
    this.logger = options.logger ?? console;
  }
  /**
   * Start HTTP stream server.
   * Starts an HTTP server that serves an MPEG-TS stream.
   *
   * Order of operations: start the source stream, bind the HTTP server
   * (127.0.0.1 only), spawn ffmpeg, then wire the frame listeners.
   * @throws {Error} if the server is already active or binding fails.
   */
  async start() {
    if (this.active) {
      throw new Error("HTTP stream server already active");
    }
    this.logger.info(`[BaichuanHttpStreamServer] Starting Baichuan video stream...`);
    await this.videoStream.start();
    this.logger.info(`[BaichuanHttpStreamServer] Baichuan video stream started`);
    this.httpServer = http4.createServer((req, res) => {
      // Accept both "/stream" and "/stream.ts"; anything else is a 404.
      if (req.url === this.path || req.url === `${this.path}.ts`) {
        this.logger.info(`[BaichuanHttpStreamServer] New client connected: ${req.socket.remoteAddress}`);
        this.clients.add(res);
        this.emit("client", req.socket.remoteAddress || "unknown");
        res.writeHead(200, {
          "Content-Type": "video/mp2t",
          "Cache-Control": "no-cache",
          "Connection": "keep-alive",
          "Access-Control-Allow-Origin": "*"
        });
        req.on("close", () => {
          this.clients.delete(res);
          this.logger.info(`[BaichuanHttpStreamServer] Client disconnected`);
        });
      } else {
        res.writeHead(404);
        res.end("Not Found");
      }
    });
    await new Promise((resolve, reject) => {
      // Loopback only: the stream is meant for local consumers.
      this.httpServer.listen(this.listenPort, "127.0.0.1", () => {
        this.logger.info(`[BaichuanHttpStreamServer] HTTP server listening on port ${this.listenPort}`);
        resolve();
      });
      this.httpServer.on("error", reject);
    });
    this.logger.info(`[BaichuanHttpStreamServer] Starting ffmpeg for H.264 -> MPEG-TS conversion...`);
    const ffmpeg = spawn5("ffmpeg", [
      "-hide_banner",
      // ffmpeg warnings often include non-fatal decode messages (e.g. decode_slice_header),
      // which we don't want to treat as application errors.
      "-loglevel",
      "error",
      // Force a known frame rate on raw H.264 input so the muxer gets valid PTS/DTS.
      "-r",
      String(this.inputFps),
      "-fflags",
      "+genpts",
      "-use_wallclock_as_timestamps",
      "1",
      "-f",
      "h264",
      // Input format (H.264 Annex-B)
      "-i",
      "pipe:0",
      // Read from stdin
      "-c:v",
      "copy",
      // Copy video codec (no re-encoding)
      "-muxpreload",
      "0",
      "-muxdelay",
      "0",
      "-f",
      "mpegts",
      // Output format MPEG-TS
      "pipe:1"
      // Write to stdout
    ], {
      stdio: ["pipe", "pipe", "pipe"]
    });
    this.ffmpegProcess = ffmpeg;
    this.logger.info(`[BaichuanHttpStreamServer] FFmpeg process started (PID: ${ffmpeg.pid})`);
    let frameCount = 0;
    // Writes one Annex-B chunk to ffmpeg stdin; silently drops chunks that
    // do not start with a start code. Write errors are logged and re-emitted.
    const writeToFfmpeg = (videoData) => {
      if (!hasAnnexBStart(videoData)) {
        return;
      }
      frameCount++;
      if (frameCount === 1) {
        this.logger.info(`[BaichuanHttpStreamServer] First video frame received (${videoData.length} bytes)`);
      }
      if (ffmpeg.stdin && !ffmpeg.stdin.destroyed) {
        try {
          ffmpeg.stdin.write(videoData);
        } catch (error) {
          this.logger.error(`[BaichuanHttpStreamServer] Error writing frame: ${error}`);
          this.emit("error", error instanceof Error ? error : new Error(String(error)));
        }
      }
    };
    // Reset per-start gating state so a restarted server behaves like new.
    this.seenKeyframe = false;
    this.usingAccessUnit = false;
    this.cachedSps = null;
    this.cachedPps = null;
    this.videoListener = (unit) => {
      // `unit` is either a raw Buffer ("videoFrame") or an access-unit
      // object with { data, isKeyframe } ("videoAccessUnit").
      const data = Buffer.isBuffer(unit) ? unit : unit?.data;
      const isKeyframe = Buffer.isBuffer(unit) ? isH264KeyframeFromAnnexB(data) : Boolean(unit?.isKeyframe);
      if (!Buffer.isBuffer(data)) return;
      if (!Buffer.isBuffer(unit)) {
        this.usingAccessUnit = true;
      } else if (this.usingAccessUnit) {
        // Prefer access units once seen; ignore duplicate raw-frame events.
        return;
      }
      // Cache the most recent SPS/PPS so they can be re-injected later.
      const nals = splitAnnexBNals(data);
      for (const nal of nals) {
        const t = h264NalType(nal);
        if (t === 7) this.cachedSps = nal;
        if (t === 8) this.cachedPps = nal;
      }
      if (!this.seenKeyframe) {
        // Drop everything before the first keyframe.
        if (!isKeyframe) return;
        this.seenKeyframe = true;
        this.logger.info(`[BaichuanHttpStreamServer] First keyframe received: starting ffmpeg feed`);
      }
      if (isKeyframe && this.cachedSps && this.cachedPps) {
        let hasSps = false;
        let hasPps = false;
        for (const nal of nals) {
          const t = h264NalType(nal);
          if (t === 7) hasSps = true;
          if (t === 8) hasPps = true;
        }
        if (!hasSps || !hasPps) {
          // Keyframe without parameter sets: prepend the cached SPS/PPS so
          // ffmpeg (and late joiners) can decode from this point.
          const patched = Buffer.concat([
            NAL_START_CODE_4B,
            this.cachedSps,
            NAL_START_CODE_4B,
            this.cachedPps,
            data
          ]);
          writeToFfmpeg(patched);
          return;
        }
      }
      writeToFfmpeg(data);
    };
    this.videoStream.on("videoAccessUnit", this.videoListener);
    this.videoStream.on("videoFrame", this.videoListener);
    // Fan MPEG-TS output out to every connected client; failed writes evict.
    ffmpeg.stdout.on("data", (data) => {
      for (const client of this.clients) {
        if (!client.destroyed) {
          try {
            client.write(data);
          } catch (error) {
            this.clients.delete(client);
          }
        }
      }
    });
    // NOTE(review): ffmpegOutput accumulates stderr but is never read in
    // this method — possibly retained for debugging; confirm before removal.
    let ffmpegOutput = "";
    ffmpeg.stderr.on("data", (data) => {
      const output = data.toString();
      ffmpegOutput += output;
      // Known transient decode messages are logged as warnings only.
      const isKnownNonFatal = output.includes("top block unavailable") || output.includes("error while decoding") || output.includes("decode_slice_header error") || output.includes("no frame") || output.includes("concealing") || output.includes("left block unavailable") || output.includes("bottom block unavailable");
      if (isKnownNonFatal) {
        this.logger.warn(`[BaichuanHttpStreamServer] FFmpeg decode warning: ${output.trim()}`);
        return;
      }
      // Messages indicating the pipeline itself is broken become "error" events.
      const isCriticalError = output.includes("Invalid data found") || output.includes("Error opening") || output.includes("Could not write header") || output.includes("Broken pipe") || output.includes("Connection refused") || output.includes("Immediate exit") || output.includes("Conversion failed");
      if (isCriticalError) {
        this.logger.error(`[BaichuanHttpStreamServer] FFmpeg critical error: ${output.trim()}`);
        this.emit("error", new Error(`FFmpeg error: ${output}`));
      } else {
        this.logger.warn(`[BaichuanHttpStreamServer] FFmpeg stderr: ${output.trim()}`);
      }
    });
    ffmpeg.on("close", (code) => {
      if (code !== 0) {
        this.logger.error(`[BaichuanHttpStreamServer] FFmpeg exited with code ${code}`);
        this.emit("error", new Error(`FFmpeg exited with code ${code}`));
      }
      // ffmpeg exiting (even cleanly) ends the stream.
      this.active = false;
      this.emit("close");
    });
    this.active = true;
  }
  /**
   * Get HTTP URL for this stream.
   * @returns {string} Loopback URL of the MPEG-TS endpoint.
   */
  getStreamUrl() {
    return `http://127.0.0.1:${this.listenPort}${this.path}.ts`;
  }
  /**
   * Stop HTTP stream server.
   *
   * Tear-down order: end client responses, stop the source stream, detach
   * frame listeners, terminate ffmpeg (SIGTERM, escalating to SIGKILL
   * after 1.5 s), then close the HTTP server. Safe to call when inactive.
   */
  async stop() {
    for (const client of this.clients) {
      if (!client.destroyed) {
        client.end();
      }
    }
    this.clients.clear();
    // Best-effort: source stop failures are deliberately ignored.
    try {
      await this.videoStream.stop();
    } catch {
    }
    if (this.videoListener) {
      this.videoStream.removeListener("videoAccessUnit", this.videoListener);
      this.videoStream.removeListener("videoFrame", this.videoListener);
    }
    this.videoListener = void 0;
    if (this.ffmpegProcess) {
      const proc = this.ffmpegProcess;
      try {
        proc.kill("SIGTERM");
      } catch {
      }
      await new Promise((resolve) => {
        // Escalate to SIGKILL if ffmpeg has not closed within 1.5 s.
        const t = setTimeout(() => {
          try {
            proc.kill("SIGKILL");
          } catch {
          }
          resolve();
        }, 1500);
        // unref so the timer cannot keep the process alive.
        t?.unref?.();
        proc.once("close", () => {
          clearTimeout(t);
          resolve();
        });
      });
    }
    this.ffmpegProcess = void 0;
    if (this.httpServer) {
      await new Promise((resolve) => {
        // Force-close lingering keep-alive sockets so close() can complete.
        this.httpServer?.closeAllConnections?.();
        this.httpServer?.closeIdleConnections?.();
        this.httpServer.close(() => resolve());
      });
      this.httpServer = void 0;
    }
    this.active = false;
  }
  /** @returns {boolean} whether the server is currently running. */
  isActive() {
    return this.active;
  }
};
|
|
4310
|
+
|
|
4311
|
+
// src/multifocal/compositeRtspServer.ts
|
|
4312
|
+
import { EventEmitter as EventEmitter3 } from "events";
|
|
4313
|
+
import { spawn as spawn6 } from "child_process";
|
|
4314
|
+
import * as net from "net";
|
|
4315
|
+
/**
 * Serves a composite (picture-in-picture) video stream from two camera
 * channels over RTSP, using an ffmpeg child process in `-rtsp_flags listen`
 * mode fed from a CompositeStream.
 *
 * Events emitted: "client", "clientDisconnected", "error", "close".
 */
var CompositeRtspServer = class extends EventEmitter3 {
  // Effective options after defaults (listenHost/listenPort/path) are merged.
  options;
  compositeStream = null;
  // net.Server used for connection tracking.
  rtspServer = null;
  ffmpegProcess = null;
  active = false;
  logger;
  // "host:port" identifiers of currently connected RTSP clients.
  connectedClients = /* @__PURE__ */ new Set();
  /**
   * @param {object} options - Must include api, widerChannel, teleChannel,
   *   widerProfile, teleProfile; optional pipPosition/pipSize/pipMargin,
   *   listenHost (default "127.0.0.1"), listenPort (default 8554),
   *   path (default "/composite"), logger (default console).
   */
  constructor(options) {
    super();
    this.options = {
      listenHost: "127.0.0.1",
      listenPort: 8554,
      path: "/composite",
      ...options
    };
    this.logger = options.logger ?? console;
  }
  /**
   * Start the composite RTSP server.
   *
   * Creates and starts the CompositeStream, then brings up the RTSP side.
   * On any failure the active flag is reset, "error" is emitted, and the
   * error is rethrown.
   * @throws {Error} if already active, or if startup fails.
   */
  async start() {
    if (this.active) {
      throw new Error("Composite RTSP server already active");
    }
    // Set active before the async work so concurrent start() calls throw.
    this.active = true;
    this.logger.log?.("[CompositeRtspServer] Starting composite RTSP server...");
    try {
      this.compositeStream = new CompositeStream({
        api: this.options.api,
        widerChannel: this.options.widerChannel,
        teleChannel: this.options.teleChannel,
        widerProfile: this.options.widerProfile,
        teleProfile: this.options.teleProfile,
        // PiP options are only forwarded when explicitly provided.
        ...this.options.pipPosition ? { pipPosition: this.options.pipPosition } : {},
        ...this.options.pipSize !== void 0 ? { pipSize: this.options.pipSize } : {},
        ...this.options.pipMargin !== void 0 ? { pipMargin: this.options.pipMargin } : {},
        logger: this.logger
      });
      this.compositeStream.on("error", (error) => {
        this.logger.error?.("[CompositeRtspServer] Composite stream error:", error);
        this.emit("error", error);
      });
      this.compositeStream.on("close", () => {
        this.logger.log?.("[CompositeRtspServer] Composite stream closed");
      });
      await this.compositeStream.start();
      await this.startRtspServer();
      this.logger.log?.(
        `[CompositeRtspServer] Server started on ${this.options.listenHost}:${this.options.listenPort}${this.options.path}`
      );
    } catch (error) {
      this.active = false;
      this.emit("error", error);
      throw error;
    }
  }
  /**
   * Start the RTSP side.
   *
   * Reads the wider channel's stream metadata for dimensions/fps (with
   * 1920x1080@25 fallbacks), binds a net.Server for client tracking
   * (listenPort 0 is replaced by the OS-assigned port), then launches
   * the ffmpeg RTSP listener.
   */
  async startRtspServer() {
    const widerMetadata = await this.options.api.getStreamMetadata(this.options.widerChannel);
    const widerStreamInfo = widerMetadata.streams.find((s) => s.profile === this.options.widerProfile);
    const width = widerStreamInfo?.width ?? 1920;
    const height = widerStreamInfo?.height ?? 1080;
    const fps = widerStreamInfo?.frameRate ?? 25;
    // NOTE(review): this net server and ffmpeg's `-rtsp_flags listen` both
    // appear to target the same host:port — confirm the intended division
    // of responsibilities (connection tracking vs. actual RTSP serving).
    this.rtspServer = net.createServer((socket) => {
      this.handleRtspConnection(socket);
    });
    await new Promise((resolve, reject) => {
      this.rtspServer.listen(this.options.listenPort, this.options.listenHost, () => {
        const address = this.rtspServer.address();
        if (address && typeof address === "object" && "port" in address) {
          // Record the actual bound port (relevant when listenPort was 0).
          this.options.listenPort = address.port;
        }
        resolve();
      });
      this.rtspServer.on("error", reject);
    });
    this.startFfmpegRtspServer(width, height, fps);
  }
  /**
   * Launch ffmpeg as an RTSP server reading composite H.264 from stdin.
   *
   * @param {number} width - Stream width (currently unused in the args).
   * @param {number} height - Stream height (currently unused in the args).
   * @param {number} fps - Frame rate (currently unused in the args).
   */
  startFfmpegRtspServer(width, height, fps) {
    const rtspUrl = `rtsp://${this.options.listenHost}:${this.options.listenPort}${this.options.path}`;
    const ffmpegArgs = [
      "-hide_banner",
      "-loglevel",
      "error",
      "-f",
      "h264",
      "-i",
      "pipe:0",
      "-c:v",
      "copy",
      "-f",
      "rtsp",
      "-rtsp_flags",
      "listen",
      rtspUrl
    ];
    this.logger.log?.(
      `[CompositeRtspServer] Starting ffmpeg RTSP server: ${ffmpegArgs.join(" ")}`
    );
    this.ffmpegProcess = spawn6("ffmpeg", ffmpegArgs, {
      stdio: ["pipe", "pipe", "pipe"]
    });
    this.ffmpegProcess.on("error", (error) => {
      this.logger.error?.("[CompositeRtspServer] FFmpeg error:", error);
      this.emit("error", error);
    });
    this.ffmpegProcess.on("close", (code) => {
      // null code means killed by signal (expected during stop()).
      if (code !== 0 && code !== null) {
        this.logger.warn?.(`[CompositeRtspServer] FFmpeg exited with code ${code}`);
      }
    });
    // Pipe composite frames into ffmpeg stdin.
    this.compositeStream.on("videoFrame", (frame) => {
      if (this.ffmpegProcess?.stdin && !this.ffmpegProcess.stdin.destroyed) {
        try {
          const written = this.ffmpegProcess.stdin.write(frame);
          if (!written) {
            // Backpressure is acknowledged but frames are not queued;
            // the no-op drain handler simply lets writes resume.
            this.ffmpegProcess.stdin.once("drain", () => {
            });
          }
        } catch (error) {
          // EPIPE / write-after-end are expected when ffmpeg is shutting down.
          const code = error?.code;
          if (code !== "EPIPE" && code !== "ERR_STREAM_WRITE_AFTER_END") {
            this.logger.error?.("[CompositeRtspServer] Error writing to ffmpeg:", error);
          }
        }
      }
    });
    this.ffmpegProcess.stderr?.on("data", (data) => {
      const output = data.toString();
      if (output.includes("error") || output.includes("Error")) {
        this.logger.error?.("[CompositeRtspServer] FFmpeg stderr:", output);
      }
    });
  }
  /**
   * Handle an incoming RTSP connection (simplified — the RTSP protocol
   * itself is delegated to ffmpeg). This only tracks the client for
   * bookkeeping and emits "client"/"clientDisconnected" events.
   * @param {net.Socket} socket
   */
  handleRtspConnection(socket) {
    const clientId = `${socket.remoteAddress}:${socket.remotePort}`;
    this.logger.log?.(`[CompositeRtspServer] RTSP client connected: ${clientId}`);
    this.connectedClients.add(clientId);
    this.emit("client", clientId);
    socket.on("close", () => {
      this.connectedClients.delete(clientId);
      this.emit("clientDisconnected", clientId);
      this.logger.log?.(`[CompositeRtspServer] RTSP client disconnected: ${clientId}`);
    });
    socket.on("error", (error) => {
      this.logger.error?.(`[CompositeRtspServer] RTSP client error:`, error);
    });
  }
  /**
   * Stop the composite RTSP server.
   *
   * Stops the composite stream, terminates ffmpeg (SIGTERM with a 1 s
   * SIGKILL escalation), closes the net server, clears client tracking,
   * and emits "close". No-op when not active.
   */
  async stop() {
    if (!this.active) {
      return;
    }
    this.active = false;
    this.logger.log?.("[CompositeRtspServer] Stopping server...");
    if (this.compositeStream) {
      await this.compositeStream.stop();
      this.compositeStream = null;
    }
    if (this.ffmpegProcess) {
      try {
        this.ffmpegProcess.stdin?.end();
        this.ffmpegProcess.kill("SIGTERM");
        // Escalate to SIGKILL after 1 s if ffmpeg ignores SIGTERM.
        setTimeout(() => {
          try {
            this.ffmpegProcess?.kill("SIGKILL");
          } catch {
          }
        }, 1e3);
      } catch {
      }
      this.ffmpegProcess = null;
    }
    if (this.rtspServer) {
      await new Promise((resolve) => {
        this.rtspServer?.close(() => resolve());
      });
      this.rtspServer = null;
    }
    this.connectedClients.clear();
    this.emit("close");
    this.logger.log?.("[CompositeRtspServer] Server stopped");
  }
  /**
   * Get the RTSP URL.
   * @returns {string} rtsp:// URL of the composite stream.
   */
  getRtspUrl() {
    return `rtsp://${this.options.listenHost}:${this.options.listenPort}${this.options.path}`;
  }
  /**
   * Check whether the server is active.
   * @returns {boolean}
   */
  isActive() {
    return this.active;
  }
  /**
   * Number of connected clients.
   * @returns {number}
   */
  getClientCount() {
    return this.connectedClients.size;
  }
};
|
|
4528
|
+
export {
|
|
4529
|
+
AesStreamDecryptor,
|
|
4530
|
+
AutodiscoveryClient,
|
|
4531
|
+
BC_AES_IV,
|
|
4532
|
+
BC_CLASS_FILE_DOWNLOAD,
|
|
4533
|
+
BC_CLASS_LEGACY,
|
|
4534
|
+
BC_CLASS_MODERN_20,
|
|
4535
|
+
BC_CLASS_MODERN_24,
|
|
4536
|
+
BC_CLASS_MODERN_24_ALT,
|
|
4537
|
+
BC_CMD_ID_ABILITY_INFO,
|
|
4538
|
+
BC_CMD_ID_ALARM_EVENT_LIST,
|
|
4539
|
+
BC_CMD_ID_AUDIO_ALARM_PLAY,
|
|
4540
|
+
BC_CMD_ID_CHANNEL_INFO_ALL,
|
|
4541
|
+
BC_CMD_ID_CMD_123,
|
|
4542
|
+
BC_CMD_ID_CMD_209,
|
|
4543
|
+
BC_CMD_ID_CMD_265,
|
|
4544
|
+
BC_CMD_ID_CMD_440,
|
|
4545
|
+
BC_CMD_ID_COVER_PREVIEW,
|
|
4546
|
+
BC_CMD_ID_COVER_RESPONSE,
|
|
4547
|
+
BC_CMD_ID_COVER_STANDALONE_458,
|
|
4548
|
+
BC_CMD_ID_COVER_STANDALONE_459,
|
|
4549
|
+
BC_CMD_ID_COVER_STANDALONE_460,
|
|
4550
|
+
BC_CMD_ID_COVER_STANDALONE_461,
|
|
4551
|
+
BC_CMD_ID_COVER_STANDALONE_462,
|
|
4552
|
+
BC_CMD_ID_FILE_INFO_LIST_CLOSE,
|
|
4553
|
+
BC_CMD_ID_FILE_INFO_LIST_DL_VIDEO,
|
|
4554
|
+
BC_CMD_ID_FILE_INFO_LIST_DOWNLOAD,
|
|
4555
|
+
BC_CMD_ID_FILE_INFO_LIST_GET,
|
|
4556
|
+
BC_CMD_ID_FILE_INFO_LIST_OPEN,
|
|
4557
|
+
BC_CMD_ID_FILE_INFO_LIST_REPLAY,
|
|
4558
|
+
BC_CMD_ID_FILE_INFO_LIST_STOP,
|
|
4559
|
+
BC_CMD_ID_FIND_REC_VIDEO_CLOSE,
|
|
4560
|
+
BC_CMD_ID_FIND_REC_VIDEO_GET,
|
|
4561
|
+
BC_CMD_ID_FIND_REC_VIDEO_OPEN,
|
|
4562
|
+
BC_CMD_ID_FLOODLIGHT_STATUS_LIST,
|
|
4563
|
+
BC_CMD_ID_GET_ABILITY_SUPPORT,
|
|
4564
|
+
BC_CMD_ID_GET_ACCESS_USER_LIST,
|
|
4565
|
+
BC_CMD_ID_GET_AI_ALARM,
|
|
4566
|
+
BC_CMD_ID_GET_AI_CFG,
|
|
4567
|
+
BC_CMD_ID_GET_AI_DENOISE,
|
|
4568
|
+
BC_CMD_ID_GET_AUDIO_ALARM,
|
|
4569
|
+
BC_CMD_ID_GET_AUDIO_CFG,
|
|
4570
|
+
BC_CMD_ID_GET_AUDIO_TASK,
|
|
4571
|
+
BC_CMD_ID_GET_BATTERY_INFO,
|
|
4572
|
+
BC_CMD_ID_GET_BATTERY_INFO_LIST,
|
|
4573
|
+
BC_CMD_ID_GET_DAY_NIGHT_THRESHOLD,
|
|
4574
|
+
BC_CMD_ID_GET_DAY_RECORDS,
|
|
4575
|
+
BC_CMD_ID_GET_EMAIL_TASK,
|
|
4576
|
+
BC_CMD_ID_GET_FTP_TASK,
|
|
4577
|
+
BC_CMD_ID_GET_HDD_INFO_LIST,
|
|
4578
|
+
BC_CMD_ID_GET_KIT_AP_CFG,
|
|
4579
|
+
BC_CMD_ID_GET_LED_STATE,
|
|
4580
|
+
BC_CMD_ID_GET_MOTION_ALARM,
|
|
4581
|
+
BC_CMD_ID_GET_ONLINE_USER_LIST,
|
|
4582
|
+
BC_CMD_ID_GET_OSD_DATETIME,
|
|
4583
|
+
BC_CMD_ID_GET_PIR_INFO,
|
|
4584
|
+
BC_CMD_ID_GET_PTZ_POSITION,
|
|
4585
|
+
BC_CMD_ID_GET_PTZ_PRESET,
|
|
4586
|
+
BC_CMD_ID_GET_RECORD,
|
|
4587
|
+
BC_CMD_ID_GET_RECORD_CFG,
|
|
4588
|
+
BC_CMD_ID_GET_REC_ENC_CFG,
|
|
4589
|
+
BC_CMD_ID_GET_SIREN_STATUS,
|
|
4590
|
+
BC_CMD_ID_GET_SLEEP_STATE,
|
|
4591
|
+
BC_CMD_ID_GET_STREAM_INFO_LIST,
|
|
4592
|
+
BC_CMD_ID_GET_SUPPORT,
|
|
4593
|
+
BC_CMD_ID_GET_SYSTEM_GENERAL,
|
|
4594
|
+
BC_CMD_ID_GET_TIMELAPSE_CFG,
|
|
4595
|
+
BC_CMD_ID_GET_VIDEO_INPUT,
|
|
4596
|
+
BC_CMD_ID_GET_WHITE_LED,
|
|
4597
|
+
BC_CMD_ID_GET_WIFI,
|
|
4598
|
+
BC_CMD_ID_GET_WIFI_SIGNAL,
|
|
4599
|
+
BC_CMD_ID_GET_ZOOM_FOCUS,
|
|
4600
|
+
BC_CMD_ID_PING,
|
|
4601
|
+
BC_CMD_ID_PTZ_CONTROL,
|
|
4602
|
+
BC_CMD_ID_PTZ_CONTROL_PRESET,
|
|
4603
|
+
BC_CMD_ID_PUSH_COORDINATE_POINT_LIST,
|
|
4604
|
+
BC_CMD_ID_PUSH_DINGDONG_LIST,
|
|
4605
|
+
BC_CMD_ID_PUSH_NET_INFO,
|
|
4606
|
+
BC_CMD_ID_PUSH_SERIAL,
|
|
4607
|
+
BC_CMD_ID_PUSH_SLEEP_STATUS,
|
|
4608
|
+
BC_CMD_ID_PUSH_VIDEO_INPUT,
|
|
4609
|
+
BC_CMD_ID_SET_AI_ALARM,
|
|
4610
|
+
BC_CMD_ID_SET_AI_CFG,
|
|
4611
|
+
BC_CMD_ID_SET_AUDIO_TASK,
|
|
4612
|
+
BC_CMD_ID_SET_MOTION_ALARM,
|
|
4613
|
+
BC_CMD_ID_SET_PIR_INFO,
|
|
4614
|
+
BC_CMD_ID_SET_WHITE_LED_STATE,
|
|
4615
|
+
BC_CMD_ID_SET_WHITE_LED_TASK,
|
|
4616
|
+
BC_CMD_ID_SET_ZOOM_FOCUS,
|
|
4617
|
+
BC_CMD_ID_SUPPORT,
|
|
4618
|
+
BC_CMD_ID_TALK,
|
|
4619
|
+
BC_CMD_ID_TALK_ABILITY,
|
|
4620
|
+
BC_CMD_ID_TALK_CONFIG,
|
|
4621
|
+
BC_CMD_ID_TALK_RESET,
|
|
4622
|
+
BC_CMD_ID_UDP_KEEP_ALIVE,
|
|
4623
|
+
BC_CMD_ID_VIDEO,
|
|
4624
|
+
BC_CMD_ID_VIDEO_STOP,
|
|
4625
|
+
BC_MAGIC,
|
|
4626
|
+
BC_MAGIC_REV,
|
|
4627
|
+
BC_TCP_DEFAULT_PORT,
|
|
4628
|
+
BC_XML_KEY,
|
|
4629
|
+
BaichuanClient,
|
|
4630
|
+
BaichuanEventEmitter,
|
|
4631
|
+
BaichuanFrameParser,
|
|
4632
|
+
BaichuanHttpStreamServer,
|
|
4633
|
+
BaichuanRtspServer,
|
|
4634
|
+
BaichuanVideoStream,
|
|
4635
|
+
BcMediaAnnexBDecoder,
|
|
4636
|
+
BcMediaCodec,
|
|
4637
|
+
BcUdpStream,
|
|
4638
|
+
CompositeRtspServer,
|
|
4639
|
+
CompositeStream,
|
|
4640
|
+
DUAL_LENS_DUAL_MOTION_MODELS,
|
|
4641
|
+
DUAL_LENS_MODELS,
|
|
4642
|
+
DUAL_LENS_SINGLE_MOTION_MODELS,
|
|
4643
|
+
H264RtpDepacketizer,
|
|
4644
|
+
H265RtpDepacketizer,
|
|
4645
|
+
Intercom,
|
|
4646
|
+
NVR_HUB_EXACT_TYPES,
|
|
4647
|
+
NVR_HUB_MODEL_PATTERNS,
|
|
4648
|
+
ReolinkBaichuanApi,
|
|
4649
|
+
ReolinkCgiApi,
|
|
4650
|
+
ReolinkHttpClient,
|
|
4651
|
+
Rfc4571Muxer,
|
|
4652
|
+
abilitiesHasAny,
|
|
4653
|
+
aesDecrypt,
|
|
4654
|
+
aesEncrypt,
|
|
4655
|
+
asLogger,
|
|
4656
|
+
autoDetectDeviceType,
|
|
4657
|
+
bcDecrypt,
|
|
4658
|
+
bcEncrypt,
|
|
4659
|
+
bcHeaderHasPayloadOffset,
|
|
4660
|
+
buildAacAudioSpecificConfigHex,
|
|
4661
|
+
buildAbilityInfoExtensionXml,
|
|
4662
|
+
buildBinaryExtensionXml,
|
|
4663
|
+
buildChannelExtensionXml,
|
|
4664
|
+
buildFloodlightManualXml,
|
|
4665
|
+
buildLoginXml,
|
|
4666
|
+
buildPreviewStopXml,
|
|
4667
|
+
buildPreviewStopXmlV11,
|
|
4668
|
+
buildPreviewXml,
|
|
4669
|
+
buildPreviewXmlV11,
|
|
4670
|
+
buildPtzControlXml,
|
|
4671
|
+
buildPtzPresetXml,
|
|
4672
|
+
buildPtzPresetXmlV2,
|
|
4673
|
+
buildRfc4571Sdp,
|
|
4674
|
+
buildRtspPath,
|
|
4675
|
+
buildRtspUrl,
|
|
4676
|
+
buildSirenManualXml,
|
|
4677
|
+
buildSirenTimesXml,
|
|
4678
|
+
buildStartZoomFocusXml,
|
|
4679
|
+
buildWhiteLedStateXml,
|
|
4680
|
+
collectCgiDiagnostics,
|
|
4681
|
+
collectMultifocalDiagnostics,
|
|
4682
|
+
collectNativeDiagnostics,
|
|
4683
|
+
collectNvrDiagnostics,
|
|
4684
|
+
computeDeviceCapabilities,
|
|
4685
|
+
convertToAnnexB2 as convertH265ToAnnexB,
|
|
4686
|
+
convertToAnnexB,
|
|
4687
|
+
convertToLengthPrefixed,
|
|
4688
|
+
createBaichuanEndpointsServer,
|
|
4689
|
+
createDebugGateLogger,
|
|
4690
|
+
createDiagnosticsBundle,
|
|
4691
|
+
createLogger,
|
|
4692
|
+
createNativeStream,
|
|
4693
|
+
createNullLogger,
|
|
4694
|
+
createReplayHttpServer,
|
|
4695
|
+
createRfc4571TcpServer,
|
|
4696
|
+
createRfc4571TcpServerForReplay,
|
|
4697
|
+
createRtspProxyServer,
|
|
4698
|
+
createTaggedLogger,
|
|
4699
|
+
decodeHeader,
|
|
4700
|
+
deriveAesKey,
|
|
4701
|
+
detectVideoCodecFromNal,
|
|
4702
|
+
discoverReolinkDevices,
|
|
4703
|
+
discoverViaHttpScan,
|
|
4704
|
+
discoverViaUdpBroadcast,
|
|
4705
|
+
discoverViaUdpDirect,
|
|
4706
|
+
encodeHeader,
|
|
4707
|
+
extractH264ParamSetsFromAccessUnit,
|
|
4708
|
+
extractH265ParamSetsFromAccessUnit,
|
|
4709
|
+
extractPpsFromAnnexB,
|
|
4710
|
+
extractSpsFromAnnexB,
|
|
4711
|
+
extractVpsFromAnnexB,
|
|
4712
|
+
flattenAbilitiesForChannel,
|
|
4713
|
+
getConstructedVideoStreamOptions,
|
|
4714
|
+
getGlobalLogger,
|
|
4715
|
+
getH265NalType,
|
|
4716
|
+
getVideoStream,
|
|
4717
|
+
getXmlText,
|
|
4718
|
+
hasStartCodes2 as hasH265StartCodes,
|
|
4719
|
+
hasStartCodes,
|
|
4720
|
+
isDualLenseModel,
|
|
4721
|
+
isH264KeyframeAnnexB,
|
|
4722
|
+
isH265Irap,
|
|
4723
|
+
isH265KeyframeAnnexB,
|
|
4724
|
+
isNvrHubModel,
|
|
4725
|
+
isTcpFailureThatShouldFallbackToUdp,
|
|
4726
|
+
isValidH264AnnexBAccessUnit,
|
|
4727
|
+
isValidH265AnnexBAccessUnit,
|
|
4728
|
+
maskUid,
|
|
4729
|
+
md5HexUpper,
|
|
4730
|
+
md5StrModern,
|
|
4731
|
+
normalizeUid,
|
|
4732
|
+
packetizeAacAdtsFrame,
|
|
4733
|
+
packetizeAacRawFrame,
|
|
4734
|
+
packetizeH264,
|
|
4735
|
+
packetizeH265,
|
|
4736
|
+
parseAdtsHeader,
|
|
4737
|
+
parseBcMedia,
|
|
4738
|
+
parseRecordingFileName,
|
|
4739
|
+
parseSupportXml,
|
|
4740
|
+
printNvrDiagnostics,
|
|
4741
|
+
runAllDiagnosticsConsecutively,
|
|
4742
|
+
runMultifocalDiagnosticsConsecutively,
|
|
4743
|
+
sampleStreams,
|
|
4744
|
+
setGlobalLogger,
|
|
4745
|
+
splitAnnexBToNalPayloads,
|
|
4746
|
+
splitAnnexBToNals,
|
|
4747
|
+
splitAnnexBToNalPayloads2 as splitH265AnnexBToNalPayloads,
|
|
4748
|
+
testChannelStreams,
|
|
4749
|
+
xmlEscape,
|
|
4750
|
+
zipDirectory
|
|
4751
|
+
};
|
|
4752
|
+
//# sourceMappingURL=index.js.map
|