@camstack/addon-pipeline 0.1.0 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{addon-CwDFZWAb.d.cts → addon-DK7eQ0PN.d.cts} +3 -4
- package/dist/{addon-CwDFZWAb.d.ts → addon-DK7eQ0PN.d.ts} +3 -4
- package/dist/addon.cjs +27 -72
- package/dist/addon.cjs.map +1 -1
- package/dist/addon.d.cts +1 -1
- package/dist/addon.d.ts +1 -1
- package/dist/addon.js +27 -72
- package/dist/addon.js.map +1 -1
- package/dist/index.cjs +29 -3304
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +4 -921
- package/dist/index.d.ts +4 -921
- package/dist/index.js +28 -3271
- package/dist/index.js.map +1 -1
- package/package.json +34 -17
package/dist/index.js
CHANGED
|
@@ -14,6 +14,7 @@ __export(ffmpeg_config_exports, {
|
|
|
14
14
|
buildFfmpegInputArgs: () => buildFfmpegInputArgs,
|
|
15
15
|
buildFfmpegOutputArgs: () => buildFfmpegOutputArgs,
|
|
16
16
|
detectPlatformDefaults: () => detectPlatformDefaults,
|
|
17
|
+
resolveFfmpegBinary: () => resolveFfmpegBinary,
|
|
17
18
|
resolveFfmpegConfig: () => resolveFfmpegConfig
|
|
18
19
|
});
|
|
19
20
|
import { execFileSync } from "child_process";
|
|
@@ -58,6 +59,12 @@ function buildFfmpegOutputArgs(config) {
|
|
|
58
59
|
if (config.outputArgs?.length) args.push(...config.outputArgs);
|
|
59
60
|
return args;
|
|
60
61
|
}
|
|
62
|
+
async function resolveFfmpegBinary(configPath, deps) {
|
|
63
|
+
if (configPath && configPath !== "ffmpeg") {
|
|
64
|
+
return configPath;
|
|
65
|
+
}
|
|
66
|
+
return deps.ensureFfmpeg();
|
|
67
|
+
}
|
|
61
68
|
var init_ffmpeg_config = __esm({
|
|
62
69
|
"src/recording/ffmpeg-config.ts"() {
|
|
63
70
|
"use strict";
|
|
@@ -1166,7 +1173,7 @@ var recording_coordinator_exports = {};
|
|
|
1166
1173
|
__export(recording_coordinator_exports, {
|
|
1167
1174
|
RecordingCoordinator: () => RecordingCoordinator
|
|
1168
1175
|
});
|
|
1169
|
-
var POLICY_EVAL_INTERVAL_MS, MOTION_FALLBACK_TIMEOUT_MS, RecordingCoordinator;
|
|
1176
|
+
var DEFAULT_SEGMENT_DURATION_SEC, POLICY_EVAL_INTERVAL_MS, MOTION_FALLBACK_TIMEOUT_MS, RecordingCoordinator;
|
|
1170
1177
|
var init_recording_coordinator = __esm({
|
|
1171
1178
|
"src/recording/recording-coordinator.ts"() {
|
|
1172
1179
|
"use strict";
|
|
@@ -1176,6 +1183,7 @@ var init_recording_coordinator = __esm({
|
|
|
1176
1183
|
init_retention_manager();
|
|
1177
1184
|
init_playlist_generator();
|
|
1178
1185
|
init_storage_estimator();
|
|
1186
|
+
DEFAULT_SEGMENT_DURATION_SEC = 4;
|
|
1179
1187
|
POLICY_EVAL_INTERVAL_MS = 1e3;
|
|
1180
1188
|
MOTION_FALLBACK_TIMEOUT_MS = 6e4;
|
|
1181
1189
|
RecordingCoordinator = class _RecordingCoordinator {
|
|
@@ -1189,6 +1197,7 @@ var init_recording_coordinator = __esm({
|
|
|
1189
1197
|
storagePath;
|
|
1190
1198
|
globalFfmpegConfig;
|
|
1191
1199
|
detectedFfmpegConfig;
|
|
1200
|
+
segmentDurationSec;
|
|
1192
1201
|
recordings = /* @__PURE__ */ new Map();
|
|
1193
1202
|
policyTimer = null;
|
|
1194
1203
|
retentionManager;
|
|
@@ -1205,6 +1214,7 @@ var init_recording_coordinator = __esm({
|
|
|
1205
1214
|
this.storagePath = config.storagePath;
|
|
1206
1215
|
this.globalFfmpegConfig = config.globalFfmpegConfig;
|
|
1207
1216
|
this.detectedFfmpegConfig = config.detectedFfmpegConfig;
|
|
1217
|
+
this.segmentDurationSec = config.segmentDurationSec ?? DEFAULT_SEGMENT_DURATION_SEC;
|
|
1208
1218
|
this.retentionManager = new RetentionManager(
|
|
1209
1219
|
this.db,
|
|
1210
1220
|
this.logger.child("retention"),
|
|
@@ -1290,7 +1300,7 @@ var init_recording_coordinator = __esm({
|
|
|
1290
1300
|
const writerConfig = {
|
|
1291
1301
|
deviceId,
|
|
1292
1302
|
streamId: sp.streamId,
|
|
1293
|
-
segmentDurationSec:
|
|
1303
|
+
segmentDurationSec: this.segmentDurationSec,
|
|
1294
1304
|
storagePath: this.storagePath,
|
|
1295
1305
|
storageName,
|
|
1296
1306
|
subDirectory,
|
|
@@ -3135,9 +3145,7 @@ var PipelineAddon = class {
|
|
|
3135
3145
|
capabilities: [
|
|
3136
3146
|
{ name: "stream-broker", mode: "singleton" },
|
|
3137
3147
|
{ name: "recording-engine", mode: "singleton" },
|
|
3138
|
-
{ name: "analysis-
|
|
3139
|
-
{ name: "analysis-data-persistence", mode: "singleton" },
|
|
3140
|
-
{ name: "webrtc", mode: "collection" }
|
|
3148
|
+
{ name: "analysis-data-persistence", mode: "singleton" }
|
|
3141
3149
|
]
|
|
3142
3150
|
};
|
|
3143
3151
|
// Stream broker
|
|
@@ -3150,11 +3158,10 @@ var PipelineAddon = class {
|
|
|
3150
3158
|
currentRecordingConfig = {
|
|
3151
3159
|
ffmpegPath: "ffmpeg",
|
|
3152
3160
|
hwaccel: void 0,
|
|
3153
|
-
threads: void 0
|
|
3161
|
+
threads: void 0,
|
|
3162
|
+
segmentDurationSeconds: 4,
|
|
3163
|
+
defaultRetentionDays: 30
|
|
3154
3164
|
};
|
|
3155
|
-
// Analysis pipeline
|
|
3156
|
-
analysisPipeline = null;
|
|
3157
|
-
analysisLogger = null;
|
|
3158
3165
|
// Analysis persistence
|
|
3159
3166
|
persistenceFacade = null;
|
|
3160
3167
|
setRecordingDependencies(deps) {
|
|
@@ -3184,10 +3191,14 @@ var PipelineAddon = class {
|
|
|
3184
3191
|
hwaccel: context.addonConfig.hwaccel ?? this.currentRecordingConfig.hwaccel,
|
|
3185
3192
|
threads: context.addonConfig.threads ?? this.currentRecordingConfig.threads
|
|
3186
3193
|
};
|
|
3194
|
+
const segmentDurationSeconds = context.addonConfig.segmentDurationSeconds ?? this.currentRecordingConfig.segmentDurationSeconds;
|
|
3195
|
+
const defaultRetentionDays = context.addonConfig.defaultRetentionDays ?? this.currentRecordingConfig.defaultRetentionDays;
|
|
3187
3196
|
this.currentRecordingConfig = {
|
|
3188
3197
|
ffmpegPath,
|
|
3189
3198
|
hwaccel: globalFfmpegConfig.hwaccel,
|
|
3190
|
-
threads: globalFfmpegConfig.threads
|
|
3199
|
+
threads: globalFfmpegConfig.threads,
|
|
3200
|
+
segmentDurationSeconds,
|
|
3201
|
+
defaultRetentionDays
|
|
3191
3202
|
};
|
|
3192
3203
|
const fileStorage = context.storage.files;
|
|
3193
3204
|
if (!fileStorage) {
|
|
@@ -3203,7 +3214,8 @@ var PipelineAddon = class {
|
|
|
3203
3214
|
fileStorage,
|
|
3204
3215
|
storagePath,
|
|
3205
3216
|
globalFfmpegConfig,
|
|
3206
|
-
detectedFfmpegConfig
|
|
3217
|
+
detectedFfmpegConfig,
|
|
3218
|
+
segmentDurationSec: segmentDurationSeconds
|
|
3207
3219
|
});
|
|
3208
3220
|
await this.coordinator.start();
|
|
3209
3221
|
context.logger.info("Recording Engine initialized");
|
|
@@ -3212,16 +3224,6 @@ var PipelineAddon = class {
|
|
|
3212
3224
|
context.logger.warn(`Recording Engine failed to initialize: ${msg}`);
|
|
3213
3225
|
}
|
|
3214
3226
|
}
|
|
3215
|
-
this.analysisLogger = context.logger;
|
|
3216
|
-
try {
|
|
3217
|
-
const mod = await import("@camstack/lib-pipeline-analysis");
|
|
3218
|
-
const instance = new mod.AnalysisPipeline();
|
|
3219
|
-
this.analysisPipeline = instance;
|
|
3220
|
-
this.analysisLogger.info("Analysis pipeline loaded successfully");
|
|
3221
|
-
} catch (error) {
|
|
3222
|
-
const msg = error instanceof Error ? error.message : String(error);
|
|
3223
|
-
this.analysisLogger.warn(`Analysis pipeline not available: ${msg} -- analysis features disabled`);
|
|
3224
|
-
}
|
|
3225
3227
|
const eventPersistence = new EventPersistenceService({
|
|
3226
3228
|
getStorageLocation: () => context.storage,
|
|
3227
3229
|
subscribe: (filter, handler) => context.eventBus.subscribe(filter, handler),
|
|
@@ -3273,7 +3275,6 @@ var PipelineAddon = class {
|
|
|
3273
3275
|
this.sqliteDb = null;
|
|
3274
3276
|
}
|
|
3275
3277
|
this.recordingDb = null;
|
|
3276
|
-
this.analysisPipeline = null;
|
|
3277
3278
|
if (this.persistenceFacade) {
|
|
3278
3279
|
this.persistenceFacade.eventPersistence.stop();
|
|
3279
3280
|
this.persistenceFacade.retention.stop();
|
|
@@ -3286,13 +3287,8 @@ var PipelineAddon = class {
|
|
|
3286
3287
|
return this.brokerManager;
|
|
3287
3288
|
case "recording-engine":
|
|
3288
3289
|
return this.coordinator;
|
|
3289
|
-
case "analysis-pipeline":
|
|
3290
|
-
return this.analysisPipeline;
|
|
3291
3290
|
case "analysis-data-persistence":
|
|
3292
3291
|
return this.persistenceFacade;
|
|
3293
|
-
case "webrtc":
|
|
3294
|
-
return null;
|
|
3295
|
-
// WebRTC is provided externally or via collection
|
|
3296
3292
|
default:
|
|
3297
3293
|
return null;
|
|
3298
3294
|
}
|
|
@@ -3306,52 +3302,9 @@ var PipelineAddon = class {
|
|
|
3306
3302
|
if (!this.recordingDb) throw new Error("PipelineAddon recording not initialized");
|
|
3307
3303
|
return this.recordingDb;
|
|
3308
3304
|
}
|
|
3309
|
-
/** Whether the analysis pipeline package loaded successfully */
|
|
3310
|
-
isAnalysisAvailable() {
|
|
3311
|
-
return this.analysisPipeline !== null;
|
|
3312
|
-
}
|
|
3313
3305
|
// --- IConfigurable ---
|
|
3314
3306
|
getConfigSchema() {
|
|
3315
|
-
return {
|
|
3316
|
-
sections: [
|
|
3317
|
-
{
|
|
3318
|
-
id: "ffmpeg",
|
|
3319
|
-
title: "FFmpeg Settings",
|
|
3320
|
-
columns: 2,
|
|
3321
|
-
fields: [
|
|
3322
|
-
{
|
|
3323
|
-
type: "text",
|
|
3324
|
-
key: "ffmpegPath",
|
|
3325
|
-
label: "FFmpeg Binary Path",
|
|
3326
|
-
description: 'Path to the ffmpeg executable, or just "ffmpeg" if it is in your PATH',
|
|
3327
|
-
placeholder: "ffmpeg"
|
|
3328
|
-
},
|
|
3329
|
-
{
|
|
3330
|
-
type: "select",
|
|
3331
|
-
key: "hwaccel",
|
|
3332
|
-
label: "Hardware Acceleration",
|
|
3333
|
-
description: "Enable GPU-accelerated video encoding if supported by your hardware",
|
|
3334
|
-
options: [
|
|
3335
|
-
{ value: "", label: "None (software)", description: "CPU-only encoding" },
|
|
3336
|
-
{ value: "nvenc", label: "NVIDIA NVENC", description: "NVIDIA GPU encoding" },
|
|
3337
|
-
{ value: "vaapi", label: "Intel VAAPI", description: "Intel GPU encoding (Linux)" },
|
|
3338
|
-
{ value: "videotoolbox", label: "Apple VideoToolbox", description: "macOS hardware encoding" }
|
|
3339
|
-
]
|
|
3340
|
-
},
|
|
3341
|
-
{
|
|
3342
|
-
type: "number",
|
|
3343
|
-
key: "threads",
|
|
3344
|
-
label: "FFmpeg Threads",
|
|
3345
|
-
description: "Number of CPU threads for software encoding (0 = auto)",
|
|
3346
|
-
min: 0,
|
|
3347
|
-
max: 64,
|
|
3348
|
-
step: 1,
|
|
3349
|
-
unit: "threads"
|
|
3350
|
-
}
|
|
3351
|
-
]
|
|
3352
|
-
}
|
|
3353
|
-
]
|
|
3354
|
-
};
|
|
3307
|
+
return { sections: [] };
|
|
3355
3308
|
}
|
|
3356
3309
|
getConfig() {
|
|
3357
3310
|
return { ...this.currentRecordingConfig };
|
|
@@ -3360,7 +3313,9 @@ var PipelineAddon = class {
|
|
|
3360
3313
|
this.currentRecordingConfig = {
|
|
3361
3314
|
ffmpegPath: config.ffmpegPath ?? this.currentRecordingConfig.ffmpegPath,
|
|
3362
3315
|
hwaccel: config.hwaccel ?? this.currentRecordingConfig.hwaccel,
|
|
3363
|
-
threads: config.threads ?? this.currentRecordingConfig.threads
|
|
3316
|
+
threads: config.threads ?? this.currentRecordingConfig.threads,
|
|
3317
|
+
segmentDurationSeconds: config.segmentDurationSeconds ?? this.currentRecordingConfig.segmentDurationSeconds,
|
|
3318
|
+
defaultRetentionDays: config.defaultRetentionDays ?? this.currentRecordingConfig.defaultRetentionDays
|
|
3364
3319
|
};
|
|
3365
3320
|
}
|
|
3366
3321
|
};
|
|
@@ -3405,3222 +3360,24 @@ var BuiltinAnalysisAddon = class {
|
|
|
3405
3360
|
return this.pipeline !== null;
|
|
3406
3361
|
}
|
|
3407
3362
|
};
|
|
3408
|
-
|
|
3409
|
-
// src/webrtc/types.ts
|
|
3410
|
-
function asLogger(logger) {
|
|
3411
|
-
if (!logger) return createNullLogger();
|
|
3412
|
-
const noop = () => {
|
|
3413
|
-
};
|
|
3414
|
-
return {
|
|
3415
|
-
log: logger.log?.bind(logger) ?? noop,
|
|
3416
|
-
info: logger.info?.bind(logger) ?? noop,
|
|
3417
|
-
warn: logger.warn?.bind(logger) ?? noop,
|
|
3418
|
-
error: logger.error?.bind(logger) ?? noop,
|
|
3419
|
-
debug: logger.debug?.bind(logger) ?? noop
|
|
3420
|
-
};
|
|
3421
|
-
}
|
|
3422
|
-
function createNullLogger() {
|
|
3423
|
-
const noop = () => {
|
|
3424
|
-
};
|
|
3425
|
-
return { log: noop, info: noop, warn: noop, error: noop, debug: noop };
|
|
3426
|
-
}
|
|
3427
|
-
|
|
3428
|
-
// src/webrtc/nal-utils.ts
|
|
3429
|
-
var NAL_START_CODE_4B = Buffer.from([0, 0, 0, 1]);
|
|
3430
|
-
var NAL_START_CODE_3B = Buffer.from([0, 0, 1]);
|
|
3431
|
-
function hasStartCodes(data) {
|
|
3432
|
-
if (data.length < 4) return false;
|
|
3433
|
-
if (data.subarray(0, 4).equals(NAL_START_CODE_4B)) return true;
|
|
3434
|
-
if (data.subarray(0, 3).equals(NAL_START_CODE_3B)) return true;
|
|
3435
|
-
return false;
|
|
3436
|
-
}
|
|
3437
|
-
function splitAnnexBToNals(annexB) {
|
|
3438
|
-
const nals = [];
|
|
3439
|
-
const len = annexB.length;
|
|
3440
|
-
const isStartCodeAt = (i2) => {
|
|
3441
|
-
if (i2 + 3 <= len && annexB[i2] === 0 && annexB[i2 + 1] === 0) {
|
|
3442
|
-
if (annexB[i2 + 2] === 1) return 3;
|
|
3443
|
-
if (i2 + 4 <= len && annexB[i2 + 2] === 0 && annexB[i2 + 3] === 1)
|
|
3444
|
-
return 4;
|
|
3445
|
-
}
|
|
3446
|
-
return 0;
|
|
3447
|
-
};
|
|
3448
|
-
let i = 0;
|
|
3449
|
-
while (i < len) {
|
|
3450
|
-
const sc = isStartCodeAt(i);
|
|
3451
|
-
if (sc) break;
|
|
3452
|
-
i++;
|
|
3453
|
-
}
|
|
3454
|
-
while (i < len) {
|
|
3455
|
-
const sc = isStartCodeAt(i);
|
|
3456
|
-
if (!sc) {
|
|
3457
|
-
i++;
|
|
3458
|
-
continue;
|
|
3459
|
-
}
|
|
3460
|
-
const nalStart = i + sc;
|
|
3461
|
-
let j = nalStart;
|
|
3462
|
-
while (j < len) {
|
|
3463
|
-
const sc2 = isStartCodeAt(j);
|
|
3464
|
-
if (sc2) break;
|
|
3465
|
-
j++;
|
|
3466
|
-
}
|
|
3467
|
-
if (nalStart < j) {
|
|
3468
|
-
const nal = annexB.subarray(nalStart, j);
|
|
3469
|
-
if (nal.length > 0) nals.push(nal);
|
|
3470
|
-
}
|
|
3471
|
-
i = j;
|
|
3472
|
-
}
|
|
3473
|
-
return nals;
|
|
3474
|
-
}
|
|
3475
|
-
function prependStartCode(nal) {
|
|
3476
|
-
return Buffer.concat([NAL_START_CODE_4B, nal]);
|
|
3477
|
-
}
|
|
3478
|
-
function joinNalsToAnnexB(...nals) {
|
|
3479
|
-
const present = nals.filter((n) => !!n && n.length > 0);
|
|
3480
|
-
if (!present.length) return;
|
|
3481
|
-
const parts = [];
|
|
3482
|
-
for (const nal of present) {
|
|
3483
|
-
parts.push(NAL_START_CODE_4B, nal);
|
|
3484
|
-
}
|
|
3485
|
-
return Buffer.concat(parts);
|
|
3486
|
-
}
|
|
3487
|
-
function detectVideoCodecFromNal(data) {
|
|
3488
|
-
if (!data || data.length < 5) return null;
|
|
3489
|
-
let nalStart = -1;
|
|
3490
|
-
for (let i = 0; i < Math.min(data.length - 4, 100); i++) {
|
|
3491
|
-
if (data[i] === 0 && data[i + 1] === 0) {
|
|
3492
|
-
if (data[i + 2] === 0 && data[i + 3] === 1) {
|
|
3493
|
-
nalStart = i + 4;
|
|
3494
|
-
break;
|
|
3495
|
-
}
|
|
3496
|
-
if (data[i + 2] === 1) {
|
|
3497
|
-
nalStart = i + 3;
|
|
3498
|
-
break;
|
|
3499
|
-
}
|
|
3500
|
-
}
|
|
3501
|
-
}
|
|
3502
|
-
if (nalStart < 0 && data.length >= 5) {
|
|
3503
|
-
const len = data.readUInt32BE(0);
|
|
3504
|
-
if (len > 0 && len <= data.length - 4) {
|
|
3505
|
-
nalStart = 4;
|
|
3506
|
-
}
|
|
3507
|
-
}
|
|
3508
|
-
if (nalStart < 0 || nalStart >= data.length) return null;
|
|
3509
|
-
const nalByte = data[nalStart];
|
|
3510
|
-
if (nalByte === void 0) return null;
|
|
3511
|
-
const forbiddenBit264 = nalByte >> 7 & 1;
|
|
3512
|
-
const h264Type = nalByte & 31;
|
|
3513
|
-
if (forbiddenBit264 === 0 && h264Type > 0 && h264Type <= 12) {
|
|
3514
|
-
if (h264Type === 7 || h264Type === 8) return "H264";
|
|
3515
|
-
if (h264Type === 5) return "H264";
|
|
3516
|
-
if (h264Type === 1) {
|
|
3517
|
-
const nalRefIdc = nalByte >> 5 & 3;
|
|
3518
|
-
if (nalRefIdc >= 1) return "H264";
|
|
3519
|
-
}
|
|
3520
|
-
}
|
|
3521
|
-
if (nalStart + 1 < data.length) {
|
|
3522
|
-
const nalByte2 = data[nalStart + 1];
|
|
3523
|
-
if (nalByte2 !== void 0) {
|
|
3524
|
-
const forbiddenBit = nalByte >> 7 & 1;
|
|
3525
|
-
const hevcType = nalByte >> 1 & 63;
|
|
3526
|
-
const temporalId = nalByte2 & 7;
|
|
3527
|
-
if (forbiddenBit === 0 && temporalId > 0 && hevcType <= 40) {
|
|
3528
|
-
if (hevcType === 32 || hevcType === 33 || hevcType === 34)
|
|
3529
|
-
return "H265";
|
|
3530
|
-
if (hevcType === 19 || hevcType === 20 || hevcType === 21)
|
|
3531
|
-
return "H265";
|
|
3532
|
-
if (hevcType <= 1 && nalByte <= 3) return "H265";
|
|
3533
|
-
}
|
|
3534
|
-
}
|
|
3535
|
-
}
|
|
3536
|
-
return null;
|
|
3537
|
-
}
|
|
3538
|
-
|
|
3539
|
-
// src/webrtc/h264-utils.ts
|
|
3540
|
-
function tryConvertWithLengthReader(data, readLen) {
|
|
3541
|
-
const result = [];
|
|
3542
|
-
let offset = 0;
|
|
3543
|
-
let nalCount = 0;
|
|
3544
|
-
while (offset < data.length) {
|
|
3545
|
-
if (offset + 4 > data.length) return null;
|
|
3546
|
-
const nalLength = readLen(data, offset);
|
|
3547
|
-
offset += 4;
|
|
3548
|
-
if (nalLength <= 0) return null;
|
|
3549
|
-
if (nalLength > data.length - offset) return null;
|
|
3550
|
-
result.push(NAL_START_CODE_4B);
|
|
3551
|
-
result.push(data.subarray(offset, offset + nalLength));
|
|
3552
|
-
offset += nalLength;
|
|
3553
|
-
nalCount++;
|
|
3554
|
-
}
|
|
3555
|
-
if (nalCount === 0) return null;
|
|
3556
|
-
return Buffer.concat(result);
|
|
3557
|
-
}
|
|
3558
|
-
function tryConvertWithLengthReader16(data, readLen) {
|
|
3559
|
-
const result = [];
|
|
3560
|
-
let offset = 0;
|
|
3561
|
-
let nalCount = 0;
|
|
3562
|
-
while (offset < data.length) {
|
|
3563
|
-
if (offset + 2 > data.length) return null;
|
|
3564
|
-
const nalLength = readLen(data, offset);
|
|
3565
|
-
offset += 2;
|
|
3566
|
-
if (nalLength <= 0) return null;
|
|
3567
|
-
if (nalLength > data.length - offset) return null;
|
|
3568
|
-
result.push(NAL_START_CODE_4B);
|
|
3569
|
-
result.push(data.subarray(offset, offset + nalLength));
|
|
3570
|
-
offset += nalLength;
|
|
3571
|
-
nalCount++;
|
|
3572
|
-
}
|
|
3573
|
-
if (nalCount === 0) return null;
|
|
3574
|
-
return Buffer.concat(result);
|
|
3575
|
-
}
|
|
3576
|
-
function tryConvertWithLengthReader24(data, endian) {
|
|
3577
|
-
const result = [];
|
|
3578
|
-
let offset = 0;
|
|
3579
|
-
let nalCount = 0;
|
|
3580
|
-
const readLen24 = (buf, at) => {
|
|
3581
|
-
if (at + 3 > buf.length) return 0;
|
|
3582
|
-
const b0 = buf[at];
|
|
3583
|
-
const b1 = buf[at + 1];
|
|
3584
|
-
const b2 = buf[at + 2];
|
|
3585
|
-
return endian === "be" ? (b0 << 16 | b1 << 8 | b2) >>> 0 : (b2 << 16 | b1 << 8 | b0) >>> 0;
|
|
3586
|
-
};
|
|
3587
|
-
while (offset < data.length) {
|
|
3588
|
-
if (offset + 3 > data.length) return null;
|
|
3589
|
-
const nalLength = readLen24(data, offset);
|
|
3590
|
-
offset += 3;
|
|
3591
|
-
if (nalLength <= 0) return null;
|
|
3592
|
-
if (nalLength > data.length - offset) return null;
|
|
3593
|
-
result.push(NAL_START_CODE_4B);
|
|
3594
|
-
result.push(data.subarray(offset, offset + nalLength));
|
|
3595
|
-
offset += nalLength;
|
|
3596
|
-
nalCount++;
|
|
3597
|
-
}
|
|
3598
|
-
if (nalCount === 0) return null;
|
|
3599
|
-
return Buffer.concat(result);
|
|
3600
|
-
}
|
|
3601
|
-
function looksLikeSingleH264Nal(nalPayload) {
|
|
3602
|
-
if (nalPayload.length < 1) return false;
|
|
3603
|
-
const b0 = nalPayload[0];
|
|
3604
|
-
if (b0 === void 0) return false;
|
|
3605
|
-
if ((b0 & 128) !== 0) return false;
|
|
3606
|
-
const nalType = b0 & 31;
|
|
3607
|
-
return nalType >= 1 && nalType <= 23;
|
|
3608
|
-
}
|
|
3609
|
-
function depacketizeRtpAggregationToAnnexB(payload) {
|
|
3610
|
-
if (payload.length < 1) return null;
|
|
3611
|
-
const nalHeader = payload[0];
|
|
3612
|
-
const nalType = nalHeader & 31;
|
|
3613
|
-
const out = [];
|
|
3614
|
-
const pushNal = (nal) => {
|
|
3615
|
-
if (nal.length === 0) return;
|
|
3616
|
-
out.push(NAL_START_CODE_4B, nal);
|
|
3617
|
-
};
|
|
3618
|
-
if (nalType === 24) {
|
|
3619
|
-
let off = 1;
|
|
3620
|
-
while (off + 2 <= payload.length) {
|
|
3621
|
-
const size = payload.readUInt16BE(off);
|
|
3622
|
-
off += 2;
|
|
3623
|
-
if (size <= 0 || off + size > payload.length) return null;
|
|
3624
|
-
pushNal(payload.subarray(off, off + size));
|
|
3625
|
-
off += size;
|
|
3626
|
-
}
|
|
3627
|
-
return out.length ? Buffer.concat(out) : null;
|
|
3628
|
-
}
|
|
3629
|
-
if (nalType === 25) {
|
|
3630
|
-
let off = 1 + 2;
|
|
3631
|
-
if (off > payload.length) return null;
|
|
3632
|
-
while (off + 2 <= payload.length) {
|
|
3633
|
-
const size = payload.readUInt16BE(off);
|
|
3634
|
-
off += 2;
|
|
3635
|
-
if (size <= 0 || off + size > payload.length) return null;
|
|
3636
|
-
pushNal(payload.subarray(off, off + size));
|
|
3637
|
-
off += size;
|
|
3638
|
-
}
|
|
3639
|
-
return out.length ? Buffer.concat(out) : null;
|
|
3640
|
-
}
|
|
3641
|
-
if (nalType === 26) {
|
|
3642
|
-
let off = 1 + 2;
|
|
3643
|
-
if (off > payload.length) return null;
|
|
3644
|
-
while (off + 2 <= payload.length) {
|
|
3645
|
-
const size = payload.readUInt16BE(off);
|
|
3646
|
-
off += 2;
|
|
3647
|
-
if (off + 1 + 2 > payload.length) return null;
|
|
3648
|
-
off += 1 + 2;
|
|
3649
|
-
if (size <= 0 || off + size > payload.length) return null;
|
|
3650
|
-
pushNal(payload.subarray(off, off + size));
|
|
3651
|
-
off += size;
|
|
3652
|
-
}
|
|
3653
|
-
return out.length ? Buffer.concat(out) : null;
|
|
3654
|
-
}
|
|
3655
|
-
if (nalType === 27) {
|
|
3656
|
-
let off = 1 + 2;
|
|
3657
|
-
if (off > payload.length) return null;
|
|
3658
|
-
while (off + 2 <= payload.length) {
|
|
3659
|
-
const size = payload.readUInt16BE(off);
|
|
3660
|
-
off += 2;
|
|
3661
|
-
if (off + 1 + 3 > payload.length) return null;
|
|
3662
|
-
off += 1 + 3;
|
|
3663
|
-
if (size <= 0 || off + size > payload.length) return null;
|
|
3664
|
-
pushNal(payload.subarray(off, off + size));
|
|
3665
|
-
off += size;
|
|
3666
|
-
}
|
|
3667
|
-
return out.length ? Buffer.concat(out) : null;
|
|
3668
|
-
}
|
|
3669
|
-
return null;
|
|
3670
|
-
}
|
|
3671
|
-
function convertH264ToAnnexB(data) {
|
|
3672
|
-
if (hasStartCodes(data)) return data;
|
|
3673
|
-
const sc4 = Buffer.from([0, 0, 0, 1]);
|
|
3674
|
-
const sc3 = Buffer.from([0, 0, 1]);
|
|
3675
|
-
const maxScan = Math.min(64, data.length);
|
|
3676
|
-
const idx4 = data.subarray(0, maxScan).indexOf(sc4);
|
|
3677
|
-
if (idx4 > 0) return data.subarray(idx4);
|
|
3678
|
-
const idx3 = data.subarray(0, maxScan).indexOf(sc3);
|
|
3679
|
-
if (idx3 > 0) return data.subarray(idx3);
|
|
3680
|
-
const be = tryConvertWithLengthReader(data, (b, o) => b.readUInt32BE(o));
|
|
3681
|
-
if (be) return be;
|
|
3682
|
-
const le = tryConvertWithLengthReader(data, (b, o) => b.readUInt32LE(o));
|
|
3683
|
-
if (le) return le;
|
|
3684
|
-
const be24 = tryConvertWithLengthReader24(data, "be");
|
|
3685
|
-
if (be24) return be24;
|
|
3686
|
-
const le24 = tryConvertWithLengthReader24(data, "le");
|
|
3687
|
-
if (le24) return le24;
|
|
3688
|
-
const be16 = tryConvertWithLengthReader16(data, (b, o) => b.readUInt16BE(o));
|
|
3689
|
-
if (be16) return be16;
|
|
3690
|
-
const le16 = tryConvertWithLengthReader16(data, (b, o) => b.readUInt16LE(o));
|
|
3691
|
-
if (le16) return le16;
|
|
3692
|
-
const agg = depacketizeRtpAggregationToAnnexB(data);
|
|
3693
|
-
if (agg) return agg;
|
|
3694
|
-
if (looksLikeSingleH264Nal(data)) {
|
|
3695
|
-
return Buffer.concat([NAL_START_CODE_4B, data]);
|
|
3696
|
-
}
|
|
3697
|
-
return data;
|
|
3698
|
-
}
|
|
3699
|
-
function isH264KeyframeAnnexB(annexB) {
|
|
3700
|
-
const nals = splitAnnexBToNals(annexB);
|
|
3701
|
-
let hasSps = false;
|
|
3702
|
-
let hasPps = false;
|
|
3703
|
-
let hasIdr = false;
|
|
3704
|
-
for (const nal of nals) {
|
|
3705
|
-
const t = (nal[0] ?? 0) & 31;
|
|
3706
|
-
if (t === 7) hasSps = true;
|
|
3707
|
-
if (t === 8) hasPps = true;
|
|
3708
|
-
if (t === 5) hasIdr = true;
|
|
3709
|
-
}
|
|
3710
|
-
return hasIdr && hasSps && hasPps;
|
|
3711
|
-
}
|
|
3712
|
-
function isH264IdrAccessUnit(annexB) {
|
|
3713
|
-
const nals = splitAnnexBToNals(annexB);
|
|
3714
|
-
for (const nal of nals) {
|
|
3715
|
-
if (nal.length < 1) continue;
|
|
3716
|
-
const t = (nal[0] ?? 0) & 31;
|
|
3717
|
-
if (t === 5) return true;
|
|
3718
|
-
}
|
|
3719
|
-
return false;
|
|
3720
|
-
}
|
|
3721
|
-
function extractH264ParamSets(annexB) {
|
|
3722
|
-
const nals = splitAnnexBToNals(annexB);
|
|
3723
|
-
let sps;
|
|
3724
|
-
let pps;
|
|
3725
|
-
let profileLevelId;
|
|
3726
|
-
for (const nal of nals) {
|
|
3727
|
-
if (nal.length < 1) continue;
|
|
3728
|
-
const nalType = nal[0] & 31;
|
|
3729
|
-
if (nalType === 7) {
|
|
3730
|
-
sps = nal;
|
|
3731
|
-
if (nal.length >= 4) {
|
|
3732
|
-
profileLevelId = Buffer.from([nal[1], nal[2], nal[3]]).toString(
|
|
3733
|
-
"hex"
|
|
3734
|
-
);
|
|
3735
|
-
}
|
|
3736
|
-
} else if (nalType === 8) {
|
|
3737
|
-
pps = nal;
|
|
3738
|
-
}
|
|
3739
|
-
}
|
|
3740
|
-
const out = {};
|
|
3741
|
-
if (sps) out.sps = sps;
|
|
3742
|
-
if (pps) out.pps = pps;
|
|
3743
|
-
if (profileLevelId) out.profileLevelId = profileLevelId;
|
|
3744
|
-
return out;
|
|
3745
|
-
}
|
|
3746
|
-
var H264RtpDepacketizer = class _H264RtpDepacketizer {
|
|
3747
|
-
fuNalHeader = null;
|
|
3748
|
-
fuParts = [];
|
|
3749
|
-
static parseRtpPayload(packet) {
|
|
3750
|
-
if (!packet || packet.length < 12) return null;
|
|
3751
|
-
const version = packet[0] >> 6 & 3;
|
|
3752
|
-
if (version !== 2) return null;
|
|
3753
|
-
const padding = (packet[0] & 32) !== 0;
|
|
3754
|
-
const extension = (packet[0] & 16) !== 0;
|
|
3755
|
-
const csrcCount = packet[0] & 15;
|
|
3756
|
-
let offset = 12 + csrcCount * 4;
|
|
3757
|
-
if (offset > packet.length) return null;
|
|
3758
|
-
if (extension) {
|
|
3759
|
-
if (offset + 4 > packet.length) return null;
|
|
3760
|
-
const extLenWords = packet.readUInt16BE(offset + 2);
|
|
3761
|
-
offset += 4 + extLenWords * 4;
|
|
3762
|
-
if (offset > packet.length) return null;
|
|
3763
|
-
}
|
|
3764
|
-
let end = packet.length;
|
|
3765
|
-
if (padding) {
|
|
3766
|
-
const padLen = packet[packet.length - 1];
|
|
3767
|
-
if (padLen <= 0 || padLen > packet.length) return null;
|
|
3768
|
-
end = packet.length - padLen;
|
|
3769
|
-
if (end < offset) return null;
|
|
3770
|
-
}
|
|
3771
|
-
if (end <= offset) return null;
|
|
3772
|
-
return packet.subarray(offset, end);
|
|
3773
|
-
}
|
|
3774
|
-
reset() {
|
|
3775
|
-
this.fuNalHeader = null;
|
|
3776
|
-
this.fuParts = [];
|
|
3777
|
-
}
|
|
3778
|
-
push(payload) {
|
|
3779
|
-
if (payload.length === 0) return [];
|
|
3780
|
-
const rtpPayload = _H264RtpDepacketizer.parseRtpPayload(payload);
|
|
3781
|
-
if (rtpPayload) payload = rtpPayload;
|
|
3782
|
-
if (hasStartCodes(payload)) return [payload];
|
|
3783
|
-
const b0 = payload[0];
|
|
3784
|
-
if ((b0 & 128) !== 0) return [];
|
|
3785
|
-
const nalType = b0 & 31;
|
|
3786
|
-
if (nalType >= 1 && nalType <= 23) {
|
|
3787
|
-
return [Buffer.concat([NAL_START_CODE_4B, payload])];
|
|
3788
|
-
}
|
|
3789
|
-
if (nalType === 24) {
|
|
3790
|
-
if (payload.length < 1 + 2) return [];
|
|
3791
|
-
let off = 1;
|
|
3792
|
-
const out = [];
|
|
3793
|
-
while (off + 2 <= payload.length) {
|
|
3794
|
-
const size = payload.readUInt16BE(off);
|
|
3795
|
-
off += 2;
|
|
3796
|
-
if (size <= 0 || off + size > payload.length) return [];
|
|
3797
|
-
const nal = payload.subarray(off, off + size);
|
|
3798
|
-
off += size;
|
|
3799
|
-
if (nal.length < 1) return [];
|
|
3800
|
-
if ((nal[0] & 128) !== 0) return [];
|
|
3801
|
-
const t = nal[0] & 31;
|
|
3802
|
-
if (t === 0 || t >= 24) return [];
|
|
3803
|
-
out.push(Buffer.concat([NAL_START_CODE_4B, nal]));
|
|
3804
|
-
}
|
|
3805
|
-
return out;
|
|
3806
|
-
}
|
|
3807
|
-
if (nalType === 28 || nalType === 29) {
|
|
3808
|
-
if (payload.length < 2) return [];
|
|
3809
|
-
const fuIndicator = payload[0];
|
|
3810
|
-
const fuHeader = payload[1];
|
|
3811
|
-
const start = (fuHeader & 128) !== 0;
|
|
3812
|
-
const end = (fuHeader & 64) !== 0;
|
|
3813
|
-
const origType = fuHeader & 31;
|
|
3814
|
-
const reconstructedHeader = fuIndicator & 224 | origType;
|
|
3815
|
-
let off = 2;
|
|
3816
|
-
if (nalType === 29) {
|
|
3817
|
-
if (payload.length < off + 2) return [];
|
|
3818
|
-
off += 2;
|
|
3819
|
-
}
|
|
3820
|
-
const frag = payload.subarray(off);
|
|
3821
|
-
if (start) {
|
|
3822
|
-
this.fuNalHeader = reconstructedHeader;
|
|
3823
|
-
this.fuParts = [frag];
|
|
3824
|
-
} else if (this.fuNalHeader != null) {
|
|
3825
|
-
this.fuParts.push(frag);
|
|
3826
|
-
} else {
|
|
3827
|
-
return [];
|
|
3828
|
-
}
|
|
3829
|
-
if (end && this.fuNalHeader != null) {
|
|
3830
|
-
const nal = Buffer.concat([
|
|
3831
|
-
Buffer.from([this.fuNalHeader]),
|
|
3832
|
-
...this.fuParts
|
|
3833
|
-
]);
|
|
3834
|
-
this.reset();
|
|
3835
|
-
return [Buffer.concat([NAL_START_CODE_4B, nal])];
|
|
3836
|
-
}
|
|
3837
|
-
return [];
|
|
3838
|
-
}
|
|
3839
|
-
return [];
|
|
3840
|
-
}
|
|
3841
|
-
};
|
|
3842
|
-
|
|
3843
|
-
// src/webrtc/h265-utils.ts
|
|
3844
|
-
function tryConvertWithLengthReader2(data, readLen) {
|
|
3845
|
-
const result = [];
|
|
3846
|
-
let offset = 0;
|
|
3847
|
-
let nalCount = 0;
|
|
3848
|
-
while (offset < data.length) {
|
|
3849
|
-
if (offset + 4 > data.length) return null;
|
|
3850
|
-
const nalLength = readLen(data, offset);
|
|
3851
|
-
offset += 4;
|
|
3852
|
-
if (nalLength <= 0) return null;
|
|
3853
|
-
if (nalLength > data.length - offset) return null;
|
|
3854
|
-
result.push(NAL_START_CODE_4B);
|
|
3855
|
-
result.push(data.subarray(offset, offset + nalLength));
|
|
3856
|
-
offset += nalLength;
|
|
3857
|
-
nalCount++;
|
|
3858
|
-
}
|
|
3859
|
-
if (nalCount === 0) return null;
|
|
3860
|
-
return Buffer.concat(result);
|
|
3861
|
-
}
|
|
3862
|
-
function tryConvertWithLengthReader162(data, readLen) {
|
|
3863
|
-
const result = [];
|
|
3864
|
-
let offset = 0;
|
|
3865
|
-
let nalCount = 0;
|
|
3866
|
-
while (offset < data.length) {
|
|
3867
|
-
if (offset + 2 > data.length) return null;
|
|
3868
|
-
const nalLength = readLen(data, offset);
|
|
3869
|
-
offset += 2;
|
|
3870
|
-
if (nalLength <= 0) return null;
|
|
3871
|
-
if (nalLength > data.length - offset) return null;
|
|
3872
|
-
result.push(NAL_START_CODE_4B);
|
|
3873
|
-
result.push(data.subarray(offset, offset + nalLength));
|
|
3874
|
-
offset += nalLength;
|
|
3875
|
-
nalCount++;
|
|
3876
|
-
}
|
|
3877
|
-
if (nalCount === 0) return null;
|
|
3878
|
-
return Buffer.concat(result);
|
|
3879
|
-
}
|
|
3880
|
-
function tryConvertWithLengthReader242(data, endian) {
|
|
3881
|
-
const result = [];
|
|
3882
|
-
let offset = 0;
|
|
3883
|
-
let nalCount = 0;
|
|
3884
|
-
const readLen24 = (buf, at) => {
|
|
3885
|
-
if (at + 3 > buf.length) return 0;
|
|
3886
|
-
const b0 = buf[at];
|
|
3887
|
-
const b1 = buf[at + 1];
|
|
3888
|
-
const b2 = buf[at + 2];
|
|
3889
|
-
return endian === "be" ? (b0 << 16 | b1 << 8 | b2) >>> 0 : (b2 << 16 | b1 << 8 | b0) >>> 0;
|
|
3890
|
-
};
|
|
3891
|
-
while (offset < data.length) {
|
|
3892
|
-
if (offset + 3 > data.length) return null;
|
|
3893
|
-
const nalLength = readLen24(data, offset);
|
|
3894
|
-
offset += 3;
|
|
3895
|
-
if (nalLength <= 0) return null;
|
|
3896
|
-
if (nalLength > data.length - offset) return null;
|
|
3897
|
-
result.push(NAL_START_CODE_4B);
|
|
3898
|
-
result.push(data.subarray(offset, offset + nalLength));
|
|
3899
|
-
offset += nalLength;
|
|
3900
|
-
nalCount++;
|
|
3901
|
-
}
|
|
3902
|
-
if (nalCount === 0) return null;
|
|
3903
|
-
return Buffer.concat(result);
|
|
3904
|
-
}
|
|
3905
|
-
function looksLikeSingleH265Nal(nalPayload) {
|
|
3906
|
-
if (nalPayload.length < 2) return false;
|
|
3907
|
-
const b0 = nalPayload[0];
|
|
3908
|
-
if (b0 === void 0) return false;
|
|
3909
|
-
if ((b0 & 128) !== 0) return false;
|
|
3910
|
-
const nalType = b0 >> 1 & 63;
|
|
3911
|
-
return nalType <= 40;
|
|
3912
|
-
}
|
|
3913
|
-
function convertH265ToAnnexB(data) {
  // Normalizes an H.265 frame to Annex-B. Tries, in order: already
  // start-code delimited; a start code appearing after a short prefix;
  // 32-, 24- and 16-bit length-prefixed layouts (both endians); and
  // finally a bare single NAL. Falls back to returning the input as-is.
  if (hasStartCodes(data)) return data;
  const startCode4 = Buffer.from([0, 0, 0, 1]);
  const startCode3 = Buffer.from([0, 0, 1]);
  const scanWindow = data.subarray(0, Math.min(64, data.length));
  const pos4 = scanWindow.indexOf(startCode4);
  if (pos4 > 0) return data.subarray(pos4);
  const pos3 = scanWindow.indexOf(startCode3);
  if (pos3 > 0) return data.subarray(pos3);
  const attempts = [
    () => tryConvertWithLengthReader2(data, (b, o) => b.readUInt32BE(o)),
    () => tryConvertWithLengthReader2(data, (b, o) => b.readUInt32LE(o)),
    () => tryConvertWithLengthReader242(data, "be"),
    () => tryConvertWithLengthReader242(data, "le"),
    () => tryConvertWithLengthReader162(data, (b, o) => b.readUInt16BE(o)),
    () => tryConvertWithLengthReader162(data, (b, o) => b.readUInt16LE(o))
  ];
  for (const attempt of attempts) {
    const converted = attempt();
    if (converted) return converted;
  }
  if (looksLikeSingleH265Nal(data)) {
    return Buffer.concat([NAL_START_CODE_4B, data]);
  }
  return data;
}
|
|
3939
|
-
function getH265NalType(nalPayload) {
  // Extracts the 6-bit NAL unit type from the first byte of an H.265 NAL
  // header; null for an empty buffer or when the forbidden bit is set.
  if (nalPayload.length === 0) return null;
  const header = nalPayload[0];
  if (header === void 0 || (header & 0x80) !== 0) return null;
  return header >> 1 & 0x3f;
}
|
|
3946
|
-
function isH265Irap(nalType) {
  // IRAP (intra random access point) NAL types occupy the 16..23 range
  // (BLA/IDR/CRA and reserved IRAP types).
  return !(nalType < 16 || nalType > 23);
}
|
|
3949
|
-
function isH265KeyframeAnnexB(annexB) {
  // A self-contained decodable H.265 keyframe needs an IRAP slice plus all
  // three parameter sets (VPS=32, SPS=33, PPS=34) in the same access unit.
  const found = { vps: false, sps: false, pps: false, irap: false };
  for (const nal of splitAnnexBToNals(annexB)) {
    const nalType = getH265NalType(nal);
    if (nalType === null) continue;
    switch (nalType) {
      case 32:
        found.vps = true;
        break;
      case 33:
        found.sps = true;
        break;
      case 34:
        found.pps = true;
        break;
    }
    if (isH265Irap(nalType)) found.irap = true;
  }
  return found.irap && found.vps && found.sps && found.pps;
}
|
|
3965
|
-
function isH265IrapAccessUnit(annexB) {
  // True when any NAL in the access unit is an IRAP slice (types 16-23).
  // NALs that are too short or have the forbidden bit set are skipped.
  for (const nal of splitAnnexBToNals(annexB)) {
    if (nal.length < 2) continue;
    const header = nal[0];
    if (header === void 0 || (header & 0x80) !== 0) continue;
    if (isH265Irap(header >> 1 & 0x3f)) return true;
  }
  return false;
}
|
|
3977
|
-
/**
 * Extracts the last VPS (type 32), SPS (33) and PPS (34) NAL units from an
 * Annex-B H.265 access unit.
 *
 * @param {Buffer} annexB - Start-code delimited H.265 bitstream.
 * @returns {{vps?: Buffer, sps?: Buffer, pps?: Buffer}} Whichever parameter
 *   sets were found; a key is omitted when that set is absent.
 */
function extractH265ParamSets(annexB) {
  const nals = splitAnnexBToNals(annexB);
  let vps;
  let sps;
  let pps;
  for (const nal of nals) {
    if (nal.length < 2) continue;
    const header = nal[0];
    // Reject corrupt headers: forbidden_zero_bit must be 0. This matches the
    // validation done by getH265NalType / looksLikeSingleH265Nal and prevents
    // garbage data from being misclassified as a parameter set.
    if (header === void 0 || (header & 128) !== 0) continue;
    const nalType = header >> 1 & 63;
    // Last occurrence wins, matching typical "most recent param set" usage.
    if (nalType === 32) vps = nal;
    else if (nalType === 33) sps = nal;
    else if (nalType === 34) pps = nal;
  }
  const out = {};
  if (vps) out.vps = vps;
  if (sps) out.sps = sps;
  if (pps) out.pps = pps;
  return out;
}
|
|
3995
|
-
var H265RtpDepacketizer = class _H265RtpDepacketizer {
  // Accumulated pieces of an in-flight fragmented NAL:
  // [start code, reconstructed 2-byte NAL header, payload fragments...].
  fuParts = null;
  // Strips the RTP header (RFC 3550) from a packet: fixed 12 bytes, CSRC
  // list, optional extension, optional trailing padding. Returns the payload
  // or null when the buffer does not parse as a valid RTP packet.
  static parseRtpPayload(packet) {
    if (!packet || packet.length < 12) return null;
    const version = packet[0] >> 6 & 3;
    if (version !== 2) return null;
    const padding = (packet[0] & 32) !== 0;
    const extension = (packet[0] & 16) !== 0;
    const csrcCount = packet[0] & 15;
    let offset = 12 + csrcCount * 4;
    if (offset > packet.length) return null;
    if (extension) {
      if (offset + 4 > packet.length) return null;
      // Extension length is in 32-bit words, excluding the 4-byte ext header.
      const extLenWords = packet.readUInt16BE(offset + 2);
      offset += 4 + extLenWords * 4;
      if (offset > packet.length) return null;
    }
    let end = packet.length;
    if (padding) {
      // Last byte holds the padding length, which includes itself.
      const padLen = packet[packet.length - 1];
      if (padLen <= 0 || padLen > packet.length) return null;
      end = packet.length - padLen;
      if (end < offset) return null;
    }
    if (end <= offset) return null;
    return packet.subarray(offset, end);
  }
  // Discards any partially assembled fragmented NAL.
  reset() {
    this.fuParts = null;
  }
  // Feeds one packet (raw RTP or bare H.265 payload) and returns zero or
  // more complete Annex-B NAL units, per the RFC 7798 HEVC payload format.
  push(payload) {
    if (!payload || payload.length < 2) return [];
    // If the buffer parses as RTP, operate on its payload; otherwise treat
    // the input as an already-extracted payload.
    const rtpPayload = _H265RtpDepacketizer.parseRtpPayload(payload);
    if (rtpPayload) payload = rtpPayload;
    const h0 = payload[0];
    const h1 = payload[1];
    // forbidden_zero_bit set => corrupt header, drop.
    if ((h0 & 128) !== 0) return [];
    const nalType = h0 >> 1 & 63;
    if (nalType === 48) {
      // AP (aggregation packet): a sequence of 16-bit length-prefixed NALs.
      let off = 2;
      const out = [];
      while (off + 2 <= payload.length) {
        const size = payload.readUInt16BE(off);
        off += 2;
        if (size <= 0 || off + size > payload.length) return [];
        const nal = payload.subarray(off, off + size);
        off += size;
        if (nal.length) out.push(NAL_START_CODE_4B, nal);
      }
      return out.length ? [Buffer.concat(out)] : [];
    }
    if (nalType === 49) {
      // FU (fragmentation unit): reassemble one NAL split across packets.
      if (payload.length < 3) return [];
      const fuHeader = payload[2];
      const start = (fuHeader & 128) !== 0;
      const end = (fuHeader & 64) !== 0;
      const origType = fuHeader & 63;
      // Rebuild the original NAL header: keep the F bit and layer-id high
      // bit from h0, substitute the real type carried in the FU header.
      const orig0 = h0 & 129 | (origType & 63) << 1;
      const orig1 = h1;
      const frag = payload.subarray(3);
      if (start) {
        this.fuParts = [NAL_START_CODE_4B, Buffer.from([orig0, orig1]), frag];
      } else {
        // Continuation without a start => we missed the first fragment.
        if (!this.fuParts) return [];
        this.fuParts.push(frag);
      }
      if (end) {
        if (!this.fuParts) return [];
        const out = Buffer.concat(this.fuParts);
        this.fuParts = null;
        return [out];
      }
      return [];
    }
    // Single NAL unit packet: emit as-is with a 4-byte start code.
    return [Buffer.concat([NAL_START_CODE_4B, payload])];
  }
};
|
|
4072
|
-
|
|
4073
|
-
// src/webrtc/fanout.ts
|
|
4074
|
-
var AsyncBoundedQueue = class {
  maxItems;
  queue = [];
  waiting;
  closed = false;
  /**
   * Bounded FIFO bridging a push-based producer to a single awaiting
   * consumer. On overflow the oldest items are dropped.
   */
  constructor(maxItems) {
    this.maxItems = Math.max(1, maxItems | 0);
  }
  /** Enqueue an item, handing it straight to a pending consumer if any. */
  push(item) {
    if (this.closed) return;
    const waiter = this.waiting;
    if (waiter) {
      this.waiting = void 0;
      waiter.resolve({ value: item, done: false });
      return;
    }
    this.queue.push(item);
    const overflow = this.queue.length - this.maxItems;
    if (overflow > 0) this.queue.splice(0, overflow);
  }
  /** Close the queue; a pending consumer is released with done=true. */
  close() {
    if (this.closed) return;
    this.closed = true;
    const waiter = this.waiting;
    if (waiter) {
      this.waiting = void 0;
      waiter.resolve({ value: void 0, done: true });
    }
  }
  /** Await the next item, iterator-result shaped. */
  async next() {
    const head = this.queue.shift();
    if (head !== void 0) return { value: head, done: false };
    if (this.closed) return { value: void 0, done: true };
    return await new Promise((resolve) => {
      this.waiting = { resolve };
    });
  }
  /** Whether close() has been called. */
  isClosed() {
    return this.closed;
  }
  /** Number of buffered items. */
  size() {
    return this.queue.length;
  }
};
|
|
4119
|
-
var StreamFanout = class {
  opts;
  // Per-subscriber bounded queues, keyed by subscriber id.
  queues = /* @__PURE__ */ new Map();
  // Active source async iterator; null when not started or stopped.
  source = null;
  running = false;
  // Promise of the pump loop so stop() can await its completion.
  pumpPromise = null;
  constructor(opts) {
    this.opts = opts;
  }
  /** Start pumping frames from the source to all subscribers. */
  start() {
    if (this.running) return;
    this.running = true;
    this.source = this.opts.createSource();
    this.pumpPromise = (async () => {
      try {
        for await (const frame of this.source) {
          try {
            // Observer hook; its failures must not kill the pump.
            this.opts.onFrame?.(frame);
          } catch {
          }
          for (const q of this.queues.values()) {
            q.push(frame);
          }
        }
      } catch (e) {
        this.opts.onError?.(e);
      } finally {
        // Source ended (normally or with an error): release all subscribers.
        this.running = false;
        for (const q of this.queues.values()) q.close();
        this.queues.clear();
      }
    })();
  }
  /**
   * Create a subscriber async generator.
   * Returns an async generator that yields frames from the shared source.
   * The generator terminates when the source ends or unsubscribe is called.
   */
  subscribe(id) {
    const q = new AsyncBoundedQueue(this.opts.maxQueueItems);
    if (!this.running) {
      // Not pumping: hand back an already-finished generator.
      q.close();
    } else {
      this.queues.set(id, q);
    }
    const self = this;
    return (async function* () {
      try {
        while (true) {
          const r = await q.next();
          if (r.done) return;
          yield r.value;
        }
      } finally {
        // Consumer stopped iterating (break/return/throw): detach the queue.
        q.close();
        self.queues.delete(id);
      }
    })();
  }
  /** Unsubscribe a specific subscriber. */
  unsubscribe(id) {
    const q = this.queues.get(id);
    if (q) {
      q.close();
      this.queues.delete(id);
    }
  }
  /** Stop the source and close all subscriber queues. */
  async stop() {
    if (!this.running) return;
    this.running = false;
    const src = this.source;
    this.source = null;
    for (const q of this.queues.values()) q.close();
    this.queues.clear();
    // Bound the wait on source shutdown so stop() cannot hang forever.
    const STOP_TIMEOUT = 3e3;
    const timeout = new Promise((r) => setTimeout(r, STOP_TIMEOUT));
    try {
      await Promise.race([
        (async () => {
          try {
            // Ask the source generator to finish, then wait for the pump.
            await src?.return(void 0);
          } catch {
          }
          try {
            await this.pumpPromise;
          } catch {
          }
        })(),
        timeout
      ]);
    } catch {
    }
    this.pumpPromise = null;
  }
  /** Returns true if the fan-out is running. */
  isRunning() {
    return this.running;
  }
  /** Returns the number of active subscribers. */
  subscriberCount() {
    return this.queues.size;
  }
};
|
|
4224
|
-
|
|
4225
|
-
// src/webrtc/ffmpeg-process.ts
|
|
4226
|
-
import { spawn as spawn4 } from "child_process";
|
|
4227
|
-
var FfmpegProcess = class {
  constructor(options) {
    this.options = options;
    this.logger = options.logger;
    this.label = options.label ?? "ffmpeg";
  }
  // Underlying ChildProcess; null before start() and after kill().
  process = null;
  killed = false;
  logger;
  label;
  /** Spawn the FFmpeg process. Returns stdin writable stream. */
  start() {
    if (this.process) {
      throw new Error(`[${this.label}] FFmpeg process already started`);
    }
    const ffmpegPath = this.options.ffmpegPath ?? "ffmpeg";
    // stdin piped (caller feeds media), stdout ignored, stderr piped for
    // logging; extraStdio lets callers add extra fds (e.g. pipe:3 outputs).
    const stdio = [
      "pipe",
      "ignore",
      "pipe",
      ...this.options.extraStdio ?? []
    ];
    this.process = spawn4(ffmpegPath, this.options.args, { stdio });
    this.process.on("error", (error) => {
      this.logger?.error(`[${this.label}] Failed to spawn FFmpeg:`, error);
    });
    this.process.on("close", (code, signal) => {
      this.options.onExit?.(code, signal);
    });
    this.process.stderr?.on("data", (data) => {
      const output = data.toString();
      this.options.onStderr?.(output);
    });
    this.process.stdin?.on("error", (error) => {
      const code = error?.code;
      // EPIPE / write-after-end are expected when ffmpeg exits first.
      if (code === "EPIPE" || code === "ERR_STREAM_WRITE_AFTER_END") return;
      this.logger?.error(`[${this.label}] FFmpeg stdin error:`, error);
    });
    return this.process.stdin;
  }
  /** Get a specific stdio stream by fd index (e.g. 3 for pipe:3). */
  getStdio(fd) {
    if (!this.process) return null;
    return this.process.stdio?.[fd] ?? null;
  }
  /** Get the underlying ChildProcess. */
  getProcess() {
    return this.process;
  }
  /** Kill the FFmpeg process gracefully (SIGTERM then SIGKILL after timeout). */
  async kill(timeoutMs = 3e3) {
    if (this.killed || !this.process) return;
    this.killed = true;
    const proc = this.process;
    this.process = null;
    try {
      // Closing stdin lets ffmpeg flush and exit cleanly when possible.
      proc.stdin?.end();
    } catch {
    }
    try {
      proc.kill("SIGTERM");
    } catch {
    }
    await new Promise((resolve) => {
      // Escalate to SIGKILL if the process does not exit in time.
      const timer = setTimeout(() => {
        try {
          proc.kill("SIGKILL");
        } catch {
        }
        resolve();
      }, timeoutMs);
      proc.on("close", () => {
        clearTimeout(timer);
        resolve();
      });
    });
  }
  /** Check if the process is running. */
  isRunning() {
    return this.process !== null && !this.killed;
  }
};
|
|
4309
|
-
|
|
4310
|
-
// src/webrtc/frame-source.ts
|
|
4311
|
-
function fromEventEmitter(emitter, videoEvent = "videoFrame", audioEvent = "audioFrame") {
  // Adapts an EventEmitter's video/audio frame events into one async
  // generator of { type, frame } items. Buffers up to 500 items while the
  // consumer is busy (dropping the oldest on overflow) and terminates when
  // the emitter fires "close" or "end", or when the consumer stops iterating.
  const queue = [];
  let resolve = null;
  let done = false;
  // Factory producing a listener that either wakes the waiting consumer or
  // buffers the frame — identical handling for both media types.
  const makeListener = (type) => (frame) => {
    const mf = { type, frame };
    if (resolve) {
      const r = resolve;
      resolve = null;
      r({ value: mf, done: false });
      return;
    }
    queue.push(mf);
    if (queue.length > 500) queue.splice(0, queue.length - 500);
  };
  const onVideo = makeListener("video");
  const onAudio = makeListener("audio");
  const cleanup = () => {
    done = true;
    emitter.removeListener(videoEvent, onVideo);
    emitter.removeListener(audioEvent, onAudio);
    if (resolve) {
      const r = resolve;
      resolve = null;
      r({ value: void 0, done: true });
    }
  };
  emitter.on(videoEvent, onVideo);
  emitter.on(audioEvent, onAudio);
  emitter.once("close", cleanup);
  emitter.once("end", cleanup);
  return (async function* () {
    try {
      for (;;) {
        const buffered = queue.shift();
        if (buffered) {
          yield buffered;
          continue;
        }
        if (done) return;
        const result = await new Promise((r) => {
          resolve = r;
        });
        if (result.done) return;
        yield result.value;
      }
    } finally {
      cleanup();
    }
  })();
}
|
|
4371
|
-
function fromPushCallback() {
  // Creates a push-driven FrameSource. Returns the async generator plus
  // pushVideo/pushAudio/close controls. Buffers up to 500 items while the
  // consumer is busy, dropping the oldest on overflow.
  const pending = [];
  let wake = null;
  let closed = false;
  const deliver = (mf) => {
    if (closed) return;
    if (wake) {
      const w = wake;
      wake = null;
      w({ value: mf, done: false });
      return;
    }
    pending.push(mf);
    if (pending.length > 500) pending.splice(0, pending.length - 500);
  };
  const source = (async function* () {
    try {
      for (;;) {
        const buffered = pending.shift();
        if (buffered) {
          yield buffered;
          continue;
        }
        if (closed) return;
        const result = await new Promise((r) => {
          wake = r;
        });
        if (result.done) return;
        yield result.value;
      }
    } finally {
      // Consumer abandoned iteration: refuse further pushes.
      closed = true;
    }
  })();
  return {
    source,
    pushVideo: (frame) => deliver({ type: "video", frame }),
    pushAudio: (frame) => deliver({ type: "audio", frame }),
    close: () => {
      closed = true;
      if (wake) {
        const w = wake;
        wake = null;
        w({ value: void 0, done: true });
      }
    }
  };
}
|
|
4419
|
-
function fromNativeStream(native) {
  // Wraps a native stream's raw frames into the shared MediaFrame shape,
  // normalizing codec names and defaulting timestamps to wall-clock micros.
  return (async function* () {
    for await (const raw of native) {
      if (raw.audio) {
        const frame = {
          data: raw.data,
          codec: raw.codec === "aac" ? "Aac" : "Adpcm",
          sampleRate: raw.sampleRate ?? 8e3,
          channels: 1,
          timestampMicros: raw.microseconds ?? Date.now() * 1e3
        };
        yield { type: "audio", frame };
      } else {
        const frame = {
          data: raw.data,
          codec: raw.videoType ?? "H264",
          isKeyframe: raw.isKeyframe ?? false,
          timestampMicros: raw.microseconds ?? Date.now() * 1e3
        };
        yield { type: "video", frame };
      }
    }
  })();
}
|
|
4447
|
-
|
|
4448
|
-
// src/webrtc/ffmpeg-source.ts
|
|
4449
|
-
import { spawn as spawn5 } from "child_process";
|
|
4450
|
-
var AnnexBAccessUnitAssembler = class {
|
|
4451
|
-
buffer = Buffer.alloc(0);
|
|
4452
|
-
/** Feed data from ffmpeg stdout. Returns complete access units (one per frame). */
|
|
4453
|
-
feed(data) {
|
|
4454
|
-
this.buffer = this.buffer.length > 0 ? Buffer.concat([this.buffer, data]) : data;
|
|
4455
|
-
const aus = [];
|
|
4456
|
-
const audPositions = [];
|
|
4457
|
-
for (let i = 0; i < this.buffer.length - 5; i++) {
|
|
4458
|
-
if (this.buffer[i] === 0 && this.buffer[i + 1] === 0) {
|
|
4459
|
-
let scLen = 0;
|
|
4460
|
-
if (this.buffer[i + 2] === 0 && this.buffer[i + 3] === 1) scLen = 4;
|
|
4461
|
-
else if (this.buffer[i + 2] === 1) scLen = 3;
|
|
4462
|
-
if (scLen > 0) {
|
|
4463
|
-
const nalType = this.buffer[i + scLen] & 31;
|
|
4464
|
-
if (nalType === 9) {
|
|
4465
|
-
audPositions.push(i);
|
|
4466
|
-
}
|
|
4467
|
-
}
|
|
4468
|
-
}
|
|
4469
|
-
}
|
|
4470
|
-
if (audPositions.length < 2) return aus;
|
|
4471
|
-
for (let j = 0; j < audPositions.length - 1; j++) {
|
|
4472
|
-
const au = this.buffer.subarray(audPositions[j], audPositions[j + 1]);
|
|
4473
|
-
if (au.length > 4) aus.push(au);
|
|
4474
|
-
}
|
|
4475
|
-
this.buffer = this.buffer.subarray(audPositions[audPositions.length - 1]);
|
|
4476
|
-
return aus;
|
|
4477
|
-
}
|
|
4478
|
-
/** Flush any remaining buffered data as a final access unit. */
|
|
4479
|
-
flush() {
|
|
4480
|
-
if (this.buffer.length <= 4) return null;
|
|
4481
|
-
const au = this.buffer;
|
|
4482
|
-
this.buffer = Buffer.alloc(0);
|
|
4483
|
-
return au;
|
|
4484
|
-
}
|
|
4485
|
-
};
|
|
4486
|
-
var AdaptiveFfmpegSource = class {
  rtspUrl;
  ffmpegPath;
  logger;
  label;
  // "copy" | "opus" | "off" — controls whether/how a second audio-only
  // ffmpeg is spawned alongside the video transcoder.
  audioMode;
  // Current encode settings; mutated by updateParams().
  currentParams;
  // Video ffmpeg child process; null while restarting or stopped.
  proc = null;
  // Audio ffmpeg child process; null when audioMode is "off" or stopped.
  audioProc = null;
  closed = false;
  /** Push callback for the frame source. */
  pushFrame = null;
  closeSource = null;
  /** The FrameSource async generator. Created once, survives ffmpeg restarts. */
  source;
  constructor(options) {
    this.rtspUrl = options.rtspUrl;
    this.ffmpegPath = options.ffmpegPath ?? "ffmpeg";
    this.audioMode = options.audioMode ?? "copy";
    this.logger = options.logger;
    this.label = options.label ?? "adaptive-ffmpeg";
    this.currentParams = { ...options.initialParams };
    // Push-to-async-generator bridge: frames buffer up to 120 items while
    // the consumer is busy; on overflow only the newest 60 are kept.
    const queue = [];
    let resolve = null;
    let done = false;
    this.pushFrame = (mf) => {
      if (done) return;
      if (resolve) {
        const r = resolve;
        resolve = null;
        r({ value: mf, done: false });
      } else {
        queue.push(mf);
        if (queue.length > 120) queue.splice(0, queue.length - 60);
      }
    };
    this.closeSource = () => {
      done = true;
      if (resolve) {
        const r = resolve;
        resolve = null;
        r({ value: void 0, done: true });
      }
    };
    this.source = (async function* () {
      try {
        while (true) {
          const item = queue.shift();
          if (item) {
            yield item;
            continue;
          }
          if (done) return;
          const result = await new Promise((r) => {
            resolve = r;
          });
          if (result.done) return;
          yield result.value;
        }
      } finally {
        done = true;
      }
    })();
  }
  /** Start the ffmpeg process with current encoding params. */
  async start() {
    if (this.closed) return;
    this.spawnFfmpeg();
  }
  /** Get the current encoding parameters. */
  getParams() {
    return { ...this.currentParams };
  }
  /**
   * Hot-swap encoding parameters.
   * Stops the current ffmpeg and starts a new one with updated params.
   * The FrameSource continues seamlessly — the new ffmpeg's first keyframe
   * is gated internally so consumers see a clean transition.
   */
  async updateParams(params) {
    const prev = { ...this.currentParams };
    if (params.maxBitrateKbps !== void 0) this.currentParams.maxBitrateKbps = params.maxBitrateKbps;
    if (params.width !== void 0) this.currentParams.width = params.width;
    if (params.height !== void 0) this.currentParams.height = params.height;
    if (params.preset !== void 0) this.currentParams.preset = params.preset;
    // Skip the restart when no effective encode setting changed (a preset-only
    // change does not trigger a restart here).
    if (prev.maxBitrateKbps === this.currentParams.maxBitrateKbps && prev.width === this.currentParams.width && prev.height === this.currentParams.height) return;
    this.logger?.info(
      `[${this.label}] Updating params: ${prev.maxBitrateKbps}kbps ${prev.width}x${prev.height} \u2192 ${this.currentParams.maxBitrateKbps}kbps ${this.currentParams.width}x${this.currentParams.height}`
    );
    await this.killFfmpeg();
    if (!this.closed) {
      this.spawnFfmpeg();
    }
  }
  /** Stop the source and kill ffmpeg. */
  async stop() {
    if (this.closed) return;
    this.closed = true;
    await this.killFfmpeg();
    this.closeSource?.();
  }
  // -----------------------------------------------------------------------
  // Private
  // -----------------------------------------------------------------------
  // Spawns the video transcoder (and optionally an audio extractor) with the
  // current params, wiring stdout into the frame source. Auto-respawns 2s
  // after an unexpected exit.
  spawnFfmpeg() {
    const { maxBitrateKbps, width, height, preset } = this.currentParams;
    // Low-latency RTSP -> raw Annex-B H.264 on stdout; AUD NALs enabled
    // (aud=1) so the assembler can split access units.
    const args = [
      "-hide_banner",
      "-loglevel",
      "error",
      "-fflags",
      "+nobuffer",
      "-flags",
      "+low_delay",
      "-rtsp_transport",
      "tcp",
      "-i",
      this.rtspUrl,
      "-c:v",
      "libx264",
      "-preset",
      preset ?? "ultrafast",
      "-tune",
      "zerolatency",
      "-crf",
      "28",
      "-maxrate",
      `${maxBitrateKbps}k`,
      "-bufsize",
      `${Math.round(maxBitrateKbps * 0.5)}k`,
      "-g",
      "50",
      "-keyint_min",
      "25",
      "-x264opts",
      "aud=1:sliced-threads=1",
      "-flush_packets",
      "1"
    ];
    if (width > 0 && height > 0) {
      args.push("-vf", `scale=${width}:${height}`);
    }
    args.push(
      "-an",
      "-f",
      "h264",
      "-"
    );
    this.proc = spawn5(this.ffmpegPath, args, {
      stdio: ["ignore", "pipe", "pipe"]
    });
    this.proc.on("error", (err) => {
      this.logger?.error(`[${this.label}] ffmpeg spawn error: ${err.message}`);
    });
    this.proc.on("close", (code, signal) => {
      this.logger?.debug(`[${this.label}] ffmpeg exited code=${code} signal=${signal}`);
      this.proc = null;
      // Unexpected exit: retry after a short backoff unless stopped.
      if (!this.closed) {
        setTimeout(() => {
          if (!this.closed) this.spawnFfmpeg();
        }, 2e3);
      }
    });
    this.proc.stderr?.on("data", (data) => {
      const s = data.toString();
      if (s.includes("error") || s.includes("Error") || s.includes("fatal")) {
        this.logger?.error(`[${this.label}] ffmpeg: ${s.trim()}`);
      }
    });
    if (!this.proc.stdout) {
      this.logger?.error(`[${this.label}] ffmpeg stdout not available`);
      return;
    }
    const assembler = new AnnexBAccessUnitAssembler();
    // Timestamps are relative to this spawn, in microseconds.
    const startTime = Date.now();
    this.proc.stdout.on("data", (data) => {
      if (this.closed) return;
      const aus = assembler.feed(data);
      for (const au of aus) {
        if (au.length < 4) continue;
        const isKeyframe = isH264IdrAccessUnit(au);
        const timestampMicros = (Date.now() - startTime) * 1e3;
        const vf = {
          data: au,
          codec: "H264",
          isKeyframe,
          timestampMicros
        };
        this.pushFrame?.({ type: "video", frame: vf });
      }
    });
    this.proc.stdout.on("end", () => {
      // Emit whatever is left in the assembler as a final frame.
      const remaining = assembler.flush();
      if (remaining && remaining.length > 4 && this.pushFrame) {
        const vf = {
          data: remaining,
          codec: "H264",
          isKeyframe: isH264IdrAccessUnit(remaining),
          timestampMicros: (Date.now() - startTime) * 1e3
        };
        this.pushFrame({ type: "video", frame: vf });
      }
    });
    if (this.audioMode !== "off") {
      const audioArgs = [
        "-hide_banner",
        "-loglevel",
        "error",
        "-fflags",
        "+nobuffer+flush_packets",
        "-rtsp_transport",
        "tcp",
        "-analyzeduration",
        "500000",
        "-probesize",
        "500000",
        "-i",
        this.rtspUrl,
        "-vn"
      ];
      let audioCodecLabel;
      let frameSize;
      let sampleRate;
      let codecName;
      if (this.audioMode === "opus") {
        audioArgs.push("-c:a", "libopus", "-ar", "48000", "-ac", "2", "-b:a", "64k", "-f", "ogg", "-");
        audioCodecLabel = "opus";
        frameSize = 960;
        sampleRate = 48e3;
        codecName = "Opus";
        // NOTE(review): the Opus argument list is built and then immediately
        // discarded in favor of a PCMU pipeline — "opus" mode currently always
        // falls back to G.711 mu-law; presumably intentional (Ogg framing is
        // not parsed here), but worth confirming upstream.
        audioArgs.length = 0;
        audioArgs.push(
          "-hide_banner",
          "-loglevel",
          "error",
          "-rtsp_transport",
          "tcp",
          "-i",
          this.rtspUrl,
          "-vn",
          "-c:a",
          "pcm_mulaw",
          "-ar",
          "8000",
          "-ac",
          "1",
          "-f",
          "mulaw",
          "-"
        );
        audioCodecLabel = "pcmu(opus-fallback)";
        frameSize = 160;
        sampleRate = 8e3;
        codecName = "Pcmu";
      } else {
        audioArgs.push("-c:a", "pcm_mulaw", "-ar", "8000", "-ac", "1", "-f", "mulaw", "-");
        audioCodecLabel = "pcmu";
        frameSize = 160;
        sampleRate = 8e3;
        codecName = "Pcmu";
      }
      this.audioProc = spawn5(this.ffmpegPath, audioArgs, {
        stdio: ["ignore", "pipe", "pipe"]
      });
      // Audio is best-effort: spawn errors are swallowed, no auto-restart.
      this.audioProc.on("error", () => {
      });
      this.audioProc.on("close", () => {
        this.audioProc = null;
      });
      if (this.audioProc.stdout) {
        let audioBuf = Buffer.alloc(0);
        this.audioProc.stdout.on("data", (data) => {
          if (this.closed || !this.pushFrame) return;
          audioBuf = audioBuf.length > 0 ? Buffer.concat([audioBuf, data]) : data;
          // Re-chunk the raw sample stream into fixed-size codec frames
          // (160 bytes = 20ms of 8kHz mu-law).
          while (audioBuf.length >= frameSize) {
            const audioFrame = audioBuf.subarray(0, frameSize);
            audioBuf = audioBuf.subarray(frameSize);
            this.pushFrame({
              type: "audio",
              frame: {
                data: Buffer.from(audioFrame),
                codec: codecName,
                sampleRate,
                channels: 1,
                timestampMicros: (Date.now() - startTime) * 1e3
              }
            });
          }
        });
      }
      this.logger?.info(
        `[${this.label}] Started: ${maxBitrateKbps}kbps ` + (width > 0 ? `${width}x${height}` : "native") + ` +audio(${audioCodecLabel})`
      );
    } else {
      this.logger?.info(
        `[${this.label}] Started: ${maxBitrateKbps}kbps ` + (width > 0 ? `${width}x${height}` : "native") + " (no audio)"
      );
    }
  }
  // Terminates the video and audio ffmpeg processes: SIGTERM first, then
  // SIGKILL after a timeout (3s video, 1s audio), awaiting "close" each time.
  async killFfmpeg() {
    const proc = this.proc;
    if (proc) {
      this.proc = null;
      try {
        proc.kill("SIGTERM");
      } catch {
      }
      await new Promise((resolve) => {
        const timer = setTimeout(() => {
          try {
            proc.kill("SIGKILL");
          } catch {
          }
          resolve();
        }, 3e3);
        proc.on("close", () => {
          clearTimeout(timer);
          resolve();
        });
      });
    }
    const audioProc = this.audioProc;
    if (audioProc) {
      this.audioProc = null;
      try {
        audioProc.kill("SIGTERM");
      } catch {
      }
      await new Promise((resolve) => {
        const timer = setTimeout(() => {
          try {
            audioProc.kill("SIGKILL");
          } catch {
          }
          resolve();
        }, 1e3);
        audioProc.on("close", () => {
          clearTimeout(timer);
          resolve();
        });
      });
    }
  }
};
|
|
4830
|
-
|
|
4831
|
-
// src/webrtc/adaptive-controller.ts
|
|
4832
|
-
// Exponentially weighted moving average. The first sample seeds the
// average directly; each later sample is blended in with weight `alpha`.
var EWMA = class {
  value = null;
  alpha;
  constructor(alpha = 0.3) {
    this.alpha = alpha;
  }
  /** Fold one sample into the average and return the new smoothed value. */
  update(sample) {
    const prev = this.value;
    this.value = prev === null ? sample : this.alpha * sample + (1 - this.alpha) * prev;
    return this.value;
  }
  /** Current smoothed value, or 0 if no sample has been seen yet. */
  get() {
    return this.value === null ? 0 : this.value;
  }
  /** Discard all history; the next update() re-seeds the average. */
  reset() {
    this.value = null;
  }
};
|
|
4853
|
-
/**
 * Drives quality-tier switching from smoothed per-session network stats.
 * Degrades one profile after `degradeCount` consecutive lossy evaluations,
 * recovers one profile after `recoverCount` consecutive clean ones; a
 * manually forced tier suspends automatic adaptation.
 */
var AdaptiveController = class {
  profiles;
  degradeThreshold;
  recoverThreshold;
  degradeCount;
  recoverCount;
  onQualityChange;
  logger;
  currentIndex;
  consecutiveBad = 0;
  consecutiveGood = 0;
  switching = false;
  /** Smoothed stats per session (aggregated for decisions). */
  sessionStats = /* @__PURE__ */ new Map();
  /** Manual override tier (null = auto). */
  forcedTier = null;
  constructor(options) {
    if (options.profiles.length === 0) {
      throw new Error("At least one quality profile is required");
    }
    this.profiles = options.profiles;
    this.degradeThreshold = options.degradeThreshold ?? 0.02;
    this.recoverThreshold = options.recoverThreshold ?? 0.005;
    this.degradeCount = options.degradeCount ?? 2;
    this.recoverCount = options.recoverCount ?? 3;
    this.onQualityChange = options.onQualityChange;
    this.logger = options.logger;
    this.currentIndex = 0;
  }
  /** Get the current quality profile. */
  get currentProfile() {
    return this.profiles[this.currentIndex];
  }
  /** Get the current quality tier. */
  get currentTier() {
    return this.currentProfile.tier;
  }
  /** Get aggregated stats summary; sessions silent for >30s are ignored. */
  getAggregatedStats() {
    const idle = { packetLoss: 0, jitterMs: 0, rttMs: 0 };
    if (this.sessionStats.size === 0) return idle;
    let lossSum = 0;
    let jitterSum = 0;
    let rttSum = 0;
    let active = 0;
    for (const entry of this.sessionStats.values()) {
      // Stale entries (no report within 30 seconds) do not vote.
      if (Date.now() - entry.lastUpdate > 3e4) continue;
      lossSum += entry.loss.get();
      jitterSum += entry.jitter.get();
      rttSum += entry.rtt.get();
      active += 1;
    }
    if (active === 0) return idle;
    return {
      packetLoss: lossSum / active,
      jitterMs: jitterSum / active,
      rttMs: rttSum / active
    };
  }
  /**
   * Report stats from a session (RTCP or client-reported).
   * Call this periodically (e.g. every 3–5 seconds).
   */
  reportStats(sessionId, stats) {
    let entry = this.sessionStats.get(sessionId);
    if (entry === void 0) {
      entry = {
        loss: new EWMA(0.3),
        jitter: new EWMA(0.3),
        rtt: new EWMA(0.3),
        lastUpdate: 0
      };
      this.sessionStats.set(sessionId, entry);
    }
    entry.loss.update(stats.packetLoss);
    entry.jitter.update(stats.jitterMs);
    entry.rtt.update(stats.rttMs);
    entry.lastUpdate = stats.timestamp;
    this.evaluate();
  }
  /** Remove a session's stats (call on session close). */
  removeSession(sessionId) {
    this.sessionStats.delete(sessionId);
  }
  /** Force a specific quality tier (null = auto). */
  forceQuality(tier) {
    this.forcedTier = tier;
    if (tier === null) {
      // Returning to automatic mode: restart the hysteresis counters.
      this.consecutiveBad = 0;
      this.consecutiveGood = 0;
      return;
    }
    const idx = this.profiles.findIndex((profile) => profile.tier === tier);
    if (idx < 0 || idx === this.currentIndex) return;
    void this.switchTo(idx);
  }
  /** Check if auto-adaptation is active (not forced). */
  get isAuto() {
    return this.forcedTier === null;
  }
  // -----------------------------------------------------------------------
  // Private
  // -----------------------------------------------------------------------
  /** Apply hysteresis to the aggregated loss and trigger a tier change. */
  evaluate() {
    if (this.forcedTier !== null || this.switching) return;
    const { packetLoss } = this.getAggregatedStats();
    if (packetLoss > this.degradeThreshold) {
      this.consecutiveGood = 0;
      if (++this.consecutiveBad >= this.degradeCount) {
        this.consecutiveBad = 0;
        this.degrade();
      }
      return;
    }
    this.consecutiveBad = 0;
    if (packetLoss >= this.recoverThreshold) {
      // Neither clearly bad nor clearly good: reset both streaks.
      this.consecutiveGood = 0;
      return;
    }
    if (++this.consecutiveGood >= this.recoverCount) {
      this.consecutiveGood = 0;
      this.recover();
    }
  }
  /** Step one profile toward lower quality. */
  degrade() {
    if (this.currentIndex >= this.profiles.length - 1) {
      this.logger?.debug("[adaptive] Already at lowest quality, cannot degrade further");
      return;
    }
    void this.switchTo(this.currentIndex + 1);
  }
  /** Step one profile toward higher quality. */
  recover() {
    if (this.currentIndex <= 0) {
      this.logger?.debug("[adaptive] Already at highest quality, cannot recover further");
      return;
    }
    void this.switchTo(this.currentIndex - 1);
  }
  /** Perform the actual switch, serialized by the `switching` flag. */
  async switchTo(newIndex) {
    const outOfRange = newIndex < 0 || newIndex >= this.profiles.length;
    if (this.switching || newIndex === this.currentIndex || outOfRange) return;
    this.switching = true;
    const from = this.profiles[this.currentIndex];
    const to = this.profiles[newIndex];
    this.logger?.info(
      `[adaptive] Quality change: ${from.tier} \u2192 ${to.tier} (${from.encoding.maxBitrateKbps}kbps \u2192 ${to.encoding.maxBitrateKbps}kbps)`
    );
    try {
      await this.onQualityChange(from, to);
      // Only commit the index once the callback has applied the change.
      this.currentIndex = newIndex;
    } catch (err) {
      this.logger?.error("[adaptive] Quality change failed:", err);
    } finally {
      this.switching = false;
    }
  }
};
|
|
5014
|
-
|
|
5015
|
-
// src/webrtc/session.ts
|
|
5016
|
-
// Cached werift module instance, populated on first successful load.
var _werift;
/**
 * Lazily import the optional `werift` dependency and cache the module.
 * The Function-wrapped dynamic import keeps bundlers from statically
 * resolving the specifier. Throws a descriptive install hint if the
 * package is not available.
 */
async function loadWerift() {
  if (_werift) {
    return _werift;
  }
  const moduleName = "werift";
  try {
    const dynamicImport = Function("m", "return import(m)");
    _werift = await dynamicImport(moduleName);
  } catch {
    throw new Error(
      "The 'werift' package is required for WebRTC support but is not installed. Install it with: npm install werift"
    );
  }
  return _werift;
}
|
|
5029
|
-
/**
 * One WebRTC viewer session fed from an async iterator of media frames
 * (`source`). Supports both signaling directions: server-creates-offer
 * (createOffer()/handleAnswer()) and WHEP client-creates-offer
 * (handleOffer()). Video (H.264/H.265) is packetized into RTP here,
 * including FU-A fragmentation; audio payloads are sent as single RTP
 * packets. RTCP receiver-report stats are surfaced via `onStats`.
 */
var AdaptiveSession = class _AdaptiveSession {
  sessionId;
  // Async iterable of { type: "video" | "audio", frame } media frames.
  source;
  logger;
  // Optional intercom hooks; presence also switches audio to sendrecv.
  intercom;
  iceConfig;
  onStats;
  debug;
  createdAt;
  // Lifecycle: "new" → "connecting" → "connected" → "disconnected"/"closed".
  state = "new";
  pc = null;
  videoTrack = null;
  audioTrack = null;
  /** Transceiver senders for direct sendRtp (more reliable than track.writeRtp) */
  videoSender = null;
  audioSender = null;
  // AbortController for the currently running feed loop (null when idle).
  feedAbort = null;
  closed = false;
  statsTimer = null;
  /** RTP sequence number counter (must increment per packet). */
  videoSeqNum = 0;
  audioSeqNum = 0;
  /** Previous RTCP stats for delta calculation. */
  // NOTE(review): prevPacketsReceived/prevPacketsLost are never read in
  // this class as bundled — possibly vestigial; confirm before removing.
  prevPacketsReceived = 0;
  prevPacketsLost = 0;
  constructor(options) {
    this.sessionId = options.sessionId;
    this.source = options.source;
    this.logger = options.logger;
    this.intercom = options.intercom;
    this.iceConfig = options.iceConfig;
    this.onStats = options.onStats;
    this.debug = options.debug ?? false;
    this.createdAt = Date.now();
  }
  /** Build PeerConnection options including H.264 codec config. */
  async buildPcOptions() {
    const werift = await loadWerift();
    const iceServers = [];
    if (this.iceConfig?.stunServers) {
      for (const url of this.iceConfig.stunServers) iceServers.push({ urls: url });
    }
    if (this.iceConfig?.turnServers) {
      for (const turn of this.iceConfig.turnServers) {
        iceServers.push({ urls: turn.urls, username: turn.username, credential: turn.credential });
      }
    }
    const pcOptions = {
      // H.264 + Opus codecs with RTCP feedback (matching Scrypted's proven config)
      codecs: {
        video: [
          new werift.RTCRtpCodecParameters({
            mimeType: "video/H264",
            clockRate: 9e4,
            payloadType: 96,
            parameters: "level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42e01f",
            rtcpFeedback: [
              { type: "transport-cc" },
              { type: "ccm", parameter: "fir" },
              { type: "nack" },
              { type: "nack", parameter: "pli" },
              { type: "goog-remb" }
            ]
          })
        ],
        audio: [
          // G.711 µ-law: static payload type 0, 8 kHz mono.
          new werift.RTCRtpCodecParameters({
            mimeType: "audio/PCMU",
            clockRate: 8e3,
            payloadType: 0,
            channels: 1,
            parameters: ""
          })
        ]
      }
    };
    if (iceServers.length > 0) pcOptions.iceServers = iceServers;
    if (this.iceConfig?.portRange) pcOptions.icePortRange = this.iceConfig.portRange;
    if (this.iceConfig?.additionalHostAddresses) {
      pcOptions.iceAdditionalHostAddresses = this.iceConfig.additionalHostAddresses;
    }
    return { werift, pcOptions };
  }
  /** Create offer SDP (server → client). */
  async createOffer() {
    const { werift, pcOptions } = await this.buildPcOptions();
    this.pc = new werift.RTCPeerConnection(pcOptions);
    this.pc.iceConnectionStateChange.subscribe((state) => {
      this.logger.debug(`[session:${this.sessionId}] ICE: ${state}`);
      if (state === "connected") {
        this.state = "connected";
        this.startStatsCollection();
      } else if (state === "disconnected" || state === "failed" || state === "closed") {
        this.state = state === "disconnected" ? "disconnected" : "closed";
        void this.close();
      }
    });
    this.videoTrack = new werift.MediaStreamTrack({ kind: "video" });
    const videoTransceiver = this.pc.addTransceiver(this.videoTrack, { direction: "sendonly" });
    this.videoSender = videoTransceiver.sender;
    this.audioTrack = new werift.MediaStreamTrack({ kind: "audio" });
    // Two-way audio requires sendrecv; otherwise we only transmit.
    const audioDir = this.intercom ? "sendrecv" : "sendonly";
    const audioTransceiver = this.pc.addTransceiver(this.audioTrack, { direction: audioDir });
    this.audioSender = audioTransceiver.sender;
    if (this.intercom) {
      const cb = this.intercom.onAudioReceived;
      audioTransceiver.onTrack.subscribe((track) => {
        track.onReceiveRtp.subscribe((pkt) => {
          try {
            const payload = pkt.payload;
            // NOTE(review): incoming payload is labeled "Opus" unconditionally —
            // confirm the client always negotiates Opus for the return path.
            if (payload?.length > 0) void cb(payload, "Opus");
          } catch (err) {
            this.logger.error(`[session:${this.sessionId}] Intercom error:`, err);
          }
        });
      });
    }
    const offer = await this.pc.createOffer();
    await this.pc.setLocalDescription(offer);
    // Wait for ICE gathering to complete, but cap the wait at 5 seconds.
    await new Promise((resolve) => {
      if (this.pc.iceGatheringState === "complete") {
        resolve();
        return;
      }
      this.pc.iceGatheringStateChange.subscribe((state) => {
        if (state === "complete") resolve();
      });
      setTimeout(resolve, 5e3);
    });
    let finalSdp = this.pc.localDescription?.sdp ?? offer.sdp;
    // SDP munge: advertise actpass so the answering peer can pick its DTLS role.
    finalSdp = finalSdp.replace(/a=setup:active\r?\n/g, "a=setup:actpass\r\n");
    this.state = "connecting";
    this.logger.info(`[session:${this.sessionId}] Offer created`);
    return { sdp: finalSdp, type: "offer" };
  }
  /** Handle WHEP answer: client sends SDP answer, we set remote description and start feeding. */
  async handleAnswer(answer) {
    if (!this.pc) throw new Error("Call createOffer() first");
    const werift = await loadWerift();
    const desc = new werift.RTCSessionDescription(answer.sdp, answer.type);
    await this.pc.setRemoteDescription(desc);
    this.logger.info(`[session:${this.sessionId}] Answer set, feeding started`);
    this.startFeedingFrames();
  }
  /**
   * Handle WHEP offer: client sends SDP offer, we create answer.
   *
   * Uses the server-creates-offer pattern internally: we create our own offer
   * with sendonly tracks, then use the client's offer codecs to build a
   * compatible answer. This avoids werift transceiver direction issues.
   */
  async handleOffer(clientOffer) {
    const { werift, pcOptions } = await this.buildPcOptions();
    this.pc = new werift.RTCPeerConnection(pcOptions);
    this.pc.iceConnectionStateChange.subscribe((state) => {
      this.logger.debug(`[session:${this.sessionId}] ICE: ${state}`);
      if (state === "connected") {
        this.state = "connected";
        this.startStatsCollection();
      } else if (state === "disconnected" || state === "failed" || state === "closed") {
        this.state = state === "disconnected" ? "disconnected" : "closed";
        void this.close();
      }
    });
    const remoteDesc = new werift.RTCSessionDescription(clientOffer.sdp, clientOffer.type);
    await this.pc.setRemoteDescription(remoteDesc);
    // Attach our outgoing tracks to the transceivers the client offered.
    // NOTE(review): this path never assigns this.videoSender/audioSender,
    // and writeVideoNals()/writeAudio() bail out when the sender is null —
    // confirm whether WHEP sessions are expected to send via track instead.
    const transceivers = this.pc.getTransceivers();
    for (const t of transceivers) {
      const kind = t.receiver?.track?.kind ?? t.kind;
      if (kind === "video" && !this.videoTrack) {
        this.videoTrack = new werift.MediaStreamTrack({ kind: "video" });
        await t.sender.replaceTrack(this.videoTrack);
      } else if (kind === "audio" && !this.audioTrack) {
        this.audioTrack = new werift.MediaStreamTrack({ kind: "audio" });
        await t.sender.replaceTrack(this.audioTrack);
      }
    }
    if (!this.videoTrack) {
      this.logger.warn(`[session:${this.sessionId}] No video transceiver found in offer, adding one`);
      this.videoTrack = new werift.MediaStreamTrack({ kind: "video" });
      this.pc.addTransceiver(this.videoTrack, { direction: "sendonly" });
    }
    if (!this.audioTrack) {
      this.logger.warn(`[session:${this.sessionId}] No audio transceiver found in offer, adding one`);
      this.audioTrack = new werift.MediaStreamTrack({ kind: "audio" });
      this.pc.addTransceiver(this.audioTrack, { direction: "sendonly" });
    }
    const answer = await this.pc.createAnswer();
    await this.pc.setLocalDescription(answer);
    this.state = "connecting";
    this.logger.info(`[session:${this.sessionId}] WHEP answer created`);
    this.startFeedingFrames();
    return { sdp: answer.sdp, type: "answer" };
  }
  /** Add ICE candidate. */
  async addIceCandidate(candidate) {
    if (!this.pc) throw new Error("Call createOffer() first");
    const werift = await loadWerift();
    await this.pc.addIceCandidate(new werift.RTCIceCandidate(candidate));
  }
  /**
   * Detach the frame source (for connection pooling).
   * The session stays alive (ICE/DTLS connected) but stops feeding frames.
   * Call replaceSource() later to reattach a camera.
   */
  detachSource() {
    if (this.feedAbort) {
      this.feedAbort.abort();
      this.feedAbort = null;
    }
    this.logger.debug(`[session:${this.sessionId}] Source detached (idle)`);
  }
  /** Whether the session has an active feed (vs idle/pooled). */
  get isFeeding() {
    return this.feedAbort !== null && !this.feedAbort.signal.aborted;
  }
  /**
   * Replace the frame source (for seamless source switching).
   * The new source will take effect at the next keyframe.
   */
  replaceSource(newSource) {
    this.source = newSource;
    if (this.feedAbort) {
      this.feedAbort.abort();
      this.feedAbort = null;
    }
    this.startFeedingFrames();
  }
  /** Lightweight status snapshot for listing/monitoring. */
  getInfo() {
    return { sessionId: this.sessionId, state: this.state, createdAt: this.createdAt };
  }
  /** Tear down timers, the feed loop, the source iterator, and the PC. Idempotent. */
  async close() {
    if (this.closed) return;
    this.closed = true;
    this.state = "closed";
    this.logger.info(`[session:${this.sessionId}] Closing`);
    if (this.statsTimer) {
      clearInterval(this.statsTimer);
      this.statsTimer = null;
    }
    if (this.feedAbort) {
      this.feedAbort.abort();
      this.feedAbort = null;
    }
    try {
      // Politely finish the async generator so upstream can release resources.
      await this.source.return(void 0);
    } catch {
    }
    if (this.pc) {
      try {
        await this.pc.close();
      } catch {
      }
      this.pc = null;
    }
    this.videoTrack = null;
    this.audioTrack = null;
  }
  // -----------------------------------------------------------------------
  // Frame feeding
  // -----------------------------------------------------------------------
  /**
   * Start the background loop that drains `source` and writes RTP.
   * Video is held back until the first keyframe; video RTP timestamps run
   * on the 90 kHz clock relative to the first forwarded frame.
   */
  startFeedingFrames() {
    this.feedAbort = new AbortController();
    const { signal } = this.feedAbort;
    void (async () => {
      let gotKeyframe = false;
      let videoTimestampBase = null;
      let audioTimestampBase = null;
      let frameCount = 0;
      try {
        for await (const mediaFrame of this.source) {
          if (signal.aborted || this.closed) break;
          frameCount++;
          if (this.debug && (frameCount <= 5 || frameCount % 100 === 0)) {
            this.logger.debug(
              `[session:${this.sessionId}] Frame #${frameCount}: ${mediaFrame.type} size=${mediaFrame.frame.data.length} ` + (mediaFrame.type === "video" ? `key=${mediaFrame.frame.isKeyframe}` : "")
            );
          }
          if (mediaFrame.type === "video") {
            const frame = mediaFrame.frame;
            const annexB = frame.codec === "H264" ? convertH264ToAnnexB(frame.data) : convertH265ToAnnexB(frame.data);
            if (!gotKeyframe) {
              // Drop everything until an IDR (H.264) / IRAP (H.265) access
              // unit so the decoder can start cleanly.
              const isKey = frame.codec === "H264" ? isH264IdrAccessUnit(annexB) : isH265IrapAccessUnit(annexB);
              if (!isKey) continue;
              gotKeyframe = true;
              if (this.debug) {
                const iceState = this.pc?.iceConnectionState ?? "unknown";
                const connState = this.pc?.connectionState ?? "unknown";
                this.logger.info(
                  `[session:${this.sessionId}] First keyframe at frame #${frameCount}, size=${annexB.length}, ICE=${iceState}, conn=${connState}`
                );
              }
            }
            if (videoTimestampBase === null) videoTimestampBase = frame.timestampMicros;
            // µs → 90 kHz RTP clock, wrapped to unsigned 32-bit.
            const rtpTs = Math.floor(
              (frame.timestampMicros - videoTimestampBase) * 9e4 / 1e6
            ) >>> 0;
            // Strip AUD (type 9) and SEI (type 6) NAL units before packetizing.
            const nals = splitAnnexBToNals(annexB).filter((n) => {
              const t = n[0] & 31;
              return t !== 9 && t !== 6;
            });
            if (nals.length > 0 && this.videoTrack) {
              this.writeVideoNals(nals, rtpTs, frame.codec);
              if (this.debug && frameCount % 250 === 0) {
                this.logger.info(
                  `[session:${this.sessionId}] ${frameCount} frames, ${this.rtpPacketsSent} RTP pkts, ICE=${this.pc?.iceConnectionState}, conn=${this.pc?.connectionState}`
                );
              }
            }
          } else if (mediaFrame.type === "audio") {
            const frame = mediaFrame.frame;
            if (!this.audioSender) continue;
            if (audioTimestampBase === null) audioTimestampBase = 0;
            // Advance the audio RTP clock by the payload byte count.
            // Assumes 1 byte per sample (8 kHz G.711) — TODO confirm for
            // compressed codecs where bytes ≠ samples.
            audioTimestampBase = audioTimestampBase + frame.data.length >>> 0;
            const rtpTs = audioTimestampBase;
            this.writeAudio(frame.data, rtpTs, frame.codec);
          }
        }
      } catch (err) {
        if (!signal.aborted && !this.closed) {
          this.logger.error(`[session:${this.sessionId}] Feed error:`, err);
        }
      } finally {
        // Source exhaustion (not an explicit detach) closes the session.
        if (!this.closed) {
          this.logger.info(`[session:${this.sessionId}] Feed ended`);
          void this.close();
        }
      }
    })();
  }
  /** Build a serialized RTP packet for sender.sendRtp(). */
  buildRtpBuffer(weriftModule, payload, rtpTs, payloadType, marker, isVideo) {
    const header = new weriftModule.RtpHeader();
    header.payloadType = payloadType;
    header.timestamp = rtpTs;
    header.marker = marker;
    // Per-stream 16-bit sequence counters, incremented on every packet.
    header.sequenceNumber = isVideo ? this.videoSeqNum = this.videoSeqNum + 1 & 65535 : this.audioSeqNum = this.audioSeqNum + 1 & 65535;
    const pkt = new weriftModule.RtpPacket(header, payload);
    return pkt.serialize();
  }
  /** Max RTP payload size (MTU 1200 to stay under typical network MTU). */
  static MAX_RTP_PAYLOAD = 1200;
  rtpPacketsSent = 0;
  /**
   * Packetize one access unit's NAL units into RTP. Small NALs go out as
   * single packets; oversized ones are split with H.264 FU-A (type 28,
   * S bit 0x80 on the first fragment, E bit 0x40 on the last). The RTP
   * marker is set only on the final packet of the access unit.
   * NOTE(review): the FU-A header layout assumes a 1-byte H.264 NAL header;
   * H.265 (pt 97 below) uses 2-byte headers and FU type 49 — confirm the
   * H.265 fragmentation path.
   */
  writeVideoNals(nals, rtpTs, codec) {
    if (!this.videoSender || !_werift) return;
    const pt = codec === "H264" ? 96 : 97;
    const sendPkt = (payload, marker) => {
      try {
        const buf = this.buildRtpBuffer(_werift, payload, rtpTs, pt, marker, true);
        this.videoSender.sendRtp(buf);
        this.rtpPacketsSent++;
      } catch (err) {
        // Log only the first few failures to avoid flooding on a dead sender.
        if (this.rtpPacketsSent <= 10) {
          this.logger.error(`[session:${this.sessionId}] sendRtp error #${this.rtpPacketsSent}:`, err);
        }
      }
    };
    for (let i = 0; i < nals.length; i++) {
      const nal = nals[i];
      const isLastNal = i === nals.length - 1;
      if (nal.length <= _AdaptiveSession.MAX_RTP_PAYLOAD) {
        sendPkt(nal, isLastNal);
      } else {
        const nalHeader = nal[0];
        const fnri = nalHeader & 224;
        const nalType = nalHeader & 31;
        const fuIndicator = fnri | 28;
        const nalBody = nal.subarray(1);
        let offset = 0;
        let isFirst = true;
        while (offset < nalBody.length) {
          const end = Math.min(offset + _AdaptiveSession.MAX_RTP_PAYLOAD - 2, nalBody.length);
          const isLast = end >= nalBody.length;
          let fuHeader = nalType;
          if (isFirst) fuHeader |= 128;
          if (isLast) fuHeader |= 64;
          const fragment = Buffer.alloc(2 + (end - offset));
          fragment[0] = fuIndicator;
          fragment[1] = fuHeader;
          nalBody.copy(fragment, 2, offset, end);
          sendPkt(fragment, isLastNal && isLast);
          offset = end;
          isFirst = false;
        }
      }
    }
  }
  /** Send one audio payload as a single RTP packet (pt 0 for G.711, 111 otherwise). */
  writeAudio(data, rtpTs, codec) {
    if (!this.audioSender || !_werift) return;
    const pt = codec === "Pcmu" || codec === "Pcma" ? 0 : 111;
    try {
      const buf = this.buildRtpBuffer(_werift, data, rtpTs, pt, true, false);
      this.audioSender.sendRtp(buf);
    } catch (err) {
      this.logger.debug(`[session:${this.sessionId}] Audio write error:`, err);
    }
  }
  // -----------------------------------------------------------------------
  // RTCP stats collection
  // -----------------------------------------------------------------------
  /** Poll RTCP receiver reports every 3 s once connected (at most one timer). */
  startStatsCollection() {
    if (this.statsTimer || !this.onStats) return;
    this.statsTimer = setInterval(() => {
      if (!this.pc || this.closed) return;
      this.collectStats();
    }, 3e3);
  }
  /**
   * Read the most recent receiver report from the first video sender and
   * forward it via onStats. fractionLost is a fixed-point /256 fraction
   * per RTCP. Accessor names are probed defensively (werift internals) and
   * any failure is swallowed — stats are best-effort.
   */
  collectStats() {
    if (!this.pc || !this.onStats) return;
    try {
      const senders = this.pc.getSenders?.() ?? [];
      for (const sender of senders) {
        const track = sender.track;
        if (!track || track.kind !== "video") continue;
        const report = sender.lastReceiverReport ?? sender.rtcpReport;
        if (!report) continue;
        const fractionLost = report.fractionLost ?? 0;
        const packetsLost = report.packetsLost ?? report.cumulativeLost ?? 0;
        const jitter = report.jitter ?? 0;
        const rtt = report.roundTripTime ?? report.rtt ?? 0;
        const packetLoss = fractionLost / 256;
        this.onStats({
          sessionId: this.sessionId,
          packetLoss,
          jitterMs: jitter,
          rttMs: rtt * 1e3,
          // seconds → ms
          packetsReceived: 0,
          // Not available from sender side
          packetsLost,
          timestamp: Date.now()
        });
        return;
      }
    } catch {
    }
  }
};
|
|
5467
|
-
|
|
5468
|
-
// src/webrtc/rtsp-relay.ts
|
|
5469
|
-
import { spawn as spawn6 } from "child_process";
|
|
5470
|
-
/**
 * RTSP → RTSP transcoding relay driven by an ffmpeg child process.
 * Re-encodes `rtspUrl` to H.264/AAC at the configured bitrate/resolution
 * and publishes to `rtspOutputUrl`. ffmpeg is restarted automatically on
 * unexpected exit and whenever the encoding parameters change.
 */
var AdaptiveRtspRelay = class {
  rtspUrl;
  rtspOutputUrl;
  ffmpegPath;
  logger;
  label;
  currentParams;
  proc = null;
  closed = false;
  constructor(options) {
    this.rtspUrl = options.rtspUrl;
    this.rtspOutputUrl = options.rtspOutputUrl;
    this.ffmpegPath = options.ffmpegPath ?? "ffmpeg";
    this.logger = options.logger;
    this.label = options.label ?? "adaptive-rtsp";
    this.currentParams = { ...options.initialParams };
  }
  /** Defensive copy of the current encoding parameters. */
  getParams() {
    return { ...this.currentParams };
  }
  /** Start the ffmpeg relay. */
  start() {
    if (!this.closed) {
      this.spawnFfmpeg();
    }
  }
  /** Hot-swap encoding parameters by restarting ffmpeg. */
  async updateParams(params) {
    const before = { ...this.currentParams };
    const next = this.currentParams;
    if (params.maxBitrateKbps !== void 0) next.maxBitrateKbps = params.maxBitrateKbps;
    if (params.width !== void 0) next.width = params.width;
    if (params.height !== void 0) next.height = params.height;
    if (params.preset !== void 0) next.preset = params.preset;
    // Only bitrate/resolution changes justify an ffmpeg restart; a
    // preset-only change is recorded but applied on the next respawn.
    const sameOutput = before.maxBitrateKbps === next.maxBitrateKbps && before.width === next.width && before.height === next.height;
    if (sameOutput) return;
    this.logger?.info(
      `[${this.label}] Updating: ${before.maxBitrateKbps}kbps ${before.width}x${before.height} \u2192 ${next.maxBitrateKbps}kbps ${next.width}x${next.height}`
    );
    await this.killFfmpeg();
    if (!this.closed) this.spawnFfmpeg();
  }
  /** Stop the relay. */
  async stop() {
    if (this.closed) return;
    this.closed = true;
    await this.killFfmpeg();
  }
  /** Check if ffmpeg is running. */
  isRunning() {
    return this.proc !== null;
  }
  // -----------------------------------------------------------------------
  // Private
  // -----------------------------------------------------------------------
  /** Assemble the full ffmpeg argv from the current parameters. */
  buildFfmpegArgs() {
    const { maxBitrateKbps, width, height, preset } = this.currentParams;
    const args = [
      "-hide_banner",
      "-loglevel",
      "error",
      // Input
      "-rtsp_transport",
      "tcp",
      "-i",
      this.rtspUrl,
      // Video encoding
      "-c:v",
      "libx264",
      "-preset",
      preset ?? "ultrafast",
      "-tune",
      "zerolatency",
      "-crf",
      "28",
      "-maxrate",
      `${maxBitrateKbps}k`,
      "-bufsize",
      `${Math.round(maxBitrateKbps * 0.5)}k`,
      "-g",
      "50",
      "-keyint_min",
      "25"
    ];
    // Only scale when an explicit resolution was requested.
    if (width > 0 && height > 0) {
      args.push("-vf", `scale=${width}:${height}`);
    }
    args.push("-c:a", "aac", "-b:a", "64k");
    args.push(
      "-f",
      "rtsp",
      "-rtsp_transport",
      "tcp",
      this.rtspOutputUrl
    );
    return args;
  }
  /** Launch ffmpeg, wire up logging, and arm the auto-restart handler. */
  spawnFfmpeg() {
    const { maxBitrateKbps, width, height } = this.currentParams;
    const child = spawn6(this.ffmpegPath, this.buildFfmpegArgs(), {
      stdio: ["ignore", "ignore", "pipe"]
    });
    this.proc = child;
    child.on("error", (err) => {
      this.logger?.error(`[${this.label}] ffmpeg spawn error: ${err.message}`);
    });
    child.on("close", (code, signal) => {
      this.logger?.debug(`[${this.label}] ffmpeg exited code=${code} signal=${signal}`);
      this.proc = null;
      // Respawn after a 2-second backoff unless the relay was stopped.
      if (this.closed) return;
      setTimeout(() => {
        if (!this.closed) this.spawnFfmpeg();
      }, 2e3);
    });
    child.stderr?.on("data", (chunk) => {
      const text = chunk.toString();
      if (text.includes("error") || text.includes("Error") || text.includes("fatal")) {
        this.logger?.error(`[${this.label}] ffmpeg: ${text.trim()}`);
      }
    });
    this.logger?.info(
      `[${this.label}] Started: ${maxBitrateKbps}kbps ` + (width > 0 ? `${width}x${height}` : "native") + ` \u2192 ${this.rtspOutputUrl}`
    );
  }
  /** SIGTERM the current ffmpeg, escalating to SIGKILL after 3 seconds. */
  async killFfmpeg() {
    const child = this.proc;
    if (!child) return;
    this.proc = null;
    try {
      child.kill("SIGTERM");
    } catch {
    }
    await new Promise((resolve) => {
      const timer = setTimeout(() => {
        try {
          child.kill("SIGKILL");
        } catch {
        }
        resolve();
      }, 3e3);
      child.on("close", () => {
        clearTimeout(timer);
        resolve();
      });
    });
  }
};
|
|
5610
|
-
|
|
5611
|
-
// src/webrtc/shared-session.ts
|
|
5612
|
-
// Module-level cache for the dynamically imported `werift` ESM module.
var _werift2;
/**
 * Lazily import and memoize the `werift` WebRTC implementation.
 * The indirect `Function("m", "return import(m)")` trampoline is deliberate:
 * it prevents bundlers from statically resolving or rewriting the dynamic
 * import of an optional peer dependency.
 * @returns {Promise<object>} the werift module namespace
 */
async function loadWerift2() {
  if (!_werift2) {
    const moduleName = "werift";
    _werift2 = await Function("m", "return import(m)")(moduleName);
  }
  return _werift2;
}
|
|
5619
|
-
/**
 * A single WebRTC peer connection shared by multiple camera tracks.
 *
 * Lifecycle: createOffer() (data channel only) → client answer via
 * handleAnswer() → the client requests tracks over the "control" data
 * channel (addTrack/removeTrack), each of which triggers an SDP
 * renegotiation driven by this side.
 *
 * Video is sent as H.264 (payload type 96, single-NAL or FU-A
 * fragmentation), audio as Opus (payload type 111). Frames are pulled from
 * the async-iterable returned by `onTrackRequested(cameraName)`.
 */
var SharedSession = class {
  constructor(options) {
    this.options = options;
    this.logger = options.logger;
    this.iceConfig = options.iceConfig;
    this.onTrackRequested = options.onTrackRequested;
    this.onTrackReleased = options.onTrackReleased;
  }
  logger;
  iceConfig;
  onTrackRequested;
  onTrackReleased;
  // werift RTCPeerConnection, created in createOffer().
  pc = null;
  // "control" data channel used for track signaling and renegotiation.
  dataChannel = null;
  // trackId → { trackId, cameraName, senders, feedAbort, seq counters }.
  activeTracks = /* @__PURE__ */ new Map();
  closed = false;
  // True while a renegotiation offer is outstanding (cleared by the answer).
  negotiating = false;
  /** Create the initial SDP offer (with data channel, no media tracks yet). */
  async createOffer() {
    const werift = await loadWerift2();
    const iceServers = [];
    if (this.iceConfig?.stunServers) {
      for (const url of this.iceConfig.stunServers) iceServers.push({ urls: url });
    }
    if (this.iceConfig?.turnServers) {
      for (const t of this.iceConfig.turnServers) {
        iceServers.push({ urls: t.urls, username: t.username, credential: t.credential });
      }
    }
    // Pin the codec set so every later transceiver negotiates the same
    // H.264 baseline / Opus parameters the feeder emits.
    const pcOptions = {
      codecs: {
        video: [
          new werift.RTCRtpCodecParameters({
            mimeType: "video/H264",
            clockRate: 9e4,
            payloadType: 96,
            parameters: "level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42e01f",
            rtcpFeedback: [
              { type: "transport-cc" },
              { type: "ccm", parameter: "fir" },
              { type: "nack" },
              { type: "nack", parameter: "pli" },
              { type: "goog-remb" }
            ]
          })
        ],
        audio: [
          new werift.RTCRtpCodecParameters({
            mimeType: "audio/opus",
            clockRate: 48e3,
            payloadType: 111,
            channels: 2,
            parameters: "minptime=10;useinbandfec=1"
          })
        ]
      }
    };
    if (iceServers.length > 0) pcOptions.iceServers = iceServers;
    if (this.iceConfig?.portRange) pcOptions.icePortRange = this.iceConfig.portRange;
    if (this.iceConfig?.additionalHostAddresses) {
      pcOptions.iceAdditionalHostAddresses = this.iceConfig.additionalHostAddresses;
    }
    this.pc = new werift.RTCPeerConnection(pcOptions);
    this.pc.iceConnectionStateChange.subscribe((state) => {
      this.logger.debug(`[shared] ICE: ${state}`);
    });
    this.dataChannel = this.pc.createDataChannel("control", { ordered: true });
    this.dataChannel.message.subscribe((msg) => {
      try {
        const data = JSON.parse(typeof msg === "string" ? msg : msg.toString());
        this.handleDataChannelMessage(data);
      } catch (err) {
        this.logger.error("[shared] DC message parse error:", err);
      }
    });
    const offer = await this.pc.createOffer();
    await this.pc.setLocalDescription(offer);
    // Wait for ICE gathering so the offer carries candidates (no trickle),
    // but cap the wait at 5s so a stalled gather can't hang signaling.
    await new Promise((resolve) => {
      if (this.pc.iceGatheringState === "complete") {
        resolve();
        return;
      }
      this.pc.iceGatheringStateChange.subscribe((state) => {
        if (state === "complete") resolve();
      });
      setTimeout(resolve, 5e3);
    });
    const sdp = this.pc.localDescription?.sdp ?? offer.sdp;
    this.logger.info("[shared] Initial offer created (data channel only)");
    return sdp;
  }
  /** Handle the client's SDP answer. */
  async handleAnswer(sdpAnswer) {
    const werift = await loadWerift2();
    const desc = new werift.RTCSessionDescription(sdpAnswer, "answer");
    await this.pc.setRemoteDescription(desc);
    this.logger.info("[shared] Answer set, connection ready");
  }
  /** Close the shared session and all tracks. Idempotent. */
  async close() {
    if (this.closed) return;
    this.closed = true;
    // Stop every feeder loop and notify the owner that each camera is free.
    for (const [, track] of this.activeTracks) {
      track.feedAbort.abort();
      this.onTrackReleased?.(track.cameraName);
    }
    this.activeTracks.clear();
    if (this.pc) {
      try {
        await this.pc.close();
      } catch {
      }
      this.pc = null;
    }
    this.logger.info("[shared] Session closed");
  }
  // -----------------------------------------------------------------------
  // Data channel message handling
  // -----------------------------------------------------------------------
  // Dispatch a parsed control message from the client. Errors are reported
  // back over the data channel rather than tearing down the session.
  async handleDataChannelMessage(msg) {
    try {
      switch (msg.type) {
        case "addTrack":
          await this.handleAddTrack(msg.cameraName, msg.trackId);
          break;
        case "removeTrack":
          await this.handleRemoveTrack(msg.trackId);
          break;
        case "answer":
          await this.handleRenegotiationAnswer(msg.sdp);
          break;
        default:
          this.logger.warn("[shared] Unknown DC message type:", msg.type);
      }
    } catch (err) {
      this.logger.error("[shared] DC handler error:", err);
      this.sendDC({ type: "error", message: err.message });
    }
  }
  // Add a video+audio transceiver pair for a camera, renegotiate, then start
  // feeding frames. The audio transceiver is sendrecv only when an intercom
  // callback is configured, so client microphone audio can flow back.
  async handleAddTrack(cameraName, trackId) {
    if (this.activeTracks.has(trackId)) {
      this.sendDC({ type: "error", message: `Track ${trackId} already exists` });
      return;
    }
    const werift = await loadWerift2();
    const videoTrack = new werift.MediaStreamTrack({ kind: "video" });
    const videoTransceiver = this.pc.addTransceiver(videoTrack, { direction: "sendonly" });
    const audioTrack = new werift.MediaStreamTrack({ kind: "audio" });
    const audioDirection = this.options.onIntercomAudio ? "sendrecv" : "sendonly";
    const audioTransceiver = this.pc.addTransceiver(audioTrack, { direction: audioDirection });
    if (this.options.onIntercomAudio) {
      const intercomCb = this.options.onIntercomAudio;
      // Forward raw RTP payloads from the client's mic to the intercom sink.
      audioTransceiver.onTrack.subscribe((incomingTrack) => {
        incomingTrack.onReceiveRtp.subscribe((rtpPacket) => {
          const payload = rtpPacket.payload;
          if (payload?.length > 0) {
            intercomCb(cameraName, payload);
          }
        });
      });
    }
    // Renegotiate BEFORE reading mids: mids are assigned during negotiation.
    await this.renegotiate();
    const videoMid = videoTransceiver.mid;
    const audioMid = audioTransceiver.mid;
    const source = this.onTrackRequested(cameraName);
    if (!source) {
      this.sendDC({ type: "error", message: `Camera not found: ${cameraName}` });
      return;
    }
    const feedAbort = new AbortController();
    const activeTrack = {
      trackId,
      cameraName,
      videoSender: videoTransceiver.sender,
      audioSender: audioTransceiver.sender,
      feedAbort,
      videoSeqNum: 0,
      audioSeqNum: 0
    };
    this.activeTracks.set(trackId, activeTrack);
    this.sendDC({ type: "trackReady", trackId, videoMid, audioMid });
    this.startFeeding(activeTrack, source);
    this.logger.info(`[shared] Track "${trackId}" added for camera "${cameraName}" (video=${videoMid}, audio=${audioMid})`);
  }
  // Stop feeding a track, release its camera, and renegotiate it away.
  async handleRemoveTrack(trackId) {
    const track = this.activeTracks.get(trackId);
    if (!track) return;
    track.feedAbort.abort();
    this.activeTracks.delete(trackId);
    this.onTrackReleased?.(track.cameraName);
    await this.renegotiate();
    this.sendDC({ type: "trackRemoved", trackId });
    this.logger.info(`[shared] Track "${trackId}" removed`);
  }
  // Apply the client's answer to an in-flight renegotiation; clearing
  // `negotiating` releases the poll loop inside renegotiate().
  async handleRenegotiationAnswer(sdp) {
    const werift = await loadWerift2();
    const desc = new werift.RTCSessionDescription(sdp, "answer");
    await this.pc.setRemoteDescription(desc);
    this.negotiating = false;
    this.logger.debug("[shared] Renegotiation answer set");
  }
  // -----------------------------------------------------------------------
  // SDP renegotiation
  // -----------------------------------------------------------------------
  // Create a new offer, send it over the data channel, and wait (poll every
  // 50ms, up to 10s) for handleRenegotiationAnswer() to clear `negotiating`.
  async renegotiate() {
    if (!this.pc || !this.dataChannel) return;
    this.negotiating = true;
    const offer = await this.pc.createOffer();
    await this.pc.setLocalDescription(offer);
    // Same gather-then-timeout pattern as createOffer(), with a shorter 3s
    // cap since candidates were already gathered once.
    await new Promise((resolve) => {
      if (this.pc.iceGatheringState === "complete") {
        resolve();
        return;
      }
      this.pc.iceGatheringStateChange.subscribe((state) => {
        if (state === "complete") resolve();
      });
      setTimeout(resolve, 3e3);
    });
    const sdp = this.pc.localDescription?.sdp ?? offer.sdp;
    this.sendDC({ type: "offer", sdp });
    await new Promise((resolve) => {
      const check = setInterval(() => {
        if (!this.negotiating) {
          clearInterval(check);
          resolve();
        }
      }, 50);
      setTimeout(() => {
        clearInterval(check);
        resolve();
      }, 1e4);
    });
  }
  // -----------------------------------------------------------------------
  // Frame feeding
  // -----------------------------------------------------------------------
  // Run a detached async loop that packetizes frames from `source` into RTP
  // and hands them to the track's senders. Stops when the track's
  // AbortController fires or the session closes.
  startFeeding(track, source) {
    const { signal } = track.feedAbort;
    // loadWerift2() has already run by the time a track exists; bail if not.
    const werift = _werift2;
    if (!werift) return;
    void (async () => {
      let gotKeyframe = false;
      // Timestamp bases: RTP timestamps are made relative to the first
      // frame seen on each media kind.
      let videoTimestampBase = null;
      let audioTimestampBase = null;
      try {
        for await (const mediaFrame of source) {
          if (signal.aborted || this.closed) break;
          if (mediaFrame.type === "audio") {
            const frame2 = mediaFrame.frame;
            if (audioTimestampBase === null) audioTimestampBase = frame2.timestampMicros;
            // Convert micros → audio clock ticks (sampleRate, default 48kHz).
            const rtpTs2 = Math.floor(
              (frame2.timestampMicros - audioTimestampBase) * (frame2.sampleRate || 48e3) / 1e6
            ) >>> 0;
            // 16-bit sequence number wrap.
            track.audioSeqNum = track.audioSeqNum + 1 & 65535;
            const header = new werift.RtpHeader();
            header.payloadType = 111;
            header.timestamp = rtpTs2;
            header.marker = true;
            header.sequenceNumber = track.audioSeqNum;
            const pkt = new werift.RtpPacket(header, frame2.data);
            try {
              track.audioSender.sendRtp(pkt.serialize());
            } catch {
            }
            continue;
          }
          if (mediaFrame.type !== "video") continue;
          const frame = mediaFrame.frame;
          const annexB = convertH264ToAnnexB(frame.data);
          // Drop everything until the first IDR so the decoder can start clean.
          if (!gotKeyframe) {
            if (!isH264IdrAccessUnit(annexB)) continue;
            gotKeyframe = true;
          }
          if (videoTimestampBase === null) videoTimestampBase = frame.timestampMicros;
          // Video RTP clock is fixed at 90kHz.
          const rtpTs = Math.floor(
            (frame.timestampMicros - videoTimestampBase) * 9e4 / 1e6
          ) >>> 0;
          // Strip AUD (9) and SEI (6) NALs; only VCL/parameter sets are sent.
          const nals = splitAnnexBToNals(annexB).filter((n) => {
            const t = n[0] & 31;
            return t !== 9 && t !== 6;
          });
          for (let i = 0; i < nals.length; i++) {
            const nal = nals[i];
            const isLastNal = i === nals.length - 1;
            if (nal.length <= 1200) {
              // Single NAL unit packet (fits in one ~1200-byte MTU budget).
              track.videoSeqNum = track.videoSeqNum + 1 & 65535;
              const header = new werift.RtpHeader();
              header.payloadType = 96;
              header.timestamp = rtpTs;
              // Marker set only on the last packet of the access unit.
              header.marker = isLastNal;
              header.sequenceNumber = track.videoSeqNum;
              const pkt = new werift.RtpPacket(header, nal);
              try {
                track.videoSender.sendRtp(pkt.serialize());
              } catch {
              }
            } else {
              // FU-A fragmentation (RFC 6184 §5.8): split the NAL body into
              // ≤1198-byte chunks behind a 2-byte FU indicator/header.
              const nalHeader = nal[0];
              const fnri = nalHeader & 224;
              const nalType = nalHeader & 31;
              // FU indicator: original F/NRI bits with type 28 (FU-A).
              const fuIndicator = fnri | 28;
              const nalBody = nal.subarray(1);
              let offset = 0;
              let isFirst = true;
              while (offset < nalBody.length) {
                const end = Math.min(offset + 1198, nalBody.length);
                const isLast = end >= nalBody.length;
                // FU header: S bit on first fragment, E bit on last.
                let fuHeader = nalType;
                if (isFirst) fuHeader |= 128;
                if (isLast) fuHeader |= 64;
                const frag = Buffer.alloc(2 + (end - offset));
                frag[0] = fuIndicator;
                frag[1] = fuHeader;
                nalBody.copy(frag, 2, offset, end);
                track.videoSeqNum = track.videoSeqNum + 1 & 65535;
                const header = new werift.RtpHeader();
                header.payloadType = 96;
                header.timestamp = rtpTs;
                header.marker = isLastNal && isLast;
                header.sequenceNumber = track.videoSeqNum;
                const pkt = new werift.RtpPacket(header, frag);
                try {
                  track.videoSender.sendRtp(pkt.serialize());
                } catch {
                }
                offset = end;
                isFirst = false;
              }
            }
          }
        }
      } catch (err) {
        // An abort is the normal shutdown path; only log real feed failures.
        if (!signal.aborted) {
          this.logger.error(`[shared] Feed error for track "${track.trackId}":`, err);
        }
      }
    })();
  }
  // -----------------------------------------------------------------------
  // Helpers
  // -----------------------------------------------------------------------
  // Send a JSON control message if the data channel is open; silently drops
  // otherwise (the channel may not be open yet or may already be closed).
  sendDC(msg) {
    if (this.dataChannel?.readyState === "open") {
      this.dataChannel.send(JSON.stringify(msg));
    }
  }
  get isConnected() {
    return this.pc?.iceConnectionState === "connected" && !this.closed;
  }
  get trackCount() {
    return this.activeTracks.size;
  }
};
|
|
5973
|
-
|
|
5974
|
-
// src/webrtc/server.ts
|
|
5975
|
-
import crypto from "crypto";
|
|
5976
|
-
import { EventEmitter } from "events";
|
|
5977
|
-
/**
 * Build the default adaptive-quality ladder, highest tier first.
 * Each profile pairs a quality tier with its encoder settings and the camera
 * source profile ("main" or "sub") it should be fed from. A width/height of
 * 0 means "keep the camera's native resolution".
 * @returns {Array<{tier: string, encoding: {maxBitrateKbps: number, width: number, height: number}, sourceProfile: string}>}
 */
function createDefaultProfiles() {
  const high = {
    tier: "high",
    // native resolution
    encoding: { maxBitrateKbps: 6e3, width: 0, height: 0 },
    sourceProfile: "main"
  };
  const medium = {
    tier: "medium",
    encoding: { maxBitrateKbps: 2500, width: 1280, height: 720 },
    sourceProfile: "main"
  };
  const low = {
    tier: "low",
    encoding: { maxBitrateKbps: 1e3, width: 640, height: 360 },
    sourceProfile: "sub"
  };
  return [high, medium, low];
}
|
|
5997
|
-
var AdaptiveStreamServer = class extends EventEmitter {
|
|
5998
|
-
// Path or name of the ffmpeg binary used for per-camera transcoders.
ffmpegPath;
// Optional ICE configuration, passed through to every AdaptiveSession.
stunServers;
turnServers;
icePortRange;
iceAdditionalHostAddresses;
logger;
// cameraName → camera record (sources, fanouts, controller, sessions).
cameras = /* @__PURE__ */ new Map();
// sessionId → cameraName reverse index (includes the "__pool__" key).
sessionCamera = /* @__PURE__ */ new Map();
stopped = false;
/**
 * @param {object} [options]
 * @param {string} [options.ffmpegPath="ffmpeg"] ffmpeg binary to spawn
 * @param {string[]} [options.stunServers] STUN server URLs
 * @param {Array} [options.turnServers] TURN server descriptors
 * @param {[number, number]} [options.icePortRange] local ICE port range
 * @param {string[]} [options.iceAdditionalHostAddresses] extra host candidates
 * @param {object} [options.logger] logger; a no-op logger is used when absent
 */
constructor(options = {}) {
  super();
  this.ffmpegPath = options.ffmpegPath ?? "ffmpeg";
  this.stunServers = options.stunServers;
  this.turnServers = options.turnServers;
  this.icePortRange = options.icePortRange;
  this.iceAdditionalHostAddresses = options.iceAdditionalHostAddresses;
  this.logger = options.logger ? asLogger(options.logger) : createNullLogger();
  this.logger.info("[adaptive-server] Initialized");
}
|
|
6017
|
-
// -----------------------------------------------------------------------
|
|
6018
|
-
// Camera management
|
|
6019
|
-
// -----------------------------------------------------------------------
|
|
6020
|
-
/** Register a camera with adaptive streaming. */
|
|
6021
|
-
/**
 * Register a camera with adaptive streaming.
 * Builds (but does not start) the main ffmpeg source, its fanout, and the
 * adaptive quality controller; transcoding starts lazily when a session
 * subscribes (see ensureCameraRunning in createSession/attachCamera).
 * Re-registering an existing name is a no-op with a warning.
 */
addCamera(name, config) {
  if (this.cameras.has(name)) {
    this.logger.warn(`[adaptive-server] Camera "${name}" already registered`);
    return;
  }
  const profiles = config.profiles;
  // The first (highest) profile's encoding is the transcoder's starting point.
  const initialParams = profiles[0].encoding;
  const mainFfmpegSource = new AdaptiveFfmpegSource({
    rtspUrl: config.rtspUrl,
    initialParams,
    audioMode: config.audioMode ?? "copy",
    ffmpegPath: this.ffmpegPath,
    logger: this.logger,
    label: `ffmpeg:${name}:main`
  });
  // Fanout lets many sessions consume one ffmpeg source; slow consumers are
  // bounded at 30 queued items.
  const mainFanout = new StreamFanout({
    maxQueueItems: 30,
    createSource: () => mainFfmpegSource.source,
    onError: (err) => {
      this.logger.error(`[adaptive-server] Main fanout error (${name}):`, err);
    }
  });
  const controller = new AdaptiveController({
    profiles,
    onQualityChange: async (from, to) => {
      await this.handleQualityChange(name, from, to);
    },
    logger: this.logger
  });
  this.cameras.set(name, {
    config,
    mainFfmpegSource,
    mainFanout,
    // Sub-stream source/fanout are created on demand when a low tier needs
    // the camera's secondary profile.
    subFfmpegSource: null,
    subFanout: null,
    activeSourceProfile: "main",
    controller,
    sessions: /* @__PURE__ */ new Map(),
    autoStopTimer: null,
    switching: false
  });
  this.logger.info(`[adaptive-server] Camera "${name}" added`);
}
|
|
6064
|
-
/** Remove a camera and close all its sessions. */
|
|
6065
|
-
/**
 * Remove a camera and close all its sessions.
 * Sessions are closed in parallel (close errors ignored), then the fanouts
 * and ffmpeg sources are stopped and any pending auto-stop timer cleared.
 * No-op if the camera is unknown.
 */
async removeCamera(name) {
  const cam = this.cameras.get(name);
  if (!cam) return;
  const closePs = [];
  for (const [sid, session] of cam.sessions) {
    this.sessionCamera.delete(sid);
    closePs.push(session.close().catch(() => {
    }));
  }
  await Promise.all(closePs);
  cam.sessions.clear();
  // Stop consumers (fanout) before the producers (ffmpeg sources).
  await cam.mainFanout.stop();
  await cam.mainFfmpegSource.stop();
  if (cam.subFanout) await cam.subFanout.stop();
  if (cam.subFfmpegSource) await cam.subFfmpegSource.stop();
  if (cam.autoStopTimer) {
    clearTimeout(cam.autoStopTimer);
    cam.autoStopTimer = null;
  }
  this.cameras.delete(name);
  this.logger.info(`[adaptive-server] Camera "${name}" removed`);
}
|
|
6087
|
-
getCameraNames() {
|
|
6088
|
-
return [...this.cameras.keys()];
|
|
6089
|
-
}
|
|
6090
|
-
// -----------------------------------------------------------------------
|
|
6091
|
-
// Signaling (2-step: server creates offer, client sends answer)
|
|
6092
|
-
// -----------------------------------------------------------------------
|
|
6093
|
-
/**
|
|
6094
|
-
* Create an adaptive session for a camera.
|
|
6095
|
-
* Returns a server-generated SDP offer that the client must answer.
|
|
6096
|
-
*
|
|
6097
|
-
* Flow: createSession() → server offer → client sets remote, creates answer → handleAnswer()
|
|
6098
|
-
*/
|
|
6099
|
-
/**
 * Create an adaptive session for a camera.
 * Returns a server-generated SDP offer that the client must answer.
 *
 * Flow: createSession() → server offer → client sets remote, creates answer → handleAnswer()
 *
 * Side effects: cancels any pending camera auto-stop, starts the camera's
 * transcoder if idle, and subscribes the new session to the active fanout.
 * On offer failure everything is rolled back and auto-stop is rescheduled.
 * @throws if the server is stopped or the camera is unknown
 */
async createSession(cameraName) {
  if (this.stopped) throw new Error("Server stopped");
  const cam = this.cameras.get(cameraName);
  if (!cam) throw new Error(`Camera not found: ${cameraName}`);
  // A new viewer cancels any pending idle shutdown for this camera.
  if (cam.autoStopTimer) {
    clearTimeout(cam.autoStopTimer);
    cam.autoStopTimer = null;
  }
  this.ensureCameraRunning(cameraName, cam);
  const sessionId = crypto.randomUUID();
  const activeFanout = this.getActiveFanout(cam);
  const source = activeFanout.subscribe(sessionId);
  const session = new AdaptiveSession({
    sessionId,
    source,
    iceConfig: {
      stunServers: this.stunServers,
      turnServers: this.turnServers,
      portRange: this.icePortRange,
      additionalHostAddresses: this.iceAdditionalHostAddresses
    },
    // RTCP stats feed the quality controller and are re-emitted as events.
    onStats: (stats) => {
      cam.controller.reportStats(sessionId, {
        packetLoss: stats.packetLoss,
        jitterMs: stats.jitterMs,
        rttMs: stats.rttMs,
        timestamp: stats.timestamp
      });
      this.emit("session:stats", { camera: cameraName, ...stats });
    },
    logger: this.logger
  });
  cam.sessions.set(sessionId, session);
  this.sessionCamera.set(sessionId, cameraName);
  try {
    const offer = await session.createOffer();
    this.emit("session:created", { sessionId, camera: cameraName });
    return { sessionId, sdpOffer: offer.sdp };
  } catch (err) {
    // Roll back all registration done above, then rethrow.
    cam.sessions.delete(sessionId);
    this.sessionCamera.delete(sessionId);
    activeFanout.unsubscribe(sessionId);
    await session.close().catch(() => {
    });
    this.scheduleCameraAutoStop(cameraName, cam);
    throw err;
  }
}
|
|
6147
|
-
/**
|
|
6148
|
-
* Handle the client's SDP answer for an adaptive session.
|
|
6149
|
-
* Call after createSession() with the client's answer.
|
|
6150
|
-
*/
|
|
6151
|
-
async handleAnswer(sessionId, sdpAnswer) {
|
|
6152
|
-
const camName = this.sessionCamera.get(sessionId);
|
|
6153
|
-
if (!camName) throw new Error(`Session not found: ${sessionId}`);
|
|
6154
|
-
const cam = this.cameras.get(camName);
|
|
6155
|
-
if (!cam) throw new Error(`Camera not found: ${camName}`);
|
|
6156
|
-
const session = cam.sessions.get(sessionId);
|
|
6157
|
-
if (!session) throw new Error(`Session not found: ${sessionId}`);
|
|
6158
|
-
await session.handleAnswer({ sdp: sdpAnswer, type: "answer" });
|
|
6159
|
-
}
|
|
6160
|
-
/**
|
|
6161
|
-
* Convenience: handleWhepOffer is NOT supported — werift requires server-initiated offers.
|
|
6162
|
-
* Use createSession() + handleAnswer() instead.
|
|
6163
|
-
*/
|
|
6164
|
-
async handleWhepOffer(_cameraName, _sdpOffer) {
|
|
6165
|
-
throw new Error(
|
|
6166
|
-
"handleWhepOffer is not supported \u2014 werift requires server-initiated offers. Use createSession() to get a server offer, then handleAnswer() with the client's answer."
|
|
6167
|
-
);
|
|
6168
|
-
}
|
|
6169
|
-
// -----------------------------------------------------------------------
|
|
6170
|
-
// Connection pool: pre-warmed sessions without camera assignment
|
|
6171
|
-
// -----------------------------------------------------------------------
|
|
6172
|
-
/** Pooled sessions: sessionId → true (idle, no camera attached). */
|
|
6173
|
-
pooledSessions = /* @__PURE__ */ new Set();
|
|
6174
|
-
/**
|
|
6175
|
-
* Create a pooled session (no camera attached yet).
|
|
6176
|
-
* The SDP exchange happens, ICE connects, but no ffmpeg is started.
|
|
6177
|
-
* Call attachCamera() later to start feeding frames.
|
|
6178
|
-
*/
|
|
6179
|
-
/**
 * Create a pooled session (no camera attached yet).
 * The SDP exchange happens, ICE connects, but no ffmpeg is started.
 * Call attachCamera() later to start feeding frames.
 *
 * Pooled sessions are parked under a synthetic "__pool__" camera record
 * (created lazily with dummy source/fanout/controller that are never
 * started) so the session-bookkeeping maps stay uniform.
 * @throws if the server is stopped or the offer fails
 */
async createPooledSession() {
  if (this.stopped) throw new Error("Server stopped");
  const sessionId = crypto.randomUUID();
  // A generator that never yields: keeps the session's feed loop pending
  // until attachCamera() swaps in a real source.
  const emptySource = (async function* () {
    await new Promise(() => {
    });
  })();
  const session = new AdaptiveSession({
    sessionId,
    source: emptySource,
    iceConfig: {
      stunServers: this.stunServers,
      turnServers: this.turnServers,
      portRange: this.icePortRange,
      additionalHostAddresses: this.iceAdditionalHostAddresses
    },
    logger: this.logger
  });
  this.pooledSessions.add(sessionId);
  const poolCamKey = "__pool__";
  if (!this.cameras.has(poolCamKey)) {
    // Placeholder record; its ffmpeg source/fanout/controller are inert.
    const dummyFfmpeg = new AdaptiveFfmpegSource({
      rtspUrl: "rtsp://0.0.0.0/dummy",
      initialParams: { maxBitrateKbps: 0, width: 0, height: 0 }
    });
    const dummyFanout = new StreamFanout({
      maxQueueItems: 1,
      createSource: () => dummyFfmpeg.source
    });
    const dummyController = new AdaptiveController({
      profiles: createDefaultProfiles(),
      onQualityChange: async () => {
      }
    });
    this.cameras.set(poolCamKey, {
      config: { rtspUrl: "", profiles: createDefaultProfiles() },
      mainFfmpegSource: dummyFfmpeg,
      mainFanout: dummyFanout,
      subFfmpegSource: null,
      subFanout: null,
      activeSourceProfile: "main",
      controller: dummyController,
      sessions: /* @__PURE__ */ new Map(),
      autoStopTimer: null,
      switching: false
    });
  }
  const poolCam = this.cameras.get(poolCamKey);
  poolCam.sessions.set(sessionId, session);
  this.sessionCamera.set(sessionId, poolCamKey);
  try {
    const offer = await session.createOffer();
    this.logger.info(`[adaptive-server] Pooled session ${sessionId.slice(0, 8)} created`);
    return { sessionId, sdpOffer: offer.sdp };
  } catch (err) {
    // Roll back pool registration on offer failure.
    poolCam.sessions.delete(sessionId);
    this.sessionCamera.delete(sessionId);
    this.pooledSessions.delete(sessionId);
    await session.close().catch(() => {
    });
    throw err;
  }
}
|
|
6242
|
-
/**
|
|
6243
|
-
* Attach a camera to a pooled session.
|
|
6244
|
-
* Starts the ffmpeg transcoder and begins feeding frames.
|
|
6245
|
-
*/
|
|
6246
|
-
/**
 * Attach a camera to a pooled session.
 * Starts the ffmpeg transcoder and begins feeding frames.
 * Moves the session's bookkeeping from the "__pool__" record to the real
 * camera record before subscribing it to the camera's active fanout.
 * @throws if the session is not pooled, or the camera/session is unknown
 */
async attachCamera(sessionId, cameraName) {
  if (!this.pooledSessions.has(sessionId)) {
    throw new Error(`Session ${sessionId} is not a pooled session`);
  }
  const cam = this.cameras.get(cameraName);
  if (!cam) throw new Error(`Camera not found: ${cameraName}`);
  this.ensureCameraRunning(cameraName, cam);
  const poolCam = this.cameras.get("__pool__");
  const session = poolCam?.sessions.get(sessionId);
  if (!session) throw new Error(`Pooled session not found: ${sessionId}`);
  // Re-home the session: pool record → camera record.
  poolCam.sessions.delete(sessionId);
  cam.sessions.set(sessionId, session);
  this.sessionCamera.set(sessionId, cameraName);
  this.pooledSessions.delete(sessionId);
  // Swap the never-yielding pool source for the camera's live fanout feed.
  const activeFanout = this.getActiveFanout(cam);
  const source = activeFanout.subscribe(sessionId);
  session.replaceSource(source);
  this.logger.info(`[adaptive-server] Attached camera "${cameraName}" to session ${sessionId.slice(0, 8)}`);
}
|
|
6265
|
-
/**
|
|
6266
|
-
* Detach a camera from a session (session returns to pool).
|
|
6267
|
-
*/
|
|
6268
|
-
/**
 * Detach a camera from a session (session returns to pool).
 * Inverse of attachCamera(): unhooks the session's source, unsubscribes it
 * from the camera fanout, parks it back under "__pool__", and schedules the
 * camera's auto-stop if it is now idle. No-op for unknown or already-pooled
 * sessions.
 */
async detachCamera(sessionId) {
  const camName = this.sessionCamera.get(sessionId);
  if (!camName || camName === "__pool__") return;
  const cam = this.cameras.get(camName);
  if (!cam) return;
  const session = cam.sessions.get(sessionId);
  if (!session) return;
  session.detachSource();
  const activeFanout = this.getActiveFanout(cam);
  activeFanout.unsubscribe(sessionId);
  cam.sessions.delete(sessionId);
  const poolCam = this.cameras.get("__pool__");
  // Only re-pool if the pool record exists (i.e. createPooledSession ran);
  // otherwise the session is simply dropped from the maps.
  if (poolCam) {
    poolCam.sessions.set(sessionId, session);
    this.sessionCamera.set(sessionId, "__pool__");
    this.pooledSessions.add(sessionId);
  }
  this.logger.info(`[adaptive-server] Detached camera "${camName}" from session ${sessionId.slice(0, 8)} (back to pool)`);
  this.scheduleCameraAutoStop(camName, cam);
}
|
|
6288
|
-
/** Check if a session is in the idle pool. */
|
|
6289
|
-
isPooledSession(sessionId) {
|
|
6290
|
-
return this.pooledSessions.has(sessionId);
|
|
6291
|
-
}
|
|
6292
|
-
/** Set debug flag on all sessions for a camera. */
|
|
6293
|
-
setDebug(cameraName, debug) {
|
|
6294
|
-
const cam = this.cameras.get(cameraName);
|
|
6295
|
-
if (!cam) return 0;
|
|
6296
|
-
let count = 0;
|
|
6297
|
-
for (const session of cam.sessions.values()) {
|
|
6298
|
-
session.debug = debug;
|
|
6299
|
-
count++;
|
|
6300
|
-
}
|
|
6301
|
-
return count;
|
|
6302
|
-
}
|
|
6303
|
-
/** Get count of idle pooled sessions. */
|
|
6304
|
-
getPoolSize() {
|
|
6305
|
-
return this.pooledSessions.size;
|
|
6306
|
-
}
|
|
6307
|
-
// -----------------------------------------------------------------------
|
|
6308
|
-
// Session management
|
|
6309
|
-
// -----------------------------------------------------------------------
|
|
6310
|
-
/** Close a specific session. */
|
|
6311
|
-
/**
 * Close a specific session.
 * Removes it from all bookkeeping maps, unsubscribes it from the camera's
 * active fanout and quality controller, closes the peer connection, emits
 * "session:closed", and schedules camera auto-stop if it is now idle.
 * No-op for unknown session ids.
 */
async closeSession(sessionId) {
  const camName = this.sessionCamera.get(sessionId);
  if (!camName) return;
  const cam = this.cameras.get(camName);
  if (!cam) return;
  const session = cam.sessions.get(sessionId);
  if (!session) return;
  // Deregister before the async close so concurrent lookups miss it.
  cam.sessions.delete(sessionId);
  this.sessionCamera.delete(sessionId);
  const activeFanout = this.getActiveFanout(cam);
  activeFanout.unsubscribe(sessionId);
  cam.controller.removeSession(sessionId);
  await session.close();
  this.logger.info(`[adaptive-server] Session ${sessionId} closed (camera "${camName}", remaining: ${cam.sessions.size})`);
  this.emit("session:closed", { sessionId, camera: camName });
  this.scheduleCameraAutoStop(camName, cam);
}
|
|
6328
|
-
/**
|
|
6329
|
-
* Report client-side stats for a session (supplements RTCP monitoring).
|
|
6330
|
-
* Call from tRPC route when the client pushes stats.
|
|
6331
|
-
*/
|
|
6332
|
-
reportClientStats(sessionId, stats) {
|
|
6333
|
-
const camName = this.sessionCamera.get(sessionId);
|
|
6334
|
-
if (!camName) return null;
|
|
6335
|
-
const cam = this.cameras.get(camName);
|
|
6336
|
-
if (!cam) return null;
|
|
6337
|
-
cam.controller.reportStats(sessionId, stats);
|
|
6338
|
-
const profile = cam.controller.currentProfile;
|
|
6339
|
-
return {
|
|
6340
|
-
currentTier: profile.tier,
|
|
6341
|
-
currentBitrateKbps: profile.encoding.maxBitrateKbps,
|
|
6342
|
-
currentResolution: { width: profile.encoding.width, height: profile.encoding.height },
|
|
6343
|
-
sourceProfile: cam.activeSourceProfile
|
|
6344
|
-
};
|
|
6345
|
-
}
|
|
6346
|
-
/** Force quality for a camera (null = auto). */
|
|
6347
|
-
forceQuality(cameraName, tier) {
|
|
6348
|
-
const cam = this.cameras.get(cameraName);
|
|
6349
|
-
if (!cam) return false;
|
|
6350
|
-
cam.controller.forceQuality(tier);
|
|
6351
|
-
return true;
|
|
6352
|
-
}
|
|
6353
|
-
/** Get current quality info for a camera. */
|
|
6354
|
-
getCameraQuality(cameraName) {
|
|
6355
|
-
const cam = this.cameras.get(cameraName);
|
|
6356
|
-
if (!cam) return null;
|
|
6357
|
-
const profile = cam.controller.currentProfile;
|
|
6358
|
-
return {
|
|
6359
|
-
tier: profile.tier,
|
|
6360
|
-
encoding: profile.encoding,
|
|
6361
|
-
isAuto: cam.controller.isAuto,
|
|
6362
|
-
stats: cam.controller.getAggregatedStats(),
|
|
6363
|
-
sessionCount: cam.sessions.size,
|
|
6364
|
-
sourceProfile: cam.activeSourceProfile
|
|
6365
|
-
};
|
|
6366
|
-
}
|
|
6367
|
-
/** Get all sessions. */
|
|
6368
|
-
getSessions(cameraName) {
|
|
6369
|
-
const infos = [];
|
|
6370
|
-
if (cameraName) {
|
|
6371
|
-
const cam = this.cameras.get(cameraName);
|
|
6372
|
-
if (cam) {
|
|
6373
|
-
for (const s of cam.sessions.values()) infos.push(s.getInfo());
|
|
6374
|
-
}
|
|
6375
|
-
} else {
|
|
6376
|
-
for (const cam of this.cameras.values()) {
|
|
6377
|
-
for (const s of cam.sessions.values()) infos.push(s.getInfo());
|
|
6378
|
-
}
|
|
6379
|
-
}
|
|
6380
|
-
return infos;
|
|
6381
|
-
}
|
|
6382
|
-
getSessionCount(cameraName) {
|
|
6383
|
-
if (cameraName) return this.cameras.get(cameraName)?.sessions.size ?? 0;
|
|
6384
|
-
let total = 0;
|
|
6385
|
-
for (const cam of this.cameras.values()) total += cam.sessions.size;
|
|
6386
|
-
return total;
|
|
6387
|
-
}
|
|
6388
|
-
/** Stop all cameras and sessions. */
|
|
6389
|
-
async stop() {
|
|
6390
|
-
if (this.stopped) return;
|
|
6391
|
-
this.stopped = true;
|
|
6392
|
-
const closePs = [];
|
|
6393
|
-
for (const [name, cam] of this.cameras) {
|
|
6394
|
-
if (cam.autoStopTimer) {
|
|
6395
|
-
clearTimeout(cam.autoStopTimer);
|
|
6396
|
-
cam.autoStopTimer = null;
|
|
6397
|
-
}
|
|
6398
|
-
for (const [sid, session] of cam.sessions) {
|
|
6399
|
-
this.sessionCamera.delete(sid);
|
|
6400
|
-
closePs.push(session.close().catch(() => {
|
|
6401
|
-
}));
|
|
6402
|
-
}
|
|
6403
|
-
cam.sessions.clear();
|
|
6404
|
-
closePs.push(cam.mainFanout.stop().catch(() => {
|
|
6405
|
-
}));
|
|
6406
|
-
closePs.push(cam.mainFfmpegSource.stop().catch(() => {
|
|
6407
|
-
}));
|
|
6408
|
-
if (cam.subFanout) closePs.push(cam.subFanout.stop().catch(() => {
|
|
6409
|
-
}));
|
|
6410
|
-
if (cam.subFfmpegSource) closePs.push(cam.subFfmpegSource.stop().catch(() => {
|
|
6411
|
-
}));
|
|
6412
|
-
}
|
|
6413
|
-
await Promise.all(closePs);
|
|
6414
|
-
this.cameras.clear();
|
|
6415
|
-
this.logger.info("[adaptive-server] Stopped");
|
|
6416
|
-
this.emit("stopped");
|
|
6417
|
-
}
|
|
6418
|
-
// -----------------------------------------------------------------------
|
|
6419
|
-
// Private
|
|
6420
|
-
// -----------------------------------------------------------------------
|
|
6421
|
-
/** Get the currently active fanout for a camera. */
|
|
6422
|
-
getActiveFanout(cam) {
|
|
6423
|
-
if (cam.activeSourceProfile === "sub" && cam.subFanout) {
|
|
6424
|
-
return cam.subFanout;
|
|
6425
|
-
}
|
|
6426
|
-
return cam.mainFanout;
|
|
6427
|
-
}
|
|
6428
|
-
ensureCameraRunning(name, cam) {
|
|
6429
|
-
const activeFanout = this.getActiveFanout(cam);
|
|
6430
|
-
if (activeFanout.isRunning()) return;
|
|
6431
|
-
this.logger.info(`[adaptive-server] Starting camera "${name}" (${cam.activeSourceProfile})`);
|
|
6432
|
-
if (cam.activeSourceProfile === "sub" && cam.subFfmpegSource) {
|
|
6433
|
-
void cam.subFfmpegSource.start();
|
|
6434
|
-
cam.subFanout.start();
|
|
6435
|
-
} else {
|
|
6436
|
-
void cam.mainFfmpegSource.start();
|
|
6437
|
-
cam.mainFanout.start();
|
|
6438
|
-
}
|
|
6439
|
-
}
|
|
6440
|
-
scheduleCameraAutoStop(name, cam) {
|
|
6441
|
-
if (cam.sessions.size > 0 || this.stopped) return;
|
|
6442
|
-
if (cam.autoStopTimer) clearTimeout(cam.autoStopTimer);
|
|
6443
|
-
cam.autoStopTimer = setTimeout(async () => {
|
|
6444
|
-
cam.autoStopTimer = null;
|
|
6445
|
-
if (cam.sessions.size > 0 || this.stopped) return;
|
|
6446
|
-
this.logger.info(`[adaptive-server] No viewers for "${name}", stopping ffmpeg`);
|
|
6447
|
-
await cam.mainFanout.stop();
|
|
6448
|
-
await cam.mainFfmpegSource.stop();
|
|
6449
|
-
if (cam.subFanout) await cam.subFanout.stop();
|
|
6450
|
-
if (cam.subFfmpegSource) await cam.subFfmpegSource.stop();
|
|
6451
|
-
}, 1e4);
|
|
6452
|
-
}
|
|
6453
|
-
// -----------------------------------------------------------------------
|
|
6454
|
-
// Source switching (Phase 5)
|
|
6455
|
-
// -----------------------------------------------------------------------
|
|
6456
|
-
/**
|
|
6457
|
-
* Handle a quality change from the AdaptiveController.
|
|
6458
|
-
* When the sourceProfile changes (main ↔ sub), performs a seamless source
|
|
6459
|
-
* switch for all active sessions. When only encoding params change (same
|
|
6460
|
-
* sourceProfile), updates ffmpeg params in-place.
|
|
6461
|
-
*/
|
|
6462
|
-
async handleQualityChange(cameraName, from, to) {
|
|
6463
|
-
const cam = this.cameras.get(cameraName);
|
|
6464
|
-
if (!cam) return;
|
|
6465
|
-
const sourceChanged = from.sourceProfile !== to.sourceProfile;
|
|
6466
|
-
if (sourceChanged) {
|
|
6467
|
-
await this.switchSource(cameraName, cam, to);
|
|
6468
|
-
} else {
|
|
6469
|
-
const activeSource = cam.activeSourceProfile === "sub" ? cam.subFfmpegSource : cam.mainFfmpegSource;
|
|
6470
|
-
if (activeSource) {
|
|
6471
|
-
await activeSource.updateParams(to.encoding);
|
|
6472
|
-
}
|
|
6473
|
-
}
|
|
6474
|
-
this.emit("quality:change", {
|
|
6475
|
-
camera: cameraName,
|
|
6476
|
-
tier: to.tier,
|
|
6477
|
-
encoding: to.encoding,
|
|
6478
|
-
sourceProfile: to.sourceProfile
|
|
6479
|
-
});
|
|
6480
|
-
}
|
|
6481
|
-
/**
|
|
6482
|
-
* Switch all active sessions from one source to another (main ↔ sub).
|
|
6483
|
-
*
|
|
6484
|
-
* Steps:
|
|
6485
|
-
* 1. Create/start the target ffmpeg source + fanout
|
|
6486
|
-
* 2. For each session: subscribe to new fanout, call replaceSource()
|
|
6487
|
-
* 3. Unsubscribe all from old fanout
|
|
6488
|
-
* 4. Stop old ffmpeg + fanout (save resources)
|
|
6489
|
-
* 5. Update activeSourceProfile
|
|
6490
|
-
*/
|
|
6491
|
-
async switchSource(cameraName, cam, toProfile) {
|
|
6492
|
-
if (cam.switching) {
|
|
6493
|
-
this.logger.warn(`[adaptive-server] Source switch already in progress for "${cameraName}", skipping`);
|
|
6494
|
-
return;
|
|
6495
|
-
}
|
|
6496
|
-
cam.switching = true;
|
|
6497
|
-
const switchingToSub = toProfile.sourceProfile === "sub";
|
|
6498
|
-
this.logger.info(
|
|
6499
|
-
`[adaptive-server] Source switch for "${cameraName}": ${cam.activeSourceProfile} \u2192 ${toProfile.sourceProfile}`
|
|
6500
|
-
);
|
|
6501
|
-
try {
|
|
6502
|
-
if (switchingToSub) {
|
|
6503
|
-
if (!cam.config.subRtspUrl) {
|
|
6504
|
-
this.logger.warn(
|
|
6505
|
-
`[adaptive-server] No subRtspUrl configured for "${cameraName}", cannot switch to sub stream \u2014 falling back to param update only`
|
|
6506
|
-
);
|
|
6507
|
-
await cam.mainFfmpegSource.updateParams(toProfile.encoding);
|
|
6508
|
-
return;
|
|
6509
|
-
}
|
|
6510
|
-
if (!cam.subFfmpegSource) {
|
|
6511
|
-
cam.subFfmpegSource = new AdaptiveFfmpegSource({
|
|
6512
|
-
rtspUrl: cam.config.subRtspUrl,
|
|
6513
|
-
initialParams: toProfile.encoding,
|
|
6514
|
-
ffmpegPath: this.ffmpegPath,
|
|
6515
|
-
logger: this.logger,
|
|
6516
|
-
label: `ffmpeg:${cameraName}:sub`
|
|
6517
|
-
});
|
|
6518
|
-
cam.subFanout = new StreamFanout({
|
|
6519
|
-
maxQueueItems: 30,
|
|
6520
|
-
createSource: () => cam.subFfmpegSource.source,
|
|
6521
|
-
onError: (err) => {
|
|
6522
|
-
this.logger.error(`[adaptive-server] Sub fanout error (${cameraName}):`, err);
|
|
6523
|
-
}
|
|
6524
|
-
});
|
|
6525
|
-
}
|
|
6526
|
-
void cam.subFfmpegSource.start();
|
|
6527
|
-
cam.subFanout.start();
|
|
6528
|
-
for (const [sid, session] of cam.sessions) {
|
|
6529
|
-
cam.mainFanout.unsubscribe(sid);
|
|
6530
|
-
const newSource = cam.subFanout.subscribe(sid);
|
|
6531
|
-
session.replaceSource(newSource);
|
|
6532
|
-
}
|
|
6533
|
-
await cam.mainFanout.stop();
|
|
6534
|
-
await cam.mainFfmpegSource.stop();
|
|
6535
|
-
cam.activeSourceProfile = "sub";
|
|
6536
|
-
} else {
|
|
6537
|
-
cam.mainFfmpegSource = new AdaptiveFfmpegSource({
|
|
6538
|
-
rtspUrl: cam.config.rtspUrl,
|
|
6539
|
-
initialParams: toProfile.encoding,
|
|
6540
|
-
ffmpegPath: this.ffmpegPath,
|
|
6541
|
-
logger: this.logger,
|
|
6542
|
-
label: `ffmpeg:${cameraName}:main`
|
|
6543
|
-
});
|
|
6544
|
-
cam.mainFanout = new StreamFanout({
|
|
6545
|
-
maxQueueItems: 30,
|
|
6546
|
-
createSource: () => cam.mainFfmpegSource.source,
|
|
6547
|
-
onError: (err) => {
|
|
6548
|
-
this.logger.error(`[adaptive-server] Main fanout error (${cameraName}):`, err);
|
|
6549
|
-
}
|
|
6550
|
-
});
|
|
6551
|
-
void cam.mainFfmpegSource.start();
|
|
6552
|
-
cam.mainFanout.start();
|
|
6553
|
-
for (const [sid, session] of cam.sessions) {
|
|
6554
|
-
if (cam.subFanout) cam.subFanout.unsubscribe(sid);
|
|
6555
|
-
const newSource = cam.mainFanout.subscribe(sid);
|
|
6556
|
-
session.replaceSource(newSource);
|
|
6557
|
-
}
|
|
6558
|
-
if (cam.subFanout) await cam.subFanout.stop();
|
|
6559
|
-
if (cam.subFfmpegSource) await cam.subFfmpegSource.stop();
|
|
6560
|
-
cam.subFfmpegSource = null;
|
|
6561
|
-
cam.subFanout = null;
|
|
6562
|
-
cam.activeSourceProfile = "main";
|
|
6563
|
-
}
|
|
6564
|
-
this.logger.info(
|
|
6565
|
-
`[adaptive-server] Source switch complete for "${cameraName}": now on ${cam.activeSourceProfile} stream`
|
|
6566
|
-
);
|
|
6567
|
-
} catch (err) {
|
|
6568
|
-
this.logger.error(`[adaptive-server] Source switch failed for "${cameraName}":`, err);
|
|
6569
|
-
} finally {
|
|
6570
|
-
cam.switching = false;
|
|
6571
|
-
}
|
|
6572
|
-
}
|
|
6573
|
-
};
|
|
6574
3363
|
export {
|
|
6575
|
-
AdaptiveController,
|
|
6576
|
-
AdaptiveFfmpegSource,
|
|
6577
|
-
AdaptiveRtspRelay,
|
|
6578
|
-
AdaptiveSession,
|
|
6579
|
-
AdaptiveStreamServer,
|
|
6580
|
-
AsyncBoundedQueue,
|
|
6581
3364
|
BuiltinAnalysisAddon,
|
|
6582
3365
|
DEFAULT_RETENTION,
|
|
6583
3366
|
DecoderRegistry,
|
|
6584
3367
|
EventPersistenceService,
|
|
6585
3368
|
FfmpegDecoderProvider,
|
|
6586
3369
|
FfmpegDecoderSession,
|
|
6587
|
-
FfmpegProcess,
|
|
6588
3370
|
FrameDropper,
|
|
6589
|
-
H264RtpDepacketizer,
|
|
6590
|
-
H265RtpDepacketizer,
|
|
6591
3371
|
KnownFacesService,
|
|
6592
3372
|
PipelineAddon,
|
|
6593
3373
|
RecordingCoordinator,
|
|
6594
3374
|
RecordingDb,
|
|
6595
3375
|
RetentionService,
|
|
6596
3376
|
SessionTrackerService,
|
|
6597
|
-
SharedSession,
|
|
6598
3377
|
StreamBroker,
|
|
6599
3378
|
StreamBrokerManager,
|
|
6600
|
-
StreamFanout,
|
|
6601
3379
|
StreamPipeServer,
|
|
6602
3380
|
TrackTrailService,
|
|
6603
|
-
|
|
6604
|
-
convertH264ToAnnexB,
|
|
6605
|
-
convertH265ToAnnexB,
|
|
6606
|
-
cosineSimilarity,
|
|
6607
|
-
createDefaultProfiles,
|
|
6608
|
-
createNullLogger,
|
|
6609
|
-
detectVideoCodecFromNal,
|
|
6610
|
-
extractH264ParamSets,
|
|
6611
|
-
extractH265ParamSets,
|
|
6612
|
-
fromEventEmitter,
|
|
6613
|
-
fromNativeStream,
|
|
6614
|
-
fromPushCallback,
|
|
6615
|
-
getH265NalType,
|
|
6616
|
-
hasStartCodes,
|
|
6617
|
-
isH264IdrAccessUnit,
|
|
6618
|
-
isH264KeyframeAnnexB,
|
|
6619
|
-
isH265Irap,
|
|
6620
|
-
isH265IrapAccessUnit,
|
|
6621
|
-
isH265KeyframeAnnexB,
|
|
6622
|
-
joinNalsToAnnexB,
|
|
6623
|
-
prependStartCode,
|
|
6624
|
-
splitAnnexBToNals
|
|
3381
|
+
cosineSimilarity
|
|
6625
3382
|
};
|
|
6626
3383
|
//# sourceMappingURL=index.js.map
|