@camstack/addon-pipeline 0.1.0 → 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{addon-CwDFZWAb.d.cts → addon-DK7eQ0PN.d.cts} +3 -4
- package/dist/{addon-CwDFZWAb.d.ts → addon-DK7eQ0PN.d.ts} +3 -4
- package/dist/addon.cjs +29 -73
- package/dist/addon.cjs.map +1 -1
- package/dist/addon.d.cts +1 -1
- package/dist/addon.d.ts +1 -1
- package/dist/addon.js +28 -72
- package/dist/addon.js.map +1 -1
- package/dist/index.cjs +31 -3305
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +4 -921
- package/dist/index.d.ts +4 -921
- package/dist/index.js +29 -3271
- package/dist/index.js.map +1 -1
- package/package.json +35 -17
package/dist/index.cjs
CHANGED
|
@@ -36,6 +36,7 @@ __export(ffmpeg_config_exports, {
|
|
|
36
36
|
buildFfmpegInputArgs: () => buildFfmpegInputArgs,
|
|
37
37
|
buildFfmpegOutputArgs: () => buildFfmpegOutputArgs,
|
|
38
38
|
detectPlatformDefaults: () => detectPlatformDefaults,
|
|
39
|
+
resolveFfmpegBinary: () => resolveFfmpegBinary,
|
|
39
40
|
resolveFfmpegConfig: () => resolveFfmpegConfig
|
|
40
41
|
});
|
|
41
42
|
function detectPlatformDefaults(ffmpegPath = "ffmpeg") {
|
|
@@ -78,12 +79,19 @@ function buildFfmpegOutputArgs(config) {
|
|
|
78
79
|
if (config.outputArgs?.length) args.push(...config.outputArgs);
|
|
79
80
|
return args;
|
|
80
81
|
}
|
|
81
|
-
|
|
82
|
+
async function resolveFfmpegBinary(configPath, dataDir, logger) {
|
|
83
|
+
if (configPath && configPath !== "ffmpeg") {
|
|
84
|
+
return configPath;
|
|
85
|
+
}
|
|
86
|
+
return (0, import_core.ensureFfmpeg)(dataDir, logger);
|
|
87
|
+
}
|
|
88
|
+
var import_node_child_process3, os, import_core;
|
|
82
89
|
var init_ffmpeg_config = __esm({
|
|
83
90
|
"src/recording/ffmpeg-config.ts"() {
|
|
84
91
|
"use strict";
|
|
85
92
|
import_node_child_process3 = require("child_process");
|
|
86
93
|
os = __toESM(require("os"), 1);
|
|
94
|
+
import_core = require("@camstack/core");
|
|
87
95
|
}
|
|
88
96
|
});
|
|
89
97
|
|
|
@@ -1189,7 +1197,7 @@ var recording_coordinator_exports = {};
|
|
|
1189
1197
|
__export(recording_coordinator_exports, {
|
|
1190
1198
|
RecordingCoordinator: () => RecordingCoordinator
|
|
1191
1199
|
});
|
|
1192
|
-
var POLICY_EVAL_INTERVAL_MS, MOTION_FALLBACK_TIMEOUT_MS, RecordingCoordinator;
|
|
1200
|
+
var DEFAULT_SEGMENT_DURATION_SEC, POLICY_EVAL_INTERVAL_MS, MOTION_FALLBACK_TIMEOUT_MS, RecordingCoordinator;
|
|
1193
1201
|
var init_recording_coordinator = __esm({
|
|
1194
1202
|
"src/recording/recording-coordinator.ts"() {
|
|
1195
1203
|
"use strict";
|
|
@@ -1199,6 +1207,7 @@ var init_recording_coordinator = __esm({
|
|
|
1199
1207
|
init_retention_manager();
|
|
1200
1208
|
init_playlist_generator();
|
|
1201
1209
|
init_storage_estimator();
|
|
1210
|
+
DEFAULT_SEGMENT_DURATION_SEC = 4;
|
|
1202
1211
|
POLICY_EVAL_INTERVAL_MS = 1e3;
|
|
1203
1212
|
MOTION_FALLBACK_TIMEOUT_MS = 6e4;
|
|
1204
1213
|
RecordingCoordinator = class _RecordingCoordinator {
|
|
@@ -1212,6 +1221,7 @@ var init_recording_coordinator = __esm({
|
|
|
1212
1221
|
storagePath;
|
|
1213
1222
|
globalFfmpegConfig;
|
|
1214
1223
|
detectedFfmpegConfig;
|
|
1224
|
+
segmentDurationSec;
|
|
1215
1225
|
recordings = /* @__PURE__ */ new Map();
|
|
1216
1226
|
policyTimer = null;
|
|
1217
1227
|
retentionManager;
|
|
@@ -1228,6 +1238,7 @@ var init_recording_coordinator = __esm({
|
|
|
1228
1238
|
this.storagePath = config.storagePath;
|
|
1229
1239
|
this.globalFfmpegConfig = config.globalFfmpegConfig;
|
|
1230
1240
|
this.detectedFfmpegConfig = config.detectedFfmpegConfig;
|
|
1241
|
+
this.segmentDurationSec = config.segmentDurationSec ?? DEFAULT_SEGMENT_DURATION_SEC;
|
|
1231
1242
|
this.retentionManager = new RetentionManager(
|
|
1232
1243
|
this.db,
|
|
1233
1244
|
this.logger.child("retention"),
|
|
@@ -1313,7 +1324,7 @@ var init_recording_coordinator = __esm({
|
|
|
1313
1324
|
const writerConfig = {
|
|
1314
1325
|
deviceId,
|
|
1315
1326
|
streamId: sp.streamId,
|
|
1316
|
-
segmentDurationSec:
|
|
1327
|
+
segmentDurationSec: this.segmentDurationSec,
|
|
1317
1328
|
storagePath: this.storagePath,
|
|
1318
1329
|
storageName,
|
|
1319
1330
|
subDirectory,
|
|
@@ -1562,56 +1573,24 @@ var init_recording_coordinator = __esm({
|
|
|
1562
1573
|
// src/index.ts
|
|
1563
1574
|
var index_exports = {};
|
|
1564
1575
|
__export(index_exports, {
|
|
1565
|
-
AdaptiveController: () => AdaptiveController,
|
|
1566
|
-
AdaptiveFfmpegSource: () => AdaptiveFfmpegSource,
|
|
1567
|
-
AdaptiveRtspRelay: () => AdaptiveRtspRelay,
|
|
1568
|
-
AdaptiveSession: () => AdaptiveSession,
|
|
1569
|
-
AdaptiveStreamServer: () => AdaptiveStreamServer,
|
|
1570
|
-
AsyncBoundedQueue: () => AsyncBoundedQueue,
|
|
1571
1576
|
BuiltinAnalysisAddon: () => BuiltinAnalysisAddon,
|
|
1572
1577
|
DEFAULT_RETENTION: () => DEFAULT_RETENTION,
|
|
1573
1578
|
DecoderRegistry: () => DecoderRegistry,
|
|
1574
1579
|
EventPersistenceService: () => EventPersistenceService,
|
|
1575
1580
|
FfmpegDecoderProvider: () => FfmpegDecoderProvider,
|
|
1576
1581
|
FfmpegDecoderSession: () => FfmpegDecoderSession,
|
|
1577
|
-
FfmpegProcess: () => FfmpegProcess,
|
|
1578
1582
|
FrameDropper: () => FrameDropper,
|
|
1579
|
-
H264RtpDepacketizer: () => H264RtpDepacketizer,
|
|
1580
|
-
H265RtpDepacketizer: () => H265RtpDepacketizer,
|
|
1581
1583
|
KnownFacesService: () => KnownFacesService,
|
|
1582
1584
|
PipelineAddon: () => PipelineAddon,
|
|
1583
1585
|
RecordingCoordinator: () => RecordingCoordinator,
|
|
1584
1586
|
RecordingDb: () => RecordingDb,
|
|
1585
1587
|
RetentionService: () => RetentionService,
|
|
1586
1588
|
SessionTrackerService: () => SessionTrackerService,
|
|
1587
|
-
SharedSession: () => SharedSession,
|
|
1588
1589
|
StreamBroker: () => StreamBroker,
|
|
1589
1590
|
StreamBrokerManager: () => StreamBrokerManager,
|
|
1590
|
-
StreamFanout: () => StreamFanout,
|
|
1591
1591
|
StreamPipeServer: () => StreamPipeServer,
|
|
1592
1592
|
TrackTrailService: () => TrackTrailService,
|
|
1593
|
-
|
|
1594
|
-
convertH264ToAnnexB: () => convertH264ToAnnexB,
|
|
1595
|
-
convertH265ToAnnexB: () => convertH265ToAnnexB,
|
|
1596
|
-
cosineSimilarity: () => cosineSimilarity,
|
|
1597
|
-
createDefaultProfiles: () => createDefaultProfiles,
|
|
1598
|
-
createNullLogger: () => createNullLogger,
|
|
1599
|
-
detectVideoCodecFromNal: () => detectVideoCodecFromNal,
|
|
1600
|
-
extractH264ParamSets: () => extractH264ParamSets,
|
|
1601
|
-
extractH265ParamSets: () => extractH265ParamSets,
|
|
1602
|
-
fromEventEmitter: () => fromEventEmitter,
|
|
1603
|
-
fromNativeStream: () => fromNativeStream,
|
|
1604
|
-
fromPushCallback: () => fromPushCallback,
|
|
1605
|
-
getH265NalType: () => getH265NalType,
|
|
1606
|
-
hasStartCodes: () => hasStartCodes,
|
|
1607
|
-
isH264IdrAccessUnit: () => isH264IdrAccessUnit,
|
|
1608
|
-
isH264KeyframeAnnexB: () => isH264KeyframeAnnexB,
|
|
1609
|
-
isH265Irap: () => isH265Irap,
|
|
1610
|
-
isH265IrapAccessUnit: () => isH265IrapAccessUnit,
|
|
1611
|
-
isH265KeyframeAnnexB: () => isH265KeyframeAnnexB,
|
|
1612
|
-
joinNalsToAnnexB: () => joinNalsToAnnexB,
|
|
1613
|
-
prependStartCode: () => prependStartCode,
|
|
1614
|
-
splitAnnexBToNals: () => splitAnnexBToNals
|
|
1593
|
+
cosineSimilarity: () => cosineSimilarity
|
|
1615
1594
|
});
|
|
1616
1595
|
module.exports = __toCommonJS(index_exports);
|
|
1617
1596
|
|
|
@@ -3214,9 +3193,7 @@ var PipelineAddon = class {
|
|
|
3214
3193
|
capabilities: [
|
|
3215
3194
|
{ name: "stream-broker", mode: "singleton" },
|
|
3216
3195
|
{ name: "recording-engine", mode: "singleton" },
|
|
3217
|
-
{ name: "analysis-
|
|
3218
|
-
{ name: "analysis-data-persistence", mode: "singleton" },
|
|
3219
|
-
{ name: "webrtc", mode: "collection" }
|
|
3196
|
+
{ name: "analysis-data-persistence", mode: "singleton" }
|
|
3220
3197
|
]
|
|
3221
3198
|
};
|
|
3222
3199
|
// Stream broker
|
|
@@ -3229,11 +3206,10 @@ var PipelineAddon = class {
|
|
|
3229
3206
|
currentRecordingConfig = {
|
|
3230
3207
|
ffmpegPath: "ffmpeg",
|
|
3231
3208
|
hwaccel: void 0,
|
|
3232
|
-
threads: void 0
|
|
3209
|
+
threads: void 0,
|
|
3210
|
+
segmentDurationSeconds: 4,
|
|
3211
|
+
defaultRetentionDays: 30
|
|
3233
3212
|
};
|
|
3234
|
-
// Analysis pipeline
|
|
3235
|
-
analysisPipeline = null;
|
|
3236
|
-
analysisLogger = null;
|
|
3237
3213
|
// Analysis persistence
|
|
3238
3214
|
persistenceFacade = null;
|
|
3239
3215
|
setRecordingDependencies(deps) {
|
|
@@ -3263,10 +3239,14 @@ var PipelineAddon = class {
|
|
|
3263
3239
|
hwaccel: context.addonConfig.hwaccel ?? this.currentRecordingConfig.hwaccel,
|
|
3264
3240
|
threads: context.addonConfig.threads ?? this.currentRecordingConfig.threads
|
|
3265
3241
|
};
|
|
3242
|
+
const segmentDurationSeconds = context.addonConfig.segmentDurationSeconds ?? this.currentRecordingConfig.segmentDurationSeconds;
|
|
3243
|
+
const defaultRetentionDays = context.addonConfig.defaultRetentionDays ?? this.currentRecordingConfig.defaultRetentionDays;
|
|
3266
3244
|
this.currentRecordingConfig = {
|
|
3267
3245
|
ffmpegPath,
|
|
3268
3246
|
hwaccel: globalFfmpegConfig.hwaccel,
|
|
3269
|
-
threads: globalFfmpegConfig.threads
|
|
3247
|
+
threads: globalFfmpegConfig.threads,
|
|
3248
|
+
segmentDurationSeconds,
|
|
3249
|
+
defaultRetentionDays
|
|
3270
3250
|
};
|
|
3271
3251
|
const fileStorage = context.storage.files;
|
|
3272
3252
|
if (!fileStorage) {
|
|
@@ -3282,7 +3262,8 @@ var PipelineAddon = class {
|
|
|
3282
3262
|
fileStorage,
|
|
3283
3263
|
storagePath,
|
|
3284
3264
|
globalFfmpegConfig,
|
|
3285
|
-
detectedFfmpegConfig
|
|
3265
|
+
detectedFfmpegConfig,
|
|
3266
|
+
segmentDurationSec: segmentDurationSeconds
|
|
3286
3267
|
});
|
|
3287
3268
|
await this.coordinator.start();
|
|
3288
3269
|
context.logger.info("Recording Engine initialized");
|
|
@@ -3291,16 +3272,6 @@ var PipelineAddon = class {
|
|
|
3291
3272
|
context.logger.warn(`Recording Engine failed to initialize: ${msg}`);
|
|
3292
3273
|
}
|
|
3293
3274
|
}
|
|
3294
|
-
this.analysisLogger = context.logger;
|
|
3295
|
-
try {
|
|
3296
|
-
const mod = await import("@camstack/lib-pipeline-analysis");
|
|
3297
|
-
const instance = new mod.AnalysisPipeline();
|
|
3298
|
-
this.analysisPipeline = instance;
|
|
3299
|
-
this.analysisLogger.info("Analysis pipeline loaded successfully");
|
|
3300
|
-
} catch (error) {
|
|
3301
|
-
const msg = error instanceof Error ? error.message : String(error);
|
|
3302
|
-
this.analysisLogger.warn(`Analysis pipeline not available: ${msg} -- analysis features disabled`);
|
|
3303
|
-
}
|
|
3304
3275
|
const eventPersistence = new EventPersistenceService({
|
|
3305
3276
|
getStorageLocation: () => context.storage,
|
|
3306
3277
|
subscribe: (filter, handler) => context.eventBus.subscribe(filter, handler),
|
|
@@ -3352,7 +3323,6 @@ var PipelineAddon = class {
|
|
|
3352
3323
|
this.sqliteDb = null;
|
|
3353
3324
|
}
|
|
3354
3325
|
this.recordingDb = null;
|
|
3355
|
-
this.analysisPipeline = null;
|
|
3356
3326
|
if (this.persistenceFacade) {
|
|
3357
3327
|
this.persistenceFacade.eventPersistence.stop();
|
|
3358
3328
|
this.persistenceFacade.retention.stop();
|
|
@@ -3365,13 +3335,8 @@ var PipelineAddon = class {
|
|
|
3365
3335
|
return this.brokerManager;
|
|
3366
3336
|
case "recording-engine":
|
|
3367
3337
|
return this.coordinator;
|
|
3368
|
-
case "analysis-pipeline":
|
|
3369
|
-
return this.analysisPipeline;
|
|
3370
3338
|
case "analysis-data-persistence":
|
|
3371
3339
|
return this.persistenceFacade;
|
|
3372
|
-
case "webrtc":
|
|
3373
|
-
return null;
|
|
3374
|
-
// WebRTC is provided externally or via collection
|
|
3375
3340
|
default:
|
|
3376
3341
|
return null;
|
|
3377
3342
|
}
|
|
@@ -3385,52 +3350,9 @@ var PipelineAddon = class {
|
|
|
3385
3350
|
if (!this.recordingDb) throw new Error("PipelineAddon recording not initialized");
|
|
3386
3351
|
return this.recordingDb;
|
|
3387
3352
|
}
|
|
3388
|
-
/** Whether the analysis pipeline package loaded successfully */
|
|
3389
|
-
isAnalysisAvailable() {
|
|
3390
|
-
return this.analysisPipeline !== null;
|
|
3391
|
-
}
|
|
3392
3353
|
// --- IConfigurable ---
|
|
3393
3354
|
getConfigSchema() {
|
|
3394
|
-
return {
|
|
3395
|
-
sections: [
|
|
3396
|
-
{
|
|
3397
|
-
id: "ffmpeg",
|
|
3398
|
-
title: "FFmpeg Settings",
|
|
3399
|
-
columns: 2,
|
|
3400
|
-
fields: [
|
|
3401
|
-
{
|
|
3402
|
-
type: "text",
|
|
3403
|
-
key: "ffmpegPath",
|
|
3404
|
-
label: "FFmpeg Binary Path",
|
|
3405
|
-
description: 'Path to the ffmpeg executable, or just "ffmpeg" if it is in your PATH',
|
|
3406
|
-
placeholder: "ffmpeg"
|
|
3407
|
-
},
|
|
3408
|
-
{
|
|
3409
|
-
type: "select",
|
|
3410
|
-
key: "hwaccel",
|
|
3411
|
-
label: "Hardware Acceleration",
|
|
3412
|
-
description: "Enable GPU-accelerated video encoding if supported by your hardware",
|
|
3413
|
-
options: [
|
|
3414
|
-
{ value: "", label: "None (software)", description: "CPU-only encoding" },
|
|
3415
|
-
{ value: "nvenc", label: "NVIDIA NVENC", description: "NVIDIA GPU encoding" },
|
|
3416
|
-
{ value: "vaapi", label: "Intel VAAPI", description: "Intel GPU encoding (Linux)" },
|
|
3417
|
-
{ value: "videotoolbox", label: "Apple VideoToolbox", description: "macOS hardware encoding" }
|
|
3418
|
-
]
|
|
3419
|
-
},
|
|
3420
|
-
{
|
|
3421
|
-
type: "number",
|
|
3422
|
-
key: "threads",
|
|
3423
|
-
label: "FFmpeg Threads",
|
|
3424
|
-
description: "Number of CPU threads for software encoding (0 = auto)",
|
|
3425
|
-
min: 0,
|
|
3426
|
-
max: 64,
|
|
3427
|
-
step: 1,
|
|
3428
|
-
unit: "threads"
|
|
3429
|
-
}
|
|
3430
|
-
]
|
|
3431
|
-
}
|
|
3432
|
-
]
|
|
3433
|
-
};
|
|
3355
|
+
return { sections: [] };
|
|
3434
3356
|
}
|
|
3435
3357
|
getConfig() {
|
|
3436
3358
|
return { ...this.currentRecordingConfig };
|
|
@@ -3439,7 +3361,9 @@ var PipelineAddon = class {
|
|
|
3439
3361
|
this.currentRecordingConfig = {
|
|
3440
3362
|
ffmpegPath: config.ffmpegPath ?? this.currentRecordingConfig.ffmpegPath,
|
|
3441
3363
|
hwaccel: config.hwaccel ?? this.currentRecordingConfig.hwaccel,
|
|
3442
|
-
threads: config.threads ?? this.currentRecordingConfig.threads
|
|
3364
|
+
threads: config.threads ?? this.currentRecordingConfig.threads,
|
|
3365
|
+
segmentDurationSeconds: config.segmentDurationSeconds ?? this.currentRecordingConfig.segmentDurationSeconds,
|
|
3366
|
+
defaultRetentionDays: config.defaultRetentionDays ?? this.currentRecordingConfig.defaultRetentionDays
|
|
3443
3367
|
};
|
|
3444
3368
|
}
|
|
3445
3369
|
};
|
|
@@ -3484,3223 +3408,25 @@ var BuiltinAnalysisAddon = class {
|
|
|
3484
3408
|
return this.pipeline !== null;
|
|
3485
3409
|
}
|
|
3486
3410
|
};
|
|
3487
|
-
|
|
3488
|
-
// src/webrtc/types.ts
|
|
3489
|
-
function asLogger(logger) {
|
|
3490
|
-
if (!logger) return createNullLogger();
|
|
3491
|
-
const noop = () => {
|
|
3492
|
-
};
|
|
3493
|
-
return {
|
|
3494
|
-
log: logger.log?.bind(logger) ?? noop,
|
|
3495
|
-
info: logger.info?.bind(logger) ?? noop,
|
|
3496
|
-
warn: logger.warn?.bind(logger) ?? noop,
|
|
3497
|
-
error: logger.error?.bind(logger) ?? noop,
|
|
3498
|
-
debug: logger.debug?.bind(logger) ?? noop
|
|
3499
|
-
};
|
|
3500
|
-
}
|
|
3501
|
-
function createNullLogger() {
|
|
3502
|
-
const noop = () => {
|
|
3503
|
-
};
|
|
3504
|
-
return { log: noop, info: noop, warn: noop, error: noop, debug: noop };
|
|
3505
|
-
}
|
|
3506
|
-
|
|
3507
|
-
// src/webrtc/nal-utils.ts
|
|
3508
|
-
var NAL_START_CODE_4B = Buffer.from([0, 0, 0, 1]);
|
|
3509
|
-
var NAL_START_CODE_3B = Buffer.from([0, 0, 1]);
|
|
3510
|
-
function hasStartCodes(data) {
|
|
3511
|
-
if (data.length < 4) return false;
|
|
3512
|
-
if (data.subarray(0, 4).equals(NAL_START_CODE_4B)) return true;
|
|
3513
|
-
if (data.subarray(0, 3).equals(NAL_START_CODE_3B)) return true;
|
|
3514
|
-
return false;
|
|
3515
|
-
}
|
|
3516
|
-
function splitAnnexBToNals(annexB) {
|
|
3517
|
-
const nals = [];
|
|
3518
|
-
const len = annexB.length;
|
|
3519
|
-
const isStartCodeAt = (i2) => {
|
|
3520
|
-
if (i2 + 3 <= len && annexB[i2] === 0 && annexB[i2 + 1] === 0) {
|
|
3521
|
-
if (annexB[i2 + 2] === 1) return 3;
|
|
3522
|
-
if (i2 + 4 <= len && annexB[i2 + 2] === 0 && annexB[i2 + 3] === 1)
|
|
3523
|
-
return 4;
|
|
3524
|
-
}
|
|
3525
|
-
return 0;
|
|
3526
|
-
};
|
|
3527
|
-
let i = 0;
|
|
3528
|
-
while (i < len) {
|
|
3529
|
-
const sc = isStartCodeAt(i);
|
|
3530
|
-
if (sc) break;
|
|
3531
|
-
i++;
|
|
3532
|
-
}
|
|
3533
|
-
while (i < len) {
|
|
3534
|
-
const sc = isStartCodeAt(i);
|
|
3535
|
-
if (!sc) {
|
|
3536
|
-
i++;
|
|
3537
|
-
continue;
|
|
3538
|
-
}
|
|
3539
|
-
const nalStart = i + sc;
|
|
3540
|
-
let j = nalStart;
|
|
3541
|
-
while (j < len) {
|
|
3542
|
-
const sc2 = isStartCodeAt(j);
|
|
3543
|
-
if (sc2) break;
|
|
3544
|
-
j++;
|
|
3545
|
-
}
|
|
3546
|
-
if (nalStart < j) {
|
|
3547
|
-
const nal = annexB.subarray(nalStart, j);
|
|
3548
|
-
if (nal.length > 0) nals.push(nal);
|
|
3549
|
-
}
|
|
3550
|
-
i = j;
|
|
3551
|
-
}
|
|
3552
|
-
return nals;
|
|
3553
|
-
}
|
|
3554
|
-
function prependStartCode(nal) {
|
|
3555
|
-
return Buffer.concat([NAL_START_CODE_4B, nal]);
|
|
3556
|
-
}
|
|
3557
|
-
function joinNalsToAnnexB(...nals) {
|
|
3558
|
-
const present = nals.filter((n) => !!n && n.length > 0);
|
|
3559
|
-
if (!present.length) return;
|
|
3560
|
-
const parts = [];
|
|
3561
|
-
for (const nal of present) {
|
|
3562
|
-
parts.push(NAL_START_CODE_4B, nal);
|
|
3563
|
-
}
|
|
3564
|
-
return Buffer.concat(parts);
|
|
3565
|
-
}
|
|
3566
|
-
function detectVideoCodecFromNal(data) {
|
|
3567
|
-
if (!data || data.length < 5) return null;
|
|
3568
|
-
let nalStart = -1;
|
|
3569
|
-
for (let i = 0; i < Math.min(data.length - 4, 100); i++) {
|
|
3570
|
-
if (data[i] === 0 && data[i + 1] === 0) {
|
|
3571
|
-
if (data[i + 2] === 0 && data[i + 3] === 1) {
|
|
3572
|
-
nalStart = i + 4;
|
|
3573
|
-
break;
|
|
3574
|
-
}
|
|
3575
|
-
if (data[i + 2] === 1) {
|
|
3576
|
-
nalStart = i + 3;
|
|
3577
|
-
break;
|
|
3578
|
-
}
|
|
3579
|
-
}
|
|
3580
|
-
}
|
|
3581
|
-
if (nalStart < 0 && data.length >= 5) {
|
|
3582
|
-
const len = data.readUInt32BE(0);
|
|
3583
|
-
if (len > 0 && len <= data.length - 4) {
|
|
3584
|
-
nalStart = 4;
|
|
3585
|
-
}
|
|
3586
|
-
}
|
|
3587
|
-
if (nalStart < 0 || nalStart >= data.length) return null;
|
|
3588
|
-
const nalByte = data[nalStart];
|
|
3589
|
-
if (nalByte === void 0) return null;
|
|
3590
|
-
const forbiddenBit264 = nalByte >> 7 & 1;
|
|
3591
|
-
const h264Type = nalByte & 31;
|
|
3592
|
-
if (forbiddenBit264 === 0 && h264Type > 0 && h264Type <= 12) {
|
|
3593
|
-
if (h264Type === 7 || h264Type === 8) return "H264";
|
|
3594
|
-
if (h264Type === 5) return "H264";
|
|
3595
|
-
if (h264Type === 1) {
|
|
3596
|
-
const nalRefIdc = nalByte >> 5 & 3;
|
|
3597
|
-
if (nalRefIdc >= 1) return "H264";
|
|
3598
|
-
}
|
|
3599
|
-
}
|
|
3600
|
-
if (nalStart + 1 < data.length) {
|
|
3601
|
-
const nalByte2 = data[nalStart + 1];
|
|
3602
|
-
if (nalByte2 !== void 0) {
|
|
3603
|
-
const forbiddenBit = nalByte >> 7 & 1;
|
|
3604
|
-
const hevcType = nalByte >> 1 & 63;
|
|
3605
|
-
const temporalId = nalByte2 & 7;
|
|
3606
|
-
if (forbiddenBit === 0 && temporalId > 0 && hevcType <= 40) {
|
|
3607
|
-
if (hevcType === 32 || hevcType === 33 || hevcType === 34)
|
|
3608
|
-
return "H265";
|
|
3609
|
-
if (hevcType === 19 || hevcType === 20 || hevcType === 21)
|
|
3610
|
-
return "H265";
|
|
3611
|
-
if (hevcType <= 1 && nalByte <= 3) return "H265";
|
|
3612
|
-
}
|
|
3613
|
-
}
|
|
3614
|
-
}
|
|
3615
|
-
return null;
|
|
3616
|
-
}
|
|
3617
|
-
|
|
3618
|
-
// src/webrtc/h264-utils.ts
|
|
3619
|
-
function tryConvertWithLengthReader(data, readLen) {
|
|
3620
|
-
const result = [];
|
|
3621
|
-
let offset = 0;
|
|
3622
|
-
let nalCount = 0;
|
|
3623
|
-
while (offset < data.length) {
|
|
3624
|
-
if (offset + 4 > data.length) return null;
|
|
3625
|
-
const nalLength = readLen(data, offset);
|
|
3626
|
-
offset += 4;
|
|
3627
|
-
if (nalLength <= 0) return null;
|
|
3628
|
-
if (nalLength > data.length - offset) return null;
|
|
3629
|
-
result.push(NAL_START_CODE_4B);
|
|
3630
|
-
result.push(data.subarray(offset, offset + nalLength));
|
|
3631
|
-
offset += nalLength;
|
|
3632
|
-
nalCount++;
|
|
3633
|
-
}
|
|
3634
|
-
if (nalCount === 0) return null;
|
|
3635
|
-
return Buffer.concat(result);
|
|
3636
|
-
}
|
|
3637
|
-
function tryConvertWithLengthReader16(data, readLen) {
|
|
3638
|
-
const result = [];
|
|
3639
|
-
let offset = 0;
|
|
3640
|
-
let nalCount = 0;
|
|
3641
|
-
while (offset < data.length) {
|
|
3642
|
-
if (offset + 2 > data.length) return null;
|
|
3643
|
-
const nalLength = readLen(data, offset);
|
|
3644
|
-
offset += 2;
|
|
3645
|
-
if (nalLength <= 0) return null;
|
|
3646
|
-
if (nalLength > data.length - offset) return null;
|
|
3647
|
-
result.push(NAL_START_CODE_4B);
|
|
3648
|
-
result.push(data.subarray(offset, offset + nalLength));
|
|
3649
|
-
offset += nalLength;
|
|
3650
|
-
nalCount++;
|
|
3651
|
-
}
|
|
3652
|
-
if (nalCount === 0) return null;
|
|
3653
|
-
return Buffer.concat(result);
|
|
3654
|
-
}
|
|
3655
|
-
function tryConvertWithLengthReader24(data, endian) {
|
|
3656
|
-
const result = [];
|
|
3657
|
-
let offset = 0;
|
|
3658
|
-
let nalCount = 0;
|
|
3659
|
-
const readLen24 = (buf, at) => {
|
|
3660
|
-
if (at + 3 > buf.length) return 0;
|
|
3661
|
-
const b0 = buf[at];
|
|
3662
|
-
const b1 = buf[at + 1];
|
|
3663
|
-
const b2 = buf[at + 2];
|
|
3664
|
-
return endian === "be" ? (b0 << 16 | b1 << 8 | b2) >>> 0 : (b2 << 16 | b1 << 8 | b0) >>> 0;
|
|
3665
|
-
};
|
|
3666
|
-
while (offset < data.length) {
|
|
3667
|
-
if (offset + 3 > data.length) return null;
|
|
3668
|
-
const nalLength = readLen24(data, offset);
|
|
3669
|
-
offset += 3;
|
|
3670
|
-
if (nalLength <= 0) return null;
|
|
3671
|
-
if (nalLength > data.length - offset) return null;
|
|
3672
|
-
result.push(NAL_START_CODE_4B);
|
|
3673
|
-
result.push(data.subarray(offset, offset + nalLength));
|
|
3674
|
-
offset += nalLength;
|
|
3675
|
-
nalCount++;
|
|
3676
|
-
}
|
|
3677
|
-
if (nalCount === 0) return null;
|
|
3678
|
-
return Buffer.concat(result);
|
|
3679
|
-
}
|
|
3680
|
-
function looksLikeSingleH264Nal(nalPayload) {
|
|
3681
|
-
if (nalPayload.length < 1) return false;
|
|
3682
|
-
const b0 = nalPayload[0];
|
|
3683
|
-
if (b0 === void 0) return false;
|
|
3684
|
-
if ((b0 & 128) !== 0) return false;
|
|
3685
|
-
const nalType = b0 & 31;
|
|
3686
|
-
return nalType >= 1 && nalType <= 23;
|
|
3687
|
-
}
|
|
3688
|
-
function depacketizeRtpAggregationToAnnexB(payload) {
|
|
3689
|
-
if (payload.length < 1) return null;
|
|
3690
|
-
const nalHeader = payload[0];
|
|
3691
|
-
const nalType = nalHeader & 31;
|
|
3692
|
-
const out = [];
|
|
3693
|
-
const pushNal = (nal) => {
|
|
3694
|
-
if (nal.length === 0) return;
|
|
3695
|
-
out.push(NAL_START_CODE_4B, nal);
|
|
3696
|
-
};
|
|
3697
|
-
if (nalType === 24) {
|
|
3698
|
-
let off = 1;
|
|
3699
|
-
while (off + 2 <= payload.length) {
|
|
3700
|
-
const size = payload.readUInt16BE(off);
|
|
3701
|
-
off += 2;
|
|
3702
|
-
if (size <= 0 || off + size > payload.length) return null;
|
|
3703
|
-
pushNal(payload.subarray(off, off + size));
|
|
3704
|
-
off += size;
|
|
3705
|
-
}
|
|
3706
|
-
return out.length ? Buffer.concat(out) : null;
|
|
3707
|
-
}
|
|
3708
|
-
if (nalType === 25) {
|
|
3709
|
-
let off = 1 + 2;
|
|
3710
|
-
if (off > payload.length) return null;
|
|
3711
|
-
while (off + 2 <= payload.length) {
|
|
3712
|
-
const size = payload.readUInt16BE(off);
|
|
3713
|
-
off += 2;
|
|
3714
|
-
if (size <= 0 || off + size > payload.length) return null;
|
|
3715
|
-
pushNal(payload.subarray(off, off + size));
|
|
3716
|
-
off += size;
|
|
3717
|
-
}
|
|
3718
|
-
return out.length ? Buffer.concat(out) : null;
|
|
3719
|
-
}
|
|
3720
|
-
if (nalType === 26) {
|
|
3721
|
-
let off = 1 + 2;
|
|
3722
|
-
if (off > payload.length) return null;
|
|
3723
|
-
while (off + 2 <= payload.length) {
|
|
3724
|
-
const size = payload.readUInt16BE(off);
|
|
3725
|
-
off += 2;
|
|
3726
|
-
if (off + 1 + 2 > payload.length) return null;
|
|
3727
|
-
off += 1 + 2;
|
|
3728
|
-
if (size <= 0 || off + size > payload.length) return null;
|
|
3729
|
-
pushNal(payload.subarray(off, off + size));
|
|
3730
|
-
off += size;
|
|
3731
|
-
}
|
|
3732
|
-
return out.length ? Buffer.concat(out) : null;
|
|
3733
|
-
}
|
|
3734
|
-
if (nalType === 27) {
|
|
3735
|
-
let off = 1 + 2;
|
|
3736
|
-
if (off > payload.length) return null;
|
|
3737
|
-
while (off + 2 <= payload.length) {
|
|
3738
|
-
const size = payload.readUInt16BE(off);
|
|
3739
|
-
off += 2;
|
|
3740
|
-
if (off + 1 + 3 > payload.length) return null;
|
|
3741
|
-
off += 1 + 3;
|
|
3742
|
-
if (size <= 0 || off + size > payload.length) return null;
|
|
3743
|
-
pushNal(payload.subarray(off, off + size));
|
|
3744
|
-
off += size;
|
|
3745
|
-
}
|
|
3746
|
-
return out.length ? Buffer.concat(out) : null;
|
|
3747
|
-
}
|
|
3748
|
-
return null;
|
|
3749
|
-
}
|
|
3750
|
-
function convertH264ToAnnexB(data) {
|
|
3751
|
-
if (hasStartCodes(data)) return data;
|
|
3752
|
-
const sc4 = Buffer.from([0, 0, 0, 1]);
|
|
3753
|
-
const sc3 = Buffer.from([0, 0, 1]);
|
|
3754
|
-
const maxScan = Math.min(64, data.length);
|
|
3755
|
-
const idx4 = data.subarray(0, maxScan).indexOf(sc4);
|
|
3756
|
-
if (idx4 > 0) return data.subarray(idx4);
|
|
3757
|
-
const idx3 = data.subarray(0, maxScan).indexOf(sc3);
|
|
3758
|
-
if (idx3 > 0) return data.subarray(idx3);
|
|
3759
|
-
const be = tryConvertWithLengthReader(data, (b, o) => b.readUInt32BE(o));
|
|
3760
|
-
if (be) return be;
|
|
3761
|
-
const le = tryConvertWithLengthReader(data, (b, o) => b.readUInt32LE(o));
|
|
3762
|
-
if (le) return le;
|
|
3763
|
-
const be24 = tryConvertWithLengthReader24(data, "be");
|
|
3764
|
-
if (be24) return be24;
|
|
3765
|
-
const le24 = tryConvertWithLengthReader24(data, "le");
|
|
3766
|
-
if (le24) return le24;
|
|
3767
|
-
const be16 = tryConvertWithLengthReader16(data, (b, o) => b.readUInt16BE(o));
|
|
3768
|
-
if (be16) return be16;
|
|
3769
|
-
const le16 = tryConvertWithLengthReader16(data, (b, o) => b.readUInt16LE(o));
|
|
3770
|
-
if (le16) return le16;
|
|
3771
|
-
const agg = depacketizeRtpAggregationToAnnexB(data);
|
|
3772
|
-
if (agg) return agg;
|
|
3773
|
-
if (looksLikeSingleH264Nal(data)) {
|
|
3774
|
-
return Buffer.concat([NAL_START_CODE_4B, data]);
|
|
3775
|
-
}
|
|
3776
|
-
return data;
|
|
3777
|
-
}
|
|
3778
|
-
function isH264KeyframeAnnexB(annexB) {
|
|
3779
|
-
const nals = splitAnnexBToNals(annexB);
|
|
3780
|
-
let hasSps = false;
|
|
3781
|
-
let hasPps = false;
|
|
3782
|
-
let hasIdr = false;
|
|
3783
|
-
for (const nal of nals) {
|
|
3784
|
-
const t = (nal[0] ?? 0) & 31;
|
|
3785
|
-
if (t === 7) hasSps = true;
|
|
3786
|
-
if (t === 8) hasPps = true;
|
|
3787
|
-
if (t === 5) hasIdr = true;
|
|
3788
|
-
}
|
|
3789
|
-
return hasIdr && hasSps && hasPps;
|
|
3790
|
-
}
|
|
3791
|
-
function isH264IdrAccessUnit(annexB) {
|
|
3792
|
-
const nals = splitAnnexBToNals(annexB);
|
|
3793
|
-
for (const nal of nals) {
|
|
3794
|
-
if (nal.length < 1) continue;
|
|
3795
|
-
const t = (nal[0] ?? 0) & 31;
|
|
3796
|
-
if (t === 5) return true;
|
|
3797
|
-
}
|
|
3798
|
-
return false;
|
|
3799
|
-
}
|
|
3800
|
-
function extractH264ParamSets(annexB) {
|
|
3801
|
-
const nals = splitAnnexBToNals(annexB);
|
|
3802
|
-
let sps;
|
|
3803
|
-
let pps;
|
|
3804
|
-
let profileLevelId;
|
|
3805
|
-
for (const nal of nals) {
|
|
3806
|
-
if (nal.length < 1) continue;
|
|
3807
|
-
const nalType = nal[0] & 31;
|
|
3808
|
-
if (nalType === 7) {
|
|
3809
|
-
sps = nal;
|
|
3810
|
-
if (nal.length >= 4) {
|
|
3811
|
-
profileLevelId = Buffer.from([nal[1], nal[2], nal[3]]).toString(
|
|
3812
|
-
"hex"
|
|
3813
|
-
);
|
|
3814
|
-
}
|
|
3815
|
-
} else if (nalType === 8) {
|
|
3816
|
-
pps = nal;
|
|
3817
|
-
}
|
|
3818
|
-
}
|
|
3819
|
-
const out = {};
|
|
3820
|
-
if (sps) out.sps = sps;
|
|
3821
|
-
if (pps) out.pps = pps;
|
|
3822
|
-
if (profileLevelId) out.profileLevelId = profileLevelId;
|
|
3823
|
-
return out;
|
|
3824
|
-
}
|
|
3825
|
-
var H264RtpDepacketizer = class _H264RtpDepacketizer {
|
|
3826
|
-
fuNalHeader = null;
|
|
3827
|
-
fuParts = [];
|
|
3828
|
-
static parseRtpPayload(packet) {
|
|
3829
|
-
if (!packet || packet.length < 12) return null;
|
|
3830
|
-
const version = packet[0] >> 6 & 3;
|
|
3831
|
-
if (version !== 2) return null;
|
|
3832
|
-
const padding = (packet[0] & 32) !== 0;
|
|
3833
|
-
const extension = (packet[0] & 16) !== 0;
|
|
3834
|
-
const csrcCount = packet[0] & 15;
|
|
3835
|
-
let offset = 12 + csrcCount * 4;
|
|
3836
|
-
if (offset > packet.length) return null;
|
|
3837
|
-
if (extension) {
|
|
3838
|
-
if (offset + 4 > packet.length) return null;
|
|
3839
|
-
const extLenWords = packet.readUInt16BE(offset + 2);
|
|
3840
|
-
offset += 4 + extLenWords * 4;
|
|
3841
|
-
if (offset > packet.length) return null;
|
|
3842
|
-
}
|
|
3843
|
-
let end = packet.length;
|
|
3844
|
-
if (padding) {
|
|
3845
|
-
const padLen = packet[packet.length - 1];
|
|
3846
|
-
if (padLen <= 0 || padLen > packet.length) return null;
|
|
3847
|
-
end = packet.length - padLen;
|
|
3848
|
-
if (end < offset) return null;
|
|
3849
|
-
}
|
|
3850
|
-
if (end <= offset) return null;
|
|
3851
|
-
return packet.subarray(offset, end);
|
|
3852
|
-
}
|
|
3853
|
-
reset() {
|
|
3854
|
-
this.fuNalHeader = null;
|
|
3855
|
-
this.fuParts = [];
|
|
3856
|
-
}
|
|
3857
|
-
push(payload) {
|
|
3858
|
-
if (payload.length === 0) return [];
|
|
3859
|
-
const rtpPayload = _H264RtpDepacketizer.parseRtpPayload(payload);
|
|
3860
|
-
if (rtpPayload) payload = rtpPayload;
|
|
3861
|
-
if (hasStartCodes(payload)) return [payload];
|
|
3862
|
-
const b0 = payload[0];
|
|
3863
|
-
if ((b0 & 128) !== 0) return [];
|
|
3864
|
-
const nalType = b0 & 31;
|
|
3865
|
-
if (nalType >= 1 && nalType <= 23) {
|
|
3866
|
-
return [Buffer.concat([NAL_START_CODE_4B, payload])];
|
|
3867
|
-
}
|
|
3868
|
-
if (nalType === 24) {
|
|
3869
|
-
if (payload.length < 1 + 2) return [];
|
|
3870
|
-
let off = 1;
|
|
3871
|
-
const out = [];
|
|
3872
|
-
while (off + 2 <= payload.length) {
|
|
3873
|
-
const size = payload.readUInt16BE(off);
|
|
3874
|
-
off += 2;
|
|
3875
|
-
if (size <= 0 || off + size > payload.length) return [];
|
|
3876
|
-
const nal = payload.subarray(off, off + size);
|
|
3877
|
-
off += size;
|
|
3878
|
-
if (nal.length < 1) return [];
|
|
3879
|
-
if ((nal[0] & 128) !== 0) return [];
|
|
3880
|
-
const t = nal[0] & 31;
|
|
3881
|
-
if (t === 0 || t >= 24) return [];
|
|
3882
|
-
out.push(Buffer.concat([NAL_START_CODE_4B, nal]));
|
|
3883
|
-
}
|
|
3884
|
-
return out;
|
|
3885
|
-
}
|
|
3886
|
-
if (nalType === 28 || nalType === 29) {
|
|
3887
|
-
if (payload.length < 2) return [];
|
|
3888
|
-
const fuIndicator = payload[0];
|
|
3889
|
-
const fuHeader = payload[1];
|
|
3890
|
-
const start = (fuHeader & 128) !== 0;
|
|
3891
|
-
const end = (fuHeader & 64) !== 0;
|
|
3892
|
-
const origType = fuHeader & 31;
|
|
3893
|
-
const reconstructedHeader = fuIndicator & 224 | origType;
|
|
3894
|
-
let off = 2;
|
|
3895
|
-
if (nalType === 29) {
|
|
3896
|
-
if (payload.length < off + 2) return [];
|
|
3897
|
-
off += 2;
|
|
3898
|
-
}
|
|
3899
|
-
const frag = payload.subarray(off);
|
|
3900
|
-
if (start) {
|
|
3901
|
-
this.fuNalHeader = reconstructedHeader;
|
|
3902
|
-
this.fuParts = [frag];
|
|
3903
|
-
} else if (this.fuNalHeader != null) {
|
|
3904
|
-
this.fuParts.push(frag);
|
|
3905
|
-
} else {
|
|
3906
|
-
return [];
|
|
3907
|
-
}
|
|
3908
|
-
if (end && this.fuNalHeader != null) {
|
|
3909
|
-
const nal = Buffer.concat([
|
|
3910
|
-
Buffer.from([this.fuNalHeader]),
|
|
3911
|
-
...this.fuParts
|
|
3912
|
-
]);
|
|
3913
|
-
this.reset();
|
|
3914
|
-
return [Buffer.concat([NAL_START_CODE_4B, nal])];
|
|
3915
|
-
}
|
|
3916
|
-
return [];
|
|
3917
|
-
}
|
|
3918
|
-
return [];
|
|
3919
|
-
}
|
|
3920
|
-
};
|
|
3921
|
-
|
|
3922
|
-
// src/webrtc/h265-utils.ts
|
|
3923
|
-
/**
 * Try to reinterpret `data` as a sequence of [4-byte length][NALU] records
 * and rewrite it as Annex-B (each NALU prefixed with a 4-byte start code).
 * Returns the converted Buffer, or null when the layout does not parse
 * cleanly (truncated header, zero/oversized length, or no NALUs at all).
 */
function tryConvertWithLengthReader2(data, readLen) {
  const pieces = [];
  let cursor = 0;
  let parsed = 0;
  for (; cursor < data.length; parsed++) {
    // Need a full 4-byte length prefix.
    if (cursor + 4 > data.length) return null;
    const len = readLen(data, cursor);
    cursor += 4;
    // Reject non-positive or out-of-bounds lengths — not this framing.
    if (len <= 0 || len > data.length - cursor) return null;
    pieces.push(NAL_START_CODE_4B, data.subarray(cursor, cursor + len));
    cursor += len;
  }
  return parsed === 0 ? null : Buffer.concat(pieces);
}
|
|
3941
|
-
/**
 * Try to reinterpret `data` as a sequence of [2-byte length][NALU] records
 * and rewrite it as Annex-B (each NALU prefixed with a 4-byte start code).
 * Returns the converted Buffer, or null when the layout does not parse.
 */
function tryConvertWithLengthReader162(data, readLen) {
  const pieces = [];
  let cursor = 0;
  let parsed = 0;
  for (; cursor < data.length; parsed++) {
    // Need a full 2-byte length prefix.
    if (cursor + 2 > data.length) return null;
    const len = readLen(data, cursor);
    cursor += 2;
    // Reject non-positive or out-of-bounds lengths — not this framing.
    if (len <= 0 || len > data.length - cursor) return null;
    pieces.push(NAL_START_CODE_4B, data.subarray(cursor, cursor + len));
    cursor += len;
  }
  return parsed === 0 ? null : Buffer.concat(pieces);
}
|
|
3959
|
-
/**
 * Try to reinterpret `data` as a sequence of [3-byte length][NALU] records
 * (big- or little-endian lengths per `endian`) and rewrite it as Annex-B.
 * Returns the converted Buffer, or null when the layout does not parse.
 */
function tryConvertWithLengthReader242(data, endian) {
  const big = endian === "be";
  const pieces = [];
  let at = 0;
  let count = 0;
  while (at < data.length) {
    // Need a full 3-byte length prefix.
    if (at + 3 > data.length) return null;
    const b0 = data[at];
    const b1 = data[at + 1];
    const b2 = data[at + 2];
    const len = big
      ? (b0 << 16 | b1 << 8 | b2) >>> 0
      : (b2 << 16 | b1 << 8 | b0) >>> 0;
    at += 3;
    // Reject non-positive or out-of-bounds lengths — not this framing.
    if (len <= 0 || len > data.length - at) return null;
    pieces.push(NAL_START_CODE_4B, data.subarray(at, at + len));
    at += len;
    count++;
  }
  return count > 0 ? Buffer.concat(pieces) : null;
}
|
|
3984
|
-
/**
 * Heuristic: does `nalPayload` begin like a bare H.265 NAL unit?
 * Requires at least the 2-byte NAL header, a zero forbidden bit,
 * and a NAL type in the VCL/parameter-set range (<= 40).
 */
function looksLikeSingleH265Nal(nalPayload) {
  if (nalPayload.length < 2) return false;
  const first = nalPayload[0];
  // Forbidden-zero bit (0x80) must be clear.
  if (first === undefined || (first & 0x80) !== 0) return false;
  // NAL type lives in bits 1..6 of the first header byte.
  return ((first >> 1) & 0x3f) <= 40;
}
|
|
3992
|
-
/**
 * Normalize an H.265 frame of unknown framing to Annex-B.
 * Tries, in order: already-Annex-B passthrough, a start code within the
 * first 64 bytes (leading garbage), length-prefixed framings (32-bit BE/LE,
 * 24-bit BE/LE, 16-bit BE/LE), and finally a bare single NAL. Falls back
 * to returning the input unchanged.
 */
function convertH265ToAnnexB(data) {
  if (hasStartCodes(data)) return data;
  // A start code appearing shortly after the head suggests leading junk —
  // trim up to it. Only the first 64 bytes are scanned.
  const window = data.subarray(0, Math.min(64, data.length));
  const at4 = window.indexOf(Buffer.from([0, 0, 0, 1]));
  if (at4 > 0) return data.subarray(at4);
  const at3 = window.indexOf(Buffer.from([0, 0, 1]));
  if (at3 > 0) return data.subarray(at3);
  // Length-prefixed candidates, most common framing first.
  const attempts = [
    () => tryConvertWithLengthReader2(data, (b, o) => b.readUInt32BE(o)),
    () => tryConvertWithLengthReader2(data, (b, o) => b.readUInt32LE(o)),
    () => tryConvertWithLengthReader242(data, "be"),
    () => tryConvertWithLengthReader242(data, "le"),
    () => tryConvertWithLengthReader162(data, (b, o) => b.readUInt16BE(o)),
    () => tryConvertWithLengthReader162(data, (b, o) => b.readUInt16LE(o))
  ];
  for (const attempt of attempts) {
    const converted = attempt();
    if (converted) return converted;
  }
  // Bare single NAL: just prepend a start code.
  if (looksLikeSingleH265Nal(data)) {
    return Buffer.concat([NAL_START_CODE_4B, data]);
  }
  return data;
}
|
|
4018
|
-
/**
 * Extract the H.265 NAL type (bits 1..6 of the first header byte).
 * Returns null for an empty buffer or when the forbidden-zero bit is set.
 */
function getH265NalType(nalPayload) {
  if (nalPayload.length === 0) return null;
  const header = nalPayload[0];
  if (header === undefined || (header & 0x80) !== 0) return null;
  return (header >> 1) & 0x3f;
}
|
|
4025
|
-
/** True when `nalType` is an H.265 IRAP picture type (16..23 inclusive). */
function isH265Irap(nalType) {
  return !(nalType < 16 || nalType > 23);
}
|
|
4028
|
-
/**
 * True when the Annex-B access unit is a self-contained keyframe:
 * it must carry VPS (32), SPS (33), PPS (34) AND an IRAP slice.
 */
function isH265KeyframeAnnexB(annexB) {
  const seen = { vps: false, sps: false, pps: false, irap: false };
  for (const nal of splitAnnexBToNals(annexB)) {
    const t = getH265NalType(nal);
    if (t === null) continue;
    if (t === 32) seen.vps = true;
    else if (t === 33) seen.sps = true;
    else if (t === 34) seen.pps = true;
    if (isH265Irap(t)) seen.irap = true;
  }
  return seen.irap && seen.vps && seen.sps && seen.pps;
}
|
|
4044
|
-
/**
 * True when any NAL in the Annex-B access unit is an IRAP picture.
 * Malformed NALs (too short, forbidden bit set) are skipped.
 */
function isH265IrapAccessUnit(annexB) {
  const nals = splitAnnexBToNals(annexB);
  return nals.some((nal) => {
    if (nal.length < 2) return false;
    const head = nal[0];
    if (head === undefined || (head & 0x80) !== 0) return false;
    return isH265Irap((head >> 1) & 0x3f);
  });
}
|
|
4056
|
-
/**
 * Pull the parameter-set NALs out of an Annex-B access unit.
 * Returns { vps?, sps?, pps? } keeping the LAST occurrence of each
 * (types 32/33/34); keys are omitted when a set is absent.
 */
function extractH265ParamSets(annexB) {
  let vps;
  let sps;
  let pps;
  for (const nal of splitAnnexBToNals(annexB)) {
    if (nal.length < 2) continue;
    switch ((nal[0] >> 1) & 0x3f) {
      case 32:
        vps = nal;
        break;
      case 33:
        sps = nal;
        break;
      case 34:
        pps = nal;
        break;
    }
  }
  const sets = {};
  if (vps) sets.vps = vps;
  if (sps) sets.sps = sps;
  if (pps) sets.pps = pps;
  return sets;
}
|
|
4074
|
-
var H265RtpDepacketizer = class _H265RtpDepacketizer {
  // In-progress fragmentation-unit reassembly: [start-code, reconstructed
  // 2-byte NAL header, frag, frag, ...], or null when no FU is open.
  fuParts = null;
  /**
   * Strip the RTP fixed header, CSRC list, header extension and padding
   * from `packet`, returning the bare payload — or null when the buffer
   * does not look like a valid RTP packet (callers then treat the input
   * as an already-stripped payload).
   */
  static parseRtpPayload(packet) {
    if (!packet || packet.length < 12) return null;
    // Top two bits of byte 0 are the RTP version; must be 2.
    const version = packet[0] >> 6 & 3;
    if (version !== 2) return null;
    const padding = (packet[0] & 32) !== 0;
    const extension = (packet[0] & 16) !== 0;
    // CC: number of 4-byte CSRC entries following the 12-byte fixed header.
    const csrcCount = packet[0] & 15;
    let offset = 12 + csrcCount * 4;
    if (offset > packet.length) return null;
    if (extension) {
      // Extension header: 2 bytes profile id + 2 bytes length in 32-bit words.
      if (offset + 4 > packet.length) return null;
      const extLenWords = packet.readUInt16BE(offset + 2);
      offset += 4 + extLenWords * 4;
      if (offset > packet.length) return null;
    }
    let end = packet.length;
    if (padding) {
      // Last byte holds the pad length (which includes itself).
      const padLen = packet[packet.length - 1];
      if (padLen <= 0 || padLen > packet.length) return null;
      end = packet.length - padLen;
      if (end < offset) return null;
    }
    // Empty payload is treated as invalid.
    if (end <= offset) return null;
    return packet.subarray(offset, end);
  }
  /** Drop any partially assembled fragmentation unit. */
  reset() {
    this.fuParts = null;
  }
  /**
   * Feed one packet (full RTP or bare H.265 payload). Returns zero or more
   * Annex-B NAL buffers, each prefixed with a 4-byte start code. Invalid
   * or incomplete input yields [] and, for broken aggregations, drops the
   * whole packet.
   */
  push(payload) {
    if (!payload || payload.length < 2) return [];
    // Accept either framing: if RTP parsing succeeds, use the stripped payload.
    const rtpPayload = _H265RtpDepacketizer.parseRtpPayload(payload);
    if (rtpPayload) payload = rtpPayload;
    const h0 = payload[0];
    const h1 = payload[1];
    // Forbidden-zero bit must be clear.
    if ((h0 & 128) !== 0) return [];
    // HEVC NAL type: bits 1..6 of the first payload-header byte.
    const nalType = h0 >> 1 & 63;
    if (nalType === 48) {
      // Type 48: aggregation packet — (16-bit size, NALU) pairs after the
      // 2-byte payload header; all NALUs are emitted as one buffer.
      let off = 2;
      const out = [];
      while (off + 2 <= payload.length) {
        const size = payload.readUInt16BE(off);
        off += 2;
        if (size <= 0 || off + size > payload.length) return [];
        const nal = payload.subarray(off, off + size);
        off += size;
        if (nal.length) out.push(NAL_START_CODE_4B, nal);
      }
      return out.length ? [Buffer.concat(out)] : [];
    }
    if (nalType === 49) {
      // Type 49: fragmentation unit — byte 2 is the FU header.
      if (payload.length < 3) return [];
      const fuHeader = payload[2];
      const start = (fuHeader & 128) !== 0; // S bit: first fragment
      const end = (fuHeader & 64) !== 0; // E bit: last fragment
      const origType = fuHeader & 63; // original NAL type carried in the FU header
      // Rebuild the original first header byte: keep bit 7 and bit 0 of h0,
      // splice the real type into bits 1..6; second header byte is unchanged.
      const orig0 = h0 & 129 | (origType & 63) << 1;
      const orig1 = h1;
      const frag = payload.subarray(3);
      if (start) {
        this.fuParts = [NAL_START_CODE_4B, Buffer.from([orig0, orig1]), frag];
      } else {
        // A continuation with no open FU (lost start packet) is dropped.
        if (!this.fuParts) return [];
        this.fuParts.push(frag);
      }
      if (end) {
        if (!this.fuParts) return [];
        const out = Buffer.concat(this.fuParts);
        this.fuParts = null;
        return [out];
      }
      return [];
    }
    // Every other type: single-NAL packet, emit with a start code prefix.
    return [Buffer.concat([NAL_START_CODE_4B, payload])];
  }
};
|
|
4151
|
-
|
|
4152
|
-
// src/webrtc/fanout.ts
|
|
4153
|
-
/**
 * Single-consumer async queue with a hard item cap.
 * When full, the OLDEST items are discarded (drop-head). `next()` resolves
 * immediately when an item is buffered, otherwise parks one waiter until
 * the next push() or close().
 */
var AsyncBoundedQueue = class {
  maxItems;
  queue = [];
  waiting;
  closed = false;
  constructor(maxItems) {
    // Coerce to int and enforce a minimum capacity of 1.
    this.maxItems = Math.max(1, maxItems | 0);
  }
  /** Enqueue an item; hands it straight to a parked consumer when present. */
  push(item) {
    if (this.closed) return;
    const waiter = this.waiting;
    if (waiter) {
      this.waiting = void 0;
      waiter.resolve({ value: item, done: false });
      return;
    }
    this.queue.push(item);
    const overflow = this.queue.length - this.maxItems;
    // Drop the oldest entries so at most maxItems remain.
    if (overflow > 0) this.queue.splice(0, overflow);
  }
  /** Close the queue; wakes a parked consumer with a done result. */
  close() {
    if (this.closed) return;
    this.closed = true;
    const waiter = this.waiting;
    if (waiter) {
      this.waiting = void 0;
      waiter.resolve({ value: void 0, done: true });
    }
  }
  /** Iterator-style pull: { value, done }. Parks until data or close. */
  async next() {
    const head = this.queue.shift();
    if (head !== void 0) return { value: head, done: false };
    if (this.closed) return { value: void 0, done: true };
    return await new Promise((resolve) => {
      this.waiting = { resolve };
    });
  }
  /** Whether close() has been called. */
  isClosed() {
    return this.closed;
  }
  /** Number of buffered items. */
  size() {
    return this.queue.length;
  }
};
|
|
4198
|
-
// One shared frame source fanned out to N subscriber queues. Each subscriber
// gets its own AsyncBoundedQueue so a slow consumer only drops its own frames.
var StreamFanout = class {
  // Options: { createSource, maxQueueItems, onFrame?, onError? } (shape as used below).
  opts;
  // subscriber id -> AsyncBoundedQueue
  queues = /* @__PURE__ */ new Map();
  // The async-iterable returned by opts.createSource(), while running.
  source = null;
  running = false;
  // Promise of the pump loop, used by stop() to await drain.
  pumpPromise = null;
  constructor(opts) {
    this.opts = opts;
  }
  /** Start pumping frames from the source to all subscribers. */
  start() {
    if (this.running) return;
    this.running = true;
    this.source = this.opts.createSource();
    this.pumpPromise = (async () => {
      try {
        for await (const frame of this.source) {
          try {
            // Observer hook; its errors must not kill the pump.
            this.opts.onFrame?.(frame);
          } catch {
          }
          for (const q of this.queues.values()) {
            q.push(frame);
          }
        }
      } catch (e) {
        this.opts.onError?.(e);
      } finally {
        // Source ended or failed: close every subscriber so their
        // generators terminate.
        this.running = false;
        for (const q of this.queues.values()) q.close();
        this.queues.clear();
      }
    })();
  }
  /**
   * Create a subscriber async generator.
   * Returns an async generator that yields frames from the shared source.
   * The generator terminates when the source ends or unsubscribe is called.
   */
  subscribe(id) {
    const q = new AsyncBoundedQueue(this.opts.maxQueueItems);
    if (!this.running) {
      // Not running: hand back an already-finished generator.
      q.close();
    } else {
      this.queues.set(id, q);
    }
    const self = this;
    return (async function* () {
      try {
        while (true) {
          const r = await q.next();
          if (r.done) return;
          yield r.value;
        }
      } finally {
        // Runs on normal end, break, or generator.return(): detach cleanly.
        q.close();
        self.queues.delete(id);
      }
    })();
  }
  /** Unsubscribe a specific subscriber. */
  unsubscribe(id) {
    const q = this.queues.get(id);
    if (q) {
      q.close();
      this.queues.delete(id);
    }
  }
  /** Stop the source and close all subscriber queues. */
  async stop() {
    if (!this.running) return;
    this.running = false;
    const src = this.source;
    this.source = null;
    for (const q of this.queues.values()) q.close();
    this.queues.clear();
    // Give the source/pump a bounded time to wind down, then move on.
    const STOP_TIMEOUT = 3e3;
    const timeout = new Promise((r) => setTimeout(r, STOP_TIMEOUT));
    try {
      await Promise.race([
        (async () => {
          try {
            // Ask the async iterator to finish (runs its finally blocks).
            await src?.return(void 0);
          } catch {
          }
          try {
            await this.pumpPromise;
          } catch {
          }
        })(),
        timeout
      ]);
    } catch {
    }
    this.pumpPromise = null;
  }
  /** Returns true if the fan-out is running. */
  isRunning() {
    return this.running;
  }
  /** Returns the number of active subscribers. */
  subscriberCount() {
    return this.queues.size;
  }
};
|
|
4303
|
-
|
|
4304
|
-
// src/webrtc/ffmpeg-process.ts
|
|
4305
|
-
var import_node_child_process5 = require("child_process");
|
|
4306
|
-
// Thin lifecycle wrapper around a spawned FFmpeg child process:
// spawn with configurable stdio, surface stderr/exit callbacks, and
// graceful SIGTERM->SIGKILL teardown.
var FfmpegProcess = class {
  /**
   * @param options - { args, ffmpegPath?, label?, logger?, extraStdio?,
   *   onExit?, onStderr? } (shape as used below).
   */
  constructor(options) {
    this.options = options;
    this.logger = options.logger;
    this.label = options.label ?? "ffmpeg";
  }
  // Underlying ChildProcess; null before start() and after kill().
  process = null;
  // Latched once kill() begins; never reset.
  killed = false;
  logger;
  label;
  /** Spawn the FFmpeg process. Returns stdin writable stream. */
  start() {
    if (this.process) {
      throw new Error(`[${this.label}] FFmpeg process already started`);
    }
    const ffmpegPath = this.options.ffmpegPath ?? "ffmpeg";
    // stdin piped (we feed input), stdout ignored, stderr piped for logging;
    // callers may append extra fds (e.g. pipe:3) via extraStdio.
    const stdio = [
      "pipe",
      "ignore",
      "pipe",
      ...this.options.extraStdio ?? []
    ];
    this.process = (0, import_node_child_process5.spawn)(ffmpegPath, this.options.args, { stdio });
    this.process.on("error", (error) => {
      this.logger?.error(`[${this.label}] Failed to spawn FFmpeg:`, error);
    });
    this.process.on("close", (code, signal) => {
      this.options.onExit?.(code, signal);
    });
    this.process.stderr?.on("data", (data) => {
      const output = data.toString();
      this.options.onStderr?.(output);
    });
    this.process.stdin?.on("error", (error) => {
      const code = error?.code;
      // Expected when ffmpeg exits while we are still writing — stay quiet.
      if (code === "EPIPE" || code === "ERR_STREAM_WRITE_AFTER_END") return;
      this.logger?.error(`[${this.label}] FFmpeg stdin error:`, error);
    });
    return this.process.stdin;
  }
  /** Get a specific stdio stream by fd index (e.g. 3 for pipe:3). */
  getStdio(fd) {
    if (!this.process) return null;
    return this.process.stdio?.[fd] ?? null;
  }
  /** Get the underlying ChildProcess. */
  getProcess() {
    return this.process;
  }
  /** Kill the FFmpeg process gracefully (SIGTERM then SIGKILL after timeout). */
  async kill(timeoutMs = 3e3) {
    if (this.killed || !this.process) return;
    this.killed = true;
    const proc = this.process;
    this.process = null;
    try {
      // Closing stdin lets ffmpeg finalize output before the signal lands.
      proc.stdin?.end();
    } catch {
    }
    try {
      proc.kill("SIGTERM");
    } catch {
    }
    // Resolve on process close, or escalate to SIGKILL after timeoutMs.
    await new Promise((resolve) => {
      const timer = setTimeout(() => {
        try {
          proc.kill("SIGKILL");
        } catch {
        }
        resolve();
      }, timeoutMs);
      proc.on("close", () => {
        clearTimeout(timer);
        resolve();
      });
    });
  }
  /** Check if the process is running. */
  isRunning() {
    return this.process !== null && !this.killed;
  }
};
|
|
4388
|
-
|
|
4389
|
-
// src/webrtc/frame-source.ts
|
|
4390
|
-
/**
 * Adapt an EventEmitter that fires per-frame events into a single-consumer
 * async generator of { type: "video"|"audio", frame } items.
 * Frames are buffered (bounded to 500, oldest dropped) while the consumer
 * is busy; the generator ends when the emitter fires "close" or "end", or
 * when the consumer returns/breaks.
 */
function fromEventEmitter(emitter, videoEvent = "videoFrame", audioEvent = "audioFrame") {
  // Frames produced while no consumer is awaiting.
  const queue = [];
  // Resolver of the single parked consumer, if any.
  let resolve = null;
  let done = false;
  const onVideo = (frame) => {
    const mf = { type: "video", frame };
    if (resolve) {
      // Hand the frame directly to the parked consumer.
      const r = resolve;
      resolve = null;
      r({ value: mf, done: false });
    } else {
      queue.push(mf);
      // Bounded backlog: keep only the newest 500 frames.
      if (queue.length > 500) queue.splice(0, queue.length - 500);
    }
  };
  const onAudio = (frame) => {
    const mf = { type: "audio", frame };
    if (resolve) {
      const r = resolve;
      resolve = null;
      r({ value: mf, done: false });
    } else {
      queue.push(mf);
      if (queue.length > 500) queue.splice(0, queue.length - 500);
    }
  };
  // Idempotent teardown: detach listeners and release a parked consumer.
  const cleanup = () => {
    done = true;
    emitter.removeListener(videoEvent, onVideo);
    emitter.removeListener(audioEvent, onAudio);
    if (resolve) {
      const r = resolve;
      resolve = null;
      r({ value: void 0, done: true });
    }
  };
  emitter.on(videoEvent, onVideo);
  emitter.on(audioEvent, onAudio);
  emitter.once("close", cleanup);
  emitter.once("end", cleanup);
  return (async function* () {
    try {
      while (true) {
        // Drain buffered frames first.
        const item = queue.shift();
        if (item) {
          yield item;
          continue;
        }
        if (done) return;
        // Park until the next frame or cleanup.
        const result = await new Promise((r) => {
          resolve = r;
        });
        if (result.done) return;
        yield result.value;
      }
    } finally {
      // Also runs when the consumer breaks/returns early.
      cleanup();
    }
  })();
}
|
|
4450
|
-
/**
 * Build a push-style frame source: returns an async generator plus
 * pushVideo/pushAudio/close callbacks feeding it. Frames pushed while the
 * consumer is busy are buffered (bounded to 500, oldest dropped); pushes
 * after close are ignored.
 */
function fromPushCallback() {
  const pending = [];
  // Resolver of the single parked consumer, if any.
  let wake = null;
  let closed = false;
  const enqueue = (mf) => {
    if (closed) return;
    if (wake) {
      // Deliver straight to the parked consumer.
      const deliver = wake;
      wake = null;
      deliver({ value: mf, done: false });
      return;
    }
    pending.push(mf);
    // Bounded backlog: keep only the newest 500 entries.
    const excess = pending.length - 500;
    if (excess > 0) pending.splice(0, excess);
  };
  const source = (async function* () {
    try {
      for (;;) {
        const head = pending.shift();
        if (head) {
          yield head;
          continue;
        }
        if (closed) return;
        // Park until the next push or close.
        const next = await new Promise((r) => {
          wake = r;
        });
        if (next.done) return;
        yield next.value;
      }
    } finally {
      // Consumer returned/broke: stop accepting frames.
      closed = true;
    }
  })();
  return {
    source,
    pushVideo: (frame) => enqueue({ type: "video", frame }),
    pushAudio: (frame) => enqueue({ type: "audio", frame }),
    close: () => {
      closed = true;
      if (wake) {
        const deliver = wake;
        wake = null;
        deliver({ value: void 0, done: true });
      }
    }
  };
}
|
|
4498
|
-
/**
 * Adapt a native frame stream (async iterable of raw frames) into the
 * common { type, frame } media-frame shape. Frames with a truthy `audio`
 * flag become audio frames ("aac" -> "Aac", anything else -> "Adpcm",
 * mono, default 8 kHz); all others become video frames (default codec
 * "H264", non-keyframe). Missing timestamps fall back to wall-clock
 * microseconds.
 */
function fromNativeStream(native) {
  return (async function* () {
    for await (const raw of native) {
      const ts = raw.microseconds ?? Date.now() * 1e3;
      if (raw.audio) {
        yield {
          type: "audio",
          frame: {
            data: raw.data,
            codec: raw.codec === "aac" ? "Aac" : "Adpcm",
            sampleRate: raw.sampleRate ?? 8e3,
            channels: 1,
            timestampMicros: ts
          }
        };
      } else {
        yield {
          type: "video",
          frame: {
            data: raw.data,
            codec: raw.videoType ?? "H264",
            isKeyframe: raw.isKeyframe ?? false,
            timestampMicros: ts
          }
        };
      }
    }
  })();
}
|
|
4526
|
-
|
|
4527
|
-
// src/webrtc/ffmpeg-source.ts
|
|
4528
|
-
var import_node_child_process6 = require("child_process");
|
|
4529
|
-
var AnnexBAccessUnitAssembler = class {
|
|
4530
|
-
buffer = Buffer.alloc(0);
|
|
4531
|
-
/** Feed data from ffmpeg stdout. Returns complete access units (one per frame). */
|
|
4532
|
-
feed(data) {
|
|
4533
|
-
this.buffer = this.buffer.length > 0 ? Buffer.concat([this.buffer, data]) : data;
|
|
4534
|
-
const aus = [];
|
|
4535
|
-
const audPositions = [];
|
|
4536
|
-
for (let i = 0; i < this.buffer.length - 5; i++) {
|
|
4537
|
-
if (this.buffer[i] === 0 && this.buffer[i + 1] === 0) {
|
|
4538
|
-
let scLen = 0;
|
|
4539
|
-
if (this.buffer[i + 2] === 0 && this.buffer[i + 3] === 1) scLen = 4;
|
|
4540
|
-
else if (this.buffer[i + 2] === 1) scLen = 3;
|
|
4541
|
-
if (scLen > 0) {
|
|
4542
|
-
const nalType = this.buffer[i + scLen] & 31;
|
|
4543
|
-
if (nalType === 9) {
|
|
4544
|
-
audPositions.push(i);
|
|
4545
|
-
}
|
|
4546
|
-
}
|
|
4547
|
-
}
|
|
4548
|
-
}
|
|
4549
|
-
if (audPositions.length < 2) return aus;
|
|
4550
|
-
for (let j = 0; j < audPositions.length - 1; j++) {
|
|
4551
|
-
const au = this.buffer.subarray(audPositions[j], audPositions[j + 1]);
|
|
4552
|
-
if (au.length > 4) aus.push(au);
|
|
4553
|
-
}
|
|
4554
|
-
this.buffer = this.buffer.subarray(audPositions[audPositions.length - 1]);
|
|
4555
|
-
return aus;
|
|
4556
|
-
}
|
|
4557
|
-
/** Flush any remaining buffered data as a final access unit. */
|
|
4558
|
-
flush() {
|
|
4559
|
-
if (this.buffer.length <= 4) return null;
|
|
4560
|
-
const au = this.buffer;
|
|
4561
|
-
this.buffer = Buffer.alloc(0);
|
|
4562
|
-
return au;
|
|
4563
|
-
}
|
|
4564
|
-
};
|
|
4565
|
-
var AdaptiveFfmpegSource = class {
|
|
4566
|
-
rtspUrl;
|
|
4567
|
-
ffmpegPath;
|
|
4568
|
-
logger;
|
|
4569
|
-
label;
|
|
4570
|
-
audioMode;
|
|
4571
|
-
currentParams;
|
|
4572
|
-
proc = null;
|
|
4573
|
-
audioProc = null;
|
|
4574
|
-
closed = false;
|
|
4575
|
-
/** Push callback for the frame source. */
|
|
4576
|
-
pushFrame = null;
|
|
4577
|
-
closeSource = null;
|
|
4578
|
-
/** The FrameSource async generator. Created once, survives ffmpeg restarts. */
|
|
4579
|
-
source;
|
|
4580
|
-
constructor(options) {
|
|
4581
|
-
this.rtspUrl = options.rtspUrl;
|
|
4582
|
-
this.ffmpegPath = options.ffmpegPath ?? "ffmpeg";
|
|
4583
|
-
this.audioMode = options.audioMode ?? "copy";
|
|
4584
|
-
this.logger = options.logger;
|
|
4585
|
-
this.label = options.label ?? "adaptive-ffmpeg";
|
|
4586
|
-
this.currentParams = { ...options.initialParams };
|
|
4587
|
-
const queue = [];
|
|
4588
|
-
let resolve = null;
|
|
4589
|
-
let done = false;
|
|
4590
|
-
this.pushFrame = (mf) => {
|
|
4591
|
-
if (done) return;
|
|
4592
|
-
if (resolve) {
|
|
4593
|
-
const r = resolve;
|
|
4594
|
-
resolve = null;
|
|
4595
|
-
r({ value: mf, done: false });
|
|
4596
|
-
} else {
|
|
4597
|
-
queue.push(mf);
|
|
4598
|
-
if (queue.length > 120) queue.splice(0, queue.length - 60);
|
|
4599
|
-
}
|
|
4600
|
-
};
|
|
4601
|
-
this.closeSource = () => {
|
|
4602
|
-
done = true;
|
|
4603
|
-
if (resolve) {
|
|
4604
|
-
const r = resolve;
|
|
4605
|
-
resolve = null;
|
|
4606
|
-
r({ value: void 0, done: true });
|
|
4607
|
-
}
|
|
4608
|
-
};
|
|
4609
|
-
this.source = (async function* () {
|
|
4610
|
-
try {
|
|
4611
|
-
while (true) {
|
|
4612
|
-
const item = queue.shift();
|
|
4613
|
-
if (item) {
|
|
4614
|
-
yield item;
|
|
4615
|
-
continue;
|
|
4616
|
-
}
|
|
4617
|
-
if (done) return;
|
|
4618
|
-
const result = await new Promise((r) => {
|
|
4619
|
-
resolve = r;
|
|
4620
|
-
});
|
|
4621
|
-
if (result.done) return;
|
|
4622
|
-
yield result.value;
|
|
4623
|
-
}
|
|
4624
|
-
} finally {
|
|
4625
|
-
done = true;
|
|
4626
|
-
}
|
|
4627
|
-
})();
|
|
4628
|
-
}
|
|
4629
|
-
/** Start the ffmpeg process with current encoding params. */
|
|
4630
|
-
async start() {
|
|
4631
|
-
if (this.closed) return;
|
|
4632
|
-
this.spawnFfmpeg();
|
|
4633
|
-
}
|
|
4634
|
-
/** Get the current encoding parameters. */
|
|
4635
|
-
getParams() {
|
|
4636
|
-
return { ...this.currentParams };
|
|
4637
|
-
}
|
|
4638
|
-
/**
|
|
4639
|
-
* Hot-swap encoding parameters.
|
|
4640
|
-
* Stops the current ffmpeg and starts a new one with updated params.
|
|
4641
|
-
* The FrameSource continues seamlessly — the new ffmpeg's first keyframe
|
|
4642
|
-
* is gated internally so consumers see a clean transition.
|
|
4643
|
-
*/
|
|
4644
|
-
async updateParams(params) {
|
|
4645
|
-
const prev = { ...this.currentParams };
|
|
4646
|
-
if (params.maxBitrateKbps !== void 0) this.currentParams.maxBitrateKbps = params.maxBitrateKbps;
|
|
4647
|
-
if (params.width !== void 0) this.currentParams.width = params.width;
|
|
4648
|
-
if (params.height !== void 0) this.currentParams.height = params.height;
|
|
4649
|
-
if (params.preset !== void 0) this.currentParams.preset = params.preset;
|
|
4650
|
-
if (prev.maxBitrateKbps === this.currentParams.maxBitrateKbps && prev.width === this.currentParams.width && prev.height === this.currentParams.height) return;
|
|
4651
|
-
this.logger?.info(
|
|
4652
|
-
`[${this.label}] Updating params: ${prev.maxBitrateKbps}kbps ${prev.width}x${prev.height} \u2192 ${this.currentParams.maxBitrateKbps}kbps ${this.currentParams.width}x${this.currentParams.height}`
|
|
4653
|
-
);
|
|
4654
|
-
await this.killFfmpeg();
|
|
4655
|
-
if (!this.closed) {
|
|
4656
|
-
this.spawnFfmpeg();
|
|
4657
|
-
}
|
|
4658
|
-
}
|
|
4659
|
-
/** Stop the source and kill ffmpeg. */
|
|
4660
|
-
async stop() {
|
|
4661
|
-
if (this.closed) return;
|
|
4662
|
-
this.closed = true;
|
|
4663
|
-
await this.killFfmpeg();
|
|
4664
|
-
this.closeSource?.();
|
|
4665
|
-
}
|
|
4666
|
-
// -----------------------------------------------------------------------
|
|
4667
|
-
// Private
|
|
4668
|
-
// -----------------------------------------------------------------------
|
|
4669
|
-
spawnFfmpeg() {
|
|
4670
|
-
const { maxBitrateKbps, width, height, preset } = this.currentParams;
|
|
4671
|
-
const args = [
|
|
4672
|
-
"-hide_banner",
|
|
4673
|
-
"-loglevel",
|
|
4674
|
-
"error",
|
|
4675
|
-
"-fflags",
|
|
4676
|
-
"+nobuffer",
|
|
4677
|
-
"-flags",
|
|
4678
|
-
"+low_delay",
|
|
4679
|
-
"-rtsp_transport",
|
|
4680
|
-
"tcp",
|
|
4681
|
-
"-i",
|
|
4682
|
-
this.rtspUrl,
|
|
4683
|
-
"-c:v",
|
|
4684
|
-
"libx264",
|
|
4685
|
-
"-preset",
|
|
4686
|
-
preset ?? "ultrafast",
|
|
4687
|
-
"-tune",
|
|
4688
|
-
"zerolatency",
|
|
4689
|
-
"-crf",
|
|
4690
|
-
"28",
|
|
4691
|
-
"-maxrate",
|
|
4692
|
-
`${maxBitrateKbps}k`,
|
|
4693
|
-
"-bufsize",
|
|
4694
|
-
`${Math.round(maxBitrateKbps * 0.5)}k`,
|
|
4695
|
-
"-g",
|
|
4696
|
-
"50",
|
|
4697
|
-
"-keyint_min",
|
|
4698
|
-
"25",
|
|
4699
|
-
"-x264opts",
|
|
4700
|
-
"aud=1:sliced-threads=1",
|
|
4701
|
-
"-flush_packets",
|
|
4702
|
-
"1"
|
|
4703
|
-
];
|
|
4704
|
-
if (width > 0 && height > 0) {
|
|
4705
|
-
args.push("-vf", `scale=${width}:${height}`);
|
|
4706
|
-
}
|
|
4707
|
-
args.push(
|
|
4708
|
-
"-an",
|
|
4709
|
-
"-f",
|
|
4710
|
-
"h264",
|
|
4711
|
-
"-"
|
|
4712
|
-
);
|
|
4713
|
-
this.proc = (0, import_node_child_process6.spawn)(this.ffmpegPath, args, {
|
|
4714
|
-
stdio: ["ignore", "pipe", "pipe"]
|
|
4715
|
-
});
|
|
4716
|
-
this.proc.on("error", (err) => {
|
|
4717
|
-
this.logger?.error(`[${this.label}] ffmpeg spawn error: ${err.message}`);
|
|
4718
|
-
});
|
|
4719
|
-
this.proc.on("close", (code, signal) => {
|
|
4720
|
-
this.logger?.debug(`[${this.label}] ffmpeg exited code=${code} signal=${signal}`);
|
|
4721
|
-
this.proc = null;
|
|
4722
|
-
if (!this.closed) {
|
|
4723
|
-
setTimeout(() => {
|
|
4724
|
-
if (!this.closed) this.spawnFfmpeg();
|
|
4725
|
-
}, 2e3);
|
|
4726
|
-
}
|
|
4727
|
-
});
|
|
4728
|
-
this.proc.stderr?.on("data", (data) => {
|
|
4729
|
-
const s = data.toString();
|
|
4730
|
-
if (s.includes("error") || s.includes("Error") || s.includes("fatal")) {
|
|
4731
|
-
this.logger?.error(`[${this.label}] ffmpeg: ${s.trim()}`);
|
|
4732
|
-
}
|
|
4733
|
-
});
|
|
4734
|
-
if (!this.proc.stdout) {
|
|
4735
|
-
this.logger?.error(`[${this.label}] ffmpeg stdout not available`);
|
|
4736
|
-
return;
|
|
4737
|
-
}
|
|
4738
|
-
const assembler = new AnnexBAccessUnitAssembler();
|
|
4739
|
-
const startTime = Date.now();
|
|
4740
|
-
this.proc.stdout.on("data", (data) => {
|
|
4741
|
-
if (this.closed) return;
|
|
4742
|
-
const aus = assembler.feed(data);
|
|
4743
|
-
for (const au of aus) {
|
|
4744
|
-
if (au.length < 4) continue;
|
|
4745
|
-
const isKeyframe = isH264IdrAccessUnit(au);
|
|
4746
|
-
const timestampMicros = (Date.now() - startTime) * 1e3;
|
|
4747
|
-
const vf = {
|
|
4748
|
-
data: au,
|
|
4749
|
-
codec: "H264",
|
|
4750
|
-
isKeyframe,
|
|
4751
|
-
timestampMicros
|
|
4752
|
-
};
|
|
4753
|
-
this.pushFrame?.({ type: "video", frame: vf });
|
|
4754
|
-
}
|
|
4755
|
-
});
|
|
4756
|
-
this.proc.stdout.on("end", () => {
|
|
4757
|
-
const remaining = assembler.flush();
|
|
4758
|
-
if (remaining && remaining.length > 4 && this.pushFrame) {
|
|
4759
|
-
const vf = {
|
|
4760
|
-
data: remaining,
|
|
4761
|
-
codec: "H264",
|
|
4762
|
-
isKeyframe: isH264IdrAccessUnit(remaining),
|
|
4763
|
-
timestampMicros: (Date.now() - startTime) * 1e3
|
|
4764
|
-
};
|
|
4765
|
-
this.pushFrame({ type: "video", frame: vf });
|
|
4766
|
-
}
|
|
4767
|
-
});
|
|
4768
|
-
if (this.audioMode !== "off") {
|
|
4769
|
-
const audioArgs = [
|
|
4770
|
-
"-hide_banner",
|
|
4771
|
-
"-loglevel",
|
|
4772
|
-
"error",
|
|
4773
|
-
"-fflags",
|
|
4774
|
-
"+nobuffer+flush_packets",
|
|
4775
|
-
"-rtsp_transport",
|
|
4776
|
-
"tcp",
|
|
4777
|
-
"-analyzeduration",
|
|
4778
|
-
"500000",
|
|
4779
|
-
"-probesize",
|
|
4780
|
-
"500000",
|
|
4781
|
-
"-i",
|
|
4782
|
-
this.rtspUrl,
|
|
4783
|
-
"-vn"
|
|
4784
|
-
];
|
|
4785
|
-
let audioCodecLabel;
|
|
4786
|
-
let frameSize;
|
|
4787
|
-
let sampleRate;
|
|
4788
|
-
let codecName;
|
|
4789
|
-
if (this.audioMode === "opus") {
|
|
4790
|
-
audioArgs.push("-c:a", "libopus", "-ar", "48000", "-ac", "2", "-b:a", "64k", "-f", "ogg", "-");
|
|
4791
|
-
audioCodecLabel = "opus";
|
|
4792
|
-
frameSize = 960;
|
|
4793
|
-
sampleRate = 48e3;
|
|
4794
|
-
codecName = "Opus";
|
|
4795
|
-
audioArgs.length = 0;
|
|
4796
|
-
audioArgs.push(
|
|
4797
|
-
"-hide_banner",
|
|
4798
|
-
"-loglevel",
|
|
4799
|
-
"error",
|
|
4800
|
-
"-rtsp_transport",
|
|
4801
|
-
"tcp",
|
|
4802
|
-
"-i",
|
|
4803
|
-
this.rtspUrl,
|
|
4804
|
-
"-vn",
|
|
4805
|
-
"-c:a",
|
|
4806
|
-
"pcm_mulaw",
|
|
4807
|
-
"-ar",
|
|
4808
|
-
"8000",
|
|
4809
|
-
"-ac",
|
|
4810
|
-
"1",
|
|
4811
|
-
"-f",
|
|
4812
|
-
"mulaw",
|
|
4813
|
-
"-"
|
|
4814
|
-
);
|
|
4815
|
-
audioCodecLabel = "pcmu(opus-fallback)";
|
|
4816
|
-
frameSize = 160;
|
|
4817
|
-
sampleRate = 8e3;
|
|
4818
|
-
codecName = "Pcmu";
|
|
4819
|
-
} else {
|
|
4820
|
-
audioArgs.push("-c:a", "pcm_mulaw", "-ar", "8000", "-ac", "1", "-f", "mulaw", "-");
|
|
4821
|
-
audioCodecLabel = "pcmu";
|
|
4822
|
-
frameSize = 160;
|
|
4823
|
-
sampleRate = 8e3;
|
|
4824
|
-
codecName = "Pcmu";
|
|
4825
|
-
}
|
|
4826
|
-
this.audioProc = (0, import_node_child_process6.spawn)(this.ffmpegPath, audioArgs, {
|
|
4827
|
-
stdio: ["ignore", "pipe", "pipe"]
|
|
4828
|
-
});
|
|
4829
|
-
this.audioProc.on("error", () => {
|
|
4830
|
-
});
|
|
4831
|
-
this.audioProc.on("close", () => {
|
|
4832
|
-
this.audioProc = null;
|
|
4833
|
-
});
|
|
4834
|
-
if (this.audioProc.stdout) {
|
|
4835
|
-
let audioBuf = Buffer.alloc(0);
|
|
4836
|
-
this.audioProc.stdout.on("data", (data) => {
|
|
4837
|
-
if (this.closed || !this.pushFrame) return;
|
|
4838
|
-
audioBuf = audioBuf.length > 0 ? Buffer.concat([audioBuf, data]) : data;
|
|
4839
|
-
while (audioBuf.length >= frameSize) {
|
|
4840
|
-
const audioFrame = audioBuf.subarray(0, frameSize);
|
|
4841
|
-
audioBuf = audioBuf.subarray(frameSize);
|
|
4842
|
-
this.pushFrame({
|
|
4843
|
-
type: "audio",
|
|
4844
|
-
frame: {
|
|
4845
|
-
data: Buffer.from(audioFrame),
|
|
4846
|
-
codec: codecName,
|
|
4847
|
-
sampleRate,
|
|
4848
|
-
channels: 1,
|
|
4849
|
-
timestampMicros: (Date.now() - startTime) * 1e3
|
|
4850
|
-
}
|
|
4851
|
-
});
|
|
4852
|
-
}
|
|
4853
|
-
});
|
|
4854
|
-
}
|
|
4855
|
-
this.logger?.info(
|
|
4856
|
-
`[${this.label}] Started: ${maxBitrateKbps}kbps ` + (width > 0 ? `${width}x${height}` : "native") + ` +audio(${audioCodecLabel})`
|
|
4857
|
-
);
|
|
4858
|
-
} else {
|
|
4859
|
-
this.logger?.info(
|
|
4860
|
-
`[${this.label}] Started: ${maxBitrateKbps}kbps ` + (width > 0 ? `${width}x${height}` : "native") + " (no audio)"
|
|
4861
|
-
);
|
|
4862
|
-
}
|
|
4863
|
-
}
|
|
4864
|
-
async killFfmpeg() {
|
|
4865
|
-
const proc = this.proc;
|
|
4866
|
-
if (proc) {
|
|
4867
|
-
this.proc = null;
|
|
4868
|
-
try {
|
|
4869
|
-
proc.kill("SIGTERM");
|
|
4870
|
-
} catch {
|
|
4871
|
-
}
|
|
4872
|
-
await new Promise((resolve) => {
|
|
4873
|
-
const timer = setTimeout(() => {
|
|
4874
|
-
try {
|
|
4875
|
-
proc.kill("SIGKILL");
|
|
4876
|
-
} catch {
|
|
4877
|
-
}
|
|
4878
|
-
resolve();
|
|
4879
|
-
}, 3e3);
|
|
4880
|
-
proc.on("close", () => {
|
|
4881
|
-
clearTimeout(timer);
|
|
4882
|
-
resolve();
|
|
4883
|
-
});
|
|
4884
|
-
});
|
|
4885
|
-
}
|
|
4886
|
-
const audioProc = this.audioProc;
|
|
4887
|
-
if (audioProc) {
|
|
4888
|
-
this.audioProc = null;
|
|
4889
|
-
try {
|
|
4890
|
-
audioProc.kill("SIGTERM");
|
|
4891
|
-
} catch {
|
|
4892
|
-
}
|
|
4893
|
-
await new Promise((resolve) => {
|
|
4894
|
-
const timer = setTimeout(() => {
|
|
4895
|
-
try {
|
|
4896
|
-
audioProc.kill("SIGKILL");
|
|
4897
|
-
} catch {
|
|
4898
|
-
}
|
|
4899
|
-
resolve();
|
|
4900
|
-
}, 1e3);
|
|
4901
|
-
audioProc.on("close", () => {
|
|
4902
|
-
clearTimeout(timer);
|
|
4903
|
-
resolve();
|
|
4904
|
-
});
|
|
4905
|
-
});
|
|
4906
|
-
}
|
|
4907
|
-
}
|
|
4908
|
-
};
|
|
4909
|
-
|
|
4910
|
-
// src/webrtc/adaptive-controller.ts
|
|
4911
|
-
/**
 * Exponentially weighted moving average over a stream of scalar samples.
 *
 * The first sample seeds the average directly; every later sample is
 * blended in as `alpha * sample + (1 - alpha) * previous`.
 */
var EWMA = class {
  /** Current smoothed value; null until the first sample arrives. */
  value = null;
  /** Smoothing factor in (0, 1]; larger values react faster to new samples. */
  alpha;
  constructor(alpha = 0.3) {
    this.alpha = alpha;
  }
  /**
   * Fold one sample into the average.
   * @returns the new smoothed value.
   */
  update(sample) {
    this.value = this.value === null
      ? sample
      : this.alpha * sample + (1 - this.alpha) * this.value;
    return this.value;
  }
  /** Read the smoothed value; 0 when no sample has been seen yet. */
  get() {
    return this.value ?? 0;
  }
  /** Forget all history; the next sample re-seeds the average. */
  reset() {
    this.value = null;
  }
};
|
|
4932
|
-
/**
 * Packet-loss-driven quality ladder controller.
 *
 * Aggregates smoothed per-session stats and walks up/down the configured
 * profile ladder: sustained loss above `degradeThreshold` steps down one
 * profile, sustained loss below `recoverThreshold` steps back up. A manual
 * override via forceQuality() suspends automatic decisions.
 */
var AdaptiveController = class {
  profiles;
  degradeThreshold;
  recoverThreshold;
  degradeCount;
  recoverCount;
  onQualityChange;
  logger;
  currentIndex;
  consecutiveBad = 0;
  consecutiveGood = 0;
  switching = false;
  /** Smoothed stats per session (aggregated for decisions). */
  sessionStats = /* @__PURE__ */ new Map();
  /** Manual override tier (null = auto). */
  forcedTier = null;
  constructor(options) {
    if (options.profiles.length === 0) {
      throw new Error("At least one quality profile is required");
    }
    this.profiles = options.profiles;
    // Loss fraction above which the link is considered "bad" (default 2%).
    this.degradeThreshold = options.degradeThreshold ?? 0.02;
    // Loss fraction below which the link is considered "good" (default 0.5%).
    this.recoverThreshold = options.recoverThreshold ?? 5e-3;
    this.degradeCount = options.degradeCount ?? 2;
    this.recoverCount = options.recoverCount ?? 3;
    this.onQualityChange = options.onQualityChange;
    this.logger = options.logger;
    // Index 0 is assumed to be the highest-quality profile; start there.
    this.currentIndex = 0;
  }
  /** Get the current quality profile. */
  get currentProfile() {
    return this.profiles[this.currentIndex];
  }
  /** Get the current quality tier. */
  get currentTier() {
    return this.currentProfile.tier;
  }
  /** Get aggregated stats summary (mean of smoothed values over fresh sessions). */
  getAggregatedStats() {
    if (this.sessionStats.size === 0) {
      return { packetLoss: 0, jitterMs: 0, rttMs: 0 };
    }
    let lossSum = 0;
    let jitterSum = 0;
    let rttSum = 0;
    let sampled = 0;
    for (const entry of this.sessionStats.values()) {
      // Ignore sessions that have not reported within the last 30 s.
      if (Date.now() - entry.lastUpdate > 3e4) continue;
      lossSum += entry.loss.get();
      jitterSum += entry.jitter.get();
      rttSum += entry.rtt.get();
      sampled++;
    }
    if (sampled === 0) {
      return { packetLoss: 0, jitterMs: 0, rttMs: 0 };
    }
    return {
      packetLoss: lossSum / sampled,
      jitterMs: jitterSum / sampled,
      rttMs: rttSum / sampled
    };
  }
  /**
   * Report stats from a session (RTCP or client-reported).
   * Call this periodically (e.g. every 3–5 seconds).
   */
  reportStats(sessionId, stats) {
    let entry = this.sessionStats.get(sessionId);
    if (entry === void 0) {
      entry = {
        loss: new EWMA(0.3),
        jitter: new EWMA(0.3),
        rtt: new EWMA(0.3),
        lastUpdate: 0
      };
      this.sessionStats.set(sessionId, entry);
    }
    entry.loss.update(stats.packetLoss);
    entry.jitter.update(stats.jitterMs);
    entry.rtt.update(stats.rttMs);
    entry.lastUpdate = stats.timestamp;
    // Every report is an opportunity to make a ladder decision.
    this.evaluate();
  }
  /** Remove a session's stats (call on session close). */
  removeSession(sessionId) {
    this.sessionStats.delete(sessionId);
  }
  /** Force a specific quality tier (null = auto). */
  forceQuality(tier) {
    this.forcedTier = tier;
    if (tier === null) {
      // Back to automatic mode: discard streaks accumulated while forced.
      this.consecutiveBad = 0;
      this.consecutiveGood = 0;
      return;
    }
    const idx = this.profiles.findIndex((p) => p.tier === tier);
    if (idx >= 0 && idx !== this.currentIndex) {
      void this.switchTo(idx);
    }
  }
  /** Check if auto-adaptation is active (not forced). */
  get isAuto() {
    return this.forcedTier === null;
  }
  // -----------------------------------------------------------------------
  // Private
  // -----------------------------------------------------------------------
  evaluate() {
    if (this.forcedTier !== null || this.switching) return;
    const { packetLoss } = this.getAggregatedStats();
    if (packetLoss > this.degradeThreshold) {
      // Bad sample: reset the good streak, extend the bad streak.
      this.consecutiveGood = 0;
      if (++this.consecutiveBad >= this.degradeCount) {
        this.consecutiveBad = 0;
        this.degrade();
      }
    } else if (packetLoss < this.recoverThreshold) {
      this.consecutiveBad = 0;
      if (++this.consecutiveGood >= this.recoverCount) {
        this.consecutiveGood = 0;
        this.recover();
      }
    } else {
      // Dead zone between the thresholds: hold position, clear both streaks.
      this.consecutiveBad = 0;
      this.consecutiveGood = 0;
    }
  }
  degrade() {
    const lowestIdx = this.profiles.length - 1;
    if (this.currentIndex >= lowestIdx) {
      this.logger?.debug("[adaptive] Already at lowest quality, cannot degrade further");
      return;
    }
    void this.switchTo(this.currentIndex + 1);
  }
  recover() {
    if (this.currentIndex <= 0) {
      this.logger?.debug("[adaptive] Already at highest quality, cannot recover further");
      return;
    }
    void this.switchTo(this.currentIndex - 1);
  }
  async switchTo(newIndex) {
    const outOfRange = newIndex < 0 || newIndex >= this.profiles.length;
    if (this.switching || newIndex === this.currentIndex || outOfRange) return;
    this.switching = true;
    const from = this.profiles[this.currentIndex];
    const to = this.profiles[newIndex];
    this.logger?.info(
      `[adaptive] Quality change: ${from.tier} \u2192 ${to.tier} (${from.encoding.maxBitrateKbps}kbps \u2192 ${to.encoding.maxBitrateKbps}kbps)`
    );
    try {
      // Commit the new index only after the callback has applied the change.
      await this.onQualityChange(from, to);
      this.currentIndex = newIndex;
    } catch (err) {
      this.logger?.error("[adaptive] Quality change failed:", err);
    } finally {
      this.switching = false;
    }
  }
};
|
|
5093
|
-
|
|
5094
|
-
// src/webrtc/session.ts
|
|
5095
|
-
// Module-level cache for the dynamically imported werift module.
var _werift;
/**
 * Lazily load the optional `werift` WebRTC implementation.
 *
 * The import goes through `Function(...)` so bundlers do not statically
 * resolve (and fail on) the optional dependency. The loaded module is
 * cached in `_werift` for subsequent calls.
 *
 * @returns {Promise<object>} the werift module namespace.
 * @throws {Error} when `werift` is not installed; the original import
 *   failure is preserved as `error.cause` for diagnostics.
 */
async function loadWerift() {
  if (_werift) return _werift;
  try {
    const moduleName = "werift";
    _werift = await Function("m", "return import(m)")(moduleName);
    return _werift;
  } catch (err) {
    // Preserve the underlying import error instead of discarding it.
    throw new Error(
      "The 'werift' package is required for WebRTC support but is not installed. Install it with: npm install werift",
      { cause: err }
    );
  }
}
|
|
5108
|
-
/**
 * One WebRTC viewer session fed from an async iterator of media frames.
 *
 * Supports both negotiation directions (server-creates-offer via
 * createOffer()/handleAnswer(), and WHEP client-offer via handleOffer()),
 * packetizes H.264/H.265 access units into RTP (FU-A fragmentation for
 * oversized NALs), and periodically surfaces sender-side RTCP stats via
 * the onStats callback.
 */
var AdaptiveSession = class _AdaptiveSession {
  sessionId;
  // Async iterator yielding { type: "video" | "audio", frame } objects.
  source;
  logger;
  // Optional two-way audio hook; when set, received audio RTP payloads are
  // forwarded to intercom.onAudioReceived.
  intercom;
  iceConfig;
  // Optional callback receiving periodic session stats snapshots.
  onStats;
  debug;
  createdAt;
  // Lifecycle: "new" -> "connecting" -> "connected" -> "disconnected"/"closed".
  state = "new";
  pc = null;
  videoTrack = null;
  audioTrack = null;
  /** Transceiver senders for direct sendRtp (more reliable than track.writeRtp) */
  videoSender = null;
  audioSender = null;
  // AbortController for the active frame-feeding loop; null while idle.
  feedAbort = null;
  closed = false;
  statsTimer = null;
  /** RTP sequence number counter (must increment per packet). */
  videoSeqNum = 0;
  audioSeqNum = 0;
  /** Previous RTCP stats for delta calculation. */
  prevPacketsReceived = 0;
  prevPacketsLost = 0;
  constructor(options) {
    this.sessionId = options.sessionId;
    this.source = options.source;
    this.logger = options.logger;
    this.intercom = options.intercom;
    this.iceConfig = options.iceConfig;
    this.onStats = options.onStats;
    this.debug = options.debug ?? false;
    this.createdAt = Date.now();
  }
  /** Build PeerConnection options including H.264 codec config. */
  async buildPcOptions() {
    const werift = await loadWerift();
    const iceServers = [];
    if (this.iceConfig?.stunServers) {
      for (const url of this.iceConfig.stunServers) iceServers.push({ urls: url });
    }
    if (this.iceConfig?.turnServers) {
      for (const turn of this.iceConfig.turnServers) {
        iceServers.push({ urls: turn.urls, username: turn.username, credential: turn.credential });
      }
    }
    const pcOptions = {
      // H.264 + Opus codecs with RTCP feedback (matching Scrypted's proven config)
      codecs: {
        video: [
          new werift.RTCRtpCodecParameters({
            mimeType: "video/H264",
            clockRate: 9e4,
            payloadType: 96,
            // Baseline profile, packetization-mode=1 (non-interleaved FU-A).
            parameters: "level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42e01f",
            rtcpFeedback: [
              { type: "transport-cc" },
              { type: "ccm", parameter: "fir" },
              { type: "nack" },
              { type: "nack", parameter: "pli" },
              { type: "goog-remb" }
            ]
          })
        ],
        audio: [
          new werift.RTCRtpCodecParameters({
            mimeType: "audio/PCMU",
            clockRate: 8e3,
            payloadType: 0,
            channels: 1,
            parameters: ""
          })
        ]
      }
    };
    if (iceServers.length > 0) pcOptions.iceServers = iceServers;
    if (this.iceConfig?.portRange) pcOptions.icePortRange = this.iceConfig.portRange;
    if (this.iceConfig?.additionalHostAddresses) {
      pcOptions.iceAdditionalHostAddresses = this.iceConfig.additionalHostAddresses;
    }
    return { werift, pcOptions };
  }
  /** Create offer SDP (server → client). */
  async createOffer() {
    const { werift, pcOptions } = await this.buildPcOptions();
    this.pc = new werift.RTCPeerConnection(pcOptions);
    this.pc.iceConnectionStateChange.subscribe((state) => {
      this.logger.debug(`[session:${this.sessionId}] ICE: ${state}`);
      if (state === "connected") {
        this.state = "connected";
        this.startStatsCollection();
      } else if (state === "disconnected" || state === "failed" || state === "closed") {
        this.state = state === "disconnected" ? "disconnected" : "closed";
        void this.close();
      }
    });
    this.videoTrack = new werift.MediaStreamTrack({ kind: "video" });
    const videoTransceiver = this.pc.addTransceiver(this.videoTrack, { direction: "sendonly" });
    this.videoSender = videoTransceiver.sender;
    this.audioTrack = new werift.MediaStreamTrack({ kind: "audio" });
    // Two-way audio requires sendrecv; otherwise we only send.
    const audioDir = this.intercom ? "sendrecv" : "sendonly";
    const audioTransceiver = this.pc.addTransceiver(this.audioTrack, { direction: audioDir });
    this.audioSender = audioTransceiver.sender;
    if (this.intercom) {
      const cb = this.intercom.onAudioReceived;
      audioTransceiver.onTrack.subscribe((track) => {
        track.onReceiveRtp.subscribe((pkt) => {
          try {
            // NOTE(review): payload is labelled "Opus" unconditionally here;
            // confirm the remote audio really negotiates Opus in this path.
            const payload = pkt.payload;
            if (payload?.length > 0) void cb(payload, "Opus");
          } catch (err) {
            this.logger.error(`[session:${this.sessionId}] Intercom error:`, err);
          }
        });
      });
    }
    const offer = await this.pc.createOffer();
    await this.pc.setLocalDescription(offer);
    // Wait for ICE gathering to complete (non-trickle offer), capped at 5 s.
    await new Promise((resolve) => {
      if (this.pc.iceGatheringState === "complete") {
        resolve();
        return;
      }
      this.pc.iceGatheringStateChange.subscribe((state) => {
        if (state === "complete") resolve();
      });
      setTimeout(resolve, 5e3);
    });
    let finalSdp = this.pc.localDescription?.sdp ?? offer.sdp;
    // Rewrite DTLS role: an offer should advertise a=setup:actpass
    // (werift may emit "active"); NOTE(review): see RFC 5763 — confirm.
    finalSdp = finalSdp.replace(/a=setup:active\r?\n/g, "a=setup:actpass\r\n");
    this.state = "connecting";
    this.logger.info(`[session:${this.sessionId}] Offer created`);
    return { sdp: finalSdp, type: "offer" };
  }
  /** Handle WHEP answer: client sends SDP answer, we set remote description and start feeding. */
  async handleAnswer(answer) {
    if (!this.pc) throw new Error("Call createOffer() first");
    const werift = await loadWerift();
    const desc = new werift.RTCSessionDescription(answer.sdp, answer.type);
    await this.pc.setRemoteDescription(desc);
    this.logger.info(`[session:${this.sessionId}] Answer set, feeding started`);
    this.startFeedingFrames();
  }
  /**
   * Handle WHEP offer: client sends SDP offer, we create answer.
   *
   * Uses the server-creates-offer pattern internally: we create our own offer
   * with sendonly tracks, then use the client's offer codecs to build a
   * compatible answer. This avoids werift transceiver direction issues.
   */
  async handleOffer(clientOffer) {
    const { werift, pcOptions } = await this.buildPcOptions();
    this.pc = new werift.RTCPeerConnection(pcOptions);
    this.pc.iceConnectionStateChange.subscribe((state) => {
      this.logger.debug(`[session:${this.sessionId}] ICE: ${state}`);
      if (state === "connected") {
        this.state = "connected";
        this.startStatsCollection();
      } else if (state === "disconnected" || state === "failed" || state === "closed") {
        this.state = state === "disconnected" ? "disconnected" : "closed";
        void this.close();
      }
    });
    const remoteDesc = new werift.RTCSessionDescription(clientOffer.sdp, clientOffer.type);
    await this.pc.setRemoteDescription(remoteDesc);
    // Attach our outgoing tracks onto whatever transceivers the client's
    // offer created (first video + first audio found).
    const transceivers = this.pc.getTransceivers();
    for (const t of transceivers) {
      const kind = t.receiver?.track?.kind ?? t.kind;
      if (kind === "video" && !this.videoTrack) {
        this.videoTrack = new werift.MediaStreamTrack({ kind: "video" });
        await t.sender.replaceTrack(this.videoTrack);
      } else if (kind === "audio" && !this.audioTrack) {
        this.audioTrack = new werift.MediaStreamTrack({ kind: "audio" });
        await t.sender.replaceTrack(this.audioTrack);
      }
    }
    // NOTE(review): this path never assigns this.videoSender/audioSender,
    // so writeVideoNals/writeAudio will no-op unless senders are set
    // elsewhere — confirm against the WHEP flow.
    if (!this.videoTrack) {
      this.logger.warn(`[session:${this.sessionId}] No video transceiver found in offer, adding one`);
      this.videoTrack = new werift.MediaStreamTrack({ kind: "video" });
      this.pc.addTransceiver(this.videoTrack, { direction: "sendonly" });
    }
    if (!this.audioTrack) {
      this.logger.warn(`[session:${this.sessionId}] No audio transceiver found in offer, adding one`);
      this.audioTrack = new werift.MediaStreamTrack({ kind: "audio" });
      this.pc.addTransceiver(this.audioTrack, { direction: "sendonly" });
    }
    const answer = await this.pc.createAnswer();
    await this.pc.setLocalDescription(answer);
    this.state = "connecting";
    this.logger.info(`[session:${this.sessionId}] WHEP answer created`);
    this.startFeedingFrames();
    return { sdp: answer.sdp, type: "answer" };
  }
  /** Add ICE candidate. */
  async addIceCandidate(candidate) {
    if (!this.pc) throw new Error("Call createOffer() first");
    const werift = await loadWerift();
    await this.pc.addIceCandidate(new werift.RTCIceCandidate(candidate));
  }
  /**
   * Detach the frame source (for connection pooling).
   * The session stays alive (ICE/DTLS connected) but stops feeding frames.
   * Call replaceSource() later to reattach a camera.
   */
  detachSource() {
    if (this.feedAbort) {
      this.feedAbort.abort();
      this.feedAbort = null;
    }
    this.logger.debug(`[session:${this.sessionId}] Source detached (idle)`);
  }
  /** Whether the session has an active feed (vs idle/pooled). */
  get isFeeding() {
    return this.feedAbort !== null && !this.feedAbort.signal.aborted;
  }
  /**
   * Replace the frame source (for seamless source switching).
   * The new source will take effect at the next keyframe.
   */
  replaceSource(newSource) {
    this.source = newSource;
    if (this.feedAbort) {
      this.feedAbort.abort();
      this.feedAbort = null;
    }
    this.startFeedingFrames();
  }
  /** Lightweight session descriptor for listings/diagnostics. */
  getInfo() {
    return { sessionId: this.sessionId, state: this.state, createdAt: this.createdAt };
  }
  /**
   * Tear the session down: stop stats + feeding, finalize the source
   * iterator, and close the peer connection. Idempotent.
   */
  async close() {
    if (this.closed) return;
    this.closed = true;
    this.state = "closed";
    this.logger.info(`[session:${this.sessionId}] Closing`);
    if (this.statsTimer) {
      clearInterval(this.statsTimer);
      this.statsTimer = null;
    }
    if (this.feedAbort) {
      this.feedAbort.abort();
      this.feedAbort = null;
    }
    try {
      // Let the async generator run its cleanup (finally blocks).
      await this.source.return(void 0);
    } catch {
    }
    if (this.pc) {
      try {
        await this.pc.close();
      } catch {
      }
      this.pc = null;
    }
    this.videoTrack = null;
    this.audioTrack = null;
  }
  // -----------------------------------------------------------------------
  // Frame feeding
  // -----------------------------------------------------------------------
  /**
   * Start the background loop that pulls frames from `this.source` and
   * writes them out as RTP. Video is held back until the first keyframe;
   * the loop exits on abort, close, or source exhaustion, and closes the
   * session when it ends on its own.
   */
  startFeedingFrames() {
    this.feedAbort = new AbortController();
    const { signal } = this.feedAbort;
    void (async () => {
      let gotKeyframe = false;
      let videoTimestampBase = null;
      let audioTimestampBase = null;
      let frameCount = 0;
      try {
        for await (const mediaFrame of this.source) {
          if (signal.aborted || this.closed) break;
          frameCount++;
          if (this.debug && (frameCount <= 5 || frameCount % 100 === 0)) {
            this.logger.debug(
              `[session:${this.sessionId}] Frame #${frameCount}: ${mediaFrame.type} size=${mediaFrame.frame.data.length} ` + (mediaFrame.type === "video" ? `key=${mediaFrame.frame.isKeyframe}` : "")
            );
          }
          if (mediaFrame.type === "video") {
            const frame = mediaFrame.frame;
            const annexB = frame.codec === "H264" ? convertH264ToAnnexB(frame.data) : convertH265ToAnnexB(frame.data);
            if (!gotKeyframe) {
              // Drop everything until the first IDR/IRAP so the decoder
              // never starts on a delta frame.
              const isKey = frame.codec === "H264" ? isH264IdrAccessUnit(annexB) : isH265IrapAccessUnit(annexB);
              if (!isKey) continue;
              gotKeyframe = true;
              if (this.debug) {
                const iceState = this.pc?.iceConnectionState ?? "unknown";
                const connState = this.pc?.connectionState ?? "unknown";
                this.logger.info(
                  `[session:${this.sessionId}] First keyframe at frame #${frameCount}, size=${annexB.length}, ICE=${iceState}, conn=${connState}`
                );
              }
            }
            // Video RTP timestamps: 90 kHz clock, relative to the first frame.
            if (videoTimestampBase === null) videoTimestampBase = frame.timestampMicros;
            const rtpTs = Math.floor(
              (frame.timestampMicros - videoTimestampBase) * 9e4 / 1e6
            ) >>> 0;
            // Strip AUD (9) and SEI (6) NALs; only VCL/parameter sets go out.
            const nals = splitAnnexBToNals(annexB).filter((n) => {
              const t = n[0] & 31;
              return t !== 9 && t !== 6;
            });
            if (nals.length > 0 && this.videoTrack) {
              this.writeVideoNals(nals, rtpTs, frame.codec);
              if (this.debug && frameCount % 250 === 0) {
                this.logger.info(
                  `[session:${this.sessionId}] ${frameCount} frames, ${this.rtpPacketsSent} RTP pkts, ICE=${this.pc?.iceConnectionState}, conn=${this.pc?.connectionState}`
                );
              }
            }
          } else if (mediaFrame.type === "audio") {
            const frame = mediaFrame.frame;
            if (!this.audioSender) continue;
            // Audio RTP timestamp advances by the byte count of each frame;
            // NOTE(review): correct only for 8-bit mono codecs at the
            // negotiated clock (PCMU @8 kHz = 1 byte/sample) — confirm for
            // other codecs.
            if (audioTimestampBase === null) audioTimestampBase = 0;
            audioTimestampBase = audioTimestampBase + frame.data.length >>> 0;
            const rtpTs = audioTimestampBase;
            this.writeAudio(frame.data, rtpTs, frame.codec);
          }
        }
      } catch (err) {
        if (!signal.aborted && !this.closed) {
          this.logger.error(`[session:${this.sessionId}] Feed error:`, err);
        }
      } finally {
        if (!this.closed) {
          this.logger.info(`[session:${this.sessionId}] Feed ended`);
          void this.close();
        }
      }
    })();
  }
  /** Build a serialized RTP packet for sender.sendRtp(). */
  buildRtpBuffer(weriftModule, payload, rtpTs, payloadType, marker, isVideo) {
    const header = new weriftModule.RtpHeader();
    header.payloadType = payloadType;
    header.timestamp = rtpTs;
    header.marker = marker;
    // Per-stream 16-bit sequence number, incremented before use and
    // wrapped at 65535.
    header.sequenceNumber = isVideo ? this.videoSeqNum = this.videoSeqNum + 1 & 65535 : this.audioSeqNum = this.audioSeqNum + 1 & 65535;
    const pkt = new weriftModule.RtpPacket(header, payload);
    return pkt.serialize();
  }
  /** Max RTP payload size (MTU 1200 to stay under typical network MTU). */
  static MAX_RTP_PAYLOAD = 1200;
  // Total RTP packets written so far (used for debug logs and error caps).
  rtpPacketsSent = 0;
  /**
   * Packetize one access unit's NALs into RTP. NALs that fit within
   * MAX_RTP_PAYLOAD go out as single packets; larger ones are fragmented
   * FU-A style (indicator type 28, start/end bits in the FU header).
   * The RTP marker bit is set only on the last packet of the access unit.
   */
  writeVideoNals(nals, rtpTs, codec) {
    if (!this.videoSender || !_werift) return;
    // Payload types must match buildPcOptions (96 = H264).
    // NOTE(review): 97 for H265 is not declared in buildPcOptions — confirm.
    const pt = codec === "H264" ? 96 : 97;
    const sendPkt = (payload, marker) => {
      try {
        const buf = this.buildRtpBuffer(_werift, payload, rtpTs, pt, marker, true);
        this.videoSender.sendRtp(buf);
        this.rtpPacketsSent++;
      } catch (err) {
        // Cap error logging to the first few packets to avoid log spam.
        if (this.rtpPacketsSent <= 10) {
          this.logger.error(`[session:${this.sessionId}] sendRtp error #${this.rtpPacketsSent}:`, err);
        }
      }
    };
    for (let i = 0; i < nals.length; i++) {
      const nal = nals[i];
      const isLastNal = i === nals.length - 1;
      if (nal.length <= _AdaptiveSession.MAX_RTP_PAYLOAD) {
        sendPkt(nal, isLastNal);
      } else {
        // FU-A fragmentation: keep the original F/NRI bits, type = 28.
        const nalHeader = nal[0];
        const fnri = nalHeader & 224;
        const nalType = nalHeader & 31;
        const fuIndicator = fnri | 28;
        const nalBody = nal.subarray(1);
        let offset = 0;
        let isFirst = true;
        while (offset < nalBody.length) {
          // 2 bytes are reserved for the FU indicator + FU header.
          const end = Math.min(offset + _AdaptiveSession.MAX_RTP_PAYLOAD - 2, nalBody.length);
          const isLast = end >= nalBody.length;
          let fuHeader = nalType;
          if (isFirst) fuHeader |= 128;
          if (isLast) fuHeader |= 64;
          const fragment = Buffer.alloc(2 + (end - offset));
          fragment[0] = fuIndicator;
          fragment[1] = fuHeader;
          nalBody.copy(fragment, 2, offset, end);
          sendPkt(fragment, isLastNal && isLast);
          offset = end;
          isFirst = false;
        }
      }
    }
  }
  /** Send one audio frame as a single RTP packet (marker always set). */
  writeAudio(data, rtpTs, codec) {
    if (!this.audioSender || !_werift) return;
    // NOTE(review): PCMA is mapped to payload type 0 here, but the static
    // RTP assignment for PCMA is 8 (0 is PCMU); only PCMU is negotiated in
    // buildPcOptions, so this may be intentional — confirm.
    const pt = codec === "Pcmu" || codec === "Pcma" ? 0 : 111;
    try {
      const buf = this.buildRtpBuffer(_werift, data, rtpTs, pt, true, false);
      this.audioSender.sendRtp(buf);
    } catch (err) {
      this.logger.debug(`[session:${this.sessionId}] Audio write error:`, err);
    }
  }
  // -----------------------------------------------------------------------
  // RTCP stats collection
  // -----------------------------------------------------------------------
  /** Begin polling sender-side RTCP reports every 3 s (no-op without onStats). */
  startStatsCollection() {
    if (this.statsTimer || !this.onStats) return;
    this.statsTimer = setInterval(() => {
      if (!this.pc || this.closed) return;
      this.collectStats();
    }, 3e3);
  }
  /**
   * Read the most recent receiver report off the first video sender and
   * forward a normalized snapshot to onStats. Any werift API mismatch is
   * swallowed (best-effort telemetry).
   */
  collectStats() {
    if (!this.pc || !this.onStats) return;
    try {
      const senders = this.pc.getSenders?.() ?? [];
      for (const sender of senders) {
        const track = sender.track;
        if (!track || track.kind !== "video") continue;
        // NOTE(review): lastReceiverReport/rtcpReport field names depend on
        // the werift version — confirm against the pinned dependency.
        const report = sender.lastReceiverReport ?? sender.rtcpReport;
        if (!report) continue;
        const fractionLost = report.fractionLost ?? 0;
        const packetsLost = report.packetsLost ?? report.cumulativeLost ?? 0;
        const jitter = report.jitter ?? 0;
        const rtt = report.roundTripTime ?? report.rtt ?? 0;
        // fractionLost is an 8-bit fixed-point fraction (units of 1/256).
        const packetLoss = fractionLost / 256;
        this.onStats({
          sessionId: this.sessionId,
          packetLoss,
          jitterMs: jitter,
          rttMs: rtt * 1e3,
          // seconds → ms
          packetsReceived: 0,
          // Not available from sender side
          packetsLost,
          timestamp: Date.now()
        });
        return;
      }
    } catch {
    }
  }
};
|
|
5546
|
-
|
|
5547
|
-
// src/webrtc/rtsp-relay.ts
|
|
5548
|
-
var import_node_child_process7 = require("child_process");
|
|
5549
|
-
var AdaptiveRtspRelay = class {
  // Source RTSP URL to pull from and destination RTSP URL to publish to.
  rtspUrl;
  rtspOutputUrl;
  // ffmpeg binary path (defaults to "ffmpeg" on PATH).
  ffmpegPath;
  logger;
  // Prefix used in all log lines for this relay.
  label;
  // Current encoding parameters: { maxBitrateKbps, width, height, preset? }.
  currentParams;
  // Running ffmpeg child process, or null when not running.
  proc = null;
  closed = false;
  /**
   * @param {object} options
   * @param {string} options.rtspUrl          Source RTSP URL.
   * @param {string} options.rtspOutputUrl    Destination RTSP URL.
   * @param {string} [options.ffmpegPath]     ffmpeg binary (default "ffmpeg").
   * @param {object} [options.logger]         Optional logger (info/debug/error).
   * @param {string} [options.label]          Log prefix (default "adaptive-rtsp").
   * @param {object} options.initialParams    Initial encoding parameters.
   */
  constructor(options) {
    this.rtspUrl = options.rtspUrl;
    this.rtspOutputUrl = options.rtspOutputUrl;
    this.ffmpegPath = options.ffmpegPath ?? "ffmpeg";
    this.logger = options.logger;
    this.label = options.label ?? "adaptive-rtsp";
    // Copy so later updateParams() calls never mutate the caller's object.
    this.currentParams = { ...options.initialParams };
  }
  /** @returns a defensive copy of the current encoding parameters. */
  getParams() {
    return { ...this.currentParams };
  }
  /** Start the ffmpeg relay. */
  start() {
    if (this.closed) return;
    this.spawnFfmpeg();
  }
  /**
   * Hot-swap encoding parameters by restarting ffmpeg.
   * Only restarts when at least one effective parameter actually changed.
   * @param {object} params Partial params; omitted fields keep their value.
   */
  async updateParams(params) {
    const prev = { ...this.currentParams };
    if (params.maxBitrateKbps !== void 0) this.currentParams.maxBitrateKbps = params.maxBitrateKbps;
    if (params.width !== void 0) this.currentParams.width = params.width;
    if (params.height !== void 0) this.currentParams.height = params.height;
    if (params.preset !== void 0) this.currentParams.preset = params.preset;
    // FIX: `preset` is now part of the no-change comparison. Previously a
    // preset-only update mutated the state but skipped the restart, so the
    // new preset never reached ffmpeg until another parameter changed.
    if (
      prev.maxBitrateKbps === this.currentParams.maxBitrateKbps &&
      prev.width === this.currentParams.width &&
      prev.height === this.currentParams.height &&
      prev.preset === this.currentParams.preset
    ) {
      return;
    }
    this.logger?.info(
      `[${this.label}] Updating: ${prev.maxBitrateKbps}kbps ${prev.width}x${prev.height} \u2192 ${this.currentParams.maxBitrateKbps}kbps ${this.currentParams.width}x${this.currentParams.height}`
    );
    await this.killFfmpeg();
    if (!this.closed) this.spawnFfmpeg();
  }
  /** Stop the relay permanently (idempotent). */
  async stop() {
    if (this.closed) return;
    this.closed = true;
    await this.killFfmpeg();
  }
  /** @returns true while an ffmpeg process is running. */
  isRunning() {
    return this.proc !== null;
  }
  // -----------------------------------------------------------------------
  // Private
  // -----------------------------------------------------------------------
  /**
   * Spawn the ffmpeg transcode process with the current parameters.
   * On unexpected exit it auto-respawns after 2 s unless closed.
   */
  spawnFfmpeg() {
    const { maxBitrateKbps, width, height, preset } = this.currentParams;
    const args = [
      "-hide_banner",
      "-loglevel",
      "error",
      // Input
      "-rtsp_transport",
      "tcp",
      "-i",
      this.rtspUrl,
      // Video encoding: low-latency x264 capped-CRF with VBV rate control.
      "-c:v",
      "libx264",
      "-preset",
      preset ?? "ultrafast",
      "-tune",
      "zerolatency",
      "-crf",
      "28",
      "-maxrate",
      `${maxBitrateKbps}k`,
      // Half-bitrate VBV buffer keeps the encoder's latency contribution low.
      "-bufsize",
      `${Math.round(maxBitrateKbps * 0.5)}k`,
      "-g",
      "50",
      "-keyint_min",
      "25"
    ];
    // width/height of 0 means "keep the source (native) resolution".
    if (width > 0 && height > 0) {
      args.push("-vf", `scale=${width}:${height}`);
    }
    args.push("-c:a", "aac", "-b:a", "64k");
    args.push(
      "-f",
      "rtsp",
      "-rtsp_transport",
      "tcp",
      this.rtspOutputUrl
    );
    this.proc = (0, import_node_child_process7.spawn)(this.ffmpegPath, args, {
      stdio: ["ignore", "ignore", "pipe"]
    });
    this.proc.on("error", (err) => {
      this.logger?.error(`[${this.label}] ffmpeg spawn error: ${err.message}`);
    });
    this.proc.on("close", (code, signal) => {
      this.logger?.debug(`[${this.label}] ffmpeg exited code=${code} signal=${signal}`);
      this.proc = null;
      // Auto-restart after a short backoff unless we were stopped on purpose.
      if (!this.closed) {
        setTimeout(() => {
          if (!this.closed) this.spawnFfmpeg();
        }, 2e3);
      }
    });
    // Only surface stderr lines that look like real errors.
    this.proc.stderr?.on("data", (data) => {
      const s = data.toString();
      if (s.includes("error") || s.includes("Error") || s.includes("fatal")) {
        this.logger?.error(`[${this.label}] ffmpeg: ${s.trim()}`);
      }
    });
    this.logger?.info(
      `[${this.label}] Started: ${maxBitrateKbps}kbps ` + (width > 0 ? `${width}x${height}` : "native") + ` \u2192 ${this.rtspOutputUrl}`
    );
  }
  /**
   * Terminate the current ffmpeg process: SIGTERM first, escalate to
   * SIGKILL after 3 s. Clears this.proc up-front so the "close" handler's
   * auto-restart logic sees it as intentionally stopped.
   */
  async killFfmpeg() {
    const proc = this.proc;
    if (!proc) return;
    this.proc = null;
    try {
      proc.kill("SIGTERM");
    } catch {
    }
    await new Promise((resolve) => {
      const timer = setTimeout(() => {
        try {
          proc.kill("SIGKILL");
        } catch {
        }
        resolve();
      }, 3e3);
      proc.on("close", () => {
        clearTimeout(timer);
        resolve();
      });
    });
  }
};
|
|
5689
|
-
|
|
5690
|
-
// src/webrtc/shared-session.ts
|
|
5691
|
-
// Module-level cache for the lazily-loaded werift module.
var _werift2;
/**
 * Load the "werift" package once and cache it for subsequent calls.
 * The indirect `Function("m", "return import(m)")` keeps bundlers from
 * statically resolving (and eagerly bundling) the dependency.
 */
async function loadWerift2() {
  if (!_werift2) {
    const dynamicImport = Function("m", "return import(m)");
    _werift2 = await dynamicImport("werift");
  }
  return _werift2;
}
|
|
5698
|
-
var SharedSession = class {
  /**
   * A single werift peer connection shared by many camera tracks.
   * Tracks are added/removed at runtime via a "control" data channel,
   * each addition/removal triggering an SDP renegotiation over that channel.
   *
   * @param {object} options
   * @param {object} options.logger             Logger (debug/info/warn/error).
   * @param {object} [options.iceConfig]        { stunServers?, turnServers?, portRange?, additionalHostAddresses? }.
   * @param {Function} options.onTrackRequested (cameraName) => async-iterable media-frame source, or falsy if unknown.
   * @param {Function} [options.onTrackReleased] (cameraName) => void, called when a track is removed/closed.
   * @param {Function} [options.onIntercomAudio] (cameraName, payload) => void; when set, audio is "sendrecv" and
   *                                             inbound RTP payloads are forwarded here (talk-back audio).
   */
  constructor(options) {
    this.options = options;
    this.logger = options.logger;
    this.iceConfig = options.iceConfig;
    this.onTrackRequested = options.onTrackRequested;
    this.onTrackReleased = options.onTrackReleased;
  }
  logger;
  iceConfig;
  onTrackRequested;
  onTrackReleased;
  // werift RTCPeerConnection; created in createOffer(), null before/after.
  pc = null;
  // "control" data channel used for add/remove-track signaling.
  dataChannel = null;
  // trackId → { trackId, cameraName, videoSender, audioSender, feedAbort, videoSeqNum, audioSeqNum }
  activeTracks = /* @__PURE__ */ new Map();
  closed = false;
  // True while a renegotiation offer is outstanding; cleared by the client's answer.
  negotiating = false;
  /** Create the initial SDP offer (with data channel, no media tracks yet). */
  async createOffer() {
    const werift = await loadWerift2();
    // Assemble ICE servers from config (STUN URLs + TURN credentials).
    const iceServers = [];
    if (this.iceConfig?.stunServers) {
      for (const url of this.iceConfig.stunServers) iceServers.push({ urls: url });
    }
    if (this.iceConfig?.turnServers) {
      for (const t of this.iceConfig.turnServers) {
        iceServers.push({ urls: t.urls, username: t.username, credential: t.credential });
      }
    }
    // Fixed codec offer: H.264 (PT 96, baseline 42e01f) + Opus (PT 111, stereo).
    const pcOptions = {
      codecs: {
        video: [
          new werift.RTCRtpCodecParameters({
            mimeType: "video/H264",
            clockRate: 9e4,
            payloadType: 96,
            parameters: "level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42e01f",
            rtcpFeedback: [
              { type: "transport-cc" },
              { type: "ccm", parameter: "fir" },
              { type: "nack" },
              { type: "nack", parameter: "pli" },
              { type: "goog-remb" }
            ]
          })
        ],
        audio: [
          new werift.RTCRtpCodecParameters({
            mimeType: "audio/opus",
            clockRate: 48e3,
            payloadType: 111,
            channels: 2,
            parameters: "minptime=10;useinbandfec=1"
          })
        ]
      }
    };
    if (iceServers.length > 0) pcOptions.iceServers = iceServers;
    if (this.iceConfig?.portRange) pcOptions.icePortRange = this.iceConfig.portRange;
    if (this.iceConfig?.additionalHostAddresses) {
      pcOptions.iceAdditionalHostAddresses = this.iceConfig.additionalHostAddresses;
    }
    this.pc = new werift.RTCPeerConnection(pcOptions);
    this.pc.iceConnectionStateChange.subscribe((state) => {
      this.logger.debug(`[shared] ICE: ${state}`);
    });
    // Control channel: JSON messages drive addTrack/removeTrack/answer below.
    this.dataChannel = this.pc.createDataChannel("control", { ordered: true });
    this.dataChannel.message.subscribe((msg) => {
      try {
        const data = JSON.parse(typeof msg === "string" ? msg : msg.toString());
        this.handleDataChannelMessage(data);
      } catch (err) {
        this.logger.error("[shared] DC message parse error:", err);
      }
    });
    const offer = await this.pc.createOffer();
    await this.pc.setLocalDescription(offer);
    // Wait for ICE gathering to finish so the SDP carries candidates,
    // but give up after 5 s and send whatever we have (trickle fallback).
    await new Promise((resolve) => {
      if (this.pc.iceGatheringState === "complete") {
        resolve();
        return;
      }
      this.pc.iceGatheringStateChange.subscribe((state) => {
        if (state === "complete") resolve();
      });
      setTimeout(resolve, 5e3);
    });
    const sdp = this.pc.localDescription?.sdp ?? offer.sdp;
    this.logger.info("[shared] Initial offer created (data channel only)");
    return sdp;
  }
  /** Handle the client's SDP answer. */
  async handleAnswer(sdpAnswer) {
    const werift = await loadWerift2();
    const desc = new werift.RTCSessionDescription(sdpAnswer, "answer");
    await this.pc.setRemoteDescription(desc);
    this.logger.info("[shared] Answer set, connection ready");
  }
  /** Close the shared session and all tracks (idempotent). */
  async close() {
    if (this.closed) return;
    this.closed = true;
    // Stop every feed loop and notify the owner that its camera is free.
    for (const [, track] of this.activeTracks) {
      track.feedAbort.abort();
      this.onTrackReleased?.(track.cameraName);
    }
    this.activeTracks.clear();
    if (this.pc) {
      try {
        await this.pc.close();
      } catch {
      }
      this.pc = null;
    }
    this.logger.info("[shared] Session closed");
  }
  // -----------------------------------------------------------------------
  // Data channel message handling
  // -----------------------------------------------------------------------
  /**
   * Dispatch one parsed control message. Failures are reported back to the
   * client as { type: "error" } instead of tearing down the session.
   */
  async handleDataChannelMessage(msg) {
    try {
      switch (msg.type) {
        case "addTrack":
          await this.handleAddTrack(msg.cameraName, msg.trackId);
          break;
        case "removeTrack":
          await this.handleRemoveTrack(msg.trackId);
          break;
        case "answer":
          await this.handleRenegotiationAnswer(msg.sdp);
          break;
        default:
          this.logger.warn("[shared] Unknown DC message type:", msg.type);
      }
    } catch (err) {
      this.logger.error("[shared] DC handler error:", err);
      this.sendDC({ type: "error", message: err.message });
    }
  }
  /**
   * Add a sendonly video + audio transceiver pair for a camera, renegotiate,
   * then start feeding frames from the camera source into the senders.
   * Replies with { type: "trackReady", trackId, videoMid, audioMid } on success.
   */
  async handleAddTrack(cameraName, trackId) {
    if (this.activeTracks.has(trackId)) {
      this.sendDC({ type: "error", message: `Track ${trackId} already exists` });
      return;
    }
    const werift = await loadWerift2();
    const videoTrack = new werift.MediaStreamTrack({ kind: "video" });
    const videoTransceiver = this.pc.addTransceiver(videoTrack, { direction: "sendonly" });
    const audioTrack = new werift.MediaStreamTrack({ kind: "audio" });
    // Audio becomes bidirectional only when an intercom callback is configured.
    const audioDirection = this.options.onIntercomAudio ? "sendrecv" : "sendonly";
    const audioTransceiver = this.pc.addTransceiver(audioTrack, { direction: audioDirection });
    if (this.options.onIntercomAudio) {
      const intercomCb = this.options.onIntercomAudio;
      audioTransceiver.onTrack.subscribe((incomingTrack) => {
        incomingTrack.onReceiveRtp.subscribe((rtpPacket) => {
          const payload = rtpPacket.payload;
          if (payload?.length > 0) {
            intercomCb(cameraName, payload);
          }
        });
      });
    }
    // Renegotiate BEFORE reading mids — werift assigns them during the offer.
    await this.renegotiate();
    const videoMid = videoTransceiver.mid;
    const audioMid = audioTransceiver.mid;
    const source = this.onTrackRequested(cameraName);
    if (!source) {
      this.sendDC({ type: "error", message: `Camera not found: ${cameraName}` });
      return;
    }
    const feedAbort = new AbortController();
    const activeTrack = {
      trackId,
      cameraName,
      videoSender: videoTransceiver.sender,
      audioSender: audioTransceiver.sender,
      feedAbort,
      videoSeqNum: 0,
      audioSeqNum: 0
    };
    this.activeTracks.set(trackId, activeTrack);
    this.sendDC({ type: "trackReady", trackId, videoMid, audioMid });
    this.startFeeding(activeTrack, source);
    this.logger.info(`[shared] Track "${trackId}" added for camera "${cameraName}" (video=${videoMid}, audio=${audioMid})`);
  }
  /** Stop feeding a track, release the camera, and renegotiate it away. */
  async handleRemoveTrack(trackId) {
    const track = this.activeTracks.get(trackId);
    if (!track) return;
    track.feedAbort.abort();
    this.activeTracks.delete(trackId);
    this.onTrackReleased?.(track.cameraName);
    await this.renegotiate();
    this.sendDC({ type: "trackRemoved", trackId });
    this.logger.info(`[shared] Track "${trackId}" removed`);
  }
  /** Apply the client's answer to a mid-session renegotiation offer. */
  async handleRenegotiationAnswer(sdp) {
    const werift = await loadWerift2();
    const desc = new werift.RTCSessionDescription(sdp, "answer");
    await this.pc.setRemoteDescription(desc);
    // Unblocks the poll loop in renegotiate().
    this.negotiating = false;
    this.logger.debug("[shared] Renegotiation answer set");
  }
  // -----------------------------------------------------------------------
  // SDP renegotiation
  // -----------------------------------------------------------------------
  /**
   * Create a fresh offer, send it to the client over the data channel, and
   * wait (poll every 50 ms, 10 s cap) for handleRenegotiationAnswer() to
   * clear the `negotiating` flag. ICE gathering is capped at 3 s.
   */
  async renegotiate() {
    if (!this.pc || !this.dataChannel) return;
    this.negotiating = true;
    const offer = await this.pc.createOffer();
    await this.pc.setLocalDescription(offer);
    await new Promise((resolve) => {
      if (this.pc.iceGatheringState === "complete") {
        resolve();
        return;
      }
      this.pc.iceGatheringStateChange.subscribe((state) => {
        if (state === "complete") resolve();
      });
      setTimeout(resolve, 3e3);
    });
    const sdp = this.pc.localDescription?.sdp ?? offer.sdp;
    this.sendDC({ type: "offer", sdp });
    await new Promise((resolve) => {
      const check = setInterval(() => {
        if (!this.negotiating) {
          clearInterval(check);
          resolve();
        }
      }, 50);
      setTimeout(() => {
        clearInterval(check);
        resolve();
      }, 1e4);
    });
  }
  // -----------------------------------------------------------------------
  // Frame feeding
  // -----------------------------------------------------------------------
  /**
   * Pump media frames from `source` into the track's RTP senders until the
   * track's AbortController fires or the session closes. Runs as a detached
   * async loop; errors are logged unless caused by the abort itself.
   *
   * Video: waits for the first IDR access unit, strips AUD (type 9) and SEI
   * (type 6) NALs, sends NALs <= 1200 bytes as single RTP packets and larger
   * ones as RFC 6184 FU-A fragments. Audio: one RTP packet per frame, PT 111.
   */
  startFeeding(track, source) {
    const { signal } = track.feedAbort;
    // werift must already be cached by an earlier loadWerift2() call.
    const werift = _werift2;
    if (!werift) return;
    void (async () => {
      let gotKeyframe = false;
      // First frame's timestampMicros becomes RTP timestamp zero per kind.
      let videoTimestampBase = null;
      let audioTimestampBase = null;
      try {
        for await (const mediaFrame of source) {
          if (signal.aborted || this.closed) break;
          if (mediaFrame.type === "audio") {
            const frame2 = mediaFrame.frame;
            if (audioTimestampBase === null) audioTimestampBase = frame2.timestampMicros;
            // Convert elapsed microseconds to audio-clock ticks (default 48 kHz).
            const rtpTs2 = Math.floor(
              (frame2.timestampMicros - audioTimestampBase) * (frame2.sampleRate || 48e3) / 1e6
            ) >>> 0;
            track.audioSeqNum = track.audioSeqNum + 1 & 65535;
            const header = new werift.RtpHeader();
            header.payloadType = 111;
            header.timestamp = rtpTs2;
            header.marker = true;
            header.sequenceNumber = track.audioSeqNum;
            const pkt = new werift.RtpPacket(header, frame2.data);
            try {
              track.audioSender.sendRtp(pkt.serialize());
            } catch {
            }
            continue;
          }
          if (mediaFrame.type !== "video") continue;
          const frame = mediaFrame.frame;
          const annexB = convertH264ToAnnexB(frame.data);
          // Decoders need an IDR first; drop everything until one arrives.
          if (!gotKeyframe) {
            if (!isH264IdrAccessUnit(annexB)) continue;
            gotKeyframe = true;
          }
          if (videoTimestampBase === null) videoTimestampBase = frame.timestampMicros;
          // 90 kHz video clock.
          const rtpTs = Math.floor(
            (frame.timestampMicros - videoTimestampBase) * 9e4 / 1e6
          ) >>> 0;
          // Drop AUD (9) and SEI (6) NALs — not needed on the wire.
          const nals = splitAnnexBToNals(annexB).filter((n) => {
            const t = n[0] & 31;
            return t !== 9 && t !== 6;
          });
          for (let i = 0; i < nals.length; i++) {
            const nal = nals[i];
            const isLastNal = i === nals.length - 1;
            if (nal.length <= 1200) {
              // Single NAL unit packet; marker set on the access unit's last NAL.
              track.videoSeqNum = track.videoSeqNum + 1 & 65535;
              const header = new werift.RtpHeader();
              header.payloadType = 96;
              header.timestamp = rtpTs;
              header.marker = isLastNal;
              header.sequenceNumber = track.videoSeqNum;
              const pkt = new werift.RtpPacket(header, nal);
              try {
                track.videoSender.sendRtp(pkt.serialize());
              } catch {
              }
            } else {
              // FU-A fragmentation (RFC 6184 §5.8): 1198-byte fragments +
              // 2-byte FU indicator/header ≈ 1200-byte payloads.
              const nalHeader = nal[0];
              const fnri = nalHeader & 224;
              const nalType = nalHeader & 31;
              const fuIndicator = fnri | 28;
              const nalBody = nal.subarray(1);
              let offset = 0;
              let isFirst = true;
              while (offset < nalBody.length) {
                const end = Math.min(offset + 1198, nalBody.length);
                const isLast = end >= nalBody.length;
                // FU header: S bit (0x80) on first fragment, E bit (0x40) on last.
                let fuHeader = nalType;
                if (isFirst) fuHeader |= 128;
                if (isLast) fuHeader |= 64;
                const frag = Buffer.alloc(2 + (end - offset));
                frag[0] = fuIndicator;
                frag[1] = fuHeader;
                nalBody.copy(frag, 2, offset, end);
                track.videoSeqNum = track.videoSeqNum + 1 & 65535;
                const header = new werift.RtpHeader();
                header.payloadType = 96;
                header.timestamp = rtpTs;
                header.marker = isLastNal && isLast;
                header.sequenceNumber = track.videoSeqNum;
                const pkt = new werift.RtpPacket(header, frag);
                try {
                  track.videoSender.sendRtp(pkt.serialize());
                } catch {
                }
                offset = end;
                isFirst = false;
              }
            }
          }
        }
      } catch (err) {
        if (!signal.aborted) {
          this.logger.error(`[shared] Feed error for track "${track.trackId}":`, err);
        }
      }
    })();
  }
  // -----------------------------------------------------------------------
  // Helpers
  // -----------------------------------------------------------------------
  /** Send a JSON message on the control channel (dropped if not open). */
  sendDC(msg) {
    if (this.dataChannel?.readyState === "open") {
      this.dataChannel.send(JSON.stringify(msg));
    }
  }
  /** True while ICE is connected and the session has not been closed. */
  get isConnected() {
    return this.pc?.iceConnectionState === "connected" && !this.closed;
  }
  /** Number of currently active tracks. */
  get trackCount() {
    return this.activeTracks.size;
  }
};
|
|
6052
|
-
|
|
6053
|
-
// src/webrtc/server.ts
|
|
6054
|
-
var import_node_crypto5 = __toESM(require("crypto"), 1);
|
|
6055
|
-
var import_node_events = require("events");
|
|
6056
|
-
/**
 * Build the default adaptive-quality ladder: three tiers from native-resolution
 * high quality down to a 360p low tier fed from the camera's sub stream.
 * width/height of 0 means "keep the source's native resolution".
 */
function createDefaultProfiles() {
  const profile = (tier, maxBitrateKbps, width, height, sourceProfile) => ({
    tier,
    encoding: { maxBitrateKbps, width, height },
    sourceProfile,
  });
  return [
    profile("high", 6000, 0, 0, "main"),
    profile("medium", 2500, 1280, 720, "main"),
    profile("low", 1000, 640, 360, "sub"),
  ];
}
|
|
6076
|
-
var AdaptiveStreamServer = class extends import_node_events.EventEmitter {
|
|
6077
|
-
ffmpegPath;
|
|
6078
|
-
stunServers;
|
|
6079
|
-
turnServers;
|
|
6080
|
-
icePortRange;
|
|
6081
|
-
iceAdditionalHostAddresses;
|
|
6082
|
-
logger;
|
|
6083
|
-
cameras = /* @__PURE__ */ new Map();
|
|
6084
|
-
sessionCamera = /* @__PURE__ */ new Map();
|
|
6085
|
-
stopped = false;
|
|
6086
|
-
/**
 * @param {object} [options]
 * @param {string} [options.ffmpegPath]                  ffmpeg binary (default "ffmpeg").
 * @param {string[]} [options.stunServers]               STUN server URLs.
 * @param {object[]} [options.turnServers]               TURN server configs.
 * @param {object} [options.icePortRange]                ICE UDP port range.
 * @param {string[]} [options.iceAdditionalHostAddresses] Extra host candidates.
 * @param {object} [options.logger]                      Optional logger; a null logger is used otherwise.
 */
constructor(options = {}) {
  super();
  this.ffmpegPath = options.ffmpegPath ?? "ffmpeg";
  this.stunServers = options.stunServers;
  this.turnServers = options.turnServers;
  this.icePortRange = options.icePortRange;
  this.iceAdditionalHostAddresses = options.iceAdditionalHostAddresses;
  // Wrap the provided logger or fall back to a silent no-op logger.
  this.logger = options.logger ? asLogger(options.logger) : createNullLogger();
  this.logger.info("[adaptive-server] Initialized");
}
|
|
6096
|
-
// -----------------------------------------------------------------------
|
|
6097
|
-
// Camera management
|
|
6098
|
-
// -----------------------------------------------------------------------
|
|
6099
|
-
/**
 * Register a camera with adaptive streaming.
 * Builds (but does not start) the main ffmpeg source, a frame fanout, and an
 * adaptive-quality controller; the pipeline is started lazily on first session.
 *
 * @param {string} name   Unique camera name; duplicates are ignored with a warning.
 * @param {object} config { rtspUrl, profiles, audioMode? } — profiles[0].encoding
 *                        is used as the initial encoding parameters.
 */
addCamera(name, config) {
  if (this.cameras.has(name)) {
    this.logger.warn(`[adaptive-server] Camera "${name}" already registered`);
    return;
  }
  const profiles = config.profiles;
  // The first (highest) profile drives the initial encoder settings.
  const initialParams = profiles[0].encoding;
  const mainFfmpegSource = new AdaptiveFfmpegSource({
    rtspUrl: config.rtspUrl,
    initialParams,
    audioMode: config.audioMode ?? "copy",
    ffmpegPath: this.ffmpegPath,
    logger: this.logger,
    label: `ffmpeg:${name}:main`
  });
  // Fanout distributes decoded frames to every subscribed session.
  const mainFanout = new StreamFanout({
    maxQueueItems: 30,
    createSource: () => mainFfmpegSource.source,
    onError: (err) => {
      this.logger.error(`[adaptive-server] Main fanout error (${name}):`, err);
    }
  });
  // Controller aggregates per-session stats and requests tier switches.
  const controller = new AdaptiveController({
    profiles,
    onQualityChange: async (from, to) => {
      await this.handleQualityChange(name, from, to);
    },
    logger: this.logger
  });
  this.cameras.set(name, {
    config,
    mainFfmpegSource,
    mainFanout,
    // Sub-stream pipeline is created on demand elsewhere.
    subFfmpegSource: null,
    subFanout: null,
    activeSourceProfile: "main",
    controller,
    sessions: /* @__PURE__ */ new Map(),
    autoStopTimer: null,
    switching: false
  });
  this.logger.info(`[adaptive-server] Camera "${name}" added`);
}
|
|
6143
|
-
/**
 * Remove a camera and close all its sessions.
 * Sessions are closed in parallel; then the fanouts and ffmpeg sources are
 * stopped and any pending auto-stop timer is cancelled. No-op if unknown.
 *
 * @param {string} name Camera name previously passed to addCamera().
 */
async removeCamera(name) {
  const cam = this.cameras.get(name);
  if (!cam) return;
  const closePs = [];
  for (const [sid, session] of cam.sessions) {
    this.sessionCamera.delete(sid);
    // Individual close failures are ignored — teardown must proceed.
    closePs.push(session.close().catch(() => {
    }));
  }
  await Promise.all(closePs);
  cam.sessions.clear();
  await cam.mainFanout.stop();
  await cam.mainFfmpegSource.stop();
  if (cam.subFanout) await cam.subFanout.stop();
  if (cam.subFfmpegSource) await cam.subFfmpegSource.stop();
  if (cam.autoStopTimer) {
    clearTimeout(cam.autoStopTimer);
    cam.autoStopTimer = null;
  }
  this.cameras.delete(name);
  this.logger.info(`[adaptive-server] Camera "${name}" removed`);
}
|
|
6166
|
-
getCameraNames() {
|
|
6167
|
-
return [...this.cameras.keys()];
|
|
6168
|
-
}
|
|
6169
|
-
// -----------------------------------------------------------------------
|
|
6170
|
-
// Signaling (2-step: server creates offer, client sends answer)
|
|
6171
|
-
// -----------------------------------------------------------------------
|
|
6172
|
-
/**
 * Create an adaptive session for a camera.
 * Returns a server-generated SDP offer that the client must answer.
 *
 * Flow: createSession() → server offer → client sets remote, creates answer → handleAnswer()
 *
 * @param {string} cameraName Registered camera name.
 * @returns {{ sessionId: string, sdpOffer: string }}
 * @throws if the server is stopped, the camera is unknown, or offer creation fails.
 */
async createSession(cameraName) {
  if (this.stopped) throw new Error("Server stopped");
  const cam = this.cameras.get(cameraName);
  if (!cam) throw new Error(`Camera not found: ${cameraName}`);
  // A new viewer cancels any pending idle auto-stop for this camera.
  if (cam.autoStopTimer) {
    clearTimeout(cam.autoStopTimer);
    cam.autoStopTimer = null;
  }
  this.ensureCameraRunning(cameraName, cam);
  const sessionId = import_node_crypto5.default.randomUUID();
  const activeFanout = this.getActiveFanout(cam);
  const source = activeFanout.subscribe(sessionId);
  const session = new AdaptiveSession({
    sessionId,
    source,
    iceConfig: {
      stunServers: this.stunServers,
      turnServers: this.turnServers,
      portRange: this.icePortRange,
      additionalHostAddresses: this.iceAdditionalHostAddresses
    },
    // Feed per-session RTCP stats into the quality controller and re-emit them.
    onStats: (stats) => {
      cam.controller.reportStats(sessionId, {
        packetLoss: stats.packetLoss,
        jitterMs: stats.jitterMs,
        rttMs: stats.rttMs,
        timestamp: stats.timestamp
      });
      this.emit("session:stats", { camera: cameraName, ...stats });
    },
    logger: this.logger
  });
  cam.sessions.set(sessionId, session);
  this.sessionCamera.set(sessionId, cameraName);
  try {
    const offer = await session.createOffer();
    this.emit("session:created", { sessionId, camera: cameraName });
    return { sessionId, sdpOffer: offer.sdp };
  } catch (err) {
    // Roll back all registration done above, then rethrow the original error.
    cam.sessions.delete(sessionId);
    this.sessionCamera.delete(sessionId);
    activeFanout.unsubscribe(sessionId);
    await session.close().catch(() => {
    });
    // With no viewer left, re-arm the idle auto-stop.
    this.scheduleCameraAutoStop(cameraName, cam);
    throw err;
  }
}
|
|
6226
|
-
/**
 * Handle the client's SDP answer for an adaptive session.
 * Call after createSession() with the client's answer.
 *
 * @param {string} sessionId Session id returned by createSession().
 * @param {string} sdpAnswer Raw SDP answer text.
 * @throws if the session or its camera can no longer be found.
 */
async handleAnswer(sessionId, sdpAnswer) {
  const camName = this.sessionCamera.get(sessionId);
  if (!camName) throw new Error(`Session not found: ${sessionId}`);
  const cam = this.cameras.get(camName);
  if (!cam) throw new Error(`Camera not found: ${camName}`);
  const session = cam.sessions.get(sessionId);
  if (!session) throw new Error(`Session not found: ${sessionId}`);
  await session.handleAnswer({ sdp: sdpAnswer, type: "answer" });
}
|
|
6239
|
-
/**
|
|
6240
|
-
* Convenience: handleWhepOffer is NOT supported — werift requires server-initiated offers.
|
|
6241
|
-
* Use createSession() + handleAnswer() instead.
|
|
6242
|
-
*/
|
|
6243
|
-
async handleWhepOffer(_cameraName, _sdpOffer) {
|
|
6244
|
-
throw new Error(
|
|
6245
|
-
"handleWhepOffer is not supported \u2014 werift requires server-initiated offers. Use createSession() to get a server offer, then handleAnswer() with the client's answer."
|
|
6246
|
-
);
|
|
6247
|
-
}
|
|
6248
|
-
// -----------------------------------------------------------------------
|
|
6249
|
-
// Connection pool: pre-warmed sessions without camera assignment
|
|
6250
|
-
// -----------------------------------------------------------------------
|
|
6251
|
-
/** Pooled sessions: set of sessionIds that are idle (no camera attached). */
pooledSessions = /* @__PURE__ */ new Set();
/**
 * Create a pooled session (no camera attached yet).
 * The SDP exchange happens, ICE connects, but no ffmpeg is started.
 * Call attachCamera() later to start feeding frames.
 *
 * @returns {{ sessionId: string, sdpOffer: string }}
 * @throws if the server is stopped or offer creation fails.
 */
async createPooledSession() {
  if (this.stopped) throw new Error("Server stopped");
  const sessionId = import_node_crypto5.default.randomUUID();
  // An async generator that never yields — keeps the session idle until
  // attachCamera() swaps in a real frame source.
  const emptySource = (async function* () {
    await new Promise(() => {
    });
  })();
  const session = new AdaptiveSession({
    sessionId,
    source: emptySource,
    iceConfig: {
      stunServers: this.stunServers,
      turnServers: this.turnServers,
      portRange: this.icePortRange,
      additionalHostAddresses: this.iceAdditionalHostAddresses
    },
    logger: this.logger
  });
  this.pooledSessions.add(sessionId);
  // Pooled sessions live under a lazily-created "__pool__" pseudo-camera so
  // the rest of the bookkeeping (sessions map, sessionCamera) stays uniform.
  const poolCamKey = "__pool__";
  if (!this.cameras.has(poolCamKey)) {
    // Placeholder pipeline objects — never started for the pool entry.
    const dummyFfmpeg = new AdaptiveFfmpegSource({
      rtspUrl: "rtsp://0.0.0.0/dummy",
      initialParams: { maxBitrateKbps: 0, width: 0, height: 0 }
    });
    const dummyFanout = new StreamFanout({
      maxQueueItems: 1,
      createSource: () => dummyFfmpeg.source
    });
    const dummyController = new AdaptiveController({
      profiles: createDefaultProfiles(),
      onQualityChange: async () => {
      }
    });
    this.cameras.set(poolCamKey, {
      config: { rtspUrl: "", profiles: createDefaultProfiles() },
      mainFfmpegSource: dummyFfmpeg,
      mainFanout: dummyFanout,
      subFfmpegSource: null,
      subFanout: null,
      activeSourceProfile: "main",
      controller: dummyController,
      sessions: /* @__PURE__ */ new Map(),
      autoStopTimer: null,
      switching: false
    });
  }
  const poolCam = this.cameras.get(poolCamKey);
  poolCam.sessions.set(sessionId, session);
  this.sessionCamera.set(sessionId, poolCamKey);
  try {
    const offer = await session.createOffer();
    this.logger.info(`[adaptive-server] Pooled session ${sessionId.slice(0, 8)} created`);
    return { sessionId, sdpOffer: offer.sdp };
  } catch (err) {
    // Roll back the pool registration on failure, then rethrow.
    poolCam.sessions.delete(sessionId);
    this.sessionCamera.delete(sessionId);
    this.pooledSessions.delete(sessionId);
    await session.close().catch(() => {
    });
    throw err;
  }
}
|
|
6321
|
-
/**
|
|
6322
|
-
* Attach a camera to a pooled session.
|
|
6323
|
-
* Starts the ffmpeg transcoder and begins feeding frames.
|
|
6324
|
-
*/
|
|
6325
|
-
async attachCamera(sessionId, cameraName) {
|
|
6326
|
-
if (!this.pooledSessions.has(sessionId)) {
|
|
6327
|
-
throw new Error(`Session ${sessionId} is not a pooled session`);
|
|
6328
|
-
}
|
|
6329
|
-
const cam = this.cameras.get(cameraName);
|
|
6330
|
-
if (!cam) throw new Error(`Camera not found: ${cameraName}`);
|
|
6331
|
-
this.ensureCameraRunning(cameraName, cam);
|
|
6332
|
-
const poolCam = this.cameras.get("__pool__");
|
|
6333
|
-
const session = poolCam?.sessions.get(sessionId);
|
|
6334
|
-
if (!session) throw new Error(`Pooled session not found: ${sessionId}`);
|
|
6335
|
-
poolCam.sessions.delete(sessionId);
|
|
6336
|
-
cam.sessions.set(sessionId, session);
|
|
6337
|
-
this.sessionCamera.set(sessionId, cameraName);
|
|
6338
|
-
this.pooledSessions.delete(sessionId);
|
|
6339
|
-
const activeFanout = this.getActiveFanout(cam);
|
|
6340
|
-
const source = activeFanout.subscribe(sessionId);
|
|
6341
|
-
session.replaceSource(source);
|
|
6342
|
-
this.logger.info(`[adaptive-server] Attached camera "${cameraName}" to session ${sessionId.slice(0, 8)}`);
|
|
6343
|
-
}
|
|
6344
|
-
/**
|
|
6345
|
-
* Detach a camera from a session (session returns to pool).
|
|
6346
|
-
*/
|
|
6347
|
-
async detachCamera(sessionId) {
|
|
6348
|
-
const camName = this.sessionCamera.get(sessionId);
|
|
6349
|
-
if (!camName || camName === "__pool__") return;
|
|
6350
|
-
const cam = this.cameras.get(camName);
|
|
6351
|
-
if (!cam) return;
|
|
6352
|
-
const session = cam.sessions.get(sessionId);
|
|
6353
|
-
if (!session) return;
|
|
6354
|
-
session.detachSource();
|
|
6355
|
-
const activeFanout = this.getActiveFanout(cam);
|
|
6356
|
-
activeFanout.unsubscribe(sessionId);
|
|
6357
|
-
cam.sessions.delete(sessionId);
|
|
6358
|
-
const poolCam = this.cameras.get("__pool__");
|
|
6359
|
-
if (poolCam) {
|
|
6360
|
-
poolCam.sessions.set(sessionId, session);
|
|
6361
|
-
this.sessionCamera.set(sessionId, "__pool__");
|
|
6362
|
-
this.pooledSessions.add(sessionId);
|
|
6363
|
-
}
|
|
6364
|
-
this.logger.info(`[adaptive-server] Detached camera "${camName}" from session ${sessionId.slice(0, 8)} (back to pool)`);
|
|
6365
|
-
this.scheduleCameraAutoStop(camName, cam);
|
|
6366
|
-
}
|
|
6367
|
-
/** Check if a session is in the idle pool. */
|
|
6368
|
-
isPooledSession(sessionId) {
|
|
6369
|
-
return this.pooledSessions.has(sessionId);
|
|
6370
|
-
}
|
|
6371
|
-
/** Set debug flag on all sessions for a camera. */
|
|
6372
|
-
setDebug(cameraName, debug) {
|
|
6373
|
-
const cam = this.cameras.get(cameraName);
|
|
6374
|
-
if (!cam) return 0;
|
|
6375
|
-
let count = 0;
|
|
6376
|
-
for (const session of cam.sessions.values()) {
|
|
6377
|
-
session.debug = debug;
|
|
6378
|
-
count++;
|
|
6379
|
-
}
|
|
6380
|
-
return count;
|
|
6381
|
-
}
|
|
6382
|
-
/** Get count of idle pooled sessions. */
|
|
6383
|
-
getPoolSize() {
|
|
6384
|
-
return this.pooledSessions.size;
|
|
6385
|
-
}
|
|
6386
|
-
// -----------------------------------------------------------------------
|
|
6387
|
-
// Session management
|
|
6388
|
-
// -----------------------------------------------------------------------
|
|
6389
|
-
/** Close a specific session. */
|
|
6390
|
-
async closeSession(sessionId) {
|
|
6391
|
-
const camName = this.sessionCamera.get(sessionId);
|
|
6392
|
-
if (!camName) return;
|
|
6393
|
-
const cam = this.cameras.get(camName);
|
|
6394
|
-
if (!cam) return;
|
|
6395
|
-
const session = cam.sessions.get(sessionId);
|
|
6396
|
-
if (!session) return;
|
|
6397
|
-
cam.sessions.delete(sessionId);
|
|
6398
|
-
this.sessionCamera.delete(sessionId);
|
|
6399
|
-
const activeFanout = this.getActiveFanout(cam);
|
|
6400
|
-
activeFanout.unsubscribe(sessionId);
|
|
6401
|
-
cam.controller.removeSession(sessionId);
|
|
6402
|
-
await session.close();
|
|
6403
|
-
this.logger.info(`[adaptive-server] Session ${sessionId} closed (camera "${camName}", remaining: ${cam.sessions.size})`);
|
|
6404
|
-
this.emit("session:closed", { sessionId, camera: camName });
|
|
6405
|
-
this.scheduleCameraAutoStop(camName, cam);
|
|
6406
|
-
}
|
|
6407
|
-
/**
|
|
6408
|
-
* Report client-side stats for a session (supplements RTCP monitoring).
|
|
6409
|
-
* Call from tRPC route when the client pushes stats.
|
|
6410
|
-
*/
|
|
6411
|
-
reportClientStats(sessionId, stats) {
|
|
6412
|
-
const camName = this.sessionCamera.get(sessionId);
|
|
6413
|
-
if (!camName) return null;
|
|
6414
|
-
const cam = this.cameras.get(camName);
|
|
6415
|
-
if (!cam) return null;
|
|
6416
|
-
cam.controller.reportStats(sessionId, stats);
|
|
6417
|
-
const profile = cam.controller.currentProfile;
|
|
6418
|
-
return {
|
|
6419
|
-
currentTier: profile.tier,
|
|
6420
|
-
currentBitrateKbps: profile.encoding.maxBitrateKbps,
|
|
6421
|
-
currentResolution: { width: profile.encoding.width, height: profile.encoding.height },
|
|
6422
|
-
sourceProfile: cam.activeSourceProfile
|
|
6423
|
-
};
|
|
6424
|
-
}
|
|
6425
|
-
/** Force quality for a camera (null = auto). */
|
|
6426
|
-
forceQuality(cameraName, tier) {
|
|
6427
|
-
const cam = this.cameras.get(cameraName);
|
|
6428
|
-
if (!cam) return false;
|
|
6429
|
-
cam.controller.forceQuality(tier);
|
|
6430
|
-
return true;
|
|
6431
|
-
}
|
|
6432
|
-
/** Get current quality info for a camera. */
|
|
6433
|
-
getCameraQuality(cameraName) {
|
|
6434
|
-
const cam = this.cameras.get(cameraName);
|
|
6435
|
-
if (!cam) return null;
|
|
6436
|
-
const profile = cam.controller.currentProfile;
|
|
6437
|
-
return {
|
|
6438
|
-
tier: profile.tier,
|
|
6439
|
-
encoding: profile.encoding,
|
|
6440
|
-
isAuto: cam.controller.isAuto,
|
|
6441
|
-
stats: cam.controller.getAggregatedStats(),
|
|
6442
|
-
sessionCount: cam.sessions.size,
|
|
6443
|
-
sourceProfile: cam.activeSourceProfile
|
|
6444
|
-
};
|
|
6445
|
-
}
|
|
6446
|
-
/** Get all sessions. */
|
|
6447
|
-
getSessions(cameraName) {
|
|
6448
|
-
const infos = [];
|
|
6449
|
-
if (cameraName) {
|
|
6450
|
-
const cam = this.cameras.get(cameraName);
|
|
6451
|
-
if (cam) {
|
|
6452
|
-
for (const s of cam.sessions.values()) infos.push(s.getInfo());
|
|
6453
|
-
}
|
|
6454
|
-
} else {
|
|
6455
|
-
for (const cam of this.cameras.values()) {
|
|
6456
|
-
for (const s of cam.sessions.values()) infos.push(s.getInfo());
|
|
6457
|
-
}
|
|
6458
|
-
}
|
|
6459
|
-
return infos;
|
|
6460
|
-
}
|
|
6461
|
-
getSessionCount(cameraName) {
|
|
6462
|
-
if (cameraName) return this.cameras.get(cameraName)?.sessions.size ?? 0;
|
|
6463
|
-
let total = 0;
|
|
6464
|
-
for (const cam of this.cameras.values()) total += cam.sessions.size;
|
|
6465
|
-
return total;
|
|
6466
|
-
}
|
|
6467
|
-
/** Stop all cameras and sessions. */
|
|
6468
|
-
async stop() {
|
|
6469
|
-
if (this.stopped) return;
|
|
6470
|
-
this.stopped = true;
|
|
6471
|
-
const closePs = [];
|
|
6472
|
-
for (const [name, cam] of this.cameras) {
|
|
6473
|
-
if (cam.autoStopTimer) {
|
|
6474
|
-
clearTimeout(cam.autoStopTimer);
|
|
6475
|
-
cam.autoStopTimer = null;
|
|
6476
|
-
}
|
|
6477
|
-
for (const [sid, session] of cam.sessions) {
|
|
6478
|
-
this.sessionCamera.delete(sid);
|
|
6479
|
-
closePs.push(session.close().catch(() => {
|
|
6480
|
-
}));
|
|
6481
|
-
}
|
|
6482
|
-
cam.sessions.clear();
|
|
6483
|
-
closePs.push(cam.mainFanout.stop().catch(() => {
|
|
6484
|
-
}));
|
|
6485
|
-
closePs.push(cam.mainFfmpegSource.stop().catch(() => {
|
|
6486
|
-
}));
|
|
6487
|
-
if (cam.subFanout) closePs.push(cam.subFanout.stop().catch(() => {
|
|
6488
|
-
}));
|
|
6489
|
-
if (cam.subFfmpegSource) closePs.push(cam.subFfmpegSource.stop().catch(() => {
|
|
6490
|
-
}));
|
|
6491
|
-
}
|
|
6492
|
-
await Promise.all(closePs);
|
|
6493
|
-
this.cameras.clear();
|
|
6494
|
-
this.logger.info("[adaptive-server] Stopped");
|
|
6495
|
-
this.emit("stopped");
|
|
6496
|
-
}
|
|
6497
|
-
// -----------------------------------------------------------------------
|
|
6498
|
-
// Private
|
|
6499
|
-
// -----------------------------------------------------------------------
|
|
6500
|
-
/** Get the currently active fanout for a camera. */
|
|
6501
|
-
getActiveFanout(cam) {
|
|
6502
|
-
if (cam.activeSourceProfile === "sub" && cam.subFanout) {
|
|
6503
|
-
return cam.subFanout;
|
|
6504
|
-
}
|
|
6505
|
-
return cam.mainFanout;
|
|
6506
|
-
}
|
|
6507
|
-
ensureCameraRunning(name, cam) {
|
|
6508
|
-
const activeFanout = this.getActiveFanout(cam);
|
|
6509
|
-
if (activeFanout.isRunning()) return;
|
|
6510
|
-
this.logger.info(`[adaptive-server] Starting camera "${name}" (${cam.activeSourceProfile})`);
|
|
6511
|
-
if (cam.activeSourceProfile === "sub" && cam.subFfmpegSource) {
|
|
6512
|
-
void cam.subFfmpegSource.start();
|
|
6513
|
-
cam.subFanout.start();
|
|
6514
|
-
} else {
|
|
6515
|
-
void cam.mainFfmpegSource.start();
|
|
6516
|
-
cam.mainFanout.start();
|
|
6517
|
-
}
|
|
6518
|
-
}
|
|
6519
|
-
scheduleCameraAutoStop(name, cam) {
|
|
6520
|
-
if (cam.sessions.size > 0 || this.stopped) return;
|
|
6521
|
-
if (cam.autoStopTimer) clearTimeout(cam.autoStopTimer);
|
|
6522
|
-
cam.autoStopTimer = setTimeout(async () => {
|
|
6523
|
-
cam.autoStopTimer = null;
|
|
6524
|
-
if (cam.sessions.size > 0 || this.stopped) return;
|
|
6525
|
-
this.logger.info(`[adaptive-server] No viewers for "${name}", stopping ffmpeg`);
|
|
6526
|
-
await cam.mainFanout.stop();
|
|
6527
|
-
await cam.mainFfmpegSource.stop();
|
|
6528
|
-
if (cam.subFanout) await cam.subFanout.stop();
|
|
6529
|
-
if (cam.subFfmpegSource) await cam.subFfmpegSource.stop();
|
|
6530
|
-
}, 1e4);
|
|
6531
|
-
}
|
|
6532
|
-
// -----------------------------------------------------------------------
|
|
6533
|
-
// Source switching (Phase 5)
|
|
6534
|
-
// -----------------------------------------------------------------------
|
|
6535
|
-
/**
|
|
6536
|
-
* Handle a quality change from the AdaptiveController.
|
|
6537
|
-
* When the sourceProfile changes (main ↔ sub), performs a seamless source
|
|
6538
|
-
* switch for all active sessions. When only encoding params change (same
|
|
6539
|
-
* sourceProfile), updates ffmpeg params in-place.
|
|
6540
|
-
*/
|
|
6541
|
-
async handleQualityChange(cameraName, from, to) {
|
|
6542
|
-
const cam = this.cameras.get(cameraName);
|
|
6543
|
-
if (!cam) return;
|
|
6544
|
-
const sourceChanged = from.sourceProfile !== to.sourceProfile;
|
|
6545
|
-
if (sourceChanged) {
|
|
6546
|
-
await this.switchSource(cameraName, cam, to);
|
|
6547
|
-
} else {
|
|
6548
|
-
const activeSource = cam.activeSourceProfile === "sub" ? cam.subFfmpegSource : cam.mainFfmpegSource;
|
|
6549
|
-
if (activeSource) {
|
|
6550
|
-
await activeSource.updateParams(to.encoding);
|
|
6551
|
-
}
|
|
6552
|
-
}
|
|
6553
|
-
this.emit("quality:change", {
|
|
6554
|
-
camera: cameraName,
|
|
6555
|
-
tier: to.tier,
|
|
6556
|
-
encoding: to.encoding,
|
|
6557
|
-
sourceProfile: to.sourceProfile
|
|
6558
|
-
});
|
|
6559
|
-
}
|
|
6560
|
-
/**
|
|
6561
|
-
* Switch all active sessions from one source to another (main ↔ sub).
|
|
6562
|
-
*
|
|
6563
|
-
* Steps:
|
|
6564
|
-
* 1. Create/start the target ffmpeg source + fanout
|
|
6565
|
-
* 2. For each session: subscribe to new fanout, call replaceSource()
|
|
6566
|
-
* 3. Unsubscribe all from old fanout
|
|
6567
|
-
* 4. Stop old ffmpeg + fanout (save resources)
|
|
6568
|
-
* 5. Update activeSourceProfile
|
|
6569
|
-
*/
|
|
6570
|
-
async switchSource(cameraName, cam, toProfile) {
|
|
6571
|
-
if (cam.switching) {
|
|
6572
|
-
this.logger.warn(`[adaptive-server] Source switch already in progress for "${cameraName}", skipping`);
|
|
6573
|
-
return;
|
|
6574
|
-
}
|
|
6575
|
-
cam.switching = true;
|
|
6576
|
-
const switchingToSub = toProfile.sourceProfile === "sub";
|
|
6577
|
-
this.logger.info(
|
|
6578
|
-
`[adaptive-server] Source switch for "${cameraName}": ${cam.activeSourceProfile} \u2192 ${toProfile.sourceProfile}`
|
|
6579
|
-
);
|
|
6580
|
-
try {
|
|
6581
|
-
if (switchingToSub) {
|
|
6582
|
-
if (!cam.config.subRtspUrl) {
|
|
6583
|
-
this.logger.warn(
|
|
6584
|
-
`[adaptive-server] No subRtspUrl configured for "${cameraName}", cannot switch to sub stream \u2014 falling back to param update only`
|
|
6585
|
-
);
|
|
6586
|
-
await cam.mainFfmpegSource.updateParams(toProfile.encoding);
|
|
6587
|
-
return;
|
|
6588
|
-
}
|
|
6589
|
-
if (!cam.subFfmpegSource) {
|
|
6590
|
-
cam.subFfmpegSource = new AdaptiveFfmpegSource({
|
|
6591
|
-
rtspUrl: cam.config.subRtspUrl,
|
|
6592
|
-
initialParams: toProfile.encoding,
|
|
6593
|
-
ffmpegPath: this.ffmpegPath,
|
|
6594
|
-
logger: this.logger,
|
|
6595
|
-
label: `ffmpeg:${cameraName}:sub`
|
|
6596
|
-
});
|
|
6597
|
-
cam.subFanout = new StreamFanout({
|
|
6598
|
-
maxQueueItems: 30,
|
|
6599
|
-
createSource: () => cam.subFfmpegSource.source,
|
|
6600
|
-
onError: (err) => {
|
|
6601
|
-
this.logger.error(`[adaptive-server] Sub fanout error (${cameraName}):`, err);
|
|
6602
|
-
}
|
|
6603
|
-
});
|
|
6604
|
-
}
|
|
6605
|
-
void cam.subFfmpegSource.start();
|
|
6606
|
-
cam.subFanout.start();
|
|
6607
|
-
for (const [sid, session] of cam.sessions) {
|
|
6608
|
-
cam.mainFanout.unsubscribe(sid);
|
|
6609
|
-
const newSource = cam.subFanout.subscribe(sid);
|
|
6610
|
-
session.replaceSource(newSource);
|
|
6611
|
-
}
|
|
6612
|
-
await cam.mainFanout.stop();
|
|
6613
|
-
await cam.mainFfmpegSource.stop();
|
|
6614
|
-
cam.activeSourceProfile = "sub";
|
|
6615
|
-
} else {
|
|
6616
|
-
cam.mainFfmpegSource = new AdaptiveFfmpegSource({
|
|
6617
|
-
rtspUrl: cam.config.rtspUrl,
|
|
6618
|
-
initialParams: toProfile.encoding,
|
|
6619
|
-
ffmpegPath: this.ffmpegPath,
|
|
6620
|
-
logger: this.logger,
|
|
6621
|
-
label: `ffmpeg:${cameraName}:main`
|
|
6622
|
-
});
|
|
6623
|
-
cam.mainFanout = new StreamFanout({
|
|
6624
|
-
maxQueueItems: 30,
|
|
6625
|
-
createSource: () => cam.mainFfmpegSource.source,
|
|
6626
|
-
onError: (err) => {
|
|
6627
|
-
this.logger.error(`[adaptive-server] Main fanout error (${cameraName}):`, err);
|
|
6628
|
-
}
|
|
6629
|
-
});
|
|
6630
|
-
void cam.mainFfmpegSource.start();
|
|
6631
|
-
cam.mainFanout.start();
|
|
6632
|
-
for (const [sid, session] of cam.sessions) {
|
|
6633
|
-
if (cam.subFanout) cam.subFanout.unsubscribe(sid);
|
|
6634
|
-
const newSource = cam.mainFanout.subscribe(sid);
|
|
6635
|
-
session.replaceSource(newSource);
|
|
6636
|
-
}
|
|
6637
|
-
if (cam.subFanout) await cam.subFanout.stop();
|
|
6638
|
-
if (cam.subFfmpegSource) await cam.subFfmpegSource.stop();
|
|
6639
|
-
cam.subFfmpegSource = null;
|
|
6640
|
-
cam.subFanout = null;
|
|
6641
|
-
cam.activeSourceProfile = "main";
|
|
6642
|
-
}
|
|
6643
|
-
this.logger.info(
|
|
6644
|
-
`[adaptive-server] Source switch complete for "${cameraName}": now on ${cam.activeSourceProfile} stream`
|
|
6645
|
-
);
|
|
6646
|
-
} catch (err) {
|
|
6647
|
-
this.logger.error(`[adaptive-server] Source switch failed for "${cameraName}":`, err);
|
|
6648
|
-
} finally {
|
|
6649
|
-
cam.switching = false;
|
|
6650
|
-
}
|
|
6651
|
-
}
|
|
6652
|
-
};
|
|
6653
3411
|
// Annotate the CommonJS export names for ESM import in node:
|
|
6654
3412
|
0 && (module.exports = {
|
|
6655
|
-
AdaptiveController,
|
|
6656
|
-
AdaptiveFfmpegSource,
|
|
6657
|
-
AdaptiveRtspRelay,
|
|
6658
|
-
AdaptiveSession,
|
|
6659
|
-
AdaptiveStreamServer,
|
|
6660
|
-
AsyncBoundedQueue,
|
|
6661
3413
|
BuiltinAnalysisAddon,
|
|
6662
3414
|
DEFAULT_RETENTION,
|
|
6663
3415
|
DecoderRegistry,
|
|
6664
3416
|
EventPersistenceService,
|
|
6665
3417
|
FfmpegDecoderProvider,
|
|
6666
3418
|
FfmpegDecoderSession,
|
|
6667
|
-
FfmpegProcess,
|
|
6668
3419
|
FrameDropper,
|
|
6669
|
-
H264RtpDepacketizer,
|
|
6670
|
-
H265RtpDepacketizer,
|
|
6671
3420
|
KnownFacesService,
|
|
6672
3421
|
PipelineAddon,
|
|
6673
3422
|
RecordingCoordinator,
|
|
6674
3423
|
RecordingDb,
|
|
6675
3424
|
RetentionService,
|
|
6676
3425
|
SessionTrackerService,
|
|
6677
|
-
SharedSession,
|
|
6678
3426
|
StreamBroker,
|
|
6679
3427
|
StreamBrokerManager,
|
|
6680
|
-
StreamFanout,
|
|
6681
3428
|
StreamPipeServer,
|
|
6682
3429
|
TrackTrailService,
|
|
6683
|
-
|
|
6684
|
-
convertH264ToAnnexB,
|
|
6685
|
-
convertH265ToAnnexB,
|
|
6686
|
-
cosineSimilarity,
|
|
6687
|
-
createDefaultProfiles,
|
|
6688
|
-
createNullLogger,
|
|
6689
|
-
detectVideoCodecFromNal,
|
|
6690
|
-
extractH264ParamSets,
|
|
6691
|
-
extractH265ParamSets,
|
|
6692
|
-
fromEventEmitter,
|
|
6693
|
-
fromNativeStream,
|
|
6694
|
-
fromPushCallback,
|
|
6695
|
-
getH265NalType,
|
|
6696
|
-
hasStartCodes,
|
|
6697
|
-
isH264IdrAccessUnit,
|
|
6698
|
-
isH264KeyframeAnnexB,
|
|
6699
|
-
isH265Irap,
|
|
6700
|
-
isH265IrapAccessUnit,
|
|
6701
|
-
isH265KeyframeAnnexB,
|
|
6702
|
-
joinNalsToAnnexB,
|
|
6703
|
-
prependStartCode,
|
|
6704
|
-
splitAnnexBToNals
|
|
3430
|
+
cosineSimilarity
|
|
6705
3431
|
});
|
|
6706
3432
|
//# sourceMappingURL=index.cjs.map
|