@camstack/addon-webrtc-adaptive 0.1.1
This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions exactly as they appear in the public registry.
- package/dist/addon--i9xjbhN.d.cts +561 -0
- package/dist/addon--i9xjbhN.d.ts +561 -0
- package/dist/addon.cjs +2213 -0
- package/dist/addon.cjs.map +1 -0
- package/dist/addon.d.cts +3 -0
- package/dist/addon.d.ts +3 -0
- package/dist/addon.js +2176 -0
- package/dist/addon.js.map +1 -0
- package/dist/index.cjs +3319 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +378 -0
- package/dist/index.d.ts +378 -0
- package/dist/index.js +3250 -0
- package/dist/index.js.map +1 -0
- package/package.json +64 -0
package/dist/index.cjs
ADDED
@@ -0,0 +1,3319 @@
"use strict";
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// src/index.ts
var index_exports = {};
__export(index_exports, {
  AdaptiveController: () => AdaptiveController,
  AdaptiveFfmpegSource: () => AdaptiveFfmpegSource,
  AdaptiveRtspRelay: () => AdaptiveRtspRelay,
  AdaptiveSession: () => AdaptiveSession,
  AdaptiveStreamServer: () => AdaptiveStreamServer,
  AsyncBoundedQueue: () => AsyncBoundedQueue,
  FfmpegProcess: () => FfmpegProcess,
  H264RtpDepacketizer: () => H264RtpDepacketizer,
  H265RtpDepacketizer: () => H265RtpDepacketizer,
  SharedSession: () => SharedSession,
  StreamFanout: () => StreamFanout,
  WebrtcAdaptiveAddon: () => WebrtcAdaptiveAddon,
  asLogger: () => asLogger,
  convertH264ToAnnexB: () => convertH264ToAnnexB,
  convertH265ToAnnexB: () => convertH265ToAnnexB,
  createDefaultProfiles: () => createDefaultProfiles,
  createNullLogger: () => createNullLogger,
  detectVideoCodecFromNal: () => detectVideoCodecFromNal,
  extractH264ParamSets: () => extractH264ParamSets,
  extractH265ParamSets: () => extractH265ParamSets,
  fromEventEmitter: () => fromEventEmitter,
  fromNativeStream: () => fromNativeStream,
  fromPushCallback: () => fromPushCallback,
  getH265NalType: () => getH265NalType,
  hasStartCodes: () => hasStartCodes,
  isH264IdrAccessUnit: () => isH264IdrAccessUnit,
  isH264KeyframeAnnexB: () => isH264KeyframeAnnexB,
  isH265Irap: () => isH265Irap,
  isH265IrapAccessUnit: () => isH265IrapAccessUnit,
  isH265KeyframeAnnexB: () => isH265KeyframeAnnexB,
  joinNalsToAnnexB: () => joinNalsToAnnexB,
  prependStartCode: () => prependStartCode,
  splitAnnexBToNals: () => splitAnnexBToNals
});
module.exports = __toCommonJS(index_exports);

// src/server.ts
var import_node_crypto = __toESM(require("crypto"), 1);
var import_node_events = require("events");

// src/types.ts
function asLogger(logger) {
  if (!logger) return createNullLogger();
  const noop = () => {
  };
  return {
    log: logger.log?.bind(logger) ?? noop,
    info: logger.info?.bind(logger) ?? noop,
    warn: logger.warn?.bind(logger) ?? noop,
    error: logger.error?.bind(logger) ?? noop,
    debug: logger.debug?.bind(logger) ?? noop
  };
}
function createNullLogger() {
  const noop = () => {
  };
  return { log: noop, info: noop, warn: noop, error: noop, debug: noop };
}
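
// --- Usage sketch (illustrative; not part of index.cjs) ---------------------
// asLogger() above normalizes any partial console-like object into a full
// logger; methods the caller does not provide become no-ops. The "[camstack]"
// prefix below is a hypothetical example.
const { asLogger } = require("@camstack/addon-webrtc-adaptive");
const log = asLogger({
  info: (...args) => console.info("[camstack]", ...args),
  error: (...args) => console.error("[camstack]", ...args)
});
log.debug("safe to call: debug() was not provided, so this is a no-op");
// -----------------------------------------------------------------------------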

// src/fanout.ts
var AsyncBoundedQueue = class {
  maxItems;
  queue = [];
  waiting;
  closed = false;
  constructor(maxItems) {
    this.maxItems = Math.max(1, maxItems | 0);
  }
  push(item) {
    if (this.closed) return;
    if (this.waiting) {
      const { resolve } = this.waiting;
      this.waiting = void 0;
      resolve({ value: item, done: false });
      return;
    }
    this.queue.push(item);
    if (this.queue.length > this.maxItems) {
      this.queue.splice(0, this.queue.length - this.maxItems);
    }
  }
  close() {
    if (this.closed) return;
    this.closed = true;
    if (this.waiting) {
      const { resolve } = this.waiting;
      this.waiting = void 0;
      resolve({ value: void 0, done: true });
    }
  }
  async next() {
    const item = this.queue.shift();
    if (item !== void 0) return { value: item, done: false };
    if (this.closed) return { value: void 0, done: true };
    return await new Promise((resolve) => {
      this.waiting = { resolve };
    });
  }
  isClosed() {
    return this.closed;
  }
  size() {
    return this.queue.length;
  }
};
var StreamFanout = class {
  opts;
  queues = /* @__PURE__ */ new Map();
  source = null;
  running = false;
  pumpPromise = null;
  constructor(opts) {
    this.opts = opts;
  }
  /** Start pumping frames from the source to all subscribers. */
  start() {
    if (this.running) return;
    this.running = true;
    this.source = this.opts.createSource();
    this.pumpPromise = (async () => {
      try {
        for await (const frame of this.source) {
          try {
            this.opts.onFrame?.(frame);
          } catch {
          }
          for (const q of this.queues.values()) {
            q.push(frame);
          }
        }
      } catch (e) {
        this.opts.onError?.(e);
      } finally {
        this.running = false;
        for (const q of this.queues.values()) q.close();
        this.queues.clear();
      }
    })();
  }
  /**
   * Create a subscriber async generator.
   * Returns an async generator that yields frames from the shared source.
   * The generator terminates when the source ends or unsubscribe is called.
   */
  subscribe(id) {
    const q = new AsyncBoundedQueue(this.opts.maxQueueItems);
    if (!this.running) {
      q.close();
    } else {
      this.queues.set(id, q);
    }
    const self = this;
    return (async function* () {
      try {
        while (true) {
          const r = await q.next();
          if (r.done) return;
          yield r.value;
        }
      } finally {
        q.close();
        self.queues.delete(id);
      }
    })();
  }
  /** Unsubscribe a specific subscriber. */
  unsubscribe(id) {
    const q = this.queues.get(id);
    if (q) {
      q.close();
      this.queues.delete(id);
    }
  }
  /** Stop the source and close all subscriber queues. */
  async stop() {
    if (!this.running) return;
    this.running = false;
    const src = this.source;
    this.source = null;
    for (const q of this.queues.values()) q.close();
    this.queues.clear();
    const STOP_TIMEOUT = 3e3;
    const timeout = new Promise((r) => setTimeout(r, STOP_TIMEOUT));
    try {
      await Promise.race([
        (async () => {
          try {
            await src?.return(void 0);
          } catch {
          }
          try {
            await this.pumpPromise;
          } catch {
          }
        })(),
        timeout
      ]);
    } catch {
    }
    this.pumpPromise = null;
  }
  /** Returns true if the fan-out is running. */
  isRunning() {
    return this.running;
  }
  /** Returns the number of active subscribers. */
  subscriberCount() {
    return this.queues.size;
  }
};
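
// --- Usage sketch (illustrative; not part of index.cjs) ---------------------
// StreamFanout above pumps one shared frame source into N bounded subscriber
// queues; a slow subscriber drops its oldest frames rather than stalling the
// pump. The frame generator below is a stand-in, not a real camera source.
const { StreamFanout } = require("@camstack/addon-webrtc-adaptive");
async function* fakeFrames() {
  for (let i = 0; ; i++) {
    await new Promise((r) => setTimeout(r, 40));
    yield { seq: i };
  }
}
const fanout = new StreamFanout({
  createSource: () => fakeFrames(),
  maxQueueItems: 60, // per-subscriber bound; overflow discards oldest frames
  onError: (e) => console.error("pump failed:", e)
});
fanout.start();
(async () => {
  for await (const frame of fanout.subscribe("viewer-1")) {
    // Breaking out of the loop unsubscribes via the generator's finally block.
    if (frame.seq >= 5) break;
  }
  await fanout.stop();
})();
// -----------------------------------------------------------------------------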

// src/ffmpeg-source.ts
var import_node_child_process = require("child_process");

// src/nal-utils.ts
var NAL_START_CODE_4B = Buffer.from([0, 0, 0, 1]);
var NAL_START_CODE_3B = Buffer.from([0, 0, 1]);
function hasStartCodes(data) {
  if (data.length < 4) return false;
  if (data.subarray(0, 4).equals(NAL_START_CODE_4B)) return true;
  if (data.subarray(0, 3).equals(NAL_START_CODE_3B)) return true;
  return false;
}
function splitAnnexBToNals(annexB) {
  const nals = [];
  const len = annexB.length;
  const isStartCodeAt = (i2) => {
    if (i2 + 3 <= len && annexB[i2] === 0 && annexB[i2 + 1] === 0) {
      if (annexB[i2 + 2] === 1) return 3;
      if (i2 + 4 <= len && annexB[i2 + 2] === 0 && annexB[i2 + 3] === 1)
        return 4;
    }
    return 0;
  };
  let i = 0;
  while (i < len) {
    const sc = isStartCodeAt(i);
    if (sc) break;
    i++;
  }
  while (i < len) {
    const sc = isStartCodeAt(i);
    if (!sc) {
      i++;
      continue;
    }
    const nalStart = i + sc;
    let j = nalStart;
    while (j < len) {
      const sc2 = isStartCodeAt(j);
      if (sc2) break;
      j++;
    }
    if (nalStart < j) {
      const nal = annexB.subarray(nalStart, j);
      if (nal.length > 0) nals.push(nal);
    }
    i = j;
  }
  return nals;
}
function prependStartCode(nal) {
  return Buffer.concat([NAL_START_CODE_4B, nal]);
}
function joinNalsToAnnexB(...nals) {
  const present = nals.filter((n) => !!n && n.length > 0);
  if (!present.length) return;
  const parts = [];
  for (const nal of present) {
    parts.push(NAL_START_CODE_4B, nal);
  }
  return Buffer.concat(parts);
}
function detectVideoCodecFromNal(data) {
  if (!data || data.length < 5) return null;
  let nalStart = -1;
  for (let i = 0; i < Math.min(data.length - 4, 100); i++) {
    if (data[i] === 0 && data[i + 1] === 0) {
      if (data[i + 2] === 0 && data[i + 3] === 1) {
        nalStart = i + 4;
        break;
      }
      if (data[i + 2] === 1) {
        nalStart = i + 3;
        break;
      }
    }
  }
  if (nalStart < 0 && data.length >= 5) {
    const len = data.readUInt32BE(0);
    if (len > 0 && len <= data.length - 4) {
      nalStart = 4;
    }
  }
  if (nalStart < 0 || nalStart >= data.length) return null;
  const nalByte = data[nalStart];
  if (nalByte === void 0) return null;
  const forbiddenBit264 = nalByte >> 7 & 1;
  const h264Type = nalByte & 31;
  if (forbiddenBit264 === 0 && h264Type > 0 && h264Type <= 12) {
    if (h264Type === 7 || h264Type === 8) return "H264";
    if (h264Type === 5) return "H264";
    if (h264Type === 1) {
      const nalRefIdc = nalByte >> 5 & 3;
      if (nalRefIdc >= 1) return "H264";
    }
  }
  if (nalStart + 1 < data.length) {
    const nalByte2 = data[nalStart + 1];
    if (nalByte2 !== void 0) {
      const forbiddenBit = nalByte >> 7 & 1;
      const hevcType = nalByte >> 1 & 63;
      const temporalId = nalByte2 & 7;
      if (forbiddenBit === 0 && temporalId > 0 && hevcType <= 40) {
        if (hevcType === 32 || hevcType === 33 || hevcType === 34)
          return "H265";
        if (hevcType === 19 || hevcType === 20 || hevcType === 21)
          return "H265";
        if (hevcType <= 1 && nalByte <= 3) return "H265";
      }
    }
  }
  return null;
}
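
// --- Usage sketch (illustrative; not part of index.cjs) ---------------------
// The Annex B helpers above split a byte stream on 3- or 4-byte start codes
// and re-join raw NAL units. The SPS/PPS payload bytes below are minimal
// placeholders, just enough to exercise the NAL-type logic.
const {
  splitAnnexBToNals,
  joinNalsToAnnexB,
  hasStartCodes,
  detectVideoCodecFromNal
} = require("@camstack/addon-webrtc-adaptive");
// Two NALs behind 4-byte start codes: an H.264 SPS (type 7) and PPS (type 8).
const annexB = Buffer.from([0, 0, 0, 1, 0x67, 0x42, 0xe0, 0x1f, 0, 0, 0, 1, 0x68, 0xce]);
console.log(hasStartCodes(annexB)); // true
const nals = splitAnnexBToNals(annexB);
console.log(nals.length); // 2 (start codes stripped from each NAL)
console.log(detectVideoCodecFromNal(annexB)); // "H264" (first NAL is an SPS)
// Round-trip: re-join with 4-byte start codes.
console.log(joinNalsToAnnexB(...nals).equals(annexB)); // true
// -----------------------------------------------------------------------------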

// src/h264-utils.ts
function tryConvertWithLengthReader(data, readLen) {
  const result = [];
  let offset = 0;
  let nalCount = 0;
  while (offset < data.length) {
    if (offset + 4 > data.length) return null;
    const nalLength = readLen(data, offset);
    offset += 4;
    if (nalLength <= 0) return null;
    if (nalLength > data.length - offset) return null;
    result.push(NAL_START_CODE_4B);
    result.push(data.subarray(offset, offset + nalLength));
    offset += nalLength;
    nalCount++;
  }
  if (nalCount === 0) return null;
  return Buffer.concat(result);
}
function tryConvertWithLengthReader16(data, readLen) {
  const result = [];
  let offset = 0;
  let nalCount = 0;
  while (offset < data.length) {
    if (offset + 2 > data.length) return null;
    const nalLength = readLen(data, offset);
    offset += 2;
    if (nalLength <= 0) return null;
    if (nalLength > data.length - offset) return null;
    result.push(NAL_START_CODE_4B);
    result.push(data.subarray(offset, offset + nalLength));
    offset += nalLength;
    nalCount++;
  }
  if (nalCount === 0) return null;
  return Buffer.concat(result);
}
function tryConvertWithLengthReader24(data, endian) {
  const result = [];
  let offset = 0;
  let nalCount = 0;
  const readLen24 = (buf, at) => {
    if (at + 3 > buf.length) return 0;
    const b0 = buf[at];
    const b1 = buf[at + 1];
    const b2 = buf[at + 2];
    return endian === "be" ? (b0 << 16 | b1 << 8 | b2) >>> 0 : (b2 << 16 | b1 << 8 | b0) >>> 0;
  };
  while (offset < data.length) {
    if (offset + 3 > data.length) return null;
    const nalLength = readLen24(data, offset);
    offset += 3;
    if (nalLength <= 0) return null;
    if (nalLength > data.length - offset) return null;
    result.push(NAL_START_CODE_4B);
    result.push(data.subarray(offset, offset + nalLength));
    offset += nalLength;
    nalCount++;
  }
  if (nalCount === 0) return null;
  return Buffer.concat(result);
}
function looksLikeSingleH264Nal(nalPayload) {
  if (nalPayload.length < 1) return false;
  const b0 = nalPayload[0];
  if (b0 === void 0) return false;
  if ((b0 & 128) !== 0) return false;
  const nalType = b0 & 31;
  return nalType >= 1 && nalType <= 23;
}
function depacketizeRtpAggregationToAnnexB(payload) {
  if (payload.length < 1) return null;
  const nalHeader = payload[0];
  const nalType = nalHeader & 31;
  const out = [];
  const pushNal = (nal) => {
    if (nal.length === 0) return;
    out.push(NAL_START_CODE_4B, nal);
  };
  if (nalType === 24) {
    let off = 1;
    while (off + 2 <= payload.length) {
      const size = payload.readUInt16BE(off);
      off += 2;
      if (size <= 0 || off + size > payload.length) return null;
      pushNal(payload.subarray(off, off + size));
      off += size;
    }
    return out.length ? Buffer.concat(out) : null;
  }
  if (nalType === 25) {
    let off = 1 + 2;
    if (off > payload.length) return null;
    while (off + 2 <= payload.length) {
      const size = payload.readUInt16BE(off);
      off += 2;
      if (size <= 0 || off + size > payload.length) return null;
      pushNal(payload.subarray(off, off + size));
      off += size;
    }
    return out.length ? Buffer.concat(out) : null;
  }
  if (nalType === 26) {
    let off = 1 + 2;
    if (off > payload.length) return null;
    while (off + 2 <= payload.length) {
      const size = payload.readUInt16BE(off);
      off += 2;
      if (off + 1 + 2 > payload.length) return null;
      off += 1 + 2;
      if (size <= 0 || off + size > payload.length) return null;
      pushNal(payload.subarray(off, off + size));
      off += size;
    }
    return out.length ? Buffer.concat(out) : null;
  }
  if (nalType === 27) {
    let off = 1 + 2;
    if (off > payload.length) return null;
    while (off + 2 <= payload.length) {
      const size = payload.readUInt16BE(off);
      off += 2;
      if (off + 1 + 3 > payload.length) return null;
      off += 1 + 3;
      if (size <= 0 || off + size > payload.length) return null;
      pushNal(payload.subarray(off, off + size));
      off += size;
    }
    return out.length ? Buffer.concat(out) : null;
  }
  return null;
}
function convertH264ToAnnexB(data) {
  if (hasStartCodes(data)) return data;
  const sc4 = Buffer.from([0, 0, 0, 1]);
  const sc3 = Buffer.from([0, 0, 1]);
  const maxScan = Math.min(64, data.length);
  const idx4 = data.subarray(0, maxScan).indexOf(sc4);
  if (idx4 > 0) return data.subarray(idx4);
  const idx3 = data.subarray(0, maxScan).indexOf(sc3);
  if (idx3 > 0) return data.subarray(idx3);
  const be = tryConvertWithLengthReader(data, (b, o) => b.readUInt32BE(o));
  if (be) return be;
  const le = tryConvertWithLengthReader(data, (b, o) => b.readUInt32LE(o));
  if (le) return le;
  const be24 = tryConvertWithLengthReader24(data, "be");
  if (be24) return be24;
  const le24 = tryConvertWithLengthReader24(data, "le");
  if (le24) return le24;
  const be16 = tryConvertWithLengthReader16(data, (b, o) => b.readUInt16BE(o));
  if (be16) return be16;
  const le16 = tryConvertWithLengthReader16(data, (b, o) => b.readUInt16LE(o));
  if (le16) return le16;
  const agg = depacketizeRtpAggregationToAnnexB(data);
  if (agg) return agg;
  if (looksLikeSingleH264Nal(data)) {
    return Buffer.concat([NAL_START_CODE_4B, data]);
  }
  return data;
}
function isH264KeyframeAnnexB(annexB) {
  const nals = splitAnnexBToNals(annexB);
  let hasSps = false;
  let hasPps = false;
  let hasIdr = false;
  for (const nal of nals) {
    const t = (nal[0] ?? 0) & 31;
    if (t === 7) hasSps = true;
    if (t === 8) hasPps = true;
    if (t === 5) hasIdr = true;
  }
  return hasIdr && hasSps && hasPps;
}
function isH264IdrAccessUnit(annexB) {
  const nals = splitAnnexBToNals(annexB);
  for (const nal of nals) {
    if (nal.length < 1) continue;
    const t = (nal[0] ?? 0) & 31;
    if (t === 5) return true;
  }
  return false;
}
function extractH264ParamSets(annexB) {
  const nals = splitAnnexBToNals(annexB);
  let sps;
  let pps;
  let profileLevelId;
  for (const nal of nals) {
    if (nal.length < 1) continue;
    const nalType = nal[0] & 31;
    if (nalType === 7) {
      sps = nal;
      if (nal.length >= 4) {
        profileLevelId = Buffer.from([nal[1], nal[2], nal[3]]).toString(
          "hex"
        );
      }
    } else if (nalType === 8) {
      pps = nal;
    }
  }
  const out = {};
  if (sps) out.sps = sps;
  if (pps) out.pps = pps;
  if (profileLevelId) out.profileLevelId = profileLevelId;
  return out;
}
var H264RtpDepacketizer = class _H264RtpDepacketizer {
  fuNalHeader = null;
  fuParts = [];
  static parseRtpPayload(packet) {
    if (!packet || packet.length < 12) return null;
    const version = packet[0] >> 6 & 3;
    if (version !== 2) return null;
    const padding = (packet[0] & 32) !== 0;
    const extension = (packet[0] & 16) !== 0;
    const csrcCount = packet[0] & 15;
    let offset = 12 + csrcCount * 4;
    if (offset > packet.length) return null;
    if (extension) {
      if (offset + 4 > packet.length) return null;
      const extLenWords = packet.readUInt16BE(offset + 2);
      offset += 4 + extLenWords * 4;
      if (offset > packet.length) return null;
    }
    let end = packet.length;
    if (padding) {
      const padLen = packet[packet.length - 1];
      if (padLen <= 0 || padLen > packet.length) return null;
      end = packet.length - padLen;
      if (end < offset) return null;
    }
    if (end <= offset) return null;
    return packet.subarray(offset, end);
  }
  reset() {
    this.fuNalHeader = null;
    this.fuParts = [];
  }
  push(payload) {
    if (payload.length === 0) return [];
    const rtpPayload = _H264RtpDepacketizer.parseRtpPayload(payload);
    if (rtpPayload) payload = rtpPayload;
    if (hasStartCodes(payload)) return [payload];
    const b0 = payload[0];
    if ((b0 & 128) !== 0) return [];
    const nalType = b0 & 31;
    if (nalType >= 1 && nalType <= 23) {
      return [Buffer.concat([NAL_START_CODE_4B, payload])];
    }
    if (nalType === 24) {
      if (payload.length < 1 + 2) return [];
      let off = 1;
      const out = [];
      while (off + 2 <= payload.length) {
        const size = payload.readUInt16BE(off);
        off += 2;
        if (size <= 0 || off + size > payload.length) return [];
        const nal = payload.subarray(off, off + size);
        off += size;
        if (nal.length < 1) return [];
        if ((nal[0] & 128) !== 0) return [];
        const t = nal[0] & 31;
        if (t === 0 || t >= 24) return [];
        out.push(Buffer.concat([NAL_START_CODE_4B, nal]));
      }
      return out;
    }
    if (nalType === 28 || nalType === 29) {
      if (payload.length < 2) return [];
      const fuIndicator = payload[0];
      const fuHeader = payload[1];
      const start = (fuHeader & 128) !== 0;
      const end = (fuHeader & 64) !== 0;
      const origType = fuHeader & 31;
      const reconstructedHeader = fuIndicator & 224 | origType;
      let off = 2;
      if (nalType === 29) {
        if (payload.length < off + 2) return [];
        off += 2;
      }
      const frag = payload.subarray(off);
      if (start) {
        this.fuNalHeader = reconstructedHeader;
        this.fuParts = [frag];
      } else if (this.fuNalHeader != null) {
        this.fuParts.push(frag);
      } else {
        return [];
      }
      if (end && this.fuNalHeader != null) {
        const nal = Buffer.concat([
          Buffer.from([this.fuNalHeader]),
          ...this.fuParts
        ]);
        this.reset();
        return [Buffer.concat([NAL_START_CODE_4B, nal])];
      }
      return [];
    }
    return [];
  }
};
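
// --- Usage sketch (illustrative; not part of index.cjs) ---------------------
// convertH264ToAnnexB() above probes length-prefixed (AVCC-style) framing with
// several prefix widths and endiannesses before falling back to RTP
// aggregation or a bare NAL. The IDR payload bytes below are arbitrary
// placeholders after the NAL header byte.
const { convertH264ToAnnexB, isH264IdrAccessUnit } = require("@camstack/addon-webrtc-adaptive");
// AVCC framing: one NAL prefixed with a 32-bit big-endian length.
const idrNal = Buffer.from([0x65, 0x88, 0x84, 0x00]); // NAL type 5 (IDR slice)
const avcc = Buffer.concat([Buffer.from([0, 0, 0, 4]), idrNal]);
const converted = convertH264ToAnnexB(avcc); // length prefix replaced by a start code
console.log(converted.subarray(0, 4)); // <Buffer 00 00 00 01>
console.log(isH264IdrAccessUnit(converted)); // true (contains a type-5 NAL)
// -----------------------------------------------------------------------------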

// src/ffmpeg-source.ts
var AnnexBAccessUnitAssembler = class {
  buffer = Buffer.alloc(0);
  /** Feed data from ffmpeg stdout. Returns complete access units (one per frame). */
  feed(data) {
    this.buffer = this.buffer.length > 0 ? Buffer.concat([this.buffer, data]) : data;
    const aus = [];
    const audPositions = [];
    for (let i = 0; i < this.buffer.length - 5; i++) {
      if (this.buffer[i] === 0 && this.buffer[i + 1] === 0) {
        let scLen = 0;
        if (this.buffer[i + 2] === 0 && this.buffer[i + 3] === 1) scLen = 4;
        else if (this.buffer[i + 2] === 1) scLen = 3;
        if (scLen > 0) {
          const nalType = this.buffer[i + scLen] & 31;
          if (nalType === 9) {
            audPositions.push(i);
          }
        }
      }
    }
    if (audPositions.length < 2) return aus;
    for (let j = 0; j < audPositions.length - 1; j++) {
      const au = this.buffer.subarray(audPositions[j], audPositions[j + 1]);
      if (au.length > 4) aus.push(au);
    }
    this.buffer = this.buffer.subarray(audPositions[audPositions.length - 1]);
    return aus;
  }
  /** Flush any remaining buffered data as a final access unit. */
  flush() {
    if (this.buffer.length <= 4) return null;
    const au = this.buffer;
    this.buffer = Buffer.alloc(0);
    return au;
  }
};
var AdaptiveFfmpegSource = class {
  rtspUrl;
  ffmpegPath;
  logger;
  label;
  audioMode;
  currentParams;
  proc = null;
  audioProc = null;
  closed = false;
  /** Push callback for the frame source. */
  pushFrame = null;
  closeSource = null;
  /** The FrameSource async generator. Created once, survives ffmpeg restarts. */
  source;
  constructor(options) {
    this.rtspUrl = options.rtspUrl;
    this.ffmpegPath = options.ffmpegPath ?? "ffmpeg";
    this.audioMode = options.audioMode ?? "copy";
    this.logger = options.logger;
    this.label = options.label ?? "adaptive-ffmpeg";
    this.currentParams = { ...options.initialParams };
    const queue = [];
    let resolve = null;
    let done = false;
    this.pushFrame = (mf) => {
      if (done) return;
      if (resolve) {
        const r = resolve;
        resolve = null;
        r({ value: mf, done: false });
      } else {
        queue.push(mf);
        if (queue.length > 120) queue.splice(0, queue.length - 60);
      }
    };
    this.closeSource = () => {
      done = true;
      if (resolve) {
        const r = resolve;
        resolve = null;
        r({ value: void 0, done: true });
      }
    };
    this.source = (async function* () {
      try {
        while (true) {
          const item = queue.shift();
          if (item) {
            yield item;
            continue;
          }
          if (done) return;
          const result = await new Promise((r) => {
            resolve = r;
          });
          if (result.done) return;
          yield result.value;
        }
      } finally {
        done = true;
      }
    })();
  }
  /** Start the ffmpeg process with current encoding params. */
  async start() {
    if (this.closed) return;
    this.spawnFfmpeg();
  }
  /** Get the current encoding parameters. */
  getParams() {
    return { ...this.currentParams };
  }
  /**
   * Hot-swap encoding parameters.
   * Stops the current ffmpeg and starts a new one with updated params.
   * The FrameSource continues seamlessly — the new ffmpeg's first keyframe
   * is gated internally so consumers see a clean transition.
   */
  async updateParams(params) {
    const prev = { ...this.currentParams };
    if (params.maxBitrateKbps !== void 0) this.currentParams.maxBitrateKbps = params.maxBitrateKbps;
    if (params.width !== void 0) this.currentParams.width = params.width;
    if (params.height !== void 0) this.currentParams.height = params.height;
    if (params.preset !== void 0) this.currentParams.preset = params.preset;
    if (prev.maxBitrateKbps === this.currentParams.maxBitrateKbps && prev.width === this.currentParams.width && prev.height === this.currentParams.height) return;
    this.logger?.info(
      `[${this.label}] Updating params: ${prev.maxBitrateKbps}kbps ${prev.width}x${prev.height} \u2192 ${this.currentParams.maxBitrateKbps}kbps ${this.currentParams.width}x${this.currentParams.height}`
    );
    await this.killFfmpeg();
    if (!this.closed) {
      this.spawnFfmpeg();
    }
  }
  /** Stop the source and kill ffmpeg. */
  async stop() {
    if (this.closed) return;
    this.closed = true;
    await this.killFfmpeg();
    this.closeSource?.();
  }
  // -----------------------------------------------------------------------
  // Private
  // -----------------------------------------------------------------------
  spawnFfmpeg() {
    const { maxBitrateKbps, width, height, preset } = this.currentParams;
    const args = [
      "-hide_banner",
      "-loglevel",
      "error",
      "-fflags",
      "+nobuffer",
      "-flags",
      "+low_delay",
      "-rtsp_transport",
      "tcp",
      "-i",
      this.rtspUrl,
      "-c:v",
      "libx264",
      "-preset",
      preset ?? "ultrafast",
      "-tune",
      "zerolatency",
      "-crf",
      "28",
      "-maxrate",
      `${maxBitrateKbps}k`,
      "-bufsize",
      `${Math.round(maxBitrateKbps * 0.5)}k`,
      "-g",
      "50",
      "-keyint_min",
      "25",
      "-x264opts",
      "aud=1:sliced-threads=1",
      "-flush_packets",
      "1"
    ];
    if (width > 0 && height > 0) {
      args.push("-vf", `scale=${width}:${height}`);
    }
    args.push(
      "-an",
      "-f",
      "h264",
      "-"
    );
    this.proc = (0, import_node_child_process.spawn)(this.ffmpegPath, args, {
      stdio: ["ignore", "pipe", "pipe"]
    });
    this.proc.on("error", (err) => {
      this.logger?.error(`[${this.label}] ffmpeg spawn error: ${err.message}`);
    });
    this.proc.on("close", (code, signal) => {
      this.logger?.debug(`[${this.label}] ffmpeg exited code=${code} signal=${signal}`);
      this.proc = null;
      if (!this.closed) {
        setTimeout(() => {
          if (!this.closed) this.spawnFfmpeg();
        }, 2e3);
      }
    });
    this.proc.stderr?.on("data", (data) => {
      const s = data.toString();
      if (s.includes("error") || s.includes("Error") || s.includes("fatal")) {
        this.logger?.error(`[${this.label}] ffmpeg: ${s.trim()}`);
      }
    });
    if (!this.proc.stdout) {
      this.logger?.error(`[${this.label}] ffmpeg stdout not available`);
      return;
    }
    const assembler = new AnnexBAccessUnitAssembler();
    const startTime = Date.now();
    this.proc.stdout.on("data", (data) => {
      if (this.closed) return;
      const aus = assembler.feed(data);
      for (const au of aus) {
        if (au.length < 4) continue;
        const isKeyframe = isH264IdrAccessUnit(au);
        const timestampMicros = (Date.now() - startTime) * 1e3;
        const vf = {
          data: au,
          codec: "H264",
          isKeyframe,
          timestampMicros
        };
        this.pushFrame?.({ type: "video", frame: vf });
      }
    });
    this.proc.stdout.on("end", () => {
      const remaining = assembler.flush();
      if (remaining && remaining.length > 4 && this.pushFrame) {
        const vf = {
          data: remaining,
          codec: "H264",
          isKeyframe: isH264IdrAccessUnit(remaining),
          timestampMicros: (Date.now() - startTime) * 1e3
        };
        this.pushFrame({ type: "video", frame: vf });
      }
    });
    if (this.audioMode !== "off") {
      const audioArgs = [
        "-hide_banner",
        "-loglevel",
        "error",
        "-fflags",
        "+nobuffer+flush_packets",
        "-rtsp_transport",
        "tcp",
        "-analyzeduration",
        "500000",
        "-probesize",
        "500000",
        "-i",
        this.rtspUrl,
        "-vn"
      ];
      let audioCodecLabel;
      let frameSize;
      let sampleRate;
      let codecName;
      if (this.audioMode === "opus") {
        audioArgs.push("-c:a", "libopus", "-ar", "48000", "-ac", "2", "-b:a", "64k", "-f", "ogg", "-");
        audioCodecLabel = "opus";
        frameSize = 960;
        sampleRate = 48e3;
        codecName = "Opus";
        audioArgs.length = 0;
        audioArgs.push(
          "-hide_banner",
          "-loglevel",
          "error",
          "-rtsp_transport",
          "tcp",
          "-i",
          this.rtspUrl,
          "-vn",
          "-c:a",
          "pcm_mulaw",
          "-ar",
          "8000",
          "-ac",
          "1",
          "-f",
          "mulaw",
          "-"
        );
        audioCodecLabel = "pcmu(opus-fallback)";
        frameSize = 160;
        sampleRate = 8e3;
        codecName = "Pcmu";
      } else {
        audioArgs.push("-c:a", "pcm_mulaw", "-ar", "8000", "-ac", "1", "-f", "mulaw", "-");
        audioCodecLabel = "pcmu";
        frameSize = 160;
        sampleRate = 8e3;
        codecName = "Pcmu";
      }
      this.audioProc = (0, import_node_child_process.spawn)(this.ffmpegPath, audioArgs, {
        stdio: ["ignore", "pipe", "pipe"]
      });
      this.audioProc.on("error", () => {
      });
      this.audioProc.on("close", () => {
        this.audioProc = null;
      });
      if (this.audioProc.stdout) {
        let audioBuf = Buffer.alloc(0);
        this.audioProc.stdout.on("data", (data) => {
          if (this.closed || !this.pushFrame) return;
          audioBuf = audioBuf.length > 0 ? Buffer.concat([audioBuf, data]) : data;
          while (audioBuf.length >= frameSize) {
            const audioFrame = audioBuf.subarray(0, frameSize);
            audioBuf = audioBuf.subarray(frameSize);
            this.pushFrame({
              type: "audio",
              frame: {
                data: Buffer.from(audioFrame),
                codec: codecName,
                sampleRate,
                channels: 1,
                timestampMicros: (Date.now() - startTime) * 1e3
              }
            });
          }
        });
      }
      this.logger?.info(
        `[${this.label}] Started: ${maxBitrateKbps}kbps ` + (width > 0 ? `${width}x${height}` : "native") + ` +audio(${audioCodecLabel})`
      );
    } else {
      this.logger?.info(
        `[${this.label}] Started: ${maxBitrateKbps}kbps ` + (width > 0 ? `${width}x${height}` : "native") + " (no audio)"
      );
    }
  }
  async killFfmpeg() {
    const proc = this.proc;
    if (proc) {
      this.proc = null;
      try {
        proc.kill("SIGTERM");
      } catch {
      }
      await new Promise((resolve) => {
        const timer = setTimeout(() => {
          try {
            proc.kill("SIGKILL");
          } catch {
          }
          resolve();
        }, 3e3);
        proc.on("close", () => {
          clearTimeout(timer);
          resolve();
        });
      });
    }
    const audioProc = this.audioProc;
    if (audioProc) {
      this.audioProc = null;
      try {
        audioProc.kill("SIGTERM");
      } catch {
      }
      await new Promise((resolve) => {
        const timer = setTimeout(() => {
          try {
            audioProc.kill("SIGKILL");
          } catch {
          }
          resolve();
        }, 1e3);
        audioProc.on("close", () => {
          clearTimeout(timer);
          resolve();
        });
      });
    }
  }
};
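
// --- Usage sketch (illustrative; not part of index.cjs) ---------------------
// AdaptiveFfmpegSource above re-encodes an RTSP input with libx264 and exposes
// frames through a single async generator (`source.source`) that, per the
// class's own doc comment, survives ffmpeg restarts. The URL and numbers
// below are placeholders, and ffmpeg must be on PATH.
const { AdaptiveFfmpegSource } = require("@camstack/addon-webrtc-adaptive");
const src = new AdaptiveFfmpegSource({
  rtspUrl: "rtsp://camera.local/stream",
  initialParams: { maxBitrateKbps: 2000, width: 1280, height: 720, preset: "veryfast" },
  audioMode: "off",
  logger: console
});
(async () => {
  await src.start();
  for await (const mf of src.source) {
    if (mf.type === "video" && mf.frame.isKeyframe) {
      console.log(`keyframe: ${mf.frame.data.length} bytes @ ${mf.frame.timestampMicros}us`);
    }
  }
})();
// Hot-swap to a lower bitrate later; the consumer loop above keeps running.
setTimeout(() => src.updateParams({ maxBitrateKbps: 600, width: 640, height: 360 }), 30000);
// -----------------------------------------------------------------------------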

// src/adaptive-controller.ts
var EWMA = class {
  value = null;
  alpha;
  constructor(alpha = 0.3) {
    this.alpha = alpha;
  }
  update(sample) {
    if (this.value === null) {
      this.value = sample;
    } else {
      this.value = this.alpha * sample + (1 - this.alpha) * this.value;
    }
    return this.value;
  }
  get() {
    return this.value ?? 0;
  }
  reset() {
    this.value = null;
  }
};
var AdaptiveController = class {
  profiles;
  degradeThreshold;
  recoverThreshold;
  degradeCount;
  recoverCount;
  onQualityChange;
  logger;
  currentIndex;
  consecutiveBad = 0;
  consecutiveGood = 0;
  switching = false;
  /** Smoothed stats per session (aggregated for decisions). */
  sessionStats = /* @__PURE__ */ new Map();
  /** Manual override tier (null = auto). */
  forcedTier = null;
  constructor(options) {
    if (options.profiles.length === 0) {
      throw new Error("At least one quality profile is required");
    }
    this.profiles = options.profiles;
    this.degradeThreshold = options.degradeThreshold ?? 0.02;
    this.recoverThreshold = options.recoverThreshold ?? 5e-3;
    this.degradeCount = options.degradeCount ?? 2;
    this.recoverCount = options.recoverCount ?? 3;
    this.onQualityChange = options.onQualityChange;
    this.logger = options.logger;
    this.currentIndex = 0;
  }
  /** Get the current quality profile. */
  get currentProfile() {
    return this.profiles[this.currentIndex];
  }
  /** Get the current quality tier. */
  get currentTier() {
    return this.currentProfile.tier;
  }
  /** Get aggregated stats summary. */
  getAggregatedStats() {
    if (this.sessionStats.size === 0) {
      return { packetLoss: 0, jitterMs: 0, rttMs: 0 };
    }
    let totalLoss = 0;
    let totalJitter = 0;
    let totalRtt = 0;
    let count = 0;
    for (const stats of this.sessionStats.values()) {
      if (Date.now() - stats.lastUpdate > 3e4) continue;
      totalLoss += stats.loss.get();
      totalJitter += stats.jitter.get();
      totalRtt += stats.rtt.get();
      count++;
    }
    if (count === 0) return { packetLoss: 0, jitterMs: 0, rttMs: 0 };
    return {
      packetLoss: totalLoss / count,
      jitterMs: totalJitter / count,
      rttMs: totalRtt / count
    };
  }
  /**
   * Report stats from a session (RTCP or client-reported).
   * Call this periodically (e.g. every 3–5 seconds).
   */
  reportStats(sessionId, stats) {
    let entry = this.sessionStats.get(sessionId);
    if (!entry) {
      entry = {
        loss: new EWMA(0.3),
        jitter: new EWMA(0.3),
        rtt: new EWMA(0.3),
        lastUpdate: 0
      };
      this.sessionStats.set(sessionId, entry);
    }
    entry.loss.update(stats.packetLoss);
    entry.jitter.update(stats.jitterMs);
    entry.rtt.update(stats.rttMs);
    entry.lastUpdate = stats.timestamp;
    this.evaluate();
  }
  /** Remove a session's stats (call on session close). */
  removeSession(sessionId) {
    this.sessionStats.delete(sessionId);
  }
  /** Force a specific quality tier (null = auto). */
  forceQuality(tier) {
    this.forcedTier = tier;
    if (tier === null) {
      this.consecutiveBad = 0;
      this.consecutiveGood = 0;
      return;
    }
    const targetIdx = this.profiles.findIndex((p) => p.tier === tier);
    if (targetIdx >= 0 && targetIdx !== this.currentIndex) {
      void this.switchTo(targetIdx);
    }
  }
  /** Check if auto-adaptation is active (not forced). */
  get isAuto() {
    return this.forcedTier === null;
  }
  // -----------------------------------------------------------------------
  // Private
  // -----------------------------------------------------------------------
  evaluate() {
    if (this.forcedTier !== null || this.switching) return;
    const { packetLoss } = this.getAggregatedStats();
    if (packetLoss > this.degradeThreshold) {
      this.consecutiveGood = 0;
      this.consecutiveBad++;
      if (this.consecutiveBad >= this.degradeCount) {
        this.consecutiveBad = 0;
        this.degrade();
      }
    } else if (packetLoss < this.recoverThreshold) {
      this.consecutiveBad = 0;
      this.consecutiveGood++;
      if (this.consecutiveGood >= this.recoverCount) {
        this.consecutiveGood = 0;
        this.recover();
      }
    } else {
      this.consecutiveBad = 0;
      this.consecutiveGood = 0;
    }
  }
  degrade() {
    if (this.currentIndex >= this.profiles.length - 1) {
      this.logger?.debug("[adaptive] Already at lowest quality, cannot degrade further");
      return;
    }
    void this.switchTo(this.currentIndex + 1);
  }
  recover() {
    if (this.currentIndex <= 0) {
      this.logger?.debug("[adaptive] Already at highest quality, cannot recover further");
      return;
    }
    void this.switchTo(this.currentIndex - 1);
  }
  async switchTo(newIndex) {
    if (this.switching) return;
    if (newIndex === this.currentIndex) return;
    if (newIndex < 0 || newIndex >= this.profiles.length) return;
    this.switching = true;
    const from = this.profiles[this.currentIndex];
    const to = this.profiles[newIndex];
    this.logger?.info(
      `[adaptive] Quality change: ${from.tier} \u2192 ${to.tier} (${from.encoding.maxBitrateKbps}kbps \u2192 ${to.encoding.maxBitrateKbps}kbps)`
    );
    try {
      await this.onQualityChange(from, to);
      this.currentIndex = newIndex;
    } catch (err) {
      this.logger?.error("[adaptive] Quality change failed:", err);
    } finally {
      this.switching = false;
    }
  }
};
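
// --- Usage sketch (illustrative; not part of index.cjs) ---------------------
// Wiring AdaptiveController above to a source. Profiles are ordered best to
// worst, since degrade() moves to a higher index. The shapes below follow
// exactly what this file reads (tier, encoding.maxBitrateKbps, etc.); the
// tier names and numbers are placeholders.
const { AdaptiveController } = require("@camstack/addon-webrtc-adaptive");
const profiles = [
  { tier: "high", encoding: { maxBitrateKbps: 2000, width: 1280, height: 720 } },
  { tier: "medium", encoding: { maxBitrateKbps: 800, width: 854, height: 480 } },
  { tier: "low", encoding: { maxBitrateKbps: 300, width: 640, height: 360 } }
];
const controller = new AdaptiveController({
  profiles,
  onQualityChange: async (from, to) => {
    // e.g. await src.updateParams(to.encoding);
    console.log(`switch ${from.tier} -> ${to.tier}`);
  },
  logger: console
});
// Feed smoothed stats every few seconds. With the defaults above, two
// consecutive reports with loss > 2% trigger a downgrade; three consecutive
// reports with loss < 0.5% trigger an upgrade.
controller.reportStats("session-1", {
  packetLoss: 0.035, jitterMs: 12, rttMs: 45, timestamp: Date.now()
});
// -----------------------------------------------------------------------------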

// src/h265-utils.ts
function tryConvertWithLengthReader2(data, readLen) {
  const result = [];
  let offset = 0;
  let nalCount = 0;
  while (offset < data.length) {
    if (offset + 4 > data.length) return null;
    const nalLength = readLen(data, offset);
    offset += 4;
    if (nalLength <= 0) return null;
    if (nalLength > data.length - offset) return null;
    result.push(NAL_START_CODE_4B);
    result.push(data.subarray(offset, offset + nalLength));
    offset += nalLength;
    nalCount++;
  }
  if (nalCount === 0) return null;
  return Buffer.concat(result);
}
function tryConvertWithLengthReader162(data, readLen) {
  const result = [];
  let offset = 0;
  let nalCount = 0;
  while (offset < data.length) {
    if (offset + 2 > data.length) return null;
    const nalLength = readLen(data, offset);
    offset += 2;
    if (nalLength <= 0) return null;
    if (nalLength > data.length - offset) return null;
    result.push(NAL_START_CODE_4B);
    result.push(data.subarray(offset, offset + nalLength));
    offset += nalLength;
    nalCount++;
  }
  if (nalCount === 0) return null;
  return Buffer.concat(result);
}
function tryConvertWithLengthReader242(data, endian) {
  const result = [];
  let offset = 0;
  let nalCount = 0;
  const readLen24 = (buf, at) => {
    if (at + 3 > buf.length) return 0;
    const b0 = buf[at];
    const b1 = buf[at + 1];
    const b2 = buf[at + 2];
    return endian === "be" ? (b0 << 16 | b1 << 8 | b2) >>> 0 : (b2 << 16 | b1 << 8 | b0) >>> 0;
  };
  while (offset < data.length) {
    if (offset + 3 > data.length) return null;
    const nalLength = readLen24(data, offset);
    offset += 3;
    if (nalLength <= 0) return null;
    if (nalLength > data.length - offset) return null;
    result.push(NAL_START_CODE_4B);
    result.push(data.subarray(offset, offset + nalLength));
    offset += nalLength;
    nalCount++;
  }
  if (nalCount === 0) return null;
  return Buffer.concat(result);
}
function looksLikeSingleH265Nal(nalPayload) {
  if (nalPayload.length < 2) return false;
  const b0 = nalPayload[0];
  if (b0 === void 0) return false;
  if ((b0 & 128) !== 0) return false;
  const nalType = b0 >> 1 & 63;
  return nalType <= 40;
}
function convertH265ToAnnexB(data) {
  if (hasStartCodes(data)) return data;
  const sc4 = Buffer.from([0, 0, 0, 1]);
  const sc3 = Buffer.from([0, 0, 1]);
  const maxScan = Math.min(64, data.length);
  const idx4 = data.subarray(0, maxScan).indexOf(sc4);
  if (idx4 > 0) return data.subarray(idx4);
  const idx3 = data.subarray(0, maxScan).indexOf(sc3);
  if (idx3 > 0) return data.subarray(idx3);
  const be = tryConvertWithLengthReader2(data, (b, o) => b.readUInt32BE(o));
  if (be) return be;
  const le = tryConvertWithLengthReader2(data, (b, o) => b.readUInt32LE(o));
  if (le) return le;
  const be24 = tryConvertWithLengthReader242(data, "be");
  if (be24) return be24;
  const le24 = tryConvertWithLengthReader242(data, "le");
  if (le24) return le24;
  const be16 = tryConvertWithLengthReader162(data, (b, o) => b.readUInt16BE(o));
  if (be16) return be16;
  const le16 = tryConvertWithLengthReader162(data, (b, o) => b.readUInt16LE(o));
  if (le16) return le16;
  if (looksLikeSingleH265Nal(data)) {
    return Buffer.concat([NAL_START_CODE_4B, data]);
  }
  return data;
}
function getH265NalType(nalPayload) {
  if (nalPayload.length < 1) return null;
  const b0 = nalPayload[0];
  if (b0 === void 0) return null;
  if ((b0 & 128) !== 0) return null;
  return b0 >> 1 & 63;
}
function isH265Irap(nalType) {
  return nalType >= 16 && nalType <= 23;
}
function isH265KeyframeAnnexB(annexB) {
  const nals = splitAnnexBToNals(annexB);
  let hasVps = false;
  let hasSps = false;
  let hasPps = false;
  let hasIrap = false;
  for (const nal of nals) {
    const nalType = getH265NalType(nal);
    if (nalType === null) continue;
    if (nalType === 32) hasVps = true;
    if (nalType === 33) hasSps = true;
    if (nalType === 34) hasPps = true;
    if (isH265Irap(nalType)) hasIrap = true;
  }
  return hasIrap && hasVps && hasSps && hasPps;
}
function isH265IrapAccessUnit(annexB) {
  const nals = splitAnnexBToNals(annexB);
  for (const nal of nals) {
    if (nal.length < 2) continue;
    const b0 = nal[0];
    if (b0 === void 0) continue;
    if ((b0 & 128) !== 0) continue;
    const nalType = b0 >> 1 & 63;
    if (isH265Irap(nalType)) return true;
  }
  return false;
}
function extractH265ParamSets(annexB) {
  const nals = splitAnnexBToNals(annexB);
  let vps;
  let sps;
  let pps;
  for (const nal of nals) {
    if (nal.length < 2) continue;
    const nalType = nal[0] >> 1 & 63;
    if (nalType === 32) vps = nal;
    else if (nalType === 33) sps = nal;
    else if (nalType === 34) pps = nal;
  }
  const out = {};
  if (vps) out.vps = vps;
  if (sps) out.sps = sps;
  if (pps) out.pps = pps;
  return out;
}
var H265RtpDepacketizer = class _H265RtpDepacketizer {
  fuParts = null;
  static parseRtpPayload(packet) {
    if (!packet || packet.length < 12) return null;
    const version = packet[0] >> 6 & 3;
    if (version !== 2) return null;
    const padding = (packet[0] & 32) !== 0;
    const extension = (packet[0] & 16) !== 0;
    const csrcCount = packet[0] & 15;
    let offset = 12 + csrcCount * 4;
    if (offset > packet.length) return null;
    if (extension) {
      if (offset + 4 > packet.length) return null;
      const extLenWords = packet.readUInt16BE(offset + 2);
      offset += 4 + extLenWords * 4;
      if (offset > packet.length) return null;
    }
    let end = packet.length;
    if (padding) {
      const padLen = packet[packet.length - 1];
      if (padLen <= 0 || padLen > packet.length) return null;
      end = packet.length - padLen;
      if (end < offset) return null;
    }
    if (end <= offset) return null;
    return packet.subarray(offset, end);
  }
  reset() {
    this.fuParts = null;
  }
  push(payload) {
    if (!payload || payload.length < 2) return [];
    const rtpPayload = _H265RtpDepacketizer.parseRtpPayload(payload);
    if (rtpPayload) payload = rtpPayload;
    const h0 = payload[0];
    const h1 = payload[1];
    if ((h0 & 128) !== 0) return [];
    const nalType = h0 >> 1 & 63;
    if (nalType === 48) {
      let off = 2;
      const out = [];
      while (off + 2 <= payload.length) {
        const size = payload.readUInt16BE(off);
        off += 2;
        if (size <= 0 || off + size > payload.length) return [];
        const nal = payload.subarray(off, off + size);
        off += size;
        if (nal.length) out.push(NAL_START_CODE_4B, nal);
      }
      return out.length ? [Buffer.concat(out)] : [];
    }
    if (nalType === 49) {
      if (payload.length < 3) return [];
      const fuHeader = payload[2];
      const start = (fuHeader & 128) !== 0;
      const end = (fuHeader & 64) !== 0;
      const origType = fuHeader & 63;
      const orig0 = h0 & 129 | (origType & 63) << 1;
      const orig1 = h1;
      const frag = payload.subarray(3);
      if (start) {
        this.fuParts = [NAL_START_CODE_4B, Buffer.from([orig0, orig1]), frag];
      } else {
        if (!this.fuParts) return [];
        this.fuParts.push(frag);
      }
      if (end) {
        if (!this.fuParts) return [];
        const out = Buffer.concat(this.fuParts);
        this.fuParts = null;
        return [out];
      }
      return [];
    }
    return [Buffer.concat([NAL_START_CODE_4B, payload])];
  }
};
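
// --- Usage sketch (illustrative; not part of index.cjs) ---------------------
// The H.265 helpers above read the NAL type from bits 1..6 of the first
// header byte and treat types 16..23 as IRAP (random-access) pictures. The
// payload bytes after the two-byte NAL header below are placeholders.
const { getH265NalType, isH265Irap, convertH265ToAnnexB } = require("@camstack/addon-webrtc-adaptive");
const idrWRadl = Buffer.from([19 << 1, 0x01, 0xaf]); // type 19 = IDR_W_RADL
console.log(getH265NalType(idrWRadl)); // 19
console.log(isH265Irap(19)); // true (IRAP range is 16..23)
// Length-prefixed (hvcC-style) input is rewritten with Annex B start codes:
const hvcc = Buffer.concat([Buffer.from([0, 0, 0, 3]), idrWRadl]);
console.log(convertH265ToAnnexB(hvcc).subarray(0, 4)); // <Buffer 00 00 00 01>
// -----------------------------------------------------------------------------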

// src/session.ts
var _werift;
async function loadWerift() {
  if (_werift) return _werift;
  try {
    const moduleName = "werift";
    _werift = await Function("m", "return import(m)")(moduleName);
    return _werift;
  } catch {
    throw new Error(
      "The 'werift' package is required for WebRTC support but is not installed. Install it with: npm install werift"
    );
  }
}
var AdaptiveSession = class _AdaptiveSession {
  sessionId;
  source;
  logger;
  intercom;
  iceConfig;
  onStats;
  debug;
  createdAt;
  state = "new";
  pc = null;
  videoTrack = null;
  audioTrack = null;
  /** Transceiver senders for direct sendRtp (more reliable than track.writeRtp) */
  videoSender = null;
  audioSender = null;
  feedAbort = null;
  closed = false;
  statsTimer = null;
  /** RTP sequence number counter (must increment per packet). */
  videoSeqNum = 0;
  audioSeqNum = 0;
  /** Previous RTCP stats for delta calculation. */
  prevPacketsReceived = 0;
  prevPacketsLost = 0;
  constructor(options) {
    this.sessionId = options.sessionId;
    this.source = options.source;
    this.logger = options.logger;
    this.intercom = options.intercom;
    this.iceConfig = options.iceConfig;
    this.onStats = options.onStats;
    this.debug = options.debug ?? false;
    this.createdAt = Date.now();
  }
  /** Build PeerConnection options including H.264 codec config. */
  async buildPcOptions() {
    const werift = await loadWerift();
    const iceServers = [];
    if (this.iceConfig?.stunServers) {
      for (const url of this.iceConfig.stunServers) iceServers.push({ urls: url });
    }
    if (this.iceConfig?.turnServers) {
      for (const turn of this.iceConfig.turnServers) {
        iceServers.push({ urls: turn.urls, username: turn.username, credential: turn.credential });
      }
    }
    const pcOptions = {
      // H.264 + Opus codecs with RTCP feedback (matching Scrypted's proven config)
      codecs: {
        video: [
          new werift.RTCRtpCodecParameters({
            mimeType: "video/H264",
            clockRate: 9e4,
            payloadType: 96,
            parameters: "level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42e01f",
            rtcpFeedback: [
              { type: "transport-cc" },
              { type: "ccm", parameter: "fir" },
              { type: "nack" },
              { type: "nack", parameter: "pli" },
              { type: "goog-remb" }
            ]
          })
        ],
        audio: [
          new werift.RTCRtpCodecParameters({
            mimeType: "audio/PCMU",
            clockRate: 8e3,
            payloadType: 0,
            channels: 1,
            parameters: ""
          })
        ]
      }
    };
    if (iceServers.length > 0) pcOptions.iceServers = iceServers;
    if (this.iceConfig?.portRange) pcOptions.icePortRange = this.iceConfig.portRange;
    if (this.iceConfig?.additionalHostAddresses) {
      pcOptions.iceAdditionalHostAddresses = this.iceConfig.additionalHostAddresses;
|
|
1552
|
+
}
|
|
1553
|
+
return { werift, pcOptions };
|
|
1554
|
+
}
|
|
1555
|
+
/** Create offer SDP (server → client). */
|
|
1556
|
+
async createOffer() {
|
|
1557
|
+
const { werift, pcOptions } = await this.buildPcOptions();
|
|
1558
|
+
this.pc = new werift.RTCPeerConnection(pcOptions);
|
|
1559
|
+
this.pc.iceConnectionStateChange.subscribe((state) => {
|
|
1560
|
+
this.logger.debug(`[session:${this.sessionId}] ICE: ${state}`);
|
|
1561
|
+
if (state === "connected") {
|
|
1562
|
+
this.state = "connected";
|
|
1563
|
+
this.startStatsCollection();
|
|
1564
|
+
} else if (state === "disconnected" || state === "failed" || state === "closed") {
|
|
1565
|
+
this.state = state === "disconnected" ? "disconnected" : "closed";
|
|
1566
|
+
void this.close();
|
|
1567
|
+
}
|
|
1568
|
+
});
|
|
1569
|
+
this.videoTrack = new werift.MediaStreamTrack({ kind: "video" });
|
|
1570
|
+
const videoTransceiver = this.pc.addTransceiver(this.videoTrack, { direction: "sendonly" });
|
|
1571
|
+
this.videoSender = videoTransceiver.sender;
|
|
1572
|
+
this.audioTrack = new werift.MediaStreamTrack({ kind: "audio" });
|
|
1573
|
+
const audioDir = this.intercom ? "sendrecv" : "sendonly";
|
|
1574
|
+
const audioTransceiver = this.pc.addTransceiver(this.audioTrack, { direction: audioDir });
|
|
1575
|
+
this.audioSender = audioTransceiver.sender;
|
|
1576
|
+
if (this.intercom) {
|
|
1577
|
+
const cb = this.intercom.onAudioReceived;
|
|
1578
|
+
audioTransceiver.onTrack.subscribe((track) => {
|
|
1579
|
+
track.onReceiveRtp.subscribe((pkt) => {
|
|
1580
|
+
try {
|
|
1581
|
+
const payload = pkt.payload;
|
|
1582
|
+
if (payload?.length > 0) void cb(payload, "Opus");
|
|
1583
|
+
} catch (err) {
|
|
1584
|
+
this.logger.error(`[session:${this.sessionId}] Intercom error:`, err);
|
|
1585
|
+
}
|
|
1586
|
+
});
|
|
1587
|
+
});
|
|
1588
|
+
}
|
|
1589
|
+
const offer = await this.pc.createOffer();
|
|
1590
|
+
await this.pc.setLocalDescription(offer);
|
|
1591
|
+
await new Promise((resolve) => {
|
|
1592
|
+
if (this.pc.iceGatheringState === "complete") {
|
|
1593
|
+
resolve();
|
|
1594
|
+
return;
|
|
1595
|
+
}
|
|
1596
|
+
this.pc.iceGatheringStateChange.subscribe((state) => {
|
|
1597
|
+
if (state === "complete") resolve();
|
|
1598
|
+
});
|
|
1599
|
+
setTimeout(resolve, 5e3);
|
|
1600
|
+
});
|
|
1601
|
+
let finalSdp = this.pc.localDescription?.sdp ?? offer.sdp;
|
|
1602
|
+
finalSdp = finalSdp.replace(/a=setup:active\r?\n/g, "a=setup:actpass\r\n");
|
|
1603
|
+
this.state = "connecting";
|
|
1604
|
+
this.logger.info(`[session:${this.sessionId}] Offer created`);
|
|
1605
|
+
return { sdp: finalSdp, type: "offer" };
|
|
1606
|
+
}
|
|
1607
|
+
/** Handle WHEP answer: client sends SDP answer, we set remote description and start feeding. */
|
|
1608
|
+
async handleAnswer(answer) {
|
|
1609
|
+
if (!this.pc) throw new Error("Call createOffer() first");
|
|
1610
|
+
const werift = await loadWerift();
|
|
1611
|
+
const desc = new werift.RTCSessionDescription(answer.sdp, answer.type);
|
|
1612
|
+
await this.pc.setRemoteDescription(desc);
|
|
1613
|
+
this.logger.info(`[session:${this.sessionId}] Answer set, feeding started`);
|
|
1614
|
+
this.startFeedingFrames();
|
|
1615
|
+
}
|
|
1616
|
+
/**
|
|
1617
|
+
* Handle WHEP offer: client sends SDP offer, we create answer.
|
|
1618
|
+
*
|
|
1619
|
+
* Uses the server-creates-offer pattern internally: we create our own offer
|
|
1620
|
+
* with sendonly tracks, then use the client's offer codecs to build a
|
|
1621
|
+
* compatible answer. This avoids werift transceiver direction issues.
|
|
1622
|
+
*/
|
|
1623
|
+
async handleOffer(clientOffer) {
|
|
1624
|
+
const { werift, pcOptions } = await this.buildPcOptions();
|
|
1625
|
+
this.pc = new werift.RTCPeerConnection(pcOptions);
|
|
1626
|
+
this.pc.iceConnectionStateChange.subscribe((state) => {
|
|
1627
|
+
this.logger.debug(`[session:${this.sessionId}] ICE: ${state}`);
|
|
1628
|
+
if (state === "connected") {
|
|
1629
|
+
this.state = "connected";
|
|
1630
|
+
this.startStatsCollection();
|
|
1631
|
+
} else if (state === "disconnected" || state === "failed" || state === "closed") {
|
|
1632
|
+
this.state = state === "disconnected" ? "disconnected" : "closed";
|
|
1633
|
+
void this.close();
|
|
1634
|
+
}
|
|
1635
|
+
});
|
|
1636
|
+
const remoteDesc = new werift.RTCSessionDescription(clientOffer.sdp, clientOffer.type);
|
|
1637
|
+
await this.pc.setRemoteDescription(remoteDesc);
|
|
1638
|
+
const transceivers = this.pc.getTransceivers();
|
|
1639
|
+
for (const t of transceivers) {
|
|
1640
|
+
const kind = t.receiver?.track?.kind ?? t.kind;
|
|
1641
|
+
if (kind === "video" && !this.videoTrack) {
|
|
1642
|
+
this.videoTrack = new werift.MediaStreamTrack({ kind: "video" });
|
|
1643
|
+
await t.sender.replaceTrack(this.videoTrack);
|
|
1644
|
+
} else if (kind === "audio" && !this.audioTrack) {
|
|
1645
|
+
this.audioTrack = new werift.MediaStreamTrack({ kind: "audio" });
|
|
1646
|
+
await t.sender.replaceTrack(this.audioTrack);
|
|
1647
|
+
}
|
|
1648
|
+
}
|
|
1649
|
+
if (!this.videoTrack) {
|
|
1650
|
+
this.logger.warn(`[session:${this.sessionId}] No video transceiver found in offer, adding one`);
|
|
1651
|
+
this.videoTrack = new werift.MediaStreamTrack({ kind: "video" });
|
|
1652
|
+
this.pc.addTransceiver(this.videoTrack, { direction: "sendonly" });
|
|
1653
|
+
}
|
|
1654
|
+
if (!this.audioTrack) {
|
|
1655
|
+
this.logger.warn(`[session:${this.sessionId}] No audio transceiver found in offer, adding one`);
|
|
1656
|
+
this.audioTrack = new werift.MediaStreamTrack({ kind: "audio" });
|
|
1657
|
+
this.pc.addTransceiver(this.audioTrack, { direction: "sendonly" });
|
|
1658
|
+
}
|
|
1659
|
+
const answer = await this.pc.createAnswer();
|
|
1660
|
+
await this.pc.setLocalDescription(answer);
|
|
1661
|
+
this.state = "connecting";
|
|
1662
|
+
this.logger.info(`[session:${this.sessionId}] WHEP answer created`);
|
|
1663
|
+
this.startFeedingFrames();
|
|
1664
|
+
return { sdp: answer.sdp, type: "answer" };
|
|
1665
|
+
}
|
|
1666
|
+
/** Add ICE candidate. */
|
|
1667
|
+
async addIceCandidate(candidate) {
|
|
1668
|
+
if (!this.pc) throw new Error("Call createOffer() first");
|
|
1669
|
+
const werift = await loadWerift();
|
|
1670
|
+
await this.pc.addIceCandidate(new werift.RTCIceCandidate(candidate));
|
|
1671
|
+
}
|
|
1672
|
+
/**
|
|
1673
|
+
* Detach the frame source (for connection pooling).
|
|
1674
|
+
* The session stays alive (ICE/DTLS connected) but stops feeding frames.
|
|
1675
|
+
* Call replaceSource() later to reattach a camera.
|
|
1676
|
+
*/
|
|
1677
|
+
detachSource() {
|
|
1678
|
+
if (this.feedAbort) {
|
|
1679
|
+
this.feedAbort.abort();
|
|
1680
|
+
this.feedAbort = null;
|
|
1681
|
+
}
|
|
1682
|
+
this.logger.debug(`[session:${this.sessionId}] Source detached (idle)`);
|
|
1683
|
+
}
|
|
1684
|
+
/** Whether the session has an active feed (vs idle/pooled). */
|
|
1685
|
+
get isFeeding() {
|
|
1686
|
+
return this.feedAbort !== null && !this.feedAbort.signal.aborted;
|
|
1687
|
+
}
|
|
1688
|
+
/**
|
|
1689
|
+
* Replace the frame source (for seamless source switching).
|
|
1690
|
+
* The new source will take effect at the next keyframe.
|
|
1691
|
+
*/
|
|
1692
|
+
replaceSource(newSource) {
|
|
1693
|
+
this.source = newSource;
|
|
1694
|
+
if (this.feedAbort) {
|
|
1695
|
+
this.feedAbort.abort();
|
|
1696
|
+
this.feedAbort = null;
|
|
1697
|
+
}
|
|
1698
|
+
this.startFeedingFrames();
|
|
1699
|
+
}
|
|
1700
|
+
getInfo() {
|
|
1701
|
+
return { sessionId: this.sessionId, state: this.state, createdAt: this.createdAt };
|
|
1702
|
+
}
|
|
1703
|
+
async close() {
|
|
1704
|
+
if (this.closed) return;
|
|
1705
|
+
this.closed = true;
|
|
1706
|
+
this.state = "closed";
|
|
1707
|
+
this.logger.info(`[session:${this.sessionId}] Closing`);
|
|
1708
|
+
if (this.statsTimer) {
|
|
1709
|
+
clearInterval(this.statsTimer);
|
|
1710
|
+
this.statsTimer = null;
|
|
1711
|
+
}
|
|
1712
|
+
if (this.feedAbort) {
|
|
1713
|
+
this.feedAbort.abort();
|
|
1714
|
+
this.feedAbort = null;
|
|
1715
|
+
}
|
|
1716
|
+
try {
|
|
1717
|
+
await this.source.return(void 0);
|
|
1718
|
+
} catch {
|
|
1719
|
+
}
|
|
1720
|
+
if (this.pc) {
|
|
1721
|
+
try {
|
|
1722
|
+
await this.pc.close();
|
|
1723
|
+
} catch {
|
|
1724
|
+
}
|
|
1725
|
+
this.pc = null;
|
|
1726
|
+
}
|
|
1727
|
+
this.videoTrack = null;
|
|
1728
|
+
this.audioTrack = null;
|
|
1729
|
+
}
|
|
1730
|
+
// -----------------------------------------------------------------------
|
|
1731
|
+
// Frame feeding
|
|
1732
|
+
// -----------------------------------------------------------------------
|
|
1733
|
+
startFeedingFrames() {
|
|
1734
|
+
this.feedAbort = new AbortController();
|
|
1735
|
+
const { signal } = this.feedAbort;
|
|
1736
|
+
void (async () => {
|
|
1737
|
+
let gotKeyframe = false;
|
|
1738
|
+
let videoTimestampBase = null;
|
|
1739
|
+
let audioTimestampBase = null;
|
|
1740
|
+
let frameCount = 0;
|
|
1741
|
+
try {
|
|
1742
|
+
for await (const mediaFrame of this.source) {
|
|
1743
|
+
if (signal.aborted || this.closed) break;
|
|
1744
|
+
frameCount++;
|
|
1745
|
+
if (this.debug && (frameCount <= 5 || frameCount % 100 === 0)) {
|
|
1746
|
+
this.logger.debug(
|
|
1747
|
+
`[session:${this.sessionId}] Frame #${frameCount}: ${mediaFrame.type} size=${mediaFrame.frame.data.length} ` + (mediaFrame.type === "video" ? `key=${mediaFrame.frame.isKeyframe}` : "")
|
|
1748
|
+
);
|
|
1749
|
+
}
|
|
1750
|
+
if (mediaFrame.type === "video") {
|
|
1751
|
+
const frame = mediaFrame.frame;
|
|
1752
|
+
const annexB = frame.codec === "H264" ? convertH264ToAnnexB(frame.data) : convertH265ToAnnexB(frame.data);
|
|
1753
|
+
if (!gotKeyframe) {
|
|
1754
|
+
const isKey = frame.codec === "H264" ? isH264IdrAccessUnit(annexB) : isH265IrapAccessUnit(annexB);
|
|
1755
|
+
if (!isKey) continue;
|
|
1756
|
+
gotKeyframe = true;
|
|
1757
|
+
if (this.debug) {
|
|
1758
|
+
const iceState = this.pc?.iceConnectionState ?? "unknown";
|
|
1759
|
+
const connState = this.pc?.connectionState ?? "unknown";
|
|
1760
|
+
this.logger.info(
|
|
1761
|
+
`[session:${this.sessionId}] First keyframe at frame #${frameCount}, size=${annexB.length}, ICE=${iceState}, conn=${connState}`
|
|
1762
|
+
);
|
|
1763
|
+
}
|
|
1764
|
+
}
|
|
1765
|
+
if (videoTimestampBase === null) videoTimestampBase = frame.timestampMicros;
|
|
1766
|
+
const rtpTs = Math.floor(
|
|
1767
|
+
(frame.timestampMicros - videoTimestampBase) * 9e4 / 1e6
|
|
1768
|
+
) >>> 0;
|
|
1769
|
+
const nals = splitAnnexBToNals(annexB).filter((n) => {
|
|
1770
|
+
const t = n[0] & 31;
|
|
1771
|
+
return t !== 9 && t !== 6;
|
|
1772
|
+
});
|
|
1773
|
+
if (nals.length > 0 && this.videoTrack) {
|
|
1774
|
+
this.writeVideoNals(nals, rtpTs, frame.codec);
|
|
1775
|
+
if (this.debug && frameCount % 250 === 0) {
|
|
1776
|
+
this.logger.info(
|
|
1777
|
+
`[session:${this.sessionId}] ${frameCount} frames, ${this.rtpPacketsSent} RTP pkts, ICE=${this.pc?.iceConnectionState}, conn=${this.pc?.connectionState}`
|
|
1778
|
+
);
|
|
1779
|
+
}
|
|
1780
|
+
}
|
|
1781
|
+
} else if (mediaFrame.type === "audio") {
|
|
1782
|
+
const frame = mediaFrame.frame;
|
|
1783
|
+
if (!this.audioSender) continue;
|
|
1784
|
+
if (audioTimestampBase === null) audioTimestampBase = 0;
|
|
1785
|
+
audioTimestampBase = audioTimestampBase + frame.data.length >>> 0;
|
|
1786
|
+
const rtpTs = audioTimestampBase;
|
|
1787
|
+
this.writeAudio(frame.data, rtpTs, frame.codec);
|
|
1788
|
+
}
|
|
1789
|
+
}
|
|
1790
|
+
} catch (err) {
|
|
1791
|
+
if (!signal.aborted && !this.closed) {
|
|
1792
|
+
this.logger.error(`[session:${this.sessionId}] Feed error:`, err);
|
|
1793
|
+
}
|
|
1794
|
+
} finally {
|
|
1795
|
+
if (!this.closed) {
|
|
1796
|
+
this.logger.info(`[session:${this.sessionId}] Feed ended`);
|
|
1797
|
+
void this.close();
|
|
1798
|
+
}
|
|
1799
|
+
}
|
|
1800
|
+
})();
|
|
1801
|
+
}
|
|
1802
|
+
/** Build a serialized RTP packet for sender.sendRtp(). */
|
|
1803
|
+
buildRtpBuffer(weriftModule, payload, rtpTs, payloadType, marker, isVideo) {
|
|
1804
|
+
const header = new weriftModule.RtpHeader();
|
|
1805
|
+
header.payloadType = payloadType;
|
|
1806
|
+
header.timestamp = rtpTs;
|
|
1807
|
+
header.marker = marker;
|
|
1808
|
+
header.sequenceNumber = isVideo ? this.videoSeqNum = this.videoSeqNum + 1 & 65535 : this.audioSeqNum = this.audioSeqNum + 1 & 65535;
|
|
1809
|
+
const pkt = new weriftModule.RtpPacket(header, payload);
|
|
1810
|
+
return pkt.serialize();
|
|
1811
|
+
}
|
|
1812
|
+
/** Max RTP payload size (MTU 1200 to stay under typical network MTU). */
|
|
1813
|
+
static MAX_RTP_PAYLOAD = 1200;
|
|
1814
|
+
rtpPacketsSent = 0;
|
|
1815
|
+
writeVideoNals(nals, rtpTs, codec) {
|
|
1816
|
+
if (!this.videoSender || !_werift) return;
|
|
1817
|
+
const pt = codec === "H264" ? 96 : 97;
|
|
1818
|
+
const sendPkt = (payload, marker) => {
|
|
1819
|
+
try {
|
|
1820
|
+
const buf = this.buildRtpBuffer(_werift, payload, rtpTs, pt, marker, true);
|
|
1821
|
+
this.videoSender.sendRtp(buf);
|
|
1822
|
+
this.rtpPacketsSent++;
|
|
1823
|
+
} catch (err) {
|
|
1824
|
+
if (this.rtpPacketsSent <= 10) {
|
|
1825
|
+
this.logger.error(`[session:${this.sessionId}] sendRtp error #${this.rtpPacketsSent}:`, err);
|
|
1826
|
+
}
|
|
1827
|
+
}
|
|
1828
|
+
};
|
|
1829
|
+
for (let i = 0; i < nals.length; i++) {
|
|
1830
|
+
const nal = nals[i];
|
|
1831
|
+
const isLastNal = i === nals.length - 1;
|
|
1832
|
+
if (nal.length <= _AdaptiveSession.MAX_RTP_PAYLOAD) {
|
|
1833
|
+
sendPkt(nal, isLastNal);
|
|
1834
|
+
} else {
|
|
1835
|
+
const nalHeader = nal[0];
|
|
1836
|
+
const fnri = nalHeader & 224;
|
|
1837
|
+
const nalType = nalHeader & 31;
|
|
1838
|
+
const fuIndicator = fnri | 28;
|
|
1839
|
+
const nalBody = nal.subarray(1);
|
|
1840
|
+
let offset = 0;
|
|
1841
|
+
let isFirst = true;
|
|
1842
|
+
while (offset < nalBody.length) {
|
|
1843
|
+
const end = Math.min(offset + _AdaptiveSession.MAX_RTP_PAYLOAD - 2, nalBody.length);
|
|
1844
|
+
const isLast = end >= nalBody.length;
|
|
1845
|
+
let fuHeader = nalType;
|
|
1846
|
+
if (isFirst) fuHeader |= 128;
|
|
1847
|
+
if (isLast) fuHeader |= 64;
|
|
1848
|
+
const fragment = Buffer.alloc(2 + (end - offset));
|
|
1849
|
+
fragment[0] = fuIndicator;
|
|
1850
|
+
fragment[1] = fuHeader;
|
|
1851
|
+
nalBody.copy(fragment, 2, offset, end);
|
|
1852
|
+
sendPkt(fragment, isLastNal && isLast);
|
|
1853
|
+
offset = end;
|
|
1854
|
+
isFirst = false;
|
|
1855
|
+
}
|
|
1856
|
+
}
|
|
1857
|
+
}
|
|
1858
|
+
}
|
|
1859
|
+
writeAudio(data, rtpTs, codec) {
|
|
1860
|
+
if (!this.audioSender || !_werift) return;
|
|
1861
|
+
const pt = codec === "Pcmu" || codec === "Pcma" ? 0 : 111;
|
|
1862
|
+
try {
|
|
1863
|
+
const buf = this.buildRtpBuffer(_werift, data, rtpTs, pt, true, false);
|
|
1864
|
+
this.audioSender.sendRtp(buf);
|
|
1865
|
+
} catch (err) {
|
|
1866
|
+
this.logger.debug(`[session:${this.sessionId}] Audio write error:`, err);
|
|
1867
|
+
}
|
|
1868
|
+
}
|
|
1869
|
+
// -----------------------------------------------------------------------
|
|
1870
|
+
// RTCP stats collection
|
|
1871
|
+
// -----------------------------------------------------------------------
|
|
1872
|
+
startStatsCollection() {
|
|
1873
|
+
if (this.statsTimer || !this.onStats) return;
|
|
1874
|
+
this.statsTimer = setInterval(() => {
|
|
1875
|
+
if (!this.pc || this.closed) return;
|
|
1876
|
+
this.collectStats();
|
|
1877
|
+
}, 3e3);
|
|
1878
|
+
}
|
|
1879
|
+
collectStats() {
|
|
1880
|
+
if (!this.pc || !this.onStats) return;
|
|
1881
|
+
try {
|
|
1882
|
+
const senders = this.pc.getSenders?.() ?? [];
|
|
1883
|
+
for (const sender of senders) {
|
|
1884
|
+
const track = sender.track;
|
|
1885
|
+
if (!track || track.kind !== "video") continue;
|
|
1886
|
+
const report = sender.lastReceiverReport ?? sender.rtcpReport;
|
|
1887
|
+
if (!report) continue;
|
|
1888
|
+
const fractionLost = report.fractionLost ?? 0;
|
|
1889
|
+
const packetsLost = report.packetsLost ?? report.cumulativeLost ?? 0;
|
|
1890
|
+
const jitter = report.jitter ?? 0;
|
|
1891
|
+
const rtt = report.roundTripTime ?? report.rtt ?? 0;
|
|
1892
|
+
const packetLoss = fractionLost / 256;
|
|
1893
|
+
this.onStats({
|
|
1894
|
+
sessionId: this.sessionId,
|
|
1895
|
+
packetLoss,
|
|
1896
|
+
jitterMs: jitter,
|
|
1897
|
+
rttMs: rtt * 1e3,
|
|
1898
|
+
// seconds → ms
|
|
1899
|
+
packetsReceived: 0,
|
|
1900
|
+
// Not available from sender side
|
|
1901
|
+
packetsLost,
|
|
1902
|
+
timestamp: Date.now()
|
|
1903
|
+
});
|
|
1904
|
+
return;
|
|
1905
|
+
}
|
|
1906
|
+
} catch {
|
|
1907
|
+
}
|
|
1908
|
+
}
|
|
1909
|
+
};
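
A minimal lifecycle sketch for AdaptiveSession. The frame source is any async iterable of { type, frame } media frames (see the frame-source helpers later in this bundle); `signaling` is a hypothetical transport that carries SDP to the viewer and back.

// Example (editor's sketch): driving one session by hand.
const { AdaptiveSession } = require("@camstack/addon-webrtc-adaptive");

async function streamToViewer(frameSource, signaling, logger) {
  const session = new AdaptiveSession({
    sessionId: "demo",
    source: frameSource,
    logger,
    onStats: (s) => logger.debug(`loss=${s.packetLoss} rtt=${s.rttMs}ms`)
  });
  const offer = await session.createOffer();      // server-initiated offer
  const answer = await signaling.exchange(offer); // hypothetical SDP round-trip
  await session.handleAnswer(answer);             // starts feeding frames
  return session;                                 // call session.close() when done
}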

// src/server.ts
function createDefaultProfiles() {
  return [
    {
      tier: "high",
      encoding: { maxBitrateKbps: 6e3, width: 0, height: 0 },
      // native resolution
      sourceProfile: "main"
    },
    {
      tier: "medium",
      encoding: { maxBitrateKbps: 2500, width: 1280, height: 720 },
      sourceProfile: "main"
    },
    {
      tier: "low",
      encoding: { maxBitrateKbps: 1e3, width: 640, height: 360 },
      sourceProfile: "sub"
    }
  ];
}
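
These defaults are only a starting ladder; addCamera() on the server class below accepts any profile array of the same shape. A sketch with placeholder RTSP URLs:

// Example (editor's sketch): a custom two-tier ladder.
const { AdaptiveStreamServer } = require("@camstack/addon-webrtc-adaptive");

const server = new AdaptiveStreamServer({ ffmpegPath: "ffmpeg" });
server.addCamera("driveway", {
  rtspUrl: "rtsp://example.local/stream1",    // placeholder
  subRtspUrl: "rtsp://example.local/stream2", // placeholder; enables main → sub switching
  profiles: [
    { tier: "high", encoding: { maxBitrateKbps: 4000, width: 0, height: 0 }, sourceProfile: "main" },
    { tier: "low", encoding: { maxBitrateKbps: 800, width: 640, height: 360 }, sourceProfile: "sub" }
  ]
});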
var AdaptiveStreamServer = class extends import_node_events.EventEmitter {
  ffmpegPath;
  stunServers;
  turnServers;
  icePortRange;
  iceAdditionalHostAddresses;
  logger;
  cameras = /* @__PURE__ */ new Map();
  sessionCamera = /* @__PURE__ */ new Map();
  stopped = false;
  constructor(options = {}) {
    super();
    this.ffmpegPath = options.ffmpegPath ?? "ffmpeg";
    this.stunServers = options.stunServers;
    this.turnServers = options.turnServers;
    this.icePortRange = options.icePortRange;
    this.iceAdditionalHostAddresses = options.iceAdditionalHostAddresses;
    this.logger = options.logger ? asLogger(options.logger) : createNullLogger();
    this.logger.info("[adaptive-server] Initialized");
  }
  // -----------------------------------------------------------------------
  // Camera management
  // -----------------------------------------------------------------------
  /** Register a camera with adaptive streaming. */
  addCamera(name, config) {
    if (this.cameras.has(name)) {
      this.logger.warn(`[adaptive-server] Camera "${name}" already registered`);
      return;
    }
    const profiles = config.profiles;
    const initialParams = profiles[0].encoding;
    const mainFfmpegSource = new AdaptiveFfmpegSource({
      rtspUrl: config.rtspUrl,
      initialParams,
      audioMode: config.audioMode ?? "copy",
      ffmpegPath: this.ffmpegPath,
      logger: this.logger,
      label: `ffmpeg:${name}:main`
    });
    const mainFanout = new StreamFanout({
      maxQueueItems: 30,
      createSource: () => mainFfmpegSource.source,
      onError: (err) => {
        this.logger.error(`[adaptive-server] Main fanout error (${name}):`, err);
      }
    });
    const controller = new AdaptiveController({
      profiles,
      onQualityChange: async (from, to) => {
        await this.handleQualityChange(name, from, to);
      },
      logger: this.logger
    });
    this.cameras.set(name, {
      config,
      mainFfmpegSource,
      mainFanout,
      subFfmpegSource: null,
      subFanout: null,
      activeSourceProfile: "main",
      controller,
      sessions: /* @__PURE__ */ new Map(),
      autoStopTimer: null,
      switching: false
    });
    this.logger.info(`[adaptive-server] Camera "${name}" added`);
  }
  /** Remove a camera and close all its sessions. */
  async removeCamera(name) {
    const cam = this.cameras.get(name);
    if (!cam) return;
    const closePs = [];
    for (const [sid, session] of cam.sessions) {
      this.sessionCamera.delete(sid);
      closePs.push(session.close().catch(() => {
      }));
    }
    await Promise.all(closePs);
    cam.sessions.clear();
    await cam.mainFanout.stop();
    await cam.mainFfmpegSource.stop();
    if (cam.subFanout) await cam.subFanout.stop();
    if (cam.subFfmpegSource) await cam.subFfmpegSource.stop();
    if (cam.autoStopTimer) {
      clearTimeout(cam.autoStopTimer);
      cam.autoStopTimer = null;
    }
    this.cameras.delete(name);
    this.logger.info(`[adaptive-server] Camera "${name}" removed`);
  }
  getCameraNames() {
    return [...this.cameras.keys()];
  }
  // -----------------------------------------------------------------------
  // Signaling (2-step: server creates offer, client sends answer)
  // -----------------------------------------------------------------------
  /**
   * Create an adaptive session for a camera.
   * Returns a server-generated SDP offer that the client must answer.
   *
   * Flow: createSession() → server offer → client sets remote, creates answer → handleAnswer()
   */
  async createSession(cameraName) {
    if (this.stopped) throw new Error("Server stopped");
    const cam = this.cameras.get(cameraName);
    if (!cam) throw new Error(`Camera not found: ${cameraName}`);
    if (cam.autoStopTimer) {
      clearTimeout(cam.autoStopTimer);
      cam.autoStopTimer = null;
    }
    this.ensureCameraRunning(cameraName, cam);
    const sessionId = import_node_crypto.default.randomUUID();
    const activeFanout = this.getActiveFanout(cam);
    const source = activeFanout.subscribe(sessionId);
    const session = new AdaptiveSession({
      sessionId,
      source,
      iceConfig: {
        stunServers: this.stunServers,
        turnServers: this.turnServers,
        portRange: this.icePortRange,
        additionalHostAddresses: this.iceAdditionalHostAddresses
      },
      onStats: (stats) => {
        cam.controller.reportStats(sessionId, {
          packetLoss: stats.packetLoss,
          jitterMs: stats.jitterMs,
          rttMs: stats.rttMs,
          timestamp: stats.timestamp
        });
        this.emit("session:stats", { camera: cameraName, ...stats });
      },
      logger: this.logger
    });
    cam.sessions.set(sessionId, session);
    this.sessionCamera.set(sessionId, cameraName);
    try {
      const offer = await session.createOffer();
      this.emit("session:created", { sessionId, camera: cameraName });
      return { sessionId, sdpOffer: offer.sdp };
    } catch (err) {
      cam.sessions.delete(sessionId);
      this.sessionCamera.delete(sessionId);
      activeFanout.unsubscribe(sessionId);
      await session.close().catch(() => {
      });
      this.scheduleCameraAutoStop(cameraName, cam);
      throw err;
    }
  }
  /**
   * Handle the client's SDP answer for an adaptive session.
   * Call after createSession() with the client's answer.
   */
  async handleAnswer(sessionId, sdpAnswer) {
    const camName = this.sessionCamera.get(sessionId);
    if (!camName) throw new Error(`Session not found: ${sessionId}`);
    const cam = this.cameras.get(camName);
    if (!cam) throw new Error(`Camera not found: ${camName}`);
    const session = cam.sessions.get(sessionId);
    if (!session) throw new Error(`Session not found: ${sessionId}`);
    await session.handleAnswer({ sdp: sdpAnswer, type: "answer" });
  }
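  // Example (editor's sketch): the 2-step flow above wired over HTTP,
  // assuming an Express app; the route paths are illustrative.
  //
  //   app.post("/webrtc/:camera/session", async (req, res) => {
  //     const { sessionId, sdpOffer } = await server.createSession(req.params.camera);
  //     res.json({ sessionId, sdpOffer }); // client sets this as its remote description
  //   });
  //   app.post("/webrtc/session/:id/answer", async (req, res) => {
  //     await server.handleAnswer(req.params.id, req.body.sdpAnswer);
  //     res.sendStatus(204); // ICE completes and frames start flowing
  //   });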
  /**
   * Convenience: handleWhepOffer is NOT supported — werift requires server-initiated offers.
   * Use createSession() + handleAnswer() instead.
   */
  async handleWhepOffer(_cameraName, _sdpOffer) {
    throw new Error(
      "handleWhepOffer is not supported \u2014 werift requires server-initiated offers. Use createSession() to get a server offer, then handleAnswer() with the client's answer."
    );
  }
  // -----------------------------------------------------------------------
  // Connection pool: pre-warmed sessions without camera assignment
  // -----------------------------------------------------------------------
  /** Pooled session ids (idle, no camera attached). */
  pooledSessions = /* @__PURE__ */ new Set();
  /**
   * Create a pooled session (no camera attached yet).
   * The SDP exchange happens, ICE connects, but no ffmpeg is started.
   * Call attachCamera() later to start feeding frames.
   */
  async createPooledSession() {
    if (this.stopped) throw new Error("Server stopped");
    const sessionId = import_node_crypto.default.randomUUID();
    const emptySource = (async function* () {
      await new Promise(() => {
      });
    })();
    const session = new AdaptiveSession({
      sessionId,
      source: emptySource,
      iceConfig: {
        stunServers: this.stunServers,
        turnServers: this.turnServers,
        portRange: this.icePortRange,
        additionalHostAddresses: this.iceAdditionalHostAddresses
      },
      logger: this.logger
    });
    this.pooledSessions.add(sessionId);
    const poolCamKey = "__pool__";
    if (!this.cameras.has(poolCamKey)) {
      const dummyFfmpeg = new AdaptiveFfmpegSource({
        rtspUrl: "rtsp://0.0.0.0/dummy",
        initialParams: { maxBitrateKbps: 0, width: 0, height: 0 }
      });
      const dummyFanout = new StreamFanout({
        maxQueueItems: 1,
        createSource: () => dummyFfmpeg.source
      });
      const dummyController = new AdaptiveController({
        profiles: createDefaultProfiles(),
        onQualityChange: async () => {
        }
      });
      this.cameras.set(poolCamKey, {
        config: { rtspUrl: "", profiles: createDefaultProfiles() },
        mainFfmpegSource: dummyFfmpeg,
        mainFanout: dummyFanout,
        subFfmpegSource: null,
        subFanout: null,
        activeSourceProfile: "main",
        controller: dummyController,
        sessions: /* @__PURE__ */ new Map(),
        autoStopTimer: null,
        switching: false
      });
    }
    const poolCam = this.cameras.get(poolCamKey);
    poolCam.sessions.set(sessionId, session);
    this.sessionCamera.set(sessionId, poolCamKey);
    try {
      const offer = await session.createOffer();
      this.logger.info(`[adaptive-server] Pooled session ${sessionId.slice(0, 8)} created`);
      return { sessionId, sdpOffer: offer.sdp };
    } catch (err) {
      poolCam.sessions.delete(sessionId);
      this.sessionCamera.delete(sessionId);
      this.pooledSessions.delete(sessionId);
      await session.close().catch(() => {
      });
      throw err;
    }
  }
  /**
   * Attach a camera to a pooled session.
   * Starts the ffmpeg transcoder and begins feeding frames.
   */
  async attachCamera(sessionId, cameraName) {
    if (!this.pooledSessions.has(sessionId)) {
      throw new Error(`Session ${sessionId} is not a pooled session`);
    }
    const cam = this.cameras.get(cameraName);
    if (!cam) throw new Error(`Camera not found: ${cameraName}`);
    this.ensureCameraRunning(cameraName, cam);
    const poolCam = this.cameras.get("__pool__");
    const session = poolCam?.sessions.get(sessionId);
    if (!session) throw new Error(`Pooled session not found: ${sessionId}`);
    poolCam.sessions.delete(sessionId);
    cam.sessions.set(sessionId, session);
    this.sessionCamera.set(sessionId, cameraName);
    this.pooledSessions.delete(sessionId);
    const activeFanout = this.getActiveFanout(cam);
    const source = activeFanout.subscribe(sessionId);
    session.replaceSource(source);
    this.logger.info(`[adaptive-server] Attached camera "${cameraName}" to session ${sessionId.slice(0, 8)}`);
  }
  /**
   * Detach a camera from a session (session returns to pool).
   */
  async detachCamera(sessionId) {
    const camName = this.sessionCamera.get(sessionId);
    if (!camName || camName === "__pool__") return;
    const cam = this.cameras.get(camName);
    if (!cam) return;
    const session = cam.sessions.get(sessionId);
    if (!session) return;
    session.detachSource();
    const activeFanout = this.getActiveFanout(cam);
    activeFanout.unsubscribe(sessionId);
    cam.sessions.delete(sessionId);
    const poolCam = this.cameras.get("__pool__");
    if (poolCam) {
      poolCam.sessions.set(sessionId, session);
      this.sessionCamera.set(sessionId, "__pool__");
      this.pooledSessions.add(sessionId);
    }
    this.logger.info(`[adaptive-server] Detached camera "${camName}" from session ${sessionId.slice(0, 8)} (back to pool)`);
    this.scheduleCameraAutoStop(camName, cam);
  }
  /** Check if a session is in the idle pool. */
  isPooledSession(sessionId) {
    return this.pooledSessions.has(sessionId);
  }
  /** Set debug flag on all sessions for a camera. */
  setDebug(cameraName, debug) {
    const cam = this.cameras.get(cameraName);
    if (!cam) return 0;
    let count = 0;
    for (const session of cam.sessions.values()) {
      session.debug = debug;
      count++;
    }
    return count;
  }
  /** Get count of idle pooled sessions. */
  getPoolSize() {
    return this.pooledSessions.size;
  }
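  // Example (editor's sketch): pre-warming a connection, then hopping cameras.
  // Inside an async function, with `server` an AdaptiveStreamServer:
  //
  //   const { sessionId, sdpOffer } = await server.createPooledSession(); // ICE/DTLS only
  //   // ...exchange sdpOffer/answer with the client as usual, then:
  //   await server.attachCamera(sessionId, "driveway"); // starts ffmpeg, feeds frames
  //   await server.detachCamera(sessionId);             // back to the idle pool
  //   await server.attachCamera(sessionId, "backyard"); // reuse the same connection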
  // -----------------------------------------------------------------------
  // Session management
  // -----------------------------------------------------------------------
  /** Close a specific session. */
  async closeSession(sessionId) {
    const camName = this.sessionCamera.get(sessionId);
    if (!camName) return;
    const cam = this.cameras.get(camName);
    if (!cam) return;
    const session = cam.sessions.get(sessionId);
    if (!session) return;
    cam.sessions.delete(sessionId);
    this.sessionCamera.delete(sessionId);
    const activeFanout = this.getActiveFanout(cam);
    activeFanout.unsubscribe(sessionId);
    cam.controller.removeSession(sessionId);
    await session.close();
    this.logger.info(`[adaptive-server] Session ${sessionId} closed (camera "${camName}", remaining: ${cam.sessions.size})`);
    this.emit("session:closed", { sessionId, camera: camName });
    this.scheduleCameraAutoStop(camName, cam);
  }
  /**
   * Report client-side stats for a session (supplements RTCP monitoring).
   * Call from tRPC route when the client pushes stats.
   */
  reportClientStats(sessionId, stats) {
    const camName = this.sessionCamera.get(sessionId);
    if (!camName) return null;
    const cam = this.cameras.get(camName);
    if (!cam) return null;
    cam.controller.reportStats(sessionId, stats);
    const profile = cam.controller.currentProfile;
    return {
      currentTier: profile.tier,
      currentBitrateKbps: profile.encoding.maxBitrateKbps,
      currentResolution: { width: profile.encoding.width, height: profile.encoding.height },
      sourceProfile: cam.activeSourceProfile
    };
  }
  /** Force quality for a camera (null = auto). */
  forceQuality(cameraName, tier) {
    const cam = this.cameras.get(cameraName);
    if (!cam) return false;
    cam.controller.forceQuality(tier);
    return true;
  }
  /** Get current quality info for a camera. */
  getCameraQuality(cameraName) {
    const cam = this.cameras.get(cameraName);
    if (!cam) return null;
    const profile = cam.controller.currentProfile;
    return {
      tier: profile.tier,
      encoding: profile.encoding,
      isAuto: cam.controller.isAuto,
      stats: cam.controller.getAggregatedStats(),
      sessionCount: cam.sessions.size,
      sourceProfile: cam.activeSourceProfile
    };
  }
  /** Get all sessions. */
  getSessions(cameraName) {
    const infos = [];
    if (cameraName) {
      const cam = this.cameras.get(cameraName);
      if (cam) {
        for (const s of cam.sessions.values()) infos.push(s.getInfo());
      }
    } else {
      for (const cam of this.cameras.values()) {
        for (const s of cam.sessions.values()) infos.push(s.getInfo());
      }
    }
    return infos;
  }
  getSessionCount(cameraName) {
    if (cameraName) return this.cameras.get(cameraName)?.sessions.size ?? 0;
    let total = 0;
    for (const cam of this.cameras.values()) total += cam.sessions.size;
    return total;
  }
  /** Stop all cameras and sessions. */
  async stop() {
    if (this.stopped) return;
    this.stopped = true;
    const closePs = [];
    for (const [name, cam] of this.cameras) {
      if (cam.autoStopTimer) {
        clearTimeout(cam.autoStopTimer);
        cam.autoStopTimer = null;
      }
      for (const [sid, session] of cam.sessions) {
        this.sessionCamera.delete(sid);
        closePs.push(session.close().catch(() => {
        }));
      }
      cam.sessions.clear();
      closePs.push(cam.mainFanout.stop().catch(() => {
      }));
      closePs.push(cam.mainFfmpegSource.stop().catch(() => {
      }));
      if (cam.subFanout) closePs.push(cam.subFanout.stop().catch(() => {
      }));
      if (cam.subFfmpegSource) closePs.push(cam.subFfmpegSource.stop().catch(() => {
      }));
    }
    await Promise.all(closePs);
    this.cameras.clear();
    this.logger.info("[adaptive-server] Stopped");
    this.emit("stopped");
  }
  // -----------------------------------------------------------------------
  // Private
  // -----------------------------------------------------------------------
  /** Get the currently active fanout for a camera. */
  getActiveFanout(cam) {
    if (cam.activeSourceProfile === "sub" && cam.subFanout) {
      return cam.subFanout;
    }
    return cam.mainFanout;
  }
  ensureCameraRunning(name, cam) {
    const activeFanout = this.getActiveFanout(cam);
    if (activeFanout.isRunning()) return;
    this.logger.info(`[adaptive-server] Starting camera "${name}" (${cam.activeSourceProfile})`);
    if (cam.activeSourceProfile === "sub" && cam.subFfmpegSource) {
      void cam.subFfmpegSource.start();
      cam.subFanout.start();
    } else {
      void cam.mainFfmpegSource.start();
      cam.mainFanout.start();
    }
  }
  scheduleCameraAutoStop(name, cam) {
    if (cam.sessions.size > 0 || this.stopped) return;
    if (cam.autoStopTimer) clearTimeout(cam.autoStopTimer);
    cam.autoStopTimer = setTimeout(async () => {
      cam.autoStopTimer = null;
      if (cam.sessions.size > 0 || this.stopped) return;
      this.logger.info(`[adaptive-server] No viewers for "${name}", stopping ffmpeg`);
      await cam.mainFanout.stop();
      await cam.mainFfmpegSource.stop();
      if (cam.subFanout) await cam.subFanout.stop();
      if (cam.subFfmpegSource) await cam.subFfmpegSource.stop();
    }, 1e4);
  }
  // -----------------------------------------------------------------------
  // Source switching (Phase 5)
  // -----------------------------------------------------------------------
  /**
   * Handle a quality change from the AdaptiveController.
   * When the sourceProfile changes (main ↔ sub), performs a seamless source
   * switch for all active sessions. When only encoding params change (same
   * sourceProfile), updates ffmpeg params in-place.
   */
  async handleQualityChange(cameraName, from, to) {
    const cam = this.cameras.get(cameraName);
    if (!cam) return;
    const sourceChanged = from.sourceProfile !== to.sourceProfile;
    if (sourceChanged) {
      await this.switchSource(cameraName, cam, to);
    } else {
      const activeSource = cam.activeSourceProfile === "sub" ? cam.subFfmpegSource : cam.mainFfmpegSource;
      if (activeSource) {
        await activeSource.updateParams(to.encoding);
      }
    }
    this.emit("quality:change", {
      camera: cameraName,
      tier: to.tier,
      encoding: to.encoding,
      sourceProfile: to.sourceProfile
    });
  }
  /**
   * Switch all active sessions from one source to another (main ↔ sub).
   *
   * Steps:
   * 1. Create/start the target ffmpeg source + fanout
   * 2. For each session: subscribe to new fanout, call replaceSource()
   * 3. Unsubscribe all from old fanout
   * 4. Stop old ffmpeg + fanout (save resources)
   * 5. Update activeSourceProfile
   */
  async switchSource(cameraName, cam, toProfile) {
    if (cam.switching) {
      this.logger.warn(`[adaptive-server] Source switch already in progress for "${cameraName}", skipping`);
      return;
    }
    cam.switching = true;
    const switchingToSub = toProfile.sourceProfile === "sub";
    this.logger.info(
      `[adaptive-server] Source switch for "${cameraName}": ${cam.activeSourceProfile} \u2192 ${toProfile.sourceProfile}`
    );
    try {
      if (switchingToSub) {
        if (!cam.config.subRtspUrl) {
          this.logger.warn(
            `[adaptive-server] No subRtspUrl configured for "${cameraName}", cannot switch to sub stream \u2014 falling back to param update only`
          );
          await cam.mainFfmpegSource.updateParams(toProfile.encoding);
          return;
        }
        if (!cam.subFfmpegSource) {
          cam.subFfmpegSource = new AdaptiveFfmpegSource({
            rtspUrl: cam.config.subRtspUrl,
            initialParams: toProfile.encoding,
            ffmpegPath: this.ffmpegPath,
            logger: this.logger,
            label: `ffmpeg:${cameraName}:sub`
          });
          cam.subFanout = new StreamFanout({
            maxQueueItems: 30,
            createSource: () => cam.subFfmpegSource.source,
            onError: (err) => {
              this.logger.error(`[adaptive-server] Sub fanout error (${cameraName}):`, err);
            }
          });
        }
        void cam.subFfmpegSource.start();
        cam.subFanout.start();
        for (const [sid, session] of cam.sessions) {
          cam.mainFanout.unsubscribe(sid);
          const newSource = cam.subFanout.subscribe(sid);
          session.replaceSource(newSource);
        }
        await cam.mainFanout.stop();
        await cam.mainFfmpegSource.stop();
        cam.activeSourceProfile = "sub";
      } else {
        cam.mainFfmpegSource = new AdaptiveFfmpegSource({
          rtspUrl: cam.config.rtspUrl,
          initialParams: toProfile.encoding,
          ffmpegPath: this.ffmpegPath,
          logger: this.logger,
          label: `ffmpeg:${cameraName}:main`
        });
        cam.mainFanout = new StreamFanout({
          maxQueueItems: 30,
          createSource: () => cam.mainFfmpegSource.source,
          onError: (err) => {
            this.logger.error(`[adaptive-server] Main fanout error (${cameraName}):`, err);
          }
        });
        void cam.mainFfmpegSource.start();
        cam.mainFanout.start();
        for (const [sid, session] of cam.sessions) {
          if (cam.subFanout) cam.subFanout.unsubscribe(sid);
          const newSource = cam.mainFanout.subscribe(sid);
          session.replaceSource(newSource);
        }
        if (cam.subFanout) await cam.subFanout.stop();
        if (cam.subFfmpegSource) await cam.subFfmpegSource.stop();
        cam.subFfmpegSource = null;
        cam.subFanout = null;
        cam.activeSourceProfile = "main";
      }
      this.logger.info(
        `[adaptive-server] Source switch complete for "${cameraName}": now on ${cam.activeSourceProfile} stream`
      );
    } catch (err) {
      this.logger.error(`[adaptive-server] Source switch failed for "${cameraName}":`, err);
    } finally {
      cam.switching = false;
    }
  }
};
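
The server extends EventEmitter, so the events emitted above ("session:created", "session:closed", "session:stats", "quality:change", "stopped") can be observed directly. A short monitoring sketch, with "driveway" as an assumed camera name:

// Example (editor's sketch):
server.on("quality:change", (e) => {
  console.log(`${e.camera}: ${e.tier} @ ${e.encoding.maxBitrateKbps}kbps (${e.sourceProfile})`);
});
server.on("session:stats", (s) => {
  if (s.packetLoss > 0.05) console.warn(`high loss on ${s.camera}/${s.sessionId}`);
});
server.forceQuality("driveway", "low"); // pin a tier
server.forceQuality("driveway", null);  // back to automatic selection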

// src/addon.ts
var WebrtcAdaptiveAddon = class {
  manifest = {
    id: "webrtc-adaptive",
    name: "Adaptive WebRTC",
    version: "0.1.0",
    description: "Adaptive WebRTC streaming with quality degradation/recovery",
    capabilities: [{ name: "webrtc", mode: "collection" }]
  };
  server = null;
  currentConfig = {};
  async initialize(context) {
    this.currentConfig = {
      ffmpegPath: context.addonConfig.ffmpegPath ?? "ffmpeg",
      logger: context.logger
    };
    this.server = new AdaptiveStreamServer(this.currentConfig);
    context.logger.info("WebRTC Adaptive streaming initialized");
  }
  async shutdown() {
    this.server = null;
  }
  getCapabilityProvider(name) {
    if (name === "webrtc" && this.server) {
      return this.server;
    }
    return null;
  }
  getConfigSchema() {
    return { sections: [] };
  }
  getConfig() {
    return { ...this.currentConfig };
  }
  async onConfigChange(config) {
    this.currentConfig = {
      ...this.currentConfig,
      ffmpegPath: config.ffmpegPath ?? this.currentConfig.ffmpegPath
    };
  }
  getServer() {
    return this.server;
  }
};
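
A sketch of how a host might bootstrap the addon. The context shape is inferred from initialize() above (addonConfig plus logger); passing console as the logger is an assumption that it satisfies the expected interface.

// Example (editor's sketch):
const { WebrtcAdaptiveAddon } = require("@camstack/addon-webrtc-adaptive");

(async () => {
  const addon = new WebrtcAdaptiveAddon();
  await addon.initialize({ addonConfig: { ffmpegPath: "ffmpeg" }, logger: console });
  const server = addon.getCapabilityProvider("webrtc"); // the AdaptiveStreamServer
  // ...register cameras on `server`, then eventually:
  await addon.shutdown();
})();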

// src/ffmpeg-process.ts
var import_node_child_process2 = require("child_process");
var FfmpegProcess = class {
  constructor(options) {
    this.options = options;
    this.logger = options.logger;
    this.label = options.label ?? "ffmpeg";
  }
  process = null;
  killed = false;
  logger;
  label;
  /** Spawn the FFmpeg process. Returns stdin writable stream. */
  start() {
    if (this.process) {
      throw new Error(`[${this.label}] FFmpeg process already started`);
    }
    const ffmpegPath = this.options.ffmpegPath ?? "ffmpeg";
    const stdio = [
      "pipe",
      "ignore",
      "pipe",
      ...this.options.extraStdio ?? []
    ];
    this.process = (0, import_node_child_process2.spawn)(ffmpegPath, this.options.args, { stdio });
    this.process.on("error", (error) => {
      this.logger?.error(`[${this.label}] Failed to spawn FFmpeg:`, error);
    });
    this.process.on("close", (code, signal) => {
      this.options.onExit?.(code, signal);
    });
    this.process.stderr?.on("data", (data) => {
      const output = data.toString();
      this.options.onStderr?.(output);
    });
    this.process.stdin?.on("error", (error) => {
      const code = error?.code;
      if (code === "EPIPE" || code === "ERR_STREAM_WRITE_AFTER_END") return;
      this.logger?.error(`[${this.label}] FFmpeg stdin error:`, error);
    });
    return this.process.stdin;
  }
  /** Get a specific stdio stream by fd index (e.g. 3 for pipe:3). */
  getStdio(fd) {
    if (!this.process) return null;
    return this.process.stdio?.[fd] ?? null;
  }
  /** Get the underlying ChildProcess. */
  getProcess() {
    return this.process;
  }
  /** Kill the FFmpeg process gracefully (SIGTERM then SIGKILL after timeout). */
  async kill(timeoutMs = 3e3) {
    if (this.killed || !this.process) return;
    this.killed = true;
    const proc = this.process;
    this.process = null;
    try {
      proc.stdin?.end();
    } catch {
    }
    try {
      proc.kill("SIGTERM");
    } catch {
    }
    await new Promise((resolve) => {
      const timer = setTimeout(() => {
        try {
          proc.kill("SIGKILL");
        } catch {
        }
        resolve();
      }, timeoutMs);
      proc.on("close", () => {
        clearTimeout(timer);
        resolve();
      });
    });
  }
  /** Check if the process is running. */
  isRunning() {
    return this.process !== null && !this.killed;
  }
};
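
A sketch using FfmpegProcess directly: copy an RTSP stream to MPEG-TS on an extra pipe (fd 3). The input URL is a placeholder; the option names match the hooks used above.

// Example (editor's sketch):
const { FfmpegProcess } = require("@camstack/addon-webrtc-adaptive");

const proc = new FfmpegProcess({
  args: ["-i", "rtsp://example.local/stream1", "-c", "copy", "-f", "mpegts", "pipe:3"],
  extraStdio: ["pipe"], // becomes fd 3, readable via getStdio(3)
  onStderr: (line) => process.stderr.write(line),
  onExit: (code, signal) => console.log(`ffmpeg exited ${code ?? signal}`),
  logger: console
});
proc.start();
proc.getStdio(3)?.on("data", (chunk) => {
  // chunk carries muxed MPEG-TS bytes
});
// later: await proc.kill(); // SIGTERM, then SIGKILL after 3s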

// src/frame-source.ts
function fromEventEmitter(emitter, videoEvent = "videoFrame", audioEvent = "audioFrame") {
  const queue = [];
  let resolve = null;
  let done = false;
  const onVideo = (frame) => {
    const mf = { type: "video", frame };
    if (resolve) {
      const r = resolve;
      resolve = null;
      r({ value: mf, done: false });
    } else {
      queue.push(mf);
      if (queue.length > 500) queue.splice(0, queue.length - 500);
    }
  };
  const onAudio = (frame) => {
    const mf = { type: "audio", frame };
    if (resolve) {
      const r = resolve;
      resolve = null;
      r({ value: mf, done: false });
    } else {
      queue.push(mf);
      if (queue.length > 500) queue.splice(0, queue.length - 500);
    }
  };
  const cleanup = () => {
    done = true;
    emitter.removeListener(videoEvent, onVideo);
    emitter.removeListener(audioEvent, onAudio);
    if (resolve) {
      const r = resolve;
      resolve = null;
      r({ value: void 0, done: true });
    }
  };
  emitter.on(videoEvent, onVideo);
  emitter.on(audioEvent, onAudio);
  emitter.once("close", cleanup);
  emitter.once("end", cleanup);
  return (async function* () {
    try {
      while (true) {
        const item = queue.shift();
        if (item) {
          yield item;
          continue;
        }
        if (done) return;
        const result = await new Promise((r) => {
          resolve = r;
        });
        if (result.done) return;
        yield result.value;
      }
    } finally {
      cleanup();
    }
  })();
}
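
Usage sketch for fromEventEmitter, assuming a hypothetical emitter that fires "videoFrame"/"audioFrame" events with frame objects of the shape AdaptiveSession consumes:

// Example (editor's sketch):
async function consume(cameraEvents) {
  const source = fromEventEmitter(cameraEvents);
  for await (const mediaFrame of source) {
    // mediaFrame is { type: "video" | "audio", frame }
    console.log(mediaFrame.type, mediaFrame.frame.data.length);
  }
}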
function fromPushCallback() {
  const queue = [];
  let resolve = null;
  let closed = false;
  const push = (mf) => {
    if (closed) return;
    if (resolve) {
      const r = resolve;
      resolve = null;
      r({ value: mf, done: false });
    } else {
      queue.push(mf);
      if (queue.length > 500) queue.splice(0, queue.length - 500);
    }
  };
  const source = (async function* () {
    try {
      while (true) {
        const item = queue.shift();
        if (item) {
          yield item;
          continue;
        }
        if (closed) return;
        const result = await new Promise((r) => {
          resolve = r;
        });
        if (result.done) return;
        yield result.value;
      }
    } finally {
      closed = true;
    }
  })();
  return {
    source,
    pushVideo: (frame) => push({ type: "video", frame }),
    pushAudio: (frame) => push({ type: "audio", frame }),
    close: () => {
      closed = true;
      if (resolve) {
        const r = resolve;
        resolve = null;
        r({ value: void 0, done: true });
      }
    }
  };
}
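
Usage sketch for fromPushCallback, bridging a hypothetical callback-style camera SDK into a frame source an AdaptiveSession can consume:

// Example (editor's sketch); `sdk` and its hooks are hypothetical.
const { source, pushVideo, pushAudio, close } = fromPushCallback();
sdk.onVideo((data, isKeyframe, ts) =>
  pushVideo({ data, codec: "H264", isKeyframe, timestampMicros: ts }));
sdk.onAudio((data, ts) =>
  pushAudio({ data, codec: "Pcmu", sampleRate: 8000, channels: 1, timestampMicros: ts }));
// Hand `source` to an AdaptiveSession; call close() to end the stream.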
function fromNativeStream(native) {
  return (async function* () {
    for await (const frame of native) {
      if (frame.audio) {
        yield {
          type: "audio",
          frame: {
            data: frame.data,
            codec: frame.codec === "aac" ? "Aac" : "Adpcm",
            sampleRate: frame.sampleRate ?? 8e3,
            channels: 1,
            timestampMicros: frame.microseconds ?? Date.now() * 1e3
          }
        };
      } else {
        yield {
          type: "video",
          frame: {
            data: frame.data,
            codec: frame.videoType ?? "H264",
            isKeyframe: frame.isKeyframe ?? false,
            timestampMicros: frame.microseconds ?? Date.now() * 1e3
          }
        };
      }
    }
  })();
}

// src/rtsp-relay.ts
var import_node_child_process3 = require("child_process");
var AdaptiveRtspRelay = class {
  rtspUrl;
  rtspOutputUrl;
  ffmpegPath;
  logger;
  label;
  currentParams;
  proc = null;
  closed = false;
  constructor(options) {
    this.rtspUrl = options.rtspUrl;
    this.rtspOutputUrl = options.rtspOutputUrl;
    this.ffmpegPath = options.ffmpegPath ?? "ffmpeg";
    this.logger = options.logger;
    this.label = options.label ?? "adaptive-rtsp";
    this.currentParams = { ...options.initialParams };
  }
  getParams() {
    return { ...this.currentParams };
  }
  /** Start the ffmpeg relay. */
  start() {
    if (this.closed) return;
    this.spawnFfmpeg();
  }
  /** Hot-swap encoding parameters by restarting ffmpeg. */
  async updateParams(params) {
    const prev = { ...this.currentParams };
    if (params.maxBitrateKbps !== void 0) this.currentParams.maxBitrateKbps = params.maxBitrateKbps;
    if (params.width !== void 0) this.currentParams.width = params.width;
    if (params.height !== void 0) this.currentParams.height = params.height;
    if (params.preset !== void 0) this.currentParams.preset = params.preset;
    if (prev.maxBitrateKbps === this.currentParams.maxBitrateKbps && prev.width === this.currentParams.width && prev.height === this.currentParams.height) return;
    this.logger?.info(
      `[${this.label}] Updating: ${prev.maxBitrateKbps}kbps ${prev.width}x${prev.height} \u2192 ${this.currentParams.maxBitrateKbps}kbps ${this.currentParams.width}x${this.currentParams.height}`
    );
    await this.killFfmpeg();
    if (!this.closed) this.spawnFfmpeg();
  }
  /** Stop the relay. */
  async stop() {
    if (this.closed) return;
    this.closed = true;
    await this.killFfmpeg();
  }
  /** Check if ffmpeg is running. */
  isRunning() {
    return this.proc !== null;
  }
  // -----------------------------------------------------------------------
  // Private
  // -----------------------------------------------------------------------
  spawnFfmpeg() {
    const { maxBitrateKbps, width, height, preset } = this.currentParams;
    const args = [
      "-hide_banner",
      "-loglevel",
      "error",
      // Input
      "-rtsp_transport",
      "tcp",
      "-i",
      this.rtspUrl,
      // Video encoding
      "-c:v",
      "libx264",
      "-preset",
      preset ?? "ultrafast",
      "-tune",
      "zerolatency",
      "-crf",
      "28",
      "-maxrate",
      `${maxBitrateKbps}k`,
      "-bufsize",
      `${Math.round(maxBitrateKbps * 0.5)}k`,
      "-g",
      "50",
      "-keyint_min",
      "25"
    ];
    if (width > 0 && height > 0) {
      args.push("-vf", `scale=${width}:${height}`);
    }
    args.push("-c:a", "aac", "-b:a", "64k");
    args.push(
      "-f",
      "rtsp",
      "-rtsp_transport",
      "tcp",
      this.rtspOutputUrl
    );
    this.proc = (0, import_node_child_process3.spawn)(this.ffmpegPath, args, {
      stdio: ["ignore", "ignore", "pipe"]
    });
    this.proc.on("error", (err) => {
      this.logger?.error(`[${this.label}] ffmpeg spawn error: ${err.message}`);
    });
    this.proc.on("close", (code, signal) => {
      this.logger?.debug(`[${this.label}] ffmpeg exited code=${code} signal=${signal}`);
      this.proc = null;
      if (!this.closed) {
|
2882
|
+
setTimeout(() => {
|
|
2883
|
+
if (!this.closed) this.spawnFfmpeg();
|
|
2884
|
+
}, 2e3);
|
|
2885
|
+
}
|
|
2886
|
+
});
|
|
2887
|
+
this.proc.stderr?.on("data", (data) => {
|
|
2888
|
+
const s = data.toString();
|
|
2889
|
+
if (s.includes("error") || s.includes("Error") || s.includes("fatal")) {
|
|
2890
|
+
this.logger?.error(`[${this.label}] ffmpeg: ${s.trim()}`);
|
|
2891
|
+
}
|
|
2892
|
+
});
|
|
2893
|
+
this.logger?.info(
|
|
2894
|
+
`[${this.label}] Started: ${maxBitrateKbps}kbps ` + (width > 0 ? `${width}x${height}` : "native") + ` \u2192 ${this.rtspOutputUrl}`
|
|
2895
|
+
);
|
|
2896
|
+
}
|
|
2897
|
+
async killFfmpeg() {
|
|
2898
|
+
const proc = this.proc;
|
|
2899
|
+
if (!proc) return;
|
|
2900
|
+
this.proc = null;
|
|
2901
|
+
try {
|
|
2902
|
+
proc.kill("SIGTERM");
|
|
2903
|
+
} catch {
|
|
2904
|
+
}
|
|
2905
|
+
await new Promise((resolve) => {
|
|
2906
|
+
const timer = setTimeout(() => {
|
|
2907
|
+
try {
|
|
2908
|
+
proc.kill("SIGKILL");
|
|
2909
|
+
} catch {
|
|
2910
|
+
}
|
|
2911
|
+
resolve();
|
|
2912
|
+
}, 3e3);
|
|
2913
|
+
proc.on("close", () => {
|
|
2914
|
+
clearTimeout(timer);
|
|
2915
|
+
resolve();
|
|
2916
|
+
});
|
|
2917
|
+
});
|
|
2918
|
+
}
|
|
2919
|
+
};
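// --- Illustrative sketch (editorial addition, not part of the published
// bundle) --------------------------------------------------------------
// The relay applies new encoding parameters by restarting ffmpeg;
// updateParams() resolves only after the old process exits (SIGTERM first,
// SIGKILL after 3 s). URLs and values below are placeholders.
async function exampleRelay() {
  const relay = new AdaptiveRtspRelay({
    rtspUrl: "rtsp://camera.local/stream",
    rtspOutputUrl: "rtsp://127.0.0.1:8554/relayed",
    initialParams: { maxBitrateKbps: 2e3, width: 1280, height: 720, preset: "veryfast" }
  });
  relay.start();
  // Congestion detected upstream: step down to 480p at 600 kbps.
  await relay.updateParams({ maxBitrateKbps: 600, width: 854, height: 480 });
  await relay.stop();
}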

// src/shared-session.ts
var _werift2;
async function loadWerift2() {
  if (_werift2) return _werift2;
  const moduleName = "werift";
  _werift2 = await Function("m", "return import(m)")(moduleName);
  return _werift2;
}
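// Note (editorial): routing the dynamic import through the Function
// constructor is a common idiom to stop CJS transpilers from rewriting
// `import()` into `require()`; werift is then loaded lazily, on first use.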
var SharedSession = class {
  constructor(options) {
    this.options = options;
    this.logger = options.logger;
    this.iceConfig = options.iceConfig;
    this.onTrackRequested = options.onTrackRequested;
    this.onTrackReleased = options.onTrackReleased;
  }
  logger;
  iceConfig;
  onTrackRequested;
  onTrackReleased;
  pc = null;
  dataChannel = null;
  activeTracks = /* @__PURE__ */ new Map();
  closed = false;
  negotiating = false;
  /** Create the initial SDP offer (with data channel, no media tracks yet). */
  async createOffer() {
    const werift = await loadWerift2();
    const iceServers = [];
    if (this.iceConfig?.stunServers) {
      for (const url of this.iceConfig.stunServers) iceServers.push({ urls: url });
    }
    if (this.iceConfig?.turnServers) {
      for (const t of this.iceConfig.turnServers) {
        iceServers.push({ urls: t.urls, username: t.username, credential: t.credential });
      }
    }
    const pcOptions = {
      codecs: {
        video: [
          new werift.RTCRtpCodecParameters({
            mimeType: "video/H264",
            clockRate: 9e4,
            payloadType: 96,
            parameters: "level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42e01f",
            rtcpFeedback: [
              { type: "transport-cc" },
              { type: "ccm", parameter: "fir" },
              { type: "nack" },
              { type: "nack", parameter: "pli" },
              { type: "goog-remb" }
            ]
          })
        ],
        audio: [
          new werift.RTCRtpCodecParameters({
            mimeType: "audio/opus",
            clockRate: 48e3,
            payloadType: 111,
            channels: 2,
            parameters: "minptime=10;useinbandfec=1"
          })
        ]
      }
    };
    if (iceServers.length > 0) pcOptions.iceServers = iceServers;
    if (this.iceConfig?.portRange) pcOptions.icePortRange = this.iceConfig.portRange;
    if (this.iceConfig?.additionalHostAddresses) {
      pcOptions.iceAdditionalHostAddresses = this.iceConfig.additionalHostAddresses;
    }
    this.pc = new werift.RTCPeerConnection(pcOptions);
    this.pc.iceConnectionStateChange.subscribe((state) => {
      this.logger.debug(`[shared] ICE: ${state}`);
    });
    this.dataChannel = this.pc.createDataChannel("control", { ordered: true });
    this.dataChannel.message.subscribe((msg) => {
      try {
        const data = JSON.parse(typeof msg === "string" ? msg : msg.toString());
        this.handleDataChannelMessage(data);
      } catch (err) {
        this.logger.error("[shared] DC message parse error:", err);
      }
    });
    const offer = await this.pc.createOffer();
    await this.pc.setLocalDescription(offer);
    await new Promise((resolve) => {
      if (this.pc.iceGatheringState === "complete") {
        resolve();
        return;
      }
      this.pc.iceGatheringStateChange.subscribe((state) => {
        if (state === "complete") resolve();
      });
      setTimeout(resolve, 5e3);
    });
    const sdp = this.pc.localDescription?.sdp ?? offer.sdp;
    this.logger.info("[shared] Initial offer created (data channel only)");
    return sdp;
  }
  /** Handle the client's SDP answer. */
  async handleAnswer(sdpAnswer) {
    const werift = await loadWerift2();
    const desc = new werift.RTCSessionDescription(sdpAnswer, "answer");
    await this.pc.setRemoteDescription(desc);
    this.logger.info("[shared] Answer set, connection ready");
  }
  /** Close the shared session and all tracks. */
  async close() {
    if (this.closed) return;
    this.closed = true;
    for (const [, track] of this.activeTracks) {
      track.feedAbort.abort();
      this.onTrackReleased?.(track.cameraName);
    }
    this.activeTracks.clear();
    if (this.pc) {
      try {
        await this.pc.close();
      } catch {
      }
      this.pc = null;
    }
    this.logger.info("[shared] Session closed");
  }
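  // Note on the flow above: createOffer() waits for ICE gathering to reach
  // "complete" but caps the wait at 5 s, so the returned SDP may carry a
  // partial candidate set. The initial offer intentionally contains only the
  // "control" data channel; media transceivers are added per track in
  // handleAddTrack() below.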
  // -----------------------------------------------------------------------
  // Data channel message handling
  // -----------------------------------------------------------------------
  async handleDataChannelMessage(msg) {
    try {
      switch (msg.type) {
        case "addTrack":
          await this.handleAddTrack(msg.cameraName, msg.trackId);
          break;
        case "removeTrack":
          await this.handleRemoveTrack(msg.trackId);
          break;
        case "answer":
          await this.handleRenegotiationAnswer(msg.sdp);
          break;
        default:
          this.logger.warn("[shared] Unknown DC message type:", msg.type);
      }
    } catch (err) {
      this.logger.error("[shared] DC handler error:", err);
      this.sendDC({ type: "error", message: err.message });
    }
  }
  async handleAddTrack(cameraName, trackId) {
    if (this.activeTracks.has(trackId)) {
      this.sendDC({ type: "error", message: `Track ${trackId} already exists` });
      return;
    }
    const werift = await loadWerift2();
    const videoTrack = new werift.MediaStreamTrack({ kind: "video" });
    const videoTransceiver = this.pc.addTransceiver(videoTrack, { direction: "sendonly" });
    const audioTrack = new werift.MediaStreamTrack({ kind: "audio" });
    const audioDirection = this.options.onIntercomAudio ? "sendrecv" : "sendonly";
    const audioTransceiver = this.pc.addTransceiver(audioTrack, { direction: audioDirection });
    if (this.options.onIntercomAudio) {
      const intercomCb = this.options.onIntercomAudio;
      audioTransceiver.onTrack.subscribe((incomingTrack) => {
        incomingTrack.onReceiveRtp.subscribe((rtpPacket) => {
          const payload = rtpPacket.payload;
          if (payload?.length > 0) {
            intercomCb(cameraName, payload);
          }
        });
      });
    }
    await this.renegotiate();
    const videoMid = videoTransceiver.mid;
    const audioMid = audioTransceiver.mid;
    const source = this.onTrackRequested(cameraName);
    if (!source) {
      this.sendDC({ type: "error", message: `Camera not found: ${cameraName}` });
      return;
    }
    const feedAbort = new AbortController();
    const activeTrack = {
      trackId,
      cameraName,
      videoSender: videoTransceiver.sender,
      audioSender: audioTransceiver.sender,
      feedAbort,
      videoSeqNum: 0,
      audioSeqNum: 0
    };
    this.activeTracks.set(trackId, activeTrack);
    this.sendDC({ type: "trackReady", trackId, videoMid, audioMid });
    this.startFeeding(activeTrack, source);
    this.logger.info(`[shared] Track "${trackId}" added for camera "${cameraName}" (video=${videoMid}, audio=${audioMid})`);
  }
  async handleRemoveTrack(trackId) {
    const track = this.activeTracks.get(trackId);
    if (!track) return;
    track.feedAbort.abort();
    this.activeTracks.delete(trackId);
    this.onTrackReleased?.(track.cameraName);
    await this.renegotiate();
    this.sendDC({ type: "trackRemoved", trackId });
    this.logger.info(`[shared] Track "${trackId}" removed`);
  }
  async handleRenegotiationAnswer(sdp) {
    const werift = await loadWerift2();
    const desc = new werift.RTCSessionDescription(sdp, "answer");
    await this.pc.setRemoteDescription(desc);
    this.negotiating = false;
    this.logger.debug("[shared] Renegotiation answer set");
  }
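  // Control-channel protocol implemented above (client -> server):
  //   { type: "addTrack", cameraName, trackId }
  //   { type: "removeTrack", trackId }
  //   { type: "answer", sdp }
  // Server -> client replies:
  //   { type: "offer", sdp }                               renegotiation offer
  //   { type: "trackReady", trackId, videoMid, audioMid }
  //   { type: "trackRemoved", trackId }
  //   { type: "error", message }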
  // -----------------------------------------------------------------------
  // SDP renegotiation
  // -----------------------------------------------------------------------
  async renegotiate() {
    if (!this.pc || !this.dataChannel) return;
    this.negotiating = true;
    const offer = await this.pc.createOffer();
    await this.pc.setLocalDescription(offer);
    await new Promise((resolve) => {
      if (this.pc.iceGatheringState === "complete") {
        resolve();
        return;
      }
      this.pc.iceGatheringStateChange.subscribe((state) => {
        if (state === "complete") resolve();
      });
      setTimeout(resolve, 3e3);
    });
    const sdp = this.pc.localDescription?.sdp ?? offer.sdp;
    this.sendDC({ type: "offer", sdp });
    await new Promise((resolve) => {
      const check = setInterval(() => {
        if (!this.negotiating) {
          clearInterval(check);
          resolve();
        }
      }, 50);
      setTimeout(() => {
        clearInterval(check);
        resolve();
      }, 1e4);
    });
  }
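  // Note: renegotiate() does not await the answer directly; it sends the
  // offer over the data channel, then polls the `negotiating` flag (cleared
  // by handleRenegotiationAnswer) every 50 ms, giving up after 10 s. ICE
  // regathering is likewise capped at 3 s.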
  // -----------------------------------------------------------------------
  // Frame feeding
  // -----------------------------------------------------------------------
  startFeeding(track, source) {
    const { signal } = track.feedAbort;
    const werift = _werift2;
    if (!werift) return;
    void (async () => {
      let gotKeyframe = false;
      let videoTimestampBase = null;
      let audioTimestampBase = null;
      try {
        for await (const mediaFrame of source) {
          if (signal.aborted || this.closed) break;
          if (mediaFrame.type === "audio") {
            const frame2 = mediaFrame.frame;
            if (audioTimestampBase === null) audioTimestampBase = frame2.timestampMicros;
            const rtpTs2 = Math.floor(
              (frame2.timestampMicros - audioTimestampBase) * (frame2.sampleRate || 48e3) / 1e6
            ) >>> 0;
            track.audioSeqNum = track.audioSeqNum + 1 & 65535;
            const header = new werift.RtpHeader();
            header.payloadType = 111;
            header.timestamp = rtpTs2;
            header.marker = true;
            header.sequenceNumber = track.audioSeqNum;
            const pkt = new werift.RtpPacket(header, frame2.data);
            try {
              track.audioSender.sendRtp(pkt.serialize());
            } catch {
            }
            continue;
          }
          if (mediaFrame.type !== "video") continue;
          const frame = mediaFrame.frame;
          const annexB = convertH264ToAnnexB(frame.data);
          if (!gotKeyframe) {
            if (!isH264IdrAccessUnit(annexB)) continue;
            gotKeyframe = true;
          }
          if (videoTimestampBase === null) videoTimestampBase = frame.timestampMicros;
          const rtpTs = Math.floor(
            (frame.timestampMicros - videoTimestampBase) * 9e4 / 1e6
          ) >>> 0;
          const nals = splitAnnexBToNals(annexB).filter((n) => {
            const t = n[0] & 31;
            return t !== 9 && t !== 6;
          });
          for (let i = 0; i < nals.length; i++) {
            const nal = nals[i];
            const isLastNal = i === nals.length - 1;
            if (nal.length <= 1200) {
              track.videoSeqNum = track.videoSeqNum + 1 & 65535;
              const header = new werift.RtpHeader();
              header.payloadType = 96;
              header.timestamp = rtpTs;
              header.marker = isLastNal;
              header.sequenceNumber = track.videoSeqNum;
              const pkt = new werift.RtpPacket(header, nal);
              try {
                track.videoSender.sendRtp(pkt.serialize());
              } catch {
              }
            } else {
              const nalHeader = nal[0];
              const fnri = nalHeader & 224;
              const nalType = nalHeader & 31;
              const fuIndicator = fnri | 28;
              const nalBody = nal.subarray(1);
              let offset = 0;
              let isFirst = true;
              while (offset < nalBody.length) {
                const end = Math.min(offset + 1198, nalBody.length);
                const isLast = end >= nalBody.length;
                let fuHeader = nalType;
                if (isFirst) fuHeader |= 128;
                if (isLast) fuHeader |= 64;
                const frag = Buffer.alloc(2 + (end - offset));
                frag[0] = fuIndicator;
                frag[1] = fuHeader;
                nalBody.copy(frag, 2, offset, end);
                track.videoSeqNum = track.videoSeqNum + 1 & 65535;
                const header = new werift.RtpHeader();
                header.payloadType = 96;
                header.timestamp = rtpTs;
                header.marker = isLastNal && isLast;
                header.sequenceNumber = track.videoSeqNum;
                const pkt = new werift.RtpPacket(header, frag);
                try {
                  track.videoSender.sendRtp(pkt.serialize());
                } catch {
                }
                offset = end;
                isFirst = false;
              }
            }
          }
        }
      } catch (err) {
        if (!signal.aborted) {
          this.logger.error(`[shared] Feed error for track "${track.trackId}":`, err);
        }
      }
    })();
  }
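  // Packetization above, in numbers: NAL units of 1200 bytes or less are
  // sent as single-NAL RTP payloads. Larger units are fragmented FU-A style
  // (RFC 6184): 1198 payload bytes + 2 bytes of FU indicator/header = 1200
  // bytes per packet, with the S/E bits marking the first and last fragment.
  // Video timestamps use the 90 kHz RTP clock (micros * 90000 / 1e6); audio
  // uses the frame's sample rate (48 kHz default). Sequence numbers wrap at
  // 16 bits via `& 65535`, AUD (type 9) and SEI (type 6) NALs are dropped,
  // and no video is sent until the first IDR access unit arrives.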
  // -----------------------------------------------------------------------
  // Helpers
  // -----------------------------------------------------------------------
  sendDC(msg) {
    if (this.dataChannel?.readyState === "open") {
      this.dataChannel.send(JSON.stringify(msg));
    }
  }
  get isConnected() {
    return this.pc?.iceConnectionState === "connected" && !this.closed;
  }
  get trackCount() {
    return this.activeTracks.size;
  }
};
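// --- Illustrative sketch (editorial addition, not part of the published
// bundle) --------------------------------------------------------------
// One SharedSession multiplexes many camera tracks over a single peer
// connection; the client drives track lifecycle over the "control" data
// channel. `signaling` and `frameSourceFor` are hypothetical.
async function exampleSharedSession(signaling, frameSourceFor) {
  const session = new SharedSession({
    logger: console,
    // Return an async iterable of MediaFrames for the camera, or null if it
    // is unknown (the session then replies with an "error" message).
    onTrackRequested: (cameraName) => frameSourceFor(cameraName),
    onTrackReleased: (cameraName) => console.log("released", cameraName)
  });
  const offerSdp = await session.createOffer();
  const answerSdp = await signaling.exchange(offerSdp); // out-of-band signaling
  await session.handleAnswer(answerSdp);
  // From here the client sends { type: "addTrack", cameraName, trackId } over
  // the data channel and receives trackReady once renegotiation completes.
}
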
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  AdaptiveController,
  AdaptiveFfmpegSource,
  AdaptiveRtspRelay,
  AdaptiveSession,
  AdaptiveStreamServer,
  AsyncBoundedQueue,
  FfmpegProcess,
  H264RtpDepacketizer,
  H265RtpDepacketizer,
  SharedSession,
  StreamFanout,
  WebrtcAdaptiveAddon,
  asLogger,
  convertH264ToAnnexB,
  convertH265ToAnnexB,
  createDefaultProfiles,
  createNullLogger,
  detectVideoCodecFromNal,
  extractH264ParamSets,
  extractH265ParamSets,
  fromEventEmitter,
  fromNativeStream,
  fromPushCallback,
  getH265NalType,
  hasStartCodes,
  isH264IdrAccessUnit,
  isH264KeyframeAnnexB,
  isH265Irap,
  isH265IrapAccessUnit,
  isH265KeyframeAnnexB,
  joinNalsToAnnexB,
  prependStartCode,
  splitAnnexBToNals
});
//# sourceMappingURL=index.cjs.map