hls.js 1.5.2-0.canary.9924 → 1.5.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/hls-demo.js +0 -5
- package/dist/hls-demo.js.map +1 -1
- package/dist/hls.js +686 -762
- package/dist/hls.js.d.ts +47 -49
- package/dist/hls.js.map +1 -1
- package/dist/hls.light.js +471 -563
- package/dist/hls.light.js.map +1 -1
- package/dist/hls.light.min.js +1 -1
- package/dist/hls.light.min.js.map +1 -1
- package/dist/hls.light.mjs +329 -409
- package/dist/hls.light.mjs.map +1 -1
- package/dist/hls.min.js +1 -1
- package/dist/hls.min.js.map +1 -1
- package/dist/hls.mjs +500 -559
- package/dist/hls.mjs.map +1 -1
- package/dist/hls.worker.js +1 -1
- package/dist/hls.worker.js.map +1 -1
- package/package.json +9 -9
- package/src/config.ts +2 -3
- package/src/controller/abr-controller.ts +22 -23
- package/src/controller/audio-stream-controller.ts +14 -11
- package/src/controller/audio-track-controller.ts +1 -1
- package/src/controller/base-playlist-controller.ts +7 -7
- package/src/controller/base-stream-controller.ts +29 -42
- package/src/controller/buffer-controller.ts +11 -10
- package/src/controller/cap-level-controller.ts +2 -1
- package/src/controller/content-steering-controller.ts +6 -8
- package/src/controller/eme-controller.ts +22 -9
- package/src/controller/error-controller.ts +8 -6
- package/src/controller/fps-controller.ts +3 -2
- package/src/controller/gap-controller.ts +10 -16
- package/src/controller/latency-controller.ts +11 -9
- package/src/controller/level-controller.ts +19 -8
- package/src/controller/stream-controller.ts +29 -20
- package/src/controller/subtitle-stream-controller.ts +14 -13
- package/src/controller/subtitle-track-controller.ts +3 -5
- package/src/controller/timeline-controller.ts +30 -23
- package/src/crypt/aes-crypto.ts +2 -21
- package/src/crypt/decrypter.ts +18 -32
- package/src/crypt/fast-aes-key.ts +5 -24
- package/src/demux/audio/adts.ts +4 -9
- package/src/demux/sample-aes.ts +0 -2
- package/src/demux/transmuxer-interface.ts +12 -4
- package/src/demux/transmuxer-worker.ts +4 -4
- package/src/demux/transmuxer.ts +3 -16
- package/src/demux/tsdemuxer.ts +17 -12
- package/src/hls.ts +20 -32
- package/src/loader/fragment-loader.ts +2 -9
- package/src/loader/key-loader.ts +0 -2
- package/src/loader/level-key.ts +9 -10
- package/src/remux/mp4-remuxer.ts +3 -4
- package/src/task-loop.ts +2 -5
- package/src/types/demuxer.ts +0 -1
- package/src/utils/codecs.ts +4 -33
- package/src/utils/logger.ts +24 -53
- package/src/crypt/decrypter-aes-mode.ts +0 -4
- package/src/utils/encryption-methods-util.ts +0 -21
package/dist/hls.light.mjs
CHANGED
@@ -369,23 +369,6 @@ let ErrorDetails = /*#__PURE__*/function (ErrorDetails) {
|
|
369
369
|
return ErrorDetails;
|
370
370
|
}({});
|
371
371
|
|
372
|
-
class Logger {
|
373
|
-
constructor(label, logger) {
|
374
|
-
this.trace = void 0;
|
375
|
-
this.debug = void 0;
|
376
|
-
this.log = void 0;
|
377
|
-
this.warn = void 0;
|
378
|
-
this.info = void 0;
|
379
|
-
this.error = void 0;
|
380
|
-
const lb = `[${label}]:`;
|
381
|
-
this.trace = noop;
|
382
|
-
this.debug = logger.debug.bind(null, lb);
|
383
|
-
this.log = logger.log.bind(null, lb);
|
384
|
-
this.warn = logger.warn.bind(null, lb);
|
385
|
-
this.info = logger.info.bind(null, lb);
|
386
|
-
this.error = logger.error.bind(null, lb);
|
387
|
-
}
|
388
|
-
}
|
389
372
|
const noop = function noop() {};
|
390
373
|
const fakeLogger = {
|
391
374
|
trace: noop,
|
@@ -395,9 +378,7 @@ const fakeLogger = {
|
|
395
378
|
info: noop,
|
396
379
|
error: noop
|
397
380
|
};
|
398
|
-
|
399
|
-
return _extends({}, fakeLogger);
|
400
|
-
}
|
381
|
+
let exportedLogger = fakeLogger;
|
401
382
|
|
402
383
|
// let lastCallTime;
|
403
384
|
// function formatMsgWithTimeInfo(type, msg) {
|
@@ -408,36 +389,35 @@ function createLogger() {
|
|
408
389
|
// return msg;
|
409
390
|
// }
|
410
391
|
|
411
|
-
function consolePrintFn(type
|
392
|
+
function consolePrintFn(type) {
|
412
393
|
const func = self.console[type];
|
413
|
-
|
394
|
+
if (func) {
|
395
|
+
return func.bind(self.console, `[${type}] >`);
|
396
|
+
}
|
397
|
+
return noop;
|
414
398
|
}
|
415
|
-
function
|
416
|
-
|
399
|
+
function exportLoggerFunctions(debugConfig, ...functions) {
|
400
|
+
functions.forEach(function (type) {
|
401
|
+
exportedLogger[type] = debugConfig[type] ? debugConfig[type].bind(debugConfig) : consolePrintFn(type);
|
402
|
+
});
|
417
403
|
}
|
418
|
-
|
419
|
-
function enableLogs(debugConfig, context, id) {
|
404
|
+
function enableLogs(debugConfig, id) {
|
420
405
|
// check that console is available
|
421
|
-
const newLogger = createLogger();
|
422
406
|
if (typeof console === 'object' && debugConfig === true || typeof debugConfig === 'object') {
|
423
|
-
|
407
|
+
exportLoggerFunctions(debugConfig,
|
424
408
|
// Remove out from list here to hard-disable a log-level
|
425
409
|
// 'trace',
|
426
|
-
'debug', 'log', 'info', 'warn', 'error'
|
427
|
-
keys.forEach(key => {
|
428
|
-
newLogger[key] = getLoggerFn(key, debugConfig, id);
|
429
|
-
});
|
410
|
+
'debug', 'log', 'info', 'warn', 'error');
|
430
411
|
// Some browsers don't allow to use bind on console object anyway
|
431
412
|
// fallback to default if needed
|
432
413
|
try {
|
433
|
-
|
414
|
+
exportedLogger.log(`Debug logs enabled for "${id}" in hls.js version ${"1.5.2"}`);
|
434
415
|
} catch (e) {
|
435
|
-
|
436
|
-
return createLogger();
|
416
|
+
exportedLogger = fakeLogger;
|
437
417
|
}
|
418
|
+
} else {
|
419
|
+
exportedLogger = fakeLogger;
|
438
420
|
}
|
439
|
-
exportedLogger = newLogger;
|
440
|
-
return newLogger;
|
441
421
|
}
|
442
422
|
const logger = exportedLogger;
|
443
423
|
|
@@ -1011,26 +991,6 @@ class LevelDetails {
|
|
1011
991
|
}
|
1012
992
|
}
|
1013
993
|
|
1014
|
-
var DecrypterAesMode = {
|
1015
|
-
cbc: 0,
|
1016
|
-
ctr: 1
|
1017
|
-
};
|
1018
|
-
|
1019
|
-
function isFullSegmentEncryption(method) {
|
1020
|
-
return method === 'AES-128' || method === 'AES-256' || method === 'AES-256-CTR';
|
1021
|
-
}
|
1022
|
-
function getAesModeFromFullSegmentMethod(method) {
|
1023
|
-
switch (method) {
|
1024
|
-
case 'AES-128':
|
1025
|
-
case 'AES-256':
|
1026
|
-
return DecrypterAesMode.cbc;
|
1027
|
-
case 'AES-256-CTR':
|
1028
|
-
return DecrypterAesMode.ctr;
|
1029
|
-
default:
|
1030
|
-
throw new Error(`invalid full segment method ${method}`);
|
1031
|
-
}
|
1032
|
-
}
|
1033
|
-
|
1034
994
|
// This file is inserted as a shim for modules which we do not want to include into the distro.
|
1035
995
|
// This replacement is done in the "alias" plugin of the rollup config.
|
1036
996
|
var empty = undefined;
|
@@ -2459,12 +2419,12 @@ class LevelKey {
|
|
2459
2419
|
this.keyFormatVersions = formatversions;
|
2460
2420
|
this.iv = iv;
|
2461
2421
|
this.encrypted = method ? method !== 'NONE' : false;
|
2462
|
-
this.isCommonEncryption = this.encrypted &&
|
2422
|
+
this.isCommonEncryption = this.encrypted && method !== 'AES-128';
|
2463
2423
|
}
|
2464
2424
|
isSupported() {
|
2465
2425
|
// If it's Segment encryption or No encryption, just select that key system
|
2466
2426
|
if (this.method) {
|
2467
|
-
if (
|
2427
|
+
if (this.method === 'AES-128' || this.method === 'NONE') {
|
2468
2428
|
return true;
|
2469
2429
|
}
|
2470
2430
|
if (this.keyFormat === 'identity') {
|
@@ -2478,13 +2438,14 @@ class LevelKey {
|
|
2478
2438
|
if (!this.encrypted || !this.uri) {
|
2479
2439
|
return null;
|
2480
2440
|
}
|
2481
|
-
if (
|
2441
|
+
if (this.method === 'AES-128' && this.uri && !this.iv) {
|
2482
2442
|
if (typeof sn !== 'number') {
|
2483
2443
|
// We are fetching decryption data for a initialization segment
|
2484
|
-
// If the segment was encrypted with AES-128
|
2444
|
+
// If the segment was encrypted with AES-128
|
2485
2445
|
// It must have an IV defined. We cannot substitute the Segment Number in.
|
2486
|
-
|
2487
|
-
|
2446
|
+
if (this.method === 'AES-128' && !this.iv) {
|
2447
|
+
logger.warn(`missing IV for initialization segment with method="${this.method}" - compliance issue`);
|
2448
|
+
}
|
2488
2449
|
// Explicitly set sn to resulting value from implicit conversions 'initSegment' values for IV generation.
|
2489
2450
|
sn = 0;
|
2490
2451
|
}
|
@@ -2631,28 +2592,23 @@ function getCodecCompatibleNameLower(lowerCaseCodec, preferManagedMediaSource =
|
|
2631
2592
|
if (CODEC_COMPATIBLE_NAMES[lowerCaseCodec]) {
|
2632
2593
|
return CODEC_COMPATIBLE_NAMES[lowerCaseCodec];
|
2633
2594
|
}
|
2595
|
+
|
2596
|
+
// Idealy fLaC and Opus would be first (spec-compliant) but
|
2597
|
+
// some browsers will report that fLaC is supported then fail.
|
2598
|
+
// see: https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
|
2634
2599
|
const codecsToCheck = {
|
2635
|
-
// Idealy fLaC and Opus would be first (spec-compliant) but
|
2636
|
-
// some browsers will report that fLaC is supported then fail.
|
2637
|
-
// see: https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
|
2638
2600
|
flac: ['flac', 'fLaC', 'FLAC'],
|
2639
|
-
opus: ['opus', 'Opus']
|
2640
|
-
// Replace audio codec info if browser does not support mp4a.40.34,
|
2641
|
-
// and demuxer can fallback to 'audio/mpeg' or 'audio/mp4;codecs="mp3"'
|
2642
|
-
'mp4a.40.34': ['mp3']
|
2601
|
+
opus: ['opus', 'Opus']
|
2643
2602
|
}[lowerCaseCodec];
|
2644
2603
|
for (let i = 0; i < codecsToCheck.length; i++) {
|
2645
|
-
var _getMediaSource;
|
2646
2604
|
if (isCodecMediaSourceSupported(codecsToCheck[i], 'audio', preferManagedMediaSource)) {
|
2647
2605
|
CODEC_COMPATIBLE_NAMES[lowerCaseCodec] = codecsToCheck[i];
|
2648
2606
|
return codecsToCheck[i];
|
2649
|
-
} else if (codecsToCheck[i] === 'mp3' && (_getMediaSource = getMediaSource(preferManagedMediaSource)) != null && _getMediaSource.isTypeSupported('audio/mpeg')) {
|
2650
|
-
return '';
|
2651
2607
|
}
|
2652
2608
|
}
|
2653
2609
|
return lowerCaseCodec;
|
2654
2610
|
}
|
2655
|
-
const AUDIO_CODEC_REGEXP = /flac|opus
|
2611
|
+
const AUDIO_CODEC_REGEXP = /flac|opus/i;
|
2656
2612
|
function getCodecCompatibleName(codec, preferManagedMediaSource = true) {
|
2657
2613
|
return codec.replace(AUDIO_CODEC_REGEXP, m => getCodecCompatibleNameLower(m.toLowerCase(), preferManagedMediaSource));
|
2658
2614
|
}
|
@@ -2675,16 +2631,6 @@ function convertAVC1ToAVCOTI(codec) {
|
|
2675
2631
|
}
|
2676
2632
|
return codec;
|
2677
2633
|
}
|
2678
|
-
function getM2TSSupportedAudioTypes(preferManagedMediaSource) {
|
2679
|
-
const MediaSource = getMediaSource(preferManagedMediaSource) || {
|
2680
|
-
isTypeSupported: () => false
|
2681
|
-
};
|
2682
|
-
return {
|
2683
|
-
mpeg: MediaSource.isTypeSupported('audio/mpeg'),
|
2684
|
-
mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
|
2685
|
-
ac3: false
|
2686
|
-
};
|
2687
|
-
}
|
2688
2634
|
|
2689
2635
|
const MASTER_PLAYLIST_REGEX = /#EXT-X-STREAM-INF:([^\r\n]*)(?:[\r\n](?:#[^\r\n]*)?)*([^\r\n]+)|#EXT-X-(SESSION-DATA|SESSION-KEY|DEFINE|CONTENT-STEERING|START):([^\r\n]*)[\r\n]+/g;
|
2690
2636
|
const MASTER_PLAYLIST_MEDIA_REGEX = /#EXT-X-MEDIA:(.*)/g;
|
@@ -4262,47 +4208,7 @@ class LatencyController {
|
|
4262
4208
|
this.currentTime = 0;
|
4263
4209
|
this.stallCount = 0;
|
4264
4210
|
this._latency = null;
|
4265
|
-
this.
|
4266
|
-
const {
|
4267
|
-
media,
|
4268
|
-
levelDetails
|
4269
|
-
} = this;
|
4270
|
-
if (!media || !levelDetails) {
|
4271
|
-
return;
|
4272
|
-
}
|
4273
|
-
this.currentTime = media.currentTime;
|
4274
|
-
const latency = this.computeLatency();
|
4275
|
-
if (latency === null) {
|
4276
|
-
return;
|
4277
|
-
}
|
4278
|
-
this._latency = latency;
|
4279
|
-
|
4280
|
-
// Adapt playbackRate to meet target latency in low-latency mode
|
4281
|
-
const {
|
4282
|
-
lowLatencyMode,
|
4283
|
-
maxLiveSyncPlaybackRate
|
4284
|
-
} = this.config;
|
4285
|
-
if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1 || !levelDetails.live) {
|
4286
|
-
return;
|
4287
|
-
}
|
4288
|
-
const targetLatency = this.targetLatency;
|
4289
|
-
if (targetLatency === null) {
|
4290
|
-
return;
|
4291
|
-
}
|
4292
|
-
const distanceFromTarget = latency - targetLatency;
|
4293
|
-
// Only adjust playbackRate when within one target duration of targetLatency
|
4294
|
-
// and more than one second from under-buffering.
|
4295
|
-
// Playback further than one target duration from target can be considered DVR playback.
|
4296
|
-
const liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration);
|
4297
|
-
const inLiveRange = distanceFromTarget < liveMinLatencyDuration;
|
4298
|
-
if (inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) {
|
4299
|
-
const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
|
4300
|
-
const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20;
|
4301
|
-
media.playbackRate = Math.min(max, Math.max(1, rate));
|
4302
|
-
} else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
|
4303
|
-
media.playbackRate = 1;
|
4304
|
-
}
|
4305
|
-
};
|
4211
|
+
this.timeupdateHandler = () => this.timeupdate();
|
4306
4212
|
this.hls = hls;
|
4307
4213
|
this.config = hls.config;
|
4308
4214
|
this.registerListeners();
|
@@ -4394,7 +4300,7 @@ class LatencyController {
|
|
4394
4300
|
this.onMediaDetaching();
|
4395
4301
|
this.levelDetails = null;
|
4396
4302
|
// @ts-ignore
|
4397
|
-
this.hls = null;
|
4303
|
+
this.hls = this.timeupdateHandler = null;
|
4398
4304
|
}
|
4399
4305
|
registerListeners() {
|
4400
4306
|
this.hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
|
@@ -4412,11 +4318,11 @@ class LatencyController {
|
|
4412
4318
|
}
|
4413
4319
|
onMediaAttached(event, data) {
|
4414
4320
|
this.media = data.media;
|
4415
|
-
this.media.addEventListener('timeupdate', this.
|
4321
|
+
this.media.addEventListener('timeupdate', this.timeupdateHandler);
|
4416
4322
|
}
|
4417
4323
|
onMediaDetaching() {
|
4418
4324
|
if (this.media) {
|
4419
|
-
this.media.removeEventListener('timeupdate', this.
|
4325
|
+
this.media.removeEventListener('timeupdate', this.timeupdateHandler);
|
4420
4326
|
this.media = null;
|
4421
4327
|
}
|
4422
4328
|
}
|
@@ -4430,10 +4336,10 @@ class LatencyController {
|
|
4430
4336
|
}) {
|
4431
4337
|
this.levelDetails = details;
|
4432
4338
|
if (details.advanced) {
|
4433
|
-
this.
|
4339
|
+
this.timeupdate();
|
4434
4340
|
}
|
4435
4341
|
if (!details.live && this.media) {
|
4436
|
-
this.media.removeEventListener('timeupdate', this.
|
4342
|
+
this.media.removeEventListener('timeupdate', this.timeupdateHandler);
|
4437
4343
|
}
|
4438
4344
|
}
|
4439
4345
|
onError(event, data) {
|
@@ -4443,7 +4349,48 @@ class LatencyController {
|
|
4443
4349
|
}
|
4444
4350
|
this.stallCount++;
|
4445
4351
|
if ((_this$levelDetails = this.levelDetails) != null && _this$levelDetails.live) {
|
4446
|
-
|
4352
|
+
logger.warn('[playback-rate-controller]: Stall detected, adjusting target latency');
|
4353
|
+
}
|
4354
|
+
}
|
4355
|
+
timeupdate() {
|
4356
|
+
const {
|
4357
|
+
media,
|
4358
|
+
levelDetails
|
4359
|
+
} = this;
|
4360
|
+
if (!media || !levelDetails) {
|
4361
|
+
return;
|
4362
|
+
}
|
4363
|
+
this.currentTime = media.currentTime;
|
4364
|
+
const latency = this.computeLatency();
|
4365
|
+
if (latency === null) {
|
4366
|
+
return;
|
4367
|
+
}
|
4368
|
+
this._latency = latency;
|
4369
|
+
|
4370
|
+
// Adapt playbackRate to meet target latency in low-latency mode
|
4371
|
+
const {
|
4372
|
+
lowLatencyMode,
|
4373
|
+
maxLiveSyncPlaybackRate
|
4374
|
+
} = this.config;
|
4375
|
+
if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1 || !levelDetails.live) {
|
4376
|
+
return;
|
4377
|
+
}
|
4378
|
+
const targetLatency = this.targetLatency;
|
4379
|
+
if (targetLatency === null) {
|
4380
|
+
return;
|
4381
|
+
}
|
4382
|
+
const distanceFromTarget = latency - targetLatency;
|
4383
|
+
// Only adjust playbackRate when within one target duration of targetLatency
|
4384
|
+
// and more than one second from under-buffering.
|
4385
|
+
// Playback further than one target duration from target can be considered DVR playback.
|
4386
|
+
const liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration);
|
4387
|
+
const inLiveRange = distanceFromTarget < liveMinLatencyDuration;
|
4388
|
+
if (inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) {
|
4389
|
+
const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
|
4390
|
+
const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20;
|
4391
|
+
media.playbackRate = Math.min(max, Math.max(1, rate));
|
4392
|
+
} else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
|
4393
|
+
media.playbackRate = 1;
|
4447
4394
|
}
|
4448
4395
|
}
|
4449
4396
|
estimateLiveEdge() {
|
@@ -5215,13 +5162,18 @@ var ErrorActionFlags = {
|
|
5215
5162
|
MoveAllAlternatesMatchingHDCP: 2,
|
5216
5163
|
SwitchToSDR: 4
|
5217
5164
|
}; // Reserved for future use
|
5218
|
-
class ErrorController
|
5165
|
+
class ErrorController {
|
5219
5166
|
constructor(hls) {
|
5220
|
-
super('error-controller', hls.logger);
|
5221
5167
|
this.hls = void 0;
|
5222
5168
|
this.playlistError = 0;
|
5223
5169
|
this.penalizedRenditions = {};
|
5170
|
+
this.log = void 0;
|
5171
|
+
this.warn = void 0;
|
5172
|
+
this.error = void 0;
|
5224
5173
|
this.hls = hls;
|
5174
|
+
this.log = logger.log.bind(logger, `[info]:`);
|
5175
|
+
this.warn = logger.warn.bind(logger, `[warning]:`);
|
5176
|
+
this.error = logger.error.bind(logger, `[error]:`);
|
5225
5177
|
this.registerListeners();
|
5226
5178
|
}
|
5227
5179
|
registerListeners() {
|
@@ -5573,13 +5525,16 @@ class ErrorController extends Logger {
|
|
5573
5525
|
}
|
5574
5526
|
}
|
5575
5527
|
|
5576
|
-
class BasePlaylistController
|
5528
|
+
class BasePlaylistController {
|
5577
5529
|
constructor(hls, logPrefix) {
|
5578
|
-
super(logPrefix, hls.logger);
|
5579
5530
|
this.hls = void 0;
|
5580
5531
|
this.timer = -1;
|
5581
5532
|
this.requestScheduled = -1;
|
5582
5533
|
this.canLoad = false;
|
5534
|
+
this.log = void 0;
|
5535
|
+
this.warn = void 0;
|
5536
|
+
this.log = logger.log.bind(logger, `${logPrefix}:`);
|
5537
|
+
this.warn = logger.warn.bind(logger, `${logPrefix}:`);
|
5583
5538
|
this.hls = hls;
|
5584
5539
|
}
|
5585
5540
|
destroy() {
|
@@ -5612,7 +5567,7 @@ class BasePlaylistController extends Logger {
|
|
5612
5567
|
try {
|
5613
5568
|
uri = new self.URL(attr.URI, previous.url).href;
|
5614
5569
|
} catch (error) {
|
5615
|
-
|
5570
|
+
logger.warn(`Could not construct new URL for Rendition Report: ${error}`);
|
5616
5571
|
uri = attr.URI || '';
|
5617
5572
|
}
|
5618
5573
|
// Use exact match. Otherwise, the last partial match, if any, will be used
|
@@ -6158,9 +6113,8 @@ function getCodecTiers(levels, audioTracksByGroup, minAutoLevel, maxAutoLevel) {
|
|
6158
6113
|
}, {});
|
6159
6114
|
}
|
6160
6115
|
|
6161
|
-
class AbrController
|
6116
|
+
class AbrController {
|
6162
6117
|
constructor(_hls) {
|
6163
|
-
super('abr', _hls.logger);
|
6164
6118
|
this.hls = void 0;
|
6165
6119
|
this.lastLevelLoadSec = 0;
|
6166
6120
|
this.lastLoadedFragLevel = -1;
|
@@ -6226,7 +6180,7 @@ class AbrController extends Logger {
|
|
6226
6180
|
const bwEstimate = this.getBwEstimate();
|
6227
6181
|
const levels = hls.levels;
|
6228
6182
|
const level = levels[frag.level];
|
6229
|
-
const expectedLen = stats.total || Math.max(stats.loaded, Math.round(duration * level.
|
6183
|
+
const expectedLen = stats.total || Math.max(stats.loaded, Math.round(duration * level.averageBitrate / 8));
|
6230
6184
|
let timeStreaming = loadedFirstByte ? timeLoading - ttfb : timeLoading;
|
6231
6185
|
if (timeStreaming < 1 && loadedFirstByte) {
|
6232
6186
|
timeStreaming = Math.min(timeLoading, stats.loaded * 8 / bwEstimate);
|
@@ -6269,12 +6223,12 @@ class AbrController extends Logger {
|
|
6269
6223
|
// If there has been no loading progress, sample TTFB
|
6270
6224
|
this.bwEstimator.sampleTTFB(timeLoading);
|
6271
6225
|
}
|
6272
|
-
const nextLoadLevelBitrate = levels[nextLoadLevel].
|
6226
|
+
const nextLoadLevelBitrate = levels[nextLoadLevel].maxBitrate;
|
6273
6227
|
if (this.getBwEstimate() * this.hls.config.abrBandWidthUpFactor > nextLoadLevelBitrate) {
|
6274
6228
|
this.resetEstimator(nextLoadLevelBitrate);
|
6275
6229
|
}
|
6276
6230
|
this.clearTimer();
|
6277
|
-
|
6231
|
+
logger.warn(`[abr] Fragment ${frag.sn}${part ? ' part ' + part.index : ''} of level ${frag.level} is loading too slowly;
|
6278
6232
|
Time to underbuffer: ${bufferStarvationDelay.toFixed(3)} s
|
6279
6233
|
Estimated load time for current fragment: ${fragLoadedDelay.toFixed(3)} s
|
6280
6234
|
Estimated load time for down switch fragment: ${fragLevelNextLoadedDelay.toFixed(3)} s
|
@@ -6294,7 +6248,7 @@ class AbrController extends Logger {
|
|
6294
6248
|
}
|
6295
6249
|
resetEstimator(abrEwmaDefaultEstimate) {
|
6296
6250
|
if (abrEwmaDefaultEstimate) {
|
6297
|
-
|
6251
|
+
logger.log(`setting initial bwe to ${abrEwmaDefaultEstimate}`);
|
6298
6252
|
this.hls.config.abrEwmaDefaultEstimate = abrEwmaDefaultEstimate;
|
6299
6253
|
}
|
6300
6254
|
this.firstSelection = -1;
|
@@ -6526,7 +6480,7 @@ class AbrController extends Logger {
|
|
6526
6480
|
}
|
6527
6481
|
const firstLevel = this.hls.firstLevel;
|
6528
6482
|
const clamped = Math.min(Math.max(firstLevel, minAutoLevel), maxAutoLevel);
|
6529
|
-
|
6483
|
+
logger.warn(`[abr] Could not find best starting auto level. Defaulting to first in playlist ${firstLevel} clamped to ${clamped}`);
|
6530
6484
|
return clamped;
|
6531
6485
|
}
|
6532
6486
|
get forcedAutoLevel() {
|
@@ -6611,13 +6565,13 @@ class AbrController extends Logger {
|
|
6611
6565
|
// cap maxLoadingDelay and ensure it is not bigger 'than bitrate test' frag duration
|
6612
6566
|
const maxLoadingDelay = currentFragDuration ? Math.min(currentFragDuration, config.maxLoadingDelay) : config.maxLoadingDelay;
|
6613
6567
|
maxStarvationDelay = maxLoadingDelay - bitrateTestDelay;
|
6614
|
-
|
6568
|
+
logger.info(`[abr] bitrate test took ${Math.round(1000 * bitrateTestDelay)}ms, set first fragment max fetchDuration to ${Math.round(1000 * maxStarvationDelay)} ms`);
|
6615
6569
|
// don't use conservative factor on bitrate test
|
6616
6570
|
bwFactor = bwUpFactor = 1;
|
6617
6571
|
}
|
6618
6572
|
}
|
6619
6573
|
const bestLevel = this.findBestLevel(avgbw, minAutoLevel, maxAutoLevel, bufferStarvationDelay, maxStarvationDelay, bwFactor, bwUpFactor);
|
6620
|
-
|
6574
|
+
logger.info(`[abr] ${bufferStarvationDelay ? 'rebuffering expected' : 'buffer is empty'}, optimal quality level ${bestLevel}`);
|
6621
6575
|
if (bestLevel > -1) {
|
6622
6576
|
return bestLevel;
|
6623
6577
|
}
|
@@ -6679,7 +6633,7 @@ class AbrController extends Logger {
|
|
6679
6633
|
currentVideoRange = preferHDR ? videoRanges[videoRanges.length - 1] : videoRanges[0];
|
6680
6634
|
currentFrameRate = minFramerate;
|
6681
6635
|
currentBw = Math.max(currentBw, minBitrate);
|
6682
|
-
|
6636
|
+
logger.log(`[abr] picked start tier ${JSON.stringify(startTier)}`);
|
6683
6637
|
} else {
|
6684
6638
|
currentCodecSet = level == null ? void 0 : level.codecSet;
|
6685
6639
|
currentVideoRange = level == null ? void 0 : level.videoRange;
|
@@ -6732,9 +6686,9 @@ class AbrController extends Logger {
|
|
6732
6686
|
const forcedAutoLevel = this.forcedAutoLevel;
|
6733
6687
|
if (i !== loadLevel && (forcedAutoLevel === -1 || forcedAutoLevel !== loadLevel)) {
|
6734
6688
|
if (levelsSkipped.length) {
|
6735
|
-
|
6689
|
+
logger.trace(`[abr] Skipped level(s) ${levelsSkipped.join(',')} of ${maxAutoLevel} max with CODECS and VIDEO-RANGE:"${levels[levelsSkipped[0]].codecs}" ${levels[levelsSkipped[0]].videoRange}; not compatible with "${level.codecs}" ${currentVideoRange}`);
|
6736
6690
|
}
|
6737
|
-
|
6691
|
+
logger.info(`[abr] switch candidate:${selectionBaseLevel}->${i} adjustedbw(${Math.round(adjustedbw)})-bitrate=${Math.round(adjustedbw - bitrate)} ttfb:${ttfbEstimateSec.toFixed(1)} avgDuration:${avgDuration.toFixed(1)} maxFetchDuration:${maxFetchDuration.toFixed(1)} fetchDuration:${fetchDuration.toFixed(1)} firstSelection:${firstSelection} codecSet:${currentCodecSet} videoRange:${currentVideoRange} hls.loadLevel:${loadLevel}`);
|
6738
6692
|
}
|
6739
6693
|
if (firstSelection) {
|
6740
6694
|
this.firstSelection = i;
|
@@ -6966,9 +6920,8 @@ class BufferOperationQueue {
|
|
6966
6920
|
}
|
6967
6921
|
|
6968
6922
|
const VIDEO_CODEC_PROFILE_REPLACE = /(avc[1234]|hvc1|hev1|dvh[1e]|vp09|av01)(?:\.[^.,]+)+/;
|
6969
|
-
class BufferController
|
6923
|
+
class BufferController {
|
6970
6924
|
constructor(hls) {
|
6971
|
-
super('buffer-controller', hls.logger);
|
6972
6925
|
// The level details used to determine duration, target-duration and live
|
6973
6926
|
this.details = null;
|
6974
6927
|
// cache the self generated object url to detect hijack of video tag
|
@@ -6998,6 +6951,9 @@ class BufferController extends Logger {
|
|
6998
6951
|
this.tracks = {};
|
6999
6952
|
this.pendingTracks = {};
|
7000
6953
|
this.sourceBuffer = void 0;
|
6954
|
+
this.log = void 0;
|
6955
|
+
this.warn = void 0;
|
6956
|
+
this.error = void 0;
|
7001
6957
|
this._onEndStreaming = event => {
|
7002
6958
|
if (!this.hls) {
|
7003
6959
|
return;
|
@@ -7043,11 +6999,15 @@ class BufferController extends Logger {
|
|
7043
6999
|
_objectUrl
|
7044
7000
|
} = this;
|
7045
7001
|
if (mediaSrc !== _objectUrl) {
|
7046
|
-
|
7002
|
+
logger.error(`Media element src was set while attaching MediaSource (${_objectUrl} > ${mediaSrc})`);
|
7047
7003
|
}
|
7048
7004
|
};
|
7049
7005
|
this.hls = hls;
|
7006
|
+
const logPrefix = '[buffer-controller]';
|
7050
7007
|
this.appendSource = hls.config.preferManagedMediaSource;
|
7008
|
+
this.log = logger.log.bind(logger, logPrefix);
|
7009
|
+
this.warn = logger.warn.bind(logger, logPrefix);
|
7010
|
+
this.error = logger.error.bind(logger, logPrefix);
|
7051
7011
|
this._initSourceBuffer();
|
7052
7012
|
this.registerListeners();
|
7053
7013
|
}
|
@@ -7060,12 +7020,6 @@ class BufferController extends Logger {
|
|
7060
7020
|
this.lastMpegAudioChunk = null;
|
7061
7021
|
// @ts-ignore
|
7062
7022
|
this.hls = null;
|
7063
|
-
// @ts-ignore
|
7064
|
-
this._onMediaSourceOpen = this._onMediaSourceClose = null;
|
7065
|
-
// @ts-ignore
|
7066
|
-
this._onMediaSourceEnded = null;
|
7067
|
-
// @ts-ignore
|
7068
|
-
this._onStartStreaming = this._onEndStreaming = null;
|
7069
7023
|
}
|
7070
7024
|
registerListeners() {
|
7071
7025
|
const {
|
@@ -8065,7 +8019,7 @@ class CapLevelController {
|
|
8065
8019
|
const hls = this.hls;
|
8066
8020
|
const maxLevel = this.getMaxLevel(levels.length - 1);
|
8067
8021
|
if (maxLevel !== this.autoLevelCapping) {
|
8068
|
-
|
8022
|
+
logger.log(`Setting autoLevelCapping to ${maxLevel}: ${levels[maxLevel].height}p@${levels[maxLevel].bitrate} for media ${this.mediaWidth}x${this.mediaHeight}`);
|
8069
8023
|
}
|
8070
8024
|
hls.autoLevelCapping = maxLevel;
|
8071
8025
|
if (hls.autoLevelCapping > this.autoLevelCapping && this.streamController) {
|
@@ -8243,10 +8197,10 @@ class FPSController {
|
|
8243
8197
|
totalDroppedFrames: droppedFrames
|
8244
8198
|
});
|
8245
8199
|
if (droppedFPS > 0) {
|
8246
|
-
//
|
8200
|
+
// logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
|
8247
8201
|
if (currentDropped > hls.config.fpsDroppedMonitoringThreshold * currentDecoded) {
|
8248
8202
|
let currentLevel = hls.currentLevel;
|
8249
|
-
|
8203
|
+
logger.warn('drop FPS ratio greater than max allowed value for currentLevel: ' + currentLevel);
|
8250
8204
|
if (currentLevel > 0 && (hls.autoLevelCapping === -1 || hls.autoLevelCapping >= currentLevel)) {
|
8251
8205
|
currentLevel = currentLevel - 1;
|
8252
8206
|
hls.trigger(Events.FPS_DROP_LEVEL_CAPPING, {
|
@@ -8279,10 +8233,10 @@ class FPSController {
|
|
8279
8233
|
}
|
8280
8234
|
|
8281
8235
|
const PATHWAY_PENALTY_DURATION_MS = 300000;
|
8282
|
-
class ContentSteeringController
|
8236
|
+
class ContentSteeringController {
|
8283
8237
|
constructor(hls) {
|
8284
|
-
super('content-steering', hls.logger);
|
8285
8238
|
this.hls = void 0;
|
8239
|
+
this.log = void 0;
|
8286
8240
|
this.loader = null;
|
8287
8241
|
this.uri = null;
|
8288
8242
|
this.pathwayId = '.';
|
@@ -8297,6 +8251,7 @@ class ContentSteeringController extends Logger {
|
|
8297
8251
|
this.subtitleTracks = null;
|
8298
8252
|
this.penalizedPathways = {};
|
8299
8253
|
this.hls = hls;
|
8254
|
+
this.log = logger.log.bind(logger, `[content-steering]:`);
|
8300
8255
|
this.registerListeners();
|
8301
8256
|
}
|
8302
8257
|
registerListeners() {
|
@@ -8420,7 +8375,7 @@ class ContentSteeringController extends Logger {
|
|
8420
8375
|
errorAction.resolved = this.pathwayId !== errorPathway;
|
8421
8376
|
}
|
8422
8377
|
if (!errorAction.resolved) {
|
8423
|
-
|
8378
|
+
logger.warn(`Could not resolve ${data.details} ("${data.error.message}") with content-steering for Pathway: ${errorPathway} levels: ${levels ? levels.length : levels} priorities: ${JSON.stringify(pathwayPriority)} penalized: ${JSON.stringify(this.penalizedPathways)}`);
|
8424
8379
|
}
|
8425
8380
|
}
|
8426
8381
|
}
|
@@ -8591,7 +8546,7 @@ class ContentSteeringController extends Logger {
|
|
8591
8546
|
onSuccess: (response, stats, context, networkDetails) => {
|
8592
8547
|
this.log(`Loaded steering manifest: "${url}"`);
|
8593
8548
|
const steeringData = response.data;
|
8594
|
-
if (
|
8549
|
+
if (steeringData.VERSION !== 1) {
|
8595
8550
|
this.log(`Steering VERSION ${steeringData.VERSION} not supported!`);
|
8596
8551
|
return;
|
8597
8552
|
}
|
@@ -9530,7 +9485,7 @@ function timelineConfig() {
|
|
9530
9485
|
/**
|
9531
9486
|
* @ignore
|
9532
9487
|
*/
|
9533
|
-
function mergeConfig(defaultConfig, userConfig
|
9488
|
+
function mergeConfig(defaultConfig, userConfig) {
|
9534
9489
|
if ((userConfig.liveSyncDurationCount || userConfig.liveMaxLatencyDurationCount) && (userConfig.liveSyncDuration || userConfig.liveMaxLatencyDuration)) {
|
9535
9490
|
throw new Error("Illegal hls.js config: don't mix up liveSyncDurationCount/liveMaxLatencyDurationCount and liveSyncDuration/liveMaxLatencyDuration");
|
9536
9491
|
}
|
@@ -9600,7 +9555,7 @@ function deepCpy(obj) {
|
|
9600
9555
|
/**
|
9601
9556
|
* @ignore
|
9602
9557
|
*/
|
9603
|
-
function enableStreamingMode(config
|
9558
|
+
function enableStreamingMode(config) {
|
9604
9559
|
const currentLoader = config.loader;
|
9605
9560
|
if (currentLoader !== FetchLoader && currentLoader !== XhrLoader) {
|
9606
9561
|
// If a developer has configured their own loader, respect that choice
|
@@ -9617,9 +9572,10 @@ function enableStreamingMode(config, logger) {
|
|
9617
9572
|
}
|
9618
9573
|
}
|
9619
9574
|
|
9575
|
+
let chromeOrFirefox;
|
9620
9576
|
class LevelController extends BasePlaylistController {
|
9621
9577
|
constructor(hls, contentSteeringController) {
|
9622
|
-
super(hls, 'level-controller');
|
9578
|
+
super(hls, '[level-controller]');
|
9623
9579
|
this._levels = [];
|
9624
9580
|
this._firstLevel = -1;
|
9625
9581
|
this._maxAutoLevel = -1;
|
@@ -9690,15 +9646,23 @@ class LevelController extends BasePlaylistController {
|
|
9690
9646
|
let videoCodecFound = false;
|
9691
9647
|
let audioCodecFound = false;
|
9692
9648
|
data.levels.forEach(levelParsed => {
|
9693
|
-
var _videoCodec;
|
9649
|
+
var _audioCodec, _videoCodec;
|
9694
9650
|
const attributes = levelParsed.attrs;
|
9651
|
+
|
9652
|
+
// erase audio codec info if browser does not support mp4a.40.34.
|
9653
|
+
// demuxer will autodetect codec and fallback to mpeg/audio
|
9695
9654
|
let {
|
9696
9655
|
audioCodec,
|
9697
9656
|
videoCodec
|
9698
9657
|
} = levelParsed;
|
9658
|
+
if (((_audioCodec = audioCodec) == null ? void 0 : _audioCodec.indexOf('mp4a.40.34')) !== -1) {
|
9659
|
+
chromeOrFirefox || (chromeOrFirefox = /chrome|firefox/i.test(navigator.userAgent));
|
9660
|
+
if (chromeOrFirefox) {
|
9661
|
+
levelParsed.audioCodec = audioCodec = undefined;
|
9662
|
+
}
|
9663
|
+
}
|
9699
9664
|
if (audioCodec) {
|
9700
|
-
|
9701
|
-
levelParsed.audioCodec = audioCodec = getCodecCompatibleName(audioCodec, preferManagedMediaSource) || undefined;
|
9665
|
+
levelParsed.audioCodec = audioCodec = getCodecCompatibleName(audioCodec, preferManagedMediaSource);
|
9702
9666
|
}
|
9703
9667
|
if (((_videoCodec = videoCodec) == null ? void 0 : _videoCodec.indexOf('avc1')) === 0) {
|
9704
9668
|
videoCodec = levelParsed.videoCodec = convertAVC1ToAVCOTI(videoCodec);
|
@@ -9823,8 +9787,8 @@ class LevelController extends BasePlaylistController {
|
|
9823
9787
|
return valueB - valueA;
|
9824
9788
|
}
|
9825
9789
|
}
|
9826
|
-
if (a.
|
9827
|
-
return a.
|
9790
|
+
if (a.averageBitrate !== b.averageBitrate) {
|
9791
|
+
return a.averageBitrate - b.averageBitrate;
|
9828
9792
|
}
|
9829
9793
|
return 0;
|
9830
9794
|
});
|
@@ -10826,8 +10790,8 @@ function createLoaderContext(frag, part = null) {
|
|
10826
10790
|
var _frag$decryptdata;
|
10827
10791
|
let byteRangeStart = start;
|
10828
10792
|
let byteRangeEnd = end;
|
10829
|
-
if (frag.sn === 'initSegment' &&
|
10830
|
-
// MAP segment encrypted with method 'AES-128'
|
10793
|
+
if (frag.sn === 'initSegment' && ((_frag$decryptdata = frag.decryptdata) == null ? void 0 : _frag$decryptdata.method) === 'AES-128') {
|
10794
|
+
// MAP segment encrypted with method 'AES-128', when served with HTTP Range,
|
10831
10795
|
// has the unencrypted size specified in the range.
|
10832
10796
|
// Ref: https://tools.ietf.org/html/draft-pantos-hls-rfc8216bis-08#section-6.3.6
|
10833
10797
|
const fragmentLen = end - start;
|
@@ -10860,9 +10824,6 @@ function createGapLoadError(frag, part) {
|
|
10860
10824
|
(part ? part : frag).stats.aborted = true;
|
10861
10825
|
return new LoadError(errorData);
|
10862
10826
|
}
|
10863
|
-
function isMethodFullSegmentAesCbc(method) {
|
10864
|
-
return method === 'AES-128' || method === 'AES-256';
|
10865
|
-
}
|
10866
10827
|
class LoadError extends Error {
|
10867
10828
|
constructor(data) {
|
10868
10829
|
super(data.error.message);
|
@@ -11008,8 +10969,6 @@ class KeyLoader {
|
|
11008
10969
|
}
|
11009
10970
|
return this.loadKeyEME(keyInfo, frag);
|
11010
10971
|
case 'AES-128':
|
11011
|
-
case 'AES-256':
|
11012
|
-
case 'AES-256-CTR':
|
11013
10972
|
return this.loadKeyHTTP(keyInfo, frag);
|
11014
10973
|
default:
|
11015
10974
|
return Promise.reject(this.createKeyLoadError(frag, ErrorDetails.KEY_LOAD_ERROR, new Error(`Key supplied with unsupported METHOD: "${decryptdata.method}"`)));
|
@@ -11145,9 +11104,8 @@ class KeyLoader {
|
|
11145
11104
|
* we are limiting the task execution per call stack to exactly one, but scheduling/post-poning further
|
11146
11105
|
* task processing on the next main loop iteration (also known as "next tick" in the Node/JS runtime lingo).
|
11147
11106
|
*/
|
11148
|
-
class TaskLoop
|
11149
|
-
constructor(
|
11150
|
-
super(label, logger);
|
11107
|
+
class TaskLoop {
|
11108
|
+
constructor() {
|
11151
11109
|
this._boundTick = void 0;
|
11152
11110
|
this._tickTimer = null;
|
11153
11111
|
this._tickInterval = null;
|
@@ -11415,61 +11373,33 @@ function alignMediaPlaylistByPDT(details, refDetails) {
|
|
11415
11373
|
}
|
11416
11374
|
|
11417
11375
|
class AESCrypto {
|
11418
|
-
constructor(subtle, iv
|
11376
|
+
constructor(subtle, iv) {
|
11419
11377
|
this.subtle = void 0;
|
11420
11378
|
this.aesIV = void 0;
|
11421
|
-
this.aesMode = void 0;
|
11422
11379
|
this.subtle = subtle;
|
11423
11380
|
this.aesIV = iv;
|
11424
|
-
this.aesMode = aesMode;
|
11425
11381
|
}
|
11426
11382
|
decrypt(data, key) {
|
11427
|
-
|
11428
|
-
|
11429
|
-
|
11430
|
-
|
11431
|
-
iv: this.aesIV
|
11432
|
-
}, key, data);
|
11433
|
-
case DecrypterAesMode.ctr:
|
11434
|
-
return this.subtle.decrypt({
|
11435
|
-
name: 'AES-CTR',
|
11436
|
-
counter: this.aesIV,
|
11437
|
-
length: 64
|
11438
|
-
},
|
11439
|
-
//64 : NIST SP800-38A standard suggests that the counter should occupy half of the counter block
|
11440
|
-
key, data);
|
11441
|
-
default:
|
11442
|
-
throw new Error(`[AESCrypto] invalid aes mode ${this.aesMode}`);
|
11443
|
-
}
|
11383
|
+
return this.subtle.decrypt({
|
11384
|
+
name: 'AES-CBC',
|
11385
|
+
iv: this.aesIV
|
11386
|
+
}, key, data);
|
11444
11387
|
}
|
11445
11388
|
}
|
11446
11389
|
|
11447
11390
|
class FastAESKey {
|
11448
|
-
constructor(subtle, key
|
11391
|
+
constructor(subtle, key) {
|
11449
11392
|
this.subtle = void 0;
|
11450
11393
|
this.key = void 0;
|
11451
|
-
this.aesMode = void 0;
|
11452
11394
|
this.subtle = subtle;
|
11453
11395
|
this.key = key;
|
11454
|
-
this.aesMode = aesMode;
|
11455
11396
|
}
|
11456
11397
|
expandKey() {
|
11457
|
-
const subtleAlgoName = getSubtleAlgoName(this.aesMode);
|
11458
11398
|
return this.subtle.importKey('raw', this.key, {
|
11459
|
-
name:
|
11399
|
+
name: 'AES-CBC'
|
11460
11400
|
}, false, ['encrypt', 'decrypt']);
|
11461
11401
|
}
|
11462
11402
|
}
|
11463
|
-
function getSubtleAlgoName(aesMode) {
|
11464
|
-
switch (aesMode) {
|
11465
|
-
case DecrypterAesMode.cbc:
|
11466
|
-
return 'AES-CBC';
|
11467
|
-
case DecrypterAesMode.ctr:
|
11468
|
-
return 'AES-CTR';
|
11469
|
-
default:
|
11470
|
-
throw new Error(`[FastAESKey] invalid aes mode ${aesMode}`);
|
11471
|
-
}
|
11472
|
-
}
|
11473
11403
|
|
11474
11404
|
// PKCS7
|
11475
11405
|
function removePadding(array) {
|
@@ -11719,8 +11649,7 @@ class Decrypter {
|
|
11719
11649
|
this.currentIV = null;
|
11720
11650
|
this.currentResult = null;
|
11721
11651
|
this.useSoftware = void 0;
|
11722
|
-
this.
|
11723
|
-
this.enableSoftwareAES = config.enableSoftwareAES;
|
11652
|
+
this.useSoftware = config.enableSoftwareAES;
|
11724
11653
|
this.removePKCS7Padding = removePKCS7Padding;
|
11725
11654
|
// built in decryptor expects PKCS7 padding
|
11726
11655
|
if (removePKCS7Padding) {
|
@@ -11733,7 +11662,9 @@ class Decrypter {
|
|
11733
11662
|
/* no-op */
|
11734
11663
|
}
|
11735
11664
|
}
|
11736
|
-
|
11665
|
+
if (this.subtle === null) {
|
11666
|
+
this.useSoftware = true;
|
11667
|
+
}
|
11737
11668
|
}
|
11738
11669
|
destroy() {
|
11739
11670
|
this.subtle = null;
|
@@ -11771,10 +11702,10 @@ class Decrypter {
|
|
11771
11702
|
this.softwareDecrypter = null;
|
11772
11703
|
}
|
11773
11704
|
}
|
11774
|
-
decrypt(data, key, iv
|
11705
|
+
decrypt(data, key, iv) {
|
11775
11706
|
if (this.useSoftware) {
|
11776
11707
|
return new Promise((resolve, reject) => {
|
11777
|
-
this.softwareDecrypt(new Uint8Array(data), key, iv
|
11708
|
+
this.softwareDecrypt(new Uint8Array(data), key, iv);
|
11778
11709
|
const decryptResult = this.flush();
|
11779
11710
|
if (decryptResult) {
|
11780
11711
|
resolve(decryptResult.buffer);
|
@@ -11783,21 +11714,17 @@ class Decrypter {
|
|
11783
11714
|
}
|
11784
11715
|
});
|
11785
11716
|
}
|
11786
|
-
return this.webCryptoDecrypt(new Uint8Array(data), key, iv
|
11717
|
+
return this.webCryptoDecrypt(new Uint8Array(data), key, iv);
|
11787
11718
|
}
|
11788
11719
|
|
11789
11720
|
// Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
|
11790
11721
|
// data is handled in the flush() call
|
11791
|
-
softwareDecrypt(data, key, iv
|
11722
|
+
softwareDecrypt(data, key, iv) {
|
11792
11723
|
const {
|
11793
11724
|
currentIV,
|
11794
11725
|
currentResult,
|
11795
11726
|
remainderData
|
11796
11727
|
} = this;
|
11797
|
-
if (aesMode !== DecrypterAesMode.cbc || key.byteLength !== 16) {
|
11798
|
-
logger.warn('SoftwareDecrypt: can only handle AES-128-CBC');
|
11799
|
-
return null;
|
11800
|
-
}
|
11801
11728
|
this.logOnce('JS AES decrypt');
|
11802
11729
|
// The output is staggered during progressive parsing - the current result is cached, and emitted on the next call
|
11803
11730
|
// This is done in order to strip PKCS7 padding, which is found at the end of each segment. We only know we've reached
|
@@ -11830,11 +11757,11 @@ class Decrypter {
|
|
11830
11757
|
}
|
11831
11758
|
return result;
|
11832
11759
|
}
|
11833
|
-
webCryptoDecrypt(data, key, iv
|
11760
|
+
webCryptoDecrypt(data, key, iv) {
|
11834
11761
|
const subtle = this.subtle;
|
11835
11762
|
if (this.key !== key || !this.fastAesKey) {
|
11836
11763
|
this.key = key;
|
11837
|
-
this.fastAesKey = new FastAESKey(subtle, key
|
11764
|
+
this.fastAesKey = new FastAESKey(subtle, key);
|
11838
11765
|
}
|
11839
11766
|
return this.fastAesKey.expandKey().then(aesKey => {
|
11840
11767
|
// decrypt using web crypto
|
@@ -11842,25 +11769,22 @@ class Decrypter {
|
|
11842
11769
|
return Promise.reject(new Error('web crypto not initialized'));
|
11843
11770
|
}
|
11844
11771
|
this.logOnce('WebCrypto AES decrypt');
|
11845
|
-
const crypto = new AESCrypto(subtle, new Uint8Array(iv)
|
11772
|
+
const crypto = new AESCrypto(subtle, new Uint8Array(iv));
|
11846
11773
|
return crypto.decrypt(data.buffer, aesKey);
|
11847
11774
|
}).catch(err => {
|
11848
11775
|
logger.warn(`[decrypter]: WebCrypto Error, disable WebCrypto API, ${err.name}: ${err.message}`);
|
11849
|
-
return this.onWebCryptoError(data, key, iv
|
11776
|
+
return this.onWebCryptoError(data, key, iv);
|
11850
11777
|
});
|
11851
11778
|
}
|
11852
|
-
onWebCryptoError(data, key, iv
|
11853
|
-
|
11854
|
-
|
11855
|
-
|
11856
|
-
|
11857
|
-
|
11858
|
-
|
11859
|
-
if (decryptResult) {
|
11860
|
-
return decryptResult.buffer;
|
11861
|
-
}
|
11779
|
+
onWebCryptoError(data, key, iv) {
|
11780
|
+
this.useSoftware = true;
|
11781
|
+
this.logEnabled = true;
|
11782
|
+
this.softwareDecrypt(data, key, iv);
|
11783
|
+
const decryptResult = this.flush();
|
11784
|
+
if (decryptResult) {
|
11785
|
+
return decryptResult.buffer;
|
11862
11786
|
}
|
11863
|
-
throw new Error('WebCrypto
|
11787
|
+
throw new Error('WebCrypto and softwareDecrypt: failed to decrypt data');
|
11864
11788
|
}
|
11865
11789
|
getValidChunk(data) {
|
11866
11790
|
let currentChunk = data;
|
@@ -11911,7 +11835,7 @@ const State = {
|
|
11911
11835
|
};
|
11912
11836
|
class BaseStreamController extends TaskLoop {
|
11913
11837
|
constructor(hls, fragmentTracker, keyLoader, logPrefix, playlistType) {
|
11914
|
-
super(
|
11838
|
+
super();
|
11915
11839
|
this.hls = void 0;
|
11916
11840
|
this.fragPrevious = null;
|
11917
11841
|
this.fragCurrent = null;
|
@@ -11936,83 +11860,22 @@ class BaseStreamController extends TaskLoop {
|
|
11936
11860
|
this.startFragRequested = false;
|
11937
11861
|
this.decrypter = void 0;
|
11938
11862
|
this.initPTS = [];
|
11939
|
-
this.
|
11940
|
-
|
11941
|
-
|
11942
|
-
|
11943
|
-
|
11944
|
-
mediaBuffer,
|
11945
|
-
state
|
11946
|
-
} = this;
|
11947
|
-
const currentTime = media ? media.currentTime : 0;
|
11948
|
-
const bufferInfo = BufferHelper.bufferInfo(mediaBuffer ? mediaBuffer : media, currentTime, config.maxBufferHole);
|
11949
|
-
this.log(`media seeking to ${isFiniteNumber(currentTime) ? currentTime.toFixed(3) : currentTime}, state: ${state}`);
|
11950
|
-
if (this.state === State.ENDED) {
|
11951
|
-
this.resetLoadingState();
|
11952
|
-
} else if (fragCurrent) {
|
11953
|
-
// Seeking while frag load is in progress
|
11954
|
-
const tolerance = config.maxFragLookUpTolerance;
|
11955
|
-
const fragStartOffset = fragCurrent.start - tolerance;
|
11956
|
-
const fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance;
|
11957
|
-
// if seeking out of buffered range or into new one
|
11958
|
-
if (!bufferInfo.len || fragEndOffset < bufferInfo.start || fragStartOffset > bufferInfo.end) {
|
11959
|
-
const pastFragment = currentTime > fragEndOffset;
|
11960
|
-
// if the seek position is outside the current fragment range
|
11961
|
-
if (currentTime < fragStartOffset || pastFragment) {
|
11962
|
-
if (pastFragment && fragCurrent.loader) {
|
11963
|
-
this.log('seeking outside of buffer while fragment load in progress, cancel fragment load');
|
11964
|
-
fragCurrent.abortRequests();
|
11965
|
-
this.resetLoadingState();
|
11966
|
-
}
|
11967
|
-
this.fragPrevious = null;
|
11968
|
-
}
|
11969
|
-
}
|
11970
|
-
}
|
11971
|
-
if (media) {
|
11972
|
-
// Remove gap fragments
|
11973
|
-
this.fragmentTracker.removeFragmentsInRange(currentTime, Infinity, this.playlistType, true);
|
11974
|
-
this.lastCurrentTime = currentTime;
|
11975
|
-
}
|
11976
|
-
|
11977
|
-
// in case seeking occurs although no media buffered, adjust startPosition and nextLoadPosition to seek target
|
11978
|
-
if (!this.loadedmetadata && !bufferInfo.len) {
|
11979
|
-
this.nextLoadPosition = this.startPosition = currentTime;
|
11980
|
-
}
|
11981
|
-
|
11982
|
-
// Async tick to speed up processing
|
11983
|
-
this.tickImmediate();
|
11984
|
-
};
|
11985
|
-
this.onMediaEnded = () => {
|
11986
|
-
// reset startPosition and lastCurrentTime to restart playback @ stream beginning
|
11987
|
-
this.startPosition = this.lastCurrentTime = 0;
|
11988
|
-
};
|
11863
|
+
this.onvseeking = null;
|
11864
|
+
this.onvended = null;
|
11865
|
+
this.logPrefix = '';
|
11866
|
+
this.log = void 0;
|
11867
|
+
this.warn = void 0;
|
11989
11868
|
this.playlistType = playlistType;
|
11869
|
+
this.logPrefix = logPrefix;
|
11870
|
+
this.log = logger.log.bind(logger, `${logPrefix}:`);
|
11871
|
+
this.warn = logger.warn.bind(logger, `${logPrefix}:`);
|
11990
11872
|
this.hls = hls;
|
11991
11873
|
this.fragmentLoader = new FragmentLoader(hls.config);
|
11992
11874
|
this.keyLoader = keyLoader;
|
11993
11875
|
this.fragmentTracker = fragmentTracker;
|
11994
11876
|
this.config = hls.config;
|
11995
11877
|
this.decrypter = new Decrypter(hls.config);
|
11996
|
-
}
|
11997
|
-
registerListeners() {
|
11998
|
-
const {
|
11999
|
-
hls
|
12000
|
-
} = this;
|
12001
|
-
hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
|
12002
|
-
hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
|
12003
|
-
hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
|
12004
11878
|
hls.on(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
|
12005
|
-
hls.on(Events.ERROR, this.onError, this);
|
12006
|
-
}
|
12007
|
-
unregisterListeners() {
|
12008
|
-
const {
|
12009
|
-
hls
|
12010
|
-
} = this;
|
12011
|
-
hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
|
12012
|
-
hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
|
12013
|
-
hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
|
12014
|
-
hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
|
12015
|
-
hls.off(Events.ERROR, this.onError, this);
|
12016
11879
|
}
|
12017
11880
|
doTick() {
|
12018
11881
|
this.onTickEnd();
|
@@ -12066,8 +11929,10 @@ class BaseStreamController extends TaskLoop {
|
|
12066
11929
|
}
|
12067
11930
|
onMediaAttached(event, data) {
|
12068
11931
|
const media = this.media = this.mediaBuffer = data.media;
|
12069
|
-
|
12070
|
-
|
11932
|
+
this.onvseeking = this.onMediaSeeking.bind(this);
|
11933
|
+
this.onvended = this.onMediaEnded.bind(this);
|
11934
|
+
media.addEventListener('seeking', this.onvseeking);
|
11935
|
+
media.addEventListener('ended', this.onvended);
|
12071
11936
|
const config = this.config;
|
12072
11937
|
if (this.levels && config.autoStartLoad && this.state === State.STOPPED) {
|
12073
11938
|
this.startLoad(config.startPosition);
|
@@ -12081,9 +11946,10 @@ class BaseStreamController extends TaskLoop {
|
|
12081
11946
|
}
|
12082
11947
|
|
12083
11948
|
// remove video listeners
|
12084
|
-
if (media) {
|
12085
|
-
media.removeEventListener('seeking', this.
|
12086
|
-
media.removeEventListener('ended', this.
|
11949
|
+
if (media && this.onvseeking && this.onvended) {
|
11950
|
+
media.removeEventListener('seeking', this.onvseeking);
|
11951
|
+
media.removeEventListener('ended', this.onvended);
|
11952
|
+
this.onvseeking = this.onvended = null;
|
12087
11953
|
}
|
12088
11954
|
if (this.keyLoader) {
|
12089
11955
|
this.keyLoader.detach();
|
@@ -12093,8 +11959,56 @@ class BaseStreamController extends TaskLoop {
|
|
12093
11959
|
this.fragmentTracker.removeAllFragments();
|
12094
11960
|
this.stopLoad();
|
12095
11961
|
}
|
12096
|
-
|
12097
|
-
|
11962
|
+
onMediaSeeking() {
|
11963
|
+
const {
|
11964
|
+
config,
|
11965
|
+
fragCurrent,
|
11966
|
+
media,
|
11967
|
+
mediaBuffer,
|
11968
|
+
state
|
11969
|
+
} = this;
|
11970
|
+
const currentTime = media ? media.currentTime : 0;
|
11971
|
+
const bufferInfo = BufferHelper.bufferInfo(mediaBuffer ? mediaBuffer : media, currentTime, config.maxBufferHole);
|
11972
|
+
this.log(`media seeking to ${isFiniteNumber(currentTime) ? currentTime.toFixed(3) : currentTime}, state: ${state}`);
|
11973
|
+
if (this.state === State.ENDED) {
|
11974
|
+
this.resetLoadingState();
|
11975
|
+
} else if (fragCurrent) {
|
11976
|
+
// Seeking while frag load is in progress
|
11977
|
+
const tolerance = config.maxFragLookUpTolerance;
|
11978
|
+
const fragStartOffset = fragCurrent.start - tolerance;
|
11979
|
+
const fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance;
|
11980
|
+
// if seeking out of buffered range or into new one
|
11981
|
+
if (!bufferInfo.len || fragEndOffset < bufferInfo.start || fragStartOffset > bufferInfo.end) {
|
11982
|
+
const pastFragment = currentTime > fragEndOffset;
|
11983
|
+
// if the seek position is outside the current fragment range
|
11984
|
+
if (currentTime < fragStartOffset || pastFragment) {
|
11985
|
+
if (pastFragment && fragCurrent.loader) {
|
11986
|
+
this.log('seeking outside of buffer while fragment load in progress, cancel fragment load');
|
11987
|
+
fragCurrent.abortRequests();
|
11988
|
+
this.resetLoadingState();
|
11989
|
+
}
|
11990
|
+
this.fragPrevious = null;
|
11991
|
+
}
|
11992
|
+
}
|
11993
|
+
}
|
11994
|
+
if (media) {
|
11995
|
+
// Remove gap fragments
|
11996
|
+
this.fragmentTracker.removeFragmentsInRange(currentTime, Infinity, this.playlistType, true);
|
11997
|
+
this.lastCurrentTime = currentTime;
|
11998
|
+
}
|
11999
|
+
|
12000
|
+
// in case seeking occurs although no media buffered, adjust startPosition and nextLoadPosition to seek target
|
12001
|
+
if (!this.loadedmetadata && !bufferInfo.len) {
|
12002
|
+
this.nextLoadPosition = this.startPosition = currentTime;
|
12003
|
+
}
|
12004
|
+
|
12005
|
+
// Async tick to speed up processing
|
12006
|
+
this.tickImmediate();
|
12007
|
+
}
|
12008
|
+
onMediaEnded() {
|
12009
|
+
// reset startPosition and lastCurrentTime to restart playback @ stream beginning
|
12010
|
+
this.startPosition = this.lastCurrentTime = 0;
|
12011
|
+
}
|
12098
12012
|
onManifestLoaded(event, data) {
|
12099
12013
|
this.startTimeOffset = data.startTimeOffset;
|
12100
12014
|
this.initPTS = [];
|
@@ -12104,7 +12018,7 @@ class BaseStreamController extends TaskLoop {
|
|
12104
12018
|
this.stopLoad();
|
12105
12019
|
super.onHandlerDestroying();
|
12106
12020
|
// @ts-ignore
|
12107
|
-
this.hls =
|
12021
|
+
this.hls = null;
|
12108
12022
|
}
|
12109
12023
|
onHandlerDestroyed() {
|
12110
12024
|
this.state = State.STOPPED;
|
@@ -12235,10 +12149,10 @@ class BaseStreamController extends TaskLoop {
|
|
12235
12149
|
const decryptData = frag.decryptdata;
|
12236
12150
|
|
12237
12151
|
// check to see if the payload needs to be decrypted
|
12238
|
-
if (payload && payload.byteLength > 0 && decryptData != null && decryptData.key && decryptData.iv &&
|
12152
|
+
if (payload && payload.byteLength > 0 && decryptData != null && decryptData.key && decryptData.iv && decryptData.method === 'AES-128') {
|
12239
12153
|
const startTime = self.performance.now();
|
12240
12154
|
// decrypt init segment data
|
12241
|
-
return this.decrypter.decrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer
|
12155
|
+
return this.decrypter.decrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer).catch(err => {
|
12242
12156
|
hls.trigger(Events.ERROR, {
|
12243
12157
|
type: ErrorTypes.MEDIA_ERROR,
|
12244
12158
|
details: ErrorDetails.FRAG_DECRYPT_ERROR,
|
@@ -12350,7 +12264,7 @@ class BaseStreamController extends TaskLoop {
|
|
12350
12264
|
}
|
12351
12265
|
let keyLoadingPromise = null;
|
12352
12266
|
if (frag.encrypted && !((_frag$decryptdata = frag.decryptdata) != null && _frag$decryptdata.key)) {
|
12353
|
-
this.log(`Loading key for ${frag.sn} of [${details.startSN}-${details.endSN}], ${this.
|
12267
|
+
this.log(`Loading key for ${frag.sn} of [${details.startSN}-${details.endSN}], ${this.logPrefix === '[stream-controller]' ? 'level' : 'track'} ${frag.level}`);
|
12354
12268
|
this.state = State.KEY_LOADING;
|
12355
12269
|
this.fragCurrent = frag;
|
12356
12270
|
keyLoadingPromise = this.keyLoader.load(frag).then(keyLoadedData => {
|
@@ -12381,7 +12295,7 @@ class BaseStreamController extends TaskLoop {
|
|
12381
12295
|
const partIndex = this.getNextPart(partList, frag, targetBufferTime);
|
12382
12296
|
if (partIndex > -1) {
|
12383
12297
|
const part = partList[partIndex];
|
12384
|
-
this.log(`Loading part sn: ${frag.sn} p: ${part.index} cc: ${frag.cc} of playlist [${details.startSN}-${details.endSN}] parts [0-${partIndex}-${partList.length - 1}] ${this.
|
12298
|
+
this.log(`Loading part sn: ${frag.sn} p: ${part.index} cc: ${frag.cc} of playlist [${details.startSN}-${details.endSN}] parts [0-${partIndex}-${partList.length - 1}] ${this.logPrefix === '[stream-controller]' ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
|
12385
12299
|
this.nextLoadPosition = part.start + part.duration;
|
12386
12300
|
this.state = State.FRAG_LOADING;
|
12387
12301
|
let _result;
|
@@ -12410,7 +12324,7 @@ class BaseStreamController extends TaskLoop {
|
|
12410
12324
|
}
|
12411
12325
|
}
|
12412
12326
|
}
|
12413
|
-
this.log(`Loading fragment ${frag.sn} cc: ${frag.cc} ${details ? 'of [' + details.startSN + '-' + details.endSN + '] ' : ''}${this.
|
12327
|
+
this.log(`Loading fragment ${frag.sn} cc: ${frag.cc} ${details ? 'of [' + details.startSN + '-' + details.endSN + '] ' : ''}${this.logPrefix === '[stream-controller]' ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
|
12414
12328
|
// Don't update nextLoadPosition for fragments which are not buffered
|
12415
12329
|
if (isFiniteNumber(frag.sn) && !this.bitrateTest) {
|
12416
12330
|
this.nextLoadPosition = frag.start + frag.duration;
|
@@ -12995,7 +12909,7 @@ class BaseStreamController extends TaskLoop {
|
|
12995
12909
|
errorAction.resolved = true;
|
12996
12910
|
}
|
12997
12911
|
} else {
|
12998
|
-
|
12912
|
+
logger.warn(`${data.details} reached or exceeded max retry (${retryCount})`);
|
12999
12913
|
return;
|
13000
12914
|
}
|
13001
12915
|
} else if ((errorAction == null ? void 0 : errorAction.action) === NetworkErrorAction.SendAlternateToPenaltyBox) {
|
@@ -13390,7 +13304,6 @@ const initPTSFn = (timestamp, timeOffset, initPTS) => {
|
|
13390
13304
|
*/
|
13391
13305
|
function getAudioConfig(observer, data, offset, audioCodec) {
|
13392
13306
|
let adtsObjectType;
|
13393
|
-
let originalAdtsObjectType;
|
13394
13307
|
let adtsExtensionSamplingIndex;
|
13395
13308
|
let adtsChannelConfig;
|
13396
13309
|
let config;
|
@@ -13398,7 +13311,7 @@ function getAudioConfig(observer, data, offset, audioCodec) {
|
|
13398
13311
|
const manifestCodec = audioCodec;
|
13399
13312
|
const adtsSamplingRates = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
|
13400
13313
|
// byte 2
|
13401
|
-
adtsObjectType =
|
13314
|
+
adtsObjectType = ((data[offset + 2] & 0xc0) >>> 6) + 1;
|
13402
13315
|
const adtsSamplingIndex = (data[offset + 2] & 0x3c) >>> 2;
|
13403
13316
|
if (adtsSamplingIndex > adtsSamplingRates.length - 1) {
|
13404
13317
|
const error = new Error(`invalid ADTS sampling index:${adtsSamplingIndex}`);
|
@@ -13415,8 +13328,8 @@ function getAudioConfig(observer, data, offset, audioCodec) {
|
|
13415
13328
|
// byte 3
|
13416
13329
|
adtsChannelConfig |= (data[offset + 3] & 0xc0) >>> 6;
|
13417
13330
|
logger.log(`manifest codec:${audioCodec}, ADTS type:${adtsObjectType}, samplingIndex:${adtsSamplingIndex}`);
|
13418
|
-
//
|
13419
|
-
if (/firefox
|
13331
|
+
// firefox: freq less than 24kHz = AAC SBR (HE-AAC)
|
13332
|
+
if (/firefox/i.test(userAgent)) {
|
13420
13333
|
if (adtsSamplingIndex >= 6) {
|
13421
13334
|
adtsObjectType = 5;
|
13422
13335
|
config = new Array(4);
|
@@ -13510,7 +13423,6 @@ function getAudioConfig(observer, data, offset, audioCodec) {
 samplerate: adtsSamplingRates[adtsSamplingIndex],
 channelCount: adtsChannelConfig,
 codec: 'mp4a.40.' + adtsObjectType,
- parsedCodec: 'mp4a.40.' + originalAdtsObjectType,
 manifestCodec
 };
 }
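The codec field kept above is the RFC 6381 audio codec string derived from the ADTS object type (the canary-only parsedCodec field is dropped). A quick illustration of how that string is formed:

// 'mp4a.40.<objectType>': 2 = AAC-LC, 5 = HE-AAC (SBR), 29 = HE-AAC v2 (PS)
const adtsObjectType = 2;
const codec = 'mp4a.40.' + adtsObjectType; // "mp4a.40.2"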
@@ -13565,8 +13477,7 @@ function initTrackConfig(track, observer, data, offset, audioCodec) {
 track.channelCount = config.channelCount;
 track.codec = config.codec;
 track.manifestCodec = config.manifestCodec;
- track.
- logger.log(`parsed codec:${track.parsedCodec}, codec:${track.codec}, rate:${config.samplerate}, channels:${config.channelCount}`);
+ logger.log(`parsed codec:${track.codec}, rate:${config.samplerate}, channels:${config.channelCount}`);
 }
 }
 function getFrameDuration(samplerate) {
@@ -14629,7 +14540,7 @@ class SampleAesDecrypter {
 });
 }
 decryptBuffer(encryptedData) {
- return this.decrypter.decrypt(encryptedData, this.keyData.key.buffer, this.keyData.iv.buffer
+ return this.decrypter.decrypt(encryptedData, this.keyData.key.buffer, this.keyData.iv.buffer);
 }

 // AAC - encrypt all full 16 bytes blocks starting from offset 16
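The decrypt call above now takes only data, key and IV (the extra AES-mode argument from the canary build is gone). As a rough illustration of what a WebCrypto-backed decrypt of an AES-128-CBC HLS key ultimately resolves to, using only the standard SubtleCrypto API and not the hls.js implementation itself:

// Decrypt an AES-128-CBC encrypted buffer with WebCrypto (illustrative helper).
async function decryptAes128Cbc(encryptedData, keyBytes, ivBytes) {
  const key = await crypto.subtle.importKey('raw', keyBytes, { name: 'AES-CBC' }, false, ['decrypt']);
  const plain = await crypto.subtle.decrypt({ name: 'AES-CBC', iv: ivBytes }, key, encryptedData);
  return new Uint8Array(plain);
}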
@@ -16832,7 +16743,7 @@ class MP4Remuxer {
 logger.warn(`[mp4-remuxer]: Injecting ${missing} audio frame @ ${(nextPts / inputTimeScale).toFixed(3)}s due to ${Math.round(1000 * delta / inputTimeScale)} ms gap.`);
 for (let j = 0; j < missing; j++) {
 const newStamp = Math.max(nextPts, 0);
- let fillFrame = AAC.getSilentFrame(track.
+ let fillFrame = AAC.getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
 if (!fillFrame) {
 logger.log('[mp4-remuxer]: Unable to get silent frame for given audio codec; duplicating last frame instead.');
 fillFrame = sample.unit.subarray();
@@ -16960,7 +16871,7 @@ class MP4Remuxer {
 // samples count of this segment's duration
 const nbSamples = Math.ceil((endDTS - startDTS) / frameDuration);
 // silent frame
- const silentFrame = AAC.getSilentFrame(track.
+ const silentFrame = AAC.getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
 logger.warn('[mp4-remuxer]: remux empty Audio');
 // Can't remux if we can't generate a silent frame...
 if (!silentFrame) {
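Both call sites above fall back to track.manifestCodec || track.codec when requesting a silent AAC frame, and the surrounding gap-fill logic counts frames of 1024 samples. A small sketch of that arithmetic (helper name and numbers are illustrative only):

// How many silent AAC frames are needed to cover a PTS gap.
// Each AAC frame carries 1024 samples, so frameDuration = 1024 * timescale / samplerate.
function missingFrames(gapInTimescaleUnits, samplerate, inputTimeScale) {
  const frameDuration = 1024 * inputTimeScale / samplerate;
  return Math.ceil(gapInTimescaleUnits / frameDuration);
}

// e.g. a 0.5 s gap at 48 kHz with a 90 kHz timescale: 45000 / 1920 -> 24 frames
console.log(missingFrames(45000, 48000, 90000));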
@@ -17351,15 +17262,13 @@ class Transmuxer {
 initSegmentData
 } = transmuxConfig;
 const keyData = getEncryptionType(uintData, decryptdata);
- if (keyData &&
+ if (keyData && keyData.method === 'AES-128') {
 const decrypter = this.getDecrypter();
- const aesMode = getAesModeFromFullSegmentMethod(keyData.method);
-
 // Software decryption is synchronous; webCrypto is not
 if (decrypter.isSync()) {
 // Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
 // data is handled in the flush() call
- let decryptedData = decrypter.softwareDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer
+ let decryptedData = decrypter.softwareDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer);
 // For Low-Latency HLS Parts, decrypt in place, since part parsing is expected on push progress
 const loadingParts = chunkMeta.part > -1;
 if (loadingParts) {
@@ -17371,7 +17280,7 @@ class Transmuxer {
 }
 uintData = new Uint8Array(decryptedData);
 } else {
- this.decryptionPromise = decrypter.webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer
+ this.decryptionPromise = decrypter.webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer).then(decryptedData => {
 // Calling push here is important; if flush() is called while this is still resolving, this ensures that
 // the decrypted data has been transmuxed
 const result = this.push(decryptedData, null, chunkMeta);
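The two branches above differ in shape: softwareDecrypt returns data (or nothing yet) synchronously and progressively, while webCryptoDecrypt returns a Promise whose result is pushed when it resolves. A generic sketch of consuming such a maybe-asynchronous result (the names are illustrative, not the hls.js API):

// Normalize a decrypt step that may return data synchronously or a Promise.
function handleDecrypted(result, onData) {
  if (result && typeof result.then === 'function') {
    // webCrypto path: wait for the promise, then continue transmuxing
    return result.then(onData);
  }
  if (result) {
    // software path: data is available immediately; progressive decryption
    // may also return nothing now and deliver the remainder on flush()
    onData(result);
  }
  return result;
}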
@@ -18025,7 +17934,14 @@ class TransmuxerInterface {
 this.observer = new EventEmitter();
 this.observer.on(Events.FRAG_DECRYPTED, forwardMessage);
 this.observer.on(Events.ERROR, forwardMessage);
- const
+ const MediaSource = getMediaSource(config.preferManagedMediaSource) || {
+ isTypeSupported: () => false
+ };
+ const m2tsTypeSupported = {
+ mpeg: MediaSource.isTypeSupported('audio/mpeg'),
+ mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
+ ac3: false
+ };

 // navigator.vendor is not always available in Web Worker
 // refer to https://developer.mozilla.org/en-US/docs/Web/API/WorkerGlobalScope/navigator
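The replacement block resolves a MediaSource implementation (falling back to a stub whose isTypeSupported always returns false) before probing MPEG audio support. Outside of the hls.js internals, the same probe can be written directly against the platform APIs; a sketch, assuming ManagedMediaSource is preferred when present:

// Prefer ManagedMediaSource where available, else MediaSource,
// else a stub so the feature checks simply come back false.
const MS = self.ManagedMediaSource || self.MediaSource || { isTypeSupported: () => false };
const m2tsTypeSupported = {
  mpeg: MS.isTypeSupported('audio/mpeg'),
  mp3: MS.isTypeSupported('audio/mp4; codecs="mp3"'),
  ac3: false,
};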
@@ -18289,9 +18205,8 @@ const STALL_MINIMUM_DURATION_MS = 250;
 const MAX_START_GAP_JUMP = 2.0;
 const SKIP_BUFFER_HOLE_STEP_SECONDS = 0.1;
 const SKIP_BUFFER_RANGE_START = 0.05;
- class GapController
+ class GapController {
 constructor(config, media, fragmentTracker, hls) {
- super('gap-controller', hls.logger);
 this.config = void 0;
 this.media = null;
 this.fragmentTracker = void 0;
@@ -18345,7 +18260,7 @@ class GapController extends Logger {
 // The playhead is now moving, but was previously stalled
 if (this.stallReported) {
 const _stalledDuration = self.performance.now() - stalled;
-
+ logger.warn(`playback not stuck anymore @${currentTime}, after ${Math.round(_stalledDuration)}ms`);
 this.stallReported = false;
 }
 this.stalled = null;
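The restored warning reports how long playback was stalled, measured with performance.now(); the underlying pattern is simply:

// Record when the stall began...
const stalled = self.performance.now();
// ...and later, once currentTime moves again:
const stalledDuration = self.performance.now() - stalled;
console.warn(`playback not stuck anymore, after ${Math.round(stalledDuration)}ms`);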
@@ -18456,7 +18371,7 @@ class GapController extends Logger {
 // needs to cross some sort of threshold covering all source-buffers content
 // to start playing properly.
 if ((bufferInfo.len > config.maxBufferHole || bufferInfo.nextStart && bufferInfo.nextStart - currentTime < config.maxBufferHole) && stalledDurationMs > config.highBufferWatchdogPeriod * 1000) {
-
+ logger.warn('Trying to nudge playhead over buffer-hole');
 // Try to nudge currentTime over a buffer hole if we've been stalling for the configured amount of seconds
 // We only try to jump the hole if it's under the configured size
 // Reset stalled so to rearm watchdog timer
@@ -18480,7 +18395,7 @@ class GapController extends Logger {
 // Report stalled error once
 this.stallReported = true;
 const error = new Error(`Playback stalling at @${media.currentTime} due to low buffer (${JSON.stringify(bufferInfo)})`);
-
+ logger.warn(error.message);
 hls.trigger(Events.ERROR, {
 type: ErrorTypes.MEDIA_ERROR,
 details: ErrorDetails.BUFFER_STALLED_ERROR,
@@ -18548,7 +18463,7 @@ class GapController extends Logger {
 }
 }
 const targetTime = Math.max(startTime + SKIP_BUFFER_RANGE_START, currentTime + SKIP_BUFFER_HOLE_STEP_SECONDS);
-
+ logger.warn(`skipping hole, adjusting currentTime from ${currentTime} to ${targetTime}`);
 this.moved = true;
 this.stalled = null;
 media.currentTime = targetTime;
@@ -18589,7 +18504,7 @@ class GapController extends Logger {
 const targetTime = currentTime + (nudgeRetry + 1) * config.nudgeOffset;
 // playback stalled in buffered area ... let's nudge currentTime to try to overcome this
 const error = new Error(`Nudging 'currentTime' from ${currentTime} to ${targetTime}`);
-
+ logger.warn(error.message);
 media.currentTime = targetTime;
 hls.trigger(Events.ERROR, {
 type: ErrorTypes.MEDIA_ERROR,
@@ -18599,7 +18514,7 @@ class GapController extends Logger {
 });
 } else {
 const error = new Error(`Playhead still not moving while enough data buffered @${currentTime} after ${config.nudgeMaxRetry} nudges`);
-
+ logger.error(error.message);
 hls.trigger(Events.ERROR, {
 type: ErrorTypes.MEDIA_ERROR,
 details: ErrorDetails.BUFFER_STALLED_ERROR,
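The nudge logic in the two hunks above moves currentTime forward by a growing offset on each retry, and after nudgeMaxRetry unsuccessful attempts raises the BUFFER_STALLED_ERROR shown above. With the documented hls.js defaults (nudgeOffset 0.1, nudgeMaxRetry 3; worth double-checking against your own config), the successive nudge targets look like this:

// targetTime = currentTime + (nudgeRetry + 1) * nudgeOffset
const nudgeOffset = 0.1;
const nudgeMaxRetry = 3;
const currentTime = 12.0;
for (let nudgeRetry = 0; nudgeRetry < nudgeMaxRetry; nudgeRetry++) {
  const targetTime = currentTime + (nudgeRetry + 1) * nudgeOffset;
  console.log(`nudge #${nudgeRetry + 1} -> ${targetTime.toFixed(1)}s`); // 12.1, 12.2, 12.3
}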
@@ -18614,7 +18529,7 @@ const TICK_INTERVAL = 100; // how often to tick in ms

 class StreamController extends BaseStreamController {
 constructor(hls, fragmentTracker, keyLoader) {
- super(hls, fragmentTracker, keyLoader, 'stream-controller', PlaylistLevelType.MAIN);
+ super(hls, fragmentTracker, keyLoader, '[stream-controller]', PlaylistLevelType.MAIN);
 this.audioCodecSwap = false;
 this.gapController = null;
 this.level = -1;
@@ -18622,43 +18537,27 @@ class StreamController extends BaseStreamController {
 this.altAudio = false;
 this.audioOnly = false;
 this.fragPlaying = null;
+ this.onvplaying = null;
+ this.onvseeked = null;
 this.fragLastKbps = 0;
 this.couldBacktrack = false;
 this.backtrackFragment = null;
 this.audioCodecSwitch = false;
 this.videoBuffer = null;
- this.
- // tick to speed up FRAG_CHANGED triggering
- this.tick();
- };
- this.onMediaSeeked = () => {
- const media = this.media;
- const currentTime = media ? media.currentTime : null;
- if (isFiniteNumber(currentTime)) {
- this.log(`Media seeked to ${currentTime.toFixed(3)}`);
- }
-
- // If seeked was issued before buffer was appended do not tick immediately
- const bufferInfo = this.getMainFwdBufferInfo();
- if (bufferInfo === null || bufferInfo.len === 0) {
- this.warn(`Main forward buffer length on "seeked" event ${bufferInfo ? bufferInfo.len : 'empty'})`);
- return;
- }
-
- // tick to speed up FRAG_CHANGED triggering
- this.tick();
- };
- this.registerListeners();
+ this._registerListeners();
 }
-
- super.registerListeners();
+ _registerListeners() {
 const {
 hls
 } = this;
+ hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
+ hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
+ hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
 hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
 hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this);
 hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
 hls.on(Events.FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
+ hls.on(Events.ERROR, this.onError, this);
 hls.on(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
 hls.on(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
 hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this);
@@ -18666,14 +18565,17 @@ class StreamController extends BaseStreamController {
 hls.on(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
 hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
 }
-
- super.unregisterListeners();
+ _unregisterListeners() {
 const {
 hls
 } = this;
+ hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
+ hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
+ hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
 hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
 hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
 hls.off(Events.FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
+ hls.off(Events.ERROR, this.onError, this);
 hls.off(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
 hls.off(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
 hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this);
@@ -18682,9 +18584,7 @@ class StreamController extends BaseStreamController {
 hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
 }
 onHandlerDestroying() {
-
- this.onMediaPlaying = this.onMediaSeeked = null;
- this.unregisterListeners();
+ this._unregisterListeners();
 super.onHandlerDestroying();
 }
 startLoad(startPosition) {
@@ -19011,17 +18911,20 @@ class StreamController extends BaseStreamController {
 onMediaAttached(event, data) {
 super.onMediaAttached(event, data);
 const media = data.media;
-
-
+ this.onvplaying = this.onMediaPlaying.bind(this);
+ this.onvseeked = this.onMediaSeeked.bind(this);
+ media.addEventListener('playing', this.onvplaying);
+ media.addEventListener('seeked', this.onvseeked);
 this.gapController = new GapController(this.config, media, this.fragmentTracker, this.hls);
 }
 onMediaDetaching() {
 const {
 media
 } = this;
- if (media) {
- media.removeEventListener('playing', this.
- media.removeEventListener('seeked', this.
+ if (media && this.onvplaying && this.onvseeked) {
+ media.removeEventListener('playing', this.onvplaying);
+ media.removeEventListener('seeked', this.onvseeked);
+ this.onvplaying = this.onvseeked = null;
 this.videoBuffer = null;
 }
 this.fragPlaying = null;
@@ -19031,6 +18934,27 @@ class StreamController extends BaseStreamController {
 }
 super.onMediaDetaching();
 }
+ onMediaPlaying() {
+ // tick to speed up FRAG_CHANGED triggering
+ this.tick();
+ }
+ onMediaSeeked() {
+ const media = this.media;
+ const currentTime = media ? media.currentTime : null;
+ if (isFiniteNumber(currentTime)) {
+ this.log(`Media seeked to ${currentTime.toFixed(3)}`);
+ }
+
+ // If seeked was issued before buffer was appended do not tick immediately
+ const bufferInfo = this.getMainFwdBufferInfo();
+ if (bufferInfo === null || bufferInfo.len === 0) {
+ this.warn(`Main forward buffer length on "seeked" event ${bufferInfo ? bufferInfo.len : 'empty'})`);
+ return;
+ }
+
+ // tick to speed up FRAG_CHANGED triggering
+ this.tick();
+ }
 onManifestLoading() {
 // reset buffer on manifest loading
 this.log('Trigger BUFFER_RESET');
@@ -19761,7 +19685,7 @@ class Hls {
 * Get the video-dev/hls.js package version.
 */
 static get version() {
- return "1.5.2
+ return "1.5.2";
 }

 /**
@@ -19824,10 +19748,6 @@ class Hls {
 * The configuration object provided on player instantiation.
 */
 this.userConfig = void 0;
- /**
- * The logger functions used by this player instance, configured on player instantiation.
- */
- this.logger = void 0;
 this.coreComponents = void 0;
 this.networkControllers = void 0;
 this.started = false;
@@ -19847,11 +19767,11 @@ class Hls {
 this._media = null;
 this.url = null;
 this.triggeringException = void 0;
-
- const config = this.config = mergeConfig(Hls.DefaultConfig, userConfig
+ enableLogs(userConfig.debug || false, 'Hls instance');
+ const config = this.config = mergeConfig(Hls.DefaultConfig, userConfig);
 this.userConfig = userConfig;
 if (config.progressive) {
- enableStreamingMode(config
+ enableStreamingMode(config);
 }

 // core controllers and network loaders
@@ -19950,7 +19870,7 @@ class Hls {
 try {
 return this.emit(event, event, eventObject);
 } catch (error) {
-
+ logger.error('An internal error happened while handling event ' + event + '. Error message: "' + error.message + '". Here is a stacktrace:', error);
 // Prevent recursion in error event handlers that throw #5497
 if (!this.triggeringException) {
 this.triggeringException = true;
@@ -19976,7 +19896,7 @@ class Hls {
 * Dispose of the instance
 */
 destroy() {
-
+ logger.log('destroy');
 this.trigger(Events.DESTROYING, undefined);
 this.detachMedia();
 this.removeAllListeners();
@@ -19997,7 +19917,7 @@ class Hls {
 * Attaches Hls.js to a media element
 */
 attachMedia(media) {
-
+ logger.log('attachMedia');
 this._media = media;
 this.trigger(Events.MEDIA_ATTACHING, {
 media: media
@@ -20008,7 +19928,7 @@ class Hls {
 * Detach Hls.js from the media
 */
 detachMedia() {
-
+ logger.log('detachMedia');
 this.trigger(Events.MEDIA_DETACHING, undefined);
 this._media = null;
 }
@@ -20025,7 +19945,7 @@ class Hls {
 });
 this._autoLevelCapping = -1;
 this._maxHdcpLevel = null;
-
+ logger.log(`loadSource:${loadingSource}`);
 if (media && loadedSource && (loadedSource !== loadingSource || this.bufferController.hasSourceTypes())) {
 this.detachMedia();
 this.attachMedia(media);
@@ -20044,7 +19964,7 @@ class Hls {
 * Defaults to -1 (None: starts from earliest point)
 */
 startLoad(startPosition = -1) {
-
+ logger.log(`startLoad(${startPosition})`);
 this.started = true;
 this.networkControllers.forEach(controller => {
 controller.startLoad(startPosition);
@@ -20055,7 +19975,7 @@ class Hls {
 * Stop loading of any stream data.
 */
 stopLoad() {
-
+ logger.log('stopLoad');
 this.started = false;
 this.networkControllers.forEach(controller => {
 controller.stopLoad();
@@ -20091,7 +20011,7 @@ class Hls {
 * Swap through possible audio codecs in the stream (for example to switch from stereo to 5.1)
 */
 swapAudioCodec() {
-
+ logger.log('swapAudioCodec');
 this.streamController.swapAudioCodec();
 }

@@ -20102,7 +20022,7 @@ class Hls {
 * Automatic recovery of media-errors by this process is configurable.
 */
 recoverMediaError() {
-
+ logger.log('recoverMediaError');
 const media = this._media;
 this.detachMedia();
 if (media) {
@@ -20132,7 +20052,7 @@ class Hls {
 * Set quality level index immediately. This will flush the current buffer to replace the quality asap. That means playback will interrupt at least shortly to re-buffer and re-sync eventually. Set to -1 for automatic level selection.
 */
 set currentLevel(newLevel) {
-
+ logger.log(`set currentLevel:${newLevel}`);
 this.levelController.manualLevel = newLevel;
 this.streamController.immediateLevelSwitch();
 }
@@ -20151,7 +20071,7 @@ class Hls {
 * @param newLevel - Pass -1 for automatic level selection
 */
 set nextLevel(newLevel) {
-
+ logger.log(`set nextLevel:${newLevel}`);
 this.levelController.manualLevel = newLevel;
 this.streamController.nextLevelSwitch();
 }
@@ -20170,7 +20090,7 @@ class Hls {
 * @param newLevel - Pass -1 for automatic level selection
 */
 set loadLevel(newLevel) {
-
+ logger.log(`set loadLevel:${newLevel}`);
 this.levelController.manualLevel = newLevel;
 }

@@ -20201,7 +20121,7 @@ class Hls {
 * Sets "first-level", see getter.
 */
 set firstLevel(newLevel) {
-
+ logger.log(`set firstLevel:${newLevel}`);
 this.levelController.firstLevel = newLevel;
 }

@@ -20226,7 +20146,7 @@ class Hls {
 * (determined from download of first segment)
 */
 set startLevel(newLevel) {
-
+ logger.log(`set startLevel:${newLevel}`);
 // if not in automatic start level detection, ensure startLevel is greater than minAutoLevel
 if (newLevel !== -1) {
 newLevel = Math.max(newLevel, this.minAutoLevel);
@@ -20301,7 +20221,7 @@ class Hls {
 */
 set autoLevelCapping(newLevel) {
 if (this._autoLevelCapping !== newLevel) {
-
+ logger.log(`set autoLevelCapping:${newLevel}`);
 this._autoLevelCapping = newLevel;
 this.levelController.checkMaxAutoUpdated();
 }
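Taken together, the class Hls hunks above only swap per-instance logging for the module-level logger; the public API they touch is unchanged between the canary and the 1.5.2 release. A minimal usage sketch against that API (the stream URL and element id are placeholders):

import Hls from 'hls.js';

if (Hls.isSupported()) {
  console.log(`hls.js version: ${Hls.version}`); // "1.5.2"
  const hls = new Hls({ debug: false }); // debug: true enables the verbose logs restored above
  const video = document.getElementById('video');
  hls.loadSource('https://example.com/stream/master.m3u8'); // placeholder URL
  hls.attachMedia(video);
  hls.on(Hls.Events.MANIFEST_PARSED, () => {
    hls.currentLevel = -1; // -1 keeps automatic level selection
    video.play();
  });
  // hls.recoverMediaError() and hls.destroy() correspond to the logged calls in the diff above
}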