hls.js 1.5.9-0.canary.10308 → 1.5.9
This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between these package versions as they appear in their public registries.
- package/README.md +3 -4
- package/dist/hls-demo.js +8 -21
- package/dist/hls-demo.js.map +1 -1
- package/dist/hls.js +4743 -6054
- package/dist/hls.js.d.ts +85 -108
- package/dist/hls.js.map +1 -1
- package/dist/hls.light.js +1708 -2381
- package/dist/hls.light.js.map +1 -1
- package/dist/hls.light.min.js +1 -1
- package/dist/hls.light.min.js.map +1 -1
- package/dist/hls.light.mjs +1293 -1978
- package/dist/hls.light.mjs.map +1 -1
- package/dist/hls.min.js +1 -1
- package/dist/hls.min.js.map +1 -1
- package/dist/hls.mjs +1549 -2866
- package/dist/hls.mjs.map +1 -1
- package/dist/hls.worker.js +1 -1
- package/dist/hls.worker.js.map +1 -1
- package/package.json +34 -34
- package/src/config.ts +2 -3
- package/src/controller/abr-controller.ts +20 -24
- package/src/controller/audio-stream-controller.ts +74 -68
- package/src/controller/audio-track-controller.ts +1 -1
- package/src/controller/base-playlist-controller.ts +10 -27
- package/src/controller/base-stream-controller.ts +38 -160
- package/src/controller/buffer-controller.ts +92 -230
- package/src/controller/buffer-operation-queue.ts +19 -16
- package/src/controller/cap-level-controller.ts +2 -3
- package/src/controller/cmcd-controller.ts +14 -51
- package/src/controller/content-steering-controller.ts +15 -29
- package/src/controller/eme-controller.ts +23 -10
- package/src/controller/error-controller.ts +8 -6
- package/src/controller/fps-controller.ts +3 -8
- package/src/controller/fragment-tracker.ts +11 -15
- package/src/controller/gap-controller.ts +16 -43
- package/src/controller/id3-track-controller.ts +7 -7
- package/src/controller/latency-controller.ts +11 -9
- package/src/controller/level-controller.ts +19 -37
- package/src/controller/stream-controller.ts +32 -37
- package/src/controller/subtitle-stream-controller.ts +40 -28
- package/src/controller/subtitle-track-controller.ts +3 -5
- package/src/controller/timeline-controller.ts +21 -19
- package/src/crypt/aes-crypto.ts +2 -21
- package/src/crypt/decrypter.ts +16 -32
- package/src/crypt/fast-aes-key.ts +5 -24
- package/src/demux/audio/aacdemuxer.ts +2 -2
- package/src/demux/audio/ac3-demuxer.ts +3 -4
- package/src/demux/audio/adts.ts +4 -9
- package/src/demux/audio/base-audio-demuxer.ts +14 -16
- package/src/demux/audio/mp3demuxer.ts +3 -4
- package/src/demux/audio/mpegaudio.ts +1 -1
- package/src/demux/id3.ts +411 -0
- package/src/demux/mp4demuxer.ts +7 -7
- package/src/demux/sample-aes.ts +0 -2
- package/src/demux/transmuxer-interface.ts +12 -4
- package/src/demux/transmuxer-worker.ts +4 -4
- package/src/demux/transmuxer.ts +3 -16
- package/src/demux/tsdemuxer.ts +37 -71
- package/src/demux/video/avc-video-parser.ts +119 -208
- package/src/demux/video/base-video-parser.ts +18 -147
- package/src/demux/video/exp-golomb.ts +208 -0
- package/src/events.ts +1 -8
- package/src/exports-named.ts +1 -1
- package/src/hls.ts +38 -61
- package/src/loader/fragment-loader.ts +3 -10
- package/src/loader/key-loader.ts +1 -3
- package/src/loader/level-key.ts +9 -10
- package/src/loader/playlist-loader.ts +5 -4
- package/src/remux/mp4-generator.ts +1 -196
- package/src/remux/mp4-remuxer.ts +8 -24
- package/src/task-loop.ts +2 -5
- package/src/types/component-api.ts +1 -3
- package/src/types/demuxer.ts +0 -4
- package/src/types/events.ts +0 -4
- package/src/types/remuxer.ts +1 -1
- package/src/utils/buffer-helper.ts +31 -12
- package/src/utils/cea-608-parser.ts +3 -1
- package/src/utils/codecs.ts +5 -34
- package/src/utils/fetch-loader.ts +1 -1
- package/src/utils/imsc1-ttml-parser.ts +1 -1
- package/src/utils/keysystem-util.ts +6 -1
- package/src/utils/logger.ts +23 -58
- package/src/utils/mp4-tools.ts +3 -5
- package/src/utils/webvtt-parser.ts +1 -1
- package/src/crypt/decrypter-aes-mode.ts +0 -4
- package/src/demux/video/hevc-video-parser.ts +0 -749
- package/src/empty-es.js +0 -5
- package/src/utils/encryption-methods-util.ts +0 -21
- package/src/utils/utf8-utils.ts +0 -18
package/dist/hls.light.mjs
CHANGED
@@ -209,7 +209,7 @@ function _toPrimitive(t, r) {
|
|
209
209
|
}
|
210
210
|
function _toPropertyKey(t) {
|
211
211
|
var i = _toPrimitive(t, "string");
|
212
|
-
return "symbol" == typeof i ? i : i
|
212
|
+
return "symbol" == typeof i ? i : String(i);
|
213
213
|
}
|
214
214
|
function _defineProperty(obj, key, value) {
|
215
215
|
key = _toPropertyKey(key);
|
@@ -256,7 +256,6 @@ let Events = /*#__PURE__*/function (Events) {
|
|
256
256
|
Events["MEDIA_ATTACHED"] = "hlsMediaAttached";
|
257
257
|
Events["MEDIA_DETACHING"] = "hlsMediaDetaching";
|
258
258
|
Events["MEDIA_DETACHED"] = "hlsMediaDetached";
|
259
|
-
Events["MEDIA_ENDED"] = "hlsMediaEnded";
|
260
259
|
Events["BUFFER_RESET"] = "hlsBufferReset";
|
261
260
|
Events["BUFFER_CODECS"] = "hlsBufferCodecs";
|
262
261
|
Events["BUFFER_CREATED"] = "hlsBufferCreated";
|
@@ -370,6 +369,58 @@ let ErrorDetails = /*#__PURE__*/function (ErrorDetails) {
|
|
370
369
|
return ErrorDetails;
|
371
370
|
}({});
|
372
371
|
|
372
|
+
const noop = function noop() {};
|
373
|
+
const fakeLogger = {
|
374
|
+
trace: noop,
|
375
|
+
debug: noop,
|
376
|
+
log: noop,
|
377
|
+
warn: noop,
|
378
|
+
info: noop,
|
379
|
+
error: noop
|
380
|
+
};
|
381
|
+
let exportedLogger = fakeLogger;
|
382
|
+
|
383
|
+
// let lastCallTime;
|
384
|
+
// function formatMsgWithTimeInfo(type, msg) {
|
385
|
+
// const now = Date.now();
|
386
|
+
// const diff = lastCallTime ? '+' + (now - lastCallTime) : '0';
|
387
|
+
// lastCallTime = now;
|
388
|
+
// msg = (new Date(now)).toISOString() + ' | [' + type + '] > ' + msg + ' ( ' + diff + ' ms )';
|
389
|
+
// return msg;
|
390
|
+
// }
|
391
|
+
|
392
|
+
function consolePrintFn(type) {
|
393
|
+
const func = self.console[type];
|
394
|
+
if (func) {
|
395
|
+
return func.bind(self.console, `[${type}] >`);
|
396
|
+
}
|
397
|
+
return noop;
|
398
|
+
}
|
399
|
+
function exportLoggerFunctions(debugConfig, ...functions) {
|
400
|
+
functions.forEach(function (type) {
|
401
|
+
exportedLogger[type] = debugConfig[type] ? debugConfig[type].bind(debugConfig) : consolePrintFn(type);
|
402
|
+
});
|
403
|
+
}
|
404
|
+
function enableLogs(debugConfig, id) {
|
405
|
+
// check that console is available
|
406
|
+
if (typeof console === 'object' && debugConfig === true || typeof debugConfig === 'object') {
|
407
|
+
exportLoggerFunctions(debugConfig,
|
408
|
+
// Remove out from list here to hard-disable a log-level
|
409
|
+
// 'trace',
|
410
|
+
'debug', 'log', 'info', 'warn', 'error');
|
411
|
+
// Some browsers don't allow to use bind on console object anyway
|
412
|
+
// fallback to default if needed
|
413
|
+
try {
|
414
|
+
exportedLogger.log(`Debug logs enabled for "${id}" in hls.js version ${"1.5.9"}`);
|
415
|
+
} catch (e) {
|
416
|
+
exportedLogger = fakeLogger;
|
417
|
+
}
|
418
|
+
} else {
|
419
|
+
exportedLogger = fakeLogger;
|
420
|
+
}
|
421
|
+
}
|
422
|
+
const logger = exportedLogger;
|
423
|
+
|
373
424
|
const DECIMAL_RESOLUTION_REGEX = /^(\d+)x(\d+)$/;
|
374
425
|
const ATTR_LIST_REGEX = /(.+?)=(".*?"|.*?)(?:,|$)/g;
|
375
426
|
|
@@ -451,84 +502,6 @@ class AttrList {
|
|
451
502
|
}
|
452
503
|
}
|
453
504
|
|
454
|
-
class Logger {
|
455
|
-
constructor(label, logger) {
|
456
|
-
this.trace = void 0;
|
457
|
-
this.debug = void 0;
|
458
|
-
this.log = void 0;
|
459
|
-
this.warn = void 0;
|
460
|
-
this.info = void 0;
|
461
|
-
this.error = void 0;
|
462
|
-
const lb = `[${label}]:`;
|
463
|
-
this.trace = noop;
|
464
|
-
this.debug = logger.debug.bind(null, lb);
|
465
|
-
this.log = logger.log.bind(null, lb);
|
466
|
-
this.warn = logger.warn.bind(null, lb);
|
467
|
-
this.info = logger.info.bind(null, lb);
|
468
|
-
this.error = logger.error.bind(null, lb);
|
469
|
-
}
|
470
|
-
}
|
471
|
-
const noop = function noop() {};
|
472
|
-
const fakeLogger = {
|
473
|
-
trace: noop,
|
474
|
-
debug: noop,
|
475
|
-
log: noop,
|
476
|
-
warn: noop,
|
477
|
-
info: noop,
|
478
|
-
error: noop
|
479
|
-
};
|
480
|
-
function createLogger() {
|
481
|
-
return _extends({}, fakeLogger);
|
482
|
-
}
|
483
|
-
|
484
|
-
// let lastCallTime;
|
485
|
-
// function formatMsgWithTimeInfo(type, msg) {
|
486
|
-
// const now = Date.now();
|
487
|
-
// const diff = lastCallTime ? '+' + (now - lastCallTime) : '0';
|
488
|
-
// lastCallTime = now;
|
489
|
-
// msg = (new Date(now)).toISOString() + ' | [' + type + '] > ' + msg + ' ( ' + diff + ' ms )';
|
490
|
-
// return msg;
|
491
|
-
// }
|
492
|
-
|
493
|
-
function consolePrintFn(type, id) {
|
494
|
-
const func = self.console[type];
|
495
|
-
return func ? func.bind(self.console, `${''}[${type}] >`) : noop;
|
496
|
-
}
|
497
|
-
function getLoggerFn(key, debugConfig, id) {
|
498
|
-
return debugConfig[key] ? debugConfig[key].bind(debugConfig) : consolePrintFn(key);
|
499
|
-
}
|
500
|
-
const exportedLogger = createLogger();
|
501
|
-
function enableLogs(debugConfig, context, id) {
|
502
|
-
// check that console is available
|
503
|
-
const newLogger = createLogger();
|
504
|
-
if (typeof console === 'object' && debugConfig === true || typeof debugConfig === 'object') {
|
505
|
-
const keys = [
|
506
|
-
// Remove out from list here to hard-disable a log-level
|
507
|
-
// 'trace',
|
508
|
-
'debug', 'log', 'info', 'warn', 'error'];
|
509
|
-
keys.forEach(key => {
|
510
|
-
newLogger[key] = getLoggerFn(key, debugConfig);
|
511
|
-
});
|
512
|
-
// Some browsers don't allow to use bind on console object anyway
|
513
|
-
// fallback to default if needed
|
514
|
-
try {
|
515
|
-
newLogger.log(`Debug logs enabled for "${context}" in hls.js version ${"1.5.9-0.canary.10308"}`);
|
516
|
-
} catch (e) {
|
517
|
-
/* log fn threw an exception. All logger methods are no-ops. */
|
518
|
-
return createLogger();
|
519
|
-
}
|
520
|
-
// global exported logger uses the same functions as new logger without `id`
|
521
|
-
keys.forEach(key => {
|
522
|
-
exportedLogger[key] = getLoggerFn(key, debugConfig);
|
523
|
-
});
|
524
|
-
} else {
|
525
|
-
// Reset global exported logger
|
526
|
-
_extends(exportedLogger, newLogger);
|
527
|
-
}
|
528
|
-
return newLogger;
|
529
|
-
}
|
530
|
-
const logger = exportedLogger;
|
531
|
-
|
532
505
|
// Avoid exporting const enum so that these values can be inlined
|
533
506
|
|
534
507
|
function isDateRangeCueAttribute(attrName) {
|
@@ -1018,32 +991,10 @@ class LevelDetails {
|
|
1018
991
|
}
|
1019
992
|
}
|
1020
993
|
|
1021
|
-
var DecrypterAesMode = {
|
1022
|
-
cbc: 0,
|
1023
|
-
ctr: 1
|
1024
|
-
};
|
1025
|
-
|
1026
|
-
function isFullSegmentEncryption(method) {
|
1027
|
-
return method === 'AES-128' || method === 'AES-256' || method === 'AES-256-CTR';
|
1028
|
-
}
|
1029
|
-
function getAesModeFromFullSegmentMethod(method) {
|
1030
|
-
switch (method) {
|
1031
|
-
case 'AES-128':
|
1032
|
-
case 'AES-256':
|
1033
|
-
return DecrypterAesMode.cbc;
|
1034
|
-
case 'AES-256-CTR':
|
1035
|
-
return DecrypterAesMode.ctr;
|
1036
|
-
default:
|
1037
|
-
throw new Error(`invalid full segment method ${method}`);
|
1038
|
-
}
|
1039
|
-
}
|
1040
|
-
|
1041
994
|
// This file is inserted as a shim for modules which we do not want to include into the distro.
|
1042
995
|
// This replacement is done in the "alias" plugin of the rollup config.
|
1043
|
-
|
1044
|
-
|
1045
|
-
var emptyEs = {};
|
1046
|
-
var HevcVideoParser = /*@__PURE__*/getDefaultExportFromCjs(emptyEs);
|
996
|
+
var empty = undefined;
|
997
|
+
var Cues = /*@__PURE__*/getDefaultExportFromCjs(empty);
|
1047
998
|
|
1048
999
|
function sliceUint8(array, start, end) {
|
1049
1000
|
// @ts-expect-error This polyfills IE11 usage of Uint8Array slice.
|
@@ -1051,104 +1002,373 @@ function sliceUint8(array, start, end) {
|
|
1051
1002
|
return Uint8Array.prototype.slice ? array.slice(start, end) : new Uint8Array(Array.prototype.slice.call(array, start, end));
|
1052
1003
|
}
|
1053
1004
|
|
1054
|
-
//
|
1055
|
-
|
1056
|
-
/* utf.js - UTF-8 <=> UTF-16 convertion
|
1057
|
-
*
|
1058
|
-
* Copyright (C) 1999 Masanao Izumo <iz@onicos.co.jp>
|
1059
|
-
* Version: 1.0
|
1060
|
-
* LastModified: Dec 25 1999
|
1061
|
-
* This library is free. You can redistribute it and/or modify it.
|
1062
|
-
*/
|
1005
|
+
// breaking up those two types in order to clarify what is happening in the decoding path.
|
1006
|
+
|
1063
1007
|
/**
|
1064
|
-
*
|
1065
|
-
*
|
1066
|
-
* @param
|
1067
|
-
*
|
1068
|
-
* @returns The string
|
1069
|
-
*
|
1070
|
-
* @group Utils
|
1071
|
-
*
|
1072
|
-
* @beta
|
1008
|
+
* Returns true if an ID3 header can be found at offset in data
|
1009
|
+
* @param data - The data to search
|
1010
|
+
* @param offset - The offset at which to start searching
|
1073
1011
|
*/
|
1074
|
-
|
1075
|
-
|
1076
|
-
|
1077
|
-
|
1078
|
-
|
1079
|
-
|
1080
|
-
|
1081
|
-
|
1012
|
+
const isHeader$2 = (data, offset) => {
|
1013
|
+
/*
|
1014
|
+
* http://id3.org/id3v2.3.0
|
1015
|
+
* [0] = 'I'
|
1016
|
+
* [1] = 'D'
|
1017
|
+
* [2] = '3'
|
1018
|
+
* [3,4] = {Version}
|
1019
|
+
* [5] = {Flags}
|
1020
|
+
* [6-9] = {ID3 Size}
|
1021
|
+
*
|
1022
|
+
* An ID3v2 tag can be detected with the following pattern:
|
1023
|
+
* $49 44 33 yy yy xx zz zz zz zz
|
1024
|
+
* Where yy is less than $FF, xx is the 'flags' byte and zz is less than $80
|
1025
|
+
*/
|
1026
|
+
if (offset + 10 <= data.length) {
|
1027
|
+
// look for 'ID3' identifier
|
1028
|
+
if (data[offset] === 0x49 && data[offset + 1] === 0x44 && data[offset + 2] === 0x33) {
|
1029
|
+
// check version is within range
|
1030
|
+
if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
|
1031
|
+
// check size is within range
|
1032
|
+
if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
|
1033
|
+
return true;
|
1034
|
+
}
|
1035
|
+
}
|
1082
1036
|
}
|
1083
|
-
// remove any null characters
|
1084
|
-
return decoded.replace(/\0/g, '');
|
1085
1037
|
}
|
1086
|
-
|
1087
|
-
|
1088
|
-
|
1089
|
-
|
1090
|
-
|
1091
|
-
|
1092
|
-
|
1093
|
-
|
1094
|
-
|
1095
|
-
|
1096
|
-
|
1097
|
-
|
1098
|
-
|
1099
|
-
|
1100
|
-
|
1101
|
-
|
1102
|
-
|
1103
|
-
|
1104
|
-
|
1105
|
-
|
1106
|
-
|
1107
|
-
|
1108
|
-
case 7:
|
1109
|
-
// 0xxxxxxx
|
1110
|
-
out += String.fromCharCode(c);
|
1111
|
-
break;
|
1112
|
-
case 12:
|
1113
|
-
case 13:
|
1114
|
-
// 110x xxxx 10xx xxxx
|
1115
|
-
char2 = array[i++];
|
1116
|
-
out += String.fromCharCode((c & 0x1f) << 6 | char2 & 0x3f);
|
1117
|
-
break;
|
1118
|
-
case 14:
|
1119
|
-
// 1110 xxxx 10xx xxxx 10xx xxxx
|
1120
|
-
char2 = array[i++];
|
1121
|
-
char3 = array[i++];
|
1122
|
-
out += String.fromCharCode((c & 0x0f) << 12 | (char2 & 0x3f) << 6 | (char3 & 0x3f) << 0);
|
1123
|
-
break;
|
1038
|
+
return false;
|
1039
|
+
};
|
1040
|
+
|
1041
|
+
/**
|
1042
|
+
* Returns true if an ID3 footer can be found at offset in data
|
1043
|
+
* @param data - The data to search
|
1044
|
+
* @param offset - The offset at which to start searching
|
1045
|
+
*/
|
1046
|
+
const isFooter = (data, offset) => {
|
1047
|
+
/*
|
1048
|
+
* The footer is a copy of the header, but with a different identifier
|
1049
|
+
*/
|
1050
|
+
if (offset + 10 <= data.length) {
|
1051
|
+
// look for '3DI' identifier
|
1052
|
+
if (data[offset] === 0x33 && data[offset + 1] === 0x44 && data[offset + 2] === 0x49) {
|
1053
|
+
// check version is within range
|
1054
|
+
if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
|
1055
|
+
// check size is within range
|
1056
|
+
if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
|
1057
|
+
return true;
|
1058
|
+
}
|
1059
|
+
}
|
1124
1060
|
}
|
1125
1061
|
}
|
1126
|
-
return
|
1127
|
-
}
|
1062
|
+
return false;
|
1063
|
+
};
|
1128
1064
|
|
1129
1065
|
/**
|
1130
|
-
*
|
1066
|
+
* Returns any adjacent ID3 tags found in data starting at offset, as one block of data
|
1067
|
+
* @param data - The data to search in
|
1068
|
+
* @param offset - The offset at which to start searching
|
1069
|
+
* @returns the block of data containing any ID3 tags found
|
1070
|
+
* or *undefined* if no header is found at the starting offset
|
1131
1071
|
*/
|
1072
|
+
const getID3Data = (data, offset) => {
|
1073
|
+
const front = offset;
|
1074
|
+
let length = 0;
|
1075
|
+
while (isHeader$2(data, offset)) {
|
1076
|
+
// ID3 header is 10 bytes
|
1077
|
+
length += 10;
|
1078
|
+
const size = readSize(data, offset + 6);
|
1079
|
+
length += size;
|
1080
|
+
if (isFooter(data, offset + 10)) {
|
1081
|
+
// ID3 footer is 10 bytes
|
1082
|
+
length += 10;
|
1083
|
+
}
|
1084
|
+
offset += length;
|
1085
|
+
}
|
1086
|
+
if (length > 0) {
|
1087
|
+
return data.subarray(front, front + length);
|
1088
|
+
}
|
1089
|
+
return undefined;
|
1090
|
+
};
|
1091
|
+
const readSize = (data, offset) => {
|
1092
|
+
let size = 0;
|
1093
|
+
size = (data[offset] & 0x7f) << 21;
|
1094
|
+
size |= (data[offset + 1] & 0x7f) << 14;
|
1095
|
+
size |= (data[offset + 2] & 0x7f) << 7;
|
1096
|
+
size |= data[offset + 3] & 0x7f;
|
1097
|
+
return size;
|
1098
|
+
};
|
1099
|
+
const canParse$2 = (data, offset) => {
|
1100
|
+
return isHeader$2(data, offset) && readSize(data, offset + 6) + 10 <= data.length - offset;
|
1101
|
+
};
|
1132
1102
|
|
1133
|
-
|
1134
|
-
|
1135
|
-
|
1136
|
-
|
1137
|
-
|
1138
|
-
|
1139
|
-
|
1140
|
-
|
1141
|
-
|
1103
|
+
/**
|
1104
|
+
* Searches for the Elementary Stream timestamp found in the ID3 data chunk
|
1105
|
+
* @param data - Block of data containing one or more ID3 tags
|
1106
|
+
*/
|
1107
|
+
const getTimeStamp = data => {
|
1108
|
+
const frames = getID3Frames(data);
|
1109
|
+
for (let i = 0; i < frames.length; i++) {
|
1110
|
+
const frame = frames[i];
|
1111
|
+
if (isTimeStampFrame(frame)) {
|
1112
|
+
return readTimeStamp(frame);
|
1142
1113
|
}
|
1143
|
-
return str;
|
1144
1114
|
}
|
1115
|
+
return undefined;
|
1145
1116
|
};
|
1146
1117
|
|
1147
|
-
|
1148
|
-
|
1118
|
+
/**
|
1119
|
+
* Returns true if the ID3 frame is an Elementary Stream timestamp frame
|
1120
|
+
*/
|
1121
|
+
const isTimeStampFrame = frame => {
|
1122
|
+
return frame && frame.key === 'PRIV' && frame.info === 'com.apple.streaming.transportStreamTimestamp';
|
1123
|
+
};
|
1124
|
+
const getFrameData = data => {
|
1125
|
+
/*
|
1126
|
+
Frame ID $xx xx xx xx (four characters)
|
1127
|
+
Size $xx xx xx xx
|
1128
|
+
Flags $xx xx
|
1129
|
+
*/
|
1130
|
+
const type = String.fromCharCode(data[0], data[1], data[2], data[3]);
|
1131
|
+
const size = readSize(data, 4);
|
1149
1132
|
|
1150
|
-
//
|
1151
|
-
|
1133
|
+
// skip frame id, size, and flags
|
1134
|
+
const offset = 10;
|
1135
|
+
return {
|
1136
|
+
type,
|
1137
|
+
size,
|
1138
|
+
data: data.subarray(offset, offset + size)
|
1139
|
+
};
|
1140
|
+
};
|
1141
|
+
|
1142
|
+
/**
|
1143
|
+
* Returns an array of ID3 frames found in all the ID3 tags in the id3Data
|
1144
|
+
* @param id3Data - The ID3 data containing one or more ID3 tags
|
1145
|
+
*/
|
1146
|
+
const getID3Frames = id3Data => {
|
1147
|
+
let offset = 0;
|
1148
|
+
const frames = [];
|
1149
|
+
while (isHeader$2(id3Data, offset)) {
|
1150
|
+
const size = readSize(id3Data, offset + 6);
|
1151
|
+
// skip past ID3 header
|
1152
|
+
offset += 10;
|
1153
|
+
const end = offset + size;
|
1154
|
+
// loop through frames in the ID3 tag
|
1155
|
+
while (offset + 8 < end) {
|
1156
|
+
const frameData = getFrameData(id3Data.subarray(offset));
|
1157
|
+
const frame = decodeFrame(frameData);
|
1158
|
+
if (frame) {
|
1159
|
+
frames.push(frame);
|
1160
|
+
}
|
1161
|
+
|
1162
|
+
// skip frame header and frame data
|
1163
|
+
offset += frameData.size + 10;
|
1164
|
+
}
|
1165
|
+
if (isFooter(id3Data, offset)) {
|
1166
|
+
offset += 10;
|
1167
|
+
}
|
1168
|
+
}
|
1169
|
+
return frames;
|
1170
|
+
};
|
1171
|
+
const decodeFrame = frame => {
|
1172
|
+
if (frame.type === 'PRIV') {
|
1173
|
+
return decodePrivFrame(frame);
|
1174
|
+
} else if (frame.type[0] === 'W') {
|
1175
|
+
return decodeURLFrame(frame);
|
1176
|
+
}
|
1177
|
+
return decodeTextFrame(frame);
|
1178
|
+
};
|
1179
|
+
const decodePrivFrame = frame => {
|
1180
|
+
/*
|
1181
|
+
Format: <text string>\0<binary data>
|
1182
|
+
*/
|
1183
|
+
if (frame.size < 2) {
|
1184
|
+
return undefined;
|
1185
|
+
}
|
1186
|
+
const owner = utf8ArrayToStr(frame.data, true);
|
1187
|
+
const privateData = new Uint8Array(frame.data.subarray(owner.length + 1));
|
1188
|
+
return {
|
1189
|
+
key: frame.type,
|
1190
|
+
info: owner,
|
1191
|
+
data: privateData.buffer
|
1192
|
+
};
|
1193
|
+
};
|
1194
|
+
const decodeTextFrame = frame => {
|
1195
|
+
if (frame.size < 2) {
|
1196
|
+
return undefined;
|
1197
|
+
}
|
1198
|
+
if (frame.type === 'TXXX') {
|
1199
|
+
/*
|
1200
|
+
Format:
|
1201
|
+
[0] = {Text Encoding}
|
1202
|
+
[1-?] = {Description}\0{Value}
|
1203
|
+
*/
|
1204
|
+
let index = 1;
|
1205
|
+
const description = utf8ArrayToStr(frame.data.subarray(index), true);
|
1206
|
+
index += description.length + 1;
|
1207
|
+
const value = utf8ArrayToStr(frame.data.subarray(index));
|
1208
|
+
return {
|
1209
|
+
key: frame.type,
|
1210
|
+
info: description,
|
1211
|
+
data: value
|
1212
|
+
};
|
1213
|
+
}
|
1214
|
+
/*
|
1215
|
+
Format:
|
1216
|
+
[0] = {Text Encoding}
|
1217
|
+
[1-?] = {Value}
|
1218
|
+
*/
|
1219
|
+
const text = utf8ArrayToStr(frame.data.subarray(1));
|
1220
|
+
return {
|
1221
|
+
key: frame.type,
|
1222
|
+
data: text
|
1223
|
+
};
|
1224
|
+
};
|
1225
|
+
const decodeURLFrame = frame => {
|
1226
|
+
if (frame.type === 'WXXX') {
|
1227
|
+
/*
|
1228
|
+
Format:
|
1229
|
+
[0] = {Text Encoding}
|
1230
|
+
[1-?] = {Description}\0{URL}
|
1231
|
+
*/
|
1232
|
+
if (frame.size < 2) {
|
1233
|
+
return undefined;
|
1234
|
+
}
|
1235
|
+
let index = 1;
|
1236
|
+
const description = utf8ArrayToStr(frame.data.subarray(index), true);
|
1237
|
+
index += description.length + 1;
|
1238
|
+
const value = utf8ArrayToStr(frame.data.subarray(index));
|
1239
|
+
return {
|
1240
|
+
key: frame.type,
|
1241
|
+
info: description,
|
1242
|
+
data: value
|
1243
|
+
};
|
1244
|
+
}
|
1245
|
+
/*
|
1246
|
+
Format:
|
1247
|
+
[0-?] = {URL}
|
1248
|
+
*/
|
1249
|
+
const url = utf8ArrayToStr(frame.data);
|
1250
|
+
return {
|
1251
|
+
key: frame.type,
|
1252
|
+
data: url
|
1253
|
+
};
|
1254
|
+
};
|
1255
|
+
const readTimeStamp = timeStampFrame => {
|
1256
|
+
if (timeStampFrame.data.byteLength === 8) {
|
1257
|
+
const data = new Uint8Array(timeStampFrame.data);
|
1258
|
+
// timestamp is 33 bit expressed as a big-endian eight-octet number,
|
1259
|
+
// with the upper 31 bits set to zero.
|
1260
|
+
const pts33Bit = data[3] & 0x1;
|
1261
|
+
let timestamp = (data[4] << 23) + (data[5] << 15) + (data[6] << 7) + data[7];
|
1262
|
+
timestamp /= 45;
|
1263
|
+
if (pts33Bit) {
|
1264
|
+
timestamp += 47721858.84;
|
1265
|
+
} // 2^32 / 90
|
1266
|
+
|
1267
|
+
return Math.round(timestamp);
|
1268
|
+
}
|
1269
|
+
return undefined;
|
1270
|
+
};
|
1271
|
+
|
1272
|
+
// http://stackoverflow.com/questions/8936984/uint8array-to-string-in-javascript/22373197
|
1273
|
+
// http://www.onicos.com/staff/iz/amuse/javascript/expert/utf.txt
|
1274
|
+
/* utf.js - UTF-8 <=> UTF-16 convertion
|
1275
|
+
*
|
1276
|
+
* Copyright (C) 1999 Masanao Izumo <iz@onicos.co.jp>
|
1277
|
+
* Version: 1.0
|
1278
|
+
* LastModified: Dec 25 1999
|
1279
|
+
* This library is free. You can redistribute it and/or modify it.
|
1280
|
+
*/
|
1281
|
+
const utf8ArrayToStr = (array, exitOnNull = false) => {
|
1282
|
+
const decoder = getTextDecoder();
|
1283
|
+
if (decoder) {
|
1284
|
+
const decoded = decoder.decode(array);
|
1285
|
+
if (exitOnNull) {
|
1286
|
+
// grab up to the first null
|
1287
|
+
const idx = decoded.indexOf('\0');
|
1288
|
+
return idx !== -1 ? decoded.substring(0, idx) : decoded;
|
1289
|
+
}
|
1290
|
+
|
1291
|
+
// remove any null characters
|
1292
|
+
return decoded.replace(/\0/g, '');
|
1293
|
+
}
|
1294
|
+
const len = array.length;
|
1295
|
+
let c;
|
1296
|
+
let char2;
|
1297
|
+
let char3;
|
1298
|
+
let out = '';
|
1299
|
+
let i = 0;
|
1300
|
+
while (i < len) {
|
1301
|
+
c = array[i++];
|
1302
|
+
if (c === 0x00 && exitOnNull) {
|
1303
|
+
return out;
|
1304
|
+
} else if (c === 0x00 || c === 0x03) {
|
1305
|
+
// If the character is 3 (END_OF_TEXT) or 0 (NULL) then skip it
|
1306
|
+
continue;
|
1307
|
+
}
|
1308
|
+
switch (c >> 4) {
|
1309
|
+
case 0:
|
1310
|
+
case 1:
|
1311
|
+
case 2:
|
1312
|
+
case 3:
|
1313
|
+
case 4:
|
1314
|
+
case 5:
|
1315
|
+
case 6:
|
1316
|
+
case 7:
|
1317
|
+
// 0xxxxxxx
|
1318
|
+
out += String.fromCharCode(c);
|
1319
|
+
break;
|
1320
|
+
case 12:
|
1321
|
+
case 13:
|
1322
|
+
// 110x xxxx 10xx xxxx
|
1323
|
+
char2 = array[i++];
|
1324
|
+
out += String.fromCharCode((c & 0x1f) << 6 | char2 & 0x3f);
|
1325
|
+
break;
|
1326
|
+
case 14:
|
1327
|
+
// 1110 xxxx 10xx xxxx 10xx xxxx
|
1328
|
+
char2 = array[i++];
|
1329
|
+
char3 = array[i++];
|
1330
|
+
out += String.fromCharCode((c & 0x0f) << 12 | (char2 & 0x3f) << 6 | (char3 & 0x3f) << 0);
|
1331
|
+
break;
|
1332
|
+
}
|
1333
|
+
}
|
1334
|
+
return out;
|
1335
|
+
};
|
1336
|
+
let decoder;
|
1337
|
+
function getTextDecoder() {
|
1338
|
+
// On Play Station 4, TextDecoder is defined but partially implemented.
|
1339
|
+
// Manual decoding option is preferable
|
1340
|
+
if (navigator.userAgent.includes('PlayStation 4')) {
|
1341
|
+
return;
|
1342
|
+
}
|
1343
|
+
if (!decoder && typeof self.TextDecoder !== 'undefined') {
|
1344
|
+
decoder = new self.TextDecoder('utf-8');
|
1345
|
+
}
|
1346
|
+
return decoder;
|
1347
|
+
}
|
1348
|
+
|
1349
|
+
/**
|
1350
|
+
* hex dump helper class
|
1351
|
+
*/
|
1352
|
+
|
1353
|
+
const Hex = {
|
1354
|
+
hexDump: function (array) {
|
1355
|
+
let str = '';
|
1356
|
+
for (let i = 0; i < array.length; i++) {
|
1357
|
+
let h = array[i].toString(16);
|
1358
|
+
if (h.length < 2) {
|
1359
|
+
h = '0' + h;
|
1360
|
+
}
|
1361
|
+
str += h;
|
1362
|
+
}
|
1363
|
+
return str;
|
1364
|
+
}
|
1365
|
+
};
|
1366
|
+
|
1367
|
+
const UINT32_MAX$1 = Math.pow(2, 32) - 1;
|
1368
|
+
const push = [].push;
|
1369
|
+
|
1370
|
+
// We are using fixed track IDs for driving the MP4 remuxer
|
1371
|
+
// instead of following the TS PIDs.
|
1152
1372
|
// There is no reason not to do this and some browsers/SourceBuffer-demuxers
|
1153
1373
|
// may not like if there are TrackID "switches"
|
1154
1374
|
// See https://github.com/video-dev/hls.js/issues/1331
|
@@ -1406,7 +1626,7 @@ function parseStsd(stsd) {
|
|
1406
1626
|
{
|
1407
1627
|
const codecBox = findBox(sampleEntries, [fourCC])[0];
|
1408
1628
|
const esdsBox = findBox(codecBox.subarray(28), ['esds'])[0];
|
1409
|
-
if (esdsBox && esdsBox.length >
|
1629
|
+
if (esdsBox && esdsBox.length > 12) {
|
1410
1630
|
let i = 4;
|
1411
1631
|
// ES Descriptor tag
|
1412
1632
|
if (esdsBox[i++] !== 0x03) {
|
@@ -1521,9 +1741,7 @@ function parseStsd(stsd) {
|
|
1521
1741
|
}
|
1522
1742
|
function skipBERInteger(bytes, i) {
|
1523
1743
|
const limit = i + 5;
|
1524
|
-
while (bytes[i++] & 0x80 && i < limit) {
|
1525
|
-
/* do nothing */
|
1526
|
-
}
|
1744
|
+
while (bytes[i++] & 0x80 && i < limit) {}
|
1527
1745
|
return i;
|
1528
1746
|
}
|
1529
1747
|
function toHex(x) {
|
@@ -2215,12 +2433,12 @@ class LevelKey {
|
|
2215
2433
|
this.keyFormatVersions = formatversions;
|
2216
2434
|
this.iv = iv;
|
2217
2435
|
this.encrypted = method ? method !== 'NONE' : false;
|
2218
|
-
this.isCommonEncryption = this.encrypted &&
|
2436
|
+
this.isCommonEncryption = this.encrypted && method !== 'AES-128';
|
2219
2437
|
}
|
2220
2438
|
isSupported() {
|
2221
2439
|
// If it's Segment encryption or No encryption, just select that key system
|
2222
2440
|
if (this.method) {
|
2223
|
-
if (
|
2441
|
+
if (this.method === 'AES-128' || this.method === 'NONE') {
|
2224
2442
|
return true;
|
2225
2443
|
}
|
2226
2444
|
if (this.keyFormat === 'identity') {
|
@@ -2234,13 +2452,14 @@ class LevelKey {
|
|
2234
2452
|
if (!this.encrypted || !this.uri) {
|
2235
2453
|
return null;
|
2236
2454
|
}
|
2237
|
-
if (
|
2455
|
+
if (this.method === 'AES-128' && this.uri && !this.iv) {
|
2238
2456
|
if (typeof sn !== 'number') {
|
2239
2457
|
// We are fetching decryption data for a initialization segment
|
2240
|
-
// If the segment was encrypted with AES-128
|
2458
|
+
// If the segment was encrypted with AES-128
|
2241
2459
|
// It must have an IV defined. We cannot substitute the Segment Number in.
|
2242
|
-
|
2243
|
-
|
2460
|
+
if (this.method === 'AES-128' && !this.iv) {
|
2461
|
+
logger.warn(`missing IV for initialization segment with method="${this.method}" - compliance issue`);
|
2462
|
+
}
|
2244
2463
|
// Explicitly set sn to resulting value from implicit conversions 'initSegment' values for IV generation.
|
2245
2464
|
sn = 0;
|
2246
2465
|
}
|
@@ -2390,28 +2609,23 @@ function getCodecCompatibleNameLower(lowerCaseCodec, preferManagedMediaSource =
|
|
2390
2609
|
if (CODEC_COMPATIBLE_NAMES[lowerCaseCodec]) {
|
2391
2610
|
return CODEC_COMPATIBLE_NAMES[lowerCaseCodec];
|
2392
2611
|
}
|
2612
|
+
|
2613
|
+
// Idealy fLaC and Opus would be first (spec-compliant) but
|
2614
|
+
// some browsers will report that fLaC is supported then fail.
|
2615
|
+
// see: https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
|
2393
2616
|
const codecsToCheck = {
|
2394
|
-
// Idealy fLaC and Opus would be first (spec-compliant) but
|
2395
|
-
// some browsers will report that fLaC is supported then fail.
|
2396
|
-
// see: https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
|
2397
2617
|
flac: ['flac', 'fLaC', 'FLAC'],
|
2398
|
-
opus: ['opus', 'Opus']
|
2399
|
-
// Replace audio codec info if browser does not support mp4a.40.34,
|
2400
|
-
// and demuxer can fallback to 'audio/mpeg' or 'audio/mp4;codecs="mp3"'
|
2401
|
-
'mp4a.40.34': ['mp3']
|
2618
|
+
opus: ['opus', 'Opus']
|
2402
2619
|
}[lowerCaseCodec];
|
2403
2620
|
for (let i = 0; i < codecsToCheck.length; i++) {
|
2404
|
-
var _getMediaSource;
|
2405
2621
|
if (isCodecMediaSourceSupported(codecsToCheck[i], 'audio', preferManagedMediaSource)) {
|
2406
2622
|
CODEC_COMPATIBLE_NAMES[lowerCaseCodec] = codecsToCheck[i];
|
2407
2623
|
return codecsToCheck[i];
|
2408
|
-
} else if (codecsToCheck[i] === 'mp3' && (_getMediaSource = getMediaSource(preferManagedMediaSource)) != null && _getMediaSource.isTypeSupported('audio/mpeg')) {
|
2409
|
-
return '';
|
2410
2624
|
}
|
2411
2625
|
}
|
2412
2626
|
return lowerCaseCodec;
|
2413
2627
|
}
|
2414
|
-
const AUDIO_CODEC_REGEXP = /flac|opus
|
2628
|
+
const AUDIO_CODEC_REGEXP = /flac|opus/i;
|
2415
2629
|
function getCodecCompatibleName(codec, preferManagedMediaSource = true) {
|
2416
2630
|
return codec.replace(AUDIO_CODEC_REGEXP, m => getCodecCompatibleNameLower(m.toLowerCase(), preferManagedMediaSource));
|
2417
2631
|
}
|
@@ -2434,16 +2648,6 @@ function convertAVC1ToAVCOTI(codec) {
|
|
2434
2648
|
}
|
2435
2649
|
return codec;
|
2436
2650
|
}
|
2437
|
-
function getM2TSSupportedAudioTypes(preferManagedMediaSource) {
|
2438
|
-
const MediaSource = getMediaSource(preferManagedMediaSource) || {
|
2439
|
-
isTypeSupported: () => false
|
2440
|
-
};
|
2441
|
-
return {
|
2442
|
-
mpeg: MediaSource.isTypeSupported('audio/mpeg'),
|
2443
|
-
mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
|
2444
|
-
ac3: false
|
2445
|
-
};
|
2446
|
-
}
|
2447
2651
|
|
2448
2652
|
const MASTER_PLAYLIST_REGEX = /#EXT-X-STREAM-INF:([^\r\n]*)(?:[\r\n](?:#[^\r\n]*)?)*([^\r\n]+)|#EXT-X-(SESSION-DATA|SESSION-KEY|DEFINE|CONTENT-STEERING|START):([^\r\n]*)[\r\n]+/g;
|
2449
2653
|
const MASTER_PLAYLIST_MEDIA_REGEX = /#EXT-X-MEDIA:(.*)/g;
|
@@ -3244,10 +3448,10 @@ class PlaylistLoader {
|
|
3244
3448
|
const loaderContext = loader.context;
|
3245
3449
|
if (loaderContext && loaderContext.url === context.url && loaderContext.level === context.level) {
|
3246
3450
|
// same URL can't overlap
|
3247
|
-
|
3451
|
+
logger.trace('[playlist-loader]: playlist request ongoing');
|
3248
3452
|
return;
|
3249
3453
|
}
|
3250
|
-
|
3454
|
+
logger.log(`[playlist-loader]: aborting previous loader for type: ${context.type}`);
|
3251
3455
|
loader.abort();
|
3252
3456
|
}
|
3253
3457
|
|
@@ -3357,7 +3561,7 @@ class PlaylistLoader {
|
|
3357
3561
|
// alt audio rendition in which quality levels (main)
|
3358
3562
|
// contains both audio+video. but with mixed audio track not signaled
|
3359
3563
|
if (!embeddedAudioFound && levels[0].audioCodec && !levels[0].attrs.AUDIO) {
|
3360
|
-
|
3564
|
+
logger.log('[playlist-loader]: audio codec signaled in quality level, but no embedded audio track signaled, create one');
|
3361
3565
|
audioTracks.unshift({
|
3362
3566
|
type: 'main',
|
3363
3567
|
name: 'main',
|
@@ -3456,7 +3660,7 @@ class PlaylistLoader {
|
|
3456
3660
|
message += ` id: ${context.id} group-id: "${context.groupId}"`;
|
3457
3661
|
}
|
3458
3662
|
const error = new Error(message);
|
3459
|
-
|
3663
|
+
logger.warn(`[playlist-loader]: ${message}`);
|
3460
3664
|
let details = ErrorDetails.UNKNOWN;
|
3461
3665
|
let fatal = false;
|
3462
3666
|
const loader = this.getInternalLoader(context);
|
@@ -3694,423 +3898,24 @@ var MetadataSchema = {
|
|
3694
3898
|
emsg: "https://aomedia.org/emsg/ID3"
|
3695
3899
|
};
|
3696
3900
|
|
3697
|
-
|
3698
|
-
|
3699
|
-
|
3700
|
-
|
3701
|
-
|
3702
|
-
|
3703
|
-
|
3704
|
-
|
3705
|
-
|
3706
|
-
|
3707
|
-
|
3708
|
-
|
3709
|
-
|
3710
|
-
|
3711
|
-
|
3712
|
-
|
3713
|
-
return undefined;
|
3901
|
+
const MIN_CUE_DURATION = 0.25;
|
3902
|
+
function getCueClass() {
|
3903
|
+
if (typeof self === 'undefined') return undefined;
|
3904
|
+
return self.VTTCue || self.TextTrackCue;
|
3905
|
+
}
|
3906
|
+
function createCueWithDataFields(Cue, startTime, endTime, data, type) {
|
3907
|
+
let cue = new Cue(startTime, endTime, '');
|
3908
|
+
try {
|
3909
|
+
cue.value = data;
|
3910
|
+
if (type) {
|
3911
|
+
cue.type = type;
|
3912
|
+
}
|
3913
|
+
} catch (e) {
|
3914
|
+
cue = new Cue(startTime, endTime, JSON.stringify(type ? _objectSpread2({
|
3915
|
+
type
|
3916
|
+
}, data) : data));
|
3714
3917
|
}
|
3715
|
-
|
3716
|
-
const privateData = new Uint8Array(frame.data.subarray(owner.length + 1));
|
3717
|
-
return {
|
3718
|
-
key: frame.type,
|
3719
|
-
info: owner,
|
3720
|
-
data: privateData.buffer
|
3721
|
-
};
|
3722
|
-
}
|
3723
|
-
|
3724
|
-
/**
|
3725
|
-
* Decodes an ID3 text frame
|
3726
|
-
*
|
3727
|
-
* @param frame - the ID3 text frame
|
3728
|
-
*
|
3729
|
-
* @returns The decoded ID3 text frame
|
3730
|
-
*
|
3731
|
-
* @internal
|
3732
|
-
*
|
3733
|
-
* @group ID3
|
3734
|
-
*/
|
3735
|
-
function decodeId3TextFrame(frame) {
|
3736
|
-
if (frame.size < 2) {
|
3737
|
-
return undefined;
|
3738
|
-
}
|
3739
|
-
if (frame.type === 'TXXX') {
|
3740
|
-
/*
|
3741
|
-
Format:
|
3742
|
-
[0] = {Text Encoding}
|
3743
|
-
[1-?] = {Description}\0{Value}
|
3744
|
-
*/
|
3745
|
-
let index = 1;
|
3746
|
-
const description = utf8ArrayToStr(frame.data.subarray(index), true);
|
3747
|
-
index += description.length + 1;
|
3748
|
-
const value = utf8ArrayToStr(frame.data.subarray(index));
|
3749
|
-
return {
|
3750
|
-
key: frame.type,
|
3751
|
-
info: description,
|
3752
|
-
data: value
|
3753
|
-
};
|
3754
|
-
}
|
3755
|
-
/*
|
3756
|
-
Format:
|
3757
|
-
[0] = {Text Encoding}
|
3758
|
-
[1-?] = {Value}
|
3759
|
-
*/
|
3760
|
-
const text = utf8ArrayToStr(frame.data.subarray(1));
|
3761
|
-
return {
|
3762
|
-
key: frame.type,
|
3763
|
-
info: '',
|
3764
|
-
data: text
|
3765
|
-
};
|
3766
|
-
}
|
3767
|
-
|
3768
|
-
/**
|
3769
|
-
* Decode a URL frame
|
3770
|
-
*
|
3771
|
-
* @param frame - the ID3 URL frame
|
3772
|
-
*
|
3773
|
-
* @returns The decoded ID3 URL frame
|
3774
|
-
*
|
3775
|
-
* @internal
|
3776
|
-
*
|
3777
|
-
* @group ID3
|
3778
|
-
*/
|
3779
|
-
function decodeId3UrlFrame(frame) {
|
3780
|
-
if (frame.type === 'WXXX') {
|
3781
|
-
/*
|
3782
|
-
Format:
|
3783
|
-
[0] = {Text Encoding}
|
3784
|
-
[1-?] = {Description}\0{URL}
|
3785
|
-
*/
|
3786
|
-
if (frame.size < 2) {
|
3787
|
-
return undefined;
|
3788
|
-
}
|
3789
|
-
let index = 1;
|
3790
|
-
const description = utf8ArrayToStr(frame.data.subarray(index), true);
|
3791
|
-
index += description.length + 1;
|
3792
|
-
const value = utf8ArrayToStr(frame.data.subarray(index));
|
3793
|
-
return {
|
3794
|
-
key: frame.type,
|
3795
|
-
info: description,
|
3796
|
-
data: value
|
3797
|
-
};
|
3798
|
-
}
|
3799
|
-
/*
|
3800
|
-
Format:
|
3801
|
-
[0-?] = {URL}
|
3802
|
-
*/
|
3803
|
-
const url = utf8ArrayToStr(frame.data);
|
3804
|
-
return {
|
3805
|
-
key: frame.type,
|
3806
|
-
info: '',
|
3807
|
-
data: url
|
3808
|
-
};
|
3809
|
-
}
|
3810
|
-
|
3811
|
-
function toUint8(data, offset = 0, length = Infinity) {
|
3812
|
-
return view(data, offset, length, Uint8Array);
|
3813
|
-
}
|
3814
|
-
function view(data, offset, length, Type) {
|
3815
|
-
const buffer = unsafeGetArrayBuffer(data);
|
3816
|
-
let bytesPerElement = 1;
|
3817
|
-
if ('BYTES_PER_ELEMENT' in Type) {
|
3818
|
-
bytesPerElement = Type.BYTES_PER_ELEMENT;
|
3819
|
-
}
|
3820
|
-
// Absolute end of the |data| view within |buffer|.
|
3821
|
-
const dataOffset = isArrayBufferView(data) ? data.byteOffset : 0;
|
3822
|
-
const dataEnd = (dataOffset + data.byteLength) / bytesPerElement;
|
3823
|
-
// Absolute start of the result within |buffer|.
|
3824
|
-
const rawStart = (dataOffset + offset) / bytesPerElement;
|
3825
|
-
const start = Math.floor(Math.max(0, Math.min(rawStart, dataEnd)));
|
3826
|
-
// Absolute end of the result within |buffer|.
|
3827
|
-
const end = Math.floor(Math.min(start + Math.max(length, 0), dataEnd));
|
3828
|
-
return new Type(buffer, start, end - start);
|
3829
|
-
}
|
3830
|
-
function unsafeGetArrayBuffer(view) {
|
3831
|
-
if (view instanceof ArrayBuffer) {
|
3832
|
-
return view;
|
3833
|
-
} else {
|
3834
|
-
return view.buffer;
|
3835
|
-
}
|
3836
|
-
}
|
3837
|
-
function isArrayBufferView(obj) {
|
3838
|
-
return obj && obj.buffer instanceof ArrayBuffer && obj.byteLength !== undefined && obj.byteOffset !== undefined;
|
3839
|
-
}
|
3840
|
-
|
3841
|
-
function toArrayBuffer(view) {
|
3842
|
-
if (view instanceof ArrayBuffer) {
|
3843
|
-
return view;
|
3844
|
-
} else {
|
3845
|
-
if (view.byteOffset == 0 && view.byteLength == view.buffer.byteLength) {
|
3846
|
-
// This is a TypedArray over the whole buffer.
|
3847
|
-
return view.buffer;
|
3848
|
-
}
|
3849
|
-
// This is a 'view' on the buffer. Create a new buffer that only contains
|
3850
|
-
// the data. Note that since this isn't an ArrayBuffer, the 'new' call
|
3851
|
-
// will allocate a new buffer to hold the copy.
|
3852
|
-
return new Uint8Array(view).buffer;
|
3853
|
-
}
|
3854
|
-
}
|
3855
|
-
|
3856
|
-
function decodeId3ImageFrame(frame) {
|
3857
|
-
const metadataFrame = {
|
3858
|
-
key: frame.type,
|
3859
|
-
description: '',
|
3860
|
-
data: '',
|
3861
|
-
mimeType: null,
|
3862
|
-
pictureType: null
|
3863
|
-
};
|
3864
|
-
const utf8Encoding = 0x03;
|
3865
|
-
if (frame.size < 2) {
|
3866
|
-
return undefined;
|
3867
|
-
}
|
3868
|
-
if (frame.data[0] !== utf8Encoding) {
|
3869
|
-
console.log('Ignore frame with unrecognized character ' + 'encoding');
|
3870
|
-
return undefined;
|
3871
|
-
}
|
3872
|
-
const mimeTypeEndIndex = frame.data.subarray(1).indexOf(0);
|
3873
|
-
if (mimeTypeEndIndex === -1) {
|
3874
|
-
return undefined;
|
3875
|
-
}
|
3876
|
-
const mimeType = utf8ArrayToStr(toUint8(frame.data, 1, mimeTypeEndIndex));
|
3877
|
-
const pictureType = frame.data[2 + mimeTypeEndIndex];
|
3878
|
-
const descriptionEndIndex = frame.data.subarray(3 + mimeTypeEndIndex).indexOf(0);
|
3879
|
-
if (descriptionEndIndex === -1) {
|
3880
|
-
return undefined;
|
3881
|
-
}
|
3882
|
-
const description = utf8ArrayToStr(toUint8(frame.data, 3 + mimeTypeEndIndex, descriptionEndIndex));
|
3883
|
-
let data;
|
3884
|
-
if (mimeType === '-->') {
|
3885
|
-
data = utf8ArrayToStr(toUint8(frame.data, 4 + mimeTypeEndIndex + descriptionEndIndex));
|
3886
|
-
} else {
|
3887
|
-
data = toArrayBuffer(frame.data.subarray(4 + mimeTypeEndIndex + descriptionEndIndex));
|
3888
|
-
}
|
3889
|
-
metadataFrame.mimeType = mimeType;
|
3890
|
-
metadataFrame.pictureType = pictureType;
|
3891
|
-
metadataFrame.description = description;
|
3892
|
-
metadataFrame.data = data;
|
3893
|
-
return metadataFrame;
|
3894
|
-
}
|
3895
|
-
|
3896
|
-
/**
|
3897
|
-
* Decode an ID3 frame.
|
3898
|
-
*
|
3899
|
-
* @param frame - the ID3 frame
|
3900
|
-
*
|
3901
|
-
* @returns The decoded ID3 frame
|
3902
|
-
*
|
3903
|
-
* @internal
|
3904
|
-
*
|
3905
|
-
* @group ID3
|
3906
|
-
*/
|
3907
|
-
function decodeId3Frame(frame) {
|
3908
|
-
if (frame.type === 'PRIV') {
|
3909
|
-
return decodeId3PrivFrame(frame);
|
3910
|
-
} else if (frame.type[0] === 'W') {
|
3911
|
-
return decodeId3UrlFrame(frame);
|
3912
|
-
} else if (frame.type === 'APIC') {
|
3913
|
-
return decodeId3ImageFrame(frame);
|
3914
|
-
}
|
3915
|
-
return decodeId3TextFrame(frame);
|
3916
|
-
}
|
3917
|
-
|
3918
|
-
/**
|
3919
|
-
* Read ID3 size
|
3920
|
-
*
|
3921
|
-
* @param data - The data to read from
|
3922
|
-
* @param offset - The offset at which to start reading
|
3923
|
-
*
|
3924
|
-
* @returns The size
|
3925
|
-
*
|
3926
|
-
* @internal
|
3927
|
-
*
|
3928
|
-
* @group ID3
|
3929
|
-
*/
|
3930
|
-
function readId3Size(data, offset) {
|
3931
|
-
let size = 0;
|
3932
|
-
size = (data[offset] & 0x7f) << 21;
|
3933
|
-
size |= (data[offset + 1] & 0x7f) << 14;
|
3934
|
-
size |= (data[offset + 2] & 0x7f) << 7;
|
3935
|
-
size |= data[offset + 3] & 0x7f;
|
3936
|
-
return size;
|
3937
|
-
}
|
3938
|
-
|
3939
|
-
/**
|
3940
|
-
* Returns the data of an ID3 frame.
|
3941
|
-
*
|
3942
|
-
* @param data - The data to read from
|
3943
|
-
*
|
3944
|
-
* @returns The data of the ID3 frame
|
3945
|
-
*
|
3946
|
-
* @internal
|
3947
|
-
*
|
3948
|
-
* @group ID3
|
3949
|
-
*/
|
3950
|
-
function getId3FrameData(data) {
|
3951
|
-
/*
|
3952
|
-
Frame ID $xx xx xx xx (four characters)
|
3953
|
-
Size $xx xx xx xx
|
3954
|
-
Flags $xx xx
|
3955
|
-
*/
|
3956
|
-
const type = String.fromCharCode(data[0], data[1], data[2], data[3]);
|
3957
|
-
const size = readId3Size(data, 4);
|
3958
|
-
// skip frame id, size, and flags
|
3959
|
-
const offset = 10;
|
3960
|
-
return {
|
3961
|
-
type,
|
3962
|
-
size,
|
3963
|
-
data: data.subarray(offset, offset + size)
|
3964
|
-
};
|
3965
|
-
}
|
3966
|
-
|
3967
|
-
/**
|
3968
|
-
* Returns true if an ID3 footer can be found at offset in data
|
3969
|
-
*
|
3970
|
-
* @param data - The data to search in
|
3971
|
-
* @param offset - The offset at which to start searching
|
3972
|
-
*
|
3973
|
-
* @returns `true` if an ID3 footer is found
|
3974
|
-
*
|
3975
|
-
* @internal
|
3976
|
-
*
|
3977
|
-
* @group ID3
|
3978
|
-
*/
|
3979
|
-
function isId3Footer(data, offset) {
|
3980
|
-
/*
|
3981
|
-
* The footer is a copy of the header, but with a different identifier
|
3982
|
-
*/
|
3983
|
-
if (offset + 10 <= data.length) {
|
3984
|
-
// look for '3DI' identifier
|
3985
|
-
if (data[offset] === 0x33 && data[offset + 1] === 0x44 && data[offset + 2] === 0x49) {
|
3986
|
-
// check version is within range
|
3987
|
-
if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
|
3988
|
-
// check size is within range
|
3989
|
-
if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
|
3990
|
-
return true;
|
3991
|
-
}
|
3992
|
-
}
|
3993
|
-
}
|
3994
|
-
}
|
3995
|
-
return false;
|
3996
|
-
}
|
3997
|
-
|
3998
|
-
/**
|
3999
|
-
* Returns true if an ID3 header can be found at offset in data
|
4000
|
-
*
|
4001
|
-
* @param data - The data to search in
|
4002
|
-
* @param offset - The offset at which to start searching
|
4003
|
-
*
|
4004
|
-
* @returns `true` if an ID3 header is found
|
4005
|
-
*
|
4006
|
-
* @internal
|
4007
|
-
*
|
4008
|
-
* @group ID3
|
4009
|
-
*/
|
4010
|
-
function isId3Header(data, offset) {
|
4011
|
-
/*
|
4012
|
-
* http://id3.org/id3v2.3.0
|
4013
|
-
* [0] = 'I'
|
4014
|
-
* [1] = 'D'
|
4015
|
-
* [2] = '3'
|
4016
|
-
* [3,4] = {Version}
|
4017
|
-
* [5] = {Flags}
|
4018
|
-
* [6-9] = {ID3 Size}
|
4019
|
-
*
|
4020
|
-
* An ID3v2 tag can be detected with the following pattern:
|
4021
|
-
* $49 44 33 yy yy xx zz zz zz zz
|
4022
|
-
* Where yy is less than $FF, xx is the 'flags' byte and zz is less than $80
|
4023
|
-
*/
|
4024
|
-
if (offset + 10 <= data.length) {
|
4025
|
-
// look for 'ID3' identifier
|
4026
|
-
if (data[offset] === 0x49 && data[offset + 1] === 0x44 && data[offset + 2] === 0x33) {
|
4027
|
-
// check version is within range
|
4028
|
-
if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
|
4029
|
-
// check size is within range
|
4030
|
-
if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
|
4031
|
-
return true;
|
4032
|
-
}
|
4033
|
-
}
|
4034
|
-
}
|
4035
|
-
}
|
4036
|
-
return false;
|
4037
|
-
}
|
4038
|
-
|
4039
|
-
const HEADER_FOOTER_SIZE = 10;
|
4040
|
-
const FRAME_SIZE = 10;
|
4041
|
-
/**
|
4042
|
-
* Returns an array of ID3 frames found in all the ID3 tags in the id3Data
|
4043
|
-
*
|
4044
|
-
* @param id3Data - The ID3 data containing one or more ID3 tags
|
4045
|
-
*
|
4046
|
-
* @returns Array of ID3 frame objects
|
4047
|
-
*
|
4048
|
-
* @group ID3
|
4049
|
-
*
|
4050
|
-
* @beta
|
4051
|
-
*/
|
4052
|
-
function getId3Frames(id3Data) {
|
4053
|
-
let offset = 0;
|
4054
|
-
const frames = [];
|
4055
|
-
while (isId3Header(id3Data, offset)) {
|
4056
|
-
const size = readId3Size(id3Data, offset + 6);
|
4057
|
-
if (id3Data[offset + 5] >> 6 & 1) {
|
4058
|
-
// skip extended header
|
4059
|
-
offset += HEADER_FOOTER_SIZE;
|
4060
|
-
}
|
4061
|
-
// skip past ID3 header
|
4062
|
-
offset += HEADER_FOOTER_SIZE;
|
4063
|
-
const end = offset + size;
|
4064
|
-
// loop through frames in the ID3 tag
|
4065
|
-
while (offset + FRAME_SIZE < end) {
|
4066
|
-
const frameData = getId3FrameData(id3Data.subarray(offset));
|
4067
|
-
const frame = decodeId3Frame(frameData);
|
4068
|
-
if (frame) {
|
4069
|
-
frames.push(frame);
|
4070
|
-
}
|
4071
|
-
// skip frame header and frame data
|
4072
|
-
offset += frameData.size + HEADER_FOOTER_SIZE;
|
4073
|
-
}
|
4074
|
-
if (isId3Footer(id3Data, offset)) {
|
4075
|
-
offset += HEADER_FOOTER_SIZE;
|
4076
|
-
}
|
4077
|
-
}
|
4078
|
-
return frames;
|
4079
|
-
}
|
4080
|
-
|
4081
|
-
/**
|
4082
|
-
* Returns true if the ID3 frame is an Elementary Stream timestamp frame
|
4083
|
-
*
|
4084
|
-
* @param frame - the ID3 frame
|
4085
|
-
*
|
4086
|
-
* @returns `true` if the ID3 frame is an Elementary Stream timestamp frame
|
4087
|
-
*
|
4088
|
-
* @internal
|
4089
|
-
*
|
4090
|
-
* @group ID3
|
4091
|
-
*/
|
4092
|
-
function isId3TimestampFrame(frame) {
|
4093
|
-
return frame && frame.key === 'PRIV' && frame.info === 'com.apple.streaming.transportStreamTimestamp';
|
4094
|
-
}
|
4095
|
-
|
4096
|
-
const MIN_CUE_DURATION = 0.25;
|
4097
|
-
function getCueClass() {
|
4098
|
-
if (typeof self === 'undefined') return undefined;
|
4099
|
-
return self.VTTCue || self.TextTrackCue;
|
4100
|
-
}
|
4101
|
-
function createCueWithDataFields(Cue, startTime, endTime, data, type) {
|
4102
|
-
let cue = new Cue(startTime, endTime, '');
|
4103
|
-
try {
|
4104
|
-
cue.value = data;
|
4105
|
-
if (type) {
|
4106
|
-
cue.type = type;
|
4107
|
-
}
|
4108
|
-
} catch (e) {
|
4109
|
-
cue = new Cue(startTime, endTime, JSON.stringify(type ? _objectSpread2({
|
4110
|
-
type
|
4111
|
-
}, data) : data));
|
4112
|
-
}
|
4113
|
-
return cue;
|
3918
|
+
return cue;
|
4114
3919
|
}
|
4115
3920
|
|
4116
3921
|
// VTTCue latest draft allows an infinite duration, fallback
|
@@ -4175,10 +3980,11 @@ class ID3TrackController {
|
|
4175
3980
|
this.media = data.media;
|
4176
3981
|
}
|
4177
3982
|
onMediaDetaching() {
|
4178
|
-
if (this.id3Track) {
|
4179
|
-
|
4180
|
-
this.id3Track = null;
|
3983
|
+
if (!this.id3Track) {
|
3984
|
+
return;
|
4181
3985
|
}
|
3986
|
+
clearCurrentCues(this.id3Track);
|
3987
|
+
this.id3Track = null;
|
4182
3988
|
this.media = null;
|
4183
3989
|
this.dateRangeCuesAppended = {};
|
4184
3990
|
}
|
@@ -4237,7 +4043,7 @@ class ID3TrackController {
|
|
4237
4043
|
if (type === MetadataSchema.emsg && !enableEmsgMetadataCues || !enableID3MetadataCues) {
|
4238
4044
|
continue;
|
4239
4045
|
}
|
4240
|
-
const frames =
|
4046
|
+
const frames = getID3Frames(samples[i].data);
|
4241
4047
|
if (frames) {
|
4242
4048
|
const startTime = samples[i].pts;
|
4243
4049
|
let endTime = startTime + samples[i].duration;
|
@@ -4251,7 +4057,7 @@ class ID3TrackController {
|
|
4251
4057
|
for (let j = 0; j < frames.length; j++) {
|
4252
4058
|
const frame = frames[j];
|
4253
4059
|
// Safari doesn't put the timestamp frame in the TextTrack
|
4254
|
-
if (!
|
4060
|
+
if (!isTimeStampFrame(frame)) {
|
4255
4061
|
// add a bounds to any unbounded cues
|
4256
4062
|
this.updateId3CueEnds(startTime, type);
|
4257
4063
|
const cue = createCueWithDataFields(Cue, startTime, endTime, frame, type);
|
@@ -4419,47 +4225,7 @@ class LatencyController {
|
|
4419
4225
|
this.currentTime = 0;
|
4420
4226
|
this.stallCount = 0;
|
4421
4227
|
this._latency = null;
|
4422
|
-
this.
|
4423
|
-
const {
|
4424
|
-
media,
|
4425
|
-
levelDetails
|
4426
|
-
} = this;
|
4427
|
-
if (!media || !levelDetails) {
|
4428
|
-
return;
|
4429
|
-
}
|
4430
|
-
this.currentTime = media.currentTime;
|
4431
|
-
const latency = this.computeLatency();
|
4432
|
-
if (latency === null) {
|
4433
|
-
return;
|
4434
|
-
}
|
4435
|
-
this._latency = latency;
|
4436
|
-
|
4437
|
-
// Adapt playbackRate to meet target latency in low-latency mode
|
4438
|
-
const {
|
4439
|
-
lowLatencyMode,
|
4440
|
-
maxLiveSyncPlaybackRate
|
4441
|
-
} = this.config;
|
4442
|
-
if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1 || !levelDetails.live) {
|
4443
|
-
return;
|
4444
|
-
}
|
4445
|
-
const targetLatency = this.targetLatency;
|
4446
|
-
if (targetLatency === null) {
|
4447
|
-
return;
|
4448
|
-
}
|
4449
|
-
const distanceFromTarget = latency - targetLatency;
|
4450
|
-
// Only adjust playbackRate when within one target duration of targetLatency
|
4451
|
-
// and more than one second from under-buffering.
|
4452
|
-
// Playback further than one target duration from target can be considered DVR playback.
|
4453
|
-
const liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration);
|
4454
|
-
const inLiveRange = distanceFromTarget < liveMinLatencyDuration;
|
4455
|
-
if (inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) {
|
4456
|
-
const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
|
4457
|
-
const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20;
|
4458
|
-
media.playbackRate = Math.min(max, Math.max(1, rate));
|
4459
|
-
} else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
|
4460
|
-
media.playbackRate = 1;
|
4461
|
-
}
|
4462
|
-
};
|
4228
|
+
this.timeupdateHandler = () => this.timeupdate();
|
4463
4229
|
this.hls = hls;
|
4464
4230
|
this.config = hls.config;
|
4465
4231
|
this.registerListeners();
|
@@ -4551,7 +4317,7 @@ class LatencyController {
|
|
4551
4317
|
this.onMediaDetaching();
|
4552
4318
|
this.levelDetails = null;
|
4553
4319
|
// @ts-ignore
|
4554
|
-
this.hls = null;
|
4320
|
+
this.hls = this.timeupdateHandler = null;
|
4555
4321
|
}
|
4556
4322
|
registerListeners() {
|
4557
4323
|
this.hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
|
@@ -4569,11 +4335,11 @@ class LatencyController {
|
|
4569
4335
|
}
|
4570
4336
|
onMediaAttached(event, data) {
|
4571
4337
|
this.media = data.media;
|
4572
|
-
this.media.addEventListener('timeupdate', this.
|
4338
|
+
this.media.addEventListener('timeupdate', this.timeupdateHandler);
|
4573
4339
|
}
|
4574
4340
|
onMediaDetaching() {
|
4575
4341
|
if (this.media) {
|
4576
|
-
this.media.removeEventListener('timeupdate', this.
|
4342
|
+
this.media.removeEventListener('timeupdate', this.timeupdateHandler);
|
4577
4343
|
this.media = null;
|
4578
4344
|
}
|
4579
4345
|
}
|
@@ -4587,10 +4353,10 @@ class LatencyController {
|
|
4587
4353
|
}) {
|
4588
4354
|
this.levelDetails = details;
|
4589
4355
|
if (details.advanced) {
|
4590
|
-
this.
|
4356
|
+
this.timeupdate();
|
4591
4357
|
}
|
4592
4358
|
if (!details.live && this.media) {
|
4593
|
-
this.media.removeEventListener('timeupdate', this.
|
4359
|
+
this.media.removeEventListener('timeupdate', this.timeupdateHandler);
|
4594
4360
|
}
|
4595
4361
|
}
|
4596
4362
|
onError(event, data) {
|
@@ -4600,7 +4366,48 @@ class LatencyController {
|
|
4600
4366
|
}
|
4601
4367
|
this.stallCount++;
|
4602
4368
|
if ((_this$levelDetails = this.levelDetails) != null && _this$levelDetails.live) {
|
4603
|
-
|
4369
|
+
logger.warn('[playback-rate-controller]: Stall detected, adjusting target latency');
|
4370
|
+
}
|
4371
|
+
}
|
4372
|
+
timeupdate() {
|
4373
|
+
const {
|
4374
|
+
media,
|
4375
|
+
levelDetails
|
4376
|
+
} = this;
|
4377
|
+
if (!media || !levelDetails) {
|
4378
|
+
return;
|
4379
|
+
}
|
4380
|
+
this.currentTime = media.currentTime;
|
4381
|
+
const latency = this.computeLatency();
|
4382
|
+
if (latency === null) {
|
4383
|
+
return;
|
4384
|
+
}
|
4385
|
+
this._latency = latency;
|
4386
|
+
|
4387
|
+
// Adapt playbackRate to meet target latency in low-latency mode
|
4388
|
+
const {
|
4389
|
+
lowLatencyMode,
|
4390
|
+
maxLiveSyncPlaybackRate
|
4391
|
+
} = this.config;
|
4392
|
+
if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1 || !levelDetails.live) {
|
4393
|
+
return;
|
4394
|
+
}
|
4395
|
+
const targetLatency = this.targetLatency;
|
4396
|
+
if (targetLatency === null) {
|
4397
|
+
return;
|
4398
|
+
}
|
4399
|
+
const distanceFromTarget = latency - targetLatency;
|
4400
|
+
// Only adjust playbackRate when within one target duration of targetLatency
|
4401
|
+
// and more than one second from under-buffering.
|
4402
|
+
// Playback further than one target duration from target can be considered DVR playback.
|
4403
|
+
const liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration);
|
4404
|
+
const inLiveRange = distanceFromTarget < liveMinLatencyDuration;
|
4405
|
+
if (inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) {
|
4406
|
+
const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
|
4407
|
+
const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20;
|
4408
|
+
media.playbackRate = Math.min(max, Math.max(1, rate));
|
4409
|
+
} else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
|
4410
|
+
media.playbackRate = 1;
|
4604
4411
|
}
|
4605
4412
|
}
|
4606
4413
|
estimateLiveEdge() {
|
@@ -5375,13 +5182,18 @@ var ErrorActionFlags = {
  MoveAllAlternatesMatchingHDCP: 2,
  SwitchToSDR: 4
  }; // Reserved for future use
- class ErrorController
+ class ErrorController {
  constructor(hls) {
- super('error-controller', hls.logger);
  this.hls = void 0;
  this.playlistError = 0;
  this.penalizedRenditions = {};
+ this.log = void 0;
+ this.warn = void 0;
+ this.error = void 0;
  this.hls = hls;
+ this.log = logger.log.bind(logger, `[info]:`);
+ this.warn = logger.warn.bind(logger, `[warning]:`);
+ this.error = logger.error.bind(logger, `[error]:`);
  this.registerListeners();
  }
  registerListeners() {
@@ -5733,13 +5545,16 @@ class ErrorController extends Logger {
  }
  }

- class BasePlaylistController
+ class BasePlaylistController {
  constructor(hls, logPrefix) {
- super(logPrefix, hls.logger);
  this.hls = void 0;
  this.timer = -1;
  this.requestScheduled = -1;
  this.canLoad = false;
+ this.log = void 0;
+ this.warn = void 0;
+ this.log = logger.log.bind(logger, `${logPrefix}:`);
+ this.warn = logger.warn.bind(logger, `${logPrefix}:`);
  this.hls = hls;
  }
  destroy() {
@@ -5772,7 +5587,7 @@ class BasePlaylistController extends Logger {
  try {
  uri = new self.URL(attr.URI, previous.url).href;
  } catch (error) {
-
+ logger.warn(`Could not construct new URL for Rendition Report: ${error}`);
  uri = attr.URI || '';
  }
  // Use exact match. Otherwise, the last partial match, if any, will be used
@@ -5860,12 +5675,7 @@ class BasePlaylistController extends Logger {
  const cdnAge = lastAdvanced + details.ageHeader;
  let currentGoal = Math.min(cdnAge - details.partTarget, details.targetduration * 1.5);
  if (currentGoal > 0) {
- if (
- // Omit segment and part directives when the last response was more than 3 target durations ago,
- this.log(`Playlist last advanced ${lastAdvanced.toFixed(2)}s ago. Omitting segment and part directives.`);
- msn = undefined;
- part = undefined;
- } else if (previousDetails != null && previousDetails.tuneInGoal && cdnAge - details.partTarget > previousDetails.tuneInGoal) {
+ if (previousDetails && currentGoal > previousDetails.tuneInGoal) {
  // If we attempted to get the next or latest playlist update, but currentGoal increased,
  // then we either can't catchup, or the "age" header cannot be trusted.
  this.warn(`CDN Tune-in goal increased from: ${previousDetails.tuneInGoal} to: ${currentGoal} with playlist age: ${details.age}`);
@@ -6311,9 +6121,8 @@ function getCodecTiers(levels, audioTracksByGroup, minAutoLevel, maxAutoLevel) {
  }, {});
  }

- class AbrController
+ class AbrController {
  constructor(_hls) {
- super('abr', _hls.logger);
  this.hls = void 0;
  this.lastLevelLoadSec = 0;
  this.lastLoadedFragLevel = -1;
@@ -6427,7 +6236,7 @@ class AbrController extends Logger {
  this.resetEstimator(nextLoadLevelBitrate);
  }
  this.clearTimer();
-
+ logger.warn(`[abr] Fragment ${frag.sn}${part ? ' part ' + part.index : ''} of level ${frag.level} is loading too slowly;
  Time to underbuffer: ${bufferStarvationDelay.toFixed(3)} s
  Estimated load time for current fragment: ${fragLoadedDelay.toFixed(3)} s
  Estimated load time for down switch fragment: ${fragLevelNextLoadedDelay.toFixed(3)} s
@@ -6447,7 +6256,7 @@ class AbrController extends Logger {
  }
  resetEstimator(abrEwmaDefaultEstimate) {
  if (abrEwmaDefaultEstimate) {
-
+ logger.log(`setting initial bwe to ${abrEwmaDefaultEstimate}`);
  this.hls.config.abrEwmaDefaultEstimate = abrEwmaDefaultEstimate;
  }
  this.firstSelection = -1;
@@ -6679,7 +6488,7 @@ class AbrController extends Logger {
  }
  const firstLevel = this.hls.firstLevel;
  const clamped = Math.min(Math.max(firstLevel, minAutoLevel), maxAutoLevel);
-
+ logger.warn(`[abr] Could not find best starting auto level. Defaulting to first in playlist ${firstLevel} clamped to ${clamped}`);
  return clamped;
  }
  get forcedAutoLevel() {
@@ -6725,9 +6534,6 @@ class AbrController extends Logger {
  partCurrent,
  hls
  } = this;
- if (hls.levels.length <= 1) {
- return hls.loadLevel;
- }
  const {
  maxAutoLevel,
  config,
@@ -6760,13 +6566,13 @@ class AbrController extends Logger {
  // cap maxLoadingDelay and ensure it is not bigger 'than bitrate test' frag duration
  const maxLoadingDelay = currentFragDuration ? Math.min(currentFragDuration, config.maxLoadingDelay) : config.maxLoadingDelay;
  maxStarvationDelay = maxLoadingDelay - bitrateTestDelay;
-
+ logger.info(`[abr] bitrate test took ${Math.round(1000 * bitrateTestDelay)}ms, set first fragment max fetchDuration to ${Math.round(1000 * maxStarvationDelay)} ms`);
  // don't use conservative factor on bitrate test
  bwFactor = bwUpFactor = 1;
  }
  }
  const bestLevel = this.findBestLevel(avgbw, minAutoLevel, maxAutoLevel, bufferStarvationDelay, maxStarvationDelay, bwFactor, bwUpFactor);
-
+ logger.info(`[abr] ${bufferStarvationDelay ? 'rebuffering expected' : 'buffer is empty'}, optimal quality level ${bestLevel}`);
  if (bestLevel > -1) {
  return bestLevel;
  }
@@ -6840,7 +6646,7 @@ class AbrController extends Logger {
  currentVideoRange = preferHDR ? videoRanges[videoRanges.length - 1] : videoRanges[0];
  currentFrameRate = minFramerate;
  currentBw = Math.max(currentBw, minBitrate);
-
+ logger.log(`[abr] picked start tier ${JSON.stringify(startTier)}`);
  } else {
  currentCodecSet = level == null ? void 0 : level.codecSet;
  currentVideoRange = level == null ? void 0 : level.videoRange;
@@ -6893,9 +6699,9 @@ class AbrController extends Logger {
  const forcedAutoLevel = this.forcedAutoLevel;
  if (i !== loadLevel && (forcedAutoLevel === -1 || forcedAutoLevel !== loadLevel)) {
  if (levelsSkipped.length) {
-
+ logger.trace(`[abr] Skipped level(s) ${levelsSkipped.join(',')} of ${maxAutoLevel} max with CODECS and VIDEO-RANGE:"${levels[levelsSkipped[0]].codecs}" ${levels[levelsSkipped[0]].videoRange}; not compatible with "${level.codecs}" ${currentVideoRange}`);
  }
-
+ logger.info(`[abr] switch candidate:${selectionBaseLevel}->${i} adjustedbw(${Math.round(adjustedbw)})-bitrate=${Math.round(adjustedbw - bitrate)} ttfb:${ttfbEstimateSec.toFixed(1)} avgDuration:${avgDuration.toFixed(1)} maxFetchDuration:${maxFetchDuration.toFixed(1)} fetchDuration:${fetchDuration.toFixed(1)} firstSelection:${firstSelection} codecSet:${currentCodecSet} videoRange:${currentVideoRange} hls.loadLevel:${loadLevel}`);
  }
  if (firstSelection) {
  this.firstSelection = i;
@@ -6938,29 +6744,40 @@ class BufferHelper {
  * Return true if `media`'s buffered include `position`
  */
  static isBuffered(media, position) {
-
-
-
-
-
+ try {
+ if (media) {
+ const buffered = BufferHelper.getBuffered(media);
+ for (let i = 0; i < buffered.length; i++) {
+ if (position >= buffered.start(i) && position <= buffered.end(i)) {
+ return true;
+ }
  }
  }
+ } catch (error) {
+ // this is to catch
+ // InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
+ // This SourceBuffer has been removed from the parent media source
  }
  return false;
  }
  static bufferInfo(media, pos, maxHoleDuration) {
-
-
-
+ try {
+ if (media) {
+ const vbuffered = BufferHelper.getBuffered(media);
  const buffered = [];
-
+ let i;
+ for (i = 0; i < vbuffered.length; i++) {
  buffered.push({
  start: vbuffered.start(i),
  end: vbuffered.end(i)
  });
  }
- return
+ return this.bufferedInfo(buffered, pos, maxHoleDuration);
  }
+ } catch (error) {
+ // this is to catch
+ // InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
+ // This SourceBuffer has been removed from the parent media source
  }
  return {
  len: 0,
@@ -6972,7 +6789,14 @@ class BufferHelper {
  static bufferedInfo(buffered, pos, maxHoleDuration) {
  pos = Math.max(0, pos);
  // sort on buffer.start/smaller end (IE does not always return sorted buffered range)
- buffered.sort((a, b)
+ buffered.sort(function (a, b) {
+ const diff = a.start - b.start;
+ if (diff) {
+ return diff;
+ } else {
+ return b.end - a.end;
+ }
+ });
  let buffered2 = [];
  if (maxHoleDuration) {
  // there might be some small holes between buffer time range
@@ -7039,7 +6863,7 @@ class BufferHelper {
  */
  static getBuffered(media) {
  try {
- return media.buffered
+ return media.buffered;
  } catch (e) {
  logger.log('failed to get media.buffered', e);
  return noopBuffered;
@@ -7064,22 +6888,24 @@ class BufferOperationQueue {
  this.executeNext(type);
  }
  }
+ insertAbort(operation, type) {
+ const queue = this.queues[type];
+ queue.unshift(operation);
+ this.executeNext(type);
+ }
  appendBlocker(type) {
-
-
-
- onStart: () => {},
- onComplete: () => {},
- onError: () => {}
- };
- this.append(operation, type);
+ let execute;
+ const promise = new Promise(resolve => {
+ execute = resolve;
  });
-
-
-
-
-
- }
+ const operation = {
+ execute,
+ onStart: () => {},
+ onComplete: () => {},
+ onError: () => {}
+ };
+ this.append(operation, type);
+ return promise;
  }
  executeNext(type) {
  const queue = this.queues[type];
@@ -7111,9 +6937,8 @@ class BufferOperationQueue {
  }

  const VIDEO_CODEC_PROFILE_REPLACE = /(avc[1234]|hvc1|hev1|dvh[1e]|vp09|av01)(?:\.[^.,]+)+/;
- class BufferController
- constructor(hls
- super('buffer-controller', hls.logger);
+ class BufferController {
+ constructor(hls) {
  // The level details used to determine duration, target-duration and live
  this.details = null;
  // cache the self generated object url to detect hijack of video tag
@@ -7123,7 +6948,6 @@ class BufferController extends Logger {
  // References to event listeners for each SourceBuffer, so that they can be referenced for event removal
  this.listeners = void 0;
  this.hls = void 0;
- this.fragmentTracker = void 0;
  // The number of BUFFER_CODEC events received before any sourceBuffers are created
  this.bufferCodecEventsExpected = 0;
  // The total number of BUFFER_CODEC events received
@@ -7134,10 +6958,6 @@ class BufferController extends Logger {
  this.mediaSource = null;
  // Last MP3 audio chunk appended
  this.lastMpegAudioChunk = null;
- // Audio fragment blocked from appending until corresponding video appends or context changes
- this.blockedAudioAppend = null;
- // Keep track of video append position for unblocking audio
- this.lastVideoAppendEnd = 0;
  this.appendSource = void 0;
  // counters
  this.appendErrors = {
@@ -7148,6 +6968,9 @@ class BufferController extends Logger {
  this.tracks = {};
  this.pendingTracks = {};
  this.sourceBuffer = void 0;
+ this.log = void 0;
+ this.warn = void 0;
+ this.error = void 0;
  this._onEndStreaming = event => {
  if (!this.hls) {
  return;
@@ -7169,10 +6992,7 @@ class BufferController extends Logger {
  this.log('Media source opened');
  if (media) {
  media.removeEventListener('emptied', this._onMediaEmptied);
-
- if (durationAndRange) {
- this.updateMediaSource(durationAndRange);
- }
+ this.updateMediaElementDuration();
  this.hls.trigger(Events.MEDIA_ATTACHED, {
  media,
  mediaSource: mediaSource
@@ -7196,12 +7016,15 @@ class BufferController extends Logger {
  _objectUrl
  } = this;
  if (mediaSrc !== _objectUrl) {
-
+ logger.error(`Media element src was set while attaching MediaSource (${_objectUrl} > ${mediaSrc})`);
  }
  };
  this.hls = hls;
-
+ const logPrefix = '[buffer-controller]';
  this.appendSource = isManagedMediaSource(getMediaSource(hls.config.preferManagedMediaSource));
+ this.log = logger.log.bind(logger, logPrefix);
+ this.warn = logger.warn.bind(logger, logPrefix);
+ this.error = logger.error.bind(logger, logPrefix);
  this._initSourceBuffer();
  this.registerListeners();
  }
@@ -7213,13 +7036,7 @@ class BufferController extends Logger {
  this.details = null;
  this.lastMpegAudioChunk = null;
  // @ts-ignore
- this.hls =
- // @ts-ignore
- this._onMediaSourceOpen = this._onMediaSourceClose = null;
- // @ts-ignore
- this._onMediaSourceEnded = null;
- // @ts-ignore
- this._onStartStreaming = this._onEndStreaming = null;
+ this.hls = null;
  }
  registerListeners() {
  const {
@@ -7269,8 +7086,6 @@ class BufferController extends Logger {
  audiovideo: 0
  };
  this.lastMpegAudioChunk = null;
- this.blockedAudioAppend = null;
- this.lastVideoAppendEnd = 0;
  }
  onManifestLoading() {
  this.bufferCodecEventsExpected = this._bufferCodecEventsTotal = 0;
@@ -7353,34 +7168,34 @@ class BufferController extends Logger {
  mediaSource.removeEventListener('startstreaming', this._onStartStreaming);
  mediaSource.removeEventListener('endstreaming', this._onEndStreaming);
  }
- this.mediaSource = null;
- this._objectUrl = null;
- }

-
-
-
-
-
-
-
+ // Detach properly the MediaSource from the HTMLMediaElement as
+ // suggested in https://github.com/w3c/media-source/issues/53.
+ if (media) {
+ media.removeEventListener('emptied', this._onMediaEmptied);
+ if (_objectUrl) {
+ self.URL.revokeObjectURL(_objectUrl);
+ }

-
-
-
-
-
-
+ // clean up video tag src only if it's our own url. some external libraries might
+ // hijack the video tag and change its 'src' without destroying the Hls instance first
+ if (this.mediaSrc === _objectUrl) {
+ media.removeAttribute('src');
+ if (this.appendSource) {
+ removeSourceChildren(media);
+ }
+ media.load();
+ } else {
+ this.warn('media|source.src was changed by a third party - skip cleanup');
  }
- media.load();
- } else {
- this.warn('media|source.src was changed by a third party - skip cleanup');
  }
+ this.mediaSource = null;
  this.media = null;
+ this._objectUrl = null;
+ this.bufferCodecEventsExpected = this._bufferCodecEventsTotal;
+ this.pendingTracks = {};
+ this.tracks = {};
  }
- this.bufferCodecEventsExpected = this._bufferCodecEventsTotal;
- this.pendingTracks = {};
- this.tracks = {};
  this.hls.trigger(Events.MEDIA_DETACHED, undefined);
  }
  onBufferReset() {
@@ -7388,7 +7203,6 @@ class BufferController extends Logger {
  this.resetBuffer(type);
  });
  this._initSourceBuffer();
- this.hls.resumeBuffering();
  }
  resetBuffer(type) {
  const sb = this.sourceBuffer[type];
@@ -7412,10 +7226,9 @@ class BufferController extends Logger {
  const trackNames = Object.keys(data);
  trackNames.forEach(trackName => {
  if (sourceBufferCount) {
- var _track$buffer;
  // check if SourceBuffer codec needs to change
  const track = this.tracks[trackName];
- if (track && typeof
+ if (track && typeof track.buffer.changeType === 'function') {
  var _trackCodec;
  const {
  id,
@@ -7485,54 +7298,20 @@ class BufferController extends Logger {
  };
  operationQueue.append(operation, type, !!this.pendingTracks[type]);
  }
- blockAudio(partOrFrag) {
- var _this$fragmentTracker;
- const pStart = partOrFrag.start;
- const pTime = pStart + partOrFrag.duration * 0.05;
- const atGap = ((_this$fragmentTracker = this.fragmentTracker.getAppendedFrag(pStart, PlaylistLevelType.MAIN)) == null ? void 0 : _this$fragmentTracker.gap) === true;
- if (atGap) {
- return;
- }
- const op = {
- execute: () => {
- var _this$fragmentTracker2;
- if (this.lastVideoAppendEnd > pTime || this.sourceBuffer.video && BufferHelper.isBuffered(this.sourceBuffer.video, pTime) || ((_this$fragmentTracker2 = this.fragmentTracker.getAppendedFrag(pTime, PlaylistLevelType.MAIN)) == null ? void 0 : _this$fragmentTracker2.gap) === true) {
- this.blockedAudioAppend = null;
- this.operationQueue.shiftAndExecuteNext('audio');
- }
- },
- onStart: () => {},
- onComplete: () => {},
- onError: () => {}
- };
- this.blockedAudioAppend = {
- op,
- frag: partOrFrag
- };
- this.operationQueue.append(op, 'audio', true);
- }
- unblockAudio() {
- const blockedAudioAppend = this.blockedAudioAppend;
- if (blockedAudioAppend) {
- this.blockedAudioAppend = null;
- this.operationQueue.unblockAudio(blockedAudioAppend.op);
- }
- }
  onBufferAppending(event, eventData) {
  const {
+ hls,
  operationQueue,
  tracks
  } = this;
  const {
  data,
  type,
- parent,
  frag,
  part,
  chunkMeta
  } = eventData;
  const chunkStats = chunkMeta.buffering[type];
- const sn = frag.sn;
  const bufferAppendingStart = self.performance.now();
  chunkStats.start = bufferAppendingStart;
  const fragBuffering = frag.stats.buffering;
@@ -7555,36 +7334,7 @@ class BufferController extends Logger {
  checkTimestampOffset = !this.lastMpegAudioChunk || chunkMeta.id === 1 || this.lastMpegAudioChunk.sn !== chunkMeta.sn;
  this.lastMpegAudioChunk = chunkMeta;
  }
-
- // Block audio append until overlapping video append
- const videoSb = this.sourceBuffer.video;
- if (videoSb && sn !== 'initSegment') {
- const partOrFrag = part || frag;
- const blockedAudioAppend = this.blockedAudioAppend;
- if (type === 'audio' && parent !== 'main' && !this.blockedAudioAppend) {
- const pStart = partOrFrag.start;
- const pTime = pStart + partOrFrag.duration * 0.05;
- const vbuffered = videoSb.buffered;
- const vappending = this.operationQueue.current('video');
- if (!vbuffered.length && !vappending) {
- // wait for video before appending audio
- this.blockAudio(partOrFrag);
- } else if (!vappending && !BufferHelper.isBuffered(videoSb, pTime) && this.lastVideoAppendEnd < pTime) {
- // audio is ahead of video
- this.blockAudio(partOrFrag);
- }
- } else if (type === 'video') {
- const videoAppendEnd = partOrFrag.end;
- if (blockedAudioAppend) {
- const audioStart = blockedAudioAppend.frag.start;
- if (videoAppendEnd > audioStart || videoAppendEnd < this.lastVideoAppendEnd || BufferHelper.isBuffered(videoSb, audioStart)) {
- this.unblockAudio();
- }
- }
- this.lastVideoAppendEnd = videoAppendEnd;
- }
- }
- const fragStart = (part || frag).start;
+ const fragStart = frag.start;
  const operation = {
  execute: () => {
  chunkStats.executeStart = self.performance.now();
@@ -7593,7 +7343,7 @@ class BufferController extends Logger {
  if (sb) {
  const delta = fragStart - sb.timestampOffset;
  if (Math.abs(delta) >= 0.1) {
- this.log(`Updating audio SourceBuffer timestampOffset to ${fragStart} (delta: ${delta}) sn: ${sn})`);
+ this.log(`Updating audio SourceBuffer timestampOffset to ${fragStart} (delta: ${delta}) sn: ${frag.sn})`);
  sb.timestampOffset = fragStart;
  }
  }
@@ -7660,21 +7410,22 @@ class BufferController extends Logger {
  /* with UHD content, we could get loop of quota exceeded error until
  browser is able to evict some data from sourcebuffer. Retrying can help recover.
  */
- this.warn(`Failed ${appendErrorCount}/${
- if (appendErrorCount >=
+ this.warn(`Failed ${appendErrorCount}/${hls.config.appendErrorMaxRetry} times to append segment in "${type}" sourceBuffer`);
+ if (appendErrorCount >= hls.config.appendErrorMaxRetry) {
  event.fatal = true;
  }
  }
-
+ hls.trigger(Events.ERROR, event);
  }
  };
  operationQueue.append(operation, type, !!this.pendingTracks[type]);
- }
-
-
-
-
-
+ }
+ onBufferFlushing(event, data) {
+ const {
+ operationQueue
+ } = this;
+ const flushOperation = type => ({
+ execute: this.removeExecutor.bind(this, type, data.startOffset, data.endOffset),
  onStart: () => {
  // logger.debug(`[buffer-controller]: Started flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
  },
@@ -7687,22 +7438,12 @@ class BufferController extends Logger {
  onError: error => {
  this.warn(`Failed to remove from ${type} SourceBuffer`, error);
  }
- };
-
-
- const {
- operationQueue
- } = this;
- const {
- type,
- startOffset,
- endOffset
- } = data;
- if (type) {
- operationQueue.append(this.getFlushOp(type, startOffset, endOffset), type);
+ });
+ if (data.type) {
+ operationQueue.append(flushOperation(data.type), data.type);
  } else {
- this.getSourceBufferTypes().forEach(
- operationQueue.append(
+ this.getSourceBufferTypes().forEach(type => {
+ operationQueue.append(flushOperation(type), type);
  });
  }
  }
@@ -7749,9 +7490,6 @@ class BufferController extends Logger {
  // on BUFFER_EOS mark matching sourcebuffer(s) as ended and trigger checkEos()
  // an undefined data.type will mark all buffers as EOS.
  onBufferEos(event, data) {
- if (data.type === 'video') {
- this.unblockAudio();
- }
  const ended = this.getSourceBufferTypes().reduce((acc, type) => {
  const sb = this.sourceBuffer[type];
  if (sb && (!data.type || data.type === type)) {
@@ -7794,14 +7532,10 @@ class BufferController extends Logger {
  return;
  }
  this.details = details;
- const durationAndRange = this.getDurationAndRange();
- if (!durationAndRange) {
- return;
- }
  if (this.getSourceBufferTypes().length) {
- this.blockBuffers(
+ this.blockBuffers(this.updateMediaElementDuration.bind(this));
  } else {
- this.
+ this.updateMediaElementDuration();
  }
  }
  trimBuffers() {
@@ -7906,9 +7640,9 @@ class BufferController extends Logger {
  * 'liveDurationInfinity` is set to `true`
  * More details: https://github.com/video-dev/hls.js/issues/355
  */
-
+ updateMediaElementDuration() {
  if (!this.details || !this.media || !this.mediaSource || this.mediaSource.readyState !== 'open') {
- return
+ return;
  }
  const {
  details,
@@ -7922,41 +7656,25 @@ class BufferController extends Logger {
  if (details.live && hls.config.liveDurationInfinity) {
  // Override duration to Infinity
  mediaSource.duration = Infinity;
-
- if (len && details.live && !!mediaSource.setLiveSeekableRange) {
- const start = Math.max(0, details.fragments[0].start);
- const end = Math.max(start, start + details.totalduration);
- return {
- duration: Infinity,
- start,
- end
- };
- }
- return {
- duration: Infinity
- };
+ this.updateSeekableRange(details);
  } else if (levelDuration > msDuration && levelDuration > mediaDuration || !isFiniteNumber(mediaDuration)) {
-
-
-
+ // levelDuration was the last value we set.
+ // not using mediaSource.duration as the browser may tweak this value
+ // only update Media Source duration if its value increase, this is to avoid
+ // flushing already buffered portion when switching between quality level
+ this.log(`Updating Media Source duration to ${levelDuration.toFixed(3)}`);
+ mediaSource.duration = levelDuration;
  }
- return null;
  }
-
-
-
-
-
-
-
-
-
- this.log(`Updating Media Source duration to ${duration.toFixed(3)}`);
- }
- this.mediaSource.duration = duration;
- if (start !== undefined && end !== undefined) {
- this.log(`Media Source duration is set to ${this.mediaSource.duration}. Setting seekable range to ${start}-${end}.`);
- this.mediaSource.setLiveSeekableRange(start, end);
+ updateSeekableRange(levelDetails) {
+ const mediaSource = this.mediaSource;
+ const fragments = levelDetails.fragments;
+ const len = fragments.length;
+ if (len && levelDetails.live && mediaSource != null && mediaSource.setLiveSeekableRange) {
+ const start = Math.max(0, fragments[0].start);
+ const end = Math.max(start, start + levelDetails.totalduration);
+ this.log(`Media Source duration is set to ${mediaSource.duration}. Setting seekable range to ${start}-${end}.`);
+ mediaSource.setLiveSeekableRange(start, end);
  }
  }
  checkPendingTracks() {
@@ -8142,7 +7860,6 @@ class BufferController extends Logger {
  }
  return;
  }
- sb.ending = false;
  sb.ended = false;
  sb.appendBuffer(data);
  }
@@ -8162,14 +7879,10 @@ class BufferController extends Logger {

  // logger.debug(`[buffer-controller]: Blocking ${buffers} SourceBuffer`);
  const blockingOperations = buffers.map(type => operationQueue.appendBlocker(type));
-
- if (audioBlocked) {
- this.unblockAudio();
- }
- Promise.all(blockingOperations).then(result => {
+ Promise.all(blockingOperations).then(() => {
  // logger.debug(`[buffer-controller]: Blocking operation resolved; unblocking ${buffers} SourceBuffer`);
  onUnblocked();
- buffers.forEach(
+ buffers.forEach(type => {
  const sb = this.sourceBuffer[type];
  // Only cycle the queue if the SB is not updating. There's a bug in Chrome which sets the SB updating flag to
  // true when changing the MediaSource duration (https://bugs.chromium.org/p/chromium/issues/detail?id=959359&can=2&q=mediasource%20duration)
@@ -8318,7 +8031,6 @@ class CapLevelController {
  }
  onMediaDetaching() {
  this.stopCapping();
- this.media = null;
  }
  detectPlayerSize() {
  if (this.media) {
@@ -8331,10 +8043,10 @@ class CapLevelController {
  const hls = this.hls;
  const maxLevel = this.getMaxLevel(levels.length - 1);
  if (maxLevel !== this.autoLevelCapping) {
-
+ logger.log(`Setting autoLevelCapping to ${maxLevel}: ${levels[maxLevel].height}p@${levels[maxLevel].bitrate} for media ${this.mediaWidth}x${this.mediaHeight}`);
  }
  hls.autoLevelCapping = maxLevel;
- if (hls.
+ if (hls.autoLevelCapping > this.autoLevelCapping && this.streamController) {
  // if auto level capping has a higher value for the previous one, flush the buffer using nextLevelSwitch
  // usually happen when the user go to the fullscreen mode.
  this.streamController.nextLevelSwitch();
@@ -8470,11 +8182,9 @@ class FPSController {
  }
  registerListeners() {
  this.hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
- this.hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  }
  unregisterListeners() {
  this.hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
- this.hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  }
  destroy() {
  if (this.timer) {
@@ -8496,9 +8206,6 @@ class FPSController {
  this.timer = self.setInterval(this.checkFPSInterval.bind(this), config.fpsDroppedMonitoringPeriod);
  }
  }
- onMediaDetaching() {
- this.media = null;
- }
  checkFPS(video, decodedFrames, droppedFrames) {
  const currentTime = performance.now();
  if (decodedFrames) {
@@ -8514,10 +8221,10 @@ class FPSController {
  totalDroppedFrames: droppedFrames
  });
  if (droppedFPS > 0) {
- //
+ // logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
  if (currentDropped > hls.config.fpsDroppedMonitoringThreshold * currentDecoded) {
  let currentLevel = hls.currentLevel;
-
+ logger.warn('drop FPS ratio greater than max allowed value for currentLevel: ' + currentLevel);
  if (currentLevel > 0 && (hls.autoLevelCapping === -1 || hls.autoLevelCapping >= currentLevel)) {
  currentLevel = currentLevel - 1;
  hls.trigger(Events.FPS_DROP_LEVEL_CAPPING, {
@@ -8550,14 +8257,14 @@ class FPSController {
  }

  const PATHWAY_PENALTY_DURATION_MS = 300000;
- class ContentSteeringController
+ class ContentSteeringController {
  constructor(hls) {
- super('content-steering', hls.logger);
  this.hls = void 0;
+ this.log = void 0;
  this.loader = null;
  this.uri = null;
  this.pathwayId = '.';
- this.
+ this.pathwayPriority = null;
  this.timeToLoad = 300;
  this.reloadTimer = -1;
  this.updated = 0;
@@ -8568,6 +8275,7 @@ class ContentSteeringController extends Logger {
  this.subtitleTracks = null;
  this.penalizedPathways = {};
  this.hls = hls;
+ this.log = logger.log.bind(logger, `[content-steering]:`);
  this.registerListeners();
  }
  registerListeners() {
@@ -8587,20 +8295,6 @@ class ContentSteeringController extends Logger {
  hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
  hls.off(Events.ERROR, this.onError, this);
  }
- pathways() {
- return (this.levels || []).reduce((pathways, level) => {
- if (pathways.indexOf(level.pathwayId) === -1) {
- pathways.push(level.pathwayId);
- }
- return pathways;
- }, []);
- }
- get pathwayPriority() {
- return this._pathwayPriority;
- }
- set pathwayPriority(pathwayPriority) {
- this.updatePathwayPriority(pathwayPriority);
- }
  startLoad() {
  this.started = true;
  this.clearTimeout();
@@ -8674,7 +8368,7 @@ class ContentSteeringController extends Logger {
  } = data;
  if ((errorAction == null ? void 0 : errorAction.action) === NetworkErrorAction.SendAlternateToPenaltyBox && errorAction.flags === ErrorActionFlags.MoveAllAlternatesMatchingHost) {
  const levels = this.levels;
- let pathwayPriority = this.
+ let pathwayPriority = this.pathwayPriority;
  let errorPathway = this.pathwayId;
  if (data.context) {
  const {
@@ -8693,14 +8387,19 @@ class ContentSteeringController extends Logger {
  }
  if (!pathwayPriority && levels) {
  // If PATHWAY-PRIORITY was not provided, list pathways for error handling
- pathwayPriority =
+ pathwayPriority = levels.reduce((pathways, level) => {
+ if (pathways.indexOf(level.pathwayId) === -1) {
+ pathways.push(level.pathwayId);
+ }
+ return pathways;
+ }, []);
  }
  if (pathwayPriority && pathwayPriority.length > 1) {
  this.updatePathwayPriority(pathwayPriority);
  errorAction.resolved = this.pathwayId !== errorPathway;
  }
  if (!errorAction.resolved) {
-
+ logger.warn(`Could not resolve ${data.details} ("${data.error.message}") with content-steering for Pathway: ${errorPathway} levels: ${levels ? levels.length : levels} priorities: ${JSON.stringify(pathwayPriority)} penalized: ${JSON.stringify(this.penalizedPathways)}`);
  }
  }
  }
@@ -8727,7 +8426,7 @@ class ContentSteeringController extends Logger {
  return this.levels.filter(level => pathwayId === level.pathwayId);
  }
  updatePathwayPriority(pathwayPriority) {
- this.
+ this.pathwayPriority = pathwayPriority;
  let levels;

  // Evaluate if we should remove the pathway from the penalized list
@@ -8871,7 +8570,7 @@ class ContentSteeringController extends Logger {
  onSuccess: (response, stats, context, networkDetails) => {
  this.log(`Loaded steering manifest: "${url}"`);
  const steeringData = response.data;
- if (
+ if (steeringData.VERSION !== 1) {
  this.log(`Steering VERSION ${steeringData.VERSION} not supported!`);
  return;
  }
@@ -9779,7 +9478,7 @@ const hlsDefaultConfig = _objectSpread2(_objectSpread2({
  });
  function timelineConfig() {
  return {
- cueHandler:
+ cueHandler: Cues,
  // used by timeline-controller
  enableWebVTT: false,
  // used by timeline-controller
@@ -9810,7 +9509,7 @@ function timelineConfig() {
  /**
  * @ignore
  */
- function mergeConfig(defaultConfig, userConfig
+ function mergeConfig(defaultConfig, userConfig) {
  if ((userConfig.liveSyncDurationCount || userConfig.liveMaxLatencyDurationCount) && (userConfig.liveSyncDuration || userConfig.liveMaxLatencyDuration)) {
  throw new Error("Illegal hls.js config: don't mix up liveSyncDurationCount/liveMaxLatencyDurationCount and liveSyncDuration/liveMaxLatencyDuration");
  }
@@ -9880,7 +9579,7 @@ function deepCpy(obj) {
  /**
  * @ignore
  */
- function enableStreamingMode(config
+ function enableStreamingMode(config) {
  const currentLoader = config.loader;
  if (currentLoader !== FetchLoader && currentLoader !== XhrLoader) {
  // If a developer has configured their own loader, respect that choice
@@ -9897,9 +9596,10 @@ function enableStreamingMode(config, logger) {
  }
  }

+ let chromeOrFirefox;
  class LevelController extends BasePlaylistController {
  constructor(hls, contentSteeringController) {
- super(hls, 'level-controller');
+ super(hls, '[level-controller]');
  this._levels = [];
  this._firstLevel = -1;
  this._maxAutoLevel = -1;
@@ -9970,15 +9670,23 @@ class LevelController extends BasePlaylistController {
  let videoCodecFound = false;
  let audioCodecFound = false;
  data.levels.forEach(levelParsed => {
- var _videoCodec;
+ var _audioCodec, _videoCodec;
  const attributes = levelParsed.attrs;
+
+ // erase audio codec info if browser does not support mp4a.40.34.
+ // demuxer will autodetect codec and fallback to mpeg/audio
  let {
  audioCodec,
  videoCodec
  } = levelParsed;
+ if (((_audioCodec = audioCodec) == null ? void 0 : _audioCodec.indexOf('mp4a.40.34')) !== -1) {
+ chromeOrFirefox || (chromeOrFirefox = /chrome|firefox/i.test(navigator.userAgent));
+ if (chromeOrFirefox) {
+ levelParsed.audioCodec = audioCodec = undefined;
+ }
+ }
  if (audioCodec) {
-
- levelParsed.audioCodec = audioCodec = getCodecCompatibleName(audioCodec, preferManagedMediaSource) || undefined;
+ levelParsed.audioCodec = audioCodec = getCodecCompatibleName(audioCodec, preferManagedMediaSource);
  }
  if (((_videoCodec = videoCodec) == null ? void 0 : _videoCodec.indexOf('avc1')) === 0) {
  videoCodec = levelParsed.videoCodec = convertAVC1ToAVCOTI(videoCodec);
@@ -10273,25 +9981,6 @@ class LevelController extends BasePlaylistController {
  set startLevel(newLevel) {
  this._startLevel = newLevel;
  }
- get pathwayPriority() {
- if (this.steering) {
- return this.steering.pathwayPriority;
- }
- return null;
- }
- set pathwayPriority(pathwayPriority) {
- if (this.steering) {
- const pathwaysList = this.steering.pathways();
- const filteredPathwayPriority = pathwayPriority.filter(pathwayId => {
- return pathwaysList.indexOf(pathwayId) !== -1;
- });
- if (pathwayPriority.length < 1) {
- this.warn(`pathwayPriority ${pathwayPriority} should contain at least one pathway from list: ${pathwaysList}`);
- return;
- }
- this.steering.pathwayPriority = filteredPathwayPriority;
- }
- }
  onError(event, data) {
  if (data.fatal || !data.context) {
  return;
@@ -10339,12 +10028,7 @@ class LevelController extends BasePlaylistController {
  if (curLevel.fragmentError === 0) {
  curLevel.loadError = 0;
  }
-
- let previousDetails = curLevel.details;
- if (previousDetails === data.details && previousDetails.advanced) {
- previousDetails = undefined;
- }
- this.playlistLoaded(level, data, previousDetails);
+ this.playlistLoaded(level, data, curLevel.details);
  } else if ((_data$deliveryDirecti2 = data.deliveryDirectives) != null && _data$deliveryDirecti2.skip) {
  // received a delta playlist update that cannot be merged
  details.deltaUpdateFailed = true;
@@ -10522,16 +10206,13 @@ class FragmentTracker {
  * If not found any Fragment, return null
  */
  getBufferedFrag(position, levelType) {
- return this.getFragAtPos(position, levelType, true);
- }
- getFragAtPos(position, levelType, buffered) {
  const {
  fragments
  } = this;
  const keys = Object.keys(fragments);
  for (let i = keys.length; i--;) {
  const fragmentEntity = fragments[keys[i]];
- if ((fragmentEntity == null ? void 0 : fragmentEntity.body.type) === levelType &&
+ if ((fragmentEntity == null ? void 0 : fragmentEntity.body.type) === levelType && fragmentEntity.buffered) {
  const frag = fragmentEntity.body;
  if (frag.start <= position && position <= frag.end) {
  return frag;
@@ -10786,8 +10467,7 @@ class FragmentTracker {
  const {
  frag,
  part,
- timeRanges
- type
+ timeRanges
  } = data;
  if (frag.sn === 'initSegment') {
  return;
@@ -10802,8 +10482,10 @@ class FragmentTracker {
  }
  // Store the latest timeRanges loaded in the buffer
  this.timeRanges = timeRanges;
-
-
+ Object.keys(timeRanges).forEach(elementaryStream => {
+ const timeRange = timeRanges[elementaryStream];
+ this.detectEvictedFragments(elementaryStream, timeRange, playlistType, part);
+ });
  }
  onFragBuffered(event, data) {
  this.detectPartialFragments(data);
@@ -11132,8 +10814,8 @@ function createLoaderContext(frag, part = null) {
  var _frag$decryptdata;
  let byteRangeStart = start;
  let byteRangeEnd = end;
- if (frag.sn === 'initSegment' &&
- // MAP segment encrypted with method 'AES-128'
+ if (frag.sn === 'initSegment' && ((_frag$decryptdata = frag.decryptdata) == null ? void 0 : _frag$decryptdata.method) === 'AES-128') {
+ // MAP segment encrypted with method 'AES-128', when served with HTTP Range,
  // has the unencrypted size specified in the range.
  // Ref: https://tools.ietf.org/html/draft-pantos-hls-rfc8216bis-08#section-6.3.6
  const fragmentLen = end - start;
@@ -11166,9 +10848,6 @@ function createGapLoadError(frag, part) {
  (part ? part : frag).stats.aborted = true;
  return new LoadError(errorData);
  }
- function isMethodFullSegmentAesCbc(method) {
- return method === 'AES-128' || method === 'AES-256';
- }
  class LoadError extends Error {
  constructor(data) {
  super(data.error.message);
@@ -11314,8 +10993,6 @@ class KeyLoader {
  }
  return this.loadKeyEME(keyInfo, frag);
  case 'AES-128':
- case 'AES-256':
- case 'AES-256-CTR':
  return this.loadKeyHTTP(keyInfo, frag);
  default:
  return Promise.reject(this.createKeyLoadError(frag, ErrorDetails.KEY_LOAD_ERROR, new Error(`Key supplied with unsupported METHOD: "${decryptdata.method}"`)));
@@ -11451,9 +11128,8 @@ class KeyLoader {
  * we are limiting the task execution per call stack to exactly one, but scheduling/post-poning further
  * task processing on the next main loop iteration (also known as "next tick" in the Node/JS runtime lingo).
  */
- class TaskLoop
- constructor(
- super(label, logger);
+ class TaskLoop {
+ constructor() {
  this._boundTick = void 0;
  this._tickTimer = null;
  this._tickInterval = null;
@@ -11721,61 +11397,33 @@ function alignMediaPlaylistByPDT(details, refDetails) {
  }

  class AESCrypto {
- constructor(subtle, iv
+ constructor(subtle, iv) {
  this.subtle = void 0;
  this.aesIV = void 0;
- this.aesMode = void 0;
  this.subtle = subtle;
  this.aesIV = iv;
- this.aesMode = aesMode;
  }
  decrypt(data, key) {
-
-
-
-
- iv: this.aesIV
- }, key, data);
- case DecrypterAesMode.ctr:
- return this.subtle.decrypt({
- name: 'AES-CTR',
- counter: this.aesIV,
- length: 64
- },
- //64 : NIST SP800-38A standard suggests that the counter should occupy half of the counter block
- key, data);
- default:
- throw new Error(`[AESCrypto] invalid aes mode ${this.aesMode}`);
- }
+ return this.subtle.decrypt({
+ name: 'AES-CBC',
+ iv: this.aesIV
+ }, key, data);
  }
  }

  class FastAESKey {
- constructor(subtle, key
+ constructor(subtle, key) {
  this.subtle = void 0;
  this.key = void 0;
- this.aesMode = void 0;
  this.subtle = subtle;
  this.key = key;
- this.aesMode = aesMode;
  }
  expandKey() {
- const subtleAlgoName = getSubtleAlgoName(this.aesMode);
  return this.subtle.importKey('raw', this.key, {
- name:
+ name: 'AES-CBC'
  }, false, ['encrypt', 'decrypt']);
  }
  }
- function getSubtleAlgoName(aesMode) {
- switch (aesMode) {
- case DecrypterAesMode.cbc:
- return 'AES-CBC';
- case DecrypterAesMode.ctr:
- return 'AES-CTR';
- default:
- throw new Error(`[FastAESKey] invalid aes mode ${aesMode}`);
- }
- }

  // PKCS7
  function removePadding(array) {
@@ -12025,8 +11673,7 @@ class Decrypter {
  this.currentIV = null;
  this.currentResult = null;
  this.useSoftware = void 0;
- this.
- this.enableSoftwareAES = config.enableSoftwareAES;
+ this.useSoftware = config.enableSoftwareAES;
  this.removePKCS7Padding = removePKCS7Padding;
  // built in decryptor expects PKCS7 padding
  if (removePKCS7Padding) {
@@ -12077,10 +11724,10 @@ class Decrypter {
  this.softwareDecrypter = null;
  }
  }
- decrypt(data, key, iv
+ decrypt(data, key, iv) {
  if (this.useSoftware) {
  return new Promise((resolve, reject) => {
- this.softwareDecrypt(new Uint8Array(data), key, iv
+ this.softwareDecrypt(new Uint8Array(data), key, iv);
  const decryptResult = this.flush();
  if (decryptResult) {
  resolve(decryptResult.buffer);
@@ -12089,21 +11736,17 @@ class Decrypter {
  }
  });
  }
- return this.webCryptoDecrypt(new Uint8Array(data), key, iv
+ return this.webCryptoDecrypt(new Uint8Array(data), key, iv);
  }

  // Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
  // data is handled in the flush() call
- softwareDecrypt(data, key, iv
+ softwareDecrypt(data, key, iv) {
  const {
  currentIV,
  currentResult,
  remainderData
  } = this;
- if (aesMode !== DecrypterAesMode.cbc || key.byteLength !== 16) {
- logger.warn('SoftwareDecrypt: can only handle AES-128-CBC');
- return null;
- }
  this.logOnce('JS AES decrypt');
  // The output is staggered during progressive parsing - the current result is cached, and emitted on the next call
  // This is done in order to strip PKCS7 padding, which is found at the end of each segment. We only know we've reached
@@ -12136,13 +11779,13 @@ class Decrypter {
  }
  return result;
  }
- webCryptoDecrypt(data, key, iv
+ webCryptoDecrypt(data, key, iv) {
  if (this.key !== key || !this.fastAesKey) {
  if (!this.subtle) {
- return Promise.resolve(this.onWebCryptoError(data, key, iv
+ return Promise.resolve(this.onWebCryptoError(data, key, iv));
  }
  this.key = key;
- this.fastAesKey = new FastAESKey(this.subtle, key
+ this.fastAesKey = new FastAESKey(this.subtle, key);
  }
  return this.fastAesKey.expandKey().then(aesKey => {
  // decrypt using web crypto
@@ -12150,25 +11793,22 @@ class Decrypter {
  return Promise.reject(new Error('web crypto not initialized'));
  }
  this.logOnce('WebCrypto AES decrypt');
- const crypto = new AESCrypto(this.subtle, new Uint8Array(iv)
+ const crypto = new AESCrypto(this.subtle, new Uint8Array(iv));
  return crypto.decrypt(data.buffer, aesKey);
  }).catch(err => {
  logger.warn(`[decrypter]: WebCrypto Error, disable WebCrypto API, ${err.name}: ${err.message}`);
- return this.onWebCryptoError(data, key, iv
+ return this.onWebCryptoError(data, key, iv);
  });
  }
- onWebCryptoError(data, key, iv
-
-
-
-
-
-
- if (decryptResult) {
- return decryptResult.buffer;
- }
+ onWebCryptoError(data, key, iv) {
+ this.useSoftware = true;
+ this.logEnabled = true;
+ this.softwareDecrypt(data, key, iv);
+ const decryptResult = this.flush();
+ if (decryptResult) {
+ return decryptResult.buffer;
  }
- throw new Error('WebCrypto
+ throw new Error('WebCrypto and softwareDecrypt: failed to decrypt data');
  }
  getValidChunk(data) {
  let currentChunk = data;
@@ -12219,7 +11859,7 @@ const State = {
|
|
12219
11859
|
};
|
12220
11860
|
class BaseStreamController extends TaskLoop {
|
12221
11861
|
constructor(hls, fragmentTracker, keyLoader, logPrefix, playlistType) {
|
12222
|
-
super(
|
11862
|
+
super();
|
12223
11863
|
this.hls = void 0;
|
12224
11864
|
this.fragPrevious = null;
|
12225
11865
|
this.fragCurrent = null;
|
@@ -12244,98 +11884,22 @@ class BaseStreamController extends TaskLoop {
|
|
12244
11884
|
this.startFragRequested = false;
|
12245
11885
|
this.decrypter = void 0;
|
12246
11886
|
this.initPTS = [];
|
12247
|
-
this.
|
12248
|
-
this.
|
12249
|
-
this.
|
12250
|
-
|
12251
|
-
|
12252
|
-
fragCurrent,
|
12253
|
-
media,
|
12254
|
-
mediaBuffer,
|
12255
|
-
state
|
12256
|
-
} = this;
|
12257
|
-
const currentTime = media ? media.currentTime : 0;
|
12258
|
-
const bufferInfo = BufferHelper.bufferInfo(mediaBuffer ? mediaBuffer : media, currentTime, config.maxBufferHole);
|
12259
|
-
this.log(`media seeking to ${isFiniteNumber(currentTime) ? currentTime.toFixed(3) : currentTime}, state: ${state}`);
|
12260
|
-
if (this.state === State.ENDED) {
|
12261
|
-
this.resetLoadingState();
|
12262
|
-
} else if (fragCurrent) {
|
12263
|
-
// Seeking while frag load is in progress
|
12264
|
-
const tolerance = config.maxFragLookUpTolerance;
|
12265
|
-
const fragStartOffset = fragCurrent.start - tolerance;
|
12266
|
-
- const fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance;
- // if seeking out of buffered range or into new one
- if (!bufferInfo.len || fragEndOffset < bufferInfo.start || fragStartOffset > bufferInfo.end) {
- const pastFragment = currentTime > fragEndOffset;
- // if the seek position is outside the current fragment range
- if (currentTime < fragStartOffset || pastFragment) {
- if (pastFragment && fragCurrent.loader) {
- this.log('seeking outside of buffer while fragment load in progress, cancel fragment load');
- fragCurrent.abortRequests();
- this.resetLoadingState();
- }
- this.fragPrevious = null;
- }
- }
- }
- if (media) {
- // Remove gap fragments
- this.fragmentTracker.removeFragmentsInRange(currentTime, Infinity, this.playlistType, true);
- this.lastCurrentTime = currentTime;
- if (!this.loadingParts) {
- const bufferEnd = Math.max(bufferInfo.end, currentTime);
- const shouldLoadParts = this.shouldLoadParts(this.getLevelDetails(), bufferEnd);
- if (shouldLoadParts) {
- this.log(`LL-Part loading ON after seeking to ${currentTime.toFixed(2)} with buffer @${bufferEnd.toFixed(2)}`);
- this.loadingParts = shouldLoadParts;
- }
- }
- }
-
- // in case seeking occurs although no media buffered, adjust startPosition and nextLoadPosition to seek target
- if (!this.loadedmetadata && !bufferInfo.len) {
- this.nextLoadPosition = this.startPosition = currentTime;
- }
-
- // Async tick to speed up processing
- this.tickImmediate();
- };
- this.onMediaEnded = () => {
- // reset startPosition and lastCurrentTime to restart playback @ stream beginning
- this.startPosition = this.lastCurrentTime = 0;
- if (this.playlistType === PlaylistLevelType.MAIN) {
- this.hls.trigger(Events.MEDIA_ENDED, {
- stalled: false
- });
- }
- };
+ this.onvseeking = null;
+ this.onvended = null;
+ this.logPrefix = '';
+ this.log = void 0;
+ this.warn = void 0;
  this.playlistType = playlistType;
+ this.logPrefix = logPrefix;
+ this.log = logger.log.bind(logger, `${logPrefix}:`);
+ this.warn = logger.warn.bind(logger, `${logPrefix}:`);
  this.hls = hls;
  this.fragmentLoader = new FragmentLoader(hls.config);
  this.keyLoader = keyLoader;
  this.fragmentTracker = fragmentTracker;
  this.config = hls.config;
  this.decrypter = new Decrypter(hls.config);
- }
- registerListeners() {
- const {
- hls
- } = this;
- hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
- hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
- hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  hls.on(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
- hls.on(Events.ERROR, this.onError, this);
- }
- unregisterListeners() {
- const {
- hls
- } = this;
- hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
- hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
- hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
- hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
- hls.off(Events.ERROR, this.onError, this);
  }
  doTick() {
  this.onTickEnd();
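The release build replaces the arrow-function handlers above with prefix-bound loggers created in the constructor. A minimal sketch of that `Function.prototype.bind` pattern, assuming a hypothetical `PrefixedComponent` and a console-backed logger (not the hls.js implementation):

```ts
// Sketch only: pre-binding a prefix with Function.prototype.bind,
// mirroring the `logger.log.bind(logger, `${logPrefix}:`)` lines above.
type LogFn = (...args: unknown[]) => void;

interface Logger {
  log: LogFn;
  warn: LogFn;
}

const baseLogger: Logger = {
  log: (...args) => console.log(...args),
  warn: (...args) => console.warn(...args),
};

class PrefixedComponent {
  readonly log: LogFn;
  readonly warn: LogFn;

  constructor(logPrefix: string, logger: Logger = baseLogger) {
    // bind() fixes `this` and prepends the prefix to every call's argument list
    this.log = logger.log.bind(logger, `${logPrefix}:`);
    this.warn = logger.warn.bind(logger, `${logPrefix}:`);
  }
}

// Usage: new PrefixedComponent('[stream-controller]').log('tick')
// prints "[stream-controller]: tick"
```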
@@ -12359,12 +11923,6 @@ class BaseStreamController extends TaskLoop {
  this.clearNextTick();
  this.state = State.STOPPED;
  }
- pauseBuffering() {
- this.buffering = false;
- }
- resumeBuffering() {
- this.buffering = true;
- }
  _streamEnded(bufferInfo, levelDetails) {
  // If playlist is live, there is another buffered range after the current range, nothing buffered, media is detached,
  // of nothing loading/loaded return false
@@ -12395,8 +11953,10 @@ class BaseStreamController extends TaskLoop {
  }
  onMediaAttached(event, data) {
  const media = this.media = this.mediaBuffer = data.media;
-
-
+ this.onvseeking = this.onMediaSeeking.bind(this);
+ this.onvended = this.onMediaEnded.bind(this);
+ media.addEventListener('seeking', this.onvseeking);
+ media.addEventListener('ended', this.onvended);
  const config = this.config;
  if (this.levels && config.autoStartLoad && this.state === State.STOPPED) {
  this.startLoad(config.startPosition);
@@ -12410,9 +11970,10 @@ class BaseStreamController extends TaskLoop {
  }

  // remove video listeners
- if (media) {
- media.removeEventListener('seeking', this.
- media.removeEventListener('ended', this.
+ if (media && this.onvseeking && this.onvended) {
+ media.removeEventListener('seeking', this.onvseeking);
+ media.removeEventListener('ended', this.onvended);
+ this.onvseeking = this.onvended = null;
  }
  if (this.keyLoader) {
  this.keyLoader.detach();
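The listener wiring shown in these hunks keeps the bound handler references so they can be removed later. A small sketch of the same attach/detach pattern, with hypothetical class and method names:

```ts
// Sketch only: store the bound handlers (onvseeking/onvended) so that
// removeEventListener() can unregister exactly the functions that were added.
class MediaEventBinder {
  private onvseeking: ((e: Event) => void) | null = null;
  private onvended: ((e: Event) => void) | null = null;

  attach(media: HTMLMediaElement): void {
    this.onvseeking = this.onMediaSeeking.bind(this);
    this.onvended = this.onMediaEnded.bind(this);
    media.addEventListener('seeking', this.onvseeking);
    media.addEventListener('ended', this.onvended);
  }

  detach(media: HTMLMediaElement): void {
    if (this.onvseeking && this.onvended) {
      media.removeEventListener('seeking', this.onvseeking);
      media.removeEventListener('ended', this.onvended);
      this.onvseeking = this.onvended = null;
    }
  }

  private onMediaSeeking(): void {
    // react to a seek (placeholder)
  }

  private onMediaEnded(): void {
    // react to playback ending (placeholder)
  }
}
```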
@@ -12422,17 +11983,66 @@ class BaseStreamController extends TaskLoop {
  this.fragmentTracker.removeAllFragments();
  this.stopLoad();
  }
-
-
+ onMediaSeeking() {
+ const {
+ config,
+ fragCurrent,
+ media,
+ mediaBuffer,
+ state
+ } = this;
+ const currentTime = media ? media.currentTime : 0;
+ const bufferInfo = BufferHelper.bufferInfo(mediaBuffer ? mediaBuffer : media, currentTime, config.maxBufferHole);
+ this.log(`media seeking to ${isFiniteNumber(currentTime) ? currentTime.toFixed(3) : currentTime}, state: ${state}`);
+ if (this.state === State.ENDED) {
+ this.resetLoadingState();
+ } else if (fragCurrent) {
+ // Seeking while frag load is in progress
+ const tolerance = config.maxFragLookUpTolerance;
+ const fragStartOffset = fragCurrent.start - tolerance;
+ const fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance;
+ // if seeking out of buffered range or into new one
+ if (!bufferInfo.len || fragEndOffset < bufferInfo.start || fragStartOffset > bufferInfo.end) {
+ const pastFragment = currentTime > fragEndOffset;
+ // if the seek position is outside the current fragment range
+ if (currentTime < fragStartOffset || pastFragment) {
+ if (pastFragment && fragCurrent.loader) {
+ this.log('seeking outside of buffer while fragment load in progress, cancel fragment load');
+ fragCurrent.abortRequests();
+ this.resetLoadingState();
+ }
+ this.fragPrevious = null;
+ }
+ }
+ }
+ if (media) {
+ // Remove gap fragments
+ this.fragmentTracker.removeFragmentsInRange(currentTime, Infinity, this.playlistType, true);
+ this.lastCurrentTime = currentTime;
+ }
+
+ // in case seeking occurs although no media buffered, adjust startPosition and nextLoadPosition to seek target
+ if (!this.loadedmetadata && !bufferInfo.len) {
+ this.nextLoadPosition = this.startPosition = currentTime;
+ }
+
+ // Async tick to speed up processing
+ this.tickImmediate();
+ }
+ onMediaEnded() {
+ // reset startPosition and lastCurrentTime to restart playback @ stream beginning
+ this.startPosition = this.lastCurrentTime = 0;
+ }
  onManifestLoaded(event, data) {
  this.startTimeOffset = data.startTimeOffset;
  this.initPTS = [];
  }
  onHandlerDestroying() {
+ this.hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
  this.stopLoad();
  super.onHandlerDestroying();
  // @ts-ignore
- this.hls =
+ this.hls = null;
  }
  onHandlerDestroyed() {
  this.state = State.STOPPED;
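`onMediaSeeking()` above aborts an in-flight fragment request when the seek target falls outside the fragment's tolerance window. A standalone sketch of that check, using hypothetical helper names and assuming times in seconds:

```ts
// Illustrative sketch, not the hls.js API: decide whether a seek lands outside
// the in-flight fragment's [start - tolerance, end + tolerance] window.
interface FragmentRange {
  start: number;    // seconds
  duration: number; // seconds
}

function seekLeavesFragment(
  currentTime: number,
  frag: FragmentRange,
  maxFragLookUpTolerance: number,
): boolean {
  const fragStartOffset = frag.start - maxFragLookUpTolerance;
  const fragEndOffset = frag.start + frag.duration + maxFragLookUpTolerance;
  // outside the window means the in-flight load should be aborted
  return currentTime < fragStartOffset || currentTime > fragEndOffset;
}

// Seeking to 42s while fragment [10s, 16s) loads with 0.25s tolerance: true
console.log(seekLeavesFragment(42, { start: 10, duration: 6 }, 0.25));
```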
@@ -12566,10 +12176,10 @@ class BaseStreamController extends TaskLoop {
  const decryptData = frag.decryptdata;

  // check to see if the payload needs to be decrypted
- if (payload && payload.byteLength > 0 && decryptData != null && decryptData.key && decryptData.iv &&
+ if (payload && payload.byteLength > 0 && decryptData != null && decryptData.key && decryptData.iv && decryptData.method === 'AES-128') {
  const startTime = self.performance.now();
  // decrypt init segment data
- return this.decrypter.decrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer
+ return this.decrypter.decrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer).catch(err => {
  hls.trigger(Events.ERROR, {
  type: ErrorTypes.MEDIA_ERROR,
  details: ErrorDetails.FRAG_DECRYPT_ERROR,
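The decrypt path now restricts itself to AES-128 key data and attaches a `.catch()` so a failed decrypt surfaces as an error event rather than an unhandled rejection. A hedged sketch of the same idea using WebCrypto directly (hls.js routes this through its own `Decrypter` class):

```ts
// Sketch, assuming AES-128-CBC key material from an EXT-X-KEY response.
async function decryptAes128Cbc(
  payload: Uint8Array,
  key: ArrayBuffer, // 16-byte key
  iv: ArrayBuffer,  // 16-byte initialization vector
): Promise<Uint8Array> {
  const cryptoKey = await crypto.subtle.importKey('raw', key, { name: 'AES-CBC' }, false, [
    'decrypt',
  ]);
  try {
    const clear = await crypto.subtle.decrypt({ name: 'AES-CBC', iv }, cryptoKey, payload);
    return new Uint8Array(clear);
  } catch (err) {
    // wrap the failure so a caller can map it to a FRAG_DECRYPT_ERROR-style event
    throw new Error(`fragment decrypt failed: ${(err as Error).message}`);
  }
}
```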
@@ -12610,9 +12220,7 @@ class BaseStreamController extends TaskLoop {
  throw new Error('init load aborted, missing levels');
  }
  const stats = data.frag.stats;
-
- this.state = State.IDLE;
- }
+ this.state = State.IDLE;
  data.frag.data = new Uint8Array(data.payload);
  stats.parsing.start = stats.buffering.start = self.performance.now();
  stats.parsing.end = stats.buffering.end = self.performance.now();
@@ -12683,7 +12291,7 @@ class BaseStreamController extends TaskLoop {
  }
  let keyLoadingPromise = null;
  if (frag.encrypted && !((_frag$decryptdata = frag.decryptdata) != null && _frag$decryptdata.key)) {
- this.log(`Loading key for ${frag.sn} of [${details.startSN}-${details.endSN}], ${this.
+ this.log(`Loading key for ${frag.sn} of [${details.startSN}-${details.endSN}], ${this.logPrefix === '[stream-controller]' ? 'level' : 'track'} ${frag.level}`);
  this.state = State.KEY_LOADING;
  this.fragCurrent = frag;
  keyLoadingPromise = this.keyLoader.load(frag).then(keyLoadedData => {
@@ -12704,16 +12312,8 @@ class BaseStreamController extends TaskLoop {
  } else if (!frag.encrypted && details.encryptedFragments.length) {
  this.keyLoader.loadClear(frag, details.encryptedFragments);
  }
- const fragPrevious = this.fragPrevious;
- if (frag.sn !== 'initSegment' && (!fragPrevious || frag.sn !== fragPrevious.sn)) {
- const shouldLoadParts = this.shouldLoadParts(level.details, frag.end);
- if (shouldLoadParts !== this.loadingParts) {
- this.log(`LL-Part loading ${shouldLoadParts ? 'ON' : 'OFF'} loading sn ${fragPrevious == null ? void 0 : fragPrevious.sn}->${frag.sn}`);
- this.loadingParts = shouldLoadParts;
- }
- }
  targetBufferTime = Math.max(frag.start, targetBufferTime || 0);
- if (this.
+ if (this.config.lowLatencyMode && frag.sn !== 'initSegment') {
  const partList = details.partList;
  if (partList && progressCallback) {
  if (targetBufferTime > frag.end && details.fragmentHint) {
@@ -12722,7 +12322,7 @@ class BaseStreamController extends TaskLoop {
  const partIndex = this.getNextPart(partList, frag, targetBufferTime);
  if (partIndex > -1) {
  const part = partList[partIndex];
- this.log(`Loading part sn: ${frag.sn} p: ${part.index} cc: ${frag.cc} of playlist [${details.startSN}-${details.endSN}] parts [0-${partIndex}-${partList.length - 1}] ${this.
+ this.log(`Loading part sn: ${frag.sn} p: ${part.index} cc: ${frag.cc} of playlist [${details.startSN}-${details.endSN}] parts [0-${partIndex}-${partList.length - 1}] ${this.logPrefix === '[stream-controller]' ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
  this.nextLoadPosition = part.start + part.duration;
  this.state = State.FRAG_LOADING;
  let _result;
@@ -12751,14 +12351,7 @@ class BaseStreamController extends TaskLoop {
  }
  }
  }
-
- this.log(`LL-Part loading OFF after next part miss @${targetBufferTime.toFixed(2)}`);
- this.loadingParts = false;
- } else if (!frag.url) {
- // Selected fragment hint for part but not loading parts
- return Promise.resolve(null);
- }
- this.log(`Loading fragment ${frag.sn} cc: ${frag.cc} ${details ? 'of [' + details.startSN + '-' + details.endSN + '] ' : ''}${this.playlistType === PlaylistLevelType.MAIN ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
+ this.log(`Loading fragment ${frag.sn} cc: ${frag.cc} ${details ? 'of [' + details.startSN + '-' + details.endSN + '] ' : ''}${this.logPrefix === '[stream-controller]' ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
  // Don't update nextLoadPosition for fragments which are not buffered
  if (isFiniteNumber(frag.sn) && !this.bitrateTest) {
  this.nextLoadPosition = frag.start + frag.duration;
@@ -12856,36 +12449,8 @@ class BaseStreamController extends TaskLoop {
  if (part) {
  part.stats.parsing.end = now;
  }
- // See if part loading should be disabled/enabled based on buffer and playback position.
- if (frag.sn !== 'initSegment') {
- const levelDetails = this.getLevelDetails();
- const loadingPartsAtEdge = levelDetails && frag.sn > levelDetails.endSN;
- const shouldLoadParts = loadingPartsAtEdge || this.shouldLoadParts(levelDetails, frag.end);
- if (shouldLoadParts !== this.loadingParts) {
- this.log(`LL-Part loading ${shouldLoadParts ? 'ON' : 'OFF'} after parsing segment ending @${frag.end.toFixed(2)}`);
- this.loadingParts = shouldLoadParts;
- }
- }
  this.updateLevelTiming(frag, part, level, chunkMeta.partial);
  }
- shouldLoadParts(details, bufferEnd) {
- if (this.config.lowLatencyMode) {
- if (!details) {
- return this.loadingParts;
- }
- if (details != null && details.partList) {
- var _details$fragmentHint;
- // Buffer must be ahead of first part + duration of parts after last segment
- // and playback must be at or past segment adjacent to part list
- const firstPart = details.partList[0];
- const safePartStart = firstPart.end + (((_details$fragmentHint = details.fragmentHint) == null ? void 0 : _details$fragmentHint.duration) || 0);
- if (bufferEnd >= safePartStart && this.lastCurrentTime > firstPart.start - firstPart.fragment.duration) {
- return true;
- }
- }
- }
- return false;
- }
  getCurrentContext(chunkMeta) {
  const {
  levels,
@@ -12986,7 +12551,7 @@ class BaseStreamController extends TaskLoop {
  // Workaround flaw in getting forward buffer when maxBufferHole is smaller than gap at current pos
  if (bufferInfo.len === 0 && bufferInfo.nextStart !== undefined) {
  const bufferedFragAtPos = this.fragmentTracker.getBufferedFrag(pos, type);
- if (bufferedFragAtPos &&
+ if (bufferedFragAtPos && bufferInfo.nextStart < bufferedFragAtPos.end) {
  return BufferHelper.bufferInfo(bufferable, pos, Math.max(bufferInfo.nextStart, maxBufferHole));
  }
  }
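The workaround above widens the hole tolerance when nothing is buffered at the current position but a buffered range starts shortly after. A minimal sketch of that tolerance choice, with hypothetical types and the same `Math.max` call as the hunk:

```ts
// Sketch only: pick the tolerance used for a second buffer-info query when the
// first query reports an empty forward buffer with an upcoming range.
interface ForwardBufferInfo {
  len: number;        // seconds buffered ahead of the queried position
  nextStart?: number; // value passed to Math.max in the hunk above, when present
}

function effectiveMaxBufferHole(info: ForwardBufferInfo, maxBufferHole: number): number {
  if (info.len === 0 && info.nextStart !== undefined) {
    return Math.max(info.nextStart, maxBufferHole);
  }
  return maxBufferHole;
}

console.log(effectiveMaxBufferHole({ len: 0, nextStart: 0.7 }, 0.1)); // 0.7
```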
@@ -13035,8 +12600,7 @@ class BaseStreamController extends TaskLoop {
  config
  } = this;
  const start = fragments[0].start;
-
- let frag = null;
+ let frag;
  if (levelDetails.live) {
  const initialLiveManifestSize = config.initialLiveManifestSize;
  if (fragLen < initialLiveManifestSize) {
@@ -13048,10 +12612,6 @@ class BaseStreamController extends TaskLoop {
  // Do not load using live logic if the starting frag is requested - we want to use getFragmentAtPosition() so that
  // we get the fragment matching that start time
  if (!levelDetails.PTSKnown && !this.startFragRequested && this.startPosition === -1 || pos < start) {
- if (canLoadParts && !this.loadingParts) {
- this.log(`LL-Part loading ON for initial live fragment`);
- this.loadingParts = true;
- }
  frag = this.getInitialLiveFragment(levelDetails, fragments);
  this.startPosition = this.nextLoadPosition = frag ? this.hls.liveSyncPosition || frag.start : pos;
  }
@@ -13062,7 +12622,7 @@ class BaseStreamController extends TaskLoop {

  // If we haven't run into any special cases already, just load the fragment most closely matching the requested position
  if (!frag) {
- const end =
+ const end = config.lowLatencyMode ? levelDetails.partEnd : levelDetails.fragmentEnd;
  frag = this.getFragmentAtPosition(pos, end, levelDetails);
  }
  return this.mapToInitFragWhenRequired(frag);
@@ -13184,7 +12744,7 @@ class BaseStreamController extends TaskLoop {
  } = levelDetails;
  const tolerance = config.maxFragLookUpTolerance;
  const partList = levelDetails.partList;
- const loadingParts = !!(
+ const loadingParts = !!(config.lowLatencyMode && partList != null && partList.length && fragmentHint);
  if (loadingParts && fragmentHint && !this.bitrateTest) {
  // Include incomplete fragment with parts at end
  fragments = fragments.concat(fragmentHint);
@@ -13377,7 +12937,7 @@ class BaseStreamController extends TaskLoop {
  errorAction.resolved = true;
  }
  } else {
-
+ logger.warn(`${data.details} reached or exceeded max retry (${retryCount})`);
  return;
  }
  } else if ((errorAction == null ? void 0 : errorAction.action) === NetworkErrorAction.SendAlternateToPenaltyBox) {
@@ -13445,9 +13005,7 @@ class BaseStreamController extends TaskLoop {
  this.log('Reset loading state');
  this.fragCurrent = null;
  this.fragPrevious = null;
-
- this.state = State.IDLE;
- }
+ this.state = State.IDLE;
  }
  resetStartWhenNotLoaded(level) {
  // if loadedmetadata is not set, it means that first frag request failed
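Several of these hunks rely on `config.maxFragLookUpTolerance` when matching a playback position to a fragment. A simplified sketch of a position lookup with that slack (the real `getFragmentAtPosition` also weighs candidate fragments and low-latency parts):

```ts
// Sketch only, hypothetical helper: find a fragment whose range covers `pos`,
// allowing `tolerance` seconds of slack at both edges.
interface Frag {
  start: number;
  duration: number;
}

function findFragmentAtPosition(fragments: Frag[], pos: number, tolerance: number): Frag | null {
  for (const frag of fragments) {
    // accept positions within [start - tolerance, start + duration + tolerance)
    if (pos >= frag.start - tolerance && pos < frag.start + frag.duration + tolerance) {
      return frag;
    }
  }
  return null;
}

const playlist: Frag[] = [
  { start: 0, duration: 6 },
  { start: 6, duration: 6 },
];
console.log(findFragmentAtPosition(playlist, 8, 0.25)); // { start: 6, duration: 6 }
```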
@@ -13601,127 +13159,29 @@ function injectWorker() {
|
|
13601
13159
|
const objectURL = self.URL.createObjectURL(blob);
|
13602
13160
|
const worker = new self.Worker(objectURL);
|
13603
13161
|
return {
|
13604
|
-
worker,
|
13605
|
-
objectURL
|
13606
|
-
};
|
13607
|
-
}
|
13608
|
-
function loadWorker(path) {
|
13609
|
-
const scriptURL = new self.URL(path, self.location.href).href;
|
13610
|
-
const worker = new self.Worker(scriptURL);
|
13611
|
-
return {
|
13612
|
-
worker,
|
13613
|
-
scriptURL
|
13614
|
-
};
|
13615
|
-
}
|
13616
|
-
|
13617
|
-
function dummyTrack(type = '', inputTimeScale = 90000) {
|
13618
|
-
return {
|
13619
|
-
type,
|
13620
|
-
id: -1,
|
13621
|
-
pid: -1,
|
13622
|
-
inputTimeScale,
|
13623
|
-
sequenceNumber: -1,
|
13624
|
-
samples: [],
|
13625
|
-
dropped: 0
|
13626
|
-
};
|
13627
|
-
}
|
13628
|
-
|
13629
|
-
/**
|
13630
|
-
* Returns any adjacent ID3 tags found in data starting at offset, as one block of data
|
13631
|
-
*
|
13632
|
-
* @param data - The data to search in
|
13633
|
-
* @param offset - The offset at which to start searching
|
13634
|
-
*
|
13635
|
-
* @returns The block of data containing any ID3 tags found
|
13636
|
-
* or `undefined` if no header is found at the starting offset
|
13637
|
-
*
|
13638
|
-
* @internal
|
13639
|
-
*
|
13640
|
-
* @group ID3
|
13641
|
-
*/
|
13642
|
-
function getId3Data(data, offset) {
|
13643
|
-
const front = offset;
|
13644
|
-
let length = 0;
|
13645
|
-
while (isId3Header(data, offset)) {
|
13646
|
-
// ID3 header is 10 bytes
|
13647
|
-
length += 10;
|
13648
|
-
const size = readId3Size(data, offset + 6);
|
13649
|
-
length += size;
|
13650
|
-
if (isId3Footer(data, offset + 10)) {
|
13651
|
-
// ID3 footer is 10 bytes
|
13652
|
-
length += 10;
|
13653
|
-
}
|
13654
|
-
offset += length;
|
13655
|
-
}
|
13656
|
-
if (length > 0) {
|
13657
|
-
return data.subarray(front, front + length);
|
13658
|
-
}
|
13659
|
-
return undefined;
|
13660
|
-
}
|
13661
|
-
|
13662
|
-
/**
|
13663
|
-
* Read a 33 bit timestamp from an ID3 frame.
|
13664
|
-
*
|
13665
|
-
* @param timeStampFrame - the ID3 frame
|
13666
|
-
*
|
13667
|
-
* @returns The timestamp
|
13668
|
-
*
|
13669
|
-
* @internal
|
13670
|
-
*
|
13671
|
-
* @group ID3
|
13672
|
-
*/
|
13673
|
-
function readId3Timestamp(timeStampFrame) {
|
13674
|
-
if (timeStampFrame.data.byteLength === 8) {
|
13675
|
-
const data = new Uint8Array(timeStampFrame.data);
|
13676
|
-
// timestamp is 33 bit expressed as a big-endian eight-octet number,
|
13677
|
-
// with the upper 31 bits set to zero.
|
13678
|
-
const pts33Bit = data[3] & 0x1;
|
13679
|
-
let timestamp = (data[4] << 23) + (data[5] << 15) + (data[6] << 7) + data[7];
|
13680
|
-
timestamp /= 45;
|
13681
|
-
if (pts33Bit) {
|
13682
|
-
timestamp += 47721858.84;
|
13683
|
-
} // 2^32 / 90
|
13684
|
-
return Math.round(timestamp);
|
13685
|
-
}
|
13686
|
-
return undefined;
|
13687
|
-
}
|
13688
|
-
|
13689
|
-
/**
|
13690
|
-
* Searches for the Elementary Stream timestamp found in the ID3 data chunk
|
13691
|
-
*
|
13692
|
-
* @param data - Block of data containing one or more ID3 tags
|
13693
|
-
*
|
13694
|
-
* @returns The timestamp
|
13695
|
-
*
|
13696
|
-
* @group ID3
|
13697
|
-
*
|
13698
|
-
* @beta
|
13699
|
-
*/
|
13700
|
-
function getId3Timestamp(data) {
|
13701
|
-
const frames = getId3Frames(data);
|
13702
|
-
for (let i = 0; i < frames.length; i++) {
|
13703
|
-
const frame = frames[i];
|
13704
|
-
if (isId3TimestampFrame(frame)) {
|
13705
|
-
return readId3Timestamp(frame);
|
13706
|
-
}
|
13707
|
-
}
|
13708
|
-
return undefined;
|
13162
|
+
worker,
|
13163
|
+
objectURL
|
13164
|
+
};
|
13165
|
+
}
|
13166
|
+
function loadWorker(path) {
|
13167
|
+
const scriptURL = new self.URL(path, self.location.href).href;
|
13168
|
+
const worker = new self.Worker(scriptURL);
|
13169
|
+
return {
|
13170
|
+
worker,
|
13171
|
+
scriptURL
|
13172
|
+
};
|
13709
13173
|
}
|
13710
13174
|
|
13711
|
-
|
13712
|
-
|
13713
|
-
|
13714
|
-
|
13715
|
-
|
13716
|
-
|
13717
|
-
|
13718
|
-
|
13719
|
-
|
13720
|
-
|
13721
|
-
* @beta
|
13722
|
-
*/
|
13723
|
-
function canParseId3(data, offset) {
|
13724
|
-
return isId3Header(data, offset) && readId3Size(data, offset + 6) + 10 <= data.length - offset;
|
13175
|
+
function dummyTrack(type = '', inputTimeScale = 90000) {
|
13176
|
+
return {
|
13177
|
+
type,
|
13178
|
+
id: -1,
|
13179
|
+
pid: -1,
|
13180
|
+
inputTimeScale,
|
13181
|
+
sequenceNumber: -1,
|
13182
|
+
samples: [],
|
13183
|
+
dropped: 0
|
13184
|
+
};
|
13725
13185
|
}
|
13726
13186
|
|
13727
13187
|
class BaseAudioDemuxer {
|
@@ -13765,12 +13225,12 @@ class BaseAudioDemuxer {
  data = appendUint8Array(this.cachedData, data);
  this.cachedData = null;
  }
- let id3Data =
+ let id3Data = getID3Data(data, 0);
  let offset = id3Data ? id3Data.length : 0;
  let lastDataIndex;
  const track = this._audioTrack;
  const id3Track = this._id3Track;
- const timestamp = id3Data ?
+ const timestamp = id3Data ? getTimeStamp(id3Data) : undefined;
  const length = data.length;
  if (this.basePTS === null || this.frameIndex === 0 && isFiniteNumber(timestamp)) {
  this.basePTS = initPTSFn(timestamp, timeOffset, this.initPTS);
@@ -13801,9 +13261,9 @@ class BaseAudioDemuxer {
  } else {
  offset = length;
  }
- } else if (
- // after a canParse, a call to
- id3Data =
+ } else if (canParse$2(data, offset)) {
+ // after a ID3.canParse, a call to ID3.getID3Data *should* always returns some data
+ id3Data = getID3Data(data, offset);
  id3Track.samples.push({
  pts: this.lastPTS,
  dts: this.lastPTS,
@@ -13872,7 +13332,6 @@ const initPTSFn = (timestamp, timeOffset, initPTS) => {
  */
  function getAudioConfig(observer, data, offset, audioCodec) {
  let adtsObjectType;
- let originalAdtsObjectType;
  let adtsExtensionSamplingIndex;
  let adtsChannelConfig;
  let config;
@@ -13880,7 +13339,7 @@ function getAudioConfig(observer, data, offset, audioCodec) {
  const manifestCodec = audioCodec;
  const adtsSamplingRates = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
  // byte 2
- adtsObjectType =
+ adtsObjectType = ((data[offset + 2] & 0xc0) >>> 6) + 1;
  const adtsSamplingIndex = (data[offset + 2] & 0x3c) >>> 2;
  if (adtsSamplingIndex > adtsSamplingRates.length - 1) {
  const error = new Error(`invalid ADTS sampling index:${adtsSamplingIndex}`);
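The reconstructed ADTS lines above read the audio object type and sampling-frequency index straight from byte 2 of the header. A self-contained sketch of those bit reads, using the same sampling-rate table:

```ts
// Sketch of the ADTS byte-2 parsing shown above (hypothetical helper name).
const ADTS_SAMPLING_RATES = [
  96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350,
];

function parseAdtsByte2(data: Uint8Array, offset: number) {
  const objectType = ((data[offset + 2] & 0xc0) >>> 6) + 1; // MPEG-4 Audio Object Type
  const samplingIndex = (data[offset + 2] & 0x3c) >>> 2;    // index into the table above
  if (samplingIndex > ADTS_SAMPLING_RATES.length - 1) {
    throw new Error(`invalid ADTS sampling index:${samplingIndex}`);
  }
  return { objectType, samplerate: ADTS_SAMPLING_RATES[samplingIndex] };
}
```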
@@ -13897,8 +13356,8 @@ function getAudioConfig(observer, data, offset, audioCodec) {
  // byte 3
  adtsChannelConfig |= (data[offset + 3] & 0xc0) >>> 6;
  logger.log(`manifest codec:${audioCodec}, ADTS type:${adtsObjectType}, samplingIndex:${adtsSamplingIndex}`);
- //
- if (/firefox
+ // firefox: freq less than 24kHz = AAC SBR (HE-AAC)
+ if (/firefox/i.test(userAgent)) {
  if (adtsSamplingIndex >= 6) {
  adtsObjectType = 5;
  config = new Array(4);
@@ -13992,7 +13451,6 @@ function getAudioConfig(observer, data, offset, audioCodec) {
  samplerate: adtsSamplingRates[adtsSamplingIndex],
  channelCount: adtsChannelConfig,
  codec: 'mp4a.40.' + adtsObjectType,
- parsedCodec: 'mp4a.40.' + originalAdtsObjectType,
  manifestCodec
  };
  }
@@ -14047,8 +13505,7 @@ function initTrackConfig(track, observer, data, offset, audioCodec) {
  track.channelCount = config.channelCount;
  track.codec = config.codec;
  track.manifestCodec = config.manifestCodec;
- track.
- logger.log(`parsed codec:${track.parsedCodec}, codec:${track.codec}, rate:${config.samplerate}, channels:${config.channelCount}`);
+ logger.log(`parsed codec:${track.codec}, rate:${config.samplerate}, channels:${config.channelCount}`);
  }
  }
  function getFrameDuration(samplerate) {
@@ -14297,7 +13754,7 @@ class AACDemuxer extends BaseAudioDemuxer {
  // Look for ADTS header | 1111 1111 | 1111 X00X | where X can be either 0 or 1
  // Layer bits (position 14 and 15) in header should be always 0 for ADTS
  // More info https://wiki.multimedia.cx/index.php?title=ADTS
- const id3Data =
+ const id3Data = getID3Data(data, 0);
  let offset = (id3Data == null ? void 0 : id3Data.length) || 0;
  if (probe(data, offset)) {
  return false;
@@ -14490,6 +13947,20 @@ class BaseVideoParser {
  length: 0
  };
  }
+ getLastNalUnit(samples) {
+ var _VideoSample;
+ let VideoSample = this.VideoSample;
+ let lastUnit;
+ // try to fallback to previous sample if current one is empty
+ if (!VideoSample || VideoSample.units.length === 0) {
+ VideoSample = samples[samples.length - 1];
+ }
+ if ((_VideoSample = VideoSample) != null && _VideoSample.units) {
+ const units = VideoSample.units;
+ lastUnit = units[units.length - 1];
+ }
+ return lastUnit;
+ }
  pushAccessUnit(VideoSample, videoTrack) {
  if (VideoSample.units.length && VideoSample.frame) {
  // if sample does not have PTS/DTS, patch with last sample PTS/DTS
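`getLastNalUnit()` above supports the Annex-B scanner that follows, where NAL unit types are read as the low five bits of the byte after a start code. A tiny sketch of that type read, with an illustrative enum:

```ts
// Sketch only: the `array[i] & 0x1f` reads in the NAL unit scanner further below.
enum H264NalType {
  NDR = 1,
  IDR = 5,
  SEI = 6,
  SPS = 7,
  PPS = 8,
  AUD = 9,
}

function readNalType(firstByteAfterStartCode: number): number {
  return firstByteAfterStartCode & 0x1f;
}

console.log(readNalType(0x67) === H264NalType.SPS); // true (0x67 & 0x1f === 7)
```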
@@ -14512,122 +13983,6 @@ class BaseVideoParser {
|
|
14512
13983
|
logger.log(VideoSample.pts + '/' + VideoSample.dts + ':' + VideoSample.debug);
|
14513
13984
|
}
|
14514
13985
|
}
|
14515
|
-
parseNALu(track, array, last) {
|
14516
|
-
const len = array.byteLength;
|
14517
|
-
let state = track.naluState || 0;
|
14518
|
-
const lastState = state;
|
14519
|
-
const units = [];
|
14520
|
-
let i = 0;
|
14521
|
-
let value;
|
14522
|
-
let overflow;
|
14523
|
-
let unitType;
|
14524
|
-
let lastUnitStart = -1;
|
14525
|
-
let lastUnitType = 0;
|
14526
|
-
// logger.log('PES:' + Hex.hexDump(array));
|
14527
|
-
|
14528
|
-
if (state === -1) {
|
14529
|
-
// special use case where we found 3 or 4-byte start codes exactly at the end of previous PES packet
|
14530
|
-
lastUnitStart = 0;
|
14531
|
-
// NALu type is value read from offset 0
|
14532
|
-
lastUnitType = this.getNALuType(array, 0);
|
14533
|
-
state = 0;
|
14534
|
-
i = 1;
|
14535
|
-
}
|
14536
|
-
while (i < len) {
|
14537
|
-
value = array[i++];
|
14538
|
-
// optimization. state 0 and 1 are the predominant case. let's handle them outside of the switch/case
|
14539
|
-
if (!state) {
|
14540
|
-
state = value ? 0 : 1;
|
14541
|
-
continue;
|
14542
|
-
}
|
14543
|
-
if (state === 1) {
|
14544
|
-
state = value ? 0 : 2;
|
14545
|
-
continue;
|
14546
|
-
}
|
14547
|
-
// here we have state either equal to 2 or 3
|
14548
|
-
if (!value) {
|
14549
|
-
state = 3;
|
14550
|
-
} else if (value === 1) {
|
14551
|
-
overflow = i - state - 1;
|
14552
|
-
if (lastUnitStart >= 0) {
|
14553
|
-
const unit = {
|
14554
|
-
data: array.subarray(lastUnitStart, overflow),
|
14555
|
-
type: lastUnitType
|
14556
|
-
};
|
14557
|
-
if (track.lastNalu) {
|
14558
|
-
units.push(track.lastNalu);
|
14559
|
-
track.lastNalu = null;
|
14560
|
-
}
|
14561
|
-
// logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
|
14562
|
-
units.push(unit);
|
14563
|
-
} else {
|
14564
|
-
// lastUnitStart is undefined => this is the first start code found in this PES packet
|
14565
|
-
// first check if start code delimiter is overlapping between 2 PES packets,
|
14566
|
-
// ie it started in last packet (lastState not zero)
|
14567
|
-
// and ended at the beginning of this PES packet (i <= 4 - lastState)
|
14568
|
-
const lastUnit = track.lastNalu;
|
14569
|
-
if (lastUnit) {
|
14570
|
-
if (lastState && i <= 4 - lastState) {
|
14571
|
-
// start delimiter overlapping between PES packets
|
14572
|
-
// strip start delimiter bytes from the end of last NAL unit
|
14573
|
-
// check if lastUnit had a state different from zero
|
14574
|
-
if (lastUnit.state) {
|
14575
|
-
// strip last bytes
|
14576
|
-
lastUnit.data = lastUnit.data.subarray(0, lastUnit.data.byteLength - lastState);
|
14577
|
-
}
|
14578
|
-
}
|
14579
|
-
// If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
|
14580
|
-
|
14581
|
-
if (overflow > 0) {
|
14582
|
-
// logger.log('first NALU found with overflow:' + overflow);
|
14583
|
-
lastUnit.data = appendUint8Array(lastUnit.data, array.subarray(0, overflow));
|
14584
|
-
lastUnit.state = 0;
|
14585
|
-
units.push(lastUnit);
|
14586
|
-
track.lastNalu = null;
|
14587
|
-
}
|
14588
|
-
}
|
14589
|
-
}
|
14590
|
-
// check if we can read unit type
|
14591
|
-
if (i < len) {
|
14592
|
-
unitType = this.getNALuType(array, i);
|
14593
|
-
// logger.log('find NALU @ offset:' + i + ',type:' + unitType);
|
14594
|
-
lastUnitStart = i;
|
14595
|
-
lastUnitType = unitType;
|
14596
|
-
state = 0;
|
14597
|
-
} else {
|
14598
|
-
// not enough byte to read unit type. let's read it on next PES parsing
|
14599
|
-
state = -1;
|
14600
|
-
}
|
14601
|
-
} else {
|
14602
|
-
state = 0;
|
14603
|
-
}
|
14604
|
-
}
|
14605
|
-
if (lastUnitStart >= 0 && state >= 0) {
|
14606
|
-
const unit = {
|
14607
|
-
data: array.subarray(lastUnitStart, len),
|
14608
|
-
type: lastUnitType,
|
14609
|
-
state: state
|
14610
|
-
};
|
14611
|
-
if (!last) {
|
14612
|
-
track.lastNalu = unit;
|
14613
|
-
// logger.log('store NALu to push it on next PES');
|
14614
|
-
} else {
|
14615
|
-
units.push(unit);
|
14616
|
-
// logger.log('pushing NALU, type/size/state:' + unit.type + '/' + unit.data.byteLength + '/' + state);
|
14617
|
-
}
|
14618
|
-
} else if (units.length === 0) {
|
14619
|
-
// no NALu found
|
14620
|
-
// append pes.data to previous NAL unit
|
14621
|
-
const lastUnit = track.lastNalu;
|
14622
|
-
if (lastUnit) {
|
14623
|
-
lastUnit.data = appendUint8Array(lastUnit.data, array);
|
14624
|
-
units.push(lastUnit);
|
14625
|
-
track.lastNalu = null;
|
14626
|
-
}
|
14627
|
-
}
|
14628
|
-
track.naluState = state;
|
14629
|
-
return units;
|
14630
|
-
}
|
14631
13986
|
}
|
14632
13987
|
|
14633
13988
|
/**
|
@@ -14718,63 +14073,246 @@ class ExpGolomb {
|
|
14718
14073
|
return leadingZeroCount;
|
14719
14074
|
}
|
14720
14075
|
}
|
14721
|
-
// we exhausted word and still have not found a 1
|
14722
|
-
this.loadWord();
|
14723
|
-
return leadingZeroCount + this.skipLZ();
|
14724
|
-
}
|
14725
|
-
|
14726
|
-
// ():void
|
14727
|
-
skipUEG() {
|
14728
|
-
this.skipBits(1 + this.skipLZ());
|
14729
|
-
}
|
14730
|
-
|
14731
|
-
// ():void
|
14732
|
-
skipEG() {
|
14733
|
-
this.skipBits(1 + this.skipLZ());
|
14734
|
-
}
|
14735
|
-
|
14736
|
-
// ():uint
|
14737
|
-
readUEG() {
|
14738
|
-
const clz = this.skipLZ(); // :uint
|
14739
|
-
return this.readBits(clz + 1) - 1;
|
14740
|
-
}
|
14076
|
+
// we exhausted word and still have not found a 1
|
14077
|
+
this.loadWord();
|
14078
|
+
return leadingZeroCount + this.skipLZ();
|
14079
|
+
}
|
14080
|
+
|
14081
|
+
// ():void
|
14082
|
+
skipUEG() {
|
14083
|
+
this.skipBits(1 + this.skipLZ());
|
14084
|
+
}
|
14085
|
+
|
14086
|
+
// ():void
|
14087
|
+
skipEG() {
|
14088
|
+
this.skipBits(1 + this.skipLZ());
|
14089
|
+
}
|
14090
|
+
|
14091
|
+
// ():uint
|
14092
|
+
readUEG() {
|
14093
|
+
const clz = this.skipLZ(); // :uint
|
14094
|
+
return this.readBits(clz + 1) - 1;
|
14095
|
+
}
|
14096
|
+
|
14097
|
+
// ():int
|
14098
|
+
readEG() {
|
14099
|
+
const valu = this.readUEG(); // :int
|
14100
|
+
if (0x01 & valu) {
|
14101
|
+
// the number is odd if the low order bit is set
|
14102
|
+
return 1 + valu >>> 1; // add 1 to make it even, and divide by 2
|
14103
|
+
} else {
|
14104
|
+
return -1 * (valu >>> 1); // divide by two then make it negative
|
14105
|
+
}
|
14106
|
+
}
|
14107
|
+
|
14108
|
+
// Some convenience functions
|
14109
|
+
// :Boolean
|
14110
|
+
readBoolean() {
|
14111
|
+
return this.readBits(1) === 1;
|
14112
|
+
}
|
14113
|
+
|
14114
|
+
// ():int
|
14115
|
+
readUByte() {
|
14116
|
+
return this.readBits(8);
|
14117
|
+
}
|
14118
|
+
|
14119
|
+
// ():int
|
14120
|
+
readUShort() {
|
14121
|
+
return this.readBits(16);
|
14122
|
+
}
|
14123
|
+
|
14124
|
+
// ():int
|
14125
|
+
readUInt() {
|
14126
|
+
return this.readBits(32);
|
14127
|
+
}
|
14128
|
+
|
14129
|
+
/**
|
14130
|
+
* Advance the ExpGolomb decoder past a scaling list. The scaling
|
14131
|
+
* list is optionally transmitted as part of a sequence parameter
|
14132
|
+
* set and is not relevant to transmuxing.
|
14133
|
+
* @param count the number of entries in this scaling list
|
14134
|
+
* @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
|
14135
|
+
*/
|
14136
|
+
skipScalingList(count) {
|
14137
|
+
let lastScale = 8;
|
14138
|
+
let nextScale = 8;
|
14139
|
+
let deltaScale;
|
14140
|
+
for (let j = 0; j < count; j++) {
|
14141
|
+
if (nextScale !== 0) {
|
14142
|
+
deltaScale = this.readEG();
|
14143
|
+
nextScale = (lastScale + deltaScale + 256) % 256;
|
14144
|
+
}
|
14145
|
+
lastScale = nextScale === 0 ? lastScale : nextScale;
|
14146
|
+
}
|
14147
|
+
}
|
14148
|
+
|
14149
|
+
/**
|
14150
|
+
* Read a sequence parameter set and return some interesting video
|
14151
|
+
* properties. A sequence parameter set is the H264 metadata that
|
14152
|
+
* describes the properties of upcoming video frames.
|
14153
|
+
* @returns an object with configuration parsed from the
|
14154
|
+
* sequence parameter set, including the dimensions of the
|
14155
|
+
* associated video frames.
|
14156
|
+
*/
|
14157
|
+
readSPS() {
|
14158
|
+
let frameCropLeftOffset = 0;
|
14159
|
+
let frameCropRightOffset = 0;
|
14160
|
+
let frameCropTopOffset = 0;
|
14161
|
+
let frameCropBottomOffset = 0;
|
14162
|
+
let numRefFramesInPicOrderCntCycle;
|
14163
|
+
let scalingListCount;
|
14164
|
+
let i;
|
14165
|
+
const readUByte = this.readUByte.bind(this);
|
14166
|
+
const readBits = this.readBits.bind(this);
|
14167
|
+
const readUEG = this.readUEG.bind(this);
|
14168
|
+
const readBoolean = this.readBoolean.bind(this);
|
14169
|
+
const skipBits = this.skipBits.bind(this);
|
14170
|
+
const skipEG = this.skipEG.bind(this);
|
14171
|
+
const skipUEG = this.skipUEG.bind(this);
|
14172
|
+
const skipScalingList = this.skipScalingList.bind(this);
|
14173
|
+
readUByte();
|
14174
|
+
const profileIdc = readUByte(); // profile_idc
|
14175
|
+
readBits(5); // profileCompat constraint_set[0-4]_flag, u(5)
|
14176
|
+
skipBits(3); // reserved_zero_3bits u(3),
|
14177
|
+
readUByte(); // level_idc u(8)
|
14178
|
+
skipUEG(); // seq_parameter_set_id
|
14179
|
+
// some profiles have more optional data we don't need
|
14180
|
+
if (profileIdc === 100 || profileIdc === 110 || profileIdc === 122 || profileIdc === 244 || profileIdc === 44 || profileIdc === 83 || profileIdc === 86 || profileIdc === 118 || profileIdc === 128) {
|
14181
|
+
const chromaFormatIdc = readUEG();
|
14182
|
+
if (chromaFormatIdc === 3) {
|
14183
|
+
skipBits(1);
|
14184
|
+
} // separate_colour_plane_flag
|
14185
|
+
|
14186
|
+
skipUEG(); // bit_depth_luma_minus8
|
14187
|
+
skipUEG(); // bit_depth_chroma_minus8
|
14188
|
+
skipBits(1); // qpprime_y_zero_transform_bypass_flag
|
14189
|
+
if (readBoolean()) {
|
14190
|
+
// seq_scaling_matrix_present_flag
|
14191
|
+
scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
|
14192
|
+
for (i = 0; i < scalingListCount; i++) {
|
14193
|
+
if (readBoolean()) {
|
14194
|
+
// seq_scaling_list_present_flag[ i ]
|
14195
|
+
if (i < 6) {
|
14196
|
+
skipScalingList(16);
|
14197
|
+
} else {
|
14198
|
+
skipScalingList(64);
|
14199
|
+
}
|
14200
|
+
}
|
14201
|
+
}
|
14202
|
+
}
|
14203
|
+
}
|
14204
|
+
skipUEG(); // log2_max_frame_num_minus4
|
14205
|
+
const picOrderCntType = readUEG();
|
14206
|
+
if (picOrderCntType === 0) {
|
14207
|
+
readUEG(); // log2_max_pic_order_cnt_lsb_minus4
|
14208
|
+
} else if (picOrderCntType === 1) {
|
14209
|
+
skipBits(1); // delta_pic_order_always_zero_flag
|
14210
|
+
skipEG(); // offset_for_non_ref_pic
|
14211
|
+
skipEG(); // offset_for_top_to_bottom_field
|
14212
|
+
numRefFramesInPicOrderCntCycle = readUEG();
|
14213
|
+
for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
|
14214
|
+
skipEG();
|
14215
|
+
} // offset_for_ref_frame[ i ]
|
14216
|
+
}
|
14217
|
+
skipUEG(); // max_num_ref_frames
|
14218
|
+
skipBits(1); // gaps_in_frame_num_value_allowed_flag
|
14219
|
+
const picWidthInMbsMinus1 = readUEG();
|
14220
|
+
const picHeightInMapUnitsMinus1 = readUEG();
|
14221
|
+
const frameMbsOnlyFlag = readBits(1);
|
14222
|
+
if (frameMbsOnlyFlag === 0) {
|
14223
|
+
skipBits(1);
|
14224
|
+
} // mb_adaptive_frame_field_flag
|
14741
14225
|
|
14742
|
-
|
14743
|
-
|
14744
|
-
|
14745
|
-
|
14746
|
-
|
14747
|
-
|
14748
|
-
|
14749
|
-
return -1 * (valu >>> 1); // divide by two then make it negative
|
14226
|
+
skipBits(1); // direct_8x8_inference_flag
|
14227
|
+
if (readBoolean()) {
|
14228
|
+
// frame_cropping_flag
|
14229
|
+
frameCropLeftOffset = readUEG();
|
14230
|
+
frameCropRightOffset = readUEG();
|
14231
|
+
frameCropTopOffset = readUEG();
|
14232
|
+
frameCropBottomOffset = readUEG();
|
14750
14233
|
}
|
14234
|
+
let pixelRatio = [1, 1];
|
14235
|
+
if (readBoolean()) {
|
14236
|
+
// vui_parameters_present_flag
|
14237
|
+
if (readBoolean()) {
|
14238
|
+
// aspect_ratio_info_present_flag
|
14239
|
+
const aspectRatioIdc = readUByte();
|
14240
|
+
switch (aspectRatioIdc) {
|
14241
|
+
case 1:
|
14242
|
+
pixelRatio = [1, 1];
|
14243
|
+
break;
|
14244
|
+
case 2:
|
14245
|
+
pixelRatio = [12, 11];
|
14246
|
+
break;
|
14247
|
+
case 3:
|
14248
|
+
pixelRatio = [10, 11];
|
14249
|
+
break;
|
14250
|
+
case 4:
|
14251
|
+
pixelRatio = [16, 11];
|
14252
|
+
break;
|
14253
|
+
case 5:
|
14254
|
+
pixelRatio = [40, 33];
|
14255
|
+
break;
|
14256
|
+
case 6:
|
14257
|
+
pixelRatio = [24, 11];
|
14258
|
+
break;
|
14259
|
+
case 7:
|
14260
|
+
pixelRatio = [20, 11];
|
14261
|
+
break;
|
14262
|
+
case 8:
|
14263
|
+
pixelRatio = [32, 11];
|
14264
|
+
break;
|
14265
|
+
case 9:
|
14266
|
+
pixelRatio = [80, 33];
|
14267
|
+
break;
|
14268
|
+
case 10:
|
14269
|
+
pixelRatio = [18, 11];
|
14270
|
+
break;
|
14271
|
+
case 11:
|
14272
|
+
pixelRatio = [15, 11];
|
14273
|
+
break;
|
14274
|
+
case 12:
|
14275
|
+
pixelRatio = [64, 33];
|
14276
|
+
break;
|
14277
|
+
case 13:
|
14278
|
+
pixelRatio = [160, 99];
|
14279
|
+
break;
|
14280
|
+
case 14:
|
14281
|
+
pixelRatio = [4, 3];
|
14282
|
+
break;
|
14283
|
+
case 15:
|
14284
|
+
pixelRatio = [3, 2];
|
14285
|
+
break;
|
14286
|
+
case 16:
|
14287
|
+
pixelRatio = [2, 1];
|
14288
|
+
break;
|
14289
|
+
case 255:
|
14290
|
+
{
|
14291
|
+
pixelRatio = [readUByte() << 8 | readUByte(), readUByte() << 8 | readUByte()];
|
14292
|
+
break;
|
14293
|
+
}
|
14294
|
+
}
|
14295
|
+
}
|
14296
|
+
}
|
14297
|
+
return {
|
14298
|
+
width: Math.ceil((picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2),
|
14299
|
+
height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - (frameMbsOnlyFlag ? 2 : 4) * (frameCropTopOffset + frameCropBottomOffset),
|
14300
|
+
pixelRatio: pixelRatio
|
14301
|
+
};
|
14751
14302
|
}
|
14752
|
-
|
14753
|
-
|
14754
|
-
|
14755
|
-
|
14756
|
-
|
14757
|
-
|
14758
|
-
|
14759
|
-
// ():int
|
14760
|
-
readUByte() {
|
14761
|
-
return this.readBits(8);
|
14762
|
-
}
|
14763
|
-
|
14764
|
-
// ():int
|
14765
|
-
readUShort() {
|
14766
|
-
return this.readBits(16);
|
14767
|
-
}
|
14768
|
-
|
14769
|
-
// ():int
|
14770
|
-
readUInt() {
|
14771
|
-
return this.readBits(32);
|
14303
|
+
readSliceType() {
|
14304
|
+
// skip NALu type
|
14305
|
+
this.readUByte();
|
14306
|
+
// discard first_mb_in_slice
|
14307
|
+
this.readUEG();
|
14308
|
+
// return slice_type
|
14309
|
+
return this.readUEG();
|
14772
14310
|
}
|
14773
14311
|
}
|
14774
14312
|
|
14775
14313
|
class AvcVideoParser extends BaseVideoParser {
|
14776
|
-
|
14777
|
-
const units = this.
|
14314
|
+
parseAVCPES(track, textTrack, pes, last, duration) {
|
14315
|
+
const units = this.parseAVCNALu(track, pes.data);
|
14778
14316
|
let VideoSample = this.VideoSample;
|
14779
14317
|
let push;
|
14780
14318
|
let spsfound = false;
|
@@ -14799,7 +14337,7 @@ class AvcVideoParser extends BaseVideoParser {
  // only check slice type to detect KF in case SPS found in same packet (any keyframe is preceded by SPS ...)
  if (spsfound && data.length > 4) {
  // retrieve slice type by parsing beginning of NAL unit (follow H264 spec, slice_header definition) to detect keyframe embedded in NDR
- const sliceType =
+ const sliceType = new ExpGolomb(data).readSliceType();
  // 2 : I slice, 4 : SI slice, 7 : I slice, 9: SI slice
  // SI slice : A slice that is coded using intra prediction only and using quantisation of the prediction samples.
  // An SI slice can be coded such that its decoded samples can be constructed identically to an SP slice.
@@ -14853,7 +14391,8 @@ class AvcVideoParser extends BaseVideoParser {
  push = true;
  spsfound = true;
  const sps = unit.data;
- const
+ const expGolombDecoder = new ExpGolomb(sps);
+ const config = expGolombDecoder.readSPS();
  if (!track.sps || track.width !== config.width || track.height !== config.height || ((_track$pixelRatio = track.pixelRatio) == null ? void 0 : _track$pixelRatio[0]) !== config.pixelRatio[0] || ((_track$pixelRatio2 = track.pixelRatio) == null ? void 0 : _track$pixelRatio2[1]) !== config.pixelRatio[1]) {
  track.width = config.width;
  track.height = config.height;
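The SPS and slice-type reads above are Exp-Golomb decodes. A hedged sketch of ue(v) and se(v) decoding over a plain bit reader, with simplified bit plumbing compared to the `ExpGolomb` class in the bundle:

```ts
// Sketch only: unsigned (ue) and signed (se) Exp-Golomb decoding, the coding
// used for most SPS and slice_header fields read above. No bounds checking.
class BitReader {
  private bit = 0;
  constructor(private readonly data: Uint8Array) {}

  readBit(): number {
    const byte = this.data[this.bit >> 3];
    const value = (byte >> (7 - (this.bit & 7))) & 1;
    this.bit++;
    return value;
  }

  // ue(v): count leading zero bits, then read that many bits after the terminating 1
  readUEG(): number {
    let leadingZeros = 0;
    while (this.readBit() === 0) {
      leadingZeros++;
    }
    let value = 0;
    for (let i = 0; i < leadingZeros; i++) {
      value = (value << 1) | this.readBit();
    }
    return (1 << leadingZeros) - 1 + value;
  }

  // se(v): map ue(v) codes 0, 1, 2, 3, ... to 0, 1, -1, 2, ...
  readEG(): number {
    const codeNum = this.readUEG();
    return codeNum & 1 ? (codeNum + 1) >>> 1 : -(codeNum >>> 1);
  }
}

// Bits 10100110: "1" decodes ue(v) = 0, then "010" decodes ue(v) = 1, i.e. se(v) = 1
const reader = new BitReader(new Uint8Array([0b10100110]));
console.log(reader.readUEG(), reader.readEG()); // 0, 1
```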
@@ -14909,192 +14448,109 @@ class AvcVideoParser extends BaseVideoParser {
|
|
14909
14448
|
this.VideoSample = null;
|
14910
14449
|
}
|
14911
14450
|
}
|
14912
|
-
|
14913
|
-
|
14914
|
-
|
14915
|
-
|
14916
|
-
const
|
14917
|
-
|
14918
|
-
|
14919
|
-
|
14920
|
-
|
14921
|
-
|
14922
|
-
|
14923
|
-
|
14451
|
+
parseAVCNALu(track, array) {
|
14452
|
+
const len = array.byteLength;
|
14453
|
+
let state = track.naluState || 0;
|
14454
|
+
const lastState = state;
|
14455
|
+
const units = [];
|
14456
|
+
let i = 0;
|
14457
|
+
let value;
|
14458
|
+
let overflow;
|
14459
|
+
let unitType;
|
14460
|
+
let lastUnitStart = -1;
|
14461
|
+
let lastUnitType = 0;
|
14462
|
+
// logger.log('PES:' + Hex.hexDump(array));
|
14924
14463
|
|
14925
|
-
|
14926
|
-
|
14927
|
-
|
14928
|
-
|
14929
|
-
|
14930
|
-
|
14931
|
-
|
14932
|
-
let lastScale = 8;
|
14933
|
-
let nextScale = 8;
|
14934
|
-
let deltaScale;
|
14935
|
-
for (let j = 0; j < count; j++) {
|
14936
|
-
if (nextScale !== 0) {
|
14937
|
-
deltaScale = reader.readEG();
|
14938
|
-
nextScale = (lastScale + deltaScale + 256) % 256;
|
14939
|
-
}
|
14940
|
-
lastScale = nextScale === 0 ? lastScale : nextScale;
|
14464
|
+
if (state === -1) {
|
14465
|
+
// special use case where we found 3 or 4-byte start codes exactly at the end of previous PES packet
|
14466
|
+
lastUnitStart = 0;
|
14467
|
+
// NALu type is value read from offset 0
|
14468
|
+
lastUnitType = array[0] & 0x1f;
|
14469
|
+
state = 0;
|
14470
|
+
i = 1;
|
14941
14471
|
}
|
14942
|
-
|
14943
|
-
|
14944
|
-
|
14945
|
-
|
14946
|
-
|
14947
|
-
|
14948
|
-
|
14949
|
-
|
14950
|
-
|
14951
|
-
|
14952
|
-
|
14953
|
-
|
14954
|
-
|
14955
|
-
|
14956
|
-
|
14957
|
-
|
14958
|
-
|
14959
|
-
|
14960
|
-
|
14961
|
-
|
14962
|
-
|
14963
|
-
|
14964
|
-
|
14965
|
-
|
14966
|
-
|
14967
|
-
|
14968
|
-
|
14969
|
-
|
14970
|
-
|
14971
|
-
|
14972
|
-
|
14973
|
-
|
14974
|
-
|
14975
|
-
|
14976
|
-
|
14977
|
-
|
14978
|
-
|
14979
|
-
|
14980
|
-
|
14472
|
+
while (i < len) {
|
14473
|
+
value = array[i++];
|
14474
|
+
// optimization. state 0 and 1 are the predominant case. let's handle them outside of the switch/case
|
14475
|
+
if (!state) {
|
14476
|
+
state = value ? 0 : 1;
|
14477
|
+
continue;
|
14478
|
+
}
|
14479
|
+
if (state === 1) {
|
14480
|
+
state = value ? 0 : 2;
|
14481
|
+
continue;
|
14482
|
+
}
|
14483
|
+
// here we have state either equal to 2 or 3
|
14484
|
+
if (!value) {
|
14485
|
+
state = 3;
|
14486
|
+
} else if (value === 1) {
|
14487
|
+
overflow = i - state - 1;
|
14488
|
+
if (lastUnitStart >= 0) {
|
14489
|
+
const unit = {
|
14490
|
+
data: array.subarray(lastUnitStart, overflow),
|
14491
|
+
type: lastUnitType
|
14492
|
+
};
|
14493
|
+
// logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
|
14494
|
+
units.push(unit);
|
14495
|
+
} else {
|
14496
|
+
// lastUnitStart is undefined => this is the first start code found in this PES packet
|
14497
|
+
// first check if start code delimiter is overlapping between 2 PES packets,
|
14498
|
+
// ie it started in last packet (lastState not zero)
|
14499
|
+
// and ended at the beginning of this PES packet (i <= 4 - lastState)
|
14500
|
+
const lastUnit = this.getLastNalUnit(track.samples);
|
14501
|
+
if (lastUnit) {
|
14502
|
+
if (lastState && i <= 4 - lastState) {
|
14503
|
+
// start delimiter overlapping between PES packets
|
14504
|
+
// strip start delimiter bytes from the end of last NAL unit
|
14505
|
+
// check if lastUnit had a state different from zero
|
14506
|
+
if (lastUnit.state) {
|
14507
|
+
// strip last bytes
|
14508
|
+
lastUnit.data = lastUnit.data.subarray(0, lastUnit.data.byteLength - lastState);
|
14509
|
+
}
|
14510
|
+
}
|
14511
|
+
// If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
|
14981
14512
|
|
14982
|
-
|
14983
|
-
|
14984
|
-
|
14985
|
-
|
14986
|
-
// seq_scaling_matrix_present_flag
|
14987
|
-
scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
|
14988
|
-
for (i = 0; i < scalingListCount; i++) {
|
14989
|
-
if (readBoolean()) {
|
14990
|
-
// seq_scaling_list_present_flag[ i ]
|
14991
|
-
if (i < 6) {
|
14992
|
-
skipScalingList(16, eg);
|
14993
|
-
} else {
|
14994
|
-
skipScalingList(64, eg);
|
14513
|
+
if (overflow > 0) {
|
14514
|
+
// logger.log('first NALU found with overflow:' + overflow);
|
14515
|
+
lastUnit.data = appendUint8Array(lastUnit.data, array.subarray(0, overflow));
|
14516
|
+
lastUnit.state = 0;
|
14995
14517
|
}
|
14996
14518
|
}
|
14997
14519
|
}
|
14520
|
+
// check if we can read unit type
|
14521
|
+
if (i < len) {
|
14522
|
+
unitType = array[i] & 0x1f;
|
14523
|
+
// logger.log('find NALU @ offset:' + i + ',type:' + unitType);
|
14524
|
+
lastUnitStart = i;
|
14525
|
+
lastUnitType = unitType;
|
14526
|
+
state = 0;
|
14527
|
+
} else {
|
14528
|
+
// not enough byte to read unit type. let's read it on next PES parsing
|
14529
|
+
state = -1;
|
14530
|
+
}
|
14531
|
+
} else {
|
14532
|
+
state = 0;
|
14998
14533
|
}
|
14999
14534
|
}
|
15000
|
-
|
15001
|
-
|
15002
|
-
|
15003
|
-
|
15004
|
-
|
15005
|
-
|
15006
|
-
|
15007
|
-
|
15008
|
-
numRefFramesInPicOrderCntCycle = readUEG();
|
15009
|
-
for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
|
15010
|
-
skipEG();
|
15011
|
-
} // offset_for_ref_frame[ i ]
|
15012
|
-
}
|
15013
|
-
skipUEG(); // max_num_ref_frames
|
15014
|
-
skipBits(1); // gaps_in_frame_num_value_allowed_flag
|
15015
|
-
const picWidthInMbsMinus1 = readUEG();
|
15016
|
-
const picHeightInMapUnitsMinus1 = readUEG();
|
15017
|
-
const frameMbsOnlyFlag = readBits(1);
|
15018
|
-
if (frameMbsOnlyFlag === 0) {
|
15019
|
-
skipBits(1);
|
15020
|
-
} // mb_adaptive_frame_field_flag
|
15021
|
-
|
15022
|
-
skipBits(1); // direct_8x8_inference_flag
|
15023
|
-
if (readBoolean()) {
|
15024
|
-
// frame_cropping_flag
|
15025
|
-
frameCropLeftOffset = readUEG();
|
15026
|
-
frameCropRightOffset = readUEG();
|
15027
|
-
frameCropTopOffset = readUEG();
|
15028
|
-
frameCropBottomOffset = readUEG();
|
14535
|
+
if (lastUnitStart >= 0 && state >= 0) {
|
14536
|
+
const unit = {
|
14537
|
+
data: array.subarray(lastUnitStart, len),
|
14538
|
+
type: lastUnitType,
|
14539
|
+
state: state
|
14540
|
+
};
|
14541
|
+
units.push(unit);
|
14542
|
+
// logger.log('pushing NALU, type/size/state:' + unit.type + '/' + unit.data.byteLength + '/' + state);
|
15029
14543
|
}
|
15030
|
-
|
15031
|
-
if (
|
15032
|
-
//
|
15033
|
-
|
15034
|
-
|
15035
|
-
|
15036
|
-
switch (aspectRatioIdc) {
|
15037
|
-
case 1:
|
15038
|
-
pixelRatio = [1, 1];
|
15039
|
-
break;
|
15040
|
-
case 2:
|
15041
|
-
pixelRatio = [12, 11];
|
15042
|
-
break;
|
15043
|
-
case 3:
|
15044
|
-
pixelRatio = [10, 11];
|
15045
|
-
break;
|
15046
|
-
case 4:
|
15047
|
-
pixelRatio = [16, 11];
|
15048
|
-
break;
|
15049
|
-
case 5:
|
15050
|
-
pixelRatio = [40, 33];
|
15051
|
-
break;
|
15052
|
-
case 6:
|
15053
|
-
pixelRatio = [24, 11];
|
15054
|
-
break;
|
15055
|
-
case 7:
|
15056
|
-
pixelRatio = [20, 11];
|
15057
|
-
break;
|
15058
|
-
case 8:
|
15059
|
-
pixelRatio = [32, 11];
|
15060
|
-
break;
|
15061
|
-
case 9:
|
15062
|
-
pixelRatio = [80, 33];
|
15063
|
-
break;
|
15064
|
-
case 10:
|
15065
|
-
pixelRatio = [18, 11];
|
15066
|
-
break;
|
15067
|
-
case 11:
|
15068
|
-
pixelRatio = [15, 11];
|
15069
|
-
break;
|
15070
|
-
case 12:
|
15071
|
-
pixelRatio = [64, 33];
|
15072
|
-
break;
|
15073
|
-
case 13:
|
15074
|
-
pixelRatio = [160, 99];
|
15075
|
-
break;
|
15076
|
-
case 14:
|
15077
|
-
pixelRatio = [4, 3];
|
15078
|
-
break;
|
15079
|
-
case 15:
|
15080
|
-
pixelRatio = [3, 2];
|
15081
|
-
break;
|
15082
|
-
case 16:
|
15083
|
-
pixelRatio = [2, 1];
|
15084
|
-
break;
|
15085
|
-
case 255:
|
15086
|
-
{
|
15087
|
-
pixelRatio = [readUByte() << 8 | readUByte(), readUByte() << 8 | readUByte()];
|
15088
|
-
break;
|
15089
|
-
}
|
15090
|
-
}
|
14544
|
+
// no NALu found
|
14545
|
+
if (units.length === 0) {
|
14546
|
+
// append pes.data to previous NAL unit
|
14547
|
+
const lastUnit = this.getLastNalUnit(track.samples);
|
14548
|
+
if (lastUnit) {
|
14549
|
+
lastUnit.data = appendUint8Array(lastUnit.data, array);
|
15091
14550
|
}
|
15092
14551
|
}
|
15093
|
-
|
15094
|
-
|
15095
|
-
height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - (frameMbsOnlyFlag ? 2 : 4) * (frameCropTopOffset + frameCropBottomOffset),
|
15096
|
-
pixelRatio: pixelRatio
|
15097
|
-
};
|
14552
|
+
track.naluState = state;
|
14553
|
+
return units;
|
15098
14554
|
}
|
15099
14555
|
}
|
15100
14556
|
|
@@ -15112,7 +14568,7 @@ class SampleAesDecrypter {
  });
  }
  decryptBuffer(encryptedData) {
- return this.decrypter.decrypt(encryptedData, this.keyData.key.buffer, this.keyData.iv.buffer
+ return this.decrypter.decrypt(encryptedData, this.keyData.key.buffer, this.keyData.iv.buffer);
  }

  // AAC - encrypt all full 16 bytes blocks starting from offset 16
@@ -15226,7 +14682,7 @@ class TSDemuxer {
  this.observer = observer;
  this.config = config;
  this.typeSupported = typeSupported;
- this.videoParser =
+ this.videoParser = new AvcVideoParser();
  }
  static probe(data) {
  const syncOffset = TSDemuxer.syncOffset(data);
@@ -15391,16 +14847,7 @@ class TSDemuxer {
  case videoPid:
  if (stt) {
  if (videoData && (pes = parsePES(videoData))) {
-
- switch (videoTrack.segmentCodec) {
- case 'avc':
- this.videoParser = new AvcVideoParser();
- break;
- }
- }
- if (this.videoParser !== null) {
- this.videoParser.parsePES(videoTrack, textTrack, pes, false, this._duration);
- }
+ this.videoParser.parseAVCPES(videoTrack, textTrack, pes, false, this._duration);
  }
  videoData = {
  data: [],
@@ -15562,17 +15009,8 @@ class TSDemuxer {
  // try to parse last PES packets
  let pes;
  if (videoData && (pes = parsePES(videoData))) {
-
-
- case 'avc':
- this.videoParser = new AvcVideoParser();
- break;
- }
- }
- if (this.videoParser !== null) {
- this.videoParser.parsePES(videoTrack, textTrack, pes, true, this._duration);
- videoTrack.pesData = null;
- }
+ this.videoParser.parseAVCPES(videoTrack, textTrack, pes, true, this._duration);
+ videoTrack.pesData = null;
  } else {
  // either avcData null or PES truncated, keep it for next frag parsing
  videoTrack.pesData = videoData;
@@ -15874,10 +15312,7 @@ function parsePMT(data, offset, typeSupported, isSampleAes) {
  case 0x87:
  throw new Error('Unsupported EC-3 in M2TS found');
  case 0x24:
-
- {
- throw new Error('Unsupported HEVC in M2TS found');
- }
+ throw new Error('Unsupported HEVC in M2TS found');
  }
  // move to the next table entry
  // skip past the elementary stream descriptors, if present
@@ -16020,11 +15455,11 @@ class MP3Demuxer extends BaseAudioDemuxer {
  // Look for MPEG header | 1111 1111 | 111X XYZX | where X can be either 0 or 1 and Y or Z should be 1
  // Layer bits (position 14 and 15) in header should be always different from 0 (Layer I or Layer II or Layer III)
  // More info http://www.mp3-tech.org/programmer/frame_header.html
- const id3Data =
+ const id3Data = getID3Data(data, 0);
  let offset = (id3Data == null ? void 0 : id3Data.length) || 0;

  // Check for ac-3|ec-3 sync bytes and return false if present
- if (id3Data && data[offset] === 0x0b && data[offset + 1] === 0x77 &&
+ if (id3Data && data[offset] === 0x0b && data[offset + 1] === 0x77 && getTimeStamp(id3Data) !== undefined &&
  // check the bsid to confirm ac-3 or ec-3 (not mp3)
  getAudioBSID(data, offset) <= 16) {
  return false;
@@ -16099,8 +15534,6 @@ class MP4 {
  avc1: [],
  // codingname
  avcC: [],
- hvc1: [],
- hvcC: [],
  btrt: [],
  dinf: [],
  dref: [],
@@ -16525,10 +15958,8 @@ class MP4 {
  return MP4.box(MP4.types.stsd, MP4.STSD, MP4.ac3(track));
  }
  return MP4.box(MP4.types.stsd, MP4.STSD, MP4.mp4a(track));
- } else if (track.segmentCodec === 'avc') {
- return MP4.box(MP4.types.stsd, MP4.STSD, MP4.avc1(track));
  } else {
- return MP4.box(MP4.types.stsd, MP4.STSD, MP4.
+ return MP4.box(MP4.types.stsd, MP4.STSD, MP4.avc1(track));
  }
  }
  static tkhd(track) {
@@ -16666,84 +16097,6 @@ class MP4 {
  const result = appendUint8Array(MP4.FTYP, movie);
  return result;
  }
- static hvc1(track) {
-   const ps = track.params;
-   const units = [track.vps, track.sps, track.pps];
-   const NALuLengthSize = 4;
-   const config = new Uint8Array([0x01, ps.general_profile_space << 6 | (ps.general_tier_flag ? 32 : 0) | ps.general_profile_idc, ps.general_profile_compatibility_flags[0], ps.general_profile_compatibility_flags[1], ps.general_profile_compatibility_flags[2], ps.general_profile_compatibility_flags[3], ps.general_constraint_indicator_flags[0], ps.general_constraint_indicator_flags[1], ps.general_constraint_indicator_flags[2], ps.general_constraint_indicator_flags[3], ps.general_constraint_indicator_flags[4], ps.general_constraint_indicator_flags[5], ps.general_level_idc, 240 | ps.min_spatial_segmentation_idc >> 8, 255 & ps.min_spatial_segmentation_idc, 252 | ps.parallelismType, 252 | ps.chroma_format_idc, 248 | ps.bit_depth_luma_minus8, 248 | ps.bit_depth_chroma_minus8, 0x00, parseInt(ps.frame_rate.fps), NALuLengthSize - 1 | ps.temporal_id_nested << 2 | ps.num_temporal_layers << 3 | (ps.frame_rate.fixed ? 64 : 0), units.length]);
-
-   // compute hvcC size in bytes
-   let length = config.length;
-   for (let i = 0; i < units.length; i += 1) {
-     length += 3;
-     for (let j = 0; j < units[i].length; j += 1) {
-       length += 2 + units[i][j].length;
-     }
-   }
-   const hvcC = new Uint8Array(length);
-   hvcC.set(config, 0);
-   length = config.length;
-   // append parameter set units: one vps, one or more sps and pps
-   const iMax = units.length - 1;
-   for (let i = 0; i < units.length; i += 1) {
-     hvcC.set(new Uint8Array([32 + i | (i === iMax ? 128 : 0), 0x00, units[i].length]), length);
-     length += 3;
-     for (let j = 0; j < units[i].length; j += 1) {
-       hvcC.set(new Uint8Array([units[i][j].length >> 8, units[i][j].length & 255]), length);
-       length += 2;
-       hvcC.set(units[i][j], length);
-       length += units[i][j].length;
-     }
-   }
-   const hvcc = MP4.box(MP4.types.hvcC, hvcC);
-   const width = track.width;
-   const height = track.height;
-   const hSpacing = track.pixelRatio[0];
-   const vSpacing = track.pixelRatio[1];
-   return MP4.box(MP4.types.hvc1, new Uint8Array([0x00, 0x00, 0x00,
-   // reserved
-   0x00, 0x00, 0x00,
-   // reserved
-   0x00, 0x01,
-   // data_reference_index
-   0x00, 0x00,
-   // pre_defined
-   0x00, 0x00,
-   // reserved
-   0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-   // pre_defined
-   width >> 8 & 0xff, width & 0xff,
-   // width
-   height >> 8 & 0xff, height & 0xff,
-   // height
-   0x00, 0x48, 0x00, 0x00,
-   // horizresolution
-   0x00, 0x48, 0x00, 0x00,
-   // vertresolution
-   0x00, 0x00, 0x00, 0x00,
-   // reserved
-   0x00, 0x01,
-   // frame_count
-   0x12, 0x64, 0x61, 0x69, 0x6c,
-   // dailymotion/hls.js
-   0x79, 0x6d, 0x6f, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x68, 0x6c, 0x73, 0x2e, 0x6a, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-   // compressorname
-   0x00, 0x18,
-   // depth = 24
-   0x11, 0x11]),
-   // pre_defined = -1
-   hvcc, MP4.box(MP4.types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80,
-   // bufferSizeDB
-   0x00, 0x2d, 0xc6, 0xc0,
-   // maxBitrate
-   0x00, 0x2d, 0xc6, 0xc0])),
-   // avgBitrate
-   MP4.box(MP4.types.pasp, new Uint8Array([hSpacing >> 24,
-   // hSpacing
-   hSpacing >> 16 & 0xff, hSpacing >> 8 & 0xff, hSpacing & 0xff, vSpacing >> 24,
-   // vSpacing
-   vSpacing >> 16 & 0xff, vSpacing >> 8 & 0xff, vSpacing & 0xff])));
- }
  }
  MP4.types = void 0;
  MP4.HDLR_TYPES = void 0;
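The removed `MP4.hvc1()` writer above assembled an `hvcC` configuration record: a fixed header followed by the VPS/SPS/PPS arrays, with a 3-byte header per parameter-set group and a 2-byte length prefix per NAL unit. A small sketch of just the sizing step that code performed (array shapes are assumptions mirroring `[track.vps, track.sps, track.pps]`):

```ts
// Sketch of the hvcC payload sizing loop from the removed hvc1() writer.
// `parameterSets` mirrors [vps[], sps[], pps[]]: three groups of raw NAL units.
function hvcCPayloadSize(
  configHeaderLength: number,
  parameterSets: Uint8Array[][],
): number {
  let length = configHeaderLength; // fixed configuration header bytes
  for (const group of parameterSets) {
    length += 3; // per-group array header (NAL type/completeness + count)
    for (const nalu of group) {
      length += 2 + nalu.length; // 2-byte nalUnitLength + NAL unit payload
    }
  }
  return length;
}

// Example: one VPS, one SPS, one PPS of 20/40/8 bytes with a 23-byte header:
// 23 + 3*3 + (2+20) + (2+40) + (2+8) = 106
const size = hvcCPayloadSize(23, [
  [new Uint8Array(20)],
  [new Uint8Array(40)],
  [new Uint8Array(8)],
]);
```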
@@ -17119,9 +16472,9 @@ class MP4Remuxer {
  const foundOverlap = delta < -1;
  if (foundHole || foundOverlap) {
    if (foundHole) {
-     logger.warn(
+     logger.warn(`AVC: ${toMsFromMpegTsClock(delta, true)} ms (${delta}dts) hole between fragments detected at ${timeOffset.toFixed(3)}`);
    } else {
-     logger.warn(
+     logger.warn(`AVC: ${toMsFromMpegTsClock(-delta, true)} ms (${delta}dts) overlapping between fragments detected at ${timeOffset.toFixed(3)}`);
    }
    if (!foundOverlap || nextAvcDts >= inputSamples[0].pts || chromeVersion) {
      firstDTS = nextAvcDts;
@@ -17130,24 +16483,12 @@ class MP4Remuxer {
  inputSamples[0].dts = firstDTS;
  inputSamples[0].pts = firstPTS;
  } else {
-   let isPTSOrderRetained = true;
    for (let i = 0; i < inputSamples.length; i++) {
-     if (inputSamples[i].dts > firstPTS
+     if (inputSamples[i].dts > firstPTS) {
        break;
      }
-     const prevPTS = inputSamples[i].pts;
      inputSamples[i].dts -= delta;
      inputSamples[i].pts -= delta;
-
-     // check to see if this sample's PTS order has changed
-     // relative to the next one
-     if (i < inputSamples.length - 1) {
-       const nextSamplePTS = inputSamples[i + 1].pts;
-       const currentSamplePTS = inputSamples[i].pts;
-       const currentOrder = nextSamplePTS <= currentSamplePTS;
-       const prevOrder = nextSamplePTS <= prevPTS;
-       isPTSOrderRetained = currentOrder == prevOrder;
-     }
    }
  }
  logger.log(`Video: Initial PTS/DTS adjusted: ${toMsFromMpegTsClock(firstPTS, true)}/${toMsFromMpegTsClock(firstDTS, true)}, delta: ${toMsFromMpegTsClock(delta, true)} ms`);
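Both remuxer hunks above deal with the delta between the expected next AVC DTS and the first sample of the incoming fragment: an overlap is flagged when the delta drops below -1 (in 90 kHz ticks), a hole when it exceeds a larger threshold not shown in this excerpt, and the new log lines report the delta in milliseconds. A hedged sketch of that classification; `maxHoleTicks` is a placeholder for the library's actual hole tolerance:

```ts
const MPEG_TS_CLOCK_HZ = 90_000;

// Convert a 90 kHz tick delta to rounded milliseconds, as the warnings above do.
function ticksToMs(ticks: number): number {
  return Math.round((ticks / MPEG_TS_CLOCK_HZ) * 1000);
}

type JoinKind = 'hole' | 'overlap' | 'contiguous';

// Classify how a new fragment lines up against the expected next DTS.
// `maxHoleTicks` is an assumed parameter; hls.js derives its own tolerance.
function classifyFragmentJoin(
  expectedNextDts: number,
  firstSampleDts: number,
  maxHoleTicks: number,
): { kind: JoinKind; gapMs: number } {
  const delta = firstSampleDts - expectedNextDts;
  if (delta > maxHoleTicks) {
    return { kind: 'hole', gapMs: ticksToMs(delta) };
  }
  if (delta < -1) {
    return { kind: 'overlap', gapMs: ticksToMs(-delta) };
  }
  return { kind: 'contiguous', gapMs: 0 };
}
```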
@@ -17295,7 +16636,7 @@ class MP4Remuxer {
  }
  }
  }
- // next AVC
+ // next AVC sample DTS should be equal to last sample DTS + last sample duration (in PES timescale)
  mp4SampleDuration = stretchedLastFrame || !mp4SampleDuration ? averageSampleDuration : mp4SampleDuration;
  this.nextAvcDts = nextAvcDts = lastDTS + mp4SampleDuration;
  this.videoSampleDuration = mp4SampleDuration;
@@ -17428,7 +16769,7 @@ class MP4Remuxer {
  logger.warn(`[mp4-remuxer]: Injecting ${missing} audio frame @ ${(nextPts / inputTimeScale).toFixed(3)}s due to ${Math.round(1000 * delta / inputTimeScale)} ms gap.`);
  for (let j = 0; j < missing; j++) {
    const newStamp = Math.max(nextPts, 0);
-   let fillFrame = AAC.getSilentFrame(track.
+   let fillFrame = AAC.getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
    if (!fillFrame) {
      logger.log('[mp4-remuxer]: Unable to get silent frame for given audio codec; duplicating last frame instead.');
      fillFrame = sample.unit.subarray();
@@ -17556,7 +16897,7 @@ class MP4Remuxer {
  // samples count of this segment's duration
  const nbSamples = Math.ceil((endDTS - startDTS) / frameDuration);
  // silent frame
- const silentFrame = AAC.getSilentFrame(track.
+ const silentFrame = AAC.getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
  logger.warn('[mp4-remuxer]: remux empty Audio');
  // Can't remux if we can't generate a silent frame...
  if (!silentFrame) {
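Both `AAC.getSilentFrame` call sites above now prefer the playlist codec over the one parsed from the segment (`track.manifestCodec || track.codec`), and the surrounding code duplicates the previous sample when no canned silent frame exists for that codec. A toy sketch of that fallback chain (the lookup table here is a stand-in, not hls.js's real silent-frame data):

```ts
// Stand-in table: real silent-frame payloads live inside AAC.getSilentFrame()
// and depend on codec and channel count; these bytes are placeholders only.
const SILENT_FRAMES: Record<string, Uint8Array | undefined> = {
  'mp4a.40.2': new Uint8Array(8), // placeholder, not a real AAC frame
};

function getFillFrame(
  manifestCodec: string | undefined,
  parsedCodec: string,
  lastSampleUnit: Uint8Array,
): Uint8Array {
  // Prefer the codec signalled in the playlist, then the codec parsed from the stream.
  const codec = manifestCodec || parsedCodec;
  const silent = SILENT_FRAMES[codec];
  // No silent frame for this codec: duplicate the last sample instead.
  return silent ?? lastSampleUnit.subarray();
}

// Usage: fill a detected audio gap with copies of this frame.
const fill = getFillFrame('mp4a.40.2', 'mp4a.40.5', new Uint8Array(12));
```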
@@ -17947,15 +17288,13 @@ class Transmuxer {
  initSegmentData
  } = transmuxConfig;
  const keyData = getEncryptionType(uintData, decryptdata);
- if (keyData &&
+ if (keyData && keyData.method === 'AES-128') {
    const decrypter = this.getDecrypter();
-   const aesMode = getAesModeFromFullSegmentMethod(keyData.method);
-
    // Software decryption is synchronous; webCrypto is not
    if (decrypter.isSync()) {
      // Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
      // data is handled in the flush() call
-     let decryptedData = decrypter.softwareDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer
+     let decryptedData = decrypter.softwareDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer);
      // For Low-Latency HLS Parts, decrypt in place, since part parsing is expected on push progress
      const loadingParts = chunkMeta.part > -1;
      if (loadingParts) {
@@ -17967,7 +17306,7 @@ class Transmuxer {
  }
  uintData = new Uint8Array(decryptedData);
  } else {
-   this.decryptionPromise = decrypter.webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer
+   this.decryptionPromise = decrypter.webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer).then(decryptedData => {
    // Calling push here is important; if flush() is called while this is still resolving, this ensures that
    // the decrypted data has been transmuxed
    const result = this.push(decryptedData, null, chunkMeta);
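In this release the transmuxer only takes the path above for `AES-128` full-segment keys (the canary-only AES-mode lookup is gone), and decryption then branches on `decrypter.isSync()` between the progressive software path and the promise-returning WebCrypto path. A condensed sketch of that branching; the `DecrypterLike` interface is an assumed reduction of hls.js's Decrypter, not its actual API surface:

```ts
// Assumed, reduced interface for illustration; hls.js's Decrypter has more to it.
interface DecrypterLike {
  isSync(): boolean;
  softwareDecrypt(data: Uint8Array, key: ArrayBuffer, iv: ArrayBuffer): ArrayBuffer | null;
  webCryptoDecrypt(data: Uint8Array, key: ArrayBuffer, iv: ArrayBuffer): Promise<ArrayBuffer>;
}

function decryptSegment(
  decrypter: DecrypterLike,
  data: Uint8Array,
  key: ArrayBuffer,
  iv: ArrayBuffer,
  onDecrypted: (clear: Uint8Array) => void,
): Promise<void> | void {
  if (decrypter.isSync()) {
    // Progressive software decryption: may return null and buffer internally,
    // with the remainder handled by a later flush (as the comments above note).
    const clear = decrypter.softwareDecrypt(data, key, iv);
    if (clear) {
      onDecrypted(new Uint8Array(clear));
    }
    return;
  }
  // WebCrypto is asynchronous: hand the result back when the promise resolves,
  // so a later flush() sees the decrypted data already pushed.
  return decrypter.webCryptoDecrypt(data, key, iv).then((clear) => {
    onDecrypted(new Uint8Array(clear));
  });
}
```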
@@ -18621,7 +17960,14 @@ class TransmuxerInterface {
  this.observer = new EventEmitter();
  this.observer.on(Events.FRAG_DECRYPTED, forwardMessage);
  this.observer.on(Events.ERROR, forwardMessage);
- const
+ const MediaSource = getMediaSource(config.preferManagedMediaSource) || {
+   isTypeSupported: () => false
+ };
+ const m2tsTypeSupported = {
+   mpeg: MediaSource.isTypeSupported('audio/mpeg'),
+   mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
+   ac3: false
+ };

  // navigator.vendor is not always available in Web Worker
  // refer to https://developer.mozilla.org/en-US/docs/Web/API/WorkerGlobalScope/navigator
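The lines added above probe `MediaSource.isTypeSupported()` once, substituting a stub that reports nothing as supported when no MediaSource implementation can be resolved, and record which M2TS audio flavours can be passed through (`ac3` is hard-coded to `false` in this light build). A standalone sketch of the same probe; `resolveMediaSource` is a hypothetical helper standing in for hls.js's `getMediaSource(config.preferManagedMediaSource)`:

```ts
// Hypothetical resolver standing in for getMediaSource(preferManagedMediaSource).
function resolveMediaSource(): typeof MediaSource | undefined {
  return (globalThis as any).MediaSource as typeof MediaSource | undefined;
}

// Fall back to a stub so the probe stays safe where MediaSource is unavailable
// (for example in some worker scopes).
const mediaSource = resolveMediaSource() || {
  isTypeSupported: (_mimeType: string) => false,
};

const m2tsTypeSupported = {
  mpeg: mediaSource.isTypeSupported('audio/mpeg'),
  mp3: mediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
  ac3: false, // hard-coded off here, matching the light build in this diff
};
```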
@@ -18886,9 +18232,8 @@ const STALL_MINIMUM_DURATION_MS = 250;
  const MAX_START_GAP_JUMP = 2.0;
  const SKIP_BUFFER_HOLE_STEP_SECONDS = 0.1;
  const SKIP_BUFFER_RANGE_START = 0.05;
- class GapController
+ class GapController {
  constructor(config, media, fragmentTracker, hls) {
-   super('gap-controller', hls.logger);
    this.config = void 0;
    this.media = null;
    this.fragmentTracker = void 0;
@@ -18898,7 +18243,6 @@ class GapController extends Logger {
  this.stalled = null;
  this.moved = false;
  this.seeking = false;
- this.ended = 0;
  this.config = config;
  this.media = media;
  this.fragmentTracker = fragmentTracker;
@@ -18916,7 +18260,7 @@ class GapController extends Logger {
  *
  * @param lastCurrentTime - Previously read playhead position
  */
- poll(lastCurrentTime, activeFrag
+ poll(lastCurrentTime, activeFrag) {
  const {
    config,
    media,
@@ -18935,7 +18279,6 @@ class GapController extends Logger {

  // The playhead is moving, no-op
  if (currentTime !== lastCurrentTime) {
-   this.ended = 0;
    this.moved = true;
    if (!seeking) {
      this.nudgeRetry = 0;
@@ -18944,7 +18287,7 @@ class GapController extends Logger {
  // The playhead is now moving, but was previously stalled
  if (this.stallReported) {
    const _stalledDuration = self.performance.now() - stalled;
-
+   logger.warn(`playback not stuck anymore @${currentTime}, after ${Math.round(_stalledDuration)}ms`);
    this.stallReported = false;
  }
  this.stalled = null;
@@ -18980,6 +18323,7 @@ class GapController extends Logger {
  // Skip start gaps if we haven't played, but the last poll detected the start of a stall
  // The addition poll gives the browser a chance to jump the gap for us
  if (!this.moved && this.stalled !== null) {
+   var _level$details;
    // There is no playable buffer (seeked, waiting for buffer)
    const isBuffered = bufferInfo.len > 0;
    if (!isBuffered && !nextStart) {
@@ -18991,8 +18335,9 @@ class GapController extends Logger {
  // When joining a live stream with audio tracks, account for live playlist window sliding by allowing
  // a larger jump over start gaps caused by the audio-stream-controller buffering a start fragment
  // that begins over 1 target duration after the video start position.
- const
- const
+ const level = this.hls.levels ? this.hls.levels[this.hls.currentLevel] : null;
+ const isLive = level == null ? void 0 : (_level$details = level.details) == null ? void 0 : _level$details.live;
+ const maxStartGapJump = isLive ? level.details.targetduration * 2 : MAX_START_GAP_JUMP;
  const partialOrGap = this.fragmentTracker.getPartialFragment(currentTime);
  if (startJump > 0 && (startJump <= maxStartGapJump || partialOrGap)) {
    if (!media.paused) {
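The three `const` lines added above widen the start-gap jump for live playlists: instead of the fixed 2-second `MAX_START_GAP_JUMP`, a start gap of up to two target durations may be skipped when the current level is live. The same decision in isolation, with types trimmed to the fields the hunk touches:

```ts
const MAX_START_GAP_JUMP = 2.0; // seconds, same constant declared earlier in this file

interface LevelLike {
  details?: { live?: boolean; targetduration: number };
}

// How large a gap at the start position the controller may jump over.
function maxStartGapJump(level: LevelLike | null): number {
  const details = level?.details;
  return details?.live ? details.targetduration * 2 : MAX_START_GAP_JUMP;
}

// e.g. a live level with a 6s target duration allows a 12s start-gap jump.
const liveJump = maxStartGapJump({ details: { live: true, targetduration: 6 } }); // 12
```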
@@ -19010,17 +18355,6 @@ class GapController extends Logger {
  }
  const stalledDuration = tnow - stalled;
  if (!seeking && stalledDuration >= STALL_MINIMUM_DURATION_MS) {
-   // Dispatch MEDIA_ENDED when media.ended/ended event is not signalled at end of stream
-   if (state === State.ENDED && !(levelDetails != null && levelDetails.live) && Math.abs(currentTime - ((levelDetails == null ? void 0 : levelDetails.edge) || 0)) < 1) {
-     if (stalledDuration < 1000 || this.ended) {
-       return;
-     }
-     this.ended = currentTime;
-     this.hls.trigger(Events.MEDIA_ENDED, {
-       stalled: true
-     });
-     return;
-   }
    // Report stalling after trying to fix
    this._reportStall(bufferInfo);
    if (!this.media) {
@@ -19064,7 +18398,7 @@ class GapController extends Logger {
  // needs to cross some sort of threshold covering all source-buffers content
  // to start playing properly.
  if ((bufferInfo.len > config.maxBufferHole || bufferInfo.nextStart && bufferInfo.nextStart - currentTime < config.maxBufferHole) && stalledDurationMs > config.highBufferWatchdogPeriod * 1000) {
-
+   logger.warn('Trying to nudge playhead over buffer-hole');
    // Try to nudge currentTime over a buffer hole if we've been stalling for the configured amount of seconds
    // We only try to jump the hole if it's under the configured size
    // Reset stalled so to rearm watchdog timer
@@ -19088,7 +18422,7 @@ class GapController extends Logger {
  // Report stalled error once
  this.stallReported = true;
  const error = new Error(`Playback stalling at @${media.currentTime} due to low buffer (${JSON.stringify(bufferInfo)})`);
-
+ logger.warn(error.message);
  hls.trigger(Events.ERROR, {
    type: ErrorTypes.MEDIA_ERROR,
    details: ErrorDetails.BUFFER_STALLED_ERROR,
@@ -19156,7 +18490,7 @@ class GapController extends Logger {
  }
  }
  const targetTime = Math.max(startTime + SKIP_BUFFER_RANGE_START, currentTime + SKIP_BUFFER_HOLE_STEP_SECONDS);
-
+ logger.warn(`skipping hole, adjusting currentTime from ${currentTime} to ${targetTime}`);
  this.moved = true;
  this.stalled = null;
  media.currentTime = targetTime;
@@ -19197,7 +18531,7 @@ class GapController extends Logger {
  const targetTime = currentTime + (nudgeRetry + 1) * config.nudgeOffset;
  // playback stalled in buffered area ... let's nudge currentTime to try to overcome this
  const error = new Error(`Nudging 'currentTime' from ${currentTime} to ${targetTime}`);
-
+ logger.warn(error.message);
  media.currentTime = targetTime;
  hls.trigger(Events.ERROR, {
    type: ErrorTypes.MEDIA_ERROR,
@@ -19207,7 +18541,7 @@ class GapController extends Logger {
  });
  } else {
    const error = new Error(`Playhead still not moving while enough data buffered @${currentTime} after ${config.nudgeMaxRetry} nudges`);
-
+   logger.error(error.message);
    hls.trigger(Events.ERROR, {
      type: ErrorTypes.MEDIA_ERROR,
      details: ErrorDetails.BUFFER_STALLED_ERROR,
@@ -19222,7 +18556,7 @@ const TICK_INTERVAL = 100; // how often to tick in ms

  class StreamController extends BaseStreamController {
  constructor(hls, fragmentTracker, keyLoader) {
-   super(hls, fragmentTracker, keyLoader, 'stream-controller', PlaylistLevelType.MAIN);
+   super(hls, fragmentTracker, keyLoader, '[stream-controller]', PlaylistLevelType.MAIN);
    this.audioCodecSwap = false;
    this.gapController = null;
    this.level = -1;
@@ -19230,43 +18564,27 @@ class StreamController extends BaseStreamController {
  this.altAudio = false;
  this.audioOnly = false;
  this.fragPlaying = null;
+ this.onvplaying = null;
+ this.onvseeked = null;
  this.fragLastKbps = 0;
  this.couldBacktrack = false;
  this.backtrackFragment = null;
  this.audioCodecSwitch = false;
  this.videoBuffer = null;
- this.
-   // tick to speed up FRAG_CHANGED triggering
-   this.tick();
- };
- this.onMediaSeeked = () => {
-   const media = this.media;
-   const currentTime = media ? media.currentTime : null;
-   if (isFiniteNumber(currentTime)) {
-     this.log(`Media seeked to ${currentTime.toFixed(3)}`);
-   }
-
-   // If seeked was issued before buffer was appended do not tick immediately
-   const bufferInfo = this.getMainFwdBufferInfo();
-   if (bufferInfo === null || bufferInfo.len === 0) {
-     this.warn(`Main forward buffer length on "seeked" event ${bufferInfo ? bufferInfo.len : 'empty'})`);
-     return;
-   }
-
-   // tick to speed up FRAG_CHANGED triggering
-   this.tick();
- };
- this.registerListeners();
+ this._registerListeners();
  }
-
- super.registerListeners();
+ _registerListeners() {
  const {
    hls
  } = this;
+ hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
+ hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
+ hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
  hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this);
  hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
  hls.on(Events.FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
+ hls.on(Events.ERROR, this.onError, this);
  hls.on(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
  hls.on(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
  hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this);
@@ -19274,14 +18592,17 @@ class StreamController extends BaseStreamController {
  hls.on(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
  hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  }
-
- super.unregisterListeners();
+ _unregisterListeners() {
  const {
    hls
  } = this;
+ hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
+ hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
+ hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
  hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
  hls.off(Events.FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
+ hls.off(Events.ERROR, this.onError, this);
  hls.off(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
  hls.off(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
  hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this);
@@ -19290,9 +18611,7 @@ class StreamController extends BaseStreamController {
  hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  }
  onHandlerDestroying() {
-
- this.onMediaPlaying = this.onMediaSeeked = null;
- this.unregisterListeners();
+ this._unregisterListeners();
  super.onHandlerDestroying();
  }
  startLoad(startPosition) {
@@ -19390,9 +18709,6 @@ class StreamController extends BaseStreamController {
  this.checkFragmentChanged();
  }
  doTickIdle() {
- if (!this.buffering) {
-   return;
- }
  const {
    hls,
    levelLastLoaded,
@@ -19620,19 +18936,22 @@ class StreamController extends BaseStreamController {
  onMediaAttached(event, data) {
  super.onMediaAttached(event, data);
  const media = data.media;
-
-
+ this.onvplaying = this.onMediaPlaying.bind(this);
+ this.onvseeked = this.onMediaSeeked.bind(this);
+ media.addEventListener('playing', this.onvplaying);
+ media.addEventListener('seeked', this.onvseeked);
  this.gapController = new GapController(this.config, media, this.fragmentTracker, this.hls);
  }
  onMediaDetaching() {
  const {
    media
  } = this;
- if (media) {
-   media.removeEventListener('playing', this.
-   media.removeEventListener('seeked', this.
+ if (media && this.onvplaying && this.onvseeked) {
+   media.removeEventListener('playing', this.onvplaying);
+   media.removeEventListener('seeked', this.onvseeked);
+   this.onvplaying = this.onvseeked = null;
+   this.videoBuffer = null;
  }
- this.videoBuffer = null;
  this.fragPlaying = null;
  if (this.gapController) {
    this.gapController.destroy();
@@ -19640,6 +18959,27 @@ class StreamController extends BaseStreamController {
  }
  super.onMediaDetaching();
  }
+ onMediaPlaying() {
+   // tick to speed up FRAG_CHANGED triggering
+   this.tick();
+ }
+ onMediaSeeked() {
+   const media = this.media;
+   const currentTime = media ? media.currentTime : null;
+   if (isFiniteNumber(currentTime)) {
+     this.log(`Media seeked to ${currentTime.toFixed(3)}`);
+   }
+
+   // If seeked was issued before buffer was appended do not tick immediately
+   const bufferInfo = this.getMainFwdBufferInfo();
+   if (bufferInfo === null || bufferInfo.len === 0) {
+     this.warn(`Main forward buffer length on "seeked" event ${bufferInfo ? bufferInfo.len : 'empty'})`);
+     return;
+   }
+
+   // tick to speed up FRAG_CHANGED triggering
+   this.tick();
+ }
  onManifestLoading() {
  // reset buffer on manifest loading
  this.log('Trigger BUFFER_RESET');
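The two hunks above restore explicit bookkeeping of the bound `playing`/`seeked` handlers (`onvplaying`/`onvseeked`): the bound references created on attach are the only way to remove exactly the same callbacks on detach, and the restored `onMediaSeeked` skips its immediate tick while the forward buffer is still empty. A generic sketch of that add/remove-with-stored-reference pattern, not the controller itself:

```ts
// Generic sketch of the listener bookkeeping in the hunks above: keep the bound
// function references so removeEventListener() unregisters the exact callbacks.
class MediaListenerBookkeeping {
  private onvplaying: (() => void) | null = null;
  private onvseeked: (() => void) | null = null;

  attach(media: HTMLMediaElement): void {
    this.onvplaying = this.onPlaying.bind(this);
    this.onvseeked = this.onSeeked.bind(this);
    media.addEventListener('playing', this.onvplaying);
    media.addEventListener('seeked', this.onvseeked);
  }

  detach(media: HTMLMediaElement): void {
    if (this.onvplaying && this.onvseeked) {
      media.removeEventListener('playing', this.onvplaying);
      media.removeEventListener('seeked', this.onvseeked);
      this.onvplaying = this.onvseeked = null;
    }
  }

  private onPlaying(): void {
    // In the real controller this ticks to speed up FRAG_CHANGED.
  }

  private onSeeked(): void {
    // In the real controller this checks the forward buffer before ticking.
  }
}
```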
@@ -19931,10 +19271,8 @@ class StreamController extends BaseStreamController {
  }
  if (this.loadedmetadata || !BufferHelper.getBuffered(media).length) {
    // Resolve gaps using the main buffer, whose ranges are the intersections of the A/V sourcebuffers
-   const
-
-   const levelDetails = this.getLevelDetails();
-   gapController.poll(this.lastCurrentTime, activeFrag, levelDetails, state);
+   const activeFrag = this.state !== State.IDLE ? this.fragCurrent : null;
+   gapController.poll(this.lastCurrentTime, activeFrag);
  }
  this.lastCurrentTime = media.currentTime;
  }
@@ -20267,17 +19605,6 @@ class StreamController extends BaseStreamController {
  getMainFwdBufferInfo() {
  return this.getFwdBufferInfo(this.mediaBuffer ? this.mediaBuffer : this.media, PlaylistLevelType.MAIN);
  }
- get maxBufferLength() {
-   const {
-     levels,
-     level
-   } = this;
-   const levelInfo = levels == null ? void 0 : levels[level];
-   if (!levelInfo) {
-     return this.config.maxBufferLength;
-   }
-   return this.getMaxBufferLength(levelInfo.maxBitrate);
- }
  backtrack(frag) {
  this.couldBacktrack = true;
  // Causes findFragments to backtrack through fragments to find the keyframe
@@ -20383,7 +19710,7 @@ class Hls {
  * Get the video-dev/hls.js package version.
  */
  static get version() {
-   return "1.5.9
+   return "1.5.9";
  }

  /**
@@ -20446,12 +19773,9 @@ class Hls {
  * The configuration object provided on player instantiation.
  */
  this.userConfig = void 0;
- /**
-  * The logger functions used by this player instance, configured on player instantiation.
-  */
- this.logger = void 0;
  this.coreComponents = void 0;
  this.networkControllers = void 0;
+ this.started = false;
  this._emitter = new EventEmitter();
  this._autoLevelCapping = -1;
  this._maxHdcpLevel = null;
@@ -20468,11 +19792,11 @@ class Hls {
  this._media = null;
  this.url = null;
  this.triggeringException = void 0;
-
- const config = this.config = mergeConfig(Hls.DefaultConfig, userConfig
+ enableLogs(userConfig.debug || false, 'Hls instance');
+ const config = this.config = mergeConfig(Hls.DefaultConfig, userConfig);
  this.userConfig = userConfig;
  if (config.progressive) {
-   enableStreamingMode(config
+   enableStreamingMode(config);
  }

  // core controllers and network loaders
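With the per-instance `logger` member removed, 1.5.9 switches back to enabling the global logger from the constructor via `enableLogs(userConfig.debug || false, ...)`, so log output is driven by the `debug` option. A typical usage sketch (the stream URL and element lookup are placeholders):

```ts
import Hls from 'hls.js';

if (Hls.isSupported()) {
  // `debug: true` turns on the global hls.js logger enabled in the constructor above;
  // a custom logger object can also be supplied through the same option.
  const hls = new Hls({ debug: true });
  const video = document.querySelector('video') as HTMLVideoElement;
  hls.attachMedia(video);
  hls.loadSource('https://example.com/stream.m3u8'); // placeholder URL
}
```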
@@ -20485,17 +19809,17 @@ class Hls {
  } = config;
  const errorController = new ConfigErrorController(this);
  const abrController = this.abrController = new ConfigAbrController(this);
-
- const fragmentTracker = new FragmentTracker(this);
- const bufferController = this.bufferController = new ConfigBufferController(this, fragmentTracker);
+ const bufferController = this.bufferController = new ConfigBufferController(this);
  const capLevelController = this.capLevelController = new ConfigCapLevelController(this);
  const fpsController = new ConfigFpsController(this);
  const playListLoader = new PlaylistLoader(this);
  const id3TrackController = new ID3TrackController(this);
  const ConfigContentSteeringController = config.contentSteeringController;
- //
+ // ConentSteeringController is defined before LevelController to receive Multivariant Playlist events first
  const contentSteering = ConfigContentSteeringController ? new ConfigContentSteeringController(this) : null;
  const levelController = this.levelController = new LevelController(this, contentSteering);
+ // FragmentTracker must be defined before StreamController because the order of event handling is important
+ const fragmentTracker = new FragmentTracker(this);
  const keyLoader = new KeyLoader(this.config);
  const streamController = this.streamController = new StreamController(this, fragmentTracker, keyLoader);

@@ -20571,7 +19895,7 @@ class Hls {
  try {
    return this.emit(event, event, eventObject);
  } catch (error) {
-
+   logger.error('An internal error happened while handling event ' + event + '. Error message: "' + error.message + '". Here is a stacktrace:', error);
    // Prevent recursion in error event handlers that throw #5497
    if (!this.triggeringException) {
      this.triggeringException = true;
@@ -20597,7 +19921,7 @@ class Hls {
  * Dispose of the instance
  */
  destroy() {
-
+   logger.log('destroy');
    this.trigger(Events.DESTROYING, undefined);
    this.detachMedia();
    this.removeAllListeners();
@@ -20618,7 +19942,7 @@ class Hls {
  * Attaches Hls.js to a media element
  */
  attachMedia(media) {
-
+   logger.log('attachMedia');
    this._media = media;
    this.trigger(Events.MEDIA_ATTACHING, {
      media: media
@@ -20629,7 +19953,7 @@ class Hls {
  * Detach Hls.js from the media
  */
  detachMedia() {
-
+   logger.log('detachMedia');
    this.trigger(Events.MEDIA_DETACHING, undefined);
    this._media = null;
  }
@@ -20646,7 +19970,7 @@ class Hls {
  });
  this._autoLevelCapping = -1;
  this._maxHdcpLevel = null;
-
+ logger.log(`loadSource:${loadingSource}`);
  if (media && loadedSource && (loadedSource !== loadingSource || this.bufferController.hasSourceTypes())) {
    this.detachMedia();
    this.attachMedia(media);
@@ -20665,7 +19989,8 @@ class Hls {
  * Defaults to -1 (None: starts from earliest point)
  */
  startLoad(startPosition = -1) {
-
+   logger.log(`startLoad(${startPosition})`);
+   this.started = true;
    this.networkControllers.forEach(controller => {
      controller.startLoad(startPosition);
    });
@@ -20675,31 +20000,34 @@ class Hls {
  * Stop loading of any stream data.
  */
  stopLoad() {
-
+   logger.log('stopLoad');
+   this.started = false;
    this.networkControllers.forEach(controller => {
      controller.stopLoad();
    });
  }

  /**
-  * Resumes stream controller segment loading
+  * Resumes stream controller segment loading if previously started.
  */
  resumeBuffering() {
- this.
-
- controller
-
-
+   if (this.started) {
+     this.networkControllers.forEach(controller => {
+       if ('fragmentLoader' in controller) {
+         controller.startLoad(-1);
+       }
+     });
+   }
  }

  /**
- *
+ * Stops stream controller segment loading without changing 'started' state like stopLoad().
  * This allows for media buffering to be paused without interupting playlist loading.
  */
  pauseBuffering() {
    this.networkControllers.forEach(controller => {
-     if (controller
-     controller.
+     if ('fragmentLoader' in controller) {
+       controller.stopLoad();
      }
    });
  }
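In this version `pauseBuffering()`/`resumeBuffering()` pick out the stream controllers by checking for a `fragmentLoader` property, and `resumeBuffering()` is a no-op unless `startLoad()` has previously flipped the new `started` flag; playlist refreshes keep running while segment loading is paused. A usage sketch (URL and element are placeholders):

```ts
import Hls from 'hls.js';

const hls = new Hls();
hls.attachMedia(document.querySelector('video') as HTMLVideoElement);
hls.loadSource('https://example.com/live.m3u8'); // placeholder URL

// Stop fetching media segments while keeping playlist loading alive...
hls.pauseBuffering();

// ...and resume segment loading later. With default config, startLoad() has
// already been triggered by then, so the `started` guard passes.
setTimeout(() => hls.resumeBuffering(), 10_000);
```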
@@ -20708,7 +20036,7 @@ class Hls {
  * Swap through possible audio codecs in the stream (for example to switch from stereo to 5.1)
  */
  swapAudioCodec() {
-
+   logger.log('swapAudioCodec');
    this.streamController.swapAudioCodec();
  }

@@ -20719,7 +20047,7 @@ class Hls {
  * Automatic recovery of media-errors by this process is configurable.
  */
  recoverMediaError() {
-
+   logger.log('recoverMediaError');
    const media = this._media;
    this.detachMedia();
    if (media) {
@@ -20749,7 +20077,7 @@ class Hls {
  * Set quality level index immediately. This will flush the current buffer to replace the quality asap. That means playback will interrupt at least shortly to re-buffer and re-sync eventually. Set to -1 for automatic level selection.
  */
  set currentLevel(newLevel) {
-
+   logger.log(`set currentLevel:${newLevel}`);
    this.levelController.manualLevel = newLevel;
    this.streamController.immediateLevelSwitch();
  }
@@ -20768,7 +20096,7 @@ class Hls {
  * @param newLevel - Pass -1 for automatic level selection
  */
  set nextLevel(newLevel) {
-
+   logger.log(`set nextLevel:${newLevel}`);
    this.levelController.manualLevel = newLevel;
    this.streamController.nextLevelSwitch();
  }
@@ -20787,7 +20115,7 @@ class Hls {
  * @param newLevel - Pass -1 for automatic level selection
  */
  set loadLevel(newLevel) {
-
+   logger.log(`set loadLevel:${newLevel}`);
    this.levelController.manualLevel = newLevel;
  }

@@ -20818,7 +20146,7 @@ class Hls {
  * Sets "first-level", see getter.
  */
  set firstLevel(newLevel) {
-
+   logger.log(`set firstLevel:${newLevel}`);
    this.levelController.firstLevel = newLevel;
  }

@@ -20843,7 +20171,7 @@ class Hls {
  * (determined from download of first segment)
  */
  set startLevel(newLevel) {
-
+   logger.log(`set startLevel:${newLevel}`);
    // if not in automatic start level detection, ensure startLevel is greater than minAutoLevel
    if (newLevel !== -1) {
      newLevel = Math.max(newLevel, this.minAutoLevel);
@@ -20918,7 +20246,7 @@ class Hls {
  */
  set autoLevelCapping(newLevel) {
    if (this._autoLevelCapping !== newLevel) {
-
+     logger.log(`set autoLevelCapping:${newLevel}`);
      this._autoLevelCapping = newLevel;
      this.levelController.checkMaxAutoUpdated();
    }
@@ -21023,9 +20351,6 @@ class Hls {
  get mainForwardBufferInfo() {
  return this.streamController.getMainFwdBufferInfo();
  }
- get maxBufferLength() {
-   return this.streamController.maxBufferLength;
- }

  /**
  * Find and select the best matching audio track, making a level switch when a Group change is necessary.
@@ -21193,22 +20518,12 @@ class Hls {
  get forceStartLoad() {
  return this.streamController.forceStartLoad;
  }
-
- /**
-  * ContentSteering pathwayPriority getter/setter
-  */
- get pathwayPriority() {
-   return this.levelController.pathwayPriority;
- }
- set pathwayPriority(pathwayPriority) {
-   this.levelController.pathwayPriority = pathwayPriority;
- }
  }
  Hls.defaultConfig = void 0;

- var KeySystemFormats =
- var KeySystems =
- var SubtitleStreamController =
- var TimelineController =
- export { AbrController, AttrList,
+ var KeySystemFormats = empty.KeySystemFormats;
+ var KeySystems = empty.KeySystems;
+ var SubtitleStreamController = empty.SubtitleStreamController;
+ var TimelineController = empty.TimelineController;
+ export { AbrController, AttrList, Cues as AudioStreamController, Cues as AudioTrackController, BasePlaylistController, BaseSegment, BaseStreamController, BufferController, Cues as CMCDController, CapLevelController, ChunkMetadata, ContentSteeringController, DateRange, Cues as EMEController, ErrorActionFlags, ErrorController, ErrorDetails, ErrorTypes, Events, FPSController, Fragment, Hls, HlsSkip, HlsUrlParameters, KeySystemFormats, KeySystems, Level, LevelDetails, LevelKey, LoadStats, MetadataSchema, NetworkErrorAction, Part, PlaylistLevelType, SubtitleStreamController, Cues as SubtitleTrackController, TimelineController, Hls as default, getMediaSource, isMSESupported, isSupported };
  //# sourceMappingURL=hls.light.mjs.map