hls.js 1.5.8-0.canary.10170 → 1.5.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -4
- package/dist/hls-demo.js +3 -12
- package/dist/hls-demo.js.map +1 -1
- package/dist/hls.js +2366 -3626
- package/dist/hls.js.d.ts +84 -98
- package/dist/hls.js.map +1 -1
- package/dist/hls.light.js +1643 -2278
- package/dist/hls.light.js.map +1 -1
- package/dist/hls.light.min.js +1 -1
- package/dist/hls.light.min.js.map +1 -1
- package/dist/hls.light.mjs +1258 -1903
- package/dist/hls.light.mjs.map +1 -1
- package/dist/hls.min.js +1 -1
- package/dist/hls.min.js.map +1 -1
- package/dist/hls.mjs +1531 -2794
- package/dist/hls.mjs.map +1 -1
- package/dist/hls.worker.js +1 -1
- package/dist/hls.worker.js.map +1 -1
- package/package.json +30 -30
- package/src/config.ts +2 -3
- package/src/controller/abr-controller.ts +20 -24
- package/src/controller/audio-stream-controller.ts +74 -68
- package/src/controller/audio-track-controller.ts +1 -1
- package/src/controller/base-playlist-controller.ts +10 -27
- package/src/controller/base-stream-controller.ts +38 -160
- package/src/controller/buffer-controller.ts +92 -230
- package/src/controller/buffer-operation-queue.ts +19 -16
- package/src/controller/cap-level-controller.ts +2 -3
- package/src/controller/cmcd-controller.ts +9 -30
- package/src/controller/content-steering-controller.ts +6 -8
- package/src/controller/eme-controller.ts +23 -10
- package/src/controller/error-controller.ts +8 -6
- package/src/controller/fps-controller.ts +3 -8
- package/src/controller/fragment-tracker.ts +11 -15
- package/src/controller/gap-controller.ts +16 -43
- package/src/controller/id3-track-controller.ts +7 -7
- package/src/controller/latency-controller.ts +11 -9
- package/src/controller/level-controller.ts +19 -13
- package/src/controller/stream-controller.ts +32 -37
- package/src/controller/subtitle-stream-controller.ts +40 -28
- package/src/controller/subtitle-track-controller.ts +3 -5
- package/src/controller/timeline-controller.ts +31 -25
- package/src/crypt/aes-crypto.ts +2 -21
- package/src/crypt/decrypter.ts +18 -32
- package/src/crypt/fast-aes-key.ts +5 -24
- package/src/demux/audio/aacdemuxer.ts +2 -2
- package/src/demux/audio/ac3-demuxer.ts +3 -4
- package/src/demux/audio/adts.ts +4 -9
- package/src/demux/audio/base-audio-demuxer.ts +14 -16
- package/src/demux/audio/mp3demuxer.ts +3 -4
- package/src/demux/audio/mpegaudio.ts +1 -1
- package/src/demux/id3.ts +411 -0
- package/src/demux/mp4demuxer.ts +7 -7
- package/src/demux/sample-aes.ts +0 -2
- package/src/demux/transmuxer-interface.ts +12 -4
- package/src/demux/transmuxer-worker.ts +4 -4
- package/src/demux/transmuxer.ts +3 -16
- package/src/demux/tsdemuxer.ts +37 -71
- package/src/demux/video/avc-video-parser.ts +119 -208
- package/src/demux/video/base-video-parser.ts +2 -134
- package/src/demux/video/exp-golomb.ts +208 -0
- package/src/events.ts +1 -8
- package/src/exports-named.ts +1 -1
- package/src/hls.ts +37 -49
- package/src/loader/fragment-loader.ts +3 -10
- package/src/loader/key-loader.ts +1 -3
- package/src/loader/level-key.ts +9 -10
- package/src/loader/playlist-loader.ts +5 -4
- package/src/remux/mp4-generator.ts +1 -196
- package/src/remux/mp4-remuxer.ts +8 -24
- package/src/task-loop.ts +2 -5
- package/src/types/component-api.ts +1 -3
- package/src/types/demuxer.ts +0 -3
- package/src/types/events.ts +0 -4
- package/src/types/remuxer.ts +1 -1
- package/src/utils/buffer-helper.ts +31 -12
- package/src/utils/codecs.ts +5 -34
- package/src/utils/fetch-loader.ts +1 -1
- package/src/utils/imsc1-ttml-parser.ts +1 -1
- package/src/utils/keysystem-util.ts +6 -1
- package/src/utils/logger.ts +23 -58
- package/src/utils/mp4-tools.ts +3 -5
- package/src/utils/webvtt-parser.ts +1 -1
- package/src/crypt/decrypter-aes-mode.ts +0 -4
- package/src/demux/video/hevc-video-parser.ts +0 -749
- package/src/utils/encryption-methods-util.ts +0 -21
- package/src/utils/utf8-utils.ts +0 -18
package/dist/hls.light.mjs
CHANGED
@@ -209,7 +209,7 @@ function _toPrimitive(t, r) {
 }
 function _toPropertyKey(t) {
 var i = _toPrimitive(t, "string");
- return "symbol" == typeof i ? i : i
+ return "symbol" == typeof i ? i : String(i);
 }
 function _defineProperty(obj, key, value) {
 key = _toPropertyKey(key);
@@ -256,7 +256,6 @@ let Events = /*#__PURE__*/function (Events) {
 Events["MEDIA_ATTACHED"] = "hlsMediaAttached";
 Events["MEDIA_DETACHING"] = "hlsMediaDetaching";
 Events["MEDIA_DETACHED"] = "hlsMediaDetached";
- Events["MEDIA_ENDED"] = "hlsMediaEnded";
 Events["BUFFER_RESET"] = "hlsBufferReset";
 Events["BUFFER_CODECS"] = "hlsBufferCodecs";
 Events["BUFFER_CREATED"] = "hlsBufferCreated";
@@ -370,6 +369,58 @@ let ErrorDetails = /*#__PURE__*/function (ErrorDetails) {
 return ErrorDetails;
 }({});

+ const noop = function noop() {};
+ const fakeLogger = {
+ trace: noop,
+ debug: noop,
+ log: noop,
+ warn: noop,
+ info: noop,
+ error: noop
+ };
+ let exportedLogger = fakeLogger;
+
+ // let lastCallTime;
+ // function formatMsgWithTimeInfo(type, msg) {
+ //   const now = Date.now();
+ //   const diff = lastCallTime ? '+' + (now - lastCallTime) : '0';
+ //   lastCallTime = now;
+ //   msg = (new Date(now)).toISOString() + ' | [' + type + '] > ' + msg + ' ( ' + diff + ' ms )';
+ //   return msg;
+ // }
+
+ function consolePrintFn(type) {
+ const func = self.console[type];
+ if (func) {
+ return func.bind(self.console, `[${type}] >`);
+ }
+ return noop;
+ }
+ function exportLoggerFunctions(debugConfig, ...functions) {
+ functions.forEach(function (type) {
+ exportedLogger[type] = debugConfig[type] ? debugConfig[type].bind(debugConfig) : consolePrintFn(type);
+ });
+ }
+ function enableLogs(debugConfig, id) {
+ // check that console is available
+ if (typeof console === 'object' && debugConfig === true || typeof debugConfig === 'object') {
+ exportLoggerFunctions(debugConfig,
+ // Remove out from list here to hard-disable a log-level
+ // 'trace',
+ 'debug', 'log', 'info', 'warn', 'error');
+ // Some browsers don't allow to use bind on console object anyway
+ // fallback to default if needed
+ try {
+ exportedLogger.log(`Debug logs enabled for "${id}" in hls.js version ${"1.5.8"}`);
+ } catch (e) {
+ exportedLogger = fakeLogger;
+ }
+ } else {
+ exportedLogger = fakeLogger;
+ }
+ }
+ const logger = exportedLogger;
+
 const DECIMAL_RESOLUTION_REGEX = /^(\d+)x(\d+)$/;
 const ATTR_LIST_REGEX = /(.+?)=(".*?"|.*?)(?:,|$)/g;

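For reference, the `enableLogs` helper reinstated above takes only a debug flag (or a console-like object) plus an id string, and falls back to the built-in console per level. A minimal usage sketch under that shape (the `myTelemetry` sink is a placeholder, not part of hls.js):

```js
// Console-backed logging: logger.debug/log/info/warn/error get bound to
// self.console with a "[level] >" prefix.
enableLogs(true, 'Hls instance');

// Object form: supply your own sinks for some levels; the rest fall back to console.
enableLogs(
  {
    warn: (...args) => myTelemetry.record('hls-warn', args),   // placeholder sink
    error: (...args) => myTelemetry.record('hls-error', args), // placeholder sink
  },
  'Hls instance'
);
```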
@@ -451,84 +502,6 @@ class AttrList {
 }
 }

- class Logger {
- constructor(label, logger) {
- this.trace = void 0;
- this.debug = void 0;
- this.log = void 0;
- this.warn = void 0;
- this.info = void 0;
- this.error = void 0;
- const lb = `[${label}]:`;
- this.trace = noop;
- this.debug = logger.debug.bind(null, lb);
- this.log = logger.log.bind(null, lb);
- this.warn = logger.warn.bind(null, lb);
- this.info = logger.info.bind(null, lb);
- this.error = logger.error.bind(null, lb);
- }
- }
- const noop = function noop() {};
- const fakeLogger = {
- trace: noop,
- debug: noop,
- log: noop,
- warn: noop,
- info: noop,
- error: noop
- };
- function createLogger() {
- return _extends({}, fakeLogger);
- }
-
- // let lastCallTime;
- // function formatMsgWithTimeInfo(type, msg) {
- //   const now = Date.now();
- //   const diff = lastCallTime ? '+' + (now - lastCallTime) : '0';
- //   lastCallTime = now;
- //   msg = (new Date(now)).toISOString() + ' | [' + type + '] > ' + msg + ' ( ' + diff + ' ms )';
- //   return msg;
- // }
-
- function consolePrintFn(type, id) {
- const func = self.console[type];
- return func ? func.bind(self.console, `${id ? '[' + id + '] ' : ''}[${type}] >`) : noop;
- }
- function getLoggerFn(key, debugConfig, id) {
- return debugConfig[key] ? debugConfig[key].bind(debugConfig) : consolePrintFn(key, id);
- }
- const exportedLogger = createLogger();
- function enableLogs(debugConfig, context, id) {
- // check that console is available
- const newLogger = createLogger();
- if (typeof console === 'object' && debugConfig === true || typeof debugConfig === 'object') {
- const keys = [
- // Remove out from list here to hard-disable a log-level
- // 'trace',
- 'debug', 'log', 'info', 'warn', 'error'];
- keys.forEach(key => {
- newLogger[key] = getLoggerFn(key, debugConfig, id);
- });
- // Some browsers don't allow to use bind on console object anyway
- // fallback to default if needed
- try {
- newLogger.log(`Debug logs enabled for "${context}" in hls.js version ${"1.5.8-0.canary.10170"}`);
- } catch (e) {
- /* log fn threw an exception. All logger methods are no-ops. */
- return createLogger();
- }
- // global exported logger uses the same functions as new logger without `id`
- keys.forEach(key => {
- exportedLogger[key] = getLoggerFn(key, debugConfig);
- });
- } else {
- // Reset global exported logger
- _extends(exportedLogger, newLogger);
- }
- return newLogger;
- }
- const logger = exportedLogger;
-
 // Avoid exporting const enum so that these values can be inlined

 function isDateRangeCueAttribute(attrName) {
@@ -1018,30 +991,10 @@ class LevelDetails {
 }
 }

- var DecrypterAesMode = {
- cbc: 0,
- ctr: 1
- };
-
- function isFullSegmentEncryption(method) {
- return method === 'AES-128' || method === 'AES-256' || method === 'AES-256-CTR';
- }
- function getAesModeFromFullSegmentMethod(method) {
- switch (method) {
- case 'AES-128':
- case 'AES-256':
- return DecrypterAesMode.cbc;
- case 'AES-256-CTR':
- return DecrypterAesMode.ctr;
- default:
- throw new Error(`invalid full segment method ${method}`);
- }
- }
-
 // This file is inserted as a shim for modules which we do not want to include into the distro.
 // This replacement is done in the "alias" plugin of the rollup config.
 var empty = undefined;
- var
+ var Cues = /*@__PURE__*/getDefaultExportFromCjs(empty);

 function sliceUint8(array, start, end) {
 // @ts-expect-error This polyfills IE11 usage of Uint8Array slice.
@@ -1049,104 +1002,373 @@ function sliceUint8(array, start, end) {
 return Uint8Array.prototype.slice ? array.slice(start, end) : new Uint8Array(Array.prototype.slice.call(array, start, end));
 }

- //
-
- /* utf.js - UTF-8 <=> UTF-16 convertion
- *
- * Copyright (C) 1999 Masanao Izumo <iz@onicos.co.jp>
- * Version: 1.0
- * LastModified: Dec 25 1999
- * This library is free. You can redistribute it and/or modify it.
- */
+ // breaking up those two types in order to clarify what is happening in the decoding path.
+
 /**
- *
- *
- * @param
- *
- * @returns The string
- *
- * @group Utils
- *
- * @beta
+ * Returns true if an ID3 header can be found at offset in data
+ * @param data - The data to search
+ * @param offset - The offset at which to start searching
 */
-
-
-
-
-
-
-
-
+ const isHeader$2 = (data, offset) => {
+ /*
+ * http://id3.org/id3v2.3.0
+ * [0] = 'I'
+ * [1] = 'D'
+ * [2] = '3'
+ * [3,4] = {Version}
+ * [5] = {Flags}
+ * [6-9] = {ID3 Size}
+ *
+ * An ID3v2 tag can be detected with the following pattern:
+ *  $49 44 33 yy yy xx zz zz zz zz
+ * Where yy is less than $FF, xx is the 'flags' byte and zz is less than $80
+ */
+ if (offset + 10 <= data.length) {
+ // look for 'ID3' identifier
+ if (data[offset] === 0x49 && data[offset + 1] === 0x44 && data[offset + 2] === 0x33) {
+ // check version is within range
+ if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
+ // check size is within range
+ if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
+ return true;
+ }
+ }
 }
- // remove any null characters
- return decoded.replace(/\0/g, '');
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- case 7:
- // 0xxxxxxx
- out += String.fromCharCode(c);
- break;
- case 12:
- case 13:
- // 110x xxxx 10xx xxxx
- char2 = array[i++];
- out += String.fromCharCode((c & 0x1f) << 6 | char2 & 0x3f);
- break;
- case 14:
- // 1110 xxxx 10xx xxxx 10xx xxxx
- char2 = array[i++];
- char3 = array[i++];
- out += String.fromCharCode((c & 0x0f) << 12 | (char2 & 0x3f) << 6 | (char3 & 0x3f) << 0);
- break;
+ return false;
+ };
+
+ /**
+ * Returns true if an ID3 footer can be found at offset in data
+ * @param data - The data to search
+ * @param offset - The offset at which to start searching
+ */
+ const isFooter = (data, offset) => {
+ /*
+ * The footer is a copy of the header, but with a different identifier
+ */
+ if (offset + 10 <= data.length) {
+ // look for '3DI' identifier
+ if (data[offset] === 0x33 && data[offset + 1] === 0x44 && data[offset + 2] === 0x49) {
+ // check version is within range
+ if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
+ // check size is within range
+ if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
+ return true;
+ }
+ }
 }
 }
- return
- }
+ return false;
+ };

 /**
- *
+ * Returns any adjacent ID3 tags found in data starting at offset, as one block of data
+ * @param data - The data to search in
+ * @param offset - The offset at which to start searching
+ * @returns the block of data containing any ID3 tags found
+ * or *undefined* if no header is found at the starting offset
 */
+ const getID3Data = (data, offset) => {
+ const front = offset;
+ let length = 0;
+ while (isHeader$2(data, offset)) {
+ // ID3 header is 10 bytes
+ length += 10;
+ const size = readSize(data, offset + 6);
+ length += size;
+ if (isFooter(data, offset + 10)) {
+ // ID3 footer is 10 bytes
+ length += 10;
+ }
+ offset += length;
+ }
+ if (length > 0) {
+ return data.subarray(front, front + length);
+ }
+ return undefined;
+ };
+ const readSize = (data, offset) => {
+ let size = 0;
+ size = (data[offset] & 0x7f) << 21;
+ size |= (data[offset + 1] & 0x7f) << 14;
+ size |= (data[offset + 2] & 0x7f) << 7;
+ size |= data[offset + 3] & 0x7f;
+ return size;
+ };
+ const canParse$2 = (data, offset) => {
+ return isHeader$2(data, offset) && readSize(data, offset + 6) + 10 <= data.length - offset;
+ };

-
-
-
-
-
-
-
-
-
+ /**
+ * Searches for the Elementary Stream timestamp found in the ID3 data chunk
+ * @param data - Block of data containing one or more ID3 tags
+ */
+ const getTimeStamp = data => {
+ const frames = getID3Frames(data);
+ for (let i = 0; i < frames.length; i++) {
+ const frame = frames[i];
+ if (isTimeStampFrame(frame)) {
+ return readTimeStamp(frame);
 }
- return str;
 }
+ return undefined;
 };

-
-
+ /**
+ * Returns true if the ID3 frame is an Elementary Stream timestamp frame
+ */
+ const isTimeStampFrame = frame => {
+ return frame && frame.key === 'PRIV' && frame.info === 'com.apple.streaming.transportStreamTimestamp';
+ };
+ const getFrameData = data => {
+ /*
+ Frame ID       $xx xx xx xx (four characters)
+ Size           $xx xx xx xx
+ Flags          $xx xx
+ */
+ const type = String.fromCharCode(data[0], data[1], data[2], data[3]);
+ const size = readSize(data, 4);

- //
-
+ // skip frame id, size, and flags
+ const offset = 10;
+ return {
+ type,
+ size,
+ data: data.subarray(offset, offset + size)
+ };
+ };
+
+ /**
+ * Returns an array of ID3 frames found in all the ID3 tags in the id3Data
+ * @param id3Data - The ID3 data containing one or more ID3 tags
+ */
+ const getID3Frames = id3Data => {
+ let offset = 0;
+ const frames = [];
+ while (isHeader$2(id3Data, offset)) {
+ const size = readSize(id3Data, offset + 6);
+ // skip past ID3 header
+ offset += 10;
+ const end = offset + size;
+ // loop through frames in the ID3 tag
+ while (offset + 8 < end) {
+ const frameData = getFrameData(id3Data.subarray(offset));
+ const frame = decodeFrame(frameData);
+ if (frame) {
+ frames.push(frame);
+ }
+
+ // skip frame header and frame data
+ offset += frameData.size + 10;
+ }
+ if (isFooter(id3Data, offset)) {
+ offset += 10;
+ }
+ }
+ return frames;
+ };
+ const decodeFrame = frame => {
+ if (frame.type === 'PRIV') {
+ return decodePrivFrame(frame);
+ } else if (frame.type[0] === 'W') {
+ return decodeURLFrame(frame);
+ }
+ return decodeTextFrame(frame);
+ };
+ const decodePrivFrame = frame => {
+ /*
+ Format: <text string>\0<binary data>
+ */
+ if (frame.size < 2) {
+ return undefined;
+ }
+ const owner = utf8ArrayToStr(frame.data, true);
+ const privateData = new Uint8Array(frame.data.subarray(owner.length + 1));
+ return {
+ key: frame.type,
+ info: owner,
+ data: privateData.buffer
+ };
+ };
+ const decodeTextFrame = frame => {
+ if (frame.size < 2) {
+ return undefined;
+ }
+ if (frame.type === 'TXXX') {
+ /*
+ Format:
+ [0]   = {Text Encoding}
+ [1-?] = {Description}\0{Value}
+ */
+ let index = 1;
+ const description = utf8ArrayToStr(frame.data.subarray(index), true);
+ index += description.length + 1;
+ const value = utf8ArrayToStr(frame.data.subarray(index));
+ return {
+ key: frame.type,
+ info: description,
+ data: value
+ };
+ }
+ /*
+ Format:
+ [0]   = {Text Encoding}
+ [1-?] = {Value}
+ */
+ const text = utf8ArrayToStr(frame.data.subarray(1));
+ return {
+ key: frame.type,
+ data: text
+ };
+ };
+ const decodeURLFrame = frame => {
+ if (frame.type === 'WXXX') {
+ /*
+ Format:
+ [0]   = {Text Encoding}
+ [1-?] = {Description}\0{URL}
+ */
+ if (frame.size < 2) {
+ return undefined;
+ }
+ let index = 1;
+ const description = utf8ArrayToStr(frame.data.subarray(index), true);
+ index += description.length + 1;
+ const value = utf8ArrayToStr(frame.data.subarray(index));
+ return {
+ key: frame.type,
+ info: description,
+ data: value
+ };
+ }
+ /*
+ Format:
+ [0-?] = {URL}
+ */
+ const url = utf8ArrayToStr(frame.data);
+ return {
+ key: frame.type,
+ data: url
+ };
+ };
+ const readTimeStamp = timeStampFrame => {
+ if (timeStampFrame.data.byteLength === 8) {
+ const data = new Uint8Array(timeStampFrame.data);
+ // timestamp is 33 bit expressed as a big-endian eight-octet number,
+ // with the upper 31 bits set to zero.
+ const pts33Bit = data[3] & 0x1;
+ let timestamp = (data[4] << 23) + (data[5] << 15) + (data[6] << 7) + data[7];
+ timestamp /= 45;
+ if (pts33Bit) {
+ timestamp += 47721858.84;
+ } // 2^32 / 90
+
+ return Math.round(timestamp);
+ }
+ return undefined;
+ };
+
+ // http://stackoverflow.com/questions/8936984/uint8array-to-string-in-javascript/22373197
+ // http://www.onicos.com/staff/iz/amuse/javascript/expert/utf.txt
+ /* utf.js - UTF-8 <=> UTF-16 convertion
+ *
+ * Copyright (C) 1999 Masanao Izumo <iz@onicos.co.jp>
+ * Version: 1.0
+ * LastModified: Dec 25 1999
+ * This library is free. You can redistribute it and/or modify it.
+ */
+ const utf8ArrayToStr = (array, exitOnNull = false) => {
+ const decoder = getTextDecoder();
+ if (decoder) {
+ const decoded = decoder.decode(array);
+ if (exitOnNull) {
+ // grab up to the first null
+ const idx = decoded.indexOf('\0');
+ return idx !== -1 ? decoded.substring(0, idx) : decoded;
+ }
+
+ // remove any null characters
+ return decoded.replace(/\0/g, '');
+ }
+ const len = array.length;
+ let c;
+ let char2;
+ let char3;
+ let out = '';
+ let i = 0;
+ while (i < len) {
+ c = array[i++];
+ if (c === 0x00 && exitOnNull) {
+ return out;
+ } else if (c === 0x00 || c === 0x03) {
+ // If the character is 3 (END_OF_TEXT) or 0 (NULL) then skip it
+ continue;
+ }
+ switch (c >> 4) {
+ case 0:
+ case 1:
+ case 2:
+ case 3:
+ case 4:
+ case 5:
+ case 6:
+ case 7:
+ // 0xxxxxxx
+ out += String.fromCharCode(c);
+ break;
+ case 12:
+ case 13:
+ // 110x xxxx 10xx xxxx
+ char2 = array[i++];
+ out += String.fromCharCode((c & 0x1f) << 6 | char2 & 0x3f);
+ break;
+ case 14:
+ // 1110 xxxx 10xx xxxx 10xx xxxx
+ char2 = array[i++];
+ char3 = array[i++];
+ out += String.fromCharCode((c & 0x0f) << 12 | (char2 & 0x3f) << 6 | (char3 & 0x3f) << 0);
+ break;
+ }
+ }
+ return out;
+ };
+ let decoder;
+ function getTextDecoder() {
+ // On Play Station 4, TextDecoder is defined but partially implemented.
+ // Manual decoding option is preferable
+ if (navigator.userAgent.includes('PlayStation 4')) {
+ return;
+ }
+ if (!decoder && typeof self.TextDecoder !== 'undefined') {
+ decoder = new self.TextDecoder('utf-8');
+ }
+ return decoder;
+ }
+
+ /**
+ * hex dump helper class
+ */
+
+ const Hex = {
+ hexDump: function (array) {
+ let str = '';
+ for (let i = 0; i < array.length; i++) {
+ let h = array[i].toString(16);
+ if (h.length < 2) {
+ h = '0' + h;
+ }
+ str += h;
+ }
+ return str;
+ }
+ };
+
+ const UINT32_MAX$1 = Math.pow(2, 32) - 1;
+ const push = [].push;
+
+ // We are using fixed track IDs for driving the MP4 remuxer
+ // instead of following the TS PIDs.
 // There is no reason not to do this and some browsers/SourceBuffer-demuxers
 // may not like if there are TrackID "switches"
 // See https://github.com/video-dev/hls.js/issues/1331
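The `readSize` helper reinstated above decodes ID3's syncsafe integers: four size bytes, each contributing only its low 7 bits, big-endian. A standalone worked example of the same computation (not part of the bundle):

```js
// Syncsafe size: 4 bytes, 7 data bits each (top bit always 0), big-endian.
function readSyncSafeSize(bytes, offset = 0) {
  return (
    ((bytes[offset] & 0x7f) << 21) |
    ((bytes[offset + 1] & 0x7f) << 14) |
    ((bytes[offset + 2] & 0x7f) << 7) |
    (bytes[offset + 3] & 0x7f)
  );
}

// Example: 0x00 0x00 0x02 0x01 -> (2 << 7) | 1 = 257 bytes of tag payload.
console.log(readSyncSafeSize(new Uint8Array([0x00, 0x00, 0x02, 0x01]))); // 257
```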
@@ -1404,7 +1626,7 @@ function parseStsd(stsd) {
 {
 const codecBox = findBox(sampleEntries, [fourCC])[0];
 const esdsBox = findBox(codecBox.subarray(28), ['esds'])[0];
- if (esdsBox && esdsBox.length >
+ if (esdsBox && esdsBox.length > 12) {
 let i = 4;
 // ES Descriptor tag
 if (esdsBox[i++] !== 0x03) {
@@ -1519,9 +1741,7 @@ function parseStsd(stsd) {
 }
 function skipBERInteger(bytes, i) {
 const limit = i + 5;
- while (bytes[i++] & 0x80 && i < limit) {
- /* do nothing */
- }
+ while (bytes[i++] & 0x80 && i < limit) {}
 return i;
 }
 function toHex(x) {
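The `skipBERInteger` change above only drops the empty loop-body comment; the loop still walks a variable-length size field in the `esds` descriptor, where a set high bit means another length byte follows (capped at 5 bytes). A standalone sketch of that walk:

```js
// Advance past a BER-style length field: bytes with the top bit set are
// continuation bytes; stop after the first byte without it, or after 5 bytes.
function skipBER(bytes, i) {
  const limit = i + 5;
  while (bytes[i++] & 0x80 && i < limit) {}
  return i;
}

// Example: [0x80, 0x80, 0x05] -> 3 (index just past the length field).
console.log(skipBER(new Uint8Array([0x80, 0x80, 0x05]), 0)); // 3
```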
@@ -2213,12 +2433,12 @@ class LevelKey {
 this.keyFormatVersions = formatversions;
 this.iv = iv;
 this.encrypted = method ? method !== 'NONE' : false;
- this.isCommonEncryption = this.encrypted &&
+ this.isCommonEncryption = this.encrypted && method !== 'AES-128';
 }
 isSupported() {
 // If it's Segment encryption or No encryption, just select that key system
 if (this.method) {
- if (
+ if (this.method === 'AES-128' || this.method === 'NONE') {
 return true;
 }
 if (this.keyFormat === 'identity') {
|
|
2232
2452
|
if (!this.encrypted || !this.uri) {
|
2233
2453
|
return null;
|
2234
2454
|
}
|
2235
|
-
if (
|
2455
|
+
if (this.method === 'AES-128' && this.uri && !this.iv) {
|
2236
2456
|
if (typeof sn !== 'number') {
|
2237
2457
|
// We are fetching decryption data for a initialization segment
|
2238
|
-
// If the segment was encrypted with AES-128
|
2458
|
+
// If the segment was encrypted with AES-128
|
2239
2459
|
// It must have an IV defined. We cannot substitute the Segment Number in.
|
2240
|
-
|
2241
|
-
|
2460
|
+
if (this.method === 'AES-128' && !this.iv) {
|
2461
|
+
logger.warn(`missing IV for initialization segment with method="${this.method}" - compliance issue`);
|
2462
|
+
}
|
2242
2463
|
// Explicitly set sn to resulting value from implicit conversions 'initSegment' values for IV generation.
|
2243
2464
|
sn = 0;
|
2244
2465
|
}
|
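Context for the `sn = 0` fallback above: for AES-128 segments that carry no explicit IV, the HLS spec derives the IV from the media sequence number, written big-endian into a zero-padded 16-byte buffer. A minimal sketch of that rule (an illustration of the spec behavior, not the exact hls.js helper):

```js
// Implicit AES-128 IV per the HLS spec (RFC 8216): 16 zero bytes with the
// media sequence number placed big-endian in the low-order bytes.
function deriveIvFromSequenceNumber(sn) {
  const iv = new Uint8Array(16);
  new DataView(iv.buffer).setUint32(12, sn, false); // assumes sn fits in 32 bits
  return iv;
}

// Example: sn = 7 -> 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 07
console.log(deriveIvFromSequenceNumber(7));
```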
@@ -2388,28 +2609,23 @@ function getCodecCompatibleNameLower(lowerCaseCodec, preferManagedMediaSource =
 if (CODEC_COMPATIBLE_NAMES[lowerCaseCodec]) {
 return CODEC_COMPATIBLE_NAMES[lowerCaseCodec];
 }
+
+ // Idealy fLaC and Opus would be first (spec-compliant) but
+ // some browsers will report that fLaC is supported then fail.
+ // see: https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
 const codecsToCheck = {
- // Idealy fLaC and Opus would be first (spec-compliant) but
- // some browsers will report that fLaC is supported then fail.
- // see: https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
 flac: ['flac', 'fLaC', 'FLAC'],
- opus: ['opus', 'Opus']
- // Replace audio codec info if browser does not support mp4a.40.34,
- // and demuxer can fallback to 'audio/mpeg' or 'audio/mp4;codecs="mp3"'
- 'mp4a.40.34': ['mp3']
+ opus: ['opus', 'Opus']
 }[lowerCaseCodec];
 for (let i = 0; i < codecsToCheck.length; i++) {
- var _getMediaSource;
 if (isCodecMediaSourceSupported(codecsToCheck[i], 'audio', preferManagedMediaSource)) {
 CODEC_COMPATIBLE_NAMES[lowerCaseCodec] = codecsToCheck[i];
 return codecsToCheck[i];
- } else if (codecsToCheck[i] === 'mp3' && (_getMediaSource = getMediaSource(preferManagedMediaSource)) != null && _getMediaSource.isTypeSupported('audio/mpeg')) {
- return '';
 }
 }
 return lowerCaseCodec;
 }
- const AUDIO_CODEC_REGEXP = /flac|opus
+ const AUDIO_CODEC_REGEXP = /flac|opus/i;
 function getCodecCompatibleName(codec, preferManagedMediaSource = true) {
 return codec.replace(AUDIO_CODEC_REGEXP, m => getCodecCompatibleNameLower(m.toLowerCase(), preferManagedMediaSource));
 }
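The probe above now only cycles through case variants of `flac`/`opus`; the canary's `mp4a.40.34` → `mp3` fallback is gone. A standalone sketch of the same probing idea against `MediaSource.isTypeSupported` (function and variable names here are illustrative, not the bundle's internals):

```js
// Try each spelling of an audio codec until MediaSource accepts one.
function pickSupportedCodecName(variants, container = 'audio/mp4') {
  for (const name of variants) {
    if (self.MediaSource && self.MediaSource.isTypeSupported(`${container};codecs="${name}"`)) {
      return name;
    }
  }
  return variants[0]; // fall back to the first spelling if nothing matched
}

// Per the comment above: some browsers report fLaC as supported and then fail,
// so the lowercase spelling is probed first.
const flacName = pickSupportedCodecName(['flac', 'fLaC', 'FLAC']);
```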
@@ -2432,16 +2648,6 @@ function convertAVC1ToAVCOTI(codec) {
 }
 return codec;
 }
- function getM2TSSupportedAudioTypes(preferManagedMediaSource) {
- const MediaSource = getMediaSource(preferManagedMediaSource) || {
- isTypeSupported: () => false
- };
- return {
- mpeg: MediaSource.isTypeSupported('audio/mpeg'),
- mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
- ac3: false
- };
- }

 const MASTER_PLAYLIST_REGEX = /#EXT-X-STREAM-INF:([^\r\n]*)(?:[\r\n](?:#[^\r\n]*)?)*([^\r\n]+)|#EXT-X-(SESSION-DATA|SESSION-KEY|DEFINE|CONTENT-STEERING|START):([^\r\n]*)[\r\n]+/g;
 const MASTER_PLAYLIST_MEDIA_REGEX = /#EXT-X-MEDIA:(.*)/g;
@@ -3242,10 +3448,10 @@ class PlaylistLoader {
 const loaderContext = loader.context;
 if (loaderContext && loaderContext.url === context.url && loaderContext.level === context.level) {
 // same URL can't overlap
-
+ logger.trace('[playlist-loader]: playlist request ongoing');
 return;
 }
-
+ logger.log(`[playlist-loader]: aborting previous loader for type: ${context.type}`);
 loader.abort();
 }

@@ -3355,7 +3561,7 @@ class PlaylistLoader {
 // alt audio rendition in which quality levels (main)
 // contains both audio+video. but with mixed audio track not signaled
 if (!embeddedAudioFound && levels[0].audioCodec && !levels[0].attrs.AUDIO) {
-
+ logger.log('[playlist-loader]: audio codec signaled in quality level, but no embedded audio track signaled, create one');
 audioTracks.unshift({
 type: 'main',
 name: 'main',
@@ -3454,7 +3660,7 @@ class PlaylistLoader {
 message += ` id: ${context.id} group-id: "${context.groupId}"`;
 }
 const error = new Error(message);
-
+ logger.warn(`[playlist-loader]: ${message}`);
 let details = ErrorDetails.UNKNOWN;
 let fatal = false;
 const loader = this.getInternalLoader(context);
@@ -3692,423 +3898,24 @@ var MetadataSchema = {
 emsg: "https://aomedia.org/emsg/ID3"
 };

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- return undefined;
+ const MIN_CUE_DURATION = 0.25;
+ function getCueClass() {
+ if (typeof self === 'undefined') return undefined;
+ return self.VTTCue || self.TextTrackCue;
+ }
+ function createCueWithDataFields(Cue, startTime, endTime, data, type) {
+ let cue = new Cue(startTime, endTime, '');
+ try {
+ cue.value = data;
+ if (type) {
+ cue.type = type;
+ }
+ } catch (e) {
+ cue = new Cue(startTime, endTime, JSON.stringify(type ? _objectSpread2({
+ type
+ }, data) : data));
 }
-
- const privateData = new Uint8Array(frame.data.subarray(owner.length + 1));
- return {
- key: frame.type,
- info: owner,
- data: privateData.buffer
- };
- }
-
- /**
- * Decodes an ID3 text frame
- *
- * @param frame - the ID3 text frame
- *
- * @returns The decoded ID3 text frame
- *
- * @internal
- *
- * @group ID3
- */
- function decodeId3TextFrame(frame) {
- if (frame.size < 2) {
- return undefined;
- }
- if (frame.type === 'TXXX') {
- /*
- Format:
- [0]   = {Text Encoding}
- [1-?] = {Description}\0{Value}
- */
- let index = 1;
- const description = utf8ArrayToStr(frame.data.subarray(index), true);
- index += description.length + 1;
- const value = utf8ArrayToStr(frame.data.subarray(index));
- return {
- key: frame.type,
- info: description,
- data: value
- };
- }
- /*
- Format:
- [0]   = {Text Encoding}
- [1-?] = {Value}
- */
- const text = utf8ArrayToStr(frame.data.subarray(1));
- return {
- key: frame.type,
- info: '',
- data: text
- };
- }
-
- /**
- * Decode a URL frame
- *
- * @param frame - the ID3 URL frame
- *
- * @returns The decoded ID3 URL frame
- *
- * @internal
- *
- * @group ID3
- */
- function decodeId3UrlFrame(frame) {
- if (frame.type === 'WXXX') {
- /*
- Format:
- [0]   = {Text Encoding}
- [1-?] = {Description}\0{URL}
- */
- if (frame.size < 2) {
- return undefined;
- }
- let index = 1;
- const description = utf8ArrayToStr(frame.data.subarray(index), true);
- index += description.length + 1;
- const value = utf8ArrayToStr(frame.data.subarray(index));
- return {
- key: frame.type,
- info: description,
- data: value
- };
- }
- /*
- Format:
- [0-?] = {URL}
- */
- const url = utf8ArrayToStr(frame.data);
- return {
- key: frame.type,
- info: '',
- data: url
- };
- }
-
- function toUint8(data, offset = 0, length = Infinity) {
- return view(data, offset, length, Uint8Array);
- }
- function view(data, offset, length, Type) {
- const buffer = unsafeGetArrayBuffer(data);
- let bytesPerElement = 1;
- if ('BYTES_PER_ELEMENT' in Type) {
- bytesPerElement = Type.BYTES_PER_ELEMENT;
- }
- // Absolute end of the |data| view within |buffer|.
- const dataOffset = isArrayBufferView(data) ? data.byteOffset : 0;
- const dataEnd = (dataOffset + data.byteLength) / bytesPerElement;
- // Absolute start of the result within |buffer|.
- const rawStart = (dataOffset + offset) / bytesPerElement;
- const start = Math.floor(Math.max(0, Math.min(rawStart, dataEnd)));
- // Absolute end of the result within |buffer|.
- const end = Math.floor(Math.min(start + Math.max(length, 0), dataEnd));
- return new Type(buffer, start, end - start);
- }
- function unsafeGetArrayBuffer(view) {
- if (view instanceof ArrayBuffer) {
- return view;
- } else {
- return view.buffer;
- }
- }
- function isArrayBufferView(obj) {
- return obj && obj.buffer instanceof ArrayBuffer && obj.byteLength !== undefined && obj.byteOffset !== undefined;
- }
-
- function toArrayBuffer(view) {
- if (view instanceof ArrayBuffer) {
- return view;
- } else {
- if (view.byteOffset == 0 && view.byteLength == view.buffer.byteLength) {
- // This is a TypedArray over the whole buffer.
- return view.buffer;
- }
- // This is a 'view' on the buffer. Create a new buffer that only contains
- // the data. Note that since this isn't an ArrayBuffer, the 'new' call
- // will allocate a new buffer to hold the copy.
- return new Uint8Array(view).buffer;
- }
- }
-
- function decodeId3ImageFrame(frame) {
- const metadataFrame = {
- key: frame.type,
- description: '',
- data: '',
- mimeType: null,
- pictureType: null
- };
- const utf8Encoding = 0x03;
- if (frame.size < 2) {
- return undefined;
- }
- if (frame.data[0] !== utf8Encoding) {
- console.log('Ignore frame with unrecognized character ' + 'encoding');
- return undefined;
- }
- const mimeTypeEndIndex = frame.data.subarray(1).indexOf(0);
- if (mimeTypeEndIndex === -1) {
- return undefined;
- }
- const mimeType = utf8ArrayToStr(toUint8(frame.data, 1, mimeTypeEndIndex));
- const pictureType = frame.data[2 + mimeTypeEndIndex];
- const descriptionEndIndex = frame.data.subarray(3 + mimeTypeEndIndex).indexOf(0);
- if (descriptionEndIndex === -1) {
- return undefined;
- }
- const description = utf8ArrayToStr(toUint8(frame.data, 3 + mimeTypeEndIndex, descriptionEndIndex));
- let data;
- if (mimeType === '-->') {
- data = utf8ArrayToStr(toUint8(frame.data, 4 + mimeTypeEndIndex + descriptionEndIndex));
- } else {
- data = toArrayBuffer(frame.data.subarray(4 + mimeTypeEndIndex + descriptionEndIndex));
- }
- metadataFrame.mimeType = mimeType;
- metadataFrame.pictureType = pictureType;
- metadataFrame.description = description;
- metadataFrame.data = data;
- return metadataFrame;
- }
-
- /**
- * Decode an ID3 frame.
- *
- * @param frame - the ID3 frame
- *
- * @returns The decoded ID3 frame
- *
- * @internal
- *
- * @group ID3
- */
- function decodeId3Frame(frame) {
- if (frame.type === 'PRIV') {
- return decodeId3PrivFrame(frame);
- } else if (frame.type[0] === 'W') {
- return decodeId3UrlFrame(frame);
- } else if (frame.type === 'APIC') {
- return decodeId3ImageFrame(frame);
- }
- return decodeId3TextFrame(frame);
- }
-
- /**
- * Read ID3 size
- *
- * @param data - The data to read from
- * @param offset - The offset at which to start reading
- *
- * @returns The size
- *
- * @internal
- *
- * @group ID3
- */
- function readId3Size(data, offset) {
- let size = 0;
- size = (data[offset] & 0x7f) << 21;
- size |= (data[offset + 1] & 0x7f) << 14;
- size |= (data[offset + 2] & 0x7f) << 7;
- size |= data[offset + 3] & 0x7f;
- return size;
- }
-
- /**
- * Returns the data of an ID3 frame.
- *
- * @param data - The data to read from
- *
- * @returns The data of the ID3 frame
- *
- * @internal
- *
- * @group ID3
- */
- function getId3FrameData(data) {
- /*
- Frame ID       $xx xx xx xx (four characters)
- Size           $xx xx xx xx
- Flags          $xx xx
- */
- const type = String.fromCharCode(data[0], data[1], data[2], data[3]);
- const size = readId3Size(data, 4);
- // skip frame id, size, and flags
- const offset = 10;
- return {
- type,
- size,
- data: data.subarray(offset, offset + size)
- };
- }
-
- /**
- * Returns true if an ID3 footer can be found at offset in data
- *
- * @param data - The data to search in
- * @param offset - The offset at which to start searching
- *
- * @returns `true` if an ID3 footer is found
- *
- * @internal
- *
- * @group ID3
- */
- function isId3Footer(data, offset) {
- /*
- * The footer is a copy of the header, but with a different identifier
- */
- if (offset + 10 <= data.length) {
- // look for '3DI' identifier
- if (data[offset] === 0x33 && data[offset + 1] === 0x44 && data[offset + 2] === 0x49) {
- // check version is within range
- if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
- // check size is within range
- if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
- return true;
- }
- }
- }
- }
- return false;
- }
-
- /**
- * Returns true if an ID3 header can be found at offset in data
- *
- * @param data - The data to search in
- * @param offset - The offset at which to start searching
- *
- * @returns `true` if an ID3 header is found
- *
- * @internal
- *
- * @group ID3
- */
- function isId3Header(data, offset) {
- /*
- * http://id3.org/id3v2.3.0
- * [0] = 'I'
- * [1] = 'D'
- * [2] = '3'
- * [3,4] = {Version}
- * [5] = {Flags}
- * [6-9] = {ID3 Size}
- *
- * An ID3v2 tag can be detected with the following pattern:
- *  $49 44 33 yy yy xx zz zz zz zz
- * Where yy is less than $FF, xx is the 'flags' byte and zz is less than $80
- */
- if (offset + 10 <= data.length) {
- // look for 'ID3' identifier
- if (data[offset] === 0x49 && data[offset + 1] === 0x44 && data[offset + 2] === 0x33) {
- // check version is within range
- if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
- // check size is within range
- if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
- return true;
- }
- }
- }
- }
- return false;
- }
-
- const HEADER_FOOTER_SIZE = 10;
- const FRAME_SIZE = 10;
- /**
- * Returns an array of ID3 frames found in all the ID3 tags in the id3Data
- *
- * @param id3Data - The ID3 data containing one or more ID3 tags
- *
- * @returns Array of ID3 frame objects
- *
- * @group ID3
- *
- * @beta
- */
- function getId3Frames(id3Data) {
- let offset = 0;
- const frames = [];
- while (isId3Header(id3Data, offset)) {
- const size = readId3Size(id3Data, offset + 6);
- if (id3Data[offset + 5] >> 6 & 1) {
- // skip extended header
- offset += HEADER_FOOTER_SIZE;
- }
- // skip past ID3 header
- offset += HEADER_FOOTER_SIZE;
- const end = offset + size;
- // loop through frames in the ID3 tag
- while (offset + FRAME_SIZE < end) {
- const frameData = getId3FrameData(id3Data.subarray(offset));
- const frame = decodeId3Frame(frameData);
- if (frame) {
- frames.push(frame);
- }
- // skip frame header and frame data
- offset += frameData.size + HEADER_FOOTER_SIZE;
- }
- if (isId3Footer(id3Data, offset)) {
- offset += HEADER_FOOTER_SIZE;
- }
- }
- return frames;
- }
-
- /**
- * Returns true if the ID3 frame is an Elementary Stream timestamp frame
- *
- * @param frame - the ID3 frame
- *
- * @returns `true` if the ID3 frame is an Elementary Stream timestamp frame
- *
- * @internal
- *
- * @group ID3
- */
- function isId3TimestampFrame(frame) {
- return frame && frame.key === 'PRIV' && frame.info === 'com.apple.streaming.transportStreamTimestamp';
- }
-
- const MIN_CUE_DURATION = 0.25;
- function getCueClass() {
- if (typeof self === 'undefined') return undefined;
- return self.VTTCue || self.TextTrackCue;
- }
- function createCueWithDataFields(Cue, startTime, endTime, data, type) {
- let cue = new Cue(startTime, endTime, '');
- try {
- cue.value = data;
- if (type) {
- cue.type = type;
- }
- } catch (e) {
- cue = new Cue(startTime, endTime, JSON.stringify(type ? _objectSpread2({
- type
- }, data) : data));
- }
- return cue;
+ return cue;
 }

 // VTTCue latest draft allows an infinite duration, fallback
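The restored `createCueWithDataFields` above first tries to attach the decoded frame as structured data (`cue.value`, plus `cue.type`) and falls back to serializing it into the cue payload if the runtime rejects that. A usage sketch under those assumptions (`metadataTrack` and the sample frame are illustrative, not part of the bundle):

```js
// Build an ID3 metadata cue for a frame decoded by getID3Frames().
const Cue = getCueClass(); // self.VTTCue || self.TextTrackCue
if (Cue) {
  const frame = { key: 'TXXX', info: 'segment', data: 'ad-break-1' }; // example frame
  const cue = createCueWithDataFields(Cue, 10, 15.2, frame, 'org.id3');
  metadataTrack.addCue(cue); // metadataTrack: a 'metadata' TextTrack created elsewhere
}
```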
@@ -4173,10 +3980,11 @@ class ID3TrackController {
 this.media = data.media;
 }
 onMediaDetaching() {
- if (this.id3Track) {
-
- this.id3Track = null;
+ if (!this.id3Track) {
+ return;
 }
+ clearCurrentCues(this.id3Track);
+ this.id3Track = null;
 this.media = null;
 this.dateRangeCuesAppended = {};
 }
@@ -4235,7 +4043,7 @@ class ID3TrackController {
 if (type === MetadataSchema.emsg && !enableEmsgMetadataCues || !enableID3MetadataCues) {
 continue;
 }
- const frames =
+ const frames = getID3Frames(samples[i].data);
 if (frames) {
 const startTime = samples[i].pts;
 let endTime = startTime + samples[i].duration;
@@ -4249,7 +4057,7 @@ class ID3TrackController {
 for (let j = 0; j < frames.length; j++) {
 const frame = frames[j];
 // Safari doesn't put the timestamp frame in the TextTrack
- if (!
+ if (!isTimeStampFrame(frame)) {
 // add a bounds to any unbounded cues
 this.updateId3CueEnds(startTime, type);
 const cue = createCueWithDataFields(Cue, startTime, endTime, frame, type);
@@ -4417,47 +4225,7 @@ class LatencyController {
 this.currentTime = 0;
 this.stallCount = 0;
 this._latency = null;
- this.
- const {
- media,
- levelDetails
- } = this;
- if (!media || !levelDetails) {
- return;
- }
- this.currentTime = media.currentTime;
- const latency = this.computeLatency();
- if (latency === null) {
- return;
- }
- this._latency = latency;
-
- // Adapt playbackRate to meet target latency in low-latency mode
- const {
- lowLatencyMode,
- maxLiveSyncPlaybackRate
- } = this.config;
- if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1 || !levelDetails.live) {
- return;
- }
- const targetLatency = this.targetLatency;
- if (targetLatency === null) {
- return;
- }
- const distanceFromTarget = latency - targetLatency;
- // Only adjust playbackRate when within one target duration of targetLatency
- // and more than one second from under-buffering.
- // Playback further than one target duration from target can be considered DVR playback.
- const liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration);
- const inLiveRange = distanceFromTarget < liveMinLatencyDuration;
- if (inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) {
- const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
- const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20;
- media.playbackRate = Math.min(max, Math.max(1, rate));
- } else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
- media.playbackRate = 1;
- }
- };
+ this.timeupdateHandler = () => this.timeupdate();
 this.hls = hls;
 this.config = hls.config;
 this.registerListeners();
@@ -4549,7 +4317,7 @@ class LatencyController {
 this.onMediaDetaching();
 this.levelDetails = null;
 // @ts-ignore
- this.hls = null;
+ this.hls = this.timeupdateHandler = null;
 }
 registerListeners() {
 this.hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
@@ -4567,11 +4335,11 @@ class LatencyController {
 }
 onMediaAttached(event, data) {
 this.media = data.media;
- this.media.addEventListener('timeupdate', this.
+ this.media.addEventListener('timeupdate', this.timeupdateHandler);
 }
 onMediaDetaching() {
 if (this.media) {
- this.media.removeEventListener('timeupdate', this.
+ this.media.removeEventListener('timeupdate', this.timeupdateHandler);
 this.media = null;
 }
 }
@@ -4585,10 +4353,10 @@ class LatencyController {
 }) {
 this.levelDetails = details;
 if (details.advanced) {
- this.
+ this.timeupdate();
 }
 if (!details.live && this.media) {
- this.media.removeEventListener('timeupdate', this.
+ this.media.removeEventListener('timeupdate', this.timeupdateHandler);
 }
 }
 onError(event, data) {
@@ -4598,7 +4366,48 @@ class LatencyController {
 }
 this.stallCount++;
 if ((_this$levelDetails = this.levelDetails) != null && _this$levelDetails.live) {
-
+ logger.warn('[playback-rate-controller]: Stall detected, adjusting target latency');
+ }
+ }
+ timeupdate() {
+ const {
+ media,
+ levelDetails
+ } = this;
+ if (!media || !levelDetails) {
+ return;
+ }
+ this.currentTime = media.currentTime;
+ const latency = this.computeLatency();
+ if (latency === null) {
+ return;
+ }
+ this._latency = latency;
+
+ // Adapt playbackRate to meet target latency in low-latency mode
+ const {
+ lowLatencyMode,
+ maxLiveSyncPlaybackRate
+ } = this.config;
+ if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1 || !levelDetails.live) {
+ return;
+ }
+ const targetLatency = this.targetLatency;
+ if (targetLatency === null) {
+ return;
+ }
+ const distanceFromTarget = latency - targetLatency;
+ // Only adjust playbackRate when within one target duration of targetLatency
+ // and more than one second from under-buffering.
+ // Playback further than one target duration from target can be considered DVR playback.
+ const liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration);
+ const inLiveRange = distanceFromTarget < liveMinLatencyDuration;
+ if (inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) {
+ const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
+ const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20;
+ media.playbackRate = Math.min(max, Math.max(1, rate));
+ } else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
+ media.playbackRate = 1;
 }
 }
 estimateLiveEdge() {
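The restored `timeupdate` body drives live catch-up with a logistic curve, quantized to 0.05 steps and clamped to `[1, min(2, maxLiveSyncPlaybackRate)]`. A quick numeric check of the same expression as the `rate` line above (standalone):

```js
// A stream drifting 1 s behind target with no edge stall speeds up to 1.35x.
const distanceFromTarget = 1.0; // seconds behind target latency (example value)
const edgeStalled = 0;          // example value
const rate =
  Math.round((2 / (1 + Math.exp(-0.75 * distanceFromTarget - edgeStalled))) * 20) / 20;
console.log(rate);                           // 1.35
console.log(Math.min(2, Math.max(1, rate))); // 1.35 after clamping into [1, 2]
```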
@@ -5373,13 +5182,18 @@ var ErrorActionFlags = {
 MoveAllAlternatesMatchingHDCP: 2,
 SwitchToSDR: 4
 }; // Reserved for future use
- class ErrorController
+ class ErrorController {
 constructor(hls) {
- super('error-controller', hls.logger);
 this.hls = void 0;
 this.playlistError = 0;
 this.penalizedRenditions = {};
+ this.log = void 0;
+ this.warn = void 0;
+ this.error = void 0;
 this.hls = hls;
+ this.log = logger.log.bind(logger, `[info]:`);
+ this.warn = logger.warn.bind(logger, `[warning]:`);
+ this.error = logger.error.bind(logger, `[error]:`);
 this.registerListeners();
 }
 registerListeners() {
@@ -5731,13 +5545,16 @@ class ErrorController extends Logger {
 }
 }

- class BasePlaylistController
+ class BasePlaylistController {
 constructor(hls, logPrefix) {
- super(logPrefix, hls.logger);
 this.hls = void 0;
 this.timer = -1;
 this.requestScheduled = -1;
 this.canLoad = false;
+ this.log = void 0;
+ this.warn = void 0;
+ this.log = logger.log.bind(logger, `${logPrefix}:`);
+ this.warn = logger.warn.bind(logger, `${logPrefix}:`);
 this.hls = hls;
 }
 destroy() {
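With the canary's `Logger` base class gone, 1.5.8 controllers get their `log`/`warn` methods by binding the global logger with a fixed prefix, exactly as in the constructor above. A tiny sketch of what that binding does:

```js
// logger.log.bind(logger, '[level-controller]:') pre-fills the first argument,
// so every call comes out prefixed.
const log = logger.log.bind(logger, '[level-controller]:');
log('switching to level 3');
// equivalent to: logger.log('[level-controller]:', 'switching to level 3')
```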
@@ -5770,7 +5587,7 @@ class BasePlaylistController extends Logger {
 try {
 uri = new self.URL(attr.URI, previous.url).href;
 } catch (error) {
-
+ logger.warn(`Could not construct new URL for Rendition Report: ${error}`);
 uri = attr.URI || '';
 }
 // Use exact match. Otherwise, the last partial match, if any, will be used
|
|
5858
5675
|
const cdnAge = lastAdvanced + details.ageHeader;
|
5859
5676
|
let currentGoal = Math.min(cdnAge - details.partTarget, details.targetduration * 1.5);
|
5860
5677
|
if (currentGoal > 0) {
|
5861
|
-
if (
|
5862
|
-
// Omit segment and part directives when the last response was more than 3 target durations ago,
|
5863
|
-
this.log(`Playlist last advanced ${lastAdvanced.toFixed(2)}s ago. Omitting segment and part directives.`);
|
5864
|
-
msn = undefined;
|
5865
|
-
part = undefined;
|
5866
|
-
} else if (previousDetails != null && previousDetails.tuneInGoal && cdnAge - details.partTarget > previousDetails.tuneInGoal) {
|
5678
|
+
if (previousDetails && currentGoal > previousDetails.tuneInGoal) {
|
5867
5679
|
// If we attempted to get the next or latest playlist update, but currentGoal increased,
|
5868
5680
|
// then we either can't catchup, or the "age" header cannot be trusted.
|
5869
5681
|
this.warn(`CDN Tune-in goal increased from: ${previousDetails.tuneInGoal} to: ${currentGoal} with playlist age: ${details.age}`);
|
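The rewritten condition above only warns when the computed catch-up goal grows between playlist loads. A simplified, hedged sketch of that check (the interfaces are stand-ins, not the real LevelDetails type):

```ts
// Hedged sketch of the tune-in goal check in the hunk above.
interface PlaylistAgeInfo {
  ageHeader: number;      // seconds, from the HTTP Age response header
  partTarget: number;     // EXT-X-PART-INF PART-TARGET, seconds
  targetduration: number; // EXT-X-TARGETDURATION, seconds
  tuneInGoal?: number;    // goal computed on the previous playlist load
}

// Returns true when the catch-up goal grew since the last load, i.e. we either
// cannot catch up or the Age header cannot be trusted (matching the warning above).
function tuneInGoalIncreased(
  lastAdvanced: number, // seconds since the playlist last advanced
  details: PlaylistAgeInfo,
  previousDetails?: PlaylistAgeInfo,
): boolean {
  const cdnAge = lastAdvanced + details.ageHeader;
  const currentGoal = Math.min(cdnAge - details.partTarget, details.targetduration * 1.5);
  return (
    currentGoal > 0 &&
    previousDetails !== undefined &&
    previousDetails.tuneInGoal !== undefined &&
    currentGoal > previousDetails.tuneInGoal
  );
}
```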
@@ -6309,9 +6121,8 @@ function getCodecTiers(levels, audioTracksByGroup, minAutoLevel, maxAutoLevel) {
  }, {});
  }

- class AbrController
+ class AbrController {
  constructor(_hls) {
- super('abr', _hls.logger);
  this.hls = void 0;
  this.lastLevelLoadSec = 0;
  this.lastLoadedFragLevel = -1;
@@ -6425,7 +6236,7 @@ class AbrController extends Logger {
  this.resetEstimator(nextLoadLevelBitrate);
  }
  this.clearTimer();
-
+ logger.warn(`[abr] Fragment ${frag.sn}${part ? ' part ' + part.index : ''} of level ${frag.level} is loading too slowly;
  Time to underbuffer: ${bufferStarvationDelay.toFixed(3)} s
  Estimated load time for current fragment: ${fragLoadedDelay.toFixed(3)} s
  Estimated load time for down switch fragment: ${fragLevelNextLoadedDelay.toFixed(3)} s
@@ -6445,7 +6256,7 @@ class AbrController extends Logger {
  }
  resetEstimator(abrEwmaDefaultEstimate) {
  if (abrEwmaDefaultEstimate) {
-
+ logger.log(`setting initial bwe to ${abrEwmaDefaultEstimate}`);
  this.hls.config.abrEwmaDefaultEstimate = abrEwmaDefaultEstimate;
  }
  this.firstSelection = -1;
@@ -6677,7 +6488,7 @@ class AbrController extends Logger {
  }
  const firstLevel = this.hls.firstLevel;
  const clamped = Math.min(Math.max(firstLevel, minAutoLevel), maxAutoLevel);
-
+ logger.warn(`[abr] Could not find best starting auto level. Defaulting to first in playlist ${firstLevel} clamped to ${clamped}`);
  return clamped;
  }
  get forcedAutoLevel() {
@@ -6723,9 +6534,6 @@ class AbrController extends Logger {
  partCurrent,
  hls
  } = this;
- if (hls.levels.length <= 1) {
- return hls.loadLevel;
- }
  const {
  maxAutoLevel,
  config,
@@ -6758,13 +6566,13 @@ class AbrController extends Logger {
  // cap maxLoadingDelay and ensure it is not bigger 'than bitrate test' frag duration
  const maxLoadingDelay = currentFragDuration ? Math.min(currentFragDuration, config.maxLoadingDelay) : config.maxLoadingDelay;
  maxStarvationDelay = maxLoadingDelay - bitrateTestDelay;
-
+ logger.info(`[abr] bitrate test took ${Math.round(1000 * bitrateTestDelay)}ms, set first fragment max fetchDuration to ${Math.round(1000 * maxStarvationDelay)} ms`);
  // don't use conservative factor on bitrate test
  bwFactor = bwUpFactor = 1;
  }
  }
  const bestLevel = this.findBestLevel(avgbw, minAutoLevel, maxAutoLevel, bufferStarvationDelay, maxStarvationDelay, bwFactor, bwUpFactor);
-
+ logger.info(`[abr] ${bufferStarvationDelay ? 'rebuffering expected' : 'buffer is empty'}, optimal quality level ${bestLevel}`);
  if (bestLevel > -1) {
  return bestLevel;
  }
@@ -6838,7 +6646,7 @@ class AbrController extends Logger {
  currentVideoRange = preferHDR ? videoRanges[videoRanges.length - 1] : videoRanges[0];
  currentFrameRate = minFramerate;
  currentBw = Math.max(currentBw, minBitrate);
-
+ logger.log(`[abr] picked start tier ${JSON.stringify(startTier)}`);
  } else {
  currentCodecSet = level == null ? void 0 : level.codecSet;
  currentVideoRange = level == null ? void 0 : level.videoRange;
@@ -6891,9 +6699,9 @@ class AbrController extends Logger {
  const forcedAutoLevel = this.forcedAutoLevel;
  if (i !== loadLevel && (forcedAutoLevel === -1 || forcedAutoLevel !== loadLevel)) {
  if (levelsSkipped.length) {
-
+ logger.trace(`[abr] Skipped level(s) ${levelsSkipped.join(',')} of ${maxAutoLevel} max with CODECS and VIDEO-RANGE:"${levels[levelsSkipped[0]].codecs}" ${levels[levelsSkipped[0]].videoRange}; not compatible with "${level.codecs}" ${currentVideoRange}`);
  }
-
+ logger.info(`[abr] switch candidate:${selectionBaseLevel}->${i} adjustedbw(${Math.round(adjustedbw)})-bitrate=${Math.round(adjustedbw - bitrate)} ttfb:${ttfbEstimateSec.toFixed(1)} avgDuration:${avgDuration.toFixed(1)} maxFetchDuration:${maxFetchDuration.toFixed(1)} fetchDuration:${fetchDuration.toFixed(1)} firstSelection:${firstSelection} codecSet:${currentCodecSet} videoRange:${currentVideoRange} hls.loadLevel:${loadLevel}`);
  }
  if (firstSelection) {
  this.firstSelection = i;
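The switch-candidate log above records the core ABR comparison: a candidate level is attractive when its fragment fetch duration, estimated from the adjusted bandwidth plus TTFB, fits inside the allowed fetch window. A compact sketch of that test with assumed names (not the actual findBestLevel signature):

```ts
// Illustrative ABR viability test (assumed names; not the hls.js findBestLevel signature).
function isLevelViable(
  levelBitrate: number,       // bits per second advertised by the level
  avgFragDuration: number,    // seconds per fragment
  bwEstimate: number,         // measured bandwidth in bits per second
  bwFactor: number,           // safety factor (<1 when being conservative)
  ttfbEstimateSec: number,    // estimated time to first byte in seconds
  maxFetchDuration: number,   // seconds we can afford before under-buffering
): boolean {
  const adjustedbw = bwFactor * bwEstimate;
  // Time to download one fragment at this level, plus connection setup.
  const fetchDuration = (levelBitrate * avgFragDuration) / adjustedbw + ttfbEstimateSec;
  return adjustedbw > levelBitrate && fetchDuration < maxFetchDuration;
}

// e.g. a 3 Mbps level, 4 s fragments, 6 Mbps estimate with a 0.8 factor and 0.2 s TTFB:
// fetchDuration = (3e6 * 4) / 4.8e6 + 0.2 = 2.7 s, viable if maxFetchDuration > 2.7
```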
@@ -6936,29 +6744,40 @@ class BufferHelper {
  * Return true if `media`'s buffered include `position`
  */
  static isBuffered(media, position) {
-
-
-
-
-
+ try {
+ if (media) {
+ const buffered = BufferHelper.getBuffered(media);
+ for (let i = 0; i < buffered.length; i++) {
+ if (position >= buffered.start(i) && position <= buffered.end(i)) {
+ return true;
+ }
  }
  }
+ } catch (error) {
+ // this is to catch
+ // InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
+ // This SourceBuffer has been removed from the parent media source
  }
  return false;
  }
  static bufferInfo(media, pos, maxHoleDuration) {
-
-
-
+ try {
+ if (media) {
+ const vbuffered = BufferHelper.getBuffered(media);
  const buffered = [];
-
+ let i;
+ for (i = 0; i < vbuffered.length; i++) {
  buffered.push({
  start: vbuffered.start(i),
  end: vbuffered.end(i)
  });
  }
- return
+ return this.bufferedInfo(buffered, pos, maxHoleDuration);
  }
+ } catch (error) {
+ // this is to catch
+ // InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
+ // This SourceBuffer has been removed from the parent media source
  }
  return {
  len: 0,
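The restored `isBuffered` wraps the `buffered` read in try/catch because reading `buffered` from a SourceBuffer that was removed from its MediaSource throws an InvalidStateError. The same defensive pattern, sketched independently of hls.js:

```ts
// Defensive read of TimeRanges, mirroring the try/catch rationale above.
// Works for both HTMLMediaElement and SourceBuffer, which share a `buffered` property.
function isPositionBuffered(source: { buffered: TimeRanges }, position: number): boolean {
  try {
    const buffered = source.buffered; // throws InvalidStateError on a removed SourceBuffer
    for (let i = 0; i < buffered.length; i++) {
      if (position >= buffered.start(i) && position <= buffered.end(i)) {
        return true;
      }
    }
  } catch {
    // Treat an unreadable buffer as "not buffered" rather than surfacing the error.
  }
  return false;
}
```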
@@ -6970,7 +6789,14 @@ class BufferHelper {
  static bufferedInfo(buffered, pos, maxHoleDuration) {
  pos = Math.max(0, pos);
  // sort on buffer.start/smaller end (IE does not always return sorted buffered range)
- buffered.sort((a, b)
+ buffered.sort(function (a, b) {
+ const diff = a.start - b.start;
+ if (diff) {
+ return diff;
+ } else {
+ return b.end - a.end;
+ }
+ });
  let buffered2 = [];
  if (maxHoleDuration) {
  // there might be some small holes between buffer time range
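The restored comparator sorts ranges by start (and by larger end on ties) before the hole-merging pass that follows this hunk. A self-contained sketch of merging ranges whose gaps are below `maxHoleDuration` (simplified; the real `bufferedInfo` also reports the range containing `pos`):

```ts
interface Range { start: number; end: number; }

// Merge ranges separated by holes smaller than maxHoleDuration (illustrative sketch).
function mergeSmallHoles(ranges: Range[], maxHoleDuration: number): Range[] {
  const sorted = [...ranges].sort((a, b) => (a.start - b.start) || (b.end - a.end));
  const merged: Range[] = [];
  for (const r of sorted) {
    const last = merged[merged.length - 1];
    if (last && r.start - last.end < maxHoleDuration) {
      // Treat the small gap as continuous buffer; extend the previous range.
      last.end = Math.max(last.end, r.end);
    } else {
      merged.push({ ...r });
    }
  }
  return merged;
}

// mergeSmallHoles([{ start: 0, end: 4 }, { start: 4.2, end: 8 }], 0.5)
// => [{ start: 0, end: 8 }]
```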
@@ -7037,7 +6863,7 @@ class BufferHelper {
  */
  static getBuffered(media) {
  try {
- return media.buffered
+ return media.buffered;
  } catch (e) {
  logger.log('failed to get media.buffered', e);
  return noopBuffered;
@@ -7062,22 +6888,24 @@ class BufferOperationQueue {
  this.executeNext(type);
  }
  }
+ insertAbort(operation, type) {
+ const queue = this.queues[type];
+ queue.unshift(operation);
+ this.executeNext(type);
+ }
  appendBlocker(type) {
-
-
-
- onStart: () => {},
- onComplete: () => {},
- onError: () => {}
- };
- this.append(operation, type);
+ let execute;
+ const promise = new Promise(resolve => {
+ execute = resolve;
  });
-
-
-
-
-
- }
+ const operation = {
+ execute,
+ onStart: () => {},
+ onComplete: () => {},
+ onError: () => {}
+ };
+ this.append(operation, type);
+ return promise;
  }
  executeNext(type) {
  const queue = this.queues[type];
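The restored `appendBlocker` enqueues an operation whose `execute` is simply a Promise resolver, so the queue stalls at the blocker until the owner finishes its blocking work and cycles the queue. A generic sketch of that promise-gate (illustrative class, not the hls.js BufferOperationQueue):

```ts
// Generic promise-gate over a FIFO of operations (illustrative, not the hls.js class).
type Operation = { execute: () => void };

class TinyOperationQueue {
  private queue: Operation[] = [];
  private busy = false;

  append(op: Operation): void {
    this.queue.push(op);
    if (!this.busy) {
      this.executeNext();
    }
  }

  // Enqueue an operation whose execute() merely resolves a promise: once it reaches
  // the head of the queue, the awaiting caller wakes up while the queue stays parked.
  appendBlocker(): Promise<void> {
    let execute!: () => void;
    const promise = new Promise<void>((resolve) => {
      execute = resolve;
    });
    this.append({ execute });
    return promise;
  }

  // The owner of the blocker calls this when its blocking work is done.
  shiftAndExecuteNext(): void {
    this.queue.shift();
    this.executeNext();
  }

  private executeNext(): void {
    const op = this.queue[0];
    if (!op) {
      this.busy = false;
      return;
    }
    this.busy = true;
    op.execute(); // real operations would advance the queue from their completion callback
  }
}
```

A caller mirroring `blockBuffers` would await `appendBlocker()` for each buffer type, run its blocking work, then call `shiftAndExecuteNext()` to resume appends.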
@@ -7109,9 +6937,8 @@ class BufferOperationQueue {
|
|
7109
6937
|
}
|
7110
6938
|
|
7111
6939
|
const VIDEO_CODEC_PROFILE_REPLACE = /(avc[1234]|hvc1|hev1|dvh[1e]|vp09|av01)(?:\.[^.,]+)+/;
|
7112
|
-
class BufferController
|
7113
|
-
constructor(hls
|
7114
|
-
super('buffer-controller', hls.logger);
|
6940
|
+
class BufferController {
|
6941
|
+
constructor(hls) {
|
7115
6942
|
// The level details used to determine duration, target-duration and live
|
7116
6943
|
this.details = null;
|
7117
6944
|
// cache the self generated object url to detect hijack of video tag
|
@@ -7121,7 +6948,6 @@ class BufferController extends Logger {
|
|
7121
6948
|
// References to event listeners for each SourceBuffer, so that they can be referenced for event removal
|
7122
6949
|
this.listeners = void 0;
|
7123
6950
|
this.hls = void 0;
|
7124
|
-
this.fragmentTracker = void 0;
|
7125
6951
|
// The number of BUFFER_CODEC events received before any sourceBuffers are created
|
7126
6952
|
this.bufferCodecEventsExpected = 0;
|
7127
6953
|
// The total number of BUFFER_CODEC events received
|
@@ -7132,10 +6958,6 @@ class BufferController extends Logger {
|
|
7132
6958
|
this.mediaSource = null;
|
7133
6959
|
// Last MP3 audio chunk appended
|
7134
6960
|
this.lastMpegAudioChunk = null;
|
7135
|
-
// Audio fragment blocked from appending until corresponding video appends or context changes
|
7136
|
-
this.blockedAudioAppend = null;
|
7137
|
-
// Keep track of video append position for unblocking audio
|
7138
|
-
this.lastVideoAppendEnd = 0;
|
7139
6961
|
this.appendSource = void 0;
|
7140
6962
|
// counters
|
7141
6963
|
this.appendErrors = {
|
@@ -7146,6 +6968,9 @@ class BufferController extends Logger {
|
|
7146
6968
|
this.tracks = {};
|
7147
6969
|
this.pendingTracks = {};
|
7148
6970
|
this.sourceBuffer = void 0;
|
6971
|
+
this.log = void 0;
|
6972
|
+
this.warn = void 0;
|
6973
|
+
this.error = void 0;
|
7149
6974
|
this._onEndStreaming = event => {
|
7150
6975
|
if (!this.hls) {
|
7151
6976
|
return;
|
@@ -7167,10 +6992,7 @@ class BufferController extends Logger {
|
|
7167
6992
|
this.log('Media source opened');
|
7168
6993
|
if (media) {
|
7169
6994
|
media.removeEventListener('emptied', this._onMediaEmptied);
|
7170
|
-
|
7171
|
-
if (durationAndRange) {
|
7172
|
-
this.updateMediaSource(durationAndRange);
|
7173
|
-
}
|
6995
|
+
this.updateMediaElementDuration();
|
7174
6996
|
this.hls.trigger(Events.MEDIA_ATTACHED, {
|
7175
6997
|
media,
|
7176
6998
|
mediaSource: mediaSource
|
@@ -7194,12 +7016,15 @@ class BufferController extends Logger {
|
|
7194
7016
|
_objectUrl
|
7195
7017
|
} = this;
|
7196
7018
|
if (mediaSrc !== _objectUrl) {
|
7197
|
-
|
7019
|
+
logger.error(`Media element src was set while attaching MediaSource (${_objectUrl} > ${mediaSrc})`);
|
7198
7020
|
}
|
7199
7021
|
};
|
7200
7022
|
this.hls = hls;
|
7201
|
-
|
7023
|
+
const logPrefix = '[buffer-controller]';
|
7202
7024
|
this.appendSource = isManagedMediaSource(getMediaSource(hls.config.preferManagedMediaSource));
|
7025
|
+
this.log = logger.log.bind(logger, logPrefix);
|
7026
|
+
this.warn = logger.warn.bind(logger, logPrefix);
|
7027
|
+
this.error = logger.error.bind(logger, logPrefix);
|
7203
7028
|
this._initSourceBuffer();
|
7204
7029
|
this.registerListeners();
|
7205
7030
|
}
|
@@ -7211,13 +7036,7 @@ class BufferController extends Logger {
|
|
7211
7036
|
this.details = null;
|
7212
7037
|
this.lastMpegAudioChunk = null;
|
7213
7038
|
// @ts-ignore
|
7214
|
-
this.hls =
|
7215
|
-
// @ts-ignore
|
7216
|
-
this._onMediaSourceOpen = this._onMediaSourceClose = null;
|
7217
|
-
// @ts-ignore
|
7218
|
-
this._onMediaSourceEnded = null;
|
7219
|
-
// @ts-ignore
|
7220
|
-
this._onStartStreaming = this._onEndStreaming = null;
|
7039
|
+
this.hls = null;
|
7221
7040
|
}
|
7222
7041
|
registerListeners() {
|
7223
7042
|
const {
|
@@ -7267,8 +7086,6 @@ class BufferController extends Logger {
|
|
7267
7086
|
audiovideo: 0
|
7268
7087
|
};
|
7269
7088
|
this.lastMpegAudioChunk = null;
|
7270
|
-
this.blockedAudioAppend = null;
|
7271
|
-
this.lastVideoAppendEnd = 0;
|
7272
7089
|
}
|
7273
7090
|
onManifestLoading() {
|
7274
7091
|
this.bufferCodecEventsExpected = this._bufferCodecEventsTotal = 0;
|
@@ -7351,34 +7168,34 @@ class BufferController extends Logger {
|
|
7351
7168
|
mediaSource.removeEventListener('startstreaming', this._onStartStreaming);
|
7352
7169
|
mediaSource.removeEventListener('endstreaming', this._onEndStreaming);
|
7353
7170
|
}
|
7354
|
-
this.mediaSource = null;
|
7355
|
-
this._objectUrl = null;
|
7356
|
-
}
|
7357
7171
|
|
7358
|
-
|
7359
|
-
|
7360
|
-
|
7361
|
-
|
7362
|
-
|
7363
|
-
|
7364
|
-
|
7172
|
+
// Detach properly the MediaSource from the HTMLMediaElement as
|
7173
|
+
// suggested in https://github.com/w3c/media-source/issues/53.
|
7174
|
+
if (media) {
|
7175
|
+
media.removeEventListener('emptied', this._onMediaEmptied);
|
7176
|
+
if (_objectUrl) {
|
7177
|
+
self.URL.revokeObjectURL(_objectUrl);
|
7178
|
+
}
|
7365
7179
|
|
7366
|
-
|
7367
|
-
|
7368
|
-
|
7369
|
-
|
7370
|
-
|
7371
|
-
|
7180
|
+
// clean up video tag src only if it's our own url. some external libraries might
|
7181
|
+
// hijack the video tag and change its 'src' without destroying the Hls instance first
|
7182
|
+
if (this.mediaSrc === _objectUrl) {
|
7183
|
+
media.removeAttribute('src');
|
7184
|
+
if (this.appendSource) {
|
7185
|
+
removeSourceChildren(media);
|
7186
|
+
}
|
7187
|
+
media.load();
|
7188
|
+
} else {
|
7189
|
+
this.warn('media|source.src was changed by a third party - skip cleanup');
|
7372
7190
|
}
|
7373
|
-
media.load();
|
7374
|
-
} else {
|
7375
|
-
this.warn('media|source.src was changed by a third party - skip cleanup');
|
7376
7191
|
}
|
7192
|
+
this.mediaSource = null;
|
7377
7193
|
this.media = null;
|
7194
|
+
this._objectUrl = null;
|
7195
|
+
this.bufferCodecEventsExpected = this._bufferCodecEventsTotal;
|
7196
|
+
this.pendingTracks = {};
|
7197
|
+
this.tracks = {};
|
7378
7198
|
}
|
7379
|
-
this.bufferCodecEventsExpected = this._bufferCodecEventsTotal;
|
7380
|
-
this.pendingTracks = {};
|
7381
|
-
this.tracks = {};
|
7382
7199
|
this.hls.trigger(Events.MEDIA_DETACHED, undefined);
|
7383
7200
|
}
|
7384
7201
|
onBufferReset() {
|
@@ -7386,7 +7203,6 @@ class BufferController extends Logger {
|
|
7386
7203
|
this.resetBuffer(type);
|
7387
7204
|
});
|
7388
7205
|
this._initSourceBuffer();
|
7389
|
-
this.hls.resumeBuffering();
|
7390
7206
|
}
|
7391
7207
|
resetBuffer(type) {
|
7392
7208
|
const sb = this.sourceBuffer[type];
|
@@ -7410,10 +7226,9 @@ class BufferController extends Logger {
|
|
7410
7226
|
const trackNames = Object.keys(data);
|
7411
7227
|
trackNames.forEach(trackName => {
|
7412
7228
|
if (sourceBufferCount) {
|
7413
|
-
var _track$buffer;
|
7414
7229
|
// check if SourceBuffer codec needs to change
|
7415
7230
|
const track = this.tracks[trackName];
|
7416
|
-
if (track && typeof
|
7231
|
+
if (track && typeof track.buffer.changeType === 'function') {
|
7417
7232
|
var _trackCodec;
|
7418
7233
|
const {
|
7419
7234
|
id,
|
@@ -7483,54 +7298,20 @@ class BufferController extends Logger {
|
|
7483
7298
|
};
|
7484
7299
|
operationQueue.append(operation, type, !!this.pendingTracks[type]);
|
7485
7300
|
}
|
7486
|
-
blockAudio(partOrFrag) {
|
7487
|
-
var _this$fragmentTracker;
|
7488
|
-
const pStart = partOrFrag.start;
|
7489
|
-
const pTime = pStart + partOrFrag.duration * 0.05;
|
7490
|
-
const atGap = ((_this$fragmentTracker = this.fragmentTracker.getAppendedFrag(pStart, PlaylistLevelType.MAIN)) == null ? void 0 : _this$fragmentTracker.gap) === true;
|
7491
|
-
if (atGap) {
|
7492
|
-
return;
|
7493
|
-
}
|
7494
|
-
const op = {
|
7495
|
-
execute: () => {
|
7496
|
-
var _this$fragmentTracker2;
|
7497
|
-
if (this.lastVideoAppendEnd > pTime || this.sourceBuffer.video && BufferHelper.isBuffered(this.sourceBuffer.video, pTime) || ((_this$fragmentTracker2 = this.fragmentTracker.getAppendedFrag(pTime, PlaylistLevelType.MAIN)) == null ? void 0 : _this$fragmentTracker2.gap) === true) {
|
7498
|
-
this.blockedAudioAppend = null;
|
7499
|
-
this.operationQueue.shiftAndExecuteNext('audio');
|
7500
|
-
}
|
7501
|
-
},
|
7502
|
-
onStart: () => {},
|
7503
|
-
onComplete: () => {},
|
7504
|
-
onError: () => {}
|
7505
|
-
};
|
7506
|
-
this.blockedAudioAppend = {
|
7507
|
-
op,
|
7508
|
-
frag: partOrFrag
|
7509
|
-
};
|
7510
|
-
this.operationQueue.append(op, 'audio', true);
|
7511
|
-
}
|
7512
|
-
unblockAudio() {
|
7513
|
-
const blockedAudioAppend = this.blockedAudioAppend;
|
7514
|
-
if (blockedAudioAppend) {
|
7515
|
-
this.blockedAudioAppend = null;
|
7516
|
-
this.operationQueue.unblockAudio(blockedAudioAppend.op);
|
7517
|
-
}
|
7518
|
-
}
|
7519
7301
|
onBufferAppending(event, eventData) {
|
7520
7302
|
const {
|
7303
|
+
hls,
|
7521
7304
|
operationQueue,
|
7522
7305
|
tracks
|
7523
7306
|
} = this;
|
7524
7307
|
const {
|
7525
7308
|
data,
|
7526
7309
|
type,
|
7527
|
-
parent,
|
7528
7310
|
frag,
|
7529
7311
|
part,
|
7530
7312
|
chunkMeta
|
7531
7313
|
} = eventData;
|
7532
7314
|
const chunkStats = chunkMeta.buffering[type];
|
7533
|
-
const sn = frag.sn;
|
7534
7315
|
const bufferAppendingStart = self.performance.now();
|
7535
7316
|
chunkStats.start = bufferAppendingStart;
|
7536
7317
|
const fragBuffering = frag.stats.buffering;
|
@@ -7553,36 +7334,7 @@ class BufferController extends Logger {
|
|
7553
7334
|
checkTimestampOffset = !this.lastMpegAudioChunk || chunkMeta.id === 1 || this.lastMpegAudioChunk.sn !== chunkMeta.sn;
|
7554
7335
|
this.lastMpegAudioChunk = chunkMeta;
|
7555
7336
|
}
|
7556
|
-
|
7557
|
-
// Block audio append until overlapping video append
|
7558
|
-
const videoSb = this.sourceBuffer.video;
|
7559
|
-
if (videoSb && sn !== 'initSegment') {
|
7560
|
-
const partOrFrag = part || frag;
|
7561
|
-
const blockedAudioAppend = this.blockedAudioAppend;
|
7562
|
-
if (type === 'audio' && parent !== 'main' && !this.blockedAudioAppend) {
|
7563
|
-
const pStart = partOrFrag.start;
|
7564
|
-
const pTime = pStart + partOrFrag.duration * 0.05;
|
7565
|
-
const vbuffered = videoSb.buffered;
|
7566
|
-
const vappending = this.operationQueue.current('video');
|
7567
|
-
if (!vbuffered.length && !vappending) {
|
7568
|
-
// wait for video before appending audio
|
7569
|
-
this.blockAudio(partOrFrag);
|
7570
|
-
} else if (!vappending && !BufferHelper.isBuffered(videoSb, pTime) && this.lastVideoAppendEnd < pTime) {
|
7571
|
-
// audio is ahead of video
|
7572
|
-
this.blockAudio(partOrFrag);
|
7573
|
-
}
|
7574
|
-
} else if (type === 'video') {
|
7575
|
-
const videoAppendEnd = partOrFrag.end;
|
7576
|
-
if (blockedAudioAppend) {
|
7577
|
-
const audioStart = blockedAudioAppend.frag.start;
|
7578
|
-
if (videoAppendEnd > audioStart || videoAppendEnd < this.lastVideoAppendEnd || BufferHelper.isBuffered(videoSb, audioStart)) {
|
7579
|
-
this.unblockAudio();
|
7580
|
-
}
|
7581
|
-
}
|
7582
|
-
this.lastVideoAppendEnd = videoAppendEnd;
|
7583
|
-
}
|
7584
|
-
}
|
7585
|
-
const fragStart = (part || frag).start;
|
7337
|
+
const fragStart = frag.start;
|
7586
7338
|
const operation = {
|
7587
7339
|
execute: () => {
|
7588
7340
|
chunkStats.executeStart = self.performance.now();
|
@@ -7591,7 +7343,7 @@ class BufferController extends Logger {
|
|
7591
7343
|
if (sb) {
|
7592
7344
|
const delta = fragStart - sb.timestampOffset;
|
7593
7345
|
if (Math.abs(delta) >= 0.1) {
|
7594
|
-
this.log(`Updating audio SourceBuffer timestampOffset to ${fragStart} (delta: ${delta}) sn: ${sn})`);
|
7346
|
+
this.log(`Updating audio SourceBuffer timestampOffset to ${fragStart} (delta: ${delta}) sn: ${frag.sn})`);
|
7595
7347
|
sb.timestampOffset = fragStart;
|
7596
7348
|
}
|
7597
7349
|
}
|
@@ -7658,21 +7410,22 @@ class BufferController extends Logger {
|
|
7658
7410
|
/* with UHD content, we could get loop of quota exceeded error until
|
7659
7411
|
browser is able to evict some data from sourcebuffer. Retrying can help recover.
|
7660
7412
|
*/
|
7661
|
-
this.warn(`Failed ${appendErrorCount}/${
|
7662
|
-
if (appendErrorCount >=
|
7413
|
+
this.warn(`Failed ${appendErrorCount}/${hls.config.appendErrorMaxRetry} times to append segment in "${type}" sourceBuffer`);
|
7414
|
+
if (appendErrorCount >= hls.config.appendErrorMaxRetry) {
|
7663
7415
|
event.fatal = true;
|
7664
7416
|
}
|
7665
7417
|
}
|
7666
|
-
|
7418
|
+
hls.trigger(Events.ERROR, event);
|
7667
7419
|
}
|
7668
7420
|
};
|
7669
7421
|
operationQueue.append(operation, type, !!this.pendingTracks[type]);
|
7670
7422
|
}
|
7671
|
-
|
7672
|
-
|
7673
|
-
|
7674
|
-
|
7675
|
-
|
7423
|
+
onBufferFlushing(event, data) {
|
7424
|
+
const {
|
7425
|
+
operationQueue
|
7426
|
+
} = this;
|
7427
|
+
const flushOperation = type => ({
|
7428
|
+
execute: this.removeExecutor.bind(this, type, data.startOffset, data.endOffset),
|
7676
7429
|
onStart: () => {
|
7677
7430
|
// logger.debug(`[buffer-controller]: Started flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
|
7678
7431
|
},
|
@@ -7685,22 +7438,12 @@ class BufferController extends Logger {
|
|
7685
7438
|
onError: error => {
|
7686
7439
|
this.warn(`Failed to remove from ${type} SourceBuffer`, error);
|
7687
7440
|
}
|
7688
|
-
};
|
7689
|
-
|
7690
|
-
|
7691
|
-
const {
|
7692
|
-
operationQueue
|
7693
|
-
} = this;
|
7694
|
-
const {
|
7695
|
-
type,
|
7696
|
-
startOffset,
|
7697
|
-
endOffset
|
7698
|
-
} = data;
|
7699
|
-
if (type) {
|
7700
|
-
operationQueue.append(this.getFlushOp(type, startOffset, endOffset), type);
|
7441
|
+
});
|
7442
|
+
if (data.type) {
|
7443
|
+
operationQueue.append(flushOperation(data.type), data.type);
|
7701
7444
|
} else {
|
7702
|
-
this.getSourceBufferTypes().forEach(
|
7703
|
-
operationQueue.append(
|
7445
|
+
this.getSourceBufferTypes().forEach(type => {
|
7446
|
+
operationQueue.append(flushOperation(type), type);
|
7704
7447
|
});
|
7705
7448
|
}
|
7706
7449
|
}
|
@@ -7747,9 +7490,6 @@ class BufferController extends Logger {
|
|
7747
7490
|
// on BUFFER_EOS mark matching sourcebuffer(s) as ended and trigger checkEos()
|
7748
7491
|
// an undefined data.type will mark all buffers as EOS.
|
7749
7492
|
onBufferEos(event, data) {
|
7750
|
-
if (data.type === 'video') {
|
7751
|
-
this.unblockAudio();
|
7752
|
-
}
|
7753
7493
|
const ended = this.getSourceBufferTypes().reduce((acc, type) => {
|
7754
7494
|
const sb = this.sourceBuffer[type];
|
7755
7495
|
if (sb && (!data.type || data.type === type)) {
|
@@ -7792,14 +7532,10 @@ class BufferController extends Logger {
|
|
7792
7532
|
return;
|
7793
7533
|
}
|
7794
7534
|
this.details = details;
|
7795
|
-
const durationAndRange = this.getDurationAndRange();
|
7796
|
-
if (!durationAndRange) {
|
7797
|
-
return;
|
7798
|
-
}
|
7799
7535
|
if (this.getSourceBufferTypes().length) {
|
7800
|
-
this.blockBuffers(
|
7536
|
+
this.blockBuffers(this.updateMediaElementDuration.bind(this));
|
7801
7537
|
} else {
|
7802
|
-
this.
|
7538
|
+
this.updateMediaElementDuration();
|
7803
7539
|
}
|
7804
7540
|
}
|
7805
7541
|
trimBuffers() {
|
@@ -7904,9 +7640,9 @@ class BufferController extends Logger {
|
|
7904
7640
|
* 'liveDurationInfinity` is set to `true`
|
7905
7641
|
* More details: https://github.com/video-dev/hls.js/issues/355
|
7906
7642
|
*/
|
7907
|
-
|
7643
|
+
updateMediaElementDuration() {
|
7908
7644
|
if (!this.details || !this.media || !this.mediaSource || this.mediaSource.readyState !== 'open') {
|
7909
|
-
return
|
7645
|
+
return;
|
7910
7646
|
}
|
7911
7647
|
const {
|
7912
7648
|
details,
|
@@ -7920,41 +7656,25 @@ class BufferController extends Logger {
|
|
7920
7656
|
if (details.live && hls.config.liveDurationInfinity) {
|
7921
7657
|
// Override duration to Infinity
|
7922
7658
|
mediaSource.duration = Infinity;
|
7923
|
-
|
7924
|
-
if (len && details.live && !!mediaSource.setLiveSeekableRange) {
|
7925
|
-
const start = Math.max(0, details.fragments[0].start);
|
7926
|
-
const end = Math.max(start, start + details.totalduration);
|
7927
|
-
return {
|
7928
|
-
duration: Infinity,
|
7929
|
-
start,
|
7930
|
-
end
|
7931
|
-
};
|
7932
|
-
}
|
7933
|
-
return {
|
7934
|
-
duration: Infinity
|
7935
|
-
};
|
7659
|
+
this.updateSeekableRange(details);
|
7936
7660
|
} else if (levelDuration > msDuration && levelDuration > mediaDuration || !isFiniteNumber(mediaDuration)) {
|
7937
|
-
|
7938
|
-
|
7939
|
-
|
7661
|
+
// levelDuration was the last value we set.
|
7662
|
+
// not using mediaSource.duration as the browser may tweak this value
|
7663
|
+
// only update Media Source duration if its value increase, this is to avoid
|
7664
|
+
// flushing already buffered portion when switching between quality level
|
7665
|
+
this.log(`Updating Media Source duration to ${levelDuration.toFixed(3)}`);
|
7666
|
+
mediaSource.duration = levelDuration;
|
7940
7667
|
}
|
7941
|
-
return null;
|
7942
7668
|
}
|
7943
|
-
|
7944
|
-
|
7945
|
-
|
7946
|
-
|
7947
|
-
|
7948
|
-
|
7949
|
-
|
7950
|
-
|
7951
|
-
|
7952
|
-
this.log(`Updating Media Source duration to ${duration.toFixed(3)}`);
|
7953
|
-
}
|
7954
|
-
this.mediaSource.duration = duration;
|
7955
|
-
if (start !== undefined && end !== undefined) {
|
7956
|
-
this.log(`Media Source duration is set to ${this.mediaSource.duration}. Setting seekable range to ${start}-${end}.`);
|
7957
|
-
this.mediaSource.setLiveSeekableRange(start, end);
|
7669
|
+
updateSeekableRange(levelDetails) {
|
7670
|
+
const mediaSource = this.mediaSource;
|
7671
|
+
const fragments = levelDetails.fragments;
|
7672
|
+
const len = fragments.length;
|
7673
|
+
if (len && levelDetails.live && mediaSource != null && mediaSource.setLiveSeekableRange) {
|
7674
|
+
const start = Math.max(0, fragments[0].start);
|
7675
|
+
const end = Math.max(start, start + levelDetails.totalduration);
|
7676
|
+
this.log(`Media Source duration is set to ${mediaSource.duration}. Setting seekable range to ${start}-${end}.`);
|
7677
|
+
mediaSource.setLiveSeekableRange(start, end);
|
7958
7678
|
}
|
7959
7679
|
}
|
7960
7680
|
checkPendingTracks() {
|
@@ -8140,7 +7860,6 @@ class BufferController extends Logger {
|
|
8140
7860
|
}
|
8141
7861
|
return;
|
8142
7862
|
}
|
8143
|
-
sb.ending = false;
|
8144
7863
|
sb.ended = false;
|
8145
7864
|
sb.appendBuffer(data);
|
8146
7865
|
}
|
@@ -8160,14 +7879,10 @@ class BufferController extends Logger {
|
|
8160
7879
|
|
8161
7880
|
// logger.debug(`[buffer-controller]: Blocking ${buffers} SourceBuffer`);
|
8162
7881
|
const blockingOperations = buffers.map(type => operationQueue.appendBlocker(type));
|
8163
|
-
|
8164
|
-
if (audioBlocked) {
|
8165
|
-
this.unblockAudio();
|
8166
|
-
}
|
8167
|
-
Promise.all(blockingOperations).then(result => {
|
7882
|
+
Promise.all(blockingOperations).then(() => {
|
8168
7883
|
// logger.debug(`[buffer-controller]: Blocking operation resolved; unblocking ${buffers} SourceBuffer`);
|
8169
7884
|
onUnblocked();
|
8170
|
-
buffers.forEach(
|
7885
|
+
buffers.forEach(type => {
|
8171
7886
|
const sb = this.sourceBuffer[type];
|
8172
7887
|
// Only cycle the queue if the SB is not updating. There's a bug in Chrome which sets the SB updating flag to
|
8173
7888
|
// true when changing the MediaSource duration (https://bugs.chromium.org/p/chromium/issues/detail?id=959359&can=2&q=mediasource%20duration)
|
@@ -8316,7 +8031,6 @@ class CapLevelController {
|
|
8316
8031
|
}
|
8317
8032
|
onMediaDetaching() {
|
8318
8033
|
this.stopCapping();
|
8319
|
-
this.media = null;
|
8320
8034
|
}
|
8321
8035
|
detectPlayerSize() {
|
8322
8036
|
if (this.media) {
|
@@ -8329,10 +8043,10 @@ class CapLevelController {
|
|
8329
8043
|
const hls = this.hls;
|
8330
8044
|
const maxLevel = this.getMaxLevel(levels.length - 1);
|
8331
8045
|
if (maxLevel !== this.autoLevelCapping) {
|
8332
|
-
|
8046
|
+
logger.log(`Setting autoLevelCapping to ${maxLevel}: ${levels[maxLevel].height}p@${levels[maxLevel].bitrate} for media ${this.mediaWidth}x${this.mediaHeight}`);
|
8333
8047
|
}
|
8334
8048
|
hls.autoLevelCapping = maxLevel;
|
8335
|
-
if (hls.
|
8049
|
+
if (hls.autoLevelCapping > this.autoLevelCapping && this.streamController) {
|
8336
8050
|
// if auto level capping has a higher value for the previous one, flush the buffer using nextLevelSwitch
|
8337
8051
|
// usually happen when the user go to the fullscreen mode.
|
8338
8052
|
this.streamController.nextLevelSwitch();
|
@@ -8468,11 +8182,9 @@ class FPSController {
|
|
8468
8182
|
}
|
8469
8183
|
registerListeners() {
|
8470
8184
|
this.hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
|
8471
|
-
this.hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
|
8472
8185
|
}
|
8473
8186
|
unregisterListeners() {
|
8474
8187
|
this.hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
|
8475
|
-
this.hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
|
8476
8188
|
}
|
8477
8189
|
destroy() {
|
8478
8190
|
if (this.timer) {
|
@@ -8494,9 +8206,6 @@ class FPSController {
|
|
8494
8206
|
this.timer = self.setInterval(this.checkFPSInterval.bind(this), config.fpsDroppedMonitoringPeriod);
|
8495
8207
|
}
|
8496
8208
|
}
|
8497
|
-
onMediaDetaching() {
|
8498
|
-
this.media = null;
|
8499
|
-
}
|
8500
8209
|
checkFPS(video, decodedFrames, droppedFrames) {
|
8501
8210
|
const currentTime = performance.now();
|
8502
8211
|
if (decodedFrames) {
|
@@ -8512,10 +8221,10 @@ class FPSController {
|
|
8512
8221
|
totalDroppedFrames: droppedFrames
|
8513
8222
|
});
|
8514
8223
|
if (droppedFPS > 0) {
|
8515
|
-
//
|
8224
|
+
// logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
|
8516
8225
|
if (currentDropped > hls.config.fpsDroppedMonitoringThreshold * currentDecoded) {
|
8517
8226
|
let currentLevel = hls.currentLevel;
|
8518
|
-
|
8227
|
+
logger.warn('drop FPS ratio greater than max allowed value for currentLevel: ' + currentLevel);
|
8519
8228
|
if (currentLevel > 0 && (hls.autoLevelCapping === -1 || hls.autoLevelCapping >= currentLevel)) {
|
8520
8229
|
currentLevel = currentLevel - 1;
|
8521
8230
|
hls.trigger(Events.FPS_DROP_LEVEL_CAPPING, {
|
@@ -8548,10 +8257,10 @@ class FPSController {
|
|
8548
8257
|
}
|
8549
8258
|
|
8550
8259
|
const PATHWAY_PENALTY_DURATION_MS = 300000;
|
8551
|
-
class ContentSteeringController
|
8260
|
+
class ContentSteeringController {
|
8552
8261
|
constructor(hls) {
|
8553
|
-
super('content-steering', hls.logger);
|
8554
8262
|
this.hls = void 0;
|
8263
|
+
this.log = void 0;
|
8555
8264
|
this.loader = null;
|
8556
8265
|
this.uri = null;
|
8557
8266
|
this.pathwayId = '.';
|
@@ -8566,6 +8275,7 @@ class ContentSteeringController extends Logger {
|
|
8566
8275
|
this.subtitleTracks = null;
|
8567
8276
|
this.penalizedPathways = {};
|
8568
8277
|
this.hls = hls;
|
8278
|
+
this.log = logger.log.bind(logger, `[content-steering]:`);
|
8569
8279
|
this.registerListeners();
|
8570
8280
|
}
|
8571
8281
|
registerListeners() {
|
@@ -8689,7 +8399,7 @@ class ContentSteeringController extends Logger {
|
|
8689
8399
|
errorAction.resolved = this.pathwayId !== errorPathway;
|
8690
8400
|
}
|
8691
8401
|
if (!errorAction.resolved) {
|
8692
|
-
|
8402
|
+
logger.warn(`Could not resolve ${data.details} ("${data.error.message}") with content-steering for Pathway: ${errorPathway} levels: ${levels ? levels.length : levels} priorities: ${JSON.stringify(pathwayPriority)} penalized: ${JSON.stringify(this.penalizedPathways)}`);
|
8693
8403
|
}
|
8694
8404
|
}
|
8695
8405
|
}
|
@@ -8860,7 +8570,7 @@ class ContentSteeringController extends Logger {
|
|
8860
8570
|
onSuccess: (response, stats, context, networkDetails) => {
|
8861
8571
|
this.log(`Loaded steering manifest: "${url}"`);
|
8862
8572
|
const steeringData = response.data;
|
8863
|
-
if (
|
8573
|
+
if (steeringData.VERSION !== 1) {
|
8864
8574
|
this.log(`Steering VERSION ${steeringData.VERSION} not supported!`);
|
8865
8575
|
return;
|
8866
8576
|
}
|
@@ -9768,7 +9478,7 @@ const hlsDefaultConfig = _objectSpread2(_objectSpread2({
|
|
9768
9478
|
});
|
9769
9479
|
function timelineConfig() {
|
9770
9480
|
return {
|
9771
|
-
cueHandler:
|
9481
|
+
cueHandler: Cues,
|
9772
9482
|
// used by timeline-controller
|
9773
9483
|
enableWebVTT: false,
|
9774
9484
|
// used by timeline-controller
|
@@ -9799,7 +9509,7 @@ function timelineConfig() {
|
|
9799
9509
|
/**
|
9800
9510
|
* @ignore
|
9801
9511
|
*/
|
9802
|
-
function mergeConfig(defaultConfig, userConfig
|
9512
|
+
function mergeConfig(defaultConfig, userConfig) {
|
9803
9513
|
if ((userConfig.liveSyncDurationCount || userConfig.liveMaxLatencyDurationCount) && (userConfig.liveSyncDuration || userConfig.liveMaxLatencyDuration)) {
|
9804
9514
|
throw new Error("Illegal hls.js config: don't mix up liveSyncDurationCount/liveMaxLatencyDurationCount and liveSyncDuration/liveMaxLatencyDuration");
|
9805
9515
|
}
|
@@ -9869,7 +9579,7 @@ function deepCpy(obj) {
|
|
9869
9579
|
/**
|
9870
9580
|
* @ignore
|
9871
9581
|
*/
|
9872
|
-
function enableStreamingMode(config
|
9582
|
+
function enableStreamingMode(config) {
|
9873
9583
|
const currentLoader = config.loader;
|
9874
9584
|
if (currentLoader !== FetchLoader && currentLoader !== XhrLoader) {
|
9875
9585
|
// If a developer has configured their own loader, respect that choice
|
@@ -9886,9 +9596,10 @@ function enableStreamingMode(config, logger) {
|
|
9886
9596
|
}
|
9887
9597
|
}
|
9888
9598
|
|
9599
|
+
let chromeOrFirefox;
|
9889
9600
|
class LevelController extends BasePlaylistController {
|
9890
9601
|
constructor(hls, contentSteeringController) {
|
9891
|
-
super(hls, 'level-controller');
|
9602
|
+
super(hls, '[level-controller]');
|
9892
9603
|
this._levels = [];
|
9893
9604
|
this._firstLevel = -1;
|
9894
9605
|
this._maxAutoLevel = -1;
|
@@ -9959,15 +9670,23 @@ class LevelController extends BasePlaylistController {
|
|
9959
9670
|
let videoCodecFound = false;
|
9960
9671
|
let audioCodecFound = false;
|
9961
9672
|
data.levels.forEach(levelParsed => {
|
9962
|
-
var _videoCodec;
|
9673
|
+
var _audioCodec, _videoCodec;
|
9963
9674
|
const attributes = levelParsed.attrs;
|
9675
|
+
|
9676
|
+
// erase audio codec info if browser does not support mp4a.40.34.
|
9677
|
+
// demuxer will autodetect codec and fallback to mpeg/audio
|
9964
9678
|
let {
|
9965
9679
|
audioCodec,
|
9966
9680
|
videoCodec
|
9967
9681
|
} = levelParsed;
|
9682
|
+
if (((_audioCodec = audioCodec) == null ? void 0 : _audioCodec.indexOf('mp4a.40.34')) !== -1) {
|
9683
|
+
chromeOrFirefox || (chromeOrFirefox = /chrome|firefox/i.test(navigator.userAgent));
|
9684
|
+
if (chromeOrFirefox) {
|
9685
|
+
levelParsed.audioCodec = audioCodec = undefined;
|
9686
|
+
}
|
9687
|
+
}
|
9968
9688
|
if (audioCodec) {
|
9969
|
-
|
9970
|
-
levelParsed.audioCodec = audioCodec = getCodecCompatibleName(audioCodec, preferManagedMediaSource) || undefined;
|
9689
|
+
levelParsed.audioCodec = audioCodec = getCodecCompatibleName(audioCodec, preferManagedMediaSource);
|
9971
9690
|
}
|
9972
9691
|
if (((_videoCodec = videoCodec) == null ? void 0 : _videoCodec.indexOf('avc1')) === 0) {
|
9973
9692
|
videoCodec = levelParsed.videoCodec = convertAVC1ToAVCOTI(videoCodec);
|
@@ -10309,12 +10028,7 @@ class LevelController extends BasePlaylistController {
|
|
10309
10028
|
if (curLevel.fragmentError === 0) {
|
10310
10029
|
curLevel.loadError = 0;
|
10311
10030
|
}
|
10312
|
-
|
10313
|
-
let previousDetails = curLevel.details;
|
10314
|
-
if (previousDetails === data.details && previousDetails.advanced) {
|
10315
|
-
previousDetails = undefined;
|
10316
|
-
}
|
10317
|
-
this.playlistLoaded(level, data, previousDetails);
|
10031
|
+
this.playlistLoaded(level, data, curLevel.details);
|
10318
10032
|
} else if ((_data$deliveryDirecti2 = data.deliveryDirectives) != null && _data$deliveryDirecti2.skip) {
|
10319
10033
|
// received a delta playlist update that cannot be merged
|
10320
10034
|
details.deltaUpdateFailed = true;
|
@@ -10492,16 +10206,13 @@ class FragmentTracker {
|
|
10492
10206
|
* If not found any Fragment, return null
|
10493
10207
|
*/
|
10494
10208
|
getBufferedFrag(position, levelType) {
|
10495
|
-
return this.getFragAtPos(position, levelType, true);
|
10496
|
-
}
|
10497
|
-
getFragAtPos(position, levelType, buffered) {
|
10498
10209
|
const {
|
10499
10210
|
fragments
|
10500
10211
|
} = this;
|
10501
10212
|
const keys = Object.keys(fragments);
|
10502
10213
|
for (let i = keys.length; i--;) {
|
10503
10214
|
const fragmentEntity = fragments[keys[i]];
|
10504
|
-
if ((fragmentEntity == null ? void 0 : fragmentEntity.body.type) === levelType &&
|
10215
|
+
if ((fragmentEntity == null ? void 0 : fragmentEntity.body.type) === levelType && fragmentEntity.buffered) {
|
10505
10216
|
const frag = fragmentEntity.body;
|
10506
10217
|
if (frag.start <= position && position <= frag.end) {
|
10507
10218
|
return frag;
|
@@ -10756,8 +10467,7 @@ class FragmentTracker {
|
|
10756
10467
|
const {
|
10757
10468
|
frag,
|
10758
10469
|
part,
|
10759
|
-
timeRanges
|
10760
|
-
type
|
10470
|
+
timeRanges
|
10761
10471
|
} = data;
|
10762
10472
|
if (frag.sn === 'initSegment') {
|
10763
10473
|
return;
|
@@ -10772,8 +10482,10 @@ class FragmentTracker {
|
|
10772
10482
|
}
|
10773
10483
|
// Store the latest timeRanges loaded in the buffer
|
10774
10484
|
this.timeRanges = timeRanges;
|
10775
|
-
|
10776
|
-
|
10485
|
+
Object.keys(timeRanges).forEach(elementaryStream => {
|
10486
|
+
const timeRange = timeRanges[elementaryStream];
|
10487
|
+
this.detectEvictedFragments(elementaryStream, timeRange, playlistType, part);
|
10488
|
+
});
|
10777
10489
|
}
|
10778
10490
|
onFragBuffered(event, data) {
|
10779
10491
|
this.detectPartialFragments(data);
|
@@ -11102,8 +10814,8 @@ function createLoaderContext(frag, part = null) {
|
|
11102
10814
|
var _frag$decryptdata;
|
11103
10815
|
let byteRangeStart = start;
|
11104
10816
|
let byteRangeEnd = end;
|
11105
|
-
if (frag.sn === 'initSegment' &&
|
11106
|
-
// MAP segment encrypted with method 'AES-128'
|
10817
|
+
if (frag.sn === 'initSegment' && ((_frag$decryptdata = frag.decryptdata) == null ? void 0 : _frag$decryptdata.method) === 'AES-128') {
|
10818
|
+
// MAP segment encrypted with method 'AES-128', when served with HTTP Range,
|
11107
10819
|
// has the unencrypted size specified in the range.
|
11108
10820
|
// Ref: https://tools.ietf.org/html/draft-pantos-hls-rfc8216bis-08#section-6.3.6
|
11109
10821
|
const fragmentLen = end - start;
|
@@ -11136,9 +10848,6 @@ function createGapLoadError(frag, part) {
|
|
11136
10848
|
(part ? part : frag).stats.aborted = true;
|
11137
10849
|
return new LoadError(errorData);
|
11138
10850
|
}
|
11139
|
-
function isMethodFullSegmentAesCbc(method) {
|
11140
|
-
return method === 'AES-128' || method === 'AES-256';
|
11141
|
-
}
|
11142
10851
|
class LoadError extends Error {
|
11143
10852
|
constructor(data) {
|
11144
10853
|
super(data.error.message);
|
@@ -11284,8 +10993,6 @@ class KeyLoader {
|
|
11284
10993
|
}
|
11285
10994
|
return this.loadKeyEME(keyInfo, frag);
|
11286
10995
|
case 'AES-128':
|
11287
|
-
case 'AES-256':
|
11288
|
-
case 'AES-256-CTR':
|
11289
10996
|
return this.loadKeyHTTP(keyInfo, frag);
|
11290
10997
|
default:
|
11291
10998
|
return Promise.reject(this.createKeyLoadError(frag, ErrorDetails.KEY_LOAD_ERROR, new Error(`Key supplied with unsupported METHOD: "${decryptdata.method}"`)));
|
@@ -11421,9 +11128,8 @@ class KeyLoader {
|
|
11421
11128
|
* we are limiting the task execution per call stack to exactly one, but scheduling/post-poning further
|
11422
11129
|
* task processing on the next main loop iteration (also known as "next tick" in the Node/JS runtime lingo).
|
11423
11130
|
*/
|
11424
|
-
class TaskLoop
|
11425
|
-
constructor(
|
11426
|
-
super(label, logger);
|
11131
|
+
class TaskLoop {
|
11132
|
+
constructor() {
|
11427
11133
|
this._boundTick = void 0;
|
11428
11134
|
this._tickTimer = null;
|
11429
11135
|
this._tickInterval = null;
|
@@ -11691,61 +11397,33 @@ function alignMediaPlaylistByPDT(details, refDetails) {
  }

  class AESCrypto {
- constructor(subtle, iv
+ constructor(subtle, iv) {
  this.subtle = void 0;
  this.aesIV = void 0;
- this.aesMode = void 0;
  this.subtle = subtle;
  this.aesIV = iv;
- this.aesMode = aesMode;
  }
  decrypt(data, key) {
-
-
-
-
- iv: this.aesIV
- }, key, data);
- case DecrypterAesMode.ctr:
- return this.subtle.decrypt({
- name: 'AES-CTR',
- counter: this.aesIV,
- length: 64
- },
- //64 : NIST SP800-38A standard suggests that the counter should occupy half of the counter block
- key, data);
- default:
- throw new Error(`[AESCrypto] invalid aes mode ${this.aesMode}`);
- }
+ return this.subtle.decrypt({
+ name: 'AES-CBC',
+ iv: this.aesIV
+ }, key, data);
  }
  }

  class FastAESKey {
- constructor(subtle, key
+ constructor(subtle, key) {
  this.subtle = void 0;
  this.key = void 0;
- this.aesMode = void 0;
  this.subtle = subtle;
  this.key = key;
- this.aesMode = aesMode;
  }
  expandKey() {
- const subtleAlgoName = getSubtleAlgoName(this.aesMode);
  return this.subtle.importKey('raw', this.key, {
- name:
+ name: 'AES-CBC'
  }, false, ['encrypt', 'decrypt']);
  }
  }
- function getSubtleAlgoName(aesMode) {
- switch (aesMode) {
- case DecrypterAesMode.cbc:
- return 'AES-CBC';
- case DecrypterAesMode.ctr:
- return 'AES-CTR';
- default:
- throw new Error(`[FastAESKey] invalid aes mode ${aesMode}`);
- }
- }

  // PKCS7
  function removePadding(array) {
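With the CTR branch removed, decryption in this release is plain AES-128-CBC through SubtleCrypto. A minimal standalone sketch of the same WebCrypto calls (standard `crypto.subtle`; key, IV, and data are caller-supplied):

```ts
// Import a raw 16-byte key and decrypt one AES-128-CBC payload with WebCrypto.
async function decryptAes128Cbc(
  rawKey: ArrayBuffer, // 16-byte key, e.g. fetched from the EXT-X-KEY URI
  iv: Uint8Array,      // 16-byte initialization vector
  data: ArrayBuffer,   // encrypted segment payload
): Promise<ArrayBuffer> {
  const subtle = crypto.subtle;
  const key = await subtle.importKey('raw', rawKey, { name: 'AES-CBC' }, false, ['decrypt']);
  // PKCS7 padding is removed by WebCrypto itself for AES-CBC.
  return subtle.decrypt({ name: 'AES-CBC', iv }, key, data);
}
```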
@@ -11995,8 +11673,7 @@ class Decrypter {
|
|
11995
11673
|
this.currentIV = null;
|
11996
11674
|
this.currentResult = null;
|
11997
11675
|
this.useSoftware = void 0;
|
11998
|
-
this.
|
11999
|
-
this.enableSoftwareAES = config.enableSoftwareAES;
|
11676
|
+
this.useSoftware = config.enableSoftwareAES;
|
12000
11677
|
this.removePKCS7Padding = removePKCS7Padding;
|
12001
11678
|
// built in decryptor expects PKCS7 padding
|
12002
11679
|
if (removePKCS7Padding) {
|
@@ -12009,7 +11686,9 @@ class Decrypter {
|
|
12009
11686
|
/* no-op */
|
12010
11687
|
}
|
12011
11688
|
}
|
12012
|
-
|
11689
|
+
if (this.subtle === null) {
|
11690
|
+
this.useSoftware = true;
|
11691
|
+
}
|
12013
11692
|
}
|
12014
11693
|
destroy() {
|
12015
11694
|
this.subtle = null;
|
@@ -12047,10 +11726,10 @@ class Decrypter {
|
|
12047
11726
|
this.softwareDecrypter = null;
|
12048
11727
|
}
|
12049
11728
|
}
|
12050
|
-
decrypt(data, key, iv
|
11729
|
+
decrypt(data, key, iv) {
|
12051
11730
|
if (this.useSoftware) {
|
12052
11731
|
return new Promise((resolve, reject) => {
|
12053
|
-
this.softwareDecrypt(new Uint8Array(data), key, iv
|
11732
|
+
this.softwareDecrypt(new Uint8Array(data), key, iv);
|
12054
11733
|
const decryptResult = this.flush();
|
12055
11734
|
if (decryptResult) {
|
12056
11735
|
resolve(decryptResult.buffer);
|
@@ -12059,21 +11738,17 @@ class Decrypter {
|
|
12059
11738
|
}
|
12060
11739
|
});
|
12061
11740
|
}
|
12062
|
-
return this.webCryptoDecrypt(new Uint8Array(data), key, iv
|
11741
|
+
return this.webCryptoDecrypt(new Uint8Array(data), key, iv);
|
12063
11742
|
}
|
12064
11743
|
|
12065
11744
|
// Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
|
12066
11745
|
// data is handled in the flush() call
|
12067
|
-
softwareDecrypt(data, key, iv
|
11746
|
+
softwareDecrypt(data, key, iv) {
|
12068
11747
|
const {
|
12069
11748
|
currentIV,
|
12070
11749
|
currentResult,
|
12071
11750
|
remainderData
|
12072
11751
|
} = this;
|
12073
|
-
if (aesMode !== DecrypterAesMode.cbc || key.byteLength !== 16) {
|
12074
|
-
logger.warn('SoftwareDecrypt: can only handle AES-128-CBC');
|
12075
|
-
return null;
|
12076
|
-
}
|
12077
11752
|
this.logOnce('JS AES decrypt');
|
12078
11753
|
// The output is staggered during progressive parsing - the current result is cached, and emitted on the next call
|
12079
11754
|
// This is done in order to strip PKCS7 padding, which is found at the end of each segment. We only know we've reached
|
@@ -12106,11 +11781,11 @@ class Decrypter {
|
|
12106
11781
|
}
|
12107
11782
|
return result;
|
12108
11783
|
}
|
12109
|
-
webCryptoDecrypt(data, key, iv
|
11784
|
+
webCryptoDecrypt(data, key, iv) {
|
12110
11785
|
const subtle = this.subtle;
|
12111
11786
|
if (this.key !== key || !this.fastAesKey) {
|
12112
11787
|
this.key = key;
|
12113
|
-
this.fastAesKey = new FastAESKey(subtle, key
|
11788
|
+
this.fastAesKey = new FastAESKey(subtle, key);
|
12114
11789
|
}
|
12115
11790
|
return this.fastAesKey.expandKey().then(aesKey => {
|
12116
11791
|
// decrypt using web crypto
|
@@ -12118,25 +11793,22 @@ class Decrypter {
|
|
12118
11793
|
return Promise.reject(new Error('web crypto not initialized'));
|
12119
11794
|
}
|
12120
11795
|
this.logOnce('WebCrypto AES decrypt');
|
12121
|
-
const crypto = new AESCrypto(subtle, new Uint8Array(iv)
|
11796
|
+
const crypto = new AESCrypto(subtle, new Uint8Array(iv));
|
12122
11797
|
return crypto.decrypt(data.buffer, aesKey);
|
12123
11798
|
}).catch(err => {
|
12124
11799
|
logger.warn(`[decrypter]: WebCrypto Error, disable WebCrypto API, ${err.name}: ${err.message}`);
|
12125
|
-
return this.onWebCryptoError(data, key, iv
|
11800
|
+
return this.onWebCryptoError(data, key, iv);
|
12126
11801
|
});
|
12127
11802
|
}
|
12128
|
-
onWebCryptoError(data, key, iv
|
12129
|
-
|
12130
|
-
|
12131
|
-
|
12132
|
-
|
12133
|
-
|
12134
|
-
|
12135
|
-
if (decryptResult) {
|
12136
|
-
return decryptResult.buffer;
|
12137
|
-
}
|
11803
|
+
onWebCryptoError(data, key, iv) {
|
11804
|
+
this.useSoftware = true;
|
11805
|
+
this.logEnabled = true;
|
11806
|
+
this.softwareDecrypt(data, key, iv);
|
11807
|
+
const decryptResult = this.flush();
|
11808
|
+
if (decryptResult) {
|
11809
|
+
return decryptResult.buffer;
|
12138
11810
|
}
|
12139
|
-
throw new Error('WebCrypto
|
11811
|
+
throw new Error('WebCrypto and softwareDecrypt: failed to decrypt data');
|
12140
11812
|
}
|
12141
11813
|
getValidChunk(data) {
|
12142
11814
|
let currentChunk = data;
|
@@ -12187,7 +11859,7 @@ const State = {
|
|
12187
11859
|
};
|
12188
11860
|
class BaseStreamController extends TaskLoop {
|
12189
11861
|
constructor(hls, fragmentTracker, keyLoader, logPrefix, playlistType) {
|
12190
|
-
super(
|
11862
|
+
super();
|
12191
11863
|
this.hls = void 0;
|
12192
11864
|
this.fragPrevious = null;
|
12193
11865
|
this.fragCurrent = null;
|
@@ -12212,98 +11884,22 @@ class BaseStreamController extends TaskLoop {
|
|
12212
11884
|
this.startFragRequested = false;
|
12213
11885
|
this.decrypter = void 0;
|
12214
11886
|
this.initPTS = [];
|
12215
|
-
this.
|
12216
|
-
this.
|
12217
|
-
this.
|
12218
|
-
|
12219
|
-
|
12220
|
-
fragCurrent,
|
12221
|
-
media,
|
12222
|
-
mediaBuffer,
|
12223
|
-
state
|
12224
|
-
} = this;
|
12225
|
-
const currentTime = media ? media.currentTime : 0;
|
12226
|
-
const bufferInfo = BufferHelper.bufferInfo(mediaBuffer ? mediaBuffer : media, currentTime, config.maxBufferHole);
|
12227
|
-
this.log(`media seeking to ${isFiniteNumber(currentTime) ? currentTime.toFixed(3) : currentTime}, state: ${state}`);
|
12228
|
-
if (this.state === State.ENDED) {
|
12229
|
-
this.resetLoadingState();
|
12230
|
-
} else if (fragCurrent) {
|
12231
|
-
// Seeking while frag load is in progress
|
12232
|
-
const tolerance = config.maxFragLookUpTolerance;
|
12233
|
-
const fragStartOffset = fragCurrent.start - tolerance;
|
12234
|
-
const fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance;
|
12235
|
-
// if seeking out of buffered range or into new one
|
12236
|
-
if (!bufferInfo.len || fragEndOffset < bufferInfo.start || fragStartOffset > bufferInfo.end) {
|
12237
|
-
const pastFragment = currentTime > fragEndOffset;
|
12238
|
-
// if the seek position is outside the current fragment range
|
12239
|
-
if (currentTime < fragStartOffset || pastFragment) {
|
12240
|
-
if (pastFragment && fragCurrent.loader) {
|
12241
|
-
this.log('seeking outside of buffer while fragment load in progress, cancel fragment load');
|
12242
|
-
fragCurrent.abortRequests();
|
12243
|
-
this.resetLoadingState();
|
12244
|
-
}
|
12245
|
-
this.fragPrevious = null;
|
12246
|
-
}
|
12247
|
-
}
|
12248
|
-
}
|
12249
|
-
if (media) {
|
12250
|
-
// Remove gap fragments
|
12251
|
-
this.fragmentTracker.removeFragmentsInRange(currentTime, Infinity, this.playlistType, true);
|
12252
|
-
this.lastCurrentTime = currentTime;
|
12253
|
-
if (!this.loadingParts) {
|
12254
|
-
const bufferEnd = Math.max(bufferInfo.end, currentTime);
|
12255
|
-
const shouldLoadParts = this.shouldLoadParts(this.getLevelDetails(), bufferEnd);
|
12256
|
-
if (shouldLoadParts) {
|
12257
|
-
this.log(`LL-Part loading ON after seeking to ${currentTime.toFixed(2)} with buffer @${bufferEnd.toFixed(2)}`);
|
12258
|
-
this.loadingParts = shouldLoadParts;
|
12259
|
-
}
|
12260
|
-
}
|
12261
|
-
}
|
12262
|
-
|
12263
|
-
// in case seeking occurs although no media buffered, adjust startPosition and nextLoadPosition to seek target
|
12264
|
-
if (!this.loadedmetadata && !bufferInfo.len) {
|
12265
|
-
this.nextLoadPosition = this.startPosition = currentTime;
|
12266
|
-
}
|
12267
|
-
|
12268
|
-
// Async tick to speed up processing
|
12269
|
-
this.tickImmediate();
|
12270
|
-
};
|
12271
|
-
this.onMediaEnded = () => {
|
12272
|
-
// reset startPosition and lastCurrentTime to restart playback @ stream beginning
|
12273
|
-
this.startPosition = this.lastCurrentTime = 0;
|
12274
|
-
if (this.playlistType === PlaylistLevelType.MAIN) {
|
12275
|
-
this.hls.trigger(Events.MEDIA_ENDED, {
|
12276
|
-
stalled: false
|
12277
|
-
});
|
12278
|
-
}
|
12279
|
-
};
|
11887
|
+
this.onvseeking = null;
|
11888
|
+
this.onvended = null;
|
11889
|
+
this.logPrefix = '';
|
11890
|
+
this.log = void 0;
|
11891
|
+
this.warn = void 0;
|
12280
11892
|
this.playlistType = playlistType;
|
11893
|
+
this.logPrefix = logPrefix;
|
11894
|
+
this.log = logger.log.bind(logger, `${logPrefix}:`);
|
11895
|
+
this.warn = logger.warn.bind(logger, `${logPrefix}:`);
|
12281
11896
|
this.hls = hls;
|
12282
11897
|
this.fragmentLoader = new FragmentLoader(hls.config);
|
12283
11898
|
this.keyLoader = keyLoader;
|
12284
11899
|
this.fragmentTracker = fragmentTracker;
|
12285
11900
|
this.config = hls.config;
|
12286
11901
|
this.decrypter = new Decrypter(hls.config);
|
12287
|
-
}
|
12288
|
-
registerListeners() {
|
12289
|
-
const {
|
12290
|
-
hls
|
12291
|
-
} = this;
|
12292
|
-
hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
|
12293
|
-
hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
|
12294
|
-
hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
|
12295
11902
|
hls.on(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
|
12296
|
-
hls.on(Events.ERROR, this.onError, this);
|
12297
|
-
}
|
12298
|
-
unregisterListeners() {
|
12299
|
-
const {
|
12300
|
-
hls
|
12301
|
-
} = this;
|
12302
|
-
hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
|
12303
|
-
hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
|
12304
|
-
hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
|
12305
|
-
hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
|
12306
|
-
hls.off(Events.ERROR, this.onError, this);
|
12307
11903
|
}
|
12308
11904
|
doTick() {
|
12309
11905
|
this.onTickEnd();
|
@@ -12327,12 +11923,6 @@ class BaseStreamController extends TaskLoop {
|
|
12327
11923
|
this.clearNextTick();
|
12328
11924
|
this.state = State.STOPPED;
|
12329
11925
|
}
|
12330
|
-
pauseBuffering() {
|
12331
|
-
this.buffering = false;
|
12332
|
-
}
|
12333
|
-
resumeBuffering() {
|
12334
|
-
this.buffering = true;
|
12335
|
-
}
|
12336
11926
|
_streamEnded(bufferInfo, levelDetails) {
|
12337
11927
|
// If playlist is live, there is another buffered range after the current range, nothing buffered, media is detached,
|
12338
11928
|
// of nothing loading/loaded return false
|
@@ -12363,8 +11953,10 @@ class BaseStreamController extends TaskLoop {
|
|
12363
11953
|
}
|
12364
11954
|
onMediaAttached(event, data) {
|
12365
11955
|
const media = this.media = this.mediaBuffer = data.media;
|
12366
|
-
|
12367
|
-
|
11956
|
+
this.onvseeking = this.onMediaSeeking.bind(this);
|
11957
|
+
this.onvended = this.onMediaEnded.bind(this);
|
11958
|
+
media.addEventListener('seeking', this.onvseeking);
|
11959
|
+
media.addEventListener('ended', this.onvended);
|
12368
11960
|
const config = this.config;
|
12369
11961
|
if (this.levels && config.autoStartLoad && this.state === State.STOPPED) {
|
12370
11962
|
this.startLoad(config.startPosition);
|
@@ -12378,9 +11970,10 @@ class BaseStreamController extends TaskLoop {
|
|
12378
11970
|
}
|
12379
11971
|
|
12380
11972
|
// remove video listeners
|
12381
|
-
if (media) {
|
12382
|
-
media.removeEventListener('seeking', this.
|
12383
|
-
media.removeEventListener('ended', this.
|
11973
|
+
if (media && this.onvseeking && this.onvended) {
|
11974
|
+
media.removeEventListener('seeking', this.onvseeking);
|
11975
|
+
media.removeEventListener('ended', this.onvended);
|
11976
|
+
this.onvseeking = this.onvended = null;
|
12384
11977
|
}
|
12385
11978
|
if (this.keyLoader) {
|
12386
11979
|
this.keyLoader.detach();
|
@@ -12390,17 +11983,66 @@ class BaseStreamController extends TaskLoop {
|
|
12390
11983
|
this.fragmentTracker.removeAllFragments();
|
12391
11984
|
this.stopLoad();
|
12392
11985
|
}
|
12393
|
-
|
12394
|
-
|
11986
|
+
onMediaSeeking() {
|
11987
|
+
const {
|
11988
|
+
config,
|
11989
|
+
fragCurrent,
|
11990
|
+
media,
|
11991
|
+
mediaBuffer,
|
11992
|
+
state
|
11993
|
+
} = this;
|
11994
|
+
const currentTime = media ? media.currentTime : 0;
|
11995
|
+
const bufferInfo = BufferHelper.bufferInfo(mediaBuffer ? mediaBuffer : media, currentTime, config.maxBufferHole);
|
11996
|
+
this.log(`media seeking to ${isFiniteNumber(currentTime) ? currentTime.toFixed(3) : currentTime}, state: ${state}`);
|
11997
|
+
if (this.state === State.ENDED) {
|
11998
|
+
this.resetLoadingState();
|
11999
|
+
} else if (fragCurrent) {
|
12000
|
+
// Seeking while frag load is in progress
|
12001
|
+
const tolerance = config.maxFragLookUpTolerance;
|
12002
|
+
const fragStartOffset = fragCurrent.start - tolerance;
|
12003
|
+
const fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance;
|
12004
|
+
// if seeking out of buffered range or into new one
|
12005
|
+
if (!bufferInfo.len || fragEndOffset < bufferInfo.start || fragStartOffset > bufferInfo.end) {
|
12006
|
+
const pastFragment = currentTime > fragEndOffset;
|
12007
|
+
// if the seek position is outside the current fragment range
|
12008
|
+
if (currentTime < fragStartOffset || pastFragment) {
|
12009
|
+
if (pastFragment && fragCurrent.loader) {
|
12010
|
+
this.log('seeking outside of buffer while fragment load in progress, cancel fragment load');
|
12011
|
+
fragCurrent.abortRequests();
|
12012
|
+
this.resetLoadingState();
|
12013
|
+
}
|
12014
|
+
this.fragPrevious = null;
|
12015
|
+
}
|
12016
|
+
}
|
12017
|
+
}
|
12018
|
+
if (media) {
|
12019
|
+
// Remove gap fragments
|
12020
|
+
this.fragmentTracker.removeFragmentsInRange(currentTime, Infinity, this.playlistType, true);
|
12021
|
+
this.lastCurrentTime = currentTime;
|
12022
|
+
}
|
12023
|
+
|
12024
|
+
// in case seeking occurs although no media buffered, adjust startPosition and nextLoadPosition to seek target
|
12025
|
+
if (!this.loadedmetadata && !bufferInfo.len) {
|
12026
|
+
this.nextLoadPosition = this.startPosition = currentTime;
|
12027
|
+
}
|
12028
|
+
|
12029
|
+
// Async tick to speed up processing
|
12030
|
+
this.tickImmediate();
|
12031
|
+
}
|
12032
|
+
onMediaEnded() {
|
12033
|
+
// reset startPosition and lastCurrentTime to restart playback @ stream beginning
|
12034
|
+
this.startPosition = this.lastCurrentTime = 0;
|
12035
|
+
}
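The `onMediaSeeking` handler added above cancels an in-flight fragment request when the seek target falls outside the fragment currently being loaded (padded by `maxFragLookUpTolerance`). A hedged sketch of just that range check, with simplified types (only `start`, `duration`, and the tolerance option mirror the diff):

```ts
// Illustrative seek-vs-fragment-range check; not the library's exact logic.
interface FragLike {
  start: number;
  duration: number;
}

function seekIsOutsideFragment(
  currentTime: number,
  frag: FragLike,
  tolerance: number, // e.g. config.maxFragLookUpTolerance
): boolean {
  const fragStartOffset = frag.start - tolerance;
  const fragEndOffset = frag.start + frag.duration + tolerance;
  // A seek before the padded start or past the padded end means the in-flight
  // fragment no longer covers the playback position, so its load can be aborted.
  return currentTime < fragStartOffset || currentTime > fragEndOffset;
}

// Seeking to 42s while fragment [30s, 36s) loads, with 0.25s tolerance:
console.log(seekIsOutsideFragment(42, { start: 30, duration: 6 }, 0.25)); // true
```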
|
12395
12036
|
onManifestLoaded(event, data) {
|
12396
12037
|
this.startTimeOffset = data.startTimeOffset;
|
12397
12038
|
this.initPTS = [];
|
12398
12039
|
}
|
12399
12040
|
onHandlerDestroying() {
|
12041
|
+
this.hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
|
12400
12042
|
this.stopLoad();
|
12401
12043
|
super.onHandlerDestroying();
|
12402
12044
|
// @ts-ignore
|
12403
|
-
this.hls =
|
12045
|
+
this.hls = null;
|
12404
12046
|
}
|
12405
12047
|
onHandlerDestroyed() {
|
12406
12048
|
this.state = State.STOPPED;
|
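The `onHandlerDestroying` change above moves the `MANIFEST_LOADED` unsubscription into teardown and nulls the `hls` back-reference. A minimal sketch of that teardown order, assuming a generic emitter type and a placeholder event name (neither is an hls.js export):

```ts
// Hedged sketch of the destroy-time cleanup pattern; `Emitter` and the event string are stand-ins.
interface Emitter {
  off(event: string, handler: (...args: unknown[]) => void, context?: object): void;
}

class ControllerTeardownExample {
  constructor(
    private hls: Emitter | null,
    private onManifestLoaded: () => void,
  ) {}

  onHandlerDestroying(): void {
    // Unsubscribe first so no callback fires into a half-torn-down controller.
    this.hls?.off('manifestLoaded', this.onManifestLoaded, this);
    this.stopLoad();
    this.hls = null; // mirrors `this.hls = null` in the diff, allowing GC of the controller
  }

  private stopLoad(): void {
    /* cancel in-flight fragment/key loads */
  }
}
```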
@@ -12531,10 +12173,10 @@ class BaseStreamController extends TaskLoop {
|
|
12531
12173
|
const decryptData = frag.decryptdata;
|
12532
12174
|
|
12533
12175
|
// check to see if the payload needs to be decrypted
|
12534
|
-
if (payload && payload.byteLength > 0 && decryptData != null && decryptData.key && decryptData.iv &&
|
12176
|
+
if (payload && payload.byteLength > 0 && decryptData != null && decryptData.key && decryptData.iv && decryptData.method === 'AES-128') {
|
12535
12177
|
const startTime = self.performance.now();
|
12536
12178
|
// decrypt init segment data
|
12537
|
-
return this.decrypter.decrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer
|
12179
|
+
return this.decrypter.decrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer).catch(err => {
|
12538
12180
|
hls.trigger(Events.ERROR, {
|
12539
12181
|
type: ErrorTypes.MEDIA_ERROR,
|
12540
12182
|
details: ErrorDetails.FRAG_DECRYPT_ERROR,
|
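The hunk above gates init-segment decryption on `decryptData.method === 'AES-128'` and attaches a `.catch` that surfaces `FRAG_DECRYPT_ERROR`. For orientation, a minimal WebCrypto sketch of what decrypting an AES-128 (CBC, PKCS#7-padded) payload involves; the function name is illustrative and this is not hls.js's `Decrypter`:

```ts
// Assumes a 16-byte key and IV taken from the EXT-X-KEY data, as in the diff above.
async function decryptAes128Cbc(
  payload: Uint8Array,
  key: ArrayBuffer,
  iv: ArrayBuffer,
): Promise<ArrayBuffer> {
  const cryptoKey = await crypto.subtle.importKey(
    'raw',
    key,
    { name: 'AES-CBC' },
    false,
    ['decrypt'],
  );
  // AES-CBC with PKCS#7 padding matches the HLS AES-128 full-segment scheme.
  return crypto.subtle.decrypt({ name: 'AES-CBC', iv }, cryptoKey, payload);
}
```

The `.catch` in the diff matters because WebCrypto rejects on bad keys or malformed padding, and the controller converts that rejection into an hls.js error event rather than letting it propagate.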
@@ -12575,9 +12217,7 @@ class BaseStreamController extends TaskLoop {
|
|
12575
12217
|
throw new Error('init load aborted, missing levels');
|
12576
12218
|
}
|
12577
12219
|
const stats = data.frag.stats;
|
12578
|
-
|
12579
|
-
this.state = State.IDLE;
|
12580
|
-
}
|
12220
|
+
this.state = State.IDLE;
|
12581
12221
|
data.frag.data = new Uint8Array(data.payload);
|
12582
12222
|
stats.parsing.start = stats.buffering.start = self.performance.now();
|
12583
12223
|
stats.parsing.end = stats.buffering.end = self.performance.now();
|
@@ -12648,7 +12288,7 @@ class BaseStreamController extends TaskLoop {
|
|
12648
12288
|
}
|
12649
12289
|
let keyLoadingPromise = null;
|
12650
12290
|
if (frag.encrypted && !((_frag$decryptdata = frag.decryptdata) != null && _frag$decryptdata.key)) {
|
12651
|
-
this.log(`Loading key for ${frag.sn} of [${details.startSN}-${details.endSN}], ${this.
|
12291
|
+
this.log(`Loading key for ${frag.sn} of [${details.startSN}-${details.endSN}], ${this.logPrefix === '[stream-controller]' ? 'level' : 'track'} ${frag.level}`);
|
12652
12292
|
this.state = State.KEY_LOADING;
|
12653
12293
|
this.fragCurrent = frag;
|
12654
12294
|
keyLoadingPromise = this.keyLoader.load(frag).then(keyLoadedData => {
|
@@ -12669,16 +12309,8 @@ class BaseStreamController extends TaskLoop {
|
|
12669
12309
|
} else if (!frag.encrypted && details.encryptedFragments.length) {
|
12670
12310
|
this.keyLoader.loadClear(frag, details.encryptedFragments);
|
12671
12311
|
}
|
12672
|
-
const fragPrevious = this.fragPrevious;
|
12673
|
-
if (frag.sn !== 'initSegment' && (!fragPrevious || frag.sn !== fragPrevious.sn)) {
|
12674
|
-
const shouldLoadParts = this.shouldLoadParts(level.details, frag.end);
|
12675
|
-
if (shouldLoadParts !== this.loadingParts) {
|
12676
|
-
this.log(`LL-Part loading ${shouldLoadParts ? 'ON' : 'OFF'} loading sn ${fragPrevious == null ? void 0 : fragPrevious.sn}->${frag.sn}`);
|
12677
|
-
this.loadingParts = shouldLoadParts;
|
12678
|
-
}
|
12679
|
-
}
|
12680
12312
|
targetBufferTime = Math.max(frag.start, targetBufferTime || 0);
|
12681
|
-
if (this.
|
12313
|
+
if (this.config.lowLatencyMode && frag.sn !== 'initSegment') {
|
12682
12314
|
const partList = details.partList;
|
12683
12315
|
if (partList && progressCallback) {
|
12684
12316
|
if (targetBufferTime > frag.end && details.fragmentHint) {
|
@@ -12687,7 +12319,7 @@ class BaseStreamController extends TaskLoop {
|
|
12687
12319
|
const partIndex = this.getNextPart(partList, frag, targetBufferTime);
|
12688
12320
|
if (partIndex > -1) {
|
12689
12321
|
const part = partList[partIndex];
|
12690
|
-
this.log(`Loading part sn: ${frag.sn} p: ${part.index} cc: ${frag.cc} of playlist [${details.startSN}-${details.endSN}] parts [0-${partIndex}-${partList.length - 1}] ${this.
|
12322
|
+
this.log(`Loading part sn: ${frag.sn} p: ${part.index} cc: ${frag.cc} of playlist [${details.startSN}-${details.endSN}] parts [0-${partIndex}-${partList.length - 1}] ${this.logPrefix === '[stream-controller]' ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
|
12691
12323
|
this.nextLoadPosition = part.start + part.duration;
|
12692
12324
|
this.state = State.FRAG_LOADING;
|
12693
12325
|
let _result;
|
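The part-loading branch above (`config.lowLatencyMode` with a `partList`) picks the next LL-HLS part via `getNextPart` and advances `nextLoadPosition` to `part.start + part.duration`. A simplified sketch of that selection, ignoring the loaded/independent bookkeeping the real controller also does; field names follow the diff:

```ts
// Illustrative only: choose the first part whose time range still covers the target buffer time.
interface PartLike {
  start: number;
  duration: number;
}

function getNextPartIndex(parts: PartLike[], targetBufferTime: number): number {
  for (let i = 0; i < parts.length; i++) {
    const part = parts[i];
    if (targetBufferTime < part.start + part.duration) {
      return i; // this part extends past the target, so load it next
    }
  }
  return -1; // no part covers the target; fall back to whole-fragment loading
}
```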
@@ -12716,14 +12348,7 @@ class BaseStreamController extends TaskLoop {
|
|
12716
12348
|
}
|
12717
12349
|
}
|
12718
12350
|
}
|
12719
|
-
|
12720
|
-
this.log(`LL-Part loading OFF after next part miss @${targetBufferTime.toFixed(2)}`);
|
12721
|
-
this.loadingParts = false;
|
12722
|
-
} else if (!frag.url) {
|
12723
|
-
// Selected fragment hint for part but not loading parts
|
12724
|
-
return Promise.resolve(null);
|
12725
|
-
}
|
12726
|
-
this.log(`Loading fragment ${frag.sn} cc: ${frag.cc} ${details ? 'of [' + details.startSN + '-' + details.endSN + '] ' : ''}${this.playlistType === PlaylistLevelType.MAIN ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
|
12351
|
+
this.log(`Loading fragment ${frag.sn} cc: ${frag.cc} ${details ? 'of [' + details.startSN + '-' + details.endSN + '] ' : ''}${this.logPrefix === '[stream-controller]' ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
|
12727
12352
|
// Don't update nextLoadPosition for fragments which are not buffered
|
12728
12353
|
if (isFiniteNumber(frag.sn) && !this.bitrateTest) {
|
12729
12354
|
this.nextLoadPosition = frag.start + frag.duration;
|
@@ -12821,36 +12446,8 @@ class BaseStreamController extends TaskLoop {
|
|
12821
12446
|
if (part) {
|
12822
12447
|
part.stats.parsing.end = now;
|
12823
12448
|
}
|
12824
|
-
// See if part loading should be disabled/enabled based on buffer and playback position.
|
12825
|
-
if (frag.sn !== 'initSegment') {
|
12826
|
-
const levelDetails = this.getLevelDetails();
|
12827
|
-
const loadingPartsAtEdge = levelDetails && frag.sn > levelDetails.endSN;
|
12828
|
-
const shouldLoadParts = loadingPartsAtEdge || this.shouldLoadParts(levelDetails, frag.end);
|
12829
|
-
if (shouldLoadParts !== this.loadingParts) {
|
12830
|
-
this.log(`LL-Part loading ${shouldLoadParts ? 'ON' : 'OFF'} after parsing segment ending @${frag.end.toFixed(2)}`);
|
12831
|
-
this.loadingParts = shouldLoadParts;
|
12832
|
-
}
|
12833
|
-
}
|
12834
12449
|
this.updateLevelTiming(frag, part, level, chunkMeta.partial);
|
12835
12450
|
}
|
12836
|
-
shouldLoadParts(details, bufferEnd) {
|
12837
|
-
if (this.config.lowLatencyMode) {
|
12838
|
-
if (!details) {
|
12839
|
-
return this.loadingParts;
|
12840
|
-
}
|
12841
|
-
if (details != null && details.partList) {
|
12842
|
-
var _details$fragmentHint;
|
12843
|
-
// Buffer must be ahead of first part + duration of parts after last segment
|
12844
|
-
// and playback must be at or past segment adjacent to part list
|
12845
|
-
const firstPart = details.partList[0];
|
12846
|
-
const safePartStart = firstPart.end + (((_details$fragmentHint = details.fragmentHint) == null ? void 0 : _details$fragmentHint.duration) || 0);
|
12847
|
-
if (bufferEnd >= safePartStart && this.lastCurrentTime > firstPart.start - firstPart.fragment.duration) {
|
12848
|
-
return true;
|
12849
|
-
}
|
12850
|
-
}
|
12851
|
-
}
|
12852
|
-
return false;
|
12853
|
-
}
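The removed `shouldLoadParts()` above existed only in the canary build; its condition is restated here as a standalone sketch so the removal is easier to read. Types are simplified and the function is illustrative, not part of the 1.5.8 source:

```ts
// Restatement of the removed LL-HLS gating condition, for reference only.
interface PartListEntry {
  start: number;
  end: number;
  fragment: { duration: number };
}

function shouldLoadParts(
  lowLatencyMode: boolean,
  partList: PartListEntry[] | undefined,
  fragmentHintDuration: number,
  bufferEnd: number,
  lastCurrentTime: number,
): boolean {
  if (!lowLatencyMode || !partList || partList.length === 0) {
    return false;
  }
  // Buffer must reach past the first part plus the duration of parts after the last full
  // segment, and playback must be at or past the segment adjacent to the part list.
  const firstPart = partList[0];
  const safePartStart = firstPart.end + fragmentHintDuration;
  return (
    bufferEnd >= safePartStart &&
    lastCurrentTime > firstPart.start - firstPart.fragment.duration
  );
}
```

In 1.5.8 the release build instead keys part loading directly off `config.lowLatencyMode`, as the surrounding hunks show.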
|
12854
12451
|
getCurrentContext(chunkMeta) {
|
12855
12452
|
const {
|
12856
12453
|
levels,
|
@@ -12951,7 +12548,7 @@ class BaseStreamController extends TaskLoop {
|
|
12951
12548
|
// Workaround flaw in getting forward buffer when maxBufferHole is smaller than gap at current pos
|
12952
12549
|
if (bufferInfo.len === 0 && bufferInfo.nextStart !== undefined) {
|
12953
12550
|
const bufferedFragAtPos = this.fragmentTracker.getBufferedFrag(pos, type);
|
12954
|
-
if (bufferedFragAtPos &&
|
12551
|
+
if (bufferedFragAtPos && bufferInfo.nextStart < bufferedFragAtPos.end) {
|
12955
12552
|
return BufferHelper.bufferInfo(bufferable, pos, Math.max(bufferInfo.nextStart, maxBufferHole));
|
12956
12553
|
}
|
12957
12554
|
}
|
@@ -12999,8 +12596,7 @@ class BaseStreamController extends TaskLoop {
|
|
12999
12596
|
config
|
13000
12597
|
} = this;
|
13001
12598
|
const start = fragments[0].start;
|
13002
|
-
|
13003
|
-
let frag = null;
|
12599
|
+
let frag;
|
13004
12600
|
if (levelDetails.live) {
|
13005
12601
|
const initialLiveManifestSize = config.initialLiveManifestSize;
|
13006
12602
|
if (fragLen < initialLiveManifestSize) {
|
@@ -13012,10 +12608,6 @@ class BaseStreamController extends TaskLoop {
|
|
13012
12608
|
// Do not load using live logic if the starting frag is requested - we want to use getFragmentAtPosition() so that
|
13013
12609
|
// we get the fragment matching that start time
|
13014
12610
|
if (!levelDetails.PTSKnown && !this.startFragRequested && this.startPosition === -1 || pos < start) {
|
13015
|
-
if (canLoadParts && !this.loadingParts) {
|
13016
|
-
this.log(`LL-Part loading ON for initial live fragment`);
|
13017
|
-
this.loadingParts = true;
|
13018
|
-
}
|
13019
12611
|
frag = this.getInitialLiveFragment(levelDetails, fragments);
|
13020
12612
|
this.startPosition = this.nextLoadPosition = frag ? this.hls.liveSyncPosition || frag.start : pos;
|
13021
12613
|
}
|
@@ -13026,7 +12618,7 @@ class BaseStreamController extends TaskLoop {
|
|
13026
12618
|
|
13027
12619
|
// If we haven't run into any special cases already, just load the fragment most closely matching the requested position
|
13028
12620
|
if (!frag) {
|
13029
|
-
const end =
|
12621
|
+
const end = config.lowLatencyMode ? levelDetails.partEnd : levelDetails.fragmentEnd;
|
13030
12622
|
frag = this.getFragmentAtPosition(pos, end, levelDetails);
|
13031
12623
|
}
|
13032
12624
|
return this.mapToInitFragWhenRequired(frag);
|
@@ -13148,7 +12740,7 @@ class BaseStreamController extends TaskLoop {
|
|
13148
12740
|
} = levelDetails;
|
13149
12741
|
const tolerance = config.maxFragLookUpTolerance;
|
13150
12742
|
const partList = levelDetails.partList;
|
13151
|
-
const loadingParts = !!(
|
12743
|
+
const loadingParts = !!(config.lowLatencyMode && partList != null && partList.length && fragmentHint);
|
13152
12744
|
if (loadingParts && fragmentHint && !this.bitrateTest) {
|
13153
12745
|
// Include incomplete fragment with parts at end
|
13154
12746
|
fragments = fragments.concat(fragmentHint);
|
@@ -13341,7 +12933,7 @@ class BaseStreamController extends TaskLoop {
|
|
13341
12933
|
errorAction.resolved = true;
|
13342
12934
|
}
|
13343
12935
|
} else {
|
13344
|
-
|
12936
|
+
logger.warn(`${data.details} reached or exceeded max retry (${retryCount})`);
|
13345
12937
|
return;
|
13346
12938
|
}
|
13347
12939
|
} else if ((errorAction == null ? void 0 : errorAction.action) === NetworkErrorAction.SendAlternateToPenaltyBox) {
|
@@ -13409,9 +13001,7 @@ class BaseStreamController extends TaskLoop {
|
|
13409
13001
|
this.log('Reset loading state');
|
13410
13002
|
this.fragCurrent = null;
|
13411
13003
|
this.fragPrevious = null;
|
13412
|
-
|
13413
|
-
this.state = State.IDLE;
|
13414
|
-
}
|
13004
|
+
this.state = State.IDLE;
|
13415
13005
|
}
|
13416
13006
|
resetStartWhenNotLoaded(level) {
|
13417
13007
|
// if loadedmetadata is not set, it means that first frag request failed
|
@@ -13587,105 +13177,7 @@ function dummyTrack(type = '', inputTimeScale = 90000) {
|
|
13587
13177
|
sequenceNumber: -1,
|
13588
13178
|
samples: [],
|
13589
13179
|
dropped: 0
|
13590
|
-
};
|
13591
|
-
}
|
13592
|
-
|
13593
|
-
/**
|
13594
|
-
* Returns any adjacent ID3 tags found in data starting at offset, as one block of data
|
13595
|
-
*
|
13596
|
-
* @param data - The data to search in
|
13597
|
-
* @param offset - The offset at which to start searching
|
13598
|
-
*
|
13599
|
-
* @returns The block of data containing any ID3 tags found
|
13600
|
-
* or `undefined` if no header is found at the starting offset
|
13601
|
-
*
|
13602
|
-
* @internal
|
13603
|
-
*
|
13604
|
-
* @group ID3
|
13605
|
-
*/
|
13606
|
-
function getId3Data(data, offset) {
|
13607
|
-
const front = offset;
|
13608
|
-
let length = 0;
|
13609
|
-
while (isId3Header(data, offset)) {
|
13610
|
-
// ID3 header is 10 bytes
|
13611
|
-
length += 10;
|
13612
|
-
const size = readId3Size(data, offset + 6);
|
13613
|
-
length += size;
|
13614
|
-
if (isId3Footer(data, offset + 10)) {
|
13615
|
-
// ID3 footer is 10 bytes
|
13616
|
-
length += 10;
|
13617
|
-
}
|
13618
|
-
offset += length;
|
13619
|
-
}
|
13620
|
-
if (length > 0) {
|
13621
|
-
return data.subarray(front, front + length);
|
13622
|
-
}
|
13623
|
-
return undefined;
|
13624
|
-
}
|
13625
|
-
|
13626
|
-
/**
|
13627
|
-
* Read a 33 bit timestamp from an ID3 frame.
|
13628
|
-
*
|
13629
|
-
* @param timeStampFrame - the ID3 frame
|
13630
|
-
*
|
13631
|
-
* @returns The timestamp
|
13632
|
-
*
|
13633
|
-
* @internal
|
13634
|
-
*
|
13635
|
-
* @group ID3
|
13636
|
-
*/
|
13637
|
-
function readId3Timestamp(timeStampFrame) {
|
13638
|
-
if (timeStampFrame.data.byteLength === 8) {
|
13639
|
-
const data = new Uint8Array(timeStampFrame.data);
|
13640
|
-
// timestamp is 33 bit expressed as a big-endian eight-octet number,
|
13641
|
-
// with the upper 31 bits set to zero.
|
13642
|
-
const pts33Bit = data[3] & 0x1;
|
13643
|
-
let timestamp = (data[4] << 23) + (data[5] << 15) + (data[6] << 7) + data[7];
|
13644
|
-
timestamp /= 45;
|
13645
|
-
if (pts33Bit) {
|
13646
|
-
timestamp += 47721858.84;
|
13647
|
-
} // 2^32 / 90
|
13648
|
-
return Math.round(timestamp);
|
13649
|
-
}
|
13650
|
-
return undefined;
|
13651
|
-
}
|
13652
|
-
|
13653
|
-
/**
|
13654
|
-
* Searches for the Elementary Stream timestamp found in the ID3 data chunk
|
13655
|
-
*
|
13656
|
-
* @param data - Block of data containing one or more ID3 tags
|
13657
|
-
*
|
13658
|
-
* @returns The timestamp
|
13659
|
-
*
|
13660
|
-
* @group ID3
|
13661
|
-
*
|
13662
|
-
* @beta
|
13663
|
-
*/
|
13664
|
-
function getId3Timestamp(data) {
|
13665
|
-
const frames = getId3Frames(data);
|
13666
|
-
for (let i = 0; i < frames.length; i++) {
|
13667
|
-
const frame = frames[i];
|
13668
|
-
if (isId3TimestampFrame(frame)) {
|
13669
|
-
return readId3Timestamp(frame);
|
13670
|
-
}
|
13671
|
-
}
|
13672
|
-
return undefined;
|
13673
|
-
}
|
13674
|
-
|
13675
|
-
/**
|
13676
|
-
* Checks if the given data contains an ID3 tag.
|
13677
|
-
*
|
13678
|
-
* @param data - The data to check
|
13679
|
-
* @param offset - The offset at which to start checking
|
13680
|
-
*
|
13681
|
-
* @returns `true` if an ID3 tag is found
|
13682
|
-
*
|
13683
|
-
* @group ID3
|
13684
|
-
*
|
13685
|
-
* @beta
|
13686
|
-
*/
|
13687
|
-
function canParseId3(data, offset) {
|
13688
|
-
return isId3Header(data, offset) && readId3Size(data, offset + 6) + 10 <= data.length - offset;
|
13180
|
+
};
|
13689
13181
|
}
|
13690
13182
|
|
13691
13183
|
class BaseAudioDemuxer {
|
@@ -13729,12 +13221,12 @@ class BaseAudioDemuxer {
|
|
13729
13221
|
data = appendUint8Array(this.cachedData, data);
|
13730
13222
|
this.cachedData = null;
|
13731
13223
|
}
|
13732
|
-
let id3Data =
|
13224
|
+
let id3Data = getID3Data(data, 0);
|
13733
13225
|
let offset = id3Data ? id3Data.length : 0;
|
13734
13226
|
let lastDataIndex;
|
13735
13227
|
const track = this._audioTrack;
|
13736
13228
|
const id3Track = this._id3Track;
|
13737
|
-
const timestamp = id3Data ?
|
13229
|
+
const timestamp = id3Data ? getTimeStamp(id3Data) : undefined;
|
13738
13230
|
const length = data.length;
|
13739
13231
|
if (this.basePTS === null || this.frameIndex === 0 && isFiniteNumber(timestamp)) {
|
13740
13232
|
this.basePTS = initPTSFn(timestamp, timeOffset, this.initPTS);
|
@@ -13765,9 +13257,9 @@ class BaseAudioDemuxer {
|
|
13765
13257
|
} else {
|
13766
13258
|
offset = length;
|
13767
13259
|
}
|
13768
|
-
} else if (
|
13769
|
-
// after a canParse, a call to
|
13770
|
-
id3Data =
|
13260
|
+
} else if (canParse$2(data, offset)) {
|
13261
|
+
// after a ID3.canParse, a call to ID3.getID3Data *should* always returns some data
|
13262
|
+
id3Data = getID3Data(data, offset);
|
13771
13263
|
id3Track.samples.push({
|
13772
13264
|
pts: this.lastPTS,
|
13773
13265
|
dts: this.lastPTS,
|
@@ -13836,7 +13328,6 @@ const initPTSFn = (timestamp, timeOffset, initPTS) => {
|
|
13836
13328
|
*/
|
13837
13329
|
function getAudioConfig(observer, data, offset, audioCodec) {
|
13838
13330
|
let adtsObjectType;
|
13839
|
-
let originalAdtsObjectType;
|
13840
13331
|
let adtsExtensionSamplingIndex;
|
13841
13332
|
let adtsChannelConfig;
|
13842
13333
|
let config;
|
@@ -13844,7 +13335,7 @@ function getAudioConfig(observer, data, offset, audioCodec) {
|
|
13844
13335
|
const manifestCodec = audioCodec;
|
13845
13336
|
const adtsSamplingRates = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
|
13846
13337
|
// byte 2
|
13847
|
-
adtsObjectType =
|
13338
|
+
adtsObjectType = ((data[offset + 2] & 0xc0) >>> 6) + 1;
|
13848
13339
|
const adtsSamplingIndex = (data[offset + 2] & 0x3c) >>> 2;
|
13849
13340
|
if (adtsSamplingIndex > adtsSamplingRates.length - 1) {
|
13850
13341
|
const error = new Error(`invalid ADTS sampling index:${adtsSamplingIndex}`);
|
@@ -13861,8 +13352,8 @@ function getAudioConfig(observer, data, offset, audioCodec) {
|
|
13861
13352
|
// byte 3
|
13862
13353
|
adtsChannelConfig |= (data[offset + 3] & 0xc0) >>> 6;
|
13863
13354
|
logger.log(`manifest codec:${audioCodec}, ADTS type:${adtsObjectType}, samplingIndex:${adtsSamplingIndex}`);
|
13864
|
-
//
|
13865
|
-
if (/firefox
|
13355
|
+
// firefox: freq less than 24kHz = AAC SBR (HE-AAC)
|
13356
|
+
if (/firefox/i.test(userAgent)) {
|
13866
13357
|
if (adtsSamplingIndex >= 6) {
|
13867
13358
|
adtsObjectType = 5;
|
13868
13359
|
config = new Array(4);
|
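The `getAudioConfig()` hunks above read the AAC object type, sampling-frequency index, and channel configuration straight out of the ADTS header bytes. A small sketch of those bit extractions, with the byte offsets following the ADTS layout used in the diff (the function name is illustrative):

```ts
// `data` points at the start of an ADTS frame; offsets follow the ADTS header layout.
function parseAdtsHeader(data: Uint8Array, offset: number) {
  // byte 2: profile (2 bits, object type - 1) and sampling-frequency index (4 bits)
  const objectType = ((data[offset + 2] & 0xc0) >>> 6) + 1;
  const samplingIndex = (data[offset + 2] & 0x3c) >>> 2;
  // channel configuration spans the low bit of byte 2 and the top 2 bits of byte 3
  let channelConfig = (data[offset + 2] & 0x01) << 2;
  channelConfig |= (data[offset + 3] & 0xc0) >>> 6;
  return { objectType, samplingIndex, channelConfig };
}
```

The browser-specific branches that follow (the Firefox low-sample-rate case, for example) then override the parsed object type to force HE-AAC signalling where the platform expects it.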
@@ -13956,7 +13447,6 @@ function getAudioConfig(observer, data, offset, audioCodec) {
|
|
13956
13447
|
samplerate: adtsSamplingRates[adtsSamplingIndex],
|
13957
13448
|
channelCount: adtsChannelConfig,
|
13958
13449
|
codec: 'mp4a.40.' + adtsObjectType,
|
13959
|
-
parsedCodec: 'mp4a.40.' + originalAdtsObjectType,
|
13960
13450
|
manifestCodec
|
13961
13451
|
};
|
13962
13452
|
}
|
@@ -14011,8 +13501,7 @@ function initTrackConfig(track, observer, data, offset, audioCodec) {
|
|
14011
13501
|
track.channelCount = config.channelCount;
|
14012
13502
|
track.codec = config.codec;
|
14013
13503
|
track.manifestCodec = config.manifestCodec;
|
14014
|
-
track.
|
14015
|
-
logger.log(`parsed codec:${track.parsedCodec}, codec:${track.codec}, rate:${config.samplerate}, channels:${config.channelCount}`);
|
13504
|
+
logger.log(`parsed codec:${track.codec}, rate:${config.samplerate}, channels:${config.channelCount}`);
|
14016
13505
|
}
|
14017
13506
|
}
|
14018
13507
|
function getFrameDuration(samplerate) {
|
@@ -14261,7 +13750,7 @@ class AACDemuxer extends BaseAudioDemuxer {
|
|
14261
13750
|
// Look for ADTS header | 1111 1111 | 1111 X00X | where X can be either 0 or 1
|
14262
13751
|
// Layer bits (position 14 and 15) in header should be always 0 for ADTS
|
14263
13752
|
// More info https://wiki.multimedia.cx/index.php?title=ADTS
|
14264
|
-
const id3Data =
|
13753
|
+
const id3Data = getID3Data(data, 0);
|
14265
13754
|
let offset = (id3Data == null ? void 0 : id3Data.length) || 0;
|
14266
13755
|
if (probe(data, offset)) {
|
14267
13756
|
return false;
|
@@ -14490,110 +13979,6 @@ class BaseVideoParser {
|
|
14490
13979
|
logger.log(VideoSample.pts + '/' + VideoSample.dts + ':' + VideoSample.debug);
|
14491
13980
|
}
|
14492
13981
|
}
|
14493
|
-
parseNALu(track, array) {
|
14494
|
-
const len = array.byteLength;
|
14495
|
-
let state = track.naluState || 0;
|
14496
|
-
const lastState = state;
|
14497
|
-
const units = [];
|
14498
|
-
let i = 0;
|
14499
|
-
let value;
|
14500
|
-
let overflow;
|
14501
|
-
let unitType;
|
14502
|
-
let lastUnitStart = -1;
|
14503
|
-
let lastUnitType = 0;
|
14504
|
-
// logger.log('PES:' + Hex.hexDump(array));
|
14505
|
-
|
14506
|
-
if (state === -1) {
|
14507
|
-
// special use case where we found 3 or 4-byte start codes exactly at the end of previous PES packet
|
14508
|
-
lastUnitStart = 0;
|
14509
|
-
// NALu type is value read from offset 0
|
14510
|
-
lastUnitType = this.getNALuType(array, 0);
|
14511
|
-
state = 0;
|
14512
|
-
i = 1;
|
14513
|
-
}
|
14514
|
-
while (i < len) {
|
14515
|
-
value = array[i++];
|
14516
|
-
// optimization. state 0 and 1 are the predominant case. let's handle them outside of the switch/case
|
14517
|
-
if (!state) {
|
14518
|
-
state = value ? 0 : 1;
|
14519
|
-
continue;
|
14520
|
-
}
|
14521
|
-
if (state === 1) {
|
14522
|
-
state = value ? 0 : 2;
|
14523
|
-
continue;
|
14524
|
-
}
|
14525
|
-
// here we have state either equal to 2 or 3
|
14526
|
-
if (!value) {
|
14527
|
-
state = 3;
|
14528
|
-
} else if (value === 1) {
|
14529
|
-
overflow = i - state - 1;
|
14530
|
-
if (lastUnitStart >= 0) {
|
14531
|
-
const unit = {
|
14532
|
-
data: array.subarray(lastUnitStart, overflow),
|
14533
|
-
type: lastUnitType
|
14534
|
-
};
|
14535
|
-
// logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
|
14536
|
-
units.push(unit);
|
14537
|
-
} else {
|
14538
|
-
// lastUnitStart is undefined => this is the first start code found in this PES packet
|
14539
|
-
// first check if start code delimiter is overlapping between 2 PES packets,
|
14540
|
-
// ie it started in last packet (lastState not zero)
|
14541
|
-
// and ended at the beginning of this PES packet (i <= 4 - lastState)
|
14542
|
-
const lastUnit = this.getLastNalUnit(track.samples);
|
14543
|
-
if (lastUnit) {
|
14544
|
-
if (lastState && i <= 4 - lastState) {
|
14545
|
-
// start delimiter overlapping between PES packets
|
14546
|
-
// strip start delimiter bytes from the end of last NAL unit
|
14547
|
-
// check if lastUnit had a state different from zero
|
14548
|
-
if (lastUnit.state) {
|
14549
|
-
// strip last bytes
|
14550
|
-
lastUnit.data = lastUnit.data.subarray(0, lastUnit.data.byteLength - lastState);
|
14551
|
-
}
|
14552
|
-
}
|
14553
|
-
// If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
|
14554
|
-
|
14555
|
-
if (overflow > 0) {
|
14556
|
-
// logger.log('first NALU found with overflow:' + overflow);
|
14557
|
-
lastUnit.data = appendUint8Array(lastUnit.data, array.subarray(0, overflow));
|
14558
|
-
lastUnit.state = 0;
|
14559
|
-
}
|
14560
|
-
}
|
14561
|
-
}
|
14562
|
-
// check if we can read unit type
|
14563
|
-
if (i < len) {
|
14564
|
-
unitType = this.getNALuType(array, i);
|
14565
|
-
// logger.log('find NALU @ offset:' + i + ',type:' + unitType);
|
14566
|
-
lastUnitStart = i;
|
14567
|
-
lastUnitType = unitType;
|
14568
|
-
state = 0;
|
14569
|
-
} else {
|
14570
|
-
// not enough byte to read unit type. let's read it on next PES parsing
|
14571
|
-
state = -1;
|
14572
|
-
}
|
14573
|
-
} else {
|
14574
|
-
state = 0;
|
14575
|
-
}
|
14576
|
-
}
|
14577
|
-
if (lastUnitStart >= 0 && state >= 0) {
|
14578
|
-
const unit = {
|
14579
|
-
data: array.subarray(lastUnitStart, len),
|
14580
|
-
type: lastUnitType,
|
14581
|
-
state: state
|
14582
|
-
};
|
14583
|
-
units.push(unit);
|
14584
|
-
// logger.log('pushing NALU, type/size/state:' + unit.type + '/' + unit.data.byteLength + '/' + state);
|
14585
|
-
}
|
14586
|
-
// no NALu found
|
14587
|
-
if (units.length === 0) {
|
14588
|
-
// append pes.data to previous NAL unit
|
14589
|
-
const lastUnit = this.getLastNalUnit(track.samples);
|
14590
|
-
if (lastUnit) {
|
14591
|
-
lastUnit.data = appendUint8Array(lastUnit.data, array);
|
14592
|
-
}
|
14593
|
-
}
|
14594
|
-
track.naluState = state;
|
14595
|
-
return units;
|
14596
|
-
}
|
14597
13982
|
}
|
14598
13983
|
|
14599
13984
|
/**
|
@@ -14671,76 +14056,259 @@ class ExpGolomb {
|
|
14671
14056
|
} else {
|
14672
14057
|
return valu;
|
14673
14058
|
}
|
14674
|
-
}
|
14675
|
-
|
14676
|
-
// ():uint
|
14677
|
-
skipLZ() {
|
14678
|
-
let leadingZeroCount; // :uint
|
14679
|
-
for (leadingZeroCount = 0; leadingZeroCount < this.bitsAvailable; ++leadingZeroCount) {
|
14680
|
-
if ((this.word & 0x80000000 >>> leadingZeroCount) !== 0) {
|
14681
|
-
// the first bit of working word is 1
|
14682
|
-
this.word <<= leadingZeroCount;
|
14683
|
-
this.bitsAvailable -= leadingZeroCount;
|
14684
|
-
return leadingZeroCount;
|
14059
|
+
}
|
14060
|
+
|
14061
|
+
// ():uint
|
14062
|
+
skipLZ() {
|
14063
|
+
let leadingZeroCount; // :uint
|
14064
|
+
for (leadingZeroCount = 0; leadingZeroCount < this.bitsAvailable; ++leadingZeroCount) {
|
14065
|
+
if ((this.word & 0x80000000 >>> leadingZeroCount) !== 0) {
|
14066
|
+
// the first bit of working word is 1
|
14067
|
+
this.word <<= leadingZeroCount;
|
14068
|
+
this.bitsAvailable -= leadingZeroCount;
|
14069
|
+
return leadingZeroCount;
|
14070
|
+
}
|
14071
|
+
}
|
14072
|
+
// we exhausted word and still have not found a 1
|
14073
|
+
this.loadWord();
|
14074
|
+
return leadingZeroCount + this.skipLZ();
|
14075
|
+
}
|
14076
|
+
|
14077
|
+
// ():void
|
14078
|
+
skipUEG() {
|
14079
|
+
this.skipBits(1 + this.skipLZ());
|
14080
|
+
}
|
14081
|
+
|
14082
|
+
// ():void
|
14083
|
+
skipEG() {
|
14084
|
+
this.skipBits(1 + this.skipLZ());
|
14085
|
+
}
|
14086
|
+
|
14087
|
+
// ():uint
|
14088
|
+
readUEG() {
|
14089
|
+
const clz = this.skipLZ(); // :uint
|
14090
|
+
return this.readBits(clz + 1) - 1;
|
14091
|
+
}
|
14092
|
+
|
14093
|
+
// ():int
|
14094
|
+
readEG() {
|
14095
|
+
const valu = this.readUEG(); // :int
|
14096
|
+
if (0x01 & valu) {
|
14097
|
+
// the number is odd if the low order bit is set
|
14098
|
+
return 1 + valu >>> 1; // add 1 to make it even, and divide by 2
|
14099
|
+
} else {
|
14100
|
+
return -1 * (valu >>> 1); // divide by two then make it negative
|
14101
|
+
}
|
14102
|
+
}
|
14103
|
+
|
14104
|
+
// Some convenience functions
|
14105
|
+
// :Boolean
|
14106
|
+
readBoolean() {
|
14107
|
+
return this.readBits(1) === 1;
|
14108
|
+
}
|
14109
|
+
|
14110
|
+
// ():int
|
14111
|
+
readUByte() {
|
14112
|
+
return this.readBits(8);
|
14113
|
+
}
|
14114
|
+
|
14115
|
+
// ():int
|
14116
|
+
readUShort() {
|
14117
|
+
return this.readBits(16);
|
14118
|
+
}
|
14119
|
+
|
14120
|
+
// ():int
|
14121
|
+
readUInt() {
|
14122
|
+
return this.readBits(32);
|
14123
|
+
}
|
14124
|
+
|
14125
|
+
/**
|
14126
|
+
* Advance the ExpGolomb decoder past a scaling list. The scaling
|
14127
|
+
* list is optionally transmitted as part of a sequence parameter
|
14128
|
+
* set and is not relevant to transmuxing.
|
14129
|
+
* @param count the number of entries in this scaling list
|
14130
|
+
* @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
|
14131
|
+
*/
|
14132
|
+
skipScalingList(count) {
|
14133
|
+
let lastScale = 8;
|
14134
|
+
let nextScale = 8;
|
14135
|
+
let deltaScale;
|
14136
|
+
for (let j = 0; j < count; j++) {
|
14137
|
+
if (nextScale !== 0) {
|
14138
|
+
deltaScale = this.readEG();
|
14139
|
+
nextScale = (lastScale + deltaScale + 256) % 256;
|
14140
|
+
}
|
14141
|
+
lastScale = nextScale === 0 ? lastScale : nextScale;
|
14142
|
+
}
|
14143
|
+
}
|
14144
|
+
|
14145
|
+
/**
|
14146
|
+
* Read a sequence parameter set and return some interesting video
|
14147
|
+
* properties. A sequence parameter set is the H264 metadata that
|
14148
|
+
* describes the properties of upcoming video frames.
|
14149
|
+
* @returns an object with configuration parsed from the
|
14150
|
+
* sequence parameter set, including the dimensions of the
|
14151
|
+
* associated video frames.
|
14152
|
+
*/
|
14153
|
+
readSPS() {
|
14154
|
+
let frameCropLeftOffset = 0;
|
14155
|
+
let frameCropRightOffset = 0;
|
14156
|
+
let frameCropTopOffset = 0;
|
14157
|
+
let frameCropBottomOffset = 0;
|
14158
|
+
let numRefFramesInPicOrderCntCycle;
|
14159
|
+
let scalingListCount;
|
14160
|
+
let i;
|
14161
|
+
const readUByte = this.readUByte.bind(this);
|
14162
|
+
const readBits = this.readBits.bind(this);
|
14163
|
+
const readUEG = this.readUEG.bind(this);
|
14164
|
+
const readBoolean = this.readBoolean.bind(this);
|
14165
|
+
const skipBits = this.skipBits.bind(this);
|
14166
|
+
const skipEG = this.skipEG.bind(this);
|
14167
|
+
const skipUEG = this.skipUEG.bind(this);
|
14168
|
+
const skipScalingList = this.skipScalingList.bind(this);
|
14169
|
+
readUByte();
|
14170
|
+
const profileIdc = readUByte(); // profile_idc
|
14171
|
+
readBits(5); // profileCompat constraint_set[0-4]_flag, u(5)
|
14172
|
+
skipBits(3); // reserved_zero_3bits u(3),
|
14173
|
+
readUByte(); // level_idc u(8)
|
14174
|
+
skipUEG(); // seq_parameter_set_id
|
14175
|
+
// some profiles have more optional data we don't need
|
14176
|
+
if (profileIdc === 100 || profileIdc === 110 || profileIdc === 122 || profileIdc === 244 || profileIdc === 44 || profileIdc === 83 || profileIdc === 86 || profileIdc === 118 || profileIdc === 128) {
|
14177
|
+
const chromaFormatIdc = readUEG();
|
14178
|
+
if (chromaFormatIdc === 3) {
|
14179
|
+
skipBits(1);
|
14180
|
+
} // separate_colour_plane_flag
|
14181
|
+
|
14182
|
+
skipUEG(); // bit_depth_luma_minus8
|
14183
|
+
skipUEG(); // bit_depth_chroma_minus8
|
14184
|
+
skipBits(1); // qpprime_y_zero_transform_bypass_flag
|
14185
|
+
if (readBoolean()) {
|
14186
|
+
// seq_scaling_matrix_present_flag
|
14187
|
+
scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
|
14188
|
+
for (i = 0; i < scalingListCount; i++) {
|
14189
|
+
if (readBoolean()) {
|
14190
|
+
// seq_scaling_list_present_flag[ i ]
|
14191
|
+
if (i < 6) {
|
14192
|
+
skipScalingList(16);
|
14193
|
+
} else {
|
14194
|
+
skipScalingList(64);
|
14195
|
+
}
|
14196
|
+
}
|
14197
|
+
}
|
14198
|
+
}
|
14199
|
+
}
|
14200
|
+
skipUEG(); // log2_max_frame_num_minus4
|
14201
|
+
const picOrderCntType = readUEG();
|
14202
|
+
if (picOrderCntType === 0) {
|
14203
|
+
readUEG(); // log2_max_pic_order_cnt_lsb_minus4
|
14204
|
+
} else if (picOrderCntType === 1) {
|
14205
|
+
skipBits(1); // delta_pic_order_always_zero_flag
|
14206
|
+
skipEG(); // offset_for_non_ref_pic
|
14207
|
+
skipEG(); // offset_for_top_to_bottom_field
|
14208
|
+
numRefFramesInPicOrderCntCycle = readUEG();
|
14209
|
+
for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
|
14210
|
+
skipEG();
|
14211
|
+
} // offset_for_ref_frame[ i ]
|
14212
|
+
}
|
14213
|
+
skipUEG(); // max_num_ref_frames
|
14214
|
+
skipBits(1); // gaps_in_frame_num_value_allowed_flag
|
14215
|
+
const picWidthInMbsMinus1 = readUEG();
|
14216
|
+
const picHeightInMapUnitsMinus1 = readUEG();
|
14217
|
+
const frameMbsOnlyFlag = readBits(1);
|
14218
|
+
if (frameMbsOnlyFlag === 0) {
|
14219
|
+
skipBits(1);
|
14220
|
+
} // mb_adaptive_frame_field_flag
|
14221
|
+
|
14222
|
+
skipBits(1); // direct_8x8_inference_flag
|
14223
|
+
if (readBoolean()) {
|
14224
|
+
// frame_cropping_flag
|
14225
|
+
frameCropLeftOffset = readUEG();
|
14226
|
+
frameCropRightOffset = readUEG();
|
14227
|
+
frameCropTopOffset = readUEG();
|
14228
|
+
frameCropBottomOffset = readUEG();
|
14229
|
+
}
|
14230
|
+
let pixelRatio = [1, 1];
|
14231
|
+
if (readBoolean()) {
|
14232
|
+
// vui_parameters_present_flag
|
14233
|
+
if (readBoolean()) {
|
14234
|
+
// aspect_ratio_info_present_flag
|
14235
|
+
const aspectRatioIdc = readUByte();
|
14236
|
+
switch (aspectRatioIdc) {
|
14237
|
+
case 1:
|
14238
|
+
pixelRatio = [1, 1];
|
14239
|
+
break;
|
14240
|
+
case 2:
|
14241
|
+
pixelRatio = [12, 11];
|
14242
|
+
break;
|
14243
|
+
case 3:
|
14244
|
+
pixelRatio = [10, 11];
|
14245
|
+
break;
|
14246
|
+
case 4:
|
14247
|
+
pixelRatio = [16, 11];
|
14248
|
+
break;
|
14249
|
+
case 5:
|
14250
|
+
pixelRatio = [40, 33];
|
14251
|
+
break;
|
14252
|
+
case 6:
|
14253
|
+
pixelRatio = [24, 11];
|
14254
|
+
break;
|
14255
|
+
case 7:
|
14256
|
+
pixelRatio = [20, 11];
|
14257
|
+
break;
|
14258
|
+
case 8:
|
14259
|
+
pixelRatio = [32, 11];
|
14260
|
+
break;
|
14261
|
+
case 9:
|
14262
|
+
pixelRatio = [80, 33];
|
14263
|
+
break;
|
14264
|
+
case 10:
|
14265
|
+
pixelRatio = [18, 11];
|
14266
|
+
break;
|
14267
|
+
case 11:
|
14268
|
+
pixelRatio = [15, 11];
|
14269
|
+
break;
|
14270
|
+
case 12:
|
14271
|
+
pixelRatio = [64, 33];
|
14272
|
+
break;
|
14273
|
+
case 13:
|
14274
|
+
pixelRatio = [160, 99];
|
14275
|
+
break;
|
14276
|
+
case 14:
|
14277
|
+
pixelRatio = [4, 3];
|
14278
|
+
break;
|
14279
|
+
case 15:
|
14280
|
+
pixelRatio = [3, 2];
|
14281
|
+
break;
|
14282
|
+
case 16:
|
14283
|
+
pixelRatio = [2, 1];
|
14284
|
+
break;
|
14285
|
+
case 255:
|
14286
|
+
{
|
14287
|
+
pixelRatio = [readUByte() << 8 | readUByte(), readUByte() << 8 | readUByte()];
|
14288
|
+
break;
|
14289
|
+
}
|
14290
|
+
}
|
14685
14291
|
}
|
14686
14292
|
}
|
14687
|
-
|
14688
|
-
|
14689
|
-
|
14690
|
-
|
14691
|
-
|
14692
|
-
// ():void
|
14693
|
-
skipUEG() {
|
14694
|
-
this.skipBits(1 + this.skipLZ());
|
14695
|
-
}
|
14696
|
-
|
14697
|
-
// ():void
|
14698
|
-
skipEG() {
|
14699
|
-
this.skipBits(1 + this.skipLZ());
|
14700
|
-
}
|
14701
|
-
|
14702
|
-
// ():uint
|
14703
|
-
readUEG() {
|
14704
|
-
const clz = this.skipLZ(); // :uint
|
14705
|
-
return this.readBits(clz + 1) - 1;
|
14706
|
-
}
|
14707
|
-
|
14708
|
-
// ():int
|
14709
|
-
readEG() {
|
14710
|
-
const valu = this.readUEG(); // :int
|
14711
|
-
if (0x01 & valu) {
|
14712
|
-
// the number is odd if the low order bit is set
|
14713
|
-
return 1 + valu >>> 1; // add 1 to make it even, and divide by 2
|
14714
|
-
} else {
|
14715
|
-
return -1 * (valu >>> 1); // divide by two then make it negative
|
14716
|
-
}
|
14717
|
-
}
|
14718
|
-
|
14719
|
-
// Some convenience functions
|
14720
|
-
// :Boolean
|
14721
|
-
readBoolean() {
|
14722
|
-
return this.readBits(1) === 1;
|
14723
|
-
}
|
14724
|
-
|
14725
|
-
// ():int
|
14726
|
-
readUByte() {
|
14727
|
-
return this.readBits(8);
|
14728
|
-
}
|
14729
|
-
|
14730
|
-
// ():int
|
14731
|
-
readUShort() {
|
14732
|
-
return this.readBits(16);
|
14293
|
+
return {
|
14294
|
+
width: Math.ceil((picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2),
|
14295
|
+
height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - (frameMbsOnlyFlag ? 2 : 4) * (frameCropTopOffset + frameCropBottomOffset),
|
14296
|
+
pixelRatio: pixelRatio
|
14297
|
+
};
|
14733
14298
|
}
|
14734
|
-
|
14735
|
-
|
14736
|
-
|
14737
|
-
|
14299
|
+
readSliceType() {
|
14300
|
+
// skip NALu type
|
14301
|
+
this.readUByte();
|
14302
|
+
// discard first_mb_in_slice
|
14303
|
+
this.readUEG();
|
14304
|
+
// return slice_type
|
14305
|
+
return this.readUEG();
|
14738
14306
|
}
|
14739
14307
|
}
|
14740
14308
|
|
14741
14309
|
class AvcVideoParser extends BaseVideoParser {
|
14742
|
-
|
14743
|
-
const units = this.
|
14310
|
+
parseAVCPES(track, textTrack, pes, last, duration) {
|
14311
|
+
const units = this.parseAVCNALu(track, pes.data);
|
14744
14312
|
let VideoSample = this.VideoSample;
|
14745
14313
|
let push;
|
14746
14314
|
let spsfound = false;
|
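The `ExpGolomb` reader reinstated above (`skipLZ`, `readUEG`, `readEG`, `readSPS`, `readSliceType`) decodes the Exp-Golomb codes H.264 uses for most SPS and slice-header fields: count leading zeros, read that many bits plus one, subtract one. A toy decoder over a bit string, offered only to make the ue(v) mapping concrete (it is not how the class above consumes its word buffer):

```ts
// Decode one unsigned Exp-Golomb (ue(v)) value from a bit string such as '00111'.
// Input must contain a complete codeword (leading zeros, a 1, then that many info bits).
function decodeUEG(bits: string): { value: number; rest: string } {
  let leadingZeros = 0;
  while (bits[leadingZeros] === '0') {
    leadingZeros++;
  }
  const codeword = bits.slice(0, 2 * leadingZeros + 1);
  const value = parseInt(codeword, 2) - 1; // codeword interpreted as binary, minus 1
  return { value, rest: bits.slice(codeword.length) };
}

// '00111': two leading zeros, codeword value 7, so ue(v) = 6.
console.log(decodeUEG('00111').value); // 6
```

`readEG()` above then maps the unsigned value to a signed one (odd codes positive, even codes negative), and `readSPS()` chains these reads to recover width, height, and pixel aspect ratio.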
@@ -14765,7 +14333,7 @@ class AvcVideoParser extends BaseVideoParser {
|
|
14765
14333
|
// only check slice type to detect KF in case SPS found in same packet (any keyframe is preceded by SPS ...)
|
14766
14334
|
if (spsfound && data.length > 4) {
|
14767
14335
|
// retrieve slice type by parsing beginning of NAL unit (follow H264 spec, slice_header definition) to detect keyframe embedded in NDR
|
14768
|
-
const sliceType =
|
14336
|
+
const sliceType = new ExpGolomb(data).readSliceType();
|
14769
14337
|
// 2 : I slice, 4 : SI slice, 7 : I slice, 9: SI slice
|
14770
14338
|
// SI slice : A slice that is coded using intra prediction only and using quantisation of the prediction samples.
|
14771
14339
|
// An SI slice can be coded such that its decoded samples can be constructed identically to an SP slice.
|
@@ -14819,7 +14387,8 @@ class AvcVideoParser extends BaseVideoParser {
|
|
14819
14387
|
push = true;
|
14820
14388
|
spsfound = true;
|
14821
14389
|
const sps = unit.data;
|
14822
|
-
const
|
14390
|
+
const expGolombDecoder = new ExpGolomb(sps);
|
14391
|
+
const config = expGolombDecoder.readSPS();
|
14823
14392
|
if (!track.sps || track.width !== config.width || track.height !== config.height || ((_track$pixelRatio = track.pixelRatio) == null ? void 0 : _track$pixelRatio[0]) !== config.pixelRatio[0] || ((_track$pixelRatio2 = track.pixelRatio) == null ? void 0 : _track$pixelRatio2[1]) !== config.pixelRatio[1]) {
|
14824
14393
|
track.width = config.width;
|
14825
14394
|
track.height = config.height;
|
@@ -14875,192 +14444,109 @@ class AvcVideoParser extends BaseVideoParser {
|
|
14875
14444
|
this.VideoSample = null;
|
14876
14445
|
}
|
14877
14446
|
}
|
14878
|
-
|
14879
|
-
|
14880
|
-
|
14881
|
-
|
14882
|
-
const
|
14883
|
-
|
14884
|
-
|
14885
|
-
|
14886
|
-
|
14887
|
-
|
14888
|
-
|
14889
|
-
|
14447
|
+
parseAVCNALu(track, array) {
|
14448
|
+
const len = array.byteLength;
|
14449
|
+
let state = track.naluState || 0;
|
14450
|
+
const lastState = state;
|
14451
|
+
const units = [];
|
14452
|
+
let i = 0;
|
14453
|
+
let value;
|
14454
|
+
let overflow;
|
14455
|
+
let unitType;
|
14456
|
+
let lastUnitStart = -1;
|
14457
|
+
let lastUnitType = 0;
|
14458
|
+
// logger.log('PES:' + Hex.hexDump(array));
|
14890
14459
|
|
14891
|
-
|
14892
|
-
|
14893
|
-
|
14894
|
-
|
14895
|
-
|
14896
|
-
|
14897
|
-
|
14898
|
-
let lastScale = 8;
|
14899
|
-
let nextScale = 8;
|
14900
|
-
let deltaScale;
|
14901
|
-
for (let j = 0; j < count; j++) {
|
14902
|
-
if (nextScale !== 0) {
|
14903
|
-
deltaScale = reader.readEG();
|
14904
|
-
nextScale = (lastScale + deltaScale + 256) % 256;
|
14905
|
-
}
|
14906
|
-
lastScale = nextScale === 0 ? lastScale : nextScale;
|
14460
|
+
if (state === -1) {
|
14461
|
+
// special use case where we found 3 or 4-byte start codes exactly at the end of previous PES packet
|
14462
|
+
lastUnitStart = 0;
|
14463
|
+
// NALu type is value read from offset 0
|
14464
|
+
lastUnitType = array[0] & 0x1f;
|
14465
|
+
state = 0;
|
14466
|
+
i = 1;
|
14907
14467
|
}
|
14908
|
-
|
14909
|
-
|
14910
|
-
|
14911
|
-
|
14912
|
-
|
14913
|
-
|
14914
|
-
|
14915
|
-
|
14916
|
-
|
14917
|
-
|
14918
|
-
|
14919
|
-
|
14920
|
-
|
14921
|
-
|
14922
|
-
|
14923
|
-
|
14924
|
-
|
14925
|
-
|
14926
|
-
|
14927
|
-
|
14928
|
-
|
14929
|
-
|
14930
|
-
|
14931
|
-
|
14932
|
-
|
14933
|
-
|
14934
|
-
|
14935
|
-
|
14936
|
-
|
14937
|
-
|
14938
|
-
|
14939
|
-
|
14940
|
-
|
14941
|
-
|
14942
|
-
|
14943
|
-
|
14944
|
-
|
14945
|
-
|
14946
|
-
|
14468
|
+
while (i < len) {
|
14469
|
+
value = array[i++];
|
14470
|
+
// optimization. state 0 and 1 are the predominant case. let's handle them outside of the switch/case
|
14471
|
+
if (!state) {
|
14472
|
+
state = value ? 0 : 1;
|
14473
|
+
continue;
|
14474
|
+
}
|
14475
|
+
if (state === 1) {
|
14476
|
+
state = value ? 0 : 2;
|
14477
|
+
continue;
|
14478
|
+
}
|
14479
|
+
// here we have state either equal to 2 or 3
|
14480
|
+
if (!value) {
|
14481
|
+
state = 3;
|
14482
|
+
} else if (value === 1) {
|
14483
|
+
overflow = i - state - 1;
|
14484
|
+
if (lastUnitStart >= 0) {
|
14485
|
+
const unit = {
|
14486
|
+
data: array.subarray(lastUnitStart, overflow),
|
14487
|
+
type: lastUnitType
|
14488
|
+
};
|
14489
|
+
// logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
|
14490
|
+
units.push(unit);
|
14491
|
+
} else {
|
14492
|
+
// lastUnitStart is undefined => this is the first start code found in this PES packet
|
14493
|
+
// first check if start code delimiter is overlapping between 2 PES packets,
|
14494
|
+
// ie it started in last packet (lastState not zero)
|
14495
|
+
// and ended at the beginning of this PES packet (i <= 4 - lastState)
|
14496
|
+
const lastUnit = this.getLastNalUnit(track.samples);
|
14497
|
+
if (lastUnit) {
|
14498
|
+
if (lastState && i <= 4 - lastState) {
|
14499
|
+
// start delimiter overlapping between PES packets
|
14500
|
+
// strip start delimiter bytes from the end of last NAL unit
|
14501
|
+
// check if lastUnit had a state different from zero
|
14502
|
+
if (lastUnit.state) {
|
14503
|
+
// strip last bytes
|
14504
|
+
lastUnit.data = lastUnit.data.subarray(0, lastUnit.data.byteLength - lastState);
|
14505
|
+
}
|
14506
|
+
}
|
14507
|
+
// If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
|
14947
14508
|
|
14948
|
-
|
14949
|
-
|
14950
|
-
|
14951
|
-
|
14952
|
-
// seq_scaling_matrix_present_flag
|
14953
|
-
scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
|
14954
|
-
for (i = 0; i < scalingListCount; i++) {
|
14955
|
-
if (readBoolean()) {
|
14956
|
-
// seq_scaling_list_present_flag[ i ]
|
14957
|
-
if (i < 6) {
|
14958
|
-
skipScalingList(16, eg);
|
14959
|
-
} else {
|
14960
|
-
skipScalingList(64, eg);
|
14509
|
+
if (overflow > 0) {
|
14510
|
+
// logger.log('first NALU found with overflow:' + overflow);
|
14511
|
+
lastUnit.data = appendUint8Array(lastUnit.data, array.subarray(0, overflow));
|
14512
|
+
lastUnit.state = 0;
|
14961
14513
|
}
|
14962
14514
|
}
|
14963
14515
|
}
|
14516
|
+
// check if we can read unit type
|
14517
|
+
if (i < len) {
|
14518
|
+
unitType = array[i] & 0x1f;
|
14519
|
+
// logger.log('find NALU @ offset:' + i + ',type:' + unitType);
|
14520
|
+
lastUnitStart = i;
|
14521
|
+
lastUnitType = unitType;
|
14522
|
+
state = 0;
|
14523
|
+
} else {
|
14524
|
+
// not enough byte to read unit type. let's read it on next PES parsing
|
14525
|
+
state = -1;
|
14526
|
+
}
|
14527
|
+
} else {
|
14528
|
+
state = 0;
|
14964
14529
|
}
|
14965
14530
|
}
|
14966
|
-
|
14967
|
-
|
14968
|
-
|
14969
|
-
|
14970
|
-
|
14971
|
-
|
14972
|
-
|
14973
|
-
|
14974
|
-
numRefFramesInPicOrderCntCycle = readUEG();
|
14975
|
-
for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
|
14976
|
-
skipEG();
|
14977
|
-
} // offset_for_ref_frame[ i ]
|
14978
|
-
}
|
14979
|
-
skipUEG(); // max_num_ref_frames
|
14980
|
-
skipBits(1); // gaps_in_frame_num_value_allowed_flag
|
14981
|
-
const picWidthInMbsMinus1 = readUEG();
|
14982
|
-
const picHeightInMapUnitsMinus1 = readUEG();
|
14983
|
-
const frameMbsOnlyFlag = readBits(1);
|
14984
|
-
if (frameMbsOnlyFlag === 0) {
|
14985
|
-
skipBits(1);
|
14986
|
-
} // mb_adaptive_frame_field_flag
|
14987
|
-
|
14988
|
-
skipBits(1); // direct_8x8_inference_flag
|
14989
|
-
if (readBoolean()) {
|
14990
|
-
// frame_cropping_flag
|
14991
|
-
frameCropLeftOffset = readUEG();
|
14992
|
-
frameCropRightOffset = readUEG();
|
14993
|
-
frameCropTopOffset = readUEG();
|
14994
|
-
frameCropBottomOffset = readUEG();
|
14531
|
+
if (lastUnitStart >= 0 && state >= 0) {
|
14532
|
+
const unit = {
|
14533
|
+
data: array.subarray(lastUnitStart, len),
|
14534
|
+
type: lastUnitType,
|
14535
|
+
state: state
|
14536
|
+
};
|
14537
|
+
units.push(unit);
|
14538
|
+
// logger.log('pushing NALU, type/size/state:' + unit.type + '/' + unit.data.byteLength + '/' + state);
|
14995
14539
|
}
|
14996
|
-
|
14997
|
-
if (
|
14998
|
-
//
|
14999
|
-
|
15000
|
-
|
15001
|
-
|
15002
|
-
switch (aspectRatioIdc) {
|
15003
|
-
case 1:
|
15004
|
-
pixelRatio = [1, 1];
|
15005
|
-
break;
|
15006
|
-
case 2:
|
15007
|
-
pixelRatio = [12, 11];
|
15008
|
-
break;
|
15009
|
-
case 3:
|
15010
|
-
pixelRatio = [10, 11];
|
15011
|
-
break;
|
15012
|
-
case 4:
|
15013
|
-
pixelRatio = [16, 11];
|
15014
|
-
break;
|
15015
|
-
case 5:
|
15016
|
-
pixelRatio = [40, 33];
|
15017
|
-
break;
|
15018
|
-
case 6:
|
15019
|
-
pixelRatio = [24, 11];
|
15020
|
-
break;
|
15021
|
-
case 7:
|
15022
|
-
pixelRatio = [20, 11];
|
15023
|
-
break;
|
15024
|
-
case 8:
|
15025
|
-
pixelRatio = [32, 11];
|
15026
|
-
break;
|
15027
|
-
case 9:
|
15028
|
-
pixelRatio = [80, 33];
|
15029
|
-
break;
|
15030
|
-
case 10:
|
15031
|
-
pixelRatio = [18, 11];
|
15032
|
-
break;
|
15033
|
-
case 11:
|
15034
|
-
pixelRatio = [15, 11];
|
15035
|
-
break;
|
15036
|
-
case 12:
|
15037
|
-
pixelRatio = [64, 33];
|
15038
|
-
break;
|
15039
|
-
case 13:
|
15040
|
-
pixelRatio = [160, 99];
|
15041
|
-
break;
|
15042
|
-
case 14:
|
15043
|
-
pixelRatio = [4, 3];
|
15044
|
-
break;
|
15045
|
-
case 15:
|
15046
|
-
pixelRatio = [3, 2];
|
15047
|
-
break;
|
15048
|
-
case 16:
|
15049
|
-
pixelRatio = [2, 1];
|
15050
|
-
break;
|
15051
|
-
case 255:
|
15052
|
-
{
|
15053
|
-
pixelRatio = [readUByte() << 8 | readUByte(), readUByte() << 8 | readUByte()];
|
15054
|
-
break;
|
15055
|
-
}
|
15056
|
-
}
|
14540
|
+
// no NALu found
|
14541
|
+
if (units.length === 0) {
|
14542
|
+
// append pes.data to previous NAL unit
|
14543
|
+
const lastUnit = this.getLastNalUnit(track.samples);
|
14544
|
+
if (lastUnit) {
|
14545
|
+
lastUnit.data = appendUint8Array(lastUnit.data, array);
|
15057
14546
|
}
|
15058
14547
|
}
|
15059
|
-
|
15060
|
-
|
15061
|
-
height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - (frameMbsOnlyFlag ? 2 : 4) * (frameCropTopOffset + frameCropBottomOffset),
|
15062
|
-
pixelRatio: pixelRatio
|
15063
|
-
};
|
14548
|
+
track.naluState = state;
|
14549
|
+
return units;
|
15064
14550
|
}
|
15065
14551
|
}
|
15066
14552
|
|
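`parseAVCNALu()` above is a byte-at-a-time state machine that finds Annex-B start codes (`00 00 01` / `00 00 00 01`), carries partial start codes across PES packet boundaries via `track.naluState`, and emits `{ data, type }` units with the NAL type taken from the first payload byte. A simplified, single-buffer version of the same scan, which drops the cross-packet bookkeeping and treats any zeros before a start code as padding:

```ts
// Simplified Annex-B NAL unit splitter; illustrative, not the parser above.
function splitNalUnits(data: Uint8Array): { type: number; data: Uint8Array }[] {
  const units: { type: number; data: Uint8Array }[] = [];
  let zeroCount = 0;
  let unitStart = -1; // index of the first payload byte of the current unit
  for (let i = 0; i < data.length; i++) {
    const byte = data[i];
    if (byte === 0) {
      zeroCount++;
      continue;
    }
    if (byte === 1 && zeroCount >= 2) {
      // Start code ends at i; the previous unit's payload ends before its zeros.
      if (unitStart >= 0) {
        units.push({
          type: data[unitStart] & 0x1f, // NAL unit type = low 5 bits of first byte
          data: data.subarray(unitStart, i - zeroCount),
        });
      }
      unitStart = i + 1;
    }
    zeroCount = 0;
  }
  if (unitStart >= 0 && unitStart < data.length) {
    units.push({ type: data[unitStart] & 0x1f, data: data.subarray(unitStart) });
  }
  return units;
}
```

The extra complexity in the real parser (the `state === -1` path and `getLastNalUnit`) exists precisely because a start code or unit can straddle two PES packets, which this sketch ignores.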
@@ -15078,7 +14564,7 @@ class SampleAesDecrypter {
|
|
15078
14564
|
});
|
15079
14565
|
}
|
15080
14566
|
decryptBuffer(encryptedData) {
|
15081
|
-
return this.decrypter.decrypt(encryptedData, this.keyData.key.buffer, this.keyData.iv.buffer
|
14567
|
+
return this.decrypter.decrypt(encryptedData, this.keyData.key.buffer, this.keyData.iv.buffer);
|
15082
14568
|
}
|
15083
14569
|
|
15084
14570
|
// AAC - encrypt all full 16 bytes blocks starting from offset 16
|
@@ -15192,7 +14678,7 @@ class TSDemuxer {
|
|
15192
14678
|
this.observer = observer;
|
15193
14679
|
this.config = config;
|
15194
14680
|
this.typeSupported = typeSupported;
|
15195
|
-
this.videoParser =
|
14681
|
+
this.videoParser = new AvcVideoParser();
|
15196
14682
|
}
|
15197
14683
|
static probe(data) {
|
15198
14684
|
const syncOffset = TSDemuxer.syncOffset(data);
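`TSDemuxer.probe()` above relies on `syncOffset()` to find where 188-byte MPEG-TS packets begin. As an illustration of the idea (not the bundled implementation), a sync probe can scan for an offset at which the 0x47 sync byte repeats every packet length:

```ts
// Illustrative MPEG-TS sync scan: find an offset where 0x47 recurs every 188 bytes.
const PACKET_LENGTH = 188;

function findSyncOffset(data: Uint8Array, scan = 5): number {
  const limit = Math.min(PACKET_LENGTH, data.length - scan * PACKET_LENGTH);
  for (let offset = 0; offset < limit; offset++) {
    let aligned = true;
    for (let i = 0; i < scan; i++) {
      if (data[offset + i * PACKET_LENGTH] !== 0x47) {
        aligned = false;
        break;
      }
    }
    if (aligned) {
      return offset;
    }
  }
  return -1; // not enough aligned packets: treat the buffer as non-TS
}
```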
|
@@ -15357,16 +14843,7 @@ class TSDemuxer {
|
|
15357
14843
|
case videoPid:
|
15358
14844
|
if (stt) {
|
15359
14845
|
if (videoData && (pes = parsePES(videoData))) {
|
15360
|
-
|
15361
|
-
switch (videoTrack.segmentCodec) {
|
15362
|
-
case 'avc':
|
15363
|
-
this.videoParser = new AvcVideoParser();
|
15364
|
-
break;
|
15365
|
-
}
|
15366
|
-
}
|
15367
|
-
if (this.videoParser !== null) {
|
15368
|
-
this.videoParser.parsePES(videoTrack, textTrack, pes, false, this._duration);
|
15369
|
-
}
|
14846
|
+
this.videoParser.parseAVCPES(videoTrack, textTrack, pes, false, this._duration);
|
15370
14847
|
}
|
15371
14848
|
videoData = {
|
15372
14849
|
data: [],
|
@@ -15528,17 +15005,8 @@ class TSDemuxer {
|
|
15528
15005
|
// try to parse last PES packets
|
15529
15006
|
let pes;
|
15530
15007
|
if (videoData && (pes = parsePES(videoData))) {
|
15531
|
-
|
15532
|
-
|
15533
|
-
case 'avc':
|
15534
|
-
this.videoParser = new AvcVideoParser();
|
15535
|
-
break;
|
15536
|
-
}
|
15537
|
-
}
|
15538
|
-
if (this.videoParser !== null) {
|
15539
|
-
this.videoParser.parsePES(videoTrack, textTrack, pes, true, this._duration);
|
15540
|
-
videoTrack.pesData = null;
|
15541
|
-
}
|
15008
|
+
this.videoParser.parseAVCPES(videoTrack, textTrack, pes, true, this._duration);
|
15009
|
+
videoTrack.pesData = null;
|
15542
15010
|
} else {
|
15543
15011
|
// either avcData null or PES truncated, keep it for next frag parsing
|
15544
15012
|
videoTrack.pesData = videoData;
|
@@ -15841,10 +15309,7 @@ function parsePMT(data, offset, typeSupported, isSampleAes) {
|
|
15841
15309
|
logger.warn('Unsupported EC-3 in M2TS found');
|
15842
15310
|
break;
|
15843
15311
|
case 0x24:
|
15844
|
-
|
15845
|
-
{
|
15846
|
-
logger.warn('Unsupported HEVC in M2TS found');
|
15847
|
-
}
|
15312
|
+
logger.warn('Unsupported HEVC in M2TS found');
|
15848
15313
|
break;
|
15849
15314
|
}
|
15850
15315
|
// move to the next table entry
|
@@ -15988,11 +15453,11 @@ class MP3Demuxer extends BaseAudioDemuxer {
|
|
15988
15453
|
// Look for MPEG header | 1111 1111 | 111X XYZX | where X can be either 0 or 1 and Y or Z should be 1
|
15989
15454
|
// Layer bits (position 14 and 15) in header should be always different from 0 (Layer I or Layer II or Layer III)
|
15990
15455
|
// More info http://www.mp3-tech.org/programmer/frame_header.html
|
15991
|
-
const id3Data =
|
15456
|
+
const id3Data = getID3Data(data, 0);
|
15992
15457
|
let offset = (id3Data == null ? void 0 : id3Data.length) || 0;
|
15993
15458
|
|
15994
15459
|
// Check for ac-3|ec-3 sync bytes and return false if present
|
15995
|
-
if (id3Data && data[offset] === 0x0b && data[offset + 1] === 0x77 &&
|
15460
|
+
if (id3Data && data[offset] === 0x0b && data[offset + 1] === 0x77 && getTimeStamp(id3Data) !== undefined &&
|
15996
15461
|
// check the bsid to confirm ac-3 or ec-3 (not mp3)
|
15997
15462
|
getAudioBSID(data, offset) <= 16) {
|
15998
15463
|
return false;
|
@@ -16067,8 +15532,6 @@ class MP4 {
|
|
16067
15532
|
avc1: [],
|
16068
15533
|
// codingname
|
16069
15534
|
avcC: [],
|
16070
|
-
hvc1: [],
|
16071
|
-
hvcC: [],
|
16072
15535
|
btrt: [],
|
16073
15536
|
dinf: [],
|
16074
15537
|
dref: [],
|
@@ -16493,10 +15956,8 @@ class MP4 {
|
|
16493
15956
|
return MP4.box(MP4.types.stsd, MP4.STSD, MP4.ac3(track));
|
16494
15957
|
}
|
16495
15958
|
return MP4.box(MP4.types.stsd, MP4.STSD, MP4.mp4a(track));
|
16496
|
-
} else if (track.segmentCodec === 'avc') {
|
16497
|
-
return MP4.box(MP4.types.stsd, MP4.STSD, MP4.avc1(track));
|
16498
15959
|
} else {
|
16499
|
-
return MP4.box(MP4.types.stsd, MP4.STSD, MP4.
|
15960
|
+
return MP4.box(MP4.types.stsd, MP4.STSD, MP4.avc1(track));
|
16500
15961
|
}
|
16501
15962
|
}
|
16502
15963
|
static tkhd(track) {
|
@@ -16634,84 +16095,6 @@ class MP4 {
|
|
16634
16095
|
const result = appendUint8Array(MP4.FTYP, movie);
|
16635
16096
|
return result;
|
16636
16097
|
}
|
16637
|
-
static hvc1(track) {
|
16638
|
-
const ps = track.params;
|
16639
|
-
const units = [track.vps, track.sps, track.pps];
|
16640
|
-
const NALuLengthSize = 4;
|
16641
|
-
const config = new Uint8Array([0x01, ps.general_profile_space << 6 | (ps.general_tier_flag ? 32 : 0) | ps.general_profile_idc, ps.general_profile_compatibility_flags[0], ps.general_profile_compatibility_flags[1], ps.general_profile_compatibility_flags[2], ps.general_profile_compatibility_flags[3], ps.general_constraint_indicator_flags[0], ps.general_constraint_indicator_flags[1], ps.general_constraint_indicator_flags[2], ps.general_constraint_indicator_flags[3], ps.general_constraint_indicator_flags[4], ps.general_constraint_indicator_flags[5], ps.general_level_idc, 240 | ps.min_spatial_segmentation_idc >> 8, 255 & ps.min_spatial_segmentation_idc, 252 | ps.parallelismType, 252 | ps.chroma_format_idc, 248 | ps.bit_depth_luma_minus8, 248 | ps.bit_depth_chroma_minus8, 0x00, parseInt(ps.frame_rate.fps), NALuLengthSize - 1 | ps.temporal_id_nested << 2 | ps.num_temporal_layers << 3 | (ps.frame_rate.fixed ? 64 : 0), units.length]);
|
16642
|
-
|
16643
|
-
// compute hvcC size in bytes
|
16644
|
-
let length = config.length;
|
16645
|
-
for (let i = 0; i < units.length; i += 1) {
|
16646
|
-
length += 3;
|
16647
|
-
for (let j = 0; j < units[i].length; j += 1) {
|
16648
|
-
length += 2 + units[i][j].length;
|
16649
|
-
}
|
16650
|
-
}
|
16651
|
-
const hvcC = new Uint8Array(length);
|
16652
|
-
hvcC.set(config, 0);
|
16653
|
-
length = config.length;
|
16654
|
-
// append parameter set units: one vps, one or more sps and pps
|
16655
|
-
const iMax = units.length - 1;
|
16656
|
-
for (let i = 0; i < units.length; i += 1) {
|
16657
|
-
hvcC.set(new Uint8Array([32 + i | (i === iMax ? 128 : 0), 0x00, units[i].length]), length);
|
16658
|
-
length += 3;
|
16659
|
-
for (let j = 0; j < units[i].length; j += 1) {
|
16660
|
-
hvcC.set(new Uint8Array([units[i][j].length >> 8, units[i][j].length & 255]), length);
|
16661
|
-
length += 2;
|
16662
|
-
hvcC.set(units[i][j], length);
|
16663
|
-
length += units[i][j].length;
|
16664
|
-
}
|
16665
|
-
}
|
16666
|
-
const hvcc = MP4.box(MP4.types.hvcC, hvcC);
|
16667
|
-
const width = track.width;
|
16668
|
-
const height = track.height;
|
16669
|
-
const hSpacing = track.pixelRatio[0];
|
16670
|
-
const vSpacing = track.pixelRatio[1];
|
16671
|
-
return MP4.box(MP4.types.hvc1, new Uint8Array([0x00, 0x00, 0x00,
|
16672
|
-
// reserved
|
16673
|
-
0x00, 0x00, 0x00,
|
16674
|
-
// reserved
|
16675
|
-
0x00, 0x01,
|
16676
|
-
// data_reference_index
|
16677
|
-
0x00, 0x00,
|
16678
|
-
// pre_defined
|
16679
|
-
0x00, 0x00,
|
16680
|
-
// reserved
|
16681
|
-
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
16682
|
-
// pre_defined
|
16683
|
-
width >> 8 & 0xff, width & 0xff,
|
16684
|
-
// width
|
16685
|
-
height >> 8 & 0xff, height & 0xff,
|
16686
|
-
// height
|
16687
|
-
0x00, 0x48, 0x00, 0x00,
|
16688
|
-
// horizresolution
|
16689
|
-
0x00, 0x48, 0x00, 0x00,
|
16690
|
-
// vertresolution
|
16691
|
-
0x00, 0x00, 0x00, 0x00,
|
16692
|
-
// reserved
|
16693
|
-
0x00, 0x01,
|
16694
|
-
// frame_count
|
16695
|
-
0x12, 0x64, 0x61, 0x69, 0x6c,
|
16696
|
-
// dailymotion/hls.js
|
16697
|
-
0x79, 0x6d, 0x6f, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x68, 0x6c, 0x73, 0x2e, 0x6a, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
16698
|
-
// compressorname
|
16699
|
-
0x00, 0x18,
|
16700
|
-
// depth = 24
|
16701
|
-
0x11, 0x11]),
|
16702
|
-
// pre_defined = -1
|
16703
|
-
hvcc, MP4.box(MP4.types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80,
|
16704
|
-
// bufferSizeDB
|
16705
|
-
0x00, 0x2d, 0xc6, 0xc0,
|
16706
|
-
// maxBitrate
|
16707
|
-
0x00, 0x2d, 0xc6, 0xc0])),
|
16708
|
-
// avgBitrate
|
16709
|
-
MP4.box(MP4.types.pasp, new Uint8Array([hSpacing >> 24,
|
16710
|
-
// hSpacing
|
16711
|
-
hSpacing >> 16 & 0xff, hSpacing >> 8 & 0xff, hSpacing & 0xff, vSpacing >> 24,
|
16712
|
-
// vSpacing
|
16713
|
-
vSpacing >> 16 & 0xff, vSpacing >> 8 & 0xff, vSpacing & 0xff])));
|
16714
|
-
}
|
16715
16098
|
}
|
16716
16099
|
MP4.types = void 0;
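The removed `hvc1()` builder above, like the remaining `avc1`/`stsd` code, ultimately funnels everything through `MP4.box()`: a 32-bit big-endian size, a 4-character type, then the concatenated payload. A generic sketch of that assembly step, under the assumption that the type is given as a 4-character string (the real class keeps pre-encoded type arrays in `MP4.types`):

```ts
// Generic ISO BMFF box assembly; illustrative, not MP4.box() itself.
function mp4Box(type: string, ...payload: Uint8Array[]): Uint8Array {
  const size = 8 + payload.reduce((len, p) => len + p.byteLength, 0);
  const result = new Uint8Array(size);
  const view = new DataView(result.buffer);
  view.setUint32(0, size); // big-endian box size, including the 8-byte header
  for (let i = 0; i < 4; i++) {
    result[4 + i] = type.charCodeAt(i); // 4CC box type
  }
  let offset = 8;
  for (const p of payload) {
    result.set(p, offset);
    offset += p.byteLength;
  }
  return result;
}

// e.g. mp4Box('ftyp', brandBytes) or nesting: mp4Box('moov', mvhd, trak)
```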
|
16717
16100
|
MP4.HDLR_TYPES = void 0;
|
@@ -17087,9 +16470,9 @@ class MP4Remuxer {
|
|
17087
16470
|
const foundOverlap = delta < -1;
|
17088
16471
|
if (foundHole || foundOverlap) {
|
17089
16472
|
if (foundHole) {
|
17090
|
-
logger.warn(
|
16473
|
+
logger.warn(`AVC: ${toMsFromMpegTsClock(delta, true)} ms (${delta}dts) hole between fragments detected at ${timeOffset.toFixed(3)}`);
|
17091
16474
|
} else {
|
17092
|
-
logger.warn(
|
16475
|
+
logger.warn(`AVC: ${toMsFromMpegTsClock(-delta, true)} ms (${delta}dts) overlapping between fragments detected at ${timeOffset.toFixed(3)}`);
|
17093
16476
|
}
|
17094
16477
|
if (!foundOverlap || nextAvcDts >= inputSamples[0].pts || chromeVersion) {
|
17095
16478
|
firstDTS = nextAvcDts;
|
@@ -17098,24 +16481,12 @@ class MP4Remuxer {
|
|
17098
16481
|
inputSamples[0].dts = firstDTS;
|
17099
16482
|
inputSamples[0].pts = firstPTS;
|
17100
16483
|
} else {
|
17101
|
-
let isPTSOrderRetained = true;
|
17102
16484
|
for (let i = 0; i < inputSamples.length; i++) {
|
17103
|
-
if (inputSamples[i].dts > firstPTS
|
16485
|
+
if (inputSamples[i].dts > firstPTS) {
|
17104
16486
|
break;
|
17105
16487
|
}
|
17106
|
-
const prevPTS = inputSamples[i].pts;
|
17107
16488
|
inputSamples[i].dts -= delta;
|
17108
16489
|
inputSamples[i].pts -= delta;
|
17109
|
-
|
17110
|
-
// check to see if this sample's PTS order has changed
|
17111
|
-
// relative to the next one
|
17112
|
-
if (i < inputSamples.length - 1) {
|
17113
|
-
const nextSamplePTS = inputSamples[i + 1].pts;
|
17114
|
-
const currentSamplePTS = inputSamples[i].pts;
|
17115
|
-
const currentOrder = nextSamplePTS <= currentSamplePTS;
|
17116
|
-
const prevOrder = nextSamplePTS <= prevPTS;
|
17117
|
-
isPTSOrderRetained = currentOrder == prevOrder;
|
17118
|
-
}
|
17119
16490
|
}
|
17120
16491
|
}
|
17121
16492
|
logger.log(`Video: Initial PTS/DTS adjusted: ${toMsFromMpegTsClock(firstPTS, true)}/${toMsFromMpegTsClock(firstDTS, true)}, delta: ${toMsFromMpegTsClock(delta, true)} ms`);
|
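Note: the two hunks above simplify the hole/overlap handling in MP4Remuxer. `delta` is the offset between the expected next DTS and the first sample's DTS, an overlap is flagged when `delta < -1`, and in the fallback branch every sample up to the first one past `firstPTS` is shifted by `delta`. A rough sketch of that shift with simplified types (not the hls.js implementation):

```ts
interface VideoSample {
  dts: number;
  pts: number;
}

// Sketch: align samples to the expected timeline by subtracting `delta` from
// each sample until one already starts after `firstPTS` (simplified model).
function shiftSamples(samples: VideoSample[], delta: number, firstPTS: number): void {
  for (const sample of samples) {
    if (sample.dts > firstPTS) {
      break;
    }
    sample.dts -= delta;
    sample.pts -= delta;
  }
}
```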
@@ -17263,7 +16634,7 @@ class MP4Remuxer {
  }
  }
  }
- // next AVC
+ // next AVC sample DTS should be equal to last sample DTS + last sample duration (in PES timescale)
  mp4SampleDuration = stretchedLastFrame || !mp4SampleDuration ? averageSampleDuration : mp4SampleDuration;
  this.nextAvcDts = nextAvcDts = lastDTS + mp4SampleDuration;
  this.videoSampleDuration = mp4SampleDuration;
@@ -17396,7 +16767,7 @@ class MP4Remuxer {
  logger.warn(`[mp4-remuxer]: Injecting ${missing} audio frame @ ${(nextPts / inputTimeScale).toFixed(3)}s due to ${Math.round(1000 * delta / inputTimeScale)} ms gap.`);
  for (let j = 0; j < missing; j++) {
  const newStamp = Math.max(nextPts, 0);
- let fillFrame = AAC.getSilentFrame(track.
+ let fillFrame = AAC.getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
  if (!fillFrame) {
  logger.log('[mp4-remuxer]: Unable to get silent frame for given audio codec; duplicating last frame instead.');
  fillFrame = sample.unit.subarray();
@@ -17524,7 +16895,7 @@ class MP4Remuxer {
  // samples count of this segment's duration
  const nbSamples = Math.ceil((endDTS - startDTS) / frameDuration);
  // silent frame
- const silentFrame = AAC.getSilentFrame(track.
+ const silentFrame = AAC.getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
  logger.warn('[mp4-remuxer]: remux empty Audio');
  // Can't remux if we can't generate a silent frame...
  if (!silentFrame) {
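Note: both silent-frame hunks above settle on the 1.5.8 lookup `AAC.getSilentFrame(track.manifestCodec || track.codec, track.channelCount)`, preferring the manifest-signalled codec and falling back to the parsed one, then duplicating the last frame when no silent frame exists. A condensed sketch of that fallback order; the table, stub function, and track shape below are simplified stand-ins, not the actual hls.js signatures:

```ts
interface AudioTrackInfo {
  codec?: string;
  manifestCodec?: string;
  channelCount: number;
}

// Stand-in for AAC.getSilentFrame: a real table maps codec + channel count to a
// canned silent frame; the payload here is a placeholder for illustration only.
const SILENT_FRAMES: Record<string, Uint8Array> = {
  'mp4a.40.2/2': new Uint8Array(9),
};

function getSilentFrame(codec: string | undefined, channelCount: number): Uint8Array | undefined {
  return codec ? SILENT_FRAMES[`${codec}/${channelCount}`] : undefined;
}

// Sketch of the fallback order used above: manifest codec first, then the parsed
// codec; duplicate the previous frame when no silent frame is available.
function makeFillFrame(track: AudioTrackInfo, lastUnit: Uint8Array): Uint8Array {
  return getSilentFrame(track.manifestCodec || track.codec, track.channelCount) ?? lastUnit.subarray();
}
```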
@@ -17915,15 +17286,13 @@ class Transmuxer {
  initSegmentData
  } = transmuxConfig;
  const keyData = getEncryptionType(uintData, decryptdata);
- if (keyData &&
+ if (keyData && keyData.method === 'AES-128') {
  const decrypter = this.getDecrypter();
- const aesMode = getAesModeFromFullSegmentMethod(keyData.method);
-
  // Software decryption is synchronous; webCrypto is not
  if (decrypter.isSync()) {
  // Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
  // data is handled in the flush() call
- let decryptedData = decrypter.softwareDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer
+ let decryptedData = decrypter.softwareDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer);
  // For Low-Latency HLS Parts, decrypt in place, since part parsing is expected on push progress
  const loadingParts = chunkMeta.part > -1;
  if (loadingParts) {
@@ -17935,7 +17304,7 @@ class Transmuxer {
  }
  uintData = new Uint8Array(decryptedData);
  } else {
- this.decryptionPromise = decrypter.webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer
+ this.decryptionPromise = decrypter.webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer).then(decryptedData => {
  // Calling push here is important; if flush() is called while this is still resolving, this ensures that
  // the decrypted data has been transmuxed
  const result = this.push(decryptedData, null, chunkMeta);
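Note: the two Transmuxer hunks above return to an AES-128-only check and drop the explicit AES-mode argument, but the surrounding control flow is the interesting part: software decryption is synchronous (and progressive), while WebCrypto returns a promise that is chained into `push()` so data flushed later has still been transmuxed. A simplified sketch of that branch; the decrypter interface below is reduced to what the sketch needs and is not the hls.js Decrypter class:

```ts
interface DecrypterLike {
  isSync(): boolean;
  softwareDecrypt(data: Uint8Array, key: ArrayBuffer, iv: ArrayBuffer): ArrayBuffer | null;
  webCryptoDecrypt(data: Uint8Array, key: ArrayBuffer, iv: ArrayBuffer): Promise<ArrayBuffer>;
}

// Sketch: take the synchronous path when available, otherwise defer and feed the
// decrypted bytes back into the pipeline when the promise resolves.
function decryptSegment(
  decrypter: DecrypterLike,
  data: Uint8Array,
  key: ArrayBuffer,
  iv: ArrayBuffer,
  push: (decrypted: ArrayBuffer) => void,
): Promise<void> | void {
  if (decrypter.isSync()) {
    // Progressive software decryption may return null until enough data is buffered.
    const decrypted = decrypter.softwareDecrypt(data, key, iv);
    if (decrypted) {
      push(decrypted);
    }
    return;
  }
  return decrypter.webCryptoDecrypt(data, key, iv).then(push);
}
```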
@@ -18589,7 +17958,14 @@ class TransmuxerInterface {
  this.observer = new EventEmitter();
  this.observer.on(Events.FRAG_DECRYPTED, forwardMessage);
  this.observer.on(Events.ERROR, forwardMessage);
- const
+ const MediaSource = getMediaSource(config.preferManagedMediaSource) || {
+ isTypeSupported: () => false
+ };
+ const m2tsTypeSupported = {
+ mpeg: MediaSource.isTypeSupported('audio/mpeg'),
+ mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
+ ac3: false
+ };

  // navigator.vendor is not always available in Web Worker
  // refer to https://developer.mozilla.org/en-US/docs/Web/API/WorkerGlobalScope/navigator
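Note: the hunk above rebuilds the `m2tsTypeSupported` map inline in TransmuxerInterface, guarding against environments where no MediaSource implementation exists by substituting a stub whose `isTypeSupported` always returns false. A small browser-side sketch of the same probing pattern; `pickMediaSource` below is a stand-in assumed to behave roughly like the hls.js `getMediaSource` helper:

```ts
// Stand-in for hls.js getMediaSource(): prefer ManagedMediaSource when asked,
// fall back to MediaSource, and return undefined when neither global exists.
function pickMediaSource(preferManaged: boolean): typeof MediaSource | undefined {
  const managed = (globalThis as any).ManagedMediaSource as typeof MediaSource | undefined;
  const native = (globalThis as any).MediaSource as typeof MediaSource | undefined;
  return (preferManaged && managed) || native || managed;
}

// Sketch: probe MPEG audio support once, treating a missing MediaSource as
// "nothing supported" rather than throwing inside a worker.
const MediaSourceImpl: Pick<typeof MediaSource, 'isTypeSupported'> =
  pickMediaSource(true) || { isTypeSupported: () => false };

const m2tsTypeSupported = {
  mpeg: MediaSourceImpl.isTypeSupported('audio/mpeg'),
  mp3: MediaSourceImpl.isTypeSupported('audio/mp4; codecs="mp3"'),
  ac3: false,
};
```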
@@ -18853,9 +18229,8 @@ const STALL_MINIMUM_DURATION_MS = 250;
  const MAX_START_GAP_JUMP = 2.0;
  const SKIP_BUFFER_HOLE_STEP_SECONDS = 0.1;
  const SKIP_BUFFER_RANGE_START = 0.05;
- class GapController
+ class GapController {
  constructor(config, media, fragmentTracker, hls) {
- super('gap-controller', hls.logger);
  this.config = void 0;
  this.media = null;
  this.fragmentTracker = void 0;
@@ -18865,7 +18240,6 @@ class GapController extends Logger {
  this.stalled = null;
  this.moved = false;
  this.seeking = false;
- this.ended = 0;
  this.config = config;
  this.media = media;
  this.fragmentTracker = fragmentTracker;
@@ -18883,7 +18257,7 @@ class GapController extends Logger {
  *
  * @param lastCurrentTime - Previously read playhead position
  */
- poll(lastCurrentTime, activeFrag
+ poll(lastCurrentTime, activeFrag) {
  const {
  config,
  media,
@@ -18902,7 +18276,6 @@ class GapController extends Logger {

  // The playhead is moving, no-op
  if (currentTime !== lastCurrentTime) {
- this.ended = 0;
  this.moved = true;
  if (!seeking) {
  this.nudgeRetry = 0;
@@ -18911,7 +18284,7 @@ class GapController extends Logger {
  // The playhead is now moving, but was previously stalled
  if (this.stallReported) {
  const _stalledDuration = self.performance.now() - stalled;
-
+ logger.warn(`playback not stuck anymore @${currentTime}, after ${Math.round(_stalledDuration)}ms`);
  this.stallReported = false;
  }
  this.stalled = null;
@@ -18947,6 +18320,7 @@ class GapController extends Logger {
  // Skip start gaps if we haven't played, but the last poll detected the start of a stall
  // The addition poll gives the browser a chance to jump the gap for us
  if (!this.moved && this.stalled !== null) {
+ var _level$details;
  // There is no playable buffer (seeked, waiting for buffer)
  const isBuffered = bufferInfo.len > 0;
  if (!isBuffered && !nextStart) {
@@ -18958,8 +18332,9 @@ class GapController extends Logger {
  // When joining a live stream with audio tracks, account for live playlist window sliding by allowing
  // a larger jump over start gaps caused by the audio-stream-controller buffering a start fragment
  // that begins over 1 target duration after the video start position.
- const
- const
+ const level = this.hls.levels ? this.hls.levels[this.hls.currentLevel] : null;
+ const isLive = level == null ? void 0 : (_level$details = level.details) == null ? void 0 : _level$details.live;
+ const maxStartGapJump = isLive ? level.details.targetduration * 2 : MAX_START_GAP_JUMP;
  const partialOrGap = this.fragmentTracker.getPartialFragment(currentTime);
  if (startJump > 0 && (startJump <= maxStartGapJump || partialOrGap)) {
  if (!media.paused) {
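Note: the last hunk above widens the allowed start-gap jump when the active level is a live playlist, replacing the fixed `MAX_START_GAP_JUMP` (2 seconds) with twice the target duration. A one-function sketch of that decision; the level shape below is trimmed down and is not the hls.js `Level` type:

```ts
const MAX_START_GAP_JUMP = 2.0;

interface LevelLike {
  details?: { live?: boolean; targetduration: number };
}

// Sketch: live playlists slide, so allow jumping a start gap of up to two target
// durations; VOD keeps the fixed 2-second limit.
function maxStartGapJump(level: LevelLike | null): number {
  const details = level?.details;
  return details && details.live ? details.targetduration * 2 : MAX_START_GAP_JUMP;
}
```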
@@ -18977,17 +18352,6 @@ class GapController extends Logger {
  }
  const stalledDuration = tnow - stalled;
  if (!seeking && stalledDuration >= STALL_MINIMUM_DURATION_MS) {
- // Dispatch MEDIA_ENDED when media.ended/ended event is not signalled at end of stream
- if (state === State.ENDED && !(levelDetails != null && levelDetails.live) && Math.abs(currentTime - ((levelDetails == null ? void 0 : levelDetails.edge) || 0)) < 1) {
- if (stalledDuration < 1000 || this.ended) {
- return;
- }
- this.ended = currentTime;
- this.hls.trigger(Events.MEDIA_ENDED, {
- stalled: true
- });
- return;
- }
  // Report stalling after trying to fix
  this._reportStall(bufferInfo);
  if (!this.media) {
@@ -19031,7 +18395,7 @@ class GapController extends Logger {
  // needs to cross some sort of threshold covering all source-buffers content
  // to start playing properly.
  if ((bufferInfo.len > config.maxBufferHole || bufferInfo.nextStart && bufferInfo.nextStart - currentTime < config.maxBufferHole) && stalledDurationMs > config.highBufferWatchdogPeriod * 1000) {
-
+ logger.warn('Trying to nudge playhead over buffer-hole');
  // Try to nudge currentTime over a buffer hole if we've been stalling for the configured amount of seconds
  // We only try to jump the hole if it's under the configured size
  // Reset stalled so to rearm watchdog timer
@@ -19055,7 +18419,7 @@ class GapController extends Logger {
  // Report stalled error once
  this.stallReported = true;
  const error = new Error(`Playback stalling at @${media.currentTime} due to low buffer (${JSON.stringify(bufferInfo)})`);
-
+ logger.warn(error.message);
  hls.trigger(Events.ERROR, {
  type: ErrorTypes.MEDIA_ERROR,
  details: ErrorDetails.BUFFER_STALLED_ERROR,
@@ -19123,7 +18487,7 @@ class GapController extends Logger {
  }
  }
  const targetTime = Math.max(startTime + SKIP_BUFFER_RANGE_START, currentTime + SKIP_BUFFER_HOLE_STEP_SECONDS);
-
+ logger.warn(`skipping hole, adjusting currentTime from ${currentTime} to ${targetTime}`);
  this.moved = true;
  this.stalled = null;
  media.currentTime = targetTime;
@@ -19164,7 +18528,7 @@ class GapController extends Logger {
  const targetTime = currentTime + (nudgeRetry + 1) * config.nudgeOffset;
  // playback stalled in buffered area ... let's nudge currentTime to try to overcome this
  const error = new Error(`Nudging 'currentTime' from ${currentTime} to ${targetTime}`);
-
+ logger.warn(error.message);
  media.currentTime = targetTime;
  hls.trigger(Events.ERROR, {
  type: ErrorTypes.MEDIA_ERROR,
@@ -19174,7 +18538,7 @@ class GapController extends Logger {
  });
  } else {
  const error = new Error(`Playhead still not moving while enough data buffered @${currentTime} after ${config.nudgeMaxRetry} nudges`);
-
+ logger.error(error.message);
  hls.trigger(Events.ERROR, {
  type: ErrorTypes.MEDIA_ERROR,
  details: ErrorDetails.BUFFER_STALLED_ERROR,
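Note: the last few hunks only swap instance-level logging back to the module-level `logger`, but the nudge math they sit next to is worth spelling out: each retry moves `currentTime` forward by `(nudgeRetry + 1) * config.nudgeOffset`, so with hls.js's default `nudgeOffset` of 0.1 s the retries step roughly 0.1 s, 0.2 s, 0.3 s. A tiny sketch of those targets; the helper name and defaults are illustrative:

```ts
// Sketch: successive nudge targets for a stalled playhead, mirroring
// targetTime = currentTime + (nudgeRetry + 1) * nudgeOffset from the hunk above.
function nudgeTargets(currentTime: number, nudgeOffset = 0.1, nudgeMaxRetry = 3): number[] {
  return Array.from({ length: nudgeMaxRetry }, (_, retry) => currentTime + (retry + 1) * nudgeOffset);
}

// e.g. nudgeTargets(10) -> approximately [10.1, 10.2, 10.3]
```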
@@ -19189,7 +18553,7 @@ const TICK_INTERVAL = 100; // how often to tick in ms

  class StreamController extends BaseStreamController {
  constructor(hls, fragmentTracker, keyLoader) {
- super(hls, fragmentTracker, keyLoader, 'stream-controller', PlaylistLevelType.MAIN);
+ super(hls, fragmentTracker, keyLoader, '[stream-controller]', PlaylistLevelType.MAIN);
  this.audioCodecSwap = false;
  this.gapController = null;
  this.level = -1;
@@ -19197,43 +18561,27 @@ class StreamController extends BaseStreamController {
  this.altAudio = false;
  this.audioOnly = false;
  this.fragPlaying = null;
+ this.onvplaying = null;
+ this.onvseeked = null;
  this.fragLastKbps = 0;
  this.couldBacktrack = false;
  this.backtrackFragment = null;
  this.audioCodecSwitch = false;
  this.videoBuffer = null;
- this.
- // tick to speed up FRAG_CHANGED triggering
- this.tick();
- };
- this.onMediaSeeked = () => {
- const media = this.media;
- const currentTime = media ? media.currentTime : null;
- if (isFiniteNumber(currentTime)) {
- this.log(`Media seeked to ${currentTime.toFixed(3)}`);
- }
-
- // If seeked was issued before buffer was appended do not tick immediately
- const bufferInfo = this.getMainFwdBufferInfo();
- if (bufferInfo === null || bufferInfo.len === 0) {
- this.warn(`Main forward buffer length on "seeked" event ${bufferInfo ? bufferInfo.len : 'empty'})`);
- return;
- }
-
- // tick to speed up FRAG_CHANGED triggering
- this.tick();
- };
- this.registerListeners();
+ this._registerListeners();
  }
-
- super.registerListeners();
+ _registerListeners() {
  const {
  hls
  } = this;
+ hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
+ hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
+ hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
  hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this);
  hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
  hls.on(Events.FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
+ hls.on(Events.ERROR, this.onError, this);
  hls.on(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
  hls.on(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
  hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this);
@@ -19241,14 +18589,17 @@ class StreamController extends BaseStreamController {
  hls.on(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
  hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  }
-
- super.unregisterListeners();
+ _unregisterListeners() {
  const {
  hls
  } = this;
+ hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
+ hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
+ hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
  hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
  hls.off(Events.FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
+ hls.off(Events.ERROR, this.onError, this);
  hls.off(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
  hls.off(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
  hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this);
@@ -19257,9 +18608,7 @@ class StreamController extends BaseStreamController {
  hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  }
  onHandlerDestroying() {
-
- this.onMediaPlaying = this.onMediaSeeked = null;
- this.unregisterListeners();
+ this._unregisterListeners();
  super.onHandlerDestroying();
  }
  startLoad(startPosition) {
@@ -19357,9 +18706,6 @@ class StreamController extends BaseStreamController {
  this.checkFragmentChanged();
  }
  doTickIdle() {
- if (!this.buffering) {
- return;
- }
  const {
  hls,
  levelLastLoaded,
@@ -19587,19 +18933,22 @@ class StreamController extends BaseStreamController {
  onMediaAttached(event, data) {
  super.onMediaAttached(event, data);
  const media = data.media;
-
-
+ this.onvplaying = this.onMediaPlaying.bind(this);
+ this.onvseeked = this.onMediaSeeked.bind(this);
+ media.addEventListener('playing', this.onvplaying);
+ media.addEventListener('seeked', this.onvseeked);
  this.gapController = new GapController(this.config, media, this.fragmentTracker, this.hls);
  }
  onMediaDetaching() {
  const {
  media
  } = this;
- if (media) {
- media.removeEventListener('playing', this.
- media.removeEventListener('seeked', this.
+ if (media && this.onvplaying && this.onvseeked) {
+ media.removeEventListener('playing', this.onvplaying);
+ media.removeEventListener('seeked', this.onvseeked);
+ this.onvplaying = this.onvseeked = null;
+ this.videoBuffer = null;
  }
- this.videoBuffer = null;
  this.fragPlaying = null;
  if (this.gapController) {
  this.gapController.destroy();
@@ -19607,6 +18956,27 @@ class StreamController extends BaseStreamController {
  }
  super.onMediaDetaching();
  }
+ onMediaPlaying() {
+ // tick to speed up FRAG_CHANGED triggering
+ this.tick();
+ }
+ onMediaSeeked() {
+ const media = this.media;
+ const currentTime = media ? media.currentTime : null;
+ if (isFiniteNumber(currentTime)) {
+ this.log(`Media seeked to ${currentTime.toFixed(3)}`);
+ }
+
+ // If seeked was issued before buffer was appended do not tick immediately
+ const bufferInfo = this.getMainFwdBufferInfo();
+ if (bufferInfo === null || bufferInfo.len === 0) {
+ this.warn(`Main forward buffer length on "seeked" event ${bufferInfo ? bufferInfo.len : 'empty'})`);
+ return;
+ }
+
+ // tick to speed up FRAG_CHANGED triggering
+ this.tick();
+ }
  onManifestLoading() {
  // reset buffer on manifest loading
  this.log('Trigger BUFFER_RESET');
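Note: the onMediaAttached/onMediaDetaching hunks above return to the 1.5.x pattern of storing bound `playing`/`seeked` handlers on the instance (`onvplaying`/`onvseeked`) so the exact same function reference can be passed to `removeEventListener` on detach. A generic sketch of that pattern (this is not the StreamController itself; names are illustrative):

```ts
// Sketch: keep the bound listener so add and remove use the same reference.
class MediaWatcher {
  private onvplaying: (() => void) | null = null;

  attach(media: HTMLMediaElement): void {
    this.onvplaying = this.onPlaying.bind(this);
    media.addEventListener('playing', this.onvplaying);
  }

  detach(media: HTMLMediaElement): void {
    if (this.onvplaying) {
      // Removal only works with the identical function object that was added.
      media.removeEventListener('playing', this.onvplaying);
      this.onvplaying = null;
    }
  }

  private onPlaying(): void {
    // react to playback starting (placeholder)
  }
}
```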
@@ -19898,10 +19268,8 @@ class StreamController extends BaseStreamController {
  }
  if (this.loadedmetadata || !BufferHelper.getBuffered(media).length) {
  // Resolve gaps using the main buffer, whose ranges are the intersections of the A/V sourcebuffers
- const
-
- const levelDetails = this.getLevelDetails();
- gapController.poll(this.lastCurrentTime, activeFrag, levelDetails, state);
+ const activeFrag = this.state !== State.IDLE ? this.fragCurrent : null;
+ gapController.poll(this.lastCurrentTime, activeFrag);
  }
  this.lastCurrentTime = media.currentTime;
  }
@@ -20234,17 +19602,6 @@ class StreamController extends BaseStreamController {
  getMainFwdBufferInfo() {
  return this.getFwdBufferInfo(this.mediaBuffer ? this.mediaBuffer : this.media, PlaylistLevelType.MAIN);
  }
- get maxBufferLength() {
- const {
- levels,
- level
- } = this;
- const levelInfo = levels == null ? void 0 : levels[level];
- if (!levelInfo) {
- return this.config.maxBufferLength;
- }
- return this.getMaxBufferLength(levelInfo.maxBitrate);
- }
  backtrack(frag) {
  this.couldBacktrack = true;
  // Causes findFragments to backtrack through fragments to find the keyframe
@@ -20350,7 +19707,7 @@ class Hls {
  * Get the video-dev/hls.js package version.
  */
  static get version() {
- return "1.5.8
+ return "1.5.8";
  }

  /**
@@ -20413,12 +19770,9 @@ class Hls {
  * The configuration object provided on player instantiation.
  */
  this.userConfig = void 0;
- /**
- * The logger functions used by this player instance, configured on player instantiation.
- */
- this.logger = void 0;
  this.coreComponents = void 0;
  this.networkControllers = void 0;
+ this.started = false;
  this._emitter = new EventEmitter();
  this._autoLevelCapping = -1;
  this._maxHdcpLevel = null;
@@ -20435,11 +19789,11 @@ class Hls {
  this._media = null;
  this.url = null;
  this.triggeringException = void 0;
-
- const config = this.config = mergeConfig(Hls.DefaultConfig, userConfig
+ enableLogs(userConfig.debug || false, 'Hls instance');
+ const config = this.config = mergeConfig(Hls.DefaultConfig, userConfig);
  this.userConfig = userConfig;
  if (config.progressive) {
- enableStreamingMode(config
+ enableStreamingMode(config);
  }

  // core controllers and network loaders
@@ -20452,9 +19806,7 @@ class Hls {
  } = config;
  const errorController = new ConfigErrorController(this);
  const abrController = this.abrController = new ConfigAbrController(this);
-
- const fragmentTracker = new FragmentTracker(this);
- const bufferController = this.bufferController = new ConfigBufferController(this, fragmentTracker);
+ const bufferController = this.bufferController = new ConfigBufferController(this);
  const capLevelController = this.capLevelController = new ConfigCapLevelController(this);
  const fpsController = new ConfigFpsController(this);
  const playListLoader = new PlaylistLoader(this);
@@ -20463,6 +19815,8 @@ class Hls {
  // ConentSteeringController is defined before LevelController to receive Multivariant Playlist events first
  const contentSteering = ConfigContentSteeringController ? new ConfigContentSteeringController(this) : null;
  const levelController = this.levelController = new LevelController(this, contentSteering);
+ // FragmentTracker must be defined before StreamController because the order of event handling is important
+ const fragmentTracker = new FragmentTracker(this);
  const keyLoader = new KeyLoader(this.config);
  const streamController = this.streamController = new StreamController(this, fragmentTracker, keyLoader);

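Note: the added comment above ("FragmentTracker must be defined before StreamController because the order of event handling is important") relies on the fact that these components subscribe to the shared event bus in their constructors, and emitter listeners fire in registration order. A minimal sketch of why construction order matters, using the eventemitter3 package that hls.js bundles; the class and event names below are illustrative, not the hls.js ones:

```ts
import { EventEmitter } from 'eventemitter3';

// Sketch: listeners run in the order they were registered, so a component
// constructed first sees each event before one constructed later.
const bus = new EventEmitter();
const order: string[] = [];

class Tracker {
  constructor(emitter: EventEmitter) {
    emitter.on('frag-loaded', () => order.push('tracker'));
  }
}

class Controller {
  constructor(emitter: EventEmitter) {
    emitter.on('frag-loaded', () => order.push('controller'));
  }
}

new Tracker(bus); // registered first, handles the event first
new Controller(bus); // registered second
bus.emit('frag-loaded');
console.log(order); // ['tracker', 'controller']
```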
@@ -20538,7 +19892,7 @@ class Hls {
  try {
  return this.emit(event, event, eventObject);
  } catch (error) {
-
+ logger.error('An internal error happened while handling event ' + event + '. Error message: "' + error.message + '". Here is a stacktrace:', error);
  // Prevent recursion in error event handlers that throw #5497
  if (!this.triggeringException) {
  this.triggeringException = true;
@@ -20564,7 +19918,7 @@ class Hls {
  * Dispose of the instance
  */
  destroy() {
-
+ logger.log('destroy');
  this.trigger(Events.DESTROYING, undefined);
  this.detachMedia();
  this.removeAllListeners();
@@ -20585,7 +19939,7 @@ class Hls {
  * Attaches Hls.js to a media element
  */
  attachMedia(media) {
-
+ logger.log('attachMedia');
  this._media = media;
  this.trigger(Events.MEDIA_ATTACHING, {
  media: media
@@ -20596,7 +19950,7 @@ class Hls {
  * Detach Hls.js from the media
  */
  detachMedia() {
-
+ logger.log('detachMedia');
  this.trigger(Events.MEDIA_DETACHING, undefined);
  this._media = null;
  }
@@ -20613,7 +19967,7 @@ class Hls {
  });
  this._autoLevelCapping = -1;
  this._maxHdcpLevel = null;
-
+ logger.log(`loadSource:${loadingSource}`);
  if (media && loadedSource && (loadedSource !== loadingSource || this.bufferController.hasSourceTypes())) {
  this.detachMedia();
  this.attachMedia(media);
@@ -20632,7 +19986,8 @@ class Hls {
  * Defaults to -1 (None: starts from earliest point)
  */
  startLoad(startPosition = -1) {
-
+ logger.log(`startLoad(${startPosition})`);
+ this.started = true;
  this.networkControllers.forEach(controller => {
  controller.startLoad(startPosition);
  });
@@ -20642,31 +19997,34 @@ class Hls {
  * Stop loading of any stream data.
  */
  stopLoad() {
-
+ logger.log('stopLoad');
+ this.started = false;
  this.networkControllers.forEach(controller => {
  controller.stopLoad();
  });
  }

  /**
- * Resumes stream controller segment loading
+ * Resumes stream controller segment loading if previously started.
  */
  resumeBuffering() {
- this.
-
- controller
-
-
+ if (this.started) {
+ this.networkControllers.forEach(controller => {
+ if ('fragmentLoader' in controller) {
+ controller.startLoad(-1);
+ }
+ });
+ }
  }

  /**
- *
+ * Stops stream controller segment loading without changing 'started' state like stopLoad().
  * This allows for media buffering to be paused without interupting playlist loading.
  */
  pauseBuffering() {
  this.networkControllers.forEach(controller => {
- if (controller
- controller.
+ if ('fragmentLoader' in controller) {
+ controller.stopLoad();
  }
  });
  }
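Note: the last hunk above restores the 1.5.8 behaviour where `resumeBuffering()`/`pauseBuffering()` only toggle network controllers that own a `fragmentLoader` (the stream controllers), so playlist loading continues while media buffering is paused, and `resumeBuffering()` is a no-op unless `startLoad()` was called first. A reduced sketch of that duck-typed filter; the interfaces below are simplified for illustration and are not the hls.js types:

```ts
interface NetworkComponentAPI {
  startLoad(startPosition: number): void;
  stopLoad(): void;
}

// Sketch: only controllers that load fragments are paused/resumed; playlist-level
// controllers keep refreshing so live playlists stay up to date.
function resumeBuffering(controllers: NetworkComponentAPI[], started: boolean): void {
  if (!started) {
    return;
  }
  controllers.forEach((controller) => {
    if ('fragmentLoader' in controller) {
      controller.startLoad(-1);
    }
  });
}

function pauseBuffering(controllers: NetworkComponentAPI[]): void {
  controllers.forEach((controller) => {
    if ('fragmentLoader' in controller) {
      controller.stopLoad();
    }
  });
}
```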
@@ -20675,7 +20033,7 @@ class Hls {
  * Swap through possible audio codecs in the stream (for example to switch from stereo to 5.1)
  */
  swapAudioCodec() {
-
+ logger.log('swapAudioCodec');
  this.streamController.swapAudioCodec();
  }

@@ -20686,7 +20044,7 @@ class Hls {
  * Automatic recovery of media-errors by this process is configurable.
  */
  recoverMediaError() {
-
+ logger.log('recoverMediaError');
  const media = this._media;
  this.detachMedia();
  if (media) {
@@ -20716,7 +20074,7 @@ class Hls {
  * Set quality level index immediately. This will flush the current buffer to replace the quality asap. That means playback will interrupt at least shortly to re-buffer and re-sync eventually. Set to -1 for automatic level selection.
  */
  set currentLevel(newLevel) {
-
+ logger.log(`set currentLevel:${newLevel}`);
  this.levelController.manualLevel = newLevel;
  this.streamController.immediateLevelSwitch();
  }
@@ -20735,7 +20093,7 @@ class Hls {
  * @param newLevel - Pass -1 for automatic level selection
  */
  set nextLevel(newLevel) {
-
+ logger.log(`set nextLevel:${newLevel}`);
  this.levelController.manualLevel = newLevel;
  this.streamController.nextLevelSwitch();
  }
@@ -20754,7 +20112,7 @@ class Hls {
  * @param newLevel - Pass -1 for automatic level selection
  */
  set loadLevel(newLevel) {
-
+ logger.log(`set loadLevel:${newLevel}`);
  this.levelController.manualLevel = newLevel;
  }

@@ -20785,7 +20143,7 @@ class Hls {
  * Sets "first-level", see getter.
  */
  set firstLevel(newLevel) {
-
+ logger.log(`set firstLevel:${newLevel}`);
  this.levelController.firstLevel = newLevel;
  }

@@ -20810,7 +20168,7 @@ class Hls {
  * (determined from download of first segment)
  */
  set startLevel(newLevel) {
-
+ logger.log(`set startLevel:${newLevel}`);
  // if not in automatic start level detection, ensure startLevel is greater than minAutoLevel
  if (newLevel !== -1) {
  newLevel = Math.max(newLevel, this.minAutoLevel);
@@ -20885,7 +20243,7 @@ class Hls {
  */
  set autoLevelCapping(newLevel) {
  if (this._autoLevelCapping !== newLevel) {
-
+ logger.log(`set autoLevelCapping:${newLevel}`);
  this._autoLevelCapping = newLevel;
  this.levelController.checkMaxAutoUpdated();
  }
@@ -20990,9 +20348,6 @@ class Hls {
  get mainForwardBufferInfo() {
  return this.streamController.getMainFwdBufferInfo();
  }
- get maxBufferLength() {
- return this.streamController.maxBufferLength;
- }

  /**
  * Find and select the best matching audio track, making a level switch when a Group change is necessary.
@@ -21167,5 +20522,5 @@ var KeySystemFormats = empty.KeySystemFormats;
  var KeySystems = empty.KeySystems;
  var SubtitleStreamController = empty.SubtitleStreamController;
  var TimelineController = empty.TimelineController;
- export { AbrController, AttrList,
+ export { AbrController, AttrList, Cues as AudioStreamController, Cues as AudioTrackController, BasePlaylistController, BaseSegment, BaseStreamController, BufferController, Cues as CMCDController, CapLevelController, ChunkMetadata, ContentSteeringController, DateRange, Cues as EMEController, ErrorActionFlags, ErrorController, ErrorDetails, ErrorTypes, Events, FPSController, Fragment, Hls, HlsSkip, HlsUrlParameters, KeySystemFormats, KeySystems, Level, LevelDetails, LevelKey, LoadStats, MetadataSchema, NetworkErrorAction, Part, PlaylistLevelType, SubtitleStreamController, Cues as SubtitleTrackController, TimelineController, Hls as default, getMediaSource, isMSESupported, isSupported };
  //# sourceMappingURL=hls.light.mjs.map