hls.js 1.5.9 → 1.5.10-0.canary.10320
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +4 -3
- package/dist/hls-demo.js +41 -38
- package/dist/hls-demo.js.map +1 -1
- package/dist/hls.js +3477 -2194
- package/dist/hls.js.d.ts +108 -85
- package/dist/hls.js.map +1 -1
- package/dist/hls.light.js +2401 -1754
- package/dist/hls.light.js.map +1 -1
- package/dist/hls.light.min.js +1 -1
- package/dist/hls.light.min.js.map +1 -1
- package/dist/hls.light.mjs +1989 -1315
- package/dist/hls.light.mjs.map +1 -1
- package/dist/hls.min.js +1 -1
- package/dist/hls.min.js.map +1 -1
- package/dist/hls.mjs +2863 -1557
- package/dist/hls.mjs.map +1 -1
- package/dist/hls.worker.js +1 -1
- package/dist/hls.worker.js.map +1 -1
- package/package.json +35 -35
- package/src/config.ts +3 -2
- package/src/controller/abr-controller.ts +24 -20
- package/src/controller/audio-stream-controller.ts +68 -74
- package/src/controller/audio-track-controller.ts +1 -1
- package/src/controller/base-playlist-controller.ts +27 -10
- package/src/controller/base-stream-controller.ts +160 -38
- package/src/controller/buffer-controller.ts +230 -92
- package/src/controller/buffer-operation-queue.ts +16 -19
- package/src/controller/cap-level-controller.ts +3 -2
- package/src/controller/cmcd-controller.ts +51 -14
- package/src/controller/content-steering-controller.ts +29 -15
- package/src/controller/eme-controller.ts +10 -23
- package/src/controller/error-controller.ts +6 -8
- package/src/controller/fps-controller.ts +8 -3
- package/src/controller/fragment-tracker.ts +15 -11
- package/src/controller/gap-controller.ts +43 -16
- package/src/controller/id3-track-controller.ts +7 -7
- package/src/controller/latency-controller.ts +9 -11
- package/src/controller/level-controller.ts +37 -19
- package/src/controller/stream-controller.ts +37 -32
- package/src/controller/subtitle-stream-controller.ts +28 -40
- package/src/controller/subtitle-track-controller.ts +5 -3
- package/src/controller/timeline-controller.ts +19 -21
- package/src/crypt/aes-crypto.ts +21 -2
- package/src/crypt/decrypter-aes-mode.ts +4 -0
- package/src/crypt/decrypter.ts +32 -16
- package/src/crypt/fast-aes-key.ts +28 -5
- package/src/demux/audio/aacdemuxer.ts +2 -2
- package/src/demux/audio/ac3-demuxer.ts +4 -3
- package/src/demux/audio/adts.ts +9 -4
- package/src/demux/audio/base-audio-demuxer.ts +16 -14
- package/src/demux/audio/mp3demuxer.ts +4 -3
- package/src/demux/audio/mpegaudio.ts +1 -1
- package/src/demux/mp4demuxer.ts +7 -7
- package/src/demux/sample-aes.ts +2 -0
- package/src/demux/transmuxer-interface.ts +4 -12
- package/src/demux/transmuxer-worker.ts +4 -4
- package/src/demux/transmuxer.ts +16 -3
- package/src/demux/tsdemuxer.ts +71 -37
- package/src/demux/video/avc-video-parser.ts +208 -119
- package/src/demux/video/base-video-parser.ts +147 -18
- package/src/demux/video/exp-golomb.ts +0 -208
- package/src/demux/video/hevc-video-parser.ts +749 -0
- package/src/empty-es.js +5 -0
- package/src/events.ts +8 -1
- package/src/exports-named.ts +1 -1
- package/src/hls.ts +61 -38
- package/src/loader/fragment-loader.ts +10 -3
- package/src/loader/key-loader.ts +3 -1
- package/src/loader/level-key.ts +10 -9
- package/src/loader/playlist-loader.ts +4 -5
- package/src/remux/mp4-generator.ts +196 -1
- package/src/remux/mp4-remuxer.ts +24 -8
- package/src/task-loop.ts +5 -2
- package/src/types/component-api.ts +3 -1
- package/src/types/demuxer.ts +4 -0
- package/src/types/events.ts +4 -0
- package/src/types/remuxer.ts +1 -1
- package/src/utils/buffer-helper.ts +12 -31
- package/src/utils/cea-608-parser.ts +1 -3
- package/src/utils/codecs.ts +34 -5
- package/src/utils/encryption-methods-util.ts +21 -0
- package/src/utils/fetch-loader.ts +1 -1
- package/src/utils/imsc1-ttml-parser.ts +1 -1
- package/src/utils/keysystem-util.ts +1 -6
- package/src/utils/logger.ts +58 -23
- package/src/utils/mp4-tools.ts +5 -3
- package/src/utils/utf8-utils.ts +18 -0
- package/src/utils/webvtt-parser.ts +1 -1
- package/src/demux/id3.ts +0 -411
package/dist/hls.light.mjs
CHANGED
@@ -176,6 +176,23 @@ var urlToolkit = {exports: {}};
|
|
176
176
|
|
177
177
|
var urlToolkitExports = urlToolkit.exports;
|
178
178
|
|
179
|
+
function _defineProperty(e, r, t) {
|
180
|
+
return (r = _toPropertyKey(r)) in e ? Object.defineProperty(e, r, {
|
181
|
+
value: t,
|
182
|
+
enumerable: !0,
|
183
|
+
configurable: !0,
|
184
|
+
writable: !0
|
185
|
+
}) : e[r] = t, e;
|
186
|
+
}
|
187
|
+
function _extends() {
|
188
|
+
return _extends = Object.assign ? Object.assign.bind() : function (n) {
|
189
|
+
for (var e = 1; e < arguments.length; e++) {
|
190
|
+
var t = arguments[e];
|
191
|
+
for (var r in t) ({}).hasOwnProperty.call(t, r) && (n[r] = t[r]);
|
192
|
+
}
|
193
|
+
return n;
|
194
|
+
}, _extends.apply(null, arguments);
|
195
|
+
}
|
179
196
|
function ownKeys(e, r) {
|
180
197
|
var t = Object.keys(e);
|
181
198
|
if (Object.getOwnPropertySymbols) {
|
@@ -209,35 +226,7 @@ function _toPrimitive(t, r) {
|
|
209
226
|
}
|
210
227
|
function _toPropertyKey(t) {
|
211
228
|
var i = _toPrimitive(t, "string");
|
212
|
-
return "symbol" == typeof i ? i :
|
213
|
-
}
|
214
|
-
function _defineProperty(obj, key, value) {
|
215
|
-
key = _toPropertyKey(key);
|
216
|
-
if (key in obj) {
|
217
|
-
Object.defineProperty(obj, key, {
|
218
|
-
value: value,
|
219
|
-
enumerable: true,
|
220
|
-
configurable: true,
|
221
|
-
writable: true
|
222
|
-
});
|
223
|
-
} else {
|
224
|
-
obj[key] = value;
|
225
|
-
}
|
226
|
-
return obj;
|
227
|
-
}
|
228
|
-
function _extends() {
|
229
|
-
_extends = Object.assign ? Object.assign.bind() : function (target) {
|
230
|
-
for (var i = 1; i < arguments.length; i++) {
|
231
|
-
var source = arguments[i];
|
232
|
-
for (var key in source) {
|
233
|
-
if (Object.prototype.hasOwnProperty.call(source, key)) {
|
234
|
-
target[key] = source[key];
|
235
|
-
}
|
236
|
-
}
|
237
|
-
}
|
238
|
-
return target;
|
239
|
-
};
|
240
|
-
return _extends.apply(this, arguments);
|
229
|
+
return "symbol" == typeof i ? i : i + "";
|
241
230
|
}
|
242
231
|
|
243
232
|
// https://caniuse.com/mdn-javascript_builtins_number_isfinite
|
@@ -256,6 +245,7 @@ let Events = /*#__PURE__*/function (Events) {
|
|
256
245
|
Events["MEDIA_ATTACHED"] = "hlsMediaAttached";
|
257
246
|
Events["MEDIA_DETACHING"] = "hlsMediaDetaching";
|
258
247
|
Events["MEDIA_DETACHED"] = "hlsMediaDetached";
|
248
|
+
Events["MEDIA_ENDED"] = "hlsMediaEnded";
|
259
249
|
Events["BUFFER_RESET"] = "hlsBufferReset";
|
260
250
|
Events["BUFFER_CODECS"] = "hlsBufferCodecs";
|
261
251
|
Events["BUFFER_CREATED"] = "hlsBufferCreated";
|
@@ -369,58 +359,6 @@ let ErrorDetails = /*#__PURE__*/function (ErrorDetails) {
|
|
369
359
|
return ErrorDetails;
|
370
360
|
}({});
|
371
361
|
|
372
|
-
const noop = function noop() {};
|
373
|
-
const fakeLogger = {
|
374
|
-
trace: noop,
|
375
|
-
debug: noop,
|
376
|
-
log: noop,
|
377
|
-
warn: noop,
|
378
|
-
info: noop,
|
379
|
-
error: noop
|
380
|
-
};
|
381
|
-
let exportedLogger = fakeLogger;
|
382
|
-
|
383
|
-
// let lastCallTime;
|
384
|
-
// function formatMsgWithTimeInfo(type, msg) {
|
385
|
-
// const now = Date.now();
|
386
|
-
// const diff = lastCallTime ? '+' + (now - lastCallTime) : '0';
|
387
|
-
// lastCallTime = now;
|
388
|
-
// msg = (new Date(now)).toISOString() + ' | [' + type + '] > ' + msg + ' ( ' + diff + ' ms )';
|
389
|
-
// return msg;
|
390
|
-
// }
|
391
|
-
|
392
|
-
function consolePrintFn(type) {
|
393
|
-
const func = self.console[type];
|
394
|
-
if (func) {
|
395
|
-
return func.bind(self.console, `[${type}] >`);
|
396
|
-
}
|
397
|
-
return noop;
|
398
|
-
}
|
399
|
-
function exportLoggerFunctions(debugConfig, ...functions) {
|
400
|
-
functions.forEach(function (type) {
|
401
|
-
exportedLogger[type] = debugConfig[type] ? debugConfig[type].bind(debugConfig) : consolePrintFn(type);
|
402
|
-
});
|
403
|
-
}
|
404
|
-
function enableLogs(debugConfig, id) {
|
405
|
-
// check that console is available
|
406
|
-
if (typeof console === 'object' && debugConfig === true || typeof debugConfig === 'object') {
|
407
|
-
exportLoggerFunctions(debugConfig,
|
408
|
-
// Remove out from list here to hard-disable a log-level
|
409
|
-
// 'trace',
|
410
|
-
'debug', 'log', 'info', 'warn', 'error');
|
411
|
-
// Some browsers don't allow to use bind on console object anyway
|
412
|
-
// fallback to default if needed
|
413
|
-
try {
|
414
|
-
exportedLogger.log(`Debug logs enabled for "${id}" in hls.js version ${"1.5.9"}`);
|
415
|
-
} catch (e) {
|
416
|
-
exportedLogger = fakeLogger;
|
417
|
-
}
|
418
|
-
} else {
|
419
|
-
exportedLogger = fakeLogger;
|
420
|
-
}
|
421
|
-
}
|
422
|
-
const logger = exportedLogger;
|
423
|
-
|
424
362
|
const DECIMAL_RESOLUTION_REGEX = /^(\d+)x(\d+)$/;
|
425
363
|
const ATTR_LIST_REGEX = /(.+?)=(".*?"|.*?)(?:,|$)/g;
|
426
364
|
|
@@ -502,6 +440,84 @@ class AttrList {
|
|
502
440
|
}
|
503
441
|
}
|
504
442
|
|
443
|
+
class Logger {
|
444
|
+
constructor(label, logger) {
|
445
|
+
this.trace = void 0;
|
446
|
+
this.debug = void 0;
|
447
|
+
this.log = void 0;
|
448
|
+
this.warn = void 0;
|
449
|
+
this.info = void 0;
|
450
|
+
this.error = void 0;
|
451
|
+
const lb = `[${label}]:`;
|
452
|
+
this.trace = noop;
|
453
|
+
this.debug = logger.debug.bind(null, lb);
|
454
|
+
this.log = logger.log.bind(null, lb);
|
455
|
+
this.warn = logger.warn.bind(null, lb);
|
456
|
+
this.info = logger.info.bind(null, lb);
|
457
|
+
this.error = logger.error.bind(null, lb);
|
458
|
+
}
|
459
|
+
}
|
460
|
+
const noop = function noop() {};
|
461
|
+
const fakeLogger = {
|
462
|
+
trace: noop,
|
463
|
+
debug: noop,
|
464
|
+
log: noop,
|
465
|
+
warn: noop,
|
466
|
+
info: noop,
|
467
|
+
error: noop
|
468
|
+
};
|
469
|
+
function createLogger() {
|
470
|
+
return _extends({}, fakeLogger);
|
471
|
+
}
|
472
|
+
|
473
|
+
// let lastCallTime;
|
474
|
+
// function formatMsgWithTimeInfo(type, msg) {
|
475
|
+
// const now = Date.now();
|
476
|
+
// const diff = lastCallTime ? '+' + (now - lastCallTime) : '0';
|
477
|
+
// lastCallTime = now;
|
478
|
+
// msg = (new Date(now)).toISOString() + ' | [' + type + '] > ' + msg + ' ( ' + diff + ' ms )';
|
479
|
+
// return msg;
|
480
|
+
// }
|
481
|
+
|
482
|
+
function consolePrintFn(type, id) {
|
483
|
+
const func = self.console[type];
|
484
|
+
return func ? func.bind(self.console, `${''}[${type}] >`) : noop;
|
485
|
+
}
|
486
|
+
function getLoggerFn(key, debugConfig, id) {
|
487
|
+
return debugConfig[key] ? debugConfig[key].bind(debugConfig) : consolePrintFn(key);
|
488
|
+
}
|
489
|
+
const exportedLogger = createLogger();
|
490
|
+
function enableLogs(debugConfig, context, id) {
|
491
|
+
// check that console is available
|
492
|
+
const newLogger = createLogger();
|
493
|
+
if (typeof console === 'object' && debugConfig === true || typeof debugConfig === 'object') {
|
494
|
+
const keys = [
|
495
|
+
// Remove out from list here to hard-disable a log-level
|
496
|
+
// 'trace',
|
497
|
+
'debug', 'log', 'info', 'warn', 'error'];
|
498
|
+
keys.forEach(key => {
|
499
|
+
newLogger[key] = getLoggerFn(key, debugConfig);
|
500
|
+
});
|
501
|
+
// Some browsers don't allow to use bind on console object anyway
|
502
|
+
// fallback to default if needed
|
503
|
+
try {
|
504
|
+
newLogger.log(`Debug logs enabled for "${context}" in hls.js version ${"1.5.10-0.canary.10320"}`);
|
505
|
+
} catch (e) {
|
506
|
+
/* log fn threw an exception. All logger methods are no-ops. */
|
507
|
+
return createLogger();
|
508
|
+
}
|
509
|
+
// global exported logger uses the same functions as new logger without `id`
|
510
|
+
keys.forEach(key => {
|
511
|
+
exportedLogger[key] = getLoggerFn(key, debugConfig);
|
512
|
+
});
|
513
|
+
} else {
|
514
|
+
// Reset global exported logger
|
515
|
+
_extends(exportedLogger, newLogger);
|
516
|
+
}
|
517
|
+
return newLogger;
|
518
|
+
}
|
519
|
+
const logger = exportedLogger;
|
520
|
+
|
505
521
|
// Avoid exporting const enum so that these values can be inlined
|
506
522
|
|
507
523
|
function isDateRangeCueAttribute(attrName) {
|
@@ -991,10 +1007,32 @@ class LevelDetails {
|
|
991
1007
|
}
|
992
1008
|
}
|
993
1009
|
|
1010
|
+
var DecrypterAesMode = {
|
1011
|
+
cbc: 0,
|
1012
|
+
ctr: 1
|
1013
|
+
};
|
1014
|
+
|
1015
|
+
function isFullSegmentEncryption(method) {
|
1016
|
+
return method === 'AES-128' || method === 'AES-256' || method === 'AES-256-CTR';
|
1017
|
+
}
|
1018
|
+
function getAesModeFromFullSegmentMethod(method) {
|
1019
|
+
switch (method) {
|
1020
|
+
case 'AES-128':
|
1021
|
+
case 'AES-256':
|
1022
|
+
return DecrypterAesMode.cbc;
|
1023
|
+
case 'AES-256-CTR':
|
1024
|
+
return DecrypterAesMode.ctr;
|
1025
|
+
default:
|
1026
|
+
throw new Error(`invalid full segment method ${method}`);
|
1027
|
+
}
|
1028
|
+
}
|
1029
|
+
|
994
1030
|
// This file is inserted as a shim for modules which we do not want to include into the distro.
|
995
1031
|
// This replacement is done in the "alias" plugin of the rollup config.
|
996
|
-
|
997
|
-
var
|
1032
|
+
// Use a ES dedicated file as Rollup assigns an object in the output
|
1033
|
+
// For example: "var KeySystemFormats = emptyEs.KeySystemFormats;"
|
1034
|
+
var emptyEs = {};
|
1035
|
+
var HevcVideoParser = /*@__PURE__*/getDefaultExportFromCjs(emptyEs);
|
998
1036
|
|
999
1037
|
function sliceUint8(array, start, end) {
|
1000
1038
|
// @ts-expect-error This polyfills IE11 usage of Uint8Array slice.
|
@@ -1002,365 +1040,96 @@ function sliceUint8(array, start, end) {
|
|
1002
1040
|
return Uint8Array.prototype.slice ? array.slice(start, end) : new Uint8Array(Array.prototype.slice.call(array, start, end));
|
1003
1041
|
}
|
1004
1042
|
|
1005
|
-
//
|
1006
|
-
|
1007
|
-
|
1008
|
-
*
|
1009
|
-
*
|
1010
|
-
*
|
1043
|
+
// http://stackoverflow.com/questions/8936984/uint8array-to-string-in-javascript/22373197
|
1044
|
+
// http://www.onicos.com/staff/iz/amuse/javascript/expert/utf.txt
|
1045
|
+
/* utf.js - UTF-8 <=> UTF-16 convertion
|
1046
|
+
*
|
1047
|
+
* Copyright (C) 1999 Masanao Izumo <iz@onicos.co.jp>
|
1048
|
+
* Version: 1.0
|
1049
|
+
* LastModified: Dec 25 1999
|
1050
|
+
* This library is free. You can redistribute it and/or modify it.
|
1011
1051
|
*/
|
1012
|
-
const isHeader$2 = (data, offset) => {
|
1013
|
-
/*
|
1014
|
-
* http://id3.org/id3v2.3.0
|
1015
|
-
* [0] = 'I'
|
1016
|
-
* [1] = 'D'
|
1017
|
-
* [2] = '3'
|
1018
|
-
* [3,4] = {Version}
|
1019
|
-
* [5] = {Flags}
|
1020
|
-
* [6-9] = {ID3 Size}
|
1021
|
-
*
|
1022
|
-
* An ID3v2 tag can be detected with the following pattern:
|
1023
|
-
* $49 44 33 yy yy xx zz zz zz zz
|
1024
|
-
* Where yy is less than $FF, xx is the 'flags' byte and zz is less than $80
|
1025
|
-
*/
|
1026
|
-
if (offset + 10 <= data.length) {
|
1027
|
-
// look for 'ID3' identifier
|
1028
|
-
if (data[offset] === 0x49 && data[offset + 1] === 0x44 && data[offset + 2] === 0x33) {
|
1029
|
-
// check version is within range
|
1030
|
-
if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
|
1031
|
-
// check size is within range
|
1032
|
-
if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
|
1033
|
-
return true;
|
1034
|
-
}
|
1035
|
-
}
|
1036
|
-
}
|
1037
|
-
}
|
1038
|
-
return false;
|
1039
|
-
};
|
1040
|
-
|
1041
1052
|
/**
|
1042
|
-
*
|
1043
|
-
*
|
1044
|
-
* @param
|
1053
|
+
* Converts a UTF-8 array to a string.
|
1054
|
+
*
|
1055
|
+
* @param array - The UTF-8 array to convert
|
1056
|
+
*
|
1057
|
+
* @returns The string
|
1058
|
+
*
|
1059
|
+
* @group Utils
|
1060
|
+
*
|
1061
|
+
* @beta
|
1045
1062
|
*/
|
1046
|
-
|
1047
|
-
|
1048
|
-
|
1049
|
-
|
1050
|
-
|
1051
|
-
|
1052
|
-
|
1053
|
-
|
1054
|
-
if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
|
1055
|
-
// check size is within range
|
1056
|
-
if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
|
1057
|
-
return true;
|
1058
|
-
}
|
1059
|
-
}
|
1063
|
+
function utf8ArrayToStr(array, exitOnNull = false) {
|
1064
|
+
if (typeof TextDecoder !== 'undefined') {
|
1065
|
+
const decoder = new TextDecoder('utf-8');
|
1066
|
+
const decoded = decoder.decode(array);
|
1067
|
+
if (exitOnNull) {
|
1068
|
+
// grab up to the first null
|
1069
|
+
const idx = decoded.indexOf('\0');
|
1070
|
+
return idx !== -1 ? decoded.substring(0, idx) : decoded;
|
1060
1071
|
}
|
1072
|
+
// remove any null characters
|
1073
|
+
return decoded.replace(/\0/g, '');
|
1061
1074
|
}
|
1062
|
-
|
1063
|
-
|
1064
|
-
|
1065
|
-
|
1066
|
-
|
1067
|
-
|
1068
|
-
|
1069
|
-
|
1070
|
-
|
1071
|
-
|
1072
|
-
|
1073
|
-
|
1074
|
-
|
1075
|
-
|
1076
|
-
|
1077
|
-
|
1078
|
-
|
1079
|
-
|
1080
|
-
|
1081
|
-
|
1082
|
-
|
1075
|
+
const len = array.length;
|
1076
|
+
let c;
|
1077
|
+
let char2;
|
1078
|
+
let char3;
|
1079
|
+
let out = '';
|
1080
|
+
let i = 0;
|
1081
|
+
while (i < len) {
|
1082
|
+
c = array[i++];
|
1083
|
+
if (c === 0x00 && exitOnNull) {
|
1084
|
+
return out;
|
1085
|
+
} else if (c === 0x00 || c === 0x03) {
|
1086
|
+
// If the character is 3 (END_OF_TEXT) or 0 (NULL) then skip it
|
1087
|
+
continue;
|
1088
|
+
}
|
1089
|
+
switch (c >> 4) {
|
1090
|
+
case 0:
|
1091
|
+
case 1:
|
1092
|
+
case 2:
|
1093
|
+
case 3:
|
1094
|
+
case 4:
|
1095
|
+
case 5:
|
1096
|
+
case 6:
|
1097
|
+
case 7:
|
1098
|
+
// 0xxxxxxx
|
1099
|
+
out += String.fromCharCode(c);
|
1100
|
+
break;
|
1101
|
+
case 12:
|
1102
|
+
case 13:
|
1103
|
+
// 110x xxxx 10xx xxxx
|
1104
|
+
char2 = array[i++];
|
1105
|
+
out += String.fromCharCode((c & 0x1f) << 6 | char2 & 0x3f);
|
1106
|
+
break;
|
1107
|
+
case 14:
|
1108
|
+
// 1110 xxxx 10xx xxxx 10xx xxxx
|
1109
|
+
char2 = array[i++];
|
1110
|
+
char3 = array[i++];
|
1111
|
+
out += String.fromCharCode((c & 0x0f) << 12 | (char2 & 0x3f) << 6 | (char3 & 0x3f) << 0);
|
1112
|
+
break;
|
1083
1113
|
}
|
1084
|
-
offset += length;
|
1085
|
-
}
|
1086
|
-
if (length > 0) {
|
1087
|
-
return data.subarray(front, front + length);
|
1088
1114
|
}
|
1089
|
-
return
|
1090
|
-
}
|
1091
|
-
const readSize = (data, offset) => {
|
1092
|
-
let size = 0;
|
1093
|
-
size = (data[offset] & 0x7f) << 21;
|
1094
|
-
size |= (data[offset + 1] & 0x7f) << 14;
|
1095
|
-
size |= (data[offset + 2] & 0x7f) << 7;
|
1096
|
-
size |= data[offset + 3] & 0x7f;
|
1097
|
-
return size;
|
1098
|
-
};
|
1099
|
-
const canParse$2 = (data, offset) => {
|
1100
|
-
return isHeader$2(data, offset) && readSize(data, offset + 6) + 10 <= data.length - offset;
|
1101
|
-
};
|
1115
|
+
return out;
|
1116
|
+
}
|
1102
1117
|
|
1103
1118
|
/**
|
1104
|
-
*
|
1105
|
-
* @param data - Block of data containing one or more ID3 tags
|
1119
|
+
* hex dump helper class
|
1106
1120
|
*/
|
1107
|
-
|
1108
|
-
|
1109
|
-
|
1110
|
-
|
1111
|
-
|
1112
|
-
|
1121
|
+
|
1122
|
+
const Hex = {
|
1123
|
+
hexDump: function (array) {
|
1124
|
+
let str = '';
|
1125
|
+
for (let i = 0; i < array.length; i++) {
|
1126
|
+
let h = array[i].toString(16);
|
1127
|
+
if (h.length < 2) {
|
1128
|
+
h = '0' + h;
|
1129
|
+
}
|
1130
|
+
str += h;
|
1113
1131
|
}
|
1114
|
-
|
1115
|
-
return undefined;
|
1116
|
-
};
|
1117
|
-
|
1118
|
-
/**
|
1119
|
-
* Returns true if the ID3 frame is an Elementary Stream timestamp frame
|
1120
|
-
*/
|
1121
|
-
const isTimeStampFrame = frame => {
|
1122
|
-
return frame && frame.key === 'PRIV' && frame.info === 'com.apple.streaming.transportStreamTimestamp';
|
1123
|
-
};
|
1124
|
-
const getFrameData = data => {
|
1125
|
-
/*
|
1126
|
-
Frame ID $xx xx xx xx (four characters)
|
1127
|
-
Size $xx xx xx xx
|
1128
|
-
Flags $xx xx
|
1129
|
-
*/
|
1130
|
-
const type = String.fromCharCode(data[0], data[1], data[2], data[3]);
|
1131
|
-
const size = readSize(data, 4);
|
1132
|
-
|
1133
|
-
// skip frame id, size, and flags
|
1134
|
-
const offset = 10;
|
1135
|
-
return {
|
1136
|
-
type,
|
1137
|
-
size,
|
1138
|
-
data: data.subarray(offset, offset + size)
|
1139
|
-
};
|
1140
|
-
};
|
1141
|
-
|
1142
|
-
/**
|
1143
|
-
* Returns an array of ID3 frames found in all the ID3 tags in the id3Data
|
1144
|
-
* @param id3Data - The ID3 data containing one or more ID3 tags
|
1145
|
-
*/
|
1146
|
-
const getID3Frames = id3Data => {
|
1147
|
-
let offset = 0;
|
1148
|
-
const frames = [];
|
1149
|
-
while (isHeader$2(id3Data, offset)) {
|
1150
|
-
const size = readSize(id3Data, offset + 6);
|
1151
|
-
// skip past ID3 header
|
1152
|
-
offset += 10;
|
1153
|
-
const end = offset + size;
|
1154
|
-
// loop through frames in the ID3 tag
|
1155
|
-
while (offset + 8 < end) {
|
1156
|
-
const frameData = getFrameData(id3Data.subarray(offset));
|
1157
|
-
const frame = decodeFrame(frameData);
|
1158
|
-
if (frame) {
|
1159
|
-
frames.push(frame);
|
1160
|
-
}
|
1161
|
-
|
1162
|
-
// skip frame header and frame data
|
1163
|
-
offset += frameData.size + 10;
|
1164
|
-
}
|
1165
|
-
if (isFooter(id3Data, offset)) {
|
1166
|
-
offset += 10;
|
1167
|
-
}
|
1168
|
-
}
|
1169
|
-
return frames;
|
1170
|
-
};
|
1171
|
-
const decodeFrame = frame => {
|
1172
|
-
if (frame.type === 'PRIV') {
|
1173
|
-
return decodePrivFrame(frame);
|
1174
|
-
} else if (frame.type[0] === 'W') {
|
1175
|
-
return decodeURLFrame(frame);
|
1176
|
-
}
|
1177
|
-
return decodeTextFrame(frame);
|
1178
|
-
};
|
1179
|
-
const decodePrivFrame = frame => {
|
1180
|
-
/*
|
1181
|
-
Format: <text string>\0<binary data>
|
1182
|
-
*/
|
1183
|
-
if (frame.size < 2) {
|
1184
|
-
return undefined;
|
1185
|
-
}
|
1186
|
-
const owner = utf8ArrayToStr(frame.data, true);
|
1187
|
-
const privateData = new Uint8Array(frame.data.subarray(owner.length + 1));
|
1188
|
-
return {
|
1189
|
-
key: frame.type,
|
1190
|
-
info: owner,
|
1191
|
-
data: privateData.buffer
|
1192
|
-
};
|
1193
|
-
};
|
1194
|
-
const decodeTextFrame = frame => {
|
1195
|
-
if (frame.size < 2) {
|
1196
|
-
return undefined;
|
1197
|
-
}
|
1198
|
-
if (frame.type === 'TXXX') {
|
1199
|
-
/*
|
1200
|
-
Format:
|
1201
|
-
[0] = {Text Encoding}
|
1202
|
-
[1-?] = {Description}\0{Value}
|
1203
|
-
*/
|
1204
|
-
let index = 1;
|
1205
|
-
const description = utf8ArrayToStr(frame.data.subarray(index), true);
|
1206
|
-
index += description.length + 1;
|
1207
|
-
const value = utf8ArrayToStr(frame.data.subarray(index));
|
1208
|
-
return {
|
1209
|
-
key: frame.type,
|
1210
|
-
info: description,
|
1211
|
-
data: value
|
1212
|
-
};
|
1213
|
-
}
|
1214
|
-
/*
|
1215
|
-
Format:
|
1216
|
-
[0] = {Text Encoding}
|
1217
|
-
[1-?] = {Value}
|
1218
|
-
*/
|
1219
|
-
const text = utf8ArrayToStr(frame.data.subarray(1));
|
1220
|
-
return {
|
1221
|
-
key: frame.type,
|
1222
|
-
data: text
|
1223
|
-
};
|
1224
|
-
};
|
1225
|
-
const decodeURLFrame = frame => {
|
1226
|
-
if (frame.type === 'WXXX') {
|
1227
|
-
/*
|
1228
|
-
Format:
|
1229
|
-
[0] = {Text Encoding}
|
1230
|
-
[1-?] = {Description}\0{URL}
|
1231
|
-
*/
|
1232
|
-
if (frame.size < 2) {
|
1233
|
-
return undefined;
|
1234
|
-
}
|
1235
|
-
let index = 1;
|
1236
|
-
const description = utf8ArrayToStr(frame.data.subarray(index), true);
|
1237
|
-
index += description.length + 1;
|
1238
|
-
const value = utf8ArrayToStr(frame.data.subarray(index));
|
1239
|
-
return {
|
1240
|
-
key: frame.type,
|
1241
|
-
info: description,
|
1242
|
-
data: value
|
1243
|
-
};
|
1244
|
-
}
|
1245
|
-
/*
|
1246
|
-
Format:
|
1247
|
-
[0-?] = {URL}
|
1248
|
-
*/
|
1249
|
-
const url = utf8ArrayToStr(frame.data);
|
1250
|
-
return {
|
1251
|
-
key: frame.type,
|
1252
|
-
data: url
|
1253
|
-
};
|
1254
|
-
};
|
1255
|
-
const readTimeStamp = timeStampFrame => {
|
1256
|
-
if (timeStampFrame.data.byteLength === 8) {
|
1257
|
-
const data = new Uint8Array(timeStampFrame.data);
|
1258
|
-
// timestamp is 33 bit expressed as a big-endian eight-octet number,
|
1259
|
-
// with the upper 31 bits set to zero.
|
1260
|
-
const pts33Bit = data[3] & 0x1;
|
1261
|
-
let timestamp = (data[4] << 23) + (data[5] << 15) + (data[6] << 7) + data[7];
|
1262
|
-
timestamp /= 45;
|
1263
|
-
if (pts33Bit) {
|
1264
|
-
timestamp += 47721858.84;
|
1265
|
-
} // 2^32 / 90
|
1266
|
-
|
1267
|
-
return Math.round(timestamp);
|
1268
|
-
}
|
1269
|
-
return undefined;
|
1270
|
-
};
|
1271
|
-
|
1272
|
-
// http://stackoverflow.com/questions/8936984/uint8array-to-string-in-javascript/22373197
|
1273
|
-
// http://www.onicos.com/staff/iz/amuse/javascript/expert/utf.txt
|
1274
|
-
/* utf.js - UTF-8 <=> UTF-16 convertion
|
1275
|
-
*
|
1276
|
-
* Copyright (C) 1999 Masanao Izumo <iz@onicos.co.jp>
|
1277
|
-
* Version: 1.0
|
1278
|
-
* LastModified: Dec 25 1999
|
1279
|
-
* This library is free. You can redistribute it and/or modify it.
|
1280
|
-
*/
|
1281
|
-
const utf8ArrayToStr = (array, exitOnNull = false) => {
|
1282
|
-
const decoder = getTextDecoder();
|
1283
|
-
if (decoder) {
|
1284
|
-
const decoded = decoder.decode(array);
|
1285
|
-
if (exitOnNull) {
|
1286
|
-
// grab up to the first null
|
1287
|
-
const idx = decoded.indexOf('\0');
|
1288
|
-
return idx !== -1 ? decoded.substring(0, idx) : decoded;
|
1289
|
-
}
|
1290
|
-
|
1291
|
-
// remove any null characters
|
1292
|
-
return decoded.replace(/\0/g, '');
|
1293
|
-
}
|
1294
|
-
const len = array.length;
|
1295
|
-
let c;
|
1296
|
-
let char2;
|
1297
|
-
let char3;
|
1298
|
-
let out = '';
|
1299
|
-
let i = 0;
|
1300
|
-
while (i < len) {
|
1301
|
-
c = array[i++];
|
1302
|
-
if (c === 0x00 && exitOnNull) {
|
1303
|
-
return out;
|
1304
|
-
} else if (c === 0x00 || c === 0x03) {
|
1305
|
-
// If the character is 3 (END_OF_TEXT) or 0 (NULL) then skip it
|
1306
|
-
continue;
|
1307
|
-
}
|
1308
|
-
switch (c >> 4) {
|
1309
|
-
case 0:
|
1310
|
-
case 1:
|
1311
|
-
case 2:
|
1312
|
-
case 3:
|
1313
|
-
case 4:
|
1314
|
-
case 5:
|
1315
|
-
case 6:
|
1316
|
-
case 7:
|
1317
|
-
// 0xxxxxxx
|
1318
|
-
out += String.fromCharCode(c);
|
1319
|
-
break;
|
1320
|
-
case 12:
|
1321
|
-
case 13:
|
1322
|
-
// 110x xxxx 10xx xxxx
|
1323
|
-
char2 = array[i++];
|
1324
|
-
out += String.fromCharCode((c & 0x1f) << 6 | char2 & 0x3f);
|
1325
|
-
break;
|
1326
|
-
case 14:
|
1327
|
-
// 1110 xxxx 10xx xxxx 10xx xxxx
|
1328
|
-
char2 = array[i++];
|
1329
|
-
char3 = array[i++];
|
1330
|
-
out += String.fromCharCode((c & 0x0f) << 12 | (char2 & 0x3f) << 6 | (char3 & 0x3f) << 0);
|
1331
|
-
break;
|
1332
|
-
}
|
1333
|
-
}
|
1334
|
-
return out;
|
1335
|
-
};
|
1336
|
-
let decoder;
|
1337
|
-
function getTextDecoder() {
|
1338
|
-
// On Play Station 4, TextDecoder is defined but partially implemented.
|
1339
|
-
// Manual decoding option is preferable
|
1340
|
-
if (navigator.userAgent.includes('PlayStation 4')) {
|
1341
|
-
return;
|
1342
|
-
}
|
1343
|
-
if (!decoder && typeof self.TextDecoder !== 'undefined') {
|
1344
|
-
decoder = new self.TextDecoder('utf-8');
|
1345
|
-
}
|
1346
|
-
return decoder;
|
1347
|
-
}
|
1348
|
-
|
1349
|
-
/**
|
1350
|
-
* hex dump helper class
|
1351
|
-
*/
|
1352
|
-
|
1353
|
-
const Hex = {
|
1354
|
-
hexDump: function (array) {
|
1355
|
-
let str = '';
|
1356
|
-
for (let i = 0; i < array.length; i++) {
|
1357
|
-
let h = array[i].toString(16);
|
1358
|
-
if (h.length < 2) {
|
1359
|
-
h = '0' + h;
|
1360
|
-
}
|
1361
|
-
str += h;
|
1362
|
-
}
|
1363
|
-
return str;
|
1132
|
+
return str;
|
1364
1133
|
}
|
1365
1134
|
};
|
1366
1135
|
|
@@ -1626,7 +1395,7 @@ function parseStsd(stsd) {
|
|
1626
1395
|
{
|
1627
1396
|
const codecBox = findBox(sampleEntries, [fourCC])[0];
|
1628
1397
|
const esdsBox = findBox(codecBox.subarray(28), ['esds'])[0];
|
1629
|
-
if (esdsBox && esdsBox.length >
|
1398
|
+
if (esdsBox && esdsBox.length > 7) {
|
1630
1399
|
let i = 4;
|
1631
1400
|
// ES Descriptor tag
|
1632
1401
|
if (esdsBox[i++] !== 0x03) {
|
@@ -1741,7 +1510,9 @@ function parseStsd(stsd) {
|
|
1741
1510
|
}
|
1742
1511
|
function skipBERInteger(bytes, i) {
|
1743
1512
|
const limit = i + 5;
|
1744
|
-
while (bytes[i++] & 0x80 && i < limit) {
|
1513
|
+
while (bytes[i++] & 0x80 && i < limit) {
|
1514
|
+
/* do nothing */
|
1515
|
+
}
|
1745
1516
|
return i;
|
1746
1517
|
}
|
1747
1518
|
function toHex(x) {
|
@@ -2433,12 +2204,12 @@ class LevelKey {
|
|
2433
2204
|
this.keyFormatVersions = formatversions;
|
2434
2205
|
this.iv = iv;
|
2435
2206
|
this.encrypted = method ? method !== 'NONE' : false;
|
2436
|
-
this.isCommonEncryption = this.encrypted && method
|
2207
|
+
this.isCommonEncryption = this.encrypted && !isFullSegmentEncryption(method);
|
2437
2208
|
}
|
2438
2209
|
isSupported() {
|
2439
2210
|
// If it's Segment encryption or No encryption, just select that key system
|
2440
2211
|
if (this.method) {
|
2441
|
-
if (this.method
|
2212
|
+
if (isFullSegmentEncryption(this.method) || this.method === 'NONE') {
|
2442
2213
|
return true;
|
2443
2214
|
}
|
2444
2215
|
if (this.keyFormat === 'identity') {
|
@@ -2452,14 +2223,13 @@ class LevelKey {
|
|
2452
2223
|
if (!this.encrypted || !this.uri) {
|
2453
2224
|
return null;
|
2454
2225
|
}
|
2455
|
-
if (this.method
|
2226
|
+
if (isFullSegmentEncryption(this.method) && this.uri && !this.iv) {
|
2456
2227
|
if (typeof sn !== 'number') {
|
2457
2228
|
// We are fetching decryption data for a initialization segment
|
2458
|
-
// If the segment was encrypted with AES-128
|
2229
|
+
// If the segment was encrypted with AES-128/256
|
2459
2230
|
// It must have an IV defined. We cannot substitute the Segment Number in.
|
2460
|
-
|
2461
|
-
|
2462
|
-
}
|
2231
|
+
logger.warn(`missing IV for initialization segment with method="${this.method}" - compliance issue`);
|
2232
|
+
|
2463
2233
|
// Explicitly set sn to resulting value from implicit conversions 'initSegment' values for IV generation.
|
2464
2234
|
sn = 0;
|
2465
2235
|
}
|
@@ -2609,23 +2379,28 @@ function getCodecCompatibleNameLower(lowerCaseCodec, preferManagedMediaSource =
|
|
2609
2379
|
if (CODEC_COMPATIBLE_NAMES[lowerCaseCodec]) {
|
2610
2380
|
return CODEC_COMPATIBLE_NAMES[lowerCaseCodec];
|
2611
2381
|
}
|
2612
|
-
|
2613
|
-
// Idealy fLaC and Opus would be first (spec-compliant) but
|
2614
|
-
// some browsers will report that fLaC is supported then fail.
|
2615
|
-
// see: https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
|
2616
2382
|
const codecsToCheck = {
|
2383
|
+
// Idealy fLaC and Opus would be first (spec-compliant) but
|
2384
|
+
// some browsers will report that fLaC is supported then fail.
|
2385
|
+
// see: https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
|
2617
2386
|
flac: ['flac', 'fLaC', 'FLAC'],
|
2618
|
-
opus: ['opus', 'Opus']
|
2387
|
+
opus: ['opus', 'Opus'],
|
2388
|
+
// Replace audio codec info if browser does not support mp4a.40.34,
|
2389
|
+
// and demuxer can fallback to 'audio/mpeg' or 'audio/mp4;codecs="mp3"'
|
2390
|
+
'mp4a.40.34': ['mp3']
|
2619
2391
|
}[lowerCaseCodec];
|
2620
2392
|
for (let i = 0; i < codecsToCheck.length; i++) {
|
2393
|
+
var _getMediaSource;
|
2621
2394
|
if (isCodecMediaSourceSupported(codecsToCheck[i], 'audio', preferManagedMediaSource)) {
|
2622
2395
|
CODEC_COMPATIBLE_NAMES[lowerCaseCodec] = codecsToCheck[i];
|
2623
2396
|
return codecsToCheck[i];
|
2397
|
+
} else if (codecsToCheck[i] === 'mp3' && (_getMediaSource = getMediaSource(preferManagedMediaSource)) != null && _getMediaSource.isTypeSupported('audio/mpeg')) {
|
2398
|
+
return '';
|
2624
2399
|
}
|
2625
2400
|
}
|
2626
2401
|
return lowerCaseCodec;
|
2627
2402
|
}
|
2628
|
-
const AUDIO_CODEC_REGEXP = /flac|opus/i;
|
2403
|
+
const AUDIO_CODEC_REGEXP = /flac|opus|mp4a\.40\.34/i;
|
2629
2404
|
function getCodecCompatibleName(codec, preferManagedMediaSource = true) {
|
2630
2405
|
return codec.replace(AUDIO_CODEC_REGEXP, m => getCodecCompatibleNameLower(m.toLowerCase(), preferManagedMediaSource));
|
2631
2406
|
}
|
@@ -2648,6 +2423,16 @@ function convertAVC1ToAVCOTI(codec) {
|
|
2648
2423
|
}
|
2649
2424
|
return codec;
|
2650
2425
|
}
|
2426
|
+
function getM2TSSupportedAudioTypes(preferManagedMediaSource) {
|
2427
|
+
const MediaSource = getMediaSource(preferManagedMediaSource) || {
|
2428
|
+
isTypeSupported: () => false
|
2429
|
+
};
|
2430
|
+
return {
|
2431
|
+
mpeg: MediaSource.isTypeSupported('audio/mpeg'),
|
2432
|
+
mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
|
2433
|
+
ac3: false
|
2434
|
+
};
|
2435
|
+
}
|
2651
2436
|
|
2652
2437
|
const MASTER_PLAYLIST_REGEX = /#EXT-X-STREAM-INF:([^\r\n]*)(?:[\r\n](?:#[^\r\n]*)?)*([^\r\n]+)|#EXT-X-(SESSION-DATA|SESSION-KEY|DEFINE|CONTENT-STEERING|START):([^\r\n]*)[\r\n]+/g;
|
2653
2438
|
const MASTER_PLAYLIST_MEDIA_REGEX = /#EXT-X-MEDIA:(.*)/g;
|
@@ -3448,10 +3233,10 @@ class PlaylistLoader {
|
|
3448
3233
|
const loaderContext = loader.context;
|
3449
3234
|
if (loaderContext && loaderContext.url === context.url && loaderContext.level === context.level) {
|
3450
3235
|
// same URL can't overlap
|
3451
|
-
logger.trace('[playlist-loader]: playlist request ongoing');
|
3236
|
+
this.hls.logger.trace('[playlist-loader]: playlist request ongoing');
|
3452
3237
|
return;
|
3453
3238
|
}
|
3454
|
-
logger.log(`[playlist-loader]: aborting previous loader for type: ${context.type}`);
|
3239
|
+
this.hls.logger.log(`[playlist-loader]: aborting previous loader for type: ${context.type}`);
|
3455
3240
|
loader.abort();
|
3456
3241
|
}
|
3457
3242
|
|
@@ -3561,7 +3346,7 @@ class PlaylistLoader {
|
|
3561
3346
|
// alt audio rendition in which quality levels (main)
|
3562
3347
|
// contains both audio+video. but with mixed audio track not signaled
|
3563
3348
|
if (!embeddedAudioFound && levels[0].audioCodec && !levels[0].attrs.AUDIO) {
|
3564
|
-
logger.log('[playlist-loader]: audio codec signaled in quality level, but no embedded audio track signaled, create one');
|
3349
|
+
this.hls.logger.log('[playlist-loader]: audio codec signaled in quality level, but no embedded audio track signaled, create one');
|
3565
3350
|
audioTracks.unshift({
|
3566
3351
|
type: 'main',
|
3567
3352
|
name: 'main',
|
@@ -3660,7 +3445,7 @@ class PlaylistLoader {
|
|
3660
3445
|
message += ` id: ${context.id} group-id: "${context.groupId}"`;
|
3661
3446
|
}
|
3662
3447
|
const error = new Error(message);
|
3663
|
-
logger.warn(`[playlist-loader]: ${message}`);
|
3448
|
+
this.hls.logger.warn(`[playlist-loader]: ${message}`);
|
3664
3449
|
let details = ErrorDetails.UNKNOWN;
|
3665
3450
|
let fatal = false;
|
3666
3451
|
const loader = this.getInternalLoader(context);
|
@@ -3898,28 +3683,427 @@ var MetadataSchema = {
|
|
3898
3683
|
emsg: "https://aomedia.org/emsg/ID3"
|
3899
3684
|
};
|
3900
3685
|
|
3901
|
-
|
3902
|
-
|
3903
|
-
|
3904
|
-
|
3905
|
-
|
3906
|
-
|
3907
|
-
|
3908
|
-
|
3909
|
-
|
3910
|
-
|
3911
|
-
|
3912
|
-
|
3913
|
-
|
3914
|
-
|
3915
|
-
|
3916
|
-
|
3686
|
+
/**
|
3687
|
+
* Decode an ID3 PRIV frame.
|
3688
|
+
*
|
3689
|
+
* @param frame - the ID3 PRIV frame
|
3690
|
+
*
|
3691
|
+
* @returns The decoded ID3 PRIV frame
|
3692
|
+
*
|
3693
|
+
* @internal
|
3694
|
+
*
|
3695
|
+
* @group ID3
|
3696
|
+
*/
|
3697
|
+
function decodeId3PrivFrame(frame) {
|
3698
|
+
/*
|
3699
|
+
Format: <text string>\0<binary data>
|
3700
|
+
*/
|
3701
|
+
if (frame.size < 2) {
|
3702
|
+
return undefined;
|
3917
3703
|
}
|
3918
|
-
|
3919
|
-
|
3920
|
-
|
3921
|
-
|
3922
|
-
|
3704
|
+
const owner = utf8ArrayToStr(frame.data, true);
|
3705
|
+
const privateData = new Uint8Array(frame.data.subarray(owner.length + 1));
|
3706
|
+
return {
|
3707
|
+
key: frame.type,
|
3708
|
+
info: owner,
|
3709
|
+
data: privateData.buffer
|
3710
|
+
};
|
3711
|
+
}
|
3712
|
+
|
3713
|
+
/**
|
3714
|
+
* Decodes an ID3 text frame
|
3715
|
+
*
|
3716
|
+
* @param frame - the ID3 text frame
|
3717
|
+
*
|
3718
|
+
* @returns The decoded ID3 text frame
|
3719
|
+
*
|
3720
|
+
* @internal
|
3721
|
+
*
|
3722
|
+
* @group ID3
|
3723
|
+
*/
|
3724
|
+
function decodeId3TextFrame(frame) {
|
3725
|
+
if (frame.size < 2) {
|
3726
|
+
return undefined;
|
3727
|
+
}
|
3728
|
+
if (frame.type === 'TXXX') {
|
3729
|
+
/*
|
3730
|
+
Format:
|
3731
|
+
[0] = {Text Encoding}
|
3732
|
+
[1-?] = {Description}\0{Value}
|
3733
|
+
*/
|
3734
|
+
let index = 1;
|
3735
|
+
const description = utf8ArrayToStr(frame.data.subarray(index), true);
|
3736
|
+
index += description.length + 1;
|
3737
|
+
const value = utf8ArrayToStr(frame.data.subarray(index));
|
3738
|
+
return {
|
3739
|
+
key: frame.type,
|
3740
|
+
info: description,
|
3741
|
+
data: value
|
3742
|
+
};
|
3743
|
+
}
|
3744
|
+
/*
|
3745
|
+
Format:
|
3746
|
+
[0] = {Text Encoding}
|
3747
|
+
[1-?] = {Value}
|
3748
|
+
*/
|
3749
|
+
const text = utf8ArrayToStr(frame.data.subarray(1));
|
3750
|
+
return {
|
3751
|
+
key: frame.type,
|
3752
|
+
info: '',
|
3753
|
+
data: text
|
3754
|
+
};
|
3755
|
+
}
|
3756
|
+
|
3757
|
+
/**
|
3758
|
+
* Decode a URL frame
|
3759
|
+
*
|
3760
|
+
* @param frame - the ID3 URL frame
|
3761
|
+
*
|
3762
|
+
* @returns The decoded ID3 URL frame
|
3763
|
+
*
|
3764
|
+
* @internal
|
3765
|
+
*
|
3766
|
+
* @group ID3
|
3767
|
+
*/
|
3768
|
+
function decodeId3UrlFrame(frame) {
|
3769
|
+
if (frame.type === 'WXXX') {
|
3770
|
+
/*
|
3771
|
+
Format:
|
3772
|
+
[0] = {Text Encoding}
|
3773
|
+
[1-?] = {Description}\0{URL}
|
3774
|
+
*/
|
3775
|
+
if (frame.size < 2) {
|
3776
|
+
return undefined;
|
3777
|
+
}
|
3778
|
+
let index = 1;
|
3779
|
+
const description = utf8ArrayToStr(frame.data.subarray(index), true);
|
3780
|
+
index += description.length + 1;
|
3781
|
+
const value = utf8ArrayToStr(frame.data.subarray(index));
|
3782
|
+
return {
|
3783
|
+
key: frame.type,
|
3784
|
+
info: description,
|
3785
|
+
data: value
|
3786
|
+
};
|
3787
|
+
}
|
3788
|
+
/*
|
3789
|
+
Format:
|
3790
|
+
[0-?] = {URL}
|
3791
|
+
*/
|
3792
|
+
const url = utf8ArrayToStr(frame.data);
|
3793
|
+
return {
|
3794
|
+
key: frame.type,
|
3795
|
+
info: '',
|
3796
|
+
data: url
|
3797
|
+
};
|
3798
|
+
}
|
3799
|
+
|
3800
|
+
function toUint8(data, offset = 0, length = Infinity) {
|
3801
|
+
return view(data, offset, length, Uint8Array);
|
3802
|
+
}
|
3803
|
+
function view(data, offset, length, Type) {
|
3804
|
+
const buffer = unsafeGetArrayBuffer(data);
|
3805
|
+
let bytesPerElement = 1;
|
3806
|
+
if ('BYTES_PER_ELEMENT' in Type) {
|
3807
|
+
bytesPerElement = Type.BYTES_PER_ELEMENT;
|
3808
|
+
}
|
3809
|
+
// Absolute end of the |data| view within |buffer|.
|
3810
|
+
const dataOffset = isArrayBufferView(data) ? data.byteOffset : 0;
|
3811
|
+
const dataEnd = (dataOffset + data.byteLength) / bytesPerElement;
|
3812
|
+
// Absolute start of the result within |buffer|.
|
3813
|
+
const rawStart = (dataOffset + offset) / bytesPerElement;
|
3814
|
+
const start = Math.floor(Math.max(0, Math.min(rawStart, dataEnd)));
|
3815
|
+
// Absolute end of the result within |buffer|.
|
3816
|
+
const end = Math.floor(Math.min(start + Math.max(length, 0), dataEnd));
|
3817
|
+
return new Type(buffer, start, end - start);
|
3818
|
+
}
|
3819
|
+
function unsafeGetArrayBuffer(view) {
|
3820
|
+
if (view instanceof ArrayBuffer) {
|
3821
|
+
return view;
|
3822
|
+
} else {
|
3823
|
+
return view.buffer;
|
3824
|
+
}
|
3825
|
+
}
|
3826
|
+
function isArrayBufferView(obj) {
|
3827
|
+
return obj && obj.buffer instanceof ArrayBuffer && obj.byteLength !== undefined && obj.byteOffset !== undefined;
|
3828
|
+
}
|
3829
|
+
|
3830
|
+
function toArrayBuffer(view) {
|
3831
|
+
if (view instanceof ArrayBuffer) {
|
3832
|
+
return view;
|
3833
|
+
} else {
|
3834
|
+
if (view.byteOffset == 0 && view.byteLength == view.buffer.byteLength) {
|
3835
|
+
// This is a TypedArray over the whole buffer.
|
3836
|
+
return view.buffer;
|
3837
|
+
}
|
3838
|
+
// This is a 'view' on the buffer. Create a new buffer that only contains
|
3839
|
+
// the data. Note that since this isn't an ArrayBuffer, the 'new' call
|
3840
|
+
// will allocate a new buffer to hold the copy.
|
3841
|
+
return new Uint8Array(view).buffer;
|
3842
|
+
}
|
3843
|
+
}
|
3844
|
+
|
3845
|
+
function decodeId3ImageFrame(frame) {
|
3846
|
+
const metadataFrame = {
|
3847
|
+
key: frame.type,
|
3848
|
+
description: '',
|
3849
|
+
data: '',
|
3850
|
+
mimeType: null,
|
3851
|
+
pictureType: null
|
3852
|
+
};
|
3853
|
+
const utf8Encoding = 0x03;
|
3854
|
+
if (frame.size < 2) {
|
3855
|
+
return undefined;
|
3856
|
+
}
|
3857
|
+
if (frame.data[0] !== utf8Encoding) {
|
3858
|
+
console.log('Ignore frame with unrecognized character ' + 'encoding');
|
3859
|
+
return undefined;
|
3860
|
+
}
|
3861
|
+
const mimeTypeEndIndex = frame.data.subarray(1).indexOf(0);
|
3862
|
+
if (mimeTypeEndIndex === -1) {
|
3863
|
+
return undefined;
|
3864
|
+
}
|
3865
|
+
const mimeType = utf8ArrayToStr(toUint8(frame.data, 1, mimeTypeEndIndex));
|
3866
|
+
const pictureType = frame.data[2 + mimeTypeEndIndex];
|
3867
|
+
const descriptionEndIndex = frame.data.subarray(3 + mimeTypeEndIndex).indexOf(0);
|
3868
|
+
if (descriptionEndIndex === -1) {
|
3869
|
+
return undefined;
|
3870
|
+
}
|
3871
|
+
const description = utf8ArrayToStr(toUint8(frame.data, 3 + mimeTypeEndIndex, descriptionEndIndex));
|
3872
|
+
let data;
|
3873
|
+
if (mimeType === '-->') {
|
3874
|
+
data = utf8ArrayToStr(toUint8(frame.data, 4 + mimeTypeEndIndex + descriptionEndIndex));
|
3875
|
+
} else {
|
3876
|
+
data = toArrayBuffer(frame.data.subarray(4 + mimeTypeEndIndex + descriptionEndIndex));
|
3877
|
+
}
|
3878
|
+
metadataFrame.mimeType = mimeType;
|
3879
|
+
metadataFrame.pictureType = pictureType;
|
3880
|
+
metadataFrame.description = description;
|
3881
|
+
metadataFrame.data = data;
|
3882
|
+
return metadataFrame;
|
3883
|
+
}
|
3884
|
+
|
3885
|
+
/**
|
3886
|
+
* Decode an ID3 frame.
|
3887
|
+
*
|
3888
|
+
* @param frame - the ID3 frame
|
3889
|
+
*
|
3890
|
+
* @returns The decoded ID3 frame
|
3891
|
+
*
|
3892
|
+
* @internal
|
3893
|
+
*
|
3894
|
+
* @group ID3
|
3895
|
+
*/
|
3896
|
+
function decodeId3Frame(frame) {
|
3897
|
+
if (frame.type === 'PRIV') {
|
3898
|
+
return decodeId3PrivFrame(frame);
|
3899
|
+
} else if (frame.type[0] === 'W') {
|
3900
|
+
return decodeId3UrlFrame(frame);
|
3901
|
+
} else if (frame.type === 'APIC') {
|
3902
|
+
return decodeId3ImageFrame(frame);
|
3903
|
+
}
|
3904
|
+
return decodeId3TextFrame(frame);
|
3905
|
+
}
|
3906
|
+
|
3907
|
+
/**
|
3908
|
+
* Read ID3 size
|
3909
|
+
*
|
3910
|
+
* @param data - The data to read from
|
3911
|
+
* @param offset - The offset at which to start reading
|
3912
|
+
*
|
3913
|
+
* @returns The size
|
3914
|
+
*
|
3915
|
+
* @internal
|
3916
|
+
*
|
3917
|
+
* @group ID3
|
3918
|
+
*/
|
3919
|
+
function readId3Size(data, offset) {
|
3920
|
+
let size = 0;
|
3921
|
+
size = (data[offset] & 0x7f) << 21;
|
3922
|
+
size |= (data[offset + 1] & 0x7f) << 14;
|
3923
|
+
size |= (data[offset + 2] & 0x7f) << 7;
|
3924
|
+
size |= data[offset + 3] & 0x7f;
|
3925
|
+
return size;
|
3926
|
+
}
|
3927
|
+
|
3928
|
+
/**
|
3929
|
+
* Returns the data of an ID3 frame.
|
3930
|
+
*
|
3931
|
+
* @param data - The data to read from
|
3932
|
+
*
|
3933
|
+
* @returns The data of the ID3 frame
|
3934
|
+
*
|
3935
|
+
* @internal
|
3936
|
+
*
|
3937
|
+
* @group ID3
|
3938
|
+
*/
|
3939
|
+
function getId3FrameData(data) {
|
3940
|
+
/*
|
3941
|
+
Frame ID $xx xx xx xx (four characters)
|
3942
|
+
Size $xx xx xx xx
|
3943
|
+
Flags $xx xx
|
3944
|
+
*/
|
3945
|
+
const type = String.fromCharCode(data[0], data[1], data[2], data[3]);
|
3946
|
+
const size = readId3Size(data, 4);
|
3947
|
+
// skip frame id, size, and flags
|
3948
|
+
const offset = 10;
|
3949
|
+
return {
|
3950
|
+
type,
|
3951
|
+
size,
|
3952
|
+
data: data.subarray(offset, offset + size)
|
3953
|
+
};
|
3954
|
+
}
|
3955
|
+
|
3956
|
+
/**
|
3957
|
+
* Returns true if an ID3 footer can be found at offset in data
|
3958
|
+
*
|
3959
|
+
* @param data - The data to search in
|
3960
|
+
* @param offset - The offset at which to start searching
|
3961
|
+
*
|
3962
|
+
* @returns `true` if an ID3 footer is found
|
3963
|
+
*
|
3964
|
+
* @internal
|
3965
|
+
*
|
3966
|
+
* @group ID3
|
3967
|
+
*/
|
3968
|
+
function isId3Footer(data, offset) {
|
3969
|
+
/*
|
3970
|
+
* The footer is a copy of the header, but with a different identifier
|
3971
|
+
*/
|
3972
|
+
if (offset + 10 <= data.length) {
|
3973
|
+
// look for '3DI' identifier
|
3974
|
+
if (data[offset] === 0x33 && data[offset + 1] === 0x44 && data[offset + 2] === 0x49) {
|
3975
|
+
// check version is within range
|
3976
|
+
if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
|
3977
|
+
// check size is within range
|
3978
|
+
if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
|
3979
|
+
return true;
|
3980
|
+
}
|
3981
|
+
}
|
3982
|
+
}
|
3983
|
+
}
|
3984
|
+
return false;
|
3985
|
+
}
|
3986
|
+
|
3987
|
+
/**
|
3988
|
+
* Returns true if an ID3 header can be found at offset in data
|
3989
|
+
*
|
3990
|
+
* @param data - The data to search in
|
3991
|
+
* @param offset - The offset at which to start searching
|
3992
|
+
*
|
3993
|
+
* @returns `true` if an ID3 header is found
|
3994
|
+
*
|
3995
|
+
* @internal
|
3996
|
+
*
|
3997
|
+
* @group ID3
|
3998
|
+
*/
|
3999
|
+
function isId3Header(data, offset) {
|
4000
|
+
/*
|
4001
|
+
* http://id3.org/id3v2.3.0
|
4002
|
+
* [0] = 'I'
|
4003
|
+
* [1] = 'D'
|
4004
|
+
* [2] = '3'
|
4005
|
+
* [3,4] = {Version}
|
4006
|
+
* [5] = {Flags}
|
4007
|
+
* [6-9] = {ID3 Size}
|
4008
|
+
*
|
4009
|
+
* An ID3v2 tag can be detected with the following pattern:
|
4010
|
+
* $49 44 33 yy yy xx zz zz zz zz
|
4011
|
+
* Where yy is less than $FF, xx is the 'flags' byte and zz is less than $80
|
4012
|
+
*/
|
4013
|
+
if (offset + 10 <= data.length) {
|
4014
|
+
// look for 'ID3' identifier
|
4015
|
+
if (data[offset] === 0x49 && data[offset + 1] === 0x44 && data[offset + 2] === 0x33) {
|
4016
|
+
// check version is within range
|
4017
|
+
if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
|
4018
|
+
// check size is within range
|
4019
|
+
if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
|
4020
|
+
return true;
|
4021
|
+
}
|
4022
|
+
}
|
4023
|
+
}
|
4024
|
+
}
|
4025
|
+
return false;
|
4026
|
+
}
|
4027
|
+
|
4028
|
+
const HEADER_FOOTER_SIZE = 10;
|
4029
|
+
const FRAME_SIZE = 10;
|
4030
|
+
/**
|
4031
|
+
* Returns an array of ID3 frames found in all the ID3 tags in the id3Data
|
4032
|
+
*
|
4033
|
+
* @param id3Data - The ID3 data containing one or more ID3 tags
|
4034
|
+
*
|
4035
|
+
* @returns Array of ID3 frame objects
|
4036
|
+
*
|
4037
|
+
* @group ID3
|
4038
|
+
*
|
4039
|
+
* @beta
|
4040
|
+
*/
|
4041
|
+
function getId3Frames(id3Data) {
|
4042
|
+
let offset = 0;
|
4043
|
+
const frames = [];
|
4044
|
+
while (isId3Header(id3Data, offset)) {
|
4045
|
+
const size = readId3Size(id3Data, offset + 6);
|
4046
|
+
if (id3Data[offset + 5] >> 6 & 1) {
|
4047
|
+
// skip extended header
|
4048
|
+
offset += HEADER_FOOTER_SIZE;
|
4049
|
+
}
|
4050
|
+
// skip past ID3 header
|
4051
|
+
offset += HEADER_FOOTER_SIZE;
|
4052
|
+
const end = offset + size;
|
4053
|
+
// loop through frames in the ID3 tag
|
4054
|
+
while (offset + FRAME_SIZE < end) {
|
4055
|
+
const frameData = getId3FrameData(id3Data.subarray(offset));
|
4056
|
+
const frame = decodeId3Frame(frameData);
|
4057
|
+
if (frame) {
|
4058
|
+
frames.push(frame);
|
4059
|
+
}
|
4060
|
+
// skip frame header and frame data
|
4061
|
+
offset += frameData.size + HEADER_FOOTER_SIZE;
|
4062
|
+
}
|
4063
|
+
if (isId3Footer(id3Data, offset)) {
|
4064
|
+
offset += HEADER_FOOTER_SIZE;
|
4065
|
+
}
|
4066
|
+
}
|
4067
|
+
return frames;
|
4068
|
+
}
|
4069
|
+
|
4070
|
+
/**
|
4071
|
+
* Returns true if the ID3 frame is an Elementary Stream timestamp frame
|
4072
|
+
*
|
4073
|
+
* @param frame - the ID3 frame
|
4074
|
+
*
|
4075
|
+
* @returns `true` if the ID3 frame is an Elementary Stream timestamp frame
|
4076
|
+
*
|
4077
|
+
* @internal
|
4078
|
+
*
|
4079
|
+
* @group ID3
|
4080
|
+
*/
|
4081
|
+
function isId3TimestampFrame(frame) {
|
4082
|
+
return frame && frame.key === 'PRIV' && frame.info === 'com.apple.streaming.transportStreamTimestamp';
|
4083
|
+
}
|
4084
|
+
|
4085
|
+
const MIN_CUE_DURATION = 0.25;
|
4086
|
+
function getCueClass() {
|
4087
|
+
if (typeof self === 'undefined') return undefined;
|
4088
|
+
return self.VTTCue || self.TextTrackCue;
|
4089
|
+
}
|
4090
|
+
function createCueWithDataFields(Cue, startTime, endTime, data, type) {
|
4091
|
+
let cue = new Cue(startTime, endTime, '');
|
4092
|
+
try {
|
4093
|
+
cue.value = data;
|
4094
|
+
if (type) {
|
4095
|
+
cue.type = type;
|
4096
|
+
}
|
4097
|
+
} catch (e) {
|
4098
|
+
cue = new Cue(startTime, endTime, JSON.stringify(type ? _objectSpread2({
|
4099
|
+
type
|
4100
|
+
}, data) : data));
|
4101
|
+
}
|
4102
|
+
return cue;
|
4103
|
+
}
|
4104
|
+
|
4105
|
+
// VTTCue latest draft allows an infinite duration, fallback
|
4106
|
+
// to MAX_VALUE if necessary
|
3923
4107
|
const MAX_CUE_ENDTIME = (() => {
|
3924
4108
|
const Cue = getCueClass();
|
3925
4109
|
try {
|
@@ -3980,11 +4164,10 @@ class ID3TrackController {
|
|
3980
4164
|
this.media = data.media;
|
3981
4165
|
}
|
3982
4166
|
onMediaDetaching() {
|
3983
|
-
if (
|
3984
|
-
|
4167
|
+
if (this.id3Track) {
|
4168
|
+
clearCurrentCues(this.id3Track);
|
4169
|
+
this.id3Track = null;
|
3985
4170
|
}
|
3986
|
-
clearCurrentCues(this.id3Track);
|
3987
|
-
this.id3Track = null;
|
3988
4171
|
this.media = null;
|
3989
4172
|
this.dateRangeCuesAppended = {};
|
3990
4173
|
}
|
@@ -4043,7 +4226,7 @@ class ID3TrackController {
|
|
4043
4226
|
if (type === MetadataSchema.emsg && !enableEmsgMetadataCues || !enableID3MetadataCues) {
|
4044
4227
|
continue;
|
4045
4228
|
}
|
4046
|
-
const frames =
|
4229
|
+
const frames = getId3Frames(samples[i].data);
|
4047
4230
|
if (frames) {
|
4048
4231
|
const startTime = samples[i].pts;
|
4049
4232
|
let endTime = startTime + samples[i].duration;
|
@@ -4057,7 +4240,7 @@ class ID3TrackController {
|
|
4057
4240
|
for (let j = 0; j < frames.length; j++) {
|
4058
4241
|
const frame = frames[j];
|
4059
4242
|
// Safari doesn't put the timestamp frame in the TextTrack
|
4060
|
-
if (!
|
4243
|
+
if (!isId3TimestampFrame(frame)) {
|
4061
4244
|
// add a bounds to any unbounded cues
|
4062
4245
|
this.updateId3CueEnds(startTime, type);
|
4063
4246
|
const cue = createCueWithDataFields(Cue, startTime, endTime, frame, type);
|
@@ -4225,7 +4408,47 @@ class LatencyController {
|
|
4225
4408
|
this.currentTime = 0;
|
4226
4409
|
this.stallCount = 0;
|
4227
4410
|
this._latency = null;
|
4228
|
-
this.
|
4411
|
+
this.onTimeupdate = () => {
|
4412
|
+
const {
|
4413
|
+
media,
|
4414
|
+
levelDetails
|
4415
|
+
} = this;
|
4416
|
+
if (!media || !levelDetails) {
|
4417
|
+
return;
|
4418
|
+
}
|
4419
|
+
this.currentTime = media.currentTime;
|
4420
|
+
const latency = this.computeLatency();
|
4421
|
+
if (latency === null) {
|
4422
|
+
return;
|
4423
|
+
}
|
4424
|
+
this._latency = latency;
|
4425
|
+
|
4426
|
+
// Adapt playbackRate to meet target latency in low-latency mode
|
4427
|
+
const {
|
4428
|
+
lowLatencyMode,
|
4429
|
+
maxLiveSyncPlaybackRate
|
4430
|
+
} = this.config;
|
4431
|
+
if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1 || !levelDetails.live) {
|
4432
|
+
return;
|
4433
|
+
}
|
4434
|
+
const targetLatency = this.targetLatency;
|
4435
|
+
if (targetLatency === null) {
|
4436
|
+
return;
|
4437
|
+
}
|
4438
|
+
const distanceFromTarget = latency - targetLatency;
|
4439
|
+
// Only adjust playbackRate when within one target duration of targetLatency
|
4440
|
+
// and more than one second from under-buffering.
|
4441
|
+
// Playback further than one target duration from target can be considered DVR playback.
|
4442
|
+
const liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration);
|
4443
|
+
const inLiveRange = distanceFromTarget < liveMinLatencyDuration;
|
4444
|
+
if (inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) {
|
4445
|
+
const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
|
4446
|
+
const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20;
|
4447
|
+
media.playbackRate = Math.min(max, Math.max(1, rate));
|
4448
|
+
} else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
|
4449
|
+
media.playbackRate = 1;
|
4450
|
+
}
|
4451
|
+
};
|
4229
4452
|
this.hls = hls;
|
4230
4453
|
this.config = hls.config;
|
4231
4454
|
this.registerListeners();
|
@@ -4317,7 +4540,7 @@ class LatencyController {
|
|
4317
4540
|
this.onMediaDetaching();
|
4318
4541
|
this.levelDetails = null;
|
4319
4542
|
// @ts-ignore
|
4320
|
-
this.hls =
|
4543
|
+
this.hls = null;
|
4321
4544
|
}
|
4322
4545
|
registerListeners() {
|
4323
4546
|
this.hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
|
@@ -4335,11 +4558,11 @@ class LatencyController {
|
|
4335
4558
|
}
|
4336
4559
|
onMediaAttached(event, data) {
|
4337
4560
|
this.media = data.media;
|
4338
|
-
this.media.addEventListener('timeupdate', this.
|
4561
|
+
this.media.addEventListener('timeupdate', this.onTimeupdate);
|
4339
4562
|
}
|
4340
4563
|
onMediaDetaching() {
|
4341
4564
|
if (this.media) {
|
4342
|
-
this.media.removeEventListener('timeupdate', this.
|
4565
|
+
this.media.removeEventListener('timeupdate', this.onTimeupdate);
|
4343
4566
|
this.media = null;
|
4344
4567
|
}
|
4345
4568
|
}
|
@@ -4353,10 +4576,10 @@ class LatencyController {
|
|
4353
4576
|
}) {
|
4354
4577
|
this.levelDetails = details;
|
4355
4578
|
if (details.advanced) {
|
4356
|
-
this.
|
4579
|
+
this.onTimeupdate();
|
4357
4580
|
}
|
4358
4581
|
if (!details.live && this.media) {
|
4359
|
-
this.media.removeEventListener('timeupdate', this.
|
4582
|
+
this.media.removeEventListener('timeupdate', this.onTimeupdate);
|
4360
4583
|
}
|
4361
4584
|
}
|
4362
4585
|
onError(event, data) {
|
@@ -4366,48 +4589,7 @@ class LatencyController {
|
|
4366
4589
|
}
|
4367
4590
|
this.stallCount++;
|
4368
4591
|
if ((_this$levelDetails = this.levelDetails) != null && _this$levelDetails.live) {
|
4369
|
-
logger.warn('[
|
4370
|
-
}
|
4371
|
-
}
|
4372
|
-
timeupdate() {
|
4373
|
-
const {
|
4374
|
-
media,
|
4375
|
-
levelDetails
|
4376
|
-
} = this;
|
4377
|
-
if (!media || !levelDetails) {
|
4378
|
-
return;
|
4379
|
-
}
|
4380
|
-
this.currentTime = media.currentTime;
|
4381
|
-
const latency = this.computeLatency();
|
4382
|
-
if (latency === null) {
|
4383
|
-
return;
|
4384
|
-
}
|
4385
|
-
this._latency = latency;
|
4386
|
-
|
4387
|
-
// Adapt playbackRate to meet target latency in low-latency mode
|
4388
|
-
const {
|
4389
|
-
lowLatencyMode,
|
4390
|
-
maxLiveSyncPlaybackRate
|
4391
|
-
} = this.config;
|
4392
|
-
if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1 || !levelDetails.live) {
|
4393
|
-
return;
|
4394
|
-
}
|
4395
|
-
const targetLatency = this.targetLatency;
|
4396
|
-
if (targetLatency === null) {
|
4397
|
-
return;
|
4398
|
-
}
|
4399
|
-
const distanceFromTarget = latency - targetLatency;
|
4400
|
-
// Only adjust playbackRate when within one target duration of targetLatency
|
4401
|
-
// and more than one second from under-buffering.
|
4402
|
-
// Playback further than one target duration from target can be considered DVR playback.
|
4403
|
-
const liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration);
|
4404
|
-
const inLiveRange = distanceFromTarget < liveMinLatencyDuration;
|
4405
|
-
if (inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) {
|
4406
|
-
const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
|
4407
|
-
const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20;
|
4408
|
-
media.playbackRate = Math.min(max, Math.max(1, rate));
|
4409
|
-
} else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
|
4410
|
-
media.playbackRate = 1;
|
4592
|
+
this.hls.logger.warn('[latency-controller]: Stall detected, adjusting target latency');
|
4411
4593
|
}
|
4412
4594
|
}
|
4413
4595
|
estimateLiveEdge() {
|
@@ -5182,18 +5364,13 @@ var ErrorActionFlags = {
   MoveAllAlternatesMatchingHDCP: 2,
   SwitchToSDR: 4
 }; // Reserved for future use
-class ErrorController {
+class ErrorController extends Logger {
   constructor(hls) {
+    super('error-controller', hls.logger);
     this.hls = void 0;
     this.playlistError = 0;
     this.penalizedRenditions = {};
-    this.log = void 0;
-    this.warn = void 0;
-    this.error = void 0;
     this.hls = hls;
-    this.log = logger.log.bind(logger, `[info]:`);
-    this.warn = logger.warn.bind(logger, `[warning]:`);
-    this.error = logger.error.bind(logger, `[error]:`);
     this.registerListeners();
   }
   registerListeners() {
@@ -5545,16 +5722,13 @@ class ErrorController {
   }
 }

-class BasePlaylistController {
+class BasePlaylistController extends Logger {
   constructor(hls, logPrefix) {
+    super(logPrefix, hls.logger);
     this.hls = void 0;
     this.timer = -1;
     this.requestScheduled = -1;
     this.canLoad = false;
-    this.log = void 0;
-    this.warn = void 0;
-    this.log = logger.log.bind(logger, `${logPrefix}:`);
-    this.warn = logger.warn.bind(logger, `${logPrefix}:`);
     this.hls = hls;
   }
   destroy() {
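Note: this is the recurring refactor in this release: controllers that used to bind `logger.log`/`logger.warn` by hand (ErrorController above, BasePlaylistController here, and AbrController, BufferController, ContentSteeringController, TaskLoop below) now extend a shared `Logger` base constructed with a label and the per-instance `hls.logger`. A hedged sketch of what such a base looks like; the real class ships elsewhere in the bundle and may differ in detail:

```ts
// Assumed shape of the logger object passed around as hls.logger.
interface ILogger {
  trace: (...args: any[]) => void;
  debug: (...args: any[]) => void;
  log: (...args: any[]) => void;
  info: (...args: any[]) => void;
  warn: (...args: any[]) => void;
  error: (...args: any[]) => void;
}

// Illustrative Logger base, not the verbatim hls.js implementation. Each method is
// pre-bound with a "[label]:" prefix, which is why subclasses in this diff can call
// this.log(...), this.warn(...), this.info(...) and this.trace(...) directly.
class Logger implements ILogger {
  trace: (...args: any[]) => void;
  debug: (...args: any[]) => void;
  log: (...args: any[]) => void;
  info: (...args: any[]) => void;
  warn: (...args: any[]) => void;
  error: (...args: any[]) => void;
  constructor(label: string, logger: ILogger) {
    const prefix = `[${label}]:`;
    this.trace = logger.trace.bind(logger, prefix);
    this.debug = logger.debug.bind(logger, prefix);
    this.log = logger.log.bind(logger, prefix);
    this.info = logger.info.bind(logger, prefix);
    this.warn = logger.warn.bind(logger, prefix);
    this.error = logger.error.bind(logger, prefix);
  }
}

// Usage mirroring the constructors in this diff:
// class BasePlaylistController extends Logger {
//   constructor(hls: { logger: ILogger }, logPrefix: string) {
//     super(logPrefix, hls.logger);
//   }
// }
```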
@@ -5587,7 +5761,7 @@ class BasePlaylistController {
       try {
         uri = new self.URL(attr.URI, previous.url).href;
       } catch (error) {
-
+        this.warn(`Could not construct new URL for Rendition Report: ${error}`);
         uri = attr.URI || '';
       }
       // Use exact match. Otherwise, the last partial match, if any, will be used
@@ -5675,7 +5849,12 @@ class BasePlaylistController {
       const cdnAge = lastAdvanced + details.ageHeader;
       let currentGoal = Math.min(cdnAge - details.partTarget, details.targetduration * 1.5);
       if (currentGoal > 0) {
-        if (
+        if (cdnAge > details.targetduration * 3) {
+          // Omit segment and part directives when the last response was more than 3 target durations ago,
+          this.log(`Playlist last advanced ${lastAdvanced.toFixed(2)}s ago. Omitting segment and part directives.`);
+          msn = undefined;
+          part = undefined;
+        } else if (previousDetails != null && previousDetails.tuneInGoal && cdnAge - details.partTarget > previousDetails.tuneInGoal) {
           // If we attempted to get the next or latest playlist update, but currentGoal increased,
           // then we either can't catchup, or the "age" header cannot be trusted.
           this.warn(`CDN Tune-in goal increased from: ${previousDetails.tuneInGoal} to: ${currentGoal} with playlist age: ${details.age}`);
@@ -6121,8 +6300,9 @@ function getCodecTiers(levels, audioTracksByGroup, minAutoLevel, maxAutoLevel) {
   }, {});
 }

-class AbrController {
+class AbrController extends Logger {
   constructor(_hls) {
+    super('abr', _hls.logger);
     this.hls = void 0;
     this.lastLevelLoadSec = 0;
     this.lastLoadedFragLevel = -1;
@@ -6236,7 +6416,7 @@ class AbrController {
       this.resetEstimator(nextLoadLevelBitrate);
     }
     this.clearTimer();
-
+    this.warn(`Fragment ${frag.sn}${part ? ' part ' + part.index : ''} of level ${frag.level} is loading too slowly;
       Time to underbuffer: ${bufferStarvationDelay.toFixed(3)} s
       Estimated load time for current fragment: ${fragLoadedDelay.toFixed(3)} s
       Estimated load time for down switch fragment: ${fragLevelNextLoadedDelay.toFixed(3)} s
@@ -6256,7 +6436,7 @@ class AbrController {
   }
   resetEstimator(abrEwmaDefaultEstimate) {
     if (abrEwmaDefaultEstimate) {
-
+      this.log(`setting initial bwe to ${abrEwmaDefaultEstimate}`);
       this.hls.config.abrEwmaDefaultEstimate = abrEwmaDefaultEstimate;
     }
     this.firstSelection = -1;
@@ -6488,7 +6668,7 @@ class AbrController {
     }
     const firstLevel = this.hls.firstLevel;
     const clamped = Math.min(Math.max(firstLevel, minAutoLevel), maxAutoLevel);
-
+    this.warn(`Could not find best starting auto level. Defaulting to first in playlist ${firstLevel} clamped to ${clamped}`);
     return clamped;
   }
   get forcedAutoLevel() {
@@ -6534,6 +6714,9 @@ class AbrController {
       partCurrent,
       hls
     } = this;
+    if (hls.levels.length <= 1) {
+      return hls.loadLevel;
+    }
     const {
       maxAutoLevel,
       config,
@@ -6566,13 +6749,13 @@
         // cap maxLoadingDelay and ensure it is not bigger 'than bitrate test' frag duration
         const maxLoadingDelay = currentFragDuration ? Math.min(currentFragDuration, config.maxLoadingDelay) : config.maxLoadingDelay;
         maxStarvationDelay = maxLoadingDelay - bitrateTestDelay;
-
+        this.info(`bitrate test took ${Math.round(1000 * bitrateTestDelay)}ms, set first fragment max fetchDuration to ${Math.round(1000 * maxStarvationDelay)} ms`);
         // don't use conservative factor on bitrate test
         bwFactor = bwUpFactor = 1;
       }
     }
     const bestLevel = this.findBestLevel(avgbw, minAutoLevel, maxAutoLevel, bufferStarvationDelay, maxStarvationDelay, bwFactor, bwUpFactor);
-
+    this.info(`${bufferStarvationDelay ? 'rebuffering expected' : 'buffer is empty'}, optimal quality level ${bestLevel}`);
     if (bestLevel > -1) {
       return bestLevel;
     }
@@ -6646,7 +6829,7 @@
       currentVideoRange = preferHDR ? videoRanges[videoRanges.length - 1] : videoRanges[0];
       currentFrameRate = minFramerate;
       currentBw = Math.max(currentBw, minBitrate);
-
+      this.log(`picked start tier ${JSON.stringify(startTier)}`);
     } else {
       currentCodecSet = level == null ? void 0 : level.codecSet;
       currentVideoRange = level == null ? void 0 : level.videoRange;
@@ -6699,9 +6882,9 @@
       const forcedAutoLevel = this.forcedAutoLevel;
       if (i !== loadLevel && (forcedAutoLevel === -1 || forcedAutoLevel !== loadLevel)) {
         if (levelsSkipped.length) {
-
+          this.trace(`Skipped level(s) ${levelsSkipped.join(',')} of ${maxAutoLevel} max with CODECS and VIDEO-RANGE:"${levels[levelsSkipped[0]].codecs}" ${levels[levelsSkipped[0]].videoRange}; not compatible with "${level.codecs}" ${currentVideoRange}`);
         }
-
+        this.info(`switch candidate:${selectionBaseLevel}->${i} adjustedbw(${Math.round(adjustedbw)})-bitrate=${Math.round(adjustedbw - bitrate)} ttfb:${ttfbEstimateSec.toFixed(1)} avgDuration:${avgDuration.toFixed(1)} maxFetchDuration:${maxFetchDuration.toFixed(1)} fetchDuration:${fetchDuration.toFixed(1)} firstSelection:${firstSelection} codecSet:${currentCodecSet} videoRange:${currentVideoRange} hls.loadLevel:${loadLevel}`);
       }
       if (firstSelection) {
         this.firstSelection = i;
@@ -6744,40 +6927,29 @@ class BufferHelper {
   * Return true if `media`'s buffered include `position`
   */
   static isBuffered(media, position) {
-
-
-
-
-
-      return true;
-    }
+    if (media) {
+      const buffered = BufferHelper.getBuffered(media);
+      for (let i = buffered.length; i--;) {
+        if (position >= buffered.start(i) && position <= buffered.end(i)) {
+          return true;
         }
       }
-    } catch (error) {
-      // this is to catch
-      // InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
-      // This SourceBuffer has been removed from the parent media source
     }
     return false;
   }
   static bufferInfo(media, pos, maxHoleDuration) {
-
-
-
+    if (media) {
+      const vbuffered = BufferHelper.getBuffered(media);
+      if (vbuffered.length) {
         const buffered = [];
-        let i;
-        for (i = 0; i < vbuffered.length; i++) {
+        for (let i = 0; i < vbuffered.length; i++) {
           buffered.push({
             start: vbuffered.start(i),
             end: vbuffered.end(i)
           });
         }
-        return
+        return BufferHelper.bufferedInfo(buffered, pos, maxHoleDuration);
       }
-    } catch (error) {
-      // this is to catch
-      // InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
-      // This SourceBuffer has been removed from the parent media source
     }
     return {
       len: 0,
@@ -6789,14 +6961,7 @@ class BufferHelper {
   static bufferedInfo(buffered, pos, maxHoleDuration) {
     pos = Math.max(0, pos);
     // sort on buffer.start/smaller end (IE does not always return sorted buffered range)
-    buffered.sort(
-      const diff = a.start - b.start;
-      if (diff) {
-        return diff;
-      } else {
-        return b.end - a.end;
-      }
-    });
+    buffered.sort((a, b) => a.start - b.start || b.end - a.end);
     let buffered2 = [];
     if (maxHoleDuration) {
       // there might be some small holes between buffer time range
@@ -6863,7 +7028,7 @@
   */
   static getBuffered(media) {
     try {
-      return media.buffered;
+      return media.buffered || noopBuffered;
     } catch (e) {
       logger.log('failed to get media.buffered', e);
       return noopBuffered;
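Note: the try/catch that used to wrap `media.buffered` reads moves into `getBuffered()` (which now also falls back to `noopBuffered` when `media.buffered` is undefined), so `isBuffered` and `bufferInfo` reduce to plain guards. A self-contained restatement of the new `isBuffered` logic for illustration; the real helper reads the ranges through `BufferHelper.getBuffered(media)`:

```ts
// Illustrative re-statement of the isBuffered logic added in the hunk above.
function isBuffered(media: HTMLMediaElement | null, position: number): boolean {
  if (media) {
    const buffered = media.buffered; // BufferHelper.getBuffered(media) in the source
    for (let i = buffered.length; i--; ) {
      if (position >= buffered.start(i) && position <= buffered.end(i)) {
        return true;
      }
    }
  }
  return false;
}

// e.g. check whether the current playhead position sits inside a buffered range:
const video = document.querySelector('video');
if (video && isBuffered(video, video.currentTime)) {
  // playback can continue without an immediate stall
}
```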
@@ -6888,24 +7053,22 @@ class BufferOperationQueue {
       this.executeNext(type);
     }
   }
-  insertAbort(operation, type) {
-    const queue = this.queues[type];
-    queue.unshift(operation);
-    this.executeNext(type);
-  }
   appendBlocker(type) {
-
-
-
+    return new Promise(resolve => {
+      const operation = {
+        execute: resolve,
+        onStart: () => {},
+        onComplete: () => {},
+        onError: () => {}
+      };
+      this.append(operation, type);
     });
-
-
-
-
-
-    }
-    this.append(operation, type);
-    return promise;
+  }
+  unblockAudio(op) {
+    const queue = this.queues.audio;
+    if (queue[0] === op) {
+      this.shiftAndExecuteNext('audio');
+    }
   }
   executeNext(type) {
     const queue = this.queues[type];
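Note: `appendBlocker(type)` now returns a Promise that resolves when the blocking operation reaches the head of that SourceBuffer's queue, and the new `unblockAudio(op)` removes a specific blocking op if it is currently at the head of the audio queue (used by BufferController below). A simplified sketch of the promise-based blocker pattern, with a toy queue standing in for the real BufferOperationQueue:

```ts
// Toy serial queue illustrating the control flow only; the real queue executes
// operations against SourceBuffers and cycles on their updateend events.
type BufferOperation = {
  execute: () => void;
  onStart: () => void;
  onComplete: () => void;
  onError: (e: Error) => void;
};

class TinyOperationQueue {
  private queue: BufferOperation[] = [];

  append(op: BufferOperation): void {
    this.queue.push(op);
    if (this.queue.length === 1) {
      op.onStart();
      op.execute();
    }
  }

  // Mirrors the new appendBlocker(): resolves once everything queued before it has run.
  appendBlocker(): Promise<void> {
    return new Promise((resolve) => {
      this.append({
        execute: resolve,
        onStart: () => {},
        onComplete: () => {},
        onError: () => {},
      });
    });
  }

  // Called by the consumer when the head operation has finished.
  shiftAndExecuteNext(): void {
    this.queue.shift();
    const next = this.queue[0];
    if (next) {
      next.onStart();
      next.execute();
    }
  }
}

// Usage mirroring BufferController.blockBuffers():
//   await Promise.all(types.map(() => queue.appendBlocker()));
//   ...mutate the MediaSource safely, then shiftAndExecuteNext() per blocked queue.
```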
@@ -6937,8 +7100,9 @@ class BufferOperationQueue {
   }
 }

 const VIDEO_CODEC_PROFILE_REPLACE = /(avc[1234]|hvc1|hev1|dvh[1e]|vp09|av01)(?:\.[^.,]+)+/;
-class BufferController {
-  constructor(hls) {
+class BufferController extends Logger {
+  constructor(hls, fragmentTracker) {
+    super('buffer-controller', hls.logger);
     // The level details used to determine duration, target-duration and live
     this.details = null;
     // cache the self generated object url to detect hijack of video tag
@@ -6948,6 +7112,7 @@ class BufferController {
     // References to event listeners for each SourceBuffer, so that they can be referenced for event removal
     this.listeners = void 0;
     this.hls = void 0;
+    this.fragmentTracker = void 0;
     // The number of BUFFER_CODEC events received before any sourceBuffers are created
     this.bufferCodecEventsExpected = 0;
     // The total number of BUFFER_CODEC events received
@@ -6958,6 +7123,10 @@ class BufferController {
     this.mediaSource = null;
     // Last MP3 audio chunk appended
     this.lastMpegAudioChunk = null;
+    // Audio fragment blocked from appending until corresponding video appends or context changes
+    this.blockedAudioAppend = null;
+    // Keep track of video append position for unblocking audio
+    this.lastVideoAppendEnd = 0;
     this.appendSource = void 0;
     // counters
     this.appendErrors = {
@@ -6968,9 +7137,6 @@ class BufferController {
     this.tracks = {};
     this.pendingTracks = {};
     this.sourceBuffer = void 0;
-    this.log = void 0;
-    this.warn = void 0;
-    this.error = void 0;
     this._onEndStreaming = event => {
       if (!this.hls) {
         return;
@@ -6992,7 +7158,10 @@ class BufferController {
       this.log('Media source opened');
       if (media) {
         media.removeEventListener('emptied', this._onMediaEmptied);
-        this.
+        const durationAndRange = this.getDurationAndRange();
+        if (durationAndRange) {
+          this.updateMediaSource(durationAndRange);
+        }
         this.hls.trigger(Events.MEDIA_ATTACHED, {
           media,
           mediaSource: mediaSource
@@ -7016,15 +7185,12 @@ class BufferController {
         _objectUrl
       } = this;
       if (mediaSrc !== _objectUrl) {
-
+        this.error(`Media element src was set while attaching MediaSource (${_objectUrl} > ${mediaSrc})`);
       }
     };
     this.hls = hls;
-
+    this.fragmentTracker = fragmentTracker;
     this.appendSource = isManagedMediaSource(getMediaSource(hls.config.preferManagedMediaSource));
-    this.log = logger.log.bind(logger, logPrefix);
-    this.warn = logger.warn.bind(logger, logPrefix);
-    this.error = logger.error.bind(logger, logPrefix);
     this._initSourceBuffer();
     this.registerListeners();
   }
@@ -7036,7 +7202,13 @@ class BufferController {
     this.details = null;
     this.lastMpegAudioChunk = null;
     // @ts-ignore
-    this.hls = null;
+    this.hls = this.fragmentTracker = null;
+    // @ts-ignore
+    this._onMediaSourceOpen = this._onMediaSourceClose = null;
+    // @ts-ignore
+    this._onMediaSourceEnded = null;
+    // @ts-ignore
+    this._onStartStreaming = this._onEndStreaming = null;
   }
   registerListeners() {
     const {
@@ -7086,6 +7258,8 @@ class BufferController {
       audiovideo: 0
     };
     this.lastMpegAudioChunk = null;
+    this.blockedAudioAppend = null;
+    this.lastVideoAppendEnd = 0;
   }
   onManifestLoading() {
     this.bufferCodecEventsExpected = this._bufferCodecEventsTotal = 0;
@@ -7168,34 +7342,34 @@ class BufferController {
       mediaSource.removeEventListener('startstreaming', this._onStartStreaming);
       mediaSource.removeEventListener('endstreaming', this._onEndStreaming);
     }
+      this.mediaSource = null;
+      this._objectUrl = null;
+    }

-
-
-
-
-
-
-
+    // Detach properly the MediaSource from the HTMLMediaElement as
+    // suggested in https://github.com/w3c/media-source/issues/53.
+    if (media) {
+      media.removeEventListener('emptied', this._onMediaEmptied);
+      if (_objectUrl) {
+        self.URL.revokeObjectURL(_objectUrl);
+      }

-
-
-
-
-
-
-      }
-      media.load();
-    } else {
-      this.warn('media|source.src was changed by a third party - skip cleanup');
+      // clean up video tag src only if it's our own url. some external libraries might
+      // hijack the video tag and change its 'src' without destroying the Hls instance first
+      if (this.mediaSrc === _objectUrl) {
+        media.removeAttribute('src');
+        if (this.appendSource) {
+          removeSourceChildren(media);
         }
+        media.load();
+      } else {
+        this.warn('media|source.src was changed by a third party - skip cleanup');
       }
-    this.mediaSource = null;
       this.media = null;
-    this._objectUrl = null;
-    this.bufferCodecEventsExpected = this._bufferCodecEventsTotal;
-    this.pendingTracks = {};
-    this.tracks = {};
     }
+    this.bufferCodecEventsExpected = this._bufferCodecEventsTotal;
+    this.pendingTracks = {};
+    this.tracks = {};
     this.hls.trigger(Events.MEDIA_DETACHED, undefined);
   }
   onBufferReset() {
@@ -7203,6 +7377,7 @@ class BufferController {
       this.resetBuffer(type);
     });
     this._initSourceBuffer();
+    this.hls.resumeBuffering();
   }
   resetBuffer(type) {
     const sb = this.sourceBuffer[type];
@@ -7226,9 +7401,10 @@ class BufferController {
     const trackNames = Object.keys(data);
     trackNames.forEach(trackName => {
       if (sourceBufferCount) {
+        var _track$buffer;
         // check if SourceBuffer codec needs to change
         const track = this.tracks[trackName];
-        if (track && typeof track.buffer.changeType === 'function') {
+        if (track && typeof ((_track$buffer = track.buffer) == null ? void 0 : _track$buffer.changeType) === 'function') {
           var _trackCodec;
           const {
             id,
@@ -7298,20 +7474,54 @@ class BufferController {
     };
     operationQueue.append(operation, type, !!this.pendingTracks[type]);
   }
+  blockAudio(partOrFrag) {
+    var _this$fragmentTracker;
+    const pStart = partOrFrag.start;
+    const pTime = pStart + partOrFrag.duration * 0.05;
+    const atGap = ((_this$fragmentTracker = this.fragmentTracker.getAppendedFrag(pStart, PlaylistLevelType.MAIN)) == null ? void 0 : _this$fragmentTracker.gap) === true;
+    if (atGap) {
+      return;
+    }
+    const op = {
+      execute: () => {
+        var _this$fragmentTracker2;
+        if (this.lastVideoAppendEnd > pTime || this.sourceBuffer.video && BufferHelper.isBuffered(this.sourceBuffer.video, pTime) || ((_this$fragmentTracker2 = this.fragmentTracker.getAppendedFrag(pTime, PlaylistLevelType.MAIN)) == null ? void 0 : _this$fragmentTracker2.gap) === true) {
+          this.blockedAudioAppend = null;
+          this.operationQueue.shiftAndExecuteNext('audio');
+        }
+      },
+      onStart: () => {},
+      onComplete: () => {},
+      onError: () => {}
+    };
+    this.blockedAudioAppend = {
+      op,
+      frag: partOrFrag
+    };
+    this.operationQueue.append(op, 'audio', true);
+  }
+  unblockAudio() {
+    const blockedAudioAppend = this.blockedAudioAppend;
+    if (blockedAudioAppend) {
+      this.blockedAudioAppend = null;
+      this.operationQueue.unblockAudio(blockedAudioAppend.op);
+    }
+  }
   onBufferAppending(event, eventData) {
     const {
-      hls,
       operationQueue,
       tracks
     } = this;
     const {
       data,
       type,
+      parent,
       frag,
       part,
       chunkMeta
     } = eventData;
     const chunkStats = chunkMeta.buffering[type];
+    const sn = frag.sn;
     const bufferAppendingStart = self.performance.now();
     chunkStats.start = bufferAppendingStart;
     const fragBuffering = frag.stats.buffering;
@@ -7334,7 +7544,36 @@ class BufferController {
       checkTimestampOffset = !this.lastMpegAudioChunk || chunkMeta.id === 1 || this.lastMpegAudioChunk.sn !== chunkMeta.sn;
       this.lastMpegAudioChunk = chunkMeta;
     }
-
+
+    // Block audio append until overlapping video append
+    const videoSb = this.sourceBuffer.video;
+    if (videoSb && sn !== 'initSegment') {
+      const partOrFrag = part || frag;
+      const blockedAudioAppend = this.blockedAudioAppend;
+      if (type === 'audio' && parent !== 'main' && !this.blockedAudioAppend) {
+        const pStart = partOrFrag.start;
+        const pTime = pStart + partOrFrag.duration * 0.05;
+        const vbuffered = videoSb.buffered;
+        const vappending = this.operationQueue.current('video');
+        if (!vbuffered.length && !vappending) {
+          // wait for video before appending audio
+          this.blockAudio(partOrFrag);
+        } else if (!vappending && !BufferHelper.isBuffered(videoSb, pTime) && this.lastVideoAppendEnd < pTime) {
+          // audio is ahead of video
+          this.blockAudio(partOrFrag);
+        }
+      } else if (type === 'video') {
+        const videoAppendEnd = partOrFrag.end;
+        if (blockedAudioAppend) {
+          const audioStart = blockedAudioAppend.frag.start;
+          if (videoAppendEnd > audioStart || videoAppendEnd < this.lastVideoAppendEnd || BufferHelper.isBuffered(videoSb, audioStart)) {
+            this.unblockAudio();
+          }
+        }
+        this.lastVideoAppendEnd = videoAppendEnd;
+      }
+    }
+    const fragStart = (part || frag).start;
     const operation = {
       execute: () => {
         chunkStats.executeStart = self.performance.now();
@@ -7343,7 +7582,7 @@ class BufferController {
         if (sb) {
           const delta = fragStart - sb.timestampOffset;
           if (Math.abs(delta) >= 0.1) {
-            this.log(`Updating audio SourceBuffer timestampOffset to ${fragStart} (delta: ${delta}) sn: ${
+            this.log(`Updating audio SourceBuffer timestampOffset to ${fragStart} (delta: ${delta}) sn: ${sn})`);
             sb.timestampOffset = fragStart;
           }
         }
@@ -7410,22 +7649,21 @@
           /* with UHD content, we could get loop of quota exceeded error until
             browser is able to evict some data from sourcebuffer. Retrying can help recover.
           */
-          this.warn(`Failed ${appendErrorCount}/${hls.config.appendErrorMaxRetry} times to append segment in "${type}" sourceBuffer`);
-          if (appendErrorCount >= hls.config.appendErrorMaxRetry) {
+          this.warn(`Failed ${appendErrorCount}/${this.hls.config.appendErrorMaxRetry} times to append segment in "${type}" sourceBuffer`);
+          if (appendErrorCount >= this.hls.config.appendErrorMaxRetry) {
            event.fatal = true;
          }
        }
-        hls.trigger(Events.ERROR, event);
+        this.hls.trigger(Events.ERROR, event);
      }
    };
    operationQueue.append(operation, type, !!this.pendingTracks[type]);
  }
-
-
-
-
-
-      execute: this.removeExecutor.bind(this, type, data.startOffset, data.endOffset),
+  getFlushOp(type, start, end) {
+    return {
+      execute: () => {
+        this.removeExecutor(type, start, end);
+      },
      onStart: () => {
        // logger.debug(`[buffer-controller]: Started flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
      },
@@ -7438,12 +7676,22 @@ class BufferController {
      onError: error => {
        this.warn(`Failed to remove from ${type} SourceBuffer`, error);
      }
-    }
-
-
+    };
+  }
+  onBufferFlushing(event, data) {
+    const {
+      operationQueue
+    } = this;
+    const {
+      type,
+      startOffset,
+      endOffset
+    } = data;
+    if (type) {
+      operationQueue.append(this.getFlushOp(type, startOffset, endOffset), type);
    } else {
-      this.getSourceBufferTypes().forEach(
-        operationQueue.append(
+      this.getSourceBufferTypes().forEach(sbType => {
+        operationQueue.append(this.getFlushOp(sbType, startOffset, endOffset), sbType);
      });
    }
  }
@@ -7490,6 +7738,9 @@ class BufferController {
  // on BUFFER_EOS mark matching sourcebuffer(s) as ended and trigger checkEos()
  // an undefined data.type will mark all buffers as EOS.
  onBufferEos(event, data) {
+    if (data.type === 'video') {
+      this.unblockAudio();
+    }
    const ended = this.getSourceBufferTypes().reduce((acc, type) => {
      const sb = this.sourceBuffer[type];
      if (sb && (!data.type || data.type === type)) {
@@ -7532,10 +7783,14 @@ class BufferController {
      return;
    }
    this.details = details;
+    const durationAndRange = this.getDurationAndRange();
+    if (!durationAndRange) {
+      return;
+    }
    if (this.getSourceBufferTypes().length) {
-      this.blockBuffers(this.
+      this.blockBuffers(() => this.updateMediaSource(durationAndRange));
    } else {
-      this.
+      this.updateMediaSource(durationAndRange);
    }
  }
  trimBuffers() {
@@ -7640,9 +7895,9 @@ class BufferController {
   * 'liveDurationInfinity` is set to `true`
   * More details: https://github.com/video-dev/hls.js/issues/355
   */
-
+  getDurationAndRange() {
    if (!this.details || !this.media || !this.mediaSource || this.mediaSource.readyState !== 'open') {
-      return;
+      return null;
    }
    const {
      details,
@@ -7656,25 +7911,41 @@ class BufferController {
    if (details.live && hls.config.liveDurationInfinity) {
      // Override duration to Infinity
      mediaSource.duration = Infinity;
-
+      const len = details.fragments.length;
+      if (len && details.live && !!mediaSource.setLiveSeekableRange) {
+        const start = Math.max(0, details.fragments[0].start);
+        const end = Math.max(start, start + details.totalduration);
+        return {
+          duration: Infinity,
+          start,
+          end
+        };
+      }
+      return {
+        duration: Infinity
+      };
    } else if (levelDuration > msDuration && levelDuration > mediaDuration || !isFiniteNumber(mediaDuration)) {
-
-
-
-      // flushing already buffered portion when switching between quality level
-      this.log(`Updating Media Source duration to ${levelDuration.toFixed(3)}`);
-      mediaSource.duration = levelDuration;
+      return {
+        duration: levelDuration
+      };
    }
+    return null;
  }
-
-
-
-
-
-
-
-
-
+  updateMediaSource({
+    duration,
+    start,
+    end
+  }) {
+    if (!this.media || !this.mediaSource || this.mediaSource.readyState !== 'open') {
+      return;
+    }
+    if (isFiniteNumber(duration)) {
+      this.log(`Updating Media Source duration to ${duration.toFixed(3)}`);
+    }
+    this.mediaSource.duration = duration;
+    if (start !== undefined && end !== undefined) {
+      this.log(`Media Source duration is set to ${this.mediaSource.duration}. Setting seekable range to ${start}-${end}.`);
+      this.mediaSource.setLiveSeekableRange(start, end);
    }
  }
  checkPendingTracks() {
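Note: the former single duration update is split into `getDurationAndRange()` (pure computation, returning `null` when nothing should change) and `updateMediaSource()` (which applies the result), which is what lets the update be deferred behind `blockBuffers()` in `onLevelUpdated` above. A hedged sketch of the value shape and the apply step:

```ts
// Shape of the object passed from getDurationAndRange() to updateMediaSource().
// start/end are only present for live streams where setLiveSeekableRange applies.
type DurationAndRange = {
  duration: number; // finite level duration, or Infinity for live
  start?: number;   // first fragment start time
  end?: number;     // start + totalduration
};

// Illustrative apply step mirroring updateMediaSource() in the hunk above.
function applyDurationAndRange(mediaSource: MediaSource, d: DurationAndRange): void {
  if (mediaSource.readyState !== 'open') {
    return;
  }
  mediaSource.duration = d.duration;
  if (d.start !== undefined && d.end !== undefined) {
    mediaSource.setLiveSeekableRange(d.start, d.end);
  }
}
```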
@@ -7860,6 +8131,7 @@ class BufferController {
      }
      return;
    }
+    sb.ending = false;
    sb.ended = false;
    sb.appendBuffer(data);
  }
@@ -7879,10 +8151,14 @@ class BufferController {

    // logger.debug(`[buffer-controller]: Blocking ${buffers} SourceBuffer`);
    const blockingOperations = buffers.map(type => operationQueue.appendBlocker(type));
-
+    const audioBlocked = buffers.length > 1 && !!this.blockedAudioAppend;
+    if (audioBlocked) {
+      this.unblockAudio();
+    }
+    Promise.all(blockingOperations).then(result => {
      // logger.debug(`[buffer-controller]: Blocking operation resolved; unblocking ${buffers} SourceBuffer`);
      onUnblocked();
-      buffers.forEach(type => {
+      buffers.forEach((type, i) => {
        const sb = this.sourceBuffer[type];
        // Only cycle the queue if the SB is not updating. There's a bug in Chrome which sets the SB updating flag to
        // true when changing the MediaSource duration (https://bugs.chromium.org/p/chromium/issues/detail?id=959359&can=2&q=mediasource%20duration)
@@ -8031,6 +8307,7 @@ class CapLevelController {
  }
  onMediaDetaching() {
    this.stopCapping();
+    this.media = null;
  }
  detectPlayerSize() {
    if (this.media) {
@@ -8043,10 +8320,10 @@ class CapLevelController {
    const hls = this.hls;
    const maxLevel = this.getMaxLevel(levels.length - 1);
    if (maxLevel !== this.autoLevelCapping) {
-      logger.log(`Setting autoLevelCapping to ${maxLevel}: ${levels[maxLevel].height}p@${levels[maxLevel].bitrate} for media ${this.mediaWidth}x${this.mediaHeight}`);
+      hls.logger.log(`Setting autoLevelCapping to ${maxLevel}: ${levels[maxLevel].height}p@${levels[maxLevel].bitrate} for media ${this.mediaWidth}x${this.mediaHeight}`);
    }
    hls.autoLevelCapping = maxLevel;
-    if (hls.autoLevelCapping > this.autoLevelCapping && this.streamController) {
+    if (hls.autoLevelEnabled && hls.autoLevelCapping > this.autoLevelCapping && this.streamController) {
      // if auto level capping has a higher value for the previous one, flush the buffer using nextLevelSwitch
      // usually happen when the user go to the fullscreen mode.
      this.streamController.nextLevelSwitch();
@@ -8182,9 +8459,11 @@ class FPSController {
  }
  registerListeners() {
    this.hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
+    this.hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  }
  unregisterListeners() {
    this.hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
+    this.hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  }
  destroy() {
    if (this.timer) {
@@ -8206,6 +8485,9 @@ class FPSController {
      this.timer = self.setInterval(this.checkFPSInterval.bind(this), config.fpsDroppedMonitoringPeriod);
    }
  }
+  onMediaDetaching() {
+    this.media = null;
+  }
  checkFPS(video, decodedFrames, droppedFrames) {
    const currentTime = performance.now();
    if (decodedFrames) {
@@ -8221,10 +8503,10 @@ class FPSController {
        totalDroppedFrames: droppedFrames
      });
      if (droppedFPS > 0) {
-        // logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
+        // hls.logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
        if (currentDropped > hls.config.fpsDroppedMonitoringThreshold * currentDecoded) {
          let currentLevel = hls.currentLevel;
-          logger.warn('drop FPS ratio greater than max allowed value for currentLevel: ' + currentLevel);
+          hls.logger.warn('drop FPS ratio greater than max allowed value for currentLevel: ' + currentLevel);
          if (currentLevel > 0 && (hls.autoLevelCapping === -1 || hls.autoLevelCapping >= currentLevel)) {
            currentLevel = currentLevel - 1;
            hls.trigger(Events.FPS_DROP_LEVEL_CAPPING, {
@@ -8257,14 +8539,14 @@ class FPSController {
  }
 }

 const PATHWAY_PENALTY_DURATION_MS = 300000;
-class ContentSteeringController {
+class ContentSteeringController extends Logger {
  constructor(hls) {
+    super('content-steering', hls.logger);
    this.hls = void 0;
-    this.log = void 0;
    this.loader = null;
    this.uri = null;
    this.pathwayId = '.';
-    this.
+    this._pathwayPriority = null;
    this.timeToLoad = 300;
    this.reloadTimer = -1;
    this.updated = 0;
@@ -8275,7 +8557,6 @@ class ContentSteeringController {
    this.subtitleTracks = null;
    this.penalizedPathways = {};
    this.hls = hls;
-    this.log = logger.log.bind(logger, `[content-steering]:`);
    this.registerListeners();
  }
  registerListeners() {
@@ -8295,6 +8576,20 @@ class ContentSteeringController {
    hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
    hls.off(Events.ERROR, this.onError, this);
  }
+  pathways() {
+    return (this.levels || []).reduce((pathways, level) => {
+      if (pathways.indexOf(level.pathwayId) === -1) {
+        pathways.push(level.pathwayId);
+      }
+      return pathways;
+    }, []);
+  }
+  get pathwayPriority() {
+    return this._pathwayPriority;
+  }
+  set pathwayPriority(pathwayPriority) {
+    this.updatePathwayPriority(pathwayPriority);
+  }
  startLoad() {
    this.started = true;
    this.clearTimeout();
@@ -8368,7 +8663,7 @@ class ContentSteeringController {
    } = data;
    if ((errorAction == null ? void 0 : errorAction.action) === NetworkErrorAction.SendAlternateToPenaltyBox && errorAction.flags === ErrorActionFlags.MoveAllAlternatesMatchingHost) {
      const levels = this.levels;
-      let pathwayPriority = this.
+      let pathwayPriority = this._pathwayPriority;
      let errorPathway = this.pathwayId;
      if (data.context) {
        const {
@@ -8387,19 +8682,14 @@ class ContentSteeringController {
      }
      if (!pathwayPriority && levels) {
        // If PATHWAY-PRIORITY was not provided, list pathways for error handling
-        pathwayPriority =
-          if (pathways.indexOf(level.pathwayId) === -1) {
-            pathways.push(level.pathwayId);
-          }
-          return pathways;
-        }, []);
+        pathwayPriority = this.pathways();
      }
      if (pathwayPriority && pathwayPriority.length > 1) {
        this.updatePathwayPriority(pathwayPriority);
        errorAction.resolved = this.pathwayId !== errorPathway;
      }
      if (!errorAction.resolved) {
-
+        this.warn(`Could not resolve ${data.details} ("${data.error.message}") with content-steering for Pathway: ${errorPathway} levels: ${levels ? levels.length : levels} priorities: ${JSON.stringify(pathwayPriority)} penalized: ${JSON.stringify(this.penalizedPathways)}`);
      }
    }
  }
@@ -8426,7 +8716,7 @@ class ContentSteeringController {
    return this.levels.filter(level => pathwayId === level.pathwayId);
  }
  updatePathwayPriority(pathwayPriority) {
-    this.
+    this._pathwayPriority = pathwayPriority;
    let levels;

    // Evaluate if we should remove the pathway from the penalized list
@@ -8570,7 +8860,7 @@ class ContentSteeringController {
      onSuccess: (response, stats, context, networkDetails) => {
        this.log(`Loaded steering manifest: "${url}"`);
        const steeringData = response.data;
-        if (steeringData.VERSION !== 1) {
+        if ((steeringData == null ? void 0 : steeringData.VERSION) !== 1) {
          this.log(`Steering VERSION ${steeringData.VERSION} not supported!`);
          return;
        }
@@ -9478,7 +9768,7 @@ const hlsDefaultConfig = _objectSpread2(_objectSpread2({
 });
 function timelineConfig() {
   return {
-    cueHandler:
+    cueHandler: HevcVideoParser,
     // used by timeline-controller
     enableWebVTT: false,
     // used by timeline-controller
@@ -9509,7 +9799,7 @@ function timelineConfig() {
 /**
  * @ignore
  */
-function mergeConfig(defaultConfig, userConfig) {
+function mergeConfig(defaultConfig, userConfig, logger) {
   if ((userConfig.liveSyncDurationCount || userConfig.liveMaxLatencyDurationCount) && (userConfig.liveSyncDuration || userConfig.liveMaxLatencyDuration)) {
     throw new Error("Illegal hls.js config: don't mix up liveSyncDurationCount/liveMaxLatencyDurationCount and liveSyncDuration/liveMaxLatencyDuration");
   }
@@ -9579,7 +9869,7 @@ function deepCpy(obj) {
 /**
  * @ignore
  */
-function enableStreamingMode(config) {
+function enableStreamingMode(config, logger) {
   const currentLoader = config.loader;
   if (currentLoader !== FetchLoader && currentLoader !== XhrLoader) {
     // If a developer has configured their own loader, respect that choice
@@ -9596,10 +9886,9 @@ function enableStreamingMode(config) {
   }
 }

-let chromeOrFirefox;
 class LevelController extends BasePlaylistController {
   constructor(hls, contentSteeringController) {
-    super(hls, '
+    super(hls, 'level-controller');
     this._levels = [];
     this._firstLevel = -1;
     this._maxAutoLevel = -1;
@@ -9670,23 +9959,15 @@ class LevelController extends BasePlaylistController {
     let videoCodecFound = false;
     let audioCodecFound = false;
     data.levels.forEach(levelParsed => {
-      var
+      var _videoCodec;
       const attributes = levelParsed.attrs;
-
-      // erase audio codec info if browser does not support mp4a.40.34.
-      // demuxer will autodetect codec and fallback to mpeg/audio
       let {
         audioCodec,
         videoCodec
       } = levelParsed;
-      if (((_audioCodec = audioCodec) == null ? void 0 : _audioCodec.indexOf('mp4a.40.34')) !== -1) {
-        chromeOrFirefox || (chromeOrFirefox = /chrome|firefox/i.test(navigator.userAgent));
-        if (chromeOrFirefox) {
-          levelParsed.audioCodec = audioCodec = undefined;
-        }
-      }
       if (audioCodec) {
-
+        // Returns empty and set to undefined for 'mp4a.40.34' with fallback to 'audio/mpeg' SourceBuffer
+        levelParsed.audioCodec = audioCodec = getCodecCompatibleName(audioCodec, preferManagedMediaSource) || undefined;
       }
       if (((_videoCodec = videoCodec) == null ? void 0 : _videoCodec.indexOf('avc1')) === 0) {
         videoCodec = levelParsed.videoCodec = convertAVC1ToAVCOTI(videoCodec);
@@ -9981,6 +10262,25 @@ class LevelController extends BasePlaylistController {
   set startLevel(newLevel) {
     this._startLevel = newLevel;
   }
+  get pathwayPriority() {
+    if (this.steering) {
+      return this.steering.pathwayPriority;
+    }
+    return null;
+  }
+  set pathwayPriority(pathwayPriority) {
+    if (this.steering) {
+      const pathwaysList = this.steering.pathways();
+      const filteredPathwayPriority = pathwayPriority.filter(pathwayId => {
+        return pathwaysList.indexOf(pathwayId) !== -1;
+      });
+      if (pathwayPriority.length < 1) {
+        this.warn(`pathwayPriority ${pathwayPriority} should contain at least one pathway from list: ${pathwaysList}`);
+        return;
+      }
+      this.steering.pathwayPriority = filteredPathwayPriority;
+    }
+  }
   onError(event, data) {
     if (data.fatal || !data.context) {
       return;
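Note: LevelController now exposes `pathwayPriority`, delegating to the content-steering controller and filtering the requested list down to pathway IDs that exist in the loaded levels (the emptiness check is made against the unfiltered input, as written above). How this surfaces on the public `Hls` object is not part of this hunk, so the hedged sketch below talks to the controller shape directly:

```ts
// Minimal assumed surface of the steering controller, limited to members shown above.
interface SteeringLike {
  pathways(): string[];
  pathwayPriority: string[] | null;
}

// Mirrors the setter added to LevelController: drop unknown pathway IDs before
// handing the list to content steering.
function setPathwayPriority(steering: SteeringLike, requested: string[]): void {
  if (requested.length < 1) {
    // The controller warns and returns in this case.
    return;
  }
  const known = steering.pathways();
  const filtered = requested.filter((id) => known.indexOf(id) !== -1);
  steering.pathwayPriority = filtered;
}

// e.g. prefer pathway "B" over "A" (IDs are illustrative):
// setPathwayPriority(steering, ['B', 'A']);
```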
@@ -10028,7 +10328,12 @@ class LevelController extends BasePlaylistController {
       if (curLevel.fragmentError === 0) {
         curLevel.loadError = 0;
       }
-
+      // Ignore matching details populated by loading a Media Playlist directly
+      let previousDetails = curLevel.details;
+      if (previousDetails === data.details && previousDetails.advanced) {
+        previousDetails = undefined;
+      }
+      this.playlistLoaded(level, data, previousDetails);
     } else if ((_data$deliveryDirecti2 = data.deliveryDirectives) != null && _data$deliveryDirecti2.skip) {
       // received a delta playlist update that cannot be merged
       details.deltaUpdateFailed = true;
@@ -10206,13 +10511,16 @@ class FragmentTracker {
   * If not found any Fragment, return null
   */
   getBufferedFrag(position, levelType) {
+    return this.getFragAtPos(position, levelType, true);
+  }
+  getFragAtPos(position, levelType, buffered) {
     const {
       fragments
     } = this;
     const keys = Object.keys(fragments);
     for (let i = keys.length; i--;) {
       const fragmentEntity = fragments[keys[i]];
-      if ((fragmentEntity == null ? void 0 : fragmentEntity.body.type) === levelType && fragmentEntity.buffered) {
+      if ((fragmentEntity == null ? void 0 : fragmentEntity.body.type) === levelType && (!buffered || fragmentEntity.buffered)) {
         const frag = fragmentEntity.body;
         if (frag.start <= position && position <= frag.end) {
           return frag;
@@ -10467,7 +10775,8 @@ class FragmentTracker {
     const {
       frag,
       part,
-      timeRanges
+      timeRanges,
+      type
     } = data;
     if (frag.sn === 'initSegment') {
       return;
@@ -10482,10 +10791,8 @@ class FragmentTracker {
     }
     // Store the latest timeRanges loaded in the buffer
     this.timeRanges = timeRanges;
-
-
-      this.detectEvictedFragments(elementaryStream, timeRange, playlistType, part);
-    });
+    const timeRange = timeRanges[type];
+    this.detectEvictedFragments(type, timeRange, playlistType, part);
   }
   onFragBuffered(event, data) {
     this.detectPartialFragments(data);
@@ -10814,8 +11121,8 @@ function createLoaderContext(frag, part = null) {
   var _frag$decryptdata;
   let byteRangeStart = start;
   let byteRangeEnd = end;
-  if (frag.sn === 'initSegment' && ((_frag$decryptdata = frag.decryptdata) == null ? void 0 : _frag$decryptdata.method)
-    // MAP segment encrypted with method 'AES-128', when served with HTTP Range,
+  if (frag.sn === 'initSegment' && isMethodFullSegmentAesCbc((_frag$decryptdata = frag.decryptdata) == null ? void 0 : _frag$decryptdata.method)) {
+    // MAP segment encrypted with method 'AES-128' or 'AES-256' (cbc), when served with HTTP Range,
     // has the unencrypted size specified in the range.
     // Ref: https://tools.ietf.org/html/draft-pantos-hls-rfc8216bis-08#section-6.3.6
     const fragmentLen = end - start;
@@ -10848,6 +11155,9 @@ function createGapLoadError(frag, part) {
   (part ? part : frag).stats.aborted = true;
   return new LoadError(errorData);
 }
+function isMethodFullSegmentAesCbc(method) {
+  return method === 'AES-128' || method === 'AES-256';
+}
 class LoadError extends Error {
   constructor(data) {
     super(data.error.message);
@@ -10993,6 +11303,8 @@ class KeyLoader {
         }
         return this.loadKeyEME(keyInfo, frag);
       case 'AES-128':
+      case 'AES-256':
+      case 'AES-256-CTR':
         return this.loadKeyHTTP(keyInfo, frag);
       default:
         return Promise.reject(this.createKeyLoadError(frag, ErrorDetails.KEY_LOAD_ERROR, new Error(`Key supplied with unsupported METHOD: "${decryptdata.method}"`)));
@@ -11128,8 +11440,9 @@ class KeyLoader {
  * we are limiting the task execution per call stack to exactly one, but scheduling/post-poning further
  * task processing on the next main loop iteration (also known as "next tick" in the Node/JS runtime lingo).
  */
-class TaskLoop {
-  constructor() {
+class TaskLoop extends Logger {
+  constructor(label, logger) {
+    super(label, logger);
     this._boundTick = void 0;
     this._tickTimer = null;
     this._tickInterval = null;
@@ -11397,33 +11710,61 @@ function alignMediaPlaylistByPDT(details, refDetails) {
   }
 }

 class AESCrypto {
-  constructor(subtle, iv) {
+  constructor(subtle, iv, aesMode) {
     this.subtle = void 0;
     this.aesIV = void 0;
+    this.aesMode = void 0;
     this.subtle = subtle;
     this.aesIV = iv;
+    this.aesMode = aesMode;
   }
   decrypt(data, key) {
-
-
-
-
+    switch (this.aesMode) {
+      case DecrypterAesMode.cbc:
+        return this.subtle.decrypt({
+          name: 'AES-CBC',
+          iv: this.aesIV
+        }, key, data);
+      case DecrypterAesMode.ctr:
+        return this.subtle.decrypt({
+          name: 'AES-CTR',
+          counter: this.aesIV,
+          length: 64
+        },
+        //64 : NIST SP800-38A standard suggests that the counter should occupy half of the counter block
+        key, data);
+      default:
+        throw new Error(`[AESCrypto] invalid aes mode ${this.aesMode}`);
+    }
   }
 }

 class FastAESKey {
-  constructor(subtle, key) {
+  constructor(subtle, key, aesMode) {
     this.subtle = void 0;
     this.key = void 0;
+    this.aesMode = void 0;
     this.subtle = subtle;
     this.key = key;
+    this.aesMode = aesMode;
   }
   expandKey() {
+    const subtleAlgoName = getSubtleAlgoName(this.aesMode);
     return this.subtle.importKey('raw', this.key, {
-      name:
+      name: subtleAlgoName
     }, false, ['encrypt', 'decrypt']);
   }
 }
+function getSubtleAlgoName(aesMode) {
+  switch (aesMode) {
+    case DecrypterAesMode.cbc:
+      return 'AES-CBC';
+    case DecrypterAesMode.ctr:
+      return 'AES-CTR';
+    default:
+      throw new Error(`[FastAESKey] invalid aes mode ${aesMode}`);
+  }
+}

 // PKCS7
 function removePadding(array) {
@@ -11673,7 +12014,8 @@ class Decrypter {
     this.currentIV = null;
     this.currentResult = null;
     this.useSoftware = void 0;
-    this.
+    this.enableSoftwareAES = void 0;
+    this.enableSoftwareAES = config.enableSoftwareAES;
     this.removePKCS7Padding = removePKCS7Padding;
     // built in decryptor expects PKCS7 padding
     if (removePKCS7Padding) {
@@ -11724,10 +12066,10 @@ class Decrypter {
       this.softwareDecrypter = null;
     }
   }
-  decrypt(data, key, iv) {
+  decrypt(data, key, iv, aesMode) {
     if (this.useSoftware) {
       return new Promise((resolve, reject) => {
-        this.softwareDecrypt(new Uint8Array(data), key, iv);
+        this.softwareDecrypt(new Uint8Array(data), key, iv, aesMode);
         const decryptResult = this.flush();
         if (decryptResult) {
           resolve(decryptResult.buffer);
@@ -11736,17 +12078,21 @@ class Decrypter {
         }
       });
     }
-    return this.webCryptoDecrypt(new Uint8Array(data), key, iv);
+    return this.webCryptoDecrypt(new Uint8Array(data), key, iv, aesMode);
   }

   // Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
   // data is handled in the flush() call
-  softwareDecrypt(data, key, iv) {
+  softwareDecrypt(data, key, iv, aesMode) {
     const {
       currentIV,
       currentResult,
       remainderData
     } = this;
+    if (aesMode !== DecrypterAesMode.cbc || key.byteLength !== 16) {
+      logger.warn('SoftwareDecrypt: can only handle AES-128-CBC');
+      return null;
+    }
     this.logOnce('JS AES decrypt');
     // The output is staggered during progressive parsing - the current result is cached, and emitted on the next call
     // This is done in order to strip PKCS7 padding, which is found at the end of each segment. We only know we've reached
@@ -11779,13 +12125,13 @@ class Decrypter {
     }
     return result;
   }
-  webCryptoDecrypt(data, key, iv) {
+  webCryptoDecrypt(data, key, iv, aesMode) {
     if (this.key !== key || !this.fastAesKey) {
       if (!this.subtle) {
-        return Promise.resolve(this.onWebCryptoError(data, key, iv));
+        return Promise.resolve(this.onWebCryptoError(data, key, iv, aesMode));
       }
       this.key = key;
-      this.fastAesKey = new FastAESKey(this.subtle, key);
+      this.fastAesKey = new FastAESKey(this.subtle, key, aesMode);
     }
     return this.fastAesKey.expandKey().then(aesKey => {
       // decrypt using web crypto
@@ -11793,22 +12139,25 @@ class Decrypter {
         return Promise.reject(new Error('web crypto not initialized'));
       }
       this.logOnce('WebCrypto AES decrypt');
-      const crypto = new AESCrypto(this.subtle, new Uint8Array(iv));
+      const crypto = new AESCrypto(this.subtle, new Uint8Array(iv), aesMode);
       return crypto.decrypt(data.buffer, aesKey);
     }).catch(err => {
       logger.warn(`[decrypter]: WebCrypto Error, disable WebCrypto API, ${err.name}: ${err.message}`);
-      return this.onWebCryptoError(data, key, iv);
+      return this.onWebCryptoError(data, key, iv, aesMode);
     });
   }
-  onWebCryptoError(data, key, iv) {
-
-
-
-
-
-
+  onWebCryptoError(data, key, iv, aesMode) {
+    const enableSoftwareAES = this.enableSoftwareAES;
+    if (enableSoftwareAES) {
+      this.useSoftware = true;
+      this.logEnabled = true;
+      this.softwareDecrypt(data, key, iv, aesMode);
+      const decryptResult = this.flush();
+      if (decryptResult) {
+        return decryptResult.buffer;
       }
-    throw new Error('WebCrypto and softwareDecrypt: failed to decrypt data');
+    }
+    throw new Error('WebCrypto' + (enableSoftwareAES ? ' and softwareDecrypt' : '') + ': failed to decrypt data');
   }
   getValidChunk(data) {
     let currentChunk = data;
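Note: an `aesMode` argument is now threaded through AESCrypto, FastAESKey and Decrypter so that the `AES-256` (CBC) and `AES-256-CTR` key methods added in the key loader can use WebCrypto, while the JavaScript fallback remains limited to AES-128-CBC. A hedged sketch of how a caller might map an HLS key METHOD onto the new argument; the enum member names come from the hunks above, while the numeric values and the mapping helper are assumptions for illustration:

```ts
// Member names mirror the DecrypterAesMode usage above; values are illustrative.
enum DecrypterAesMode {
  cbc,
  ctr,
}

// Hypothetical mapping from EXT-X-KEY METHOD to the aesMode argument that
// Decrypter.decrypt(data, key, iv, aesMode) now expects.
function aesModeForMethod(method: string): DecrypterAesMode {
  switch (method) {
    case 'AES-128':
    case 'AES-256':
      return DecrypterAesMode.cbc; // full-segment CBC (see isMethodFullSegmentAesCbc)
    case 'AES-256-CTR':
      return DecrypterAesMode.ctr;
    default:
      throw new Error(`unsupported key METHOD: ${method}`);
  }
}

// decrypter.decrypt(segment, keyBytes, ivBytes, aesModeForMethod('AES-256'))
// would then select the 'AES-CBC' WebCrypto algorithm via getSubtleAlgoName().
```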
@@ -11859,7 +12208,7 @@ const State = {
 };
 class BaseStreamController extends TaskLoop {
   constructor(hls, fragmentTracker, keyLoader, logPrefix, playlistType) {
-    super();
+    super(logPrefix, hls.logger);
     this.hls = void 0;
     this.fragPrevious = null;
     this.fragCurrent = null;
@@ -11884,22 +12233,98 @@ class BaseStreamController extends TaskLoop {
     this.startFragRequested = false;
     this.decrypter = void 0;
     this.initPTS = [];
-    this.
-    this.
-    this.
-
-
+    this.buffering = true;
+    this.loadingParts = false;
+    this.onMediaSeeking = () => {
+      const {
+        config,
+        fragCurrent,
+        media,
+        mediaBuffer,
+        state
+      } = this;
+      const currentTime = media ? media.currentTime : 0;
+      const bufferInfo = BufferHelper.bufferInfo(mediaBuffer ? mediaBuffer : media, currentTime, config.maxBufferHole);
+      this.log(`media seeking to ${isFiniteNumber(currentTime) ? currentTime.toFixed(3) : currentTime}, state: ${state}`);
+      if (this.state === State.ENDED) {
+        this.resetLoadingState();
+      } else if (fragCurrent) {
+        // Seeking while frag load is in progress
+        const tolerance = config.maxFragLookUpTolerance;
+        const fragStartOffset = fragCurrent.start - tolerance;
+        const fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance;
+        // if seeking out of buffered range or into new one
+        if (!bufferInfo.len || fragEndOffset < bufferInfo.start || fragStartOffset > bufferInfo.end) {
+          const pastFragment = currentTime > fragEndOffset;
+          // if the seek position is outside the current fragment range
+          if (currentTime < fragStartOffset || pastFragment) {
+            if (pastFragment && fragCurrent.loader) {
+              this.log('seeking outside of buffer while fragment load in progress, cancel fragment load');
+              fragCurrent.abortRequests();
+              this.resetLoadingState();
+            }
+            this.fragPrevious = null;
+          }
+        }
+      }
+      if (media) {
+        // Remove gap fragments
+        this.fragmentTracker.removeFragmentsInRange(currentTime, Infinity, this.playlistType, true);
+        this.lastCurrentTime = currentTime;
+        if (!this.loadingParts) {
+          const bufferEnd = Math.max(bufferInfo.end, currentTime);
+          const shouldLoadParts = this.shouldLoadParts(this.getLevelDetails(), bufferEnd);
+          if (shouldLoadParts) {
+            this.log(`LL-Part loading ON after seeking to ${currentTime.toFixed(2)} with buffer @${bufferEnd.toFixed(2)}`);
+            this.loadingParts = shouldLoadParts;
+          }
+        }
+      }
+
+      // in case seeking occurs although no media buffered, adjust startPosition and nextLoadPosition to seek target
+      if (!this.loadedmetadata && !bufferInfo.len) {
+        this.nextLoadPosition = this.startPosition = currentTime;
+      }
+
+      // Async tick to speed up processing
+      this.tickImmediate();
+    };
+    this.onMediaEnded = () => {
+      // reset startPosition and lastCurrentTime to restart playback @ stream beginning
+      this.startPosition = this.lastCurrentTime = 0;
+      if (this.playlistType === PlaylistLevelType.MAIN) {
+        this.hls.trigger(Events.MEDIA_ENDED, {
+          stalled: false
+        });
+      }
+    };
     this.playlistType = playlistType;
-    this.logPrefix = logPrefix;
-    this.log = logger.log.bind(logger, `${logPrefix}:`);
-    this.warn = logger.warn.bind(logger, `${logPrefix}:`);
     this.hls = hls;
     this.fragmentLoader = new FragmentLoader(hls.config);
     this.keyLoader = keyLoader;
     this.fragmentTracker = fragmentTracker;
     this.config = hls.config;
     this.decrypter = new Decrypter(hls.config);
+  }
+  registerListeners() {
+    const {
+      hls
+    } = this;
+    hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
+    hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
+    hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
     hls.on(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
+    hls.on(Events.ERROR, this.onError, this);
+  }
+  unregisterListeners() {
+    const {
+      hls
+    } = this;
+    hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
+    hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
+    hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
+    hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
+    hls.off(Events.ERROR, this.onError, this);
   }
   doTick() {
     this.onTickEnd();
@@ -11923,6 +12348,12 @@ class BaseStreamController extends TaskLoop {
     this.clearNextTick();
     this.state = State.STOPPED;
   }
+  pauseBuffering() {
+    this.buffering = false;
+  }
+  resumeBuffering() {
+    this.buffering = true;
+  }
   _streamEnded(bufferInfo, levelDetails) {
     // If playlist is live, there is another buffered range after the current range, nothing buffered, media is detached,
     // of nothing loading/loaded return false
@@ -11953,10 +12384,8 @@ class BaseStreamController extends TaskLoop {
|
|
11953
12384
|
}
|
11954
12385
|
onMediaAttached(event, data) {
|
11955
12386
|
const media = this.media = this.mediaBuffer = data.media;
|
11956
|
-
|
11957
|
-
|
11958
|
-
media.addEventListener('seeking', this.onvseeking);
|
11959
|
-
media.addEventListener('ended', this.onvended);
|
12387
|
+
media.addEventListener('seeking', this.onMediaSeeking);
|
12388
|
+
media.addEventListener('ended', this.onMediaEnded);
|
11960
12389
|
const config = this.config;
|
11961
12390
|
if (this.levels && config.autoStartLoad && this.state === State.STOPPED) {
|
11962
12391
|
this.startLoad(config.startPosition);
|
@@ -11970,10 +12399,9 @@ class BaseStreamController extends TaskLoop {
|
|
11970
12399
|
}
|
11971
12400
|
|
11972
12401
|
// remove video listeners
|
11973
|
-
if (media
|
11974
|
-
media.removeEventListener('seeking', this.
|
11975
|
-
media.removeEventListener('ended', this.
|
11976
|
-
this.onvseeking = this.onvended = null;
|
12402
|
+
if (media) {
|
12403
|
+
media.removeEventListener('seeking', this.onMediaSeeking);
|
12404
|
+
media.removeEventListener('ended', this.onMediaEnded);
|
11977
12405
|
}
|
11978
12406
|
if (this.keyLoader) {
|
11979
12407
|
this.keyLoader.detach();
|
@@ -11983,66 +12411,17 @@ class BaseStreamController extends TaskLoop {
|
|
11983
12411
|
this.fragmentTracker.removeAllFragments();
|
11984
12412
|
this.stopLoad();
|
11985
12413
|
}
|
11986
|
-
|
11987
|
-
|
11988
|
-
config,
|
11989
|
-
fragCurrent,
|
11990
|
-
media,
|
11991
|
-
mediaBuffer,
|
11992
|
-
state
|
11993
|
-
} = this;
|
11994
|
-
const currentTime = media ? media.currentTime : 0;
|
11995
|
-
const bufferInfo = BufferHelper.bufferInfo(mediaBuffer ? mediaBuffer : media, currentTime, config.maxBufferHole);
|
11996
|
-
this.log(`media seeking to ${isFiniteNumber(currentTime) ? currentTime.toFixed(3) : currentTime}, state: ${state}`);
|
11997
|
-
if (this.state === State.ENDED) {
|
11998
|
-
this.resetLoadingState();
|
11999
|
-
} else if (fragCurrent) {
|
12000
|
-
// Seeking while frag load is in progress
|
12001
|
-
const tolerance = config.maxFragLookUpTolerance;
|
12002
|
-
const fragStartOffset = fragCurrent.start - tolerance;
|
12003
|
-
const fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance;
|
12004
|
-
// if seeking out of buffered range or into new one
|
12005
|
-
if (!bufferInfo.len || fragEndOffset < bufferInfo.start || fragStartOffset > bufferInfo.end) {
|
12006
|
-
const pastFragment = currentTime > fragEndOffset;
|
12007
|
-
// if the seek position is outside the current fragment range
|
12008
|
-
if (currentTime < fragStartOffset || pastFragment) {
|
12009
|
-
if (pastFragment && fragCurrent.loader) {
|
12010
|
-
this.log('seeking outside of buffer while fragment load in progress, cancel fragment load');
|
12011
|
-
fragCurrent.abortRequests();
|
12012
|
-
this.resetLoadingState();
|
12013
|
-
}
|
12014
|
-
this.fragPrevious = null;
|
12015
|
-
}
|
12016
|
-
}
|
12017
|
-
}
|
12018
|
-
if (media) {
|
12019
|
-
// Remove gap fragments
|
12020
|
-
this.fragmentTracker.removeFragmentsInRange(currentTime, Infinity, this.playlistType, true);
|
12021
|
-
this.lastCurrentTime = currentTime;
|
12022
|
-
}
|
12023
|
-
|
12024
|
-
// in case seeking occurs although no media buffered, adjust startPosition and nextLoadPosition to seek target
|
12025
|
-
if (!this.loadedmetadata && !bufferInfo.len) {
|
12026
|
-
this.nextLoadPosition = this.startPosition = currentTime;
|
12027
|
-
}
|
12028
|
-
|
12029
|
-
// Async tick to speed up processing
|
12030
|
-
this.tickImmediate();
|
12031
|
-
}
|
12032
|
-
onMediaEnded() {
|
12033
|
-
// reset startPosition and lastCurrentTime to restart playback @ stream beginning
|
12034
|
-
this.startPosition = this.lastCurrentTime = 0;
|
12035
|
-
}
|
12414
|
+
onManifestLoading() {}
|
12415
|
+
onError(event, data) {}
|
12036
12416
|
onManifestLoaded(event, data) {
|
12037
12417
|
this.startTimeOffset = data.startTimeOffset;
|
12038
12418
|
this.initPTS = [];
|
12039
12419
|
}
|
12040
12420
|
onHandlerDestroying() {
|
12041
|
-
this.hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
|
12042
12421
|
this.stopLoad();
|
12043
12422
|
super.onHandlerDestroying();
|
12044
12423
|
// @ts-ignore
|
12045
|
-
this.hls = null;
|
12424
|
+
this.hls = this.onMediaSeeking = this.onMediaEnded = null;
|
12046
12425
|
}
|
12047
12426
|
onHandlerDestroyed() {
|
12048
12427
|
this.state = State.STOPPED;
|
@@ -12176,10 +12555,10 @@ class BaseStreamController extends TaskLoop {
|
|
12176
12555
|
const decryptData = frag.decryptdata;
|
12177
12556
|
|
12178
12557
|
// check to see if the payload needs to be decrypted
|
12179
|
-
if (payload && payload.byteLength > 0 && decryptData != null && decryptData.key && decryptData.iv && decryptData.method
|
12558
|
+
if (payload && payload.byteLength > 0 && decryptData != null && decryptData.key && decryptData.iv && isFullSegmentEncryption(decryptData.method)) {
|
12180
12559
|
const startTime = self.performance.now();
|
12181
12560
|
// decrypt init segment data
|
12182
|
-
return this.decrypter.decrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer).catch(err => {
|
12561
|
+
return this.decrypter.decrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer, getAesModeFromFullSegmentMethod(decryptData.method)).catch(err => {
|
12183
12562
|
hls.trigger(Events.ERROR, {
|
12184
12563
|
type: ErrorTypes.MEDIA_ERROR,
|
12185
12564
|
details: ErrorDetails.FRAG_DECRYPT_ERROR,
|
@@ -12220,7 +12599,9 @@ class BaseStreamController extends TaskLoop {
|
|
12220
12599
|
throw new Error('init load aborted, missing levels');
|
12221
12600
|
}
|
12222
12601
|
const stats = data.frag.stats;
|
12223
|
-
this.state
|
12602
|
+
if (this.state !== State.STOPPED) {
|
12603
|
+
this.state = State.IDLE;
|
12604
|
+
}
|
12224
12605
|
data.frag.data = new Uint8Array(data.payload);
|
12225
12606
|
stats.parsing.start = stats.buffering.start = self.performance.now();
|
12226
12607
|
stats.parsing.end = stats.buffering.end = self.performance.now();
|
@@ -12291,7 +12672,7 @@ class BaseStreamController extends TaskLoop {
|
|
12291
12672
|
}
|
12292
12673
|
let keyLoadingPromise = null;
|
12293
12674
|
if (frag.encrypted && !((_frag$decryptdata = frag.decryptdata) != null && _frag$decryptdata.key)) {
|
12294
|
-
this.log(`Loading key for ${frag.sn} of [${details.startSN}-${details.endSN}], ${this.
|
12675
|
+
this.log(`Loading key for ${frag.sn} of [${details.startSN}-${details.endSN}], ${this.playlistType === PlaylistLevelType.MAIN ? 'level' : 'track'} ${frag.level}`);
|
12295
12676
|
this.state = State.KEY_LOADING;
|
12296
12677
|
this.fragCurrent = frag;
|
12297
12678
|
keyLoadingPromise = this.keyLoader.load(frag).then(keyLoadedData => {
|
@@ -12312,8 +12693,16 @@ class BaseStreamController extends TaskLoop {
|
|
12312
12693
|
} else if (!frag.encrypted && details.encryptedFragments.length) {
|
12313
12694
|
this.keyLoader.loadClear(frag, details.encryptedFragments);
|
12314
12695
|
}
|
12696
|
+
const fragPrevious = this.fragPrevious;
|
12697
|
+
if (frag.sn !== 'initSegment' && (!fragPrevious || frag.sn !== fragPrevious.sn)) {
|
12698
|
+
const shouldLoadParts = this.shouldLoadParts(level.details, frag.end);
|
12699
|
+
if (shouldLoadParts !== this.loadingParts) {
|
12700
|
+
this.log(`LL-Part loading ${shouldLoadParts ? 'ON' : 'OFF'} loading sn ${fragPrevious == null ? void 0 : fragPrevious.sn}->${frag.sn}`);
|
12701
|
+
this.loadingParts = shouldLoadParts;
|
12702
|
+
}
|
12703
|
+
}
|
12315
12704
|
targetBufferTime = Math.max(frag.start, targetBufferTime || 0);
|
12316
|
-
if (this.
|
12705
|
+
if (this.loadingParts && frag.sn !== 'initSegment') {
|
12317
12706
|
const partList = details.partList;
|
12318
12707
|
if (partList && progressCallback) {
|
12319
12708
|
if (targetBufferTime > frag.end && details.fragmentHint) {
|
@@ -12322,7 +12711,7 @@ class BaseStreamController extends TaskLoop {
|
|
12322
12711
|
const partIndex = this.getNextPart(partList, frag, targetBufferTime);
|
12323
12712
|
if (partIndex > -1) {
|
12324
12713
|
const part = partList[partIndex];
|
12325
|
-
this.log(`Loading part sn: ${frag.sn} p: ${part.index} cc: ${frag.cc} of playlist [${details.startSN}-${details.endSN}] parts [0-${partIndex}-${partList.length - 1}] ${this.
|
12714
|
+
this.log(`Loading part sn: ${frag.sn} p: ${part.index} cc: ${frag.cc} of playlist [${details.startSN}-${details.endSN}] parts [0-${partIndex}-${partList.length - 1}] ${this.playlistType === PlaylistLevelType.MAIN ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
|
12326
12715
|
this.nextLoadPosition = part.start + part.duration;
|
12327
12716
|
this.state = State.FRAG_LOADING;
|
12328
12717
|
let _result;
|
@@ -12351,7 +12740,14 @@ class BaseStreamController extends TaskLoop {
|
|
12351
12740
|
}
|
12352
12741
|
}
|
12353
12742
|
}
|
12354
|
-
|
12743
|
+
if (frag.sn !== 'initSegment' && this.loadingParts) {
|
12744
|
+
this.log(`LL-Part loading OFF after next part miss @${targetBufferTime.toFixed(2)}`);
|
12745
|
+
this.loadingParts = false;
|
12746
|
+
} else if (!frag.url) {
|
12747
|
+
// Selected fragment hint for part but not loading parts
|
12748
|
+
return Promise.resolve(null);
|
12749
|
+
}
|
12750
|
+
this.log(`Loading fragment ${frag.sn} cc: ${frag.cc} ${details ? 'of [' + details.startSN + '-' + details.endSN + '] ' : ''}${this.playlistType === PlaylistLevelType.MAIN ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
|
12355
12751
|
// Don't update nextLoadPosition for fragments which are not buffered
|
12356
12752
|
if (isFiniteNumber(frag.sn) && !this.bitrateTest) {
|
12357
12753
|
this.nextLoadPosition = frag.start + frag.duration;
|
@@ -12449,8 +12845,36 @@ class BaseStreamController extends TaskLoop {
|
|
12449
12845
|
if (part) {
|
12450
12846
|
part.stats.parsing.end = now;
|
12451
12847
|
}
|
12848
|
+
// See if part loading should be disabled/enabled based on buffer and playback position.
|
12849
|
+
if (frag.sn !== 'initSegment') {
|
12850
|
+
const levelDetails = this.getLevelDetails();
|
12851
|
+
const loadingPartsAtEdge = levelDetails && frag.sn > levelDetails.endSN;
|
12852
|
+
const shouldLoadParts = loadingPartsAtEdge || this.shouldLoadParts(levelDetails, frag.end);
|
12853
|
+
if (shouldLoadParts !== this.loadingParts) {
|
12854
|
+
this.log(`LL-Part loading ${shouldLoadParts ? 'ON' : 'OFF'} after parsing segment ending @${frag.end.toFixed(2)}`);
|
12855
|
+
this.loadingParts = shouldLoadParts;
|
12856
|
+
}
|
12857
|
+
}
|
12452
12858
|
this.updateLevelTiming(frag, part, level, chunkMeta.partial);
|
12453
12859
|
}
|
12860
|
+
shouldLoadParts(details, bufferEnd) {
|
12861
|
+
if (this.config.lowLatencyMode) {
|
12862
|
+
if (!details) {
|
12863
|
+
return this.loadingParts;
|
12864
|
+
}
|
12865
|
+
if (details != null && details.partList) {
|
12866
|
+
var _details$fragmentHint;
|
12867
|
+
// Buffer must be ahead of first part + duration of parts after last segment
|
12868
|
+
// and playback must be at or past segment adjacent to part list
|
12869
|
+
const firstPart = details.partList[0];
|
12870
|
+
const safePartStart = firstPart.end + (((_details$fragmentHint = details.fragmentHint) == null ? void 0 : _details$fragmentHint.duration) || 0);
|
12871
|
+
if (bufferEnd >= safePartStart && this.lastCurrentTime > firstPart.start - firstPart.fragment.duration) {
|
12872
|
+
return true;
|
12873
|
+
}
|
12874
|
+
}
|
12875
|
+
}
|
12876
|
+
return false;
|
12877
|
+
}
|
12454
12878
|
getCurrentContext(chunkMeta) {
|
12455
12879
|
const {
|
12456
12880
|
levels,
|
@@ -12551,7 +12975,7 @@ class BaseStreamController extends TaskLoop {
|
|
12551
12975
|
// Workaround flaw in getting forward buffer when maxBufferHole is smaller than gap at current pos
|
12552
12976
|
if (bufferInfo.len === 0 && bufferInfo.nextStart !== undefined) {
|
12553
12977
|
const bufferedFragAtPos = this.fragmentTracker.getBufferedFrag(pos, type);
|
12554
|
-
if (bufferedFragAtPos && bufferInfo.nextStart
|
12978
|
+
if (bufferedFragAtPos && (bufferInfo.nextStart <= bufferedFragAtPos.end || bufferedFragAtPos.gap)) {
|
12555
12979
|
return BufferHelper.bufferInfo(bufferable, pos, Math.max(bufferInfo.nextStart, maxBufferHole));
|
12556
12980
|
}
|
12557
12981
|
}
|
@@ -12600,7 +13024,8 @@ class BaseStreamController extends TaskLoop {
|
|
12600
13024
|
config
|
12601
13025
|
} = this;
|
12602
13026
|
const start = fragments[0].start;
|
12603
|
-
|
13027
|
+
const canLoadParts = config.lowLatencyMode && !!levelDetails.partList;
|
13028
|
+
let frag = null;
|
12604
13029
|
if (levelDetails.live) {
|
12605
13030
|
const initialLiveManifestSize = config.initialLiveManifestSize;
|
12606
13031
|
if (fragLen < initialLiveManifestSize) {
|
@@ -12612,6 +13037,10 @@ class BaseStreamController extends TaskLoop {
|
|
12612
13037
|
// Do not load using live logic if the starting frag is requested - we want to use getFragmentAtPosition() so that
|
12613
13038
|
// we get the fragment matching that start time
|
12614
13039
|
if (!levelDetails.PTSKnown && !this.startFragRequested && this.startPosition === -1 || pos < start) {
|
13040
|
+
if (canLoadParts && !this.loadingParts) {
|
13041
|
+
this.log(`LL-Part loading ON for initial live fragment`);
|
13042
|
+
this.loadingParts = true;
|
13043
|
+
}
|
12615
13044
|
frag = this.getInitialLiveFragment(levelDetails, fragments);
|
12616
13045
|
this.startPosition = this.nextLoadPosition = frag ? this.hls.liveSyncPosition || frag.start : pos;
|
12617
13046
|
}
|
@@ -12622,7 +13051,7 @@ class BaseStreamController extends TaskLoop {
|
|
12622
13051
|
|
12623
13052
|
// If we haven't run into any special cases already, just load the fragment most closely matching the requested position
|
12624
13053
|
if (!frag) {
|
12625
|
-
const end =
|
13054
|
+
const end = this.loadingParts ? levelDetails.partEnd : levelDetails.fragmentEnd;
|
12626
13055
|
frag = this.getFragmentAtPosition(pos, end, levelDetails);
|
12627
13056
|
}
|
12628
13057
|
return this.mapToInitFragWhenRequired(frag);
|
@@ -12744,7 +13173,7 @@ class BaseStreamController extends TaskLoop {
|
|
12744
13173
|
} = levelDetails;
|
12745
13174
|
const tolerance = config.maxFragLookUpTolerance;
|
12746
13175
|
const partList = levelDetails.partList;
|
12747
|
-
const loadingParts = !!(
|
13176
|
+
const loadingParts = !!(this.loadingParts && partList != null && partList.length && fragmentHint);
|
12748
13177
|
if (loadingParts && fragmentHint && !this.bitrateTest) {
|
12749
13178
|
// Include incomplete fragment with parts at end
|
12750
13179
|
fragments = fragments.concat(fragmentHint);
|
@@ -12937,7 +13366,7 @@ class BaseStreamController extends TaskLoop {
|
|
12937
13366
|
errorAction.resolved = true;
|
12938
13367
|
}
|
12939
13368
|
} else {
|
12940
|
-
|
13369
|
+
this.warn(`${data.details} reached or exceeded max retry (${retryCount})`);
|
12941
13370
|
return;
|
12942
13371
|
}
|
12943
13372
|
} else if ((errorAction == null ? void 0 : errorAction.action) === NetworkErrorAction.SendAlternateToPenaltyBox) {
|
@@ -13005,7 +13434,9 @@ class BaseStreamController extends TaskLoop {
|
|
13005
13434
|
this.log('Reset loading state');
|
13006
13435
|
this.fragCurrent = null;
|
13007
13436
|
this.fragPrevious = null;
|
13008
|
-
this.state
|
13437
|
+
if (this.state !== State.STOPPED) {
|
13438
|
+
this.state = State.IDLE;
|
13439
|
+
}
|
13009
13440
|
}
|
13010
13441
|
resetStartWhenNotLoaded(level) {
|
13011
13442
|
// if loadedmetadata is not set, it means that first frag request failed
|
@@ -13184,6 +13615,104 @@ function dummyTrack(type = '', inputTimeScale = 90000) {
|
|
13184
13615
|
};
|
13185
13616
|
}
|
13186
13617
|
|
13618
|
+
/**
|
13619
|
+
* Returns any adjacent ID3 tags found in data starting at offset, as one block of data
|
13620
|
+
*
|
13621
|
+
* @param data - The data to search in
|
13622
|
+
* @param offset - The offset at which to start searching
|
13623
|
+
*
|
13624
|
+
* @returns The block of data containing any ID3 tags found
|
13625
|
+
* or `undefined` if no header is found at the starting offset
|
13626
|
+
*
|
13627
|
+
* @internal
|
13628
|
+
*
|
13629
|
+
* @group ID3
|
13630
|
+
*/
|
13631
|
+
function getId3Data(data, offset) {
|
13632
|
+
const front = offset;
|
13633
|
+
let length = 0;
|
13634
|
+
while (isId3Header(data, offset)) {
|
13635
|
+
// ID3 header is 10 bytes
|
13636
|
+
length += 10;
|
13637
|
+
const size = readId3Size(data, offset + 6);
|
13638
|
+
length += size;
|
13639
|
+
if (isId3Footer(data, offset + 10)) {
|
13640
|
+
// ID3 footer is 10 bytes
|
13641
|
+
length += 10;
|
13642
|
+
}
|
13643
|
+
offset += length;
|
13644
|
+
}
|
13645
|
+
if (length > 0) {
|
13646
|
+
return data.subarray(front, front + length);
|
13647
|
+
}
|
13648
|
+
return undefined;
|
13649
|
+
}
|
13650
|
+
|
13651
|
+
/**
|
13652
|
+
* Read a 33 bit timestamp from an ID3 frame.
|
13653
|
+
*
|
13654
|
+
* @param timeStampFrame - the ID3 frame
|
13655
|
+
*
|
13656
|
+
* @returns The timestamp
|
13657
|
+
*
|
13658
|
+
* @internal
|
13659
|
+
*
|
13660
|
+
* @group ID3
|
13661
|
+
*/
|
13662
|
+
function readId3Timestamp(timeStampFrame) {
|
13663
|
+
if (timeStampFrame.data.byteLength === 8) {
|
13664
|
+
const data = new Uint8Array(timeStampFrame.data);
|
13665
|
+
// timestamp is 33 bit expressed as a big-endian eight-octet number,
|
13666
|
+
// with the upper 31 bits set to zero.
|
13667
|
+
const pts33Bit = data[3] & 0x1;
|
13668
|
+
let timestamp = (data[4] << 23) + (data[5] << 15) + (data[6] << 7) + data[7];
|
13669
|
+
timestamp /= 45;
|
13670
|
+
if (pts33Bit) {
|
13671
|
+
timestamp += 47721858.84;
|
13672
|
+
} // 2^32 / 90
|
13673
|
+
return Math.round(timestamp);
|
13674
|
+
}
|
13675
|
+
return undefined;
|
13676
|
+
}
|
13677
|
+
|
13678
|
+
/**
|
13679
|
+
* Searches for the Elementary Stream timestamp found in the ID3 data chunk
|
13680
|
+
*
|
13681
|
+
* @param data - Block of data containing one or more ID3 tags
|
13682
|
+
*
|
13683
|
+
* @returns The timestamp
|
13684
|
+
*
|
13685
|
+
* @group ID3
|
13686
|
+
*
|
13687
|
+
* @beta
|
13688
|
+
*/
|
13689
|
+
function getId3Timestamp(data) {
|
13690
|
+
const frames = getId3Frames(data);
|
13691
|
+
for (let i = 0; i < frames.length; i++) {
|
13692
|
+
const frame = frames[i];
|
13693
|
+
if (isId3TimestampFrame(frame)) {
|
13694
|
+
return readId3Timestamp(frame);
|
13695
|
+
}
|
13696
|
+
}
|
13697
|
+
return undefined;
|
13698
|
+
}
|
13699
|
+
|
13700
|
+
/**
|
13701
|
+
* Checks if the given data contains an ID3 tag.
|
13702
|
+
*
|
13703
|
+
* @param data - The data to check
|
13704
|
+
* @param offset - The offset at which to start checking
|
13705
|
+
*
|
13706
|
+
* @returns `true` if an ID3 tag is found
|
13707
|
+
*
|
13708
|
+
* @group ID3
|
13709
|
+
*
|
13710
|
+
* @beta
|
13711
|
+
*/
|
13712
|
+
function canParseId3(data, offset) {
|
13713
|
+
return isId3Header(data, offset) && readId3Size(data, offset + 6) + 10 <= data.length - offset;
|
13714
|
+
}
|
13715
|
+
|
13187
13716
|
class BaseAudioDemuxer {
|
13188
13717
|
constructor() {
|
13189
13718
|
this._audioTrack = void 0;
|
@@ -13225,12 +13754,12 @@ class BaseAudioDemuxer {
|
|
13225
13754
|
data = appendUint8Array(this.cachedData, data);
|
13226
13755
|
this.cachedData = null;
|
13227
13756
|
}
|
13228
|
-
let id3Data =
|
13757
|
+
let id3Data = getId3Data(data, 0);
|
13229
13758
|
let offset = id3Data ? id3Data.length : 0;
|
13230
13759
|
let lastDataIndex;
|
13231
13760
|
const track = this._audioTrack;
|
13232
13761
|
const id3Track = this._id3Track;
|
13233
|
-
const timestamp = id3Data ?
|
13762
|
+
const timestamp = id3Data ? getId3Timestamp(id3Data) : undefined;
|
13234
13763
|
const length = data.length;
|
13235
13764
|
if (this.basePTS === null || this.frameIndex === 0 && isFiniteNumber(timestamp)) {
|
13236
13765
|
this.basePTS = initPTSFn(timestamp, timeOffset, this.initPTS);
|
@@ -13261,9 +13790,9 @@ class BaseAudioDemuxer {
|
|
13261
13790
|
} else {
|
13262
13791
|
offset = length;
|
13263
13792
|
}
|
13264
|
-
} else if (
|
13265
|
-
// after a
|
13266
|
-
id3Data =
|
13793
|
+
} else if (canParseId3(data, offset)) {
|
13794
|
+
// after a canParse, a call to getId3Data *should* always returns some data
|
13795
|
+
id3Data = getId3Data(data, offset);
|
13267
13796
|
id3Track.samples.push({
|
13268
13797
|
pts: this.lastPTS,
|
13269
13798
|
dts: this.lastPTS,
|
@@ -13332,6 +13861,7 @@ const initPTSFn = (timestamp, timeOffset, initPTS) => {
|
|
13332
13861
|
*/
|
13333
13862
|
function getAudioConfig(observer, data, offset, audioCodec) {
|
13334
13863
|
let adtsObjectType;
|
13864
|
+
let originalAdtsObjectType;
|
13335
13865
|
let adtsExtensionSamplingIndex;
|
13336
13866
|
let adtsChannelConfig;
|
13337
13867
|
let config;
|
@@ -13339,7 +13869,7 @@ function getAudioConfig(observer, data, offset, audioCodec) {
|
|
13339
13869
|
const manifestCodec = audioCodec;
|
13340
13870
|
const adtsSamplingRates = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
|
13341
13871
|
// byte 2
|
13342
|
-
adtsObjectType = ((data[offset + 2] & 0xc0) >>> 6) + 1;
|
13872
|
+
adtsObjectType = originalAdtsObjectType = ((data[offset + 2] & 0xc0) >>> 6) + 1;
|
13343
13873
|
const adtsSamplingIndex = (data[offset + 2] & 0x3c) >>> 2;
|
13344
13874
|
if (adtsSamplingIndex > adtsSamplingRates.length - 1) {
|
13345
13875
|
const error = new Error(`invalid ADTS sampling index:${adtsSamplingIndex}`);
|
@@ -13356,8 +13886,8 @@ function getAudioConfig(observer, data, offset, audioCodec) {
|
|
13356
13886
|
// byte 3
|
13357
13887
|
adtsChannelConfig |= (data[offset + 3] & 0xc0) >>> 6;
|
13358
13888
|
logger.log(`manifest codec:${audioCodec}, ADTS type:${adtsObjectType}, samplingIndex:${adtsSamplingIndex}`);
|
13359
|
-
//
|
13360
|
-
if (/firefox/i.test(userAgent)) {
|
13889
|
+
// Firefox and Pale Moon: freq less than 24kHz = AAC SBR (HE-AAC)
|
13890
|
+
if (/firefox|palemoon/i.test(userAgent)) {
|
13361
13891
|
if (adtsSamplingIndex >= 6) {
|
13362
13892
|
adtsObjectType = 5;
|
13363
13893
|
config = new Array(4);
|
@@ -13451,6 +13981,7 @@ function getAudioConfig(observer, data, offset, audioCodec) {
|
|
13451
13981
|
samplerate: adtsSamplingRates[adtsSamplingIndex],
|
13452
13982
|
channelCount: adtsChannelConfig,
|
13453
13983
|
codec: 'mp4a.40.' + adtsObjectType,
|
13984
|
+
parsedCodec: 'mp4a.40.' + originalAdtsObjectType,
|
13454
13985
|
manifestCodec
|
13455
13986
|
};
|
13456
13987
|
}
|
@@ -13505,7 +14036,8 @@ function initTrackConfig(track, observer, data, offset, audioCodec) {
|
|
13505
14036
|
track.channelCount = config.channelCount;
|
13506
14037
|
track.codec = config.codec;
|
13507
14038
|
track.manifestCodec = config.manifestCodec;
|
13508
|
-
|
14039
|
+
track.parsedCodec = config.parsedCodec;
|
14040
|
+
logger.log(`parsed codec:${track.parsedCodec}, codec:${track.codec}, rate:${config.samplerate}, channels:${config.channelCount}`);
|
13509
14041
|
}
|
13510
14042
|
}
|
13511
14043
|
function getFrameDuration(samplerate) {
|
@@ -13754,7 +14286,7 @@ class AACDemuxer extends BaseAudioDemuxer {
|
|
13754
14286
|
// Look for ADTS header | 1111 1111 | 1111 X00X | where X can be either 0 or 1
|
13755
14287
|
// Layer bits (position 14 and 15) in header should be always 0 for ADTS
|
13756
14288
|
// More info https://wiki.multimedia.cx/index.php?title=ADTS
|
13757
|
-
const id3Data =
|
14289
|
+
const id3Data = getId3Data(data, 0);
|
13758
14290
|
let offset = (id3Data == null ? void 0 : id3Data.length) || 0;
|
13759
14291
|
if (probe(data, offset)) {
|
13760
14292
|
return false;
|
@@ -13945,21 +14477,7 @@ class BaseVideoParser {
|
|
13945
14477
|
units: [],
|
13946
14478
|
debug,
|
13947
14479
|
length: 0
|
13948
|
-
};
|
13949
|
-
}
|
13950
|
-
getLastNalUnit(samples) {
|
13951
|
-
var _VideoSample;
|
13952
|
-
let VideoSample = this.VideoSample;
|
13953
|
-
let lastUnit;
|
13954
|
-
// try to fallback to previous sample if current one is empty
|
13955
|
-
if (!VideoSample || VideoSample.units.length === 0) {
|
13956
|
-
VideoSample = samples[samples.length - 1];
|
13957
|
-
}
|
13958
|
-
if ((_VideoSample = VideoSample) != null && _VideoSample.units) {
|
13959
|
-
const units = VideoSample.units;
|
13960
|
-
lastUnit = units[units.length - 1];
|
13961
|
-
}
|
13962
|
-
return lastUnit;
|
14480
|
+
};
|
13963
14481
|
}
|
13964
14482
|
pushAccessUnit(VideoSample, videoTrack) {
|
13965
14483
|
if (VideoSample.units.length && VideoSample.frame) {
|
@@ -13983,6 +14501,122 @@ class BaseVideoParser {
|
|
13983
14501
|
logger.log(VideoSample.pts + '/' + VideoSample.dts + ':' + VideoSample.debug);
|
13984
14502
|
}
|
13985
14503
|
}
|
14504
|
+
parseNALu(track, array, last) {
|
14505
|
+
const len = array.byteLength;
|
14506
|
+
let state = track.naluState || 0;
|
14507
|
+
const lastState = state;
|
14508
|
+
const units = [];
|
14509
|
+
let i = 0;
|
14510
|
+
let value;
|
14511
|
+
let overflow;
|
14512
|
+
let unitType;
|
14513
|
+
let lastUnitStart = -1;
|
14514
|
+
let lastUnitType = 0;
|
14515
|
+
// logger.log('PES:' + Hex.hexDump(array));
|
14516
|
+
|
14517
|
+
if (state === -1) {
|
14518
|
+
// special use case where we found 3 or 4-byte start codes exactly at the end of previous PES packet
|
14519
|
+
lastUnitStart = 0;
|
14520
|
+
// NALu type is value read from offset 0
|
14521
|
+
lastUnitType = this.getNALuType(array, 0);
|
14522
|
+
state = 0;
|
14523
|
+
i = 1;
|
14524
|
+
}
|
14525
|
+
while (i < len) {
|
14526
|
+
value = array[i++];
|
14527
|
+
// optimization. state 0 and 1 are the predominant case. let's handle them outside of the switch/case
|
14528
|
+
if (!state) {
|
14529
|
+
state = value ? 0 : 1;
|
14530
|
+
continue;
|
14531
|
+
}
|
14532
|
+
if (state === 1) {
|
14533
|
+
state = value ? 0 : 2;
|
14534
|
+
continue;
|
14535
|
+
}
|
14536
|
+
// here we have state either equal to 2 or 3
|
14537
|
+
if (!value) {
|
14538
|
+
state = 3;
|
14539
|
+
} else if (value === 1) {
|
14540
|
+
overflow = i - state - 1;
|
14541
|
+
if (lastUnitStart >= 0) {
|
14542
|
+
const unit = {
|
14543
|
+
data: array.subarray(lastUnitStart, overflow),
|
14544
|
+
type: lastUnitType
|
14545
|
+
};
|
14546
|
+
if (track.lastNalu) {
|
14547
|
+
units.push(track.lastNalu);
|
14548
|
+
track.lastNalu = null;
|
14549
|
+
}
|
14550
|
+
// logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
|
14551
|
+
units.push(unit);
|
14552
|
+
} else {
|
14553
|
+
// lastUnitStart is undefined => this is the first start code found in this PES packet
|
14554
|
+
// first check if start code delimiter is overlapping between 2 PES packets,
|
14555
|
+
// ie it started in last packet (lastState not zero)
|
14556
|
+
// and ended at the beginning of this PES packet (i <= 4 - lastState)
|
14557
|
+
const lastUnit = track.lastNalu;
|
14558
|
+
if (lastUnit) {
|
14559
|
+
if (lastState && i <= 4 - lastState) {
|
14560
|
+
// start delimiter overlapping between PES packets
|
14561
|
+
// strip start delimiter bytes from the end of last NAL unit
|
14562
|
+
// check if lastUnit had a state different from zero
|
14563
|
+
if (lastUnit.state) {
|
14564
|
+
// strip last bytes
|
14565
|
+
lastUnit.data = lastUnit.data.subarray(0, lastUnit.data.byteLength - lastState);
|
14566
|
+
}
|
14567
|
+
}
|
14568
|
+
// If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
|
14569
|
+
|
14570
|
+
if (overflow > 0) {
|
14571
|
+
// logger.log('first NALU found with overflow:' + overflow);
|
14572
|
+
lastUnit.data = appendUint8Array(lastUnit.data, array.subarray(0, overflow));
|
14573
|
+
lastUnit.state = 0;
|
14574
|
+
units.push(lastUnit);
|
14575
|
+
track.lastNalu = null;
|
14576
|
+
}
|
14577
|
+
}
|
14578
|
+
}
|
14579
|
+
// check if we can read unit type
|
14580
|
+
if (i < len) {
|
14581
|
+
unitType = this.getNALuType(array, i);
|
14582
|
+
// logger.log('find NALU @ offset:' + i + ',type:' + unitType);
|
14583
|
+
lastUnitStart = i;
|
14584
|
+
lastUnitType = unitType;
|
14585
|
+
state = 0;
|
14586
|
+
} else {
|
14587
|
+
// not enough byte to read unit type. let's read it on next PES parsing
|
14588
|
+
state = -1;
|
14589
|
+
}
|
14590
|
+
} else {
|
14591
|
+
state = 0;
|
14592
|
+
}
|
14593
|
+
}
|
14594
|
+
if (lastUnitStart >= 0 && state >= 0) {
|
14595
|
+
const unit = {
|
14596
|
+
data: array.subarray(lastUnitStart, len),
|
14597
|
+
type: lastUnitType,
|
14598
|
+
state: state
|
14599
|
+
};
|
14600
|
+
if (!last) {
|
14601
|
+
track.lastNalu = unit;
|
14602
|
+
// logger.log('store NALu to push it on next PES');
|
14603
|
+
} else {
|
14604
|
+
units.push(unit);
|
14605
|
+
// logger.log('pushing NALU, type/size/state:' + unit.type + '/' + unit.data.byteLength + '/' + state);
|
14606
|
+
}
|
14607
|
+
} else if (units.length === 0) {
|
14608
|
+
// no NALu found
|
14609
|
+
// append pes.data to previous NAL unit
|
14610
|
+
const lastUnit = track.lastNalu;
|
14611
|
+
if (lastUnit) {
|
14612
|
+
lastUnit.data = appendUint8Array(lastUnit.data, array);
|
14613
|
+
units.push(lastUnit);
|
14614
|
+
track.lastNalu = null;
|
14615
|
+
}
|
14616
|
+
}
|
14617
|
+
track.naluState = state;
|
14618
|
+
return units;
|
14619
|
+
}
|
13986
14620
|
}
|
13987
14621
|
|
13988
14622
|
/**
|
@@ -14060,259 +14694,76 @@ class ExpGolomb {
|
|
14060
14694
|
} else {
|
14061
14695
|
return valu;
|
14062
14696
|
}
|
14063
|
-
}
|
14064
|
-
|
14065
|
-
// ():uint
|
14066
|
-
skipLZ() {
|
14067
|
-
let leadingZeroCount; // :uint
|
14068
|
-
for (leadingZeroCount = 0; leadingZeroCount < this.bitsAvailable; ++leadingZeroCount) {
|
14069
|
-
if ((this.word & 0x80000000 >>> leadingZeroCount) !== 0) {
|
14070
|
-
// the first bit of working word is 1
|
14071
|
-
this.word <<= leadingZeroCount;
|
14072
|
-
this.bitsAvailable -= leadingZeroCount;
|
14073
|
-
return leadingZeroCount;
|
14074
|
-
}
|
14075
|
-
}
|
14076
|
-
// we exhausted word and still have not found a 1
|
14077
|
-
this.loadWord();
|
14078
|
-
return leadingZeroCount + this.skipLZ();
|
14079
|
-
}
|
14080
|
-
|
14081
|
-
// ():void
|
14082
|
-
skipUEG() {
|
14083
|
-
this.skipBits(1 + this.skipLZ());
|
14084
|
-
}
|
14085
|
-
|
14086
|
-
// ():void
|
14087
|
-
skipEG() {
|
14088
|
-
this.skipBits(1 + this.skipLZ());
|
14089
|
-
}
|
14090
|
-
|
14091
|
-
// ():uint
|
14092
|
-
readUEG() {
|
14093
|
-
const clz = this.skipLZ(); // :uint
|
14094
|
-
return this.readBits(clz + 1) - 1;
|
14095
|
-
}
|
14096
|
-
|
14097
|
-
// ():int
|
14098
|
-
readEG() {
|
14099
|
-
const valu = this.readUEG(); // :int
|
14100
|
-
if (0x01 & valu) {
|
14101
|
-
// the number is odd if the low order bit is set
|
14102
|
-
return 1 + valu >>> 1; // add 1 to make it even, and divide by 2
|
14103
|
-
} else {
|
14104
|
-
return -1 * (valu >>> 1); // divide by two then make it negative
|
14105
|
-
}
|
14106
|
-
}
|
14107
|
-
|
14108
|
-
// Some convenience functions
|
14109
|
-
// :Boolean
|
14110
|
-
readBoolean() {
|
14111
|
-
return this.readBits(1) === 1;
|
14112
|
-
}
|
14113
|
-
|
14114
|
-
// ():int
|
14115
|
-
readUByte() {
|
14116
|
-
return this.readBits(8);
|
14117
|
-
}
|
14118
|
-
|
14119
|
-
// ():int
|
14120
|
-
readUShort() {
|
14121
|
-
return this.readBits(16);
|
14122
|
-
}
|
14123
|
-
|
14124
|
-
// ():int
|
14125
|
-
readUInt() {
|
14126
|
-
return this.readBits(32);
|
14127
|
-
}
|
14128
|
-
|
14129
|
-
/**
|
14130
|
-
* Advance the ExpGolomb decoder past a scaling list. The scaling
|
14131
|
-
* list is optionally transmitted as part of a sequence parameter
|
14132
|
-
* set and is not relevant to transmuxing.
|
14133
|
-
* @param count the number of entries in this scaling list
|
14134
|
-
* @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
|
14135
|
-
*/
|
14136
|
-
skipScalingList(count) {
|
14137
|
-
let lastScale = 8;
|
14138
|
-
let nextScale = 8;
|
14139
|
-
let deltaScale;
|
14140
|
-
for (let j = 0; j < count; j++) {
|
14141
|
-
if (nextScale !== 0) {
|
14142
|
-
deltaScale = this.readEG();
|
14143
|
-
nextScale = (lastScale + deltaScale + 256) % 256;
|
14144
|
-
}
|
14145
|
-
lastScale = nextScale === 0 ? lastScale : nextScale;
|
14146
|
-
}
|
14147
|
-
}
|
14148
|
-
|
14149
|
-
/**
|
14150
|
-
* Read a sequence parameter set and return some interesting video
|
14151
|
-
* properties. A sequence parameter set is the H264 metadata that
|
14152
|
-
* describes the properties of upcoming video frames.
|
14153
|
-
* @returns an object with configuration parsed from the
|
14154
|
-
* sequence parameter set, including the dimensions of the
|
14155
|
-
* associated video frames.
|
14156
|
-
*/
|
14157
|
-
readSPS() {
|
14158
|
-
let frameCropLeftOffset = 0;
|
14159
|
-
let frameCropRightOffset = 0;
|
14160
|
-
let frameCropTopOffset = 0;
|
14161
|
-
let frameCropBottomOffset = 0;
|
14162
|
-
let numRefFramesInPicOrderCntCycle;
|
14163
|
-
let scalingListCount;
|
14164
|
-
let i;
|
14165
|
-
const readUByte = this.readUByte.bind(this);
|
14166
|
-
const readBits = this.readBits.bind(this);
|
14167
|
-
const readUEG = this.readUEG.bind(this);
|
14168
|
-
const readBoolean = this.readBoolean.bind(this);
|
14169
|
-
const skipBits = this.skipBits.bind(this);
|
14170
|
-
const skipEG = this.skipEG.bind(this);
|
14171
|
-
const skipUEG = this.skipUEG.bind(this);
|
14172
|
-
const skipScalingList = this.skipScalingList.bind(this);
|
14173
|
-
readUByte();
|
14174
|
-
const profileIdc = readUByte(); // profile_idc
|
14175
|
-
readBits(5); // profileCompat constraint_set[0-4]_flag, u(5)
|
14176
|
-
skipBits(3); // reserved_zero_3bits u(3),
|
14177
|
-
readUByte(); // level_idc u(8)
|
14178
|
-
skipUEG(); // seq_parameter_set_id
|
14179
|
-
// some profiles have more optional data we don't need
|
14180
|
-
if (profileIdc === 100 || profileIdc === 110 || profileIdc === 122 || profileIdc === 244 || profileIdc === 44 || profileIdc === 83 || profileIdc === 86 || profileIdc === 118 || profileIdc === 128) {
|
14181
|
-
const chromaFormatIdc = readUEG();
|
14182
|
-
if (chromaFormatIdc === 3) {
|
14183
|
-
skipBits(1);
|
14184
|
-
} // separate_colour_plane_flag
|
14185
|
-
|
14186
|
-
skipUEG(); // bit_depth_luma_minus8
|
14187
|
-
skipUEG(); // bit_depth_chroma_minus8
|
14188
|
-
skipBits(1); // qpprime_y_zero_transform_bypass_flag
|
14189
|
-
if (readBoolean()) {
|
14190
|
-
// seq_scaling_matrix_present_flag
|
14191
|
-
scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
|
14192
|
-
for (i = 0; i < scalingListCount; i++) {
|
14193
|
-
if (readBoolean()) {
|
14194
|
-
// seq_scaling_list_present_flag[ i ]
|
14195
|
-
if (i < 6) {
|
14196
|
-
skipScalingList(16);
|
14197
|
-
} else {
|
14198
|
-
skipScalingList(64);
|
14199
|
-
}
|
14200
|
-
}
|
14201
|
-
}
|
14202
|
-
}
|
14203
|
-
}
|
14204
|
-
skipUEG(); // log2_max_frame_num_minus4
|
14205
|
-
const picOrderCntType = readUEG();
|
14206
|
-
if (picOrderCntType === 0) {
|
14207
|
-
readUEG(); // log2_max_pic_order_cnt_lsb_minus4
|
14208
|
-
} else if (picOrderCntType === 1) {
|
14209
|
-
skipBits(1); // delta_pic_order_always_zero_flag
|
14210
|
-
skipEG(); // offset_for_non_ref_pic
|
14211
|
-
skipEG(); // offset_for_top_to_bottom_field
|
14212
|
-
numRefFramesInPicOrderCntCycle = readUEG();
|
14213
|
-
for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
|
14214
|
-
skipEG();
|
14215
|
-
} // offset_for_ref_frame[ i ]
|
14216
|
-
}
|
14217
|
-
skipUEG(); // max_num_ref_frames
|
14218
|
-
skipBits(1); // gaps_in_frame_num_value_allowed_flag
|
14219
|
-
const picWidthInMbsMinus1 = readUEG();
|
14220
|
-
const picHeightInMapUnitsMinus1 = readUEG();
|
14221
|
-
const frameMbsOnlyFlag = readBits(1);
|
14222
|
-
if (frameMbsOnlyFlag === 0) {
|
14223
|
-
skipBits(1);
|
14224
|
-
} // mb_adaptive_frame_field_flag
|
14225
|
-
|
14226
|
-
skipBits(1); // direct_8x8_inference_flag
|
14227
|
-
if (readBoolean()) {
|
14228
|
-
// frame_cropping_flag
|
14229
|
-
frameCropLeftOffset = readUEG();
|
14230
|
-
frameCropRightOffset = readUEG();
|
14231
|
-
frameCropTopOffset = readUEG();
|
14232
|
-
frameCropBottomOffset = readUEG();
|
14233
|
-
}
|
14234
|
-
let pixelRatio = [1, 1];
|
14235
|
-
if (readBoolean()) {
|
14236
|
-
// vui_parameters_present_flag
|
14237
|
-
if (readBoolean()) {
|
14238
|
-
// aspect_ratio_info_present_flag
|
14239
|
-
const aspectRatioIdc = readUByte();
|
14240
|
-
switch (aspectRatioIdc) {
|
14241
|
-
case 1:
|
14242
|
-
pixelRatio = [1, 1];
|
14243
|
-
break;
|
14244
|
-
case 2:
|
14245
|
-
pixelRatio = [12, 11];
|
14246
|
-
break;
|
14247
|
-
case 3:
|
14248
|
-
pixelRatio = [10, 11];
|
14249
|
-
break;
|
14250
|
-
case 4:
|
14251
|
-
pixelRatio = [16, 11];
|
14252
|
-
break;
|
14253
|
-
case 5:
|
14254
|
-
pixelRatio = [40, 33];
|
14255
|
-
break;
|
14256
|
-
case 6:
|
14257
|
-
pixelRatio = [24, 11];
|
14258
|
-
break;
|
14259
|
-
case 7:
|
14260
|
-
pixelRatio = [20, 11];
|
14261
|
-
break;
|
14262
|
-
case 8:
|
14263
|
-
pixelRatio = [32, 11];
|
14264
|
-
break;
|
14265
|
-
case 9:
|
14266
|
-
pixelRatio = [80, 33];
|
14267
|
-
break;
|
14268
|
-
case 10:
|
14269
|
-
pixelRatio = [18, 11];
|
14270
|
-
break;
|
14271
|
-
case 11:
|
14272
|
-
pixelRatio = [15, 11];
|
14273
|
-
break;
|
14274
|
-
case 12:
|
14275
|
-
pixelRatio = [64, 33];
|
14276
|
-
break;
|
14277
|
-
case 13:
|
14278
|
-
pixelRatio = [160, 99];
|
14279
|
-
break;
|
14280
|
-
case 14:
|
14281
|
-
pixelRatio = [4, 3];
|
14282
|
-
break;
|
14283
|
-
case 15:
|
14284
|
-
pixelRatio = [3, 2];
|
14285
|
-
break;
|
14286
|
-
case 16:
|
14287
|
-
pixelRatio = [2, 1];
|
14288
|
-
break;
|
14289
|
-
case 255:
|
14290
|
-
{
|
14291
|
-
pixelRatio = [readUByte() << 8 | readUByte(), readUByte() << 8 | readUByte()];
|
14292
|
-
break;
|
14293
|
-
}
|
14294
|
-
}
|
14697
|
+
}
|
14698
|
+
|
14699
|
+
// ():uint
|
14700
|
+
skipLZ() {
|
14701
|
+
let leadingZeroCount; // :uint
|
14702
|
+
for (leadingZeroCount = 0; leadingZeroCount < this.bitsAvailable; ++leadingZeroCount) {
|
14703
|
+
if ((this.word & 0x80000000 >>> leadingZeroCount) !== 0) {
|
14704
|
+
// the first bit of working word is 1
|
14705
|
+
this.word <<= leadingZeroCount;
|
14706
|
+
this.bitsAvailable -= leadingZeroCount;
|
14707
|
+
return leadingZeroCount;
|
14295
14708
|
}
|
14296
14709
|
}
|
14297
|
-
|
14298
|
-
|
14299
|
-
|
14300
|
-
pixelRatio: pixelRatio
|
14301
|
-
};
|
14710
|
+
// we exhausted word and still have not found a 1
|
14711
|
+
this.loadWord();
|
14712
|
+
return leadingZeroCount + this.skipLZ();
|
14302
14713
|
}
|
14303
|
-
|
14304
|
-
|
14305
|
-
|
14306
|
-
|
14307
|
-
|
14308
|
-
|
14309
|
-
|
14714
|
+
|
14715
|
+
// ():void
|
14716
|
+
skipUEG() {
|
14717
|
+
this.skipBits(1 + this.skipLZ());
|
14718
|
+
}
|
14719
|
+
|
14720
|
+
// ():void
|
14721
|
+
skipEG() {
|
14722
|
+
this.skipBits(1 + this.skipLZ());
|
14723
|
+
}
|
14724
|
+
|
14725
|
+
// ():uint
|
14726
|
+
readUEG() {
|
14727
|
+
const clz = this.skipLZ(); // :uint
|
14728
|
+
return this.readBits(clz + 1) - 1;
|
14729
|
+
}
|
14730
|
+
|
14731
|
+
// ():int
|
14732
|
+
readEG() {
|
14733
|
+
const valu = this.readUEG(); // :int
|
14734
|
+
if (0x01 & valu) {
|
14735
|
+
// the number is odd if the low order bit is set
|
14736
|
+
return 1 + valu >>> 1; // add 1 to make it even, and divide by 2
|
14737
|
+
} else {
|
14738
|
+
return -1 * (valu >>> 1); // divide by two then make it negative
|
14739
|
+
}
|
14740
|
+
}
|
14741
|
+
|
14742
|
+
// Some convenience functions
|
14743
|
+
// :Boolean
|
14744
|
+
readBoolean() {
|
14745
|
+
return this.readBits(1) === 1;
|
14746
|
+
}
|
14747
|
+
|
14748
|
+
// ():int
|
14749
|
+
readUByte() {
|
14750
|
+
return this.readBits(8);
|
14751
|
+
}
|
14752
|
+
|
14753
|
+
// ():int
|
14754
|
+
readUShort() {
|
14755
|
+
return this.readBits(16);
|
14756
|
+
}
|
14757
|
+
|
14758
|
+
// ():int
|
14759
|
+
readUInt() {
|
14760
|
+
return this.readBits(32);
|
14310
14761
|
}
|
14311
14762
|
}
|
14312
14763
|
|
14313
14764
|
class AvcVideoParser extends BaseVideoParser {
|
14314
|
-
|
14315
|
-
const units = this.
|
14765
|
+
parsePES(track, textTrack, pes, last, duration) {
|
14766
|
+
const units = this.parseNALu(track, pes.data, last);
|
14316
14767
|
let VideoSample = this.VideoSample;
|
14317
14768
|
let push;
|
14318
14769
|
let spsfound = false;
|
@@ -14337,7 +14788,7 @@ class AvcVideoParser extends BaseVideoParser {
|
|
14337
14788
|
// only check slice type to detect KF in case SPS found in same packet (any keyframe is preceded by SPS ...)
|
14338
14789
|
if (spsfound && data.length > 4) {
|
14339
14790
|
// retrieve slice type by parsing beginning of NAL unit (follow H264 spec, slice_header definition) to detect keyframe embedded in NDR
|
14340
|
-
const sliceType =
|
14791
|
+
const sliceType = this.readSliceType(data);
|
14341
14792
|
// 2 : I slice, 4 : SI slice, 7 : I slice, 9: SI slice
|
14342
14793
|
// SI slice : A slice that is coded using intra prediction only and using quantisation of the prediction samples.
|
14343
14794
|
// An SI slice can be coded such that its decoded samples can be constructed identically to an SP slice.
|
@@ -14391,8 +14842,7 @@ class AvcVideoParser extends BaseVideoParser {
|
|
14391
14842
|
push = true;
|
14392
14843
|
spsfound = true;
|
14393
14844
|
const sps = unit.data;
|
14394
|
-
const
|
14395
|
-
const config = expGolombDecoder.readSPS();
|
14845
|
+
const config = this.readSPS(sps);
|
14396
14846
|
if (!track.sps || track.width !== config.width || track.height !== config.height || ((_track$pixelRatio = track.pixelRatio) == null ? void 0 : _track$pixelRatio[0]) !== config.pixelRatio[0] || ((_track$pixelRatio2 = track.pixelRatio) == null ? void 0 : _track$pixelRatio2[1]) !== config.pixelRatio[1]) {
|
14397
14847
|
track.width = config.width;
|
14398
14848
|
track.height = config.height;
|
@@ -14448,109 +14898,192 @@ class AvcVideoParser extends BaseVideoParser {
|
|
14448
14898
|
this.VideoSample = null;
|
14449
14899
|
}
|
14450
14900
|
}
|
14451
|
-
|
14452
|
-
|
14453
|
-
|
14454
|
-
|
14455
|
-
const
|
14456
|
-
|
14457
|
-
|
14458
|
-
|
14459
|
-
|
14460
|
-
|
14461
|
-
|
14462
|
-
|
14901
|
+
getNALuType(data, offset) {
|
14902
|
+
return data[offset] & 0x1f;
|
14903
|
+
}
|
14904
|
+
readSliceType(data) {
|
14905
|
+
const eg = new ExpGolomb(data);
|
14906
|
+
// skip NALu type
|
14907
|
+
eg.readUByte();
|
14908
|
+
// discard first_mb_in_slice
|
14909
|
+
eg.readUEG();
|
14910
|
+
// return slice_type
|
14911
|
+
return eg.readUEG();
|
14912
|
+
}
|
14463
14913
|
|
14464
|
-
|
14465
|
-
|
14466
|
-
|
14467
|
-
|
14468
|
-
|
14469
|
-
|
14470
|
-
|
14471
|
-
|
14472
|
-
|
14473
|
-
|
14474
|
-
|
14475
|
-
if (
|
14476
|
-
|
14477
|
-
|
14478
|
-
}
|
14479
|
-
if (state === 1) {
|
14480
|
-
state = value ? 0 : 2;
|
14481
|
-
continue;
|
14914
|
+
/**
|
14915
|
+
* The scaling list is optionally transmitted as part of a sequence parameter
|
14916
|
+
* set and is not relevant to transmuxing.
|
14917
|
+
* @param count the number of entries in this scaling list
|
14918
|
+
* @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
|
14919
|
+
*/
|
14920
|
+
skipScalingList(count, reader) {
|
14921
|
+
let lastScale = 8;
|
14922
|
+
let nextScale = 8;
|
14923
|
+
let deltaScale;
|
14924
|
+
for (let j = 0; j < count; j++) {
|
14925
|
+
if (nextScale !== 0) {
|
14926
|
+
deltaScale = reader.readEG();
|
14927
|
+
nextScale = (lastScale + deltaScale + 256) % 256;
|
14482
14928
|
}
|
14483
|
-
|
14484
|
-
|
14485
|
-
|
14486
|
-
} else if (value === 1) {
|
14487
|
-
overflow = i - state - 1;
|
14488
|
-
if (lastUnitStart >= 0) {
|
14489
|
-
const unit = {
|
14490
|
-
data: array.subarray(lastUnitStart, overflow),
|
14491
|
-
type: lastUnitType
|
14492
|
-
};
|
14493
|
-
// logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
|
14494
|
-
units.push(unit);
|
14495
|
-
} else {
|
14496
|
-
// lastUnitStart is undefined => this is the first start code found in this PES packet
|
14497
|
-
// first check if start code delimiter is overlapping between 2 PES packets,
|
14498
|
-
// ie it started in last packet (lastState not zero)
|
14499
|
-
// and ended at the beginning of this PES packet (i <= 4 - lastState)
|
14500
|
-
const lastUnit = this.getLastNalUnit(track.samples);
|
14501
|
-
if (lastUnit) {
|
14502
|
-
if (lastState && i <= 4 - lastState) {
|
14503
|
-
// start delimiter overlapping between PES packets
|
14504
|
-
// strip start delimiter bytes from the end of last NAL unit
|
14505
|
-
// check if lastUnit had a state different from zero
|
14506
|
-
if (lastUnit.state) {
|
14507
|
-
// strip last bytes
|
14508
|
-
lastUnit.data = lastUnit.data.subarray(0, lastUnit.data.byteLength - lastState);
|
14509
|
-
}
|
14510
|
-
}
|
14511
|
-
// If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
|
14929
|
+
lastScale = nextScale === 0 ? lastScale : nextScale;
|
14930
|
+
}
|
14931
|
+
}
|
14512
14932
|
|
14513
|
-
|
14514
|
-
|
14515
|
-
|
14516
|
-
|
14517
|
-
|
14518
|
-
|
14519
|
-
|
14520
|
-
|
14521
|
-
|
14522
|
-
|
14523
|
-
|
14524
|
-
|
14525
|
-
|
14526
|
-
|
14527
|
-
|
14528
|
-
|
14529
|
-
|
14933
|
+
/**
|
14934
|
+
* Read a sequence parameter set and return some interesting video
|
14935
|
+
* properties. A sequence parameter set is the H264 metadata that
|
14936
|
+
* describes the properties of upcoming video frames.
|
14937
|
+
* @returns an object with configuration parsed from the
|
14938
|
+
* sequence parameter set, including the dimensions of the
|
14939
|
+
* associated video frames.
|
14940
|
+
*/
|
14941
|
+
readSPS(sps) {
|
14942
|
+
const eg = new ExpGolomb(sps);
|
14943
|
+
let frameCropLeftOffset = 0;
|
14944
|
+
let frameCropRightOffset = 0;
|
14945
|
+
let frameCropTopOffset = 0;
|
14946
|
+
let frameCropBottomOffset = 0;
|
14947
|
+
let numRefFramesInPicOrderCntCycle;
|
14948
|
+
let scalingListCount;
|
14949
|
+
let i;
|
14950
|
+
const readUByte = eg.readUByte.bind(eg);
|
14951
|
+
const readBits = eg.readBits.bind(eg);
|
14952
|
+
const readUEG = eg.readUEG.bind(eg);
|
14953
|
+
const readBoolean = eg.readBoolean.bind(eg);
|
14954
|
+
const skipBits = eg.skipBits.bind(eg);
|
14955
|
+
const skipEG = eg.skipEG.bind(eg);
|
14956
|
+
const skipUEG = eg.skipUEG.bind(eg);
|
14957
|
+
const skipScalingList = this.skipScalingList.bind(this);
|
14958
|
+
readUByte();
|
14959
|
+
const profileIdc = readUByte(); // profile_idc
|
14960
|
+
readBits(5); // profileCompat constraint_set[0-4]_flag, u(5)
|
14961
|
+
skipBits(3); // reserved_zero_3bits u(3),
|
14962
|
+
readUByte(); // level_idc u(8)
|
14963
|
+
skipUEG(); // seq_parameter_set_id
|
14964
|
+
// some profiles have more optional data we don't need
|
14965
|
+
if (profileIdc === 100 || profileIdc === 110 || profileIdc === 122 || profileIdc === 244 || profileIdc === 44 || profileIdc === 83 || profileIdc === 86 || profileIdc === 118 || profileIdc === 128) {
|
14966
|
+
const chromaFormatIdc = readUEG();
|
14967
|
+
if (chromaFormatIdc === 3) {
|
14968
|
+
skipBits(1);
|
14969
|
+
} // separate_colour_plane_flag
|
14970
|
+
|
14971
|
+
skipUEG(); // bit_depth_luma_minus8
|
14972
|
+
skipUEG(); // bit_depth_chroma_minus8
|
14973
|
+
skipBits(1); // qpprime_y_zero_transform_bypass_flag
|
14974
|
+
if (readBoolean()) {
|
14975
|
+
// seq_scaling_matrix_present_flag
|
14976
|
+
scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
|
14977
|
+
for (i = 0; i < scalingListCount; i++) {
|
14978
|
+
if (readBoolean()) {
|
14979
|
+
// seq_scaling_list_present_flag[ i ]
|
14980
|
+
if (i < 6) {
|
14981
|
+
skipScalingList(16, eg);
|
14982
|
+
} else {
|
14983
|
+
skipScalingList(64, eg);
|
14984
|
+
}
|
14985
|
+
}
|
14530
14986
|
}
|
14531
|
-
} else {
|
14532
|
-
state = 0;
|
14533
14987
|
}
|
14534
14988
|
}
|
14535
|
-
|
14536
|
-
|
14537
|
-
|
14538
|
-
|
14539
|
-
|
14540
|
-
|
14541
|
-
|
14542
|
-
|
14989
|
+
skipUEG(); // log2_max_frame_num_minus4
|
14990
|
+
const picOrderCntType = readUEG();
|
14991
|
+
if (picOrderCntType === 0) {
|
14992
|
+
readUEG(); // log2_max_pic_order_cnt_lsb_minus4
|
14993
|
+
} else if (picOrderCntType === 1) {
|
14994
|
+
skipBits(1); // delta_pic_order_always_zero_flag
|
14995
|
+
skipEG(); // offset_for_non_ref_pic
|
14996
|
+
skipEG(); // offset_for_top_to_bottom_field
|
14997
|
+
numRefFramesInPicOrderCntCycle = readUEG();
|
14998
|
+
for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
|
14999
|
+
skipEG();
|
15000
|
+
} // offset_for_ref_frame[ i ]
|
14543
15001
|
}
|
14544
|
-
//
|
14545
|
-
|
14546
|
-
|
14547
|
-
|
14548
|
-
|
14549
|
-
|
15002
|
+
skipUEG(); // max_num_ref_frames
|
15003
|
+
skipBits(1); // gaps_in_frame_num_value_allowed_flag
|
15004
|
+
const picWidthInMbsMinus1 = readUEG();
|
15005
|
+
const picHeightInMapUnitsMinus1 = readUEG();
|
15006
|
+
const frameMbsOnlyFlag = readBits(1);
|
15007
|
+
if (frameMbsOnlyFlag === 0) {
|
15008
|
+
skipBits(1);
|
15009
|
+
} // mb_adaptive_frame_field_flag
|
15010
|
+
|
15011
|
+
skipBits(1); // direct_8x8_inference_flag
|
15012
|
+
if (readBoolean()) {
|
15013
|
+
// frame_cropping_flag
|
15014
|
+
frameCropLeftOffset = readUEG();
|
15015
|
+
frameCropRightOffset = readUEG();
|
15016
|
+
frameCropTopOffset = readUEG();
|
15017
|
+
frameCropBottomOffset = readUEG();
|
15018
|
+
}
|
15019
|
+
let pixelRatio = [1, 1];
|
15020
|
+
if (readBoolean()) {
|
15021
|
+
// vui_parameters_present_flag
|
15022
|
+
if (readBoolean()) {
|
15023
|
+
// aspect_ratio_info_present_flag
|
15024
|
+
const aspectRatioIdc = readUByte();
|
15025
|
+
switch (aspectRatioIdc) {
|
15026
|
+
case 1:
|
15027
|
+
pixelRatio = [1, 1];
|
15028
|
+
break;
|
15029
|
+
case 2:
|
15030
|
+
pixelRatio = [12, 11];
|
15031
|
+
break;
|
15032
|
+
case 3:
|
15033
|
+
pixelRatio = [10, 11];
|
15034
|
+
break;
|
15035
|
+
case 4:
|
15036
|
+
pixelRatio = [16, 11];
|
15037
|
+
break;
|
15038
|
+
case 5:
|
15039
|
+
pixelRatio = [40, 33];
|
15040
|
+
break;
|
15041
|
+
case 6:
|
15042
|
+
pixelRatio = [24, 11];
|
15043
|
+
break;
|
15044
|
+
case 7:
|
15045
|
+
pixelRatio = [20, 11];
|
15046
|
+
break;
|
15047
|
+
case 8:
|
15048
|
+
pixelRatio = [32, 11];
|
15049
|
+
break;
|
15050
|
+
case 9:
|
15051
|
+
pixelRatio = [80, 33];
|
15052
|
+
break;
|
15053
|
+
case 10:
|
15054
|
+
pixelRatio = [18, 11];
|
15055
|
+
break;
|
15056
|
+
case 11:
|
15057
|
+
pixelRatio = [15, 11];
|
15058
|
+
break;
|
15059
|
+
case 12:
|
15060
|
+
pixelRatio = [64, 33];
|
15061
|
+
break;
|
15062
|
+
case 13:
|
15063
|
+
pixelRatio = [160, 99];
|
15064
|
+
break;
|
15065
|
+
case 14:
|
15066
|
+
pixelRatio = [4, 3];
|
15067
|
+
break;
|
15068
|
+
case 15:
|
15069
|
+
pixelRatio = [3, 2];
|
15070
|
+
break;
|
15071
|
+
case 16:
|
15072
|
+
pixelRatio = [2, 1];
|
15073
|
+
break;
|
15074
|
+
case 255:
|
15075
|
+
{
|
15076
|
+
pixelRatio = [readUByte() << 8 | readUByte(), readUByte() << 8 | readUByte()];
|
15077
|
+
break;
|
15078
|
+
}
|
15079
|
+
}
|
14550
15080
|
}
|
14551
15081
|
}
|
14552
|
-
|
14553
|
-
|
15082
|
+
return {
|
15083
|
+
width: Math.ceil((picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2),
|
15084
|
+
height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - (frameMbsOnlyFlag ? 2 : 4) * (frameCropTopOffset + frameCropBottomOffset),
|
15085
|
+
pixelRatio: pixelRatio
|
15086
|
+
};
|
14554
15087
|
}
|
14555
15088
|
}
|
14556
15089
|
|
@@ -14568,7 +15101,7 @@ class SampleAesDecrypter {
|
|
14568
15101
|
});
|
14569
15102
|
}
|
14570
15103
|
decryptBuffer(encryptedData) {
|
14571
|
-
return this.decrypter.decrypt(encryptedData, this.keyData.key.buffer, this.keyData.iv.buffer);
|
15104
|
+
return this.decrypter.decrypt(encryptedData, this.keyData.key.buffer, this.keyData.iv.buffer, DecrypterAesMode.cbc);
|
14572
15105
|
}
|
14573
15106
|
|
14574
15107
|
// AAC - encrypt all full 16 bytes blocks starting from offset 16
|
@@ -14682,7 +15215,7 @@ class TSDemuxer {
|
|
14682
15215
|
this.observer = observer;
|
14683
15216
|
this.config = config;
|
14684
15217
|
this.typeSupported = typeSupported;
|
14685
|
-
this.videoParser =
|
15218
|
+
this.videoParser = null;
|
14686
15219
|
}
|
14687
15220
|
static probe(data) {
|
14688
15221
|
const syncOffset = TSDemuxer.syncOffset(data);
|
@@ -14847,7 +15380,16 @@ class TSDemuxer {
|
|
14847
15380
|
case videoPid:
|
14848
15381
|
if (stt) {
|
14849
15382
|
if (videoData && (pes = parsePES(videoData))) {
|
14850
|
-
this.videoParser
|
15383
|
+
if (this.videoParser === null) {
|
15384
|
+
switch (videoTrack.segmentCodec) {
|
15385
|
+
case 'avc':
|
15386
|
+
this.videoParser = new AvcVideoParser();
|
15387
|
+
break;
|
15388
|
+
}
|
15389
|
+
}
|
15390
|
+
if (this.videoParser !== null) {
|
15391
|
+
this.videoParser.parsePES(videoTrack, textTrack, pes, false, this._duration);
|
15392
|
+
}
|
14851
15393
|
}
|
14852
15394
|
videoData = {
|
14853
15395
|
data: [],
|
@@ -15009,8 +15551,17 @@ class TSDemuxer {
|
|
15009
15551
|
// try to parse last PES packets
|
15010
15552
|
let pes;
|
15011
15553
|
if (videoData && (pes = parsePES(videoData))) {
|
15012
|
-
this.videoParser
|
15013
|
-
|
15554
|
+
if (this.videoParser === null) {
|
15555
|
+
switch (videoTrack.segmentCodec) {
|
15556
|
+
case 'avc':
|
15557
|
+
this.videoParser = new AvcVideoParser();
|
15558
|
+
break;
|
15559
|
+
}
|
15560
|
+
}
|
15561
|
+
if (this.videoParser !== null) {
|
15562
|
+
this.videoParser.parsePES(videoTrack, textTrack, pes, true, this._duration);
|
15563
|
+
videoTrack.pesData = null;
|
15564
|
+
}
|
15014
15565
|
} else {
|
15015
15566
|
// either avcData null or PES truncated, keep it for next frag parsing
|
15016
15567
|
videoTrack.pesData = videoData;
|
@@ -15312,7 +15863,10 @@ function parsePMT(data, offset, typeSupported, isSampleAes) {
 case 0x87:
 throw new Error('Unsupported EC-3 in M2TS found');
 case 0x24:
-
+// ITU-T Rec. H.265 and ISO/IEC 23008-2 (HEVC)
+{
+throw new Error('Unsupported HEVC in M2TS found');
+}
 }
 // move to the next table entry
 // skip past the elementary stream descriptors, if present

@@ -15455,11 +16009,11 @@ class MP3Demuxer extends BaseAudioDemuxer {
 // Look for MPEG header | 1111 1111 | 111X XYZX | where X can be either 0 or 1 and Y or Z should be 1
 // Layer bits (position 14 and 15) in header should be always different from 0 (Layer I or Layer II or Layer III)
 // More info http://www.mp3-tech.org/programmer/frame_header.html
-const id3Data =
+const id3Data = getId3Data(data, 0);
 let offset = (id3Data == null ? void 0 : id3Data.length) || 0;

 // Check for ac-3|ec-3 sync bytes and return false if present
-if (id3Data && data[offset] === 0x0b && data[offset + 1] === 0x77 &&
+if (id3Data && data[offset] === 0x0b && data[offset + 1] === 0x77 && getId3Timestamp(id3Data) !== undefined &&
 // check the bsid to confirm ac-3 or ec-3 (not mp3)
 getAudioBSID(data, offset) <= 16) {
 return false;

@@ -15534,6 +16088,8 @@ class MP4 {
 avc1: [],
 // codingname
 avcC: [],
+hvc1: [],
+hvcC: [],
 btrt: [],
 dinf: [],
 dref: [],

@@ -15958,8 +16514,10 @@ class MP4 {
 return MP4.box(MP4.types.stsd, MP4.STSD, MP4.ac3(track));
 }
 return MP4.box(MP4.types.stsd, MP4.STSD, MP4.mp4a(track));
-} else {
+} else if (track.segmentCodec === 'avc') {
 return MP4.box(MP4.types.stsd, MP4.STSD, MP4.avc1(track));
+} else {
+return MP4.box(MP4.types.stsd, MP4.STSD, MP4.hvc1(track));
 }
 }
 static tkhd(track) {

@@ -16097,6 +16655,84 @@ class MP4 {
 const result = appendUint8Array(MP4.FTYP, movie);
 return result;
 }
+static hvc1(track) {
+const ps = track.params;
+const units = [track.vps, track.sps, track.pps];
+const NALuLengthSize = 4;
+const config = new Uint8Array([0x01, ps.general_profile_space << 6 | (ps.general_tier_flag ? 32 : 0) | ps.general_profile_idc, ps.general_profile_compatibility_flags[0], ps.general_profile_compatibility_flags[1], ps.general_profile_compatibility_flags[2], ps.general_profile_compatibility_flags[3], ps.general_constraint_indicator_flags[0], ps.general_constraint_indicator_flags[1], ps.general_constraint_indicator_flags[2], ps.general_constraint_indicator_flags[3], ps.general_constraint_indicator_flags[4], ps.general_constraint_indicator_flags[5], ps.general_level_idc, 240 | ps.min_spatial_segmentation_idc >> 8, 255 & ps.min_spatial_segmentation_idc, 252 | ps.parallelismType, 252 | ps.chroma_format_idc, 248 | ps.bit_depth_luma_minus8, 248 | ps.bit_depth_chroma_minus8, 0x00, parseInt(ps.frame_rate.fps), NALuLengthSize - 1 | ps.temporal_id_nested << 2 | ps.num_temporal_layers << 3 | (ps.frame_rate.fixed ? 64 : 0), units.length]);
+
+// compute hvcC size in bytes
+let length = config.length;
+for (let i = 0; i < units.length; i += 1) {
+length += 3;
+for (let j = 0; j < units[i].length; j += 1) {
+length += 2 + units[i][j].length;
+}
+}
+const hvcC = new Uint8Array(length);
+hvcC.set(config, 0);
+length = config.length;
+// append parameter set units: one vps, one or more sps and pps
+const iMax = units.length - 1;
+for (let i = 0; i < units.length; i += 1) {
+hvcC.set(new Uint8Array([32 + i | (i === iMax ? 128 : 0), 0x00, units[i].length]), length);
+length += 3;
+for (let j = 0; j < units[i].length; j += 1) {
+hvcC.set(new Uint8Array([units[i][j].length >> 8, units[i][j].length & 255]), length);
+length += 2;
+hvcC.set(units[i][j], length);
+length += units[i][j].length;
+}
+}
+const hvcc = MP4.box(MP4.types.hvcC, hvcC);
+const width = track.width;
+const height = track.height;
+const hSpacing = track.pixelRatio[0];
+const vSpacing = track.pixelRatio[1];
+return MP4.box(MP4.types.hvc1, new Uint8Array([0x00, 0x00, 0x00,
+// reserved
+0x00, 0x00, 0x00,
+// reserved
+0x00, 0x01,
+// data_reference_index
+0x00, 0x00,
+// pre_defined
+0x00, 0x00,
+// reserved
+0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+// pre_defined
+width >> 8 & 0xff, width & 0xff,
+// width
+height >> 8 & 0xff, height & 0xff,
+// height
+0x00, 0x48, 0x00, 0x00,
+// horizresolution
+0x00, 0x48, 0x00, 0x00,
+// vertresolution
+0x00, 0x00, 0x00, 0x00,
+// reserved
+0x00, 0x01,
+// frame_count
+0x12, 0x64, 0x61, 0x69, 0x6c,
+// dailymotion/hls.js
+0x79, 0x6d, 0x6f, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x68, 0x6c, 0x73, 0x2e, 0x6a, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+// compressorname
+0x00, 0x18,
+// depth = 24
+0x11, 0x11]),
+// pre_defined = -1
+hvcc, MP4.box(MP4.types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80,
+// bufferSizeDB
+0x00, 0x2d, 0xc6, 0xc0,
+// maxBitrate
+0x00, 0x2d, 0xc6, 0xc0])),
+// avgBitrate
+MP4.box(MP4.types.pasp, new Uint8Array([hSpacing >> 24,
+// hSpacing
+hSpacing >> 16 & 0xff, hSpacing >> 8 & 0xff, hSpacing & 0xff, vSpacing >> 24,
+// vSpacing
+vSpacing >> 16 & 0xff, vSpacing >> 8 & 0xff, vSpacing & 0xff])));
+}
 }
 MP4.types = void 0;
 MP4.HDLR_TYPES = void 0;
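`MP4.hvc1()` above serializes an HEVC decoder configuration record (`hvcC`) followed by the visual sample entry, `btrt` and `pasp` boxes. The sizing of the parameter-set section can be read straight off its loops; a small self-contained restatement:

```js
// Restates the hvcC sizing loop from MP4.hvc1(): each parameter-set array
// (vps, sps, pps) adds a 3-byte array header, and each NAL unit inside it
// adds a 2-byte length field plus the unit payload.
function hvcCPayloadSize(configLength, units) {
  let length = configLength;
  for (const nalus of units) {
    length += 3;
    for (const nalu of nalus) {
      length += 2 + nalu.length;
    }
  }
  return length;
}

// e.g. hvcCPayloadSize(config.length, [[vps], [sps], [pps]]) with Uint8Array parameter sets
```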
@@ -16472,9 +17108,9 @@ class MP4Remuxer {
 const foundOverlap = delta < -1;
 if (foundHole || foundOverlap) {
 if (foundHole) {
-logger.warn(
+logger.warn(`${(track.segmentCodec || '').toUpperCase()}: ${toMsFromMpegTsClock(delta, true)} ms (${delta}dts) hole between fragments detected at ${timeOffset.toFixed(3)}`);
 } else {
-logger.warn(
+logger.warn(`${(track.segmentCodec || '').toUpperCase()}: ${toMsFromMpegTsClock(-delta, true)} ms (${delta}dts) overlapping between fragments detected at ${timeOffset.toFixed(3)}`);
 }
 if (!foundOverlap || nextAvcDts >= inputSamples[0].pts || chromeVersion) {
 firstDTS = nextAvcDts;

@@ -16483,12 +17119,24 @@ class MP4Remuxer {
 inputSamples[0].dts = firstDTS;
 inputSamples[0].pts = firstPTS;
 } else {
+let isPTSOrderRetained = true;
 for (let i = 0; i < inputSamples.length; i++) {
-if (inputSamples[i].dts > firstPTS) {
+if (inputSamples[i].dts > firstPTS && isPTSOrderRetained) {
 break;
 }
+const prevPTS = inputSamples[i].pts;
 inputSamples[i].dts -= delta;
 inputSamples[i].pts -= delta;
+
+// check to see if this sample's PTS order has changed
+// relative to the next one
+if (i < inputSamples.length - 1) {
+const nextSamplePTS = inputSamples[i + 1].pts;
+const currentSamplePTS = inputSamples[i].pts;
+const currentOrder = nextSamplePTS <= currentSamplePTS;
+const prevOrder = nextSamplePTS <= prevPTS;
+isPTSOrderRetained = currentOrder == prevOrder;
+}
 }
 }
 logger.log(`Video: Initial PTS/DTS adjusted: ${toMsFromMpegTsClock(firstPTS, true)}/${toMsFromMpegTsClock(firstDTS, true)}, delta: ${toMsFromMpegTsClock(delta, true)} ms`);
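The overlap correction above now only breaks out of the adjustment loop while the previous shift kept each sample's presentation order relative to the next one; if subtracting `delta` flipped that order, the following sample is adjusted as well. A worked illustration of the order check on made-up PTS values:

```js
// Made-up PTS values illustrating the isPTSOrderRetained check above.
const delta = 3;
const samples = [{ pts: 10 }, { pts: 12 }, { pts: 11 }];

const prevPTS = samples[1].pts;            // 12 before the shift
samples[1].pts -= delta;                   // 9 after the shift
const nextSamplePTS = samples[2].pts;      // 11 (not yet shifted)
const currentOrder = nextSamplePTS <= samples[1].pts;  // 11 <= 9  -> false
const prevOrder = nextSamplePTS <= prevPTS;            // 11 <= 12 -> true
const isPTSOrderRetained = currentOrder === prevOrder; // false

// Because the order flipped, the loop does not break early on the next
// iteration: samples[2] is shifted by delta as well (11 -> 8), preserving
// the original relative presentation order.
```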
@@ -16636,7 +17284,7 @@ class MP4Remuxer {
 }
 }
 }
-// next AVC sample DTS should be equal to last sample DTS + last sample duration (in PES timescale)
+// next AVC/HEVC sample DTS should be equal to last sample DTS + last sample duration (in PES timescale)
 mp4SampleDuration = stretchedLastFrame || !mp4SampleDuration ? averageSampleDuration : mp4SampleDuration;
 this.nextAvcDts = nextAvcDts = lastDTS + mp4SampleDuration;
 this.videoSampleDuration = mp4SampleDuration;

@@ -16769,7 +17417,7 @@ class MP4Remuxer {
 logger.warn(`[mp4-remuxer]: Injecting ${missing} audio frame @ ${(nextPts / inputTimeScale).toFixed(3)}s due to ${Math.round(1000 * delta / inputTimeScale)} ms gap.`);
 for (let j = 0; j < missing; j++) {
 const newStamp = Math.max(nextPts, 0);
-let fillFrame = AAC.getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
+let fillFrame = AAC.getSilentFrame(track.parsedCodec || track.manifestCodec || track.codec, track.channelCount);
 if (!fillFrame) {
 logger.log('[mp4-remuxer]: Unable to get silent frame for given audio codec; duplicating last frame instead.');
 fillFrame = sample.unit.subarray();

@@ -16897,7 +17545,7 @@ class MP4Remuxer {
 // samples count of this segment's duration
 const nbSamples = Math.ceil((endDTS - startDTS) / frameDuration);
 // silent frame
-const silentFrame = AAC.getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
+const silentFrame = AAC.getSilentFrame(track.parsedCodec || track.manifestCodec || track.codec, track.channelCount);
 logger.warn('[mp4-remuxer]: remux empty Audio');
 // Can't remux if we can't generate a silent frame...
 if (!silentFrame) {

@@ -17288,13 +17936,15 @@ class Transmuxer {
 initSegmentData
 } = transmuxConfig;
 const keyData = getEncryptionType(uintData, decryptdata);
-if (keyData && keyData.method
+if (keyData && isFullSegmentEncryption(keyData.method)) {
 const decrypter = this.getDecrypter();
+const aesMode = getAesModeFromFullSegmentMethod(keyData.method);
+
 // Software decryption is synchronous; webCrypto is not
 if (decrypter.isSync()) {
 // Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
 // data is handled in the flush() call
-let decryptedData = decrypter.softwareDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer);
+let decryptedData = decrypter.softwareDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer, aesMode);
 // For Low-Latency HLS Parts, decrypt in place, since part parsing is expected on push progress
 const loadingParts = chunkMeta.part > -1;
 if (loadingParts) {

@@ -17306,7 +17956,7 @@ class Transmuxer {
 }
 uintData = new Uint8Array(decryptedData);
 } else {
-this.decryptionPromise = decrypter.webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer).then(decryptedData => {
+this.decryptionPromise = decrypter.webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer, aesMode).then(decryptedData => {
 // Calling push here is important; if flush() is called while this is still resolving, this ensures that
 // the decrypted data has been transmuxed
 const result = this.push(decryptedData, null, chunkMeta);
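Full-segment decryption now threads an explicit AES mode through `softwareDecrypt`/`webCryptoDecrypt`, resolved from the key method by `getAesModeFromFullSegmentMethod` (SAMPLE-AES above always uses `DecrypterAesMode.cbc`). A hedged sketch of the kind of mapping those helpers imply; the method strings below are assumptions for illustration, not taken from this diff:

```js
// Assumed mapping, for illustration only: CBC for classic full-segment
// AES keys, CTR where a CTR-style method is signalled.
const DecrypterAesMode = { cbc: 0, ctr: 1 };

function getAesModeFromFullSegmentMethod(method) {
  switch (method) {
    case 'AES-128':
    case 'AES-256':
      return DecrypterAesMode.cbc;
    case 'AES-256-CTR':
      return DecrypterAesMode.ctr;
    default:
      throw new Error(`invalid full segment method ${method}`);
  }
}
```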
@@ -17960,14 +18610,7 @@ class TransmuxerInterface {
 this.observer = new EventEmitter();
 this.observer.on(Events.FRAG_DECRYPTED, forwardMessage);
 this.observer.on(Events.ERROR, forwardMessage);
-const
-isTypeSupported: () => false
-};
-const m2tsTypeSupported = {
-mpeg: MediaSource.isTypeSupported('audio/mpeg'),
-mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
-ac3: false
-};
+const m2tsTypeSupported = getM2TSSupportedAudioTypes(config.preferManagedMediaSource);

 // navigator.vendor is not always available in Web Worker
 // refer to https://developer.mozilla.org/en-US/docs/Web/API/WorkerGlobalScope/navigator

@@ -18232,8 +18875,9 @@ const STALL_MINIMUM_DURATION_MS = 250;
 const MAX_START_GAP_JUMP = 2.0;
 const SKIP_BUFFER_HOLE_STEP_SECONDS = 0.1;
 const SKIP_BUFFER_RANGE_START = 0.05;
-class GapController {
+class GapController extends Logger {
 constructor(config, media, fragmentTracker, hls) {
+super('gap-controller', hls.logger);
 this.config = void 0;
 this.media = null;
 this.fragmentTracker = void 0;

@@ -18243,6 +18887,7 @@ class GapController {
 this.stalled = null;
 this.moved = false;
 this.seeking = false;
+this.ended = 0;
 this.config = config;
 this.media = media;
 this.fragmentTracker = fragmentTracker;

@@ -18260,7 +18905,7 @@
 *
 * @param lastCurrentTime - Previously read playhead position
 */
-poll(lastCurrentTime, activeFrag) {
+poll(lastCurrentTime, activeFrag, levelDetails, state) {
 const {
 config,
 media,

@@ -18279,6 +18924,7 @@

 // The playhead is moving, no-op
 if (currentTime !== lastCurrentTime) {
+this.ended = 0;
 this.moved = true;
 if (!seeking) {
 this.nudgeRetry = 0;

@@ -18287,7 +18933,7 @@
 // The playhead is now moving, but was previously stalled
 if (this.stallReported) {
 const _stalledDuration = self.performance.now() - stalled;
-
+this.warn(`playback not stuck anymore @${currentTime}, after ${Math.round(_stalledDuration)}ms`);
 this.stallReported = false;
 }
 this.stalled = null;

@@ -18323,7 +18969,6 @@
 // Skip start gaps if we haven't played, but the last poll detected the start of a stall
 // The addition poll gives the browser a chance to jump the gap for us
 if (!this.moved && this.stalled !== null) {
-var _level$details;
 // There is no playable buffer (seeked, waiting for buffer)
 const isBuffered = bufferInfo.len > 0;
 if (!isBuffered && !nextStart) {

@@ -18335,9 +18980,8 @@
 // When joining a live stream with audio tracks, account for live playlist window sliding by allowing
 // a larger jump over start gaps caused by the audio-stream-controller buffering a start fragment
 // that begins over 1 target duration after the video start position.
-const
-const
-const maxStartGapJump = isLive ? level.details.targetduration * 2 : MAX_START_GAP_JUMP;
+const isLive = !!(levelDetails != null && levelDetails.live);
+const maxStartGapJump = isLive ? levelDetails.targetduration * 2 : MAX_START_GAP_JUMP;
 const partialOrGap = this.fragmentTracker.getPartialFragment(currentTime);
 if (startJump > 0 && (startJump <= maxStartGapJump || partialOrGap)) {
 if (!media.paused) {

@@ -18355,6 +18999,17 @@
 }
 const stalledDuration = tnow - stalled;
 if (!seeking && stalledDuration >= STALL_MINIMUM_DURATION_MS) {
+// Dispatch MEDIA_ENDED when media.ended/ended event is not signalled at end of stream
+if (state === State.ENDED && !(levelDetails != null && levelDetails.live) && Math.abs(currentTime - ((levelDetails == null ? void 0 : levelDetails.edge) || 0)) < 1) {
+if (stalledDuration < 1000 || this.ended) {
+return;
+}
+this.ended = currentTime;
+this.hls.trigger(Events.MEDIA_ENDED, {
+stalled: true
+});
+return;
+}
 // Report stalling after trying to fix
 this._reportStall(bufferInfo);
 if (!this.media) {
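Because the gap-controller can now infer end-of-stream itself, applications receive `MEDIA_ENDED` with `stalled: true` when playback stalls within one second of the end of a VOD playlist without the media element firing `ended`. A minimal usage sketch, assuming an existing `hls` instance from this build:

```js
// React to the MEDIA_ENDED event that GapController.poll() can now trigger.
hls.on(Hls.Events.MEDIA_ENDED, (event, data) => {
  if (data.stalled) {
    // end of stream was inferred from a stall at the edge, not from the
    // media element's own 'ended' event
    console.log('Playback ended (detected via stall at the VOD edge)');
  }
});
```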
@@ -18398,7 +19053,7 @@
 // needs to cross some sort of threshold covering all source-buffers content
 // to start playing properly.
 if ((bufferInfo.len > config.maxBufferHole || bufferInfo.nextStart && bufferInfo.nextStart - currentTime < config.maxBufferHole) && stalledDurationMs > config.highBufferWatchdogPeriod * 1000) {
-
+this.warn('Trying to nudge playhead over buffer-hole');
 // Try to nudge currentTime over a buffer hole if we've been stalling for the configured amount of seconds
 // We only try to jump the hole if it's under the configured size
 // Reset stalled so to rearm watchdog timer

@@ -18422,7 +19077,7 @@
 // Report stalled error once
 this.stallReported = true;
 const error = new Error(`Playback stalling at @${media.currentTime} due to low buffer (${JSON.stringify(bufferInfo)})`);
-
+this.warn(error.message);
 hls.trigger(Events.ERROR, {
 type: ErrorTypes.MEDIA_ERROR,
 details: ErrorDetails.BUFFER_STALLED_ERROR,

@@ -18490,7 +19145,7 @@
 }
 }
 const targetTime = Math.max(startTime + SKIP_BUFFER_RANGE_START, currentTime + SKIP_BUFFER_HOLE_STEP_SECONDS);
-
+this.warn(`skipping hole, adjusting currentTime from ${currentTime} to ${targetTime}`);
 this.moved = true;
 this.stalled = null;
 media.currentTime = targetTime;

@@ -18531,7 +19186,7 @@
 const targetTime = currentTime + (nudgeRetry + 1) * config.nudgeOffset;
 // playback stalled in buffered area ... let's nudge currentTime to try to overcome this
 const error = new Error(`Nudging 'currentTime' from ${currentTime} to ${targetTime}`);
-
+this.warn(error.message);
 media.currentTime = targetTime;
 hls.trigger(Events.ERROR, {
 type: ErrorTypes.MEDIA_ERROR,

@@ -18541,7 +19196,7 @@
 });
 } else {
 const error = new Error(`Playhead still not moving while enough data buffered @${currentTime} after ${config.nudgeMaxRetry} nudges`);
-
+this.error(error.message);
 hls.trigger(Events.ERROR, {
 type: ErrorTypes.MEDIA_ERROR,
 details: ErrorDetails.BUFFER_STALLED_ERROR,

@@ -18556,7 +19211,7 @@ const TICK_INTERVAL = 100; // how often to tick in ms

 class StreamController extends BaseStreamController {
 constructor(hls, fragmentTracker, keyLoader) {
-super(hls, fragmentTracker, keyLoader, '
+super(hls, fragmentTracker, keyLoader, 'stream-controller', PlaylistLevelType.MAIN);
 this.audioCodecSwap = false;
 this.gapController = null;
 this.level = -1;

@@ -18564,27 +19219,43 @@ class StreamController extends BaseStreamController {
 this.altAudio = false;
 this.audioOnly = false;
 this.fragPlaying = null;
-this.onvplaying = null;
-this.onvseeked = null;
 this.fragLastKbps = 0;
 this.couldBacktrack = false;
 this.backtrackFragment = null;
 this.audioCodecSwitch = false;
 this.videoBuffer = null;
-this.
+this.onMediaPlaying = () => {
+// tick to speed up FRAG_CHANGED triggering
+this.tick();
+};
+this.onMediaSeeked = () => {
+const media = this.media;
+const currentTime = media ? media.currentTime : null;
+if (isFiniteNumber(currentTime)) {
+this.log(`Media seeked to ${currentTime.toFixed(3)}`);
+}
+
+// If seeked was issued before buffer was appended do not tick immediately
+const bufferInfo = this.getMainFwdBufferInfo();
+if (bufferInfo === null || bufferInfo.len === 0) {
+this.warn(`Main forward buffer length on "seeked" event ${bufferInfo ? bufferInfo.len : 'empty'})`);
+return;
+}
+
+// tick to speed up FRAG_CHANGED triggering
+this.tick();
+};
+this.registerListeners();
 }
-
+registerListeners() {
+super.registerListeners();
 const {
 hls
 } = this;
-hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
-hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
-hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
 hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
 hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this);
 hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
 hls.on(Events.FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
-hls.on(Events.ERROR, this.onError, this);
 hls.on(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
 hls.on(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
 hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this);

@@ -18592,17 +19263,14 @@ class StreamController extends BaseStreamController {
 hls.on(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
 hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
 }
-
+unregisterListeners() {
+super.unregisterListeners();
 const {
 hls
 } = this;
-hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
-hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
-hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
 hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
 hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
 hls.off(Events.FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
-hls.off(Events.ERROR, this.onError, this);
 hls.off(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
 hls.off(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
 hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this);

@@ -18611,7 +19279,9 @@ class StreamController extends BaseStreamController {
 hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
 }
 onHandlerDestroying() {
-
+// @ts-ignore
+this.onMediaPlaying = this.onMediaSeeked = null;
+this.unregisterListeners();
 super.onHandlerDestroying();
 }
 startLoad(startPosition) {

@@ -18709,6 +19379,9 @@ class StreamController extends BaseStreamController {
 this.checkFragmentChanged();
 }
 doTickIdle() {
+if (!this.buffering) {
+return;
+}
 const {
 hls,
 levelLastLoaded,

@@ -18936,22 +19609,19 @@ class StreamController extends BaseStreamController {
 onMediaAttached(event, data) {
 super.onMediaAttached(event, data);
 const media = data.media;
-
-
-media.addEventListener('playing', this.onvplaying);
-media.addEventListener('seeked', this.onvseeked);
+media.addEventListener('playing', this.onMediaPlaying);
+media.addEventListener('seeked', this.onMediaSeeked);
 this.gapController = new GapController(this.config, media, this.fragmentTracker, this.hls);
 }
 onMediaDetaching() {
 const {
 media
 } = this;
-if (media
-media.removeEventListener('playing', this.
-media.removeEventListener('seeked', this.
-this.onvplaying = this.onvseeked = null;
-this.videoBuffer = null;
+if (media) {
+media.removeEventListener('playing', this.onMediaPlaying);
+media.removeEventListener('seeked', this.onMediaSeeked);
 }
+this.videoBuffer = null;
 this.fragPlaying = null;
 if (this.gapController) {
 this.gapController.destroy();

@@ -18959,27 +19629,6 @@ class StreamController extends BaseStreamController {
 }
 super.onMediaDetaching();
 }
-onMediaPlaying() {
-// tick to speed up FRAG_CHANGED triggering
-this.tick();
-}
-onMediaSeeked() {
-const media = this.media;
-const currentTime = media ? media.currentTime : null;
-if (isFiniteNumber(currentTime)) {
-this.log(`Media seeked to ${currentTime.toFixed(3)}`);
-}
-
-// If seeked was issued before buffer was appended do not tick immediately
-const bufferInfo = this.getMainFwdBufferInfo();
-if (bufferInfo === null || bufferInfo.len === 0) {
-this.warn(`Main forward buffer length on "seeked" event ${bufferInfo ? bufferInfo.len : 'empty'})`);
-return;
-}
-
-// tick to speed up FRAG_CHANGED triggering
-this.tick();
-}
 onManifestLoading() {
 // reset buffer on manifest loading
 this.log('Trigger BUFFER_RESET');

@@ -19271,8 +19920,10 @@ class StreamController extends BaseStreamController {
 }
 if (this.loadedmetadata || !BufferHelper.getBuffered(media).length) {
 // Resolve gaps using the main buffer, whose ranges are the intersections of the A/V sourcebuffers
-const
-
+const state = this.state;
+const activeFrag = state !== State.IDLE ? this.fragCurrent : null;
+const levelDetails = this.getLevelDetails();
+gapController.poll(this.lastCurrentTime, activeFrag, levelDetails, state);
 }
 this.lastCurrentTime = media.currentTime;
 }

@@ -19605,6 +20256,17 @@ class StreamController extends BaseStreamController {
 getMainFwdBufferInfo() {
 return this.getFwdBufferInfo(this.mediaBuffer ? this.mediaBuffer : this.media, PlaylistLevelType.MAIN);
 }
+get maxBufferLength() {
+const {
+levels,
+level
+} = this;
+const levelInfo = levels == null ? void 0 : levels[level];
+if (!levelInfo) {
+return this.config.maxBufferLength;
+}
+return this.getMaxBufferLength(levelInfo.maxBitrate);
+}
 backtrack(frag) {
 this.couldBacktrack = true;
 // Causes findFragments to backtrack through fragments to find the keyframe
|
|
19710
20372
|
* Get the video-dev/hls.js package version.
|
19711
20373
|
*/
|
19712
20374
|
static get version() {
|
19713
|
-
return "1.5.
|
20375
|
+
return "1.5.10-0.canary.10320";
|
19714
20376
|
}
|
19715
20377
|
|
19716
20378
|
/**
|
@@ -19773,9 +20435,12 @@ class Hls {
|
|
19773
20435
|
* The configuration object provided on player instantiation.
|
19774
20436
|
*/
|
19775
20437
|
this.userConfig = void 0;
|
20438
|
+
/**
|
20439
|
+
* The logger functions used by this player instance, configured on player instantiation.
|
20440
|
+
*/
|
20441
|
+
this.logger = void 0;
|
19776
20442
|
this.coreComponents = void 0;
|
19777
20443
|
this.networkControllers = void 0;
|
19778
|
-
this.started = false;
|
19779
20444
|
this._emitter = new EventEmitter();
|
19780
20445
|
this._autoLevelCapping = -1;
|
19781
20446
|
this._maxHdcpLevel = null;
|
@@ -19792,11 +20457,11 @@ class Hls {
|
|
19792
20457
|
this._media = null;
|
19793
20458
|
this.url = null;
|
19794
20459
|
this.triggeringException = void 0;
|
19795
|
-
enableLogs(userConfig.debug || false, 'Hls instance');
|
19796
|
-
const config = this.config = mergeConfig(Hls.DefaultConfig, userConfig);
|
20460
|
+
const logger = this.logger = enableLogs(userConfig.debug || false, 'Hls instance');
|
20461
|
+
const config = this.config = mergeConfig(Hls.DefaultConfig, userConfig, logger);
|
19797
20462
|
this.userConfig = userConfig;
|
19798
20463
|
if (config.progressive) {
|
19799
|
-
enableStreamingMode(config);
|
20464
|
+
enableStreamingMode(config, logger);
|
19800
20465
|
}
|
19801
20466
|
|
19802
20467
|
// core controllers and network loaders
|
@@ -19809,17 +20474,17 @@ class Hls {
|
|
19809
20474
|
} = config;
|
19810
20475
|
const errorController = new ConfigErrorController(this);
|
19811
20476
|
const abrController = this.abrController = new ConfigAbrController(this);
|
19812
|
-
|
20477
|
+
// FragmentTracker must be defined before StreamController because the order of event handling is important
|
20478
|
+
const fragmentTracker = new FragmentTracker(this);
|
20479
|
+
const bufferController = this.bufferController = new ConfigBufferController(this, fragmentTracker);
|
19813
20480
|
const capLevelController = this.capLevelController = new ConfigCapLevelController(this);
|
19814
20481
|
const fpsController = new ConfigFpsController(this);
|
19815
20482
|
const playListLoader = new PlaylistLoader(this);
|
19816
20483
|
const id3TrackController = new ID3TrackController(this);
|
19817
20484
|
const ConfigContentSteeringController = config.contentSteeringController;
|
19818
|
-
//
|
20485
|
+
// ContentSteeringController is defined before LevelController to receive Multivariant Playlist events first
|
19819
20486
|
const contentSteering = ConfigContentSteeringController ? new ConfigContentSteeringController(this) : null;
|
19820
20487
|
const levelController = this.levelController = new LevelController(this, contentSteering);
|
19821
|
-
// FragmentTracker must be defined before StreamController because the order of event handling is important
|
19822
|
-
const fragmentTracker = new FragmentTracker(this);
|
19823
20488
|
const keyLoader = new KeyLoader(this.config);
|
19824
20489
|
const streamController = this.streamController = new StreamController(this, fragmentTracker, keyLoader);
|
19825
20490
|
|
@@ -19895,7 +20560,7 @@ class Hls {
|
|
19895
20560
|
try {
|
19896
20561
|
return this.emit(event, event, eventObject);
|
19897
20562
|
} catch (error) {
|
19898
|
-
logger.error('An internal error happened while handling event ' + event + '. Error message: "' + error.message + '". Here is a stacktrace:', error);
|
20563
|
+
this.logger.error('An internal error happened while handling event ' + event + '. Error message: "' + error.message + '". Here is a stacktrace:', error);
|
19899
20564
|
// Prevent recursion in error event handlers that throw #5497
|
19900
20565
|
if (!this.triggeringException) {
|
19901
20566
|
this.triggeringException = true;
|
@@ -19921,7 +20586,7 @@ class Hls {
|
|
19921
20586
|
* Dispose of the instance
|
19922
20587
|
*/
|
19923
20588
|
destroy() {
|
19924
|
-
logger.log('destroy');
|
20589
|
+
this.logger.log('destroy');
|
19925
20590
|
this.trigger(Events.DESTROYING, undefined);
|
19926
20591
|
this.detachMedia();
|
19927
20592
|
this.removeAllListeners();
|
@@ -19942,7 +20607,7 @@ class Hls {
|
|
19942
20607
|
* Attaches Hls.js to a media element
|
19943
20608
|
*/
|
19944
20609
|
attachMedia(media) {
|
19945
|
-
logger.log('attachMedia');
|
20610
|
+
this.logger.log('attachMedia');
|
19946
20611
|
this._media = media;
|
19947
20612
|
this.trigger(Events.MEDIA_ATTACHING, {
|
19948
20613
|
media: media
|
@@ -19953,7 +20618,7 @@ class Hls {
|
|
19953
20618
|
* Detach Hls.js from the media
|
19954
20619
|
*/
|
19955
20620
|
detachMedia() {
|
19956
|
-
logger.log('detachMedia');
|
20621
|
+
this.logger.log('detachMedia');
|
19957
20622
|
this.trigger(Events.MEDIA_DETACHING, undefined);
|
19958
20623
|
this._media = null;
|
19959
20624
|
}
|
@@ -19970,7 +20635,7 @@ class Hls {
|
|
19970
20635
|
});
|
19971
20636
|
this._autoLevelCapping = -1;
|
19972
20637
|
this._maxHdcpLevel = null;
|
19973
|
-
logger.log(`loadSource:${loadingSource}`);
|
20638
|
+
this.logger.log(`loadSource:${loadingSource}`);
|
19974
20639
|
if (media && loadedSource && (loadedSource !== loadingSource || this.bufferController.hasSourceTypes())) {
|
19975
20640
|
this.detachMedia();
|
19976
20641
|
this.attachMedia(media);
|
@@ -19989,8 +20654,7 @@ class Hls {
|
|
19989
20654
|
* Defaults to -1 (None: starts from earliest point)
|
19990
20655
|
*/
|
19991
20656
|
startLoad(startPosition = -1) {
|
19992
|
-
logger.log(`startLoad(${startPosition})`);
|
19993
|
-
this.started = true;
|
20657
|
+
this.logger.log(`startLoad(${startPosition})`);
|
19994
20658
|
this.networkControllers.forEach(controller => {
|
19995
20659
|
controller.startLoad(startPosition);
|
19996
20660
|
});
|
@@ -20000,34 +20664,31 @@ class Hls {
|
|
20000
20664
|
* Stop loading of any stream data.
|
20001
20665
|
*/
|
20002
20666
|
stopLoad() {
|
20003
|
-
logger.log('stopLoad');
|
20004
|
-
this.started = false;
|
20667
|
+
this.logger.log('stopLoad');
|
20005
20668
|
this.networkControllers.forEach(controller => {
|
20006
20669
|
controller.stopLoad();
|
20007
20670
|
});
|
20008
20671
|
}
|
20009
20672
|
|
20010
20673
|
/**
|
20011
|
-
* Resumes stream controller segment loading
|
20674
|
+
* Resumes stream controller segment loading after `pauseBuffering` has been called.
|
20012
20675
|
*/
|
20013
20676
|
resumeBuffering() {
|
20014
|
-
|
20015
|
-
|
20016
|
-
|
20017
|
-
|
20018
|
-
|
20019
|
-
});
|
20020
|
-
}
|
20677
|
+
this.networkControllers.forEach(controller => {
|
20678
|
+
if (controller.resumeBuffering) {
|
20679
|
+
controller.resumeBuffering();
|
20680
|
+
}
|
20681
|
+
});
|
20021
20682
|
}
|
20022
20683
|
|
20023
20684
|
/**
|
20024
|
-
*
|
20685
|
+
* Prevents stream controller from loading new segments until `resumeBuffering` is called.
|
20025
20686
|
* This allows for media buffering to be paused without interupting playlist loading.
|
20026
20687
|
*/
|
20027
20688
|
pauseBuffering() {
|
20028
20689
|
this.networkControllers.forEach(controller => {
|
20029
|
-
if (
|
20030
|
-
controller.
|
20690
|
+
if (controller.pauseBuffering) {
|
20691
|
+
controller.pauseBuffering();
|
20031
20692
|
}
|
20032
20693
|
});
|
20033
20694
|
}
|
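The rewritten doc comments spell out the buffering contract: `pauseBuffering()` stops the stream controllers from loading new segments while playlist loading continues, and `resumeBuffering()` undoes it. A usage sketch, assuming an `hls` instance attached to a media element:

```js
// Stop fetching media segments (playlists keep refreshing), for example while
// the player is off-screen, then resume half a minute later.
hls.pauseBuffering();

setTimeout(() => {
  hls.resumeBuffering();
}, 30000);
```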
@@ -20036,7 +20697,7 @@ class Hls {
 * Swap through possible audio codecs in the stream (for example to switch from stereo to 5.1)
 */
 swapAudioCodec() {
-logger.log('swapAudioCodec');
+this.logger.log('swapAudioCodec');
 this.streamController.swapAudioCodec();
 }

@@ -20047,7 +20708,7 @@ class Hls {
 * Automatic recovery of media-errors by this process is configurable.
 */
 recoverMediaError() {
-logger.log('recoverMediaError');
+this.logger.log('recoverMediaError');
 const media = this._media;
 this.detachMedia();
 if (media) {

@@ -20077,7 +20738,7 @@ class Hls {
 * Set quality level index immediately. This will flush the current buffer to replace the quality asap. That means playback will interrupt at least shortly to re-buffer and re-sync eventually. Set to -1 for automatic level selection.
 */
 set currentLevel(newLevel) {
-logger.log(`set currentLevel:${newLevel}`);
+this.logger.log(`set currentLevel:${newLevel}`);
 this.levelController.manualLevel = newLevel;
 this.streamController.immediateLevelSwitch();
 }

@@ -20096,7 +20757,7 @@ class Hls {
 * @param newLevel - Pass -1 for automatic level selection
 */
 set nextLevel(newLevel) {
-logger.log(`set nextLevel:${newLevel}`);
+this.logger.log(`set nextLevel:${newLevel}`);
 this.levelController.manualLevel = newLevel;
 this.streamController.nextLevelSwitch();
 }

@@ -20115,7 +20776,7 @@ class Hls {
 * @param newLevel - Pass -1 for automatic level selection
 */
 set loadLevel(newLevel) {
-logger.log(`set loadLevel:${newLevel}`);
+this.logger.log(`set loadLevel:${newLevel}`);
 this.levelController.manualLevel = newLevel;
 }

@@ -20146,7 +20807,7 @@ class Hls {
 * Sets "first-level", see getter.
 */
 set firstLevel(newLevel) {
-logger.log(`set firstLevel:${newLevel}`);
+this.logger.log(`set firstLevel:${newLevel}`);
 this.levelController.firstLevel = newLevel;
 }

@@ -20171,7 +20832,7 @@ class Hls {
 * (determined from download of first segment)
 */
 set startLevel(newLevel) {
-logger.log(`set startLevel:${newLevel}`);
+this.logger.log(`set startLevel:${newLevel}`);
 // if not in automatic start level detection, ensure startLevel is greater than minAutoLevel
 if (newLevel !== -1) {
 newLevel = Math.max(newLevel, this.minAutoLevel);

@@ -20246,7 +20907,7 @@ class Hls {
 */
 set autoLevelCapping(newLevel) {
 if (this._autoLevelCapping !== newLevel) {
-logger.log(`set autoLevelCapping:${newLevel}`);
+this.logger.log(`set autoLevelCapping:${newLevel}`);
 this._autoLevelCapping = newLevel;
 this.levelController.checkMaxAutoUpdated();
 }

@@ -20351,6 +21012,9 @@ class Hls {
 get mainForwardBufferInfo() {
 return this.streamController.getMainFwdBufferInfo();
 }
+get maxBufferLength() {
+return this.streamController.maxBufferLength;
+}

 /**
 * Find and select the best matching audio track, making a level switch when a Group change is necessary.

@@ -20518,12 +21182,22 @@ class Hls {
 get forceStartLoad() {
 return this.streamController.forceStartLoad;
 }
+
+/**
+* ContentSteering pathwayPriority getter/setter
+*/
+get pathwayPriority() {
+return this.levelController.pathwayPriority;
+}
+set pathwayPriority(pathwayPriority) {
+this.levelController.pathwayPriority = pathwayPriority;
+}
 }
 Hls.defaultConfig = void 0;

-var KeySystemFormats =
-var KeySystems =
-var SubtitleStreamController =
-var TimelineController =
-export { AbrController, AttrList,
+var KeySystemFormats = emptyEs.KeySystemFormats;
+var KeySystems = emptyEs.KeySystems;
+var SubtitleStreamController = emptyEs.SubtitleStreamController;
+var TimelineController = emptyEs.TimelineController;
+export { AbrController, AttrList, HevcVideoParser as AudioStreamController, HevcVideoParser as AudioTrackController, BasePlaylistController, BaseSegment, BaseStreamController, BufferController, HevcVideoParser as CMCDController, CapLevelController, ChunkMetadata, ContentSteeringController, DateRange, HevcVideoParser as EMEController, ErrorActionFlags, ErrorController, ErrorDetails, ErrorTypes, Events, FPSController, Fragment, Hls, HlsSkip, HlsUrlParameters, KeySystemFormats, KeySystems, LevelDetails, LevelKey, LoadStats, MetadataSchema, NetworkErrorAction, Part, PlaylistLevelType, SubtitleStreamController, HevcVideoParser as SubtitleTrackController, TimelineController, Hls as default, getMediaSource, isMSESupported, isSupported };
 //# sourceMappingURL=hls.light.mjs.map
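The new `pathwayPriority` accessor pair forwards to the level controller so the content-steering pathway order can be inspected or overridden at runtime. A usage sketch; the pathway IDs are hypothetical placeholders, not values from this diff:

```js
// Read the current content-steering pathway order, then prefer a backup pathway.
// 'CDN-A' and 'CDN-B' are hypothetical pathway IDs from a multivariant playlist.
console.log(hls.pathwayPriority);
hls.pathwayPriority = ['CDN-B', 'CDN-A'];
```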