hls.js 1.5.9 → 1.5.10-0.canary.10321
This diff shows the changes between two package versions that have been publicly released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- package/README.md +4 -3
- package/dist/hls-demo.js +41 -38
- package/dist/hls-demo.js.map +1 -1
- package/dist/hls.js +3479 -2197
- package/dist/hls.js.d.ts +108 -85
- package/dist/hls.js.map +1 -1
- package/dist/hls.light.js +2407 -1761
- package/dist/hls.light.js.map +1 -1
- package/dist/hls.light.min.js +1 -1
- package/dist/hls.light.min.js.map +1 -1
- package/dist/hls.light.mjs +1994 -1321
- package/dist/hls.light.mjs.map +1 -1
- package/dist/hls.min.js +1 -1
- package/dist/hls.min.js.map +1 -1
- package/dist/hls.mjs +2868 -1563
- package/dist/hls.mjs.map +1 -1
- package/dist/hls.worker.js +1 -1
- package/dist/hls.worker.js.map +1 -1
- package/package.json +35 -35
- package/src/config.ts +3 -2
- package/src/controller/abr-controller.ts +24 -20
- package/src/controller/audio-stream-controller.ts +68 -74
- package/src/controller/audio-track-controller.ts +1 -1
- package/src/controller/base-playlist-controller.ts +27 -10
- package/src/controller/base-stream-controller.ts +160 -38
- package/src/controller/buffer-controller.ts +230 -92
- package/src/controller/buffer-operation-queue.ts +16 -19
- package/src/controller/cap-level-controller.ts +3 -2
- package/src/controller/cmcd-controller.ts +51 -14
- package/src/controller/content-steering-controller.ts +29 -15
- package/src/controller/eme-controller.ts +10 -23
- package/src/controller/error-controller.ts +6 -8
- package/src/controller/fps-controller.ts +8 -3
- package/src/controller/fragment-tracker.ts +15 -11
- package/src/controller/gap-controller.ts +43 -16
- package/src/controller/id3-track-controller.ts +7 -7
- package/src/controller/latency-controller.ts +9 -11
- package/src/controller/level-controller.ts +37 -19
- package/src/controller/stream-controller.ts +37 -32
- package/src/controller/subtitle-stream-controller.ts +28 -40
- package/src/controller/subtitle-track-controller.ts +5 -3
- package/src/controller/timeline-controller.ts +19 -21
- package/src/crypt/aes-crypto.ts +21 -2
- package/src/crypt/decrypter-aes-mode.ts +4 -0
- package/src/crypt/decrypter.ts +32 -16
- package/src/crypt/fast-aes-key.ts +28 -5
- package/src/demux/audio/aacdemuxer.ts +2 -2
- package/src/demux/audio/ac3-demuxer.ts +4 -3
- package/src/demux/audio/adts.ts +9 -4
- package/src/demux/audio/base-audio-demuxer.ts +16 -14
- package/src/demux/audio/mp3demuxer.ts +4 -3
- package/src/demux/audio/mpegaudio.ts +1 -1
- package/src/demux/mp4demuxer.ts +7 -7
- package/src/demux/sample-aes.ts +2 -0
- package/src/demux/transmuxer-interface.ts +4 -12
- package/src/demux/transmuxer-worker.ts +4 -4
- package/src/demux/transmuxer.ts +16 -3
- package/src/demux/tsdemuxer.ts +71 -37
- package/src/demux/video/avc-video-parser.ts +208 -119
- package/src/demux/video/base-video-parser.ts +147 -18
- package/src/demux/video/exp-golomb.ts +0 -208
- package/src/demux/video/hevc-video-parser.ts +749 -0
- package/src/empty-es.js +5 -0
- package/src/events.ts +8 -1
- package/src/exports-named.ts +1 -1
- package/src/hls.ts +61 -38
- package/src/loader/fragment-loader.ts +10 -3
- package/src/loader/key-loader.ts +3 -1
- package/src/loader/level-key.ts +10 -9
- package/src/loader/playlist-loader.ts +4 -5
- package/src/remux/mp4-generator.ts +196 -1
- package/src/remux/mp4-remuxer.ts +24 -8
- package/src/task-loop.ts +5 -2
- package/src/types/component-api.ts +3 -1
- package/src/types/demuxer.ts +4 -0
- package/src/types/events.ts +4 -0
- package/src/types/remuxer.ts +1 -1
- package/src/utils/buffer-helper.ts +12 -31
- package/src/utils/cea-608-parser.ts +1 -3
- package/src/utils/codecs.ts +34 -5
- package/src/utils/encryption-methods-util.ts +21 -0
- package/src/utils/fetch-loader.ts +1 -1
- package/src/utils/imsc1-ttml-parser.ts +1 -1
- package/src/utils/keysystem-util.ts +1 -6
- package/src/utils/logger.ts +58 -23
- package/src/utils/mp4-tools.ts +5 -3
- package/src/utils/utf8-utils.ts +18 -0
- package/src/utils/webvtt-parser.ts +1 -1
- package/src/utils/xhr-loader.ts +5 -5
- package/src/demux/id3.ts +0 -411
package/dist/hls.light.mjs
CHANGED
@@ -176,6 +176,23 @@ var urlToolkit = {exports: {}};
 
 var urlToolkitExports = urlToolkit.exports;
 
+function _defineProperty(e, r, t) {
+  return (r = _toPropertyKey(r)) in e ? Object.defineProperty(e, r, {
+    value: t,
+    enumerable: !0,
+    configurable: !0,
+    writable: !0
+  }) : e[r] = t, e;
+}
+function _extends() {
+  return _extends = Object.assign ? Object.assign.bind() : function (n) {
+    for (var e = 1; e < arguments.length; e++) {
+      var t = arguments[e];
+      for (var r in t) ({}).hasOwnProperty.call(t, r) && (n[r] = t[r]);
+    }
+    return n;
+  }, _extends.apply(null, arguments);
+}
 function ownKeys(e, r) {
   var t = Object.keys(e);
   if (Object.getOwnPropertySymbols) {
@@ -209,35 +226,7 @@ function _toPrimitive(t, r) {
 }
 function _toPropertyKey(t) {
   var i = _toPrimitive(t, "string");
-  return "symbol" == typeof i ? i :
-}
-function _defineProperty(obj, key, value) {
-  key = _toPropertyKey(key);
-  if (key in obj) {
-    Object.defineProperty(obj, key, {
-      value: value,
-      enumerable: true,
-      configurable: true,
-      writable: true
-    });
-  } else {
-    obj[key] = value;
-  }
-  return obj;
-}
-function _extends() {
-  _extends = Object.assign ? Object.assign.bind() : function (target) {
-    for (var i = 1; i < arguments.length; i++) {
-      var source = arguments[i];
-      for (var key in source) {
-        if (Object.prototype.hasOwnProperty.call(source, key)) {
-          target[key] = source[key];
-        }
-      }
-    }
-    return target;
-  };
-  return _extends.apply(this, arguments);
+  return "symbol" == typeof i ? i : i + "";
 }
 
 // https://caniuse.com/mdn-javascript_builtins_number_isfinite
@@ -256,6 +245,7 @@ let Events = /*#__PURE__*/function (Events) {
   Events["MEDIA_ATTACHED"] = "hlsMediaAttached";
   Events["MEDIA_DETACHING"] = "hlsMediaDetaching";
  Events["MEDIA_DETACHED"] = "hlsMediaDetached";
+  Events["MEDIA_ENDED"] = "hlsMediaEnded";
  Events["BUFFER_RESET"] = "hlsBufferReset";
  Events["BUFFER_CODECS"] = "hlsBufferCodecs";
  Events["BUFFER_CREATED"] = "hlsBufferCreated";
@@ -369,58 +359,6 @@ let ErrorDetails = /*#__PURE__*/function (ErrorDetails) {
   return ErrorDetails;
 }({});
 
-const noop = function noop() {};
-const fakeLogger = {
-  trace: noop,
-  debug: noop,
-  log: noop,
-  warn: noop,
-  info: noop,
-  error: noop
-};
-let exportedLogger = fakeLogger;
-
-// let lastCallTime;
-// function formatMsgWithTimeInfo(type, msg) {
-//   const now = Date.now();
-//   const diff = lastCallTime ? '+' + (now - lastCallTime) : '0';
-//   lastCallTime = now;
-//   msg = (new Date(now)).toISOString() + ' | [' + type + '] > ' + msg + ' ( ' + diff + ' ms )';
-//   return msg;
-// }
-
-function consolePrintFn(type) {
-  const func = self.console[type];
-  if (func) {
-    return func.bind(self.console, `[${type}] >`);
-  }
-  return noop;
-}
-function exportLoggerFunctions(debugConfig, ...functions) {
-  functions.forEach(function (type) {
-    exportedLogger[type] = debugConfig[type] ? debugConfig[type].bind(debugConfig) : consolePrintFn(type);
-  });
-}
-function enableLogs(debugConfig, id) {
-  // check that console is available
-  if (typeof console === 'object' && debugConfig === true || typeof debugConfig === 'object') {
-    exportLoggerFunctions(debugConfig,
-    // Remove out from list here to hard-disable a log-level
-    // 'trace',
-    'debug', 'log', 'info', 'warn', 'error');
-    // Some browsers don't allow to use bind on console object anyway
-    // fallback to default if needed
-    try {
-      exportedLogger.log(`Debug logs enabled for "${id}" in hls.js version ${"1.5.9"}`);
-    } catch (e) {
-      exportedLogger = fakeLogger;
-    }
-  } else {
-    exportedLogger = fakeLogger;
-  }
-}
-const logger = exportedLogger;
-
 const DECIMAL_RESOLUTION_REGEX = /^(\d+)x(\d+)$/;
 const ATTR_LIST_REGEX = /(.+?)=(".*?"|.*?)(?:,|$)/g;
 
@@ -502,6 +440,84 @@ class AttrList {
   }
 }
 
+class Logger {
+  constructor(label, logger) {
+    this.trace = void 0;
+    this.debug = void 0;
+    this.log = void 0;
+    this.warn = void 0;
+    this.info = void 0;
+    this.error = void 0;
+    const lb = `[${label}]:`;
+    this.trace = noop;
+    this.debug = logger.debug.bind(null, lb);
+    this.log = logger.log.bind(null, lb);
+    this.warn = logger.warn.bind(null, lb);
+    this.info = logger.info.bind(null, lb);
+    this.error = logger.error.bind(null, lb);
+  }
+}
+const noop = function noop() {};
+const fakeLogger = {
+  trace: noop,
+  debug: noop,
+  log: noop,
+  warn: noop,
+  info: noop,
+  error: noop
+};
+function createLogger() {
+  return _extends({}, fakeLogger);
+}
+
+// let lastCallTime;
+// function formatMsgWithTimeInfo(type, msg) {
+//   const now = Date.now();
+//   const diff = lastCallTime ? '+' + (now - lastCallTime) : '0';
+//   lastCallTime = now;
+//   msg = (new Date(now)).toISOString() + ' | [' + type + '] > ' + msg + ' ( ' + diff + ' ms )';
+//   return msg;
+// }
+
+function consolePrintFn(type, id) {
+  const func = self.console[type];
+  return func ? func.bind(self.console, `${''}[${type}] >`) : noop;
+}
+function getLoggerFn(key, debugConfig, id) {
+  return debugConfig[key] ? debugConfig[key].bind(debugConfig) : consolePrintFn(key);
+}
+const exportedLogger = createLogger();
+function enableLogs(debugConfig, context, id) {
+  // check that console is available
+  const newLogger = createLogger();
+  if (typeof console === 'object' && debugConfig === true || typeof debugConfig === 'object') {
+    const keys = [
+    // Remove out from list here to hard-disable a log-level
+    // 'trace',
+    'debug', 'log', 'info', 'warn', 'error'];
+    keys.forEach(key => {
+      newLogger[key] = getLoggerFn(key, debugConfig);
+    });
+    // Some browsers don't allow to use bind on console object anyway
+    // fallback to default if needed
+    try {
+      newLogger.log(`Debug logs enabled for "${context}" in hls.js version ${"1.5.10-0.canary.10321"}`);
+    } catch (e) {
+      /* log fn threw an exception. All logger methods are no-ops. */
+      return createLogger();
+    }
+    // global exported logger uses the same functions as new logger without `id`
+    keys.forEach(key => {
+      exportedLogger[key] = getLoggerFn(key, debugConfig);
+    });
+  } else {
+    // Reset global exported logger
+    _extends(exportedLogger, newLogger);
+  }
+  return newLogger;
+}
+const logger = exportedLogger;
+
 // Avoid exporting const enum so that these values can be inlined
 
 function isDateRangeCueAttribute(attrName) {
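
The `Logger` class added above gives each controller a label-prefixed view of an instance logger instead of the old module-level singleton. A minimal TypeScript sketch of the same pattern, using illustrative names rather than the library's internal exports:

```ts
type LogFn = (...args: unknown[]) => void;

interface BaseLogger {
  debug: LogFn;
  log: LogFn;
  warn: LogFn;
  info: LogFn;
  error: LogFn;
}

class PrefixedLogger implements BaseLogger {
  debug: LogFn;
  log: LogFn;
  warn: LogFn;
  info: LogFn;
  error: LogFn;

  constructor(label: string, base: BaseLogger) {
    const lb = `[${label}]:`;
    // Bind once so every call is prefixed with the component label.
    this.debug = base.debug.bind(base, lb);
    this.log = base.log.bind(base, lb);
    this.warn = base.warn.bind(base, lb);
    this.info = base.info.bind(base, lb);
    this.error = base.error.bind(base, lb);
  }
}

// Usage: prints "[abr]: switching level"
new PrefixedLogger('abr', console).log('switching level');
```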
@@ -991,10 +1007,32 @@ class LevelDetails {
   }
 }
 
+var DecrypterAesMode = {
+  cbc: 0,
+  ctr: 1
+};
+
+function isFullSegmentEncryption(method) {
+  return method === 'AES-128' || method === 'AES-256' || method === 'AES-256-CTR';
+}
+function getAesModeFromFullSegmentMethod(method) {
+  switch (method) {
+    case 'AES-128':
+    case 'AES-256':
+      return DecrypterAesMode.cbc;
+    case 'AES-256-CTR':
+      return DecrypterAesMode.ctr;
+    default:
+      throw new Error(`invalid full segment method ${method}`);
+  }
+}
+
 // This file is inserted as a shim for modules which we do not want to include into the distro.
 // This replacement is done in the "alias" plugin of the rollup config.
-
-var
+// Use a ES dedicated file as Rollup assigns an object in the output
+// For example: "var KeySystemFormats = emptyEs.KeySystemFormats;"
+var emptyEs = {};
+var HevcVideoParser = /*@__PURE__*/getDefaultExportFromCjs(emptyEs);
 
 function sliceUint8(array, start, end) {
   // @ts-expect-error This polyfills IE11 usage of Uint8Array slice.
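
The new `DecrypterAesMode` flag and `getAesModeFromFullSegmentMethod` helper let the decrypter distinguish CBC full-segment methods (`AES-128`, `AES-256`) from `AES-256-CTR`. A hedged, standalone sketch of how such a mode could be mapped onto a Web Crypto key import, independent of the hls.js internals that actually consume it:

```ts
enum AesMode {
  cbc,
  ctr,
}

function aesModeFromMethod(method: string): AesMode {
  switch (method) {
    case 'AES-128':
    case 'AES-256':
      return AesMode.cbc;
    case 'AES-256-CTR':
      return AesMode.ctr;
    default:
      throw new Error(`invalid full segment method ${method}`);
  }
}

// Import a raw key for the matching SubtleCrypto algorithm.
function importDecryptKey(rawKey: ArrayBuffer, mode: AesMode): Promise<CryptoKey> {
  const name = mode === AesMode.ctr ? 'AES-CTR' : 'AES-CBC';
  return crypto.subtle.importKey('raw', rawKey, { name }, false, ['decrypt']);
}
```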
@@ -1002,365 +1040,96 @@ function sliceUint8(array, start, end) {
   return Uint8Array.prototype.slice ? array.slice(start, end) : new Uint8Array(Array.prototype.slice.call(array, start, end));
 }
 
-//
-
-
- *
- *
- *
+// http://stackoverflow.com/questions/8936984/uint8array-to-string-in-javascript/22373197
+// http://www.onicos.com/staff/iz/amuse/javascript/expert/utf.txt
+/* utf.js - UTF-8 <=> UTF-16 convertion
+ *
+ * Copyright (C) 1999 Masanao Izumo <iz@onicos.co.jp>
+ * Version: 1.0
+ * LastModified: Dec 25 1999
+ * This library is free. You can redistribute it and/or modify it.
  */
-const isHeader$2 = (data, offset) => {
-  /*
-   * http://id3.org/id3v2.3.0
-   * [0] = 'I'
-   * [1] = 'D'
-   * [2] = '3'
-   * [3,4] = {Version}
-   * [5] = {Flags}
-   * [6-9] = {ID3 Size}
-   *
-   * An ID3v2 tag can be detected with the following pattern:
-   *  $49 44 33 yy yy xx zz zz zz zz
-   * Where yy is less than $FF, xx is the 'flags' byte and zz is less than $80
-   */
-  if (offset + 10 <= data.length) {
-    // look for 'ID3' identifier
-    if (data[offset] === 0x49 && data[offset + 1] === 0x44 && data[offset + 2] === 0x33) {
-      // check version is within range
-      if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
-        // check size is within range
-        if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
-          return true;
-        }
-      }
-    }
-  }
-  return false;
-};
-
 /**
- *
- *
- * @param
+ * Converts a UTF-8 array to a string.
+ *
+ * @param array - The UTF-8 array to convert
+ *
+ * @returns The string
+ *
+ * @group Utils
+ *
+ * @beta
  */
-
-
-
-
-
-
-
-
-  if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
-    // check size is within range
-    if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
-      return true;
-    }
-  }
+function utf8ArrayToStr(array, exitOnNull = false) {
+  if (typeof TextDecoder !== 'undefined') {
+    const decoder = new TextDecoder('utf-8');
+    const decoded = decoder.decode(array);
+    if (exitOnNull) {
+      // grab up to the first null
+      const idx = decoded.indexOf('\0');
+      return idx !== -1 ? decoded.substring(0, idx) : decoded;
     }
+    // remove any null characters
+    return decoded.replace(/\0/g, '');
   }
-
-
-
-
-
-
-
-
-
-
-
-
-
+  const len = array.length;
+  let c;
+  let char2;
+  let char3;
+  let out = '';
+  let i = 0;
+  while (i < len) {
+    c = array[i++];
+    if (c === 0x00 && exitOnNull) {
+      return out;
+    } else if (c === 0x00 || c === 0x03) {
+      // If the character is 3 (END_OF_TEXT) or 0 (NULL) then skip it
+      continue;
+    }
+    switch (c >> 4) {
+      case 0:
+      case 1:
+      case 2:
+      case 3:
+      case 4:
+      case 5:
+      case 6:
+      case 7:
+        // 0xxxxxxx
+        out += String.fromCharCode(c);
+        break;
+      case 12:
+      case 13:
+        // 110x xxxx 10xx xxxx
+        char2 = array[i++];
+        out += String.fromCharCode((c & 0x1f) << 6 | char2 & 0x3f);
+        break;
+      case 14:
+        // 1110 xxxx 10xx xxxx 10xx xxxx
+        char2 = array[i++];
+        char3 = array[i++];
+        out += String.fromCharCode((c & 0x0f) << 12 | (char2 & 0x3f) << 6 | (char3 & 0x3f) << 0);
+        break;
    }
-    offset += length;
-  }
-  if (length > 0) {
-    return data.subarray(front, front + length);
   }
-  return
-}
-const readSize = (data, offset) => {
-  let size = 0;
-  size = (data[offset] & 0x7f) << 21;
-  size |= (data[offset + 1] & 0x7f) << 14;
-  size |= (data[offset + 2] & 0x7f) << 7;
-  size |= data[offset + 3] & 0x7f;
-  return size;
-};
-const canParse$2 = (data, offset) => {
-  return isHeader$2(data, offset) && readSize(data, offset + 6) + 10 <= data.length - offset;
-};
+  return out;
+}
 
 /**
- *
- * @param data - Block of data containing one or more ID3 tags
+ * hex dump helper class
  */
-
-
-
-
-
-
+
+const Hex = {
+  hexDump: function (array) {
+    let str = '';
+    for (let i = 0; i < array.length; i++) {
+      let h = array[i].toString(16);
+      if (h.length < 2) {
+        h = '0' + h;
+      }
+      str += h;
     }
-
-  return undefined;
-};
-
-/**
- * Returns true if the ID3 frame is an Elementary Stream timestamp frame
- */
-const isTimeStampFrame = frame => {
-  return frame && frame.key === 'PRIV' && frame.info === 'com.apple.streaming.transportStreamTimestamp';
-};
-const getFrameData = data => {
-  /*
-  Frame ID $xx xx xx xx (four characters)
-  Size $xx xx xx xx
-  Flags $xx xx
-  */
-  const type = String.fromCharCode(data[0], data[1], data[2], data[3]);
-  const size = readSize(data, 4);
-
-  // skip frame id, size, and flags
-  const offset = 10;
-  return {
-    type,
-    size,
-    data: data.subarray(offset, offset + size)
-  };
-};
-
-/**
- * Returns an array of ID3 frames found in all the ID3 tags in the id3Data
- * @param id3Data - The ID3 data containing one or more ID3 tags
- */
-const getID3Frames = id3Data => {
-  let offset = 0;
-  const frames = [];
-  while (isHeader$2(id3Data, offset)) {
-    const size = readSize(id3Data, offset + 6);
-    // skip past ID3 header
-    offset += 10;
-    const end = offset + size;
-    // loop through frames in the ID3 tag
-    while (offset + 8 < end) {
-      const frameData = getFrameData(id3Data.subarray(offset));
-      const frame = decodeFrame(frameData);
-      if (frame) {
-        frames.push(frame);
-      }
-
-      // skip frame header and frame data
-      offset += frameData.size + 10;
-    }
-    if (isFooter(id3Data, offset)) {
-      offset += 10;
-    }
-  }
-  return frames;
-};
-const decodeFrame = frame => {
-  if (frame.type === 'PRIV') {
-    return decodePrivFrame(frame);
-  } else if (frame.type[0] === 'W') {
-    return decodeURLFrame(frame);
-  }
-  return decodeTextFrame(frame);
-};
-const decodePrivFrame = frame => {
-  /*
-  Format: <text string>\0<binary data>
-  */
-  if (frame.size < 2) {
-    return undefined;
-  }
-  const owner = utf8ArrayToStr(frame.data, true);
-  const privateData = new Uint8Array(frame.data.subarray(owner.length + 1));
-  return {
-    key: frame.type,
-    info: owner,
-    data: privateData.buffer
-  };
-};
-const decodeTextFrame = frame => {
-  if (frame.size < 2) {
-    return undefined;
-  }
-  if (frame.type === 'TXXX') {
-    /*
-    Format:
-    [0] = {Text Encoding}
-    [1-?] = {Description}\0{Value}
-    */
-    let index = 1;
-    const description = utf8ArrayToStr(frame.data.subarray(index), true);
-    index += description.length + 1;
-    const value = utf8ArrayToStr(frame.data.subarray(index));
-    return {
-      key: frame.type,
-      info: description,
-      data: value
-    };
-  }
-  /*
-  Format:
-  [0] = {Text Encoding}
-  [1-?] = {Value}
-  */
-  const text = utf8ArrayToStr(frame.data.subarray(1));
-  return {
-    key: frame.type,
-    data: text
-  };
-};
-const decodeURLFrame = frame => {
-  if (frame.type === 'WXXX') {
-    /*
-    Format:
-    [0] = {Text Encoding}
-    [1-?] = {Description}\0{URL}
-    */
-    if (frame.size < 2) {
-      return undefined;
-    }
-    let index = 1;
-    const description = utf8ArrayToStr(frame.data.subarray(index), true);
-    index += description.length + 1;
-    const value = utf8ArrayToStr(frame.data.subarray(index));
-    return {
-      key: frame.type,
-      info: description,
-      data: value
-    };
-  }
-  /*
-  Format:
-  [0-?] = {URL}
-  */
-  const url = utf8ArrayToStr(frame.data);
-  return {
-    key: frame.type,
-    data: url
-  };
-};
-const readTimeStamp = timeStampFrame => {
-  if (timeStampFrame.data.byteLength === 8) {
-    const data = new Uint8Array(timeStampFrame.data);
-    // timestamp is 33 bit expressed as a big-endian eight-octet number,
-    // with the upper 31 bits set to zero.
-    const pts33Bit = data[3] & 0x1;
-    let timestamp = (data[4] << 23) + (data[5] << 15) + (data[6] << 7) + data[7];
-    timestamp /= 45;
-    if (pts33Bit) {
-      timestamp += 47721858.84;
-    } // 2^32 / 90
-
-    return Math.round(timestamp);
-  }
-  return undefined;
-};
-
-// http://stackoverflow.com/questions/8936984/uint8array-to-string-in-javascript/22373197
-// http://www.onicos.com/staff/iz/amuse/javascript/expert/utf.txt
-/* utf.js - UTF-8 <=> UTF-16 convertion
- *
- * Copyright (C) 1999 Masanao Izumo <iz@onicos.co.jp>
- * Version: 1.0
- * LastModified: Dec 25 1999
- * This library is free. You can redistribute it and/or modify it.
- */
-const utf8ArrayToStr = (array, exitOnNull = false) => {
-  const decoder = getTextDecoder();
-  if (decoder) {
-    const decoded = decoder.decode(array);
-    if (exitOnNull) {
-      // grab up to the first null
-      const idx = decoded.indexOf('\0');
-      return idx !== -1 ? decoded.substring(0, idx) : decoded;
-    }
-
-    // remove any null characters
-    return decoded.replace(/\0/g, '');
-  }
-  const len = array.length;
-  let c;
-  let char2;
-  let char3;
-  let out = '';
-  let i = 0;
-  while (i < len) {
-    c = array[i++];
-    if (c === 0x00 && exitOnNull) {
-      return out;
-    } else if (c === 0x00 || c === 0x03) {
-      // If the character is 3 (END_OF_TEXT) or 0 (NULL) then skip it
-      continue;
-    }
-    switch (c >> 4) {
-      case 0:
-      case 1:
-      case 2:
-      case 3:
-      case 4:
-      case 5:
-      case 6:
-      case 7:
-        // 0xxxxxxx
-        out += String.fromCharCode(c);
-        break;
-      case 12:
-      case 13:
-        // 110x xxxx 10xx xxxx
-        char2 = array[i++];
-        out += String.fromCharCode((c & 0x1f) << 6 | char2 & 0x3f);
-        break;
-      case 14:
-        // 1110 xxxx 10xx xxxx 10xx xxxx
-        char2 = array[i++];
-        char3 = array[i++];
-        out += String.fromCharCode((c & 0x0f) << 12 | (char2 & 0x3f) << 6 | (char3 & 0x3f) << 0);
-        break;
-    }
-  }
-  return out;
-};
-let decoder;
-function getTextDecoder() {
-  // On Play Station 4, TextDecoder is defined but partially implemented.
-  // Manual decoding option is preferable
-  if (navigator.userAgent.includes('PlayStation 4')) {
-    return;
-  }
-  if (!decoder && typeof self.TextDecoder !== 'undefined') {
-    decoder = new self.TextDecoder('utf-8');
-  }
-  return decoder;
-}
-
-/**
- * hex dump helper class
- */
-
-const Hex = {
-  hexDump: function (array) {
-    let str = '';
-    for (let i = 0; i < array.length; i++) {
-      let h = array[i].toString(16);
-      if (h.length < 2) {
-        h = '0' + h;
-      }
-      str += h;
-    }
-    return str;
+    return str;
   }
 };
 
@@ -1626,7 +1395,7 @@ function parseStsd(stsd) {
       {
         const codecBox = findBox(sampleEntries, [fourCC])[0];
         const esdsBox = findBox(codecBox.subarray(28), ['esds'])[0];
-        if (esdsBox && esdsBox.length >
+        if (esdsBox && esdsBox.length > 7) {
          let i = 4;
          // ES Descriptor tag
          if (esdsBox[i++] !== 0x03) {
@@ -1741,7 +1510,9 @@ function parseStsd(stsd) {
 }
 function skipBERInteger(bytes, i) {
   const limit = i + 5;
-  while (bytes[i++] & 0x80 && i < limit) {
+  while (bytes[i++] & 0x80 && i < limit) {
+    /* do nothing */
+  }
   return i;
 }
 function toHex(x) {
@@ -2433,12 +2204,12 @@ class LevelKey {
     this.keyFormatVersions = formatversions;
     this.iv = iv;
     this.encrypted = method ? method !== 'NONE' : false;
-    this.isCommonEncryption = this.encrypted && method
+    this.isCommonEncryption = this.encrypted && !isFullSegmentEncryption(method);
   }
   isSupported() {
     // If it's Segment encryption or No encryption, just select that key system
     if (this.method) {
-      if (this.method
+      if (isFullSegmentEncryption(this.method) || this.method === 'NONE') {
         return true;
       }
       if (this.keyFormat === 'identity') {
@@ -2452,14 +2223,13 @@ class LevelKey {
     if (!this.encrypted || !this.uri) {
       return null;
     }
-    if (this.method
+    if (isFullSegmentEncryption(this.method) && this.uri && !this.iv) {
       if (typeof sn !== 'number') {
         // We are fetching decryption data for a initialization segment
-        // If the segment was encrypted with AES-128
+        // If the segment was encrypted with AES-128/256
         // It must have an IV defined. We cannot substitute the Segment Number in.
-
-
-      }
+        logger.warn(`missing IV for initialization segment with method="${this.method}" - compliance issue`);
+
        // Explicitly set sn to resulting value from implicit conversions 'initSegment' values for IV generation.
        sn = 0;
      }
@@ -2609,23 +2379,28 @@ function getCodecCompatibleNameLower(lowerCaseCodec, preferManagedMediaSource =
   if (CODEC_COMPATIBLE_NAMES[lowerCaseCodec]) {
     return CODEC_COMPATIBLE_NAMES[lowerCaseCodec];
   }
-
-  // Idealy fLaC and Opus would be first (spec-compliant) but
-  // some browsers will report that fLaC is supported then fail.
-  // see: https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
   const codecsToCheck = {
+    // Idealy fLaC and Opus would be first (spec-compliant) but
+    // some browsers will report that fLaC is supported then fail.
+    // see: https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
     flac: ['flac', 'fLaC', 'FLAC'],
-    opus: ['opus', 'Opus']
+    opus: ['opus', 'Opus'],
+    // Replace audio codec info if browser does not support mp4a.40.34,
+    // and demuxer can fallback to 'audio/mpeg' or 'audio/mp4;codecs="mp3"'
+    'mp4a.40.34': ['mp3']
   }[lowerCaseCodec];
   for (let i = 0; i < codecsToCheck.length; i++) {
+    var _getMediaSource;
     if (isCodecMediaSourceSupported(codecsToCheck[i], 'audio', preferManagedMediaSource)) {
       CODEC_COMPATIBLE_NAMES[lowerCaseCodec] = codecsToCheck[i];
       return codecsToCheck[i];
+    } else if (codecsToCheck[i] === 'mp3' && (_getMediaSource = getMediaSource(preferManagedMediaSource)) != null && _getMediaSource.isTypeSupported('audio/mpeg')) {
+      return '';
     }
   }
   return lowerCaseCodec;
 }
-const AUDIO_CODEC_REGEXP = /flac|opus/i;
+const AUDIO_CODEC_REGEXP = /flac|opus|mp4a\.40\.34/i;
 function getCodecCompatibleName(codec, preferManagedMediaSource = true) {
   return codec.replace(AUDIO_CODEC_REGEXP, m => getCodecCompatibleNameLower(m.toLowerCase(), preferManagedMediaSource));
 }
@@ -2648,6 +2423,16 @@ function convertAVC1ToAVCOTI(codec) {
   }
   return codec;
 }
+function getM2TSSupportedAudioTypes(preferManagedMediaSource) {
+  const MediaSource = getMediaSource(preferManagedMediaSource) || {
+    isTypeSupported: () => false
+  };
+  return {
+    mpeg: MediaSource.isTypeSupported('audio/mpeg'),
+    mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
+    ac3: false
+  };
+}
 
 const MASTER_PLAYLIST_REGEX = /#EXT-X-STREAM-INF:([^\r\n]*)(?:[\r\n](?:#[^\r\n]*)?)*([^\r\n]+)|#EXT-X-(SESSION-DATA|SESSION-KEY|DEFINE|CONTENT-STEERING|START):([^\r\n]*)[\r\n]+/g;
 const MASTER_PLAYLIST_MEDIA_REGEX = /#EXT-X-MEDIA:(.*)/g;
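
`getM2TSSupportedAudioTypes` above asks `MediaSource.isTypeSupported` which MPEG audio flavors can be buffered, so the transmuxer knows whether `mp4a.40.34` (MP3 in TS) can be passed through. A standalone sketch of the same capability probe; the `ManagedMediaSource` preference is an assumption about how a caller might mirror the bundle's `getMediaSource` helper:

```ts
function m2tsSupportedAudioTypes(preferManagedMediaSource = true) {
  // Prefer ManagedMediaSource (Safari 17+) when requested and available.
  const MS: { isTypeSupported(type: string): boolean } | undefined =
    (preferManagedMediaSource && (self as any).ManagedMediaSource) || self.MediaSource;
  const supported = (type: string) => !!MS && MS.isTypeSupported(type);
  return {
    mpeg: supported('audio/mpeg'),
    mp3: supported('audio/mp4; codecs="mp3"'),
    ac3: false, // AC-3 is not probed in the light build
  };
}

console.log(m2tsSupportedAudioTypes());
```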
@@ -3448,10 +3233,10 @@ class PlaylistLoader {
     const loaderContext = loader.context;
     if (loaderContext && loaderContext.url === context.url && loaderContext.level === context.level) {
       // same URL can't overlap
-      logger.trace('[playlist-loader]: playlist request ongoing');
+      this.hls.logger.trace('[playlist-loader]: playlist request ongoing');
       return;
     }
-    logger.log(`[playlist-loader]: aborting previous loader for type: ${context.type}`);
+    this.hls.logger.log(`[playlist-loader]: aborting previous loader for type: ${context.type}`);
     loader.abort();
   }
 
@@ -3561,7 +3346,7 @@ class PlaylistLoader {
     // alt audio rendition in which quality levels (main)
     // contains both audio+video. but with mixed audio track not signaled
     if (!embeddedAudioFound && levels[0].audioCodec && !levels[0].attrs.AUDIO) {
-      logger.log('[playlist-loader]: audio codec signaled in quality level, but no embedded audio track signaled, create one');
+      this.hls.logger.log('[playlist-loader]: audio codec signaled in quality level, but no embedded audio track signaled, create one');
       audioTracks.unshift({
         type: 'main',
         name: 'main',
@@ -3660,7 +3445,7 @@ class PlaylistLoader {
       message += ` id: ${context.id} group-id: "${context.groupId}"`;
     }
     const error = new Error(message);
-    logger.warn(`[playlist-loader]: ${message}`);
+    this.hls.logger.warn(`[playlist-loader]: ${message}`);
     let details = ErrorDetails.UNKNOWN;
     let fatal = false;
     const loader = this.getInternalLoader(context);
@@ -3898,28 +3683,427 @@ var MetadataSchema = {
   emsg: "https://aomedia.org/emsg/ID3"
 };
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+/**
+ * Decode an ID3 PRIV frame.
+ *
+ * @param frame - the ID3 PRIV frame
+ *
+ * @returns The decoded ID3 PRIV frame
+ *
+ * @internal
+ *
+ * @group ID3
+ */
+function decodeId3PrivFrame(frame) {
+  /*
+  Format: <text string>\0<binary data>
+  */
+  if (frame.size < 2) {
+    return undefined;
   }
-
-
-
-
-
+  const owner = utf8ArrayToStr(frame.data, true);
+  const privateData = new Uint8Array(frame.data.subarray(owner.length + 1));
+  return {
+    key: frame.type,
+    info: owner,
+    data: privateData.buffer
+  };
+}
+
+/**
+ * Decodes an ID3 text frame
+ *
+ * @param frame - the ID3 text frame
+ *
+ * @returns The decoded ID3 text frame
+ *
+ * @internal
+ *
+ * @group ID3
+ */
+function decodeId3TextFrame(frame) {
+  if (frame.size < 2) {
+    return undefined;
+  }
+  if (frame.type === 'TXXX') {
+    /*
+    Format:
+    [0] = {Text Encoding}
+    [1-?] = {Description}\0{Value}
+    */
+    let index = 1;
+    const description = utf8ArrayToStr(frame.data.subarray(index), true);
+    index += description.length + 1;
+    const value = utf8ArrayToStr(frame.data.subarray(index));
+    return {
+      key: frame.type,
+      info: description,
+      data: value
+    };
+  }
+  /*
+  Format:
+  [0] = {Text Encoding}
+  [1-?] = {Value}
+  */
+  const text = utf8ArrayToStr(frame.data.subarray(1));
+  return {
+    key: frame.type,
+    info: '',
+    data: text
+  };
+}
+
+/**
+ * Decode a URL frame
+ *
+ * @param frame - the ID3 URL frame
+ *
+ * @returns The decoded ID3 URL frame
+ *
+ * @internal
+ *
+ * @group ID3
+ */
+function decodeId3UrlFrame(frame) {
+  if (frame.type === 'WXXX') {
+    /*
+    Format:
+    [0] = {Text Encoding}
+    [1-?] = {Description}\0{URL}
+    */
+    if (frame.size < 2) {
+      return undefined;
+    }
+    let index = 1;
+    const description = utf8ArrayToStr(frame.data.subarray(index), true);
+    index += description.length + 1;
+    const value = utf8ArrayToStr(frame.data.subarray(index));
+    return {
+      key: frame.type,
+      info: description,
+      data: value
+    };
+  }
+  /*
+  Format:
+  [0-?] = {URL}
+  */
+  const url = utf8ArrayToStr(frame.data);
+  return {
+    key: frame.type,
+    info: '',
+    data: url
+  };
+}
+
+function toUint8(data, offset = 0, length = Infinity) {
+  return view(data, offset, length, Uint8Array);
+}
+function view(data, offset, length, Type) {
+  const buffer = unsafeGetArrayBuffer(data);
+  let bytesPerElement = 1;
+  if ('BYTES_PER_ELEMENT' in Type) {
+    bytesPerElement = Type.BYTES_PER_ELEMENT;
+  }
+  // Absolute end of the |data| view within |buffer|.
+  const dataOffset = isArrayBufferView(data) ? data.byteOffset : 0;
+  const dataEnd = (dataOffset + data.byteLength) / bytesPerElement;
+  // Absolute start of the result within |buffer|.
+  const rawStart = (dataOffset + offset) / bytesPerElement;
+  const start = Math.floor(Math.max(0, Math.min(rawStart, dataEnd)));
+  // Absolute end of the result within |buffer|.
+  const end = Math.floor(Math.min(start + Math.max(length, 0), dataEnd));
+  return new Type(buffer, start, end - start);
+}
+function unsafeGetArrayBuffer(view) {
+  if (view instanceof ArrayBuffer) {
+    return view;
+  } else {
+    return view.buffer;
+  }
+}
+function isArrayBufferView(obj) {
+  return obj && obj.buffer instanceof ArrayBuffer && obj.byteLength !== undefined && obj.byteOffset !== undefined;
+}
+
+function toArrayBuffer(view) {
+  if (view instanceof ArrayBuffer) {
+    return view;
+  } else {
+    if (view.byteOffset == 0 && view.byteLength == view.buffer.byteLength) {
+      // This is a TypedArray over the whole buffer.
+      return view.buffer;
+    }
+    // This is a 'view' on the buffer. Create a new buffer that only contains
+    // the data. Note that since this isn't an ArrayBuffer, the 'new' call
+    // will allocate a new buffer to hold the copy.
+    return new Uint8Array(view).buffer;
+  }
+}
+
+function decodeId3ImageFrame(frame) {
+  const metadataFrame = {
+    key: frame.type,
+    description: '',
+    data: '',
+    mimeType: null,
+    pictureType: null
+  };
+  const utf8Encoding = 0x03;
+  if (frame.size < 2) {
+    return undefined;
+  }
+  if (frame.data[0] !== utf8Encoding) {
+    console.log('Ignore frame with unrecognized character ' + 'encoding');
+    return undefined;
+  }
+  const mimeTypeEndIndex = frame.data.subarray(1).indexOf(0);
+  if (mimeTypeEndIndex === -1) {
+    return undefined;
+  }
+  const mimeType = utf8ArrayToStr(toUint8(frame.data, 1, mimeTypeEndIndex));
+  const pictureType = frame.data[2 + mimeTypeEndIndex];
+  const descriptionEndIndex = frame.data.subarray(3 + mimeTypeEndIndex).indexOf(0);
+  if (descriptionEndIndex === -1) {
+    return undefined;
+  }
+  const description = utf8ArrayToStr(toUint8(frame.data, 3 + mimeTypeEndIndex, descriptionEndIndex));
+  let data;
+  if (mimeType === '-->') {
+    data = utf8ArrayToStr(toUint8(frame.data, 4 + mimeTypeEndIndex + descriptionEndIndex));
+  } else {
+    data = toArrayBuffer(frame.data.subarray(4 + mimeTypeEndIndex + descriptionEndIndex));
+  }
+  metadataFrame.mimeType = mimeType;
+  metadataFrame.pictureType = pictureType;
+  metadataFrame.description = description;
+  metadataFrame.data = data;
+  return metadataFrame;
+}
+
+/**
+ * Decode an ID3 frame.
+ *
+ * @param frame - the ID3 frame
+ *
+ * @returns The decoded ID3 frame
+ *
+ * @internal
+ *
+ * @group ID3
+ */
+function decodeId3Frame(frame) {
+  if (frame.type === 'PRIV') {
+    return decodeId3PrivFrame(frame);
+  } else if (frame.type[0] === 'W') {
+    return decodeId3UrlFrame(frame);
+  } else if (frame.type === 'APIC') {
+    return decodeId3ImageFrame(frame);
+  }
+  return decodeId3TextFrame(frame);
+}
+
+/**
+ * Read ID3 size
+ *
+ * @param data - The data to read from
+ * @param offset - The offset at which to start reading
+ *
+ * @returns The size
+ *
+ * @internal
+ *
+ * @group ID3
+ */
+function readId3Size(data, offset) {
+  let size = 0;
+  size = (data[offset] & 0x7f) << 21;
+  size |= (data[offset + 1] & 0x7f) << 14;
+  size |= (data[offset + 2] & 0x7f) << 7;
+  size |= data[offset + 3] & 0x7f;
+  return size;
+}
+
+/**
+ * Returns the data of an ID3 frame.
+ *
+ * @param data - The data to read from
+ *
+ * @returns The data of the ID3 frame
+ *
+ * @internal
+ *
+ * @group ID3
+ */
+function getId3FrameData(data) {
+  /*
+  Frame ID $xx xx xx xx (four characters)
+  Size $xx xx xx xx
+  Flags $xx xx
+  */
+  const type = String.fromCharCode(data[0], data[1], data[2], data[3]);
+  const size = readId3Size(data, 4);
+  // skip frame id, size, and flags
+  const offset = 10;
+  return {
+    type,
+    size,
+    data: data.subarray(offset, offset + size)
+  };
+}
+
+/**
+ * Returns true if an ID3 footer can be found at offset in data
+ *
+ * @param data - The data to search in
+ * @param offset - The offset at which to start searching
+ *
+ * @returns `true` if an ID3 footer is found
+ *
+ * @internal
+ *
+ * @group ID3
+ */
+function isId3Footer(data, offset) {
+  /*
+   * The footer is a copy of the header, but with a different identifier
+   */
+  if (offset + 10 <= data.length) {
+    // look for '3DI' identifier
+    if (data[offset] === 0x33 && data[offset + 1] === 0x44 && data[offset + 2] === 0x49) {
+      // check version is within range
+      if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
+        // check size is within range
+        if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
+          return true;
+        }
+      }
+    }
+  }
+  return false;
+}
+
+/**
+ * Returns true if an ID3 header can be found at offset in data
+ *
+ * @param data - The data to search in
+ * @param offset - The offset at which to start searching
+ *
+ * @returns `true` if an ID3 header is found
+ *
+ * @internal
+ *
+ * @group ID3
+ */
+function isId3Header(data, offset) {
+  /*
+   * http://id3.org/id3v2.3.0
+   * [0] = 'I'
+   * [1] = 'D'
+   * [2] = '3'
+   * [3,4] = {Version}
+   * [5] = {Flags}
+   * [6-9] = {ID3 Size}
+   *
+   * An ID3v2 tag can be detected with the following pattern:
+   *  $49 44 33 yy yy xx zz zz zz zz
+   * Where yy is less than $FF, xx is the 'flags' byte and zz is less than $80
+   */
+  if (offset + 10 <= data.length) {
+    // look for 'ID3' identifier
+    if (data[offset] === 0x49 && data[offset + 1] === 0x44 && data[offset + 2] === 0x33) {
+      // check version is within range
+      if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
+        // check size is within range
+        if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
+          return true;
+        }
+      }
+    }
+  }
+  return false;
+}
+
+const HEADER_FOOTER_SIZE = 10;
+const FRAME_SIZE = 10;
+/**
+ * Returns an array of ID3 frames found in all the ID3 tags in the id3Data
+ *
+ * @param id3Data - The ID3 data containing one or more ID3 tags
+ *
+ * @returns Array of ID3 frame objects
+ *
+ * @group ID3
+ *
+ * @beta
+ */
+function getId3Frames(id3Data) {
+  let offset = 0;
+  const frames = [];
+  while (isId3Header(id3Data, offset)) {
+    const size = readId3Size(id3Data, offset + 6);
+    if (id3Data[offset + 5] >> 6 & 1) {
+      // skip extended header
+      offset += HEADER_FOOTER_SIZE;
+    }
+    // skip past ID3 header
+    offset += HEADER_FOOTER_SIZE;
+    const end = offset + size;
+    // loop through frames in the ID3 tag
+    while (offset + FRAME_SIZE < end) {
+      const frameData = getId3FrameData(id3Data.subarray(offset));
+      const frame = decodeId3Frame(frameData);
+      if (frame) {
+        frames.push(frame);
+      }
+      // skip frame header and frame data
+      offset += frameData.size + HEADER_FOOTER_SIZE;
+    }
+    if (isId3Footer(id3Data, offset)) {
+      offset += HEADER_FOOTER_SIZE;
+    }
+  }
+  return frames;
+}
+
+/**
+ * Returns true if the ID3 frame is an Elementary Stream timestamp frame
+ *
+ * @param frame - the ID3 frame
+ *
+ * @returns `true` if the ID3 frame is an Elementary Stream timestamp frame
+ *
+ * @internal
+ *
+ * @group ID3
+ */
+function isId3TimestampFrame(frame) {
+  return frame && frame.key === 'PRIV' && frame.info === 'com.apple.streaming.transportStreamTimestamp';
+}
+
+const MIN_CUE_DURATION = 0.25;
+function getCueClass() {
+  if (typeof self === 'undefined') return undefined;
+  return self.VTTCue || self.TextTrackCue;
+}
+function createCueWithDataFields(Cue, startTime, endTime, data, type) {
+  let cue = new Cue(startTime, endTime, '');
+  try {
+    cue.value = data;
+    if (type) {
+      cue.type = type;
+    }
+  } catch (e) {
+    cue = new Cue(startTime, endTime, JSON.stringify(type ? _objectSpread2({
+      type
+    }, data) : data));
+  }
+  return cue;
+}
+
+// VTTCue latest draft allows an infinite duration, fallback
+// to MAX_VALUE if necessary
 const MAX_CUE_ENDTIME = (() => {
   const Cue = getCueClass();
   try {
@@ -3980,11 +4164,10 @@ class ID3TrackController {
     this.media = data.media;
   }
   onMediaDetaching() {
-    if (
-
+    if (this.id3Track) {
+      clearCurrentCues(this.id3Track);
+      this.id3Track = null;
     }
-    clearCurrentCues(this.id3Track);
-    this.id3Track = null;
     this.media = null;
     this.dateRangeCuesAppended = {};
   }
@@ -4043,7 +4226,7 @@ class ID3TrackController {
       if (type === MetadataSchema.emsg && !enableEmsgMetadataCues || !enableID3MetadataCues) {
         continue;
       }
-      const frames =
+      const frames = getId3Frames(samples[i].data);
       if (frames) {
         const startTime = samples[i].pts;
         let endTime = startTime + samples[i].duration;
@@ -4057,7 +4240,7 @@ class ID3TrackController {
         for (let j = 0; j < frames.length; j++) {
           const frame = frames[j];
           // Safari doesn't put the timestamp frame in the TextTrack
-          if (!
+          if (!isId3TimestampFrame(frame)) {
             // add a bounds to any unbounded cues
             this.updateId3CueEnds(startTime, type);
             const cue = createCueWithDataFields(Cue, startTime, endTime, frame, type);
@@ -4225,7 +4408,47 @@ class LatencyController {
     this.currentTime = 0;
     this.stallCount = 0;
     this._latency = null;
-    this.
+    this.onTimeupdate = () => {
+      const {
+        media,
+        levelDetails
+      } = this;
+      if (!media || !levelDetails) {
+        return;
+      }
+      this.currentTime = media.currentTime;
+      const latency = this.computeLatency();
+      if (latency === null) {
+        return;
+      }
+      this._latency = latency;
+
+      // Adapt playbackRate to meet target latency in low-latency mode
+      const {
+        lowLatencyMode,
+        maxLiveSyncPlaybackRate
+      } = this.config;
+      if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1 || !levelDetails.live) {
+        return;
+      }
+      const targetLatency = this.targetLatency;
+      if (targetLatency === null) {
+        return;
+      }
+      const distanceFromTarget = latency - targetLatency;
+      // Only adjust playbackRate when within one target duration of targetLatency
+      // and more than one second from under-buffering.
+      // Playback further than one target duration from target can be considered DVR playback.
+      const liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration);
+      const inLiveRange = distanceFromTarget < liveMinLatencyDuration;
+      if (inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) {
+        const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
+        const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20;
+        media.playbackRate = Math.min(max, Math.max(1, rate));
+      } else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
+        media.playbackRate = 1;
+      }
+    };
     this.hls = hls;
     this.config = hls.config;
     this.registerListeners();
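
The inlined `onTimeupdate` handler above drives live catch-up with a logistic ramp, `rate = round(20 · 2 / (1 + e^(−0.75·distance − edgeStalled))) / 20`, clamped between 1 and `min(2, maxLiveSyncPlaybackRate)`. A quick worked check of that formula with illustrative inputs:

```ts
// Reproduces the playback-rate curve used in the hunk above.
function catchUpRate(distanceFromTarget: number, edgeStalled = 0, maxLiveSyncPlaybackRate = 2): number {
  const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
  const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - edgeStalled)) * 20) / 20;
  return Math.min(max, Math.max(1, rate));
}

console.log(catchUpRate(0.1)); // 1.05 — barely behind target, nudge gently
console.log(catchUpRate(1));   // 1.35 — one second behind, speed up noticeably
console.log(catchUpRate(8));   // 2    — far behind, clamped to the 2x ceiling
```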
@@ -4317,7 +4540,7 @@ class LatencyController {
     this.onMediaDetaching();
     this.levelDetails = null;
     // @ts-ignore
-    this.hls =
+    this.hls = null;
   }
   registerListeners() {
     this.hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
@@ -4335,11 +4558,11 @@ class LatencyController {
   }
   onMediaAttached(event, data) {
     this.media = data.media;
-    this.media.addEventListener('timeupdate', this.
+    this.media.addEventListener('timeupdate', this.onTimeupdate);
   }
   onMediaDetaching() {
     if (this.media) {
-      this.media.removeEventListener('timeupdate', this.
+      this.media.removeEventListener('timeupdate', this.onTimeupdate);
       this.media = null;
     }
   }
@@ -4353,10 +4576,10 @@ class LatencyController {
   }) {
     this.levelDetails = details;
     if (details.advanced) {
-      this.
+      this.onTimeupdate();
     }
     if (!details.live && this.media) {
-      this.media.removeEventListener('timeupdate', this.
+      this.media.removeEventListener('timeupdate', this.onTimeupdate);
     }
   }
   onError(event, data) {
@@ -4366,48 +4589,7 @@ class LatencyController {
     }
     this.stallCount++;
     if ((_this$levelDetails = this.levelDetails) != null && _this$levelDetails.live) {
-      logger.warn('[
-    }
-  }
-  timeupdate() {
-    const {
-      media,
-      levelDetails
-    } = this;
-    if (!media || !levelDetails) {
-      return;
-    }
-    this.currentTime = media.currentTime;
-    const latency = this.computeLatency();
-    if (latency === null) {
-      return;
-    }
-    this._latency = latency;
-
-    // Adapt playbackRate to meet target latency in low-latency mode
-    const {
-      lowLatencyMode,
-      maxLiveSyncPlaybackRate
-    } = this.config;
-    if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1 || !levelDetails.live) {
-      return;
-    }
-    const targetLatency = this.targetLatency;
-    if (targetLatency === null) {
-      return;
-    }
-    const distanceFromTarget = latency - targetLatency;
-    // Only adjust playbackRate when within one target duration of targetLatency
-    // and more than one second from under-buffering.
-    // Playback further than one target duration from target can be considered DVR playback.
-    const liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration);
-    const inLiveRange = distanceFromTarget < liveMinLatencyDuration;
-    if (inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) {
-      const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
-      const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20;
-      media.playbackRate = Math.min(max, Math.max(1, rate));
-    } else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
-      media.playbackRate = 1;
+      this.hls.logger.warn('[latency-controller]: Stall detected, adjusting target latency');
     }
   }
   estimateLiveEdge() {
@@ -5182,18 +5364,13 @@ var ErrorActionFlags = {
    MoveAllAlternatesMatchingHDCP: 2,
    SwitchToSDR: 4
  }; // Reserved for future use
- class ErrorController {
+ class ErrorController extends Logger {
    constructor(hls) {
+     super('error-controller', hls.logger);
      this.hls = void 0;
      this.playlistError = 0;
      this.penalizedRenditions = {};
-     this.log = void 0;
-     this.warn = void 0;
-     this.error = void 0;
      this.hls = hls;
-     this.log = logger.log.bind(logger, `[info]:`);
-     this.warn = logger.warn.bind(logger, `[warning]:`);
-     this.error = logger.error.bind(logger, `[error]:`);
      this.registerListeners();
    }
    registerListeners() {
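
Several controllers in this release stop binding `logger` methods in their constructors and instead extend a shared `Logger` base class that takes a label plus the per-instance logger. The base class itself is not visible in this part of the diff; a minimal sketch of the assumed pattern:

```js
// Assumed shape of the Logger base class these controllers now extend
// (names inferred from the calls in this diff, not the verbatim implementation).
class Logger {
  constructor(label, logger) {
    const prefix = `[${label}]:`;
    // Pre-bound helpers so subclasses can call this.log(...), this.warn(...), etc.
    this.trace = logger.trace ? logger.trace.bind(logger, prefix) : () => {};
    this.debug = logger.debug.bind(logger, prefix);
    this.log = logger.log.bind(logger, prefix);
    this.info = logger.info.bind(logger, prefix);
    this.warn = logger.warn.bind(logger, prefix);
    this.error = logger.error.bind(logger, prefix);
  }
}
// Usage mirrors the diff: class ErrorController extends Logger { constructor(hls) { super('error-controller', hls.logger); ... } }
```
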
@@ -5545,16 +5722,13 @@ class ErrorController {
      }
    }

- class BasePlaylistController {
+ class BasePlaylistController extends Logger {
    constructor(hls, logPrefix) {
+     super(logPrefix, hls.logger);
      this.hls = void 0;
      this.timer = -1;
      this.requestScheduled = -1;
      this.canLoad = false;
-     this.log = void 0;
-     this.warn = void 0;
-     this.log = logger.log.bind(logger, `${logPrefix}:`);
-     this.warn = logger.warn.bind(logger, `${logPrefix}:`);
      this.hls = hls;
    }
    destroy() {
@@ -5587,7 +5761,7 @@ class BasePlaylistController {
      try {
        uri = new self.URL(attr.URI, previous.url).href;
      } catch (error) {
-
+       this.warn(`Could not construct new URL for Rendition Report: ${error}`);
        uri = attr.URI || '';
      }
      // Use exact match. Otherwise, the last partial match, if any, will be used
@@ -5675,7 +5849,12 @@ class BasePlaylistController {
      const cdnAge = lastAdvanced + details.ageHeader;
      let currentGoal = Math.min(cdnAge - details.partTarget, details.targetduration * 1.5);
      if (currentGoal > 0) {
-       if (
+       if (cdnAge > details.targetduration * 3) {
+         // Omit segment and part directives when the last response was more than 3 target durations ago,
+         this.log(`Playlist last advanced ${lastAdvanced.toFixed(2)}s ago. Omitting segment and part directives.`);
+         msn = undefined;
+         part = undefined;
+       } else if (previousDetails != null && previousDetails.tuneInGoal && cdnAge - details.partTarget > previousDetails.tuneInGoal) {
          // If we attempted to get the next or latest playlist update, but currentGoal increased,
          // then we either can't catchup, or the "age" header cannot be trusted.
          this.warn(`CDN Tune-in goal increased from: ${previousDetails.tuneInGoal} to: ${currentGoal} with playlist age: ${details.age}`);
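
The hunk above changes when the playlist loader sends LL-HLS blocking-reload directives: if the playlist has not advanced for more than three target durations, `msn` and `part` are dropped so a plain (non-blocking) reload is issued. For reference, a hedged sketch of what those directives become on the wire per the LL-HLS draft (`_HLS_msn`, `_HLS_part`); the URL and numbers are hypothetical:

```js
// Hypothetical example of the delivery directives that get dropped when the
// playlist is considered stale (cdnAge > 3 * targetduration).
const url = new URL('https://example.com/live/media.m3u8'); // hypothetical URL
const msn = 1234; // next Media Sequence Number the server should block for
const part = 2;   // next partial-segment index within that segment
if (msn !== undefined) {
  url.searchParams.set('_HLS_msn', String(msn));
  if (part !== undefined) {
    url.searchParams.set('_HLS_part', String(part));
  }
}
console.log(url.toString());
// https://example.com/live/media.m3u8?_HLS_msn=1234&_HLS_part=2
```
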
@@ -6121,8 +6300,9 @@ function getCodecTiers(levels, audioTracksByGroup, minAutoLevel, maxAutoLevel) {
    }, {});
  }

- class AbrController {
+ class AbrController extends Logger {
    constructor(_hls) {
+     super('abr', _hls.logger);
      this.hls = void 0;
      this.lastLevelLoadSec = 0;
      this.lastLoadedFragLevel = -1;
@@ -6236,7 +6416,7 @@ class AbrController {
        this.resetEstimator(nextLoadLevelBitrate);
      }
      this.clearTimer();
-
+     this.warn(`Fragment ${frag.sn}${part ? ' part ' + part.index : ''} of level ${frag.level} is loading too slowly;
      Time to underbuffer: ${bufferStarvationDelay.toFixed(3)} s
      Estimated load time for current fragment: ${fragLoadedDelay.toFixed(3)} s
      Estimated load time for down switch fragment: ${fragLevelNextLoadedDelay.toFixed(3)} s
@@ -6256,7 +6436,7 @@ class AbrController {
    }
    resetEstimator(abrEwmaDefaultEstimate) {
      if (abrEwmaDefaultEstimate) {
-
+       this.log(`setting initial bwe to ${abrEwmaDefaultEstimate}`);
        this.hls.config.abrEwmaDefaultEstimate = abrEwmaDefaultEstimate;
      }
      this.firstSelection = -1;
@@ -6488,7 +6668,7 @@ class AbrController {
      }
      const firstLevel = this.hls.firstLevel;
      const clamped = Math.min(Math.max(firstLevel, minAutoLevel), maxAutoLevel);
-
+     this.warn(`Could not find best starting auto level. Defaulting to first in playlist ${firstLevel} clamped to ${clamped}`);
      return clamped;
    }
    get forcedAutoLevel() {
@@ -6534,6 +6714,9 @@ class AbrController {
        partCurrent,
        hls
      } = this;
+     if (hls.levels.length <= 1) {
+       return hls.loadLevel;
+     }
      const {
        maxAutoLevel,
        config,
@@ -6566,13 +6749,13 @@ class AbrController {
        // cap maxLoadingDelay and ensure it is not bigger 'than bitrate test' frag duration
        const maxLoadingDelay = currentFragDuration ? Math.min(currentFragDuration, config.maxLoadingDelay) : config.maxLoadingDelay;
        maxStarvationDelay = maxLoadingDelay - bitrateTestDelay;
-
+       this.info(`bitrate test took ${Math.round(1000 * bitrateTestDelay)}ms, set first fragment max fetchDuration to ${Math.round(1000 * maxStarvationDelay)} ms`);
        // don't use conservative factor on bitrate test
        bwFactor = bwUpFactor = 1;
      }
    }
    const bestLevel = this.findBestLevel(avgbw, minAutoLevel, maxAutoLevel, bufferStarvationDelay, maxStarvationDelay, bwFactor, bwUpFactor);
-
+   this.info(`${bufferStarvationDelay ? 'rebuffering expected' : 'buffer is empty'}, optimal quality level ${bestLevel}`);
    if (bestLevel > -1) {
      return bestLevel;
    }
@@ -6646,7 +6829,7 @@ class AbrController {
      currentVideoRange = preferHDR ? videoRanges[videoRanges.length - 1] : videoRanges[0];
      currentFrameRate = minFramerate;
      currentBw = Math.max(currentBw, minBitrate);
-
+     this.log(`picked start tier ${JSON.stringify(startTier)}`);
    } else {
      currentCodecSet = level == null ? void 0 : level.codecSet;
      currentVideoRange = level == null ? void 0 : level.videoRange;
@@ -6699,9 +6882,9 @@ class AbrController {
      const forcedAutoLevel = this.forcedAutoLevel;
      if (i !== loadLevel && (forcedAutoLevel === -1 || forcedAutoLevel !== loadLevel)) {
        if (levelsSkipped.length) {
-
+         this.trace(`Skipped level(s) ${levelsSkipped.join(',')} of ${maxAutoLevel} max with CODECS and VIDEO-RANGE:"${levels[levelsSkipped[0]].codecs}" ${levels[levelsSkipped[0]].videoRange}; not compatible with "${level.codecs}" ${currentVideoRange}`);
        }
-
+       this.info(`switch candidate:${selectionBaseLevel}->${i} adjustedbw(${Math.round(adjustedbw)})-bitrate=${Math.round(adjustedbw - bitrate)} ttfb:${ttfbEstimateSec.toFixed(1)} avgDuration:${avgDuration.toFixed(1)} maxFetchDuration:${maxFetchDuration.toFixed(1)} fetchDuration:${fetchDuration.toFixed(1)} firstSelection:${firstSelection} codecSet:${currentCodecSet} videoRange:${currentVideoRange} hls.loadLevel:${loadLevel}`);
      }
      if (firstSelection) {
        this.firstSelection = i;
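
The "loading too slowly" warning restored above belongs to the ABR emergency down-switch check: the controller compares the estimated time to finish the in-flight fragment against the time left before the buffer underruns, and against the estimated load time of the same fragment at a lower level. A simplified sketch of that decision (variable names follow the log message; this is not the exact dist code):

```js
// Simplified sketch of the ABR emergency down-switch test.
// All inputs are in seconds; the values below are hypothetical.
function shouldAbortAndSwitchDown(bufferStarvationDelay, fragLoadedDelay, fragLevelNextLoadedDelay) {
  // Abort only if the current fragment cannot finish before we underbuffer,
  // while the lower-level fragment is expected to make it in time.
  return (
    fragLoadedDelay > bufferStarvationDelay &&
    fragLevelNextLoadedDelay < bufferStarvationDelay
  );
}

console.log(shouldAbortAndSwitchDown(2.0, 3.5, 1.2)); // true  -> warn and switch down
console.log(shouldAbortAndSwitchDown(4.0, 3.5, 1.2)); // false -> keep loading
```
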
@@ -6744,40 +6927,29 @@ class BufferHelper {
   * Return true if `media`'s buffered include `position`
   */
  static isBuffered(media, position) {
-
-
-
-
-
-       return true;
-     }
+   if (media) {
+     const buffered = BufferHelper.getBuffered(media);
+     for (let i = buffered.length; i--;) {
+       if (position >= buffered.start(i) && position <= buffered.end(i)) {
+         return true;
        }
      }
-   } catch (error) {
-     // this is to catch
-     // InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
-     // This SourceBuffer has been removed from the parent media source
    }
    return false;
  }
  static bufferInfo(media, pos, maxHoleDuration) {
-
-
-
+   if (media) {
+     const vbuffered = BufferHelper.getBuffered(media);
+     if (vbuffered.length) {
        const buffered = [];
-       let i;
-       for (i = 0; i < vbuffered.length; i++) {
+       for (let i = 0; i < vbuffered.length; i++) {
          buffered.push({
            start: vbuffered.start(i),
            end: vbuffered.end(i)
          });
        }
-       return
+       return BufferHelper.bufferedInfo(buffered, pos, maxHoleDuration);
      }
-   } catch (error) {
-     // this is to catch
-     // InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
-     // This SourceBuffer has been removed from the parent media source
    }
    return {
      len: 0,
@@ -6789,14 +6961,7 @@ class BufferHelper {
  static bufferedInfo(buffered, pos, maxHoleDuration) {
    pos = Math.max(0, pos);
    // sort on buffer.start/smaller end (IE does not always return sorted buffered range)
-   buffered.sort(
-     const diff = a.start - b.start;
-     if (diff) {
-       return diff;
-     } else {
-       return b.end - a.end;
-     }
-   });
+   buffered.sort((a, b) => a.start - b.start || b.end - a.end);
    let buffered2 = [];
    if (maxHoleDuration) {
      // there might be some small holes between buffer time range
@@ -6863,7 +7028,7 @@ class BufferHelper {
   */
  static getBuffered(media) {
    try {
-     return media.buffered;
+     return media.buffered || noopBuffered;
    } catch (e) {
      logger.log('failed to get media.buffered', e);
      return noopBuffered;
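
`bufferedInfo` (unchanged here apart from the compact sort comparator) merges ranges that sit closer together than `maxBufferHole`, so tiny gaps left by the muxer do not count as rebuffer points. A small, self-contained illustration of that merge behaviour (values hypothetical; this is not the library's exact implementation):

```js
// Illustrative hole-merging pass, similar in spirit to BufferHelper.bufferedInfo.
function mergeSmallHoles(ranges, maxHoleDuration) {
  const sorted = [...ranges].sort((a, b) => a.start - b.start || b.end - a.end);
  const merged = [];
  for (const r of sorted) {
    const last = merged[merged.length - 1];
    if (last && r.start - last.end < maxHoleDuration) {
      last.end = Math.max(last.end, r.end); // swallow the sub-threshold gap
    } else {
      merged.push({ ...r });
    }
  }
  return merged;
}

console.log(mergeSmallHoles([{ start: 0, end: 4.98 }, { start: 5.02, end: 10 }], 0.1));
// [ { start: 0, end: 10 } ] -> the 40ms hole is ignored
```
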
@@ -6888,24 +7053,22 @@ class BufferOperationQueue {
      this.executeNext(type);
    }
  }
- insertAbort(operation, type) {
-   const queue = this.queues[type];
-   queue.unshift(operation);
-   this.executeNext(type);
- }
  appendBlocker(type) {
-
-
-
+   return new Promise(resolve => {
+     const operation = {
+       execute: resolve,
+       onStart: () => {},
+       onComplete: () => {},
+       onError: () => {}
+     };
+     this.append(operation, type);
    });
-
-
-
-
-
-   }
-   this.append(operation, type);
-   return promise;
+ }
+ unblockAudio(op) {
+   const queue = this.queues.audio;
+   if (queue[0] === op) {
+     this.shiftAndExecuteNext('audio');
+   }
  }
  executeNext(type) {
    const queue = this.queues[type];
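
`appendBlocker` now builds its blocking operation inline: it enqueues an operation whose `execute` is the Promise's `resolve`, so the returned Promise settles only when the queue reaches it, and later appends stay parked behind it until the caller cycles the queue. A minimal standalone sketch of the same idea (a simplified queue, not the library class):

```js
// Minimal sketch of a promise-based "blocker" in a serial operation queue
// (simplified stand-in for BufferOperationQueue, not the library class itself).
class TinyQueue {
  constructor() {
    this.queue = [];
  }
  append(op) {
    this.queue.push(op);
    if (this.queue.length === 1) op.execute(); // an idle queue starts the operation immediately
  }
  shiftAndExecuteNext() {
    this.queue.shift();
    if (this.queue[0]) this.queue[0].execute();
  }
  appendBlocker() {
    // Resolves once the queue reaches this operation; later appends wait behind it.
    return new Promise(resolve => this.append({ execute: resolve }));
  }
}
// Usage: await queue.appendBlocker(); /* buffers are now "blocked" */ queue.shiftAndExecuteNext();
```
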
@@ -6937,8 +7100,9 @@ class BufferOperationQueue {
  }

  const VIDEO_CODEC_PROFILE_REPLACE = /(avc[1234]|hvc1|hev1|dvh[1e]|vp09|av01)(?:\.[^.,]+)+/;
- class BufferController {
-   constructor(hls) {
+ class BufferController extends Logger {
+   constructor(hls, fragmentTracker) {
+     super('buffer-controller', hls.logger);
      // The level details used to determine duration, target-duration and live
      this.details = null;
      // cache the self generated object url to detect hijack of video tag
@@ -6948,6 +7112,7 @@ class BufferController {
      // References to event listeners for each SourceBuffer, so that they can be referenced for event removal
      this.listeners = void 0;
      this.hls = void 0;
+     this.fragmentTracker = void 0;
      // The number of BUFFER_CODEC events received before any sourceBuffers are created
      this.bufferCodecEventsExpected = 0;
      // The total number of BUFFER_CODEC events received
@@ -6958,6 +7123,10 @@ class BufferController {
      this.mediaSource = null;
      // Last MP3 audio chunk appended
      this.lastMpegAudioChunk = null;
+     // Audio fragment blocked from appending until corresponding video appends or context changes
+     this.blockedAudioAppend = null;
+     // Keep track of video append position for unblocking audio
+     this.lastVideoAppendEnd = 0;
      this.appendSource = void 0;
      // counters
      this.appendErrors = {
@@ -6968,9 +7137,6 @@ class BufferController {
      this.tracks = {};
      this.pendingTracks = {};
      this.sourceBuffer = void 0;
-     this.log = void 0;
-     this.warn = void 0;
-     this.error = void 0;
      this._onEndStreaming = event => {
        if (!this.hls) {
          return;
@@ -6992,7 +7158,10 @@ class BufferController {
        this.log('Media source opened');
        if (media) {
          media.removeEventListener('emptied', this._onMediaEmptied);
-         this.
+         const durationAndRange = this.getDurationAndRange();
+         if (durationAndRange) {
+           this.updateMediaSource(durationAndRange);
+         }
          this.hls.trigger(Events.MEDIA_ATTACHED, {
            media,
            mediaSource: mediaSource
@@ -7016,15 +7185,12 @@ class BufferController {
          _objectUrl
        } = this;
        if (mediaSrc !== _objectUrl) {
-
+         this.error(`Media element src was set while attaching MediaSource (${_objectUrl} > ${mediaSrc})`);
        }
      };
      this.hls = hls;
-
+     this.fragmentTracker = fragmentTracker;
      this.appendSource = isManagedMediaSource(getMediaSource(hls.config.preferManagedMediaSource));
-     this.log = logger.log.bind(logger, logPrefix);
-     this.warn = logger.warn.bind(logger, logPrefix);
-     this.error = logger.error.bind(logger, logPrefix);
      this._initSourceBuffer();
      this.registerListeners();
    }
@@ -7036,7 +7202,13 @@ class BufferController {
      this.details = null;
      this.lastMpegAudioChunk = null;
      // @ts-ignore
-     this.hls = null;
+     this.hls = this.fragmentTracker = null;
+     // @ts-ignore
+     this._onMediaSourceOpen = this._onMediaSourceClose = null;
+     // @ts-ignore
+     this._onMediaSourceEnded = null;
+     // @ts-ignore
+     this._onStartStreaming = this._onEndStreaming = null;
    }
    registerListeners() {
      const {
@@ -7086,6 +7258,8 @@ class BufferController {
        audiovideo: 0
      };
      this.lastMpegAudioChunk = null;
+     this.blockedAudioAppend = null;
+     this.lastVideoAppendEnd = 0;
    }
    onManifestLoading() {
      this.bufferCodecEventsExpected = this._bufferCodecEventsTotal = 0;
@@ -7168,34 +7342,34 @@ class BufferController {
        mediaSource.removeEventListener('startstreaming', this._onStartStreaming);
        mediaSource.removeEventListener('endstreaming', this._onEndStreaming);
      }
+     this.mediaSource = null;
+     this._objectUrl = null;
+   }

-
-
-
-
-
-
-
+   // Detach properly the MediaSource from the HTMLMediaElement as
+   // suggested in https://github.com/w3c/media-source/issues/53.
+   if (media) {
+     media.removeEventListener('emptied', this._onMediaEmptied);
+     if (_objectUrl) {
+       self.URL.revokeObjectURL(_objectUrl);
+     }

-
-
-
-
-
-
-     }
-     media.load();
-   } else {
-     this.warn('media|source.src was changed by a third party - skip cleanup');
+     // clean up video tag src only if it's our own url. some external libraries might
+     // hijack the video tag and change its 'src' without destroying the Hls instance first
+     if (this.mediaSrc === _objectUrl) {
+       media.removeAttribute('src');
+       if (this.appendSource) {
+         removeSourceChildren(media);
      }
+       media.load();
+     } else {
+       this.warn('media|source.src was changed by a third party - skip cleanup');
    }
-   this.mediaSource = null;
    this.media = null;
-   this._objectUrl = null;
-   this.bufferCodecEventsExpected = this._bufferCodecEventsTotal;
-   this.pendingTracks = {};
-   this.tracks = {};
  }
+ this.bufferCodecEventsExpected = this._bufferCodecEventsTotal;
+ this.pendingTracks = {};
+ this.tracks = {};
    this.hls.trigger(Events.MEDIA_DETACHED, undefined);
  }
  onBufferReset() {
@@ -7203,6 +7377,7 @@ class BufferController {
      this.resetBuffer(type);
    });
    this._initSourceBuffer();
+   this.hls.resumeBuffering();
  }
  resetBuffer(type) {
    const sb = this.sourceBuffer[type];
@@ -7226,9 +7401,10 @@ class BufferController {
    const trackNames = Object.keys(data);
    trackNames.forEach(trackName => {
      if (sourceBufferCount) {
+       var _track$buffer;
        // check if SourceBuffer codec needs to change
        const track = this.tracks[trackName];
-       if (track && typeof track.buffer.changeType === 'function') {
+       if (track && typeof ((_track$buffer = track.buffer) == null ? void 0 : _track$buffer.changeType) === 'function') {
          var _trackCodec;
          const {
            id,
@@ -7298,20 +7474,54 @@ class BufferController {
      };
      operationQueue.append(operation, type, !!this.pendingTracks[type]);
    }
+   blockAudio(partOrFrag) {
+     var _this$fragmentTracker;
+     const pStart = partOrFrag.start;
+     const pTime = pStart + partOrFrag.duration * 0.05;
+     const atGap = ((_this$fragmentTracker = this.fragmentTracker.getAppendedFrag(pStart, PlaylistLevelType.MAIN)) == null ? void 0 : _this$fragmentTracker.gap) === true;
+     if (atGap) {
+       return;
+     }
+     const op = {
+       execute: () => {
+         var _this$fragmentTracker2;
+         if (this.lastVideoAppendEnd > pTime || this.sourceBuffer.video && BufferHelper.isBuffered(this.sourceBuffer.video, pTime) || ((_this$fragmentTracker2 = this.fragmentTracker.getAppendedFrag(pTime, PlaylistLevelType.MAIN)) == null ? void 0 : _this$fragmentTracker2.gap) === true) {
+           this.blockedAudioAppend = null;
+           this.operationQueue.shiftAndExecuteNext('audio');
+         }
+       },
+       onStart: () => {},
+       onComplete: () => {},
+       onError: () => {}
+     };
+     this.blockedAudioAppend = {
+       op,
+       frag: partOrFrag
+     };
+     this.operationQueue.append(op, 'audio', true);
+   }
+   unblockAudio() {
+     const blockedAudioAppend = this.blockedAudioAppend;
+     if (blockedAudioAppend) {
+       this.blockedAudioAppend = null;
+       this.operationQueue.unblockAudio(blockedAudioAppend.op);
+     }
+   }
    onBufferAppending(event, eventData) {
      const {
-       hls,
        operationQueue,
        tracks
      } = this;
      const {
        data,
        type,
+       parent,
        frag,
        part,
        chunkMeta
      } = eventData;
      const chunkStats = chunkMeta.buffering[type];
+     const sn = frag.sn;
      const bufferAppendingStart = self.performance.now();
      chunkStats.start = bufferAppendingStart;
      const fragBuffering = frag.stats.buffering;
@@ -7334,7 +7544,36 @@ class BufferController {
        checkTimestampOffset = !this.lastMpegAudioChunk || chunkMeta.id === 1 || this.lastMpegAudioChunk.sn !== chunkMeta.sn;
        this.lastMpegAudioChunk = chunkMeta;
      }
-
+
+     // Block audio append until overlapping video append
+     const videoSb = this.sourceBuffer.video;
+     if (videoSb && sn !== 'initSegment') {
+       const partOrFrag = part || frag;
+       const blockedAudioAppend = this.blockedAudioAppend;
+       if (type === 'audio' && parent !== 'main' && !this.blockedAudioAppend) {
+         const pStart = partOrFrag.start;
+         const pTime = pStart + partOrFrag.duration * 0.05;
+         const vbuffered = videoSb.buffered;
+         const vappending = this.operationQueue.current('video');
+         if (!vbuffered.length && !vappending) {
+           // wait for video before appending audio
+           this.blockAudio(partOrFrag);
+         } else if (!vappending && !BufferHelper.isBuffered(videoSb, pTime) && this.lastVideoAppendEnd < pTime) {
+           // audio is ahead of video
+           this.blockAudio(partOrFrag);
+         }
+       } else if (type === 'video') {
+         const videoAppendEnd = partOrFrag.end;
+         if (blockedAudioAppend) {
+           const audioStart = blockedAudioAppend.frag.start;
+           if (videoAppendEnd > audioStart || videoAppendEnd < this.lastVideoAppendEnd || BufferHelper.isBuffered(videoSb, audioStart)) {
+             this.unblockAudio();
+           }
+         }
+         this.lastVideoAppendEnd = videoAppendEnd;
+       }
+     }
+     const fragStart = (part || frag).start;
      const operation = {
        execute: () => {
          chunkStats.executeStart = self.performance.now();
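
The new `blockAudio`/`unblockAudio` path above keeps a separate audio rendition from being appended far ahead of the video SourceBuffer: the audio append is parked in the operation queue until video has been appended up to roughly the same position, which avoids stalling on audio-only buffer ahead of a video gap. A toy illustration of the gating condition (hypothetical numbers; the real check also consults the fragment tracker for known gaps):

```js
// Illustrative check: only let an audio fragment through once video has been
// appended up to (or past) the start of that audio fragment.
function audioAppendAllowed(audioFrag, lastVideoAppendEnd) {
  const probeTime = audioFrag.start + audioFrag.duration * 0.05; // a point just inside the fragment
  return lastVideoAppendEnd > probeTime;
}

console.log(audioAppendAllowed({ start: 20, duration: 4 }, 18)); // false -> block audio
console.log(audioAppendAllowed({ start: 20, duration: 4 }, 24)); // true  -> append audio
```
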
@@ -7343,7 +7582,7 @@ class BufferController {
      if (sb) {
        const delta = fragStart - sb.timestampOffset;
        if (Math.abs(delta) >= 0.1) {
-         this.log(`Updating audio SourceBuffer timestampOffset to ${fragStart} (delta: ${delta}) sn: ${
+         this.log(`Updating audio SourceBuffer timestampOffset to ${fragStart} (delta: ${delta}) sn: ${sn})`);
          sb.timestampOffset = fragStart;
        }
      }
@@ -7410,22 +7649,21 @@ class BufferController {
      /* with UHD content, we could get loop of quota exceeded error until
         browser is able to evict some data from sourcebuffer. Retrying can help recover.
      */
-     this.warn(`Failed ${appendErrorCount}/${hls.config.appendErrorMaxRetry} times to append segment in "${type}" sourceBuffer`);
-     if (appendErrorCount >= hls.config.appendErrorMaxRetry) {
+     this.warn(`Failed ${appendErrorCount}/${this.hls.config.appendErrorMaxRetry} times to append segment in "${type}" sourceBuffer`);
+     if (appendErrorCount >= this.hls.config.appendErrorMaxRetry) {
        event.fatal = true;
      }
    }
-   hls.trigger(Events.ERROR, event);
+   this.hls.trigger(Events.ERROR, event);
  }
  };
  operationQueue.append(operation, type, !!this.pendingTracks[type]);
  }
-
-
-
-
-
-   execute: this.removeExecutor.bind(this, type, data.startOffset, data.endOffset),
+ getFlushOp(type, start, end) {
+   return {
+     execute: () => {
+       this.removeExecutor(type, start, end);
+     },
      onStart: () => {
        // logger.debug(`[buffer-controller]: Started flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
      },
@@ -7438,12 +7676,22 @@ class BufferController {
      onError: error => {
        this.warn(`Failed to remove from ${type} SourceBuffer`, error);
      }
-   }
-
-
+   };
+ }
+ onBufferFlushing(event, data) {
+   const {
+     operationQueue
+   } = this;
+   const {
+     type,
+     startOffset,
+     endOffset
+   } = data;
+   if (type) {
+     operationQueue.append(this.getFlushOp(type, startOffset, endOffset), type);
    } else {
-     this.getSourceBufferTypes().forEach(
-     operationQueue.append(
+     this.getSourceBufferTypes().forEach(sbType => {
+       operationQueue.append(this.getFlushOp(sbType, startOffset, endOffset), sbType);
      });
    }
  }
@@ -7490,6 +7738,9 @@ class BufferController {
    // on BUFFER_EOS mark matching sourcebuffer(s) as ended and trigger checkEos()
    // an undefined data.type will mark all buffers as EOS.
    onBufferEos(event, data) {
+     if (data.type === 'video') {
+       this.unblockAudio();
+     }
      const ended = this.getSourceBufferTypes().reduce((acc, type) => {
        const sb = this.sourceBuffer[type];
        if (sb && (!data.type || data.type === type)) {
@@ -7532,10 +7783,14 @@ class BufferController {
        return;
      }
      this.details = details;
+     const durationAndRange = this.getDurationAndRange();
+     if (!durationAndRange) {
+       return;
+     }
      if (this.getSourceBufferTypes().length) {
-       this.blockBuffers(this.
+       this.blockBuffers(() => this.updateMediaSource(durationAndRange));
      } else {
-       this.
+       this.updateMediaSource(durationAndRange);
      }
    }
    trimBuffers() {
@@ -7640,9 +7895,9 @@ class BufferController {
     * 'liveDurationInfinity` is set to `true`
     * More details: https://github.com/video-dev/hls.js/issues/355
     */
-
+   getDurationAndRange() {
      if (!this.details || !this.media || !this.mediaSource || this.mediaSource.readyState !== 'open') {
-       return;
+       return null;
      }
      const {
        details,
@@ -7656,25 +7911,41 @@ class BufferController {
      if (details.live && hls.config.liveDurationInfinity) {
        // Override duration to Infinity
        mediaSource.duration = Infinity;
-
+       const len = details.fragments.length;
+       if (len && details.live && !!mediaSource.setLiveSeekableRange) {
+         const start = Math.max(0, details.fragments[0].start);
+         const end = Math.max(start, start + details.totalduration);
+         return {
+           duration: Infinity,
+           start,
+           end
+         };
+       }
+       return {
+         duration: Infinity
+       };
      } else if (levelDuration > msDuration && levelDuration > mediaDuration || !isFiniteNumber(mediaDuration)) {
-
-
-
-       // flushing already buffered portion when switching between quality level
-       this.log(`Updating Media Source duration to ${levelDuration.toFixed(3)}`);
-       mediaSource.duration = levelDuration;
+       return {
+         duration: levelDuration
+       };
      }
+     return null;
    }
-
-
-
-
-
-
-
-
-
+   updateMediaSource({
+     duration,
+     start,
+     end
+   }) {
+     if (!this.media || !this.mediaSource || this.mediaSource.readyState !== 'open') {
+       return;
+     }
+     if (isFiniteNumber(duration)) {
+       this.log(`Updating Media Source duration to ${duration.toFixed(3)}`);
+     }
+     this.mediaSource.duration = duration;
+     if (start !== undefined && end !== undefined) {
+       this.log(`Media Source duration is set to ${this.mediaSource.duration}. Setting seekable range to ${start}-${end}.`);
+       this.mediaSource.setLiveSeekableRange(start, end);
      }
    }
    checkPendingTracks() {
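
`getDurationAndRange`/`updateMediaSource` split the duration computation from the MediaSource mutation so the update can be deferred behind a buffer-blocking operation. The standard `MediaSource.setLiveSeekableRange()` API is what exposes the live DVR window here; a minimal standalone usage sketch (element, URL and numbers are hypothetical):

```js
// Minimal sketch of advertising a live seekable window on a MediaSource.
const video = document.querySelector('video');   // assumed existing <video> element
const mediaSource = new MediaSource();
video.src = URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', () => {
  mediaSource.duration = Infinity;               // live stream: no fixed duration
  if (mediaSource.setLiveSeekableRange) {
    // Say the playlist currently spans 120s starting at presentation time 30s.
    mediaSource.setLiveSeekableRange(30, 150);
  }
});
```
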
@@ -7860,6 +8131,7 @@ class BufferController {
      }
      return;
    }
+   sb.ending = false;
    sb.ended = false;
    sb.appendBuffer(data);
  }
@@ -7879,10 +8151,14 @@ class BufferController {

    // logger.debug(`[buffer-controller]: Blocking ${buffers} SourceBuffer`);
    const blockingOperations = buffers.map(type => operationQueue.appendBlocker(type));
-
+   const audioBlocked = buffers.length > 1 && !!this.blockedAudioAppend;
+   if (audioBlocked) {
+     this.unblockAudio();
+   }
+   Promise.all(blockingOperations).then(result => {
      // logger.debug(`[buffer-controller]: Blocking operation resolved; unblocking ${buffers} SourceBuffer`);
      onUnblocked();
-     buffers.forEach(type => {
+     buffers.forEach((type, i) => {
        const sb = this.sourceBuffer[type];
        // Only cycle the queue if the SB is not updating. There's a bug in Chrome which sets the SB updating flag to
        // true when changing the MediaSource duration (https://bugs.chromium.org/p/chromium/issues/detail?id=959359&can=2&q=mediasource%20duration)
@@ -8031,6 +8307,7 @@ class CapLevelController {
    }
    onMediaDetaching() {
      this.stopCapping();
+     this.media = null;
    }
    detectPlayerSize() {
      if (this.media) {
@@ -8043,10 +8320,10 @@ class CapLevelController {
      const hls = this.hls;
      const maxLevel = this.getMaxLevel(levels.length - 1);
      if (maxLevel !== this.autoLevelCapping) {
-       logger.log(`Setting autoLevelCapping to ${maxLevel}: ${levels[maxLevel].height}p@${levels[maxLevel].bitrate} for media ${this.mediaWidth}x${this.mediaHeight}`);
+       hls.logger.log(`Setting autoLevelCapping to ${maxLevel}: ${levels[maxLevel].height}p@${levels[maxLevel].bitrate} for media ${this.mediaWidth}x${this.mediaHeight}`);
      }
      hls.autoLevelCapping = maxLevel;
-     if (hls.autoLevelCapping > this.autoLevelCapping && this.streamController) {
+     if (hls.autoLevelEnabled && hls.autoLevelCapping > this.autoLevelCapping && this.streamController) {
        // if auto level capping has a higher value for the previous one, flush the buffer using nextLevelSwitch
        // usually happen when the user go to the fullscreen mode.
        this.streamController.nextLevelSwitch();
@@ -8182,9 +8459,11 @@ class FPSController {
    }
    registerListeners() {
      this.hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
+     this.hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
    }
    unregisterListeners() {
      this.hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
+     this.hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
    }
    destroy() {
      if (this.timer) {
@@ -8206,6 +8485,9 @@ class FPSController {
        this.timer = self.setInterval(this.checkFPSInterval.bind(this), config.fpsDroppedMonitoringPeriod);
      }
    }
+   onMediaDetaching() {
+     this.media = null;
+   }
    checkFPS(video, decodedFrames, droppedFrames) {
      const currentTime = performance.now();
      if (decodedFrames) {
@@ -8221,10 +8503,10 @@ class FPSController {
          totalDroppedFrames: droppedFrames
        });
        if (droppedFPS > 0) {
-         // logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
+         // hls.logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
          if (currentDropped > hls.config.fpsDroppedMonitoringThreshold * currentDecoded) {
            let currentLevel = hls.currentLevel;
-           logger.warn('drop FPS ratio greater than max allowed value for currentLevel: ' + currentLevel);
+           hls.logger.warn('drop FPS ratio greater than max allowed value for currentLevel: ' + currentLevel);
            if (currentLevel > 0 && (hls.autoLevelCapping === -1 || hls.autoLevelCapping >= currentLevel)) {
              currentLevel = currentLevel - 1;
              hls.trigger(Events.FPS_DROP_LEVEL_CAPPING, {
@@ -8257,14 +8539,14 @@ class FPSController {
  }

  const PATHWAY_PENALTY_DURATION_MS = 300000;
- class ContentSteeringController {
+ class ContentSteeringController extends Logger {
    constructor(hls) {
+     super('content-steering', hls.logger);
      this.hls = void 0;
-     this.log = void 0;
      this.loader = null;
      this.uri = null;
      this.pathwayId = '.';
-     this.
+     this._pathwayPriority = null;
      this.timeToLoad = 300;
      this.reloadTimer = -1;
      this.updated = 0;
@@ -8275,7 +8557,6 @@ class ContentSteeringController {
      this.subtitleTracks = null;
      this.penalizedPathways = {};
      this.hls = hls;
-     this.log = logger.log.bind(logger, `[content-steering]:`);
      this.registerListeners();
    }
    registerListeners() {
@@ -8295,6 +8576,20 @@ class ContentSteeringController {
      hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
      hls.off(Events.ERROR, this.onError, this);
    }
+   pathways() {
+     return (this.levels || []).reduce((pathways, level) => {
+       if (pathways.indexOf(level.pathwayId) === -1) {
+         pathways.push(level.pathwayId);
+       }
+       return pathways;
+     }, []);
+   }
+   get pathwayPriority() {
+     return this._pathwayPriority;
+   }
+   set pathwayPriority(pathwayPriority) {
+     this.updatePathwayPriority(pathwayPriority);
+   }
    startLoad() {
      this.started = true;
      this.clearTimeout();
@@ -8368,7 +8663,7 @@ class ContentSteeringController {
      } = data;
      if ((errorAction == null ? void 0 : errorAction.action) === NetworkErrorAction.SendAlternateToPenaltyBox && errorAction.flags === ErrorActionFlags.MoveAllAlternatesMatchingHost) {
        const levels = this.levels;
-       let pathwayPriority = this.
+       let pathwayPriority = this._pathwayPriority;
        let errorPathway = this.pathwayId;
        if (data.context) {
          const {
@@ -8387,19 +8682,14 @@ class ContentSteeringController {
        }
        if (!pathwayPriority && levels) {
          // If PATHWAY-PRIORITY was not provided, list pathways for error handling
-         pathwayPriority =
-           if (pathways.indexOf(level.pathwayId) === -1) {
-             pathways.push(level.pathwayId);
-           }
-           return pathways;
-         }, []);
+         pathwayPriority = this.pathways();
        }
        if (pathwayPriority && pathwayPriority.length > 1) {
          this.updatePathwayPriority(pathwayPriority);
          errorAction.resolved = this.pathwayId !== errorPathway;
        }
        if (!errorAction.resolved) {
-
+         this.warn(`Could not resolve ${data.details} ("${data.error.message}") with content-steering for Pathway: ${errorPathway} levels: ${levels ? levels.length : levels} priorities: ${JSON.stringify(pathwayPriority)} penalized: ${JSON.stringify(this.penalizedPathways)}`);
        }
      }
    }
@@ -8426,7 +8716,7 @@ class ContentSteeringController {
      return this.levels.filter(level => pathwayId === level.pathwayId);
    }
    updatePathwayPriority(pathwayPriority) {
-     this.
+     this._pathwayPriority = pathwayPriority;
      let levels;

      // Evaluate if we should remove the pathway from the penalized list
@@ -8570,7 +8860,7 @@ class ContentSteeringController {
      onSuccess: (response, stats, context, networkDetails) => {
        this.log(`Loaded steering manifest: "${url}"`);
        const steeringData = response.data;
-       if (steeringData.VERSION !== 1) {
+       if ((steeringData == null ? void 0 : steeringData.VERSION) !== 1) {
          this.log(`Steering VERSION ${steeringData.VERSION} not supported!`);
          return;
        }
@@ -8713,8 +9003,6 @@ class XhrLoader {
      this.config = null;
      this.context = null;
      this.xhrSetup = null;
-     // @ts-ignore
-     this.stats = null;
    }
    abortInternal() {
      const loader = this.loader;
@@ -8762,13 +9050,14 @@ class XhrLoader {
      const xhrSetup = this.xhrSetup;
      if (xhrSetup) {
        Promise.resolve().then(() => {
-         if (this.stats.aborted) return;
+         if (this.loader !== xhr || this.stats.aborted) return;
          return xhrSetup(xhr, context.url);
        }).catch(error => {
+         if (this.loader !== xhr || this.stats.aborted) return;
          xhr.open('GET', context.url, true);
          return xhrSetup(xhr, context.url);
        }).then(() => {
-         if (this.stats.aborted) return;
+         if (this.loader !== xhr || this.stats.aborted) return;
          this.openAndSendXhr(xhr, context, config);
        }).catch(error => {
          // IE11 throws an exception on xhr.open if attempting to access an HTTP resource over HTTPS
@@ -8888,8 +9177,8 @@ class XhrLoader {
      }
    }
    loadtimeout() {
-
-     const retryConfig =
+     if (!this.config) return;
+     const retryConfig = this.config.loadPolicy.timeoutRetry;
      const retryCount = this.stats.retry;
      if (shouldRetry(retryConfig, retryCount, true)) {
        this.retry(retryConfig);
@@ -9478,7 +9767,7 @@ const hlsDefaultConfig = _objectSpread2(_objectSpread2({
  });
  function timelineConfig() {
    return {
-     cueHandler:
+     cueHandler: HevcVideoParser,
      // used by timeline-controller
      enableWebVTT: false,
      // used by timeline-controller
@@ -9509,7 +9798,7 @@ function timelineConfig() {
  /**
   * @ignore
   */
- function mergeConfig(defaultConfig, userConfig) {
+ function mergeConfig(defaultConfig, userConfig, logger) {
    if ((userConfig.liveSyncDurationCount || userConfig.liveMaxLatencyDurationCount) && (userConfig.liveSyncDuration || userConfig.liveMaxLatencyDuration)) {
      throw new Error("Illegal hls.js config: don't mix up liveSyncDurationCount/liveMaxLatencyDurationCount and liveSyncDuration/liveMaxLatencyDuration");
    }
@@ -9579,7 +9868,7 @@ function deepCpy(obj) {
  /**
   * @ignore
   */
- function enableStreamingMode(config) {
+ function enableStreamingMode(config, logger) {
    const currentLoader = config.loader;
    if (currentLoader !== FetchLoader && currentLoader !== XhrLoader) {
      // If a developer has configured their own loader, respect that choice
@@ -9596,10 +9885,9 @@ function enableStreamingMode(config) {
    }
  }

- let chromeOrFirefox;
  class LevelController extends BasePlaylistController {
    constructor(hls, contentSteeringController) {
-     super(hls, '
+     super(hls, 'level-controller');
      this._levels = [];
      this._firstLevel = -1;
      this._maxAutoLevel = -1;
@@ -9670,23 +9958,15 @@ class LevelController extends BasePlaylistController {
      let videoCodecFound = false;
      let audioCodecFound = false;
      data.levels.forEach(levelParsed => {
-       var
+       var _videoCodec;
        const attributes = levelParsed.attrs;
-
-       // erase audio codec info if browser does not support mp4a.40.34.
-       // demuxer will autodetect codec and fallback to mpeg/audio
        let {
          audioCodec,
          videoCodec
        } = levelParsed;
-       if (((_audioCodec = audioCodec) == null ? void 0 : _audioCodec.indexOf('mp4a.40.34')) !== -1) {
-         chromeOrFirefox || (chromeOrFirefox = /chrome|firefox/i.test(navigator.userAgent));
-         if (chromeOrFirefox) {
-           levelParsed.audioCodec = audioCodec = undefined;
-         }
-       }
        if (audioCodec) {
-
+         // Returns empty and set to undefined for 'mp4a.40.34' with fallback to 'audio/mpeg' SourceBuffer
+         levelParsed.audioCodec = audioCodec = getCodecCompatibleName(audioCodec, preferManagedMediaSource) || undefined;
        }
        if (((_videoCodec = videoCodec) == null ? void 0 : _videoCodec.indexOf('avc1')) === 0) {
          videoCodec = levelParsed.videoCodec = convertAVC1ToAVCOTI(videoCodec);
@@ -9981,6 +10261,25 @@ class LevelController extends BasePlaylistController {
    set startLevel(newLevel) {
      this._startLevel = newLevel;
    }
+   get pathwayPriority() {
+     if (this.steering) {
+       return this.steering.pathwayPriority;
+     }
+     return null;
+   }
+   set pathwayPriority(pathwayPriority) {
+     if (this.steering) {
+       const pathwaysList = this.steering.pathways();
+       const filteredPathwayPriority = pathwayPriority.filter(pathwayId => {
+         return pathwaysList.indexOf(pathwayId) !== -1;
+       });
+       if (pathwayPriority.length < 1) {
+         this.warn(`pathwayPriority ${pathwayPriority} should contain at least one pathway from list: ${pathwaysList}`);
+         return;
+       }
+       this.steering.pathwayPriority = filteredPathwayPriority;
+     }
+   }
    onError(event, data) {
      if (data.fatal || !data.context) {
        return;
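
The new `pathwayPriority` getter/setter on `LevelController` lets applications reorder Content Steering pathways at runtime, with unknown pathway IDs filtered out against `steering.pathways()`. A hedged usage sketch, assuming this setter is surfaced on the public `Hls` instance as `hls.pathwayPriority` and that the stream declares pathways named "cdn-a" and "cdn-b" (both names hypothetical):

```js
// Hypothetical usage; pathway IDs depend on the stream's content-steering data.
const hls = new Hls();
hls.loadSource('https://example.com/master.m3u8'); // hypothetical URL
hls.on(Hls.Events.MANIFEST_PARSED, () => {
  // Prefer "cdn-b" over "cdn-a"; IDs not present in the stream would be filtered out by the setter.
  hls.pathwayPriority = ['cdn-b', 'cdn-a'];
});
```
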
@@ -10028,7 +10327,12 @@ class LevelController extends BasePlaylistController {
      if (curLevel.fragmentError === 0) {
        curLevel.loadError = 0;
      }
-
+     // Ignore matching details populated by loading a Media Playlist directly
+     let previousDetails = curLevel.details;
+     if (previousDetails === data.details && previousDetails.advanced) {
+       previousDetails = undefined;
+     }
+     this.playlistLoaded(level, data, previousDetails);
    } else if ((_data$deliveryDirecti2 = data.deliveryDirectives) != null && _data$deliveryDirecti2.skip) {
      // received a delta playlist update that cannot be merged
      details.deltaUpdateFailed = true;
@@ -10206,13 +10510,16 @@ class FragmentTracker {
   * If not found any Fragment, return null
   */
  getBufferedFrag(position, levelType) {
+   return this.getFragAtPos(position, levelType, true);
+ }
+ getFragAtPos(position, levelType, buffered) {
    const {
      fragments
    } = this;
    const keys = Object.keys(fragments);
    for (let i = keys.length; i--;) {
      const fragmentEntity = fragments[keys[i]];
-     if ((fragmentEntity == null ? void 0 : fragmentEntity.body.type) === levelType && fragmentEntity.buffered) {
+     if ((fragmentEntity == null ? void 0 : fragmentEntity.body.type) === levelType && (!buffered || fragmentEntity.buffered)) {
        const frag = fragmentEntity.body;
        if (frag.start <= position && position <= frag.end) {
          return frag;
@@ -10467,7 +10774,8 @@ class FragmentTracker {
      const {
        frag,
        part,
-       timeRanges
+       timeRanges,
+       type
      } = data;
      if (frag.sn === 'initSegment') {
        return;
@@ -10482,10 +10790,8 @@ class FragmentTracker {
      }
      // Store the latest timeRanges loaded in the buffer
      this.timeRanges = timeRanges;
-
-
-     this.detectEvictedFragments(elementaryStream, timeRange, playlistType, part);
-   });
+     const timeRange = timeRanges[type];
+     this.detectEvictedFragments(type, timeRange, playlistType, part);
    }
    onFragBuffered(event, data) {
      this.detectPartialFragments(data);
@@ -10814,8 +11120,8 @@ function createLoaderContext(frag, part = null) {
    var _frag$decryptdata;
    let byteRangeStart = start;
    let byteRangeEnd = end;
-   if (frag.sn === 'initSegment' && ((_frag$decryptdata = frag.decryptdata) == null ? void 0 : _frag$decryptdata.method)
-     // MAP segment encrypted with method 'AES-128', when served with HTTP Range,
+   if (frag.sn === 'initSegment' && isMethodFullSegmentAesCbc((_frag$decryptdata = frag.decryptdata) == null ? void 0 : _frag$decryptdata.method)) {
+     // MAP segment encrypted with method 'AES-128' or 'AES-256' (cbc), when served with HTTP Range,
      // has the unencrypted size specified in the range.
      // Ref: https://tools.ietf.org/html/draft-pantos-hls-rfc8216bis-08#section-6.3.6
      const fragmentLen = end - start;
@@ -10848,6 +11154,9 @@ function createGapLoadError(frag, part) {
    (part ? part : frag).stats.aborted = true;
    return new LoadError(errorData);
  }
+ function isMethodFullSegmentAesCbc(method) {
+   return method === 'AES-128' || method === 'AES-256';
+ }
  class LoadError extends Error {
    constructor(data) {
      super(data.error.message);
@@ -10993,6 +11302,8 @@ class KeyLoader {
    }
    return this.loadKeyEME(keyInfo, frag);
    case 'AES-128':
+   case 'AES-256':
+   case 'AES-256-CTR':
      return this.loadKeyHTTP(keyInfo, frag);
    default:
      return Promise.reject(this.createKeyLoadError(frag, ErrorDetails.KEY_LOAD_ERROR, new Error(`Key supplied with unsupported METHOD: "${decryptdata.method}"`)));
@@ -11128,8 +11439,9 @@ class KeyLoader {
   * we are limiting the task execution per call stack to exactly one, but scheduling/post-poning further
   * task processing on the next main loop iteration (also known as "next tick" in the Node/JS runtime lingo).
   */
- class TaskLoop {
-   constructor() {
+ class TaskLoop extends Logger {
+   constructor(label, logger) {
+     super(label, logger);
      this._boundTick = void 0;
      this._tickTimer = null;
      this._tickInterval = null;
@@ -11397,33 +11709,61 @@ function alignMediaPlaylistByPDT(details, refDetails) {
  }

  class AESCrypto {
-   constructor(subtle, iv) {
+   constructor(subtle, iv, aesMode) {
      this.subtle = void 0;
      this.aesIV = void 0;
+     this.aesMode = void 0;
      this.subtle = subtle;
      this.aesIV = iv;
+     this.aesMode = aesMode;
    }
    decrypt(data, key) {
-
-
-
-
+     switch (this.aesMode) {
+       case DecrypterAesMode.cbc:
+         return this.subtle.decrypt({
+           name: 'AES-CBC',
+           iv: this.aesIV
+         }, key, data);
+       case DecrypterAesMode.ctr:
+         return this.subtle.decrypt({
+           name: 'AES-CTR',
+           counter: this.aesIV,
+           length: 64
+         },
+         //64 : NIST SP800-38A standard suggests that the counter should occupy half of the counter block
+         key, data);
+       default:
+         throw new Error(`[AESCrypto] invalid aes mode ${this.aesMode}`);
+     }
    }
  }

  class FastAESKey {
-   constructor(subtle, key) {
+   constructor(subtle, key, aesMode) {
      this.subtle = void 0;
      this.key = void 0;
+     this.aesMode = void 0;
      this.subtle = subtle;
      this.key = key;
+     this.aesMode = aesMode;
    }
    expandKey() {
+     const subtleAlgoName = getSubtleAlgoName(this.aesMode);
      return this.subtle.importKey('raw', this.key, {
-       name:
+       name: subtleAlgoName
      }, false, ['encrypt', 'decrypt']);
    }
  }
+ function getSubtleAlgoName(aesMode) {
+   switch (aesMode) {
+     case DecrypterAesMode.cbc:
+       return 'AES-CBC';
+     case DecrypterAesMode.ctr:
+       return 'AES-CTR';
+     default:
+       throw new Error(`[FastAESKey] invalid aes mode ${aesMode}`);
+   }
+ }

  // PKCS7
  function removePadding(array) {
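
These changes thread an `aesMode` through the decryption path so keys can be used with AES-CTR as well as AES-CBC via WebCrypto. A small standalone sketch of the two `SubtleCrypto` calls involved (key, IV and data are dummy values, not the library's own wiring):

```js
// Standalone WebCrypto sketch of the two modes selected by aesMode above.
async function decryptSegment(rawKey, iv, encrypted, useCtr) {
  const algoName = useCtr ? 'AES-CTR' : 'AES-CBC';
  const key = await crypto.subtle.importKey('raw', rawKey, { name: algoName }, false, ['decrypt']);
  const params = useCtr
    ? { name: 'AES-CTR', counter: iv, length: 64 } // counter occupies half the block, as in the diff
    : { name: 'AES-CBC', iv };
  return crypto.subtle.decrypt(params, key, encrypted);
}

// Example call with dummy 16-byte key/IV (real streams get these from the key loader):
// decryptSegment(new Uint8Array(16), new Uint8Array(16), someArrayBuffer, false);
```
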
@@ -11673,7 +12013,8 @@ class Decrypter {
      this.currentIV = null;
      this.currentResult = null;
      this.useSoftware = void 0;
-     this.
+     this.enableSoftwareAES = void 0;
+     this.enableSoftwareAES = config.enableSoftwareAES;
      this.removePKCS7Padding = removePKCS7Padding;
      // built in decryptor expects PKCS7 padding
      if (removePKCS7Padding) {
@@ -11724,10 +12065,10 @@ class Decrypter {
        this.softwareDecrypter = null;
      }
    }
-   decrypt(data, key, iv) {
+   decrypt(data, key, iv, aesMode) {
      if (this.useSoftware) {
        return new Promise((resolve, reject) => {
-         this.softwareDecrypt(new Uint8Array(data), key, iv);
+         this.softwareDecrypt(new Uint8Array(data), key, iv, aesMode);
          const decryptResult = this.flush();
          if (decryptResult) {
            resolve(decryptResult.buffer);
@@ -11736,17 +12077,21 @@ class Decrypter {
          }
        });
      }
-     return this.webCryptoDecrypt(new Uint8Array(data), key, iv);
+     return this.webCryptoDecrypt(new Uint8Array(data), key, iv, aesMode);
    }

    // Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
    // data is handled in the flush() call
-   softwareDecrypt(data, key, iv) {
+   softwareDecrypt(data, key, iv, aesMode) {
      const {
        currentIV,
        currentResult,
        remainderData
      } = this;
+     if (aesMode !== DecrypterAesMode.cbc || key.byteLength !== 16) {
+       logger.warn('SoftwareDecrypt: can only handle AES-128-CBC');
+       return null;
+     }
      this.logOnce('JS AES decrypt');
      // The output is staggered during progressive parsing - the current result is cached, and emitted on the next call
      // This is done in order to strip PKCS7 padding, which is found at the end of each segment. We only know we've reached
@@ -11779,13 +12124,13 @@ class Decrypter {
      }
      return result;
    }
-   webCryptoDecrypt(data, key, iv) {
+   webCryptoDecrypt(data, key, iv, aesMode) {
      if (this.key !== key || !this.fastAesKey) {
        if (!this.subtle) {
-         return Promise.resolve(this.onWebCryptoError(data, key, iv));
+         return Promise.resolve(this.onWebCryptoError(data, key, iv, aesMode));
        }
        this.key = key;
-       this.fastAesKey = new FastAESKey(this.subtle, key);
+       this.fastAesKey = new FastAESKey(this.subtle, key, aesMode);
      }
      return this.fastAesKey.expandKey().then(aesKey => {
        // decrypt using web crypto
@@ -11793,22 +12138,25 @@ class Decrypter {
        return Promise.reject(new Error('web crypto not initialized'));
      }
      this.logOnce('WebCrypto AES decrypt');
-     const crypto = new AESCrypto(this.subtle, new Uint8Array(iv));
+     const crypto = new AESCrypto(this.subtle, new Uint8Array(iv), aesMode);
      return crypto.decrypt(data.buffer, aesKey);
    }).catch(err => {
      logger.warn(`[decrypter]: WebCrypto Error, disable WebCrypto API, ${err.name}: ${err.message}`);
-     return this.onWebCryptoError(data, key, iv);
+     return this.onWebCryptoError(data, key, iv, aesMode);
    });
  }
- onWebCryptoError(data, key, iv) {
-
-
-
-
-
-
+ onWebCryptoError(data, key, iv, aesMode) {
+   const enableSoftwareAES = this.enableSoftwareAES;
+   if (enableSoftwareAES) {
+     this.useSoftware = true;
+     this.logEnabled = true;
+     this.softwareDecrypt(data, key, iv, aesMode);
+     const decryptResult = this.flush();
+     if (decryptResult) {
+       return decryptResult.buffer;
+     }
    }
-   throw new Error('WebCrypto and softwareDecrypt: failed to decrypt data');
+   throw new Error('WebCrypto' + (enableSoftwareAES ? ' and softwareDecrypt' : '') + ': failed to decrypt data');
  }
  getValidChunk(data) {
    let currentChunk = data;
@@ -11859,7 +12207,7 @@ const State = {
  };
  class BaseStreamController extends TaskLoop {
    constructor(hls, fragmentTracker, keyLoader, logPrefix, playlistType) {
-     super();
+     super(logPrefix, hls.logger);
      this.hls = void 0;
      this.fragPrevious = null;
      this.fragCurrent = null;
@@ -11884,22 +12232,98 @@ class BaseStreamController extends TaskLoop {
      this.startFragRequested = false;
      this.decrypter = void 0;
      this.initPTS = [];
-     this.
-     this.
-     this.
-
-
+     this.buffering = true;
+     this.loadingParts = false;
+     this.onMediaSeeking = () => {
+       const {
+         config,
+         fragCurrent,
+         media,
+         mediaBuffer,
+         state
+       } = this;
+       const currentTime = media ? media.currentTime : 0;
+       const bufferInfo = BufferHelper.bufferInfo(mediaBuffer ? mediaBuffer : media, currentTime, config.maxBufferHole);
+       this.log(`media seeking to ${isFiniteNumber(currentTime) ? currentTime.toFixed(3) : currentTime}, state: ${state}`);
+       if (this.state === State.ENDED) {
+         this.resetLoadingState();
+       } else if (fragCurrent) {
+         // Seeking while frag load is in progress
+         const tolerance = config.maxFragLookUpTolerance;
+         const fragStartOffset = fragCurrent.start - tolerance;
+         const fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance;
+         // if seeking out of buffered range or into new one
+         if (!bufferInfo.len || fragEndOffset < bufferInfo.start || fragStartOffset > bufferInfo.end) {
+           const pastFragment = currentTime > fragEndOffset;
+           // if the seek position is outside the current fragment range
+           if (currentTime < fragStartOffset || pastFragment) {
+             if (pastFragment && fragCurrent.loader) {
+               this.log('seeking outside of buffer while fragment load in progress, cancel fragment load');
+               fragCurrent.abortRequests();
+               this.resetLoadingState();
+             }
+             this.fragPrevious = null;
+           }
+         }
+       }
+       if (media) {
+         // Remove gap fragments
+         this.fragmentTracker.removeFragmentsInRange(currentTime, Infinity, this.playlistType, true);
+         this.lastCurrentTime = currentTime;
+         if (!this.loadingParts) {
+           const bufferEnd = Math.max(bufferInfo.end, currentTime);
+           const shouldLoadParts = this.shouldLoadParts(this.getLevelDetails(), bufferEnd);
+           if (shouldLoadParts) {
+             this.log(`LL-Part loading ON after seeking to ${currentTime.toFixed(2)} with buffer @${bufferEnd.toFixed(2)}`);
+             this.loadingParts = shouldLoadParts;
+           }
+         }
+       }
+
+       // in case seeking occurs although no media buffered, adjust startPosition and nextLoadPosition to seek target
+       if (!this.loadedmetadata && !bufferInfo.len) {
+         this.nextLoadPosition = this.startPosition = currentTime;
+       }
+
+       // Async tick to speed up processing
+       this.tickImmediate();
+     };
+     this.onMediaEnded = () => {
+       // reset startPosition and lastCurrentTime to restart playback @ stream beginning
+       this.startPosition = this.lastCurrentTime = 0;
+       if (this.playlistType === PlaylistLevelType.MAIN) {
+         this.hls.trigger(Events.MEDIA_ENDED, {
+           stalled: false
+         });
+       }
+     };
      this.playlistType = playlistType;
-     this.logPrefix = logPrefix;
-     this.log = logger.log.bind(logger, `${logPrefix}:`);
-     this.warn = logger.warn.bind(logger, `${logPrefix}:`);
      this.hls = hls;
      this.fragmentLoader = new FragmentLoader(hls.config);
      this.keyLoader = keyLoader;
      this.fragmentTracker = fragmentTracker;
      this.config = hls.config;
      this.decrypter = new Decrypter(hls.config);
+   }
+   registerListeners() {
+     const {
+       hls
+     } = this;
+     hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
+     hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
+     hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
      hls.on(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
+     hls.on(Events.ERROR, this.onError, this);
+   }
+   unregisterListeners() {
+     const {
+       hls
+     } = this;
+     hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
+     hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
+     hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
+     hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
+     hls.off(Events.ERROR, this.onError, this);
    }
    doTick() {
      this.onTickEnd();
@@ -11923,6 +12347,12 @@ class BaseStreamController extends TaskLoop {
|
|
11923
12347
|
this.clearNextTick();
|
11924
12348
|
this.state = State.STOPPED;
|
11925
12349
|
}
|
12350
|
+
pauseBuffering() {
|
12351
|
+
this.buffering = false;
|
12352
|
+
}
|
12353
|
+
resumeBuffering() {
|
12354
|
+
this.buffering = true;
|
12355
|
+
}
|
11926
12356
|
_streamEnded(bufferInfo, levelDetails) {
|
11927
12357
|
// If playlist is live, there is another buffered range after the current range, nothing buffered, media is detached,
|
11928
12358
|
// of nothing loading/loaded return false
|
@@ -11953,10 +12383,8 @@ class BaseStreamController extends TaskLoop {
|
|
11953
12383
|
}
|
11954
12384
|
onMediaAttached(event, data) {
|
11955
12385
|
const media = this.media = this.mediaBuffer = data.media;
|
11956
|
-
|
11957
|
-
|
11958
|
-
media.addEventListener('seeking', this.onvseeking);
|
11959
|
-
media.addEventListener('ended', this.onvended);
|
12386
|
+
media.addEventListener('seeking', this.onMediaSeeking);
|
12387
|
+
media.addEventListener('ended', this.onMediaEnded);
|
11960
12388
|
const config = this.config;
|
11961
12389
|
if (this.levels && config.autoStartLoad && this.state === State.STOPPED) {
|
11962
12390
|
this.startLoad(config.startPosition);
|
@@ -11970,10 +12398,9 @@ class BaseStreamController extends TaskLoop {
|
|
11970
12398
|
}
|
11971
12399
|
|
11972
12400
|
// remove video listeners
|
11973
|
-
if (media
|
11974
|
-
media.removeEventListener('seeking', this.
|
11975
|
-
media.removeEventListener('ended', this.
|
11976
|
-
this.onvseeking = this.onvended = null;
|
12401
|
+
if (media) {
|
12402
|
+
media.removeEventListener('seeking', this.onMediaSeeking);
|
12403
|
+
media.removeEventListener('ended', this.onMediaEnded);
|
11977
12404
|
}
|
11978
12405
|
if (this.keyLoader) {
|
11979
12406
|
this.keyLoader.detach();
|
@@ -11983,66 +12410,17 @@ class BaseStreamController extends TaskLoop {
|
|
11983
12410
|
this.fragmentTracker.removeAllFragments();
|
11984
12411
|
this.stopLoad();
|
11985
12412
|
}
|
11986
|
-
|
11987
|
-
|
11988
|
-
config,
|
11989
|
-
fragCurrent,
|
11990
|
-
media,
|
11991
|
-
mediaBuffer,
|
11992
|
-
state
|
11993
|
-
} = this;
|
11994
|
-
const currentTime = media ? media.currentTime : 0;
|
11995
|
-
const bufferInfo = BufferHelper.bufferInfo(mediaBuffer ? mediaBuffer : media, currentTime, config.maxBufferHole);
|
11996
|
-
this.log(`media seeking to ${isFiniteNumber(currentTime) ? currentTime.toFixed(3) : currentTime}, state: ${state}`);
|
11997
|
-
if (this.state === State.ENDED) {
|
11998
|
-
this.resetLoadingState();
|
11999
|
-
} else if (fragCurrent) {
|
12000
|
-
// Seeking while frag load is in progress
|
12001
|
-
const tolerance = config.maxFragLookUpTolerance;
|
12002
|
-
const fragStartOffset = fragCurrent.start - tolerance;
|
12003
|
-
const fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance;
|
12004
|
-
// if seeking out of buffered range or into new one
|
12005
|
-
if (!bufferInfo.len || fragEndOffset < bufferInfo.start || fragStartOffset > bufferInfo.end) {
|
12006
|
-
const pastFragment = currentTime > fragEndOffset;
|
12007
|
-
// if the seek position is outside the current fragment range
|
12008
|
-
if (currentTime < fragStartOffset || pastFragment) {
|
12009
|
-
if (pastFragment && fragCurrent.loader) {
|
12010
|
-
this.log('seeking outside of buffer while fragment load in progress, cancel fragment load');
|
12011
|
-
fragCurrent.abortRequests();
|
12012
|
-
this.resetLoadingState();
|
12013
|
-
}
|
12014
|
-
this.fragPrevious = null;
|
12015
|
-
}
|
12016
|
-
}
|
12017
|
-
}
|
12018
|
-
if (media) {
|
12019
|
-
// Remove gap fragments
|
12020
|
-
this.fragmentTracker.removeFragmentsInRange(currentTime, Infinity, this.playlistType, true);
|
12021
|
-
this.lastCurrentTime = currentTime;
|
12022
|
-
}
|
12023
|
-
|
12024
|
-
// in case seeking occurs although no media buffered, adjust startPosition and nextLoadPosition to seek target
|
12025
|
-
if (!this.loadedmetadata && !bufferInfo.len) {
|
12026
|
-
this.nextLoadPosition = this.startPosition = currentTime;
|
12027
|
-
}
|
12028
|
-
|
12029
|
-
// Async tick to speed up processing
|
12030
|
-
this.tickImmediate();
|
12031
|
-
}
|
12032
|
-
onMediaEnded() {
|
12033
|
-
// reset startPosition and lastCurrentTime to restart playback @ stream beginning
|
12034
|
-
this.startPosition = this.lastCurrentTime = 0;
|
12035
|
-
}
|
12413
|
+
onManifestLoading() {}
|
12414
|
+
onError(event, data) {}
|
12036
12415
|
onManifestLoaded(event, data) {
|
12037
12416
|
this.startTimeOffset = data.startTimeOffset;
|
12038
12417
|
this.initPTS = [];
|
12039
12418
|
}
|
12040
12419
|
onHandlerDestroying() {
|
12041
|
-
this.hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
|
12042
12420
|
this.stopLoad();
|
12043
12421
|
super.onHandlerDestroying();
|
12044
12422
|
// @ts-ignore
|
12045
|
-
this.hls = null;
|
12423
|
+
this.hls = this.onMediaSeeking = this.onMediaEnded = null;
|
12046
12424
|
}
|
12047
12425
|
onHandlerDestroyed() {
|
12048
12426
|
this.state = State.STOPPED;
|
@@ -12176,10 +12554,10 @@ class BaseStreamController extends TaskLoop {
|
|
12176
12554
|
const decryptData = frag.decryptdata;
|
12177
12555
|
|
12178
12556
|
// check to see if the payload needs to be decrypted
|
12179
|
-
if (payload && payload.byteLength > 0 && decryptData != null && decryptData.key && decryptData.iv && decryptData.method
|
12557
|
+
if (payload && payload.byteLength > 0 && decryptData != null && decryptData.key && decryptData.iv && isFullSegmentEncryption(decryptData.method)) {
|
12180
12558
|
const startTime = self.performance.now();
|
12181
12559
|
// decrypt init segment data
|
12182
|
-
return this.decrypter.decrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer).catch(err => {
|
12560
|
+
return this.decrypter.decrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer, getAesModeFromFullSegmentMethod(decryptData.method)).catch(err => {
|
12183
12561
|
hls.trigger(Events.ERROR, {
|
12184
12562
|
type: ErrorTypes.MEDIA_ERROR,
|
12185
12563
|
details: ErrorDetails.FRAG_DECRYPT_ERROR,
|
@@ -12220,7 +12598,9 @@ class BaseStreamController extends TaskLoop {
|
|
12220
12598
|
throw new Error('init load aborted, missing levels');
|
12221
12599
|
}
|
12222
12600
|
const stats = data.frag.stats;
|
12223
|
-
this.state
|
12601
|
+
if (this.state !== State.STOPPED) {
|
12602
|
+
this.state = State.IDLE;
|
12603
|
+
}
|
12224
12604
|
data.frag.data = new Uint8Array(data.payload);
|
12225
12605
|
stats.parsing.start = stats.buffering.start = self.performance.now();
|
12226
12606
|
stats.parsing.end = stats.buffering.end = self.performance.now();
|
@@ -12291,7 +12671,7 @@ class BaseStreamController extends TaskLoop {
|
|
12291
12671
|
}
|
12292
12672
|
let keyLoadingPromise = null;
|
12293
12673
|
if (frag.encrypted && !((_frag$decryptdata = frag.decryptdata) != null && _frag$decryptdata.key)) {
|
12294
|
-
this.log(`Loading key for ${frag.sn} of [${details.startSN}-${details.endSN}], ${this.
|
12674
|
+
this.log(`Loading key for ${frag.sn} of [${details.startSN}-${details.endSN}], ${this.playlistType === PlaylistLevelType.MAIN ? 'level' : 'track'} ${frag.level}`);
|
12295
12675
|
this.state = State.KEY_LOADING;
|
12296
12676
|
this.fragCurrent = frag;
|
12297
12677
|
keyLoadingPromise = this.keyLoader.load(frag).then(keyLoadedData => {
|
@@ -12312,8 +12692,16 @@ class BaseStreamController extends TaskLoop {
|
|
12312
12692
|
} else if (!frag.encrypted && details.encryptedFragments.length) {
|
12313
12693
|
this.keyLoader.loadClear(frag, details.encryptedFragments);
|
12314
12694
|
}
|
12695
|
+
const fragPrevious = this.fragPrevious;
|
12696
|
+
if (frag.sn !== 'initSegment' && (!fragPrevious || frag.sn !== fragPrevious.sn)) {
|
12697
|
+
const shouldLoadParts = this.shouldLoadParts(level.details, frag.end);
|
12698
|
+
if (shouldLoadParts !== this.loadingParts) {
|
12699
|
+
this.log(`LL-Part loading ${shouldLoadParts ? 'ON' : 'OFF'} loading sn ${fragPrevious == null ? void 0 : fragPrevious.sn}->${frag.sn}`);
|
12700
|
+
this.loadingParts = shouldLoadParts;
|
12701
|
+
}
|
12702
|
+
}
|
12315
12703
|
targetBufferTime = Math.max(frag.start, targetBufferTime || 0);
|
12316
|
-
if (this.
|
12704
|
+
if (this.loadingParts && frag.sn !== 'initSegment') {
|
12317
12705
|
const partList = details.partList;
|
12318
12706
|
if (partList && progressCallback) {
|
12319
12707
|
if (targetBufferTime > frag.end && details.fragmentHint) {
|
@@ -12322,7 +12710,7 @@ class BaseStreamController extends TaskLoop {
|
|
12322
12710
|
const partIndex = this.getNextPart(partList, frag, targetBufferTime);
|
12323
12711
|
if (partIndex > -1) {
|
12324
12712
|
const part = partList[partIndex];
|
12325
|
-
this.log(`Loading part sn: ${frag.sn} p: ${part.index} cc: ${frag.cc} of playlist [${details.startSN}-${details.endSN}] parts [0-${partIndex}-${partList.length - 1}] ${this.
|
12713
|
+
this.log(`Loading part sn: ${frag.sn} p: ${part.index} cc: ${frag.cc} of playlist [${details.startSN}-${details.endSN}] parts [0-${partIndex}-${partList.length - 1}] ${this.playlistType === PlaylistLevelType.MAIN ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
|
12326
12714
|
this.nextLoadPosition = part.start + part.duration;
|
12327
12715
|
this.state = State.FRAG_LOADING;
|
12328
12716
|
let _result;
|
@@ -12351,7 +12739,14 @@ class BaseStreamController extends TaskLoop {
|
|
12351
12739
|
}
|
12352
12740
|
}
|
12353
12741
|
}
|
12354
|
-
|
12742
|
+
if (frag.sn !== 'initSegment' && this.loadingParts) {
|
12743
|
+
this.log(`LL-Part loading OFF after next part miss @${targetBufferTime.toFixed(2)}`);
|
12744
|
+
this.loadingParts = false;
|
12745
|
+
} else if (!frag.url) {
|
12746
|
+
// Selected fragment hint for part but not loading parts
|
12747
|
+
return Promise.resolve(null);
|
12748
|
+
}
|
12749
|
+
this.log(`Loading fragment ${frag.sn} cc: ${frag.cc} ${details ? 'of [' + details.startSN + '-' + details.endSN + '] ' : ''}${this.playlistType === PlaylistLevelType.MAIN ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
|
12355
12750
|
// Don't update nextLoadPosition for fragments which are not buffered
|
12356
12751
|
if (isFiniteNumber(frag.sn) && !this.bitrateTest) {
|
12357
12752
|
this.nextLoadPosition = frag.start + frag.duration;
|
@@ -12449,8 +12844,36 @@ class BaseStreamController extends TaskLoop {
|
|
12449
12844
|
if (part) {
|
12450
12845
|
part.stats.parsing.end = now;
|
12451
12846
|
}
|
12847
|
+
// See if part loading should be disabled/enabled based on buffer and playback position.
|
12848
|
+
if (frag.sn !== 'initSegment') {
|
12849
|
+
const levelDetails = this.getLevelDetails();
|
12850
|
+
const loadingPartsAtEdge = levelDetails && frag.sn > levelDetails.endSN;
|
12851
|
+
const shouldLoadParts = loadingPartsAtEdge || this.shouldLoadParts(levelDetails, frag.end);
|
12852
|
+
if (shouldLoadParts !== this.loadingParts) {
|
12853
|
+
this.log(`LL-Part loading ${shouldLoadParts ? 'ON' : 'OFF'} after parsing segment ending @${frag.end.toFixed(2)}`);
|
12854
|
+
this.loadingParts = shouldLoadParts;
|
12855
|
+
}
|
12856
|
+
}
|
12452
12857
|
this.updateLevelTiming(frag, part, level, chunkMeta.partial);
|
12453
12858
|
}
|
12859
|
+
shouldLoadParts(details, bufferEnd) {
|
12860
|
+
if (this.config.lowLatencyMode) {
|
12861
|
+
if (!details) {
|
12862
|
+
return this.loadingParts;
|
12863
|
+
}
|
12864
|
+
if (details != null && details.partList) {
|
12865
|
+
var _details$fragmentHint;
|
12866
|
+
// Buffer must be ahead of first part + duration of parts after last segment
|
12867
|
+
// and playback must be at or past segment adjacent to part list
|
12868
|
+
const firstPart = details.partList[0];
|
12869
|
+
const safePartStart = firstPart.end + (((_details$fragmentHint = details.fragmentHint) == null ? void 0 : _details$fragmentHint.duration) || 0);
|
12870
|
+
if (bufferEnd >= safePartStart && this.lastCurrentTime > firstPart.start - firstPart.fragment.duration) {
|
12871
|
+
return true;
|
12872
|
+
}
|
12873
|
+
}
|
12874
|
+
}
|
12875
|
+
return false;
|
12876
|
+
}
|
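The new `shouldLoadParts` above gates LL-HLS part loading on two conditions: the forward buffer must reach past the end of the first advertised part (plus any fragment hint), and playback must be within roughly one fragment of that part. A simplified standalone sketch of that condition (not hls.js source; the types below are illustrative stand-ins for hls.js' `LevelDetails`/`Part`):

// Illustrative decision helper mirroring shouldLoadParts in the hunk above.
interface PartLike {
  start: number;            // part start time (s)
  end: number;              // part end time (s)
  fragmentDuration: number; // duration of the fragment the part belongs to (s)
}
function shouldLoadParts(
  lowLatencyMode: boolean,
  firstPart: PartLike | undefined,
  fragmentHintDuration: number,
  bufferEnd: number,
  currentTime: number,
): boolean {
  if (!lowLatencyMode || !firstPart) return false;
  const safePartStart = firstPart.end + fragmentHintDuration;
  return bufferEnd >= safePartStart && currentTime > firstPart.start - firstPart.fragmentDuration;
}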
  getCurrentContext(chunkMeta) {
  const {
  levels,
@@ -12551,7 +12974,7 @@ class BaseStreamController extends TaskLoop {
  // Workaround flaw in getting forward buffer when maxBufferHole is smaller than gap at current pos
  if (bufferInfo.len === 0 && bufferInfo.nextStart !== undefined) {
  const bufferedFragAtPos = this.fragmentTracker.getBufferedFrag(pos, type);
- if (bufferedFragAtPos && bufferInfo.nextStart
+ if (bufferedFragAtPos && (bufferInfo.nextStart <= bufferedFragAtPos.end || bufferedFragAtPos.gap)) {
  return BufferHelper.bufferInfo(bufferable, pos, Math.max(bufferInfo.nextStart, maxBufferHole));
  }
  }
@@ -12600,7 +13023,8 @@ class BaseStreamController extends TaskLoop {
  config
  } = this;
  const start = fragments[0].start;
-
+ const canLoadParts = config.lowLatencyMode && !!levelDetails.partList;
+ let frag = null;
  if (levelDetails.live) {
  const initialLiveManifestSize = config.initialLiveManifestSize;
  if (fragLen < initialLiveManifestSize) {
@@ -12612,6 +13036,10 @@ class BaseStreamController extends TaskLoop {
  // Do not load using live logic if the starting frag is requested - we want to use getFragmentAtPosition() so that
  // we get the fragment matching that start time
  if (!levelDetails.PTSKnown && !this.startFragRequested && this.startPosition === -1 || pos < start) {
+ if (canLoadParts && !this.loadingParts) {
+ this.log(`LL-Part loading ON for initial live fragment`);
+ this.loadingParts = true;
+ }
  frag = this.getInitialLiveFragment(levelDetails, fragments);
  this.startPosition = this.nextLoadPosition = frag ? this.hls.liveSyncPosition || frag.start : pos;
  }
@@ -12622,7 +13050,7 @@ class BaseStreamController extends TaskLoop {

  // If we haven't run into any special cases already, just load the fragment most closely matching the requested position
  if (!frag) {
- const end =
+ const end = this.loadingParts ? levelDetails.partEnd : levelDetails.fragmentEnd;
  frag = this.getFragmentAtPosition(pos, end, levelDetails);
  }
  return this.mapToInitFragWhenRequired(frag);
@@ -12744,7 +13172,7 @@ class BaseStreamController extends TaskLoop {
  } = levelDetails;
  const tolerance = config.maxFragLookUpTolerance;
  const partList = levelDetails.partList;
- const loadingParts = !!(
+ const loadingParts = !!(this.loadingParts && partList != null && partList.length && fragmentHint);
  if (loadingParts && fragmentHint && !this.bitrateTest) {
  // Include incomplete fragment with parts at end
  fragments = fragments.concat(fragmentHint);
@@ -12937,7 +13365,7 @@ class BaseStreamController extends TaskLoop {
  errorAction.resolved = true;
  }
  } else {
-
+ this.warn(`${data.details} reached or exceeded max retry (${retryCount})`);
  return;
  }
  } else if ((errorAction == null ? void 0 : errorAction.action) === NetworkErrorAction.SendAlternateToPenaltyBox) {
@@ -13005,7 +13433,9 @@ class BaseStreamController extends TaskLoop {
  this.log('Reset loading state');
  this.fragCurrent = null;
  this.fragPrevious = null;
- this.state
+ if (this.state !== State.STOPPED) {
+ this.state = State.IDLE;
+ }
  }
  resetStartWhenNotLoaded(level) {
  // if loadedmetadata is not set, it means that first frag request failed
@@ -13184,6 +13614,104 @@ function dummyTrack(type = '', inputTimeScale = 90000) {
  };
  }

+ /**
+ * Returns any adjacent ID3 tags found in data starting at offset, as one block of data
+ *
+ * @param data - The data to search in
+ * @param offset - The offset at which to start searching
+ *
+ * @returns The block of data containing any ID3 tags found
+ * or `undefined` if no header is found at the starting offset
+ *
+ * @internal
+ *
+ * @group ID3
+ */
+ function getId3Data(data, offset) {
+ const front = offset;
+ let length = 0;
+ while (isId3Header(data, offset)) {
+ // ID3 header is 10 bytes
+ length += 10;
+ const size = readId3Size(data, offset + 6);
+ length += size;
+ if (isId3Footer(data, offset + 10)) {
+ // ID3 footer is 10 bytes
+ length += 10;
+ }
+ offset += length;
+ }
+ if (length > 0) {
+ return data.subarray(front, front + length);
+ }
+ return undefined;
+ }
+
+ /**
+ * Read a 33 bit timestamp from an ID3 frame.
+ *
+ * @param timeStampFrame - the ID3 frame
+ *
+ * @returns The timestamp
+ *
+ * @internal
+ *
+ * @group ID3
+ */
+ function readId3Timestamp(timeStampFrame) {
+ if (timeStampFrame.data.byteLength === 8) {
+ const data = new Uint8Array(timeStampFrame.data);
+ // timestamp is 33 bit expressed as a big-endian eight-octet number,
+ // with the upper 31 bits set to zero.
+ const pts33Bit = data[3] & 0x1;
+ let timestamp = (data[4] << 23) + (data[5] << 15) + (data[6] << 7) + data[7];
+ timestamp /= 45;
+ if (pts33Bit) {
+ timestamp += 47721858.84;
+ } // 2^32 / 90
+ return Math.round(timestamp);
+ }
+ return undefined;
+ }
+
+ /**
+ * Searches for the Elementary Stream timestamp found in the ID3 data chunk
+ *
+ * @param data - Block of data containing one or more ID3 tags
+ *
+ * @returns The timestamp
+ *
+ * @group ID3
+ *
+ * @beta
+ */
+ function getId3Timestamp(data) {
+ const frames = getId3Frames(data);
+ for (let i = 0; i < frames.length; i++) {
+ const frame = frames[i];
+ if (isId3TimestampFrame(frame)) {
+ return readId3Timestamp(frame);
+ }
+ }
+ return undefined;
+ }
+
+ /**
+ * Checks if the given data contains an ID3 tag.
+ *
+ * @param data - The data to check
+ * @param offset - The offset at which to start checking
+ *
+ * @returns `true` if an ID3 tag is found
+ *
+ * @group ID3
+ *
+ * @beta
+ */
+ function canParseId3(data, offset) {
+ return isId3Header(data, offset) && readId3Size(data, offset + 6) + 10 <= data.length - offset;
+ }
+
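`readId3Timestamp` above carries a 33-bit MPEG-TS presentation timestamp (90 kHz ticks) in an 8-octet big-endian field and returns it in milliseconds; the `2^32 / 90 ≈ 47,721,858.84` constant is the millisecond contribution of the 33rd bit. A small illustrative snippet on that unit conversion (not hls.js source, just the arithmetic):

// Converting 90 kHz MPEG-TS ticks to the millisecond scale used by readId3Timestamp.
const MS_PER_TICK = 1 / 90; // 90 kHz clock -> milliseconds
function ptsToMs(pts90kHz: number): number {
  return Math.round(pts90kHz * MS_PER_TICK);
}
// The 33rd bit alone represents 2^32 ticks:
console.log(2 ** 32 / 90); // ≈ 47721858.84 ms (about 13.25 hours), matching the constant above
console.log(ptsToMs(900000)); // a PTS of 900000 ticks is 10000 ms (10 s)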
  class BaseAudioDemuxer {
  constructor() {
  this._audioTrack = void 0;
@@ -13225,12 +13753,12 @@ class BaseAudioDemuxer {
  data = appendUint8Array(this.cachedData, data);
  this.cachedData = null;
  }
- let id3Data =
+ let id3Data = getId3Data(data, 0);
  let offset = id3Data ? id3Data.length : 0;
  let lastDataIndex;
  const track = this._audioTrack;
  const id3Track = this._id3Track;
- const timestamp = id3Data ?
+ const timestamp = id3Data ? getId3Timestamp(id3Data) : undefined;
  const length = data.length;
  if (this.basePTS === null || this.frameIndex === 0 && isFiniteNumber(timestamp)) {
  this.basePTS = initPTSFn(timestamp, timeOffset, this.initPTS);
@@ -13261,9 +13789,9 @@ class BaseAudioDemuxer {
  } else {
  offset = length;
  }
- } else if (
- // after a
- id3Data =
+ } else if (canParseId3(data, offset)) {
+ // after a canParse, a call to getId3Data *should* always returns some data
+ id3Data = getId3Data(data, offset);
  id3Track.samples.push({
  pts: this.lastPTS,
  dts: this.lastPTS,
@@ -13332,6 +13860,7 @@ const initPTSFn = (timestamp, timeOffset, initPTS) => {
  */
  function getAudioConfig(observer, data, offset, audioCodec) {
  let adtsObjectType;
+ let originalAdtsObjectType;
  let adtsExtensionSamplingIndex;
  let adtsChannelConfig;
  let config;
@@ -13339,7 +13868,7 @@ function getAudioConfig(observer, data, offset, audioCodec) {
  const manifestCodec = audioCodec;
  const adtsSamplingRates = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
  // byte 2
- adtsObjectType = ((data[offset + 2] & 0xc0) >>> 6) + 1;
+ adtsObjectType = originalAdtsObjectType = ((data[offset + 2] & 0xc0) >>> 6) + 1;
  const adtsSamplingIndex = (data[offset + 2] & 0x3c) >>> 2;
  if (adtsSamplingIndex > adtsSamplingRates.length - 1) {
  const error = new Error(`invalid ADTS sampling index:${adtsSamplingIndex}`);
@@ -13356,8 +13885,8 @@ function getAudioConfig(observer, data, offset, audioCodec) {
  // byte 3
  adtsChannelConfig |= (data[offset + 3] & 0xc0) >>> 6;
  logger.log(`manifest codec:${audioCodec}, ADTS type:${adtsObjectType}, samplingIndex:${adtsSamplingIndex}`);
- //
- if (/firefox/i.test(userAgent)) {
+ // Firefox and Pale Moon: freq less than 24kHz = AAC SBR (HE-AAC)
+ if (/firefox|palemoon/i.test(userAgent)) {
  if (adtsSamplingIndex >= 6) {
  adtsObjectType = 5;
  config = new Array(4);
@@ -13451,6 +13980,7 @@ function getAudioConfig(observer, data, offset, audioCodec) {
  samplerate: adtsSamplingRates[adtsSamplingIndex],
  channelCount: adtsChannelConfig,
  codec: 'mp4a.40.' + adtsObjectType,
+ parsedCodec: 'mp4a.40.' + originalAdtsObjectType,
  manifestCodec
  };
  }
@@ -13505,7 +14035,8 @@ function initTrackConfig(track, observer, data, offset, audioCodec) {
  track.channelCount = config.channelCount;
  track.codec = config.codec;
  track.manifestCodec = config.manifestCodec;
-
+ track.parsedCodec = config.parsedCodec;
+ logger.log(`parsed codec:${track.parsedCodec}, codec:${track.codec}, rate:${config.samplerate}, channels:${config.channelCount}`);
  }
  }
  function getFrameDuration(samplerate) {
@@ -13754,7 +14285,7 @@ class AACDemuxer extends BaseAudioDemuxer {
  // Look for ADTS header | 1111 1111 | 1111 X00X | where X can be either 0 or 1
  // Layer bits (position 14 and 15) in header should be always 0 for ADTS
  // More info https://wiki.multimedia.cx/index.php?title=ADTS
- const id3Data =
+ const id3Data = getId3Data(data, 0);
  let offset = (id3Data == null ? void 0 : id3Data.length) || 0;
  if (probe(data, offset)) {
  return false;
@@ -13945,21 +14476,7 @@ class BaseVideoParser {
  units: [],
  debug,
  length: 0
- };
- }
- getLastNalUnit(samples) {
- var _VideoSample;
- let VideoSample = this.VideoSample;
- let lastUnit;
- // try to fallback to previous sample if current one is empty
- if (!VideoSample || VideoSample.units.length === 0) {
- VideoSample = samples[samples.length - 1];
- }
- if ((_VideoSample = VideoSample) != null && _VideoSample.units) {
- const units = VideoSample.units;
- lastUnit = units[units.length - 1];
- }
- return lastUnit;
+ };
  }
  pushAccessUnit(VideoSample, videoTrack) {
  if (VideoSample.units.length && VideoSample.frame) {
@@ -13983,6 +14500,122 @@ class BaseVideoParser {
  logger.log(VideoSample.pts + '/' + VideoSample.dts + ':' + VideoSample.debug);
  }
  }
+ parseNALu(track, array, last) {
+ const len = array.byteLength;
+ let state = track.naluState || 0;
+ const lastState = state;
+ const units = [];
+ let i = 0;
+ let value;
+ let overflow;
+ let unitType;
+ let lastUnitStart = -1;
+ let lastUnitType = 0;
+ // logger.log('PES:' + Hex.hexDump(array));
+
+ if (state === -1) {
+ // special use case where we found 3 or 4-byte start codes exactly at the end of previous PES packet
+ lastUnitStart = 0;
+ // NALu type is value read from offset 0
+ lastUnitType = this.getNALuType(array, 0);
+ state = 0;
+ i = 1;
+ }
+ while (i < len) {
+ value = array[i++];
+ // optimization. state 0 and 1 are the predominant case. let's handle them outside of the switch/case
+ if (!state) {
+ state = value ? 0 : 1;
+ continue;
+ }
+ if (state === 1) {
+ state = value ? 0 : 2;
+ continue;
+ }
+ // here we have state either equal to 2 or 3
+ if (!value) {
+ state = 3;
+ } else if (value === 1) {
+ overflow = i - state - 1;
+ if (lastUnitStart >= 0) {
+ const unit = {
+ data: array.subarray(lastUnitStart, overflow),
+ type: lastUnitType
+ };
+ if (track.lastNalu) {
+ units.push(track.lastNalu);
+ track.lastNalu = null;
+ }
+ // logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
+ units.push(unit);
+ } else {
+ // lastUnitStart is undefined => this is the first start code found in this PES packet
+ // first check if start code delimiter is overlapping between 2 PES packets,
+ // ie it started in last packet (lastState not zero)
+ // and ended at the beginning of this PES packet (i <= 4 - lastState)
+ const lastUnit = track.lastNalu;
+ if (lastUnit) {
+ if (lastState && i <= 4 - lastState) {
+ // start delimiter overlapping between PES packets
+ // strip start delimiter bytes from the end of last NAL unit
+ // check if lastUnit had a state different from zero
+ if (lastUnit.state) {
+ // strip last bytes
+ lastUnit.data = lastUnit.data.subarray(0, lastUnit.data.byteLength - lastState);
+ }
+ }
+ // If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
+
+ if (overflow > 0) {
+ // logger.log('first NALU found with overflow:' + overflow);
+ lastUnit.data = appendUint8Array(lastUnit.data, array.subarray(0, overflow));
+ lastUnit.state = 0;
+ units.push(lastUnit);
+ track.lastNalu = null;
+ }
+ }
+ }
+ // check if we can read unit type
+ if (i < len) {
+ unitType = this.getNALuType(array, i);
+ // logger.log('find NALU @ offset:' + i + ',type:' + unitType);
+ lastUnitStart = i;
+ lastUnitType = unitType;
+ state = 0;
+ } else {
+ // not enough byte to read unit type. let's read it on next PES parsing
+ state = -1;
+ }
+ } else {
+ state = 0;
+ }
+ }
+ if (lastUnitStart >= 0 && state >= 0) {
+ const unit = {
+ data: array.subarray(lastUnitStart, len),
+ type: lastUnitType,
+ state: state
+ };
+ if (!last) {
+ track.lastNalu = unit;
+ // logger.log('store NALu to push it on next PES');
+ } else {
+ units.push(unit);
+ // logger.log('pushing NALU, type/size/state:' + unit.type + '/' + unit.data.byteLength + '/' + state);
+ }
+ } else if (units.length === 0) {
+ // no NALu found
+ // append pes.data to previous NAL unit
+ const lastUnit = track.lastNalu;
+ if (lastUnit) {
+ lastUnit.data = appendUint8Array(lastUnit.data, array);
+ units.push(lastUnit);
+ track.lastNalu = null;
+ }
+ }
+ track.naluState = state;
+ return units;
+ }
  }

  /**
@@ -14060,259 +14693,76 @@ class ExpGolomb {
  } else {
  return valu;
  }
- }
-
- // ():uint
- skipLZ() {
- let leadingZeroCount; // :uint
- for (leadingZeroCount = 0; leadingZeroCount < this.bitsAvailable; ++leadingZeroCount) {
- if ((this.word & 0x80000000 >>> leadingZeroCount) !== 0) {
- // the first bit of working word is 1
- this.word <<= leadingZeroCount;
- this.bitsAvailable -= leadingZeroCount;
- return leadingZeroCount;
- }
- }
- // we exhausted word and still have not found a 1
- this.loadWord();
- return leadingZeroCount + this.skipLZ();
- }
-
- // ():void
- skipUEG() {
- this.skipBits(1 + this.skipLZ());
- }
-
- // ():void
- skipEG() {
- this.skipBits(1 + this.skipLZ());
- }
-
- // ():uint
- readUEG() {
- const clz = this.skipLZ(); // :uint
- return this.readBits(clz + 1) - 1;
- }
-
- // ():int
- readEG() {
- const valu = this.readUEG(); // :int
- if (0x01 & valu) {
- // the number is odd if the low order bit is set
- return 1 + valu >>> 1; // add 1 to make it even, and divide by 2
- } else {
- return -1 * (valu >>> 1); // divide by two then make it negative
- }
- }
-
- // Some convenience functions
- // :Boolean
- readBoolean() {
- return this.readBits(1) === 1;
- }
-
- // ():int
- readUByte() {
- return this.readBits(8);
- }
-
- // ():int
- readUShort() {
- return this.readBits(16);
- }
-
- // ():int
- readUInt() {
- return this.readBits(32);
- }
-
- /**
- * Advance the ExpGolomb decoder past a scaling list. The scaling
- * list is optionally transmitted as part of a sequence parameter
- * set and is not relevant to transmuxing.
- * @param count the number of entries in this scaling list
- * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
- */
- skipScalingList(count) {
- let lastScale = 8;
- let nextScale = 8;
- let deltaScale;
- for (let j = 0; j < count; j++) {
- if (nextScale !== 0) {
- deltaScale = this.readEG();
- nextScale = (lastScale + deltaScale + 256) % 256;
- }
- lastScale = nextScale === 0 ? lastScale : nextScale;
- }
- }
-
- /**
- * Read a sequence parameter set and return some interesting video
- * properties. A sequence parameter set is the H264 metadata that
- * describes the properties of upcoming video frames.
- * @returns an object with configuration parsed from the
- * sequence parameter set, including the dimensions of the
- * associated video frames.
- */
- readSPS() {
- let frameCropLeftOffset = 0;
- let frameCropRightOffset = 0;
- let frameCropTopOffset = 0;
- let frameCropBottomOffset = 0;
- let numRefFramesInPicOrderCntCycle;
- let scalingListCount;
- let i;
- const readUByte = this.readUByte.bind(this);
- const readBits = this.readBits.bind(this);
- const readUEG = this.readUEG.bind(this);
- const readBoolean = this.readBoolean.bind(this);
- const skipBits = this.skipBits.bind(this);
- const skipEG = this.skipEG.bind(this);
- const skipUEG = this.skipUEG.bind(this);
- const skipScalingList = this.skipScalingList.bind(this);
- readUByte();
- const profileIdc = readUByte(); // profile_idc
- readBits(5); // profileCompat constraint_set[0-4]_flag, u(5)
- skipBits(3); // reserved_zero_3bits u(3),
- readUByte(); // level_idc u(8)
- skipUEG(); // seq_parameter_set_id
- // some profiles have more optional data we don't need
- if (profileIdc === 100 || profileIdc === 110 || profileIdc === 122 || profileIdc === 244 || profileIdc === 44 || profileIdc === 83 || profileIdc === 86 || profileIdc === 118 || profileIdc === 128) {
- const chromaFormatIdc = readUEG();
- if (chromaFormatIdc === 3) {
- skipBits(1);
- } // separate_colour_plane_flag
-
- skipUEG(); // bit_depth_luma_minus8
- skipUEG(); // bit_depth_chroma_minus8
- skipBits(1); // qpprime_y_zero_transform_bypass_flag
- if (readBoolean()) {
- // seq_scaling_matrix_present_flag
- scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
- for (i = 0; i < scalingListCount; i++) {
- if (readBoolean()) {
- // seq_scaling_list_present_flag[ i ]
- if (i < 6) {
- skipScalingList(16);
- } else {
- skipScalingList(64);
- }
- }
- }
- }
- skipUEG(); // log2_max_frame_num_minus4
- const picOrderCntType = readUEG();
- if (picOrderCntType === 0) {
- readUEG(); // log2_max_pic_order_cnt_lsb_minus4
- } else if (picOrderCntType === 1) {
- skipBits(1); // delta_pic_order_always_zero_flag
- skipEG(); // offset_for_non_ref_pic
- skipEG(); // offset_for_top_to_bottom_field
- numRefFramesInPicOrderCntCycle = readUEG();
- for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
- skipEG();
- } // offset_for_ref_frame[ i ]
- }
- skipUEG(); // max_num_ref_frames
- skipBits(1); // gaps_in_frame_num_value_allowed_flag
- const picWidthInMbsMinus1 = readUEG();
- const picHeightInMapUnitsMinus1 = readUEG();
- const frameMbsOnlyFlag = readBits(1);
- if (frameMbsOnlyFlag === 0) {
- skipBits(1);
- } // mb_adaptive_frame_field_flag
-
- skipBits(1); // direct_8x8_inference_flag
- if (readBoolean()) {
- // frame_cropping_flag
- frameCropLeftOffset = readUEG();
- frameCropRightOffset = readUEG();
- frameCropTopOffset = readUEG();
- frameCropBottomOffset = readUEG();
- }
- let pixelRatio = [1, 1];
- if (readBoolean()) {
- // vui_parameters_present_flag
- if (readBoolean()) {
- // aspect_ratio_info_present_flag
- const aspectRatioIdc = readUByte();
- switch (aspectRatioIdc) {
- case 1:
- pixelRatio = [1, 1];
- break;
- case 2:
- pixelRatio = [12, 11];
- break;
- case 3:
- pixelRatio = [10, 11];
- break;
- case 4:
- pixelRatio = [16, 11];
- break;
- case 5:
- pixelRatio = [40, 33];
- break;
- case 6:
- pixelRatio = [24, 11];
- break;
- case 7:
- pixelRatio = [20, 11];
- break;
- case 8:
- pixelRatio = [32, 11];
- break;
- case 9:
- pixelRatio = [80, 33];
- break;
- case 10:
- pixelRatio = [18, 11];
- break;
- case 11:
- pixelRatio = [15, 11];
- break;
- case 12:
- pixelRatio = [64, 33];
- break;
- case 13:
- pixelRatio = [160, 99];
- break;
- case 14:
- pixelRatio = [4, 3];
- break;
- case 15:
- pixelRatio = [3, 2];
- break;
- case 16:
- pixelRatio = [2, 1];
- break;
- case 255:
- {
- pixelRatio = [readUByte() << 8 | readUByte(), readUByte() << 8 | readUByte()];
- break;
- }
- }
+ }
+
+ // ():uint
+ skipLZ() {
+ let leadingZeroCount; // :uint
+ for (leadingZeroCount = 0; leadingZeroCount < this.bitsAvailable; ++leadingZeroCount) {
+ if ((this.word & 0x80000000 >>> leadingZeroCount) !== 0) {
+ // the first bit of working word is 1
+ this.word <<= leadingZeroCount;
+ this.bitsAvailable -= leadingZeroCount;
+ return leadingZeroCount;
  }
  }
-
-
-
- pixelRatio: pixelRatio
- };
+ // we exhausted word and still have not found a 1
+ this.loadWord();
+ return leadingZeroCount + this.skipLZ();
  }
-
-
-
-
-
-
-
+
+ // ():void
+ skipUEG() {
+ this.skipBits(1 + this.skipLZ());
+ }
+
+ // ():void
+ skipEG() {
+ this.skipBits(1 + this.skipLZ());
+ }
+
+ // ():uint
+ readUEG() {
+ const clz = this.skipLZ(); // :uint
+ return this.readBits(clz + 1) - 1;
+ }
+
+ // ():int
+ readEG() {
+ const valu = this.readUEG(); // :int
+ if (0x01 & valu) {
+ // the number is odd if the low order bit is set
+ return 1 + valu >>> 1; // add 1 to make it even, and divide by 2
+ } else {
+ return -1 * (valu >>> 1); // divide by two then make it negative
+ }
+ }
+
+ // Some convenience functions
+ // :Boolean
+ readBoolean() {
+ return this.readBits(1) === 1;
+ }
+
+ // ():int
+ readUByte() {
+ return this.readBits(8);
+ }
+
+ // ():int
+ readUShort() {
+ return this.readBits(16);
+ }
+
+ // ():int
+ readUInt() {
+ return this.readBits(32);
  }
  }

  class AvcVideoParser extends BaseVideoParser {
-
- const units = this.
+ parsePES(track, textTrack, pes, last, duration) {
+ const units = this.parseNALu(track, pes.data, last);
  let VideoSample = this.VideoSample;
  let push;
  let spsfound = false;
@@ -14337,7 +14787,7 @@ class AvcVideoParser extends BaseVideoParser {
  // only check slice type to detect KF in case SPS found in same packet (any keyframe is preceded by SPS ...)
  if (spsfound && data.length > 4) {
  // retrieve slice type by parsing beginning of NAL unit (follow H264 spec, slice_header definition) to detect keyframe embedded in NDR
- const sliceType =
+ const sliceType = this.readSliceType(data);
  // 2 : I slice, 4 : SI slice, 7 : I slice, 9: SI slice
  // SI slice : A slice that is coded using intra prediction only and using quantisation of the prediction samples.
  // An SI slice can be coded such that its decoded samples can be constructed identically to an SP slice.
@@ -14391,8 +14841,7 @@ class AvcVideoParser extends BaseVideoParser {
  push = true;
  spsfound = true;
  const sps = unit.data;
- const
- const config = expGolombDecoder.readSPS();
+ const config = this.readSPS(sps);
  if (!track.sps || track.width !== config.width || track.height !== config.height || ((_track$pixelRatio = track.pixelRatio) == null ? void 0 : _track$pixelRatio[0]) !== config.pixelRatio[0] || ((_track$pixelRatio2 = track.pixelRatio) == null ? void 0 : _track$pixelRatio2[1]) !== config.pixelRatio[1]) {
  track.width = config.width;
  track.height = config.height;
@@ -14448,109 +14897,192 @@ class AvcVideoParser extends BaseVideoParser {
  this.VideoSample = null;
  }
  }
-
-
-
-
- const
-
-
-
-
-
-
+ getNALuType(data, offset) {
+ return data[offset] & 0x1f;
+ }
+ readSliceType(data) {
+ const eg = new ExpGolomb(data);
+ // skip NALu type
+ eg.readUByte();
+ // discard first_mb_in_slice
+ eg.readUEG();
+ // return slice_type
+ return eg.readUEG();
+ }

-
-
-
-
-
-
-
-
-
-
-
- if (
-
-
- }
- if (state === 1) {
- state = value ? 0 : 2;
- continue;
+ /**
+ * The scaling list is optionally transmitted as part of a sequence parameter
+ * set and is not relevant to transmuxing.
+ * @param count the number of entries in this scaling list
+ * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
+ */
+ skipScalingList(count, reader) {
+ let lastScale = 8;
+ let nextScale = 8;
+ let deltaScale;
+ for (let j = 0; j < count; j++) {
+ if (nextScale !== 0) {
+ deltaScale = reader.readEG();
+ nextScale = (lastScale + deltaScale + 256) % 256;
  }
-
-
-
- } else if (value === 1) {
- overflow = i - state - 1;
- if (lastUnitStart >= 0) {
- const unit = {
- data: array.subarray(lastUnitStart, overflow),
- type: lastUnitType
- };
- // logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
- units.push(unit);
- } else {
- // lastUnitStart is undefined => this is the first start code found in this PES packet
- // first check if start code delimiter is overlapping between 2 PES packets,
- // ie it started in last packet (lastState not zero)
- // and ended at the beginning of this PES packet (i <= 4 - lastState)
- const lastUnit = this.getLastNalUnit(track.samples);
- if (lastUnit) {
- if (lastState && i <= 4 - lastState) {
- // start delimiter overlapping between PES packets
- // strip start delimiter bytes from the end of last NAL unit
- // check if lastUnit had a state different from zero
- if (lastUnit.state) {
- // strip last bytes
- lastUnit.data = lastUnit.data.subarray(0, lastUnit.data.byteLength - lastState);
- }
- }
- // If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
+ lastScale = nextScale === 0 ? lastScale : nextScale;
+ }
+ }

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+ /**
+ * Read a sequence parameter set and return some interesting video
+ * properties. A sequence parameter set is the H264 metadata that
+ * describes the properties of upcoming video frames.
+ * @returns an object with configuration parsed from the
+ * sequence parameter set, including the dimensions of the
+ * associated video frames.
+ */
+ readSPS(sps) {
+ const eg = new ExpGolomb(sps);
+ let frameCropLeftOffset = 0;
+ let frameCropRightOffset = 0;
+ let frameCropTopOffset = 0;
+ let frameCropBottomOffset = 0;
+ let numRefFramesInPicOrderCntCycle;
+ let scalingListCount;
+ let i;
+ const readUByte = eg.readUByte.bind(eg);
+ const readBits = eg.readBits.bind(eg);
+ const readUEG = eg.readUEG.bind(eg);
+ const readBoolean = eg.readBoolean.bind(eg);
+ const skipBits = eg.skipBits.bind(eg);
+ const skipEG = eg.skipEG.bind(eg);
+ const skipUEG = eg.skipUEG.bind(eg);
+ const skipScalingList = this.skipScalingList.bind(this);
+ readUByte();
+ const profileIdc = readUByte(); // profile_idc
+ readBits(5); // profileCompat constraint_set[0-4]_flag, u(5)
+ skipBits(3); // reserved_zero_3bits u(3),
+ readUByte(); // level_idc u(8)
+ skipUEG(); // seq_parameter_set_id
+ // some profiles have more optional data we don't need
+ if (profileIdc === 100 || profileIdc === 110 || profileIdc === 122 || profileIdc === 244 || profileIdc === 44 || profileIdc === 83 || profileIdc === 86 || profileIdc === 118 || profileIdc === 128) {
+ const chromaFormatIdc = readUEG();
+ if (chromaFormatIdc === 3) {
+ skipBits(1);
+ } // separate_colour_plane_flag
+
+ skipUEG(); // bit_depth_luma_minus8
+ skipUEG(); // bit_depth_chroma_minus8
+ skipBits(1); // qpprime_y_zero_transform_bypass_flag
+ if (readBoolean()) {
+ // seq_scaling_matrix_present_flag
+ scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
+ for (i = 0; i < scalingListCount; i++) {
+ if (readBoolean()) {
+ // seq_scaling_list_present_flag[ i ]
+ if (i < 6) {
+ skipScalingList(16, eg);
+ } else {
+ skipScalingList(64, eg);
+ }
+ }
  }
- } else {
- state = 0;
  }
  }
-
-
-
-
-
-
-
-
+ skipUEG(); // log2_max_frame_num_minus4
+ const picOrderCntType = readUEG();
+ if (picOrderCntType === 0) {
+ readUEG(); // log2_max_pic_order_cnt_lsb_minus4
+ } else if (picOrderCntType === 1) {
+ skipBits(1); // delta_pic_order_always_zero_flag
+ skipEG(); // offset_for_non_ref_pic
+ skipEG(); // offset_for_top_to_bottom_field
+ numRefFramesInPicOrderCntCycle = readUEG();
+ for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
+ skipEG();
+ } // offset_for_ref_frame[ i ]
  }
- //
-
-
-
-
-
+ skipUEG(); // max_num_ref_frames
+ skipBits(1); // gaps_in_frame_num_value_allowed_flag
+ const picWidthInMbsMinus1 = readUEG();
+ const picHeightInMapUnitsMinus1 = readUEG();
+ const frameMbsOnlyFlag = readBits(1);
+ if (frameMbsOnlyFlag === 0) {
+ skipBits(1);
+ } // mb_adaptive_frame_field_flag
+
+ skipBits(1); // direct_8x8_inference_flag
+ if (readBoolean()) {
+ // frame_cropping_flag
+ frameCropLeftOffset = readUEG();
+ frameCropRightOffset = readUEG();
+ frameCropTopOffset = readUEG();
+ frameCropBottomOffset = readUEG();
+ }
+ let pixelRatio = [1, 1];
+ if (readBoolean()) {
+ // vui_parameters_present_flag
+ if (readBoolean()) {
+ // aspect_ratio_info_present_flag
+ const aspectRatioIdc = readUByte();
+ switch (aspectRatioIdc) {
+ case 1:
+ pixelRatio = [1, 1];
+ break;
+ case 2:
+ pixelRatio = [12, 11];
+ break;
+ case 3:
+ pixelRatio = [10, 11];
+ break;
+ case 4:
+ pixelRatio = [16, 11];
+ break;
+ case 5:
+ pixelRatio = [40, 33];
+ break;
+ case 6:
+ pixelRatio = [24, 11];
+ break;
+ case 7:
+ pixelRatio = [20, 11];
+ break;
+ case 8:
+ pixelRatio = [32, 11];
+ break;
+ case 9:
+ pixelRatio = [80, 33];
+ break;
+ case 10:
+ pixelRatio = [18, 11];
+ break;
+ case 11:
+ pixelRatio = [15, 11];
+ break;
+ case 12:
+ pixelRatio = [64, 33];
+ break;
+ case 13:
+ pixelRatio = [160, 99];
+ break;
+ case 14:
+ pixelRatio = [4, 3];
+ break;
+ case 15:
+ pixelRatio = [3, 2];
+ break;
+ case 16:
+ pixelRatio = [2, 1];
+ break;
+ case 255:
+ {
+ pixelRatio = [readUByte() << 8 | readUByte(), readUByte() << 8 | readUByte()];
+ break;
+ }
+ }
  }
  }
-
-
+ return {
+ width: Math.ceil((picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2),
+ height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - (frameMbsOnlyFlag ? 2 : 4) * (frameCropTopOffset + frameCropBottomOffset),
+ pixelRatio: pixelRatio
+ };
  }
  }

|
|
14568
15100
|
});
|
14569
15101
|
}
|
14570
15102
|
decryptBuffer(encryptedData) {
|
14571
|
-
return this.decrypter.decrypt(encryptedData, this.keyData.key.buffer, this.keyData.iv.buffer);
|
15103
|
+
return this.decrypter.decrypt(encryptedData, this.keyData.key.buffer, this.keyData.iv.buffer, DecrypterAesMode.cbc);
|
14572
15104
|
}
|
14573
15105
|
|
14574
15106
|
// AAC - encrypt all full 16 bytes blocks starting from offset 16
|
@@ -14682,7 +15214,7 @@ class TSDemuxer {
     this.observer = observer;
     this.config = config;
     this.typeSupported = typeSupported;
-    this.videoParser =
+    this.videoParser = null;
   }
   static probe(data) {
     const syncOffset = TSDemuxer.syncOffset(data);
@@ -14847,7 +15379,16 @@ class TSDemuxer {
         case videoPid:
           if (stt) {
             if (videoData && (pes = parsePES(videoData))) {
-              this.videoParser
+              if (this.videoParser === null) {
+                switch (videoTrack.segmentCodec) {
+                  case 'avc':
+                    this.videoParser = new AvcVideoParser();
+                    break;
+                }
+              }
+              if (this.videoParser !== null) {
+                this.videoParser.parsePES(videoTrack, textTrack, pes, false, this._duration);
+              }
             }
             videoData = {
               data: [],
@@ -15009,8 +15550,17 @@ class TSDemuxer {
     // try to parse last PES packets
     let pes;
     if (videoData && (pes = parsePES(videoData))) {
-      this.videoParser
-
+      if (this.videoParser === null) {
+        switch (videoTrack.segmentCodec) {
+          case 'avc':
+            this.videoParser = new AvcVideoParser();
+            break;
+        }
+      }
+      if (this.videoParser !== null) {
+        this.videoParser.parsePES(videoTrack, textTrack, pes, true, this._duration);
+        videoTrack.pesData = null;
+      }
     } else {
       // either avcData null or PES truncated, keep it for next frag parsing
       videoTrack.pesData = videoData;
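Both TS parsing paths above now construct the video parser lazily from `videoTrack.segmentCodec` instead of unconditionally holding an `AvcVideoParser`; in this light build only the `'avc'` case exists, so other payloads are left unparsed. A hedged sketch of the same selection with the full build's `'hevc'` branch included (the `HevcVideoParser` referenced here ships in `src/demux/video/hevc-video-parser.ts` and is not bundled into `hls.light`):

```js
// Sketch of the lazy parser selection used above (not the exact library code).
// AvcVideoParser/HevcVideoParser are classes from the surrounding bundle.
function ensureVideoParser(current, segmentCodec) {
  if (current !== null) {
    return current; // reuse one parser instance across PES packets
  }
  switch (segmentCodec) {
    case 'avc':
      return new AvcVideoParser();
    case 'hevc':
      return new HevcVideoParser(); // full-build branch; assumption for hls.light
    default:
      return null; // unknown codec: leave the PES payload unparsed
  }
}
```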
@@ -15312,7 +15862,10 @@ function parsePMT(data, offset, typeSupported, isSampleAes) {
       case 0x87:
         throw new Error('Unsupported EC-3 in M2TS found');
       case 0x24:
-
+        // ITU-T Rec. H.265 and ISO/IEC 23008-2 (HEVC)
+        {
+          throw new Error('Unsupported HEVC in M2TS found');
+        }
     }
     // move to the next table entry
     // skip past the elementary stream descriptors, if present
@@ -15455,11 +16008,11 @@ class MP3Demuxer extends BaseAudioDemuxer {
     // Look for MPEG header | 1111 1111 | 111X XYZX | where X can be either 0 or 1 and Y or Z should be 1
     // Layer bits (position 14 and 15) in header should be always different from 0 (Layer I or Layer II or Layer III)
     // More info http://www.mp3-tech.org/programmer/frame_header.html
-    const id3Data =
+    const id3Data = getId3Data(data, 0);
     let offset = (id3Data == null ? void 0 : id3Data.length) || 0;
 
     // Check for ac-3|ec-3 sync bytes and return false if present
-    if (id3Data && data[offset] === 0x0b && data[offset + 1] === 0x77 &&
+    if (id3Data && data[offset] === 0x0b && data[offset + 1] === 0x77 && getId3Timestamp(id3Data) !== undefined &&
     // check the bsid to confirm ac-3 or ec-3 (not mp3)
     getAudioBSID(data, offset) <= 16) {
       return false;
@@ -15534,6 +16087,8 @@ class MP4 {
       avc1: [],
       // codingname
       avcC: [],
+      hvc1: [],
+      hvcC: [],
       btrt: [],
       dinf: [],
       dref: [],
@@ -15958,8 +16513,10 @@ class MP4 {
         return MP4.box(MP4.types.stsd, MP4.STSD, MP4.ac3(track));
       }
       return MP4.box(MP4.types.stsd, MP4.STSD, MP4.mp4a(track));
-    } else {
+    } else if (track.segmentCodec === 'avc') {
       return MP4.box(MP4.types.stsd, MP4.STSD, MP4.avc1(track));
+    } else {
+      return MP4.box(MP4.types.stsd, MP4.STSD, MP4.hvc1(track));
     }
   }
   static tkhd(track) {
@@ -16097,6 +16654,84 @@ class MP4 {
     const result = appendUint8Array(MP4.FTYP, movie);
     return result;
   }
+  static hvc1(track) {
+    const ps = track.params;
+    const units = [track.vps, track.sps, track.pps];
+    const NALuLengthSize = 4;
+    const config = new Uint8Array([0x01, ps.general_profile_space << 6 | (ps.general_tier_flag ? 32 : 0) | ps.general_profile_idc, ps.general_profile_compatibility_flags[0], ps.general_profile_compatibility_flags[1], ps.general_profile_compatibility_flags[2], ps.general_profile_compatibility_flags[3], ps.general_constraint_indicator_flags[0], ps.general_constraint_indicator_flags[1], ps.general_constraint_indicator_flags[2], ps.general_constraint_indicator_flags[3], ps.general_constraint_indicator_flags[4], ps.general_constraint_indicator_flags[5], ps.general_level_idc, 240 | ps.min_spatial_segmentation_idc >> 8, 255 & ps.min_spatial_segmentation_idc, 252 | ps.parallelismType, 252 | ps.chroma_format_idc, 248 | ps.bit_depth_luma_minus8, 248 | ps.bit_depth_chroma_minus8, 0x00, parseInt(ps.frame_rate.fps), NALuLengthSize - 1 | ps.temporal_id_nested << 2 | ps.num_temporal_layers << 3 | (ps.frame_rate.fixed ? 64 : 0), units.length]);
+
+    // compute hvcC size in bytes
+    let length = config.length;
+    for (let i = 0; i < units.length; i += 1) {
+      length += 3;
+      for (let j = 0; j < units[i].length; j += 1) {
+        length += 2 + units[i][j].length;
+      }
+    }
+    const hvcC = new Uint8Array(length);
+    hvcC.set(config, 0);
+    length = config.length;
+    // append parameter set units: one vps, one or more sps and pps
+    const iMax = units.length - 1;
+    for (let i = 0; i < units.length; i += 1) {
+      hvcC.set(new Uint8Array([32 + i | (i === iMax ? 128 : 0), 0x00, units[i].length]), length);
+      length += 3;
+      for (let j = 0; j < units[i].length; j += 1) {
+        hvcC.set(new Uint8Array([units[i][j].length >> 8, units[i][j].length & 255]), length);
+        length += 2;
+        hvcC.set(units[i][j], length);
+        length += units[i][j].length;
+      }
+    }
+    const hvcc = MP4.box(MP4.types.hvcC, hvcC);
+    const width = track.width;
+    const height = track.height;
+    const hSpacing = track.pixelRatio[0];
+    const vSpacing = track.pixelRatio[1];
+    return MP4.box(MP4.types.hvc1, new Uint8Array([0x00, 0x00, 0x00,
+    // reserved
+    0x00, 0x00, 0x00,
+    // reserved
+    0x00, 0x01,
+    // data_reference_index
+    0x00, 0x00,
+    // pre_defined
+    0x00, 0x00,
+    // reserved
+    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+    // pre_defined
+    width >> 8 & 0xff, width & 0xff,
+    // width
+    height >> 8 & 0xff, height & 0xff,
+    // height
+    0x00, 0x48, 0x00, 0x00,
+    // horizresolution
+    0x00, 0x48, 0x00, 0x00,
+    // vertresolution
+    0x00, 0x00, 0x00, 0x00,
+    // reserved
+    0x00, 0x01,
+    // frame_count
+    0x12, 0x64, 0x61, 0x69, 0x6c,
+    // dailymotion/hls.js
+    0x79, 0x6d, 0x6f, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x68, 0x6c, 0x73, 0x2e, 0x6a, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+    // compressorname
+    0x00, 0x18,
+    // depth = 24
+    0x11, 0x11]),
+    // pre_defined = -1
+    hvcc, MP4.box(MP4.types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80,
+    // bufferSizeDB
+    0x00, 0x2d, 0xc6, 0xc0,
+    // maxBitrate
+    0x00, 0x2d, 0xc6, 0xc0])),
+    // avgBitrate
+    MP4.box(MP4.types.pasp, new Uint8Array([hSpacing >> 24,
+    // hSpacing
+    hSpacing >> 16 & 0xff, hSpacing >> 8 & 0xff, hSpacing & 0xff, vSpacing >> 24,
+    // vSpacing
+    vSpacing >> 16 & 0xff, vSpacing >> 8 & 0xff, vSpacing & 0xff])));
+  }
 }
 MP4.types = void 0;
 MP4.HDLR_TYPES = void 0;
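With `hvc1`/`hvcC` registered in `MP4.types` and the new `MP4.hvc1()` box writer above, the sample-description selection in `stsd()` now keys off `track.segmentCodec`. A condensed sketch of the video branch of that dispatch (the audio branches are unchanged and omitted here; `MP4` is the class from the surrounding bundle):

```js
// Condensed sketch of the video branch of MP4.stsd() after this change.
// 'avc' tracks keep the avc1/avcC sample entry; anything else is written as hvc1/hvcC.
function videoSampleDescription(track) {
  return track.segmentCodec === 'avc'
    ? MP4.box(MP4.types.stsd, MP4.STSD, MP4.avc1(track)) // avc1 + avcC
    : MP4.box(MP4.types.stsd, MP4.STSD, MP4.hvc1(track)); // hvc1 + hvcC + btrt + pasp
}
```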
@@ -16472,9 +17107,9 @@ class MP4Remuxer {
       const foundOverlap = delta < -1;
       if (foundHole || foundOverlap) {
         if (foundHole) {
-          logger.warn(
+          logger.warn(`${(track.segmentCodec || '').toUpperCase()}: ${toMsFromMpegTsClock(delta, true)} ms (${delta}dts) hole between fragments detected at ${timeOffset.toFixed(3)}`);
         } else {
-          logger.warn(
+          logger.warn(`${(track.segmentCodec || '').toUpperCase()}: ${toMsFromMpegTsClock(-delta, true)} ms (${delta}dts) overlapping between fragments detected at ${timeOffset.toFixed(3)}`);
         }
         if (!foundOverlap || nextAvcDts >= inputSamples[0].pts || chromeVersion) {
           firstDTS = nextAvcDts;
@@ -16483,12 +17118,24 @@ class MP4Remuxer {
           inputSamples[0].dts = firstDTS;
           inputSamples[0].pts = firstPTS;
         } else {
+          let isPTSOrderRetained = true;
           for (let i = 0; i < inputSamples.length; i++) {
-            if (inputSamples[i].dts > firstPTS) {
+            if (inputSamples[i].dts > firstPTS && isPTSOrderRetained) {
               break;
             }
+            const prevPTS = inputSamples[i].pts;
             inputSamples[i].dts -= delta;
             inputSamples[i].pts -= delta;
+
+            // check to see if this sample's PTS order has changed
+            // relative to the next one
+            if (i < inputSamples.length - 1) {
+              const nextSamplePTS = inputSamples[i + 1].pts;
+              const currentSamplePTS = inputSamples[i].pts;
+              const currentOrder = nextSamplePTS <= currentSamplePTS;
+              const prevOrder = nextSamplePTS <= prevPTS;
+              isPTSOrderRetained = currentOrder == prevOrder;
+            }
           }
         }
         logger.log(`Video: Initial PTS/DTS adjusted: ${toMsFromMpegTsClock(firstPTS, true)}/${toMsFromMpegTsClock(firstDTS, true)}, delta: ${toMsFromMpegTsClock(delta, true)} ms`);
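The new `isPTSOrderRetained` flag stops the delta adjustment above as soon as shifting a sample would flip its presentation order relative to the following sample. A small self-contained illustration of that check (illustrative helper, not library code):

```js
// Illustrative re-statement of the order check used above.
// Shifting samples[i].pts by -delta must not change whether samples[i + 1]
// still sorts after it; when it would, the adjustment loop stops early.
function ptsOrderRetained(samples, i, delta) {
  if (i >= samples.length - 1) {
    return true;
  }
  const nextPTS = samples[i + 1].pts;
  const prevOrder = nextPTS <= samples[i].pts; // order before the shift
  const currentOrder = nextPTS <= samples[i].pts - delta; // order after the shift
  return currentOrder === prevOrder;
}
```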
@@ -16636,7 +17283,7 @@ class MP4Remuxer {
         }
       }
     }
-    // next AVC sample DTS should be equal to last sample DTS + last sample duration (in PES timescale)
+    // next AVC/HEVC sample DTS should be equal to last sample DTS + last sample duration (in PES timescale)
     mp4SampleDuration = stretchedLastFrame || !mp4SampleDuration ? averageSampleDuration : mp4SampleDuration;
     this.nextAvcDts = nextAvcDts = lastDTS + mp4SampleDuration;
     this.videoSampleDuration = mp4SampleDuration;
@@ -16769,7 +17416,7 @@ class MP4Remuxer {
         logger.warn(`[mp4-remuxer]: Injecting ${missing} audio frame @ ${(nextPts / inputTimeScale).toFixed(3)}s due to ${Math.round(1000 * delta / inputTimeScale)} ms gap.`);
         for (let j = 0; j < missing; j++) {
           const newStamp = Math.max(nextPts, 0);
-          let fillFrame = AAC.getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
+          let fillFrame = AAC.getSilentFrame(track.parsedCodec || track.manifestCodec || track.codec, track.channelCount);
           if (!fillFrame) {
             logger.log('[mp4-remuxer]: Unable to get silent frame for given audio codec; duplicating last frame instead.');
             fillFrame = sample.unit.subarray();
@@ -16897,7 +17544,7 @@ class MP4Remuxer {
     // samples count of this segment's duration
     const nbSamples = Math.ceil((endDTS - startDTS) / frameDuration);
     // silent frame
-    const silentFrame = AAC.getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
+    const silentFrame = AAC.getSilentFrame(track.parsedCodec || track.manifestCodec || track.codec, track.channelCount);
     logger.warn('[mp4-remuxer]: remux empty Audio');
     // Can't remux if we can't generate a silent frame...
     if (!silentFrame) {
@@ -17288,13 +17935,15 @@ class Transmuxer {
       initSegmentData
     } = transmuxConfig;
     const keyData = getEncryptionType(uintData, decryptdata);
-    if (keyData && keyData.method
+    if (keyData && isFullSegmentEncryption(keyData.method)) {
       const decrypter = this.getDecrypter();
+      const aesMode = getAesModeFromFullSegmentMethod(keyData.method);
+
       // Software decryption is synchronous; webCrypto is not
       if (decrypter.isSync()) {
         // Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
         // data is handled in the flush() call
-        let decryptedData = decrypter.softwareDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer);
+        let decryptedData = decrypter.softwareDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer, aesMode);
         // For Low-Latency HLS Parts, decrypt in place, since part parsing is expected on push progress
         const loadingParts = chunkMeta.part > -1;
         if (loadingParts) {
@@ -17306,7 +17955,7 @@ class Transmuxer {
         }
         uintData = new Uint8Array(decryptedData);
       } else {
-        this.decryptionPromise = decrypter.webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer).then(decryptedData => {
+        this.decryptionPromise = decrypter.webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer, aesMode).then(decryptedData => {
           // Calling push here is important; if flush() is called while this is still resolving, this ensures that
           // the decrypted data has been transmuxed
           const result = this.push(decryptedData, null, chunkMeta);
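Both decryption paths now receive an explicit `aesMode` derived from the playlist key `METHOD` via `getAesModeFromFullSegmentMethod` (which is not shown in this hunk). A hedged sketch of one plausible mapping, assuming the AES-256 methods added in this release map onto the `DecrypterAesMode` enum as named:

```js
// Hedged sketch only: a plausible METHOD -> DecrypterAesMode mapping.
// The authoritative mapping lives in src/crypt (decrypter-aes-mode.ts / decrypter.ts).
function aesModeForFullSegmentMethod(method) {
  switch (method) {
    case 'AES-128':
    case 'AES-256':
      return DecrypterAesMode.cbc; // CBC with the playlist-provided IV
    case 'AES-256-CTR':
      return DecrypterAesMode.ctr;
    default:
      throw new Error(`invalid full-segment method ${method}`);
  }
}
```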
@@ -17960,14 +18609,7 @@ class TransmuxerInterface {
     this.observer = new EventEmitter();
     this.observer.on(Events.FRAG_DECRYPTED, forwardMessage);
     this.observer.on(Events.ERROR, forwardMessage);
-    const
-      isTypeSupported: () => false
-    };
-    const m2tsTypeSupported = {
-      mpeg: MediaSource.isTypeSupported('audio/mpeg'),
-      mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
-      ac3: false
-    };
+    const m2tsTypeSupported = getM2TSSupportedAudioTypes(config.preferManagedMediaSource);
 
     // navigator.vendor is not always available in Web Worker
     // refer to https://developer.mozilla.org/en-US/docs/Web/API/WorkerGlobalScope/navigator
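The inline `MediaSource` probing removed above is replaced by the shared `getM2TSSupportedAudioTypes(config.preferManagedMediaSource)` helper. The removed block still documents the shape of the result; a hedged sketch of an equivalent probe (`ac3: false` mirrors the removed light-build code, which never enables AC-3 here):

```js
// Hedged sketch of an equivalent capability probe returning { mpeg, mp3, ac3 }.
function probeM2TSAudioTypes(mediaSource) {
  const ms = mediaSource || { isTypeSupported: () => false };
  return {
    mpeg: ms.isTypeSupported('audio/mpeg'),
    mp3: ms.isTypeSupported('audio/mp4; codecs="mp3"'),
    ac3: false, // hard-coded off, as in the removed hls.light code
  };
}
```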
@@ -18232,8 +18874,9 @@ const STALL_MINIMUM_DURATION_MS = 250;
 const MAX_START_GAP_JUMP = 2.0;
 const SKIP_BUFFER_HOLE_STEP_SECONDS = 0.1;
 const SKIP_BUFFER_RANGE_START = 0.05;
-class GapController {
+class GapController extends Logger {
   constructor(config, media, fragmentTracker, hls) {
+    super('gap-controller', hls.logger);
     this.config = void 0;
     this.media = null;
     this.fragmentTracker = void 0;
@@ -18243,6 +18886,7 @@ class GapController {
     this.stalled = null;
     this.moved = false;
     this.seeking = false;
+    this.ended = 0;
     this.config = config;
     this.media = media;
     this.fragmentTracker = fragmentTracker;
@@ -18260,7 +18904,7 @@ class GapController {
    *
    * @param lastCurrentTime - Previously read playhead position
    */
-  poll(lastCurrentTime, activeFrag) {
+  poll(lastCurrentTime, activeFrag, levelDetails, state) {
     const {
       config,
       media,
@@ -18279,6 +18923,7 @@ class GapController {
 
     // The playhead is moving, no-op
     if (currentTime !== lastCurrentTime) {
+      this.ended = 0;
       this.moved = true;
       if (!seeking) {
         this.nudgeRetry = 0;
@@ -18287,7 +18932,7 @@ class GapController {
       // The playhead is now moving, but was previously stalled
       if (this.stallReported) {
        const _stalledDuration = self.performance.now() - stalled;
-
+        this.warn(`playback not stuck anymore @${currentTime}, after ${Math.round(_stalledDuration)}ms`);
        this.stallReported = false;
      }
      this.stalled = null;
@@ -18323,7 +18968,6 @@ class GapController {
     // Skip start gaps if we haven't played, but the last poll detected the start of a stall
     // The addition poll gives the browser a chance to jump the gap for us
     if (!this.moved && this.stalled !== null) {
-      var _level$details;
       // There is no playable buffer (seeked, waiting for buffer)
       const isBuffered = bufferInfo.len > 0;
       if (!isBuffered && !nextStart) {
@@ -18335,9 +18979,8 @@ class GapController {
       // When joining a live stream with audio tracks, account for live playlist window sliding by allowing
       // a larger jump over start gaps caused by the audio-stream-controller buffering a start fragment
       // that begins over 1 target duration after the video start position.
-      const
-      const
-      const maxStartGapJump = isLive ? level.details.targetduration * 2 : MAX_START_GAP_JUMP;
+      const isLive = !!(levelDetails != null && levelDetails.live);
+      const maxStartGapJump = isLive ? levelDetails.targetduration * 2 : MAX_START_GAP_JUMP;
       const partialOrGap = this.fragmentTracker.getPartialFragment(currentTime);
       if (startJump > 0 && (startJump <= maxStartGapJump || partialOrGap)) {
         if (!media.paused) {
@@ -18355,6 +18998,17 @@ class GapController {
     }
     const stalledDuration = tnow - stalled;
     if (!seeking && stalledDuration >= STALL_MINIMUM_DURATION_MS) {
+      // Dispatch MEDIA_ENDED when media.ended/ended event is not signalled at end of stream
+      if (state === State.ENDED && !(levelDetails != null && levelDetails.live) && Math.abs(currentTime - ((levelDetails == null ? void 0 : levelDetails.edge) || 0)) < 1) {
+        if (stalledDuration < 1000 || this.ended) {
+          return;
+        }
+        this.ended = currentTime;
+        this.hls.trigger(Events.MEDIA_ENDED, {
+          stalled: true
+        });
+        return;
+      }
       // Report stalling after trying to fix
       this._reportStall(bufferInfo);
       if (!this.media) {
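The new branch above synthesizes an end-of-stream signal: when the stream controller is in its ENDED state and the playhead stalls within a second of a VOD playlist's edge, the player fires `Events.MEDIA_ENDED` with `stalled: true` instead of reporting a buffer-stall error. A short usage sketch for applications that want to react to it:

```js
import Hls from 'hls.js';

const hls = new Hls();
hls.on(Hls.Events.MEDIA_ENDED, (event, data) => {
  if (data.stalled) {
    // 'ended' never fired on the media element; the gap-controller signalled
    // end of stream after stalling at the VOD edge.
    console.log('Playback reached the end (stalled MEDIA_ENDED)');
  }
});
```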
@@ -18398,7 +19052,7 @@ class GapController {
     // needs to cross some sort of threshold covering all source-buffers content
     // to start playing properly.
     if ((bufferInfo.len > config.maxBufferHole || bufferInfo.nextStart && bufferInfo.nextStart - currentTime < config.maxBufferHole) && stalledDurationMs > config.highBufferWatchdogPeriod * 1000) {
-
+      this.warn('Trying to nudge playhead over buffer-hole');
       // Try to nudge currentTime over a buffer hole if we've been stalling for the configured amount of seconds
       // We only try to jump the hole if it's under the configured size
       // Reset stalled so to rearm watchdog timer
@@ -18422,7 +19076,7 @@ class GapController {
     // Report stalled error once
     this.stallReported = true;
     const error = new Error(`Playback stalling at @${media.currentTime} due to low buffer (${JSON.stringify(bufferInfo)})`);
-
+    this.warn(error.message);
     hls.trigger(Events.ERROR, {
       type: ErrorTypes.MEDIA_ERROR,
       details: ErrorDetails.BUFFER_STALLED_ERROR,
@@ -18490,7 +19144,7 @@ class GapController {
       }
     }
     const targetTime = Math.max(startTime + SKIP_BUFFER_RANGE_START, currentTime + SKIP_BUFFER_HOLE_STEP_SECONDS);
-
+    this.warn(`skipping hole, adjusting currentTime from ${currentTime} to ${targetTime}`);
     this.moved = true;
     this.stalled = null;
     media.currentTime = targetTime;
@@ -18531,7 +19185,7 @@ class GapController {
       const targetTime = currentTime + (nudgeRetry + 1) * config.nudgeOffset;
       // playback stalled in buffered area ... let's nudge currentTime to try to overcome this
       const error = new Error(`Nudging 'currentTime' from ${currentTime} to ${targetTime}`);
-
+      this.warn(error.message);
       media.currentTime = targetTime;
       hls.trigger(Events.ERROR, {
         type: ErrorTypes.MEDIA_ERROR,
@@ -18541,7 +19195,7 @@ class GapController {
       });
     } else {
       const error = new Error(`Playhead still not moving while enough data buffered @${currentTime} after ${config.nudgeMaxRetry} nudges`);
-
+      this.error(error.message);
       hls.trigger(Events.ERROR, {
         type: ErrorTypes.MEDIA_ERROR,
         details: ErrorDetails.BUFFER_STALLED_ERROR,
@@ -18556,7 +19210,7 @@ const TICK_INTERVAL = 100; // how often to tick in ms
 
 class StreamController extends BaseStreamController {
   constructor(hls, fragmentTracker, keyLoader) {
-    super(hls, fragmentTracker, keyLoader, '
+    super(hls, fragmentTracker, keyLoader, 'stream-controller', PlaylistLevelType.MAIN);
     this.audioCodecSwap = false;
     this.gapController = null;
     this.level = -1;
@@ -18564,27 +19218,43 @@ class StreamController extends BaseStreamController {
     this.altAudio = false;
     this.audioOnly = false;
     this.fragPlaying = null;
-    this.onvplaying = null;
-    this.onvseeked = null;
     this.fragLastKbps = 0;
     this.couldBacktrack = false;
     this.backtrackFragment = null;
     this.audioCodecSwitch = false;
     this.videoBuffer = null;
-    this.
+    this.onMediaPlaying = () => {
+      // tick to speed up FRAG_CHANGED triggering
+      this.tick();
+    };
+    this.onMediaSeeked = () => {
+      const media = this.media;
+      const currentTime = media ? media.currentTime : null;
+      if (isFiniteNumber(currentTime)) {
+        this.log(`Media seeked to ${currentTime.toFixed(3)}`);
+      }
+
+      // If seeked was issued before buffer was appended do not tick immediately
+      const bufferInfo = this.getMainFwdBufferInfo();
+      if (bufferInfo === null || bufferInfo.len === 0) {
+        this.warn(`Main forward buffer length on "seeked" event ${bufferInfo ? bufferInfo.len : 'empty'})`);
+        return;
+      }
+
+      // tick to speed up FRAG_CHANGED triggering
+      this.tick();
+    };
+    this.registerListeners();
   }
-
+  registerListeners() {
+    super.registerListeners();
     const {
       hls
     } = this;
-    hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
-    hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
-    hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
     hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
     hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this);
     hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
     hls.on(Events.FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
-    hls.on(Events.ERROR, this.onError, this);
     hls.on(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
     hls.on(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
     hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this);
@@ -18592,17 +19262,14 @@ class StreamController extends BaseStreamController {
     hls.on(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
     hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
   }
-
+  unregisterListeners() {
+    super.unregisterListeners();
     const {
       hls
     } = this;
-    hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
-    hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
-    hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
     hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
     hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
     hls.off(Events.FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
-    hls.off(Events.ERROR, this.onError, this);
     hls.off(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
     hls.off(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
     hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this);
@@ -18611,7 +19278,9 @@ class StreamController extends BaseStreamController {
     hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
   }
   onHandlerDestroying() {
-
+    // @ts-ignore
+    this.onMediaPlaying = this.onMediaSeeked = null;
+    this.unregisterListeners();
     super.onHandlerDestroying();
   }
   startLoad(startPosition) {
@@ -18709,6 +19378,9 @@ class StreamController extends BaseStreamController {
     this.checkFragmentChanged();
   }
   doTickIdle() {
+    if (!this.buffering) {
+      return;
+    }
     const {
       hls,
       levelLastLoaded,
@@ -18936,22 +19608,19 @@ class StreamController extends BaseStreamController {
   onMediaAttached(event, data) {
     super.onMediaAttached(event, data);
     const media = data.media;
-
-
-    media.addEventListener('playing', this.onvplaying);
-    media.addEventListener('seeked', this.onvseeked);
+    media.addEventListener('playing', this.onMediaPlaying);
+    media.addEventListener('seeked', this.onMediaSeeked);
     this.gapController = new GapController(this.config, media, this.fragmentTracker, this.hls);
   }
   onMediaDetaching() {
     const {
       media
     } = this;
-    if (media
-    media.removeEventListener('playing', this.
-    media.removeEventListener('seeked', this.
-    this.onvplaying = this.onvseeked = null;
-    this.videoBuffer = null;
+    if (media) {
+      media.removeEventListener('playing', this.onMediaPlaying);
+      media.removeEventListener('seeked', this.onMediaSeeked);
     }
+    this.videoBuffer = null;
     this.fragPlaying = null;
     if (this.gapController) {
       this.gapController.destroy();
@@ -18959,27 +19628,6 @@ class StreamController extends BaseStreamController {
     }
     super.onMediaDetaching();
   }
-  onMediaPlaying() {
-    // tick to speed up FRAG_CHANGED triggering
-    this.tick();
-  }
-  onMediaSeeked() {
-    const media = this.media;
-    const currentTime = media ? media.currentTime : null;
-    if (isFiniteNumber(currentTime)) {
-      this.log(`Media seeked to ${currentTime.toFixed(3)}`);
-    }
-
-    // If seeked was issued before buffer was appended do not tick immediately
-    const bufferInfo = this.getMainFwdBufferInfo();
-    if (bufferInfo === null || bufferInfo.len === 0) {
-      this.warn(`Main forward buffer length on "seeked" event ${bufferInfo ? bufferInfo.len : 'empty'})`);
-      return;
-    }
-
-    // tick to speed up FRAG_CHANGED triggering
-    this.tick();
-  }
   onManifestLoading() {
     // reset buffer on manifest loading
     this.log('Trigger BUFFER_RESET');
@@ -19271,8 +19919,10 @@ class StreamController extends BaseStreamController {
     }
     if (this.loadedmetadata || !BufferHelper.getBuffered(media).length) {
       // Resolve gaps using the main buffer, whose ranges are the intersections of the A/V sourcebuffers
-      const
-
+      const state = this.state;
+      const activeFrag = state !== State.IDLE ? this.fragCurrent : null;
+      const levelDetails = this.getLevelDetails();
+      gapController.poll(this.lastCurrentTime, activeFrag, levelDetails, state);
     }
     this.lastCurrentTime = media.currentTime;
   }
@@ -19605,6 +20255,17 @@ class StreamController extends BaseStreamController {
   getMainFwdBufferInfo() {
     return this.getFwdBufferInfo(this.mediaBuffer ? this.mediaBuffer : this.media, PlaylistLevelType.MAIN);
   }
+  get maxBufferLength() {
+    const {
+      levels,
+      level
+    } = this;
+    const levelInfo = levels == null ? void 0 : levels[level];
+    if (!levelInfo) {
+      return this.config.maxBufferLength;
+    }
+    return this.getMaxBufferLength(levelInfo.maxBitrate);
+  }
   backtrack(frag) {
     this.couldBacktrack = true;
     // Causes findFragments to backtrack through fragments to find the keyframe
@@ -19710,7 +20371,7 @@ class Hls {
    * Get the video-dev/hls.js package version.
    */
   static get version() {
-    return "1.5.9";
+    return "1.5.10-0.canary.10321";
   }
 
   /**
@@ -19773,9 +20434,12 @@ class Hls {
      * The configuration object provided on player instantiation.
      */
     this.userConfig = void 0;
+    /**
+     * The logger functions used by this player instance, configured on player instantiation.
+     */
+    this.logger = void 0;
     this.coreComponents = void 0;
     this.networkControllers = void 0;
-    this.started = false;
     this._emitter = new EventEmitter();
     this._autoLevelCapping = -1;
     this._maxHdcpLevel = null;
@@ -19792,11 +20456,11 @@ class Hls {
     this._media = null;
     this.url = null;
     this.triggeringException = void 0;
-    enableLogs(userConfig.debug || false, 'Hls instance');
-    const config = this.config = mergeConfig(Hls.DefaultConfig, userConfig);
+    const logger = this.logger = enableLogs(userConfig.debug || false, 'Hls instance');
+    const config = this.config = mergeConfig(Hls.DefaultConfig, userConfig, logger);
     this.userConfig = userConfig;
     if (config.progressive) {
-      enableStreamingMode(config);
+      enableStreamingMode(config, logger);
     }
 
     // core controllers and network loaders
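With `enableLogs` now returning the logger and the player storing it on `this.logger` (and threading it through `mergeConfig`/`enableStreamingMode`), log output and the `debug` config are scoped to each instance rather than the module-wide `logger`. A hedged usage sketch, assuming the instance property is meant to be read as shown in this diff:

```js
import Hls from 'hls.js';

// Hedged usage sketch: per-instance logging.
// `debug` accepts a boolean or a console-like object, as in previous releases.
const player = new Hls({ debug: true });
player.logger.log('scoped to this Hls instance');
```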
@@ -19809,17 +20473,17 @@ class Hls {
     } = config;
     const errorController = new ConfigErrorController(this);
     const abrController = this.abrController = new ConfigAbrController(this);
-
+    // FragmentTracker must be defined before StreamController because the order of event handling is important
+    const fragmentTracker = new FragmentTracker(this);
+    const bufferController = this.bufferController = new ConfigBufferController(this, fragmentTracker);
     const capLevelController = this.capLevelController = new ConfigCapLevelController(this);
     const fpsController = new ConfigFpsController(this);
     const playListLoader = new PlaylistLoader(this);
     const id3TrackController = new ID3TrackController(this);
     const ConfigContentSteeringController = config.contentSteeringController;
-    //
+    // ContentSteeringController is defined before LevelController to receive Multivariant Playlist events first
     const contentSteering = ConfigContentSteeringController ? new ConfigContentSteeringController(this) : null;
     const levelController = this.levelController = new LevelController(this, contentSteering);
-    // FragmentTracker must be defined before StreamController because the order of event handling is important
-    const fragmentTracker = new FragmentTracker(this);
     const keyLoader = new KeyLoader(this.config);
     const streamController = this.streamController = new StreamController(this, fragmentTracker, keyLoader);
 
@@ -19895,7 +20559,7 @@ class Hls {
     try {
       return this.emit(event, event, eventObject);
     } catch (error) {
-      logger.error('An internal error happened while handling event ' + event + '. Error message: "' + error.message + '". Here is a stacktrace:', error);
+      this.logger.error('An internal error happened while handling event ' + event + '. Error message: "' + error.message + '". Here is a stacktrace:', error);
       // Prevent recursion in error event handlers that throw #5497
       if (!this.triggeringException) {
         this.triggeringException = true;
@@ -19921,7 +20585,7 @@ class Hls {
    * Dispose of the instance
    */
   destroy() {
-    logger.log('destroy');
+    this.logger.log('destroy');
     this.trigger(Events.DESTROYING, undefined);
     this.detachMedia();
     this.removeAllListeners();
@@ -19942,7 +20606,7 @@ class Hls {
    * Attaches Hls.js to a media element
    */
   attachMedia(media) {
-    logger.log('attachMedia');
+    this.logger.log('attachMedia');
     this._media = media;
     this.trigger(Events.MEDIA_ATTACHING, {
       media: media
@@ -19953,7 +20617,7 @@ class Hls {
    * Detach Hls.js from the media
    */
   detachMedia() {
-    logger.log('detachMedia');
+    this.logger.log('detachMedia');
     this.trigger(Events.MEDIA_DETACHING, undefined);
     this._media = null;
   }
@@ -19970,7 +20634,7 @@ class Hls {
     });
     this._autoLevelCapping = -1;
     this._maxHdcpLevel = null;
-    logger.log(`loadSource:${loadingSource}`);
+    this.logger.log(`loadSource:${loadingSource}`);
     if (media && loadedSource && (loadedSource !== loadingSource || this.bufferController.hasSourceTypes())) {
       this.detachMedia();
       this.attachMedia(media);
@@ -19989,8 +20653,7 @@ class Hls {
    * Defaults to -1 (None: starts from earliest point)
    */
   startLoad(startPosition = -1) {
-    logger.log(`startLoad(${startPosition})`);
-    this.started = true;
+    this.logger.log(`startLoad(${startPosition})`);
     this.networkControllers.forEach(controller => {
       controller.startLoad(startPosition);
     });
@@ -20000,34 +20663,31 @@ class Hls {
    * Stop loading of any stream data.
    */
   stopLoad() {
-    logger.log('stopLoad');
-    this.started = false;
+    this.logger.log('stopLoad');
     this.networkControllers.forEach(controller => {
       controller.stopLoad();
     });
   }
 
   /**
-   * Resumes stream controller segment loading
+   * Resumes stream controller segment loading after `pauseBuffering` has been called.
    */
   resumeBuffering() {
-
-
-
-
-
-    });
-  }
+    this.networkControllers.forEach(controller => {
+      if (controller.resumeBuffering) {
+        controller.resumeBuffering();
+      }
+    });
   }
 
   /**
-   *
+   * Prevents stream controller from loading new segments until `resumeBuffering` is called.
    * This allows for media buffering to be paused without interupting playlist loading.
    */
   pauseBuffering() {
     this.networkControllers.forEach(controller => {
-      if (
-      controller.
+      if (controller.pauseBuffering) {
+        controller.pauseBuffering();
       }
     });
   }
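`pauseBuffering()` and `resumeBuffering()` now simply forward to any network controller that implements them, and the stream controller's `doTickIdle` (patched earlier in this diff) returns early while buffering is paused. Usage sketch:

```js
import Hls from 'hls.js';

const hls = new Hls();
// Stop fetching media segments without stopping playlist refreshes.
hls.pauseBuffering();
// ... later, e.g. when the player becomes visible again ...
hls.resumeBuffering();
```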
@@ -20036,7 +20696,7 @@ class Hls {
    * Swap through possible audio codecs in the stream (for example to switch from stereo to 5.1)
    */
   swapAudioCodec() {
-    logger.log('swapAudioCodec');
+    this.logger.log('swapAudioCodec');
     this.streamController.swapAudioCodec();
   }
 
@@ -20047,7 +20707,7 @@ class Hls {
    * Automatic recovery of media-errors by this process is configurable.
    */
   recoverMediaError() {
-    logger.log('recoverMediaError');
+    this.logger.log('recoverMediaError');
     const media = this._media;
     this.detachMedia();
     if (media) {
@@ -20077,7 +20737,7 @@ class Hls {
    * Set quality level index immediately. This will flush the current buffer to replace the quality asap. That means playback will interrupt at least shortly to re-buffer and re-sync eventually. Set to -1 for automatic level selection.
    */
   set currentLevel(newLevel) {
-    logger.log(`set currentLevel:${newLevel}`);
+    this.logger.log(`set currentLevel:${newLevel}`);
     this.levelController.manualLevel = newLevel;
     this.streamController.immediateLevelSwitch();
   }
@@ -20096,7 +20756,7 @@ class Hls {
    * @param newLevel - Pass -1 for automatic level selection
    */
   set nextLevel(newLevel) {
-    logger.log(`set nextLevel:${newLevel}`);
+    this.logger.log(`set nextLevel:${newLevel}`);
     this.levelController.manualLevel = newLevel;
     this.streamController.nextLevelSwitch();
   }
@@ -20115,7 +20775,7 @@ class Hls {
    * @param newLevel - Pass -1 for automatic level selection
    */
   set loadLevel(newLevel) {
-    logger.log(`set loadLevel:${newLevel}`);
+    this.logger.log(`set loadLevel:${newLevel}`);
     this.levelController.manualLevel = newLevel;
   }
 
@@ -20146,7 +20806,7 @@ class Hls {
    * Sets "first-level", see getter.
    */
   set firstLevel(newLevel) {
-    logger.log(`set firstLevel:${newLevel}`);
+    this.logger.log(`set firstLevel:${newLevel}`);
     this.levelController.firstLevel = newLevel;
   }
 
@@ -20171,7 +20831,7 @@ class Hls {
    * (determined from download of first segment)
    */
   set startLevel(newLevel) {
-    logger.log(`set startLevel:${newLevel}`);
+    this.logger.log(`set startLevel:${newLevel}`);
     // if not in automatic start level detection, ensure startLevel is greater than minAutoLevel
     if (newLevel !== -1) {
       newLevel = Math.max(newLevel, this.minAutoLevel);
@@ -20246,7 +20906,7 @@ class Hls {
    */
   set autoLevelCapping(newLevel) {
     if (this._autoLevelCapping !== newLevel) {
-      logger.log(`set autoLevelCapping:${newLevel}`);
+      this.logger.log(`set autoLevelCapping:${newLevel}`);
       this._autoLevelCapping = newLevel;
       this.levelController.checkMaxAutoUpdated();
     }
@@ -20351,6 +21011,9 @@ class Hls {
   get mainForwardBufferInfo() {
     return this.streamController.getMainFwdBufferInfo();
   }
+  get maxBufferLength() {
+    return this.streamController.maxBufferLength;
+  }
 
   /**
    * Find and select the best matching audio track, making a level switch when a Group change is necessary.
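The new read-only `maxBufferLength` getter exposes the stream controller's effective forward-buffer target, which scales with the current level's bitrate (via `getMaxBufferLength`) and falls back to `config.maxBufferLength` when no level is selected. Usage sketch:

```js
import Hls from 'hls.js';

const hls = new Hls();
// Compare the configured ceiling with the effective target for the active level.
console.log('configured maxBufferLength (s):', hls.config.maxBufferLength);
console.log('effective maxBufferLength (s):', hls.maxBufferLength);
```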
@@ -20518,12 +21181,22 @@ class Hls {
   get forceStartLoad() {
     return this.streamController.forceStartLoad;
   }
+
+  /**
+   * ContentSteering pathwayPriority getter/setter
+   */
+  get pathwayPriority() {
+    return this.levelController.pathwayPriority;
+  }
+  set pathwayPriority(pathwayPriority) {
+    this.levelController.pathwayPriority = pathwayPriority;
+  }
 }
 Hls.defaultConfig = void 0;
 
-var KeySystemFormats =
-var KeySystems =
-var SubtitleStreamController =
-var TimelineController =
-export { AbrController, AttrList,
+var KeySystemFormats = emptyEs.KeySystemFormats;
+var KeySystems = emptyEs.KeySystems;
+var SubtitleStreamController = emptyEs.SubtitleStreamController;
+var TimelineController = emptyEs.TimelineController;
+export { AbrController, AttrList, HevcVideoParser as AudioStreamController, HevcVideoParser as AudioTrackController, BasePlaylistController, BaseSegment, BaseStreamController, BufferController, HevcVideoParser as CMCDController, CapLevelController, ChunkMetadata, ContentSteeringController, DateRange, HevcVideoParser as EMEController, ErrorActionFlags, ErrorController, ErrorDetails, ErrorTypes, Events, FPSController, Fragment, Hls, HlsSkip, HlsUrlParameters, KeySystemFormats, KeySystems, LevelDetails, LevelKey, LoadStats, MetadataSchema, NetworkErrorAction, Part, PlaylistLevelType, SubtitleStreamController, HevcVideoParser as SubtitleTrackController, TimelineController, Hls as default, getMediaSource, isMSESupported, isSupported };
 //# sourceMappingURL=hls.light.mjs.map