hls.js 1.5.9-0.canary.10310 → 1.5.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -4
- package/dist/hls-demo.js +38 -41
- package/dist/hls-demo.js.map +1 -1
- package/dist/hls.js +2191 -3474
- package/dist/hls.js.d.ts +85 -108
- package/dist/hls.js.map +1 -1
- package/dist/hls.light.js +3136 -3783
- package/dist/hls.light.js.map +1 -1
- package/dist/hls.light.min.js +1 -1
- package/dist/hls.light.min.js.map +1 -1
- package/dist/hls.light.mjs +1260 -1934
- package/dist/hls.light.mjs.map +1 -1
- package/dist/hls.min.js +1 -1
- package/dist/hls.min.js.map +1 -1
- package/dist/hls.mjs +4182 -5488
- package/dist/hls.mjs.map +1 -1
- package/dist/hls.worker.js +1 -1
- package/dist/hls.worker.js.map +1 -1
- package/package.json +35 -35
- package/src/config.ts +2 -3
- package/src/controller/abr-controller.ts +20 -24
- package/src/controller/audio-stream-controller.ts +74 -68
- package/src/controller/audio-track-controller.ts +1 -1
- package/src/controller/base-playlist-controller.ts +10 -27
- package/src/controller/base-stream-controller.ts +38 -160
- package/src/controller/buffer-controller.ts +92 -230
- package/src/controller/buffer-operation-queue.ts +19 -16
- package/src/controller/cap-level-controller.ts +2 -3
- package/src/controller/cmcd-controller.ts +14 -51
- package/src/controller/content-steering-controller.ts +15 -29
- package/src/controller/eme-controller.ts +23 -10
- package/src/controller/error-controller.ts +8 -6
- package/src/controller/fps-controller.ts +3 -8
- package/src/controller/fragment-tracker.ts +11 -15
- package/src/controller/gap-controller.ts +16 -43
- package/src/controller/id3-track-controller.ts +7 -7
- package/src/controller/latency-controller.ts +11 -9
- package/src/controller/level-controller.ts +19 -37
- package/src/controller/stream-controller.ts +32 -37
- package/src/controller/subtitle-stream-controller.ts +40 -28
- package/src/controller/subtitle-track-controller.ts +3 -5
- package/src/controller/timeline-controller.ts +21 -19
- package/src/crypt/aes-crypto.ts +2 -21
- package/src/crypt/decrypter.ts +16 -32
- package/src/crypt/fast-aes-key.ts +5 -24
- package/src/demux/audio/aacdemuxer.ts +2 -2
- package/src/demux/audio/ac3-demuxer.ts +3 -4
- package/src/demux/audio/adts.ts +4 -9
- package/src/demux/audio/base-audio-demuxer.ts +14 -16
- package/src/demux/audio/mp3demuxer.ts +3 -4
- package/src/demux/audio/mpegaudio.ts +1 -1
- package/src/demux/id3.ts +411 -0
- package/src/demux/mp4demuxer.ts +7 -7
- package/src/demux/sample-aes.ts +0 -2
- package/src/demux/transmuxer-interface.ts +12 -4
- package/src/demux/transmuxer-worker.ts +4 -4
- package/src/demux/transmuxer.ts +3 -16
- package/src/demux/tsdemuxer.ts +37 -71
- package/src/demux/video/avc-video-parser.ts +119 -208
- package/src/demux/video/base-video-parser.ts +18 -147
- package/src/demux/video/exp-golomb.ts +208 -0
- package/src/events.ts +1 -8
- package/src/exports-named.ts +1 -1
- package/src/hls.ts +38 -61
- package/src/loader/fragment-loader.ts +3 -10
- package/src/loader/key-loader.ts +1 -3
- package/src/loader/level-key.ts +9 -10
- package/src/loader/playlist-loader.ts +5 -4
- package/src/remux/mp4-generator.ts +1 -196
- package/src/remux/mp4-remuxer.ts +8 -24
- package/src/task-loop.ts +2 -5
- package/src/types/component-api.ts +1 -3
- package/src/types/demuxer.ts +0 -4
- package/src/types/events.ts +0 -4
- package/src/types/remuxer.ts +1 -1
- package/src/utils/buffer-helper.ts +31 -12
- package/src/utils/cea-608-parser.ts +3 -1
- package/src/utils/codecs.ts +5 -34
- package/src/utils/fetch-loader.ts +1 -1
- package/src/utils/imsc1-ttml-parser.ts +1 -1
- package/src/utils/keysystem-util.ts +6 -1
- package/src/utils/logger.ts +23 -58
- package/src/utils/mp4-tools.ts +3 -5
- package/src/utils/webvtt-parser.ts +1 -1
- package/src/crypt/decrypter-aes-mode.ts +0 -4
- package/src/demux/video/hevc-video-parser.ts +0 -749
- package/src/empty-es.js +0 -5
- package/src/utils/encryption-methods-util.ts +0 -21
- package/src/utils/utf8-utils.ts +0 -18
package/dist/hls.light.mjs
CHANGED
@@ -176,23 +176,6 @@ var urlToolkit = {exports: {}};
 
 var urlToolkitExports = urlToolkit.exports;
 
-function _defineProperty(e, r, t) {
-  return (r = _toPropertyKey(r)) in e ? Object.defineProperty(e, r, {
-    value: t,
-    enumerable: !0,
-    configurable: !0,
-    writable: !0
-  }) : e[r] = t, e;
-}
-function _extends() {
-  return _extends = Object.assign ? Object.assign.bind() : function (n) {
-    for (var e = 1; e < arguments.length; e++) {
-      var t = arguments[e];
-      for (var r in t) ({}).hasOwnProperty.call(t, r) && (n[r] = t[r]);
-    }
-    return n;
-  }, _extends.apply(null, arguments);
-}
 function ownKeys(e, r) {
   var t = Object.keys(e);
   if (Object.getOwnPropertySymbols) {
@@ -226,7 +209,35 @@ function _toPrimitive(t, r) {
 }
 function _toPropertyKey(t) {
   var i = _toPrimitive(t, "string");
-  return "symbol" == typeof i ? i : i
+  return "symbol" == typeof i ? i : String(i);
+}
+function _defineProperty(obj, key, value) {
+  key = _toPropertyKey(key);
+  if (key in obj) {
+    Object.defineProperty(obj, key, {
+      value: value,
+      enumerable: true,
+      configurable: true,
+      writable: true
+    });
+  } else {
+    obj[key] = value;
+  }
+  return obj;
+}
+function _extends() {
+  _extends = Object.assign ? Object.assign.bind() : function (target) {
+    for (var i = 1; i < arguments.length; i++) {
+      var source = arguments[i];
+      for (var key in source) {
+        if (Object.prototype.hasOwnProperty.call(source, key)) {
+          target[key] = source[key];
+        }
+      }
+    }
+    return target;
+  };
+  return _extends.apply(this, arguments);
 }
 
 // https://caniuse.com/mdn-javascript_builtins_number_isfinite
@@ -245,7 +256,6 @@ let Events = /*#__PURE__*/function (Events) {
   Events["MEDIA_ATTACHED"] = "hlsMediaAttached";
   Events["MEDIA_DETACHING"] = "hlsMediaDetaching";
   Events["MEDIA_DETACHED"] = "hlsMediaDetached";
-  Events["MEDIA_ENDED"] = "hlsMediaEnded";
   Events["BUFFER_RESET"] = "hlsBufferReset";
   Events["BUFFER_CODECS"] = "hlsBufferCodecs";
   Events["BUFFER_CREATED"] = "hlsBufferCreated";
@@ -359,6 +369,58 @@ let ErrorDetails = /*#__PURE__*/function (ErrorDetails) {
   return ErrorDetails;
 }({});
 
+const noop = function noop() {};
+const fakeLogger = {
+  trace: noop,
+  debug: noop,
+  log: noop,
+  warn: noop,
+  info: noop,
+  error: noop
+};
+let exportedLogger = fakeLogger;
+
+// let lastCallTime;
+// function formatMsgWithTimeInfo(type, msg) {
+//   const now = Date.now();
+//   const diff = lastCallTime ? '+' + (now - lastCallTime) : '0';
+//   lastCallTime = now;
+//   msg = (new Date(now)).toISOString() + ' | [' + type + '] > ' + msg + ' ( ' + diff + ' ms )';
+//   return msg;
+// }
+
+function consolePrintFn(type) {
+  const func = self.console[type];
+  if (func) {
+    return func.bind(self.console, `[${type}] >`);
+  }
+  return noop;
+}
+function exportLoggerFunctions(debugConfig, ...functions) {
+  functions.forEach(function (type) {
+    exportedLogger[type] = debugConfig[type] ? debugConfig[type].bind(debugConfig) : consolePrintFn(type);
+  });
+}
+function enableLogs(debugConfig, id) {
+  // check that console is available
+  if (typeof console === 'object' && debugConfig === true || typeof debugConfig === 'object') {
+    exportLoggerFunctions(debugConfig,
+    // Remove out from list here to hard-disable a log-level
+    // 'trace',
+    'debug', 'log', 'info', 'warn', 'error');
+    // Some browsers don't allow to use bind on console object anyway
+    // fallback to default if needed
+    try {
+      exportedLogger.log(`Debug logs enabled for "${id}" in hls.js version ${"1.5.9"}`);
+    } catch (e) {
+      exportedLogger = fakeLogger;
+    }
+  } else {
+    exportedLogger = fakeLogger;
+  }
+}
+const logger = exportedLogger;
+
 const DECIMAL_RESOLUTION_REGEX = /^(\d+)x(\d+)$/;
 const ATTR_LIST_REGEX = /(.+?)=(".*?"|.*?)(?:,|$)/g;
 
@@ -440,84 +502,6 @@ class AttrList {
   }
 }
 
-class Logger {
-  constructor(label, logger) {
-    this.trace = void 0;
-    this.debug = void 0;
-    this.log = void 0;
-    this.warn = void 0;
-    this.info = void 0;
-    this.error = void 0;
-    const lb = `[${label}]:`;
-    this.trace = noop;
-    this.debug = logger.debug.bind(null, lb);
-    this.log = logger.log.bind(null, lb);
-    this.warn = logger.warn.bind(null, lb);
-    this.info = logger.info.bind(null, lb);
-    this.error = logger.error.bind(null, lb);
-  }
-}
-const noop = function noop() {};
-const fakeLogger = {
-  trace: noop,
-  debug: noop,
-  log: noop,
-  warn: noop,
-  info: noop,
-  error: noop
-};
-function createLogger() {
-  return _extends({}, fakeLogger);
-}
-
-// let lastCallTime;
-// function formatMsgWithTimeInfo(type, msg) {
-//   const now = Date.now();
-//   const diff = lastCallTime ? '+' + (now - lastCallTime) : '0';
-//   lastCallTime = now;
-//   msg = (new Date(now)).toISOString() + ' | [' + type + '] > ' + msg + ' ( ' + diff + ' ms )';
-//   return msg;
-// }
-
-function consolePrintFn(type, id) {
-  const func = self.console[type];
-  return func ? func.bind(self.console, `${''}[${type}] >`) : noop;
-}
-function getLoggerFn(key, debugConfig, id) {
-  return debugConfig[key] ? debugConfig[key].bind(debugConfig) : consolePrintFn(key);
-}
-const exportedLogger = createLogger();
-function enableLogs(debugConfig, context, id) {
-  // check that console is available
-  const newLogger = createLogger();
-  if (typeof console === 'object' && debugConfig === true || typeof debugConfig === 'object') {
-    const keys = [
-    // Remove out from list here to hard-disable a log-level
-    // 'trace',
-    'debug', 'log', 'info', 'warn', 'error'];
-    keys.forEach(key => {
-      newLogger[key] = getLoggerFn(key, debugConfig);
-    });
-    // Some browsers don't allow to use bind on console object anyway
-    // fallback to default if needed
-    try {
-      newLogger.log(`Debug logs enabled for "${context}" in hls.js version ${"1.5.9-0.canary.10310"}`);
-    } catch (e) {
-      /* log fn threw an exception. All logger methods are no-ops. */
-      return createLogger();
-    }
-    // global exported logger uses the same functions as new logger without `id`
-    keys.forEach(key => {
-      exportedLogger[key] = getLoggerFn(key, debugConfig);
-    });
-  } else {
-    // Reset global exported logger
-    _extends(exportedLogger, newLogger);
-  }
-  return newLogger;
-}
-const logger = exportedLogger;
-
 // Avoid exporting const enum so that these values can be inlined
 
 function isDateRangeCueAttribute(attrName) {
@@ -1007,32 +991,10 @@ class LevelDetails {
   }
 }
 
-var DecrypterAesMode = {
-  cbc: 0,
-  ctr: 1
-};
-
-function isFullSegmentEncryption(method) {
-  return method === 'AES-128' || method === 'AES-256' || method === 'AES-256-CTR';
-}
-function getAesModeFromFullSegmentMethod(method) {
-  switch (method) {
-    case 'AES-128':
-    case 'AES-256':
-      return DecrypterAesMode.cbc;
-    case 'AES-256-CTR':
-      return DecrypterAesMode.ctr;
-    default:
-      throw new Error(`invalid full segment method ${method}`);
-  }
-}
-
 // This file is inserted as a shim for modules which we do not want to include into the distro.
 // This replacement is done in the "alias" plugin of the rollup config.
-
-
-var emptyEs = {};
-var HevcVideoParser = /*@__PURE__*/getDefaultExportFromCjs(emptyEs);
+var empty = undefined;
+var Cues = /*@__PURE__*/getDefaultExportFromCjs(empty);
 
 function sliceUint8(array, start, end) {
   // @ts-expect-error This polyfills IE11 usage of Uint8Array slice.
@@ -1040,100 +1002,369 @@ function sliceUint8(array, start, end) {
   return Uint8Array.prototype.slice ? array.slice(start, end) : new Uint8Array(Array.prototype.slice.call(array, start, end));
 }
 
-//
-
-/* utf.js - UTF-8 <=> UTF-16 convertion
- *
- * Copyright (C) 1999 Masanao Izumo <iz@onicos.co.jp>
- * Version: 1.0
- * LastModified: Dec 25 1999
- * This library is free. You can redistribute it and/or modify it.
- */
+// breaking up those two types in order to clarify what is happening in the decoding path.
+
 /**
- *
- *
- * @param
- *
- * @returns The string
- *
- * @group Utils
- *
- * @beta
+ * Returns true if an ID3 header can be found at offset in data
+ * @param data - The data to search
+ * @param offset - The offset at which to start searching
  */
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-      case 7:
-        // 0xxxxxxx
-        out += String.fromCharCode(c);
-        break;
-      case 12:
-      case 13:
-        // 110x xxxx 10xx xxxx
-        char2 = array[i++];
-        out += String.fromCharCode((c & 0x1f) << 6 | char2 & 0x3f);
-        break;
-      case 14:
-        // 1110 xxxx 10xx xxxx 10xx xxxx
-        char2 = array[i++];
-        char3 = array[i++];
-        out += String.fromCharCode((c & 0x0f) << 12 | (char2 & 0x3f) << 6 | (char3 & 0x3f) << 0);
-        break;
+const isHeader$2 = (data, offset) => {
+  /*
+   * http://id3.org/id3v2.3.0
+   * [0] = 'I'
+   * [1] = 'D'
+   * [2] = '3'
+   * [3,4] = {Version}
+   * [5] = {Flags}
+   * [6-9] = {ID3 Size}
+   *
+   * An ID3v2 tag can be detected with the following pattern:
+   * $49 44 33 yy yy xx zz zz zz zz
+   * Where yy is less than $FF, xx is the 'flags' byte and zz is less than $80
+   */
+  if (offset + 10 <= data.length) {
+    // look for 'ID3' identifier
+    if (data[offset] === 0x49 && data[offset + 1] === 0x44 && data[offset + 2] === 0x33) {
+      // check version is within range
+      if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
+        // check size is within range
+        if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
+          return true;
+        }
+      }
     }
-  // remove any null characters
-  return decoded.replace(/\0/g, '');
   }
+  return false;
+};
+
+/**
+ * Returns true if an ID3 footer can be found at offset in data
+ * @param data - The data to search
+ * @param offset - The offset at which to start searching
+ */
+const isFooter = (data, offset) => {
+  /*
+   * The footer is a copy of the header, but with a different identifier
+   */
+  if (offset + 10 <= data.length) {
+    // look for '3DI' identifier
+    if (data[offset] === 0x33 && data[offset + 1] === 0x44 && data[offset + 2] === 0x49) {
+      // check version is within range
+      if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
+        // check size is within range
+        if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
+          return true;
+        }
+      }
     }
   }
-  return
-}
+  return false;
+};
 
 /**
- *
+ * Returns any adjacent ID3 tags found in data starting at offset, as one block of data
+ * @param data - The data to search in
+ * @param offset - The offset at which to start searching
+ * @returns the block of data containing any ID3 tags found
+ * or *undefined* if no header is found at the starting offset
  */
+const getID3Data = (data, offset) => {
+  const front = offset;
+  let length = 0;
+  while (isHeader$2(data, offset)) {
+    // ID3 header is 10 bytes
+    length += 10;
+    const size = readSize(data, offset + 6);
+    length += size;
+    if (isFooter(data, offset + 10)) {
+      // ID3 footer is 10 bytes
+      length += 10;
+    }
+    offset += length;
+  }
+  if (length > 0) {
+    return data.subarray(front, front + length);
+  }
+  return undefined;
+};
+const readSize = (data, offset) => {
+  let size = 0;
+  size = (data[offset] & 0x7f) << 21;
+  size |= (data[offset + 1] & 0x7f) << 14;
+  size |= (data[offset + 2] & 0x7f) << 7;
+  size |= data[offset + 3] & 0x7f;
+  return size;
+};
+const canParse$2 = (data, offset) => {
+  return isHeader$2(data, offset) && readSize(data, offset + 6) + 10 <= data.length - offset;
+};
 
-
-
-
-
-
-
-
-
-
+/**
+ * Searches for the Elementary Stream timestamp found in the ID3 data chunk
+ * @param data - Block of data containing one or more ID3 tags
+ */
+const getTimeStamp = data => {
+  const frames = getID3Frames(data);
+  for (let i = 0; i < frames.length; i++) {
+    const frame = frames[i];
+    if (isTimeStampFrame(frame)) {
+      return readTimeStamp(frame);
     }
-    return str;
   }
+  return undefined;
 };
 
-
+/**
+ * Returns true if the ID3 frame is an Elementary Stream timestamp frame
+ */
+const isTimeStampFrame = frame => {
+  return frame && frame.key === 'PRIV' && frame.info === 'com.apple.streaming.transportStreamTimestamp';
+};
+const getFrameData = data => {
+  /*
+  Frame ID       $xx xx xx xx (four characters)
+  Size           $xx xx xx xx
+  Flags          $xx xx
+  */
+  const type = String.fromCharCode(data[0], data[1], data[2], data[3]);
+  const size = readSize(data, 4);
+
+  // skip frame id, size, and flags
+  const offset = 10;
+  return {
+    type,
+    size,
+    data: data.subarray(offset, offset + size)
+  };
+};
+
+/**
+ * Returns an array of ID3 frames found in all the ID3 tags in the id3Data
+ * @param id3Data - The ID3 data containing one or more ID3 tags
+ */
+const getID3Frames = id3Data => {
+  let offset = 0;
+  const frames = [];
+  while (isHeader$2(id3Data, offset)) {
+    const size = readSize(id3Data, offset + 6);
+    // skip past ID3 header
+    offset += 10;
+    const end = offset + size;
+    // loop through frames in the ID3 tag
+    while (offset + 8 < end) {
+      const frameData = getFrameData(id3Data.subarray(offset));
+      const frame = decodeFrame(frameData);
+      if (frame) {
+        frames.push(frame);
+      }
+
+      // skip frame header and frame data
+      offset += frameData.size + 10;
+    }
+    if (isFooter(id3Data, offset)) {
+      offset += 10;
+    }
+  }
+  return frames;
+};
+const decodeFrame = frame => {
+  if (frame.type === 'PRIV') {
+    return decodePrivFrame(frame);
+  } else if (frame.type[0] === 'W') {
+    return decodeURLFrame(frame);
+  }
+  return decodeTextFrame(frame);
+};
+const decodePrivFrame = frame => {
+  /*
+  Format: <text string>\0<binary data>
+  */
+  if (frame.size < 2) {
+    return undefined;
+  }
+  const owner = utf8ArrayToStr(frame.data, true);
+  const privateData = new Uint8Array(frame.data.subarray(owner.length + 1));
+  return {
+    key: frame.type,
+    info: owner,
+    data: privateData.buffer
+  };
+};
+const decodeTextFrame = frame => {
+  if (frame.size < 2) {
+    return undefined;
+  }
+  if (frame.type === 'TXXX') {
+    /*
+    Format:
+    [0]   = {Text Encoding}
+    [1-?] = {Description}\0{Value}
+    */
+    let index = 1;
+    const description = utf8ArrayToStr(frame.data.subarray(index), true);
+    index += description.length + 1;
+    const value = utf8ArrayToStr(frame.data.subarray(index));
+    return {
+      key: frame.type,
+      info: description,
+      data: value
+    };
+  }
+  /*
+  Format:
+  [0]   = {Text Encoding}
+  [1-?] = {Value}
+  */
+  const text = utf8ArrayToStr(frame.data.subarray(1));
+  return {
+    key: frame.type,
+    data: text
+  };
+};
+const decodeURLFrame = frame => {
+  if (frame.type === 'WXXX') {
+    /*
+    Format:
+    [0]   = {Text Encoding}
+    [1-?] = {Description}\0{URL}
+    */
+    if (frame.size < 2) {
+      return undefined;
+    }
+    let index = 1;
+    const description = utf8ArrayToStr(frame.data.subarray(index), true);
+    index += description.length + 1;
+    const value = utf8ArrayToStr(frame.data.subarray(index));
+    return {
+      key: frame.type,
+      info: description,
+      data: value
+    };
+  }
+  /*
+  Format:
+  [0-?] = {URL}
+  */
+  const url = utf8ArrayToStr(frame.data);
+  return {
+    key: frame.type,
+    data: url
+  };
+};
+const readTimeStamp = timeStampFrame => {
+  if (timeStampFrame.data.byteLength === 8) {
+    const data = new Uint8Array(timeStampFrame.data);
+    // timestamp is 33 bit expressed as a big-endian eight-octet number,
+    // with the upper 31 bits set to zero.
+    const pts33Bit = data[3] & 0x1;
+    let timestamp = (data[4] << 23) + (data[5] << 15) + (data[6] << 7) + data[7];
+    timestamp /= 45;
+    if (pts33Bit) {
+      timestamp += 47721858.84;
+    } // 2^32 / 90
+
+    return Math.round(timestamp);
+  }
+  return undefined;
+};
+
+// http://stackoverflow.com/questions/8936984/uint8array-to-string-in-javascript/22373197
+// http://www.onicos.com/staff/iz/amuse/javascript/expert/utf.txt
+/* utf.js - UTF-8 <=> UTF-16 convertion
+ *
+ * Copyright (C) 1999 Masanao Izumo <iz@onicos.co.jp>
+ * Version: 1.0
+ * LastModified: Dec 25 1999
+ * This library is free. You can redistribute it and/or modify it.
+ */
+const utf8ArrayToStr = (array, exitOnNull = false) => {
+  const decoder = getTextDecoder();
+  if (decoder) {
+    const decoded = decoder.decode(array);
+    if (exitOnNull) {
+      // grab up to the first null
+      const idx = decoded.indexOf('\0');
+      return idx !== -1 ? decoded.substring(0, idx) : decoded;
+    }
+
+    // remove any null characters
+    return decoded.replace(/\0/g, '');
+  }
+  const len = array.length;
+  let c;
+  let char2;
+  let char3;
+  let out = '';
+  let i = 0;
+  while (i < len) {
+    c = array[i++];
+    if (c === 0x00 && exitOnNull) {
+      return out;
+    } else if (c === 0x00 || c === 0x03) {
+      // If the character is 3 (END_OF_TEXT) or 0 (NULL) then skip it
+      continue;
+    }
+    switch (c >> 4) {
+      case 0:
+      case 1:
+      case 2:
+      case 3:
+      case 4:
+      case 5:
+      case 6:
+      case 7:
+        // 0xxxxxxx
+        out += String.fromCharCode(c);
+        break;
+      case 12:
+      case 13:
+        // 110x xxxx 10xx xxxx
+        char2 = array[i++];
+        out += String.fromCharCode((c & 0x1f) << 6 | char2 & 0x3f);
+        break;
+      case 14:
+        // 1110 xxxx 10xx xxxx 10xx xxxx
+        char2 = array[i++];
+        char3 = array[i++];
+        out += String.fromCharCode((c & 0x0f) << 12 | (char2 & 0x3f) << 6 | (char3 & 0x3f) << 0);
+        break;
+    }
+  }
+  return out;
+};
+let decoder;
+function getTextDecoder() {
+  // On Play Station 4, TextDecoder is defined but partially implemented.
+  // Manual decoding option is preferable
+  if (navigator.userAgent.includes('PlayStation 4')) {
+    return;
+  }
+  if (!decoder && typeof self.TextDecoder !== 'undefined') {
+    decoder = new self.TextDecoder('utf-8');
+  }
+  return decoder;
+}
+
+/**
+ * hex dump helper class
+ */
+
+const Hex = {
+  hexDump: function (array) {
+    let str = '';
+    for (let i = 0; i < array.length; i++) {
+      let h = array[i].toString(16);
+      if (h.length < 2) {
+        h = '0' + h;
+      }
+      str += h;
+    }
+    return str;
+  }
+};
+
+const UINT32_MAX$1 = Math.pow(2, 32) - 1;
 const push = [].push;
 
 // We are using fixed track IDs for driving the MP4 remuxer
@@ -1395,7 +1626,7 @@ function parseStsd(stsd) {
       {
         const codecBox = findBox(sampleEntries, [fourCC])[0];
         const esdsBox = findBox(codecBox.subarray(28), ['esds'])[0];
-        if (esdsBox && esdsBox.length >
+        if (esdsBox && esdsBox.length > 12) {
          let i = 4;
          // ES Descriptor tag
          if (esdsBox[i++] !== 0x03) {
@@ -1510,9 +1741,7 @@ function parseStsd(stsd) {
 }
 function skipBERInteger(bytes, i) {
   const limit = i + 5;
-  while (bytes[i++] & 0x80 && i < limit) {
-    /* do nothing */
-  }
+  while (bytes[i++] & 0x80 && i < limit) {}
   return i;
 }
 function toHex(x) {
@@ -2204,12 +2433,12 @@ class LevelKey {
     this.keyFormatVersions = formatversions;
     this.iv = iv;
     this.encrypted = method ? method !== 'NONE' : false;
-    this.isCommonEncryption = this.encrypted &&
+    this.isCommonEncryption = this.encrypted && method !== 'AES-128';
   }
   isSupported() {
     // If it's Segment encryption or No encryption, just select that key system
     if (this.method) {
-      if (
+      if (this.method === 'AES-128' || this.method === 'NONE') {
        return true;
      }
      if (this.keyFormat === 'identity') {
@@ -2223,13 +2452,14 @@ class LevelKey {
     if (!this.encrypted || !this.uri) {
       return null;
     }
-    if (
+    if (this.method === 'AES-128' && this.uri && !this.iv) {
      if (typeof sn !== 'number') {
        // We are fetching decryption data for a initialization segment
-        // If the segment was encrypted with AES-128
+        // If the segment was encrypted with AES-128
        // It must have an IV defined. We cannot substitute the Segment Number in.
-
-
+        if (this.method === 'AES-128' && !this.iv) {
+          logger.warn(`missing IV for initialization segment with method="${this.method}" - compliance issue`);
+        }
        // Explicitly set sn to resulting value from implicit conversions 'initSegment' values for IV generation.
        sn = 0;
      }
@@ -2379,28 +2609,23 @@ function getCodecCompatibleNameLower(lowerCaseCodec, preferManagedMediaSource =
   if (CODEC_COMPATIBLE_NAMES[lowerCaseCodec]) {
     return CODEC_COMPATIBLE_NAMES[lowerCaseCodec];
   }
+
+  // Idealy fLaC and Opus would be first (spec-compliant) but
+  // some browsers will report that fLaC is supported then fail.
+  // see: https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
   const codecsToCheck = {
-    // Idealy fLaC and Opus would be first (spec-compliant) but
-    // some browsers will report that fLaC is supported then fail.
-    // see: https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
     flac: ['flac', 'fLaC', 'FLAC'],
-    opus: ['opus', 'Opus']
-    // Replace audio codec info if browser does not support mp4a.40.34,
-    // and demuxer can fallback to 'audio/mpeg' or 'audio/mp4;codecs="mp3"'
-    'mp4a.40.34': ['mp3']
+    opus: ['opus', 'Opus']
   }[lowerCaseCodec];
   for (let i = 0; i < codecsToCheck.length; i++) {
-    var _getMediaSource;
     if (isCodecMediaSourceSupported(codecsToCheck[i], 'audio', preferManagedMediaSource)) {
       CODEC_COMPATIBLE_NAMES[lowerCaseCodec] = codecsToCheck[i];
       return codecsToCheck[i];
-    } else if (codecsToCheck[i] === 'mp3' && (_getMediaSource = getMediaSource(preferManagedMediaSource)) != null && _getMediaSource.isTypeSupported('audio/mpeg')) {
-      return '';
     }
   }
   return lowerCaseCodec;
 }
-const AUDIO_CODEC_REGEXP = /flac|opus
+const AUDIO_CODEC_REGEXP = /flac|opus/i;
 function getCodecCompatibleName(codec, preferManagedMediaSource = true) {
   return codec.replace(AUDIO_CODEC_REGEXP, m => getCodecCompatibleNameLower(m.toLowerCase(), preferManagedMediaSource));
 }
@@ -2423,16 +2648,6 @@ function convertAVC1ToAVCOTI(codec) {
   }
   return codec;
 }
-function getM2TSSupportedAudioTypes(preferManagedMediaSource) {
-  const MediaSource = getMediaSource(preferManagedMediaSource) || {
-    isTypeSupported: () => false
-  };
-  return {
-    mpeg: MediaSource.isTypeSupported('audio/mpeg'),
-    mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
-    ac3: false
-  };
-}
 
 const MASTER_PLAYLIST_REGEX = /#EXT-X-STREAM-INF:([^\r\n]*)(?:[\r\n](?:#[^\r\n]*)?)*([^\r\n]+)|#EXT-X-(SESSION-DATA|SESSION-KEY|DEFINE|CONTENT-STEERING|START):([^\r\n]*)[\r\n]+/g;
 const MASTER_PLAYLIST_MEDIA_REGEX = /#EXT-X-MEDIA:(.*)/g;
@@ -3233,10 +3448,10 @@ class PlaylistLoader {
     const loaderContext = loader.context;
     if (loaderContext && loaderContext.url === context.url && loaderContext.level === context.level) {
       // same URL can't overlap
-
+      logger.trace('[playlist-loader]: playlist request ongoing');
       return;
     }
-
+    logger.log(`[playlist-loader]: aborting previous loader for type: ${context.type}`);
     loader.abort();
   }
 
@@ -3346,7 +3561,7 @@ class PlaylistLoader {
     // alt audio rendition in which quality levels (main)
     // contains both audio+video. but with mixed audio track not signaled
     if (!embeddedAudioFound && levels[0].audioCodec && !levels[0].attrs.AUDIO) {
-
+      logger.log('[playlist-loader]: audio codec signaled in quality level, but no embedded audio track signaled, create one');
       audioTracks.unshift({
         type: 'main',
         name: 'main',
@@ -3445,7 +3660,7 @@ class PlaylistLoader {
       message += ` id: ${context.id} group-id: "${context.groupId}"`;
     }
     const error = new Error(message);
-
+    logger.warn(`[playlist-loader]: ${message}`);
     let details = ErrorDetails.UNKNOWN;
     let fatal = false;
     const loader = this.getInternalLoader(context);
@@ -3683,409 +3898,10 @@ var MetadataSchema = {
   emsg: "https://aomedia.org/emsg/ID3"
 };
 
-
-
-
-
- *
- * @returns The decoded ID3 PRIV frame
- *
- * @internal
- *
- * @group ID3
- */
-function decodeId3PrivFrame(frame) {
-  /*
-  Format: <text string>\0<binary data>
-  */
-  if (frame.size < 2) {
-    return undefined;
-  }
-  const owner = utf8ArrayToStr(frame.data, true);
-  const privateData = new Uint8Array(frame.data.subarray(owner.length + 1));
-  return {
-    key: frame.type,
-    info: owner,
-    data: privateData.buffer
-  };
-}
-
-/**
- * Decodes an ID3 text frame
- *
- * @param frame - the ID3 text frame
- *
- * @returns The decoded ID3 text frame
- *
- * @internal
- *
- * @group ID3
- */
-function decodeId3TextFrame(frame) {
-  if (frame.size < 2) {
-    return undefined;
-  }
-  if (frame.type === 'TXXX') {
-    /*
-    Format:
-    [0]   = {Text Encoding}
-    [1-?] = {Description}\0{Value}
-    */
-    let index = 1;
-    const description = utf8ArrayToStr(frame.data.subarray(index), true);
-    index += description.length + 1;
-    const value = utf8ArrayToStr(frame.data.subarray(index));
-    return {
-      key: frame.type,
-      info: description,
-      data: value
-    };
-  }
-  /*
-  Format:
-  [0]   = {Text Encoding}
-  [1-?] = {Value}
-  */
-  const text = utf8ArrayToStr(frame.data.subarray(1));
-  return {
-    key: frame.type,
-    info: '',
-    data: text
-  };
-}
-
-/**
- * Decode a URL frame
- *
- * @param frame - the ID3 URL frame
- *
- * @returns The decoded ID3 URL frame
- *
- * @internal
- *
- * @group ID3
- */
-function decodeId3UrlFrame(frame) {
-  if (frame.type === 'WXXX') {
-    /*
-    Format:
-    [0]   = {Text Encoding}
-    [1-?] = {Description}\0{URL}
-    */
-    if (frame.size < 2) {
-      return undefined;
-    }
-    let index = 1;
-    const description = utf8ArrayToStr(frame.data.subarray(index), true);
-    index += description.length + 1;
-    const value = utf8ArrayToStr(frame.data.subarray(index));
-    return {
-      key: frame.type,
-      info: description,
-      data: value
-    };
-  }
-  /*
-  Format:
-  [0-?] = {URL}
-  */
-  const url = utf8ArrayToStr(frame.data);
-  return {
-    key: frame.type,
-    info: '',
-    data: url
-  };
-}
-
-function toUint8(data, offset = 0, length = Infinity) {
-  return view(data, offset, length, Uint8Array);
-}
-function view(data, offset, length, Type) {
-  const buffer = unsafeGetArrayBuffer(data);
-  let bytesPerElement = 1;
-  if ('BYTES_PER_ELEMENT' in Type) {
-    bytesPerElement = Type.BYTES_PER_ELEMENT;
-  }
-  // Absolute end of the |data| view within |buffer|.
-  const dataOffset = isArrayBufferView(data) ? data.byteOffset : 0;
-  const dataEnd = (dataOffset + data.byteLength) / bytesPerElement;
-  // Absolute start of the result within |buffer|.
-  const rawStart = (dataOffset + offset) / bytesPerElement;
-  const start = Math.floor(Math.max(0, Math.min(rawStart, dataEnd)));
-  // Absolute end of the result within |buffer|.
-  const end = Math.floor(Math.min(start + Math.max(length, 0), dataEnd));
-  return new Type(buffer, start, end - start);
-}
-function unsafeGetArrayBuffer(view) {
-  if (view instanceof ArrayBuffer) {
-    return view;
-  } else {
-    return view.buffer;
-  }
-}
-function isArrayBufferView(obj) {
-  return obj && obj.buffer instanceof ArrayBuffer && obj.byteLength !== undefined && obj.byteOffset !== undefined;
-}
-
-function toArrayBuffer(view) {
-  if (view instanceof ArrayBuffer) {
-    return view;
-  } else {
-    if (view.byteOffset == 0 && view.byteLength == view.buffer.byteLength) {
-      // This is a TypedArray over the whole buffer.
-      return view.buffer;
-    }
-    // This is a 'view' on the buffer. Create a new buffer that only contains
-    // the data. Note that since this isn't an ArrayBuffer, the 'new' call
-    // will allocate a new buffer to hold the copy.
-    return new Uint8Array(view).buffer;
-  }
-}
-
-function decodeId3ImageFrame(frame) {
-  const metadataFrame = {
-    key: frame.type,
-    description: '',
-    data: '',
-    mimeType: null,
-    pictureType: null
-  };
-  const utf8Encoding = 0x03;
-  if (frame.size < 2) {
-    return undefined;
-  }
-  if (frame.data[0] !== utf8Encoding) {
-    console.log('Ignore frame with unrecognized character ' + 'encoding');
-    return undefined;
-  }
-  const mimeTypeEndIndex = frame.data.subarray(1).indexOf(0);
-  if (mimeTypeEndIndex === -1) {
-    return undefined;
-  }
-  const mimeType = utf8ArrayToStr(toUint8(frame.data, 1, mimeTypeEndIndex));
-  const pictureType = frame.data[2 + mimeTypeEndIndex];
-  const descriptionEndIndex = frame.data.subarray(3 + mimeTypeEndIndex).indexOf(0);
-  if (descriptionEndIndex === -1) {
-    return undefined;
-  }
-  const description = utf8ArrayToStr(toUint8(frame.data, 3 + mimeTypeEndIndex, descriptionEndIndex));
-  let data;
-  if (mimeType === '-->') {
-    data = utf8ArrayToStr(toUint8(frame.data, 4 + mimeTypeEndIndex + descriptionEndIndex));
-  } else {
-    data = toArrayBuffer(frame.data.subarray(4 + mimeTypeEndIndex + descriptionEndIndex));
-  }
-  metadataFrame.mimeType = mimeType;
-  metadataFrame.pictureType = pictureType;
-  metadataFrame.description = description;
-  metadataFrame.data = data;
-  return metadataFrame;
-}
-
-/**
- * Decode an ID3 frame.
- *
- * @param frame - the ID3 frame
- *
- * @returns The decoded ID3 frame
- *
- * @internal
- *
- * @group ID3
- */
-function decodeId3Frame(frame) {
-  if (frame.type === 'PRIV') {
-    return decodeId3PrivFrame(frame);
-  } else if (frame.type[0] === 'W') {
-    return decodeId3UrlFrame(frame);
-  } else if (frame.type === 'APIC') {
-    return decodeId3ImageFrame(frame);
-  }
-  return decodeId3TextFrame(frame);
-}
-
-/**
- * Read ID3 size
- *
- * @param data - The data to read from
- * @param offset - The offset at which to start reading
- *
- * @returns The size
- *
- * @internal
- *
- * @group ID3
- */
-function readId3Size(data, offset) {
-  let size = 0;
-  size = (data[offset] & 0x7f) << 21;
-  size |= (data[offset + 1] & 0x7f) << 14;
-  size |= (data[offset + 2] & 0x7f) << 7;
-  size |= data[offset + 3] & 0x7f;
-  return size;
-}
-
-/**
- * Returns the data of an ID3 frame.
- *
- * @param data - The data to read from
- *
- * @returns The data of the ID3 frame
- *
- * @internal
- *
- * @group ID3
- */
-function getId3FrameData(data) {
-  /*
-  Frame ID       $xx xx xx xx (four characters)
-  Size           $xx xx xx xx
-  Flags          $xx xx
-  */
-  const type = String.fromCharCode(data[0], data[1], data[2], data[3]);
-  const size = readId3Size(data, 4);
-  // skip frame id, size, and flags
-  const offset = 10;
-  return {
-    type,
-    size,
-    data: data.subarray(offset, offset + size)
-  };
-}
-
-/**
- * Returns true if an ID3 footer can be found at offset in data
- *
- * @param data - The data to search in
- * @param offset - The offset at which to start searching
- *
- * @returns `true` if an ID3 footer is found
- *
- * @internal
- *
- * @group ID3
- */
-function isId3Footer(data, offset) {
-  /*
-   * The footer is a copy of the header, but with a different identifier
-   */
-  if (offset + 10 <= data.length) {
-    // look for '3DI' identifier
-    if (data[offset] === 0x33 && data[offset + 1] === 0x44 && data[offset + 2] === 0x49) {
-      // check version is within range
-      if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
-        // check size is within range
-        if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
-          return true;
-        }
-      }
-    }
-  }
-  return false;
-}
-
-/**
- * Returns true if an ID3 header can be found at offset in data
- *
- * @param data - The data to search in
- * @param offset - The offset at which to start searching
- *
- * @returns `true` if an ID3 header is found
- *
- * @internal
- *
- * @group ID3
- */
-function isId3Header(data, offset) {
-  /*
-   * http://id3.org/id3v2.3.0
-   * [0] = 'I'
-   * [1] = 'D'
-   * [2] = '3'
-   * [3,4] = {Version}
-   * [5] = {Flags}
-   * [6-9] = {ID3 Size}
-   *
-   * An ID3v2 tag can be detected with the following pattern:
-   * $49 44 33 yy yy xx zz zz zz zz
-   * Where yy is less than $FF, xx is the 'flags' byte and zz is less than $80
-   */
-  if (offset + 10 <= data.length) {
-    // look for 'ID3' identifier
-    if (data[offset] === 0x49 && data[offset + 1] === 0x44 && data[offset + 2] === 0x33) {
-      // check version is within range
-      if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
-        // check size is within range
-        if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
-          return true;
-        }
-      }
-    }
-  }
-  return false;
-}
-
-const HEADER_FOOTER_SIZE = 10;
-const FRAME_SIZE = 10;
-/**
- * Returns an array of ID3 frames found in all the ID3 tags in the id3Data
- *
- * @param id3Data - The ID3 data containing one or more ID3 tags
- *
- * @returns Array of ID3 frame objects
- *
- * @group ID3
- *
- * @beta
- */
-function getId3Frames(id3Data) {
-  let offset = 0;
-  const frames = [];
-  while (isId3Header(id3Data, offset)) {
-    const size = readId3Size(id3Data, offset + 6);
-    if (id3Data[offset + 5] >> 6 & 1) {
-      // skip extended header
-      offset += HEADER_FOOTER_SIZE;
-    }
-    // skip past ID3 header
-    offset += HEADER_FOOTER_SIZE;
-    const end = offset + size;
-    // loop through frames in the ID3 tag
-    while (offset + FRAME_SIZE < end) {
-      const frameData = getId3FrameData(id3Data.subarray(offset));
-      const frame = decodeId3Frame(frameData);
-      if (frame) {
-        frames.push(frame);
-      }
-      // skip frame header and frame data
-      offset += frameData.size + HEADER_FOOTER_SIZE;
-    }
-    if (isId3Footer(id3Data, offset)) {
-      offset += HEADER_FOOTER_SIZE;
-    }
-  }
-  return frames;
-}
-
-/**
- * Returns true if the ID3 frame is an Elementary Stream timestamp frame
- *
- * @param frame - the ID3 frame
- *
- * @returns `true` if the ID3 frame is an Elementary Stream timestamp frame
- *
- * @internal
- *
- * @group ID3
- */
-function isId3TimestampFrame(frame) {
-  return frame && frame.key === 'PRIV' && frame.info === 'com.apple.streaming.transportStreamTimestamp';
-}
-
-const MIN_CUE_DURATION = 0.25;
-function getCueClass() {
-  if (typeof self === 'undefined') return undefined;
-  return self.VTTCue || self.TextTrackCue;
+const MIN_CUE_DURATION = 0.25;
+function getCueClass() {
+  if (typeof self === 'undefined') return undefined;
+  return self.VTTCue || self.TextTrackCue;
 }
 function createCueWithDataFields(Cue, startTime, endTime, data, type) {
   let cue = new Cue(startTime, endTime, '');
@@ -4164,10 +3980,11 @@ class ID3TrackController {
     this.media = data.media;
   }
   onMediaDetaching() {
-    if (this.id3Track) {
-
-      this.id3Track = null;
+    if (!this.id3Track) {
+      return;
     }
+    clearCurrentCues(this.id3Track);
+    this.id3Track = null;
     this.media = null;
     this.dateRangeCuesAppended = {};
   }
@@ -4226,7 +4043,7 @@ class ID3TrackController {
       if (type === MetadataSchema.emsg && !enableEmsgMetadataCues || !enableID3MetadataCues) {
         continue;
       }
-      const frames =
+      const frames = getID3Frames(samples[i].data);
       if (frames) {
         const startTime = samples[i].pts;
         let endTime = startTime + samples[i].duration;
@@ -4240,7 +4057,7 @@ class ID3TrackController {
         for (let j = 0; j < frames.length; j++) {
           const frame = frames[j];
           // Safari doesn't put the timestamp frame in the TextTrack
-          if (!
+          if (!isTimeStampFrame(frame)) {
            // add a bounds to any unbounded cues
            this.updateId3CueEnds(startTime, type);
            const cue = createCueWithDataFields(Cue, startTime, endTime, frame, type);
@@ -4408,47 +4225,7 @@ class LatencyController {
     this.currentTime = 0;
     this.stallCount = 0;
     this._latency = null;
-    this.
-      const {
-        media,
-        levelDetails
-      } = this;
-      if (!media || !levelDetails) {
-        return;
-      }
-      this.currentTime = media.currentTime;
-      const latency = this.computeLatency();
-      if (latency === null) {
-        return;
-      }
-      this._latency = latency;
-
-      // Adapt playbackRate to meet target latency in low-latency mode
-      const {
-        lowLatencyMode,
-        maxLiveSyncPlaybackRate
-      } = this.config;
-      if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1 || !levelDetails.live) {
-        return;
-      }
-      const targetLatency = this.targetLatency;
-      if (targetLatency === null) {
-        return;
-      }
-      const distanceFromTarget = latency - targetLatency;
-      // Only adjust playbackRate when within one target duration of targetLatency
-      // and more than one second from under-buffering.
-      // Playback further than one target duration from target can be considered DVR playback.
-      const liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration);
-      const inLiveRange = distanceFromTarget < liveMinLatencyDuration;
-      if (inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) {
-        const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
-        const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20;
-        media.playbackRate = Math.min(max, Math.max(1, rate));
-      } else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
-        media.playbackRate = 1;
-      }
-    };
+    this.timeupdateHandler = () => this.timeupdate();
     this.hls = hls;
     this.config = hls.config;
     this.registerListeners();
@@ -4540,7 +4317,7 @@ class LatencyController {
     this.onMediaDetaching();
     this.levelDetails = null;
     // @ts-ignore
-    this.hls = null;
+    this.hls = this.timeupdateHandler = null;
   }
   registerListeners() {
     this.hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
@@ -4558,11 +4335,11 @@ class LatencyController {
   }
   onMediaAttached(event, data) {
     this.media = data.media;
-    this.media.addEventListener('timeupdate', this.
+    this.media.addEventListener('timeupdate', this.timeupdateHandler);
   }
   onMediaDetaching() {
     if (this.media) {
-      this.media.removeEventListener('timeupdate', this.
+      this.media.removeEventListener('timeupdate', this.timeupdateHandler);
       this.media = null;
     }
   }
@@ -4576,10 +4353,10 @@ class LatencyController {
   }) {
     this.levelDetails = details;
     if (details.advanced) {
-      this.
+      this.timeupdate();
     }
     if (!details.live && this.media) {
-      this.media.removeEventListener('timeupdate', this.
+      this.media.removeEventListener('timeupdate', this.timeupdateHandler);
     }
   }
   onError(event, data) {
@@ -4589,7 +4366,48 @@ class LatencyController {
     }
     this.stallCount++;
     if ((_this$levelDetails = this.levelDetails) != null && _this$levelDetails.live) {
-
+      logger.warn('[playback-rate-controller]: Stall detected, adjusting target latency');
+    }
+  }
+  timeupdate() {
+    const {
+      media,
+      levelDetails
+    } = this;
+    if (!media || !levelDetails) {
+      return;
+    }
+    this.currentTime = media.currentTime;
+    const latency = this.computeLatency();
+    if (latency === null) {
+      return;
+    }
+    this._latency = latency;
+
+    // Adapt playbackRate to meet target latency in low-latency mode
+    const {
+      lowLatencyMode,
+      maxLiveSyncPlaybackRate
+    } = this.config;
+    if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1 || !levelDetails.live) {
+      return;
+    }
+    const targetLatency = this.targetLatency;
+    if (targetLatency === null) {
+      return;
+    }
+    const distanceFromTarget = latency - targetLatency;
+    // Only adjust playbackRate when within one target duration of targetLatency
+    // and more than one second from under-buffering.
+    // Playback further than one target duration from target can be considered DVR playback.
+    const liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration);
+    const inLiveRange = distanceFromTarget < liveMinLatencyDuration;
+    if (inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) {
+      const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
+      const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20;
+      media.playbackRate = Math.min(max, Math.max(1, rate));
+    } else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
+      media.playbackRate = 1;
     }
   }
   estimateLiveEdge() {
@@ -5364,13 +5182,18 @@ var ErrorActionFlags = {
   MoveAllAlternatesMatchingHDCP: 2,
   SwitchToSDR: 4
 }; // Reserved for future use
-class ErrorController
+class ErrorController {
   constructor(hls) {
-    super('error-controller', hls.logger);
     this.hls = void 0;
     this.playlistError = 0;
     this.penalizedRenditions = {};
+    this.log = void 0;
+    this.warn = void 0;
+    this.error = void 0;
     this.hls = hls;
+    this.log = logger.log.bind(logger, `[info]:`);
+    this.warn = logger.warn.bind(logger, `[warning]:`);
+    this.error = logger.error.bind(logger, `[error]:`);
     this.registerListeners();
   }
   registerListeners() {
@@ -5722,13 +5545,16 @@ class ErrorController extends Logger {
    }
  }

- class BasePlaylistController extends Logger {
+ class BasePlaylistController {
    constructor(hls, logPrefix) {
-     super(logPrefix, hls.logger);
      this.hls = void 0;
      this.timer = -1;
      this.requestScheduled = -1;
      this.canLoad = false;
+     this.log = void 0;
+     this.warn = void 0;
+     this.log = logger.log.bind(logger, `${logPrefix}:`);
+     this.warn = logger.warn.bind(logger, `${logPrefix}:`);
      this.hls = hls;
    }
    destroy() {
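Both constructors above replace the canary's `Logger` base class with prefix-bound calls to the module-level `logger`. A minimal sketch of that pattern in isolation (the local `logger` object and the `ExampleController` name are stand-ins, not hls.js exports):

```js
// Stand-in for the module-level logger used throughout the bundle.
const logger = {
  log: console.log.bind(console),
  warn: console.warn.bind(console),
};

class ExampleController {
  constructor(logPrefix) {
    // Pre-bind the prefix so every call site stays a plain this.log(...) / this.warn(...).
    this.log = logger.log.bind(logger, `${logPrefix}:`);
    this.warn = logger.warn.bind(logger, `${logPrefix}:`);
  }
}

new ExampleController('[example-controller]').log('ready'); // "[example-controller]: ready"
```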
@@ -5761,7 +5587,7 @@ class BasePlaylistController extends Logger {
|
|
5761
5587
|
try {
|
5762
5588
|
uri = new self.URL(attr.URI, previous.url).href;
|
5763
5589
|
} catch (error) {
|
5764
|
-
|
5590
|
+
logger.warn(`Could not construct new URL for Rendition Report: ${error}`);
|
5765
5591
|
uri = attr.URI || '';
|
5766
5592
|
}
|
5767
5593
|
// Use exact match. Otherwise, the last partial match, if any, will be used
|
@@ -5849,12 +5675,7 @@ class BasePlaylistController extends Logger {
    const cdnAge = lastAdvanced + details.ageHeader;
    let currentGoal = Math.min(cdnAge - details.partTarget, details.targetduration * 1.5);
    if (currentGoal > 0) {
-     if (
-       // Omit segment and part directives when the last response was more than 3 target durations ago,
-       this.log(`Playlist last advanced ${lastAdvanced.toFixed(2)}s ago. Omitting segment and part directives.`);
-       msn = undefined;
-       part = undefined;
-     } else if (previousDetails != null && previousDetails.tuneInGoal && cdnAge - details.partTarget > previousDetails.tuneInGoal) {
+     if (previousDetails && currentGoal > previousDetails.tuneInGoal) {
        // If we attempted to get the next or latest playlist update, but currentGoal increased,
        // then we either can't catchup, or the "age" header cannot be trusted.
        this.warn(`CDN Tune-in goal increased from: ${previousDetails.tuneInGoal} to: ${currentGoal} with playlist age: ${details.age}`);
@@ -6300,9 +6121,8 @@ function getCodecTiers(levels, audioTracksByGroup, minAutoLevel, maxAutoLevel) {
    }, {});
  }

- class AbrController extends Logger {
+ class AbrController {
    constructor(_hls) {
-     super('abr', _hls.logger);
      this.hls = void 0;
      this.lastLevelLoadSec = 0;
      this.lastLoadedFragLevel = -1;
@@ -6416,7 +6236,7 @@ class AbrController extends Logger {
|
|
6416
6236
|
this.resetEstimator(nextLoadLevelBitrate);
|
6417
6237
|
}
|
6418
6238
|
this.clearTimer();
|
6419
|
-
|
6239
|
+
logger.warn(`[abr] Fragment ${frag.sn}${part ? ' part ' + part.index : ''} of level ${frag.level} is loading too slowly;
|
6420
6240
|
Time to underbuffer: ${bufferStarvationDelay.toFixed(3)} s
|
6421
6241
|
Estimated load time for current fragment: ${fragLoadedDelay.toFixed(3)} s
|
6422
6242
|
Estimated load time for down switch fragment: ${fragLevelNextLoadedDelay.toFixed(3)} s
|
@@ -6436,7 +6256,7 @@ class AbrController extends Logger {
|
|
6436
6256
|
}
|
6437
6257
|
resetEstimator(abrEwmaDefaultEstimate) {
|
6438
6258
|
if (abrEwmaDefaultEstimate) {
|
6439
|
-
|
6259
|
+
logger.log(`setting initial bwe to ${abrEwmaDefaultEstimate}`);
|
6440
6260
|
this.hls.config.abrEwmaDefaultEstimate = abrEwmaDefaultEstimate;
|
6441
6261
|
}
|
6442
6262
|
this.firstSelection = -1;
|
@@ -6668,7 +6488,7 @@ class AbrController extends Logger {
|
|
6668
6488
|
}
|
6669
6489
|
const firstLevel = this.hls.firstLevel;
|
6670
6490
|
const clamped = Math.min(Math.max(firstLevel, minAutoLevel), maxAutoLevel);
|
6671
|
-
|
6491
|
+
logger.warn(`[abr] Could not find best starting auto level. Defaulting to first in playlist ${firstLevel} clamped to ${clamped}`);
|
6672
6492
|
return clamped;
|
6673
6493
|
}
|
6674
6494
|
get forcedAutoLevel() {
|
@@ -6714,9 +6534,6 @@ class AbrController extends Logger {
|
|
6714
6534
|
partCurrent,
|
6715
6535
|
hls
|
6716
6536
|
} = this;
|
6717
|
-
if (hls.levels.length <= 1) {
|
6718
|
-
return hls.loadLevel;
|
6719
|
-
}
|
6720
6537
|
const {
|
6721
6538
|
maxAutoLevel,
|
6722
6539
|
config,
|
@@ -6749,13 +6566,13 @@ class AbrController extends Logger {
|
|
6749
6566
|
// cap maxLoadingDelay and ensure it is not bigger 'than bitrate test' frag duration
|
6750
6567
|
const maxLoadingDelay = currentFragDuration ? Math.min(currentFragDuration, config.maxLoadingDelay) : config.maxLoadingDelay;
|
6751
6568
|
maxStarvationDelay = maxLoadingDelay - bitrateTestDelay;
|
6752
|
-
|
6569
|
+
logger.info(`[abr] bitrate test took ${Math.round(1000 * bitrateTestDelay)}ms, set first fragment max fetchDuration to ${Math.round(1000 * maxStarvationDelay)} ms`);
|
6753
6570
|
// don't use conservative factor on bitrate test
|
6754
6571
|
bwFactor = bwUpFactor = 1;
|
6755
6572
|
}
|
6756
6573
|
}
|
6757
6574
|
const bestLevel = this.findBestLevel(avgbw, minAutoLevel, maxAutoLevel, bufferStarvationDelay, maxStarvationDelay, bwFactor, bwUpFactor);
|
6758
|
-
|
6575
|
+
logger.info(`[abr] ${bufferStarvationDelay ? 'rebuffering expected' : 'buffer is empty'}, optimal quality level ${bestLevel}`);
|
6759
6576
|
if (bestLevel > -1) {
|
6760
6577
|
return bestLevel;
|
6761
6578
|
}
|
@@ -6829,7 +6646,7 @@ class AbrController extends Logger {
|
|
6829
6646
|
currentVideoRange = preferHDR ? videoRanges[videoRanges.length - 1] : videoRanges[0];
|
6830
6647
|
currentFrameRate = minFramerate;
|
6831
6648
|
currentBw = Math.max(currentBw, minBitrate);
|
6832
|
-
|
6649
|
+
logger.log(`[abr] picked start tier ${JSON.stringify(startTier)}`);
|
6833
6650
|
} else {
|
6834
6651
|
currentCodecSet = level == null ? void 0 : level.codecSet;
|
6835
6652
|
currentVideoRange = level == null ? void 0 : level.videoRange;
|
@@ -6882,9 +6699,9 @@ class AbrController extends Logger {
|
|
6882
6699
|
const forcedAutoLevel = this.forcedAutoLevel;
|
6883
6700
|
if (i !== loadLevel && (forcedAutoLevel === -1 || forcedAutoLevel !== loadLevel)) {
|
6884
6701
|
if (levelsSkipped.length) {
|
6885
|
-
|
6702
|
+
logger.trace(`[abr] Skipped level(s) ${levelsSkipped.join(',')} of ${maxAutoLevel} max with CODECS and VIDEO-RANGE:"${levels[levelsSkipped[0]].codecs}" ${levels[levelsSkipped[0]].videoRange}; not compatible with "${level.codecs}" ${currentVideoRange}`);
|
6886
6703
|
}
|
6887
|
-
|
6704
|
+
logger.info(`[abr] switch candidate:${selectionBaseLevel}->${i} adjustedbw(${Math.round(adjustedbw)})-bitrate=${Math.round(adjustedbw - bitrate)} ttfb:${ttfbEstimateSec.toFixed(1)} avgDuration:${avgDuration.toFixed(1)} maxFetchDuration:${maxFetchDuration.toFixed(1)} fetchDuration:${fetchDuration.toFixed(1)} firstSelection:${firstSelection} codecSet:${currentCodecSet} videoRange:${currentVideoRange} hls.loadLevel:${loadLevel}`);
|
6888
6705
|
}
|
6889
6706
|
if (firstSelection) {
|
6890
6707
|
this.firstSelection = i;
|
@@ -6927,29 +6744,40 @@ class BufferHelper {
|
|
6927
6744
|
* Return true if `media`'s buffered include `position`
|
6928
6745
|
*/
|
6929
6746
|
static isBuffered(media, position) {
|
6930
|
-
|
6931
|
-
|
6932
|
-
|
6933
|
-
|
6934
|
-
|
6747
|
+
try {
|
6748
|
+
if (media) {
|
6749
|
+
const buffered = BufferHelper.getBuffered(media);
|
6750
|
+
for (let i = 0; i < buffered.length; i++) {
|
6751
|
+
if (position >= buffered.start(i) && position <= buffered.end(i)) {
|
6752
|
+
return true;
|
6753
|
+
}
|
6935
6754
|
}
|
6936
6755
|
}
|
6756
|
+
} catch (error) {
|
6757
|
+
// this is to catch
|
6758
|
+
// InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
|
6759
|
+
// This SourceBuffer has been removed from the parent media source
|
6937
6760
|
}
|
6938
6761
|
return false;
|
6939
6762
|
}
|
6940
6763
|
static bufferInfo(media, pos, maxHoleDuration) {
|
6941
|
-
|
6942
|
-
|
6943
|
-
|
6764
|
+
try {
|
6765
|
+
if (media) {
|
6766
|
+
const vbuffered = BufferHelper.getBuffered(media);
|
6944
6767
|
const buffered = [];
|
6945
|
-
|
6768
|
+
let i;
|
6769
|
+
for (i = 0; i < vbuffered.length; i++) {
|
6946
6770
|
buffered.push({
|
6947
6771
|
start: vbuffered.start(i),
|
6948
6772
|
end: vbuffered.end(i)
|
6949
6773
|
});
|
6950
6774
|
}
|
6951
|
-
return
|
6775
|
+
return this.bufferedInfo(buffered, pos, maxHoleDuration);
|
6952
6776
|
}
|
6777
|
+
} catch (error) {
|
6778
|
+
// this is to catch
|
6779
|
+
// InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
|
6780
|
+
// This SourceBuffer has been removed from the parent media source
|
6953
6781
|
}
|
6954
6782
|
return {
|
6955
6783
|
len: 0,
|
@@ -6961,7 +6789,14 @@ class BufferHelper {
|
|
6961
6789
|
static bufferedInfo(buffered, pos, maxHoleDuration) {
|
6962
6790
|
pos = Math.max(0, pos);
|
6963
6791
|
// sort on buffer.start/smaller end (IE does not always return sorted buffered range)
|
6964
|
-
buffered.sort((a, b)
|
6792
|
+
buffered.sort(function (a, b) {
|
6793
|
+
const diff = a.start - b.start;
|
6794
|
+
if (diff) {
|
6795
|
+
return diff;
|
6796
|
+
} else {
|
6797
|
+
return b.end - a.end;
|
6798
|
+
}
|
6799
|
+
});
|
6965
6800
|
let buffered2 = [];
|
6966
6801
|
if (maxHoleDuration) {
|
6967
6802
|
// there might be some small holes between buffer time range
|
@@ -7028,7 +6863,7 @@ class BufferHelper {
|
|
7028
6863
|
*/
|
7029
6864
|
static getBuffered(media) {
|
7030
6865
|
try {
|
7031
|
-
return media.buffered
|
6866
|
+
return media.buffered;
|
7032
6867
|
} catch (e) {
|
7033
6868
|
logger.log('failed to get media.buffered', e);
|
7034
6869
|
return noopBuffered;
|
@@ -7053,22 +6888,24 @@ class BufferOperationQueue {
      this.executeNext(type);
    }
  }
+ insertAbort(operation, type) {
+   const queue = this.queues[type];
+   queue.unshift(operation);
+   this.executeNext(type);
+ }
  appendBlocker(type) {
-
-
-
-     onStart: () => {},
-     onComplete: () => {},
-     onError: () => {}
-   };
-   this.append(operation, type);
+   let execute;
+   const promise = new Promise(resolve => {
+     execute = resolve;
    });
-
-
-
-
-
-   }
+   const operation = {
+     execute,
+     onStart: () => {},
+     onComplete: () => {},
+     onError: () => {}
+   };
+   this.append(operation, type);
+   return promise;
  }
  executeNext(type) {
    const queue = this.queues[type];
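A compact sketch of the promise-as-blocker pattern that `appendBlocker` returns to in 1.5.9: the queued operation's `execute` is the promise's own `resolve`, so anything queued behind it waits until the blocker reaches the front of the queue. The `TinyQueue` class is illustrative, not the real `BufferOperationQueue`.

```js
// Illustrative serial queue: execute() runs when an operation reaches the front,
// and the caller signals completion via shiftAndExecuteNext().
class TinyQueue {
  constructor() {
    this.ops = [];
  }
  append(op) {
    this.ops.push(op);
    if (this.ops.length === 1) {
      op.execute();
    }
  }
  shiftAndExecuteNext() {
    this.ops.shift();
    if (this.ops.length) {
      this.ops[0].execute();
    }
  }
  // Returns a promise that settles only once everything queued before it has run.
  appendBlocker() {
    let execute;
    const promise = new Promise(resolve => {
      execute = resolve;
    });
    this.append({ execute });
    return promise;
  }
}
```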
@@ -7100,9 +6937,8 @@ class BufferOperationQueue {
    }

  const VIDEO_CODEC_PROFILE_REPLACE = /(avc[1234]|hvc1|hev1|dvh[1e]|vp09|av01)(?:\.[^.,]+)+/;
- class BufferController extends Logger {
-   constructor(hls, fragmentTracker) {
-     super('buffer-controller', hls.logger);
+ class BufferController {
+   constructor(hls) {
      // The level details used to determine duration, target-duration and live
      this.details = null;
      // cache the self generated object url to detect hijack of video tag
@@ -7112,7 +6948,6 @@ class BufferController extends Logger {
|
|
7112
6948
|
// References to event listeners for each SourceBuffer, so that they can be referenced for event removal
|
7113
6949
|
this.listeners = void 0;
|
7114
6950
|
this.hls = void 0;
|
7115
|
-
this.fragmentTracker = void 0;
|
7116
6951
|
// The number of BUFFER_CODEC events received before any sourceBuffers are created
|
7117
6952
|
this.bufferCodecEventsExpected = 0;
|
7118
6953
|
// The total number of BUFFER_CODEC events received
|
@@ -7123,10 +6958,6 @@ class BufferController extends Logger {
|
|
7123
6958
|
this.mediaSource = null;
|
7124
6959
|
// Last MP3 audio chunk appended
|
7125
6960
|
this.lastMpegAudioChunk = null;
|
7126
|
-
// Audio fragment blocked from appending until corresponding video appends or context changes
|
7127
|
-
this.blockedAudioAppend = null;
|
7128
|
-
// Keep track of video append position for unblocking audio
|
7129
|
-
this.lastVideoAppendEnd = 0;
|
7130
6961
|
this.appendSource = void 0;
|
7131
6962
|
// counters
|
7132
6963
|
this.appendErrors = {
|
@@ -7137,6 +6968,9 @@ class BufferController extends Logger {
|
|
7137
6968
|
this.tracks = {};
|
7138
6969
|
this.pendingTracks = {};
|
7139
6970
|
this.sourceBuffer = void 0;
|
6971
|
+
this.log = void 0;
|
6972
|
+
this.warn = void 0;
|
6973
|
+
this.error = void 0;
|
7140
6974
|
this._onEndStreaming = event => {
|
7141
6975
|
if (!this.hls) {
|
7142
6976
|
return;
|
@@ -7158,10 +6992,7 @@ class BufferController extends Logger {
|
|
7158
6992
|
this.log('Media source opened');
|
7159
6993
|
if (media) {
|
7160
6994
|
media.removeEventListener('emptied', this._onMediaEmptied);
|
7161
|
-
|
7162
|
-
if (durationAndRange) {
|
7163
|
-
this.updateMediaSource(durationAndRange);
|
7164
|
-
}
|
6995
|
+
this.updateMediaElementDuration();
|
7165
6996
|
this.hls.trigger(Events.MEDIA_ATTACHED, {
|
7166
6997
|
media,
|
7167
6998
|
mediaSource: mediaSource
|
@@ -7185,12 +7016,15 @@ class BufferController extends Logger {
|
|
7185
7016
|
_objectUrl
|
7186
7017
|
} = this;
|
7187
7018
|
if (mediaSrc !== _objectUrl) {
|
7188
|
-
|
7019
|
+
logger.error(`Media element src was set while attaching MediaSource (${_objectUrl} > ${mediaSrc})`);
|
7189
7020
|
}
|
7190
7021
|
};
|
7191
7022
|
this.hls = hls;
|
7192
|
-
|
7023
|
+
const logPrefix = '[buffer-controller]';
|
7193
7024
|
this.appendSource = isManagedMediaSource(getMediaSource(hls.config.preferManagedMediaSource));
|
7025
|
+
this.log = logger.log.bind(logger, logPrefix);
|
7026
|
+
this.warn = logger.warn.bind(logger, logPrefix);
|
7027
|
+
this.error = logger.error.bind(logger, logPrefix);
|
7194
7028
|
this._initSourceBuffer();
|
7195
7029
|
this.registerListeners();
|
7196
7030
|
}
|
@@ -7202,13 +7036,7 @@ class BufferController extends Logger {
|
|
7202
7036
|
this.details = null;
|
7203
7037
|
this.lastMpegAudioChunk = null;
|
7204
7038
|
// @ts-ignore
|
7205
|
-
this.hls =
|
7206
|
-
// @ts-ignore
|
7207
|
-
this._onMediaSourceOpen = this._onMediaSourceClose = null;
|
7208
|
-
// @ts-ignore
|
7209
|
-
this._onMediaSourceEnded = null;
|
7210
|
-
// @ts-ignore
|
7211
|
-
this._onStartStreaming = this._onEndStreaming = null;
|
7039
|
+
this.hls = null;
|
7212
7040
|
}
|
7213
7041
|
registerListeners() {
|
7214
7042
|
const {
|
@@ -7258,8 +7086,6 @@ class BufferController extends Logger {
|
|
7258
7086
|
audiovideo: 0
|
7259
7087
|
};
|
7260
7088
|
this.lastMpegAudioChunk = null;
|
7261
|
-
this.blockedAudioAppend = null;
|
7262
|
-
this.lastVideoAppendEnd = 0;
|
7263
7089
|
}
|
7264
7090
|
onManifestLoading() {
|
7265
7091
|
this.bufferCodecEventsExpected = this._bufferCodecEventsTotal = 0;
|
@@ -7342,34 +7168,34 @@ class BufferController extends Logger {
|
|
7342
7168
|
mediaSource.removeEventListener('startstreaming', this._onStartStreaming);
|
7343
7169
|
mediaSource.removeEventListener('endstreaming', this._onEndStreaming);
|
7344
7170
|
}
|
7345
|
-
this.mediaSource = null;
|
7346
|
-
this._objectUrl = null;
|
7347
|
-
}
|
7348
7171
|
|
7349
|
-
|
7350
|
-
|
7351
|
-
|
7352
|
-
|
7353
|
-
|
7354
|
-
|
7355
|
-
|
7172
|
+
// Detach properly the MediaSource from the HTMLMediaElement as
|
7173
|
+
// suggested in https://github.com/w3c/media-source/issues/53.
|
7174
|
+
if (media) {
|
7175
|
+
media.removeEventListener('emptied', this._onMediaEmptied);
|
7176
|
+
if (_objectUrl) {
|
7177
|
+
self.URL.revokeObjectURL(_objectUrl);
|
7178
|
+
}
|
7356
7179
|
|
7357
|
-
|
7358
|
-
|
7359
|
-
|
7360
|
-
|
7361
|
-
|
7362
|
-
|
7180
|
+
// clean up video tag src only if it's our own url. some external libraries might
|
7181
|
+
// hijack the video tag and change its 'src' without destroying the Hls instance first
|
7182
|
+
if (this.mediaSrc === _objectUrl) {
|
7183
|
+
media.removeAttribute('src');
|
7184
|
+
if (this.appendSource) {
|
7185
|
+
removeSourceChildren(media);
|
7186
|
+
}
|
7187
|
+
media.load();
|
7188
|
+
} else {
|
7189
|
+
this.warn('media|source.src was changed by a third party - skip cleanup');
|
7363
7190
|
}
|
7364
|
-
media.load();
|
7365
|
-
} else {
|
7366
|
-
this.warn('media|source.src was changed by a third party - skip cleanup');
|
7367
7191
|
}
|
7192
|
+
this.mediaSource = null;
|
7368
7193
|
this.media = null;
|
7194
|
+
this._objectUrl = null;
|
7195
|
+
this.bufferCodecEventsExpected = this._bufferCodecEventsTotal;
|
7196
|
+
this.pendingTracks = {};
|
7197
|
+
this.tracks = {};
|
7369
7198
|
}
|
7370
|
-
this.bufferCodecEventsExpected = this._bufferCodecEventsTotal;
|
7371
|
-
this.pendingTracks = {};
|
7372
|
-
this.tracks = {};
|
7373
7199
|
this.hls.trigger(Events.MEDIA_DETACHED, undefined);
|
7374
7200
|
}
|
7375
7201
|
onBufferReset() {
|
@@ -7377,7 +7203,6 @@ class BufferController extends Logger {
|
|
7377
7203
|
this.resetBuffer(type);
|
7378
7204
|
});
|
7379
7205
|
this._initSourceBuffer();
|
7380
|
-
this.hls.resumeBuffering();
|
7381
7206
|
}
|
7382
7207
|
resetBuffer(type) {
|
7383
7208
|
const sb = this.sourceBuffer[type];
|
@@ -7401,10 +7226,9 @@ class BufferController extends Logger {
|
|
7401
7226
|
const trackNames = Object.keys(data);
|
7402
7227
|
trackNames.forEach(trackName => {
|
7403
7228
|
if (sourceBufferCount) {
|
7404
|
-
var _track$buffer;
|
7405
7229
|
// check if SourceBuffer codec needs to change
|
7406
7230
|
const track = this.tracks[trackName];
|
7407
|
-
if (track && typeof
|
7231
|
+
if (track && typeof track.buffer.changeType === 'function') {
|
7408
7232
|
var _trackCodec;
|
7409
7233
|
const {
|
7410
7234
|
id,
|
@@ -7474,54 +7298,20 @@ class BufferController extends Logger {
|
|
7474
7298
|
};
|
7475
7299
|
operationQueue.append(operation, type, !!this.pendingTracks[type]);
|
7476
7300
|
}
|
7477
|
-
blockAudio(partOrFrag) {
|
7478
|
-
var _this$fragmentTracker;
|
7479
|
-
const pStart = partOrFrag.start;
|
7480
|
-
const pTime = pStart + partOrFrag.duration * 0.05;
|
7481
|
-
const atGap = ((_this$fragmentTracker = this.fragmentTracker.getAppendedFrag(pStart, PlaylistLevelType.MAIN)) == null ? void 0 : _this$fragmentTracker.gap) === true;
|
7482
|
-
if (atGap) {
|
7483
|
-
return;
|
7484
|
-
}
|
7485
|
-
const op = {
|
7486
|
-
execute: () => {
|
7487
|
-
var _this$fragmentTracker2;
|
7488
|
-
if (this.lastVideoAppendEnd > pTime || this.sourceBuffer.video && BufferHelper.isBuffered(this.sourceBuffer.video, pTime) || ((_this$fragmentTracker2 = this.fragmentTracker.getAppendedFrag(pTime, PlaylistLevelType.MAIN)) == null ? void 0 : _this$fragmentTracker2.gap) === true) {
|
7489
|
-
this.blockedAudioAppend = null;
|
7490
|
-
this.operationQueue.shiftAndExecuteNext('audio');
|
7491
|
-
}
|
7492
|
-
},
|
7493
|
-
onStart: () => {},
|
7494
|
-
onComplete: () => {},
|
7495
|
-
onError: () => {}
|
7496
|
-
};
|
7497
|
-
this.blockedAudioAppend = {
|
7498
|
-
op,
|
7499
|
-
frag: partOrFrag
|
7500
|
-
};
|
7501
|
-
this.operationQueue.append(op, 'audio', true);
|
7502
|
-
}
|
7503
|
-
unblockAudio() {
|
7504
|
-
const blockedAudioAppend = this.blockedAudioAppend;
|
7505
|
-
if (blockedAudioAppend) {
|
7506
|
-
this.blockedAudioAppend = null;
|
7507
|
-
this.operationQueue.unblockAudio(blockedAudioAppend.op);
|
7508
|
-
}
|
7509
|
-
}
|
7510
7301
|
onBufferAppending(event, eventData) {
|
7511
7302
|
const {
|
7303
|
+
hls,
|
7512
7304
|
operationQueue,
|
7513
7305
|
tracks
|
7514
7306
|
} = this;
|
7515
7307
|
const {
|
7516
7308
|
data,
|
7517
7309
|
type,
|
7518
|
-
parent,
|
7519
7310
|
frag,
|
7520
7311
|
part,
|
7521
7312
|
chunkMeta
|
7522
7313
|
} = eventData;
|
7523
7314
|
const chunkStats = chunkMeta.buffering[type];
|
7524
|
-
const sn = frag.sn;
|
7525
7315
|
const bufferAppendingStart = self.performance.now();
|
7526
7316
|
chunkStats.start = bufferAppendingStart;
|
7527
7317
|
const fragBuffering = frag.stats.buffering;
|
@@ -7544,36 +7334,7 @@ class BufferController extends Logger {
|
|
7544
7334
|
checkTimestampOffset = !this.lastMpegAudioChunk || chunkMeta.id === 1 || this.lastMpegAudioChunk.sn !== chunkMeta.sn;
|
7545
7335
|
this.lastMpegAudioChunk = chunkMeta;
|
7546
7336
|
}
|
7547
|
-
|
7548
|
-
// Block audio append until overlapping video append
|
7549
|
-
const videoSb = this.sourceBuffer.video;
|
7550
|
-
if (videoSb && sn !== 'initSegment') {
|
7551
|
-
const partOrFrag = part || frag;
|
7552
|
-
const blockedAudioAppend = this.blockedAudioAppend;
|
7553
|
-
if (type === 'audio' && parent !== 'main' && !this.blockedAudioAppend) {
|
7554
|
-
const pStart = partOrFrag.start;
|
7555
|
-
const pTime = pStart + partOrFrag.duration * 0.05;
|
7556
|
-
const vbuffered = videoSb.buffered;
|
7557
|
-
const vappending = this.operationQueue.current('video');
|
7558
|
-
if (!vbuffered.length && !vappending) {
|
7559
|
-
// wait for video before appending audio
|
7560
|
-
this.blockAudio(partOrFrag);
|
7561
|
-
} else if (!vappending && !BufferHelper.isBuffered(videoSb, pTime) && this.lastVideoAppendEnd < pTime) {
|
7562
|
-
// audio is ahead of video
|
7563
|
-
this.blockAudio(partOrFrag);
|
7564
|
-
}
|
7565
|
-
} else if (type === 'video') {
|
7566
|
-
const videoAppendEnd = partOrFrag.end;
|
7567
|
-
if (blockedAudioAppend) {
|
7568
|
-
const audioStart = blockedAudioAppend.frag.start;
|
7569
|
-
if (videoAppendEnd > audioStart || videoAppendEnd < this.lastVideoAppendEnd || BufferHelper.isBuffered(videoSb, audioStart)) {
|
7570
|
-
this.unblockAudio();
|
7571
|
-
}
|
7572
|
-
}
|
7573
|
-
this.lastVideoAppendEnd = videoAppendEnd;
|
7574
|
-
}
|
7575
|
-
}
|
7576
|
-
const fragStart = (part || frag).start;
|
7337
|
+
const fragStart = frag.start;
|
7577
7338
|
const operation = {
|
7578
7339
|
execute: () => {
|
7579
7340
|
chunkStats.executeStart = self.performance.now();
|
@@ -7582,7 +7343,7 @@ class BufferController extends Logger {
|
|
7582
7343
|
if (sb) {
|
7583
7344
|
const delta = fragStart - sb.timestampOffset;
|
7584
7345
|
if (Math.abs(delta) >= 0.1) {
|
7585
|
-
this.log(`Updating audio SourceBuffer timestampOffset to ${fragStart} (delta: ${delta}) sn: ${sn})`);
|
7346
|
+
this.log(`Updating audio SourceBuffer timestampOffset to ${fragStart} (delta: ${delta}) sn: ${frag.sn})`);
|
7586
7347
|
sb.timestampOffset = fragStart;
|
7587
7348
|
}
|
7588
7349
|
}
|
@@ -7649,21 +7410,22 @@ class BufferController extends Logger {
|
|
7649
7410
|
/* with UHD content, we could get loop of quota exceeded error until
|
7650
7411
|
browser is able to evict some data from sourcebuffer. Retrying can help recover.
|
7651
7412
|
*/
|
7652
|
-
this.warn(`Failed ${appendErrorCount}/${
|
7653
|
-
if (appendErrorCount >=
|
7413
|
+
this.warn(`Failed ${appendErrorCount}/${hls.config.appendErrorMaxRetry} times to append segment in "${type}" sourceBuffer`);
|
7414
|
+
if (appendErrorCount >= hls.config.appendErrorMaxRetry) {
|
7654
7415
|
event.fatal = true;
|
7655
7416
|
}
|
7656
7417
|
}
|
7657
|
-
|
7418
|
+
hls.trigger(Events.ERROR, event);
|
7658
7419
|
}
|
7659
7420
|
};
|
7660
7421
|
operationQueue.append(operation, type, !!this.pendingTracks[type]);
|
7661
7422
|
}
|
7662
|
-
|
7663
|
-
|
7664
|
-
|
7665
|
-
|
7666
|
-
|
7423
|
+
onBufferFlushing(event, data) {
|
7424
|
+
const {
|
7425
|
+
operationQueue
|
7426
|
+
} = this;
|
7427
|
+
const flushOperation = type => ({
|
7428
|
+
execute: this.removeExecutor.bind(this, type, data.startOffset, data.endOffset),
|
7667
7429
|
onStart: () => {
|
7668
7430
|
// logger.debug(`[buffer-controller]: Started flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
|
7669
7431
|
},
|
@@ -7676,22 +7438,12 @@ class BufferController extends Logger {
|
|
7676
7438
|
onError: error => {
|
7677
7439
|
this.warn(`Failed to remove from ${type} SourceBuffer`, error);
|
7678
7440
|
}
|
7679
|
-
};
|
7680
|
-
|
7681
|
-
|
7682
|
-
const {
|
7683
|
-
operationQueue
|
7684
|
-
} = this;
|
7685
|
-
const {
|
7686
|
-
type,
|
7687
|
-
startOffset,
|
7688
|
-
endOffset
|
7689
|
-
} = data;
|
7690
|
-
if (type) {
|
7691
|
-
operationQueue.append(this.getFlushOp(type, startOffset, endOffset), type);
|
7441
|
+
});
|
7442
|
+
if (data.type) {
|
7443
|
+
operationQueue.append(flushOperation(data.type), data.type);
|
7692
7444
|
} else {
|
7693
|
-
this.getSourceBufferTypes().forEach(
|
7694
|
-
operationQueue.append(
|
7445
|
+
this.getSourceBufferTypes().forEach(type => {
|
7446
|
+
operationQueue.append(flushOperation(type), type);
|
7695
7447
|
});
|
7696
7448
|
}
|
7697
7449
|
}
|
@@ -7738,9 +7490,6 @@ class BufferController extends Logger {
|
|
7738
7490
|
// on BUFFER_EOS mark matching sourcebuffer(s) as ended and trigger checkEos()
|
7739
7491
|
// an undefined data.type will mark all buffers as EOS.
|
7740
7492
|
onBufferEos(event, data) {
|
7741
|
-
if (data.type === 'video') {
|
7742
|
-
this.unblockAudio();
|
7743
|
-
}
|
7744
7493
|
const ended = this.getSourceBufferTypes().reduce((acc, type) => {
|
7745
7494
|
const sb = this.sourceBuffer[type];
|
7746
7495
|
if (sb && (!data.type || data.type === type)) {
|
@@ -7783,14 +7532,10 @@ class BufferController extends Logger {
      return;
    }
    this.details = details;
-   const durationAndRange = this.getDurationAndRange();
-   if (!durationAndRange) {
-     return;
-   }
    if (this.getSourceBufferTypes().length) {
-     this.blockBuffers(
+     this.blockBuffers(this.updateMediaElementDuration.bind(this));
    } else {
-     this.
+     this.updateMediaElementDuration();
    }
  }
  trimBuffers() {
@@ -7895,9 +7640,9 @@ class BufferController extends Logger {
   * 'liveDurationInfinity` is set to `true`
   * More details: https://github.com/video-dev/hls.js/issues/355
   */
-
+  updateMediaElementDuration() {
    if (!this.details || !this.media || !this.mediaSource || this.mediaSource.readyState !== 'open') {
-     return
+     return;
    }
    const {
      details,
@@ -7911,41 +7656,25 @@ class BufferController extends Logger {
    if (details.live && hls.config.liveDurationInfinity) {
      // Override duration to Infinity
      mediaSource.duration = Infinity;
-
-     if (len && details.live && !!mediaSource.setLiveSeekableRange) {
-       const start = Math.max(0, details.fragments[0].start);
-       const end = Math.max(start, start + details.totalduration);
-       return {
-         duration: Infinity,
-         start,
-         end
-       };
-     }
-     return {
-       duration: Infinity
-     };
+     this.updateSeekableRange(details);
    } else if (levelDuration > msDuration && levelDuration > mediaDuration || !isFiniteNumber(mediaDuration)) {
-
-
-
+     // levelDuration was the last value we set.
+     // not using mediaSource.duration as the browser may tweak this value
+     // only update Media Source duration if its value increase, this is to avoid
+     // flushing already buffered portion when switching between quality level
+     this.log(`Updating Media Source duration to ${levelDuration.toFixed(3)}`);
+     mediaSource.duration = levelDuration;
    }
-   return null;
  }
-
-
-
-
-
-
-
-
-
-   this.log(`Updating Media Source duration to ${duration.toFixed(3)}`);
-   }
-   this.mediaSource.duration = duration;
-   if (start !== undefined && end !== undefined) {
-     this.log(`Media Source duration is set to ${this.mediaSource.duration}. Setting seekable range to ${start}-${end}.`);
-     this.mediaSource.setLiveSeekableRange(start, end);
+  updateSeekableRange(levelDetails) {
+    const mediaSource = this.mediaSource;
+    const fragments = levelDetails.fragments;
+    const len = fragments.length;
+    if (len && levelDetails.live && mediaSource != null && mediaSource.setLiveSeekableRange) {
+      const start = Math.max(0, fragments[0].start);
+      const end = Math.max(start, start + levelDetails.totalduration);
+      this.log(`Media Source duration is set to ${mediaSource.duration}. Setting seekable range to ${start}-${end}.`);
+      mediaSource.setLiveSeekableRange(start, end);
    }
  }
  checkPendingTracks() {
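For reference, the `setLiveSeekableRange` call that `updateSeekableRange` re-introduces is a standard Media Source Extensions API for advertising the seekable window of a live stream whose duration is set to Infinity. A small sketch under assumed inputs (`mediaSource` is an open MediaSource and `levelDetails` just mimics the fields used in the diff):

```js
// Advertise the seekable window of an Infinity-duration live MediaSource.
function updateSeekable(mediaSource, levelDetails) {
  if (!levelDetails.live || !levelDetails.fragments.length || typeof mediaSource.setLiveSeekableRange !== 'function') {
    return;
  }
  const start = Math.max(0, levelDetails.fragments[0].start);
  const end = Math.max(start, start + levelDetails.totalduration);
  mediaSource.setLiveSeekableRange(start, end);
}
```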
@@ -8131,7 +7860,6 @@ class BufferController extends Logger {
|
|
8131
7860
|
}
|
8132
7861
|
return;
|
8133
7862
|
}
|
8134
|
-
sb.ending = false;
|
8135
7863
|
sb.ended = false;
|
8136
7864
|
sb.appendBuffer(data);
|
8137
7865
|
}
|
@@ -8151,14 +7879,10 @@ class BufferController extends Logger {
|
|
8151
7879
|
|
8152
7880
|
// logger.debug(`[buffer-controller]: Blocking ${buffers} SourceBuffer`);
|
8153
7881
|
const blockingOperations = buffers.map(type => operationQueue.appendBlocker(type));
|
8154
|
-
|
8155
|
-
if (audioBlocked) {
|
8156
|
-
this.unblockAudio();
|
8157
|
-
}
|
8158
|
-
Promise.all(blockingOperations).then(result => {
|
7882
|
+
Promise.all(blockingOperations).then(() => {
|
8159
7883
|
// logger.debug(`[buffer-controller]: Blocking operation resolved; unblocking ${buffers} SourceBuffer`);
|
8160
7884
|
onUnblocked();
|
8161
|
-
buffers.forEach(
|
7885
|
+
buffers.forEach(type => {
|
8162
7886
|
const sb = this.sourceBuffer[type];
|
8163
7887
|
// Only cycle the queue if the SB is not updating. There's a bug in Chrome which sets the SB updating flag to
|
8164
7888
|
// true when changing the MediaSource duration (https://bugs.chromium.org/p/chromium/issues/detail?id=959359&can=2&q=mediasource%20duration)
|
@@ -8307,7 +8031,6 @@ class CapLevelController {
|
|
8307
8031
|
}
|
8308
8032
|
onMediaDetaching() {
|
8309
8033
|
this.stopCapping();
|
8310
|
-
this.media = null;
|
8311
8034
|
}
|
8312
8035
|
detectPlayerSize() {
|
8313
8036
|
if (this.media) {
|
@@ -8320,10 +8043,10 @@ class CapLevelController {
|
|
8320
8043
|
const hls = this.hls;
|
8321
8044
|
const maxLevel = this.getMaxLevel(levels.length - 1);
|
8322
8045
|
if (maxLevel !== this.autoLevelCapping) {
|
8323
|
-
|
8046
|
+
logger.log(`Setting autoLevelCapping to ${maxLevel}: ${levels[maxLevel].height}p@${levels[maxLevel].bitrate} for media ${this.mediaWidth}x${this.mediaHeight}`);
|
8324
8047
|
}
|
8325
8048
|
hls.autoLevelCapping = maxLevel;
|
8326
|
-
if (hls.
|
8049
|
+
if (hls.autoLevelCapping > this.autoLevelCapping && this.streamController) {
|
8327
8050
|
// if auto level capping has a higher value for the previous one, flush the buffer using nextLevelSwitch
|
8328
8051
|
// usually happen when the user go to the fullscreen mode.
|
8329
8052
|
this.streamController.nextLevelSwitch();
|
@@ -8459,11 +8182,9 @@ class FPSController {
|
|
8459
8182
|
}
|
8460
8183
|
registerListeners() {
|
8461
8184
|
this.hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
|
8462
|
-
this.hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
|
8463
8185
|
}
|
8464
8186
|
unregisterListeners() {
|
8465
8187
|
this.hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
|
8466
|
-
this.hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
|
8467
8188
|
}
|
8468
8189
|
destroy() {
|
8469
8190
|
if (this.timer) {
|
@@ -8485,9 +8206,6 @@ class FPSController {
|
|
8485
8206
|
this.timer = self.setInterval(this.checkFPSInterval.bind(this), config.fpsDroppedMonitoringPeriod);
|
8486
8207
|
}
|
8487
8208
|
}
|
8488
|
-
onMediaDetaching() {
|
8489
|
-
this.media = null;
|
8490
|
-
}
|
8491
8209
|
checkFPS(video, decodedFrames, droppedFrames) {
|
8492
8210
|
const currentTime = performance.now();
|
8493
8211
|
if (decodedFrames) {
|
@@ -8503,10 +8221,10 @@ class FPSController {
|
|
8503
8221
|
totalDroppedFrames: droppedFrames
|
8504
8222
|
});
|
8505
8223
|
if (droppedFPS > 0) {
|
8506
|
-
//
|
8224
|
+
// logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
|
8507
8225
|
if (currentDropped > hls.config.fpsDroppedMonitoringThreshold * currentDecoded) {
|
8508
8226
|
let currentLevel = hls.currentLevel;
|
8509
|
-
|
8227
|
+
logger.warn('drop FPS ratio greater than max allowed value for currentLevel: ' + currentLevel);
|
8510
8228
|
if (currentLevel > 0 && (hls.autoLevelCapping === -1 || hls.autoLevelCapping >= currentLevel)) {
|
8511
8229
|
currentLevel = currentLevel - 1;
|
8512
8230
|
hls.trigger(Events.FPS_DROP_LEVEL_CAPPING, {
|
@@ -8539,14 +8257,14 @@ class FPSController {
    }

  const PATHWAY_PENALTY_DURATION_MS = 300000;
- class ContentSteeringController extends Logger {
+ class ContentSteeringController {
    constructor(hls) {
-     super('content-steering', hls.logger);
      this.hls = void 0;
+     this.log = void 0;
      this.loader = null;
      this.uri = null;
      this.pathwayId = '.';
-     this._pathwayPriority = null;
+     this.pathwayPriority = null;
      this.timeToLoad = 300;
      this.reloadTimer = -1;
      this.updated = 0;
@@ -8557,6 +8275,7 @@ class ContentSteeringController extends Logger {
    this.subtitleTracks = null;
    this.penalizedPathways = {};
    this.hls = hls;
+   this.log = logger.log.bind(logger, `[content-steering]:`);
    this.registerListeners();
  }
  registerListeners() {
@@ -8576,20 +8295,6 @@ class ContentSteeringController extends Logger {
|
|
8576
8295
|
hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
|
8577
8296
|
hls.off(Events.ERROR, this.onError, this);
|
8578
8297
|
}
|
8579
|
-
pathways() {
|
8580
|
-
return (this.levels || []).reduce((pathways, level) => {
|
8581
|
-
if (pathways.indexOf(level.pathwayId) === -1) {
|
8582
|
-
pathways.push(level.pathwayId);
|
8583
|
-
}
|
8584
|
-
return pathways;
|
8585
|
-
}, []);
|
8586
|
-
}
|
8587
|
-
get pathwayPriority() {
|
8588
|
-
return this._pathwayPriority;
|
8589
|
-
}
|
8590
|
-
set pathwayPriority(pathwayPriority) {
|
8591
|
-
this.updatePathwayPriority(pathwayPriority);
|
8592
|
-
}
|
8593
8298
|
startLoad() {
|
8594
8299
|
this.started = true;
|
8595
8300
|
this.clearTimeout();
|
@@ -8663,7 +8368,7 @@ class ContentSteeringController extends Logger {
|
|
8663
8368
|
} = data;
|
8664
8369
|
if ((errorAction == null ? void 0 : errorAction.action) === NetworkErrorAction.SendAlternateToPenaltyBox && errorAction.flags === ErrorActionFlags.MoveAllAlternatesMatchingHost) {
|
8665
8370
|
const levels = this.levels;
|
8666
|
-
let pathwayPriority = this.
|
8371
|
+
let pathwayPriority = this.pathwayPriority;
|
8667
8372
|
let errorPathway = this.pathwayId;
|
8668
8373
|
if (data.context) {
|
8669
8374
|
const {
|
@@ -8682,14 +8387,19 @@ class ContentSteeringController extends Logger {
      }
      if (!pathwayPriority && levels) {
        // If PATHWAY-PRIORITY was not provided, list pathways for error handling
-       pathwayPriority = this.pathways();
+       pathwayPriority = levels.reduce((pathways, level) => {
+         if (pathways.indexOf(level.pathwayId) === -1) {
+           pathways.push(level.pathwayId);
+         }
+         return pathways;
+       }, []);
      }
      if (pathwayPriority && pathwayPriority.length > 1) {
        this.updatePathwayPriority(pathwayPriority);
        errorAction.resolved = this.pathwayId !== errorPathway;
      }
      if (!errorAction.resolved) {
-
+       logger.warn(`Could not resolve ${data.details} ("${data.error.message}") with content-steering for Pathway: ${errorPathway} levels: ${levels ? levels.length : levels} priorities: ${JSON.stringify(pathwayPriority)} penalized: ${JSON.stringify(this.penalizedPathways)}`);
      }
    }
  }
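The fallback restored above simply derives a priority list from the unique `pathwayId`s of the loaded levels, in order of first appearance. A tiny worked example with made-up data:

```js
const levels = [{ pathwayId: 'cdn-a' }, { pathwayId: 'cdn-b' }, { pathwayId: 'cdn-a' }];
const pathwayPriority = levels.reduce((pathways, level) => {
  if (pathways.indexOf(level.pathwayId) === -1) {
    pathways.push(level.pathwayId);
  }
  return pathways;
}, []);
// pathwayPriority -> ['cdn-a', 'cdn-b']
```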
@@ -8716,7 +8426,7 @@ class ContentSteeringController extends Logger {
|
|
8716
8426
|
return this.levels.filter(level => pathwayId === level.pathwayId);
|
8717
8427
|
}
|
8718
8428
|
updatePathwayPriority(pathwayPriority) {
|
8719
|
-
this.
|
8429
|
+
this.pathwayPriority = pathwayPriority;
|
8720
8430
|
let levels;
|
8721
8431
|
|
8722
8432
|
// Evaluate if we should remove the pathway from the penalized list
|
@@ -8860,7 +8570,7 @@ class ContentSteeringController extends Logger {
|
|
8860
8570
|
onSuccess: (response, stats, context, networkDetails) => {
|
8861
8571
|
this.log(`Loaded steering manifest: "${url}"`);
|
8862
8572
|
const steeringData = response.data;
|
8863
|
-
if (
|
8573
|
+
if (steeringData.VERSION !== 1) {
|
8864
8574
|
this.log(`Steering VERSION ${steeringData.VERSION} not supported!`);
|
8865
8575
|
return;
|
8866
8576
|
}
|
@@ -9768,7 +9478,7 @@ const hlsDefaultConfig = _objectSpread2(_objectSpread2({
|
|
9768
9478
|
});
|
9769
9479
|
function timelineConfig() {
|
9770
9480
|
return {
|
9771
|
-
cueHandler:
|
9481
|
+
cueHandler: Cues,
|
9772
9482
|
// used by timeline-controller
|
9773
9483
|
enableWebVTT: false,
|
9774
9484
|
// used by timeline-controller
|
@@ -9799,7 +9509,7 @@ function timelineConfig() {
|
|
9799
9509
|
/**
|
9800
9510
|
* @ignore
|
9801
9511
|
*/
|
9802
|
-
function mergeConfig(defaultConfig, userConfig
|
9512
|
+
function mergeConfig(defaultConfig, userConfig) {
|
9803
9513
|
if ((userConfig.liveSyncDurationCount || userConfig.liveMaxLatencyDurationCount) && (userConfig.liveSyncDuration || userConfig.liveMaxLatencyDuration)) {
|
9804
9514
|
throw new Error("Illegal hls.js config: don't mix up liveSyncDurationCount/liveMaxLatencyDurationCount and liveSyncDuration/liveMaxLatencyDuration");
|
9805
9515
|
}
|
@@ -9869,7 +9579,7 @@ function deepCpy(obj) {
|
|
9869
9579
|
/**
|
9870
9580
|
* @ignore
|
9871
9581
|
*/
|
9872
|
-
function enableStreamingMode(config
|
9582
|
+
function enableStreamingMode(config) {
|
9873
9583
|
const currentLoader = config.loader;
|
9874
9584
|
if (currentLoader !== FetchLoader && currentLoader !== XhrLoader) {
|
9875
9585
|
// If a developer has configured their own loader, respect that choice
|
@@ -9886,9 +9596,10 @@ function enableStreamingMode(config, logger) {
|
|
9886
9596
|
}
|
9887
9597
|
}
|
9888
9598
|
|
9599
|
+
let chromeOrFirefox;
|
9889
9600
|
class LevelController extends BasePlaylistController {
|
9890
9601
|
constructor(hls, contentSteeringController) {
|
9891
|
-
super(hls, 'level-controller');
|
9602
|
+
super(hls, '[level-controller]');
|
9892
9603
|
this._levels = [];
|
9893
9604
|
this._firstLevel = -1;
|
9894
9605
|
this._maxAutoLevel = -1;
|
@@ -9959,15 +9670,23 @@ class LevelController extends BasePlaylistController {
|
|
9959
9670
|
let videoCodecFound = false;
|
9960
9671
|
let audioCodecFound = false;
|
9961
9672
|
data.levels.forEach(levelParsed => {
|
9962
|
-
var _videoCodec;
|
9673
|
+
var _audioCodec, _videoCodec;
|
9963
9674
|
const attributes = levelParsed.attrs;
|
9675
|
+
|
9676
|
+
// erase audio codec info if browser does not support mp4a.40.34.
|
9677
|
+
// demuxer will autodetect codec and fallback to mpeg/audio
|
9964
9678
|
let {
|
9965
9679
|
audioCodec,
|
9966
9680
|
videoCodec
|
9967
9681
|
} = levelParsed;
|
9682
|
+
if (((_audioCodec = audioCodec) == null ? void 0 : _audioCodec.indexOf('mp4a.40.34')) !== -1) {
|
9683
|
+
chromeOrFirefox || (chromeOrFirefox = /chrome|firefox/i.test(navigator.userAgent));
|
9684
|
+
if (chromeOrFirefox) {
|
9685
|
+
levelParsed.audioCodec = audioCodec = undefined;
|
9686
|
+
}
|
9687
|
+
}
|
9968
9688
|
if (audioCodec) {
|
9969
|
-
|
9970
|
-
levelParsed.audioCodec = audioCodec = getCodecCompatibleName(audioCodec, preferManagedMediaSource) || undefined;
|
9689
|
+
levelParsed.audioCodec = audioCodec = getCodecCompatibleName(audioCodec, preferManagedMediaSource);
|
9971
9690
|
}
|
9972
9691
|
if (((_videoCodec = videoCodec) == null ? void 0 : _videoCodec.indexOf('avc1')) === 0) {
|
9973
9692
|
videoCodec = levelParsed.videoCodec = convertAVC1ToAVCOTI(videoCodec);
|
@@ -10262,25 +9981,6 @@ class LevelController extends BasePlaylistController {
|
|
10262
9981
|
set startLevel(newLevel) {
|
10263
9982
|
this._startLevel = newLevel;
|
10264
9983
|
}
|
10265
|
-
get pathwayPriority() {
|
10266
|
-
if (this.steering) {
|
10267
|
-
return this.steering.pathwayPriority;
|
10268
|
-
}
|
10269
|
-
return null;
|
10270
|
-
}
|
10271
|
-
set pathwayPriority(pathwayPriority) {
|
10272
|
-
if (this.steering) {
|
10273
|
-
const pathwaysList = this.steering.pathways();
|
10274
|
-
const filteredPathwayPriority = pathwayPriority.filter(pathwayId => {
|
10275
|
-
return pathwaysList.indexOf(pathwayId) !== -1;
|
10276
|
-
});
|
10277
|
-
if (pathwayPriority.length < 1) {
|
10278
|
-
this.warn(`pathwayPriority ${pathwayPriority} should contain at least one pathway from list: ${pathwaysList}`);
|
10279
|
-
return;
|
10280
|
-
}
|
10281
|
-
this.steering.pathwayPriority = filteredPathwayPriority;
|
10282
|
-
}
|
10283
|
-
}
|
10284
9984
|
onError(event, data) {
|
10285
9985
|
if (data.fatal || !data.context) {
|
10286
9986
|
return;
|
@@ -10328,12 +10028,7 @@ class LevelController extends BasePlaylistController {
|
|
10328
10028
|
if (curLevel.fragmentError === 0) {
|
10329
10029
|
curLevel.loadError = 0;
|
10330
10030
|
}
|
10331
|
-
|
10332
|
-
let previousDetails = curLevel.details;
|
10333
|
-
if (previousDetails === data.details && previousDetails.advanced) {
|
10334
|
-
previousDetails = undefined;
|
10335
|
-
}
|
10336
|
-
this.playlistLoaded(level, data, previousDetails);
|
10031
|
+
this.playlistLoaded(level, data, curLevel.details);
|
10337
10032
|
} else if ((_data$deliveryDirecti2 = data.deliveryDirectives) != null && _data$deliveryDirecti2.skip) {
|
10338
10033
|
// received a delta playlist update that cannot be merged
|
10339
10034
|
details.deltaUpdateFailed = true;
|
@@ -10511,16 +10206,13 @@ class FragmentTracker {
|
|
10511
10206
|
* If not found any Fragment, return null
|
10512
10207
|
*/
|
10513
10208
|
getBufferedFrag(position, levelType) {
|
10514
|
-
return this.getFragAtPos(position, levelType, true);
|
10515
|
-
}
|
10516
|
-
getFragAtPos(position, levelType, buffered) {
|
10517
10209
|
const {
|
10518
10210
|
fragments
|
10519
10211
|
} = this;
|
10520
10212
|
const keys = Object.keys(fragments);
|
10521
10213
|
for (let i = keys.length; i--;) {
|
10522
10214
|
const fragmentEntity = fragments[keys[i]];
|
10523
|
-
if ((fragmentEntity == null ? void 0 : fragmentEntity.body.type) === levelType &&
|
10215
|
+
if ((fragmentEntity == null ? void 0 : fragmentEntity.body.type) === levelType && fragmentEntity.buffered) {
|
10524
10216
|
const frag = fragmentEntity.body;
|
10525
10217
|
if (frag.start <= position && position <= frag.end) {
|
10526
10218
|
return frag;
|
@@ -10775,8 +10467,7 @@ class FragmentTracker {
|
|
10775
10467
|
const {
|
10776
10468
|
frag,
|
10777
10469
|
part,
|
10778
|
-
timeRanges
|
10779
|
-
type
|
10470
|
+
timeRanges
|
10780
10471
|
} = data;
|
10781
10472
|
if (frag.sn === 'initSegment') {
|
10782
10473
|
return;
|
@@ -10791,8 +10482,10 @@ class FragmentTracker {
|
|
10791
10482
|
}
|
10792
10483
|
// Store the latest timeRanges loaded in the buffer
|
10793
10484
|
this.timeRanges = timeRanges;
|
10794
|
-
|
10795
|
-
|
10485
|
+
Object.keys(timeRanges).forEach(elementaryStream => {
|
10486
|
+
const timeRange = timeRanges[elementaryStream];
|
10487
|
+
this.detectEvictedFragments(elementaryStream, timeRange, playlistType, part);
|
10488
|
+
});
|
10796
10489
|
}
|
10797
10490
|
onFragBuffered(event, data) {
|
10798
10491
|
this.detectPartialFragments(data);
|
@@ -11121,8 +10814,8 @@ function createLoaderContext(frag, part = null) {
|
|
11121
10814
|
var _frag$decryptdata;
|
11122
10815
|
let byteRangeStart = start;
|
11123
10816
|
let byteRangeEnd = end;
|
11124
|
-
if (frag.sn === 'initSegment' &&
|
11125
|
-
// MAP segment encrypted with method 'AES-128'
|
10817
|
+
if (frag.sn === 'initSegment' && ((_frag$decryptdata = frag.decryptdata) == null ? void 0 : _frag$decryptdata.method) === 'AES-128') {
|
10818
|
+
// MAP segment encrypted with method 'AES-128', when served with HTTP Range,
|
11126
10819
|
// has the unencrypted size specified in the range.
|
11127
10820
|
// Ref: https://tools.ietf.org/html/draft-pantos-hls-rfc8216bis-08#section-6.3.6
|
11128
10821
|
const fragmentLen = end - start;
|
@@ -11155,9 +10848,6 @@ function createGapLoadError(frag, part) {
|
|
11155
10848
|
(part ? part : frag).stats.aborted = true;
|
11156
10849
|
return new LoadError(errorData);
|
11157
10850
|
}
|
11158
|
-
function isMethodFullSegmentAesCbc(method) {
|
11159
|
-
return method === 'AES-128' || method === 'AES-256';
|
11160
|
-
}
|
11161
10851
|
class LoadError extends Error {
|
11162
10852
|
constructor(data) {
|
11163
10853
|
super(data.error.message);
|
@@ -11303,8 +10993,6 @@ class KeyLoader {
|
|
11303
10993
|
}
|
11304
10994
|
return this.loadKeyEME(keyInfo, frag);
|
11305
10995
|
case 'AES-128':
|
11306
|
-
case 'AES-256':
|
11307
|
-
case 'AES-256-CTR':
|
11308
10996
|
return this.loadKeyHTTP(keyInfo, frag);
|
11309
10997
|
default:
|
11310
10998
|
return Promise.reject(this.createKeyLoadError(frag, ErrorDetails.KEY_LOAD_ERROR, new Error(`Key supplied with unsupported METHOD: "${decryptdata.method}"`)));
|
@@ -11440,9 +11128,8 @@ class KeyLoader {
|
|
11440
11128
|
* we are limiting the task execution per call stack to exactly one, but scheduling/post-poning further
|
11441
11129
|
* task processing on the next main loop iteration (also known as "next tick" in the Node/JS runtime lingo).
|
11442
11130
|
*/
|
11443
|
-
class TaskLoop
|
11444
|
-
constructor(
|
11445
|
-
super(label, logger);
|
11131
|
+
class TaskLoop {
|
11132
|
+
constructor() {
|
11446
11133
|
this._boundTick = void 0;
|
11447
11134
|
this._tickTimer = null;
|
11448
11135
|
this._tickInterval = null;
|
@@ -11710,61 +11397,33 @@ function alignMediaPlaylistByPDT(details, refDetails) {
  }

  class AESCrypto {
-   constructor(subtle, iv, aesMode) {
+   constructor(subtle, iv) {
      this.subtle = void 0;
      this.aesIV = void 0;
-     this.aesMode = void 0;
      this.subtle = subtle;
      this.aesIV = iv;
-     this.aesMode = aesMode;
    }
    decrypt(data, key) {
-
-
-
-
-       iv: this.aesIV
-       }, key, data);
-       case DecrypterAesMode.ctr:
-         return this.subtle.decrypt({
-           name: 'AES-CTR',
-           counter: this.aesIV,
-           length: 64
-         },
-         //64 : NIST SP800-38A standard suggests that the counter should occupy half of the counter block
-         key, data);
-       default:
-         throw new Error(`[AESCrypto] invalid aes mode ${this.aesMode}`);
-     }
+     return this.subtle.decrypt({
+       name: 'AES-CBC',
+       iv: this.aesIV
+     }, key, data);
    }
  }

  class FastAESKey {
-   constructor(subtle, key, aesMode) {
+   constructor(subtle, key) {
      this.subtle = void 0;
      this.key = void 0;
-     this.aesMode = void 0;
      this.subtle = subtle;
      this.key = key;
-     this.aesMode = aesMode;
    }
    expandKey() {
-     const subtleAlgoName = getSubtleAlgoName(this.aesMode);
      return this.subtle.importKey('raw', this.key, {
-       name: subtleAlgoName
+       name: 'AES-CBC'
      }, false, ['encrypt', 'decrypt']);
    }
  }
- function getSubtleAlgoName(aesMode) {
-   switch (aesMode) {
-     case DecrypterAesMode.cbc:
-       return 'AES-CBC';
-     case DecrypterAesMode.ctr:
-       return 'AES-CTR';
-     default:
-       throw new Error(`[FastAESKey] invalid aes mode ${aesMode}`);
-   }
- }

  // PKCS7
  function removePadding(array) {
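For readers unfamiliar with the SubtleCrypto calls that remain in 1.5.9: importing a raw AES key and decrypting with AES-CBC looks like this in isolation (the argument names are placeholders, not hls.js types):

```js
// Mirror of FastAESKey.expandKey() + AESCrypto.decrypt() using plain WebCrypto.
async function decryptAesCbc(rawKey, iv, encrypted) {
  const cryptoKey = await crypto.subtle.importKey('raw', rawKey, { name: 'AES-CBC' }, false, ['decrypt']);
  return crypto.subtle.decrypt({ name: 'AES-CBC', iv }, cryptoKey, encrypted);
}
// rawKey and iv are 16-byte Uint8Arrays for AES-128; encrypted is an ArrayBuffer.
```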
@@ -12014,8 +11673,7 @@ class Decrypter {
|
|
12014
11673
|
this.currentIV = null;
|
12015
11674
|
this.currentResult = null;
|
12016
11675
|
this.useSoftware = void 0;
|
12017
|
-
this.
|
12018
|
-
this.enableSoftwareAES = config.enableSoftwareAES;
|
11676
|
+
this.useSoftware = config.enableSoftwareAES;
|
12019
11677
|
this.removePKCS7Padding = removePKCS7Padding;
|
12020
11678
|
// built in decryptor expects PKCS7 padding
|
12021
11679
|
if (removePKCS7Padding) {
|
@@ -12066,10 +11724,10 @@ class Decrypter {
|
|
12066
11724
|
this.softwareDecrypter = null;
|
12067
11725
|
}
|
12068
11726
|
}
|
12069
|
-
decrypt(data, key, iv
|
11727
|
+
decrypt(data, key, iv) {
|
12070
11728
|
if (this.useSoftware) {
|
12071
11729
|
return new Promise((resolve, reject) => {
|
12072
|
-
this.softwareDecrypt(new Uint8Array(data), key, iv
|
11730
|
+
this.softwareDecrypt(new Uint8Array(data), key, iv);
|
12073
11731
|
const decryptResult = this.flush();
|
12074
11732
|
if (decryptResult) {
|
12075
11733
|
resolve(decryptResult.buffer);
|
@@ -12078,21 +11736,17 @@ class Decrypter {
|
|
12078
11736
|
}
|
12079
11737
|
});
|
12080
11738
|
}
|
12081
|
-
return this.webCryptoDecrypt(new Uint8Array(data), key, iv
|
11739
|
+
return this.webCryptoDecrypt(new Uint8Array(data), key, iv);
|
12082
11740
|
}
|
12083
11741
|
|
12084
11742
|
// Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
|
12085
11743
|
// data is handled in the flush() call
|
12086
|
-
softwareDecrypt(data, key, iv
|
11744
|
+
softwareDecrypt(data, key, iv) {
|
12087
11745
|
const {
|
12088
11746
|
currentIV,
|
12089
11747
|
currentResult,
|
12090
11748
|
remainderData
|
12091
11749
|
} = this;
|
12092
|
-
if (aesMode !== DecrypterAesMode.cbc || key.byteLength !== 16) {
|
12093
|
-
logger.warn('SoftwareDecrypt: can only handle AES-128-CBC');
|
12094
|
-
return null;
|
12095
|
-
}
|
12096
11750
|
this.logOnce('JS AES decrypt');
|
12097
11751
|
// The output is staggered during progressive parsing - the current result is cached, and emitted on the next call
|
12098
11752
|
// This is done in order to strip PKCS7 padding, which is found at the end of each segment. We only know we've reached
|
@@ -12125,13 +11779,13 @@ class Decrypter {
|
|
12125
11779
|
}
|
12126
11780
|
return result;
|
12127
11781
|
}
|
12128
|
-
webCryptoDecrypt(data, key, iv
|
11782
|
+
webCryptoDecrypt(data, key, iv) {
|
12129
11783
|
if (this.key !== key || !this.fastAesKey) {
|
12130
11784
|
if (!this.subtle) {
|
12131
|
-
return Promise.resolve(this.onWebCryptoError(data, key, iv
|
11785
|
+
return Promise.resolve(this.onWebCryptoError(data, key, iv));
|
12132
11786
|
}
|
12133
11787
|
this.key = key;
|
12134
|
-
this.fastAesKey = new FastAESKey(this.subtle, key
|
11788
|
+
this.fastAesKey = new FastAESKey(this.subtle, key);
|
12135
11789
|
}
|
12136
11790
|
return this.fastAesKey.expandKey().then(aesKey => {
|
12137
11791
|
// decrypt using web crypto
|
@@ -12139,25 +11793,22 @@ class Decrypter {
|
|
12139
11793
|
return Promise.reject(new Error('web crypto not initialized'));
|
12140
11794
|
}
|
12141
11795
|
this.logOnce('WebCrypto AES decrypt');
|
12142
|
-
const crypto = new AESCrypto(this.subtle, new Uint8Array(iv)
|
11796
|
+
const crypto = new AESCrypto(this.subtle, new Uint8Array(iv));
|
12143
11797
|
return crypto.decrypt(data.buffer, aesKey);
|
12144
11798
|
}).catch(err => {
|
12145
11799
|
logger.warn(`[decrypter]: WebCrypto Error, disable WebCrypto API, ${err.name}: ${err.message}`);
|
12146
|
-
return this.onWebCryptoError(data, key, iv
|
11800
|
+
return this.onWebCryptoError(data, key, iv);
|
12147
11801
|
});
|
12148
11802
|
}
|
12149
|
-
onWebCryptoError(data, key, iv
|
12150
|
-
|
12151
|
-
|
12152
|
-
|
12153
|
-
|
12154
|
-
|
12155
|
-
|
12156
|
-
if (decryptResult) {
|
12157
|
-
return decryptResult.buffer;
|
12158
|
-
}
|
11803
|
+
onWebCryptoError(data, key, iv) {
|
11804
|
+
this.useSoftware = true;
|
11805
|
+
this.logEnabled = true;
|
11806
|
+
this.softwareDecrypt(data, key, iv);
|
11807
|
+
const decryptResult = this.flush();
|
11808
|
+
if (decryptResult) {
|
11809
|
+
return decryptResult.buffer;
|
12159
11810
|
}
|
12160
|
-
throw new Error('WebCrypto
|
11811
|
+
throw new Error('WebCrypto and softwareDecrypt: failed to decrypt data');
|
12161
11812
|
}
|
12162
11813
|
getValidChunk(data) {
|
12163
11814
|
let currentChunk = data;
|
@@ -12208,7 +11859,7 @@ const State = {
  };
  class BaseStreamController extends TaskLoop {
    constructor(hls, fragmentTracker, keyLoader, logPrefix, playlistType) {
-     super(logPrefix, hls.logger);
+     super();
      this.hls = void 0;
      this.fragPrevious = null;
      this.fragCurrent = null;
@@ -12233,98 +11884,22 @@ class BaseStreamController extends TaskLoop {
|
|
12233
11884
|
this.startFragRequested = false;
|
12234
11885
|
this.decrypter = void 0;
|
12235
11886
|
this.initPTS = [];
|
12236
|
-
this.
|
- this.
- this.
-
-
- fragCurrent,
- media,
- mediaBuffer,
- state
- } = this;
- const currentTime = media ? media.currentTime : 0;
- const bufferInfo = BufferHelper.bufferInfo(mediaBuffer ? mediaBuffer : media, currentTime, config.maxBufferHole);
- this.log(`media seeking to ${isFiniteNumber(currentTime) ? currentTime.toFixed(3) : currentTime}, state: ${state}`);
- if (this.state === State.ENDED) {
- this.resetLoadingState();
- } else if (fragCurrent) {
- // Seeking while frag load is in progress
- const tolerance = config.maxFragLookUpTolerance;
- const fragStartOffset = fragCurrent.start - tolerance;
- const fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance;
- // if seeking out of buffered range or into new one
- if (!bufferInfo.len || fragEndOffset < bufferInfo.start || fragStartOffset > bufferInfo.end) {
- const pastFragment = currentTime > fragEndOffset;
- // if the seek position is outside the current fragment range
- if (currentTime < fragStartOffset || pastFragment) {
- if (pastFragment && fragCurrent.loader) {
- this.log('seeking outside of buffer while fragment load in progress, cancel fragment load');
- fragCurrent.abortRequests();
- this.resetLoadingState();
- }
- this.fragPrevious = null;
- }
- }
- }
- if (media) {
- // Remove gap fragments
- this.fragmentTracker.removeFragmentsInRange(currentTime, Infinity, this.playlistType, true);
- this.lastCurrentTime = currentTime;
- if (!this.loadingParts) {
- const bufferEnd = Math.max(bufferInfo.end, currentTime);
- const shouldLoadParts = this.shouldLoadParts(this.getLevelDetails(), bufferEnd);
- if (shouldLoadParts) {
- this.log(`LL-Part loading ON after seeking to ${currentTime.toFixed(2)} with buffer @${bufferEnd.toFixed(2)}`);
- this.loadingParts = shouldLoadParts;
- }
- }
- }
-
- // in case seeking occurs although no media buffered, adjust startPosition and nextLoadPosition to seek target
- if (!this.loadedmetadata && !bufferInfo.len) {
- this.nextLoadPosition = this.startPosition = currentTime;
- }
-
- // Async tick to speed up processing
- this.tickImmediate();
- };
- this.onMediaEnded = () => {
- // reset startPosition and lastCurrentTime to restart playback @ stream beginning
- this.startPosition = this.lastCurrentTime = 0;
- if (this.playlistType === PlaylistLevelType.MAIN) {
- this.hls.trigger(Events.MEDIA_ENDED, {
- stalled: false
- });
- }
- };
+ this.onvseeking = null;
+ this.onvended = null;
+ this.logPrefix = '';
+ this.log = void 0;
+ this.warn = void 0;
  this.playlistType = playlistType;
+ this.logPrefix = logPrefix;
+ this.log = logger.log.bind(logger, `${logPrefix}:`);
+ this.warn = logger.warn.bind(logger, `${logPrefix}:`);
  this.hls = hls;
  this.fragmentLoader = new FragmentLoader(hls.config);
  this.keyLoader = keyLoader;
  this.fragmentTracker = fragmentTracker;
  this.config = hls.config;
  this.decrypter = new Decrypter(hls.config);
- }
- registerListeners() {
- const {
- hls
- } = this;
- hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
- hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
- hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  hls.on(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
- hls.on(Events.ERROR, this.onError, this);
- }
- unregisterListeners() {
- const {
- hls
- } = this;
- hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
- hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
- hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
- hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
- hls.off(Events.ERROR, this.onError, this);
  }
  doTick() {
  this.onTickEnd();
@@ -12348,12 +11923,6 @@ class BaseStreamController extends TaskLoop {
  this.clearNextTick();
  this.state = State.STOPPED;
  }
- pauseBuffering() {
- this.buffering = false;
- }
- resumeBuffering() {
- this.buffering = true;
- }
  _streamEnded(bufferInfo, levelDetails) {
  // If playlist is live, there is another buffered range after the current range, nothing buffered, media is detached,
  // of nothing loading/loaded return false
@@ -12384,8 +11953,10 @@ class BaseStreamController extends TaskLoop {
  }
  onMediaAttached(event, data) {
  const media = this.media = this.mediaBuffer = data.media;
-
-
+ this.onvseeking = this.onMediaSeeking.bind(this);
+ this.onvended = this.onMediaEnded.bind(this);
+ media.addEventListener('seeking', this.onvseeking);
+ media.addEventListener('ended', this.onvended);
  const config = this.config;
  if (this.levels && config.autoStartLoad && this.state === State.STOPPED) {
  this.startLoad(config.startPosition);
@@ -12399,9 +11970,10 @@ class BaseStreamController extends TaskLoop {
  }

  // remove video listeners
- if (media) {
- media.removeEventListener('seeking', this.
- media.removeEventListener('ended', this.
+ if (media && this.onvseeking && this.onvended) {
+ media.removeEventListener('seeking', this.onvseeking);
+ media.removeEventListener('ended', this.onvended);
+ this.onvseeking = this.onvended = null;
  }
  if (this.keyLoader) {
  this.keyLoader.detach();
@@ -12411,26 +11983,75 @@ class BaseStreamController extends TaskLoop {
  this.fragmentTracker.removeAllFragments();
  this.stopLoad();
  }
-
-
-
-
-
-
-
- this
-
-
- this.
-
-
-
-
-
-
-
-
+ onMediaSeeking() {
+ const {
+ config,
+ fragCurrent,
+ media,
+ mediaBuffer,
+ state
+ } = this;
+ const currentTime = media ? media.currentTime : 0;
+ const bufferInfo = BufferHelper.bufferInfo(mediaBuffer ? mediaBuffer : media, currentTime, config.maxBufferHole);
+ this.log(`media seeking to ${isFiniteNumber(currentTime) ? currentTime.toFixed(3) : currentTime}, state: ${state}`);
+ if (this.state === State.ENDED) {
+ this.resetLoadingState();
+ } else if (fragCurrent) {
+ // Seeking while frag load is in progress
+ const tolerance = config.maxFragLookUpTolerance;
+ const fragStartOffset = fragCurrent.start - tolerance;
+ const fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance;
+ // if seeking out of buffered range or into new one
+ if (!bufferInfo.len || fragEndOffset < bufferInfo.start || fragStartOffset > bufferInfo.end) {
+ const pastFragment = currentTime > fragEndOffset;
+ // if the seek position is outside the current fragment range
+ if (currentTime < fragStartOffset || pastFragment) {
+ if (pastFragment && fragCurrent.loader) {
+ this.log('seeking outside of buffer while fragment load in progress, cancel fragment load');
+ fragCurrent.abortRequests();
+ this.resetLoadingState();
+ }
+ this.fragPrevious = null;
+ }
+ }
+ }
+ if (media) {
+ // Remove gap fragments
+ this.fragmentTracker.removeFragmentsInRange(currentTime, Infinity, this.playlistType, true);
+ this.lastCurrentTime = currentTime;
+ }
+
+ // in case seeking occurs although no media buffered, adjust startPosition and nextLoadPosition to seek target
+ if (!this.loadedmetadata && !bufferInfo.len) {
+ this.nextLoadPosition = this.startPosition = currentTime;
+ }
+
+ // Async tick to speed up processing
+ this.tickImmediate();
+ }
+ onMediaEnded() {
+ // reset startPosition and lastCurrentTime to restart playback @ stream beginning
+ this.startPosition = this.lastCurrentTime = 0;
+ }
+ onManifestLoaded(event, data) {
+ this.startTimeOffset = data.startTimeOffset;
+ this.initPTS = [];
+ }
+ onHandlerDestroying() {
+ this.hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
+ this.stopLoad();
+ super.onHandlerDestroying();
+ // @ts-ignore
+ this.hls = null;
+ }
+ onHandlerDestroyed() {
+ this.state = State.STOPPED;
+ if (this.fragmentLoader) {
+ this.fragmentLoader.destroy();
+ }
+ if (this.keyLoader) {
+ this.keyLoader.destroy();
+ }
  if (this.decrypter) {
  this.decrypter.destroy();
  }
@@ -12555,10 +12176,10 @@ class BaseStreamController extends TaskLoop {
  const decryptData = frag.decryptdata;

  // check to see if the payload needs to be decrypted
- if (payload && payload.byteLength > 0 && decryptData != null && decryptData.key && decryptData.iv &&
+ if (payload && payload.byteLength > 0 && decryptData != null && decryptData.key && decryptData.iv && decryptData.method === 'AES-128') {
  const startTime = self.performance.now();
  // decrypt init segment data
- return this.decrypter.decrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer
+ return this.decrypter.decrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer).catch(err => {
  hls.trigger(Events.ERROR, {
  type: ErrorTypes.MEDIA_ERROR,
  details: ErrorDetails.FRAG_DECRYPT_ERROR,
@@ -12599,9 +12220,7 @@ class BaseStreamController extends TaskLoop {
  throw new Error('init load aborted, missing levels');
  }
  const stats = data.frag.stats;
-
- this.state = State.IDLE;
- }
+ this.state = State.IDLE;
  data.frag.data = new Uint8Array(data.payload);
  stats.parsing.start = stats.buffering.start = self.performance.now();
  stats.parsing.end = stats.buffering.end = self.performance.now();
@@ -12672,7 +12291,7 @@ class BaseStreamController extends TaskLoop {
  }
  let keyLoadingPromise = null;
  if (frag.encrypted && !((_frag$decryptdata = frag.decryptdata) != null && _frag$decryptdata.key)) {
- this.log(`Loading key for ${frag.sn} of [${details.startSN}-${details.endSN}], ${this.
+ this.log(`Loading key for ${frag.sn} of [${details.startSN}-${details.endSN}], ${this.logPrefix === '[stream-controller]' ? 'level' : 'track'} ${frag.level}`);
  this.state = State.KEY_LOADING;
  this.fragCurrent = frag;
  keyLoadingPromise = this.keyLoader.load(frag).then(keyLoadedData => {
@@ -12693,16 +12312,8 @@ class BaseStreamController extends TaskLoop {
  } else if (!frag.encrypted && details.encryptedFragments.length) {
  this.keyLoader.loadClear(frag, details.encryptedFragments);
  }
- const fragPrevious = this.fragPrevious;
- if (frag.sn !== 'initSegment' && (!fragPrevious || frag.sn !== fragPrevious.sn)) {
- const shouldLoadParts = this.shouldLoadParts(level.details, frag.end);
- if (shouldLoadParts !== this.loadingParts) {
- this.log(`LL-Part loading ${shouldLoadParts ? 'ON' : 'OFF'} loading sn ${fragPrevious == null ? void 0 : fragPrevious.sn}->${frag.sn}`);
- this.loadingParts = shouldLoadParts;
- }
- }
  targetBufferTime = Math.max(frag.start, targetBufferTime || 0);
- if (this.
+ if (this.config.lowLatencyMode && frag.sn !== 'initSegment') {
  const partList = details.partList;
  if (partList && progressCallback) {
  if (targetBufferTime > frag.end && details.fragmentHint) {
@@ -12711,7 +12322,7 @@ class BaseStreamController extends TaskLoop {
  const partIndex = this.getNextPart(partList, frag, targetBufferTime);
  if (partIndex > -1) {
  const part = partList[partIndex];
- this.log(`Loading part sn: ${frag.sn} p: ${part.index} cc: ${frag.cc} of playlist [${details.startSN}-${details.endSN}] parts [0-${partIndex}-${partList.length - 1}] ${this.
+ this.log(`Loading part sn: ${frag.sn} p: ${part.index} cc: ${frag.cc} of playlist [${details.startSN}-${details.endSN}] parts [0-${partIndex}-${partList.length - 1}] ${this.logPrefix === '[stream-controller]' ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
  this.nextLoadPosition = part.start + part.duration;
  this.state = State.FRAG_LOADING;
  let _result;
@@ -12740,14 +12351,7 @@ class BaseStreamController extends TaskLoop {
  }
  }
  }
-
- this.log(`LL-Part loading OFF after next part miss @${targetBufferTime.toFixed(2)}`);
- this.loadingParts = false;
- } else if (!frag.url) {
- // Selected fragment hint for part but not loading parts
- return Promise.resolve(null);
- }
- this.log(`Loading fragment ${frag.sn} cc: ${frag.cc} ${details ? 'of [' + details.startSN + '-' + details.endSN + '] ' : ''}${this.playlistType === PlaylistLevelType.MAIN ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
+ this.log(`Loading fragment ${frag.sn} cc: ${frag.cc} ${details ? 'of [' + details.startSN + '-' + details.endSN + '] ' : ''}${this.logPrefix === '[stream-controller]' ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
  // Don't update nextLoadPosition for fragments which are not buffered
  if (isFiniteNumber(frag.sn) && !this.bitrateTest) {
  this.nextLoadPosition = frag.start + frag.duration;
@@ -12845,36 +12449,8 @@ class BaseStreamController extends TaskLoop {
  if (part) {
  part.stats.parsing.end = now;
  }
- // See if part loading should be disabled/enabled based on buffer and playback position.
- if (frag.sn !== 'initSegment') {
- const levelDetails = this.getLevelDetails();
- const loadingPartsAtEdge = levelDetails && frag.sn > levelDetails.endSN;
- const shouldLoadParts = loadingPartsAtEdge || this.shouldLoadParts(levelDetails, frag.end);
- if (shouldLoadParts !== this.loadingParts) {
- this.log(`LL-Part loading ${shouldLoadParts ? 'ON' : 'OFF'} after parsing segment ending @${frag.end.toFixed(2)}`);
- this.loadingParts = shouldLoadParts;
- }
- }
  this.updateLevelTiming(frag, part, level, chunkMeta.partial);
  }
- shouldLoadParts(details, bufferEnd) {
- if (this.config.lowLatencyMode) {
- if (!details) {
- return this.loadingParts;
- }
- if (details != null && details.partList) {
- var _details$fragmentHint;
- // Buffer must be ahead of first part + duration of parts after last segment
- // and playback must be at or past segment adjacent to part list
- const firstPart = details.partList[0];
- const safePartStart = firstPart.end + (((_details$fragmentHint = details.fragmentHint) == null ? void 0 : _details$fragmentHint.duration) || 0);
- if (bufferEnd >= safePartStart && this.lastCurrentTime > firstPart.start - firstPart.fragment.duration) {
- return true;
- }
- }
- }
- return false;
- }
  getCurrentContext(chunkMeta) {
  const {
  levels,
@@ -12975,7 +12551,7 @@ class BaseStreamController extends TaskLoop {
  // Workaround flaw in getting forward buffer when maxBufferHole is smaller than gap at current pos
  if (bufferInfo.len === 0 && bufferInfo.nextStart !== undefined) {
  const bufferedFragAtPos = this.fragmentTracker.getBufferedFrag(pos, type);
- if (bufferedFragAtPos &&
+ if (bufferedFragAtPos && bufferInfo.nextStart < bufferedFragAtPos.end) {
  return BufferHelper.bufferInfo(bufferable, pos, Math.max(bufferInfo.nextStart, maxBufferHole));
  }
  }
@@ -13024,8 +12600,7 @@ class BaseStreamController extends TaskLoop {
  config
  } = this;
  const start = fragments[0].start;
-
- let frag = null;
+ let frag;
  if (levelDetails.live) {
  const initialLiveManifestSize = config.initialLiveManifestSize;
  if (fragLen < initialLiveManifestSize) {
@@ -13037,10 +12612,6 @@ class BaseStreamController extends TaskLoop {
  // Do not load using live logic if the starting frag is requested - we want to use getFragmentAtPosition() so that
  // we get the fragment matching that start time
  if (!levelDetails.PTSKnown && !this.startFragRequested && this.startPosition === -1 || pos < start) {
- if (canLoadParts && !this.loadingParts) {
- this.log(`LL-Part loading ON for initial live fragment`);
- this.loadingParts = true;
- }
  frag = this.getInitialLiveFragment(levelDetails, fragments);
  this.startPosition = this.nextLoadPosition = frag ? this.hls.liveSyncPosition || frag.start : pos;
  }
@@ -13051,7 +12622,7 @@ class BaseStreamController extends TaskLoop {

  // If we haven't run into any special cases already, just load the fragment most closely matching the requested position
  if (!frag) {
- const end =
+ const end = config.lowLatencyMode ? levelDetails.partEnd : levelDetails.fragmentEnd;
  frag = this.getFragmentAtPosition(pos, end, levelDetails);
  }
  return this.mapToInitFragWhenRequired(frag);
@@ -13173,7 +12744,7 @@ class BaseStreamController extends TaskLoop {
  } = levelDetails;
  const tolerance = config.maxFragLookUpTolerance;
  const partList = levelDetails.partList;
- const loadingParts = !!(
+ const loadingParts = !!(config.lowLatencyMode && partList != null && partList.length && fragmentHint);
  if (loadingParts && fragmentHint && !this.bitrateTest) {
  // Include incomplete fragment with parts at end
  fragments = fragments.concat(fragmentHint);
@@ -13366,7 +12937,7 @@ class BaseStreamController extends TaskLoop {
  errorAction.resolved = true;
  }
  } else {
-
+ logger.warn(`${data.details} reached or exceeded max retry (${retryCount})`);
  return;
  }
  } else if ((errorAction == null ? void 0 : errorAction.action) === NetworkErrorAction.SendAlternateToPenaltyBox) {
@@ -13434,9 +13005,7 @@ class BaseStreamController extends TaskLoop {
  this.log('Reset loading state');
  this.fragCurrent = null;
  this.fragPrevious = null;
-
- this.state = State.IDLE;
- }
+ this.state = State.IDLE;
  }
  resetStartWhenNotLoaded(level) {
  // if loadedmetadata is not set, it means that first frag request failed
@@ -13615,104 +13184,6 @@ function dummyTrack(type = '', inputTimeScale = 90000) {
  };
  }

- /**
- * Returns any adjacent ID3 tags found in data starting at offset, as one block of data
- *
- * @param data - The data to search in
- * @param offset - The offset at which to start searching
- *
- * @returns The block of data containing any ID3 tags found
- * or `undefined` if no header is found at the starting offset
- *
- * @internal
- *
- * @group ID3
- */
- function getId3Data(data, offset) {
- const front = offset;
- let length = 0;
- while (isId3Header(data, offset)) {
- // ID3 header is 10 bytes
- length += 10;
- const size = readId3Size(data, offset + 6);
- length += size;
- if (isId3Footer(data, offset + 10)) {
- // ID3 footer is 10 bytes
- length += 10;
- }
- offset += length;
- }
- if (length > 0) {
- return data.subarray(front, front + length);
- }
- return undefined;
- }
-
- /**
- * Read a 33 bit timestamp from an ID3 frame.
- *
- * @param timeStampFrame - the ID3 frame
- *
- * @returns The timestamp
- *
- * @internal
- *
- * @group ID3
- */
- function readId3Timestamp(timeStampFrame) {
- if (timeStampFrame.data.byteLength === 8) {
- const data = new Uint8Array(timeStampFrame.data);
- // timestamp is 33 bit expressed as a big-endian eight-octet number,
- // with the upper 31 bits set to zero.
- const pts33Bit = data[3] & 0x1;
- let timestamp = (data[4] << 23) + (data[5] << 15) + (data[6] << 7) + data[7];
- timestamp /= 45;
- if (pts33Bit) {
- timestamp += 47721858.84;
- } // 2^32 / 90
- return Math.round(timestamp);
- }
- return undefined;
- }
-
- /**
- * Searches for the Elementary Stream timestamp found in the ID3 data chunk
- *
- * @param data - Block of data containing one or more ID3 tags
- *
- * @returns The timestamp
- *
- * @group ID3
- *
- * @beta
- */
- function getId3Timestamp(data) {
- const frames = getId3Frames(data);
- for (let i = 0; i < frames.length; i++) {
- const frame = frames[i];
- if (isId3TimestampFrame(frame)) {
- return readId3Timestamp(frame);
- }
- }
- return undefined;
- }
-
- /**
- * Checks if the given data contains an ID3 tag.
- *
- * @param data - The data to check
- * @param offset - The offset at which to start checking
- *
- * @returns `true` if an ID3 tag is found
- *
- * @group ID3
- *
- * @beta
- */
- function canParseId3(data, offset) {
- return isId3Header(data, offset) && readId3Size(data, offset + 6) + 10 <= data.length - offset;
- }
-
  class BaseAudioDemuxer {
  constructor() {
  this._audioTrack = void 0;
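The removed ID3 helpers above include `readId3Timestamp`, which unpacks the 33-bit, 90 kHz PES timestamp carried in an 8-byte ID3 timestamp frame (the demuxer treats the result as milliseconds; note the `2^32 / 90` constant). A minimal standalone sketch of that decoding, with an illustrative function name that is not part of the bundle:

```js
// Illustrative sketch mirroring the removed readId3Timestamp logic.
// `frameData` is assumed to be the 8-byte ArrayBuffer body of an ID3 timestamp frame.
function decodeId3Timestamp(frameData) {
  if (frameData.byteLength !== 8) {
    return undefined;
  }
  const data = new Uint8Array(frameData);
  const pts33Bit = data[3] & 0x1; // 33rd (most significant) timestamp bit
  // recombine the packed lower bytes exactly as the removed helper does
  let timestamp = (data[4] << 23) + (data[5] << 15) + (data[6] << 7) + data[7];
  timestamp /= 45;
  if (pts33Bit) {
    timestamp += 47721858.84; // 2^32 / 90, contribution of the 33rd bit
  }
  return Math.round(timestamp);
}
```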
@@ -13754,12 +13225,12 @@ class BaseAudioDemuxer {
  data = appendUint8Array(this.cachedData, data);
  this.cachedData = null;
  }
- let id3Data =
+ let id3Data = getID3Data(data, 0);
  let offset = id3Data ? id3Data.length : 0;
  let lastDataIndex;
  const track = this._audioTrack;
  const id3Track = this._id3Track;
- const timestamp = id3Data ?
+ const timestamp = id3Data ? getTimeStamp(id3Data) : undefined;
  const length = data.length;
  if (this.basePTS === null || this.frameIndex === 0 && isFiniteNumber(timestamp)) {
  this.basePTS = initPTSFn(timestamp, timeOffset, this.initPTS);
@@ -13790,9 +13261,9 @@ class BaseAudioDemuxer {
  } else {
  offset = length;
  }
- } else if (
- // after a canParse, a call to
- id3Data =
+ } else if (canParse$2(data, offset)) {
+ // after a ID3.canParse, a call to ID3.getID3Data *should* always returns some data
+ id3Data = getID3Data(data, offset);
  id3Track.samples.push({
  pts: this.lastPTS,
  dts: this.lastPTS,
@@ -13861,7 +13332,6 @@ const initPTSFn = (timestamp, timeOffset, initPTS) => {
  */
  function getAudioConfig(observer, data, offset, audioCodec) {
  let adtsObjectType;
- let originalAdtsObjectType;
  let adtsExtensionSamplingIndex;
  let adtsChannelConfig;
  let config;
@@ -13869,7 +13339,7 @@ function getAudioConfig(observer, data, offset, audioCodec) {
  const manifestCodec = audioCodec;
  const adtsSamplingRates = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
  // byte 2
- adtsObjectType =
+ adtsObjectType = ((data[offset + 2] & 0xc0) >>> 6) + 1;
  const adtsSamplingIndex = (data[offset + 2] & 0x3c) >>> 2;
  if (adtsSamplingIndex > adtsSamplingRates.length - 1) {
  const error = new Error(`invalid ADTS sampling index:${adtsSamplingIndex}`);
@@ -13886,8 +13356,8 @@ function getAudioConfig(observer, data, offset, audioCodec) {
  // byte 3
  adtsChannelConfig |= (data[offset + 3] & 0xc0) >>> 6;
  logger.log(`manifest codec:${audioCodec}, ADTS type:${adtsObjectType}, samplingIndex:${adtsSamplingIndex}`);
- //
- if (/firefox
+ // firefox: freq less than 24kHz = AAC SBR (HE-AAC)
+ if (/firefox/i.test(userAgent)) {
  if (adtsSamplingIndex >= 6) {
  adtsObjectType = 5;
  config = new Array(4);
@@ -13981,7 +13451,6 @@ function getAudioConfig(observer, data, offset, audioCodec) {
  samplerate: adtsSamplingRates[adtsSamplingIndex],
  channelCount: adtsChannelConfig,
  codec: 'mp4a.40.' + adtsObjectType,
- parsedCodec: 'mp4a.40.' + originalAdtsObjectType,
  manifestCodec
  };
  }
@@ -14036,8 +13505,7 @@ function initTrackConfig(track, observer, data, offset, audioCodec) {
  track.channelCount = config.channelCount;
  track.codec = config.codec;
  track.manifestCodec = config.manifestCodec;
- track.
- logger.log(`parsed codec:${track.parsedCodec}, codec:${track.codec}, rate:${config.samplerate}, channels:${config.channelCount}`);
+ logger.log(`parsed codec:${track.codec}, rate:${config.samplerate}, channels:${config.channelCount}`);
  }
  }
  function getFrameDuration(samplerate) {
@@ -14286,7 +13754,7 @@ class AACDemuxer extends BaseAudioDemuxer {
  // Look for ADTS header | 1111 1111 | 1111 X00X | where X can be either 0 or 1
  // Layer bits (position 14 and 15) in header should be always 0 for ADTS
  // More info https://wiki.multimedia.cx/index.php?title=ADTS
- const id3Data =
+ const id3Data = getID3Data(data, 0);
  let offset = (id3Data == null ? void 0 : id3Data.length) || 0;
  if (probe(data, offset)) {
  return false;
@@ -14479,6 +13947,20 @@ class BaseVideoParser {
  length: 0
  };
  }
+ getLastNalUnit(samples) {
+ var _VideoSample;
+ let VideoSample = this.VideoSample;
+ let lastUnit;
+ // try to fallback to previous sample if current one is empty
+ if (!VideoSample || VideoSample.units.length === 0) {
+ VideoSample = samples[samples.length - 1];
+ }
+ if ((_VideoSample = VideoSample) != null && _VideoSample.units) {
+ const units = VideoSample.units;
+ lastUnit = units[units.length - 1];
+ }
+ return lastUnit;
+ }
  pushAccessUnit(VideoSample, videoTrack) {
  if (VideoSample.units.length && VideoSample.frame) {
  // if sample does not have PTS/DTS, patch with last sample PTS/DTS
@@ -14501,122 +13983,6 @@ class BaseVideoParser {
  logger.log(VideoSample.pts + '/' + VideoSample.dts + ':' + VideoSample.debug);
  }
  }
- parseNALu(track, array, last) {
- const len = array.byteLength;
- let state = track.naluState || 0;
- const lastState = state;
- const units = [];
- let i = 0;
- let value;
- let overflow;
- let unitType;
- let lastUnitStart = -1;
- let lastUnitType = 0;
- // logger.log('PES:' + Hex.hexDump(array));
-
- if (state === -1) {
- // special use case where we found 3 or 4-byte start codes exactly at the end of previous PES packet
- lastUnitStart = 0;
- // NALu type is value read from offset 0
- lastUnitType = this.getNALuType(array, 0);
- state = 0;
- i = 1;
- }
- while (i < len) {
- value = array[i++];
- // optimization. state 0 and 1 are the predominant case. let's handle them outside of the switch/case
- if (!state) {
- state = value ? 0 : 1;
- continue;
- }
- if (state === 1) {
- state = value ? 0 : 2;
- continue;
- }
- // here we have state either equal to 2 or 3
- if (!value) {
- state = 3;
- } else if (value === 1) {
- overflow = i - state - 1;
- if (lastUnitStart >= 0) {
- const unit = {
- data: array.subarray(lastUnitStart, overflow),
- type: lastUnitType
- };
- if (track.lastNalu) {
- units.push(track.lastNalu);
- track.lastNalu = null;
- }
- // logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
- units.push(unit);
- } else {
- // lastUnitStart is undefined => this is the first start code found in this PES packet
- // first check if start code delimiter is overlapping between 2 PES packets,
- // ie it started in last packet (lastState not zero)
- // and ended at the beginning of this PES packet (i <= 4 - lastState)
- const lastUnit = track.lastNalu;
- if (lastUnit) {
- if (lastState && i <= 4 - lastState) {
- // start delimiter overlapping between PES packets
- // strip start delimiter bytes from the end of last NAL unit
- // check if lastUnit had a state different from zero
- if (lastUnit.state) {
- // strip last bytes
- lastUnit.data = lastUnit.data.subarray(0, lastUnit.data.byteLength - lastState);
- }
- }
- // If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
-
- if (overflow > 0) {
- // logger.log('first NALU found with overflow:' + overflow);
- lastUnit.data = appendUint8Array(lastUnit.data, array.subarray(0, overflow));
- lastUnit.state = 0;
- units.push(lastUnit);
- track.lastNalu = null;
- }
- }
- }
- // check if we can read unit type
- if (i < len) {
- unitType = this.getNALuType(array, i);
- // logger.log('find NALU @ offset:' + i + ',type:' + unitType);
- lastUnitStart = i;
- lastUnitType = unitType;
- state = 0;
- } else {
- // not enough byte to read unit type. let's read it on next PES parsing
- state = -1;
- }
- } else {
- state = 0;
- }
- }
- if (lastUnitStart >= 0 && state >= 0) {
- const unit = {
- data: array.subarray(lastUnitStart, len),
- type: lastUnitType,
- state: state
- };
- if (!last) {
- track.lastNalu = unit;
- // logger.log('store NALu to push it on next PES');
- } else {
- units.push(unit);
- // logger.log('pushing NALU, type/size/state:' + unit.type + '/' + unit.data.byteLength + '/' + state);
- }
- } else if (units.length === 0) {
- // no NALu found
- // append pes.data to previous NAL unit
- const lastUnit = track.lastNalu;
- if (lastUnit) {
- lastUnit.data = appendUint8Array(lastUnit.data, array);
- units.push(lastUnit);
- track.lastNalu = null;
- }
- }
- track.naluState = state;
- return units;
- }
  }

  /**
@@ -14759,18 +14125,201 @@ class ExpGolomb {
  readUInt() {
  return this.readBits(32);
  }
- }

-
-
-
-
-
-
-
-
+ /**
+ * Advance the ExpGolomb decoder past a scaling list. The scaling
+ * list is optionally transmitted as part of a sequence parameter
+ * set and is not relevant to transmuxing.
+ * @param count the number of entries in this scaling list
+ * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
+ */
+ skipScalingList(count) {
+ let lastScale = 8;
+ let nextScale = 8;
+ let deltaScale;
+ for (let j = 0; j < count; j++) {
+ if (nextScale !== 0) {
+ deltaScale = this.readEG();
+ nextScale = (lastScale + deltaScale + 256) % 256;
+ }
+ lastScale = nextScale === 0 ? lastScale : nextScale;
+ }
+ }

-
+ /**
+ * Read a sequence parameter set and return some interesting video
+ * properties. A sequence parameter set is the H264 metadata that
+ * describes the properties of upcoming video frames.
+ * @returns an object with configuration parsed from the
+ * sequence parameter set, including the dimensions of the
+ * associated video frames.
+ */
+ readSPS() {
+ let frameCropLeftOffset = 0;
+ let frameCropRightOffset = 0;
+ let frameCropTopOffset = 0;
+ let frameCropBottomOffset = 0;
+ let numRefFramesInPicOrderCntCycle;
+ let scalingListCount;
+ let i;
+ const readUByte = this.readUByte.bind(this);
+ const readBits = this.readBits.bind(this);
+ const readUEG = this.readUEG.bind(this);
+ const readBoolean = this.readBoolean.bind(this);
+ const skipBits = this.skipBits.bind(this);
+ const skipEG = this.skipEG.bind(this);
+ const skipUEG = this.skipUEG.bind(this);
+ const skipScalingList = this.skipScalingList.bind(this);
+ readUByte();
+ const profileIdc = readUByte(); // profile_idc
+ readBits(5); // profileCompat constraint_set[0-4]_flag, u(5)
+ skipBits(3); // reserved_zero_3bits u(3),
+ readUByte(); // level_idc u(8)
+ skipUEG(); // seq_parameter_set_id
+ // some profiles have more optional data we don't need
+ if (profileIdc === 100 || profileIdc === 110 || profileIdc === 122 || profileIdc === 244 || profileIdc === 44 || profileIdc === 83 || profileIdc === 86 || profileIdc === 118 || profileIdc === 128) {
+ const chromaFormatIdc = readUEG();
+ if (chromaFormatIdc === 3) {
+ skipBits(1);
+ } // separate_colour_plane_flag
+
+ skipUEG(); // bit_depth_luma_minus8
+ skipUEG(); // bit_depth_chroma_minus8
+ skipBits(1); // qpprime_y_zero_transform_bypass_flag
+ if (readBoolean()) {
+ // seq_scaling_matrix_present_flag
+ scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
+ for (i = 0; i < scalingListCount; i++) {
+ if (readBoolean()) {
+ // seq_scaling_list_present_flag[ i ]
+ if (i < 6) {
+ skipScalingList(16);
+ } else {
+ skipScalingList(64);
+ }
+ }
+ }
+ }
+ }
+ skipUEG(); // log2_max_frame_num_minus4
+ const picOrderCntType = readUEG();
+ if (picOrderCntType === 0) {
+ readUEG(); // log2_max_pic_order_cnt_lsb_minus4
+ } else if (picOrderCntType === 1) {
+ skipBits(1); // delta_pic_order_always_zero_flag
+ skipEG(); // offset_for_non_ref_pic
+ skipEG(); // offset_for_top_to_bottom_field
+ numRefFramesInPicOrderCntCycle = readUEG();
+ for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
+ skipEG();
+ } // offset_for_ref_frame[ i ]
+ }
+ skipUEG(); // max_num_ref_frames
+ skipBits(1); // gaps_in_frame_num_value_allowed_flag
+ const picWidthInMbsMinus1 = readUEG();
+ const picHeightInMapUnitsMinus1 = readUEG();
+ const frameMbsOnlyFlag = readBits(1);
+ if (frameMbsOnlyFlag === 0) {
+ skipBits(1);
+ } // mb_adaptive_frame_field_flag
+
+ skipBits(1); // direct_8x8_inference_flag
+ if (readBoolean()) {
+ // frame_cropping_flag
+ frameCropLeftOffset = readUEG();
+ frameCropRightOffset = readUEG();
+ frameCropTopOffset = readUEG();
+ frameCropBottomOffset = readUEG();
+ }
+ let pixelRatio = [1, 1];
+ if (readBoolean()) {
+ // vui_parameters_present_flag
+ if (readBoolean()) {
+ // aspect_ratio_info_present_flag
+ const aspectRatioIdc = readUByte();
+ switch (aspectRatioIdc) {
+ case 1:
+ pixelRatio = [1, 1];
+ break;
+ case 2:
+ pixelRatio = [12, 11];
+ break;
+ case 3:
+ pixelRatio = [10, 11];
+ break;
+ case 4:
+ pixelRatio = [16, 11];
+ break;
+ case 5:
+ pixelRatio = [40, 33];
+ break;
+ case 6:
+ pixelRatio = [24, 11];
+ break;
+ case 7:
+ pixelRatio = [20, 11];
+ break;
+ case 8:
+ pixelRatio = [32, 11];
+ break;
+ case 9:
+ pixelRatio = [80, 33];
+ break;
+ case 10:
+ pixelRatio = [18, 11];
+ break;
+ case 11:
+ pixelRatio = [15, 11];
+ break;
+ case 12:
+ pixelRatio = [64, 33];
+ break;
+ case 13:
+ pixelRatio = [160, 99];
+ break;
+ case 14:
+ pixelRatio = [4, 3];
+ break;
+ case 15:
+ pixelRatio = [3, 2];
+ break;
+ case 16:
+ pixelRatio = [2, 1];
+ break;
+ case 255:
+ {
+ pixelRatio = [readUByte() << 8 | readUByte(), readUByte() << 8 | readUByte()];
+ break;
+ }
+ }
+ }
+ }
+ return {
+ width: Math.ceil((picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2),
+ height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - (frameMbsOnlyFlag ? 2 : 4) * (frameCropTopOffset + frameCropBottomOffset),
+ pixelRatio: pixelRatio
+ };
+ }
+ readSliceType() {
+ // skip NALu type
+ this.readUByte();
+ // discard first_mb_in_slice
+ this.readUEG();
+ // return slice_type
+ return this.readUEG();
+ }
+ }
+
+ class AvcVideoParser extends BaseVideoParser {
+ parseAVCPES(track, textTrack, pes, last, duration) {
+ const units = this.parseAVCNALu(track, pes.data);
+ let VideoSample = this.VideoSample;
+ let push;
+ let spsfound = false;
+ // free pes.data to save up some memory
+ pes.data = null;
+
+ // if new NAL units found and last sample still there, let's push ...
  // this helps parsing streams with missing AUD (only do this if AUD never found)
  if (VideoSample && units.length && !track.audFound) {
  this.pushAccessUnit(VideoSample, track);
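For reference, the `readSPS()` method added to `ExpGolomb` above returns the coded picture width, height and pixel aspect ratio of an H.264 sequence parameter set. A hypothetical usage sketch (the wrapper function and the `spsNalu` argument are illustrative, not part of the bundle):

```js
// Illustrative only; `spsNalu` is assumed to be a Uint8Array holding a raw H.264
// SPS NAL unit (start code stripped, beginning at the NAL header byte).
function readDimensionsFromSps(spsNalu) {
  const { width, height, pixelRatio } = new ExpGolomb(spsNalu).readSPS();
  return { width, height, par: `${pixelRatio[0]}:${pixelRatio[1]}` };
}
```

This is the same call pattern the AVC parser below uses when it encounters an SPS unit (`new ExpGolomb(sps)` followed by `readSPS()`).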
@@ -14788,7 +14337,7 @@ class AvcVideoParser extends BaseVideoParser {
|
|
14788
14337
|
// only check slice type to detect KF in case SPS found in same packet (any keyframe is preceded by SPS ...)
|
14789
14338
|
if (spsfound && data.length > 4) {
|
14790
14339
|
// retrieve slice type by parsing beginning of NAL unit (follow H264 spec, slice_header definition) to detect keyframe embedded in NDR
|
14791
|
-
const sliceType =
|
14340
|
+
const sliceType = new ExpGolomb(data).readSliceType();
|
14792
14341
|
// 2 : I slice, 4 : SI slice, 7 : I slice, 9: SI slice
|
14793
14342
|
// SI slice : A slice that is coded using intra prediction only and using quantisation of the prediction samples.
|
14794
14343
|
// An SI slice can be coded such that its decoded samples can be constructed identically to an SP slice.
|
@@ -14842,7 +14391,8 @@ class AvcVideoParser extends BaseVideoParser {
|
|
14842
14391
|
push = true;
|
14843
14392
|
spsfound = true;
|
14844
14393
|
const sps = unit.data;
|
14845
|
-
const
|
14394
|
+
const expGolombDecoder = new ExpGolomb(sps);
|
14395
|
+
const config = expGolombDecoder.readSPS();
|
14846
14396
|
if (!track.sps || track.width !== config.width || track.height !== config.height || ((_track$pixelRatio = track.pixelRatio) == null ? void 0 : _track$pixelRatio[0]) !== config.pixelRatio[0] || ((_track$pixelRatio2 = track.pixelRatio) == null ? void 0 : _track$pixelRatio2[1]) !== config.pixelRatio[1]) {
|
14847
14397
|
track.width = config.width;
|
14848
14398
|
track.height = config.height;
|
@@ -14898,192 +14448,109 @@ class AvcVideoParser extends BaseVideoParser {
|
|
14898
14448
|
this.VideoSample = null;
|
14899
14449
|
}
|
14900
14450
|
}
|
14901
|
-
|
14902
|
-
|
14903
|
-
|
14904
|
-
|
14905
|
-
const
|
14906
|
-
|
14907
|
-
|
14908
|
-
|
14909
|
-
|
14910
|
-
|
14911
|
-
|
14912
|
-
|
14451
|
+
parseAVCNALu(track, array) {
|
14452
|
+
const len = array.byteLength;
|
14453
|
+
let state = track.naluState || 0;
|
14454
|
+
const lastState = state;
|
14455
|
+
const units = [];
|
14456
|
+
let i = 0;
|
14457
|
+
let value;
|
14458
|
+
let overflow;
|
14459
|
+
let unitType;
|
14460
|
+
let lastUnitStart = -1;
|
14461
|
+
let lastUnitType = 0;
|
14462
|
+
// logger.log('PES:' + Hex.hexDump(array));
|
14913
14463
|
|
14914
|
-
|
14915
|
-
|
14916
|
-
|
14917
|
-
|
14918
|
-
|
14919
|
-
|
14920
|
-
|
14921
|
-
let lastScale = 8;
|
14922
|
-
let nextScale = 8;
|
14923
|
-
let deltaScale;
|
14924
|
-
for (let j = 0; j < count; j++) {
|
14925
|
-
if (nextScale !== 0) {
|
14926
|
-
deltaScale = reader.readEG();
|
14927
|
-
nextScale = (lastScale + deltaScale + 256) % 256;
|
14928
|
-
}
|
14929
|
-
lastScale = nextScale === 0 ? lastScale : nextScale;
|
14464
|
+
if (state === -1) {
|
14465
|
+
// special use case where we found 3 or 4-byte start codes exactly at the end of previous PES packet
|
14466
|
+
lastUnitStart = 0;
|
14467
|
+
// NALu type is value read from offset 0
|
14468
|
+
lastUnitType = array[0] & 0x1f;
|
14469
|
+
state = 0;
|
14470
|
+
i = 1;
|
14930
14471
|
}
|
14931
|
-
|
14932
|
-
|
14933
|
-
|
14934
|
-
|
14935
|
-
|
14936
|
-
|
14937
|
-
|
14938
|
-
|
14939
|
-
|
14940
|
-
|
14941
|
-
|
14942
|
-
|
14943
|
-
|
14944
|
-
|
14945
|
-
|
14946
|
-
|
14947
|
-
|
14948
|
-
|
14949
|
-
|
14950
|
-
|
14951
|
-
|
14952
|
-
|
14953
|
-
|
14954
|
-
|
14955
|
-
|
14956
|
-
|
14957
|
-
|
14958
|
-
|
14959
|
-
|
14960
|
-
|
14961
|
-
|
14962
|
-
|
14963
|
-
|
14964
|
-
|
14965
|
-
|
14966
|
-
|
14967
|
-
|
14968
|
-
|
14969
|
-
|
14472
|
+
while (i < len) {
|
14473
|
+
value = array[i++];
|
14474
|
+
// optimization. state 0 and 1 are the predominant case. let's handle them outside of the switch/case
|
14475
|
+
if (!state) {
|
14476
|
+
state = value ? 0 : 1;
|
14477
|
+
continue;
|
14478
|
+
}
|
14479
|
+
if (state === 1) {
|
14480
|
+
state = value ? 0 : 2;
|
14481
|
+
continue;
|
14482
|
+
}
|
14483
|
+
// here we have state either equal to 2 or 3
|
14484
|
+
if (!value) {
|
14485
|
+
state = 3;
|
14486
|
+
} else if (value === 1) {
|
14487
|
+
overflow = i - state - 1;
|
14488
|
+
if (lastUnitStart >= 0) {
|
14489
|
+
const unit = {
|
14490
|
+
data: array.subarray(lastUnitStart, overflow),
|
14491
|
+
type: lastUnitType
|
14492
|
+
};
|
14493
|
+
// logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
|
14494
|
+
units.push(unit);
|
14495
|
+
} else {
|
14496
|
+
// lastUnitStart is undefined => this is the first start code found in this PES packet
|
14497
|
+
// first check if start code delimiter is overlapping between 2 PES packets,
|
14498
|
+
// ie it started in last packet (lastState not zero)
|
14499
|
+
// and ended at the beginning of this PES packet (i <= 4 - lastState)
|
14500
|
+
const lastUnit = this.getLastNalUnit(track.samples);
|
14501
|
+
if (lastUnit) {
|
14502
|
+
if (lastState && i <= 4 - lastState) {
|
14503
|
+
// start delimiter overlapping between PES packets
|
14504
|
+
// strip start delimiter bytes from the end of last NAL unit
|
14505
|
+
// check if lastUnit had a state different from zero
|
14506
|
+
if (lastUnit.state) {
|
14507
|
+
// strip last bytes
|
14508
|
+
lastUnit.data = lastUnit.data.subarray(0, lastUnit.data.byteLength - lastState);
|
14509
|
+
}
|
14510
|
+
}
|
14511
|
+
// If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
|
14970
14512
|
|
14971
|
-
|
14972
|
-
|
14973
|
-
|
14974
|
-
|
14975
|
-
// seq_scaling_matrix_present_flag
|
14976
|
-
scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
|
14977
|
-
for (i = 0; i < scalingListCount; i++) {
|
14978
|
-
if (readBoolean()) {
|
14979
|
-
// seq_scaling_list_present_flag[ i ]
|
14980
|
-
if (i < 6) {
|
14981
|
-
skipScalingList(16, eg);
|
14982
|
-
} else {
|
14983
|
-
skipScalingList(64, eg);
|
14513
|
+
if (overflow > 0) {
|
14514
|
+
// logger.log('first NALU found with overflow:' + overflow);
|
14515
|
+
lastUnit.data = appendUint8Array(lastUnit.data, array.subarray(0, overflow));
|
14516
|
+
lastUnit.state = 0;
|
14984
14517
|
}
|
14985
14518
|
}
|
14986
14519
|
}
|
14520
|
+
// check if we can read unit type
|
14521
|
+
if (i < len) {
|
14522
|
+
unitType = array[i] & 0x1f;
|
14523
|
+
// logger.log('find NALU @ offset:' + i + ',type:' + unitType);
|
14524
|
+
lastUnitStart = i;
|
14525
|
+
lastUnitType = unitType;
|
14526
|
+
state = 0;
|
14527
|
+
} else {
|
14528
|
+
// not enough byte to read unit type. let's read it on next PES parsing
|
14529
|
+
state = -1;
|
14530
|
+
}
|
14531
|
+
} else {
|
14532
|
+
state = 0;
|
14987
14533
|
}
|
14988
14534
|
}
|
14989
|
-
|
14990
|
-
|
14991
|
-
|
14992
|
-
|
14993
|
-
|
14994
|
-
|
14995
|
-
|
14996
|
-
|
14997
|
-
numRefFramesInPicOrderCntCycle = readUEG();
|
14998
|
-
for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
|
14999
|
-
skipEG();
|
15000
|
-
} // offset_for_ref_frame[ i ]
|
15001
|
-
}
|
15002
|
-
skipUEG(); // max_num_ref_frames
|
15003
|
-
skipBits(1); // gaps_in_frame_num_value_allowed_flag
|
15004
|
-
const picWidthInMbsMinus1 = readUEG();
|
15005
|
-
const picHeightInMapUnitsMinus1 = readUEG();
|
15006
|
-
const frameMbsOnlyFlag = readBits(1);
|
15007
|
-
if (frameMbsOnlyFlag === 0) {
|
15008
|
-
skipBits(1);
|
15009
|
-
} // mb_adaptive_frame_field_flag
|
15010
|
-
|
15011
|
-
skipBits(1); // direct_8x8_inference_flag
|
15012
|
-
if (readBoolean()) {
|
15013
|
-
// frame_cropping_flag
|
15014
|
-
frameCropLeftOffset = readUEG();
|
15015
|
-
frameCropRightOffset = readUEG();
|
15016
|
-
frameCropTopOffset = readUEG();
|
15017
|
-
frameCropBottomOffset = readUEG();
|
14535
|
+
if (lastUnitStart >= 0 && state >= 0) {
|
14536
|
+
const unit = {
|
14537
|
+
data: array.subarray(lastUnitStart, len),
|
14538
|
+
type: lastUnitType,
|
14539
|
+
state: state
|
14540
|
+
};
|
14541
|
+
units.push(unit);
|
14542
|
+
// logger.log('pushing NALU, type/size/state:' + unit.type + '/' + unit.data.byteLength + '/' + state);
|
15018
14543
|
}
|
15019
|
-
|
15020
|
-
if (
|
15021
|
-
//
|
15022
|
-
|
15023
|
-
|
15024
|
-
|
15025
|
-
switch (aspectRatioIdc) {
|
15026
|
-
case 1:
|
15027
|
-
pixelRatio = [1, 1];
|
15028
|
-
break;
|
15029
|
-
case 2:
|
15030
|
-
pixelRatio = [12, 11];
|
15031
|
-
break;
|
15032
|
-
case 3:
|
15033
|
-
pixelRatio = [10, 11];
|
15034
|
-
break;
|
15035
|
-
case 4:
|
15036
|
-
pixelRatio = [16, 11];
|
15037
|
-
break;
|
15038
|
-
case 5:
|
15039
|
-
pixelRatio = [40, 33];
|
15040
|
-
break;
|
15041
|
-
case 6:
|
15042
|
-
pixelRatio = [24, 11];
|
15043
|
-
break;
|
15044
|
-
case 7:
|
15045
|
-
pixelRatio = [20, 11];
|
15046
|
-
break;
|
15047
|
-
case 8:
|
15048
|
-
pixelRatio = [32, 11];
|
15049
|
-
break;
|
15050
|
-
case 9:
|
15051
|
-
pixelRatio = [80, 33];
|
15052
|
-
break;
|
15053
|
-
case 10:
|
15054
|
-
pixelRatio = [18, 11];
|
15055
|
-
break;
|
15056
|
-
case 11:
|
15057
|
-
pixelRatio = [15, 11];
|
15058
|
-
break;
|
15059
|
-
case 12:
|
15060
|
-
pixelRatio = [64, 33];
|
15061
|
-
break;
|
15062
|
-
case 13:
|
15063
|
-
pixelRatio = [160, 99];
|
15064
|
-
break;
|
15065
|
-
case 14:
|
15066
|
-
pixelRatio = [4, 3];
|
15067
|
-
break;
|
15068
|
-
case 15:
|
15069
|
-
pixelRatio = [3, 2];
|
15070
|
-
break;
|
15071
|
-
case 16:
|
15072
|
-
pixelRatio = [2, 1];
|
15073
|
-
break;
|
15074
|
-
case 255:
|
15075
|
-
{
|
15076
|
-
pixelRatio = [readUByte() << 8 | readUByte(), readUByte() << 8 | readUByte()];
|
15077
|
-
break;
|
15078
|
-
}
|
15079
|
-
}
|
14544
|
+
// no NALu found
|
14545
|
+
if (units.length === 0) {
|
14546
|
+
// append pes.data to previous NAL unit
|
14547
|
+
const lastUnit = this.getLastNalUnit(track.samples);
|
14548
|
+
if (lastUnit) {
|
14549
|
+
lastUnit.data = appendUint8Array(lastUnit.data, array);
|
15080
14550
|
}
|
15081
14551
|
}
|
15082
|
-
|
15083
|
-
|
15084
|
-
height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - (frameMbsOnlyFlag ? 2 : 4) * (frameCropTopOffset + frameCropBottomOffset),
|
15085
|
-
pixelRatio: pixelRatio
|
15086
|
-
};
|
14552
|
+
track.naluState = state;
|
14553
|
+
return units;
|
15087
14554
|
}
|
15088
14555
|
}
|
15089
14556
|
|
@@ -15101,7 +14568,7 @@ class SampleAesDecrypter {
|
|
15101
14568
|
});
|
15102
14569
|
}
|
15103
14570
|
decryptBuffer(encryptedData) {
|
15104
|
-
return this.decrypter.decrypt(encryptedData, this.keyData.key.buffer, this.keyData.iv.buffer
|
14571
|
+
return this.decrypter.decrypt(encryptedData, this.keyData.key.buffer, this.keyData.iv.buffer);
|
15105
14572
|
}
|
15106
14573
|
|
15107
14574
|
// AAC - encrypt all full 16 bytes blocks starting from offset 16
|
@@ -15215,7 +14682,7 @@ class TSDemuxer {
|
|
15215
14682
|
this.observer = observer;
|
15216
14683
|
this.config = config;
|
15217
14684
|
this.typeSupported = typeSupported;
|
15218
|
-
this.videoParser =
|
14685
|
+
this.videoParser = new AvcVideoParser();
|
15219
14686
|
}
|
15220
14687
|
static probe(data) {
|
15221
14688
|
const syncOffset = TSDemuxer.syncOffset(data);
|
@@ -15380,16 +14847,7 @@ class TSDemuxer {
|
|
15380
14847
|
case videoPid:
|
15381
14848
|
if (stt) {
|
15382
14849
|
if (videoData && (pes = parsePES(videoData))) {
|
15383
|
-
|
15384
|
-
switch (videoTrack.segmentCodec) {
|
15385
|
-
case 'avc':
|
15386
|
-
this.videoParser = new AvcVideoParser();
|
15387
|
-
break;
|
15388
|
-
}
|
15389
|
-
}
|
15390
|
-
if (this.videoParser !== null) {
|
15391
|
-
this.videoParser.parsePES(videoTrack, textTrack, pes, false, this._duration);
|
15392
|
-
}
|
14850
|
+
this.videoParser.parseAVCPES(videoTrack, textTrack, pes, false, this._duration);
|
15393
14851
|
}
|
15394
14852
|
videoData = {
|
15395
14853
|
data: [],
|
@@ -15551,17 +15009,8 @@ class TSDemuxer {
|
|
15551
15009
|
// try to parse last PES packets
|
15552
15010
|
let pes;
|
15553
15011
|
if (videoData && (pes = parsePES(videoData))) {
|
15554
|
-
|
15555
|
-
|
15556
|
-
case 'avc':
|
15557
|
-
this.videoParser = new AvcVideoParser();
|
15558
|
-
break;
|
15559
|
-
}
|
15560
|
-
}
|
15561
|
-
if (this.videoParser !== null) {
|
15562
|
-
this.videoParser.parsePES(videoTrack, textTrack, pes, true, this._duration);
|
15563
|
-
videoTrack.pesData = null;
|
15564
|
-
}
|
15012
|
+
this.videoParser.parseAVCPES(videoTrack, textTrack, pes, true, this._duration);
|
15013
|
+
videoTrack.pesData = null;
|
15565
15014
|
} else {
|
15566
15015
|
// either avcData null or PES truncated, keep it for next frag parsing
|
15567
15016
|
videoTrack.pesData = videoData;
|
@@ -15863,10 +15312,7 @@ function parsePMT(data, offset, typeSupported, isSampleAes) {
|
|
15863
15312
|
case 0x87:
|
15864
15313
|
throw new Error('Unsupported EC-3 in M2TS found');
|
15865
15314
|
case 0x24:
|
15866
|
-
|
15867
|
-
{
|
15868
|
-
throw new Error('Unsupported HEVC in M2TS found');
|
15869
|
-
}
|
15315
|
+
throw new Error('Unsupported HEVC in M2TS found');
|
15870
15316
|
}
|
15871
15317
|
// move to the next table entry
|
15872
15318
|
// skip past the elementary stream descriptors, if present
|
@@ -16009,11 +15455,11 @@ class MP3Demuxer extends BaseAudioDemuxer {
|
|
16009
15455
|
// Look for MPEG header | 1111 1111 | 111X XYZX | where X can be either 0 or 1 and Y or Z should be 1
|
16010
15456
|
// Layer bits (position 14 and 15) in header should be always different from 0 (Layer I or Layer II or Layer III)
|
16011
15457
|
// More info http://www.mp3-tech.org/programmer/frame_header.html
|
16012
|
-
const id3Data =
|
15458
|
+
const id3Data = getID3Data(data, 0);
|
16013
15459
|
let offset = (id3Data == null ? void 0 : id3Data.length) || 0;
|
16014
15460
|
|
16015
15461
|
// Check for ac-3|ec-3 sync bytes and return false if present
|
16016
|
-
if (id3Data && data[offset] === 0x0b && data[offset + 1] === 0x77 &&
|
15462
|
+
if (id3Data && data[offset] === 0x0b && data[offset + 1] === 0x77 && getTimeStamp(id3Data) !== undefined &&
|
16017
15463
|
// check the bsid to confirm ac-3 or ec-3 (not mp3)
|
16018
15464
|
 getAudioBSID(data, offset) <= 16) {
 return false;
@@ -16088,8 +15534,6 @@ class MP4 {
 avc1: [],
 // codingname
 avcC: [],
-hvc1: [],
-hvcC: [],
 btrt: [],
 dinf: [],
 dref: [],
@@ -16514,10 +15958,8 @@ class MP4 {
 return MP4.box(MP4.types.stsd, MP4.STSD, MP4.ac3(track));
 }
 return MP4.box(MP4.types.stsd, MP4.STSD, MP4.mp4a(track));
-} else if (track.segmentCodec === 'avc') {
-return MP4.box(MP4.types.stsd, MP4.STSD, MP4.avc1(track));
 } else {
-return MP4.box(MP4.types.stsd, MP4.STSD, MP4.
+return MP4.box(MP4.types.stsd, MP4.STSD, MP4.avc1(track));
 }
 }
 static tkhd(track) {
@@ -16655,84 +16097,6 @@ class MP4 {
 const result = appendUint8Array(MP4.FTYP, movie);
 return result;
 }
-static hvc1(track) {
-const ps = track.params;
-const units = [track.vps, track.sps, track.pps];
-const NALuLengthSize = 4;
-const config = new Uint8Array([0x01, ps.general_profile_space << 6 | (ps.general_tier_flag ? 32 : 0) | ps.general_profile_idc, ps.general_profile_compatibility_flags[0], ps.general_profile_compatibility_flags[1], ps.general_profile_compatibility_flags[2], ps.general_profile_compatibility_flags[3], ps.general_constraint_indicator_flags[0], ps.general_constraint_indicator_flags[1], ps.general_constraint_indicator_flags[2], ps.general_constraint_indicator_flags[3], ps.general_constraint_indicator_flags[4], ps.general_constraint_indicator_flags[5], ps.general_level_idc, 240 | ps.min_spatial_segmentation_idc >> 8, 255 & ps.min_spatial_segmentation_idc, 252 | ps.parallelismType, 252 | ps.chroma_format_idc, 248 | ps.bit_depth_luma_minus8, 248 | ps.bit_depth_chroma_minus8, 0x00, parseInt(ps.frame_rate.fps), NALuLengthSize - 1 | ps.temporal_id_nested << 2 | ps.num_temporal_layers << 3 | (ps.frame_rate.fixed ? 64 : 0), units.length]);
-
-// compute hvcC size in bytes
-let length = config.length;
-for (let i = 0; i < units.length; i += 1) {
-length += 3;
-for (let j = 0; j < units[i].length; j += 1) {
-length += 2 + units[i][j].length;
-}
-}
-const hvcC = new Uint8Array(length);
-hvcC.set(config, 0);
-length = config.length;
-// append parameter set units: one vps, one or more sps and pps
-const iMax = units.length - 1;
-for (let i = 0; i < units.length; i += 1) {
-hvcC.set(new Uint8Array([32 + i | (i === iMax ? 128 : 0), 0x00, units[i].length]), length);
-length += 3;
-for (let j = 0; j < units[i].length; j += 1) {
-hvcC.set(new Uint8Array([units[i][j].length >> 8, units[i][j].length & 255]), length);
-length += 2;
-hvcC.set(units[i][j], length);
-length += units[i][j].length;
-}
-}
-const hvcc = MP4.box(MP4.types.hvcC, hvcC);
-const width = track.width;
-const height = track.height;
-const hSpacing = track.pixelRatio[0];
-const vSpacing = track.pixelRatio[1];
-return MP4.box(MP4.types.hvc1, new Uint8Array([0x00, 0x00, 0x00,
-// reserved
-0x00, 0x00, 0x00,
-// reserved
-0x00, 0x01,
-// data_reference_index
-0x00, 0x00,
-// pre_defined
-0x00, 0x00,
-// reserved
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-// pre_defined
-width >> 8 & 0xff, width & 0xff,
-// width
-height >> 8 & 0xff, height & 0xff,
-// height
-0x00, 0x48, 0x00, 0x00,
-// horizresolution
-0x00, 0x48, 0x00, 0x00,
-// vertresolution
-0x00, 0x00, 0x00, 0x00,
-// reserved
-0x00, 0x01,
-// frame_count
-0x12, 0x64, 0x61, 0x69, 0x6c,
-// dailymotion/hls.js
-0x79, 0x6d, 0x6f, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x68, 0x6c, 0x73, 0x2e, 0x6a, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-// compressorname
-0x00, 0x18,
-// depth = 24
-0x11, 0x11]),
-// pre_defined = -1
-hvcc, MP4.box(MP4.types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80,
-// bufferSizeDB
-0x00, 0x2d, 0xc6, 0xc0,
-// maxBitrate
-0x00, 0x2d, 0xc6, 0xc0])),
-// avgBitrate
-MP4.box(MP4.types.pasp, new Uint8Array([hSpacing >> 24,
-// hSpacing
-hSpacing >> 16 & 0xff, hSpacing >> 8 & 0xff, hSpacing & 0xff, vSpacing >> 24,
-// vSpacing
-vSpacing >> 16 & 0xff, vSpacing >> 8 & 0xff, vSpacing & 0xff])));
-}
 }
 MP4.types = void 0;
 MP4.HDLR_TYPES = void 0;
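The hunks above drop the HEVC (hvc1/hvcC) sample-entry generators from the light build, so the video `stsd` box always falls through to the AVC entry. A minimal sketch of the simplified selection, assuming `MP4` stands in for the bundled generator class and `track` for a demuxed video track (illustrative, not the exported API):

```js
// Sketch only: in 1.5.9 there is no 'hevc' branch left in the light build,
// so every video track gets an avc1 sample entry.
function videoStsd(MP4, track) {
  return MP4.box(MP4.types.stsd, MP4.STSD, MP4.avc1(track));
}
```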
@@ -17108,9 +16472,9 @@ class MP4Remuxer {
 const foundOverlap = delta < -1;
 if (foundHole || foundOverlap) {
 if (foundHole) {
-logger.warn(
+logger.warn(`AVC: ${toMsFromMpegTsClock(delta, true)} ms (${delta}dts) hole between fragments detected at ${timeOffset.toFixed(3)}`);
 } else {
-logger.warn(
+logger.warn(`AVC: ${toMsFromMpegTsClock(-delta, true)} ms (${delta}dts) overlapping between fragments detected at ${timeOffset.toFixed(3)}`);
 }
 if (!foundOverlap || nextAvcDts >= inputSamples[0].pts || chromeVersion) {
 firstDTS = nextAvcDts;
@@ -17119,24 +16483,12 @@ class MP4Remuxer {
 inputSamples[0].dts = firstDTS;
 inputSamples[0].pts = firstPTS;
 } else {
-let isPTSOrderRetained = true;
 for (let i = 0; i < inputSamples.length; i++) {
-if (inputSamples[i].dts > firstPTS
+if (inputSamples[i].dts > firstPTS) {
 break;
 }
-const prevPTS = inputSamples[i].pts;
 inputSamples[i].dts -= delta;
 inputSamples[i].pts -= delta;
-
-// check to see if this sample's PTS order has changed
-// relative to the next one
-if (i < inputSamples.length - 1) {
-const nextSamplePTS = inputSamples[i + 1].pts;
-const currentSamplePTS = inputSamples[i].pts;
-const currentOrder = nextSamplePTS <= currentSamplePTS;
-const prevOrder = nextSamplePTS <= prevPTS;
-isPTSOrderRetained = currentOrder == prevOrder;
-}
 }
 }
 logger.log(`Video: Initial PTS/DTS adjusted: ${toMsFromMpegTsClock(firstPTS, true)}/${toMsFromMpegTsClock(firstDTS, true)}, delta: ${toMsFromMpegTsClock(delta, true)} ms`);
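The restored hole/overlap warnings and the adjusted-PTS/DTS log both convert 90 kHz MPEG-TS timestamps to milliseconds. A hedged helper that approximates the bundled `toMsFromMpegTsClock` used in these messages (shown for context only, not the exported function):

```js
// MPEG-TS timestamps tick at 90 kHz, so dividing by 90 yields milliseconds;
// the second argument rounds the result, as the log output does.
function toMsFromMpegTsClock(value, round = false) {
  const ms = value / 90;
  return round ? Math.round(ms) : ms;
}

// e.g. a 9000-tick hole is reported as a 100 ms gap
console.log(toMsFromMpegTsClock(9000, true)); // 100
```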
@@ -17284,7 +16636,7 @@ class MP4Remuxer {
 }
 }
 }
-// next AVC
+// next AVC sample DTS should be equal to last sample DTS + last sample duration (in PES timescale)
 mp4SampleDuration = stretchedLastFrame || !mp4SampleDuration ? averageSampleDuration : mp4SampleDuration;
 this.nextAvcDts = nextAvcDts = lastDTS + mp4SampleDuration;
 this.videoSampleDuration = mp4SampleDuration;
@@ -17417,7 +16769,7 @@ class MP4Remuxer {
 logger.warn(`[mp4-remuxer]: Injecting ${missing} audio frame @ ${(nextPts / inputTimeScale).toFixed(3)}s due to ${Math.round(1000 * delta / inputTimeScale)} ms gap.`);
 for (let j = 0; j < missing; j++) {
 const newStamp = Math.max(nextPts, 0);
-let fillFrame = AAC.getSilentFrame(track.
+let fillFrame = AAC.getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
 if (!fillFrame) {
 logger.log('[mp4-remuxer]: Unable to get silent frame for given audio codec; duplicating last frame instead.');
 fillFrame = sample.unit.subarray();
@@ -17545,7 +16897,7 @@ class MP4Remuxer {
 // samples count of this segment's duration
 const nbSamples = Math.ceil((endDTS - startDTS) / frameDuration);
 // silent frame
-const silentFrame = AAC.getSilentFrame(track.
+const silentFrame = AAC.getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
 logger.warn('[mp4-remuxer]: remux empty Audio');
 // Can't remux if we can't generate a silent frame...
 if (!silentFrame) {
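Both silent-frame call sites now resolve the codec the same way: the codec signalled in the manifest wins, and the parsed track codec is only a fallback. A tiny hedged sketch of that argument selection, with `track` standing in for the remuxer's audio track object:

```js
// Sketch of the arguments now passed to AAC.getSilentFrame at both call sites.
function silentFrameArgs(track) {
  return [track.manifestCodec || track.codec, track.channelCount];
}
```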
@@ -17936,15 +17288,13 @@ class Transmuxer {
 initSegmentData
 } = transmuxConfig;
 const keyData = getEncryptionType(uintData, decryptdata);
-if (keyData &&
+if (keyData && keyData.method === 'AES-128') {
 const decrypter = this.getDecrypter();
-const aesMode = getAesModeFromFullSegmentMethod(keyData.method);
-
 // Software decryption is synchronous; webCrypto is not
 if (decrypter.isSync()) {
 // Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
 // data is handled in the flush() call
-let decryptedData = decrypter.softwareDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer
+let decryptedData = decrypter.softwareDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer);
 // For Low-Latency HLS Parts, decrypt in place, since part parsing is expected on push progress
 const loadingParts = chunkMeta.part > -1;
 if (loadingParts) {
@@ -17956,7 +17306,7 @@ class Transmuxer {
 }
 uintData = new Uint8Array(decryptedData);
 } else {
-this.decryptionPromise = decrypter.webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer
+this.decryptionPromise = decrypter.webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer).then(decryptedData => {
 // Calling push here is important; if flush() is called while this is still resolving, this ensures that
 // the decrypted data has been transmuxed
 const result = this.push(decryptedData, null, chunkMeta);
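In the stable build, full-segment decryption is gated on `keyData.method === 'AES-128'` and the decrypter is called without the canary build's extra AES-mode argument. A condensed, hedged sketch of the branch as it reads after this change; `decrypter`, `keyData`, `uintData` and `onDecrypted` stand in for the bundle's local state:

```js
// Only the control flow visible in the diff is kept here; this is not the
// exported Transmuxer.push implementation.
function decryptFullSegment(decrypter, keyData, uintData, onDecrypted) {
  if (keyData && keyData.method === 'AES-128') {
    if (decrypter.isSync()) {
      // Progressive software decryption; may return null until flush()
      const decryptedData = decrypter.softwareDecrypt(
        uintData,
        keyData.key.buffer,
        keyData.iv.buffer,
      );
      return decryptedData ? onDecrypted(new Uint8Array(decryptedData)) : null;
    }
    // WebCrypto path resolves asynchronously
    return decrypter
      .webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer)
      .then((decryptedData) => onDecrypted(new Uint8Array(decryptedData)));
  }
  return onDecrypted(uintData);
}
```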
@@ -18610,7 +17960,14 @@ class TransmuxerInterface {
 this.observer = new EventEmitter();
 this.observer.on(Events.FRAG_DECRYPTED, forwardMessage);
 this.observer.on(Events.ERROR, forwardMessage);
-const
+const MediaSource = getMediaSource(config.preferManagedMediaSource) || {
+isTypeSupported: () => false
+};
+const m2tsTypeSupported = {
+mpeg: MediaSource.isTypeSupported('audio/mpeg'),
+mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
+ac3: false
+};
 
 // navigator.vendor is not always available in Web Worker
 // refer to https://developer.mozilla.org/en-US/docs/Web/API/WorkerGlobalScope/navigator
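The constructor now probes MPEG/MP3 audio passthrough support itself via `getMediaSource(config.preferManagedMediaSource)`, falling back to a stub when no MediaSource implementation exists. A standalone, hedged sketch of that feature detection; the `self.*` lookups approximate what the bundled `getMediaSource` helper does and are an assumption rather than the exported implementation:

```js
function probeM2tsTypeSupport(preferManagedMediaSource) {
  // Prefer ManagedMediaSource when asked for, otherwise fall back through the
  // usual MediaSource globals; a stub keeps isTypeSupported callable anywhere.
  const MediaSource =
    (preferManagedMediaSource && self.ManagedMediaSource) ||
    self.MediaSource ||
    self.WebKitMediaSource || { isTypeSupported: () => false };
  return {
    mpeg: MediaSource.isTypeSupported('audio/mpeg'),
    mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
    ac3: false,
  };
}
```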
@@ -18875,9 +18232,8 @@ const STALL_MINIMUM_DURATION_MS = 250;
 const MAX_START_GAP_JUMP = 2.0;
 const SKIP_BUFFER_HOLE_STEP_SECONDS = 0.1;
 const SKIP_BUFFER_RANGE_START = 0.05;
-class GapController
+class GapController {
 constructor(config, media, fragmentTracker, hls) {
-super('gap-controller', hls.logger);
 this.config = void 0;
 this.media = null;
 this.fragmentTracker = void 0;
@@ -18887,7 +18243,6 @@ class GapController extends Logger {
 this.stalled = null;
 this.moved = false;
 this.seeking = false;
-this.ended = 0;
 this.config = config;
 this.media = media;
 this.fragmentTracker = fragmentTracker;
@@ -18905,7 +18260,7 @@ class GapController extends Logger {
 *
 * @param lastCurrentTime - Previously read playhead position
 */
-poll(lastCurrentTime, activeFrag
+poll(lastCurrentTime, activeFrag) {
 const {
 config,
 media,
@@ -18924,7 +18279,6 @@ class GapController extends Logger {
 
 // The playhead is moving, no-op
 if (currentTime !== lastCurrentTime) {
-this.ended = 0;
 this.moved = true;
 if (!seeking) {
 this.nudgeRetry = 0;
@@ -18933,7 +18287,7 @@ class GapController extends Logger {
 // The playhead is now moving, but was previously stalled
 if (this.stallReported) {
 const _stalledDuration = self.performance.now() - stalled;
-
+logger.warn(`playback not stuck anymore @${currentTime}, after ${Math.round(_stalledDuration)}ms`);
 this.stallReported = false;
 }
 this.stalled = null;
@@ -18969,6 +18323,7 @@ class GapController extends Logger {
 // Skip start gaps if we haven't played, but the last poll detected the start of a stall
 // The addition poll gives the browser a chance to jump the gap for us
 if (!this.moved && this.stalled !== null) {
+var _level$details;
 // There is no playable buffer (seeked, waiting for buffer)
 const isBuffered = bufferInfo.len > 0;
 if (!isBuffered && !nextStart) {
@@ -18980,8 +18335,9 @@ class GapController extends Logger {
 // When joining a live stream with audio tracks, account for live playlist window sliding by allowing
 // a larger jump over start gaps caused by the audio-stream-controller buffering a start fragment
 // that begins over 1 target duration after the video start position.
-const
-const
+const level = this.hls.levels ? this.hls.levels[this.hls.currentLevel] : null;
+const isLive = level == null ? void 0 : (_level$details = level.details) == null ? void 0 : _level$details.live;
+const maxStartGapJump = isLive ? level.details.targetduration * 2 : MAX_START_GAP_JUMP;
 const partialOrGap = this.fragmentTracker.getPartialFragment(currentTime);
 if (startJump > 0 && (startJump <= maxStartGapJump || partialOrGap)) {
 if (!media.paused) {
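The start-gap logic restored here widens the allowed jump on live playlists to twice the target duration, while VOD keeps the fixed 2-second ceiling. A hedged sketch of that limit, with `level` standing in for `hls.levels[hls.currentLevel]`:

```js
const MAX_START_GAP_JUMP = 2.0;

// Live playlists may slide while the start fragment buffers, so allow a jump
// of two target durations; otherwise keep the 2-second ceiling.
function startGapJumpLimit(level) {
  const isLive = !!(level && level.details && level.details.live);
  return isLive ? level.details.targetduration * 2 : MAX_START_GAP_JUMP;
}
```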
@@ -18999,17 +18355,6 @@ class GapController extends Logger {
 }
 const stalledDuration = tnow - stalled;
 if (!seeking && stalledDuration >= STALL_MINIMUM_DURATION_MS) {
-// Dispatch MEDIA_ENDED when media.ended/ended event is not signalled at end of stream
-if (state === State.ENDED && !(levelDetails != null && levelDetails.live) && Math.abs(currentTime - ((levelDetails == null ? void 0 : levelDetails.edge) || 0)) < 1) {
-if (stalledDuration < 1000 || this.ended) {
-return;
-}
-this.ended = currentTime;
-this.hls.trigger(Events.MEDIA_ENDED, {
-stalled: true
-});
-return;
-}
 // Report stalling after trying to fix
 this._reportStall(bufferInfo);
 if (!this.media) {
@@ -19053,7 +18398,7 @@ class GapController extends Logger {
 // needs to cross some sort of threshold covering all source-buffers content
 // to start playing properly.
 if ((bufferInfo.len > config.maxBufferHole || bufferInfo.nextStart && bufferInfo.nextStart - currentTime < config.maxBufferHole) && stalledDurationMs > config.highBufferWatchdogPeriod * 1000) {
-
+logger.warn('Trying to nudge playhead over buffer-hole');
 // Try to nudge currentTime over a buffer hole if we've been stalling for the configured amount of seconds
 // We only try to jump the hole if it's under the configured size
 // Reset stalled so to rearm watchdog timer
@@ -19077,7 +18422,7 @@ class GapController extends Logger {
 // Report stalled error once
 this.stallReported = true;
 const error = new Error(`Playback stalling at @${media.currentTime} due to low buffer (${JSON.stringify(bufferInfo)})`);
-
+logger.warn(error.message);
 hls.trigger(Events.ERROR, {
 type: ErrorTypes.MEDIA_ERROR,
 details: ErrorDetails.BUFFER_STALLED_ERROR,
@@ -19145,7 +18490,7 @@ class GapController extends Logger {
 }
 }
 const targetTime = Math.max(startTime + SKIP_BUFFER_RANGE_START, currentTime + SKIP_BUFFER_HOLE_STEP_SECONDS);
-
+logger.warn(`skipping hole, adjusting currentTime from ${currentTime} to ${targetTime}`);
 this.moved = true;
 this.stalled = null;
 media.currentTime = targetTime;
@@ -19186,7 +18531,7 @@ class GapController extends Logger {
 const targetTime = currentTime + (nudgeRetry + 1) * config.nudgeOffset;
 // playback stalled in buffered area ... let's nudge currentTime to try to overcome this
 const error = new Error(`Nudging 'currentTime' from ${currentTime} to ${targetTime}`);
-
+logger.warn(error.message);
 media.currentTime = targetTime;
 hls.trigger(Events.ERROR, {
 type: ErrorTypes.MEDIA_ERROR,
@@ -19196,7 +18541,7 @@ class GapController extends Logger {
 });
 } else {
 const error = new Error(`Playhead still not moving while enough data buffered @${currentTime} after ${config.nudgeMaxRetry} nudges`);
-
+logger.error(error.message);
 hls.trigger(Events.ERROR, {
 type: ErrorTypes.MEDIA_ERROR,
 details: ErrorDetails.BUFFER_STALLED_ERROR,
@@ -19211,7 +18556,7 @@ const TICK_INTERVAL = 100; // how often to tick in ms
 
 class StreamController extends BaseStreamController {
 constructor(hls, fragmentTracker, keyLoader) {
-super(hls, fragmentTracker, keyLoader, 'stream-controller', PlaylistLevelType.MAIN);
+super(hls, fragmentTracker, keyLoader, '[stream-controller]', PlaylistLevelType.MAIN);
 this.audioCodecSwap = false;
 this.gapController = null;
 this.level = -1;
@@ -19219,43 +18564,27 @@ class StreamController extends BaseStreamController {
 this.altAudio = false;
 this.audioOnly = false;
 this.fragPlaying = null;
+this.onvplaying = null;
+this.onvseeked = null;
 this.fragLastKbps = 0;
 this.couldBacktrack = false;
 this.backtrackFragment = null;
 this.audioCodecSwitch = false;
 this.videoBuffer = null;
-this.
-// tick to speed up FRAG_CHANGED triggering
-this.tick();
-};
-this.onMediaSeeked = () => {
-const media = this.media;
-const currentTime = media ? media.currentTime : null;
-if (isFiniteNumber(currentTime)) {
-this.log(`Media seeked to ${currentTime.toFixed(3)}`);
-}
-
-// If seeked was issued before buffer was appended do not tick immediately
-const bufferInfo = this.getMainFwdBufferInfo();
-if (bufferInfo === null || bufferInfo.len === 0) {
-this.warn(`Main forward buffer length on "seeked" event ${bufferInfo ? bufferInfo.len : 'empty'})`);
-return;
-}
-
-// tick to speed up FRAG_CHANGED triggering
-this.tick();
-};
-this.registerListeners();
+this._registerListeners();
 }
-
-super.registerListeners();
+_registerListeners() {
 const {
 hls
 } = this;
+hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
+hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
+hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
 hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
 hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this);
 hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
 hls.on(Events.FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
+hls.on(Events.ERROR, this.onError, this);
 hls.on(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
 hls.on(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
 hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this);
@@ -19263,14 +18592,17 @@ class StreamController extends BaseStreamController {
 hls.on(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
 hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
 }
-
-super.unregisterListeners();
+_unregisterListeners() {
 const {
 hls
 } = this;
+hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
+hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
+hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
 hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
 hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
 hls.off(Events.FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
+hls.off(Events.ERROR, this.onError, this);
 hls.off(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
 hls.off(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
 hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this);
@@ -19279,9 +18611,7 @@ class StreamController extends BaseStreamController {
 hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
 }
 onHandlerDestroying() {
-
-this.onMediaPlaying = this.onMediaSeeked = null;
-this.unregisterListeners();
+this._unregisterListeners();
 super.onHandlerDestroying();
 }
 startLoad(startPosition) {
@@ -19379,9 +18709,6 @@ class StreamController extends BaseStreamController {
 this.checkFragmentChanged();
 }
 doTickIdle() {
-if (!this.buffering) {
-return;
-}
 const {
 hls,
 levelLastLoaded,
@@ -19609,19 +18936,22 @@ class StreamController extends BaseStreamController {
 onMediaAttached(event, data) {
 super.onMediaAttached(event, data);
 const media = data.media;
-
-
+this.onvplaying = this.onMediaPlaying.bind(this);
+this.onvseeked = this.onMediaSeeked.bind(this);
+media.addEventListener('playing', this.onvplaying);
+media.addEventListener('seeked', this.onvseeked);
 this.gapController = new GapController(this.config, media, this.fragmentTracker, this.hls);
 }
 onMediaDetaching() {
 const {
 media
 } = this;
-if (media) {
-media.removeEventListener('playing', this.
-media.removeEventListener('seeked', this.
+if (media && this.onvplaying && this.onvseeked) {
+media.removeEventListener('playing', this.onvplaying);
+media.removeEventListener('seeked', this.onvseeked);
+this.onvplaying = this.onvseeked = null;
+this.videoBuffer = null;
 }
-this.videoBuffer = null;
 this.fragPlaying = null;
 if (this.gapController) {
 this.gapController.destroy();
@@ -19629,6 +18959,27 @@ class StreamController extends BaseStreamController {
 }
 super.onMediaDetaching();
 }
+onMediaPlaying() {
+// tick to speed up FRAG_CHANGED triggering
+this.tick();
+}
+onMediaSeeked() {
+const media = this.media;
+const currentTime = media ? media.currentTime : null;
+if (isFiniteNumber(currentTime)) {
+this.log(`Media seeked to ${currentTime.toFixed(3)}`);
+}
+
+// If seeked was issued before buffer was appended do not tick immediately
+const bufferInfo = this.getMainFwdBufferInfo();
+if (bufferInfo === null || bufferInfo.len === 0) {
+this.warn(`Main forward buffer length on "seeked" event ${bufferInfo ? bufferInfo.len : 'empty'})`);
+return;
+}
+
+// tick to speed up FRAG_CHANGED triggering
+this.tick();
+}
 onManifestLoading() {
 // reset buffer on manifest loading
 this.log('Trigger BUFFER_RESET');
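The stream controller goes back to binding its `playing`/`seeked` handlers on media attach and removing the exact same references on detach, instead of the canary's arrow-function fields. A hedged sketch of that wiring pattern; class and method names are illustrative, not the bundle's:

```js
// Bound handlers are kept on the instance so removeEventListener receives the
// same function references that were registered.
class MediaEventWiring {
  attach(media) {
    this.onvplaying = this.onMediaPlaying.bind(this);
    this.onvseeked = this.onMediaSeeked.bind(this);
    media.addEventListener('playing', this.onvplaying);
    media.addEventListener('seeked', this.onvseeked);
  }
  detach(media) {
    if (media && this.onvplaying && this.onvseeked) {
      media.removeEventListener('playing', this.onvplaying);
      media.removeEventListener('seeked', this.onvseeked);
      this.onvplaying = this.onvseeked = null;
    }
  }
  onMediaPlaying() {/* tick to speed up FRAG_CHANGED triggering */}
  onMediaSeeked() {/* tick once a forward buffer exists */}
}
```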
@@ -19920,10 +19271,8 @@ class StreamController extends BaseStreamController {
 }
 if (this.loadedmetadata || !BufferHelper.getBuffered(media).length) {
 // Resolve gaps using the main buffer, whose ranges are the intersections of the A/V sourcebuffers
-const
-
-const levelDetails = this.getLevelDetails();
-gapController.poll(this.lastCurrentTime, activeFrag, levelDetails, state);
+const activeFrag = this.state !== State.IDLE ? this.fragCurrent : null;
+gapController.poll(this.lastCurrentTime, activeFrag);
 }
 this.lastCurrentTime = media.currentTime;
 }
@@ -20256,17 +19605,6 @@ class StreamController extends BaseStreamController {
 getMainFwdBufferInfo() {
 return this.getFwdBufferInfo(this.mediaBuffer ? this.mediaBuffer : this.media, PlaylistLevelType.MAIN);
 }
-get maxBufferLength() {
-const {
-levels,
-level
-} = this;
-const levelInfo = levels == null ? void 0 : levels[level];
-if (!levelInfo) {
-return this.config.maxBufferLength;
-}
-return this.getMaxBufferLength(levelInfo.maxBitrate);
-}
 backtrack(frag) {
 this.couldBacktrack = true;
 // Causes findFragments to backtrack through fragments to find the keyframe
@@ -20372,7 +19710,7 @@ class Hls {
 * Get the video-dev/hls.js package version.
 */
 static get version() {
-return "1.5.9
+return "1.5.9";
 }
 
 /**
@@ -20435,12 +19773,9 @@ class Hls {
 * The configuration object provided on player instantiation.
 */
 this.userConfig = void 0;
-/**
-* The logger functions used by this player instance, configured on player instantiation.
-*/
-this.logger = void 0;
 this.coreComponents = void 0;
 this.networkControllers = void 0;
+this.started = false;
 this._emitter = new EventEmitter();
 this._autoLevelCapping = -1;
 this._maxHdcpLevel = null;
@@ -20457,11 +19792,11 @@ class Hls {
 this._media = null;
 this.url = null;
 this.triggeringException = void 0;
-
-const config = this.config = mergeConfig(Hls.DefaultConfig, userConfig
+enableLogs(userConfig.debug || false, 'Hls instance');
+const config = this.config = mergeConfig(Hls.DefaultConfig, userConfig);
 this.userConfig = userConfig;
 if (config.progressive) {
-enableStreamingMode(config
+enableStreamingMode(config);
 }
 
 // core controllers and network loaders
@@ -20474,17 +19809,17 @@ class Hls {
 } = config;
 const errorController = new ConfigErrorController(this);
 const abrController = this.abrController = new ConfigAbrController(this);
-
-const fragmentTracker = new FragmentTracker(this);
-const bufferController = this.bufferController = new ConfigBufferController(this, fragmentTracker);
+const bufferController = this.bufferController = new ConfigBufferController(this);
 const capLevelController = this.capLevelController = new ConfigCapLevelController(this);
 const fpsController = new ConfigFpsController(this);
 const playListLoader = new PlaylistLoader(this);
 const id3TrackController = new ID3TrackController(this);
 const ConfigContentSteeringController = config.contentSteeringController;
-//
+// ConentSteeringController is defined before LevelController to receive Multivariant Playlist events first
 const contentSteering = ConfigContentSteeringController ? new ConfigContentSteeringController(this) : null;
 const levelController = this.levelController = new LevelController(this, contentSteering);
+// FragmentTracker must be defined before StreamController because the order of event handling is important
+const fragmentTracker = new FragmentTracker(this);
 const keyLoader = new KeyLoader(this.config);
 const streamController = this.streamController = new StreamController(this, fragmentTracker, keyLoader);
 
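With this hunk, logging returns to the module-level logger configured once per instance through `enableLogs(userConfig.debug || false, 'Hls instance')`, replacing the canary's per-instance `this.logger` field. From the embedding application the switch looks the same as before: pass `debug` in the config. A small usage sketch:

```js
import Hls from 'hls.js';

// `debug: true` routes hls.js logs to the console; a console-like object can
// be passed instead to capture them.
const hls = new Hls({ debug: true });
console.log(Hls.version); // "1.5.9"
```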
@@ -20560,7 +19895,7 @@ class Hls {
 try {
 return this.emit(event, event, eventObject);
 } catch (error) {
-
+logger.error('An internal error happened while handling event ' + event + '. Error message: "' + error.message + '". Here is a stacktrace:', error);
 // Prevent recursion in error event handlers that throw #5497
 if (!this.triggeringException) {
 this.triggeringException = true;
@@ -20586,7 +19921,7 @@ class Hls {
 * Dispose of the instance
 */
 destroy() {
-
+logger.log('destroy');
 this.trigger(Events.DESTROYING, undefined);
 this.detachMedia();
 this.removeAllListeners();
@@ -20607,7 +19942,7 @@ class Hls {
 * Attaches Hls.js to a media element
 */
 attachMedia(media) {
-
+logger.log('attachMedia');
 this._media = media;
 this.trigger(Events.MEDIA_ATTACHING, {
 media: media
@@ -20618,7 +19953,7 @@ class Hls {
 * Detach Hls.js from the media
 */
 detachMedia() {
-
+logger.log('detachMedia');
 this.trigger(Events.MEDIA_DETACHING, undefined);
 this._media = null;
 }
@@ -20635,7 +19970,7 @@ class Hls {
 });
 this._autoLevelCapping = -1;
 this._maxHdcpLevel = null;
-
+logger.log(`loadSource:${loadingSource}`);
 if (media && loadedSource && (loadedSource !== loadingSource || this.bufferController.hasSourceTypes())) {
 this.detachMedia();
 this.attachMedia(media);
@@ -20654,7 +19989,8 @@ class Hls {
 * Defaults to -1 (None: starts from earliest point)
 */
 startLoad(startPosition = -1) {
-
+logger.log(`startLoad(${startPosition})`);
+this.started = true;
 this.networkControllers.forEach(controller => {
 controller.startLoad(startPosition);
 });
@@ -20664,31 +20000,34 @@ class Hls {
 * Stop loading of any stream data.
 */
 stopLoad() {
-
+logger.log('stopLoad');
+this.started = false;
 this.networkControllers.forEach(controller => {
 controller.stopLoad();
 });
 }
 
 /**
-* Resumes stream controller segment loading
+* Resumes stream controller segment loading if previously started.
 */
 resumeBuffering() {
-this.
-
-controller
-
-
+if (this.started) {
+this.networkControllers.forEach(controller => {
+if ('fragmentLoader' in controller) {
+controller.startLoad(-1);
+}
+});
+}
 }
 
 /**
-*
+* Stops stream controller segment loading without changing 'started' state like stopLoad().
 * This allows for media buffering to be paused without interupting playlist loading.
 */
 pauseBuffering() {
 this.networkControllers.forEach(controller => {
-if (controller
-controller.
+if ('fragmentLoader' in controller) {
+controller.stopLoad();
 }
 });
 }
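After this hunk, `pauseBuffering()` and `resumeBuffering()` act only on network controllers that expose a `fragmentLoader`, and `resumeBuffering()` restarts loading only when the new `started` flag has been set by `startLoad()`. A usage sketch; the stream URL and element id are placeholders:

```js
import Hls from 'hls.js';

// Pause segment loading (e.g. while another player has focus) without
// stopping playlist refreshes, then resume it later.
if (Hls.isSupported()) {
  const hls = new Hls();
  hls.loadSource('https://example.com/stream/master.m3u8');
  hls.attachMedia(document.getElementById('video'));

  hls.pauseBuffering();  // stops fragment-loading controllers only
  hls.resumeBuffering(); // restarts them once startLoad() has run
                         // (automatic with the default autoStartLoad config)
}
```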
@@ -20697,7 +20036,7 @@ class Hls {
 * Swap through possible audio codecs in the stream (for example to switch from stereo to 5.1)
 */
 swapAudioCodec() {
-
+logger.log('swapAudioCodec');
 this.streamController.swapAudioCodec();
 }
 
@@ -20708,7 +20047,7 @@ class Hls {
 * Automatic recovery of media-errors by this process is configurable.
 */
 recoverMediaError() {
-
+logger.log('recoverMediaError');
 const media = this._media;
 this.detachMedia();
 if (media) {
@@ -20738,7 +20077,7 @@ class Hls {
 * Set quality level index immediately. This will flush the current buffer to replace the quality asap. That means playback will interrupt at least shortly to re-buffer and re-sync eventually. Set to -1 for automatic level selection.
 */
 set currentLevel(newLevel) {
-
+logger.log(`set currentLevel:${newLevel}`);
 this.levelController.manualLevel = newLevel;
 this.streamController.immediateLevelSwitch();
 }
@@ -20757,7 +20096,7 @@ class Hls {
 * @param newLevel - Pass -1 for automatic level selection
 */
 set nextLevel(newLevel) {
-
+logger.log(`set nextLevel:${newLevel}`);
 this.levelController.manualLevel = newLevel;
 this.streamController.nextLevelSwitch();
 }
@@ -20776,7 +20115,7 @@ class Hls {
 * @param newLevel - Pass -1 for automatic level selection
 */
 set loadLevel(newLevel) {
-
+logger.log(`set loadLevel:${newLevel}`);
 this.levelController.manualLevel = newLevel;
 }
 
@@ -20807,7 +20146,7 @@ class Hls {
 * Sets "first-level", see getter.
 */
 set firstLevel(newLevel) {
-
+logger.log(`set firstLevel:${newLevel}`);
 this.levelController.firstLevel = newLevel;
 }
 
@@ -20832,7 +20171,7 @@ class Hls {
 * (determined from download of first segment)
 */
 set startLevel(newLevel) {
-
+logger.log(`set startLevel:${newLevel}`);
 // if not in automatic start level detection, ensure startLevel is greater than minAutoLevel
 if (newLevel !== -1) {
 newLevel = Math.max(newLevel, this.minAutoLevel);
@@ -20907,7 +20246,7 @@ class Hls {
 */
 set autoLevelCapping(newLevel) {
 if (this._autoLevelCapping !== newLevel) {
-
+logger.log(`set autoLevelCapping:${newLevel}`);
 this._autoLevelCapping = newLevel;
 this.levelController.checkMaxAutoUpdated();
 }
@@ -21012,9 +20351,6 @@ class Hls {
 get mainForwardBufferInfo() {
 return this.streamController.getMainFwdBufferInfo();
 }
-get maxBufferLength() {
-return this.streamController.maxBufferLength;
-}
 
 /**
 * Find and select the best matching audio track, making a level switch when a Group change is necessary.
@@ -21182,22 +20518,12 @@ class Hls {
 get forceStartLoad() {
 return this.streamController.forceStartLoad;
 }
-
-/**
-* ContentSteering pathwayPriority getter/setter
-*/
-get pathwayPriority() {
-return this.levelController.pathwayPriority;
-}
-set pathwayPriority(pathwayPriority) {
-this.levelController.pathwayPriority = pathwayPriority;
-}
 }
 Hls.defaultConfig = void 0;
 
-var KeySystemFormats =
-var KeySystems =
-var SubtitleStreamController =
-var TimelineController =
-export { AbrController, AttrList,
+var KeySystemFormats = empty.KeySystemFormats;
+var KeySystems = empty.KeySystems;
+var SubtitleStreamController = empty.SubtitleStreamController;
+var TimelineController = empty.TimelineController;
+export { AbrController, AttrList, Cues as AudioStreamController, Cues as AudioTrackController, BasePlaylistController, BaseSegment, BaseStreamController, BufferController, Cues as CMCDController, CapLevelController, ChunkMetadata, ContentSteeringController, DateRange, Cues as EMEController, ErrorActionFlags, ErrorController, ErrorDetails, ErrorTypes, Events, FPSController, Fragment, Hls, HlsSkip, HlsUrlParameters, KeySystemFormats, KeySystems, Level, LevelDetails, LevelKey, LoadStats, MetadataSchema, NetworkErrorAction, Part, PlaylistLevelType, SubtitleStreamController, Cues as SubtitleTrackController, TimelineController, Hls as default, getMediaSource, isMSESupported, isSupported };
 //# sourceMappingURL=hls.light.mjs.map