hls.js 1.5.11-0.canary.10334 → 1.5.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -4
- package/dist/hls-demo.js +38 -41
- package/dist/hls-demo.js.map +1 -1
- package/dist/hls.js +2196 -3475
- package/dist/hls.js.d.ts +85 -108
- package/dist/hls.js.map +1 -1
- package/dist/hls.light.js +3138 -3785
- package/dist/hls.light.js.map +1 -1
- package/dist/hls.light.min.js +1 -1
- package/dist/hls.light.min.js.map +1 -1
- package/dist/hls.light.mjs +1257 -1929
- package/dist/hls.light.mjs.map +1 -1
- package/dist/hls.min.js +1 -1
- package/dist/hls.min.js.map +1 -1
- package/dist/hls.mjs +4185 -5487
- package/dist/hls.mjs.map +1 -1
- package/dist/hls.worker.js +1 -1
- package/dist/hls.worker.js.map +1 -1
- package/package.json +36 -36
- package/src/config.ts +2 -3
- package/src/controller/abr-controller.ts +20 -24
- package/src/controller/audio-stream-controller.ts +74 -68
- package/src/controller/audio-track-controller.ts +1 -1
- package/src/controller/base-playlist-controller.ts +10 -27
- package/src/controller/base-stream-controller.ts +38 -160
- package/src/controller/buffer-controller.ts +92 -230
- package/src/controller/buffer-operation-queue.ts +19 -16
- package/src/controller/cap-level-controller.ts +2 -3
- package/src/controller/cmcd-controller.ts +14 -51
- package/src/controller/content-steering-controller.ts +15 -29
- package/src/controller/eme-controller.ts +23 -10
- package/src/controller/error-controller.ts +13 -7
- package/src/controller/fps-controller.ts +3 -8
- package/src/controller/fragment-tracker.ts +11 -15
- package/src/controller/gap-controller.ts +16 -43
- package/src/controller/id3-track-controller.ts +7 -7
- package/src/controller/latency-controller.ts +11 -9
- package/src/controller/level-controller.ts +19 -37
- package/src/controller/stream-controller.ts +32 -37
- package/src/controller/subtitle-stream-controller.ts +43 -28
- package/src/controller/subtitle-track-controller.ts +3 -5
- package/src/controller/timeline-controller.ts +21 -19
- package/src/crypt/aes-crypto.ts +2 -21
- package/src/crypt/decrypter.ts +16 -32
- package/src/crypt/fast-aes-key.ts +5 -28
- package/src/demux/audio/aacdemuxer.ts +2 -2
- package/src/demux/audio/ac3-demuxer.ts +3 -4
- package/src/demux/audio/adts.ts +4 -9
- package/src/demux/audio/base-audio-demuxer.ts +14 -16
- package/src/demux/audio/mp3demuxer.ts +3 -4
- package/src/demux/audio/mpegaudio.ts +1 -1
- package/src/demux/id3.ts +411 -0
- package/src/demux/mp4demuxer.ts +7 -7
- package/src/demux/sample-aes.ts +0 -2
- package/src/demux/transmuxer-interface.ts +12 -4
- package/src/demux/transmuxer-worker.ts +4 -4
- package/src/demux/transmuxer.ts +3 -16
- package/src/demux/tsdemuxer.ts +38 -75
- package/src/demux/video/avc-video-parser.ts +119 -208
- package/src/demux/video/base-video-parser.ts +18 -147
- package/src/demux/video/exp-golomb.ts +208 -0
- package/src/events.ts +1 -8
- package/src/exports-named.ts +1 -1
- package/src/hls.ts +38 -61
- package/src/loader/fragment-loader.ts +3 -10
- package/src/loader/key-loader.ts +1 -3
- package/src/loader/level-key.ts +9 -10
- package/src/loader/playlist-loader.ts +5 -4
- package/src/remux/mp4-generator.ts +1 -196
- package/src/remux/mp4-remuxer.ts +8 -24
- package/src/task-loop.ts +2 -5
- package/src/types/component-api.ts +1 -3
- package/src/types/demuxer.ts +0 -4
- package/src/types/events.ts +0 -4
- package/src/types/remuxer.ts +1 -1
- package/src/utils/buffer-helper.ts +31 -12
- package/src/utils/cea-608-parser.ts +3 -1
- package/src/utils/codecs.ts +5 -34
- package/src/utils/fetch-loader.ts +1 -1
- package/src/utils/imsc1-ttml-parser.ts +1 -1
- package/src/utils/keysystem-util.ts +6 -1
- package/src/utils/logger.ts +23 -58
- package/src/utils/mp4-tools.ts +3 -5
- package/src/utils/webvtt-parser.ts +1 -1
- package/src/crypt/decrypter-aes-mode.ts +0 -4
- package/src/demux/video/hevc-video-parser.ts +0 -749
- package/src/utils/encryption-methods-util.ts +0 -21
- package/src/utils/utf8-utils.ts +0 -18
package/dist/hls.light.mjs
CHANGED
@@ -176,23 +176,6 @@ var urlToolkit = {exports: {}};
 
 var urlToolkitExports = urlToolkit.exports;
 
-function _defineProperty(e, r, t) {
-return (r = _toPropertyKey(r)) in e ? Object.defineProperty(e, r, {
-value: t,
-enumerable: !0,
-configurable: !0,
-writable: !0
-}) : e[r] = t, e;
-}
-function _extends() {
-return _extends = Object.assign ? Object.assign.bind() : function (n) {
-for (var e = 1; e < arguments.length; e++) {
-var t = arguments[e];
-for (var r in t) ({}).hasOwnProperty.call(t, r) && (n[r] = t[r]);
-}
-return n;
-}, _extends.apply(null, arguments);
-}
 function ownKeys(e, r) {
 var t = Object.keys(e);
 if (Object.getOwnPropertySymbols) {
@@ -226,7 +209,35 @@ function _toPrimitive(t, r) {
 }
 function _toPropertyKey(t) {
 var i = _toPrimitive(t, "string");
-return "symbol" == typeof i ? i : i
+return "symbol" == typeof i ? i : String(i);
+}
+function _defineProperty(obj, key, value) {
+key = _toPropertyKey(key);
+if (key in obj) {
+Object.defineProperty(obj, key, {
+value: value,
+enumerable: true,
+configurable: true,
+writable: true
+});
+} else {
+obj[key] = value;
+}
+return obj;
+}
+function _extends() {
+_extends = Object.assign ? Object.assign.bind() : function (target) {
+for (var i = 1; i < arguments.length; i++) {
+var source = arguments[i];
+for (var key in source) {
+if (Object.prototype.hasOwnProperty.call(source, key)) {
+target[key] = source[key];
+}
+}
+}
+return target;
+};
+return _extends.apply(this, arguments);
 }
 
 // https://caniuse.com/mdn-javascript_builtins_number_isfinite
@@ -245,7 +256,6 @@ let Events = /*#__PURE__*/function (Events) {
 Events["MEDIA_ATTACHED"] = "hlsMediaAttached";
 Events["MEDIA_DETACHING"] = "hlsMediaDetaching";
 Events["MEDIA_DETACHED"] = "hlsMediaDetached";
-Events["MEDIA_ENDED"] = "hlsMediaEnded";
 Events["BUFFER_RESET"] = "hlsBufferReset";
 Events["BUFFER_CODECS"] = "hlsBufferCodecs";
 Events["BUFFER_CREATED"] = "hlsBufferCreated";
@@ -359,6 +369,58 @@ let ErrorDetails = /*#__PURE__*/function (ErrorDetails) {
 return ErrorDetails;
 }({});
 
+const noop = function noop() {};
+const fakeLogger = {
+trace: noop,
+debug: noop,
+log: noop,
+warn: noop,
+info: noop,
+error: noop
+};
+let exportedLogger = fakeLogger;
+
+// let lastCallTime;
+// function formatMsgWithTimeInfo(type, msg) {
+// const now = Date.now();
+// const diff = lastCallTime ? '+' + (now - lastCallTime) : '0';
+// lastCallTime = now;
+// msg = (new Date(now)).toISOString() + ' | [' + type + '] > ' + msg + ' ( ' + diff + ' ms )';
+// return msg;
+// }
+
+function consolePrintFn(type) {
+const func = self.console[type];
+if (func) {
+return func.bind(self.console, `[${type}] >`);
+}
+return noop;
+}
+function exportLoggerFunctions(debugConfig, ...functions) {
+functions.forEach(function (type) {
+exportedLogger[type] = debugConfig[type] ? debugConfig[type].bind(debugConfig) : consolePrintFn(type);
+});
+}
+function enableLogs(debugConfig, id) {
+// check that console is available
+if (typeof console === 'object' && debugConfig === true || typeof debugConfig === 'object') {
+exportLoggerFunctions(debugConfig,
+// Remove out from list here to hard-disable a log-level
+// 'trace',
+'debug', 'log', 'info', 'warn', 'error');
+// Some browsers don't allow to use bind on console object anyway
+// fallback to default if needed
+try {
+exportedLogger.log(`Debug logs enabled for "${id}" in hls.js version ${"1.5.11"}`);
+} catch (e) {
+exportedLogger = fakeLogger;
+}
+} else {
+exportedLogger = fakeLogger;
+}
+}
+const logger = exportedLogger;
+
 const DECIMAL_RESOLUTION_REGEX = /^(\d+)x(\d+)$/;
 const ATTR_LIST_REGEX = /(.+?)=(".*?"|.*?)(?:,|$)/g;
 
@@ -440,84 +502,6 @@ class AttrList {
 }
 }
 
-class Logger {
-constructor(label, logger) {
-this.trace = void 0;
-this.debug = void 0;
-this.log = void 0;
-this.warn = void 0;
-this.info = void 0;
-this.error = void 0;
-const lb = `[${label}]:`;
-this.trace = noop;
-this.debug = logger.debug.bind(null, lb);
-this.log = logger.log.bind(null, lb);
-this.warn = logger.warn.bind(null, lb);
-this.info = logger.info.bind(null, lb);
-this.error = logger.error.bind(null, lb);
-}
-}
-const noop = function noop() {};
-const fakeLogger = {
-trace: noop,
-debug: noop,
-log: noop,
-warn: noop,
-info: noop,
-error: noop
-};
-function createLogger() {
-return _extends({}, fakeLogger);
-}
-
-// let lastCallTime;
-// function formatMsgWithTimeInfo(type, msg) {
-// const now = Date.now();
-// const diff = lastCallTime ? '+' + (now - lastCallTime) : '0';
-// lastCallTime = now;
-// msg = (new Date(now)).toISOString() + ' | [' + type + '] > ' + msg + ' ( ' + diff + ' ms )';
-// return msg;
-// }
-
-function consolePrintFn(type, id) {
-const func = self.console[type];
-return func ? func.bind(self.console, `${''}[${type}] >`) : noop;
-}
-function getLoggerFn(key, debugConfig, id) {
-return debugConfig[key] ? debugConfig[key].bind(debugConfig) : consolePrintFn(key);
-}
-const exportedLogger = createLogger();
-function enableLogs(debugConfig, context, id) {
-// check that console is available
-const newLogger = createLogger();
-if (typeof console === 'object' && debugConfig === true || typeof debugConfig === 'object') {
-const keys = [
-// Remove out from list here to hard-disable a log-level
-// 'trace',
-'debug', 'log', 'info', 'warn', 'error'];
-keys.forEach(key => {
-newLogger[key] = getLoggerFn(key, debugConfig);
-});
-// Some browsers don't allow to use bind on console object anyway
-// fallback to default if needed
-try {
-newLogger.log(`Debug logs enabled for "${context}" in hls.js version ${"1.5.11-0.canary.10334"}`);
-} catch (e) {
-/* log fn threw an exception. All logger methods are no-ops. */
-return createLogger();
-}
-// global exported logger uses the same functions as new logger without `id`
-keys.forEach(key => {
-exportedLogger[key] = getLoggerFn(key, debugConfig);
-});
-} else {
-// Reset global exported logger
-_extends(exportedLogger, newLogger);
-}
-return newLogger;
-}
-const logger = exportedLogger;
-
 // Avoid exporting const enum so that these values can be inlined
 
 function isDateRangeCueAttribute(attrName) {
@@ -1007,32 +991,12 @@ class LevelDetails {
 }
 }
 
-var DecrypterAesMode = {
-cbc: 0,
-ctr: 1
-};
-
-function isFullSegmentEncryption(method) {
-return method === 'AES-128' || method === 'AES-256' || method === 'AES-256-CTR';
-}
-function getAesModeFromFullSegmentMethod(method) {
-switch (method) {
-case 'AES-128':
-case 'AES-256':
-return DecrypterAesMode.cbc;
-case 'AES-256-CTR':
-return DecrypterAesMode.ctr;
-default:
-throw new Error(`invalid full segment method ${method}`);
-}
-}
-
 // This file is inserted as a shim for modules which we do not want to include into the distro.
 // This replacement is done in the "alias" plugin of the rollup config.
 // Use a ES dedicated file as Rollup assigns an object in the output
 // For example: "var KeySystemFormats = emptyEs.KeySystemFormats;"
 var emptyEs = {};
-var
+var Cues = /*@__PURE__*/getDefaultExportFromCjs(emptyEs);
 
 function sliceUint8(array, start, end) {
 // @ts-expect-error This polyfills IE11 usage of Uint8Array slice.
@@ -1040,100 +1004,369 @@ function sliceUint8(array, start, end) {
 return Uint8Array.prototype.slice ? array.slice(start, end) : new Uint8Array(Array.prototype.slice.call(array, start, end));
 }
 
-//
-
-/* utf.js - UTF-8 <=> UTF-16 convertion
-*
-* Copyright (C) 1999 Masanao Izumo <iz@onicos.co.jp>
-* Version: 1.0
-* LastModified: Dec 25 1999
-* This library is free. You can redistribute it and/or modify it.
-*/
+// breaking up those two types in order to clarify what is happening in the decoding path.
+
 /**
-*
-*
-* @param
-*
-* @returns The string
-*
-* @group Utils
-*
-* @beta
+* Returns true if an ID3 header can be found at offset in data
+* @param data - The data to search
+* @param offset - The offset at which to start searching
 */
-
-
-
-
-
-
-
-
+const isHeader$2 = (data, offset) => {
+/*
+* http://id3.org/id3v2.3.0
+* [0] = 'I'
+* [1] = 'D'
+* [2] = '3'
+* [3,4] = {Version}
+* [5] = {Flags}
+* [6-9] = {ID3 Size}
+*
+* An ID3v2 tag can be detected with the following pattern:
+* $49 44 33 yy yy xx zz zz zz zz
+* Where yy is less than $FF, xx is the 'flags' byte and zz is less than $80
+*/
+if (offset + 10 <= data.length) {
+// look for 'ID3' identifier
+if (data[offset] === 0x49 && data[offset + 1] === 0x44 && data[offset + 2] === 0x33) {
+// check version is within range
+if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
+// check size is within range
+if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
+return true;
+}
+}
 }
-// remove any null characters
-return decoded.replace(/\0/g, '');
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-case 7:
-// 0xxxxxxx
-out += String.fromCharCode(c);
-break;
-case 12:
-case 13:
-// 110x xxxx 10xx xxxx
-char2 = array[i++];
-out += String.fromCharCode((c & 0x1f) << 6 | char2 & 0x3f);
-break;
-case 14:
-// 1110 xxxx 10xx xxxx 10xx xxxx
-char2 = array[i++];
-char3 = array[i++];
-out += String.fromCharCode((c & 0x0f) << 12 | (char2 & 0x3f) << 6 | (char3 & 0x3f) << 0);
-break;
+return false;
+};
+
+/**
+* Returns true if an ID3 footer can be found at offset in data
+* @param data - The data to search
+* @param offset - The offset at which to start searching
+*/
+const isFooter = (data, offset) => {
+/*
+* The footer is a copy of the header, but with a different identifier
+*/
+if (offset + 10 <= data.length) {
+// look for '3DI' identifier
+if (data[offset] === 0x33 && data[offset + 1] === 0x44 && data[offset + 2] === 0x49) {
+// check version is within range
+if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
+// check size is within range
+if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
+return true;
+}
+}
 }
 }
-return
-}
+return false;
+};
 
 /**
-*
+* Returns any adjacent ID3 tags found in data starting at offset, as one block of data
+* @param data - The data to search in
+* @param offset - The offset at which to start searching
+* @returns the block of data containing any ID3 tags found
+* or *undefined* if no header is found at the starting offset
 */
+const getID3Data = (data, offset) => {
+const front = offset;
+let length = 0;
+while (isHeader$2(data, offset)) {
+// ID3 header is 10 bytes
+length += 10;
+const size = readSize(data, offset + 6);
+length += size;
+if (isFooter(data, offset + 10)) {
+// ID3 footer is 10 bytes
+length += 10;
+}
+offset += length;
+}
+if (length > 0) {
+return data.subarray(front, front + length);
+}
+return undefined;
+};
+const readSize = (data, offset) => {
+let size = 0;
+size = (data[offset] & 0x7f) << 21;
+size |= (data[offset + 1] & 0x7f) << 14;
+size |= (data[offset + 2] & 0x7f) << 7;
+size |= data[offset + 3] & 0x7f;
+return size;
+};
+const canParse$2 = (data, offset) => {
+return isHeader$2(data, offset) && readSize(data, offset + 6) + 10 <= data.length - offset;
+};
 
-
-
-
-
-
-
-
-
-
+/**
+* Searches for the Elementary Stream timestamp found in the ID3 data chunk
+* @param data - Block of data containing one or more ID3 tags
+*/
+const getTimeStamp = data => {
+const frames = getID3Frames(data);
+for (let i = 0; i < frames.length; i++) {
+const frame = frames[i];
+if (isTimeStampFrame(frame)) {
+return readTimeStamp(frame);
 }
-return str;
 }
+return undefined;
 };
 
-
+/**
+* Returns true if the ID3 frame is an Elementary Stream timestamp frame
+*/
+const isTimeStampFrame = frame => {
+return frame && frame.key === 'PRIV' && frame.info === 'com.apple.streaming.transportStreamTimestamp';
+};
+const getFrameData = data => {
+/*
+Frame ID $xx xx xx xx (four characters)
+Size $xx xx xx xx
+Flags $xx xx
+*/
+const type = String.fromCharCode(data[0], data[1], data[2], data[3]);
+const size = readSize(data, 4);
+
+// skip frame id, size, and flags
+const offset = 10;
+return {
+type,
+size,
+data: data.subarray(offset, offset + size)
+};
+};
+
+/**
+* Returns an array of ID3 frames found in all the ID3 tags in the id3Data
+* @param id3Data - The ID3 data containing one or more ID3 tags
+*/
+const getID3Frames = id3Data => {
+let offset = 0;
+const frames = [];
+while (isHeader$2(id3Data, offset)) {
+const size = readSize(id3Data, offset + 6);
+// skip past ID3 header
+offset += 10;
+const end = offset + size;
+// loop through frames in the ID3 tag
+while (offset + 8 < end) {
+const frameData = getFrameData(id3Data.subarray(offset));
+const frame = decodeFrame(frameData);
+if (frame) {
+frames.push(frame);
+}
+
+// skip frame header and frame data
+offset += frameData.size + 10;
+}
+if (isFooter(id3Data, offset)) {
+offset += 10;
+}
+}
+return frames;
+};
+const decodeFrame = frame => {
+if (frame.type === 'PRIV') {
+return decodePrivFrame(frame);
+} else if (frame.type[0] === 'W') {
+return decodeURLFrame(frame);
+}
+return decodeTextFrame(frame);
+};
+const decodePrivFrame = frame => {
+/*
+Format: <text string>\0<binary data>
+*/
+if (frame.size < 2) {
+return undefined;
+}
+const owner = utf8ArrayToStr(frame.data, true);
+const privateData = new Uint8Array(frame.data.subarray(owner.length + 1));
+return {
+key: frame.type,
+info: owner,
+data: privateData.buffer
+};
+};
+const decodeTextFrame = frame => {
+if (frame.size < 2) {
+return undefined;
+}
+if (frame.type === 'TXXX') {
+/*
+Format:
+[0] = {Text Encoding}
+[1-?] = {Description}\0{Value}
+*/
+let index = 1;
+const description = utf8ArrayToStr(frame.data.subarray(index), true);
+index += description.length + 1;
+const value = utf8ArrayToStr(frame.data.subarray(index));
+return {
+key: frame.type,
+info: description,
+data: value
+};
+}
+/*
+Format:
+[0] = {Text Encoding}
+[1-?] = {Value}
+*/
+const text = utf8ArrayToStr(frame.data.subarray(1));
+return {
+key: frame.type,
+data: text
+};
+};
+const decodeURLFrame = frame => {
+if (frame.type === 'WXXX') {
+/*
+Format:
+[0] = {Text Encoding}
+[1-?] = {Description}\0{URL}
+*/
+if (frame.size < 2) {
+return undefined;
+}
+let index = 1;
+const description = utf8ArrayToStr(frame.data.subarray(index), true);
+index += description.length + 1;
+const value = utf8ArrayToStr(frame.data.subarray(index));
+return {
+key: frame.type,
+info: description,
+data: value
+};
+}
+/*
+Format:
+[0-?] = {URL}
+*/
+const url = utf8ArrayToStr(frame.data);
+return {
+key: frame.type,
+data: url
+};
+};
+const readTimeStamp = timeStampFrame => {
+if (timeStampFrame.data.byteLength === 8) {
+const data = new Uint8Array(timeStampFrame.data);
+// timestamp is 33 bit expressed as a big-endian eight-octet number,
+// with the upper 31 bits set to zero.
+const pts33Bit = data[3] & 0x1;
+let timestamp = (data[4] << 23) + (data[5] << 15) + (data[6] << 7) + data[7];
+timestamp /= 45;
+if (pts33Bit) {
+timestamp += 47721858.84;
+} // 2^32 / 90
+
+return Math.round(timestamp);
+}
+return undefined;
+};
+
+// http://stackoverflow.com/questions/8936984/uint8array-to-string-in-javascript/22373197
+// http://www.onicos.com/staff/iz/amuse/javascript/expert/utf.txt
+/* utf.js - UTF-8 <=> UTF-16 convertion
+*
+* Copyright (C) 1999 Masanao Izumo <iz@onicos.co.jp>
+* Version: 1.0
+* LastModified: Dec 25 1999
+* This library is free. You can redistribute it and/or modify it.
+*/
+const utf8ArrayToStr = (array, exitOnNull = false) => {
+const decoder = getTextDecoder();
+if (decoder) {
+const decoded = decoder.decode(array);
+if (exitOnNull) {
+// grab up to the first null
+const idx = decoded.indexOf('\0');
+return idx !== -1 ? decoded.substring(0, idx) : decoded;
+}
+
+// remove any null characters
+return decoded.replace(/\0/g, '');
+}
+const len = array.length;
+let c;
+let char2;
+let char3;
+let out = '';
+let i = 0;
+while (i < len) {
+c = array[i++];
+if (c === 0x00 && exitOnNull) {
+return out;
+} else if (c === 0x00 || c === 0x03) {
+// If the character is 3 (END_OF_TEXT) or 0 (NULL) then skip it
+continue;
+}
+switch (c >> 4) {
+case 0:
+case 1:
+case 2:
+case 3:
+case 4:
+case 5:
+case 6:
+case 7:
+// 0xxxxxxx
+out += String.fromCharCode(c);
+break;
+case 12:
+case 13:
+// 110x xxxx 10xx xxxx
+char2 = array[i++];
+out += String.fromCharCode((c & 0x1f) << 6 | char2 & 0x3f);
+break;
+case 14:
+// 1110 xxxx 10xx xxxx 10xx xxxx
+char2 = array[i++];
+char3 = array[i++];
+out += String.fromCharCode((c & 0x0f) << 12 | (char2 & 0x3f) << 6 | (char3 & 0x3f) << 0);
+break;
+}
+}
+return out;
+};
+let decoder;
+function getTextDecoder() {
+// On Play Station 4, TextDecoder is defined but partially implemented.
+// Manual decoding option is preferable
+if (navigator.userAgent.includes('PlayStation 4')) {
+return;
+}
+if (!decoder && typeof self.TextDecoder !== 'undefined') {
+decoder = new self.TextDecoder('utf-8');
+}
+return decoder;
+}
+
+/**
+* hex dump helper class
+*/
+
+const Hex = {
+hexDump: function (array) {
+let str = '';
+for (let i = 0; i < array.length; i++) {
+let h = array[i].toString(16);
+if (h.length < 2) {
+h = '0' + h;
+}
+str += h;
+}
+return str;
+}
+};
+
+const UINT32_MAX$1 = Math.pow(2, 32) - 1;
 const push = [].push;
 
 // We are using fixed track IDs for driving the MP4 remuxer
@@ -1395,7 +1628,7 @@ function parseStsd(stsd) {
 {
 const codecBox = findBox(sampleEntries, [fourCC])[0];
 const esdsBox = findBox(codecBox.subarray(28), ['esds'])[0];
-if (esdsBox && esdsBox.length >
+if (esdsBox && esdsBox.length > 12) {
 let i = 4;
 // ES Descriptor tag
 if (esdsBox[i++] !== 0x03) {
@@ -1510,9 +1743,7 @@ function parseStsd(stsd) {
 }
 function skipBERInteger(bytes, i) {
 const limit = i + 5;
-while (bytes[i++] & 0x80 && i < limit) {
-/* do nothing */
-}
+while (bytes[i++] & 0x80 && i < limit) {}
 return i;
 }
 function toHex(x) {
@@ -2204,12 +2435,12 @@ class LevelKey {
 this.keyFormatVersions = formatversions;
 this.iv = iv;
 this.encrypted = method ? method !== 'NONE' : false;
-this.isCommonEncryption = this.encrypted &&
+this.isCommonEncryption = this.encrypted && method !== 'AES-128';
 }
 isSupported() {
 // If it's Segment encryption or No encryption, just select that key system
 if (this.method) {
-if (
+if (this.method === 'AES-128' || this.method === 'NONE') {
 return true;
 }
 if (this.keyFormat === 'identity') {
@@ -2223,13 +2454,14 @@ class LevelKey {
 if (!this.encrypted || !this.uri) {
 return null;
 }
-if (
+if (this.method === 'AES-128' && this.uri && !this.iv) {
 if (typeof sn !== 'number') {
 // We are fetching decryption data for a initialization segment
-// If the segment was encrypted with AES-128
+// If the segment was encrypted with AES-128
 // It must have an IV defined. We cannot substitute the Segment Number in.
-
-
+if (this.method === 'AES-128' && !this.iv) {
+logger.warn(`missing IV for initialization segment with method="${this.method}" - compliance issue`);
+}
 // Explicitly set sn to resulting value from implicit conversions 'initSegment' values for IV generation.
 sn = 0;
 }
@@ -2379,28 +2611,23 @@ function getCodecCompatibleNameLower(lowerCaseCodec, preferManagedMediaSource =
 if (CODEC_COMPATIBLE_NAMES[lowerCaseCodec]) {
 return CODEC_COMPATIBLE_NAMES[lowerCaseCodec];
 }
+
+// Idealy fLaC and Opus would be first (spec-compliant) but
+// some browsers will report that fLaC is supported then fail.
+// see: https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
 const codecsToCheck = {
-// Idealy fLaC and Opus would be first (spec-compliant) but
-// some browsers will report that fLaC is supported then fail.
-// see: https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
 flac: ['flac', 'fLaC', 'FLAC'],
-opus: ['opus', 'Opus']
-// Replace audio codec info if browser does not support mp4a.40.34,
-// and demuxer can fallback to 'audio/mpeg' or 'audio/mp4;codecs="mp3"'
-'mp4a.40.34': ['mp3']
+opus: ['opus', 'Opus']
 }[lowerCaseCodec];
 for (let i = 0; i < codecsToCheck.length; i++) {
-var _getMediaSource;
 if (isCodecMediaSourceSupported(codecsToCheck[i], 'audio', preferManagedMediaSource)) {
 CODEC_COMPATIBLE_NAMES[lowerCaseCodec] = codecsToCheck[i];
 return codecsToCheck[i];
-} else if (codecsToCheck[i] === 'mp3' && (_getMediaSource = getMediaSource(preferManagedMediaSource)) != null && _getMediaSource.isTypeSupported('audio/mpeg')) {
-return '';
 }
 }
 return lowerCaseCodec;
 }
-const AUDIO_CODEC_REGEXP = /flac|opus
+const AUDIO_CODEC_REGEXP = /flac|opus/i;
 function getCodecCompatibleName(codec, preferManagedMediaSource = true) {
 return codec.replace(AUDIO_CODEC_REGEXP, m => getCodecCompatibleNameLower(m.toLowerCase(), preferManagedMediaSource));
 }
@@ -2423,16 +2650,6 @@ function convertAVC1ToAVCOTI(codec) {
 }
 return codec;
 }
-function getM2TSSupportedAudioTypes(preferManagedMediaSource) {
-const MediaSource = getMediaSource(preferManagedMediaSource) || {
-isTypeSupported: () => false
-};
-return {
-mpeg: MediaSource.isTypeSupported('audio/mpeg'),
-mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
-ac3: false
-};
-}
 
 const MASTER_PLAYLIST_REGEX = /#EXT-X-STREAM-INF:([^\r\n]*)(?:[\r\n](?:#[^\r\n]*)?)*([^\r\n]+)|#EXT-X-(SESSION-DATA|SESSION-KEY|DEFINE|CONTENT-STEERING|START):([^\r\n]*)[\r\n]+/g;
 const MASTER_PLAYLIST_MEDIA_REGEX = /#EXT-X-MEDIA:(.*)/g;
@@ -3233,10 +3450,10 @@ class PlaylistLoader {
 const loaderContext = loader.context;
 if (loaderContext && loaderContext.url === context.url && loaderContext.level === context.level) {
 // same URL can't overlap
-
+logger.trace('[playlist-loader]: playlist request ongoing');
 return;
 }
-
+logger.log(`[playlist-loader]: aborting previous loader for type: ${context.type}`);
 loader.abort();
 }
 
@@ -3346,7 +3563,7 @@ class PlaylistLoader {
 // alt audio rendition in which quality levels (main)
 // contains both audio+video. but with mixed audio track not signaled
 if (!embeddedAudioFound && levels[0].audioCodec && !levels[0].attrs.AUDIO) {
-
+logger.log('[playlist-loader]: audio codec signaled in quality level, but no embedded audio track signaled, create one');
 audioTracks.unshift({
 type: 'main',
 name: 'main',
@@ -3445,7 +3662,7 @@ class PlaylistLoader {
 message += ` id: ${context.id} group-id: "${context.groupId}"`;
 }
 const error = new Error(message);
-
+logger.warn(`[playlist-loader]: ${message}`);
 let details = ErrorDetails.UNKNOWN;
 let fatal = false;
 const loader = this.getInternalLoader(context);
@@ -3683,409 +3900,10 @@ var MetadataSchema = {
 emsg: "https://aomedia.org/emsg/ID3"
 };
 
-
-
-
-
-*
-* @returns The decoded ID3 PRIV frame
-*
-* @internal
-*
-* @group ID3
-*/
-function decodeId3PrivFrame(frame) {
-/*
-Format: <text string>\0<binary data>
-*/
-if (frame.size < 2) {
-return undefined;
-}
-const owner = utf8ArrayToStr(frame.data, true);
-const privateData = new Uint8Array(frame.data.subarray(owner.length + 1));
-return {
-key: frame.type,
-info: owner,
-data: privateData.buffer
-};
-}
-
-/**
-* Decodes an ID3 text frame
-*
-* @param frame - the ID3 text frame
-*
-* @returns The decoded ID3 text frame
-*
-* @internal
-*
-* @group ID3
-*/
-function decodeId3TextFrame(frame) {
-if (frame.size < 2) {
-return undefined;
-}
-if (frame.type === 'TXXX') {
-/*
-Format:
-[0] = {Text Encoding}
-[1-?] = {Description}\0{Value}
-*/
-let index = 1;
-const description = utf8ArrayToStr(frame.data.subarray(index), true);
-index += description.length + 1;
-const value = utf8ArrayToStr(frame.data.subarray(index));
-return {
-key: frame.type,
-info: description,
-data: value
-};
-}
-/*
-Format:
-[0] = {Text Encoding}
-[1-?] = {Value}
-*/
-const text = utf8ArrayToStr(frame.data.subarray(1));
-return {
-key: frame.type,
-info: '',
-data: text
-};
-}
-
-/**
-* Decode a URL frame
-*
-* @param frame - the ID3 URL frame
-*
-* @returns The decoded ID3 URL frame
-*
-* @internal
-*
-* @group ID3
-*/
-function decodeId3UrlFrame(frame) {
-if (frame.type === 'WXXX') {
-/*
-Format:
-[0] = {Text Encoding}
-[1-?] = {Description}\0{URL}
-*/
-if (frame.size < 2) {
-return undefined;
-}
-let index = 1;
-const description = utf8ArrayToStr(frame.data.subarray(index), true);
-index += description.length + 1;
-const value = utf8ArrayToStr(frame.data.subarray(index));
-return {
-key: frame.type,
-info: description,
-data: value
-};
-}
-/*
-Format:
-[0-?] = {URL}
-*/
-const url = utf8ArrayToStr(frame.data);
-return {
-key: frame.type,
-info: '',
-data: url
-};
-}
-
-function toUint8(data, offset = 0, length = Infinity) {
-return view(data, offset, length, Uint8Array);
-}
-function view(data, offset, length, Type) {
-const buffer = unsafeGetArrayBuffer(data);
-let bytesPerElement = 1;
-if ('BYTES_PER_ELEMENT' in Type) {
-bytesPerElement = Type.BYTES_PER_ELEMENT;
-}
-// Absolute end of the |data| view within |buffer|.
-const dataOffset = isArrayBufferView(data) ? data.byteOffset : 0;
-const dataEnd = (dataOffset + data.byteLength) / bytesPerElement;
-// Absolute start of the result within |buffer|.
-const rawStart = (dataOffset + offset) / bytesPerElement;
-const start = Math.floor(Math.max(0, Math.min(rawStart, dataEnd)));
-// Absolute end of the result within |buffer|.
-const end = Math.floor(Math.min(start + Math.max(length, 0), dataEnd));
-return new Type(buffer, start, end - start);
-}
-function unsafeGetArrayBuffer(view) {
-if (view instanceof ArrayBuffer) {
-return view;
-} else {
-return view.buffer;
-}
-}
-function isArrayBufferView(obj) {
-return obj && obj.buffer instanceof ArrayBuffer && obj.byteLength !== undefined && obj.byteOffset !== undefined;
-}
-
-function toArrayBuffer(view) {
-if (view instanceof ArrayBuffer) {
-return view;
-} else {
-if (view.byteOffset == 0 && view.byteLength == view.buffer.byteLength) {
-// This is a TypedArray over the whole buffer.
-return view.buffer;
-}
-// This is a 'view' on the buffer. Create a new buffer that only contains
-// the data. Note that since this isn't an ArrayBuffer, the 'new' call
-// will allocate a new buffer to hold the copy.
-return new Uint8Array(view).buffer;
-}
-}
-
-function decodeId3ImageFrame(frame) {
-const metadataFrame = {
-key: frame.type,
-description: '',
-data: '',
-mimeType: null,
-pictureType: null
-};
-const utf8Encoding = 0x03;
-if (frame.size < 2) {
-return undefined;
-}
-if (frame.data[0] !== utf8Encoding) {
-console.log('Ignore frame with unrecognized character ' + 'encoding');
-return undefined;
-}
-const mimeTypeEndIndex = frame.data.subarray(1).indexOf(0);
-if (mimeTypeEndIndex === -1) {
-return undefined;
-}
-const mimeType = utf8ArrayToStr(toUint8(frame.data, 1, mimeTypeEndIndex));
-const pictureType = frame.data[2 + mimeTypeEndIndex];
-const descriptionEndIndex = frame.data.subarray(3 + mimeTypeEndIndex).indexOf(0);
-if (descriptionEndIndex === -1) {
-return undefined;
-}
-const description = utf8ArrayToStr(toUint8(frame.data, 3 + mimeTypeEndIndex, descriptionEndIndex));
-let data;
-if (mimeType === '-->') {
-data = utf8ArrayToStr(toUint8(frame.data, 4 + mimeTypeEndIndex + descriptionEndIndex));
-} else {
-data = toArrayBuffer(frame.data.subarray(4 + mimeTypeEndIndex + descriptionEndIndex));
-}
-metadataFrame.mimeType = mimeType;
-metadataFrame.pictureType = pictureType;
-metadataFrame.description = description;
-metadataFrame.data = data;
-return metadataFrame;
-}
-
-/**
-* Decode an ID3 frame.
-*
-* @param frame - the ID3 frame
-*
-* @returns The decoded ID3 frame
-*
-* @internal
-*
-* @group ID3
-*/
-function decodeId3Frame(frame) {
-if (frame.type === 'PRIV') {
-return decodeId3PrivFrame(frame);
-} else if (frame.type[0] === 'W') {
-return decodeId3UrlFrame(frame);
-} else if (frame.type === 'APIC') {
-return decodeId3ImageFrame(frame);
-}
-return decodeId3TextFrame(frame);
-}
-
-/**
-* Read ID3 size
-*
-* @param data - The data to read from
-* @param offset - The offset at which to start reading
-*
-* @returns The size
-*
-* @internal
-*
-* @group ID3
-*/
-function readId3Size(data, offset) {
-let size = 0;
-size = (data[offset] & 0x7f) << 21;
-size |= (data[offset + 1] & 0x7f) << 14;
-size |= (data[offset + 2] & 0x7f) << 7;
-size |= data[offset + 3] & 0x7f;
-return size;
-}
-
-/**
-* Returns the data of an ID3 frame.
-*
-* @param data - The data to read from
-*
-* @returns The data of the ID3 frame
-*
-* @internal
-*
-* @group ID3
-*/
-function getId3FrameData(data) {
-/*
-Frame ID $xx xx xx xx (four characters)
-Size $xx xx xx xx
-Flags $xx xx
-*/
-const type = String.fromCharCode(data[0], data[1], data[2], data[3]);
-const size = readId3Size(data, 4);
-// skip frame id, size, and flags
-const offset = 10;
-return {
-type,
-size,
-data: data.subarray(offset, offset + size)
-};
-}
-
-/**
-* Returns true if an ID3 footer can be found at offset in data
-*
-* @param data - The data to search in
-* @param offset - The offset at which to start searching
-*
-* @returns `true` if an ID3 footer is found
-*
-* @internal
-*
-* @group ID3
-*/
-function isId3Footer(data, offset) {
-/*
-* The footer is a copy of the header, but with a different identifier
-*/
-if (offset + 10 <= data.length) {
-// look for '3DI' identifier
-if (data[offset] === 0x33 && data[offset + 1] === 0x44 && data[offset + 2] === 0x49) {
-// check version is within range
-if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
-// check size is within range
-if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
-return true;
-}
-}
-}
-}
-return false;
-}
-
-/**
-* Returns true if an ID3 header can be found at offset in data
-*
-* @param data - The data to search in
-* @param offset - The offset at which to start searching
-*
-* @returns `true` if an ID3 header is found
-*
-* @internal
-*
-* @group ID3
-*/
-function isId3Header(data, offset) {
-/*
-* http://id3.org/id3v2.3.0
-* [0] = 'I'
-* [1] = 'D'
-* [2] = '3'
-* [3,4] = {Version}
-* [5] = {Flags}
-* [6-9] = {ID3 Size}
-*
-* An ID3v2 tag can be detected with the following pattern:
-* $49 44 33 yy yy xx zz zz zz zz
-* Where yy is less than $FF, xx is the 'flags' byte and zz is less than $80
-*/
-if (offset + 10 <= data.length) {
-// look for 'ID3' identifier
-if (data[offset] === 0x49 && data[offset + 1] === 0x44 && data[offset + 2] === 0x33) {
-// check version is within range
-if (data[offset + 3] < 0xff && data[offset + 4] < 0xff) {
-// check size is within range
-if (data[offset + 6] < 0x80 && data[offset + 7] < 0x80 && data[offset + 8] < 0x80 && data[offset + 9] < 0x80) {
-return true;
-}
-}
-}
-}
-return false;
-}
-
-const HEADER_FOOTER_SIZE = 10;
-const FRAME_SIZE = 10;
-/**
-* Returns an array of ID3 frames found in all the ID3 tags in the id3Data
-*
-* @param id3Data - The ID3 data containing one or more ID3 tags
-*
-* @returns Array of ID3 frame objects
-*
-* @group ID3
-*
-* @beta
-*/
-function getId3Frames(id3Data) {
-let offset = 0;
-const frames = [];
-while (isId3Header(id3Data, offset)) {
-const size = readId3Size(id3Data, offset + 6);
-if (id3Data[offset + 5] >> 6 & 1) {
-// skip extended header
-offset += HEADER_FOOTER_SIZE;
-}
-// skip past ID3 header
-offset += HEADER_FOOTER_SIZE;
-const end = offset + size;
-// loop through frames in the ID3 tag
-while (offset + FRAME_SIZE < end) {
-const frameData = getId3FrameData(id3Data.subarray(offset));
-const frame = decodeId3Frame(frameData);
-if (frame) {
-frames.push(frame);
-}
-// skip frame header and frame data
-offset += frameData.size + HEADER_FOOTER_SIZE;
-}
-if (isId3Footer(id3Data, offset)) {
-offset += HEADER_FOOTER_SIZE;
-}
-}
-return frames;
-}
-
-/**
-* Returns true if the ID3 frame is an Elementary Stream timestamp frame
-*
-* @param frame - the ID3 frame
-*
-* @returns `true` if the ID3 frame is an Elementary Stream timestamp frame
-*
-* @internal
-*
-* @group ID3
-*/
-function isId3TimestampFrame(frame) {
-return frame && frame.key === 'PRIV' && frame.info === 'com.apple.streaming.transportStreamTimestamp';
-}
-
-const MIN_CUE_DURATION = 0.25;
-function getCueClass() {
-if (typeof self === 'undefined') return undefined;
-return self.VTTCue || self.TextTrackCue;
+const MIN_CUE_DURATION = 0.25;
+function getCueClass() {
+if (typeof self === 'undefined') return undefined;
+return self.VTTCue || self.TextTrackCue;
 }
 function createCueWithDataFields(Cue, startTime, endTime, data, type) {
 let cue = new Cue(startTime, endTime, '');
@@ -4164,10 +3982,11 @@ class ID3TrackController {
 this.media = data.media;
 }
 onMediaDetaching() {
-if (this.id3Track) {
-
-this.id3Track = null;
+if (!this.id3Track) {
+return;
 }
+clearCurrentCues(this.id3Track);
+this.id3Track = null;
 this.media = null;
 this.dateRangeCuesAppended = {};
 }
@@ -4226,7 +4045,7 @@ class ID3TrackController {
 if (type === MetadataSchema.emsg && !enableEmsgMetadataCues || !enableID3MetadataCues) {
 continue;
 }
-const frames =
+const frames = getID3Frames(samples[i].data);
 if (frames) {
 const startTime = samples[i].pts;
 let endTime = startTime + samples[i].duration;
@@ -4240,7 +4059,7 @@ class ID3TrackController {
 for (let j = 0; j < frames.length; j++) {
 const frame = frames[j];
 // Safari doesn't put the timestamp frame in the TextTrack
-if (!
+if (!isTimeStampFrame(frame)) {
 // add a bounds to any unbounded cues
 this.updateId3CueEnds(startTime, type);
 const cue = createCueWithDataFields(Cue, startTime, endTime, frame, type);
@@ -4408,47 +4227,7 @@ class LatencyController {
 this.currentTime = 0;
 this.stallCount = 0;
 this._latency = null;
-this.
-const {
-media,
-levelDetails
-} = this;
-if (!media || !levelDetails) {
-return;
-}
-this.currentTime = media.currentTime;
-const latency = this.computeLatency();
-if (latency === null) {
-return;
-}
-this._latency = latency;
-
-// Adapt playbackRate to meet target latency in low-latency mode
-const {
-lowLatencyMode,
-maxLiveSyncPlaybackRate
-} = this.config;
-if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1 || !levelDetails.live) {
-return;
-}
-const targetLatency = this.targetLatency;
-if (targetLatency === null) {
-return;
-}
-const distanceFromTarget = latency - targetLatency;
-// Only adjust playbackRate when within one target duration of targetLatency
-// and more than one second from under-buffering.
-// Playback further than one target duration from target can be considered DVR playback.
-const liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration);
-const inLiveRange = distanceFromTarget < liveMinLatencyDuration;
-if (inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) {
-const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
-const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20;
-media.playbackRate = Math.min(max, Math.max(1, rate));
-} else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
-media.playbackRate = 1;
-}
-};
+this.timeupdateHandler = () => this.timeupdate();
 this.hls = hls;
 this.config = hls.config;
 this.registerListeners();
@@ -4540,7 +4319,7 @@ class LatencyController {
 this.onMediaDetaching();
 this.levelDetails = null;
 // @ts-ignore
-this.hls = null;
+this.hls = this.timeupdateHandler = null;
 }
 registerListeners() {
 this.hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
@@ -4558,11 +4337,11 @@ class LatencyController {
 }
 onMediaAttached(event, data) {
 this.media = data.media;
-this.media.addEventListener('timeupdate', this.
+this.media.addEventListener('timeupdate', this.timeupdateHandler);
 }
 onMediaDetaching() {
 if (this.media) {
-this.media.removeEventListener('timeupdate', this.
+this.media.removeEventListener('timeupdate', this.timeupdateHandler);
 this.media = null;
 }
 }
@@ -4576,10 +4355,10 @@ class LatencyController {
 }) {
 this.levelDetails = details;
 if (details.advanced) {
-this.
+this.timeupdate();
 }
 if (!details.live && this.media) {
-this.media.removeEventListener('timeupdate', this.
+this.media.removeEventListener('timeupdate', this.timeupdateHandler);
 }
 }
 onError(event, data) {
@@ -4589,7 +4368,48 @@ class LatencyController {
 }
 this.stallCount++;
 if ((_this$levelDetails = this.levelDetails) != null && _this$levelDetails.live) {
-
+logger.warn('[playback-rate-controller]: Stall detected, adjusting target latency');
+}
+}
+timeupdate() {
+const {
+media,
+levelDetails
+} = this;
+if (!media || !levelDetails) {
+return;
+}
+this.currentTime = media.currentTime;
+const latency = this.computeLatency();
+if (latency === null) {
+return;
+}
+this._latency = latency;
+
+// Adapt playbackRate to meet target latency in low-latency mode
+const {
+lowLatencyMode,
+maxLiveSyncPlaybackRate
+} = this.config;
+if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1 || !levelDetails.live) {
+return;
+}
+const targetLatency = this.targetLatency;
+if (targetLatency === null) {
+return;
+}
+const distanceFromTarget = latency - targetLatency;
+// Only adjust playbackRate when within one target duration of targetLatency
+// and more than one second from under-buffering.
+// Playback further than one target duration from target can be considered DVR playback.
+const liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration);
+const inLiveRange = distanceFromTarget < liveMinLatencyDuration;
+if (inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) {
+const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
+const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20;
+media.playbackRate = Math.min(max, Math.max(1, rate));
+} else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
+media.playbackRate = 1;
 }
 }
 estimateLiveEdge() {
@@ -5376,13 +5196,18 @@ var ErrorActionFlags = {
 MoveAllAlternatesMatchingHDCP: 2,
 SwitchToSDR: 4
 }; // Reserved for future use
-class ErrorController
+class ErrorController {
 constructor(hls) {
-super('error-controller', hls.logger);
 this.hls = void 0;
 this.playlistError = 0;
 this.penalizedRenditions = {};
+this.log = void 0;
+this.warn = void 0;
+this.error = void 0;
 this.hls = hls;
+this.log = logger.log.bind(logger, `[info]:`);
+this.warn = logger.warn.bind(logger, `[warning]:`);
+this.error = logger.error.bind(logger, `[error]:`);
 this.registerListeners();
 }
 registerListeners() {
@@ -5636,7 +5461,7 @@ class ErrorController extends Logger {
 var _level$audioGroups, _level$subtitleGroups;
 const levelCandidate = levels[candidate];
 // Skip level switch if GAP tag is found in next level at same position
-if (errorDetails === ErrorDetails.FRAG_GAP && data.frag) {
+if (errorDetails === ErrorDetails.FRAG_GAP && fragErrorType === PlaylistLevelType.MAIN && data.frag) {
 const levelDetails = levels[candidate].details;
 if (levelDetails) {
 const fragCandidate = findFragmentByPTS(data.frag, levelDetails.fragments, data.frag.start);
@@ -5734,13 +5559,16 @@ class ErrorController extends Logger {
|
|
5734
5559
|
}
|
5735
5560
|
}
|
5736
5561
|
|
5737
|
-
class BasePlaylistController
|
5562
|
+
class BasePlaylistController {
|
5738
5563
|
constructor(hls, logPrefix) {
|
5739
|
-
super(logPrefix, hls.logger);
|
5740
5564
|
this.hls = void 0;
|
5741
5565
|
this.timer = -1;
|
5742
5566
|
this.requestScheduled = -1;
|
5743
5567
|
this.canLoad = false;
|
5568
|
+
this.log = void 0;
|
5569
|
+
this.warn = void 0;
|
5570
|
+
this.log = logger.log.bind(logger, `${logPrefix}:`);
|
5571
|
+
this.warn = logger.warn.bind(logger, `${logPrefix}:`);
|
5744
5572
|
this.hls = hls;
|
5745
5573
|
}
|
5746
5574
|
destroy() {
|
@@ -5773,7 +5601,7 @@ class BasePlaylistController extends Logger {
|
|
5773
5601
|
try {
|
5774
5602
|
uri = new self.URL(attr.URI, previous.url).href;
|
5775
5603
|
} catch (error) {
|
5776
|
-
|
5604
|
+
logger.warn(`Could not construct new URL for Rendition Report: ${error}`);
|
5777
5605
|
uri = attr.URI || '';
|
5778
5606
|
}
|
5779
5607
|
// Use exact match. Otherwise, the last partial match, if any, will be used
|
@@ -5861,12 +5689,7 @@ class BasePlaylistController extends Logger {
|
|
5861
5689
|
const cdnAge = lastAdvanced + details.ageHeader;
|
5862
5690
|
let currentGoal = Math.min(cdnAge - details.partTarget, details.targetduration * 1.5);
|
5863
5691
|
if (currentGoal > 0) {
|
5864
|
-
if (
|
5865
|
-
// Omit segment and part directives when the last response was more than 3 target durations ago,
|
5866
|
-
this.log(`Playlist last advanced ${lastAdvanced.toFixed(2)}s ago. Omitting segment and part directives.`);
|
5867
|
-
msn = undefined;
|
5868
|
-
part = undefined;
|
5869
|
-
} else if (previousDetails != null && previousDetails.tuneInGoal && cdnAge - details.partTarget > previousDetails.tuneInGoal) {
|
5692
|
+
if (previousDetails && currentGoal > previousDetails.tuneInGoal) {
|
5870
5693
|
// If we attempted to get the next or latest playlist update, but currentGoal increased,
|
5871
5694
|
// then we either can't catchup, or the "age" header cannot be trusted.
|
5872
5695
|
this.warn(`CDN Tune-in goal increased from: ${previousDetails.tuneInGoal} to: ${currentGoal} with playlist age: ${details.age}`);
|
@@ -6312,9 +6135,8 @@ function getCodecTiers(levels, audioTracksByGroup, minAutoLevel, maxAutoLevel) {
|
|
6312
6135
|
}, {});
|
6313
6136
|
}
|
6314
6137
|
|
6315
|
-
class AbrController
|
6138
|
+
class AbrController {
|
6316
6139
|
constructor(_hls) {
|
6317
|
-
super('abr', _hls.logger);
|
6318
6140
|
this.hls = void 0;
|
6319
6141
|
this.lastLevelLoadSec = 0;
|
6320
6142
|
this.lastLoadedFragLevel = -1;
|
@@ -6428,7 +6250,7 @@ class AbrController extends Logger {
|
|
6428
6250
|
this.resetEstimator(nextLoadLevelBitrate);
|
6429
6251
|
}
|
6430
6252
|
this.clearTimer();
|
6431
|
-
|
6253
|
+
logger.warn(`[abr] Fragment ${frag.sn}${part ? ' part ' + part.index : ''} of level ${frag.level} is loading too slowly;
|
6432
6254
|
Time to underbuffer: ${bufferStarvationDelay.toFixed(3)} s
|
6433
6255
|
Estimated load time for current fragment: ${fragLoadedDelay.toFixed(3)} s
|
6434
6256
|
Estimated load time for down switch fragment: ${fragLevelNextLoadedDelay.toFixed(3)} s
|
@@ -6448,7 +6270,7 @@ class AbrController extends Logger {
|
|
6448
6270
|
}
|
6449
6271
|
resetEstimator(abrEwmaDefaultEstimate) {
|
6450
6272
|
if (abrEwmaDefaultEstimate) {
|
6451
|
-
|
6273
|
+
logger.log(`setting initial bwe to ${abrEwmaDefaultEstimate}`);
|
6452
6274
|
this.hls.config.abrEwmaDefaultEstimate = abrEwmaDefaultEstimate;
|
6453
6275
|
}
|
6454
6276
|
this.firstSelection = -1;
|
@@ -6680,7 +6502,7 @@ class AbrController extends Logger {
|
|
6680
6502
|
}
|
6681
6503
|
const firstLevel = this.hls.firstLevel;
|
6682
6504
|
const clamped = Math.min(Math.max(firstLevel, minAutoLevel), maxAutoLevel);
|
6683
|
-
|
6505
|
+
logger.warn(`[abr] Could not find best starting auto level. Defaulting to first in playlist ${firstLevel} clamped to ${clamped}`);
|
6684
6506
|
return clamped;
|
6685
6507
|
}
|
6686
6508
|
get forcedAutoLevel() {
|
@@ -6726,9 +6548,6 @@ class AbrController extends Logger {
|
|
6726
6548
|
partCurrent,
|
6727
6549
|
hls
|
6728
6550
|
} = this;
|
6729
|
-
if (hls.levels.length <= 1) {
|
6730
|
-
return hls.loadLevel;
|
6731
|
-
}
|
6732
6551
|
const {
|
6733
6552
|
maxAutoLevel,
|
6734
6553
|
config,
|
@@ -6761,13 +6580,13 @@ class AbrController extends Logger {
|
|
6761
6580
|
// cap maxLoadingDelay and ensure it is not bigger 'than bitrate test' frag duration
|
6762
6581
|
const maxLoadingDelay = currentFragDuration ? Math.min(currentFragDuration, config.maxLoadingDelay) : config.maxLoadingDelay;
|
6763
6582
|
maxStarvationDelay = maxLoadingDelay - bitrateTestDelay;
|
6764
|
-
|
6583
|
+
logger.info(`[abr] bitrate test took ${Math.round(1000 * bitrateTestDelay)}ms, set first fragment max fetchDuration to ${Math.round(1000 * maxStarvationDelay)} ms`);
|
6765
6584
|
// don't use conservative factor on bitrate test
|
6766
6585
|
bwFactor = bwUpFactor = 1;
|
6767
6586
|
}
|
6768
6587
|
}
|
6769
6588
|
const bestLevel = this.findBestLevel(avgbw, minAutoLevel, maxAutoLevel, bufferStarvationDelay, maxStarvationDelay, bwFactor, bwUpFactor);
|
6770
|
-
|
6589
|
+
logger.info(`[abr] ${bufferStarvationDelay ? 'rebuffering expected' : 'buffer is empty'}, optimal quality level ${bestLevel}`);
|
6771
6590
|
if (bestLevel > -1) {
|
6772
6591
|
return bestLevel;
|
6773
6592
|
}
|
@@ -6841,7 +6660,7 @@ class AbrController extends Logger {
|
|
6841
6660
|
currentVideoRange = preferHDR ? videoRanges[videoRanges.length - 1] : videoRanges[0];
|
6842
6661
|
currentFrameRate = minFramerate;
|
6843
6662
|
currentBw = Math.max(currentBw, minBitrate);
|
6844
|
-
|
6663
|
+
logger.log(`[abr] picked start tier ${JSON.stringify(startTier)}`);
|
6845
6664
|
} else {
|
6846
6665
|
currentCodecSet = level == null ? void 0 : level.codecSet;
|
6847
6666
|
currentVideoRange = level == null ? void 0 : level.videoRange;
|
@@ -6894,9 +6713,9 @@ class AbrController extends Logger {
|
|
6894
6713
|
const forcedAutoLevel = this.forcedAutoLevel;
|
6895
6714
|
if (i !== loadLevel && (forcedAutoLevel === -1 || forcedAutoLevel !== loadLevel)) {
|
6896
6715
|
if (levelsSkipped.length) {
|
6897
|
-
|
6716
|
+
logger.trace(`[abr] Skipped level(s) ${levelsSkipped.join(',')} of ${maxAutoLevel} max with CODECS and VIDEO-RANGE:"${levels[levelsSkipped[0]].codecs}" ${levels[levelsSkipped[0]].videoRange}; not compatible with "${level.codecs}" ${currentVideoRange}`);
|
6898
6717
|
}
|
6899
|
-
|
6718
|
+
logger.info(`[abr] switch candidate:${selectionBaseLevel}->${i} adjustedbw(${Math.round(adjustedbw)})-bitrate=${Math.round(adjustedbw - bitrate)} ttfb:${ttfbEstimateSec.toFixed(1)} avgDuration:${avgDuration.toFixed(1)} maxFetchDuration:${maxFetchDuration.toFixed(1)} fetchDuration:${fetchDuration.toFixed(1)} firstSelection:${firstSelection} codecSet:${currentCodecSet} videoRange:${currentVideoRange} hls.loadLevel:${loadLevel}`);
|
6900
6719
|
}
|
6901
6720
|
if (firstSelection) {
|
6902
6721
|
this.firstSelection = i;
|
@@ -6939,29 +6758,40 @@ class BufferHelper {
|
|
6939
6758
|
* Return true if `media`'s buffered include `position`
|
6940
6759
|
*/
|
6941
6760
|
static isBuffered(media, position) {
|
6942
|
-
|
6943
|
-
|
6944
|
-
|
6945
|
-
|
6946
|
-
|
6761
|
+
try {
|
6762
|
+
if (media) {
|
6763
|
+
const buffered = BufferHelper.getBuffered(media);
|
6764
|
+
for (let i = 0; i < buffered.length; i++) {
|
6765
|
+
if (position >= buffered.start(i) && position <= buffered.end(i)) {
|
6766
|
+
return true;
|
6767
|
+
}
|
6947
6768
|
}
|
6948
6769
|
}
|
6770
|
+
} catch (error) {
|
6771
|
+
// this is to catch
|
6772
|
+
// InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
|
6773
|
+
// This SourceBuffer has been removed from the parent media source
|
6949
6774
|
}
|
6950
6775
|
return false;
|
6951
6776
|
}
|
6952
6777
|
static bufferInfo(media, pos, maxHoleDuration) {
|
6953
|
-
|
6954
|
-
|
6955
|
-
|
6778
|
+
try {
|
6779
|
+
if (media) {
|
6780
|
+
const vbuffered = BufferHelper.getBuffered(media);
|
6956
6781
|
const buffered = [];
|
6957
|
-
|
6782
|
+
let i;
|
6783
|
+
for (i = 0; i < vbuffered.length; i++) {
|
6958
6784
|
buffered.push({
|
6959
6785
|
start: vbuffered.start(i),
|
6960
6786
|
end: vbuffered.end(i)
|
6961
6787
|
});
|
6962
6788
|
}
|
6963
|
-
return
|
6789
|
+
return this.bufferedInfo(buffered, pos, maxHoleDuration);
|
6964
6790
|
}
|
6791
|
+
} catch (error) {
|
6792
|
+
// this is to catch
|
6793
|
+
// InvalidStateError: Failed to read the 'buffered' property from 'SourceBuffer':
|
6794
|
+
// This SourceBuffer has been removed from the parent media source
|
6965
6795
|
}
|
6966
6796
|
return {
|
6967
6797
|
len: 0,
|
@@ -6973,7 +6803,14 @@ class BufferHelper {
|
|
6973
6803
|
static bufferedInfo(buffered, pos, maxHoleDuration) {
|
6974
6804
|
pos = Math.max(0, pos);
|
6975
6805
|
// sort on buffer.start/smaller end (IE does not always return sorted buffered range)
|
6976
|
-
buffered.sort((a, b)
|
6806
|
+
buffered.sort(function (a, b) {
|
6807
|
+
const diff = a.start - b.start;
|
6808
|
+
if (diff) {
|
6809
|
+
return diff;
|
6810
|
+
} else {
|
6811
|
+
return b.end - a.end;
|
6812
|
+
}
|
6813
|
+
});
|
6977
6814
|
let buffered2 = [];
|
6978
6815
|
if (maxHoleDuration) {
|
6979
6816
|
// there might be some small holes between buffer time range
|
@@ -7040,7 +6877,7 @@ class BufferHelper {
|
|
7040
6877
|
*/
|
7041
6878
|
static getBuffered(media) {
|
7042
6879
|
try {
|
7043
|
-
return media.buffered
|
6880
|
+
return media.buffered;
|
7044
6881
|
} catch (e) {
|
7045
6882
|
logger.log('failed to get media.buffered', e);
|
7046
6883
|
return noopBuffered;
|
@@ -7065,22 +6902,24 @@ class BufferOperationQueue {
|
|
7065
6902
|
this.executeNext(type);
|
7066
6903
|
}
|
7067
6904
|
}
|
6905
|
+
insertAbort(operation, type) {
|
6906
|
+
const queue = this.queues[type];
|
6907
|
+
queue.unshift(operation);
|
6908
|
+
this.executeNext(type);
|
6909
|
+
}
|
7068
6910
|
appendBlocker(type) {
|
7069
|
-
|
7070
|
-
|
7071
|
-
|
7072
|
-
onStart: () => {},
|
7073
|
-
onComplete: () => {},
|
7074
|
-
onError: () => {}
|
7075
|
-
};
|
7076
|
-
this.append(operation, type);
|
6911
|
+
let execute;
|
6912
|
+
const promise = new Promise(resolve => {
|
6913
|
+
execute = resolve;
|
7077
6914
|
});
|
7078
|
-
|
7079
|
-
|
7080
|
-
|
7081
|
-
|
7082
|
-
|
7083
|
-
}
|
6915
|
+
const operation = {
|
6916
|
+
execute,
|
6917
|
+
onStart: () => {},
|
6918
|
+
onComplete: () => {},
|
6919
|
+
onError: () => {}
|
6920
|
+
};
|
6921
|
+
this.append(operation, type);
|
6922
|
+
return promise;
|
7084
6923
|
}
|
7085
6924
|
executeNext(type) {
|
7086
6925
|
const queue = this.queues[type];
|
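Not part of the diff: the rewritten appendBlocker above queues a no-op operation whose execute callback is the resolver of the promise it returns, so awaiting that promise means "everything queued before this point has run". A minimal TypeScript sketch of the same pattern (the append callback and names are illustrative, not the hls.js API):

```ts
// Sketch only: an operation whose execute() resolves the promise handed back to the caller.
interface BufferOperation {
  execute: () => void;
}

function appendBlocker(append: (op: BufferOperation) => void): Promise<void> {
  let execute!: () => void;
  const promise = new Promise<void>((resolve) => {
    execute = resolve;
  });
  // The queued operation does no buffer work; it only resolves the promise
  // once the queue reaches it, which is exactly the "blocker" semantics.
  append({ execute });
  return promise;
}
```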
@@ -7112,9 +6951,8 @@ class BufferOperationQueue {
}

const VIDEO_CODEC_PROFILE_REPLACE = /(avc[1234]|hvc1|hev1|dvh[1e]|vp09|av01)(?:\.[^.,]+)+/;
- class BufferController
- constructor(hls
- super('buffer-controller', hls.logger);
+ class BufferController {
+ constructor(hls) {
// The level details used to determine duration, target-duration and live
this.details = null;
// cache the self generated object url to detect hijack of video tag
@@ -7124,7 +6962,6 @@ class BufferController extends Logger {
// References to event listeners for each SourceBuffer, so that they can be referenced for event removal
this.listeners = void 0;
this.hls = void 0;
- this.fragmentTracker = void 0;
// The number of BUFFER_CODEC events received before any sourceBuffers are created
this.bufferCodecEventsExpected = 0;
// The total number of BUFFER_CODEC events received
@@ -7135,10 +6972,6 @@ class BufferController extends Logger {
this.mediaSource = null;
// Last MP3 audio chunk appended
this.lastMpegAudioChunk = null;
- // Audio fragment blocked from appending until corresponding video appends or context changes
- this.blockedAudioAppend = null;
- // Keep track of video append position for unblocking audio
- this.lastVideoAppendEnd = 0;
this.appendSource = void 0;
// counters
this.appendErrors = {
@@ -7149,6 +6982,9 @@ class BufferController extends Logger {
this.tracks = {};
this.pendingTracks = {};
this.sourceBuffer = void 0;
+ this.log = void 0;
+ this.warn = void 0;
+ this.error = void 0;
this._onEndStreaming = event => {
if (!this.hls) {
return;
@@ -7170,10 +7006,7 @@ class BufferController extends Logger {
this.log('Media source opened');
if (media) {
media.removeEventListener('emptied', this._onMediaEmptied);
-
- if (durationAndRange) {
- this.updateMediaSource(durationAndRange);
- }
+ this.updateMediaElementDuration();
this.hls.trigger(Events.MEDIA_ATTACHED, {
media,
mediaSource: mediaSource
@@ -7197,12 +7030,15 @@ class BufferController extends Logger {
_objectUrl
} = this;
if (mediaSrc !== _objectUrl) {
-
+ logger.error(`Media element src was set while attaching MediaSource (${_objectUrl} > ${mediaSrc})`);
}
};
this.hls = hls;
-
+ const logPrefix = '[buffer-controller]';
this.appendSource = isManagedMediaSource(getMediaSource(hls.config.preferManagedMediaSource));
+ this.log = logger.log.bind(logger, logPrefix);
+ this.warn = logger.warn.bind(logger, logPrefix);
+ this.error = logger.error.bind(logger, logPrefix);
this._initSourceBuffer();
this.registerListeners();
}
@@ -7214,13 +7050,7 @@ class BufferController extends Logger {
this.details = null;
this.lastMpegAudioChunk = null;
// @ts-ignore
- this.hls =
- // @ts-ignore
- this._onMediaSourceOpen = this._onMediaSourceClose = null;
- // @ts-ignore
- this._onMediaSourceEnded = null;
- // @ts-ignore
- this._onStartStreaming = this._onEndStreaming = null;
+ this.hls = null;
}
registerListeners() {
const {
@@ -7270,8 +7100,6 @@ class BufferController extends Logger {
audiovideo: 0
};
this.lastMpegAudioChunk = null;
- this.blockedAudioAppend = null;
- this.lastVideoAppendEnd = 0;
}
onManifestLoading() {
this.bufferCodecEventsExpected = this._bufferCodecEventsTotal = 0;
@@ -7354,34 +7182,34 @@ class BufferController extends Logger {
mediaSource.removeEventListener('startstreaming', this._onStartStreaming);
mediaSource.removeEventListener('endstreaming', this._onEndStreaming);
}
- this.mediaSource = null;
- this._objectUrl = null;
- }

-
-
-
-
-
-
-
+ // Detach properly the MediaSource from the HTMLMediaElement as
+ // suggested in https://github.com/w3c/media-source/issues/53.
+ if (media) {
+ media.removeEventListener('emptied', this._onMediaEmptied);
+ if (_objectUrl) {
+ self.URL.revokeObjectURL(_objectUrl);
+ }

-
-
-
-
-
-
+ // clean up video tag src only if it's our own url. some external libraries might
+ // hijack the video tag and change its 'src' without destroying the Hls instance first
+ if (this.mediaSrc === _objectUrl) {
+ media.removeAttribute('src');
+ if (this.appendSource) {
+ removeSourceChildren(media);
+ }
+ media.load();
+ } else {
+ this.warn('media|source.src was changed by a third party - skip cleanup');
}
- media.load();
- } else {
- this.warn('media|source.src was changed by a third party - skip cleanup');
}
+ this.mediaSource = null;
this.media = null;
+ this._objectUrl = null;
+ this.bufferCodecEventsExpected = this._bufferCodecEventsTotal;
+ this.pendingTracks = {};
+ this.tracks = {};
}
- this.bufferCodecEventsExpected = this._bufferCodecEventsTotal;
- this.pendingTracks = {};
- this.tracks = {};
this.hls.trigger(Events.MEDIA_DETACHED, undefined);
}
onBufferReset() {
@@ -7389,7 +7217,6 @@ class BufferController extends Logger {
this.resetBuffer(type);
});
this._initSourceBuffer();
- this.hls.resumeBuffering();
}
resetBuffer(type) {
const sb = this.sourceBuffer[type];
@@ -7413,10 +7240,9 @@ class BufferController extends Logger {
const trackNames = Object.keys(data);
trackNames.forEach(trackName => {
if (sourceBufferCount) {
- var _track$buffer;
// check if SourceBuffer codec needs to change
const track = this.tracks[trackName];
- if (track && typeof
+ if (track && typeof track.buffer.changeType === 'function') {
var _trackCodec;
const {
id,
@@ -7486,54 +7312,20 @@ class BufferController extends Logger {
};
operationQueue.append(operation, type, !!this.pendingTracks[type]);
}
- blockAudio(partOrFrag) {
- var _this$fragmentTracker;
- const pStart = partOrFrag.start;
- const pTime = pStart + partOrFrag.duration * 0.05;
- const atGap = ((_this$fragmentTracker = this.fragmentTracker.getAppendedFrag(pStart, PlaylistLevelType.MAIN)) == null ? void 0 : _this$fragmentTracker.gap) === true;
- if (atGap) {
- return;
- }
- const op = {
- execute: () => {
- var _this$fragmentTracker2;
- if (this.lastVideoAppendEnd > pTime || this.sourceBuffer.video && BufferHelper.isBuffered(this.sourceBuffer.video, pTime) || ((_this$fragmentTracker2 = this.fragmentTracker.getAppendedFrag(pTime, PlaylistLevelType.MAIN)) == null ? void 0 : _this$fragmentTracker2.gap) === true) {
- this.blockedAudioAppend = null;
- this.operationQueue.shiftAndExecuteNext('audio');
- }
- },
- onStart: () => {},
- onComplete: () => {},
- onError: () => {}
- };
- this.blockedAudioAppend = {
- op,
- frag: partOrFrag
- };
- this.operationQueue.append(op, 'audio', true);
- }
- unblockAudio() {
- const blockedAudioAppend = this.blockedAudioAppend;
- if (blockedAudioAppend) {
- this.blockedAudioAppend = null;
- this.operationQueue.unblockAudio(blockedAudioAppend.op);
- }
- }
onBufferAppending(event, eventData) {
const {
+ hls,
operationQueue,
tracks
} = this;
const {
data,
type,
- parent,
frag,
part,
chunkMeta
} = eventData;
const chunkStats = chunkMeta.buffering[type];
- const sn = frag.sn;
const bufferAppendingStart = self.performance.now();
chunkStats.start = bufferAppendingStart;
const fragBuffering = frag.stats.buffering;
@@ -7556,36 +7348,7 @@ class BufferController extends Logger {
checkTimestampOffset = !this.lastMpegAudioChunk || chunkMeta.id === 1 || this.lastMpegAudioChunk.sn !== chunkMeta.sn;
this.lastMpegAudioChunk = chunkMeta;
}
-
- // Block audio append until overlapping video append
- const videoSb = this.sourceBuffer.video;
- if (videoSb && sn !== 'initSegment') {
- const partOrFrag = part || frag;
- const blockedAudioAppend = this.blockedAudioAppend;
- if (type === 'audio' && parent !== 'main' && !this.blockedAudioAppend) {
- const pStart = partOrFrag.start;
- const pTime = pStart + partOrFrag.duration * 0.05;
- const vbuffered = videoSb.buffered;
- const vappending = this.operationQueue.current('video');
- if (!vbuffered.length && !vappending) {
- // wait for video before appending audio
- this.blockAudio(partOrFrag);
- } else if (!vappending && !BufferHelper.isBuffered(videoSb, pTime) && this.lastVideoAppendEnd < pTime) {
- // audio is ahead of video
- this.blockAudio(partOrFrag);
- }
- } else if (type === 'video') {
- const videoAppendEnd = partOrFrag.end;
- if (blockedAudioAppend) {
- const audioStart = blockedAudioAppend.frag.start;
- if (videoAppendEnd > audioStart || videoAppendEnd < this.lastVideoAppendEnd || BufferHelper.isBuffered(videoSb, audioStart)) {
- this.unblockAudio();
- }
- }
- this.lastVideoAppendEnd = videoAppendEnd;
- }
- }
- const fragStart = (part || frag).start;
+ const fragStart = frag.start;
const operation = {
execute: () => {
chunkStats.executeStart = self.performance.now();
@@ -7594,7 +7357,7 @@ class BufferController extends Logger {
if (sb) {
const delta = fragStart - sb.timestampOffset;
if (Math.abs(delta) >= 0.1) {
- this.log(`Updating audio SourceBuffer timestampOffset to ${fragStart} (delta: ${delta}) sn: ${sn})`);
+ this.log(`Updating audio SourceBuffer timestampOffset to ${fragStart} (delta: ${delta}) sn: ${frag.sn})`);
sb.timestampOffset = fragStart;
}
}
@@ -7661,21 +7424,22 @@ class BufferController extends Logger {
/* with UHD content, we could get loop of quota exceeded error until
browser is able to evict some data from sourcebuffer. Retrying can help recover.
*/
- this.warn(`Failed ${appendErrorCount}/${
- if (appendErrorCount >=
+ this.warn(`Failed ${appendErrorCount}/${hls.config.appendErrorMaxRetry} times to append segment in "${type}" sourceBuffer`);
+ if (appendErrorCount >= hls.config.appendErrorMaxRetry) {
event.fatal = true;
}
}
-
+ hls.trigger(Events.ERROR, event);
}
};
operationQueue.append(operation, type, !!this.pendingTracks[type]);
}
-
-
-
-
-
+ onBufferFlushing(event, data) {
+ const {
+ operationQueue
+ } = this;
+ const flushOperation = type => ({
+ execute: this.removeExecutor.bind(this, type, data.startOffset, data.endOffset),
onStart: () => {
// logger.debug(`[buffer-controller]: Started flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
},
@@ -7688,22 +7452,12 @@ class BufferController extends Logger {
onError: error => {
this.warn(`Failed to remove from ${type} SourceBuffer`, error);
}
- };
-
-
- const {
- operationQueue
- } = this;
- const {
- type,
- startOffset,
- endOffset
- } = data;
- if (type) {
- operationQueue.append(this.getFlushOp(type, startOffset, endOffset), type);
+ });
+ if (data.type) {
+ operationQueue.append(flushOperation(data.type), data.type);
} else {
- this.getSourceBufferTypes().forEach(
- operationQueue.append(
+ this.getSourceBufferTypes().forEach(type => {
+ operationQueue.append(flushOperation(type), type);
});
}
}
@@ -7750,9 +7504,6 @@ class BufferController extends Logger {
// on BUFFER_EOS mark matching sourcebuffer(s) as ended and trigger checkEos()
// an undefined data.type will mark all buffers as EOS.
onBufferEos(event, data) {
- if (data.type === 'video') {
- this.unblockAudio();
- }
const ended = this.getSourceBufferTypes().reduce((acc, type) => {
const sb = this.sourceBuffer[type];
if (sb && (!data.type || data.type === type)) {
@@ -7795,14 +7546,10 @@ class BufferController extends Logger {
return;
}
this.details = details;
- const durationAndRange = this.getDurationAndRange();
- if (!durationAndRange) {
- return;
- }
if (this.getSourceBufferTypes().length) {
- this.blockBuffers(
+ this.blockBuffers(this.updateMediaElementDuration.bind(this));
} else {
- this.
+ this.updateMediaElementDuration();
}
}
trimBuffers() {
@@ -7907,9 +7654,9 @@ class BufferController extends Logger {
* 'liveDurationInfinity` is set to `true`
* More details: https://github.com/video-dev/hls.js/issues/355
*/
-
+ updateMediaElementDuration() {
if (!this.details || !this.media || !this.mediaSource || this.mediaSource.readyState !== 'open') {
- return
+ return;
}
const {
details,
@@ -7923,41 +7670,25 @@ class BufferController extends Logger {
if (details.live && hls.config.liveDurationInfinity) {
// Override duration to Infinity
mediaSource.duration = Infinity;
-
- if (len && details.live && !!mediaSource.setLiveSeekableRange) {
- const start = Math.max(0, details.fragments[0].start);
- const end = Math.max(start, start + details.totalduration);
- return {
- duration: Infinity,
- start,
- end
- };
- }
- return {
- duration: Infinity
- };
+ this.updateSeekableRange(details);
} else if (levelDuration > msDuration && levelDuration > mediaDuration || !isFiniteNumber(mediaDuration)) {
-
-
-
+ // levelDuration was the last value we set.
+ // not using mediaSource.duration as the browser may tweak this value
+ // only update Media Source duration if its value increase, this is to avoid
+ // flushing already buffered portion when switching between quality level
+ this.log(`Updating Media Source duration to ${levelDuration.toFixed(3)}`);
+ mediaSource.duration = levelDuration;
}
- return null;
}
-
-
-
-
-
-
-
-
-
- this.log(`Updating Media Source duration to ${duration.toFixed(3)}`);
- }
- this.mediaSource.duration = duration;
- if (start !== undefined && end !== undefined) {
- this.log(`Media Source duration is set to ${this.mediaSource.duration}. Setting seekable range to ${start}-${end}.`);
- this.mediaSource.setLiveSeekableRange(start, end);
+ updateSeekableRange(levelDetails) {
+ const mediaSource = this.mediaSource;
+ const fragments = levelDetails.fragments;
+ const len = fragments.length;
+ if (len && levelDetails.live && mediaSource != null && mediaSource.setLiveSeekableRange) {
+ const start = Math.max(0, fragments[0].start);
+ const end = Math.max(start, start + levelDetails.totalduration);
+ this.log(`Media Source duration is set to ${mediaSource.duration}. Setting seekable range to ${start}-${end}.`);
+ mediaSource.setLiveSeekableRange(start, end);
}
}
checkPendingTracks() {
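Not part of the diff: the updateSeekableRange method restored above relies on the browser's MediaSource.setLiveSeekableRange. A minimal TypeScript sketch of that call, guarding for availability; the levelDetails shape here is an assumption for illustration, not the hls.js type:

```ts
// Sketch only: clamp a live stream's seekable window on the MediaSource.
function applyLiveSeekableRange(
  mediaSource: MediaSource,
  levelDetails: { live: boolean; fragments: { start: number }[]; totalduration: number },
): void {
  const fragments = levelDetails.fragments;
  if (!levelDetails.live || !fragments.length || !('setLiveSeekableRange' in mediaSource)) {
    return;
  }
  const start = Math.max(0, fragments[0].start);
  const end = Math.max(start, start + levelDetails.totalduration);
  // With duration set to Infinity, this tells the media element which range
  // the UI should treat as seekable.
  mediaSource.setLiveSeekableRange(start, end);
}
```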
@@ -8143,7 +7874,6 @@ class BufferController extends Logger {
}
return;
}
- sb.ending = false;
sb.ended = false;
sb.appendBuffer(data);
}
@@ -8163,14 +7893,10 @@ class BufferController extends Logger {

// logger.debug(`[buffer-controller]: Blocking ${buffers} SourceBuffer`);
const blockingOperations = buffers.map(type => operationQueue.appendBlocker(type));
-
- if (audioBlocked) {
- this.unblockAudio();
- }
- Promise.all(blockingOperations).then(result => {
+ Promise.all(blockingOperations).then(() => {
// logger.debug(`[buffer-controller]: Blocking operation resolved; unblocking ${buffers} SourceBuffer`);
onUnblocked();
- buffers.forEach(
+ buffers.forEach(type => {
const sb = this.sourceBuffer[type];
// Only cycle the queue if the SB is not updating. There's a bug in Chrome which sets the SB updating flag to
// true when changing the MediaSource duration (https://bugs.chromium.org/p/chromium/issues/detail?id=959359&can=2&q=mediasource%20duration)
@@ -8319,7 +8045,6 @@ class CapLevelController {
}
onMediaDetaching() {
this.stopCapping();
- this.media = null;
}
detectPlayerSize() {
if (this.media) {
@@ -8332,10 +8057,10 @@ class CapLevelController {
const hls = this.hls;
const maxLevel = this.getMaxLevel(levels.length - 1);
if (maxLevel !== this.autoLevelCapping) {
-
+ logger.log(`Setting autoLevelCapping to ${maxLevel}: ${levels[maxLevel].height}p@${levels[maxLevel].bitrate} for media ${this.mediaWidth}x${this.mediaHeight}`);
}
hls.autoLevelCapping = maxLevel;
- if (hls.
+ if (hls.autoLevelCapping > this.autoLevelCapping && this.streamController) {
// if auto level capping has a higher value for the previous one, flush the buffer using nextLevelSwitch
// usually happen when the user go to the fullscreen mode.
this.streamController.nextLevelSwitch();
@@ -8471,11 +8196,9 @@ class FPSController {
}
registerListeners() {
this.hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
- this.hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
}
unregisterListeners() {
this.hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
- this.hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
}
destroy() {
if (this.timer) {
@@ -8497,9 +8220,6 @@ class FPSController {
this.timer = self.setInterval(this.checkFPSInterval.bind(this), config.fpsDroppedMonitoringPeriod);
}
}
- onMediaDetaching() {
- this.media = null;
- }
checkFPS(video, decodedFrames, droppedFrames) {
const currentTime = performance.now();
if (decodedFrames) {
@@ -8515,10 +8235,10 @@ class FPSController {
totalDroppedFrames: droppedFrames
});
if (droppedFPS > 0) {
- //
+ // logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
if (currentDropped > hls.config.fpsDroppedMonitoringThreshold * currentDecoded) {
let currentLevel = hls.currentLevel;
-
+ logger.warn('drop FPS ratio greater than max allowed value for currentLevel: ' + currentLevel);
if (currentLevel > 0 && (hls.autoLevelCapping === -1 || hls.autoLevelCapping >= currentLevel)) {
currentLevel = currentLevel - 1;
hls.trigger(Events.FPS_DROP_LEVEL_CAPPING, {
@@ -8551,14 +8271,14 @@ class FPSController {
}

const PATHWAY_PENALTY_DURATION_MS = 300000;
- class ContentSteeringController
+ class ContentSteeringController {
constructor(hls) {
- super('content-steering', hls.logger);
this.hls = void 0;
+ this.log = void 0;
this.loader = null;
this.uri = null;
this.pathwayId = '.';
- this.
+ this.pathwayPriority = null;
this.timeToLoad = 300;
this.reloadTimer = -1;
this.updated = 0;
@@ -8569,6 +8289,7 @@ class ContentSteeringController extends Logger {
this.subtitleTracks = null;
this.penalizedPathways = {};
this.hls = hls;
+ this.log = logger.log.bind(logger, `[content-steering]:`);
this.registerListeners();
}
registerListeners() {
@@ -8588,20 +8309,6 @@ class ContentSteeringController extends Logger {
hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
hls.off(Events.ERROR, this.onError, this);
}
- pathways() {
- return (this.levels || []).reduce((pathways, level) => {
- if (pathways.indexOf(level.pathwayId) === -1) {
- pathways.push(level.pathwayId);
- }
- return pathways;
- }, []);
- }
- get pathwayPriority() {
- return this._pathwayPriority;
- }
- set pathwayPriority(pathwayPriority) {
- this.updatePathwayPriority(pathwayPriority);
- }
startLoad() {
this.started = true;
this.clearTimeout();
@@ -8675,7 +8382,7 @@ class ContentSteeringController extends Logger {
} = data;
if ((errorAction == null ? void 0 : errorAction.action) === NetworkErrorAction.SendAlternateToPenaltyBox && errorAction.flags === ErrorActionFlags.MoveAllAlternatesMatchingHost) {
const levels = this.levels;
- let pathwayPriority = this.
+ let pathwayPriority = this.pathwayPriority;
let errorPathway = this.pathwayId;
if (data.context) {
const {
@@ -8694,14 +8401,19 @@ class ContentSteeringController extends Logger {
}
if (!pathwayPriority && levels) {
// If PATHWAY-PRIORITY was not provided, list pathways for error handling
- pathwayPriority =
+ pathwayPriority = levels.reduce((pathways, level) => {
+ if (pathways.indexOf(level.pathwayId) === -1) {
+ pathways.push(level.pathwayId);
+ }
+ return pathways;
+ }, []);
}
if (pathwayPriority && pathwayPriority.length > 1) {
this.updatePathwayPriority(pathwayPriority);
errorAction.resolved = this.pathwayId !== errorPathway;
}
if (!errorAction.resolved) {
-
+ logger.warn(`Could not resolve ${data.details} ("${data.error.message}") with content-steering for Pathway: ${errorPathway} levels: ${levels ? levels.length : levels} priorities: ${JSON.stringify(pathwayPriority)} penalized: ${JSON.stringify(this.penalizedPathways)}`);
}
}
}
@@ -8728,7 +8440,7 @@ class ContentSteeringController extends Logger {
return this.levels.filter(level => pathwayId === level.pathwayId);
}
updatePathwayPriority(pathwayPriority) {
- this.
+ this.pathwayPriority = pathwayPriority;
let levels;

// Evaluate if we should remove the pathway from the penalized list
@@ -8872,7 +8584,7 @@ class ContentSteeringController extends Logger {
onSuccess: (response, stats, context, networkDetails) => {
this.log(`Loaded steering manifest: "${url}"`);
const steeringData = response.data;
- if (
+ if (steeringData.VERSION !== 1) {
this.log(`Steering VERSION ${steeringData.VERSION} not supported!`);
return;
}
@@ -9779,7 +9491,7 @@ const hlsDefaultConfig = _objectSpread2(_objectSpread2({
});
function timelineConfig() {
return {
- cueHandler:
+ cueHandler: Cues,
// used by timeline-controller
enableWebVTT: false,
// used by timeline-controller
@@ -9810,7 +9522,7 @@ function timelineConfig() {
/**
* @ignore
*/
- function mergeConfig(defaultConfig, userConfig
+ function mergeConfig(defaultConfig, userConfig) {
if ((userConfig.liveSyncDurationCount || userConfig.liveMaxLatencyDurationCount) && (userConfig.liveSyncDuration || userConfig.liveMaxLatencyDuration)) {
throw new Error("Illegal hls.js config: don't mix up liveSyncDurationCount/liveMaxLatencyDurationCount and liveSyncDuration/liveMaxLatencyDuration");
}
@@ -9880,7 +9592,7 @@ function deepCpy(obj) {
/**
* @ignore
*/
- function enableStreamingMode(config
+ function enableStreamingMode(config) {
const currentLoader = config.loader;
if (currentLoader !== FetchLoader && currentLoader !== XhrLoader) {
// If a developer has configured their own loader, respect that choice
@@ -9897,9 +9609,10 @@ function enableStreamingMode(config, logger) {
}
}

+ let chromeOrFirefox;
class LevelController extends BasePlaylistController {
constructor(hls, contentSteeringController) {
- super(hls, 'level-controller');
+ super(hls, '[level-controller]');
this._levels = [];
this._firstLevel = -1;
this._maxAutoLevel = -1;
@@ -9970,15 +9683,23 @@ class LevelController extends BasePlaylistController {
let videoCodecFound = false;
let audioCodecFound = false;
data.levels.forEach(levelParsed => {
- var _videoCodec;
+ var _audioCodec, _videoCodec;
const attributes = levelParsed.attrs;
+
+ // erase audio codec info if browser does not support mp4a.40.34.
+ // demuxer will autodetect codec and fallback to mpeg/audio
let {
audioCodec,
videoCodec
} = levelParsed;
+ if (((_audioCodec = audioCodec) == null ? void 0 : _audioCodec.indexOf('mp4a.40.34')) !== -1) {
+ chromeOrFirefox || (chromeOrFirefox = /chrome|firefox/i.test(navigator.userAgent));
+ if (chromeOrFirefox) {
+ levelParsed.audioCodec = audioCodec = undefined;
+ }
+ }
if (audioCodec) {
-
- levelParsed.audioCodec = audioCodec = getCodecCompatibleName(audioCodec, preferManagedMediaSource) || undefined;
+ levelParsed.audioCodec = audioCodec = getCodecCompatibleName(audioCodec, preferManagedMediaSource);
}
if (((_videoCodec = videoCodec) == null ? void 0 : _videoCodec.indexOf('avc1')) === 0) {
videoCodec = levelParsed.videoCodec = convertAVC1ToAVCOTI(videoCodec);
@@ -10273,25 +9994,6 @@ class LevelController extends BasePlaylistController {
set startLevel(newLevel) {
this._startLevel = newLevel;
}
- get pathwayPriority() {
- if (this.steering) {
- return this.steering.pathwayPriority;
- }
- return null;
- }
- set pathwayPriority(pathwayPriority) {
- if (this.steering) {
- const pathwaysList = this.steering.pathways();
- const filteredPathwayPriority = pathwayPriority.filter(pathwayId => {
- return pathwaysList.indexOf(pathwayId) !== -1;
- });
- if (pathwayPriority.length < 1) {
- this.warn(`pathwayPriority ${pathwayPriority} should contain at least one pathway from list: ${pathwaysList}`);
- return;
- }
- this.steering.pathwayPriority = filteredPathwayPriority;
- }
- }
onError(event, data) {
if (data.fatal || !data.context) {
return;
@@ -10339,12 +10041,7 @@ class LevelController extends BasePlaylistController {
if (curLevel.fragmentError === 0) {
curLevel.loadError = 0;
}
-
- let previousDetails = curLevel.details;
- if (previousDetails === data.details && previousDetails.advanced) {
- previousDetails = undefined;
- }
- this.playlistLoaded(level, data, previousDetails);
+ this.playlistLoaded(level, data, curLevel.details);
} else if ((_data$deliveryDirecti2 = data.deliveryDirectives) != null && _data$deliveryDirecti2.skip) {
// received a delta playlist update that cannot be merged
details.deltaUpdateFailed = true;
@@ -10522,16 +10219,13 @@ class FragmentTracker {
* If not found any Fragment, return null
*/
getBufferedFrag(position, levelType) {
- return this.getFragAtPos(position, levelType, true);
- }
- getFragAtPos(position, levelType, buffered) {
const {
fragments
} = this;
const keys = Object.keys(fragments);
for (let i = keys.length; i--;) {
const fragmentEntity = fragments[keys[i]];
- if ((fragmentEntity == null ? void 0 : fragmentEntity.body.type) === levelType &&
+ if ((fragmentEntity == null ? void 0 : fragmentEntity.body.type) === levelType && fragmentEntity.buffered) {
const frag = fragmentEntity.body;
if (frag.start <= position && position <= frag.end) {
return frag;
@@ -10786,8 +10480,7 @@ class FragmentTracker {
const {
frag,
part,
- timeRanges
- type
+ timeRanges
} = data;
if (frag.sn === 'initSegment') {
return;
@@ -10802,8 +10495,10 @@ class FragmentTracker {
}
// Store the latest timeRanges loaded in the buffer
this.timeRanges = timeRanges;
-
-
+ Object.keys(timeRanges).forEach(elementaryStream => {
+ const timeRange = timeRanges[elementaryStream];
+ this.detectEvictedFragments(elementaryStream, timeRange, playlistType, part);
+ });
}
onFragBuffered(event, data) {
this.detectPartialFragments(data);
@@ -11132,8 +10827,8 @@ function createLoaderContext(frag, part = null) {
var _frag$decryptdata;
let byteRangeStart = start;
let byteRangeEnd = end;
- if (frag.sn === 'initSegment' &&
- // MAP segment encrypted with method 'AES-128'
+ if (frag.sn === 'initSegment' && ((_frag$decryptdata = frag.decryptdata) == null ? void 0 : _frag$decryptdata.method) === 'AES-128') {
+ // MAP segment encrypted with method 'AES-128', when served with HTTP Range,
// has the unencrypted size specified in the range.
// Ref: https://tools.ietf.org/html/draft-pantos-hls-rfc8216bis-08#section-6.3.6
const fragmentLen = end - start;
@@ -11166,9 +10861,6 @@ function createGapLoadError(frag, part) {
(part ? part : frag).stats.aborted = true;
return new LoadError(errorData);
}
- function isMethodFullSegmentAesCbc(method) {
- return method === 'AES-128' || method === 'AES-256';
- }
class LoadError extends Error {
constructor(data) {
super(data.error.message);
@@ -11314,8 +11006,6 @@ class KeyLoader {
}
return this.loadKeyEME(keyInfo, frag);
case 'AES-128':
- case 'AES-256':
- case 'AES-256-CTR':
return this.loadKeyHTTP(keyInfo, frag);
default:
return Promise.reject(this.createKeyLoadError(frag, ErrorDetails.KEY_LOAD_ERROR, new Error(`Key supplied with unsupported METHOD: "${decryptdata.method}"`)));
@@ -11451,9 +11141,8 @@ class KeyLoader {
* we are limiting the task execution per call stack to exactly one, but scheduling/post-poning further
* task processing on the next main loop iteration (also known as "next tick" in the Node/JS runtime lingo).
*/
- class TaskLoop
- constructor(
- super(label, logger);
+ class TaskLoop {
+ constructor() {
this._boundTick = void 0;
this._tickTimer = null;
this._tickInterval = null;
@@ -11721,61 +11410,33 @@ function alignMediaPlaylistByPDT(details, refDetails) {
}

class AESCrypto {
- constructor(subtle, iv
+ constructor(subtle, iv) {
this.subtle = void 0;
this.aesIV = void 0;
- this.aesMode = void 0;
this.subtle = subtle;
this.aesIV = iv;
- this.aesMode = aesMode;
}
decrypt(data, key) {
-
-
-
-
- iv: this.aesIV
- }, key, data);
- case DecrypterAesMode.ctr:
- return this.subtle.decrypt({
- name: 'AES-CTR',
- counter: this.aesIV,
- length: 64
- },
- //64 : NIST SP800-38A standard suggests that the counter should occupy half of the counter block
- key, data);
- default:
- throw new Error(`[AESCrypto] invalid aes mode ${this.aesMode}`);
- }
+ return this.subtle.decrypt({
+ name: 'AES-CBC',
+ iv: this.aesIV
+ }, key, data);
}
}

class FastAESKey {
- constructor(subtle, key
+ constructor(subtle, key) {
this.subtle = void 0;
this.key = void 0;
- this.aesMode = void 0;
this.subtle = subtle;
this.key = key;
- this.aesMode = aesMode;
}
expandKey() {
- const subtleAlgoName = getSubtleAlgoName(this.aesMode);
return this.subtle.importKey('raw', this.key, {
- name:
+ name: 'AES-CBC'
}, false, ['encrypt', 'decrypt']);
}
}
- function getSubtleAlgoName(aesMode) {
- switch (aesMode) {
- case DecrypterAesMode.cbc:
- return 'AES-CBC';
- case DecrypterAesMode.ctr:
- return 'AES-CTR';
- default:
- throw new Error(`[FastAESKey] invalid aes mode ${aesMode}`);
- }
- }

// PKCS7
function removePadding(array) {
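Not part of the diff: the 1.5.11 AESCrypto and FastAESKey above drop the aesMode/AES-CTR paths and go back to plain AES-CBC through WebCrypto. A minimal standalone sketch of that two-step flow (importKey, then decrypt), assuming a raw 16-byte key and a 16-byte IV:

```ts
// Sketch only: WebCrypto AES-128-CBC decryption as used by the classes above.
async function decryptAes128Cbc(
  subtle: SubtleCrypto,
  key: ArrayBuffer, // raw 16-byte key (assumption for this example)
  iv: Uint8Array, // 16-byte initialization vector
  data: ArrayBuffer,
): Promise<ArrayBuffer> {
  // FastAESKey.expandKey() equivalent: import the raw key for AES-CBC use.
  const cryptoKey = await subtle.importKey('raw', key, { name: 'AES-CBC' }, false, [
    'encrypt',
    'decrypt',
  ]);
  // AESCrypto.decrypt() equivalent: WebCrypto also strips PKCS7 padding here.
  return subtle.decrypt({ name: 'AES-CBC', iv }, cryptoKey, data);
}
```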
@@ -12025,8 +11686,7 @@ class Decrypter {
this.currentIV = null;
this.currentResult = null;
this.useSoftware = void 0;
- this.
- this.enableSoftwareAES = config.enableSoftwareAES;
+ this.useSoftware = config.enableSoftwareAES;
this.removePKCS7Padding = removePKCS7Padding;
// built in decryptor expects PKCS7 padding
if (removePKCS7Padding) {
@@ -12077,10 +11737,10 @@ class Decrypter {
this.softwareDecrypter = null;
}
}
- decrypt(data, key, iv
+ decrypt(data, key, iv) {
if (this.useSoftware) {
return new Promise((resolve, reject) => {
- this.softwareDecrypt(new Uint8Array(data), key, iv
+ this.softwareDecrypt(new Uint8Array(data), key, iv);
const decryptResult = this.flush();
if (decryptResult) {
resolve(decryptResult.buffer);
@@ -12089,21 +11749,17 @@ class Decrypter {
}
});
}
- return this.webCryptoDecrypt(new Uint8Array(data), key, iv
+ return this.webCryptoDecrypt(new Uint8Array(data), key, iv);
}

// Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
// data is handled in the flush() call
- softwareDecrypt(data, key, iv
+ softwareDecrypt(data, key, iv) {
const {
currentIV,
currentResult,
remainderData
} = this;
- if (aesMode !== DecrypterAesMode.cbc || key.byteLength !== 16) {
- logger.warn('SoftwareDecrypt: can only handle AES-128-CBC');
- return null;
- }
this.logOnce('JS AES decrypt');
// The output is staggered during progressive parsing - the current result is cached, and emitted on the next call
// This is done in order to strip PKCS7 padding, which is found at the end of each segment. We only know we've reached
@@ -12136,13 +11792,13 @@ class Decrypter {
}
return result;
}
- webCryptoDecrypt(data, key, iv
+ webCryptoDecrypt(data, key, iv) {
if (this.key !== key || !this.fastAesKey) {
if (!this.subtle) {
- return Promise.resolve(this.onWebCryptoError(data, key, iv
+ return Promise.resolve(this.onWebCryptoError(data, key, iv));
}
this.key = key;
- this.fastAesKey = new FastAESKey(this.subtle, key
+ this.fastAesKey = new FastAESKey(this.subtle, key);
}
return this.fastAesKey.expandKey().then(aesKey => {
// decrypt using web crypto
@@ -12150,25 +11806,22 @@ class Decrypter {
return Promise.reject(new Error('web crypto not initialized'));
}
this.logOnce('WebCrypto AES decrypt');
- const crypto = new AESCrypto(this.subtle, new Uint8Array(iv)
+ const crypto = new AESCrypto(this.subtle, new Uint8Array(iv));
return crypto.decrypt(data.buffer, aesKey);
}).catch(err => {
logger.warn(`[decrypter]: WebCrypto Error, disable WebCrypto API, ${err.name}: ${err.message}`);
- return this.onWebCryptoError(data, key, iv
+ return this.onWebCryptoError(data, key, iv);
});
}
- onWebCryptoError(data, key, iv
-
-
-
-
-
-
- if (decryptResult) {
- return decryptResult.buffer;
- }
+ onWebCryptoError(data, key, iv) {
+ this.useSoftware = true;
+ this.logEnabled = true;
+ this.softwareDecrypt(data, key, iv);
+ const decryptResult = this.flush();
+ if (decryptResult) {
+ return decryptResult.buffer;
}
- throw new Error('WebCrypto
+ throw new Error('WebCrypto and softwareDecrypt: failed to decrypt data');
}
getValidChunk(data) {
let currentChunk = data;
@@ -12219,7 +11872,7 @@ const State = {
};
class BaseStreamController extends TaskLoop {
constructor(hls, fragmentTracker, keyLoader, logPrefix, playlistType) {
- super(
+ super();
this.hls = void 0;
this.fragPrevious = null;
this.fragCurrent = null;
@@ -12244,98 +11897,22 @@ class BaseStreamController extends TaskLoop {
this.startFragRequested = false;
this.decrypter = void 0;
this.initPTS = [];
- this.
- this.
- this.
-
-
- fragCurrent,
- media,
- mediaBuffer,
- state
- } = this;
- const currentTime = media ? media.currentTime : 0;
- const bufferInfo = BufferHelper.bufferInfo(mediaBuffer ? mediaBuffer : media, currentTime, config.maxBufferHole);
- this.log(`media seeking to ${isFiniteNumber(currentTime) ? currentTime.toFixed(3) : currentTime}, state: ${state}`);
- if (this.state === State.ENDED) {
- this.resetLoadingState();
- } else if (fragCurrent) {
- // Seeking while frag load is in progress
- const tolerance = config.maxFragLookUpTolerance;
- const fragStartOffset = fragCurrent.start - tolerance;
- const fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance;
- // if seeking out of buffered range or into new one
- if (!bufferInfo.len || fragEndOffset < bufferInfo.start || fragStartOffset > bufferInfo.end) {
- const pastFragment = currentTime > fragEndOffset;
- // if the seek position is outside the current fragment range
- if (currentTime < fragStartOffset || pastFragment) {
- if (pastFragment && fragCurrent.loader) {
- this.log('seeking outside of buffer while fragment load in progress, cancel fragment load');
- fragCurrent.abortRequests();
- this.resetLoadingState();
- }
- this.fragPrevious = null;
- }
- }
- }
- if (media) {
- // Remove gap fragments
- this.fragmentTracker.removeFragmentsInRange(currentTime, Infinity, this.playlistType, true);
- this.lastCurrentTime = currentTime;
- if (!this.loadingParts) {
- const bufferEnd = Math.max(bufferInfo.end, currentTime);
- const shouldLoadParts = this.shouldLoadParts(this.getLevelDetails(), bufferEnd);
- if (shouldLoadParts) {
- this.log(`LL-Part loading ON after seeking to ${currentTime.toFixed(2)} with buffer @${bufferEnd.toFixed(2)}`);
- this.loadingParts = shouldLoadParts;
- }
- }
- }
-
- // in case seeking occurs although no media buffered, adjust startPosition and nextLoadPosition to seek target
- if (!this.loadedmetadata && !bufferInfo.len) {
- this.nextLoadPosition = this.startPosition = currentTime;
- }
-
- // Async tick to speed up processing
- this.tickImmediate();
- };
- this.onMediaEnded = () => {
- // reset startPosition and lastCurrentTime to restart playback @ stream beginning
- this.startPosition = this.lastCurrentTime = 0;
- if (this.playlistType === PlaylistLevelType.MAIN) {
- this.hls.trigger(Events.MEDIA_ENDED, {
- stalled: false
- });
- }
- };
+ this.onvseeking = null;
+ this.onvended = null;
+ this.logPrefix = '';
+ this.log = void 0;
+ this.warn = void 0;
this.playlistType = playlistType;
+ this.logPrefix = logPrefix;
+ this.log = logger.log.bind(logger, `${logPrefix}:`);
+ this.warn = logger.warn.bind(logger, `${logPrefix}:`);
this.hls = hls;
this.fragmentLoader = new FragmentLoader(hls.config);
this.keyLoader = keyLoader;
this.fragmentTracker = fragmentTracker;
this.config = hls.config;
this.decrypter = new Decrypter(hls.config);
- }
- registerListeners() {
- const {
- hls
- } = this;
- hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
- hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
- hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
hls.on(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
- hls.on(Events.ERROR, this.onError, this);
- }
- unregisterListeners() {
- const {
- hls
- } = this;
- hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
- hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
- hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
- hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
- hls.off(Events.ERROR, this.onError, this);
}
doTick() {
this.onTickEnd();
@@ -12359,12 +11936,6 @@ class BaseStreamController extends TaskLoop {
this.clearNextTick();
this.state = State.STOPPED;
}
- pauseBuffering() {
- this.buffering = false;
- }
- resumeBuffering() {
- this.buffering = true;
- }
_streamEnded(bufferInfo, levelDetails) {
// If playlist is live, there is another buffered range after the current range, nothing buffered, media is detached,
// of nothing loading/loaded return false
@@ -12395,8 +11966,10 @@ class BaseStreamController extends TaskLoop {
}
onMediaAttached(event, data) {
const media = this.media = this.mediaBuffer = data.media;
-
-
+ this.onvseeking = this.onMediaSeeking.bind(this);
+ this.onvended = this.onMediaEnded.bind(this);
+ media.addEventListener('seeking', this.onvseeking);
+ media.addEventListener('ended', this.onvended);
const config = this.config;
if (this.levels && config.autoStartLoad && this.state === State.STOPPED) {
this.startLoad(config.startPosition);
@@ -12410,9 +11983,10 @@ class BaseStreamController extends TaskLoop {
}

// remove video listeners
- if (media) {
- media.removeEventListener('seeking', this.
- media.removeEventListener('ended', this.
+ if (media && this.onvseeking && this.onvended) {
+ media.removeEventListener('seeking', this.onvseeking);
+ media.removeEventListener('ended', this.onvended);
+ this.onvseeking = this.onvended = null;
|
12416
11990
|
}
|
12417
11991
|
if (this.keyLoader) {
|
12418
11992
|
this.keyLoader.detach();
|
@@ -12422,26 +11996,75 @@ class BaseStreamController extends TaskLoop {
|
|
12422
11996
|
this.fragmentTracker.removeAllFragments();
|
12423
11997
|
this.stopLoad();
|
12424
11998
|
}
|
12425
|
-
|
12426
|
-
|
12427
|
-
|
12428
|
-
|
12429
|
-
|
12430
|
-
|
12431
|
-
|
12432
|
-
this
|
12433
|
-
|
12434
|
-
|
12435
|
-
this.
|
12436
|
-
|
12437
|
-
|
12438
|
-
|
12439
|
-
|
12440
|
-
|
12441
|
-
|
12442
|
-
|
12443
|
-
|
12444
|
-
|
11999
|
+
onMediaSeeking() {
|
12000
|
+
const {
|
12001
|
+
config,
|
12002
|
+
fragCurrent,
|
12003
|
+
media,
|
12004
|
+
mediaBuffer,
|
12005
|
+
state
|
12006
|
+
} = this;
|
12007
|
+
const currentTime = media ? media.currentTime : 0;
|
12008
|
+
const bufferInfo = BufferHelper.bufferInfo(mediaBuffer ? mediaBuffer : media, currentTime, config.maxBufferHole);
|
12009
|
+
this.log(`media seeking to ${isFiniteNumber(currentTime) ? currentTime.toFixed(3) : currentTime}, state: ${state}`);
|
12010
|
+
if (this.state === State.ENDED) {
|
12011
|
+
this.resetLoadingState();
|
12012
|
+
} else if (fragCurrent) {
|
12013
|
+
// Seeking while frag load is in progress
|
12014
|
+
const tolerance = config.maxFragLookUpTolerance;
|
12015
|
+
const fragStartOffset = fragCurrent.start - tolerance;
|
12016
|
+
const fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance;
|
12017
|
+
// if seeking out of buffered range or into new one
|
12018
|
+
if (!bufferInfo.len || fragEndOffset < bufferInfo.start || fragStartOffset > bufferInfo.end) {
|
12019
|
+
const pastFragment = currentTime > fragEndOffset;
|
12020
|
+
// if the seek position is outside the current fragment range
|
12021
|
+
if (currentTime < fragStartOffset || pastFragment) {
|
12022
|
+
if (pastFragment && fragCurrent.loader) {
|
12023
|
+
this.log('seeking outside of buffer while fragment load in progress, cancel fragment load');
|
12024
|
+
fragCurrent.abortRequests();
|
12025
|
+
this.resetLoadingState();
|
12026
|
+
}
|
12027
|
+
this.fragPrevious = null;
|
12028
|
+
}
|
12029
|
+
}
|
12030
|
+
}
|
12031
|
+
if (media) {
|
12032
|
+
// Remove gap fragments
|
12033
|
+
this.fragmentTracker.removeFragmentsInRange(currentTime, Infinity, this.playlistType, true);
|
12034
|
+
this.lastCurrentTime = currentTime;
|
12035
|
+
}
|
12036
|
+
|
12037
|
+
// in case seeking occurs although no media buffered, adjust startPosition and nextLoadPosition to seek target
|
12038
|
+
if (!this.loadedmetadata && !bufferInfo.len) {
|
12039
|
+
this.nextLoadPosition = this.startPosition = currentTime;
|
12040
|
+
}
|
12041
|
+
|
12042
|
+
// Async tick to speed up processing
|
12043
|
+
this.tickImmediate();
|
12044
|
+
}
|
12045
|
+
onMediaEnded() {
|
12046
|
+
// reset startPosition and lastCurrentTime to restart playback @ stream beginning
|
12047
|
+
this.startPosition = this.lastCurrentTime = 0;
|
12048
|
+
}
|
12049
|
+
onManifestLoaded(event, data) {
|
12050
|
+
this.startTimeOffset = data.startTimeOffset;
|
12051
|
+
this.initPTS = [];
|
12052
|
+
}
|
12053
|
+
onHandlerDestroying() {
|
12054
|
+
this.hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
|
12055
|
+
this.stopLoad();
|
12056
|
+
super.onHandlerDestroying();
|
12057
|
+
// @ts-ignore
|
12058
|
+
this.hls = null;
|
12059
|
+
}
|
12060
|
+
onHandlerDestroyed() {
|
12061
|
+
this.state = State.STOPPED;
|
12062
|
+
if (this.fragmentLoader) {
|
12063
|
+
this.fragmentLoader.destroy();
|
12064
|
+
}
|
12065
|
+
if (this.keyLoader) {
|
12066
|
+
this.keyLoader.destroy();
|
12067
|
+
}
|
12445
12068
|
if (this.decrypter) {
|
12446
12069
|
this.decrypter.destroy();
|
12447
12070
|
}
|
@@ -12566,10 +12189,10 @@ class BaseStreamController extends TaskLoop {
|
|
12566
12189
|
const decryptData = frag.decryptdata;
|
12567
12190
|
|
12568
12191
|
// check to see if the payload needs to be decrypted
|
12569
|
-
if (payload && payload.byteLength > 0 && decryptData != null && decryptData.key && decryptData.iv &&
|
12192
|
+
if (payload && payload.byteLength > 0 && decryptData != null && decryptData.key && decryptData.iv && decryptData.method === 'AES-128') {
|
12570
12193
|
const startTime = self.performance.now();
|
12571
12194
|
// decrypt init segment data
|
12572
|
-
return this.decrypter.decrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer
|
12195
|
+
return this.decrypter.decrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer).catch(err => {
|
12573
12196
|
hls.trigger(Events.ERROR, {
|
12574
12197
|
type: ErrorTypes.MEDIA_ERROR,
|
12575
12198
|
details: ErrorDetails.FRAG_DECRYPT_ERROR,
|
@@ -12610,9 +12233,7 @@ class BaseStreamController extends TaskLoop {
|
|
12610
12233
|
throw new Error('init load aborted, missing levels');
|
12611
12234
|
}
|
12612
12235
|
const stats = data.frag.stats;
|
12613
|
-
|
12614
|
-
this.state = State.IDLE;
|
12615
|
-
}
|
12236
|
+
this.state = State.IDLE;
|
12616
12237
|
data.frag.data = new Uint8Array(data.payload);
|
12617
12238
|
stats.parsing.start = stats.buffering.start = self.performance.now();
|
12618
12239
|
stats.parsing.end = stats.buffering.end = self.performance.now();
|
@@ -12683,7 +12304,7 @@ class BaseStreamController extends TaskLoop {
|
|
12683
12304
|
}
|
12684
12305
|
let keyLoadingPromise = null;
|
12685
12306
|
if (frag.encrypted && !((_frag$decryptdata = frag.decryptdata) != null && _frag$decryptdata.key)) {
|
12686
|
-
this.log(`Loading key for ${frag.sn} of [${details.startSN}-${details.endSN}], ${this.
|
12307
|
+
this.log(`Loading key for ${frag.sn} of [${details.startSN}-${details.endSN}], ${this.logPrefix === '[stream-controller]' ? 'level' : 'track'} ${frag.level}`);
|
12687
12308
|
this.state = State.KEY_LOADING;
|
12688
12309
|
this.fragCurrent = frag;
|
12689
12310
|
keyLoadingPromise = this.keyLoader.load(frag).then(keyLoadedData => {
|
@@ -12704,16 +12325,8 @@ class BaseStreamController extends TaskLoop {
|
|
12704
12325
|
} else if (!frag.encrypted && details.encryptedFragments.length) {
|
12705
12326
|
this.keyLoader.loadClear(frag, details.encryptedFragments);
|
12706
12327
|
}
|
12707
|
-
const fragPrevious = this.fragPrevious;
|
12708
|
-
if (frag.sn !== 'initSegment' && (!fragPrevious || frag.sn !== fragPrevious.sn)) {
|
12709
|
-
const shouldLoadParts = this.shouldLoadParts(level.details, frag.end);
|
12710
|
-
if (shouldLoadParts !== this.loadingParts) {
|
12711
|
-
this.log(`LL-Part loading ${shouldLoadParts ? 'ON' : 'OFF'} loading sn ${fragPrevious == null ? void 0 : fragPrevious.sn}->${frag.sn}`);
|
12712
|
-
this.loadingParts = shouldLoadParts;
|
12713
|
-
}
|
12714
|
-
}
|
12715
12328
|
targetBufferTime = Math.max(frag.start, targetBufferTime || 0);
|
12716
|
-
if (this.
|
12329
|
+
if (this.config.lowLatencyMode && frag.sn !== 'initSegment') {
|
12717
12330
|
const partList = details.partList;
|
12718
12331
|
if (partList && progressCallback) {
|
12719
12332
|
if (targetBufferTime > frag.end && details.fragmentHint) {
|
@@ -12722,7 +12335,7 @@ class BaseStreamController extends TaskLoop {
|
|
12722
12335
|
const partIndex = this.getNextPart(partList, frag, targetBufferTime);
|
12723
12336
|
if (partIndex > -1) {
|
12724
12337
|
const part = partList[partIndex];
|
12725
|
-
this.log(`Loading part sn: ${frag.sn} p: ${part.index} cc: ${frag.cc} of playlist [${details.startSN}-${details.endSN}] parts [0-${partIndex}-${partList.length - 1}] ${this.
|
12338
|
+
this.log(`Loading part sn: ${frag.sn} p: ${part.index} cc: ${frag.cc} of playlist [${details.startSN}-${details.endSN}] parts [0-${partIndex}-${partList.length - 1}] ${this.logPrefix === '[stream-controller]' ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
|
12726
12339
|
this.nextLoadPosition = part.start + part.duration;
|
12727
12340
|
this.state = State.FRAG_LOADING;
|
12728
12341
|
let _result;
|
@@ -12751,14 +12364,7 @@ class BaseStreamController extends TaskLoop {
|
|
12751
12364
|
}
|
12752
12365
|
}
|
12753
12366
|
}
|
12754
|
-
|
12755
|
-
this.log(`LL-Part loading OFF after next part miss @${targetBufferTime.toFixed(2)}`);
|
12756
|
-
this.loadingParts = false;
|
12757
|
-
} else if (!frag.url) {
|
12758
|
-
// Selected fragment hint for part but not loading parts
|
12759
|
-
return Promise.resolve(null);
|
12760
|
-
}
|
12761
|
-
this.log(`Loading fragment ${frag.sn} cc: ${frag.cc} ${details ? 'of [' + details.startSN + '-' + details.endSN + '] ' : ''}${this.playlistType === PlaylistLevelType.MAIN ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
|
12367
|
+
this.log(`Loading fragment ${frag.sn} cc: ${frag.cc} ${details ? 'of [' + details.startSN + '-' + details.endSN + '] ' : ''}${this.logPrefix === '[stream-controller]' ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
|
12762
12368
|
// Don't update nextLoadPosition for fragments which are not buffered
|
12763
12369
|
if (isFiniteNumber(frag.sn) && !this.bitrateTest) {
|
12764
12370
|
this.nextLoadPosition = frag.start + frag.duration;
|
@@ -12856,36 +12462,8 @@ class BaseStreamController extends TaskLoop {
|
|
12856
12462
|
if (part) {
|
12857
12463
|
part.stats.parsing.end = now;
|
12858
12464
|
}
|
12859
|
-
// See if part loading should be disabled/enabled based on buffer and playback position.
|
12860
|
-
if (frag.sn !== 'initSegment') {
|
12861
|
-
const levelDetails = this.getLevelDetails();
|
12862
|
-
const loadingPartsAtEdge = levelDetails && frag.sn > levelDetails.endSN;
|
12863
|
-
const shouldLoadParts = loadingPartsAtEdge || this.shouldLoadParts(levelDetails, frag.end);
|
12864
|
-
if (shouldLoadParts !== this.loadingParts) {
|
12865
|
-
this.log(`LL-Part loading ${shouldLoadParts ? 'ON' : 'OFF'} after parsing segment ending @${frag.end.toFixed(2)}`);
|
12866
|
-
this.loadingParts = shouldLoadParts;
|
12867
|
-
}
|
12868
|
-
}
|
12869
12465
|
this.updateLevelTiming(frag, part, level, chunkMeta.partial);
|
12870
12466
|
}
|
12871
|
-
shouldLoadParts(details, bufferEnd) {
|
12872
|
-
if (this.config.lowLatencyMode) {
|
12873
|
-
if (!details) {
|
12874
|
-
return this.loadingParts;
|
12875
|
-
}
|
12876
|
-
if (details != null && details.partList) {
|
12877
|
-
var _details$fragmentHint;
|
12878
|
-
// Buffer must be ahead of first part + duration of parts after last segment
|
12879
|
-
// and playback must be at or past segment adjacent to part list
|
12880
|
-
const firstPart = details.partList[0];
|
12881
|
-
const safePartStart = firstPart.end + (((_details$fragmentHint = details.fragmentHint) == null ? void 0 : _details$fragmentHint.duration) || 0);
|
12882
|
-
if (bufferEnd >= safePartStart && this.lastCurrentTime > firstPart.start - firstPart.fragment.duration) {
|
12883
|
-
return true;
|
12884
|
-
}
|
12885
|
-
}
|
12886
|
-
}
|
12887
|
-
return false;
|
12888
|
-
}
|
12889
12467
|
getCurrentContext(chunkMeta) {
|
12890
12468
|
const {
|
12891
12469
|
levels,
|
@@ -12986,7 +12564,7 @@ class BaseStreamController extends TaskLoop {
|
|
12986
12564
|
// Workaround flaw in getting forward buffer when maxBufferHole is smaller than gap at current pos
|
12987
12565
|
if (bufferInfo.len === 0 && bufferInfo.nextStart !== undefined) {
|
12988
12566
|
const bufferedFragAtPos = this.fragmentTracker.getBufferedFrag(pos, type);
|
12989
|
-
if (bufferedFragAtPos &&
|
12567
|
+
if (bufferedFragAtPos && bufferInfo.nextStart < bufferedFragAtPos.end) {
|
12990
12568
|
return BufferHelper.bufferInfo(bufferable, pos, Math.max(bufferInfo.nextStart, maxBufferHole));
|
12991
12569
|
}
|
12992
12570
|
}
|
@@ -13035,8 +12613,7 @@ class BaseStreamController extends TaskLoop {
|
|
13035
12613
|
config
|
13036
12614
|
} = this;
|
13037
12615
|
const start = fragments[0].start;
|
13038
|
-
|
13039
|
-
let frag = null;
|
12616
|
+
let frag;
|
13040
12617
|
if (levelDetails.live) {
|
13041
12618
|
const initialLiveManifestSize = config.initialLiveManifestSize;
|
13042
12619
|
if (fragLen < initialLiveManifestSize) {
|
@@ -13048,10 +12625,6 @@ class BaseStreamController extends TaskLoop {
|
|
13048
12625
|
// Do not load using live logic if the starting frag is requested - we want to use getFragmentAtPosition() so that
|
13049
12626
|
// we get the fragment matching that start time
|
13050
12627
|
if (!levelDetails.PTSKnown && !this.startFragRequested && this.startPosition === -1 || pos < start) {
|
13051
|
-
if (canLoadParts && !this.loadingParts) {
|
13052
|
-
this.log(`LL-Part loading ON for initial live fragment`);
|
13053
|
-
this.loadingParts = true;
|
13054
|
-
}
|
13055
12628
|
frag = this.getInitialLiveFragment(levelDetails, fragments);
|
13056
12629
|
this.startPosition = this.nextLoadPosition = frag ? this.hls.liveSyncPosition || frag.start : pos;
|
13057
12630
|
}
|
@@ -13062,7 +12635,7 @@ class BaseStreamController extends TaskLoop {
|
|
13062
12635
|
|
13063
12636
|
// If we haven't run into any special cases already, just load the fragment most closely matching the requested position
|
13064
12637
|
if (!frag) {
|
13065
|
-
const end =
|
12638
|
+
const end = config.lowLatencyMode ? levelDetails.partEnd : levelDetails.fragmentEnd;
|
13066
12639
|
frag = this.getFragmentAtPosition(pos, end, levelDetails);
|
13067
12640
|
}
|
13068
12641
|
return this.mapToInitFragWhenRequired(frag);
|
@@ -13186,7 +12759,7 @@ class BaseStreamController extends TaskLoop {
|
|
13186
12759
|
maxFragLookUpTolerance
|
13187
12760
|
} = config;
|
13188
12761
|
const partList = levelDetails.partList;
|
13189
|
-
const loadingParts = !!(
|
12762
|
+
const loadingParts = !!(config.lowLatencyMode && partList != null && partList.length && fragmentHint);
|
13190
12763
|
if (loadingParts && fragmentHint && !this.bitrateTest) {
|
13191
12764
|
// Include incomplete fragment with parts at end
|
13192
12765
|
fragments = fragments.concat(fragmentHint);
|
@@ -13379,7 +12952,7 @@ class BaseStreamController extends TaskLoop {
|
|
13379
12952
|
errorAction.resolved = true;
|
13380
12953
|
}
|
13381
12954
|
} else {
|
13382
|
-
|
12955
|
+
logger.warn(`${data.details} reached or exceeded max retry (${retryCount})`);
|
13383
12956
|
return;
|
13384
12957
|
}
|
13385
12958
|
} else if ((errorAction == null ? void 0 : errorAction.action) === NetworkErrorAction.SendAlternateToPenaltyBox) {
|
@@ -13447,9 +13020,7 @@ class BaseStreamController extends TaskLoop {
|
|
13447
13020
|
this.log('Reset loading state');
|
13448
13021
|
this.fragCurrent = null;
|
13449
13022
|
this.fragPrevious = null;
|
13450
|
-
|
13451
|
-
this.state = State.IDLE;
|
13452
|
-
}
|
13023
|
+
this.state = State.IDLE;
|
13453
13024
|
}
|
13454
13025
|
resetStartWhenNotLoaded(level) {
|
13455
13026
|
// if loadedmetadata is not set, it means that first frag request failed
|
@@ -13628,104 +13199,6 @@ function dummyTrack(type = '', inputTimeScale = 90000) {
|
|
13628
13199
|
};
|
13629
13200
|
}
|
13630
13201
|
|
13631
|
-
/**
|
13632
|
-
* Returns any adjacent ID3 tags found in data starting at offset, as one block of data
|
13633
|
-
*
|
13634
|
-
* @param data - The data to search in
|
13635
|
-
* @param offset - The offset at which to start searching
|
13636
|
-
*
|
13637
|
-
* @returns The block of data containing any ID3 tags found
|
13638
|
-
* or `undefined` if no header is found at the starting offset
|
13639
|
-
*
|
13640
|
-
* @internal
|
13641
|
-
*
|
13642
|
-
* @group ID3
|
13643
|
-
*/
|
13644
|
-
function getId3Data(data, offset) {
|
13645
|
-
const front = offset;
|
13646
|
-
let length = 0;
|
13647
|
-
while (isId3Header(data, offset)) {
|
13648
|
-
// ID3 header is 10 bytes
|
13649
|
-
length += 10;
|
13650
|
-
const size = readId3Size(data, offset + 6);
|
13651
|
-
length += size;
|
13652
|
-
if (isId3Footer(data, offset + 10)) {
|
13653
|
-
// ID3 footer is 10 bytes
|
13654
|
-
length += 10;
|
13655
|
-
}
|
13656
|
-
offset += length;
|
13657
|
-
}
|
13658
|
-
if (length > 0) {
|
13659
|
-
return data.subarray(front, front + length);
|
13660
|
-
}
|
13661
|
-
return undefined;
|
13662
|
-
}
|
13663
|
-
|
13664
|
-
/**
|
13665
|
-
* Read a 33 bit timestamp from an ID3 frame.
|
13666
|
-
*
|
13667
|
-
* @param timeStampFrame - the ID3 frame
|
13668
|
-
*
|
13669
|
-
* @returns The timestamp
|
13670
|
-
*
|
13671
|
-
* @internal
|
13672
|
-
*
|
13673
|
-
* @group ID3
|
13674
|
-
*/
|
13675
|
-
function readId3Timestamp(timeStampFrame) {
|
13676
|
-
if (timeStampFrame.data.byteLength === 8) {
|
13677
|
-
const data = new Uint8Array(timeStampFrame.data);
|
13678
|
-
// timestamp is 33 bit expressed as a big-endian eight-octet number,
|
13679
|
-
// with the upper 31 bits set to zero.
|
13680
|
-
const pts33Bit = data[3] & 0x1;
|
13681
|
-
let timestamp = (data[4] << 23) + (data[5] << 15) + (data[6] << 7) + data[7];
|
13682
|
-
timestamp /= 45;
|
13683
|
-
if (pts33Bit) {
|
13684
|
-
timestamp += 47721858.84;
|
13685
|
-
} // 2^32 / 90
|
13686
|
-
return Math.round(timestamp);
|
13687
|
-
}
|
13688
|
-
return undefined;
|
13689
|
-
}
|
13690
|
-
|
13691
|
-
/**
|
13692
|
-
* Searches for the Elementary Stream timestamp found in the ID3 data chunk
|
13693
|
-
*
|
13694
|
-
* @param data - Block of data containing one or more ID3 tags
|
13695
|
-
*
|
13696
|
-
* @returns The timestamp
|
13697
|
-
*
|
13698
|
-
* @group ID3
|
13699
|
-
*
|
13700
|
-
* @beta
|
13701
|
-
*/
|
13702
|
-
function getId3Timestamp(data) {
|
13703
|
-
const frames = getId3Frames(data);
|
13704
|
-
for (let i = 0; i < frames.length; i++) {
|
13705
|
-
const frame = frames[i];
|
13706
|
-
if (isId3TimestampFrame(frame)) {
|
13707
|
-
return readId3Timestamp(frame);
|
13708
|
-
}
|
13709
|
-
}
|
13710
|
-
return undefined;
|
13711
|
-
}
|
13712
|
-
|
13713
|
-
/**
|
13714
|
-
* Checks if the given data contains an ID3 tag.
|
13715
|
-
*
|
13716
|
-
* @param data - The data to check
|
13717
|
-
* @param offset - The offset at which to start checking
|
13718
|
-
*
|
13719
|
-
* @returns `true` if an ID3 tag is found
|
13720
|
-
*
|
13721
|
-
* @group ID3
|
13722
|
-
*
|
13723
|
-
* @beta
|
13724
|
-
*/
|
13725
|
-
function canParseId3(data, offset) {
|
13726
|
-
return isId3Header(data, offset) && readId3Size(data, offset + 6) + 10 <= data.length - offset;
|
13727
|
-
}
|
13728
|
-
|
13729
13202
|
class BaseAudioDemuxer {
|
13730
13203
|
constructor() {
|
13731
13204
|
this._audioTrack = void 0;
|
@@ -13767,12 +13240,12 @@ class BaseAudioDemuxer {
|
|
13767
13240
|
data = appendUint8Array(this.cachedData, data);
|
13768
13241
|
this.cachedData = null;
|
13769
13242
|
}
|
13770
|
-
let id3Data =
|
13243
|
+
let id3Data = getID3Data(data, 0);
|
13771
13244
|
let offset = id3Data ? id3Data.length : 0;
|
13772
13245
|
let lastDataIndex;
|
13773
13246
|
const track = this._audioTrack;
|
13774
13247
|
const id3Track = this._id3Track;
|
13775
|
-
const timestamp = id3Data ?
|
13248
|
+
const timestamp = id3Data ? getTimeStamp(id3Data) : undefined;
|
13776
13249
|
const length = data.length;
|
13777
13250
|
if (this.basePTS === null || this.frameIndex === 0 && isFiniteNumber(timestamp)) {
|
13778
13251
|
this.basePTS = initPTSFn(timestamp, timeOffset, this.initPTS);
|
@@ -13803,9 +13276,9 @@ class BaseAudioDemuxer {
|
|
13803
13276
|
} else {
|
13804
13277
|
offset = length;
|
13805
13278
|
}
|
13806
|
-
} else if (
|
13807
|
-
// after a canParse, a call to
|
13808
|
-
id3Data =
|
13279
|
+
} else if (canParse$2(data, offset)) {
|
13280
|
+
// after a ID3.canParse, a call to ID3.getID3Data *should* always returns some data
|
13281
|
+
id3Data = getID3Data(data, offset);
|
13809
13282
|
id3Track.samples.push({
|
13810
13283
|
pts: this.lastPTS,
|
13811
13284
|
dts: this.lastPTS,
|
@@ -13874,7 +13347,6 @@ const initPTSFn = (timestamp, timeOffset, initPTS) => {
|
|
13874
13347
|
*/
|
13875
13348
|
function getAudioConfig(observer, data, offset, audioCodec) {
|
13876
13349
|
let adtsObjectType;
|
13877
|
-
let originalAdtsObjectType;
|
13878
13350
|
let adtsExtensionSamplingIndex;
|
13879
13351
|
let adtsChannelConfig;
|
13880
13352
|
let config;
|
@@ -13882,7 +13354,7 @@ function getAudioConfig(observer, data, offset, audioCodec) {
|
|
13882
13354
|
const manifestCodec = audioCodec;
|
13883
13355
|
const adtsSamplingRates = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
|
13884
13356
|
// byte 2
|
13885
|
-
adtsObjectType =
|
13357
|
+
adtsObjectType = ((data[offset + 2] & 0xc0) >>> 6) + 1;
|
13886
13358
|
const adtsSamplingIndex = (data[offset + 2] & 0x3c) >>> 2;
|
13887
13359
|
if (adtsSamplingIndex > adtsSamplingRates.length - 1) {
|
13888
13360
|
const error = new Error(`invalid ADTS sampling index:${adtsSamplingIndex}`);
|
@@ -13899,8 +13371,8 @@ function getAudioConfig(observer, data, offset, audioCodec) {
|
|
13899
13371
|
// byte 3
|
13900
13372
|
adtsChannelConfig |= (data[offset + 3] & 0xc0) >>> 6;
|
13901
13373
|
logger.log(`manifest codec:${audioCodec}, ADTS type:${adtsObjectType}, samplingIndex:${adtsSamplingIndex}`);
|
13902
|
-
//
|
13903
|
-
if (/firefox
|
13374
|
+
// firefox: freq less than 24kHz = AAC SBR (HE-AAC)
|
13375
|
+
if (/firefox/i.test(userAgent)) {
|
13904
13376
|
if (adtsSamplingIndex >= 6) {
|
13905
13377
|
adtsObjectType = 5;
|
13906
13378
|
config = new Array(4);
|
@@ -13994,7 +13466,6 @@ function getAudioConfig(observer, data, offset, audioCodec) {
|
|
13994
13466
|
samplerate: adtsSamplingRates[adtsSamplingIndex],
|
13995
13467
|
channelCount: adtsChannelConfig,
|
13996
13468
|
codec: 'mp4a.40.' + adtsObjectType,
|
13997
|
-
parsedCodec: 'mp4a.40.' + originalAdtsObjectType,
|
13998
13469
|
manifestCodec
|
13999
13470
|
};
|
14000
13471
|
}
|
@@ -14049,8 +13520,7 @@ function initTrackConfig(track, observer, data, offset, audioCodec) {
|
|
14049
13520
|
track.channelCount = config.channelCount;
|
14050
13521
|
track.codec = config.codec;
|
14051
13522
|
track.manifestCodec = config.manifestCodec;
|
14052
|
-
track.
|
14053
|
-
logger.log(`parsed codec:${track.parsedCodec}, codec:${track.codec}, rate:${config.samplerate}, channels:${config.channelCount}`);
|
13523
|
+
logger.log(`parsed codec:${track.codec}, rate:${config.samplerate}, channels:${config.channelCount}`);
|
14054
13524
|
}
|
14055
13525
|
}
|
14056
13526
|
function getFrameDuration(samplerate) {
|
@@ -14299,7 +13769,7 @@ class AACDemuxer extends BaseAudioDemuxer {
|
|
14299
13769
|
// Look for ADTS header | 1111 1111 | 1111 X00X | where X can be either 0 or 1
|
14300
13770
|
// Layer bits (position 14 and 15) in header should be always 0 for ADTS
|
14301
13771
|
// More info https://wiki.multimedia.cx/index.php?title=ADTS
|
14302
|
-
const id3Data =
|
13772
|
+
const id3Data = getID3Data(data, 0);
|
14303
13773
|
let offset = (id3Data == null ? void 0 : id3Data.length) || 0;
|
14304
13774
|
if (probe(data, offset)) {
|
14305
13775
|
return false;
|
@@ -14492,6 +13962,20 @@ class BaseVideoParser {
|
|
14492
13962
|
length: 0
|
14493
13963
|
};
|
14494
13964
|
}
|
13965
|
+
getLastNalUnit(samples) {
|
13966
|
+
var _VideoSample;
|
13967
|
+
let VideoSample = this.VideoSample;
|
13968
|
+
let lastUnit;
|
13969
|
+
// try to fallback to previous sample if current one is empty
|
13970
|
+
if (!VideoSample || VideoSample.units.length === 0) {
|
13971
|
+
VideoSample = samples[samples.length - 1];
|
13972
|
+
}
|
13973
|
+
if ((_VideoSample = VideoSample) != null && _VideoSample.units) {
|
13974
|
+
const units = VideoSample.units;
|
13975
|
+
lastUnit = units[units.length - 1];
|
13976
|
+
}
|
13977
|
+
return lastUnit;
|
13978
|
+
}
|
14495
13979
|
pushAccessUnit(VideoSample, videoTrack) {
|
14496
13980
|
if (VideoSample.units.length && VideoSample.frame) {
|
14497
13981
|
// if sample does not have PTS/DTS, patch with last sample PTS/DTS
|
@@ -14514,122 +13998,6 @@ class BaseVideoParser {
|
|
14514
13998
|
logger.log(VideoSample.pts + '/' + VideoSample.dts + ':' + VideoSample.debug);
|
14515
13999
|
}
|
14516
14000
|
}
|
14517
|
-
parseNALu(track, array, last) {
|
14518
|
-
const len = array.byteLength;
|
14519
|
-
let state = track.naluState || 0;
|
14520
|
-
const lastState = state;
|
14521
|
-
const units = [];
|
14522
|
-
let i = 0;
|
14523
|
-
let value;
|
14524
|
-
let overflow;
|
14525
|
-
let unitType;
|
14526
|
-
let lastUnitStart = -1;
|
14527
|
-
let lastUnitType = 0;
|
14528
|
-
// logger.log('PES:' + Hex.hexDump(array));
|
14529
|
-
|
14530
|
-
if (state === -1) {
|
14531
|
-
// special use case where we found 3 or 4-byte start codes exactly at the end of previous PES packet
|
14532
|
-
lastUnitStart = 0;
|
14533
|
-
// NALu type is value read from offset 0
|
14534
|
-
lastUnitType = this.getNALuType(array, 0);
|
14535
|
-
state = 0;
|
14536
|
-
i = 1;
|
14537
|
-
}
|
14538
|
-
while (i < len) {
|
14539
|
-
value = array[i++];
|
14540
|
-
// optimization. state 0 and 1 are the predominant case. let's handle them outside of the switch/case
|
14541
|
-
if (!state) {
|
14542
|
-
state = value ? 0 : 1;
|
14543
|
-
continue;
|
14544
|
-
}
|
14545
|
-
if (state === 1) {
|
14546
|
-
state = value ? 0 : 2;
|
14547
|
-
continue;
|
14548
|
-
}
|
14549
|
-
// here we have state either equal to 2 or 3
|
14550
|
-
if (!value) {
|
14551
|
-
state = 3;
|
14552
|
-
} else if (value === 1) {
|
14553
|
-
overflow = i - state - 1;
|
14554
|
-
if (lastUnitStart >= 0) {
|
14555
|
-
const unit = {
|
14556
|
-
data: array.subarray(lastUnitStart, overflow),
|
14557
|
-
type: lastUnitType
|
14558
|
-
};
|
14559
|
-
if (track.lastNalu) {
|
14560
|
-
units.push(track.lastNalu);
|
14561
|
-
track.lastNalu = null;
|
14562
|
-
}
|
14563
|
-
// logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
|
14564
|
-
units.push(unit);
|
14565
|
-
} else {
|
14566
|
-
// lastUnitStart is undefined => this is the first start code found in this PES packet
|
14567
|
-
// first check if start code delimiter is overlapping between 2 PES packets,
|
14568
|
-
// ie it started in last packet (lastState not zero)
|
14569
|
-
// and ended at the beginning of this PES packet (i <= 4 - lastState)
|
14570
|
-
const lastUnit = track.lastNalu;
|
14571
|
-
if (lastUnit) {
|
14572
|
-
if (lastState && i <= 4 - lastState) {
|
14573
|
-
// start delimiter overlapping between PES packets
|
14574
|
-
// strip start delimiter bytes from the end of last NAL unit
|
14575
|
-
// check if lastUnit had a state different from zero
|
14576
|
-
if (lastUnit.state) {
|
14577
|
-
// strip last bytes
|
14578
|
-
lastUnit.data = lastUnit.data.subarray(0, lastUnit.data.byteLength - lastState);
|
14579
|
-
}
|
14580
|
-
}
|
14581
|
-
// If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
|
14582
|
-
|
14583
|
-
if (overflow > 0) {
|
14584
|
-
// logger.log('first NALU found with overflow:' + overflow);
|
14585
|
-
lastUnit.data = appendUint8Array(lastUnit.data, array.subarray(0, overflow));
|
14586
|
-
lastUnit.state = 0;
|
14587
|
-
units.push(lastUnit);
|
14588
|
-
track.lastNalu = null;
|
14589
|
-
}
|
14590
|
-
}
|
14591
|
-
}
|
14592
|
-
// check if we can read unit type
|
14593
|
-
if (i < len) {
|
14594
|
-
unitType = this.getNALuType(array, i);
|
14595
|
-
// logger.log('find NALU @ offset:' + i + ',type:' + unitType);
|
14596
|
-
lastUnitStart = i;
|
14597
|
-
lastUnitType = unitType;
|
14598
|
-
state = 0;
|
14599
|
-
} else {
|
14600
|
-
// not enough byte to read unit type. let's read it on next PES parsing
|
14601
|
-
state = -1;
|
14602
|
-
}
|
14603
|
-
} else {
|
14604
|
-
state = 0;
|
14605
|
-
}
|
14606
|
-
}
|
14607
|
-
if (lastUnitStart >= 0 && state >= 0) {
|
14608
|
-
const unit = {
|
14609
|
-
data: array.subarray(lastUnitStart, len),
|
14610
|
-
type: lastUnitType,
|
14611
|
-
state: state
|
14612
|
-
};
|
14613
|
-
if (!last) {
|
14614
|
-
track.lastNalu = unit;
|
14615
|
-
// logger.log('store NALu to push it on next PES');
|
14616
|
-
} else {
|
14617
|
-
units.push(unit);
|
14618
|
-
// logger.log('pushing NALU, type/size/state:' + unit.type + '/' + unit.data.byteLength + '/' + state);
|
14619
|
-
}
|
14620
|
-
} else if (units.length === 0) {
|
14621
|
-
// no NALu found
|
14622
|
-
// append pes.data to previous NAL unit
|
14623
|
-
const lastUnit = track.lastNalu;
|
14624
|
-
if (lastUnit) {
|
14625
|
-
lastUnit.data = appendUint8Array(lastUnit.data, array);
|
14626
|
-
units.push(lastUnit);
|
14627
|
-
track.lastNalu = null;
|
14628
|
-
}
|
14629
|
-
}
|
14630
|
-
track.naluState = state;
|
14631
|
-
return units;
|
14632
|
-
}
|
14633
14001
|
}
|
14634
14002
|
|
14635
14003
|
/**
|
@@ -14772,18 +14140,201 @@ class ExpGolomb {
|
|
14772
14140
|
readUInt() {
|
14773
14141
|
return this.readBits(32);
|
14774
14142
|
}
|
14775
|
-
}
|
14776
14143
|
|
14777
|
-
|
14778
|
-
|
14779
|
-
|
14780
|
-
|
14781
|
-
|
14782
|
-
|
14783
|
-
|
14784
|
-
|
14144
|
+
/**
|
14145
|
+
* Advance the ExpGolomb decoder past a scaling list. The scaling
|
14146
|
+
* list is optionally transmitted as part of a sequence parameter
|
14147
|
+
* set and is not relevant to transmuxing.
|
14148
|
+
* @param count the number of entries in this scaling list
|
14149
|
+
* @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
|
14150
|
+
*/
|
14151
|
+
skipScalingList(count) {
|
14152
|
+
let lastScale = 8;
|
14153
|
+
let nextScale = 8;
|
14154
|
+
let deltaScale;
|
14155
|
+
for (let j = 0; j < count; j++) {
|
14156
|
+
if (nextScale !== 0) {
|
14157
|
+
deltaScale = this.readEG();
|
14158
|
+
nextScale = (lastScale + deltaScale + 256) % 256;
|
14159
|
+
}
|
14160
|
+
lastScale = nextScale === 0 ? lastScale : nextScale;
|
14161
|
+
}
|
14162
|
+
}
|
14785
14163
|
|
14786
|
-
|
14164
|
+
/**
|
14165
|
+
* Read a sequence parameter set and return some interesting video
|
14166
|
+
* properties. A sequence parameter set is the H264 metadata that
|
14167
|
+
* describes the properties of upcoming video frames.
|
14168
|
+
* @returns an object with configuration parsed from the
|
14169
|
+
* sequence parameter set, including the dimensions of the
|
14170
|
+
* associated video frames.
|
14171
|
+
*/
|
14172
|
+
readSPS() {
|
14173
|
+
let frameCropLeftOffset = 0;
|
14174
|
+
let frameCropRightOffset = 0;
|
14175
|
+
let frameCropTopOffset = 0;
|
14176
|
+
let frameCropBottomOffset = 0;
|
14177
|
+
let numRefFramesInPicOrderCntCycle;
|
14178
|
+
let scalingListCount;
|
14179
|
+
let i;
|
14180
|
+
const readUByte = this.readUByte.bind(this);
|
14181
|
+
const readBits = this.readBits.bind(this);
|
14182
|
+
const readUEG = this.readUEG.bind(this);
|
14183
|
+
const readBoolean = this.readBoolean.bind(this);
|
14184
|
+
const skipBits = this.skipBits.bind(this);
|
14185
|
+
const skipEG = this.skipEG.bind(this);
|
14186
|
+
const skipUEG = this.skipUEG.bind(this);
|
14187
|
+
const skipScalingList = this.skipScalingList.bind(this);
|
14188
|
+
readUByte();
|
14189
|
+
const profileIdc = readUByte(); // profile_idc
|
14190
|
+
readBits(5); // profileCompat constraint_set[0-4]_flag, u(5)
|
14191
|
+
skipBits(3); // reserved_zero_3bits u(3),
|
14192
|
+
readUByte(); // level_idc u(8)
|
14193
|
+
skipUEG(); // seq_parameter_set_id
|
14194
|
+
// some profiles have more optional data we don't need
|
14195
|
+
if (profileIdc === 100 || profileIdc === 110 || profileIdc === 122 || profileIdc === 244 || profileIdc === 44 || profileIdc === 83 || profileIdc === 86 || profileIdc === 118 || profileIdc === 128) {
|
14196
|
+
const chromaFormatIdc = readUEG();
|
14197
|
+
if (chromaFormatIdc === 3) {
|
14198
|
+
skipBits(1);
|
14199
|
+
} // separate_colour_plane_flag
|
14200
|
+
|
14201
|
+
skipUEG(); // bit_depth_luma_minus8
|
14202
|
+
skipUEG(); // bit_depth_chroma_minus8
|
14203
|
+
skipBits(1); // qpprime_y_zero_transform_bypass_flag
|
14204
|
+
if (readBoolean()) {
|
14205
|
+
// seq_scaling_matrix_present_flag
|
14206
|
+
scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
|
14207
|
+
for (i = 0; i < scalingListCount; i++) {
|
14208
|
+
if (readBoolean()) {
|
14209
|
+
// seq_scaling_list_present_flag[ i ]
|
14210
|
+
if (i < 6) {
|
14211
|
+
skipScalingList(16);
|
14212
|
+
} else {
|
14213
|
+
skipScalingList(64);
|
14214
|
+
}
|
14215
|
+
}
|
14216
|
+
}
|
14217
|
+
}
|
14218
|
+
}
|
14219
|
+
skipUEG(); // log2_max_frame_num_minus4
|
14220
|
+
const picOrderCntType = readUEG();
|
14221
|
+
if (picOrderCntType === 0) {
|
14222
|
+
readUEG(); // log2_max_pic_order_cnt_lsb_minus4
|
14223
|
+
} else if (picOrderCntType === 1) {
|
14224
|
+
skipBits(1); // delta_pic_order_always_zero_flag
|
14225
|
+
skipEG(); // offset_for_non_ref_pic
|
14226
|
+
skipEG(); // offset_for_top_to_bottom_field
|
14227
|
+
numRefFramesInPicOrderCntCycle = readUEG();
|
14228
|
+
for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
|
14229
|
+
skipEG();
|
14230
|
+
} // offset_for_ref_frame[ i ]
|
14231
|
+
}
|
14232
|
+
skipUEG(); // max_num_ref_frames
|
14233
|
+
skipBits(1); // gaps_in_frame_num_value_allowed_flag
|
14234
|
+
const picWidthInMbsMinus1 = readUEG();
|
14235
|
+
const picHeightInMapUnitsMinus1 = readUEG();
|
14236
|
+
const frameMbsOnlyFlag = readBits(1);
|
14237
|
+
if (frameMbsOnlyFlag === 0) {
|
14238
|
+
skipBits(1);
|
14239
|
+
} // mb_adaptive_frame_field_flag
|
14240
|
+
|
14241
|
+
skipBits(1); // direct_8x8_inference_flag
|
14242
|
+
if (readBoolean()) {
|
14243
|
+
// frame_cropping_flag
|
14244
|
+
frameCropLeftOffset = readUEG();
|
14245
|
+
frameCropRightOffset = readUEG();
|
14246
|
+
frameCropTopOffset = readUEG();
|
14247
|
+
frameCropBottomOffset = readUEG();
|
14248
|
+
}
|
14249
|
+
let pixelRatio = [1, 1];
|
14250
|
+
if (readBoolean()) {
|
14251
|
+
// vui_parameters_present_flag
|
14252
|
+
if (readBoolean()) {
|
14253
|
+
// aspect_ratio_info_present_flag
|
14254
|
+
const aspectRatioIdc = readUByte();
|
14255
|
+
switch (aspectRatioIdc) {
|
14256
|
+
case 1:
|
14257
|
+
pixelRatio = [1, 1];
|
14258
|
+
break;
|
14259
|
+
case 2:
|
14260
|
+
pixelRatio = [12, 11];
|
14261
|
+
break;
|
14262
|
+
case 3:
|
14263
|
+
pixelRatio = [10, 11];
|
14264
|
+
break;
|
14265
|
+
case 4:
|
14266
|
+
pixelRatio = [16, 11];
|
14267
|
+
break;
|
14268
|
+
case 5:
|
14269
|
+
pixelRatio = [40, 33];
|
14270
|
+
break;
|
14271
|
+
case 6:
|
14272
|
+
pixelRatio = [24, 11];
|
14273
|
+
break;
|
14274
|
+
case 7:
|
14275
|
+
pixelRatio = [20, 11];
|
14276
|
+
break;
|
14277
|
+
case 8:
|
14278
|
+
pixelRatio = [32, 11];
|
14279
|
+
break;
|
14280
|
+
case 9:
|
14281
|
+
pixelRatio = [80, 33];
|
14282
|
+
break;
|
14283
|
+
case 10:
|
14284
|
+
pixelRatio = [18, 11];
|
14285
|
+
break;
|
14286
|
+
case 11:
|
14287
|
+
pixelRatio = [15, 11];
|
14288
|
+
break;
|
14289
|
+
case 12:
|
14290
|
+
pixelRatio = [64, 33];
|
14291
|
+
break;
|
14292
|
+
case 13:
|
14293
|
+
pixelRatio = [160, 99];
|
14294
|
+
break;
|
14295
|
+
case 14:
|
14296
|
+
pixelRatio = [4, 3];
|
14297
|
+
break;
|
14298
|
+
case 15:
|
14299
|
+
pixelRatio = [3, 2];
|
14300
|
+
break;
|
14301
|
+
case 16:
|
14302
|
+
pixelRatio = [2, 1];
|
14303
|
+
break;
|
14304
|
+
case 255:
|
14305
|
+
{
|
14306
|
+
pixelRatio = [readUByte() << 8 | readUByte(), readUByte() << 8 | readUByte()];
|
14307
|
+
break;
|
14308
|
+
}
|
14309
|
+
}
|
14310
|
+
}
|
14311
|
+
}
|
14312
|
+
return {
|
14313
|
+
width: Math.ceil((picWidthInMbsMinus1 + 1) * 16 - frameCropLeftOffset * 2 - frameCropRightOffset * 2),
|
14314
|
+
height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - (frameMbsOnlyFlag ? 2 : 4) * (frameCropTopOffset + frameCropBottomOffset),
|
14315
|
+
pixelRatio: pixelRatio
|
14316
|
+
};
|
14317
|
+
}
|
14318
|
+
readSliceType() {
|
14319
|
+
// skip NALu type
|
14320
|
+
this.readUByte();
|
14321
|
+
// discard first_mb_in_slice
|
14322
|
+
this.readUEG();
|
14323
|
+
// return slice_type
|
14324
|
+
return this.readUEG();
|
14325
|
+
}
|
14326
|
+
}
|
14327
|
+
|
14328
|
+
class AvcVideoParser extends BaseVideoParser {
|
14329
|
+
parseAVCPES(track, textTrack, pes, last, duration) {
|
14330
|
+
const units = this.parseAVCNALu(track, pes.data);
|
14331
|
+
let VideoSample = this.VideoSample;
|
14332
|
+
let push;
|
14333
|
+
let spsfound = false;
|
14334
|
+
// free pes.data to save up some memory
|
14335
|
+
pes.data = null;
|
14336
|
+
|
14337
|
+
// if new NAL units found and last sample still there, let's push ...
|
14787
14338
|
// this helps parsing streams with missing AUD (only do this if AUD never found)
|
14788
14339
|
if (VideoSample && units.length && !track.audFound) {
|
14789
14340
|
this.pushAccessUnit(VideoSample, track);
|
@@ -14801,7 +14352,7 @@ class AvcVideoParser extends BaseVideoParser {
|
|
14801
14352
|
// only check slice type to detect KF in case SPS found in same packet (any keyframe is preceded by SPS ...)
|
14802
14353
|
if (spsfound && data.length > 4) {
|
14803
14354
|
// retrieve slice type by parsing beginning of NAL unit (follow H264 spec, slice_header definition) to detect keyframe embedded in NDR
|
14804
|
-
const sliceType =
|
14355
|
+
const sliceType = new ExpGolomb(data).readSliceType();
|
14805
14356
|
// 2 : I slice, 4 : SI slice, 7 : I slice, 9: SI slice
|
14806
14357
|
// SI slice : A slice that is coded using intra prediction only and using quantisation of the prediction samples.
|
14807
14358
|
// An SI slice can be coded such that its decoded samples can be constructed identically to an SP slice.
|
@@ -14855,7 +14406,8 @@ class AvcVideoParser extends BaseVideoParser {
|
|
14855
14406
|
push = true;
|
14856
14407
|
spsfound = true;
|
14857
14408
|
const sps = unit.data;
|
14858
|
-
const
|
14409
|
+
const expGolombDecoder = new ExpGolomb(sps);
|
14410
|
+
const config = expGolombDecoder.readSPS();
|
14859
14411
|
if (!track.sps || track.width !== config.width || track.height !== config.height || ((_track$pixelRatio = track.pixelRatio) == null ? void 0 : _track$pixelRatio[0]) !== config.pixelRatio[0] || ((_track$pixelRatio2 = track.pixelRatio) == null ? void 0 : _track$pixelRatio2[1]) !== config.pixelRatio[1]) {
|
14860
14412
|
track.width = config.width;
|
14861
14413
|
track.height = config.height;
|
@@ -14911,192 +14463,109 @@ class AvcVideoParser extends BaseVideoParser {
|
|
14911
14463
|
this.VideoSample = null;
|
14912
14464
|
}
|
14913
14465
|
}
|
14914
|
-
|
14915
|
-
|
14916
|
-
|
14917
|
-
|
14918
|
-
const
|
14919
|
-
|
14920
|
-
|
14921
|
-
|
14922
|
-
|
14923
|
-
|
14924
|
-
|
14925
|
-
|
14466
|
+
parseAVCNALu(track, array) {
|
14467
|
+
const len = array.byteLength;
|
14468
|
+
let state = track.naluState || 0;
|
14469
|
+
const lastState = state;
|
14470
|
+
const units = [];
|
14471
|
+
let i = 0;
|
14472
|
+
let value;
|
14473
|
+
let overflow;
|
14474
|
+
let unitType;
|
14475
|
+
let lastUnitStart = -1;
|
14476
|
+
let lastUnitType = 0;
|
14477
|
+
// logger.log('PES:' + Hex.hexDump(array));
|
14926
14478
|
|
14927
|
-
|
14928
|
-
|
14929
|
-
|
14930
|
-
|
14931
|
-
|
14932
|
-
|
14933
|
-
|
14934
|
-
let lastScale = 8;
|
14935
|
-
let nextScale = 8;
|
14936
|
-
let deltaScale;
|
14937
|
-
for (let j = 0; j < count; j++) {
|
14938
|
-
if (nextScale !== 0) {
|
14939
|
-
deltaScale = reader.readEG();
|
14940
|
-
nextScale = (lastScale + deltaScale + 256) % 256;
|
14941
|
-
}
|
14942
|
-
lastScale = nextScale === 0 ? lastScale : nextScale;
|
14479
|
+
if (state === -1) {
|
14480
|
+
// special use case where we found 3 or 4-byte start codes exactly at the end of previous PES packet
|
14481
|
+
lastUnitStart = 0;
|
14482
|
+
// NALu type is value read from offset 0
|
14483
|
+
lastUnitType = array[0] & 0x1f;
|
14484
|
+
state = 0;
|
14485
|
+
i = 1;
|
14943
14486
|
}
|
14944
|
-
|
14945
|
-
|
14946
|
-
|
14947
|
-
|
14948
|
-
|
14949
|
-
|
14950
|
-
|
14951
|
-
|
14952
|
-
|
14953
|
-
|
14954
|
-
|
14955
|
-
|
14956
|
-
|
14957
|
-
|
14958
|
-
|
14959
|
-
|
14960
|
-
|
14961
|
-
|
14962
|
-
|
14963
|
-
|
14964
|
-
|
14965
|
-
|
14966
|
-
|
14967
|
-
|
14968
|
-
|
14969
|
-
|
14970
|
-
|
14971
|
-
|
14972
|
-
|
14973
|
-
|
14974
|
-
|
14975
|
-
|
14976
|
-
|
14977
|
-
|
14978
|
-
|
14979
|
-
|
14980
|
-
|
14981
|
-
|
14982
|
-
|
14487
|
+
while (i < len) {
|
14488
|
+
value = array[i++];
|
14489
|
+
// optimization. state 0 and 1 are the predominant case. let's handle them outside of the switch/case
|
14490
|
+
if (!state) {
|
14491
|
+
state = value ? 0 : 1;
|
14492
|
+
continue;
|
14493
|
+
}
|
14494
|
+
if (state === 1) {
|
14495
|
+
state = value ? 0 : 2;
|
14496
|
+
continue;
|
14497
|
+
}
|
14498
|
+
// here we have state either equal to 2 or 3
|
14499
|
+
if (!value) {
|
14500
|
+
state = 3;
|
14501
|
+
} else if (value === 1) {
|
14502
|
+
overflow = i - state - 1;
|
14503
|
+
if (lastUnitStart >= 0) {
|
14504
|
+
const unit = {
|
14505
|
+
data: array.subarray(lastUnitStart, overflow),
|
14506
|
+
type: lastUnitType
|
14507
|
+
};
|
14508
|
+
// logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
|
14509
|
+
units.push(unit);
|
14510
|
+
} else {
|
14511
|
+
// lastUnitStart is undefined => this is the first start code found in this PES packet
|
14512
|
+
// first check if start code delimiter is overlapping between 2 PES packets,
|
14513
|
+
// ie it started in last packet (lastState not zero)
|
14514
|
+
// and ended at the beginning of this PES packet (i <= 4 - lastState)
|
14515
|
+
const lastUnit = this.getLastNalUnit(track.samples);
|
14516
|
+
if (lastUnit) {
|
14517
|
+
if (lastState && i <= 4 - lastState) {
|
14518
|
+
// start delimiter overlapping between PES packets
|
14519
|
+
// strip start delimiter bytes from the end of last NAL unit
|
14520
|
+
// check if lastUnit had a state different from zero
|
14521
|
+
if (lastUnit.state) {
|
14522
|
+
// strip last bytes
|
14523
|
+
lastUnit.data = lastUnit.data.subarray(0, lastUnit.data.byteLength - lastState);
|
14524
|
+
}
|
14525
|
+
}
|
14526
|
+
// If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
|
14983
14527
|
|
14984
|
-
|
14985
|
-
|
14986
|
-
|
14987
|
-
|
14988
|
-
// seq_scaling_matrix_present_flag
|
14989
|
-
scalingListCount = chromaFormatIdc !== 3 ? 8 : 12;
|
14990
|
-
for (i = 0; i < scalingListCount; i++) {
|
14991
|
-
if (readBoolean()) {
|
14992
|
-
// seq_scaling_list_present_flag[ i ]
|
14993
|
-
if (i < 6) {
|
14994
|
-
skipScalingList(16, eg);
|
14995
|
-
} else {
|
14996
|
-
skipScalingList(64, eg);
|
14528
|
+
if (overflow > 0) {
|
14529
|
+
// logger.log('first NALU found with overflow:' + overflow);
|
14530
|
+
lastUnit.data = appendUint8Array(lastUnit.data, array.subarray(0, overflow));
|
14531
|
+
lastUnit.state = 0;
|
14997
14532
|
}
|
14998
14533
|
}
|
14999
14534
|
}
|
14535
|
+
// check if we can read unit type
|
14536
|
+
if (i < len) {
|
14537
|
+
unitType = array[i] & 0x1f;
|
14538
|
+
// logger.log('find NALU @ offset:' + i + ',type:' + unitType);
|
14539
|
+
lastUnitStart = i;
|
14540
|
+
lastUnitType = unitType;
|
14541
|
+
state = 0;
|
14542
|
+
} else {
|
14543
|
+
// not enough byte to read unit type. let's read it on next PES parsing
|
14544
|
+
state = -1;
|
14545
|
+
}
|
14546
|
+
} else {
|
14547
|
+
state = 0;
|
15000
14548
|
}
|
15001
14549
|
}
|
15002
|
-
|
15003
|
-
|
15004
|
-
|
15005
|
-
|
15006
|
-
|
15007
|
-
|
15008
|
-
|
15009
|
-
|
15010
|
-
numRefFramesInPicOrderCntCycle = readUEG();
|
15011
|
-
for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
|
15012
|
-
skipEG();
|
15013
|
-
} // offset_for_ref_frame[ i ]
|
15014
|
-
}
|
15015
|
-
skipUEG(); // max_num_ref_frames
|
15016
|
-
skipBits(1); // gaps_in_frame_num_value_allowed_flag
|
15017
|
-
const picWidthInMbsMinus1 = readUEG();
|
15018
|
-
const picHeightInMapUnitsMinus1 = readUEG();
|
15019
|
-
const frameMbsOnlyFlag = readBits(1);
|
15020
|
-
if (frameMbsOnlyFlag === 0) {
|
15021
|
-
skipBits(1);
|
15022
|
-
} // mb_adaptive_frame_field_flag
|
15023
|
-
|
15024
|
-
skipBits(1); // direct_8x8_inference_flag
|
15025
|
-
if (readBoolean()) {
|
15026
|
-
// frame_cropping_flag
|
15027
|
-
frameCropLeftOffset = readUEG();
|
15028
|
-
frameCropRightOffset = readUEG();
|
15029
|
-
frameCropTopOffset = readUEG();
|
15030
|
-
frameCropBottomOffset = readUEG();
|
14550
|
+
if (lastUnitStart >= 0 && state >= 0) {
|
14551
|
+
const unit = {
|
14552
|
+
data: array.subarray(lastUnitStart, len),
|
14553
|
+
type: lastUnitType,
|
14554
|
+
state: state
|
14555
|
+
};
|
14556
|
+
units.push(unit);
|
14557
|
+
// logger.log('pushing NALU, type/size/state:' + unit.type + '/' + unit.data.byteLength + '/' + state);
|
15031
14558
|
}
|
15032
|
-
|
15033
|
-
if (
|
15034
|
-
//
|
15035
|
-
|
15036
|
-
|
15037
|
-
|
15038
|
-
switch (aspectRatioIdc) {
|
15039
|
-
case 1:
|
15040
|
-
pixelRatio = [1, 1];
|
15041
|
-
break;
|
15042
|
-
case 2:
|
15043
|
-
pixelRatio = [12, 11];
|
15044
|
-
break;
|
15045
|
-
case 3:
|
15046
|
-
pixelRatio = [10, 11];
|
15047
|
-
break;
|
15048
|
-
case 4:
|
15049
|
-
pixelRatio = [16, 11];
|
15050
|
-
break;
|
15051
|
-
case 5:
|
15052
|
-
pixelRatio = [40, 33];
|
15053
|
-
break;
|
15054
|
-
case 6:
|
15055
|
-
pixelRatio = [24, 11];
|
15056
|
-
break;
|
15057
|
-
case 7:
|
15058
|
-
pixelRatio = [20, 11];
|
15059
|
-
break;
|
15060
|
-
case 8:
|
15061
|
-
pixelRatio = [32, 11];
|
15062
|
-
break;
|
15063
|
-
case 9:
|
15064
|
-
pixelRatio = [80, 33];
|
15065
|
-
break;
|
15066
|
-
case 10:
|
15067
|
-
pixelRatio = [18, 11];
|
15068
|
-
break;
|
15069
|
-
case 11:
|
15070
|
-
pixelRatio = [15, 11];
|
15071
|
-
break;
|
15072
|
-
case 12:
|
15073
|
-
pixelRatio = [64, 33];
|
15074
|
-
break;
|
15075
|
-
case 13:
|
15076
|
-
pixelRatio = [160, 99];
|
15077
|
-
break;
|
15078
|
-
case 14:
|
15079
|
-
pixelRatio = [4, 3];
|
15080
|
-
break;
|
15081
|
-
case 15:
|
15082
|
-
pixelRatio = [3, 2];
|
15083
|
-
break;
|
15084
|
-
case 16:
|
15085
|
-
pixelRatio = [2, 1];
|
15086
|
-
break;
|
15087
|
-
case 255:
|
15088
|
-
{
|
15089
|
-
pixelRatio = [readUByte() << 8 | readUByte(), readUByte() << 8 | readUByte()];
|
15090
|
-
break;
|
15091
|
-
}
|
15092
|
-
}
|
14559
|
+
// no NALu found
|
14560
|
+
if (units.length === 0) {
|
14561
|
+
// append pes.data to previous NAL unit
|
14562
|
+
const lastUnit = this.getLastNalUnit(track.samples);
|
14563
|
+
if (lastUnit) {
|
14564
|
+
lastUnit.data = appendUint8Array(lastUnit.data, array);
|
15093
14565
|
}
|
15094
14566
|
}
|
15095
|
-
|
15096
|
-
|
15097
|
-
height: (2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16 - (frameMbsOnlyFlag ? 2 : 4) * (frameCropTopOffset + frameCropBottomOffset),
|
15098
|
-
pixelRatio: pixelRatio
|
15099
|
-
};
|
14567
|
+
track.naluState = state;
|
14568
|
+
return units;
|
15100
14569
|
}
|
15101
14570
|
}
|
15102
14571
|
|
@@ -15114,7 +14583,7 @@ class SampleAesDecrypter {
|
|
15114
14583
|
});
|
15115
14584
|
}
|
15116
14585
|
decryptBuffer(encryptedData) {
|
15117
|
-
return this.decrypter.decrypt(encryptedData, this.keyData.key.buffer, this.keyData.iv.buffer
|
14586
|
+
return this.decrypter.decrypt(encryptedData, this.keyData.key.buffer, this.keyData.iv.buffer);
|
15118
14587
|
}
|
15119
14588
|
|
15120
14589
|
// AAC - encrypt all full 16 bytes blocks starting from offset 16
|
@@ -15228,7 +14697,7 @@ class TSDemuxer {
|
|
15228
14697
|
this.observer = observer;
|
15229
14698
|
this.config = config;
|
15230
14699
|
this.typeSupported = typeSupported;
|
15231
|
-
this.videoParser =
|
14700
|
+
this.videoParser = new AvcVideoParser();
|
15232
14701
|
}
|
15233
14702
|
static probe(data) {
|
15234
14703
|
const syncOffset = TSDemuxer.syncOffset(data);
|
@@ -15393,16 +14862,7 @@ class TSDemuxer {
|
|
15393
14862
|
case videoPid:
|
15394
14863
|
if (stt) {
|
15395
14864
|
if (videoData && (pes = parsePES(videoData))) {
|
15396
|
-
|
15397
|
-
switch (videoTrack.segmentCodec) {
|
15398
|
-
case 'avc':
|
15399
|
-
this.videoParser = new AvcVideoParser();
|
15400
|
-
break;
|
15401
|
-
}
|
15402
|
-
}
|
15403
|
-
if (this.videoParser !== null) {
|
15404
|
-
this.videoParser.parsePES(videoTrack, textTrack, pes, false, this._duration);
|
15405
|
-
}
|
14865
|
+
this.videoParser.parseAVCPES(videoTrack, textTrack, pes, false, this._duration);
|
15406
14866
|
}
|
15407
14867
|
videoData = {
|
15408
14868
|
data: [],
|
@@ -15557,17 +15017,8 @@ class TSDemuxer {
|
|
15557
15017
|
// try to parse last PES packets
|
15558
15018
|
let pes;
|
15559
15019
|
if (videoData && (pes = parsePES(videoData))) {
|
15560
|
-
|
15561
|
-
|
15562
|
-
case 'avc':
|
15563
|
-
this.videoParser = new AvcVideoParser();
|
15564
|
-
break;
|
15565
|
-
}
|
15566
|
-
}
|
15567
|
-
if (this.videoParser !== null) {
|
15568
|
-
this.videoParser.parsePES(videoTrack, textTrack, pes, true, this._duration);
|
15569
|
-
videoTrack.pesData = null;
|
15570
|
-
}
|
15020
|
+
this.videoParser.parseAVCPES(videoTrack, textTrack, pes, true, this._duration);
|
15021
|
+
videoTrack.pesData = null;
|
15571
15022
|
} else {
|
15572
15023
|
// either avcData null or PES truncated, keep it for next frag parsing
|
15573
15024
|
videoTrack.pesData = videoData;
|
@@ -15861,11 +15312,8 @@ function parsePMT(data, offset, typeSupported, isSampleAes, observer) {
|
|
15861
15312
|
emitParsingError(observer, new Error('Unsupported EC-3 in M2TS found'));
|
15862
15313
|
return result;
|
15863
15314
|
case 0x24:
|
15864
|
-
|
15865
|
-
|
15866
|
-
emitParsingError(observer, new Error('Unsupported HEVC in M2TS found'));
|
15867
|
-
return result;
|
15868
|
-
}
|
15315
|
+
emitParsingError(observer, new Error('Unsupported HEVC in M2TS found'));
|
15316
|
+
return result;
|
15869
15317
|
}
|
15870
15318
|
// move to the next table entry
|
15871
15319
|
// skip past the elementary stream descriptors, if present
|
@@ -16019,11 +15467,11 @@ class MP3Demuxer extends BaseAudioDemuxer {
|
|
16019
15467
|
// Look for MPEG header | 1111 1111 | 111X XYZX | where X can be either 0 or 1 and Y or Z should be 1
|
16020
15468
|
// Layer bits (position 14 and 15) in header should be always different from 0 (Layer I or Layer II or Layer III)
|
16021
15469
|
// More info http://www.mp3-tech.org/programmer/frame_header.html
|
16022
|
-
const id3Data =
|
15470
|
+
const id3Data = getID3Data(data, 0);
|
16023
15471
|
let offset = (id3Data == null ? void 0 : id3Data.length) || 0;
|
16024
15472
|
|
16025
15473
|
// Check for ac-3|ec-3 sync bytes and return false if present
|
16026
|
-
if (id3Data && data[offset] === 0x0b && data[offset + 1] === 0x77 &&
|
15474
|
+
if (id3Data && data[offset] === 0x0b && data[offset + 1] === 0x77 && getTimeStamp(id3Data) !== undefined &&
|
16027
15475
|
// check the bsid to confirm ac-3 or ec-3 (not mp3)
|
16028
15476
|
getAudioBSID(data, offset) <= 16) {
|
16029
15477
|
return false;
|
@@ -16098,8 +15546,6 @@ class MP4 {
       avc1: [],
       // codingname
       avcC: [],
-      hvc1: [],
-      hvcC: [],
       btrt: [],
       dinf: [],
       dref: [],
@@ -16524,10 +15970,8 @@ class MP4 {
         return MP4.box(MP4.types.stsd, MP4.STSD, MP4.ac3(track));
       }
       return MP4.box(MP4.types.stsd, MP4.STSD, MP4.mp4a(track));
-    } else if (track.segmentCodec === 'avc') {
-      return MP4.box(MP4.types.stsd, MP4.STSD, MP4.avc1(track));
     } else {
-      return MP4.box(MP4.types.stsd, MP4.STSD, MP4.
+      return MP4.box(MP4.types.stsd, MP4.STSD, MP4.avc1(track));
     }
   }
   static tkhd(track) {
@@ -16665,84 +16109,6 @@ class MP4 {
     const result = appendUint8Array(MP4.FTYP, movie);
     return result;
   }
-  static hvc1(track) {
-    const ps = track.params;
-    const units = [track.vps, track.sps, track.pps];
-    const NALuLengthSize = 4;
-    const config = new Uint8Array([0x01, ps.general_profile_space << 6 | (ps.general_tier_flag ? 32 : 0) | ps.general_profile_idc, ps.general_profile_compatibility_flags[0], ps.general_profile_compatibility_flags[1], ps.general_profile_compatibility_flags[2], ps.general_profile_compatibility_flags[3], ps.general_constraint_indicator_flags[0], ps.general_constraint_indicator_flags[1], ps.general_constraint_indicator_flags[2], ps.general_constraint_indicator_flags[3], ps.general_constraint_indicator_flags[4], ps.general_constraint_indicator_flags[5], ps.general_level_idc, 240 | ps.min_spatial_segmentation_idc >> 8, 255 & ps.min_spatial_segmentation_idc, 252 | ps.parallelismType, 252 | ps.chroma_format_idc, 248 | ps.bit_depth_luma_minus8, 248 | ps.bit_depth_chroma_minus8, 0x00, parseInt(ps.frame_rate.fps), NALuLengthSize - 1 | ps.temporal_id_nested << 2 | ps.num_temporal_layers << 3 | (ps.frame_rate.fixed ? 64 : 0), units.length]);
-
-    // compute hvcC size in bytes
-    let length = config.length;
-    for (let i = 0; i < units.length; i += 1) {
-      length += 3;
-      for (let j = 0; j < units[i].length; j += 1) {
-        length += 2 + units[i][j].length;
-      }
-    }
-    const hvcC = new Uint8Array(length);
-    hvcC.set(config, 0);
-    length = config.length;
-    // append parameter set units: one vps, one or more sps and pps
-    const iMax = units.length - 1;
-    for (let i = 0; i < units.length; i += 1) {
-      hvcC.set(new Uint8Array([32 + i | (i === iMax ? 128 : 0), 0x00, units[i].length]), length);
-      length += 3;
-      for (let j = 0; j < units[i].length; j += 1) {
-        hvcC.set(new Uint8Array([units[i][j].length >> 8, units[i][j].length & 255]), length);
-        length += 2;
-        hvcC.set(units[i][j], length);
-        length += units[i][j].length;
-      }
-    }
-    const hvcc = MP4.box(MP4.types.hvcC, hvcC);
-    const width = track.width;
-    const height = track.height;
-    const hSpacing = track.pixelRatio[0];
-    const vSpacing = track.pixelRatio[1];
-    return MP4.box(MP4.types.hvc1, new Uint8Array([0x00, 0x00, 0x00,
-    // reserved
-    0x00, 0x00, 0x00,
-    // reserved
-    0x00, 0x01,
-    // data_reference_index
-    0x00, 0x00,
-    // pre_defined
-    0x00, 0x00,
-    // reserved
-    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-    // pre_defined
-    width >> 8 & 0xff, width & 0xff,
-    // width
-    height >> 8 & 0xff, height & 0xff,
-    // height
-    0x00, 0x48, 0x00, 0x00,
-    // horizresolution
-    0x00, 0x48, 0x00, 0x00,
-    // vertresolution
-    0x00, 0x00, 0x00, 0x00,
-    // reserved
-    0x00, 0x01,
-    // frame_count
-    0x12, 0x64, 0x61, 0x69, 0x6c,
-    // dailymotion/hls.js
-    0x79, 0x6d, 0x6f, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x68, 0x6c, 0x73, 0x2e, 0x6a, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-    // compressorname
-    0x00, 0x18,
-    // depth = 24
-    0x11, 0x11]),
-    // pre_defined = -1
-    hvcc, MP4.box(MP4.types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80,
-    // bufferSizeDB
-    0x00, 0x2d, 0xc6, 0xc0,
-    // maxBitrate
-    0x00, 0x2d, 0xc6, 0xc0])),
-    // avgBitrate
-    MP4.box(MP4.types.pasp, new Uint8Array([hSpacing >> 24,
-    // hSpacing
-    hSpacing >> 16 & 0xff, hSpacing >> 8 & 0xff, hSpacing & 0xff, vSpacing >> 24,
-    // vSpacing
-    vSpacing >> 16 & 0xff, vSpacing >> 8 & 0xff, vSpacing & 0xff])));
-  }
 }
 MP4.types = void 0;
 MP4.HDLR_TYPES = void 0;
@@ -17118,9 +16484,9 @@ class MP4Remuxer {
       const foundOverlap = delta < -1;
       if (foundHole || foundOverlap) {
         if (foundHole) {
-          logger.warn(
+          logger.warn(`AVC: ${toMsFromMpegTsClock(delta, true)} ms (${delta}dts) hole between fragments detected at ${timeOffset.toFixed(3)}`);
         } else {
-          logger.warn(
+          logger.warn(`AVC: ${toMsFromMpegTsClock(-delta, true)} ms (${delta}dts) overlapping between fragments detected at ${timeOffset.toFixed(3)}`);
         }
         if (!foundOverlap || nextAvcDts >= inputSamples[0].pts || chromeVersion) {
           firstDTS = nextAvcDts;
@@ -17129,24 +16495,12 @@ class MP4Remuxer {
           inputSamples[0].dts = firstDTS;
           inputSamples[0].pts = firstPTS;
         } else {
-          let isPTSOrderRetained = true;
           for (let i = 0; i < inputSamples.length; i++) {
-            if (inputSamples[i].dts > firstPTS
+            if (inputSamples[i].dts > firstPTS) {
               break;
             }
-            const prevPTS = inputSamples[i].pts;
             inputSamples[i].dts -= delta;
             inputSamples[i].pts -= delta;
-
-            // check to see if this sample's PTS order has changed
-            // relative to the next one
-            if (i < inputSamples.length - 1) {
-              const nextSamplePTS = inputSamples[i + 1].pts;
-              const currentSamplePTS = inputSamples[i].pts;
-              const currentOrder = nextSamplePTS <= currentSamplePTS;
-              const prevOrder = nextSamplePTS <= prevPTS;
-              isPTSOrderRetained = currentOrder == prevOrder;
-            }
           }
         }
         logger.log(`Video: Initial PTS/DTS adjusted: ${toMsFromMpegTsClock(firstPTS, true)}/${toMsFromMpegTsClock(firstDTS, true)}, delta: ${toMsFromMpegTsClock(delta, true)} ms`);
@@ -17294,7 +16648,7 @@ class MP4Remuxer {
         }
       }
     }
-    // next AVC
+    // next AVC sample DTS should be equal to last sample DTS + last sample duration (in PES timescale)
     mp4SampleDuration = stretchedLastFrame || !mp4SampleDuration ? averageSampleDuration : mp4SampleDuration;
     this.nextAvcDts = nextAvcDts = lastDTS + mp4SampleDuration;
     this.videoSampleDuration = mp4SampleDuration;
@@ -17427,7 +16781,7 @@ class MP4Remuxer {
         logger.warn(`[mp4-remuxer]: Injecting ${missing} audio frame @ ${(nextPts / inputTimeScale).toFixed(3)}s due to ${Math.round(1000 * delta / inputTimeScale)} ms gap.`);
         for (let j = 0; j < missing; j++) {
           const newStamp = Math.max(nextPts, 0);
-          let fillFrame = AAC.getSilentFrame(track.
+          let fillFrame = AAC.getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
           if (!fillFrame) {
             logger.log('[mp4-remuxer]: Unable to get silent frame for given audio codec; duplicating last frame instead.');
             fillFrame = sample.unit.subarray();
@@ -17555,7 +16909,7 @@ class MP4Remuxer {
     // samples count of this segment's duration
     const nbSamples = Math.ceil((endDTS - startDTS) / frameDuration);
     // silent frame
-    const silentFrame = AAC.getSilentFrame(track.
+    const silentFrame = AAC.getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
     logger.warn('[mp4-remuxer]: remux empty Audio');
     // Can't remux if we can't generate a silent frame...
     if (!silentFrame) {
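Note: a small sketch (not part of the diff) of the codec fallback used by the two silent-frame hunks above. The track fields mirror the remuxer's audio track object; the concrete values are hypothetical.

    // 1.5.11 prefers the codec signalled in the manifest and falls back to the parsed codec
    // when asking AAC.getSilentFrame for a gap-filling frame.
    const track = { manifestCodec: 'mp4a.40.2', codec: 'mp4a.40.5', channelCount: 2 }; // hypothetical values
    const silentFrameCodec = track.manifestCodec || track.codec; // 'mp4a.40.2'
    // const fillFrame = AAC.getSilentFrame(silentFrameCodec, track.channelCount);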
@@ -17946,15 +17300,13 @@ class Transmuxer {
       initSegmentData
     } = transmuxConfig;
     const keyData = getEncryptionType(uintData, decryptdata);
-    if (keyData &&
+    if (keyData && keyData.method === 'AES-128') {
       const decrypter = this.getDecrypter();
-      const aesMode = getAesModeFromFullSegmentMethod(keyData.method);
-
       // Software decryption is synchronous; webCrypto is not
       if (decrypter.isSync()) {
         // Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
         // data is handled in the flush() call
-        let decryptedData = decrypter.softwareDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer
+        let decryptedData = decrypter.softwareDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer);
         // For Low-Latency HLS Parts, decrypt in place, since part parsing is expected on push progress
         const loadingParts = chunkMeta.part > -1;
         if (loadingParts) {
@@ -17966,7 +17318,7 @@ class Transmuxer {
         }
         uintData = new Uint8Array(decryptedData);
       } else {
-        this.decryptionPromise = decrypter.webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer
+        this.decryptionPromise = decrypter.webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer).then(decryptedData => {
           // Calling push here is important; if flush() is called while this is still resolving, this ensures that
           // the decrypted data has been transmuxed
           const result = this.push(decryptedData, null, chunkMeta);
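Note: a condensed sketch (not part of the diff) of the decision the two Transmuxer hunks above make in 1.5.11: full-segment AES-128 data is decrypted synchronously in software when the decrypter reports isSync(), otherwise through WebCrypto, whose promise feeds the transmuxer once it resolves. The Decrypter-like interface below is an assumption for illustration, mirroring only the method names used in the hunks.

    interface DecrypterLike {
      isSync(): boolean;
      softwareDecrypt(data: Uint8Array, key: ArrayBuffer, iv: ArrayBuffer): ArrayBuffer | null;
      webCryptoDecrypt(data: Uint8Array, key: ArrayBuffer, iv: ArrayBuffer): Promise<ArrayBuffer>;
    }

    async function decryptAes128Segment(
      decrypter: DecrypterLike,
      data: Uint8Array,
      key: ArrayBuffer,
      iv: ArrayBuffer,
    ): Promise<Uint8Array | null> {
      if (decrypter.isSync()) {
        // Progressive software decryption; may yield nothing until the remainder is flushed.
        const out = decrypter.softwareDecrypt(data, key, iv);
        return out ? new Uint8Array(out) : null;
      }
      // WebCrypto path is asynchronous; the transmuxer keeps this promise and pushes its result.
      return new Uint8Array(await decrypter.webCryptoDecrypt(data, key, iv));
    }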
@@ -18620,7 +17972,14 @@ class TransmuxerInterface {
     this.observer = new EventEmitter();
     this.observer.on(Events.FRAG_DECRYPTED, forwardMessage);
     this.observer.on(Events.ERROR, forwardMessage);
-    const
+    const MediaSource = getMediaSource(config.preferManagedMediaSource) || {
+      isTypeSupported: () => false
+    };
+    const m2tsTypeSupported = {
+      mpeg: MediaSource.isTypeSupported('audio/mpeg'),
+      mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
+      ac3: false
+    };
     if (this.useWorker && typeof Worker !== 'undefined') {
       const canCreateWorker = config.workerPath || hasUMDWorker();
       if (canCreateWorker) {
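Note: a minimal standalone sketch (not part of the diff) of the capability probe the hunk above inlines. getMediaSource is the helper exported at the end of this file; preferManagedMediaSource mirrors the config flag referenced in the hunk; ac3 is hard-coded to false in this light build.

    import { getMediaSource } from 'hls.js'; // named export listed in the export hunk below

    const preferManagedMediaSource = true; // mirrors config.preferManagedMediaSource in the hunk
    const MediaSourceImpl = getMediaSource(preferManagedMediaSource) || {
      isTypeSupported: () => false, // no MSE available: report nothing as supported
    };
    const m2tsTypeSupported = {
      mpeg: MediaSourceImpl.isTypeSupported('audio/mpeg'),
      mp3: MediaSourceImpl.isTypeSupported('audio/mp4; codecs="mp3"'),
      ac3: false, // hard-coded off in hls.light
    };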
@@ -18888,9 +18247,8 @@ const STALL_MINIMUM_DURATION_MS = 250;
 const MAX_START_GAP_JUMP = 2.0;
 const SKIP_BUFFER_HOLE_STEP_SECONDS = 0.1;
 const SKIP_BUFFER_RANGE_START = 0.05;
-class GapController
+class GapController {
   constructor(config, media, fragmentTracker, hls) {
-    super('gap-controller', hls.logger);
     this.config = void 0;
     this.media = null;
     this.fragmentTracker = void 0;
@@ -18900,7 +18258,6 @@ class GapController extends Logger {
     this.stalled = null;
     this.moved = false;
     this.seeking = false;
-    this.ended = 0;
     this.config = config;
     this.media = media;
     this.fragmentTracker = fragmentTracker;
@@ -18918,7 +18275,7 @@ class GapController extends Logger {
   *
   * @param lastCurrentTime - Previously read playhead position
   */
-  poll(lastCurrentTime, activeFrag
+  poll(lastCurrentTime, activeFrag) {
     const {
       config,
       media,
@@ -18937,7 +18294,6 @@ class GapController extends Logger {

     // The playhead is moving, no-op
     if (currentTime !== lastCurrentTime) {
-      this.ended = 0;
       this.moved = true;
       if (!seeking) {
         this.nudgeRetry = 0;
@@ -18946,7 +18302,7 @@ class GapController extends Logger {
       // The playhead is now moving, but was previously stalled
       if (this.stallReported) {
         const _stalledDuration = self.performance.now() - stalled;
-
+        logger.warn(`playback not stuck anymore @${currentTime}, after ${Math.round(_stalledDuration)}ms`);
         this.stallReported = false;
       }
       this.stalled = null;
@@ -18982,6 +18338,7 @@ class GapController extends Logger {
     // Skip start gaps if we haven't played, but the last poll detected the start of a stall
     // The addition poll gives the browser a chance to jump the gap for us
     if (!this.moved && this.stalled !== null) {
+      var _level$details;
       // There is no playable buffer (seeked, waiting for buffer)
       const isBuffered = bufferInfo.len > 0;
       if (!isBuffered && !nextStart) {
@@ -18993,8 +18350,9 @@ class GapController extends Logger {
       // When joining a live stream with audio tracks, account for live playlist window sliding by allowing
       // a larger jump over start gaps caused by the audio-stream-controller buffering a start fragment
       // that begins over 1 target duration after the video start position.
-      const
-      const
+      const level = this.hls.levels ? this.hls.levels[this.hls.currentLevel] : null;
+      const isLive = level == null ? void 0 : (_level$details = level.details) == null ? void 0 : _level$details.live;
+      const maxStartGapJump = isLive ? level.details.targetduration * 2 : MAX_START_GAP_JUMP;
       const partialOrGap = this.fragmentTracker.getPartialFragment(currentTime);
       if (startJump > 0 && (startJump <= maxStartGapJump || partialOrGap)) {
         if (!media.paused) {
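Note: a short self-contained sketch (not part of the diff) of the start-gap threshold computed in the hunk above, with a hypothetical live playlist. Only identifiers that appear in the hunk are reused.

    // Live levels tolerate a start gap of up to two target durations; otherwise the 2s constant applies.
    const MAX_START_GAP_JUMP = 2.0;

    function startGapJumpLimit(level: { details?: { live?: boolean; targetduration: number } } | null): number {
      const details = level?.details;
      return details?.live ? details.targetduration * 2 : MAX_START_GAP_JUMP;
    }

    // e.g. a live playlist with targetduration 6 allows start gaps of up to 12 seconds to be jumped:
    // startGapJumpLimit({ details: { live: true, targetduration: 6 } }) === 12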
@@ -19012,17 +18370,6 @@ class GapController extends Logger {
     }
     const stalledDuration = tnow - stalled;
     if (!seeking && stalledDuration >= STALL_MINIMUM_DURATION_MS) {
-      // Dispatch MEDIA_ENDED when media.ended/ended event is not signalled at end of stream
-      if (state === State.ENDED && !(levelDetails != null && levelDetails.live) && Math.abs(currentTime - ((levelDetails == null ? void 0 : levelDetails.edge) || 0)) < 1) {
-        if (stalledDuration < 1000 || this.ended) {
-          return;
-        }
-        this.ended = currentTime;
-        this.hls.trigger(Events.MEDIA_ENDED, {
-          stalled: true
-        });
-        return;
-      }
       // Report stalling after trying to fix
       this._reportStall(bufferInfo);
       if (!this.media) {
@@ -19066,7 +18413,7 @@ class GapController extends Logger {
     // needs to cross some sort of threshold covering all source-buffers content
     // to start playing properly.
     if ((bufferInfo.len > config.maxBufferHole || bufferInfo.nextStart && bufferInfo.nextStart - currentTime < config.maxBufferHole) && stalledDurationMs > config.highBufferWatchdogPeriod * 1000) {
-
+      logger.warn('Trying to nudge playhead over buffer-hole');
       // Try to nudge currentTime over a buffer hole if we've been stalling for the configured amount of seconds
       // We only try to jump the hole if it's under the configured size
       // Reset stalled so to rearm watchdog timer
@@ -19090,7 +18437,7 @@ class GapController extends Logger {
     // Report stalled error once
     this.stallReported = true;
     const error = new Error(`Playback stalling at @${media.currentTime} due to low buffer (${JSON.stringify(bufferInfo)})`);
-
+    logger.warn(error.message);
     hls.trigger(Events.ERROR, {
       type: ErrorTypes.MEDIA_ERROR,
       details: ErrorDetails.BUFFER_STALLED_ERROR,
@@ -19158,7 +18505,7 @@ class GapController extends Logger {
       }
     }
     const targetTime = Math.max(startTime + SKIP_BUFFER_RANGE_START, currentTime + SKIP_BUFFER_HOLE_STEP_SECONDS);
-
+    logger.warn(`skipping hole, adjusting currentTime from ${currentTime} to ${targetTime}`);
     this.moved = true;
     this.stalled = null;
     media.currentTime = targetTime;
@@ -19199,7 +18546,7 @@ class GapController extends Logger {
       const targetTime = currentTime + (nudgeRetry + 1) * config.nudgeOffset;
       // playback stalled in buffered area ... let's nudge currentTime to try to overcome this
       const error = new Error(`Nudging 'currentTime' from ${currentTime} to ${targetTime}`);
-
+      logger.warn(error.message);
      media.currentTime = targetTime;
      hls.trigger(Events.ERROR, {
        type: ErrorTypes.MEDIA_ERROR,
@@ -19209,7 +18556,7 @@ class GapController extends Logger {
      });
    } else {
      const error = new Error(`Playhead still not moving while enough data buffered @${currentTime} after ${config.nudgeMaxRetry} nudges`);
-
+      logger.error(error.message);
      hls.trigger(Events.ERROR, {
        type: ErrorTypes.MEDIA_ERROR,
        details: ErrorDetails.BUFFER_STALLED_ERROR,
@@ -19224,7 +18571,7 @@ const TICK_INTERVAL = 100; // how often to tick in ms

 class StreamController extends BaseStreamController {
   constructor(hls, fragmentTracker, keyLoader) {
-    super(hls, fragmentTracker, keyLoader, 'stream-controller', PlaylistLevelType.MAIN);
+    super(hls, fragmentTracker, keyLoader, '[stream-controller]', PlaylistLevelType.MAIN);
     this.audioCodecSwap = false;
     this.gapController = null;
     this.level = -1;
@@ -19232,43 +18579,27 @@ class StreamController extends BaseStreamController {
     this.altAudio = false;
     this.audioOnly = false;
     this.fragPlaying = null;
+    this.onvplaying = null;
+    this.onvseeked = null;
     this.fragLastKbps = 0;
     this.couldBacktrack = false;
     this.backtrackFragment = null;
     this.audioCodecSwitch = false;
     this.videoBuffer = null;
-    this.
-      // tick to speed up FRAG_CHANGED triggering
-      this.tick();
-    };
-    this.onMediaSeeked = () => {
-      const media = this.media;
-      const currentTime = media ? media.currentTime : null;
-      if (isFiniteNumber(currentTime)) {
-        this.log(`Media seeked to ${currentTime.toFixed(3)}`);
-      }
-
-      // If seeked was issued before buffer was appended do not tick immediately
-      const bufferInfo = this.getMainFwdBufferInfo();
-      if (bufferInfo === null || bufferInfo.len === 0) {
-        this.warn(`Main forward buffer length on "seeked" event ${bufferInfo ? bufferInfo.len : 'empty'})`);
-        return;
-      }
-
-      // tick to speed up FRAG_CHANGED triggering
-      this.tick();
-    };
-    this.registerListeners();
+    this._registerListeners();
   }
-
-    super.registerListeners();
+  _registerListeners() {
     const {
       hls
     } = this;
+    hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
+    hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
+    hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
     hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
     hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this);
     hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
     hls.on(Events.FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
+    hls.on(Events.ERROR, this.onError, this);
     hls.on(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
     hls.on(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
     hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this);
@@ -19276,14 +18607,17 @@ class StreamController extends BaseStreamController {
     hls.on(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
     hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
   }
-
-    super.unregisterListeners();
+  _unregisterListeners() {
     const {
       hls
     } = this;
+    hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
+    hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
+    hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
     hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
     hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
     hls.off(Events.FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
+    hls.off(Events.ERROR, this.onError, this);
     hls.off(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
     hls.off(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
     hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this);
@@ -19292,9 +18626,7 @@ class StreamController extends BaseStreamController {
     hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
   }
   onHandlerDestroying() {
-
-    this.onMediaPlaying = this.onMediaSeeked = null;
-    this.unregisterListeners();
+    this._unregisterListeners();
     super.onHandlerDestroying();
   }
   startLoad(startPosition) {
@@ -19392,9 +18724,6 @@ class StreamController extends BaseStreamController {
     this.checkFragmentChanged();
   }
   doTickIdle() {
-    if (!this.buffering) {
-      return;
-    }
     const {
       hls,
       levelLastLoaded,
@@ -19622,19 +18951,22 @@ class StreamController extends BaseStreamController {
   onMediaAttached(event, data) {
     super.onMediaAttached(event, data);
     const media = data.media;
-
-
+    this.onvplaying = this.onMediaPlaying.bind(this);
+    this.onvseeked = this.onMediaSeeked.bind(this);
+    media.addEventListener('playing', this.onvplaying);
+    media.addEventListener('seeked', this.onvseeked);
     this.gapController = new GapController(this.config, media, this.fragmentTracker, this.hls);
   }
   onMediaDetaching() {
     const {
       media
     } = this;
-    if (media) {
-      media.removeEventListener('playing', this.
-      media.removeEventListener('seeked', this.
+    if (media && this.onvplaying && this.onvseeked) {
+      media.removeEventListener('playing', this.onvplaying);
+      media.removeEventListener('seeked', this.onvseeked);
+      this.onvplaying = this.onvseeked = null;
+      this.videoBuffer = null;
     }
-    this.videoBuffer = null;
     this.fragPlaying = null;
     if (this.gapController) {
       this.gapController.destroy();
@@ -19642,6 +18974,27 @@ class StreamController extends BaseStreamController {
     }
     super.onMediaDetaching();
   }
+  onMediaPlaying() {
+    // tick to speed up FRAG_CHANGED triggering
+    this.tick();
+  }
+  onMediaSeeked() {
+    const media = this.media;
+    const currentTime = media ? media.currentTime : null;
+    if (isFiniteNumber(currentTime)) {
+      this.log(`Media seeked to ${currentTime.toFixed(3)}`);
+    }
+
+    // If seeked was issued before buffer was appended do not tick immediately
+    const bufferInfo = this.getMainFwdBufferInfo();
+    if (bufferInfo === null || bufferInfo.len === 0) {
+      this.warn(`Main forward buffer length on "seeked" event ${bufferInfo ? bufferInfo.len : 'empty'})`);
+      return;
+    }
+
+    // tick to speed up FRAG_CHANGED triggering
+    this.tick();
+  }
   onManifestLoading() {
     // reset buffer on manifest loading
     this.log('Trigger BUFFER_RESET');
@@ -19933,10 +19286,8 @@ class StreamController extends BaseStreamController {
     }
     if (this.loadedmetadata || !BufferHelper.getBuffered(media).length) {
       // Resolve gaps using the main buffer, whose ranges are the intersections of the A/V sourcebuffers
-      const
-
-      const levelDetails = this.getLevelDetails();
-      gapController.poll(this.lastCurrentTime, activeFrag, levelDetails, state);
+      const activeFrag = this.state !== State.IDLE ? this.fragCurrent : null;
+      gapController.poll(this.lastCurrentTime, activeFrag);
     }
     this.lastCurrentTime = media.currentTime;
   }
@@ -20270,17 +19621,6 @@ class StreamController extends BaseStreamController {
   getMainFwdBufferInfo() {
     return this.getFwdBufferInfo(this.mediaBuffer ? this.mediaBuffer : this.media, PlaylistLevelType.MAIN);
   }
-  get maxBufferLength() {
-    const {
-      levels,
-      level
-    } = this;
-    const levelInfo = levels == null ? void 0 : levels[level];
-    if (!levelInfo) {
-      return this.config.maxBufferLength;
-    }
-    return this.getMaxBufferLength(levelInfo.maxBitrate);
-  }
   backtrack(frag) {
     this.couldBacktrack = true;
     // Causes findFragments to backtrack through fragments to find the keyframe
@@ -20386,7 +19726,7 @@ class Hls {
   * Get the video-dev/hls.js package version.
   */
  static get version() {
-    return "1.5.11
+    return "1.5.11";
  }

  /**
@@ -20449,12 +19789,9 @@ class Hls {
    * The configuration object provided on player instantiation.
    */
   this.userConfig = void 0;
-   /**
-    * The logger functions used by this player instance, configured on player instantiation.
-    */
-   this.logger = void 0;
   this.coreComponents = void 0;
   this.networkControllers = void 0;
+   this.started = false;
   this._emitter = new EventEmitter();
   this._autoLevelCapping = -1;
   this._maxHdcpLevel = null;
@@ -20471,11 +19808,11 @@ class Hls {
   this._media = null;
   this.url = null;
   this.triggeringException = void 0;
-
-   const config = this.config = mergeConfig(Hls.DefaultConfig, userConfig
+   enableLogs(userConfig.debug || false, 'Hls instance');
+   const config = this.config = mergeConfig(Hls.DefaultConfig, userConfig);
   this.userConfig = userConfig;
   if (config.progressive) {
-     enableStreamingMode(config
+     enableStreamingMode(config);
   }

   // core controllers and network loaders
@@ -20488,17 +19825,17 @@ class Hls {
   } = config;
   const errorController = new ConfigErrorController(this);
   const abrController = this.abrController = new ConfigAbrController(this);
-
-   const fragmentTracker = new FragmentTracker(this);
-   const bufferController = this.bufferController = new ConfigBufferController(this, fragmentTracker);
+   const bufferController = this.bufferController = new ConfigBufferController(this);
   const capLevelController = this.capLevelController = new ConfigCapLevelController(this);
   const fpsController = new ConfigFpsController(this);
   const playListLoader = new PlaylistLoader(this);
   const id3TrackController = new ID3TrackController(this);
   const ConfigContentSteeringController = config.contentSteeringController;
-   //
+   // ConentSteeringController is defined before LevelController to receive Multivariant Playlist events first
   const contentSteering = ConfigContentSteeringController ? new ConfigContentSteeringController(this) : null;
   const levelController = this.levelController = new LevelController(this, contentSteering);
+   // FragmentTracker must be defined before StreamController because the order of event handling is important
+   const fragmentTracker = new FragmentTracker(this);
   const keyLoader = new KeyLoader(this.config);
   const streamController = this.streamController = new StreamController(this, fragmentTracker, keyLoader);

@@ -20574,7 +19911,7 @@ class Hls {
   try {
     return this.emit(event, event, eventObject);
   } catch (error) {
-
+     logger.error('An internal error happened while handling event ' + event + '. Error message: "' + error.message + '". Here is a stacktrace:', error);
     // Prevent recursion in error event handlers that throw #5497
     if (!this.triggeringException) {
       this.triggeringException = true;
@@ -20600,7 +19937,7 @@ class Hls {
  * Dispose of the instance
  */
 destroy() {
-
+   logger.log('destroy');
   this.trigger(Events.DESTROYING, undefined);
   this.detachMedia();
   this.removeAllListeners();
@@ -20621,7 +19958,7 @@ class Hls {
  * Attaches Hls.js to a media element
  */
 attachMedia(media) {
-
+   logger.log('attachMedia');
   this._media = media;
   this.trigger(Events.MEDIA_ATTACHING, {
     media: media
@@ -20632,7 +19969,7 @@ class Hls {
  * Detach Hls.js from the media
  */
 detachMedia() {
-
+   logger.log('detachMedia');
   this.trigger(Events.MEDIA_DETACHING, undefined);
   this._media = null;
 }
@@ -20649,7 +19986,7 @@ class Hls {
   });
   this._autoLevelCapping = -1;
   this._maxHdcpLevel = null;
-
+   logger.log(`loadSource:${loadingSource}`);
   if (media && loadedSource && (loadedSource !== loadingSource || this.bufferController.hasSourceTypes())) {
     this.detachMedia();
     this.attachMedia(media);
@@ -20668,7 +20005,8 @@ class Hls {
  * Defaults to -1 (None: starts from earliest point)
  */
 startLoad(startPosition = -1) {
-
+   logger.log(`startLoad(${startPosition})`);
+   this.started = true;
   this.networkControllers.forEach(controller => {
     controller.startLoad(startPosition);
   });
@@ -20678,31 +20016,34 @@ class Hls {
  * Stop loading of any stream data.
  */
 stopLoad() {
-
+   logger.log('stopLoad');
+   this.started = false;
   this.networkControllers.forEach(controller => {
     controller.stopLoad();
   });
 }

 /**
-  * Resumes stream controller segment loading
+  * Resumes stream controller segment loading if previously started.
  */
 resumeBuffering() {
-   this.
-
-   controller
-
-
+   if (this.started) {
+     this.networkControllers.forEach(controller => {
+       if ('fragmentLoader' in controller) {
+         controller.startLoad(-1);
+       }
+     });
+   }
 }

 /**
-  *
+  * Stops stream controller segment loading without changing 'started' state like stopLoad().
  * This allows for media buffering to be paused without interupting playlist loading.
  */
 pauseBuffering() {
   this.networkControllers.forEach(controller => {
-     if (controller
-     controller.
+     if ('fragmentLoader' in controller) {
+       controller.stopLoad();
     }
   });
 }
@@ -20711,7 +20052,7 @@ class Hls {
  * Swap through possible audio codecs in the stream (for example to switch from stereo to 5.1)
  */
 swapAudioCodec() {
-
+   logger.log('swapAudioCodec');
   this.streamController.swapAudioCodec();
 }

@@ -20722,7 +20063,7 @@ class Hls {
  * Automatic recovery of media-errors by this process is configurable.
  */
 recoverMediaError() {
-
+   logger.log('recoverMediaError');
   const media = this._media;
   this.detachMedia();
   if (media) {
@@ -20752,7 +20093,7 @@ class Hls {
  * Set quality level index immediately. This will flush the current buffer to replace the quality asap. That means playback will interrupt at least shortly to re-buffer and re-sync eventually. Set to -1 for automatic level selection.
  */
 set currentLevel(newLevel) {
-
+   logger.log(`set currentLevel:${newLevel}`);
   this.levelController.manualLevel = newLevel;
   this.streamController.immediateLevelSwitch();
 }
@@ -20771,7 +20112,7 @@ class Hls {
  * @param newLevel - Pass -1 for automatic level selection
  */
 set nextLevel(newLevel) {
-
+   logger.log(`set nextLevel:${newLevel}`);
   this.levelController.manualLevel = newLevel;
   this.streamController.nextLevelSwitch();
 }
@@ -20790,7 +20131,7 @@ class Hls {
  * @param newLevel - Pass -1 for automatic level selection
  */
 set loadLevel(newLevel) {
-
+   logger.log(`set loadLevel:${newLevel}`);
   this.levelController.manualLevel = newLevel;
 }

@@ -20821,7 +20162,7 @@ class Hls {
  * Sets "first-level", see getter.
  */
 set firstLevel(newLevel) {
-
+   logger.log(`set firstLevel:${newLevel}`);
   this.levelController.firstLevel = newLevel;
 }

@@ -20846,7 +20187,7 @@ class Hls {
  * (determined from download of first segment)
  */
 set startLevel(newLevel) {
-
+   logger.log(`set startLevel:${newLevel}`);
   // if not in automatic start level detection, ensure startLevel is greater than minAutoLevel
   if (newLevel !== -1) {
     newLevel = Math.max(newLevel, this.minAutoLevel);
@@ -20921,7 +20262,7 @@ class Hls {
  */
 set autoLevelCapping(newLevel) {
   if (this._autoLevelCapping !== newLevel) {
-
+     logger.log(`set autoLevelCapping:${newLevel}`);
     this._autoLevelCapping = newLevel;
     this.levelController.checkMaxAutoUpdated();
   }
@@ -21026,9 +20367,6 @@ class Hls {
 get mainForwardBufferInfo() {
   return this.streamController.getMainFwdBufferInfo();
 }
-  get maxBufferLength() {
-    return this.streamController.maxBufferLength;
-  }

 /**
  * Find and select the best matching audio track, making a level switch when a Group change is necessary.
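Note: a short sketch (not part of the diff). Together with the StreamController hunk at -20270 above, this removes the level-aware maxBufferLength getter present in the canary build; the configured ceiling is still readable from the merged config object.

    const configuredMaxBufferLength = hls.config.maxBufferLength; // seconds, as merged from userConfig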
@@ -21196,16 +20534,6 @@ class Hls {
 get forceStartLoad() {
   return this.streamController.forceStartLoad;
 }
-
-  /**
-   * ContentSteering pathwayPriority getter/setter
-   */
-  get pathwayPriority() {
-    return this.levelController.pathwayPriority;
-  }
-  set pathwayPriority(pathwayPriority) {
-    this.levelController.pathwayPriority = pathwayPriority;
-  }
 }
 Hls.defaultConfig = void 0;

@@ -21213,5 +20541,5 @@ var KeySystemFormats = emptyEs.KeySystemFormats;
 var KeySystems = emptyEs.KeySystems;
 var SubtitleStreamController = emptyEs.SubtitleStreamController;
 var TimelineController = emptyEs.TimelineController;
-export { AbrController, AttrList,
+export { AbrController, AttrList, Cues as AudioStreamController, Cues as AudioTrackController, BasePlaylistController, BaseSegment, BaseStreamController, BufferController, Cues as CMCDController, CapLevelController, ChunkMetadata, ContentSteeringController, DateRange, Cues as EMEController, ErrorActionFlags, ErrorController, ErrorDetails, ErrorTypes, Events, FPSController, Fragment, Hls, HlsSkip, HlsUrlParameters, KeySystemFormats, KeySystems, Level, LevelDetails, LevelKey, LoadStats, MetadataSchema, NetworkErrorAction, Part, PlaylistLevelType, SubtitleStreamController, Cues as SubtitleTrackController, TimelineController, Hls as default, getMediaSource, isMSESupported, isSupported };
 //# sourceMappingURL=hls.light.mjs.map