hls.js 1.5.3 → 1.5.5-0.canary.9977
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -0
- package/dist/hls-demo.js +10 -0
- package/dist/hls-demo.js.map +1 -1
- package/dist/hls.js +1954 -1103
- package/dist/hls.js.d.ts +63 -50
- package/dist/hls.js.map +1 -1
- package/dist/hls.light.js +1631 -784
- package/dist/hls.light.js.map +1 -1
- package/dist/hls.light.min.js +1 -1
- package/dist/hls.light.min.js.map +1 -1
- package/dist/hls.light.mjs +1428 -590
- package/dist/hls.light.mjs.map +1 -1
- package/dist/hls.min.js +1 -1
- package/dist/hls.min.js.map +1 -1
- package/dist/hls.mjs +1703 -866
- package/dist/hls.mjs.map +1 -1
- package/dist/hls.worker.js +1 -1
- package/dist/hls.worker.js.map +1 -1
- package/package.json +18 -18
- package/src/config.ts +3 -2
- package/src/controller/abr-controller.ts +24 -22
- package/src/controller/audio-stream-controller.ts +16 -17
- package/src/controller/audio-track-controller.ts +1 -1
- package/src/controller/base-playlist-controller.ts +7 -7
- package/src/controller/base-stream-controller.ts +56 -29
- package/src/controller/buffer-controller.ts +11 -11
- package/src/controller/cap-level-controller.ts +1 -2
- package/src/controller/cmcd-controller.ts +25 -3
- package/src/controller/content-steering-controller.ts +8 -6
- package/src/controller/eme-controller.ts +9 -22
- package/src/controller/error-controller.ts +6 -8
- package/src/controller/fps-controller.ts +2 -3
- package/src/controller/gap-controller.ts +43 -16
- package/src/controller/latency-controller.ts +9 -11
- package/src/controller/level-controller.ts +5 -17
- package/src/controller/stream-controller.ts +27 -33
- package/src/controller/subtitle-stream-controller.ts +14 -15
- package/src/controller/subtitle-track-controller.ts +5 -3
- package/src/controller/timeline-controller.ts +23 -30
- package/src/crypt/aes-crypto.ts +21 -2
- package/src/crypt/decrypter-aes-mode.ts +4 -0
- package/src/crypt/decrypter.ts +32 -18
- package/src/crypt/fast-aes-key.ts +24 -5
- package/src/demux/audio/adts.ts +9 -4
- package/src/demux/sample-aes.ts +2 -0
- package/src/demux/transmuxer-interface.ts +4 -12
- package/src/demux/transmuxer-worker.ts +4 -4
- package/src/demux/transmuxer.ts +16 -3
- package/src/demux/tsdemuxer.ts +63 -37
- package/src/demux/video/avc-video-parser.ts +208 -119
- package/src/demux/video/base-video-parser.ts +134 -2
- package/src/demux/video/exp-golomb.ts +0 -208
- package/src/demux/video/hevc-video-parser.ts +746 -0
- package/src/events.ts +7 -0
- package/src/hls.ts +42 -34
- package/src/loader/fragment-loader.ts +9 -2
- package/src/loader/key-loader.ts +2 -0
- package/src/loader/level-key.ts +10 -9
- package/src/remux/mp4-generator.ts +196 -1
- package/src/remux/mp4-remuxer.ts +23 -7
- package/src/task-loop.ts +5 -2
- package/src/types/component-api.ts +2 -0
- package/src/types/demuxer.ts +3 -0
- package/src/types/events.ts +4 -0
- package/src/utils/codecs.ts +33 -4
- package/src/utils/encryption-methods-util.ts +21 -0
- package/src/utils/logger.ts +53 -24
- package/src/utils/mp4-tools.ts +28 -9
package/dist/hls.light.mjs
CHANGED
@@ -256,6 +256,7 @@ let Events = /*#__PURE__*/function (Events) {
|
|
256
256
|
Events["MEDIA_ATTACHED"] = "hlsMediaAttached";
|
257
257
|
Events["MEDIA_DETACHING"] = "hlsMediaDetaching";
|
258
258
|
Events["MEDIA_DETACHED"] = "hlsMediaDetached";
|
259
|
+
Events["MEDIA_ENDED"] = "hlsMediaEnded";
|
259
260
|
Events["BUFFER_RESET"] = "hlsBufferReset";
|
260
261
|
Events["BUFFER_CODECS"] = "hlsBufferCodecs";
|
261
262
|
Events["BUFFER_CREATED"] = "hlsBufferCreated";
|
@@ -369,6 +370,23 @@ let ErrorDetails = /*#__PURE__*/function (ErrorDetails) {
|
|
369
370
|
return ErrorDetails;
|
370
371
|
}({});
|
371
372
|
|
373
|
+
class Logger {
|
374
|
+
constructor(label, logger) {
|
375
|
+
this.trace = void 0;
|
376
|
+
this.debug = void 0;
|
377
|
+
this.log = void 0;
|
378
|
+
this.warn = void 0;
|
379
|
+
this.info = void 0;
|
380
|
+
this.error = void 0;
|
381
|
+
const lb = `[${label}]:`;
|
382
|
+
this.trace = noop;
|
383
|
+
this.debug = logger.debug.bind(null, lb);
|
384
|
+
this.log = logger.log.bind(null, lb);
|
385
|
+
this.warn = logger.warn.bind(null, lb);
|
386
|
+
this.info = logger.info.bind(null, lb);
|
387
|
+
this.error = logger.error.bind(null, lb);
|
388
|
+
}
|
389
|
+
}
|
372
390
|
const noop = function noop() {};
|
373
391
|
const fakeLogger = {
|
374
392
|
trace: noop,
|
@@ -378,7 +396,9 @@ const fakeLogger = {
|
|
378
396
|
info: noop,
|
379
397
|
error: noop
|
380
398
|
};
|
381
|
-
|
399
|
+
function createLogger() {
|
400
|
+
return _extends({}, fakeLogger);
|
401
|
+
}
|
382
402
|
|
383
403
|
// let lastCallTime;
|
384
404
|
// function formatMsgWithTimeInfo(type, msg) {
|
@@ -389,35 +409,36 @@ let exportedLogger = fakeLogger;
|
|
389
409
|
// return msg;
|
390
410
|
// }
|
391
411
|
|
392
|
-
function consolePrintFn(type) {
|
412
|
+
function consolePrintFn(type, id) {
|
393
413
|
const func = self.console[type];
|
394
|
-
|
395
|
-
return func.bind(self.console, `[${type}] >`);
|
396
|
-
}
|
397
|
-
return noop;
|
414
|
+
return func ? func.bind(self.console, `${id ? '[' + id + '] ' : ''}[${type}] >`) : noop;
|
398
415
|
}
|
399
|
-
function
|
400
|
-
|
401
|
-
exportedLogger[type] = debugConfig[type] ? debugConfig[type].bind(debugConfig) : consolePrintFn(type);
|
402
|
-
});
|
416
|
+
function getLoggerFn(key, debugConfig, id) {
|
417
|
+
return debugConfig[key] ? debugConfig[key].bind(debugConfig) : consolePrintFn(key, id);
|
403
418
|
}
|
404
|
-
|
419
|
+
let exportedLogger = createLogger();
|
420
|
+
function enableLogs(debugConfig, context, id) {
|
405
421
|
// check that console is available
|
422
|
+
const newLogger = createLogger();
|
406
423
|
if (typeof console === 'object' && debugConfig === true || typeof debugConfig === 'object') {
|
407
|
-
|
424
|
+
const keys = [
|
408
425
|
// Remove out from list here to hard-disable a log-level
|
409
426
|
// 'trace',
|
410
|
-
'debug', 'log', 'info', 'warn', 'error'
|
427
|
+
'debug', 'log', 'info', 'warn', 'error'];
|
428
|
+
keys.forEach(key => {
|
429
|
+
newLogger[key] = getLoggerFn(key, debugConfig, id);
|
430
|
+
});
|
411
431
|
// Some browsers don't allow to use bind on console object anyway
|
412
432
|
// fallback to default if needed
|
413
433
|
try {
|
414
|
-
|
434
|
+
newLogger.log(`Debug logs enabled for "${context}" in hls.js version ${"1.5.5-0.canary.9977"}`);
|
415
435
|
} catch (e) {
|
416
|
-
|
436
|
+
/* log fn threw an exception. All logger methods are no-ops. */
|
437
|
+
return createLogger();
|
417
438
|
}
|
418
|
-
} else {
|
419
|
-
exportedLogger = fakeLogger;
|
420
439
|
}
|
440
|
+
exportedLogger = newLogger;
|
441
|
+
return newLogger;
|
421
442
|
}
|
422
443
|
const logger = exportedLogger;
|
423
444
|
|
@@ -991,6 +1012,26 @@ class LevelDetails {
|
|
991
1012
|
}
|
992
1013
|
}
|
993
1014
|
|
1015
|
+
var DecrypterAesMode = {
|
1016
|
+
cbc: 0,
|
1017
|
+
ctr: 1
|
1018
|
+
};
|
1019
|
+
|
1020
|
+
function isFullSegmentEncryption(method) {
|
1021
|
+
return method === 'AES-128' || method === 'AES-256' || method === 'AES-256-CTR';
|
1022
|
+
}
|
1023
|
+
function getAesModeFromFullSegmentMethod(method) {
|
1024
|
+
switch (method) {
|
1025
|
+
case 'AES-128':
|
1026
|
+
case 'AES-256':
|
1027
|
+
return DecrypterAesMode.cbc;
|
1028
|
+
case 'AES-256-CTR':
|
1029
|
+
return DecrypterAesMode.ctr;
|
1030
|
+
default:
|
1031
|
+
throw new Error(`invalid full segment method ${method}`);
|
1032
|
+
}
|
1033
|
+
}
|
1034
|
+
|
994
1035
|
// This file is inserted as a shim for modules which we do not want to include into the distro.
|
995
1036
|
// This replacement is done in the "alias" plugin of the rollup config.
|
996
1037
|
var empty = undefined;
|
@@ -1392,6 +1433,12 @@ function readUint32(buffer, offset) {
|
|
1392
1433
|
const val = readSint32(buffer, offset);
|
1393
1434
|
return val < 0 ? 4294967296 + val : val;
|
1394
1435
|
}
|
1436
|
+
function readUint64(buffer, offset) {
|
1437
|
+
let result = readUint32(buffer, offset);
|
1438
|
+
result *= Math.pow(2, 32);
|
1439
|
+
result += readUint32(buffer, offset + 4);
|
1440
|
+
return result;
|
1441
|
+
}
|
1395
1442
|
function readSint32(buffer, offset) {
|
1396
1443
|
return buffer[offset] << 24 | buffer[offset + 1] << 16 | buffer[offset + 2] << 8 | buffer[offset + 3];
|
1397
1444
|
}
|
@@ -1454,15 +1501,14 @@ function parseSegmentIndex(sidx) {
|
|
1454
1501
|
let index = 8;
|
1455
1502
|
const timescale = readUint32(sidx, index);
|
1456
1503
|
index += 4;
|
1457
|
-
|
1458
|
-
|
1459
|
-
// usually zero in our case
|
1460
|
-
const earliestPresentationTime = 0;
|
1461
|
-
const firstOffset = 0;
|
1504
|
+
let earliestPresentationTime = 0;
|
1505
|
+
let firstOffset = 0;
|
1462
1506
|
if (version === 0) {
|
1463
|
-
index +=
|
1507
|
+
earliestPresentationTime = readUint32(sidx, index += 4);
|
1508
|
+
firstOffset = readUint32(sidx, index += 4);
|
1464
1509
|
} else {
|
1465
|
-
index +=
|
1510
|
+
earliestPresentationTime = readUint64(sidx, index += 8);
|
1511
|
+
firstOffset = readUint64(sidx, index += 8);
|
1466
1512
|
}
|
1467
1513
|
|
1468
1514
|
// skip reserved
|
@@ -1910,15 +1956,22 @@ function getDuration(data, initData) {
|
|
1910
1956
|
}
|
1911
1957
|
if (videoDuration === 0 && audioDuration === 0) {
|
1912
1958
|
// If duration samples are not available in the traf use sidx subsegment_duration
|
1959
|
+
let sidxMinStart = Infinity;
|
1960
|
+
let sidxMaxEnd = 0;
|
1913
1961
|
let sidxDuration = 0;
|
1914
1962
|
const sidxs = findBox(data, ['sidx']);
|
1915
1963
|
for (let i = 0; i < sidxs.length; i++) {
|
1916
1964
|
const sidx = parseSegmentIndex(sidxs[i]);
|
1917
1965
|
if (sidx != null && sidx.references) {
|
1918
|
-
|
1966
|
+
sidxMinStart = Math.min(sidxMinStart, sidx.earliestPresentationTime / sidx.timescale);
|
1967
|
+
const subSegmentDuration = sidx.references.reduce((dur, ref) => dur + ref.info.duration || 0, 0);
|
1968
|
+
sidxMaxEnd = Math.max(sidxMaxEnd, subSegmentDuration + sidx.earliestPresentationTime / sidx.timescale);
|
1969
|
+
sidxDuration = sidxMaxEnd - sidxMinStart;
|
1919
1970
|
}
|
1920
1971
|
}
|
1921
|
-
|
1972
|
+
if (sidxDuration && isFiniteNumber(sidxDuration)) {
|
1973
|
+
return sidxDuration;
|
1974
|
+
}
|
1922
1975
|
}
|
1923
1976
|
if (videoDuration) {
|
1924
1977
|
return videoDuration;
|
@@ -2419,12 +2472,12 @@ class LevelKey {
|
|
2419
2472
|
this.keyFormatVersions = formatversions;
|
2420
2473
|
this.iv = iv;
|
2421
2474
|
this.encrypted = method ? method !== 'NONE' : false;
|
2422
|
-
this.isCommonEncryption = this.encrypted && method
|
2475
|
+
this.isCommonEncryption = this.encrypted && !isFullSegmentEncryption(method);
|
2423
2476
|
}
|
2424
2477
|
isSupported() {
|
2425
2478
|
// If it's Segment encryption or No encryption, just select that key system
|
2426
2479
|
if (this.method) {
|
2427
|
-
if (this.method
|
2480
|
+
if (isFullSegmentEncryption(this.method) || this.method === 'NONE') {
|
2428
2481
|
return true;
|
2429
2482
|
}
|
2430
2483
|
if (this.keyFormat === 'identity') {
|
@@ -2438,14 +2491,13 @@ class LevelKey {
|
|
2438
2491
|
if (!this.encrypted || !this.uri) {
|
2439
2492
|
return null;
|
2440
2493
|
}
|
2441
|
-
if (this.method
|
2494
|
+
if (isFullSegmentEncryption(this.method) && this.uri && !this.iv) {
|
2442
2495
|
if (typeof sn !== 'number') {
|
2443
2496
|
// We are fetching decryption data for a initialization segment
|
2444
|
-
// If the segment was encrypted with AES-128
|
2497
|
+
// If the segment was encrypted with AES-128/256
|
2445
2498
|
// It must have an IV defined. We cannot substitute the Segment Number in.
|
2446
|
-
|
2447
|
-
|
2448
|
-
}
|
2499
|
+
logger.warn(`missing IV for initialization segment with method="${this.method}" - compliance issue`);
|
2500
|
+
|
2449
2501
|
// Explicitly set sn to resulting value from implicit conversions 'initSegment' values for IV generation.
|
2450
2502
|
sn = 0;
|
2451
2503
|
}
|
@@ -2592,23 +2644,28 @@ function getCodecCompatibleNameLower(lowerCaseCodec, preferManagedMediaSource =
|
|
2592
2644
|
if (CODEC_COMPATIBLE_NAMES[lowerCaseCodec]) {
|
2593
2645
|
return CODEC_COMPATIBLE_NAMES[lowerCaseCodec];
|
2594
2646
|
}
|
2595
|
-
|
2596
|
-
// Idealy fLaC and Opus would be first (spec-compliant) but
|
2597
|
-
// some browsers will report that fLaC is supported then fail.
|
2598
|
-
// see: https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
|
2599
2647
|
const codecsToCheck = {
|
2648
|
+
// Idealy fLaC and Opus would be first (spec-compliant) but
|
2649
|
+
// some browsers will report that fLaC is supported then fail.
|
2650
|
+
// see: https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
|
2600
2651
|
flac: ['flac', 'fLaC', 'FLAC'],
|
2601
|
-
opus: ['opus', 'Opus']
|
2652
|
+
opus: ['opus', 'Opus'],
|
2653
|
+
// Replace audio codec info if browser does not support mp4a.40.34,
|
2654
|
+
// and demuxer can fallback to 'audio/mpeg' or 'audio/mp4;codecs="mp3"'
|
2655
|
+
'mp4a.40.34': ['mp3']
|
2602
2656
|
}[lowerCaseCodec];
|
2603
2657
|
for (let i = 0; i < codecsToCheck.length; i++) {
|
2658
|
+
var _getMediaSource;
|
2604
2659
|
if (isCodecMediaSourceSupported(codecsToCheck[i], 'audio', preferManagedMediaSource)) {
|
2605
2660
|
CODEC_COMPATIBLE_NAMES[lowerCaseCodec] = codecsToCheck[i];
|
2606
2661
|
return codecsToCheck[i];
|
2662
|
+
} else if (codecsToCheck[i] === 'mp3' && (_getMediaSource = getMediaSource(preferManagedMediaSource)) != null && _getMediaSource.isTypeSupported('audio/mpeg')) {
|
2663
|
+
return '';
|
2607
2664
|
}
|
2608
2665
|
}
|
2609
2666
|
return lowerCaseCodec;
|
2610
2667
|
}
|
2611
|
-
const AUDIO_CODEC_REGEXP = /flac|opus/i;
|
2668
|
+
const AUDIO_CODEC_REGEXP = /flac|opus|mp4a\.40\.34/i;
|
2612
2669
|
function getCodecCompatibleName(codec, preferManagedMediaSource = true) {
|
2613
2670
|
return codec.replace(AUDIO_CODEC_REGEXP, m => getCodecCompatibleNameLower(m.toLowerCase(), preferManagedMediaSource));
|
2614
2671
|
}
|
@@ -2631,6 +2688,16 @@ function convertAVC1ToAVCOTI(codec) {
|
|
2631
2688
|
}
|
2632
2689
|
return codec;
|
2633
2690
|
}
|
2691
|
+
function getM2TSSupportedAudioTypes(preferManagedMediaSource) {
|
2692
|
+
const MediaSource = getMediaSource(preferManagedMediaSource) || {
|
2693
|
+
isTypeSupported: () => false
|
2694
|
+
};
|
2695
|
+
return {
|
2696
|
+
mpeg: MediaSource.isTypeSupported('audio/mpeg'),
|
2697
|
+
mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
|
2698
|
+
ac3: false
|
2699
|
+
};
|
2700
|
+
}
|
2634
2701
|
|
2635
2702
|
const MASTER_PLAYLIST_REGEX = /#EXT-X-STREAM-INF:([^\r\n]*)(?:[\r\n](?:#[^\r\n]*)?)*([^\r\n]+)|#EXT-X-(SESSION-DATA|SESSION-KEY|DEFINE|CONTENT-STEERING|START):([^\r\n]*)[\r\n]+/g;
|
2636
2703
|
const MASTER_PLAYLIST_MEDIA_REGEX = /#EXT-X-MEDIA:(.*)/g;
|
@@ -4208,7 +4275,47 @@ class LatencyController {
|
|
4208
4275
|
this.currentTime = 0;
|
4209
4276
|
this.stallCount = 0;
|
4210
4277
|
this._latency = null;
|
4211
|
-
this.
|
4278
|
+
this.onTimeupdate = () => {
|
4279
|
+
const {
|
4280
|
+
media,
|
4281
|
+
levelDetails
|
4282
|
+
} = this;
|
4283
|
+
if (!media || !levelDetails) {
|
4284
|
+
return;
|
4285
|
+
}
|
4286
|
+
this.currentTime = media.currentTime;
|
4287
|
+
const latency = this.computeLatency();
|
4288
|
+
if (latency === null) {
|
4289
|
+
return;
|
4290
|
+
}
|
4291
|
+
this._latency = latency;
|
4292
|
+
|
4293
|
+
// Adapt playbackRate to meet target latency in low-latency mode
|
4294
|
+
const {
|
4295
|
+
lowLatencyMode,
|
4296
|
+
maxLiveSyncPlaybackRate
|
4297
|
+
} = this.config;
|
4298
|
+
if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1 || !levelDetails.live) {
|
4299
|
+
return;
|
4300
|
+
}
|
4301
|
+
const targetLatency = this.targetLatency;
|
4302
|
+
if (targetLatency === null) {
|
4303
|
+
return;
|
4304
|
+
}
|
4305
|
+
const distanceFromTarget = latency - targetLatency;
|
4306
|
+
// Only adjust playbackRate when within one target duration of targetLatency
|
4307
|
+
// and more than one second from under-buffering.
|
4308
|
+
// Playback further than one target duration from target can be considered DVR playback.
|
4309
|
+
const liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration);
|
4310
|
+
const inLiveRange = distanceFromTarget < liveMinLatencyDuration;
|
4311
|
+
if (inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) {
|
4312
|
+
const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
|
4313
|
+
const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20;
|
4314
|
+
media.playbackRate = Math.min(max, Math.max(1, rate));
|
4315
|
+
} else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
|
4316
|
+
media.playbackRate = 1;
|
4317
|
+
}
|
4318
|
+
};
|
4212
4319
|
this.hls = hls;
|
4213
4320
|
this.config = hls.config;
|
4214
4321
|
this.registerListeners();
|
@@ -4300,7 +4407,7 @@ class LatencyController {
|
|
4300
4407
|
this.onMediaDetaching();
|
4301
4408
|
this.levelDetails = null;
|
4302
4409
|
// @ts-ignore
|
4303
|
-
this.hls =
|
4410
|
+
this.hls = null;
|
4304
4411
|
}
|
4305
4412
|
registerListeners() {
|
4306
4413
|
this.hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
|
@@ -4318,11 +4425,11 @@ class LatencyController {
|
|
4318
4425
|
}
|
4319
4426
|
onMediaAttached(event, data) {
|
4320
4427
|
this.media = data.media;
|
4321
|
-
this.media.addEventListener('timeupdate', this.
|
4428
|
+
this.media.addEventListener('timeupdate', this.onTimeupdate);
|
4322
4429
|
}
|
4323
4430
|
onMediaDetaching() {
|
4324
4431
|
if (this.media) {
|
4325
|
-
this.media.removeEventListener('timeupdate', this.
|
4432
|
+
this.media.removeEventListener('timeupdate', this.onTimeupdate);
|
4326
4433
|
this.media = null;
|
4327
4434
|
}
|
4328
4435
|
}
|
@@ -4336,10 +4443,10 @@ class LatencyController {
|
|
4336
4443
|
}) {
|
4337
4444
|
this.levelDetails = details;
|
4338
4445
|
if (details.advanced) {
|
4339
|
-
this.
|
4446
|
+
this.onTimeupdate();
|
4340
4447
|
}
|
4341
4448
|
if (!details.live && this.media) {
|
4342
|
-
this.media.removeEventListener('timeupdate', this.
|
4449
|
+
this.media.removeEventListener('timeupdate', this.onTimeupdate);
|
4343
4450
|
}
|
4344
4451
|
}
|
4345
4452
|
onError(event, data) {
|
@@ -4349,48 +4456,7 @@ class LatencyController {
|
|
4349
4456
|
}
|
4350
4457
|
this.stallCount++;
|
4351
4458
|
if ((_this$levelDetails = this.levelDetails) != null && _this$levelDetails.live) {
|
4352
|
-
logger.warn('[
|
4353
|
-
}
|
4354
|
-
}
|
4355
|
-
timeupdate() {
|
4356
|
-
const {
|
4357
|
-
media,
|
4358
|
-
levelDetails
|
4359
|
-
} = this;
|
4360
|
-
if (!media || !levelDetails) {
|
4361
|
-
return;
|
4362
|
-
}
|
4363
|
-
this.currentTime = media.currentTime;
|
4364
|
-
const latency = this.computeLatency();
|
4365
|
-
if (latency === null) {
|
4366
|
-
return;
|
4367
|
-
}
|
4368
|
-
this._latency = latency;
|
4369
|
-
|
4370
|
-
// Adapt playbackRate to meet target latency in low-latency mode
|
4371
|
-
const {
|
4372
|
-
lowLatencyMode,
|
4373
|
-
maxLiveSyncPlaybackRate
|
4374
|
-
} = this.config;
|
4375
|
-
if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1 || !levelDetails.live) {
|
4376
|
-
return;
|
4377
|
-
}
|
4378
|
-
const targetLatency = this.targetLatency;
|
4379
|
-
if (targetLatency === null) {
|
4380
|
-
return;
|
4381
|
-
}
|
4382
|
-
const distanceFromTarget = latency - targetLatency;
|
4383
|
-
// Only adjust playbackRate when within one target duration of targetLatency
|
4384
|
-
// and more than one second from under-buffering.
|
4385
|
-
// Playback further than one target duration from target can be considered DVR playback.
|
4386
|
-
const liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration);
|
4387
|
-
const inLiveRange = distanceFromTarget < liveMinLatencyDuration;
|
4388
|
-
if (inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) {
|
4389
|
-
const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
|
4390
|
-
const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20;
|
4391
|
-
media.playbackRate = Math.min(max, Math.max(1, rate));
|
4392
|
-
} else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
|
4393
|
-
media.playbackRate = 1;
|
4459
|
+
this.hls.logger.warn('[latency-controller]: Stall detected, adjusting target latency');
|
4394
4460
|
}
|
4395
4461
|
}
|
4396
4462
|
estimateLiveEdge() {
|
@@ -5162,18 +5228,13 @@ var ErrorActionFlags = {
|
|
5162
5228
|
MoveAllAlternatesMatchingHDCP: 2,
|
5163
5229
|
SwitchToSDR: 4
|
5164
5230
|
}; // Reserved for future use
|
5165
|
-
class ErrorController {
|
5231
|
+
class ErrorController extends Logger {
|
5166
5232
|
constructor(hls) {
|
5233
|
+
super('error-controller', hls.logger);
|
5167
5234
|
this.hls = void 0;
|
5168
5235
|
this.playlistError = 0;
|
5169
5236
|
this.penalizedRenditions = {};
|
5170
|
-
this.log = void 0;
|
5171
|
-
this.warn = void 0;
|
5172
|
-
this.error = void 0;
|
5173
5237
|
this.hls = hls;
|
5174
|
-
this.log = logger.log.bind(logger, `[info]:`);
|
5175
|
-
this.warn = logger.warn.bind(logger, `[warning]:`);
|
5176
|
-
this.error = logger.error.bind(logger, `[error]:`);
|
5177
5238
|
this.registerListeners();
|
5178
5239
|
}
|
5179
5240
|
registerListeners() {
|
@@ -5525,16 +5586,13 @@ class ErrorController {
|
|
5525
5586
|
}
|
5526
5587
|
}
|
5527
5588
|
|
5528
|
-
class BasePlaylistController {
|
5589
|
+
class BasePlaylistController extends Logger {
|
5529
5590
|
constructor(hls, logPrefix) {
|
5591
|
+
super(logPrefix, hls.logger);
|
5530
5592
|
this.hls = void 0;
|
5531
5593
|
this.timer = -1;
|
5532
5594
|
this.requestScheduled = -1;
|
5533
5595
|
this.canLoad = false;
|
5534
|
-
this.log = void 0;
|
5535
|
-
this.warn = void 0;
|
5536
|
-
this.log = logger.log.bind(logger, `${logPrefix}:`);
|
5537
|
-
this.warn = logger.warn.bind(logger, `${logPrefix}:`);
|
5538
5596
|
this.hls = hls;
|
5539
5597
|
}
|
5540
5598
|
destroy() {
|
@@ -5567,7 +5625,7 @@ class BasePlaylistController {
|
|
5567
5625
|
try {
|
5568
5626
|
uri = new self.URL(attr.URI, previous.url).href;
|
5569
5627
|
} catch (error) {
|
5570
|
-
|
5628
|
+
this.warn(`Could not construct new URL for Rendition Report: ${error}`);
|
5571
5629
|
uri = attr.URI || '';
|
5572
5630
|
}
|
5573
5631
|
// Use exact match. Otherwise, the last partial match, if any, will be used
|
@@ -6113,8 +6171,9 @@ function getCodecTiers(levels, audioTracksByGroup, minAutoLevel, maxAutoLevel) {
|
|
6113
6171
|
}, {});
|
6114
6172
|
}
|
6115
6173
|
|
6116
|
-
class AbrController {
|
6174
|
+
class AbrController extends Logger {
|
6117
6175
|
constructor(_hls) {
|
6176
|
+
super('abr', _hls.logger);
|
6118
6177
|
this.hls = void 0;
|
6119
6178
|
this.lastLevelLoadSec = 0;
|
6120
6179
|
this.lastLoadedFragLevel = -1;
|
@@ -6228,7 +6287,7 @@ class AbrController {
|
|
6228
6287
|
this.resetEstimator(nextLoadLevelBitrate);
|
6229
6288
|
}
|
6230
6289
|
this.clearTimer();
|
6231
|
-
|
6290
|
+
this.warn(`Fragment ${frag.sn}${part ? ' part ' + part.index : ''} of level ${frag.level} is loading too slowly;
|
6232
6291
|
Time to underbuffer: ${bufferStarvationDelay.toFixed(3)} s
|
6233
6292
|
Estimated load time for current fragment: ${fragLoadedDelay.toFixed(3)} s
|
6234
6293
|
Estimated load time for down switch fragment: ${fragLevelNextLoadedDelay.toFixed(3)} s
|
@@ -6248,7 +6307,7 @@ class AbrController {
|
|
6248
6307
|
}
|
6249
6308
|
resetEstimator(abrEwmaDefaultEstimate) {
|
6250
6309
|
if (abrEwmaDefaultEstimate) {
|
6251
|
-
|
6310
|
+
this.log(`setting initial bwe to ${abrEwmaDefaultEstimate}`);
|
6252
6311
|
this.hls.config.abrEwmaDefaultEstimate = abrEwmaDefaultEstimate;
|
6253
6312
|
}
|
6254
6313
|
this.firstSelection = -1;
|
@@ -6480,7 +6539,7 @@ class AbrController {
|
|
6480
6539
|
}
|
6481
6540
|
const firstLevel = this.hls.firstLevel;
|
6482
6541
|
const clamped = Math.min(Math.max(firstLevel, minAutoLevel), maxAutoLevel);
|
6483
|
-
|
6542
|
+
this.warn(`Could not find best starting auto level. Defaulting to first in playlist ${firstLevel} clamped to ${clamped}`);
|
6484
6543
|
return clamped;
|
6485
6544
|
}
|
6486
6545
|
get forcedAutoLevel() {
|
@@ -6565,13 +6624,13 @@ class AbrController {
|
|
6565
6624
|
// cap maxLoadingDelay and ensure it is not bigger 'than bitrate test' frag duration
|
6566
6625
|
const maxLoadingDelay = currentFragDuration ? Math.min(currentFragDuration, config.maxLoadingDelay) : config.maxLoadingDelay;
|
6567
6626
|
maxStarvationDelay = maxLoadingDelay - bitrateTestDelay;
|
6568
|
-
|
6627
|
+
this.info(`bitrate test took ${Math.round(1000 * bitrateTestDelay)}ms, set first fragment max fetchDuration to ${Math.round(1000 * maxStarvationDelay)} ms`);
|
6569
6628
|
// don't use conservative factor on bitrate test
|
6570
6629
|
bwFactor = bwUpFactor = 1;
|
6571
6630
|
}
|
6572
6631
|
}
|
6573
6632
|
const bestLevel = this.findBestLevel(avgbw, minAutoLevel, maxAutoLevel, bufferStarvationDelay, maxStarvationDelay, bwFactor, bwUpFactor);
|
6574
|
-
|
6633
|
+
this.info(`${bufferStarvationDelay ? 'rebuffering expected' : 'buffer is empty'}, optimal quality level ${bestLevel}`);
|
6575
6634
|
if (bestLevel > -1) {
|
6576
6635
|
return bestLevel;
|
6577
6636
|
}
|
@@ -6633,7 +6692,7 @@ class AbrController {
|
|
6633
6692
|
currentVideoRange = preferHDR ? videoRanges[videoRanges.length - 1] : videoRanges[0];
|
6634
6693
|
currentFrameRate = minFramerate;
|
6635
6694
|
currentBw = Math.max(currentBw, minBitrate);
|
6636
|
-
|
6695
|
+
this.log(`picked start tier ${JSON.stringify(startTier)}`);
|
6637
6696
|
} else {
|
6638
6697
|
currentCodecSet = level == null ? void 0 : level.codecSet;
|
6639
6698
|
currentVideoRange = level == null ? void 0 : level.videoRange;
|
@@ -6686,9 +6745,9 @@ class AbrController {
|
|
6686
6745
|
const forcedAutoLevel = this.forcedAutoLevel;
|
6687
6746
|
if (i !== loadLevel && (forcedAutoLevel === -1 || forcedAutoLevel !== loadLevel)) {
|
6688
6747
|
if (levelsSkipped.length) {
|
6689
|
-
|
6748
|
+
this.trace(`Skipped level(s) ${levelsSkipped.join(',')} of ${maxAutoLevel} max with CODECS and VIDEO-RANGE:"${levels[levelsSkipped[0]].codecs}" ${levels[levelsSkipped[0]].videoRange}; not compatible with "${level.codecs}" ${currentVideoRange}`);
|
6690
6749
|
}
|
6691
|
-
|
6750
|
+
this.info(`switch candidate:${selectionBaseLevel}->${i} adjustedbw(${Math.round(adjustedbw)})-bitrate=${Math.round(adjustedbw - bitrate)} ttfb:${ttfbEstimateSec.toFixed(1)} avgDuration:${avgDuration.toFixed(1)} maxFetchDuration:${maxFetchDuration.toFixed(1)} fetchDuration:${fetchDuration.toFixed(1)} firstSelection:${firstSelection} codecSet:${currentCodecSet} videoRange:${currentVideoRange} hls.loadLevel:${loadLevel}`);
|
6692
6751
|
}
|
6693
6752
|
if (firstSelection) {
|
6694
6753
|
this.firstSelection = i;
|
@@ -6701,8 +6760,12 @@ class AbrController {
|
|
6701
6760
|
return -1;
|
6702
6761
|
}
|
6703
6762
|
set nextAutoLevel(nextLevel) {
|
6704
|
-
const
|
6705
|
-
|
6763
|
+
const {
|
6764
|
+
maxAutoLevel,
|
6765
|
+
minAutoLevel
|
6766
|
+
} = this.hls;
|
6767
|
+
const value = Math.min(Math.max(nextLevel, minAutoLevel), maxAutoLevel);
|
6768
|
+
if (this._nextAutoLevel !== value) {
|
6706
6769
|
this.nextAutoLevelKey = '';
|
6707
6770
|
this._nextAutoLevel = value;
|
6708
6771
|
}
|
@@ -6920,8 +6983,9 @@ class BufferOperationQueue {
|
|
6920
6983
|
}
|
6921
6984
|
|
6922
6985
|
const VIDEO_CODEC_PROFILE_REPLACE = /(avc[1234]|hvc1|hev1|dvh[1e]|vp09|av01)(?:\.[^.,]+)+/;
|
6923
|
-
class BufferController {
|
6986
|
+
class BufferController extends Logger {
|
6924
6987
|
constructor(hls) {
|
6988
|
+
super('buffer-controller', hls.logger);
|
6925
6989
|
// The level details used to determine duration, target-duration and live
|
6926
6990
|
this.details = null;
|
6927
6991
|
// cache the self generated object url to detect hijack of video tag
|
@@ -6951,9 +7015,6 @@ class BufferController {
|
|
6951
7015
|
this.tracks = {};
|
6952
7016
|
this.pendingTracks = {};
|
6953
7017
|
this.sourceBuffer = void 0;
|
6954
|
-
this.log = void 0;
|
6955
|
-
this.warn = void 0;
|
6956
|
-
this.error = void 0;
|
6957
7018
|
this._onEndStreaming = event => {
|
6958
7019
|
if (!this.hls) {
|
6959
7020
|
return;
|
@@ -6999,15 +7060,11 @@ class BufferController {
|
|
6999
7060
|
_objectUrl
|
7000
7061
|
} = this;
|
7001
7062
|
if (mediaSrc !== _objectUrl) {
|
7002
|
-
|
7063
|
+
this.error(`Media element src was set while attaching MediaSource (${_objectUrl} > ${mediaSrc})`);
|
7003
7064
|
}
|
7004
7065
|
};
|
7005
7066
|
this.hls = hls;
|
7006
|
-
const logPrefix = '[buffer-controller]';
|
7007
7067
|
this.appendSource = hls.config.preferManagedMediaSource;
|
7008
|
-
this.log = logger.log.bind(logger, logPrefix);
|
7009
|
-
this.warn = logger.warn.bind(logger, logPrefix);
|
7010
|
-
this.error = logger.error.bind(logger, logPrefix);
|
7011
7068
|
this._initSourceBuffer();
|
7012
7069
|
this.registerListeners();
|
7013
7070
|
}
|
@@ -7020,6 +7077,12 @@ class BufferController {
|
|
7020
7077
|
this.lastMpegAudioChunk = null;
|
7021
7078
|
// @ts-ignore
|
7022
7079
|
this.hls = null;
|
7080
|
+
// @ts-ignore
|
7081
|
+
this._onMediaSourceOpen = this._onMediaSourceClose = null;
|
7082
|
+
// @ts-ignore
|
7083
|
+
this._onMediaSourceEnded = null;
|
7084
|
+
// @ts-ignore
|
7085
|
+
this._onStartStreaming = this._onEndStreaming = null;
|
7023
7086
|
}
|
7024
7087
|
registerListeners() {
|
7025
7088
|
const {
|
@@ -7182,6 +7245,7 @@ class BufferController {
|
|
7182
7245
|
this.resetBuffer(type);
|
7183
7246
|
});
|
7184
7247
|
this._initSourceBuffer();
|
7248
|
+
this.hls.resumeBuffering();
|
7185
7249
|
}
|
7186
7250
|
resetBuffer(type) {
|
7187
7251
|
const sb = this.sourceBuffer[type];
|
@@ -8019,7 +8083,7 @@ class CapLevelController {
|
|
8019
8083
|
const hls = this.hls;
|
8020
8084
|
const maxLevel = this.getMaxLevel(levels.length - 1);
|
8021
8085
|
if (maxLevel !== this.autoLevelCapping) {
|
8022
|
-
logger.log(`Setting autoLevelCapping to ${maxLevel}: ${levels[maxLevel].height}p@${levels[maxLevel].bitrate} for media ${this.mediaWidth}x${this.mediaHeight}`);
|
8086
|
+
hls.logger.log(`Setting autoLevelCapping to ${maxLevel}: ${levels[maxLevel].height}p@${levels[maxLevel].bitrate} for media ${this.mediaWidth}x${this.mediaHeight}`);
|
8023
8087
|
}
|
8024
8088
|
hls.autoLevelCapping = maxLevel;
|
8025
8089
|
if (hls.autoLevelCapping > this.autoLevelCapping && this.streamController) {
|
@@ -8197,10 +8261,10 @@ class FPSController {
|
|
8197
8261
|
totalDroppedFrames: droppedFrames
|
8198
8262
|
});
|
8199
8263
|
if (droppedFPS > 0) {
|
8200
|
-
// logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
|
8264
|
+
// hls.logger.log('checkFPS : droppedFPS/decodedFPS:' + droppedFPS/(1000 * currentDecoded / currentPeriod));
|
8201
8265
|
if (currentDropped > hls.config.fpsDroppedMonitoringThreshold * currentDecoded) {
|
8202
8266
|
let currentLevel = hls.currentLevel;
|
8203
|
-
logger.warn('drop FPS ratio greater than max allowed value for currentLevel: ' + currentLevel);
|
8267
|
+
hls.logger.warn('drop FPS ratio greater than max allowed value for currentLevel: ' + currentLevel);
|
8204
8268
|
if (currentLevel > 0 && (hls.autoLevelCapping === -1 || hls.autoLevelCapping >= currentLevel)) {
|
8205
8269
|
currentLevel = currentLevel - 1;
|
8206
8270
|
hls.trigger(Events.FPS_DROP_LEVEL_CAPPING, {
|
@@ -8233,10 +8297,10 @@ class FPSController {
|
|
8233
8297
|
}
|
8234
8298
|
|
8235
8299
|
const PATHWAY_PENALTY_DURATION_MS = 300000;
|
8236
|
-
class ContentSteeringController {
|
8300
|
+
class ContentSteeringController extends Logger {
|
8237
8301
|
constructor(hls) {
|
8302
|
+
super('content-steering', hls.logger);
|
8238
8303
|
this.hls = void 0;
|
8239
|
-
this.log = void 0;
|
8240
8304
|
this.loader = null;
|
8241
8305
|
this.uri = null;
|
8242
8306
|
this.pathwayId = '.';
|
@@ -8251,7 +8315,6 @@ class ContentSteeringController {
|
|
8251
8315
|
this.subtitleTracks = null;
|
8252
8316
|
this.penalizedPathways = {};
|
8253
8317
|
this.hls = hls;
|
8254
|
-
this.log = logger.log.bind(logger, `[content-steering]:`);
|
8255
8318
|
this.registerListeners();
|
8256
8319
|
}
|
8257
8320
|
registerListeners() {
|
@@ -8375,7 +8438,7 @@ class ContentSteeringController {
|
|
8375
8438
|
errorAction.resolved = this.pathwayId !== errorPathway;
|
8376
8439
|
}
|
8377
8440
|
if (!errorAction.resolved) {
|
8378
|
-
|
8441
|
+
this.warn(`Could not resolve ${data.details} ("${data.error.message}") with content-steering for Pathway: ${errorPathway} levels: ${levels ? levels.length : levels} priorities: ${JSON.stringify(pathwayPriority)} penalized: ${JSON.stringify(this.penalizedPathways)}`);
|
8379
8442
|
}
|
8380
8443
|
}
|
8381
8444
|
}
|
@@ -8546,7 +8609,7 @@ class ContentSteeringController {
|
|
8546
8609
|
onSuccess: (response, stats, context, networkDetails) => {
|
8547
8610
|
this.log(`Loaded steering manifest: "${url}"`);
|
8548
8611
|
const steeringData = response.data;
|
8549
|
-
if (steeringData.VERSION !== 1) {
|
8612
|
+
if ((steeringData == null ? void 0 : steeringData.VERSION) !== 1) {
|
8550
8613
|
this.log(`Steering VERSION ${steeringData.VERSION} not supported!`);
|
8551
8614
|
return;
|
8552
8615
|
}
|
@@ -9485,7 +9548,7 @@ function timelineConfig() {
|
|
9485
9548
|
/**
|
9486
9549
|
* @ignore
|
9487
9550
|
*/
|
9488
|
-
function mergeConfig(defaultConfig, userConfig) {
|
9551
|
+
function mergeConfig(defaultConfig, userConfig, logger) {
|
9489
9552
|
if ((userConfig.liveSyncDurationCount || userConfig.liveMaxLatencyDurationCount) && (userConfig.liveSyncDuration || userConfig.liveMaxLatencyDuration)) {
|
9490
9553
|
throw new Error("Illegal hls.js config: don't mix up liveSyncDurationCount/liveMaxLatencyDurationCount and liveSyncDuration/liveMaxLatencyDuration");
|
9491
9554
|
}
|
@@ -9555,7 +9618,7 @@ function deepCpy(obj) {
|
|
9555
9618
|
/**
|
9556
9619
|
* @ignore
|
9557
9620
|
*/
|
9558
|
-
function enableStreamingMode(config) {
|
9621
|
+
function enableStreamingMode(config, logger) {
|
9559
9622
|
const currentLoader = config.loader;
|
9560
9623
|
if (currentLoader !== FetchLoader && currentLoader !== XhrLoader) {
|
9561
9624
|
// If a developer has configured their own loader, respect that choice
|
@@ -9572,10 +9635,9 @@ function enableStreamingMode(config) {
|
|
9572
9635
|
}
|
9573
9636
|
}
|
9574
9637
|
|
9575
|
-
let chromeOrFirefox;
|
9576
9638
|
class LevelController extends BasePlaylistController {
|
9577
9639
|
constructor(hls, contentSteeringController) {
|
9578
|
-
super(hls, '
|
9640
|
+
super(hls, 'level-controller');
|
9579
9641
|
this._levels = [];
|
9580
9642
|
this._firstLevel = -1;
|
9581
9643
|
this._maxAutoLevel = -1;
|
@@ -9646,23 +9708,15 @@ class LevelController extends BasePlaylistController {
|
|
9646
9708
|
let videoCodecFound = false;
|
9647
9709
|
let audioCodecFound = false;
|
9648
9710
|
data.levels.forEach(levelParsed => {
|
9649
|
-
var
|
9711
|
+
var _videoCodec;
|
9650
9712
|
const attributes = levelParsed.attrs;
|
9651
|
-
|
9652
|
-
// erase audio codec info if browser does not support mp4a.40.34.
|
9653
|
-
// demuxer will autodetect codec and fallback to mpeg/audio
|
9654
9713
|
let {
|
9655
9714
|
audioCodec,
|
9656
9715
|
videoCodec
|
9657
9716
|
} = levelParsed;
|
9658
|
-
if (((_audioCodec = audioCodec) == null ? void 0 : _audioCodec.indexOf('mp4a.40.34')) !== -1) {
|
9659
|
-
chromeOrFirefox || (chromeOrFirefox = /chrome|firefox/i.test(navigator.userAgent));
|
9660
|
-
if (chromeOrFirefox) {
|
9661
|
-
levelParsed.audioCodec = audioCodec = undefined;
|
9662
|
-
}
|
9663
|
-
}
|
9664
9717
|
if (audioCodec) {
|
9665
|
-
|
9718
|
+
// Returns empty and set to undefined for 'mp4a.40.34' with fallback to 'audio/mpeg' SourceBuffer
|
9719
|
+
levelParsed.audioCodec = audioCodec = getCodecCompatibleName(audioCodec, preferManagedMediaSource) || undefined;
|
9666
9720
|
}
|
9667
9721
|
if (((_videoCodec = videoCodec) == null ? void 0 : _videoCodec.indexOf('avc1')) === 0) {
|
9668
9722
|
videoCodec = levelParsed.videoCodec = convertAVC1ToAVCOTI(videoCodec);
|
@@ -10790,8 +10844,8 @@ function createLoaderContext(frag, part = null) {
|
|
10790
10844
|
var _frag$decryptdata;
|
10791
10845
|
let byteRangeStart = start;
|
10792
10846
|
let byteRangeEnd = end;
|
10793
|
-
if (frag.sn === 'initSegment' && ((_frag$decryptdata = frag.decryptdata) == null ? void 0 : _frag$decryptdata.method)
|
10794
|
-
// MAP segment encrypted with method 'AES-128', when served with HTTP Range,
|
10847
|
+
if (frag.sn === 'initSegment' && isMethodFullSegmentAesCbc((_frag$decryptdata = frag.decryptdata) == null ? void 0 : _frag$decryptdata.method)) {
|
10848
|
+
// MAP segment encrypted with method 'AES-128' or 'AES-256' (cbc), when served with HTTP Range,
|
10795
10849
|
// has the unencrypted size specified in the range.
|
10796
10850
|
// Ref: https://tools.ietf.org/html/draft-pantos-hls-rfc8216bis-08#section-6.3.6
|
10797
10851
|
const fragmentLen = end - start;
|
@@ -10824,6 +10878,9 @@ function createGapLoadError(frag, part) {
|
|
10824
10878
|
(part ? part : frag).stats.aborted = true;
|
10825
10879
|
return new LoadError(errorData);
|
10826
10880
|
}
|
10881
|
+
function isMethodFullSegmentAesCbc(method) {
|
10882
|
+
return method === 'AES-128' || method === 'AES-256';
|
10883
|
+
}
|
10827
10884
|
class LoadError extends Error {
|
10828
10885
|
constructor(data) {
|
10829
10886
|
super(data.error.message);
|
@@ -10969,6 +11026,8 @@ class KeyLoader {
|
|
10969
11026
|
}
|
10970
11027
|
return this.loadKeyEME(keyInfo, frag);
|
10971
11028
|
case 'AES-128':
|
11029
|
+
case 'AES-256':
|
11030
|
+
case 'AES-256-CTR':
|
10972
11031
|
return this.loadKeyHTTP(keyInfo, frag);
|
10973
11032
|
default:
|
10974
11033
|
return Promise.reject(this.createKeyLoadError(frag, ErrorDetails.KEY_LOAD_ERROR, new Error(`Key supplied with unsupported METHOD: "${decryptdata.method}"`)));
|
@@ -11104,8 +11163,9 @@ class KeyLoader {
|
|
11104
11163
|
* we are limiting the task execution per call stack to exactly one, but scheduling/post-poning further
|
11105
11164
|
* task processing on the next main loop iteration (also known as "next tick" in the Node/JS runtime lingo).
|
11106
11165
|
*/
|
11107
|
-
class TaskLoop {
|
11108
|
-
constructor() {
|
11166
|
+
class TaskLoop extends Logger {
|
11167
|
+
constructor(label, logger) {
|
11168
|
+
super(label, logger);
|
11109
11169
|
this._boundTick = void 0;
|
11110
11170
|
this._tickTimer = null;
|
11111
11171
|
this._tickInterval = null;
|
@@ -11373,33 +11433,61 @@ function alignMediaPlaylistByPDT(details, refDetails) {
|
|
11373
11433
|
}
|
11374
11434
|
|
11375
11435
|
class AESCrypto {
|
11376
|
-
constructor(subtle, iv) {
|
11436
|
+
constructor(subtle, iv, aesMode) {
|
11377
11437
|
this.subtle = void 0;
|
11378
11438
|
this.aesIV = void 0;
|
11439
|
+
this.aesMode = void 0;
|
11379
11440
|
this.subtle = subtle;
|
11380
11441
|
this.aesIV = iv;
|
11442
|
+
this.aesMode = aesMode;
|
11381
11443
|
}
|
11382
11444
|
decrypt(data, key) {
|
11383
|
-
|
11384
|
-
|
11385
|
-
|
11386
|
-
|
11445
|
+
switch (this.aesMode) {
|
11446
|
+
case DecrypterAesMode.cbc:
|
11447
|
+
return this.subtle.decrypt({
|
11448
|
+
name: 'AES-CBC',
|
11449
|
+
iv: this.aesIV
|
11450
|
+
}, key, data);
|
11451
|
+
case DecrypterAesMode.ctr:
|
11452
|
+
return this.subtle.decrypt({
|
11453
|
+
name: 'AES-CTR',
|
11454
|
+
counter: this.aesIV,
|
11455
|
+
length: 64
|
11456
|
+
},
|
11457
|
+
//64 : NIST SP800-38A standard suggests that the counter should occupy half of the counter block
|
11458
|
+
key, data);
|
11459
|
+
default:
|
11460
|
+
throw new Error(`[AESCrypto] invalid aes mode ${this.aesMode}`);
|
11461
|
+
}
|
11387
11462
|
}
|
11388
11463
|
}
|
11389
11464
|
|
11390
11465
|
class FastAESKey {
|
11391
|
-
constructor(subtle, key) {
|
11466
|
+
constructor(subtle, key, aesMode) {
|
11392
11467
|
this.subtle = void 0;
|
11393
11468
|
this.key = void 0;
|
11469
|
+
this.aesMode = void 0;
|
11394
11470
|
this.subtle = subtle;
|
11395
11471
|
this.key = key;
|
11472
|
+
this.aesMode = aesMode;
|
11396
11473
|
}
|
11397
11474
|
expandKey() {
|
11475
|
+
const subtleAlgoName = getSubtleAlgoName(this.aesMode);
|
11398
11476
|
return this.subtle.importKey('raw', this.key, {
|
11399
|
-
name:
|
11477
|
+
name: subtleAlgoName
|
11400
11478
|
}, false, ['encrypt', 'decrypt']);
|
11401
11479
|
}
|
11402
11480
|
}
|
11481
|
+
function getSubtleAlgoName(aesMode) {
|
11482
|
+
switch (aesMode) {
|
11483
|
+
case DecrypterAesMode.cbc:
|
11484
|
+
return 'AES-CBC';
|
11485
|
+
case DecrypterAesMode.ctr:
|
11486
|
+
return 'AES-CTR';
|
11487
|
+
default:
|
11488
|
+
throw new Error(`[FastAESKey] invalid aes mode ${aesMode}`);
|
11489
|
+
}
|
11490
|
+
}
|
11403
11491
|
|
11404
11492
|
// PKCS7
|
11405
11493
|
function removePadding(array) {
|
@@ -11649,7 +11737,8 @@ class Decrypter {
|
|
11649
11737
|
this.currentIV = null;
|
11650
11738
|
this.currentResult = null;
|
11651
11739
|
this.useSoftware = void 0;
|
11652
|
-
this.
|
11740
|
+
this.enableSoftwareAES = void 0;
|
11741
|
+
this.enableSoftwareAES = config.enableSoftwareAES;
|
11653
11742
|
this.removePKCS7Padding = removePKCS7Padding;
|
11654
11743
|
// built in decryptor expects PKCS7 padding
|
11655
11744
|
if (removePKCS7Padding) {
|
@@ -11662,9 +11751,7 @@ class Decrypter {
|
|
11662
11751
|
/* no-op */
|
11663
11752
|
}
|
11664
11753
|
}
|
11665
|
-
|
11666
|
-
this.useSoftware = true;
|
11667
|
-
}
|
11754
|
+
this.useSoftware = this.subtle === null;
|
11668
11755
|
}
|
11669
11756
|
destroy() {
|
11670
11757
|
this.subtle = null;
|
@@ -11702,10 +11789,10 @@ class Decrypter {
|
|
11702
11789
|
this.softwareDecrypter = null;
|
11703
11790
|
}
|
11704
11791
|
}
|
11705
|
-
decrypt(data, key, iv) {
|
11792
|
+
decrypt(data, key, iv, aesMode) {
|
11706
11793
|
if (this.useSoftware) {
|
11707
11794
|
return new Promise((resolve, reject) => {
|
11708
|
-
this.softwareDecrypt(new Uint8Array(data), key, iv);
|
11795
|
+
this.softwareDecrypt(new Uint8Array(data), key, iv, aesMode);
|
11709
11796
|
const decryptResult = this.flush();
|
11710
11797
|
if (decryptResult) {
|
11711
11798
|
resolve(decryptResult.buffer);
|
@@ -11714,17 +11801,21 @@ class Decrypter {
|
|
11714
11801
|
}
|
11715
11802
|
});
|
11716
11803
|
}
|
11717
|
-
return this.webCryptoDecrypt(new Uint8Array(data), key, iv);
|
11804
|
+
return this.webCryptoDecrypt(new Uint8Array(data), key, iv, aesMode);
|
11718
11805
|
}
|
11719
11806
|
|
11720
11807
|
// Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
|
11721
11808
|
// data is handled in the flush() call
|
11722
|
-
softwareDecrypt(data, key, iv) {
|
11809
|
+
softwareDecrypt(data, key, iv, aesMode) {
|
11723
11810
|
const {
|
11724
11811
|
currentIV,
|
11725
11812
|
currentResult,
|
11726
11813
|
remainderData
|
11727
11814
|
} = this;
|
11815
|
+
if (aesMode !== DecrypterAesMode.cbc || key.byteLength !== 16) {
|
11816
|
+
logger.warn('SoftwareDecrypt: can only handle AES-128-CBC');
|
11817
|
+
return null;
|
11818
|
+
}
|
11728
11819
|
this.logOnce('JS AES decrypt');
|
11729
11820
|
// The output is staggered during progressive parsing - the current result is cached, and emitted on the next call
|
11730
11821
|
// This is done in order to strip PKCS7 padding, which is found at the end of each segment. We only know we've reached
|
@@ -11757,11 +11848,11 @@ class Decrypter {
|
|
11757
11848
|
}
|
11758
11849
|
return result;
|
11759
11850
|
}
|
11760
|
-
webCryptoDecrypt(data, key, iv) {
|
11851
|
+
webCryptoDecrypt(data, key, iv, aesMode) {
|
11761
11852
|
const subtle = this.subtle;
|
11762
11853
|
if (this.key !== key || !this.fastAesKey) {
|
11763
11854
|
this.key = key;
|
11764
|
-
this.fastAesKey = new FastAESKey(subtle, key);
|
11855
|
+
this.fastAesKey = new FastAESKey(subtle, key, aesMode);
|
11765
11856
|
}
|
11766
11857
|
return this.fastAesKey.expandKey().then(aesKey => {
|
11767
11858
|
// decrypt using web crypto
|
@@ -11769,22 +11860,25 @@ class Decrypter {
|
|
11769
11860
|
return Promise.reject(new Error('web crypto not initialized'));
|
11770
11861
|
}
|
11771
11862
|
this.logOnce('WebCrypto AES decrypt');
|
11772
|
-
const crypto = new AESCrypto(subtle, new Uint8Array(iv));
|
11863
|
+
const crypto = new AESCrypto(subtle, new Uint8Array(iv), aesMode);
|
11773
11864
|
return crypto.decrypt(data.buffer, aesKey);
|
11774
11865
|
}).catch(err => {
|
11775
11866
|
logger.warn(`[decrypter]: WebCrypto Error, disable WebCrypto API, ${err.name}: ${err.message}`);
|
11776
|
-
return this.onWebCryptoError(data, key, iv);
|
11867
|
+
return this.onWebCryptoError(data, key, iv, aesMode);
|
11777
11868
|
});
|
11778
11869
|
}
|
11779
|
-
onWebCryptoError(data, key, iv) {
|
11780
|
-
|
11781
|
-
|
11782
|
-
|
11783
|
-
|
11784
|
-
|
11785
|
-
|
11870
|
+
onWebCryptoError(data, key, iv, aesMode) {
|
11871
|
+
const enableSoftwareAES = this.enableSoftwareAES;
|
11872
|
+
if (enableSoftwareAES) {
|
11873
|
+
this.useSoftware = true;
|
11874
|
+
this.logEnabled = true;
|
11875
|
+
this.softwareDecrypt(data, key, iv, aesMode);
|
11876
|
+
const decryptResult = this.flush();
|
11877
|
+
if (decryptResult) {
|
11878
|
+
return decryptResult.buffer;
|
11879
|
+
}
|
11786
11880
|
}
|
11787
|
-
throw new Error('WebCrypto and softwareDecrypt: failed to decrypt data');
|
11881
|
+
throw new Error('WebCrypto' + (enableSoftwareAES ? ' and softwareDecrypt' : '') + ': failed to decrypt data');
|
11788
11882
|
}
|
11789
11883
|
getValidChunk(data) {
|
11790
11884
|
let currentChunk = data;
|
@@ -11835,7 +11929,7 @@ const State = {
|
|
11835
11929
|
};
|
11836
11930
|
class BaseStreamController extends TaskLoop {
|
11837
11931
|
constructor(hls, fragmentTracker, keyLoader, logPrefix, playlistType) {
|
11838
|
-
super();
|
11932
|
+
super(logPrefix, hls.logger);
|
11839
11933
|
this.hls = void 0;
|
11840
11934
|
this.fragPrevious = null;
|
11841
11935
|
this.fragCurrent = null;
|
@@ -11860,22 +11954,89 @@ class BaseStreamController extends TaskLoop {
|
|
11860
11954
|
this.startFragRequested = false;
|
11861
11955
|
this.decrypter = void 0;
|
11862
11956
|
this.initPTS = [];
|
11863
|
-
this.
|
11864
|
-
this.
|
11865
|
-
|
11866
|
-
|
11867
|
-
|
11957
|
+
this.buffering = true;
|
11958
|
+
this.onMediaSeeking = () => {
|
11959
|
+
const {
|
11960
|
+
config,
|
11961
|
+
fragCurrent,
|
11962
|
+
media,
|
11963
|
+
mediaBuffer,
|
11964
|
+
state
|
11965
|
+
} = this;
|
11966
|
+
const currentTime = media ? media.currentTime : 0;
|
11967
|
+
const bufferInfo = BufferHelper.bufferInfo(mediaBuffer ? mediaBuffer : media, currentTime, config.maxBufferHole);
|
11968
|
+
this.log(`media seeking to ${isFiniteNumber(currentTime) ? currentTime.toFixed(3) : currentTime}, state: ${state}`);
|
11969
|
+
if (this.state === State.ENDED) {
|
11970
|
+
this.resetLoadingState();
|
11971
|
+
} else if (fragCurrent) {
|
11972
|
+
// Seeking while frag load is in progress
|
11973
|
+
const tolerance = config.maxFragLookUpTolerance;
|
11974
|
+
const fragStartOffset = fragCurrent.start - tolerance;
|
11975
|
+
const fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance;
|
11976
|
+
// if seeking out of buffered range or into new one
|
11977
|
+
if (!bufferInfo.len || fragEndOffset < bufferInfo.start || fragStartOffset > bufferInfo.end) {
|
11978
|
+
const pastFragment = currentTime > fragEndOffset;
|
11979
|
+
// if the seek position is outside the current fragment range
|
11980
|
+
if (currentTime < fragStartOffset || pastFragment) {
|
11981
|
+
if (pastFragment && fragCurrent.loader) {
|
11982
|
+
this.log('seeking outside of buffer while fragment load in progress, cancel fragment load');
|
11983
|
+
fragCurrent.abortRequests();
|
11984
|
+
this.resetLoadingState();
|
11985
|
+
}
|
11986
|
+
this.fragPrevious = null;
|
11987
|
+
}
|
11988
|
+
}
|
11989
|
+
}
|
11990
|
+
if (media) {
|
11991
|
+
// Remove gap fragments
|
11992
|
+
this.fragmentTracker.removeFragmentsInRange(currentTime, Infinity, this.playlistType, true);
|
11993
|
+
this.lastCurrentTime = currentTime;
|
11994
|
+
}
|
11995
|
+
|
11996
|
+
// in case seeking occurs although no media buffered, adjust startPosition and nextLoadPosition to seek target
|
11997
|
+
if (!this.loadedmetadata && !bufferInfo.len) {
|
11998
|
+
this.nextLoadPosition = this.startPosition = currentTime;
|
11999
|
+
}
|
12000
|
+
|
12001
|
+
// Async tick to speed up processing
|
12002
|
+
this.tickImmediate();
|
12003
|
+
};
|
12004
|
+
this.onMediaEnded = () => {
|
12005
|
+
// reset startPosition and lastCurrentTime to restart playback @ stream beginning
|
12006
|
+
this.startPosition = this.lastCurrentTime = 0;
|
12007
|
+
if (this.playlistType === PlaylistLevelType.MAIN) {
|
12008
|
+
this.hls.trigger(Events.MEDIA_ENDED, {
|
12009
|
+
stalled: false
|
12010
|
+
});
|
12011
|
+
}
|
12012
|
+
};
|
11868
12013
|
this.playlistType = playlistType;
|
11869
|
-
this.logPrefix = logPrefix;
|
11870
|
-
this.log = logger.log.bind(logger, `${logPrefix}:`);
|
11871
|
-
this.warn = logger.warn.bind(logger, `${logPrefix}:`);
|
11872
12014
|
this.hls = hls;
|
11873
12015
|
this.fragmentLoader = new FragmentLoader(hls.config);
|
11874
12016
|
this.keyLoader = keyLoader;
|
11875
12017
|
this.fragmentTracker = fragmentTracker;
|
11876
12018
|
this.config = hls.config;
|
11877
12019
|
this.decrypter = new Decrypter(hls.config);
|
12020
|
+
}
|
12021
|
+
registerListeners() {
|
12022
|
+
const {
|
12023
|
+
hls
|
12024
|
+
} = this;
|
12025
|
+
hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
|
12026
|
+
hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
|
12027
|
+
hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
|
11878
12028
|
hls.on(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
|
12029
|
+
hls.on(Events.ERROR, this.onError, this);
|
12030
|
+
}
|
12031
|
+
unregisterListeners() {
|
12032
|
+
const {
|
12033
|
+
hls
|
12034
|
+
} = this;
|
12035
|
+
hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
|
12036
|
+
hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
|
12037
|
+
hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
|
12038
|
+
hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
|
12039
|
+
hls.off(Events.ERROR, this.onError, this);
|
11879
12040
|
}
|
11880
12041
|
doTick() {
|
11881
12042
|
this.onTickEnd();
|
@@ -11899,6 +12060,12 @@ class BaseStreamController extends TaskLoop {
|
|
11899
12060
|
this.clearNextTick();
|
11900
12061
|
this.state = State.STOPPED;
|
11901
12062
|
}
|
12063
|
+
pauseBuffering() {
|
12064
|
+
this.buffering = false;
|
12065
|
+
}
|
12066
|
+
resumeBuffering() {
|
12067
|
+
this.buffering = true;
|
12068
|
+
}
|
11902
12069
|
_streamEnded(bufferInfo, levelDetails) {
|
11903
12070
|
// If playlist is live, there is another buffered range after the current range, nothing buffered, media is detached,
|
11904
12071
|
// of nothing loading/loaded return false
|
@@ -11929,10 +12096,8 @@ class BaseStreamController extends TaskLoop {
|
|
11929
12096
|
}
|
11930
12097
|
onMediaAttached(event, data) {
|
11931
12098
|
const media = this.media = this.mediaBuffer = data.media;
|
11932
|
-
|
11933
|
-
|
11934
|
-
media.addEventListener('seeking', this.onvseeking);
|
11935
|
-
media.addEventListener('ended', this.onvended);
|
12099
|
+
media.addEventListener('seeking', this.onMediaSeeking);
|
12100
|
+
media.addEventListener('ended', this.onMediaEnded);
|
11936
12101
|
const config = this.config;
|
11937
12102
|
if (this.levels && config.autoStartLoad && this.state === State.STOPPED) {
|
11938
12103
|
this.startLoad(config.startPosition);
|
@@ -11946,10 +12111,9 @@ class BaseStreamController extends TaskLoop {
|
|
11946
12111
|
}
|
11947
12112
|
|
11948
12113
|
// remove video listeners
|
11949
|
-
if (media
|
11950
|
-
media.removeEventListener('seeking', this.
|
11951
|
-
media.removeEventListener('ended', this.
|
11952
|
-
this.onvseeking = this.onvended = null;
|
12114
|
+
if (media) {
|
12115
|
+
media.removeEventListener('seeking', this.onMediaSeeking);
|
12116
|
+
media.removeEventListener('ended', this.onMediaEnded);
|
11953
12117
|
}
|
11954
12118
|
if (this.keyLoader) {
|
11955
12119
|
this.keyLoader.detach();
|
@@ -11959,56 +12123,8 @@ class BaseStreamController extends TaskLoop {
|
|
11959
12123
|
this.fragmentTracker.removeAllFragments();
|
11960
12124
|
this.stopLoad();
|
11961
12125
|
}
|
11962
|
-
|
11963
|
-
|
11964
|
-
config,
|
11965
|
-
fragCurrent,
|
11966
|
-
media,
|
11967
|
-
mediaBuffer,
|
11968
|
-
state
|
11969
|
-
} = this;
|
11970
|
-
const currentTime = media ? media.currentTime : 0;
|
11971
|
-
const bufferInfo = BufferHelper.bufferInfo(mediaBuffer ? mediaBuffer : media, currentTime, config.maxBufferHole);
|
11972
|
-
this.log(`media seeking to ${isFiniteNumber(currentTime) ? currentTime.toFixed(3) : currentTime}, state: ${state}`);
|
11973
|
-
if (this.state === State.ENDED) {
|
11974
|
-
this.resetLoadingState();
|
11975
|
-
} else if (fragCurrent) {
|
11976
|
-
// Seeking while frag load is in progress
|
11977
|
-
const tolerance = config.maxFragLookUpTolerance;
|
11978
|
-
const fragStartOffset = fragCurrent.start - tolerance;
|
11979
|
-
const fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance;
|
11980
|
-
// if seeking out of buffered range or into new one
|
11981
|
-
if (!bufferInfo.len || fragEndOffset < bufferInfo.start || fragStartOffset > bufferInfo.end) {
|
11982
|
-
const pastFragment = currentTime > fragEndOffset;
|
11983
|
-
// if the seek position is outside the current fragment range
|
11984
|
-
if (currentTime < fragStartOffset || pastFragment) {
|
11985
|
-
if (pastFragment && fragCurrent.loader) {
|
11986
|
-
this.log('seeking outside of buffer while fragment load in progress, cancel fragment load');
|
11987
|
-
fragCurrent.abortRequests();
|
11988
|
-
this.resetLoadingState();
|
11989
|
-
}
|
11990
|
-
this.fragPrevious = null;
|
11991
|
-
}
|
11992
|
-
}
|
11993
|
-
}
|
11994
|
-
if (media) {
|
11995
|
-
// Remove gap fragments
|
11996
|
-
this.fragmentTracker.removeFragmentsInRange(currentTime, Infinity, this.playlistType, true);
|
11997
|
-
this.lastCurrentTime = currentTime;
|
11998
|
-
}
|
11999
|
-
|
12000
|
-
// in case seeking occurs although no media buffered, adjust startPosition and nextLoadPosition to seek target
|
12001
|
-
if (!this.loadedmetadata && !bufferInfo.len) {
|
12002
|
-
this.nextLoadPosition = this.startPosition = currentTime;
|
12003
|
-
}
|
12004
|
-
|
12005
|
-
// Async tick to speed up processing
|
12006
|
-
this.tickImmediate();
|
12007
|
-
}
|
12008
|
-
onMediaEnded() {
|
12009
|
-
// reset startPosition and lastCurrentTime to restart playback @ stream beginning
|
12010
|
-
this.startPosition = this.lastCurrentTime = 0;
|
12011
|
-
}
|
12126
|
+
onManifestLoading() {}
|
12127
|
+
onError(event, data) {}
|
12012
12128
|
onManifestLoaded(event, data) {
|
12013
12129
|
this.startTimeOffset = data.startTimeOffset;
|
12014
12130
|
this.initPTS = [];
|
@@ -12018,7 +12134,7 @@ class BaseStreamController extends TaskLoop {
|
|
12018
12134
|
this.stopLoad();
|
12019
12135
|
super.onHandlerDestroying();
|
12020
12136
|
// @ts-ignore
|
12021
|
-
this.hls = null;
|
12137
|
+
this.hls = this.onMediaSeeking = this.onMediaEnded = null;
|
12022
12138
|
}
|
12023
12139
|
onHandlerDestroyed() {
|
12024
12140
|
this.state = State.STOPPED;
|
@@ -12149,10 +12265,10 @@ class BaseStreamController extends TaskLoop {
|
|
12149
12265
|
const decryptData = frag.decryptdata;
|
12150
12266
|
|
12151
12267
|
// check to see if the payload needs to be decrypted
|
12152
|
-
if (payload && payload.byteLength > 0 && decryptData != null && decryptData.key && decryptData.iv && decryptData.method
|
12268
|
+
if (payload && payload.byteLength > 0 && decryptData != null && decryptData.key && decryptData.iv && isFullSegmentEncryption(decryptData.method)) {
|
12153
12269
|
const startTime = self.performance.now();
|
12154
12270
|
// decrypt init segment data
|
12155
|
-
return this.decrypter.decrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer).catch(err => {
|
12271
|
+
return this.decrypter.decrypt(new Uint8Array(payload), decryptData.key.buffer, decryptData.iv.buffer, getAesModeFromFullSegmentMethod(decryptData.method)).catch(err => {
|
12156
12272
|
hls.trigger(Events.ERROR, {
|
12157
12273
|
type: ErrorTypes.MEDIA_ERROR,
|
12158
12274
|
details: ErrorDetails.FRAG_DECRYPT_ERROR,
|
@@ -12264,7 +12380,7 @@ class BaseStreamController extends TaskLoop {
|
|
12264
12380
|
}
|
12265
12381
|
let keyLoadingPromise = null;
|
12266
12382
|
if (frag.encrypted && !((_frag$decryptdata = frag.decryptdata) != null && _frag$decryptdata.key)) {
|
12267
|
-
this.log(`Loading key for ${frag.sn} of [${details.startSN}-${details.endSN}], ${this.
|
12383
|
+
this.log(`Loading key for ${frag.sn} of [${details.startSN}-${details.endSN}], ${this.playlistType === PlaylistLevelType.MAIN ? 'level' : 'track'} ${frag.level}`);
|
12268
12384
|
this.state = State.KEY_LOADING;
|
12269
12385
|
this.fragCurrent = frag;
|
12270
12386
|
keyLoadingPromise = this.keyLoader.load(frag).then(keyLoadedData => {
|
@@ -12295,7 +12411,7 @@ class BaseStreamController extends TaskLoop {
|
|
12295
12411
|
const partIndex = this.getNextPart(partList, frag, targetBufferTime);
|
12296
12412
|
if (partIndex > -1) {
|
12297
12413
|
const part = partList[partIndex];
|
12298
|
-
this.log(`Loading part sn: ${frag.sn} p: ${part.index} cc: ${frag.cc} of playlist [${details.startSN}-${details.endSN}] parts [0-${partIndex}-${partList.length - 1}] ${this.
|
12414
|
+
this.log(`Loading part sn: ${frag.sn} p: ${part.index} cc: ${frag.cc} of playlist [${details.startSN}-${details.endSN}] parts [0-${partIndex}-${partList.length - 1}] ${this.playlistType === PlaylistLevelType.MAIN ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
|
12299
12415
|
this.nextLoadPosition = part.start + part.duration;
|
12300
12416
|
this.state = State.FRAG_LOADING;
|
12301
12417
|
let _result;
|
@@ -12324,7 +12440,7 @@ class BaseStreamController extends TaskLoop {
|
|
12324
12440
|
}
|
12325
12441
|
}
|
12326
12442
|
}
|
12327
|
-
this.log(`Loading fragment ${frag.sn} cc: ${frag.cc} ${details ? 'of [' + details.startSN + '-' + details.endSN + '] ' : ''}${this.
|
12443
|
+
this.log(`Loading fragment ${frag.sn} cc: ${frag.cc} ${details ? 'of [' + details.startSN + '-' + details.endSN + '] ' : ''}${this.playlistType === PlaylistLevelType.MAIN ? 'level' : 'track'}: ${frag.level}, target: ${parseFloat(targetBufferTime.toFixed(3))}`);
|
12328
12444
|
// Don't update nextLoadPosition for fragments which are not buffered
|
12329
12445
|
if (isFiniteNumber(frag.sn) && !this.bitrateTest) {
|
12330
12446
|
this.nextLoadPosition = frag.start + frag.duration;
|
@@ -12909,7 +13025,7 @@ class BaseStreamController extends TaskLoop {
|
|
12909
13025
|
errorAction.resolved = true;
|
12910
13026
|
}
|
12911
13027
|
} else {
|
12912
|
-
|
13028
|
+
this.warn(`${data.details} reached or exceeded max retry (${retryCount})`);
|
12913
13029
|
return;
|
12914
13030
|
}
|
12915
13031
|
} else if ((errorAction == null ? void 0 : errorAction.action) === NetworkErrorAction.SendAlternateToPenaltyBox) {
|
@@ -13304,6 +13420,7 @@ const initPTSFn = (timestamp, timeOffset, initPTS) => {
|
|
13304
13420
|
*/
|
13305
13421
|
function getAudioConfig(observer, data, offset, audioCodec) {
|
13306
13422
|
let adtsObjectType;
|
13423
|
+
let originalAdtsObjectType;
|
13307
13424
|
let adtsExtensionSamplingIndex;
|
13308
13425
|
let adtsChannelConfig;
|
13309
13426
|
let config;
|
@@ -13311,7 +13428,7 @@ function getAudioConfig(observer, data, offset, audioCodec) {
|
|
13311
13428
|
const manifestCodec = audioCodec;
|
13312
13429
|
const adtsSamplingRates = [96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350];
|
13313
13430
|
// byte 2
|
13314
|
-
adtsObjectType = ((data[offset + 2] & 0xc0) >>> 6) + 1;
|
13431
|
+
adtsObjectType = originalAdtsObjectType = ((data[offset + 2] & 0xc0) >>> 6) + 1;
|
13315
13432
|
const adtsSamplingIndex = (data[offset + 2] & 0x3c) >>> 2;
|
13316
13433
|
if (adtsSamplingIndex > adtsSamplingRates.length - 1) {
|
13317
13434
|
const error = new Error(`invalid ADTS sampling index:${adtsSamplingIndex}`);
|
@@ -13328,8 +13445,8 @@ function getAudioConfig(observer, data, offset, audioCodec) {
|
|
13328
13445
|
// byte 3
|
13329
13446
|
adtsChannelConfig |= (data[offset + 3] & 0xc0) >>> 6;
|
13330
13447
|
logger.log(`manifest codec:${audioCodec}, ADTS type:${adtsObjectType}, samplingIndex:${adtsSamplingIndex}`);
|
13331
|
-
//
|
13332
|
-
if (/firefox/i.test(userAgent)) {
|
13448
|
+
// Firefox and Pale Moon: freq less than 24kHz = AAC SBR (HE-AAC)
|
13449
|
+
if (/firefox|palemoon/i.test(userAgent)) {
|
13333
13450
|
if (adtsSamplingIndex >= 6) {
|
13334
13451
|
adtsObjectType = 5;
|
13335
13452
|
config = new Array(4);
|
@@ -13423,6 +13540,7 @@ function getAudioConfig(observer, data, offset, audioCodec) {
|
|
13423
13540
|
samplerate: adtsSamplingRates[adtsSamplingIndex],
|
13424
13541
|
channelCount: adtsChannelConfig,
|
13425
13542
|
codec: 'mp4a.40.' + adtsObjectType,
|
13543
|
+
parsedCodec: 'mp4a.40.' + originalAdtsObjectType,
|
13426
13544
|
manifestCodec
|
13427
13545
|
};
|
13428
13546
|
}
|
@@ -13477,7 +13595,8 @@ function initTrackConfig(track, observer, data, offset, audioCodec) {
|
|
13477
13595
|
track.channelCount = config.channelCount;
|
13478
13596
|
track.codec = config.codec;
|
13479
13597
|
track.manifestCodec = config.manifestCodec;
|
13480
|
-
|
13598
|
+
track.parsedCodec = config.parsedCodec;
|
13599
|
+
logger.log(`parsed codec:${track.parsedCodec}, codec:${track.codec}, rate:${config.samplerate}, channels:${config.channelCount}`);
|
13481
13600
|
}
|
13482
13601
|
}
|
13483
13602
|
function getFrameDuration(samplerate) {
|
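The two hunks above keep the raw ADTS object type (originalAdtsObjectType) alongside the possibly rewritten adtsObjectType and surface it on the track as parsedCodec. A minimal standalone sketch of the byte-2 fields being read, using the same masks as the diff (the helper name is hypothetical):

function readAdtsByte2(data, offset) {
  // MPEG-4 Audio Object Type is stored minus one in the top two bits of byte 2
  const objectType = ((data[offset + 2] & 0xc0) >>> 6) + 1;
  // sampling_frequency_index occupies the next four bits
  const samplingIndex = (data[offset + 2] & 0x3c) >>> 2;
  return { objectType, samplingIndex, parsedCodec: 'mp4a.40.' + objectType };
}

The Firefox/Pale Moon branch may still rewrite the object type to 5 (HE-AAC/SBR) for the generated config, while parsedCodec keeps what the bitstream actually declared.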
@@ -13955,45 +14074,149 @@ class BaseVideoParser {
|
|
13955
14074
|
logger.log(VideoSample.pts + '/' + VideoSample.dts + ':' + VideoSample.debug);
|
13956
14075
|
}
|
13957
14076
|
}
|
13958
|
-
|
13959
|
-
|
13960
|
-
|
13961
|
-
|
13962
|
-
|
13963
|
-
|
13964
|
-
|
13965
|
-
|
13966
|
-
|
13967
|
-
|
13968
|
-
|
13969
|
-
|
13970
|
-
this.data = data;
|
13971
|
-
// the number of bytes left to examine in this.data
|
13972
|
-
this.bytesAvailable = data.byteLength;
|
13973
|
-
// the current word being examined
|
13974
|
-
this.word = 0; // :uint
|
13975
|
-
// the number of bits left to examine in the current word
|
13976
|
-
this.bitsAvailable = 0; // :uint
|
13977
|
-
}
|
14077
|
+
parseNALu(track, array) {
|
14078
|
+
const len = array.byteLength;
|
14079
|
+
let state = track.naluState || 0;
|
14080
|
+
const lastState = state;
|
14081
|
+
const units = [];
|
14082
|
+
let i = 0;
|
14083
|
+
let value;
|
14084
|
+
let overflow;
|
14085
|
+
let unitType;
|
14086
|
+
let lastUnitStart = -1;
|
14087
|
+
let lastUnitType = 0;
|
14088
|
+
// logger.log('PES:' + Hex.hexDump(array));
|
13978
14089
|
|
13979
|
-
|
13980
|
-
|
13981
|
-
|
13982
|
-
|
13983
|
-
|
13984
|
-
|
13985
|
-
|
13986
|
-
if (availableBytes === 0) {
|
13987
|
-
throw new Error('no bytes available');
|
14090
|
+
if (state === -1) {
|
14091
|
+
// special use case where we found 3 or 4-byte start codes exactly at the end of previous PES packet
|
14092
|
+
lastUnitStart = 0;
|
14093
|
+
// NALu type is value read from offset 0
|
14094
|
+
lastUnitType = this.getNALuType(array, 0);
|
14095
|
+
state = 0;
|
14096
|
+
i = 1;
|
13988
14097
|
}
|
13989
|
-
|
13990
|
-
|
13991
|
-
|
13992
|
-
|
13993
|
-
|
13994
|
-
|
14098
|
+
while (i < len) {
|
14099
|
+
value = array[i++];
|
14100
|
+
// optimization. state 0 and 1 are the predominant case. let's handle them outside of the switch/case
|
14101
|
+
if (!state) {
|
14102
|
+
state = value ? 0 : 1;
|
14103
|
+
continue;
|
14104
|
+
}
|
14105
|
+
if (state === 1) {
|
14106
|
+
state = value ? 0 : 2;
|
14107
|
+
continue;
|
14108
|
+
}
|
14109
|
+
// here we have state either equal to 2 or 3
|
14110
|
+
if (!value) {
|
14111
|
+
state = 3;
|
14112
|
+
} else if (value === 1) {
|
14113
|
+
overflow = i - state - 1;
|
14114
|
+
if (lastUnitStart >= 0) {
|
14115
|
+
const unit = {
|
14116
|
+
data: array.subarray(lastUnitStart, overflow),
|
14117
|
+
type: lastUnitType
|
14118
|
+
};
|
14119
|
+
// logger.log('pushing NALU, type/size:' + unit.type + '/' + unit.data.byteLength);
|
14120
|
+
units.push(unit);
|
14121
|
+
} else {
|
14122
|
+
// lastUnitStart is undefined => this is the first start code found in this PES packet
|
14123
|
+
// first check if start code delimiter is overlapping between 2 PES packets,
|
14124
|
+
// ie it started in last packet (lastState not zero)
|
14125
|
+
// and ended at the beginning of this PES packet (i <= 4 - lastState)
|
14126
|
+
const lastUnit = this.getLastNalUnit(track.samples);
|
14127
|
+
if (lastUnit) {
|
14128
|
+
if (lastState && i <= 4 - lastState) {
|
14129
|
+
// start delimiter overlapping between PES packets
|
14130
|
+
// strip start delimiter bytes from the end of last NAL unit
|
14131
|
+
// check if lastUnit had a state different from zero
|
14132
|
+
if (lastUnit.state) {
|
14133
|
+
// strip last bytes
|
14134
|
+
lastUnit.data = lastUnit.data.subarray(0, lastUnit.data.byteLength - lastState);
|
14135
|
+
}
|
14136
|
+
}
|
14137
|
+
// If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
|
13995
14138
|
|
13996
|
-
|
14139
|
+
if (overflow > 0) {
|
14140
|
+
// logger.log('first NALU found with overflow:' + overflow);
|
14141
|
+
lastUnit.data = appendUint8Array(lastUnit.data, array.subarray(0, overflow));
|
14142
|
+
lastUnit.state = 0;
|
14143
|
+
}
|
14144
|
+
}
|
14145
|
+
}
|
14146
|
+
// check if we can read unit type
|
14147
|
+
if (i < len) {
|
14148
|
+
unitType = this.getNALuType(array, i);
|
14149
|
+
// logger.log('find NALU @ offset:' + i + ',type:' + unitType);
|
14150
|
+
lastUnitStart = i;
|
14151
|
+
lastUnitType = unitType;
|
14152
|
+
state = 0;
|
14153
|
+
} else {
|
14154
|
+
// not enough byte to read unit type. let's read it on next PES parsing
|
14155
|
+
state = -1;
|
14156
|
+
}
|
14157
|
+
} else {
|
14158
|
+
state = 0;
|
14159
|
+
}
|
14160
|
+
}
|
14161
|
+
if (lastUnitStart >= 0 && state >= 0) {
|
14162
|
+
const unit = {
|
14163
|
+
data: array.subarray(lastUnitStart, len),
|
14164
|
+
type: lastUnitType,
|
14165
|
+
state: state
|
14166
|
+
};
|
14167
|
+
units.push(unit);
|
14168
|
+
// logger.log('pushing NALU, type/size/state:' + unit.type + '/' + unit.data.byteLength + '/' + state);
|
14169
|
+
}
|
14170
|
+
// no NALu found
|
14171
|
+
if (units.length === 0) {
|
14172
|
+
// append pes.data to previous NAL unit
|
14173
|
+
const lastUnit = this.getLastNalUnit(track.samples);
|
14174
|
+
if (lastUnit) {
|
14175
|
+
lastUnit.data = appendUint8Array(lastUnit.data, array);
|
14176
|
+
}
|
14177
|
+
}
|
14178
|
+
track.naluState = state;
|
14179
|
+
return units;
|
14180
|
+
}
|
14181
|
+
}
|
14182
|
+
|
14183
|
+
/**
|
14184
|
+
* Parser for exponential Golomb codes, a variable-bitwidth number encoding scheme used by h264.
|
14185
|
+
*/
|
14186
|
+
|
14187
|
+
class ExpGolomb {
|
14188
|
+
constructor(data) {
|
14189
|
+
this.data = void 0;
|
14190
|
+
this.bytesAvailable = void 0;
|
14191
|
+
this.word = void 0;
|
14192
|
+
this.bitsAvailable = void 0;
|
14193
|
+
this.data = data;
|
14194
|
+
// the number of bytes left to examine in this.data
|
14195
|
+
this.bytesAvailable = data.byteLength;
|
14196
|
+
// the current word being examined
|
14197
|
+
this.word = 0; // :uint
|
14198
|
+
// the number of bits left to examine in the current word
|
14199
|
+
this.bitsAvailable = 0; // :uint
|
14200
|
+
}
|
14201
|
+
|
14202
|
+
// ():void
|
14203
|
+
loadWord() {
|
14204
|
+
const data = this.data;
|
14205
|
+
const bytesAvailable = this.bytesAvailable;
|
14206
|
+
const position = data.byteLength - bytesAvailable;
|
14207
|
+
const workingBytes = new Uint8Array(4);
|
14208
|
+
const availableBytes = Math.min(4, bytesAvailable);
|
14209
|
+
if (availableBytes === 0) {
|
14210
|
+
throw new Error('no bytes available');
|
14211
|
+
}
|
14212
|
+
workingBytes.set(data.subarray(position, position + availableBytes));
|
14213
|
+
this.word = new DataView(workingBytes.buffer).getUint32(0);
|
14214
|
+
// track the amount of this.data that has been processed
|
14215
|
+
this.bitsAvailable = availableBytes * 8;
|
14216
|
+
this.bytesAvailable -= availableBytes;
|
14217
|
+
}
|
14218
|
+
|
14219
|
+
// (count:int):void
|
13997
14220
|
skipBits(count) {
|
13998
14221
|
let skipBytes; // :int
|
13999
14222
|
count = Math.min(count, this.bytesAvailable * 8 + this.bitsAvailable);
|
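The parseNALu body added above scans Annex-B start codes with a small state machine: state counts trailing zero bytes, and -1 marks a start code that ended exactly at the previous PES packet boundary. A compact, self-contained restatement of just the scanning idea, as a reading aid rather than library code:

function scanStartCodes(bytes) {
  const starts = [];
  let state = 0;
  for (let i = 0; i < bytes.length; i++) {
    if (bytes[i] === 0) {
      state = Math.min(state + 1, 3); // count zeros; both 00 00 01 and 00 00 00 01 qualify
    } else {
      if (bytes[i] === 1 && state >= 2) {
        starts.push(i + 1); // NAL unit payload begins after the 0x01
      }
      state = 0;
    }
  }
  return starts;
}

scanStartCodes(new Uint8Array([0, 0, 1, 0x67, 0, 0, 0, 1, 0x68])); // -> [3, 8]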
@@ -14097,21 +14320,171 @@ class ExpGolomb {
|
|
14097
14320
|
readUInt() {
|
14098
14321
|
return this.readBits(32);
|
14099
14322
|
}
|
14323
|
+
}
|
14324
|
+
|
14325
|
+
class AvcVideoParser extends BaseVideoParser {
|
14326
|
+
parsePES(track, textTrack, pes, last, duration) {
|
14327
|
+
const units = this.parseNALu(track, pes.data);
|
14328
|
+
let VideoSample = this.VideoSample;
|
14329
|
+
let push;
|
14330
|
+
let spsfound = false;
|
14331
|
+
// free pes.data to save up some memory
|
14332
|
+
pes.data = null;
|
14333
|
+
|
14334
|
+
// if new NAL units found and last sample still there, let's push ...
|
14335
|
+
// this helps parsing streams with missing AUD (only do this if AUD never found)
|
14336
|
+
if (VideoSample && units.length && !track.audFound) {
|
14337
|
+
this.pushAccessUnit(VideoSample, track);
|
14338
|
+
VideoSample = this.VideoSample = this.createVideoSample(false, pes.pts, pes.dts, '');
|
14339
|
+
}
|
14340
|
+
units.forEach(unit => {
|
14341
|
+
var _VideoSample2;
|
14342
|
+
switch (unit.type) {
|
14343
|
+
// NDR
|
14344
|
+
case 1:
|
14345
|
+
{
|
14346
|
+
let iskey = false;
|
14347
|
+
push = true;
|
14348
|
+
const data = unit.data;
|
14349
|
+
// only check slice type to detect KF in case SPS found in same packet (any keyframe is preceded by SPS ...)
|
14350
|
+
if (spsfound && data.length > 4) {
|
14351
|
+
// retrieve slice type by parsing beginning of NAL unit (follow H264 spec, slice_header definition) to detect keyframe embedded in NDR
|
14352
|
+
const sliceType = this.readSliceType(data);
|
14353
|
+
// 2 : I slice, 4 : SI slice, 7 : I slice, 9: SI slice
|
14354
|
+
// SI slice : A slice that is coded using intra prediction only and using quantisation of the prediction samples.
|
14355
|
+
// An SI slice can be coded such that its decoded samples can be constructed identically to an SP slice.
|
14356
|
+
// I slice: A slice that is not an SI slice that is decoded using intra prediction only.
|
14357
|
+
// if (sliceType === 2 || sliceType === 7) {
|
14358
|
+
if (sliceType === 2 || sliceType === 4 || sliceType === 7 || sliceType === 9) {
|
14359
|
+
iskey = true;
|
14360
|
+
}
|
14361
|
+
}
|
14362
|
+
if (iskey) {
|
14363
|
+
var _VideoSample;
|
14364
|
+
// if we have non-keyframe data already, that cannot belong to the same frame as a keyframe, so force a push
|
14365
|
+
if ((_VideoSample = VideoSample) != null && _VideoSample.frame && !VideoSample.key) {
|
14366
|
+
this.pushAccessUnit(VideoSample, track);
|
14367
|
+
VideoSample = this.VideoSample = null;
|
14368
|
+
}
|
14369
|
+
}
|
14370
|
+
if (!VideoSample) {
|
14371
|
+
VideoSample = this.VideoSample = this.createVideoSample(true, pes.pts, pes.dts, '');
|
14372
|
+
}
|
14373
|
+
VideoSample.frame = true;
|
14374
|
+
VideoSample.key = iskey;
|
14375
|
+
break;
|
14376
|
+
// IDR
|
14377
|
+
}
|
14378
|
+
case 5:
|
14379
|
+
push = true;
|
14380
|
+
// handle PES not starting with AUD
|
14381
|
+
// if we have frame data already, that cannot belong to the same frame, so force a push
|
14382
|
+
if ((_VideoSample2 = VideoSample) != null && _VideoSample2.frame && !VideoSample.key) {
|
14383
|
+
this.pushAccessUnit(VideoSample, track);
|
14384
|
+
VideoSample = this.VideoSample = null;
|
14385
|
+
}
|
14386
|
+
if (!VideoSample) {
|
14387
|
+
VideoSample = this.VideoSample = this.createVideoSample(true, pes.pts, pes.dts, '');
|
14388
|
+
}
|
14389
|
+
VideoSample.key = true;
|
14390
|
+
VideoSample.frame = true;
|
14391
|
+
break;
|
14392
|
+
// SEI
|
14393
|
+
case 6:
|
14394
|
+
{
|
14395
|
+
push = true;
|
14396
|
+
parseSEIMessageFromNALu(unit.data, 1, pes.pts, textTrack.samples);
|
14397
|
+
break;
|
14398
|
+
// SPS
|
14399
|
+
}
|
14400
|
+
case 7:
|
14401
|
+
{
|
14402
|
+
var _track$pixelRatio, _track$pixelRatio2;
|
14403
|
+
push = true;
|
14404
|
+
spsfound = true;
|
14405
|
+
const sps = unit.data;
|
14406
|
+
const config = this.readSPS(sps);
|
14407
|
+
if (!track.sps || track.width !== config.width || track.height !== config.height || ((_track$pixelRatio = track.pixelRatio) == null ? void 0 : _track$pixelRatio[0]) !== config.pixelRatio[0] || ((_track$pixelRatio2 = track.pixelRatio) == null ? void 0 : _track$pixelRatio2[1]) !== config.pixelRatio[1]) {
|
14408
|
+
track.width = config.width;
|
14409
|
+
track.height = config.height;
|
14410
|
+
track.pixelRatio = config.pixelRatio;
|
14411
|
+
track.sps = [sps];
|
14412
|
+
track.duration = duration;
|
14413
|
+
const codecarray = sps.subarray(1, 4);
|
14414
|
+
let codecstring = 'avc1.';
|
14415
|
+
for (let i = 0; i < 3; i++) {
|
14416
|
+
let h = codecarray[i].toString(16);
|
14417
|
+
if (h.length < 2) {
|
14418
|
+
h = '0' + h;
|
14419
|
+
}
|
14420
|
+
codecstring += h;
|
14421
|
+
}
|
14422
|
+
track.codec = codecstring;
|
14423
|
+
}
|
14424
|
+
break;
|
14425
|
+
}
|
14426
|
+
// PPS
|
14427
|
+
case 8:
|
14428
|
+
push = true;
|
14429
|
+
track.pps = [unit.data];
|
14430
|
+
break;
|
14431
|
+
// AUD
|
14432
|
+
case 9:
|
14433
|
+
push = true;
|
14434
|
+
track.audFound = true;
|
14435
|
+
if (VideoSample) {
|
14436
|
+
this.pushAccessUnit(VideoSample, track);
|
14437
|
+
}
|
14438
|
+
VideoSample = this.VideoSample = this.createVideoSample(false, pes.pts, pes.dts, '');
|
14439
|
+
break;
|
14440
|
+
// Filler Data
|
14441
|
+
case 12:
|
14442
|
+
push = true;
|
14443
|
+
break;
|
14444
|
+
default:
|
14445
|
+
push = false;
|
14446
|
+
if (VideoSample) {
|
14447
|
+
VideoSample.debug += 'unknown NAL ' + unit.type + ' ';
|
14448
|
+
}
|
14449
|
+
break;
|
14450
|
+
}
|
14451
|
+
if (VideoSample && push) {
|
14452
|
+
const units = VideoSample.units;
|
14453
|
+
units.push(unit);
|
14454
|
+
}
|
14455
|
+
});
|
14456
|
+
// if last PES packet, push samples
|
14457
|
+
if (last && VideoSample) {
|
14458
|
+
this.pushAccessUnit(VideoSample, track);
|
14459
|
+
this.VideoSample = null;
|
14460
|
+
}
|
14461
|
+
}
|
14462
|
+
getNALuType(data, offset) {
|
14463
|
+
return data[offset] & 0x1f;
|
14464
|
+
}
|
14465
|
+
readSliceType(data) {
|
14466
|
+
const eg = new ExpGolomb(data);
|
14467
|
+
// skip NALu type
|
14468
|
+
eg.readUByte();
|
14469
|
+
// discard first_mb_in_slice
|
14470
|
+
eg.readUEG();
|
14471
|
+
// return slice_type
|
14472
|
+
return eg.readUEG();
|
14473
|
+
}
|
14100
14474
|
|
14101
14475
|
/**
|
14102
|
-
*
|
14103
|
-
* list is optionally transmitted as part of a sequence parameter
|
14476
|
+
* The scaling list is optionally transmitted as part of a sequence parameter
|
14104
14477
|
* set and is not relevant to transmuxing.
|
14105
14478
|
* @param count the number of entries in this scaling list
|
14106
14479
|
* @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
|
14107
14480
|
*/
|
14108
|
-
skipScalingList(count) {
|
14481
|
+
skipScalingList(count, reader) {
|
14109
14482
|
let lastScale = 8;
|
14110
14483
|
let nextScale = 8;
|
14111
14484
|
let deltaScale;
|
14112
14485
|
for (let j = 0; j < count; j++) {
|
14113
14486
|
if (nextScale !== 0) {
|
14114
|
-
deltaScale =
|
14487
|
+
deltaScale = reader.readEG();
|
14115
14488
|
nextScale = (lastScale + deltaScale + 256) % 256;
|
14116
14489
|
}
|
14117
14490
|
lastScale = nextScale === 0 ? lastScale : nextScale;
|
@@ -14126,7 +14499,8 @@ class ExpGolomb {
|
|
14126
14499
|
* sequence parameter set, including the dimensions of the
|
14127
14500
|
* associated video frames.
|
14128
14501
|
*/
|
14129
|
-
readSPS() {
|
14502
|
+
readSPS(sps) {
|
14503
|
+
const eg = new ExpGolomb(sps);
|
14130
14504
|
let frameCropLeftOffset = 0;
|
14131
14505
|
let frameCropRightOffset = 0;
|
14132
14506
|
let frameCropTopOffset = 0;
|
@@ -14134,13 +14508,13 @@ class ExpGolomb {
|
|
14134
14508
|
let numRefFramesInPicOrderCntCycle;
|
14135
14509
|
let scalingListCount;
|
14136
14510
|
let i;
|
14137
|
-
const readUByte =
|
14138
|
-
const readBits =
|
14139
|
-
const readUEG =
|
14140
|
-
const readBoolean =
|
14141
|
-
const skipBits =
|
14142
|
-
const skipEG =
|
14143
|
-
const skipUEG =
|
14511
|
+
const readUByte = eg.readUByte.bind(eg);
|
14512
|
+
const readBits = eg.readBits.bind(eg);
|
14513
|
+
const readUEG = eg.readUEG.bind(eg);
|
14514
|
+
const readBoolean = eg.readBoolean.bind(eg);
|
14515
|
+
const skipBits = eg.skipBits.bind(eg);
|
14516
|
+
const skipEG = eg.skipEG.bind(eg);
|
14517
|
+
const skipUEG = eg.skipUEG.bind(eg);
|
14144
14518
|
const skipScalingList = this.skipScalingList.bind(this);
|
14145
14519
|
readUByte();
|
14146
14520
|
const profileIdc = readUByte(); // profile_idc
|
@@ -14165,9 +14539,9 @@ class ExpGolomb {
|
|
14165
14539
|
if (readBoolean()) {
|
14166
14540
|
// seq_scaling_list_present_flag[ i ]
|
14167
14541
|
if (i < 6) {
|
14168
|
-
skipScalingList(16);
|
14542
|
+
skipScalingList(16, eg);
|
14169
14543
|
} else {
|
14170
|
-
skipScalingList(64);
|
14544
|
+
skipScalingList(64, eg);
|
14171
14545
|
}
|
14172
14546
|
}
|
14173
14547
|
}
|
@@ -14272,19 +14646,15 @@ class ExpGolomb {
|
|
14272
14646
|
pixelRatio: pixelRatio
|
14273
14647
|
};
|
14274
14648
|
}
|
14275
|
-
readSliceType() {
|
14276
|
-
// skip NALu type
|
14277
|
-
this.readUByte();
|
14278
|
-
// discard first_mb_in_slice
|
14279
|
-
this.readUEG();
|
14280
|
-
// return slice_type
|
14281
|
-
return this.readUEG();
|
14282
|
-
}
|
14283
14649
|
}
|
14284
14650
|
|
14285
|
-
class
|
14286
|
-
|
14287
|
-
|
14651
|
+
class HevcVideoParser extends BaseVideoParser {
|
14652
|
+
constructor(...args) {
|
14653
|
+
super(...args);
|
14654
|
+
this.initVPS = null;
|
14655
|
+
}
|
14656
|
+
parsePES(track, textTrack, pes, last, duration) {
|
14657
|
+
const units = this.parseNALu(track, pes.data);
|
14288
14658
|
let VideoSample = this.VideoSample;
|
14289
14659
|
let push;
|
14290
14660
|
let spsfound = false;
|
@@ -14300,42 +14670,49 @@ class AvcVideoParser extends BaseVideoParser {
|
|
14300
14670
|
units.forEach(unit => {
|
14301
14671
|
var _VideoSample2;
|
14302
14672
|
switch (unit.type) {
|
14303
|
-
//
|
14673
|
+
// NON-IDR, NON RANDOM ACCESS SLICE
|
14674
|
+
case 0:
|
14304
14675
|
case 1:
|
14305
|
-
|
14306
|
-
|
14307
|
-
|
14308
|
-
|
14309
|
-
|
14310
|
-
|
14311
|
-
|
14312
|
-
|
14313
|
-
|
14314
|
-
|
14315
|
-
|
14316
|
-
|
14317
|
-
|
14318
|
-
|
14319
|
-
|
14320
|
-
|
14321
|
-
|
14322
|
-
|
14323
|
-
|
14324
|
-
|
14325
|
-
|
14326
|
-
|
14327
|
-
|
14328
|
-
|
14329
|
-
|
14330
|
-
if (!VideoSample) {
|
14331
|
-
|
14676
|
+
case 2:
|
14677
|
+
case 3:
|
14678
|
+
case 4:
|
14679
|
+
case 5:
|
14680
|
+
case 6:
|
14681
|
+
case 7:
|
14682
|
+
case 8:
|
14683
|
+
case 9:
|
14684
|
+
if (!VideoSample) {
|
14685
|
+
VideoSample = this.VideoSample = this.createVideoSample(false, pes.pts, pes.dts, '');
|
14686
|
+
}
|
14687
|
+
VideoSample.frame = true;
|
14688
|
+
push = true;
|
14689
|
+
break;
|
14690
|
+
|
14691
|
+
// CRA, BLA (random access picture)
|
14692
|
+
case 16:
|
14693
|
+
case 17:
|
14694
|
+
case 18:
|
14695
|
+
case 21:
|
14696
|
+
push = true;
|
14697
|
+
if (spsfound) {
|
14698
|
+
var _VideoSample;
|
14699
|
+
// handle PES not starting with AUD
|
14700
|
+
// if we have frame data already, that cannot belong to the same frame, so force a push
|
14701
|
+
if ((_VideoSample = VideoSample) != null && _VideoSample.frame && !VideoSample.key) {
|
14702
|
+
this.pushAccessUnit(VideoSample, track);
|
14703
|
+
VideoSample = this.VideoSample = null;
|
14332
14704
|
}
|
14333
|
-
VideoSample.frame = true;
|
14334
|
-
VideoSample.key = iskey;
|
14335
|
-
break;
|
14336
|
-
// IDR
|
14337
14705
|
}
|
14338
|
-
|
14706
|
+
if (!VideoSample) {
|
14707
|
+
VideoSample = this.VideoSample = this.createVideoSample(true, pes.pts, pes.dts, '');
|
14708
|
+
}
|
14709
|
+
VideoSample.key = true;
|
14710
|
+
VideoSample.frame = true;
|
14711
|
+
break;
|
14712
|
+
|
14713
|
+
// IDR
|
14714
|
+
case 19:
|
14715
|
+
case 20:
|
14339
14716
|
push = true;
|
14340
14717
|
// handle PES not starting with AUD
|
14341
14718
|
// if we have frame data already, that cannot belong to the same frame, so force a push
|
@@ -14349,48 +14726,76 @@ class AvcVideoParser extends BaseVideoParser {
|
|
14349
14726
|
VideoSample.key = true;
|
14350
14727
|
VideoSample.frame = true;
|
14351
14728
|
break;
|
14729
|
+
|
14352
14730
|
// SEI
|
14353
|
-
case
|
14354
|
-
|
14355
|
-
|
14356
|
-
|
14357
|
-
|
14358
|
-
|
14731
|
+
case 39:
|
14732
|
+
push = true;
|
14733
|
+
parseSEIMessageFromNALu(unit.data, 2,
|
14734
|
+
// NALu header size
|
14735
|
+
pes.pts, textTrack.samples);
|
14736
|
+
break;
|
14737
|
+
|
14738
|
+
// VPS
|
14739
|
+
case 32:
|
14740
|
+
push = true;
|
14741
|
+
if (!track.vps) {
|
14742
|
+
const config = this.readVPS(unit.data);
|
14743
|
+
track.params = _objectSpread2({}, config);
|
14744
|
+
this.initVPS = unit.data;
|
14359
14745
|
}
|
14360
|
-
|
14361
|
-
|
14362
|
-
|
14363
|
-
|
14364
|
-
|
14365
|
-
|
14366
|
-
|
14367
|
-
|
14368
|
-
if (
|
14746
|
+
track.vps = [unit.data];
|
14747
|
+
break;
|
14748
|
+
|
14749
|
+
// SPS
|
14750
|
+
case 33:
|
14751
|
+
push = true;
|
14752
|
+
spsfound = true;
|
14753
|
+
if (typeof track.params === 'object') {
|
14754
|
+
if (track.vps !== undefined && track.vps[0] !== this.initVPS && track.sps !== undefined && !this.matchSPS(track.sps[0], unit.data)) {
|
14755
|
+
this.initVPS = track.vps[0];
|
14756
|
+
track.sps = track.pps = undefined;
|
14757
|
+
}
|
14758
|
+
if (!track.sps) {
|
14759
|
+
const config = this.readSPS(unit.data);
|
14369
14760
|
track.width = config.width;
|
14370
14761
|
track.height = config.height;
|
14371
14762
|
track.pixelRatio = config.pixelRatio;
|
14372
|
-
track.sps = [sps];
|
14373
14763
|
track.duration = duration;
|
14374
|
-
|
14375
|
-
|
14376
|
-
for (
|
14377
|
-
|
14378
|
-
if (h.length < 2) {
|
14379
|
-
h = '0' + h;
|
14380
|
-
}
|
14381
|
-
codecstring += h;
|
14764
|
+
track.codec = config.codecString;
|
14765
|
+
track.sps = [];
|
14766
|
+
for (const prop in config.params) {
|
14767
|
+
track.params[prop] = config.params[prop];
|
14382
14768
|
}
|
14383
|
-
track.codec = codecstring;
|
14384
14769
|
}
|
14385
|
-
|
14770
|
+
if (track.vps !== undefined && track.vps[0] === this.initVPS) {
|
14771
|
+
track.sps.push(unit.data);
|
14772
|
+
}
|
14386
14773
|
}
|
14774
|
+
if (!VideoSample) {
|
14775
|
+
VideoSample = this.VideoSample = this.createVideoSample(true, pes.pts, pes.dts, '');
|
14776
|
+
}
|
14777
|
+
VideoSample.key = true;
|
14778
|
+
break;
|
14779
|
+
|
14387
14780
|
// PPS
|
14388
|
-
case
|
14781
|
+
case 34:
|
14389
14782
|
push = true;
|
14390
|
-
track.
|
14783
|
+
if (typeof track.params === 'object') {
|
14784
|
+
if (!track.pps) {
|
14785
|
+
track.pps = [];
|
14786
|
+
const config = this.readPPS(unit.data);
|
14787
|
+
for (const prop in config) {
|
14788
|
+
track.params[prop] = config[prop];
|
14789
|
+
}
|
14790
|
+
}
|
14791
|
+
if (this.initVPS !== null || track.pps.length === 0) {
|
14792
|
+
track.pps.push(unit.data);
|
14793
|
+
}
|
14794
|
+
}
|
14391
14795
|
break;
|
14392
|
-
|
14393
|
-
|
14796
|
+
|
14797
|
+
// ACCESS UNIT DELIMITER
|
14798
|
+
case 35:
|
14394
14799
|
push = true;
|
14395
14800
|
track.audFound = true;
|
14396
14801
|
if (VideoSample) {
|
@@ -14398,14 +14803,10 @@ class AvcVideoParser extends BaseVideoParser {
|
|
14398
14803
|
}
|
14399
14804
|
VideoSample = this.VideoSample = this.createVideoSample(false, pes.pts, pes.dts, '');
|
14400
14805
|
break;
|
14401
|
-
// Filler Data
|
14402
|
-
case 12:
|
14403
|
-
push = true;
|
14404
|
-
break;
|
14405
14806
|
default:
|
14406
14807
|
push = false;
|
14407
14808
|
if (VideoSample) {
|
14408
|
-
VideoSample.debug += 'unknown NAL ' + unit.type + ' ';
|
14809
|
+
VideoSample.debug += 'unknown or irrelevant NAL ' + unit.type + ' ';
|
14409
14810
|
}
|
14410
14811
|
break;
|
14411
14812
|
}
|
@@ -14420,109 +14821,423 @@ class AvcVideoParser extends BaseVideoParser {
|
|
14420
14821
|
this.VideoSample = null;
|
14421
14822
|
}
|
14422
14823
|
}
|
14423
|
-
|
14424
|
-
|
14425
|
-
|
14426
|
-
|
14427
|
-
const
|
14428
|
-
let
|
14429
|
-
let
|
14430
|
-
|
14431
|
-
|
14432
|
-
|
14433
|
-
|
14434
|
-
|
14824
|
+
getNALuType(data, offset) {
|
14825
|
+
return (data[offset] & 0x7e) >>> 1;
|
14826
|
+
}
|
14827
|
+
ebsp2rbsp(arr) {
|
14828
|
+
const dst = new Uint8Array(arr.byteLength);
|
14829
|
+
let dstIdx = 0;
|
14830
|
+
for (let i = 0; i < arr.byteLength; i++) {
|
14831
|
+
if (i >= 2) {
|
14832
|
+
// Unescape: Skip 0x03 after 00 00
|
14833
|
+
if (arr[i] === 0x03 && arr[i - 1] === 0x00 && arr[i - 2] === 0x00) {
|
14834
|
+
continue;
|
14835
|
+
}
|
14836
|
+
}
|
14837
|
+
dst[dstIdx] = arr[i];
|
14838
|
+
dstIdx++;
|
14839
|
+
}
|
14840
|
+
return new Uint8Array(dst.buffer, 0, dstIdx);
|
14841
|
+
}
|
14842
|
+
readVPS(vps) {
|
14843
|
+
const eg = new ExpGolomb(vps);
|
14844
|
+
// remove header
|
14845
|
+
eg.readUByte();
|
14846
|
+
eg.readUByte();
|
14847
|
+
eg.readBits(4); // video_parameter_set_id
|
14848
|
+
eg.skipBits(2);
|
14849
|
+
eg.readBits(6); // max_layers_minus1
|
14850
|
+
const max_sub_layers_minus1 = eg.readBits(3);
|
14851
|
+
const temporal_id_nesting_flag = eg.readBoolean();
|
14852
|
+
// ...vui fps can be here, but empty fps value is not critical for metadata
|
14435
14853
|
|
14436
|
-
|
14437
|
-
|
14438
|
-
|
14439
|
-
|
14440
|
-
|
14441
|
-
|
14442
|
-
|
14443
|
-
|
14444
|
-
|
14445
|
-
|
14446
|
-
|
14447
|
-
|
14448
|
-
|
14449
|
-
|
14450
|
-
|
14451
|
-
|
14452
|
-
|
14453
|
-
|
14454
|
-
|
14455
|
-
|
14456
|
-
|
14457
|
-
|
14458
|
-
|
14459
|
-
|
14460
|
-
|
14461
|
-
|
14462
|
-
|
14463
|
-
|
14464
|
-
|
14465
|
-
|
14466
|
-
|
14467
|
-
|
14468
|
-
|
14469
|
-
|
14470
|
-
|
14471
|
-
|
14472
|
-
|
14473
|
-
|
14474
|
-
|
14475
|
-
|
14476
|
-
|
14477
|
-
|
14478
|
-
|
14479
|
-
|
14480
|
-
|
14854
|
+
return {
|
14855
|
+
numTemporalLayers: max_sub_layers_minus1 + 1,
|
14856
|
+
temporalIdNested: temporal_id_nesting_flag
|
14857
|
+
};
|
14858
|
+
}
|
14859
|
+
readSPS(sps) {
|
14860
|
+
const eg = new ExpGolomb(this.ebsp2rbsp(sps));
|
14861
|
+
eg.readUByte();
|
14862
|
+
eg.readUByte();
|
14863
|
+
eg.readBits(4); //video_parameter_set_id
|
14864
|
+
const max_sub_layers_minus1 = eg.readBits(3);
|
14865
|
+
eg.readBoolean(); // temporal_id_nesting_flag
|
14866
|
+
|
14867
|
+
// profile_tier_level
|
14868
|
+
const general_profile_space = eg.readBits(2);
|
14869
|
+
const general_tier_flag = eg.readBoolean();
|
14870
|
+
const general_profile_idc = eg.readBits(5);
|
14871
|
+
const general_profile_compatibility_flags_1 = eg.readUByte();
|
14872
|
+
const general_profile_compatibility_flags_2 = eg.readUByte();
|
14873
|
+
const general_profile_compatibility_flags_3 = eg.readUByte();
|
14874
|
+
const general_profile_compatibility_flags_4 = eg.readUByte();
|
14875
|
+
const general_constraint_indicator_flags_1 = eg.readUByte();
|
14876
|
+
const general_constraint_indicator_flags_2 = eg.readUByte();
|
14877
|
+
const general_constraint_indicator_flags_3 = eg.readUByte();
|
14878
|
+
const general_constraint_indicator_flags_4 = eg.readUByte();
|
14879
|
+
const general_constraint_indicator_flags_5 = eg.readUByte();
|
14880
|
+
const general_constraint_indicator_flags_6 = eg.readUByte();
|
14881
|
+
const general_level_idc = eg.readUByte();
|
14882
|
+
const sub_layer_profile_present_flags = [];
|
14883
|
+
const sub_layer_level_present_flags = [];
|
14884
|
+
for (let i = 0; i < max_sub_layers_minus1; i++) {
|
14885
|
+
sub_layer_profile_present_flags.push(eg.readBoolean());
|
14886
|
+
sub_layer_level_present_flags.push(eg.readBoolean());
|
14887
|
+
}
|
14888
|
+
if (max_sub_layers_minus1 > 0) {
|
14889
|
+
for (let i = max_sub_layers_minus1; i < 8; i++) {
|
14890
|
+
eg.readBits(2);
|
14891
|
+
}
|
14892
|
+
}
|
14893
|
+
for (let i = 0; i < max_sub_layers_minus1; i++) {
|
14894
|
+
if (sub_layer_profile_present_flags[i]) {
|
14895
|
+
eg.readUByte(); // sub_layer_profile_space, sub_layer_tier_flag, sub_layer_profile_idc
|
14896
|
+
eg.readUByte();
|
14897
|
+
eg.readUByte();
|
14898
|
+
eg.readUByte();
|
14899
|
+
eg.readUByte(); // sub_layer_profile_compatibility_flag
|
14900
|
+
eg.readUByte();
|
14901
|
+
eg.readUByte();
|
14902
|
+
eg.readUByte();
|
14903
|
+
eg.readUByte();
|
14904
|
+
eg.readUByte();
|
14905
|
+
eg.readUByte();
|
14906
|
+
}
|
14907
|
+
if (sub_layer_level_present_flags[i]) {
|
14908
|
+
eg.readUByte();
|
14909
|
+
}
|
14910
|
+
}
|
14911
|
+
eg.readUEG(); // seq_parameter_set_id
|
14912
|
+
const chroma_format_idc = eg.readUEG();
|
14913
|
+
if (chroma_format_idc == 3) {
|
14914
|
+
eg.skipBits(1); //separate_colour_plane_flag
|
14915
|
+
}
|
14916
|
+
const pic_width_in_luma_samples = eg.readUEG();
|
14917
|
+
const pic_height_in_luma_samples = eg.readUEG();
|
14918
|
+
const conformance_window_flag = eg.readBoolean();
|
14919
|
+
let pic_left_offset = 0,
|
14920
|
+
pic_right_offset = 0,
|
14921
|
+
pic_top_offset = 0,
|
14922
|
+
pic_bottom_offset = 0;
|
14923
|
+
if (conformance_window_flag) {
|
14924
|
+
pic_left_offset += eg.readUEG();
|
14925
|
+
pic_right_offset += eg.readUEG();
|
14926
|
+
pic_top_offset += eg.readUEG();
|
14927
|
+
pic_bottom_offset += eg.readUEG();
|
14928
|
+
}
|
14929
|
+
const bit_depth_luma_minus8 = eg.readUEG();
|
14930
|
+
const bit_depth_chroma_minus8 = eg.readUEG();
|
14931
|
+
const log2_max_pic_order_cnt_lsb_minus4 = eg.readUEG();
|
14932
|
+
const sub_layer_ordering_info_present_flag = eg.readBoolean();
|
14933
|
+
for (let i = sub_layer_ordering_info_present_flag ? 0 : max_sub_layers_minus1; i <= max_sub_layers_minus1; i++) {
|
14934
|
+
eg.skipUEG(); // max_dec_pic_buffering_minus1[i]
|
14935
|
+
eg.skipUEG(); // max_num_reorder_pics[i]
|
14936
|
+
eg.skipUEG(); // max_latency_increase_plus1[i]
|
14937
|
+
}
|
14938
|
+
eg.skipUEG(); // log2_min_luma_coding_block_size_minus3
|
14939
|
+
eg.skipUEG(); // log2_diff_max_min_luma_coding_block_size
|
14940
|
+
eg.skipUEG(); // log2_min_transform_block_size_minus2
|
14941
|
+
eg.skipUEG(); // log2_diff_max_min_transform_block_size
|
14942
|
+
eg.skipUEG(); // max_transform_hierarchy_depth_inter
|
14943
|
+
eg.skipUEG(); // max_transform_hierarchy_depth_intra
|
14944
|
+
const scaling_list_enabled_flag = eg.readBoolean();
|
14945
|
+
if (scaling_list_enabled_flag) {
|
14946
|
+
const sps_scaling_list_data_present_flag = eg.readBoolean();
|
14947
|
+
if (sps_scaling_list_data_present_flag) {
|
14948
|
+
for (let sizeId = 0; sizeId < 4; sizeId++) {
|
14949
|
+
for (let matrixId = 0; matrixId < (sizeId === 3 ? 2 : 6); matrixId++) {
|
14950
|
+
const scaling_list_pred_mode_flag = eg.readBoolean();
|
14951
|
+
if (!scaling_list_pred_mode_flag) {
|
14952
|
+
eg.readUEG(); // scaling_list_pred_matrix_id_delta
|
14953
|
+
} else {
|
14954
|
+
const coefNum = Math.min(64, 1 << 4 + (sizeId << 1));
|
14955
|
+
if (sizeId > 1) {
|
14956
|
+
eg.readEG();
|
14957
|
+
}
|
14958
|
+
for (let i = 0; i < coefNum; i++) {
|
14959
|
+
eg.readEG();
|
14481
14960
|
}
|
14482
|
-
}
|
14483
|
-
// If NAL units are not starting right at the beginning of the PES packet, push preceding data into previous NAL unit.
|
14484
|
-
|
14485
|
-
if (overflow > 0) {
|
14486
|
-
// logger.log('first NALU found with overflow:' + overflow);
|
14487
|
-
lastUnit.data = appendUint8Array(lastUnit.data, array.subarray(0, overflow));
|
14488
|
-
lastUnit.state = 0;
|
14489
14961
|
}
|
14490
14962
|
}
|
14491
14963
|
}
|
14492
|
-
|
14493
|
-
|
14494
|
-
|
14495
|
-
|
14496
|
-
|
14497
|
-
|
14498
|
-
|
14499
|
-
|
14500
|
-
|
14501
|
-
|
14964
|
+
}
|
14965
|
+
}
|
14966
|
+
eg.readBoolean(); // amp_enabled_flag
|
14967
|
+
eg.readBoolean(); // sample_adaptive_offset_enabled_flag
|
14968
|
+
const pcm_enabled_flag = eg.readBoolean();
|
14969
|
+
if (pcm_enabled_flag) {
|
14970
|
+
eg.readUByte();
|
14971
|
+
eg.skipUEG();
|
14972
|
+
eg.skipUEG();
|
14973
|
+
eg.readBoolean();
|
14974
|
+
}
|
14975
|
+
const num_short_term_ref_pic_sets = eg.readUEG();
|
14976
|
+
let num_delta_pocs = 0;
|
14977
|
+
for (let i = 0; i < num_short_term_ref_pic_sets; i++) {
|
14978
|
+
let inter_ref_pic_set_prediction_flag = false;
|
14979
|
+
if (i !== 0) {
|
14980
|
+
inter_ref_pic_set_prediction_flag = eg.readBoolean();
|
14981
|
+
}
|
14982
|
+
if (inter_ref_pic_set_prediction_flag) {
|
14983
|
+
if (i === num_short_term_ref_pic_sets) {
|
14984
|
+
eg.readUEG();
|
14985
|
+
}
|
14986
|
+
eg.readBoolean();
|
14987
|
+
eg.readUEG();
|
14988
|
+
let next_num_delta_pocs = 0;
|
14989
|
+
for (let j = 0; j <= num_delta_pocs; j++) {
|
14990
|
+
const used_by_curr_pic_flag = eg.readBoolean();
|
14991
|
+
let use_delta_flag = false;
|
14992
|
+
if (!used_by_curr_pic_flag) {
|
14993
|
+
use_delta_flag = eg.readBoolean();
|
14994
|
+
}
|
14995
|
+
if (used_by_curr_pic_flag || use_delta_flag) {
|
14996
|
+
next_num_delta_pocs++;
|
14997
|
+
}
|
14502
14998
|
}
|
14999
|
+
num_delta_pocs = next_num_delta_pocs;
|
14503
15000
|
} else {
|
14504
|
-
|
15001
|
+
const num_negative_pics = eg.readUEG();
|
15002
|
+
const num_positive_pics = eg.readUEG();
|
15003
|
+
num_delta_pocs = num_negative_pics + num_positive_pics;
|
15004
|
+
for (let j = 0; j < num_negative_pics; j++) {
|
15005
|
+
eg.readUEG();
|
15006
|
+
eg.readBoolean();
|
15007
|
+
}
|
15008
|
+
for (let j = 0; j < num_positive_pics; j++) {
|
15009
|
+
eg.readUEG();
|
15010
|
+
eg.readBoolean();
|
15011
|
+
}
|
14505
15012
|
}
|
14506
15013
|
}
|
14507
|
-
|
14508
|
-
|
14509
|
-
|
14510
|
-
|
14511
|
-
|
14512
|
-
|
14513
|
-
|
14514
|
-
|
14515
|
-
|
14516
|
-
|
14517
|
-
|
14518
|
-
|
14519
|
-
|
14520
|
-
|
14521
|
-
|
15014
|
+
const long_term_ref_pics_present_flag = eg.readBoolean();
|
15015
|
+
if (long_term_ref_pics_present_flag) {
|
15016
|
+
const num_long_term_ref_pics_sps = eg.readUEG();
|
15017
|
+
for (let i = 0; i < num_long_term_ref_pics_sps; i++) {
|
15018
|
+
for (let j = 0; j < log2_max_pic_order_cnt_lsb_minus4 + 4; j++) {
|
15019
|
+
eg.readBits(1);
|
15020
|
+
}
|
15021
|
+
eg.readBits(1);
|
15022
|
+
}
|
15023
|
+
}
|
15024
|
+
let min_spatial_segmentation_idc = 0;
|
15025
|
+
let sar_width = 1,
|
15026
|
+
sar_height = 1;
|
15027
|
+
let fps_fixed = true,
|
15028
|
+
fps_den = 1,
|
15029
|
+
fps_num = 0;
|
15030
|
+
eg.readBoolean(); // sps_temporal_mvp_enabled_flag
|
15031
|
+
eg.readBoolean(); // strong_intra_smoothing_enabled_flag
|
15032
|
+
let default_display_window_flag = false;
|
15033
|
+
const vui_parameters_present_flag = eg.readBoolean();
|
15034
|
+
if (vui_parameters_present_flag) {
|
15035
|
+
const aspect_ratio_info_present_flag = eg.readBoolean();
|
15036
|
+
if (aspect_ratio_info_present_flag) {
|
15037
|
+
const aspect_ratio_idc = eg.readUByte();
|
15038
|
+
const sar_width_table = [1, 12, 10, 16, 40, 24, 20, 32, 80, 18, 15, 64, 160, 4, 3, 2];
|
15039
|
+
const sar_height_table = [1, 11, 11, 11, 33, 11, 11, 11, 33, 11, 11, 33, 99, 3, 2, 1];
|
15040
|
+
if (aspect_ratio_idc > 0 && aspect_ratio_idc < 16) {
|
15041
|
+
sar_width = sar_width_table[aspect_ratio_idc - 1];
|
15042
|
+
sar_height = sar_height_table[aspect_ratio_idc - 1];
|
15043
|
+
} else if (aspect_ratio_idc === 255) {
|
15044
|
+
sar_width = eg.readBits(16);
|
15045
|
+
sar_height = eg.readBits(16);
|
15046
|
+
}
|
15047
|
+
}
|
15048
|
+
const overscan_info_present_flag = eg.readBoolean();
|
15049
|
+
if (overscan_info_present_flag) {
|
15050
|
+
eg.readBoolean();
|
15051
|
+
}
|
15052
|
+
const video_signal_type_present_flag = eg.readBoolean();
|
15053
|
+
if (video_signal_type_present_flag) {
|
15054
|
+
eg.readBits(3);
|
15055
|
+
eg.readBoolean();
|
15056
|
+
const colour_description_present_flag = eg.readBoolean();
|
15057
|
+
if (colour_description_present_flag) {
|
15058
|
+
eg.readUByte();
|
15059
|
+
eg.readUByte();
|
15060
|
+
eg.readUByte();
|
15061
|
+
}
|
15062
|
+
}
|
15063
|
+
const chroma_loc_info_present_flag = eg.readBoolean();
|
15064
|
+
if (chroma_loc_info_present_flag) {
|
15065
|
+
eg.readUEG();
|
15066
|
+
eg.readUEG();
|
15067
|
+
}
|
15068
|
+
eg.readBoolean(); // neutral_chroma_indication_flag
|
15069
|
+
eg.readBoolean(); // field_seq_flag
|
15070
|
+
eg.readBoolean(); // frame_field_info_present_flag
|
15071
|
+
default_display_window_flag = eg.readBoolean();
|
15072
|
+
if (default_display_window_flag) {
|
15073
|
+
pic_left_offset += eg.readUEG();
|
15074
|
+
pic_right_offset += eg.readUEG();
|
15075
|
+
pic_top_offset += eg.readUEG();
|
15076
|
+
pic_bottom_offset += eg.readUEG();
|
15077
|
+
}
|
15078
|
+
const vui_timing_info_present_flag = eg.readBoolean();
|
15079
|
+
if (vui_timing_info_present_flag) {
|
15080
|
+
fps_den = eg.readBits(32);
|
15081
|
+
fps_num = eg.readBits(32);
|
15082
|
+
const vui_poc_proportional_to_timing_flag = eg.readBoolean();
|
15083
|
+
if (vui_poc_proportional_to_timing_flag) {
|
15084
|
+
eg.readUEG();
|
15085
|
+
}
|
15086
|
+
const vui_hrd_parameters_present_flag = eg.readBoolean();
|
15087
|
+
if (vui_hrd_parameters_present_flag) {
|
15088
|
+
//const commonInfPresentFlag = true;
|
15089
|
+
//if (commonInfPresentFlag) {
|
15090
|
+
const nal_hrd_parameters_present_flag = eg.readBoolean();
|
15091
|
+
const vcl_hrd_parameters_present_flag = eg.readBoolean();
|
15092
|
+
let sub_pic_hrd_params_present_flag = false;
|
15093
|
+
if (nal_hrd_parameters_present_flag || vcl_hrd_parameters_present_flag) {
|
15094
|
+
sub_pic_hrd_params_present_flag = eg.readBoolean();
|
15095
|
+
if (sub_pic_hrd_params_present_flag) {
|
15096
|
+
eg.readUByte();
|
15097
|
+
eg.readBits(5);
|
15098
|
+
eg.readBoolean();
|
15099
|
+
eg.readBits(5);
|
15100
|
+
}
|
15101
|
+
eg.readBits(4); // bit_rate_scale
|
15102
|
+
eg.readBits(4); // cpb_size_scale
|
15103
|
+
if (sub_pic_hrd_params_present_flag) {
|
15104
|
+
eg.readBits(4);
|
15105
|
+
}
|
15106
|
+
eg.readBits(5);
|
15107
|
+
eg.readBits(5);
|
15108
|
+
eg.readBits(5);
|
15109
|
+
}
|
15110
|
+
//}
|
15111
|
+
for (let i = 0; i <= max_sub_layers_minus1; i++) {
|
15112
|
+
fps_fixed = eg.readBoolean(); // fixed_pic_rate_general_flag
|
15113
|
+
const fixed_pic_rate_within_cvs_flag = fps_fixed || eg.readBoolean();
|
15114
|
+
let low_delay_hrd_flag = false;
|
15115
|
+
if (fixed_pic_rate_within_cvs_flag) {
|
15116
|
+
eg.readEG();
|
15117
|
+
} else {
|
15118
|
+
low_delay_hrd_flag = eg.readBoolean();
|
15119
|
+
}
|
15120
|
+
const cpb_cnt = low_delay_hrd_flag ? 1 : eg.readUEG() + 1;
|
15121
|
+
if (nal_hrd_parameters_present_flag) {
|
15122
|
+
for (let j = 0; j < cpb_cnt; j++) {
|
15123
|
+
eg.readUEG();
|
15124
|
+
eg.readUEG();
|
15125
|
+
if (sub_pic_hrd_params_present_flag) {
|
15126
|
+
eg.readUEG();
|
15127
|
+
eg.readUEG();
|
15128
|
+
}
|
15129
|
+
eg.skipBits(1);
|
15130
|
+
}
|
15131
|
+
}
|
15132
|
+
if (vcl_hrd_parameters_present_flag) {
|
15133
|
+
for (let j = 0; j < cpb_cnt; j++) {
|
15134
|
+
eg.readUEG();
|
15135
|
+
eg.readUEG();
|
15136
|
+
if (sub_pic_hrd_params_present_flag) {
|
15137
|
+
eg.readUEG();
|
15138
|
+
eg.readUEG();
|
15139
|
+
}
|
15140
|
+
eg.skipBits(1);
|
15141
|
+
}
|
15142
|
+
}
|
15143
|
+
}
|
15144
|
+
}
|
14522
15145
|
}
|
15146
|
+
const bitstream_restriction_flag = eg.readBoolean();
|
15147
|
+
if (bitstream_restriction_flag) {
|
15148
|
+
eg.readBoolean(); // tiles_fixed_structure_flag
|
15149
|
+
eg.readBoolean(); // motion_vectors_over_pic_boundaries_flag
|
15150
|
+
eg.readBoolean(); // restricted_ref_pic_lists_flag
|
15151
|
+
min_spatial_segmentation_idc = eg.readUEG();
|
15152
|
+
}
|
15153
|
+
}
|
15154
|
+
let width = pic_width_in_luma_samples,
|
15155
|
+
height = pic_height_in_luma_samples;
|
15156
|
+
if (conformance_window_flag || default_display_window_flag) {
|
15157
|
+
let chroma_scale_w = 1,
|
15158
|
+
chroma_scale_h = 1;
|
15159
|
+
if (chroma_format_idc === 1) {
|
15160
|
+
// YUV 420
|
15161
|
+
chroma_scale_w = chroma_scale_h = 2;
|
15162
|
+
} else if (chroma_format_idc == 2) {
|
15163
|
+
// YUV 422
|
15164
|
+
chroma_scale_w = 2;
|
15165
|
+
}
|
15166
|
+
width = pic_width_in_luma_samples - chroma_scale_w * pic_right_offset - chroma_scale_w * pic_left_offset;
|
15167
|
+
height = pic_height_in_luma_samples - chroma_scale_h * pic_bottom_offset - chroma_scale_h * pic_top_offset;
|
15168
|
+
}
|
15169
|
+
const profile_space_string = general_profile_space ? ['A', 'B', 'C'][general_profile_space] : '';
|
15170
|
+
const profile_compatibility_buf = general_profile_compatibility_flags_1 << 24 | general_profile_compatibility_flags_2 << 16 | general_profile_compatibility_flags_3 << 8 | general_profile_compatibility_flags_4;
|
15171
|
+
let profile_compatibility_rev = 0;
|
15172
|
+
for (let i = 0; i < 32; i++) {
|
15173
|
+
profile_compatibility_rev = (profile_compatibility_rev | (profile_compatibility_buf >> i & 1) << 31 - i) >>> 0; // reverse bit position (and cast as UInt32)
|
15174
|
+
}
|
15175
|
+
let profile_compatibility_flags_string = profile_compatibility_rev.toString(16);
|
15176
|
+
if (general_profile_idc === 1 && profile_compatibility_flags_string === '2') {
|
15177
|
+
profile_compatibility_flags_string = '6';
|
15178
|
+
}
|
15179
|
+
const tier_flag_string = general_tier_flag ? 'H' : 'L';
|
15180
|
+
return {
|
15181
|
+
codecString: `hvc1.${profile_space_string}${general_profile_idc}.${profile_compatibility_flags_string}.${tier_flag_string}${general_level_idc}.B0`,
|
15182
|
+
params: {
|
15183
|
+
general_tier_flag,
|
15184
|
+
general_profile_idc,
|
15185
|
+
general_profile_space,
|
15186
|
+
general_profile_compatibility_flags: [general_profile_compatibility_flags_1, general_profile_compatibility_flags_2, general_profile_compatibility_flags_3, general_profile_compatibility_flags_4],
|
15187
|
+
general_constraint_indicator_flags: [general_constraint_indicator_flags_1, general_constraint_indicator_flags_2, general_constraint_indicator_flags_3, general_constraint_indicator_flags_4, general_constraint_indicator_flags_5, general_constraint_indicator_flags_6],
|
15188
|
+
general_level_idc,
|
15189
|
+
bit_depth: bit_depth_luma_minus8 + 8,
|
15190
|
+
bit_depth_luma_minus8,
|
15191
|
+
bit_depth_chroma_minus8,
|
15192
|
+
min_spatial_segmentation_idc,
|
15193
|
+
chroma_format_idc: chroma_format_idc,
|
15194
|
+
frame_rate: {
|
15195
|
+
fixed: fps_fixed,
|
15196
|
+
fps: fps_num / fps_den
|
15197
|
+
}
|
15198
|
+
},
|
15199
|
+
width,
|
15200
|
+
height,
|
15201
|
+
pixelRatio: [sar_width, sar_height]
|
15202
|
+
};
|
15203
|
+
}
|
15204
|
+
readPPS(pps) {
|
15205
|
+
const eg = new ExpGolomb(this.ebsp2rbsp(pps));
|
15206
|
+
eg.readUByte();
|
15207
|
+
eg.readUByte();
|
15208
|
+
eg.skipUEG(); // pic_parameter_set_id
|
15209
|
+
eg.skipUEG(); // seq_parameter_set_id
|
15210
|
+
eg.skipBits(2); // dependent_slice_segments_enabled_flag, output_flag_present_flag
|
15211
|
+
eg.skipBits(3); // num_extra_slice_header_bits
|
15212
|
+
eg.skipBits(2); // sign_data_hiding_enabled_flag, cabac_init_present_flag
|
15213
|
+
eg.skipUEG();
|
15214
|
+
eg.skipUEG();
|
15215
|
+
eg.skipEG(); // init_qp_minus26
|
15216
|
+
eg.skipBits(2); // constrained_intra_pred_flag, transform_skip_enabled_flag
|
15217
|
+
const cu_qp_delta_enabled_flag = eg.readBoolean();
|
15218
|
+
if (cu_qp_delta_enabled_flag) {
|
15219
|
+
eg.skipUEG();
|
15220
|
+
}
|
15221
|
+
eg.skipEG(); // cb_qp_offset
|
15222
|
+
eg.skipEG(); // cr_qp_offset
|
15223
|
+
eg.skipBits(4); // pps_slice_chroma_qp_offsets_present_flag, weighted_pred_flag, weighted_bipred_flag, transquant_bypass_enabled_flag
|
15224
|
+
const tiles_enabled_flag = eg.readBoolean();
|
15225
|
+
const entropy_coding_sync_enabled_flag = eg.readBoolean();
|
15226
|
+
let parallelismType = 1; // slice-based parallel decoding
|
15227
|
+
if (entropy_coding_sync_enabled_flag && tiles_enabled_flag) {
|
15228
|
+
parallelismType = 0; // mixed-type parallel decoding
|
15229
|
+
} else if (entropy_coding_sync_enabled_flag) {
|
15230
|
+
parallelismType = 3; // wavefront-based parallel decoding
|
15231
|
+
} else if (tiles_enabled_flag) {
|
15232
|
+
parallelismType = 2; // tile-based parallel decoding
|
14523
15233
|
}
|
14524
|
-
|
14525
|
-
|
15234
|
+
return {
|
15235
|
+
parallelismType
|
15236
|
+
};
|
15237
|
+
}
|
15238
|
+
matchSPS(sps1, sps2) {
|
15239
|
+
// compare without headers and VPS related params
|
15240
|
+
return String.fromCharCode.apply(null, sps1).substr(3) === String.fromCharCode.apply(null, sps2).substr(3);
|
14526
15241
|
}
|
14527
15242
|
}
|
14528
15243
|
|
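readSPS above assembles the RFC 6381-style HEVC codec string ("hvc1." + profile space letter + profile_idc + "." + bit-reversed compatibility flags in hex + "." + tier letter + level + ".B0") that the remuxer later advertises. One quick way to probe whether the current browser could play such a track once it is remuxed to fMP4; the literal codec string here is only an example value, not one taken from this diff:

function canPlayHevc(codecString = 'hvc1.1.6.L123.B0') {
  const mime = `video/mp4; codecs="${codecString}"`;
  return typeof MediaSource !== 'undefined' && MediaSource.isTypeSupported(mime);
}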
@@ -14540,7 +15255,7 @@ class SampleAesDecrypter {
|
|
14540
15255
|
});
|
14541
15256
|
}
|
14542
15257
|
decryptBuffer(encryptedData) {
|
14543
|
-
return this.decrypter.decrypt(encryptedData, this.keyData.key.buffer, this.keyData.iv.buffer);
|
15258
|
+
return this.decrypter.decrypt(encryptedData, this.keyData.key.buffer, this.keyData.iv.buffer, DecrypterAesMode.cbc);
|
14544
15259
|
}
|
14545
15260
|
|
14546
15261
|
// AAC - encrypt all full 16 bytes blocks starting from offset 16
|
@@ -14654,7 +15369,7 @@ class TSDemuxer {
|
|
14654
15369
|
this.observer = observer;
|
14655
15370
|
this.config = config;
|
14656
15371
|
this.typeSupported = typeSupported;
|
14657
|
-
this.videoParser =
|
15372
|
+
this.videoParser = null;
|
14658
15373
|
}
|
14659
15374
|
static probe(data) {
|
14660
15375
|
const syncOffset = TSDemuxer.syncOffset(data);
|
@@ -14819,7 +15534,19 @@ class TSDemuxer {
|
|
14819
15534
|
case videoPid:
|
14820
15535
|
if (stt) {
|
14821
15536
|
if (videoData && (pes = parsePES(videoData))) {
|
14822
|
-
this.videoParser
|
15537
|
+
if (this.videoParser === null) {
|
15538
|
+
switch (videoTrack.segmentCodec) {
|
15539
|
+
case 'avc':
|
15540
|
+
this.videoParser = new AvcVideoParser();
|
15541
|
+
break;
|
15542
|
+
case 'hevc':
|
15543
|
+
this.videoParser = new HevcVideoParser();
|
15544
|
+
break;
|
15545
|
+
}
|
15546
|
+
}
|
15547
|
+
if (this.videoParser !== null) {
|
15548
|
+
this.videoParser.parsePES(videoTrack, textTrack, pes, false, this._duration);
|
15549
|
+
}
|
14823
15550
|
}
|
14824
15551
|
videoData = {
|
14825
15552
|
data: [],
|
@@ -14981,8 +15708,20 @@ class TSDemuxer {
|
|
14981
15708
|
// try to parse last PES packets
|
14982
15709
|
let pes;
|
14983
15710
|
if (videoData && (pes = parsePES(videoData))) {
|
14984
|
-
this.videoParser
|
14985
|
-
|
15711
|
+
if (this.videoParser === null) {
|
15712
|
+
switch (videoTrack.segmentCodec) {
|
15713
|
+
case 'avc':
|
15714
|
+
this.videoParser = new AvcVideoParser();
|
15715
|
+
break;
|
15716
|
+
case 'hevc':
|
15717
|
+
this.videoParser = new HevcVideoParser();
|
15718
|
+
break;
|
15719
|
+
}
|
15720
|
+
}
|
15721
|
+
if (this.videoParser !== null) {
|
15722
|
+
this.videoParser.parsePES(videoTrack, textTrack, pes, true, this._duration);
|
15723
|
+
videoTrack.pesData = null;
|
15724
|
+
}
|
14986
15725
|
} else {
|
14987
15726
|
// either avcData null or PES truncated, keep it for next frag parsing
|
14988
15727
|
videoTrack.pesData = videoData;
|
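The same lazy parser selection now appears twice above, once in the steady-state PES path and once in the flush path. A small factory expressing the shared logic (sketch only; the library inlines the switch rather than using a helper like this):

function createVideoParser(segmentCodec) {
  switch (segmentCodec) {
    case 'avc':
      return new AvcVideoParser();
    case 'hevc':
      return new HevcVideoParser();
    default:
      return null; // unknown segment codec: leave the PES data unparsed
  }
}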
@@ -15285,7 +16024,12 @@ function parsePMT(data, offset, typeSupported, isSampleAes) {
|
|
15285
16024
|
logger.warn('Unsupported EC-3 in M2TS found');
|
15286
16025
|
break;
|
15287
16026
|
case 0x24:
|
15288
|
-
|
16027
|
+
// ITU-T Rec. H.265 and ISO/IEC 23008-2 (HEVC)
|
16028
|
+
if (result.videoPid === -1) {
|
16029
|
+
result.videoPid = pid;
|
16030
|
+
result.segmentVideoCodec = 'hevc';
|
16031
|
+
logger.log('HEVC in M2TS found');
|
16032
|
+
}
|
15289
16033
|
break;
|
15290
16034
|
}
|
15291
16035
|
// move to the next table entry
|
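The new case above maps MPEG-2 TS stream_type 0x24 to HEVC. For orientation while reading the PMT switch, the stream_type values most relevant here (0x24 comes from this hunk; the other two are standard ISO/IEC 13818-1 assignments listed for context rather than quoted from the diff):

const M2TS_STREAM_TYPES = {
  0x0f: 'aac',  // ADTS AAC audio
  0x1b: 'avc',  // ITU-T H.264 / ISO/IEC 14496-10 video
  0x24: 'hevc', // ITU-T H.265 / ISO/IEC 23008-2 video (newly mapped in this version)
};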
@@ -15508,6 +16252,8 @@ class MP4 {
|
|
15508
16252
|
avc1: [],
|
15509
16253
|
// codingname
|
15510
16254
|
avcC: [],
|
16255
|
+
hvc1: [],
|
16256
|
+
hvcC: [],
|
15511
16257
|
btrt: [],
|
15512
16258
|
dinf: [],
|
15513
16259
|
dref: [],
|
@@ -15932,8 +16678,10 @@ class MP4 {
|
|
15932
16678
|
return MP4.box(MP4.types.stsd, MP4.STSD, MP4.ac3(track));
|
15933
16679
|
}
|
15934
16680
|
return MP4.box(MP4.types.stsd, MP4.STSD, MP4.mp4a(track));
|
15935
|
-
} else {
|
16681
|
+
} else if (track.segmentCodec === 'avc') {
|
15936
16682
|
return MP4.box(MP4.types.stsd, MP4.STSD, MP4.avc1(track));
|
16683
|
+
} else {
|
16684
|
+
return MP4.box(MP4.types.stsd, MP4.STSD, MP4.hvc1(track));
|
15937
16685
|
}
|
15938
16686
|
}
|
15939
16687
|
static tkhd(track) {
|
@@ -16071,6 +16819,84 @@ class MP4 {
|
|
16071
16819
|
const result = appendUint8Array(MP4.FTYP, movie);
|
16072
16820
|
return result;
|
16073
16821
|
}
|
16822
|
+
static hvc1(track) {
|
16823
|
+
const ps = track.params;
|
16824
|
+
const units = [track.vps, track.sps, track.pps];
|
16825
|
+
const NALuLengthSize = 4;
|
16826
|
+
const config = new Uint8Array([0x01, ps.general_profile_space << 6 | (ps.general_tier_flag ? 32 : 0) | ps.general_profile_idc, ps.general_profile_compatibility_flags[0], ps.general_profile_compatibility_flags[1], ps.general_profile_compatibility_flags[2], ps.general_profile_compatibility_flags[3], ps.general_constraint_indicator_flags[0], ps.general_constraint_indicator_flags[1], ps.general_constraint_indicator_flags[2], ps.general_constraint_indicator_flags[3], ps.general_constraint_indicator_flags[4], ps.general_constraint_indicator_flags[5], ps.general_level_idc, 240 | ps.min_spatial_segmentation_idc >> 8, 255 & ps.min_spatial_segmentation_idc, 252 | ps.parallelismType, 252 | ps.chroma_format_idc, 248 | ps.bit_depth_luma_minus8, 248 | ps.bit_depth_chroma_minus8, 0x00, parseInt(ps.frame_rate.fps), NALuLengthSize - 1 | ps.temporal_id_nested << 2 | ps.num_temporal_layers << 3 | (ps.frame_rate.fixed ? 64 : 0), units.length]);
|
16827
|
+
|
16828
|
+
// compute hvcC size in bytes
|
16829
|
+
let length = config.length;
|
16830
|
+
for (let i = 0; i < units.length; i += 1) {
|
16831
|
+
length += 3;
|
16832
|
+
for (let j = 0; j < units[i].length; j += 1) {
|
16833
|
+
length += 2 + units[i][j].length;
|
16834
|
+
}
|
16835
|
+
}
|
16836
|
+
const hvcC = new Uint8Array(length);
|
16837
|
+
hvcC.set(config, 0);
|
16838
|
+
length = config.length;
|
16839
|
+
// append parameter set units: one vps, one or more sps and pps
|
16840
|
+
const iMax = units.length - 1;
|
16841
|
+
for (let i = 0; i < units.length; i += 1) {
|
16842
|
+
hvcC.set(new Uint8Array([32 + i | (i === iMax ? 128 : 0), 0x00, units[i].length]), length);
|
16843
|
+
length += 3;
|
16844
|
+
for (let j = 0; j < units[i].length; j += 1) {
|
16845
|
+
hvcC.set(new Uint8Array([units[i][j].length >> 8, units[i][j].length & 255]), length);
|
16846
|
+
length += 2;
|
16847
|
+
hvcC.set(units[i][j], length);
|
16848
|
+
length += units[i][j].length;
|
16849
|
+
}
|
16850
|
+
}
|
16851
|
+
const hvcc = MP4.box(MP4.types.hvcC, hvcC);
|
16852
|
+
const width = track.width;
|
16853
|
+
const height = track.height;
|
16854
|
+
const hSpacing = track.pixelRatio[0];
|
16855
|
+
const vSpacing = track.pixelRatio[1];
|
16856
|
+
return MP4.box(MP4.types.hvc1, new Uint8Array([0x00, 0x00, 0x00,
|
16857
|
+
// reserved
|
16858
|
+
0x00, 0x00, 0x00,
|
16859
|
+
// reserved
|
16860
|
+
0x00, 0x01,
|
16861
|
+
// data_reference_index
|
16862
|
+
0x00, 0x00,
|
16863
|
+
// pre_defined
|
16864
|
+
0x00, 0x00,
|
16865
|
+
// reserved
|
16866
|
+
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
16867
|
+
// pre_defined
|
16868
|
+
width >> 8 & 0xff, width & 0xff,
|
16869
|
+
// width
|
16870
|
+
height >> 8 & 0xff, height & 0xff,
|
16871
|
+
// height
|
16872
|
+
0x00, 0x48, 0x00, 0x00,
|
16873
|
+
// horizresolution
|
16874
|
+
0x00, 0x48, 0x00, 0x00,
|
16875
|
+
// vertresolution
|
16876
|
+
0x00, 0x00, 0x00, 0x00,
|
16877
|
+
// reserved
|
16878
|
+
0x00, 0x01,
|
16879
|
+
// frame_count
|
16880
|
+
0x12, 0x64, 0x61, 0x69, 0x6c,
|
16881
|
+
// dailymotion/hls.js
|
16882
|
+
0x79, 0x6d, 0x6f, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x68, 0x6c, 0x73, 0x2e, 0x6a, 0x73, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
16883
|
+
// compressorname
|
16884
|
+
0x00, 0x18,
|
16885
|
+
// depth = 24
|
16886
|
+
0x11, 0x11]),
|
16887
|
+
// pre_defined = -1
|
16888
|
+
hvcc, MP4.box(MP4.types.btrt, new Uint8Array([0x00, 0x1c, 0x9c, 0x80,
|
16889
|
+
// bufferSizeDB
|
16890
|
+
0x00, 0x2d, 0xc6, 0xc0,
|
16891
|
+
// maxBitrate
|
16892
|
+
0x00, 0x2d, 0xc6, 0xc0])),
|
16893
|
+
// avgBitrate
|
16894
|
+
MP4.box(MP4.types.pasp, new Uint8Array([hSpacing >> 24,
|
16895
|
+
// hSpacing
|
16896
|
+
hSpacing >> 16 & 0xff, hSpacing >> 8 & 0xff, hSpacing & 0xff, vSpacing >> 24,
|
16897
|
+
// vSpacing
|
16898
|
+
vSpacing >> 16 & 0xff, vSpacing >> 8 & 0xff, vSpacing & 0xff])));
|
16899
|
+
}
|
16074
16900
|
}
|
16075
16901
|
MP4.types = void 0;
|
16076
16902
|
MP4.HDLR_TYPES = void 0;
|
@@ -16446,9 +17272,9 @@ class MP4Remuxer {
|
|
16446
17272
|
const foundOverlap = delta < -1;
|
16447
17273
|
if (foundHole || foundOverlap) {
|
16448
17274
|
if (foundHole) {
|
16449
|
-
logger.warn(
|
17275
|
+
logger.warn(`${(track.segmentCodec || '').toUpperCase()}: ${toMsFromMpegTsClock(delta, true)} ms (${delta}dts) hole between fragments detected at ${timeOffset.toFixed(3)}`);
|
16450
17276
|
} else {
|
16451
|
-
logger.warn(`AVC: ${toMsFromMpegTsClock(-delta, true)} ms (${delta}dts) overlapping between fragments detected at ${timeOffset.toFixed(3)}`);
|
17277
|
+
logger.warn(`${(track.segmentCodec || '').toUpperCase()}: ${toMsFromMpegTsClock(-delta, true)} ms (${delta}dts) overlapping between fragments detected at ${timeOffset.toFixed(3)}`);
|
16452
17278
|
}
|
16453
17279
|
if (!foundOverlap || nextAvcDts >= inputSamples[0].pts || chromeVersion) {
|
16454
17280
|
firstDTS = nextAvcDts;
|
@@ -16457,12 +17283,24 @@ class MP4Remuxer {
|
|
16457
17283
|
inputSamples[0].dts = firstDTS;
|
16458
17284
|
inputSamples[0].pts = firstPTS;
|
16459
17285
|
} else {
|
17286
|
+
let isPTSOrderRetained = true;
|
16460
17287
|
for (let i = 0; i < inputSamples.length; i++) {
|
16461
|
-
if (inputSamples[i].dts > firstPTS) {
|
17288
|
+
if (inputSamples[i].dts > firstPTS && isPTSOrderRetained) {
|
16462
17289
|
break;
|
16463
17290
|
}
|
17291
|
+
const prevPTS = inputSamples[i].pts;
|
16464
17292
|
inputSamples[i].dts -= delta;
|
16465
17293
|
inputSamples[i].pts -= delta;
|
17294
|
+
|
17295
|
+
// check to see if this sample's PTS order has changed
|
17296
|
+
// relative to the next one
|
17297
|
+
if (i < inputSamples.length - 1) {
|
17298
|
+
const nextSamplePTS = inputSamples[i + 1].pts;
|
17299
|
+
const currentSamplePTS = inputSamples[i].pts;
|
17300
|
+
const currentOrder = nextSamplePTS <= currentSamplePTS;
|
17301
|
+
const prevOrder = nextSamplePTS <= prevPTS;
|
17302
|
+
isPTSOrderRetained = currentOrder == prevOrder;
|
17303
|
+
}
|
16466
17304
|
}
|
16467
17305
|
}
|
16468
17306
|
logger.log(`Video: Initial PTS/DTS adjusted: ${toMsFromMpegTsClock(firstPTS, true)}/${toMsFromMpegTsClock(firstDTS, true)}, delta: ${toMsFromMpegTsClock(delta, true)} ms`);
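The overlap-correction loop above now tracks whether shifting a sample's DTS/PTS by `delta` preserves its presentation order relative to the next sample, and stops adjusting once that order would flip. A small illustrative check, simplified from the logic above and not part of the build:

    // Returns true when shifting prevPts to shiftedPts would change its order
    // relative to the following sample's PTS.
    const ptsOrderChanged = (prevPts: number, shiftedPts: number, nextPts: number): boolean =>
      (nextPts <= shiftedPts) !== (nextPts <= prevPts);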
|
@@ -16610,7 +17448,7 @@ class MP4Remuxer {
|
|
16610
17448
|
}
|
16611
17449
|
}
|
16612
17450
|
}
|
16613
|
-
// next AVC sample DTS should be equal to last sample DTS + last sample duration (in PES timescale)
|
17451
|
+
// next AVC/HEVC sample DTS should be equal to last sample DTS + last sample duration (in PES timescale)
|
16614
17452
|
mp4SampleDuration = stretchedLastFrame || !mp4SampleDuration ? averageSampleDuration : mp4SampleDuration;
|
16615
17453
|
this.nextAvcDts = nextAvcDts = lastDTS + mp4SampleDuration;
|
16616
17454
|
this.videoSampleDuration = mp4SampleDuration;
|
@@ -16743,7 +17581,7 @@ class MP4Remuxer {
|
|
16743
17581
|
logger.warn(`[mp4-remuxer]: Injecting ${missing} audio frame @ ${(nextPts / inputTimeScale).toFixed(3)}s due to ${Math.round(1000 * delta / inputTimeScale)} ms gap.`);
|
16744
17582
|
for (let j = 0; j < missing; j++) {
|
16745
17583
|
const newStamp = Math.max(nextPts, 0);
|
16746
|
-
let fillFrame = AAC.getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
|
17584
|
+
let fillFrame = AAC.getSilentFrame(track.parsedCodec || track.manifestCodec || track.codec, track.channelCount);
|
16747
17585
|
if (!fillFrame) {
|
16748
17586
|
logger.log('[mp4-remuxer]: Unable to get silent frame for given audio codec; duplicating last frame instead.');
|
16749
17587
|
fillFrame = sample.unit.subarray();
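Both silent-frame call sites in this file now prefer the codec parsed from the elementary stream over the manifest-declared one. A sketch of the fallback order now used when picking the codec for AAC.getSilentFrame():

    // Illustrative helper; the track fields are those referenced in the diff.
    const pickSilentFrameCodec = (track: { parsedCodec?: string; manifestCodec?: string; codec?: string }) =>
      track.parsedCodec || track.manifestCodec || track.codec;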
|
@@ -16871,7 +17709,7 @@ class MP4Remuxer {
|
|
16871
17709
|
// samples count of this segment's duration
|
16872
17710
|
const nbSamples = Math.ceil((endDTS - startDTS) / frameDuration);
|
16873
17711
|
// silent frame
|
16874
|
-
const silentFrame = AAC.getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
|
17712
|
+
const silentFrame = AAC.getSilentFrame(track.parsedCodec || track.manifestCodec || track.codec, track.channelCount);
|
16875
17713
|
logger.warn('[mp4-remuxer]: remux empty Audio');
|
16876
17714
|
// Can't remux if we can't generate a silent frame...
|
16877
17715
|
if (!silentFrame) {
|
@@ -17262,13 +18100,15 @@ class Transmuxer {
|
|
17262
18100
|
initSegmentData
|
17263
18101
|
} = transmuxConfig;
|
17264
18102
|
const keyData = getEncryptionType(uintData, decryptdata);
|
17265
|
-
if (keyData && keyData.method === 'AES-128') {
|
18103
|
+
if (keyData && isFullSegmentEncryption(keyData.method)) {
|
17266
18104
|
const decrypter = this.getDecrypter();
|
18105
|
+
const aesMode = getAesModeFromFullSegmentMethod(keyData.method);
|
18106
|
+
|
17267
18107
|
// Software decryption is synchronous; webCrypto is not
|
17268
18108
|
if (decrypter.isSync()) {
|
17269
18109
|
// Software decryption is progressive. Progressive decryption may not return a result on each call. Any cached
|
17270
18110
|
// data is handled in the flush() call
|
17271
|
-
let decryptedData = decrypter.softwareDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer);
|
18111
|
+
let decryptedData = decrypter.softwareDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer, aesMode);
|
17272
18112
|
// For Low-Latency HLS Parts, decrypt in place, since part parsing is expected on push progress
|
17273
18113
|
const loadingParts = chunkMeta.part > -1;
|
17274
18114
|
if (loadingParts) {
|
@@ -17280,7 +18120,7 @@ class Transmuxer {
|
|
17280
18120
|
}
|
17281
18121
|
uintData = new Uint8Array(decryptedData);
|
17282
18122
|
} else {
|
17283
|
-
this.decryptionPromise = decrypter.webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer).then(decryptedData => {
|
18123
|
+
this.decryptionPromise = decrypter.webCryptoDecrypt(uintData, keyData.key.buffer, keyData.iv.buffer, aesMode).then(decryptedData => {
|
17284
18124
|
// Calling push here is important; if flush() is called while this is still resolving, this ensures that
|
17285
18125
|
// the decrypted data has been transmuxed
|
17286
18126
|
const result = this.push(decryptedData, null, chunkMeta);
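With this change the transmuxer treats any full-segment encryption method (no longer only AES-128) as decryptable in-path and forwards an AES mode to both the software and WebCrypto decrypters. A hedged sketch of what the new helpers from src/utils/encryption-methods-util.ts presumably look like; the names come from this diff, the bodies are assumptions:

    // Assumed shape of the helpers referenced above.
    enum DecrypterAesMode { cbc = 0, ctr = 1 }

    function isFullSegmentEncryption(method: string): boolean {
      return method === 'AES-128' || method === 'AES-256' || method === 'AES-256-CTR';
    }

    function getAesModeFromFullSegmentMethod(method: string): DecrypterAesMode {
      return method === 'AES-256-CTR' ? DecrypterAesMode.ctr : DecrypterAesMode.cbc;
    }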
|
@@ -17934,14 +18774,7 @@ class TransmuxerInterface {
|
|
17934
18774
|
this.observer = new EventEmitter();
|
17935
18775
|
this.observer.on(Events.FRAG_DECRYPTED, forwardMessage);
|
17936
18776
|
this.observer.on(Events.ERROR, forwardMessage);
|
17937
|
-
const MediaSource = getMediaSource(config.preferManagedMediaSource) || {
|
17938
|
-
isTypeSupported: () => false
|
17939
|
-
};
|
17940
|
-
const m2tsTypeSupported = {
|
17941
|
-
mpeg: MediaSource.isTypeSupported('audio/mpeg'),
|
17942
|
-
mp3: MediaSource.isTypeSupported('audio/mp4; codecs="mp3"'),
|
17943
|
-
ac3: false
|
17944
|
-
};
|
18777
|
+
const m2tsTypeSupported = getM2TSSupportedAudioTypes(config.preferManagedMediaSource);
|
17945
18778
|
|
17946
18779
|
// navigator.vendor is not always available in Web Worker
|
17947
18780
|
// refer to https://developer.mozilla.org/en-US/docs/Web/API/WorkerGlobalScope/navigator
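getM2TSSupportedAudioTypes() replaces the MediaSource probing that was previously inlined here. A sketch of the equivalent logic, reconstructed from the removed lines; the helper's real implementation may differ:

    // getMediaSource is an hls.js internal; declared here only so the sketch type-checks.
    declare function getMediaSource(preferManagedMediaSource?: boolean): typeof MediaSource | undefined;

    function getM2TSSupportedAudioTypes(preferManagedMediaSource: boolean) {
      const mediaSource = getMediaSource(preferManagedMediaSource);
      const isTypeSupported = (mimeType: string) =>
        mediaSource ? mediaSource.isTypeSupported(mimeType) : false;
      return {
        mpeg: isTypeSupported('audio/mpeg'),
        mp3: isTypeSupported('audio/mp4; codecs="mp3"'),
        ac3: false, // advanced codecs are excluded from the light build
      };
    }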
|
@@ -18205,8 +19038,9 @@ const STALL_MINIMUM_DURATION_MS = 250;
|
|
18205
19038
|
const MAX_START_GAP_JUMP = 2.0;
|
18206
19039
|
const SKIP_BUFFER_HOLE_STEP_SECONDS = 0.1;
|
18207
19040
|
const SKIP_BUFFER_RANGE_START = 0.05;
|
18208
|
-
class GapController {
|
19041
|
+
class GapController extends Logger {
|
18209
19042
|
constructor(config, media, fragmentTracker, hls) {
|
19043
|
+
super('gap-controller', hls.logger);
|
18210
19044
|
this.config = void 0;
|
18211
19045
|
this.media = null;
|
18212
19046
|
this.fragmentTracker = void 0;
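GapController, like other controllers in this release, now extends a Logger base bound to the owning instance's `hls.logger`, so its warn/error output goes through the per-instance logger configured by the `debug` option instead of the module-level logger. A minimal usage sketch, assuming only the documented `debug` option and the new public `logger` field shown later in this diff:

    import Hls from 'hls.js';

    const hls = new Hls({ debug: true }); // per-instance logging, prefixed per controller
    hls.logger.log('hello from this instance'); // instance-scoped logger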
|
@@ -18216,6 +19050,7 @@ class GapController {
|
|
18216
19050
|
this.stalled = null;
|
18217
19051
|
this.moved = false;
|
18218
19052
|
this.seeking = false;
|
19053
|
+
this.ended = 0;
|
18219
19054
|
this.config = config;
|
18220
19055
|
this.media = media;
|
18221
19056
|
this.fragmentTracker = fragmentTracker;
|
@@ -18233,7 +19068,7 @@ class GapController {
|
|
18233
19068
|
*
|
18234
19069
|
* @param lastCurrentTime - Previously read playhead position
|
18235
19070
|
*/
|
18236
|
-
poll(lastCurrentTime, activeFrag) {
|
19071
|
+
poll(lastCurrentTime, activeFrag, levelDetails, state) {
|
18237
19072
|
const {
|
18238
19073
|
config,
|
18239
19074
|
media,
|
@@ -18252,6 +19087,7 @@ class GapController {
|
|
18252
19087
|
|
18253
19088
|
// The playhead is moving, no-op
|
18254
19089
|
if (currentTime !== lastCurrentTime) {
|
19090
|
+
this.ended = 0;
|
18255
19091
|
this.moved = true;
|
18256
19092
|
if (!seeking) {
|
18257
19093
|
this.nudgeRetry = 0;
|
@@ -18260,7 +19096,7 @@ class GapController {
|
|
18260
19096
|
// The playhead is now moving, but was previously stalled
|
18261
19097
|
if (this.stallReported) {
|
18262
19098
|
const _stalledDuration = self.performance.now() - stalled;
|
18263
|
-
logger.warn(`playback not stuck anymore @${currentTime}, after ${Math.round(_stalledDuration)}ms`);
|
19099
|
+
this.warn(`playback not stuck anymore @${currentTime}, after ${Math.round(_stalledDuration)}ms`);
|
18264
19100
|
this.stallReported = false;
|
18265
19101
|
}
|
18266
19102
|
this.stalled = null;
|
@@ -18296,7 +19132,6 @@ class GapController {
|
|
18296
19132
|
// Skip start gaps if we haven't played, but the last poll detected the start of a stall
|
18297
19133
|
// The addition poll gives the browser a chance to jump the gap for us
|
18298
19134
|
if (!this.moved && this.stalled !== null) {
|
18299
|
-
var _level$details;
|
18300
19135
|
// There is no playable buffer (seeked, waiting for buffer)
|
18301
19136
|
const isBuffered = bufferInfo.len > 0;
|
18302
19137
|
if (!isBuffered && !nextStart) {
|
@@ -18308,9 +19143,8 @@ class GapController {
|
|
18308
19143
|
// When joining a live stream with audio tracks, account for live playlist window sliding by allowing
|
18309
19144
|
// a larger jump over start gaps caused by the audio-stream-controller buffering a start fragment
|
18310
19145
|
// that begins over 1 target duration after the video start position.
|
18311
|
-
const
|
18312
|
-
const
|
18313
|
-
const maxStartGapJump = isLive ? level.details.targetduration * 2 : MAX_START_GAP_JUMP;
|
19146
|
+
const isLive = !!(levelDetails != null && levelDetails.live);
|
19147
|
+
const maxStartGapJump = isLive ? levelDetails.targetduration * 2 : MAX_START_GAP_JUMP;
|
18314
19148
|
const partialOrGap = this.fragmentTracker.getPartialFragment(currentTime);
|
18315
19149
|
if (startJump > 0 && (startJump <= maxStartGapJump || partialOrGap)) {
|
18316
19150
|
if (!media.paused) {
|
@@ -18328,6 +19162,17 @@ class GapController {
|
|
18328
19162
|
}
|
18329
19163
|
const stalledDuration = tnow - stalled;
|
18330
19164
|
if (!seeking && stalledDuration >= STALL_MINIMUM_DURATION_MS) {
|
19165
|
+
// Dispatch MEDIA_ENDED when media.ended/ended event is not signalled at end of stream
|
19166
|
+
if (state === State.ENDED && !(levelDetails && levelDetails.live) && Math.abs(currentTime - ((levelDetails == null ? void 0 : levelDetails.edge) || 0)) < 1) {
|
19167
|
+
if (stalledDuration < 1000 || this.ended) {
|
19168
|
+
return;
|
19169
|
+
}
|
19170
|
+
this.ended = currentTime;
|
19171
|
+
this.hls.trigger(Events.MEDIA_ENDED, {
|
19172
|
+
stalled: true
|
19173
|
+
});
|
19174
|
+
return;
|
19175
|
+
}
|
18331
19176
|
// Report stalling after trying to fix
|
18332
19177
|
this._reportStall(bufferInfo);
|
18333
19178
|
if (!this.media) {
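The block above makes GapController raise MEDIA_ENDED itself when playback has stalled for at least a second within one second of the playlist edge while the stream controller is in the ENDED state, covering cases where the browser never fires a native 'ended' event. A usage sketch for the new `stalled` flag on that event:

    import Hls from 'hls.js';

    const hls = new Hls();
    hls.on(Hls.Events.MEDIA_ENDED, (_event, data) => {
      if (data.stalled) {
        // End of stream was inferred from a stall at the edge rather than media.ended.
      }
    });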
|
@@ -18371,7 +19216,7 @@ class GapController {
|
|
18371
19216
|
// needs to cross some sort of threshold covering all source-buffers content
|
18372
19217
|
// to start playing properly.
|
18373
19218
|
if ((bufferInfo.len > config.maxBufferHole || bufferInfo.nextStart && bufferInfo.nextStart - currentTime < config.maxBufferHole) && stalledDurationMs > config.highBufferWatchdogPeriod * 1000) {
|
18374
|
-
logger.warn('Trying to nudge playhead over buffer-hole');
|
19219
|
+
this.warn('Trying to nudge playhead over buffer-hole');
|
18375
19220
|
// Try to nudge currentTime over a buffer hole if we've been stalling for the configured amount of seconds
|
18376
19221
|
// We only try to jump the hole if it's under the configured size
|
18377
19222
|
// Reset stalled so to rearm watchdog timer
|
@@ -18395,7 +19240,7 @@ class GapController {
|
|
18395
19240
|
// Report stalled error once
|
18396
19241
|
this.stallReported = true;
|
18397
19242
|
const error = new Error(`Playback stalling at @${media.currentTime} due to low buffer (${JSON.stringify(bufferInfo)})`);
|
18398
|
-
logger.warn(error.message);
|
19243
|
+
this.warn(error.message);
|
18399
19244
|
hls.trigger(Events.ERROR, {
|
18400
19245
|
type: ErrorTypes.MEDIA_ERROR,
|
18401
19246
|
details: ErrorDetails.BUFFER_STALLED_ERROR,
|
@@ -18463,7 +19308,7 @@ class GapController {
|
|
18463
19308
|
}
|
18464
19309
|
}
|
18465
19310
|
const targetTime = Math.max(startTime + SKIP_BUFFER_RANGE_START, currentTime + SKIP_BUFFER_HOLE_STEP_SECONDS);
|
18466
|
-
logger.warn(`skipping hole, adjusting currentTime from ${currentTime} to ${targetTime}`);
|
19311
|
+
this.warn(`skipping hole, adjusting currentTime from ${currentTime} to ${targetTime}`);
|
18467
19312
|
this.moved = true;
|
18468
19313
|
this.stalled = null;
|
18469
19314
|
media.currentTime = targetTime;
|
@@ -18504,7 +19349,7 @@ class GapController {
|
|
18504
19349
|
const targetTime = currentTime + (nudgeRetry + 1) * config.nudgeOffset;
|
18505
19350
|
// playback stalled in buffered area ... let's nudge currentTime to try to overcome this
|
18506
19351
|
const error = new Error(`Nudging 'currentTime' from ${currentTime} to ${targetTime}`);
|
18507
|
-
logger.warn(error.message);
|
19352
|
+
this.warn(error.message);
|
18508
19353
|
media.currentTime = targetTime;
|
18509
19354
|
hls.trigger(Events.ERROR, {
|
18510
19355
|
type: ErrorTypes.MEDIA_ERROR,
|
@@ -18514,7 +19359,7 @@ class GapController {
|
|
18514
19359
|
});
|
18515
19360
|
} else {
|
18516
19361
|
const error = new Error(`Playhead still not moving while enough data buffered @${currentTime} after ${config.nudgeMaxRetry} nudges`);
|
18517
|
-
logger.error(error.message);
|
19362
|
+
this.error(error.message);
|
18518
19363
|
hls.trigger(Events.ERROR, {
|
18519
19364
|
type: ErrorTypes.MEDIA_ERROR,
|
18520
19365
|
details: ErrorDetails.BUFFER_STALLED_ERROR,
|
@@ -18529,7 +19374,7 @@ const TICK_INTERVAL = 100; // how often to tick in ms
|
|
18529
19374
|
|
18530
19375
|
class StreamController extends BaseStreamController {
|
18531
19376
|
constructor(hls, fragmentTracker, keyLoader) {
|
18532
|
-
super(hls, fragmentTracker, keyLoader, '[stream-controller]', PlaylistLevelType.MAIN);
|
19377
|
+
super(hls, fragmentTracker, keyLoader, 'stream-controller', PlaylistLevelType.MAIN);
|
18533
19378
|
this.audioCodecSwap = false;
|
18534
19379
|
this.gapController = null;
|
18535
19380
|
this.level = -1;
|
@@ -18537,27 +19382,43 @@ class StreamController extends BaseStreamController {
|
|
18537
19382
|
this.altAudio = false;
|
18538
19383
|
this.audioOnly = false;
|
18539
19384
|
this.fragPlaying = null;
|
18540
|
-
this.onvplaying = null;
|
18541
|
-
this.onvseeked = null;
|
18542
19385
|
this.fragLastKbps = 0;
|
18543
19386
|
this.couldBacktrack = false;
|
18544
19387
|
this.backtrackFragment = null;
|
18545
19388
|
this.audioCodecSwitch = false;
|
18546
19389
|
this.videoBuffer = null;
|
18547
|
-
this._registerListeners();
|
19390
|
+
this.onMediaPlaying = () => {
|
19391
|
+
// tick to speed up FRAG_CHANGED triggering
|
19392
|
+
this.tick();
|
19393
|
+
};
|
19394
|
+
this.onMediaSeeked = () => {
|
19395
|
+
const media = this.media;
|
19396
|
+
const currentTime = media ? media.currentTime : null;
|
19397
|
+
if (isFiniteNumber(currentTime)) {
|
19398
|
+
this.log(`Media seeked to ${currentTime.toFixed(3)}`);
|
19399
|
+
}
|
19400
|
+
|
19401
|
+
// If seeked was issued before buffer was appended do not tick immediately
|
19402
|
+
const bufferInfo = this.getMainFwdBufferInfo();
|
19403
|
+
if (bufferInfo === null || bufferInfo.len === 0) {
|
19404
|
+
this.warn(`Main forward buffer length on "seeked" event ${bufferInfo ? bufferInfo.len : 'empty'})`);
|
19405
|
+
return;
|
19406
|
+
}
|
19407
|
+
|
19408
|
+
// tick to speed up FRAG_CHANGED triggering
|
19409
|
+
this.tick();
|
19410
|
+
};
|
19411
|
+
this.registerListeners();
|
18548
19412
|
}
|
18549
|
-
_registerListeners() {
|
19413
|
+
registerListeners() {
|
19414
|
+
super.registerListeners();
|
18550
19415
|
const {
|
18551
19416
|
hls
|
18552
19417
|
} = this;
|
18553
|
-
hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
|
18554
|
-
hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
|
18555
|
-
hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
|
18556
19418
|
hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
|
18557
19419
|
hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this);
|
18558
19420
|
hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
|
18559
19421
|
hls.on(Events.FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
|
18560
|
-
hls.on(Events.ERROR, this.onError, this);
|
18561
19422
|
hls.on(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
|
18562
19423
|
hls.on(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
|
18563
19424
|
hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this);
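In the constructor hunk above, the 'playing' and 'seeked' handlers become class-field arrow functions, so the same already-bound reference is passed to addEventListener and removeEventListener and the old onvplaying/onvseeked bookkeeping fields are dropped. The pattern, illustrated outside hls.js:

    // Illustrative pattern only (not code from this diff).
    class MediaWatcher {
      private onPlaying = () => this.tick(); // stable, pre-bound reference
      attach(media: HTMLMediaElement) { media.addEventListener('playing', this.onPlaying); }
      detach(media: HTMLMediaElement) { media.removeEventListener('playing', this.onPlaying); }
      private tick() { /* poll playback state */ }
    }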
|
@@ -18565,17 +19426,14 @@ class StreamController extends BaseStreamController {
|
|
18565
19426
|
hls.on(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
|
18566
19427
|
hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
|
18567
19428
|
}
|
18568
|
-
_unregisterListeners() {
|
19429
|
+
unregisterListeners() {
|
19430
|
+
super.unregisterListeners();
|
18569
19431
|
const {
|
18570
19432
|
hls
|
18571
19433
|
} = this;
|
18572
|
-
hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
|
18573
|
-
hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
|
18574
|
-
hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
|
18575
19434
|
hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
|
18576
19435
|
hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
|
18577
19436
|
hls.off(Events.FRAG_LOAD_EMERGENCY_ABORTED, this.onFragLoadEmergencyAborted, this);
|
18578
|
-
hls.off(Events.ERROR, this.onError, this);
|
18579
19437
|
hls.off(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
|
18580
19438
|
hls.off(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
|
18581
19439
|
hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this);
|
@@ -18584,7 +19442,9 @@ class StreamController extends BaseStreamController {
|
|
18584
19442
|
hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
|
18585
19443
|
}
|
18586
19444
|
onHandlerDestroying() {
|
18587
|
-
this._unregisterListeners();
|
19445
|
+
// @ts-ignore
|
19446
|
+
this.onMediaPlaying = this.onMediaSeeked = null;
|
19447
|
+
this.unregisterListeners();
|
18588
19448
|
super.onHandlerDestroying();
|
18589
19449
|
}
|
18590
19450
|
startLoad(startPosition) {
|
@@ -18610,7 +19470,8 @@ class StreamController extends BaseStreamController {
|
|
18610
19470
|
}
|
18611
19471
|
// set new level to playlist loader : this will trigger start level load
|
18612
19472
|
// hls.nextLoadLevel remains until it is set to a new value or until a new frag is successfully loaded
|
18613
|
-
this.level = hls.nextLoadLevel = startLevel;
|
19473
|
+
hls.nextLoadLevel = startLevel;
|
19474
|
+
this.level = hls.loadLevel;
|
18614
19475
|
this.loadedmetadata = false;
|
18615
19476
|
}
|
18616
19477
|
// if startPosition undefined but lastCurrentTime set, set startPosition to last currentTime
|
@@ -18703,7 +19564,7 @@ class StreamController extends BaseStreamController {
|
|
18703
19564
|
if (this.altAudio && this.audioOnly) {
|
18704
19565
|
return;
|
18705
19566
|
}
|
18706
|
-
if (!(levels != null && levels[level])) {
|
19567
|
+
if (!this.buffering || !(levels != null && levels[level])) {
|
18707
19568
|
return;
|
18708
19569
|
}
|
18709
19570
|
const levelInfo = levels[level];
|
@@ -18911,20 +19772,17 @@ class StreamController extends BaseStreamController {
|
|
18911
19772
|
onMediaAttached(event, data) {
|
18912
19773
|
super.onMediaAttached(event, data);
|
18913
19774
|
const media = data.media;
|
18914
|
-
this.onvplaying = this.onMediaPlaying.bind(this);
|
18915
|
-
this.onvseeked = this.onMediaSeeked.bind(this);
|
18916
|
-
media.addEventListener('playing', this.onvplaying);
|
18917
|
-
media.addEventListener('seeked', this.onvseeked);
|
19775
|
+
media.addEventListener('playing', this.onMediaPlaying);
|
19776
|
+
media.addEventListener('seeked', this.onMediaSeeked);
|
18918
19777
|
this.gapController = new GapController(this.config, media, this.fragmentTracker, this.hls);
|
18919
19778
|
}
|
18920
19779
|
onMediaDetaching() {
|
18921
19780
|
const {
|
18922
19781
|
media
|
18923
19782
|
} = this;
|
18924
|
-
if (media && this.onvplaying && this.onvseeked) {
|
18925
|
-
media.removeEventListener('playing', this.onvplaying);
|
18926
|
-
media.removeEventListener('seeked', this.onvseeked);
|
18927
|
-
this.onvplaying = this.onvseeked = null;
|
19783
|
+
if (media) {
|
19784
|
+
media.removeEventListener('playing', this.onMediaPlaying);
|
19785
|
+
media.removeEventListener('seeked', this.onMediaSeeked);
|
18928
19786
|
this.videoBuffer = null;
|
18929
19787
|
}
|
18930
19788
|
this.fragPlaying = null;
|
@@ -18934,27 +19792,6 @@ class StreamController extends BaseStreamController {
|
|
18934
19792
|
}
|
18935
19793
|
super.onMediaDetaching();
|
18936
19794
|
}
|
18937
|
-
onMediaPlaying() {
|
18938
|
-
// tick to speed up FRAG_CHANGED triggering
|
18939
|
-
this.tick();
|
18940
|
-
}
|
18941
|
-
onMediaSeeked() {
|
18942
|
-
const media = this.media;
|
18943
|
-
const currentTime = media ? media.currentTime : null;
|
18944
|
-
if (isFiniteNumber(currentTime)) {
|
18945
|
-
this.log(`Media seeked to ${currentTime.toFixed(3)}`);
|
18946
|
-
}
|
18947
|
-
|
18948
|
-
// If seeked was issued before buffer was appended do not tick immediately
|
18949
|
-
const bufferInfo = this.getMainFwdBufferInfo();
|
18950
|
-
if (bufferInfo === null || bufferInfo.len === 0) {
|
18951
|
-
this.warn(`Main forward buffer length on "seeked" event ${bufferInfo ? bufferInfo.len : 'empty'})`);
|
18952
|
-
return;
|
18953
|
-
}
|
18954
|
-
|
18955
|
-
// tick to speed up FRAG_CHANGED triggering
|
18956
|
-
this.tick();
|
18957
|
-
}
|
18958
19795
|
onManifestLoading() {
|
18959
19796
|
// reset buffer on manifest loading
|
18960
19797
|
this.log('Trigger BUFFER_RESET');
|
@@ -19246,8 +20083,10 @@ class StreamController extends BaseStreamController {
|
|
19246
20083
|
}
|
19247
20084
|
if (this.loadedmetadata || !BufferHelper.getBuffered(media).length) {
|
19248
20085
|
// Resolve gaps using the main buffer, whose ranges are the intersections of the A/V sourcebuffers
|
19249
|
-
const activeFrag = this.state !== State.IDLE ? this.fragCurrent : null;
|
19250
|
-
gapController.poll(this.lastCurrentTime, activeFrag);
|
20086
|
+
const state = this.state;
|
20087
|
+
const activeFrag = state !== State.IDLE ? this.fragCurrent : null;
|
20088
|
+
const levelDetails = this.getLevelDetails();
|
20089
|
+
gapController.poll(this.lastCurrentTime, activeFrag, levelDetails, state);
|
19251
20090
|
}
|
19252
20091
|
this.lastCurrentTime = media.currentTime;
|
19253
20092
|
}
|
@@ -19685,7 +20524,7 @@ class Hls {
|
|
19685
20524
|
* Get the video-dev/hls.js package version.
|
19686
20525
|
*/
|
19687
20526
|
static get version() {
|
19688
|
-
return "1.5.
|
20527
|
+
return "1.5.5-0.canary.9977";
|
19689
20528
|
}
|
19690
20529
|
|
19691
20530
|
/**
|
@@ -19748,9 +20587,12 @@ class Hls {
|
|
19748
20587
|
* The configuration object provided on player instantiation.
|
19749
20588
|
*/
|
19750
20589
|
this.userConfig = void 0;
|
20590
|
+
/**
|
20591
|
+
* The logger functions used by this player instance, configured on player instantiation.
|
20592
|
+
*/
|
20593
|
+
this.logger = void 0;
|
19751
20594
|
this.coreComponents = void 0;
|
19752
20595
|
this.networkControllers = void 0;
|
19753
|
-
this.started = false;
|
19754
20596
|
this._emitter = new EventEmitter();
|
19755
20597
|
this._autoLevelCapping = -1;
|
19756
20598
|
this._maxHdcpLevel = null;
|
@@ -19767,11 +20609,11 @@ class Hls {
|
|
19767
20609
|
this._media = null;
|
19768
20610
|
this.url = null;
|
19769
20611
|
this.triggeringException = void 0;
|
19770
|
-
enableLogs(userConfig.debug || false, 'Hls instance');
|
19771
|
-
const config = this.config = mergeConfig(Hls.DefaultConfig, userConfig);
|
20612
|
+
const logger = this.logger = enableLogs(userConfig.debug || false, 'Hls instance');
|
20613
|
+
const config = this.config = mergeConfig(Hls.DefaultConfig, userConfig, logger);
|
19772
20614
|
this.userConfig = userConfig;
|
19773
20615
|
if (config.progressive) {
|
19774
|
-
enableStreamingMode(config);
|
20616
|
+
enableStreamingMode(config, logger);
|
19775
20617
|
}
|
19776
20618
|
|
19777
20619
|
// core controllers and network loaders
|
@@ -19870,7 +20712,7 @@ class Hls {
|
|
19870
20712
|
try {
|
19871
20713
|
return this.emit(event, event, eventObject);
|
19872
20714
|
} catch (error) {
|
19873
|
-
logger.error('An internal error happened while handling event ' + event + '. Error message: "' + error.message + '". Here is a stacktrace:', error);
|
20715
|
+
this.logger.error('An internal error happened while handling event ' + event + '. Error message: "' + error.message + '". Here is a stacktrace:', error);
|
19874
20716
|
// Prevent recursion in error event handlers that throw #5497
|
19875
20717
|
if (!this.triggeringException) {
|
19876
20718
|
this.triggeringException = true;
|
@@ -19896,7 +20738,7 @@ class Hls {
|
|
19896
20738
|
* Dispose of the instance
|
19897
20739
|
*/
|
19898
20740
|
destroy() {
|
19899
|
-
logger.log('destroy');
|
20741
|
+
this.logger.log('destroy');
|
19900
20742
|
this.trigger(Events.DESTROYING, undefined);
|
19901
20743
|
this.detachMedia();
|
19902
20744
|
this.removeAllListeners();
|
@@ -19917,7 +20759,7 @@ class Hls {
|
|
19917
20759
|
* Attaches Hls.js to a media element
|
19918
20760
|
*/
|
19919
20761
|
attachMedia(media) {
|
19920
|
-
logger.log('attachMedia');
|
20762
|
+
this.logger.log('attachMedia');
|
19921
20763
|
this._media = media;
|
19922
20764
|
this.trigger(Events.MEDIA_ATTACHING, {
|
19923
20765
|
media: media
|
@@ -19928,7 +20770,7 @@ class Hls {
|
|
19928
20770
|
* Detach Hls.js from the media
|
19929
20771
|
*/
|
19930
20772
|
detachMedia() {
|
19931
|
-
logger.log('detachMedia');
|
20773
|
+
this.logger.log('detachMedia');
|
19932
20774
|
this.trigger(Events.MEDIA_DETACHING, undefined);
|
19933
20775
|
this._media = null;
|
19934
20776
|
}
|
@@ -19945,7 +20787,7 @@ class Hls {
|
|
19945
20787
|
});
|
19946
20788
|
this._autoLevelCapping = -1;
|
19947
20789
|
this._maxHdcpLevel = null;
|
19948
|
-
logger.log(`loadSource:${loadingSource}`);
|
20790
|
+
this.logger.log(`loadSource:${loadingSource}`);
|
19949
20791
|
if (media && loadedSource && (loadedSource !== loadingSource || this.bufferController.hasSourceTypes())) {
|
19950
20792
|
this.detachMedia();
|
19951
20793
|
this.attachMedia(media);
|
@@ -19964,8 +20806,7 @@ class Hls {
|
|
19964
20806
|
* Defaults to -1 (None: starts from earliest point)
|
19965
20807
|
*/
|
19966
20808
|
startLoad(startPosition = -1) {
|
19967
|
-
logger.log(`startLoad(${startPosition})`);
|
19968
|
-
this.started = true;
|
20809
|
+
this.logger.log(`startLoad(${startPosition})`);
|
19969
20810
|
this.networkControllers.forEach(controller => {
|
19970
20811
|
controller.startLoad(startPosition);
|
19971
20812
|
});
|
@@ -19975,34 +20816,31 @@ class Hls {
|
|
19975
20816
|
* Stop loading of any stream data.
|
19976
20817
|
*/
|
19977
20818
|
stopLoad() {
|
19978
|
-
logger.log('stopLoad');
|
19979
|
-
this.started = false;
|
20819
|
+
this.logger.log('stopLoad');
|
19980
20820
|
this.networkControllers.forEach(controller => {
|
19981
20821
|
controller.stopLoad();
|
19982
20822
|
});
|
19983
20823
|
}
|
19984
20824
|
|
19985
20825
|
/**
|
19986
|
-
* Resumes stream controller segment loading
|
20826
|
+
* Resumes stream controller segment loading after `pauseBuffering` has been called.
|
19987
20827
|
*/
|
19988
20828
|
resumeBuffering() {
|
19989
|
-
if (this.started) {
|
19990
|
-
this.networkControllers.forEach(controller => {
|
19991
|
-
if ('fragmentLoader' in controller) {
|
19992
|
-
controller.startLoad(-1);
|
19993
|
-
}
|
19994
|
-
});
|
19995
|
-
}
|
20829
|
+
this.networkControllers.forEach(controller => {
|
20830
|
+
if (controller.resumeBuffering) {
|
20831
|
+
controller.resumeBuffering();
|
20832
|
+
}
|
20833
|
+
});
|
19996
20834
|
}
|
19997
20835
|
|
19998
20836
|
/**
|
19999
|
-
*
|
20837
|
+
* Prevents stream controller from loading new segments until `resumeBuffering` is called.
|
20000
20838
|
* This allows for media buffering to be paused without interupting playlist loading.
|
20001
20839
|
*/
|
20002
20840
|
pauseBuffering() {
|
20003
20841
|
this.networkControllers.forEach(controller => {
|
20004
|
-
if ('fragmentLoader' in controller) {
|
20005
|
-
controller.stopLoad();
|
20842
|
+
if (controller.pauseBuffering) {
|
20843
|
+
controller.pauseBuffering();
|
20006
20844
|
}
|
20007
20845
|
});
|
20008
20846
|
}
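pauseBuffering() and resumeBuffering() now delegate to any network controller that implements them, rather than special-casing fragment loaders as before. A usage sketch for the public API (the method signatures are unchanged):

    import Hls from 'hls.js';

    const hls = new Hls();
    // Stop fetching new media segments while hidden; playlist loading continues.
    document.addEventListener('visibilitychange', () => {
      if (document.hidden) {
        hls.pauseBuffering();
      } else {
        hls.resumeBuffering();
      }
    });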
|
@@ -20011,7 +20849,7 @@ class Hls {
|
|
20011
20849
|
* Swap through possible audio codecs in the stream (for example to switch from stereo to 5.1)
|
20012
20850
|
*/
|
20013
20851
|
swapAudioCodec() {
|
20014
|
-
logger.log('swapAudioCodec');
|
20852
|
+
this.logger.log('swapAudioCodec');
|
20015
20853
|
this.streamController.swapAudioCodec();
|
20016
20854
|
}
|
20017
20855
|
|
@@ -20022,7 +20860,7 @@ class Hls {
|
|
20022
20860
|
* Automatic recovery of media-errors by this process is configurable.
|
20023
20861
|
*/
|
20024
20862
|
recoverMediaError() {
|
20025
|
-
logger.log('recoverMediaError');
|
20863
|
+
this.logger.log('recoverMediaError');
|
20026
20864
|
const media = this._media;
|
20027
20865
|
this.detachMedia();
|
20028
20866
|
if (media) {
|
@@ -20052,7 +20890,7 @@ class Hls {
|
|
20052
20890
|
* Set quality level index immediately. This will flush the current buffer to replace the quality asap. That means playback will interrupt at least shortly to re-buffer and re-sync eventually. Set to -1 for automatic level selection.
|
20053
20891
|
*/
|
20054
20892
|
set currentLevel(newLevel) {
|
20055
|
-
logger.log(`set currentLevel:${newLevel}`);
|
20893
|
+
this.logger.log(`set currentLevel:${newLevel}`);
|
20056
20894
|
this.levelController.manualLevel = newLevel;
|
20057
20895
|
this.streamController.immediateLevelSwitch();
|
20058
20896
|
}
|
@@ -20071,7 +20909,7 @@ class Hls {
|
|
20071
20909
|
* @param newLevel - Pass -1 for automatic level selection
|
20072
20910
|
*/
|
20073
20911
|
set nextLevel(newLevel) {
|
20074
|
-
logger.log(`set nextLevel:${newLevel}`);
|
20912
|
+
this.logger.log(`set nextLevel:${newLevel}`);
|
20075
20913
|
this.levelController.manualLevel = newLevel;
|
20076
20914
|
this.streamController.nextLevelSwitch();
|
20077
20915
|
}
|
@@ -20090,7 +20928,7 @@ class Hls {
|
|
20090
20928
|
* @param newLevel - Pass -1 for automatic level selection
|
20091
20929
|
*/
|
20092
20930
|
set loadLevel(newLevel) {
|
20093
|
-
logger.log(`set loadLevel:${newLevel}`);
|
20931
|
+
this.logger.log(`set loadLevel:${newLevel}`);
|
20094
20932
|
this.levelController.manualLevel = newLevel;
|
20095
20933
|
}
|
20096
20934
|
|
@@ -20121,7 +20959,7 @@ class Hls {
|
|
20121
20959
|
* Sets "first-level", see getter.
|
20122
20960
|
*/
|
20123
20961
|
set firstLevel(newLevel) {
|
20124
|
-
logger.log(`set firstLevel:${newLevel}`);
|
20962
|
+
this.logger.log(`set firstLevel:${newLevel}`);
|
20125
20963
|
this.levelController.firstLevel = newLevel;
|
20126
20964
|
}
|
20127
20965
|
|
@@ -20146,7 +20984,7 @@ class Hls {
|
|
20146
20984
|
* (determined from download of first segment)
|
20147
20985
|
*/
|
20148
20986
|
set startLevel(newLevel) {
|
20149
|
-
logger.log(`set startLevel:${newLevel}`);
|
20987
|
+
this.logger.log(`set startLevel:${newLevel}`);
|
20150
20988
|
// if not in automatic start level detection, ensure startLevel is greater than minAutoLevel
|
20151
20989
|
if (newLevel !== -1) {
|
20152
20990
|
newLevel = Math.max(newLevel, this.minAutoLevel);
|
@@ -20221,7 +21059,7 @@ class Hls {
|
|
20221
21059
|
*/
|
20222
21060
|
set autoLevelCapping(newLevel) {
|
20223
21061
|
if (this._autoLevelCapping !== newLevel) {
|
20224
|
-
logger.log(`set autoLevelCapping:${newLevel}`);
|
21062
|
+
this.logger.log(`set autoLevelCapping:${newLevel}`);
|
20225
21063
|
this._autoLevelCapping = newLevel;
|
20226
21064
|
this.levelController.checkMaxAutoUpdated();
|
20227
21065
|
}
|