hls.js 1.6.0-beta.2.0.canary.10923 → 1.6.0-beta.2.0.canary.10925

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in their respective public registries.
@@ -402,7 +402,7 @@ function enableLogs(debugConfig, context, id) {
402
402
  // Some browsers don't allow to use bind on console object anyway
403
403
  // fallback to default if needed
404
404
  try {
405
- newLogger.log(`Debug logs enabled for "${context}" in hls.js version ${"1.6.0-beta.2.0.canary.10923"}`);
405
+ newLogger.log(`Debug logs enabled for "${context}" in hls.js version ${"1.6.0-beta.2.0.canary.10925"}`);
406
406
  } catch (e) {
407
407
  /* log fn threw an exception. All logger methods are no-ops. */
408
408
  return createLogger();
@@ -7254,36 +7254,53 @@ class BufferHelper {
7254
7254
  }
7255
7255
  return false;
7256
7256
  }
7257
+ static bufferedRanges(media) {
7258
+ if (media) {
7259
+ const timeRanges = BufferHelper.getBuffered(media);
7260
+ return BufferHelper.timeRangesToArray(timeRanges);
7261
+ }
7262
+ return [];
7263
+ }
7264
+ static timeRangesToArray(timeRanges) {
7265
+ const buffered = [];
7266
+ for (let i = 0; i < timeRanges.length; i++) {
7267
+ buffered.push({
7268
+ start: timeRanges.start(i),
7269
+ end: timeRanges.end(i)
7270
+ });
7271
+ }
7272
+ return buffered;
7273
+ }
7257
7274
  static bufferInfo(media, pos, maxHoleDuration) {
7258
7275
  if (media) {
7259
- const vbuffered = BufferHelper.getBuffered(media);
7260
- if (vbuffered.length) {
7261
- const buffered = [];
7262
- for (let i = 0; i < vbuffered.length; i++) {
7263
- buffered.push({
7264
- start: vbuffered.start(i),
7265
- end: vbuffered.end(i)
7266
- });
7267
- }
7276
+ const buffered = BufferHelper.bufferedRanges(media);
7277
+ if (buffered.length) {
7268
7278
  return BufferHelper.bufferedInfo(buffered, pos, maxHoleDuration);
7269
7279
  }
7270
7280
  }
7271
7281
  return {
7272
7282
  len: 0,
7273
7283
  start: pos,
7274
- end: pos
7284
+ end: pos,
7285
+ bufferedIndex: -1
7275
7286
  };
7276
7287
  }
7277
7288
  static bufferedInfo(buffered, pos, maxHoleDuration) {
7278
7289
  pos = Math.max(0, pos);
7279
7290
  // sort on buffer.start/smaller end (IE does not always return sorted buffered range)
7280
- buffered.sort((a, b) => a.start - b.start || b.end - a.end);
7291
+ if (buffered.length > 1) {
7292
+ buffered.sort((a, b) => a.start - b.start || b.end - a.end);
7293
+ }
7294
+ let bufferedIndex = -1;
7281
7295
  let buffered2 = [];
7282
7296
  if (maxHoleDuration) {
7283
7297
  // there might be some small holes between buffer time range
7284
7298
  // consider that holes smaller than maxHoleDuration are irrelevant and build another
7285
7299
  // buffer time range representations that discards those holes
7286
7300
  for (let i = 0; i < buffered.length; i++) {
7301
+ if (pos >= buffered[i].start && pos <= buffered[i].end) {
7302
+ bufferedIndex = i;
7303
+ }
7287
7304
  const buf2len = buffered2.length;
7288
7305
  if (buf2len) {
7289
7306
  const buf2end = buffered2[buf2len - 1].end;
@@ -7309,24 +7326,25 @@ class BufferHelper {
7309
7326
  buffered2 = buffered;
7310
7327
  }
7311
7328
  let bufferLen = 0;
7329
+ let nextStart;
7312
7330
 
7313
- // bufferStartNext can possibly be undefined based on the conditional logic below
7314
- let bufferStartNext;
7315
-
7316
- // bufferStart and bufferEnd are buffer boundaries around current video position
7331
+ // bufferStart and bufferEnd are buffer boundaries around current playback position (pos)
7317
7332
  let bufferStart = pos;
7318
7333
  let bufferEnd = pos;
7319
7334
  for (let i = 0; i < buffered2.length; i++) {
7320
7335
  const start = buffered2[i].start;
7321
7336
  const end = buffered2[i].end;
7322
7337
  // logger.log('buf start/end:' + buffered.start(i) + '/' + buffered.end(i));
7338
+ if (bufferedIndex === -1 && pos >= start && pos <= end) {
7339
+ bufferedIndex = i;
7340
+ }
7323
7341
  if (pos + maxHoleDuration >= start && pos < end) {
7324
7342
  // play position is inside this buffer TimeRange, retrieve end of buffer position and buffer length
7325
7343
  bufferStart = start;
7326
7344
  bufferEnd = end;
7327
7345
  bufferLen = bufferEnd - pos;
7328
7346
  } else if (pos + maxHoleDuration < start) {
7329
- bufferStartNext = start;
7347
+ nextStart = start;
7330
7348
  break;
7331
7349
  }
7332
7350
  }
@@ -7334,8 +7352,9 @@ class BufferHelper {
7334
7352
  len: bufferLen,
7335
7353
  start: bufferStart || 0,
7336
7354
  end: bufferEnd || 0,
7337
- nextStart: bufferStartNext,
7338
- buffered
7355
+ nextStart,
7356
+ buffered,
7357
+ bufferedIndex
7339
7358
  };
7340
7359
  }
7341
7360
 
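Note on the BufferHelper hunks above: they add BufferHelper.bufferedRanges() and BufferHelper.timeRangesToArray(), and extend the object returned by bufferInfo()/bufferedInfo() with a bufferedIndex field — the index of the buffered range containing pos, or -1 when pos sits in a hole. An illustrative sketch of the new return shape (not part of the diff; BufferHelper is internal to the bundle, and the media element and ranges here are hypothetical):

    // Hypothetical media element with buffered ranges [0, 10] and [12, 20].
    const ranges = BufferHelper.bufferedRanges(video);
    // -> [{ start: 0, end: 10 }, { start: 12, end: 20 }]

    const inside = BufferHelper.bufferedInfo(ranges, 14, 0);
    // inside.bufferedIndex === 1, inside.start === 12, inside.end === 20, inside.len === 6

    const inHole = BufferHelper.bufferedInfo(ranges, 11, 0);
    // inHole.bufferedIndex === -1, inHole.nextStart === 12, inHole.len === 0

The gap-controller introduced later in this diff uses bufferedIndex to tell whether the playhead has crossed from one video buffered range into another.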
@@ -7607,7 +7626,6 @@ class BaseStreamController extends TaskLoop {
7607
7626
  // reset startPosition and lastCurrentTime to restart playback @ stream beginning
7608
7627
  this.log(`setting startPosition to 0 because media ended`);
7609
7628
  this.startPosition = this.lastCurrentTime = 0;
7610
- this.triggerEnded();
7611
7629
  };
7612
7630
  this.playlistType = playlistType;
7613
7631
  this.hls = hls;
@@ -7681,6 +7699,12 @@ class BaseStreamController extends TaskLoop {
7681
7699
  resumeBuffering() {
7682
7700
  this.buffering = true;
7683
7701
  }
7702
+ get inFlightFrag() {
7703
+ return {
7704
+ frag: this.fragCurrent,
7705
+ state: this.state
7706
+ };
7707
+ }
7684
7708
  _streamEnded(bufferInfo, levelDetails) {
7685
7709
  // Stream is never "ended" when playlist is live or media is detached
7686
7710
  if (levelDetails.live || !this.media) {
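Note on the hunk above: it exposes the fragment currently being processed by each stream controller through a new inFlightFrag getter. Further down in this diff, the gap-controller reads these per playlist type via hls.inFlightFragments (in getInFlightDependency()) to avoid skipping a buffer hole while a fragment ahead of the playhead is still loading, parsing, or appending. A sketch of the shapes involved (illustrative only; the hls.inFlightFragments implementation itself is not shown in this diff):

    // Shape returned by the new getter on a stream controller:
    const inFlight = streamController.inFlightFrag;
    // -> { frag: <fragCurrent or null>, state: <controller state string> }

    // getInFlightDependency() (added later in this diff) appears to receive a
    // map of these keyed by playlist type and returns a fragment starting at
    // or before the playhead, unless that controller is idle/stopped/ended:
    const dependency = getInFlightDependency(hls.inFlightFragments, media.currentTime);
    if (dependency) {
      // a fragment covering the seek position is still in flight, so the
      // gap-controller waits rather than jumping the gap
    }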
@@ -7769,9 +7793,6 @@ class BaseStreamController extends TaskLoop {
7769
7793
  this.startFragRequested = false;
7770
7794
  }
7771
7795
  onError(event, data) {}
7772
- triggerEnded() {
7773
- /* overridden in stream-controller */
7774
- }
7775
7796
  onManifestLoaded(event, data) {
7776
7797
  this.startTimeOffset = data.startTimeOffset;
7777
7798
  }
@@ -9575,7 +9596,6 @@ transfer tracks: ${JSON.stringify(transferredTracks, (key, value) => key === 'in
9575
9596
  const sbTrack = transferredTrack != null && transferredTrack.buffer ? transferredTrack : track;
9576
9597
  const sbCodec = (sbTrack == null ? undefined : sbTrack.pendingCodec) || (sbTrack == null ? undefined : sbTrack.codec);
9577
9598
  const trackLevelCodec = sbTrack == null ? undefined : sbTrack.levelCodec;
9578
- const forceChangeType = !sbTrack || !!this.hls.config.assetPlayerId;
9579
9599
  if (!track) {
9580
9600
  track = tracks[trackName] = {
9581
9601
  buffer: undefined,
@@ -9592,7 +9612,7 @@ transfer tracks: ${JSON.stringify(transferredTracks, (key, value) => key === 'in
9592
9612
  const currentCodec = currentCodecFull == null ? undefined : currentCodecFull.replace(VIDEO_CODEC_PROFILE_REPLACE, '$1');
9593
9613
  let trackCodec = pickMostCompleteCodecName(codec, levelCodec);
9594
9614
  const nextCodec = (_trackCodec = trackCodec) == null ? undefined : _trackCodec.replace(VIDEO_CODEC_PROFILE_REPLACE, '$1');
9595
- if (trackCodec && (currentCodec !== nextCodec || forceChangeType)) {
9615
+ if (trackCodec && currentCodecFull && currentCodec !== nextCodec) {
9596
9616
  if (trackName.slice(0, 5) === 'audio') {
9597
9617
  trackCodec = getCodecCompatibleName(trackCodec, this.appendSource);
9598
9618
  }
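Note on the hunk above: it drops the forceChangeType flag, so a SourceBuffer codec change is now only considered when the track already has a codec recorded (currentCodecFull) and the incoming codec differs from it. A minimal sketch of the new guard, with hypothetical codec strings (the profile-stripping applied to currentCodec/nextCodec is only approximated here):

    // Illustrative only; mirrors the condition in the hunk above.
    const shouldChangeType = (trackCodec, currentCodecFull, currentCodec, nextCodec) =>
      !!trackCodec && !!currentCodecFull && currentCodec !== nextCodec;

    // First append, no codec recorded yet: previously forceChangeType (or the
    // undefined !== nextCodec comparison) could trigger changeType; now it cannot.
    shouldChangeType('avc1.640028', undefined, undefined, 'avc1');        // false

    // Switching codecs on a buffer that already has one recorded: still true.
    shouldChangeType('hvc1.1.6.L120.90', 'avc1.640028', 'avc1', 'hvc1');  // true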
@@ -17167,16 +17187,20 @@ const hlsDefaultConfig = _objectSpread2(_objectSpread2({
17167
17187
  frontBufferFlushThreshold: Infinity,
17168
17188
  maxBufferSize: 60 * 1000 * 1000,
17169
17189
  // used by stream-controller
17170
- maxBufferHole: 0.1,
17190
+ maxFragLookUpTolerance: 0.25,
17171
17191
  // used by stream-controller
17192
+ maxBufferHole: 0.1,
17193
+ // used by stream-controller and gap-controller
17194
+ detectStallWithCurrentTimeMs: 1250,
17195
+ // used by gap-controller
17172
17196
  highBufferWatchdogPeriod: 2,
17173
- // used by stream-controller
17197
+ // used by gap-controller
17174
17198
  nudgeOffset: 0.1,
17175
- // used by stream-controller
17199
+ // used by gap-controller
17176
17200
  nudgeMaxRetry: 3,
17177
- // used by stream-controller
17178
- maxFragLookUpTolerance: 0.25,
17179
- // used by stream-controller
17201
+ // used by gap-controller
17202
+ nudgeOnVideoHole: true,
17203
+ // used by gap-controller
17180
17204
  liveSyncDurationCount: 3,
17181
17205
  // used by latency-controller
17182
17206
  liveSyncOnStallIncrease: 1,
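Note on the config hunk above: it regroups the stall-recovery options under the new gap-controller, moves detectStallWithCurrentTimeMs next to them (its old entry is removed in a following hunk), and introduces nudgeOnVideoHole with a default of true. A hedged example of overriding these options; the names and defaults are taken from the hunk above, and overriding them like this is only illustrative:

    const hls = new Hls({
      maxBufferHole: 0.1,                 // stream-controller and gap-controller
      detectStallWithCurrentTimeMs: 1250, // ms without currentTime progress before acting on a stall
      highBufferWatchdogPeriod: 2,        // seconds stalled with buffer ahead before nudging
      nudgeOffset: 0.1,                   // seconds added per nudge attempt
      nudgeMaxRetry: 3,                   // nudges before a fatal BUFFER_STALLED_ERROR
      nudgeOnVideoHole: true,             // new option: seek-nudge across video-only buffer holes
    });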
@@ -17275,7 +17299,6 @@ const hlsDefaultConfig = _objectSpread2(_objectSpread2({
17275
17299
  progressive: false,
17276
17300
  lowLatencyMode: true,
17277
17301
  cmcd: undefined,
17278
- detectStallWithCurrentTimeMs: 1250,
17279
17302
  enableDateRangeMetadataCues: true,
17280
17303
  enableEmsgMetadataCues: true,
17281
17304
  enableEmsgKLVMetadata: false,
@@ -17517,1627 +17540,1798 @@ function enableStreamingMode(config, logger) {
17517
17540
  }
17518
17541
  }
17519
17542
 
17520
- function sendAddTrackEvent(track, videoEl) {
17521
- let event;
17522
- try {
17523
- event = new Event('addtrack');
17524
- } catch (err) {
17525
- // for IE11
17526
- event = document.createEvent('Event');
17527
- event.initEvent('addtrack', false, false);
17528
- }
17529
- event.track = track;
17530
- videoEl.dispatchEvent(event);
17531
- }
17532
- function clearCurrentCues(track, enterHandler) {
17533
- // When track.mode is disabled, track.cues will be null.
17534
- // To guarantee the removal of cues, we need to temporarily
17535
- // change the mode to hidden
17536
- const mode = track.mode;
17537
- if (mode === 'disabled') {
17538
- track.mode = 'hidden';
17539
- }
17540
- if (track.cues) {
17541
- for (let i = track.cues.length; i--;) {
17542
- if (enterHandler) {
17543
- track.cues[i].removeEventListener('enter', enterHandler);
17544
- }
17545
- track.removeCue(track.cues[i]);
17546
- }
17547
- }
17548
- if (mode === 'disabled') {
17549
- track.mode = mode;
17550
- }
17551
- }
17552
- function removeCuesInRange(track, start, end, predicate) {
17553
- const mode = track.mode;
17554
- if (mode === 'disabled') {
17555
- track.mode = 'hidden';
17556
- }
17557
- if (track.cues && track.cues.length > 0) {
17558
- const cues = getCuesInRange(track.cues, start, end);
17559
- for (let i = 0; i < cues.length; i++) {
17560
- if (!predicate || predicate(cues[i])) {
17561
- track.removeCue(cues[i]);
17562
- }
17563
- }
17564
- }
17565
- if (mode === 'disabled') {
17566
- track.mode = mode;
17567
- }
17568
- }
17569
-
17570
- // Find first cue starting after given time.
17571
- // Modified version of binary search O(log(n)).
17572
- function getFirstCueIndexAfterTime(cues, time) {
17573
- // If first cue starts after time, start there
17574
- if (time < cues[0].startTime) {
17575
- return 0;
17576
- }
17577
- // If the last cue ends before time there is no overlap
17578
- const len = cues.length - 1;
17579
- if (time > cues[len].endTime) {
17580
- return -1;
17581
- }
17582
- let left = 0;
17583
- let right = len;
17584
- while (left <= right) {
17585
- const mid = Math.floor((right + left) / 2);
17586
- if (time < cues[mid].startTime) {
17587
- right = mid - 1;
17588
- } else if (time > cues[mid].startTime && left < len) {
17589
- left = mid + 1;
17590
- } else {
17591
- // If it's not lower or higher, it must be equal.
17592
- return mid;
17593
- }
17594
- }
17595
- // At this point, left and right have swapped.
17596
- // No direct match was found, left or right element must be the closest. Check which one has the smallest diff.
17597
- return cues[left].startTime - time < time - cues[right].startTime ? left : right;
17598
- }
17599
- function getCuesInRange(cues, start, end) {
17600
- const cuesFound = [];
17601
- const firstCueInRange = getFirstCueIndexAfterTime(cues, start);
17602
- if (firstCueInRange > -1) {
17603
- for (let i = firstCueInRange, len = cues.length; i < len; i++) {
17604
- const cue = cues[i];
17605
- if (cue.startTime >= start && cue.endTime <= end) {
17606
- cuesFound.push(cue);
17607
- } else if (cue.startTime > end) {
17608
- return cuesFound;
17609
- }
17610
- }
17611
- }
17612
- return cuesFound;
17613
- }
17614
-
17615
- const MIN_CUE_DURATION = 0.25;
17616
- function getCueClass() {
17617
- if (typeof self === 'undefined') return undefined;
17618
- return self.VTTCue || self.TextTrackCue;
17543
+ function addEventListener(el, type, listener) {
17544
+ removeEventListener(el, type, listener);
17545
+ el.addEventListener(type, listener);
17619
17546
  }
17620
- function createCueWithDataFields(Cue, startTime, endTime, data, type) {
17621
- let cue = new Cue(startTime, endTime, '');
17622
- try {
17623
- cue.value = data;
17624
- if (type) {
17625
- cue.type = type;
17626
- }
17627
- } catch (e) {
17628
- cue = new Cue(startTime, endTime, JSON.stringify(type ? _objectSpread2({
17629
- type
17630
- }, data) : data));
17631
- }
17632
- return cue;
17547
+ function removeEventListener(el, type, listener) {
17548
+ el.removeEventListener(type, listener);
17633
17549
  }
17634
17550
 
17635
- // VTTCue latest draft allows an infinite duration, fallback
17636
- // to MAX_VALUE if necessary
17637
- const MAX_CUE_ENDTIME = (() => {
17638
- const Cue = getCueClass();
17639
- try {
17640
- Cue && new Cue(0, Number.POSITIVE_INFINITY, '');
17641
- } catch (e) {
17642
- return Number.MAX_VALUE;
17643
- }
17644
- return Number.POSITIVE_INFINITY;
17645
- })();
17646
- function hexToArrayBuffer(str) {
17647
- return Uint8Array.from(str.replace(/^0x/, '').replace(/([\da-fA-F]{2}) ?/g, '0x$1 ').replace(/ +$/, '').split(' ')).buffer;
17648
- }
17649
- class ID3TrackController {
17650
- constructor(hls) {
17651
- this.hls = undefined;
17652
- this.id3Track = null;
17551
+ const MAX_START_GAP_JUMP = 2.0;
17552
+ const SKIP_BUFFER_HOLE_STEP_SECONDS = 0.1;
17553
+ const SKIP_BUFFER_RANGE_START = 0.05;
17554
+ const TICK_INTERVAL$1 = 100;
17555
+ class GapController extends TaskLoop {
17556
+ constructor(hls, fragmentTracker) {
17557
+ super('gap-controller', hls.logger);
17558
+ this.hls = null;
17559
+ this.fragmentTracker = null;
17653
17560
  this.media = null;
17654
- this.dateRangeCuesAppended = {};
17655
- this.removeCues = true;
17656
- this.onEventCueEnter = () => {
17657
- if (!this.hls) {
17561
+ this.mediaSource = undefined;
17562
+ this.nudgeRetry = 0;
17563
+ this.stallReported = false;
17564
+ this.stalled = null;
17565
+ this.moved = false;
17566
+ this.seeking = false;
17567
+ this.buffered = {};
17568
+ this.lastCurrentTime = 0;
17569
+ this.ended = 0;
17570
+ this.waiting = 0;
17571
+ this.onMediaPlaying = () => {
17572
+ this.ended = 0;
17573
+ this.waiting = 0;
17574
+ };
17575
+ this.onMediaWaiting = () => {
17576
+ var _this$media;
17577
+ if ((_this$media = this.media) != null && _this$media.seeking) {
17658
17578
  return;
17659
17579
  }
17660
- this.hls.trigger(Events.EVENT_CUE_ENTER, {});
17580
+ this.waiting = self.performance.now();
17581
+ this.tick();
17582
+ };
17583
+ this.onMediaEnded = () => {
17584
+ if (this.hls) {
17585
+ var _this$media2;
17586
+ // ended is set when triggering MEDIA_ENDED so that we do not trigger it again on stall or on tick with media.ended
17587
+ this.ended = ((_this$media2 = this.media) == null ? undefined : _this$media2.currentTime) || 1;
17588
+ this.hls.trigger(Events.MEDIA_ENDED, {
17589
+ stalled: false
17590
+ });
17591
+ }
17661
17592
  };
17662
17593
  this.hls = hls;
17663
- this._registerListeners();
17664
- }
17665
- destroy() {
17666
- this._unregisterListeners();
17667
- this.id3Track = null;
17668
- this.media = null;
17669
- this.dateRangeCuesAppended = {};
17670
- // @ts-ignore
17671
- this.hls = this.onEventCueEnter = null;
17594
+ this.fragmentTracker = fragmentTracker;
17595
+ this.registerListeners();
17672
17596
  }
17673
- _registerListeners() {
17597
+ registerListeners() {
17674
17598
  const {
17675
17599
  hls
17676
17600
  } = this;
17677
- hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
17678
- hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
17679
- hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
17680
- hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
17681
- hls.on(Events.FRAG_PARSING_METADATA, this.onFragParsingMetadata, this);
17682
- hls.on(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
17683
- hls.on(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
17684
- hls.on(Events.LEVEL_PTS_UPDATED, this.onLevelPtsUpdated, this);
17601
+ if (hls) {
17602
+ hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
17603
+ hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
17604
+ hls.on(Events.BUFFER_APPENDED, this.onBufferAppended, this);
17605
+ }
17685
17606
  }
17686
- _unregisterListeners() {
17607
+ unregisterListeners() {
17687
17608
  const {
17688
17609
  hls
17689
17610
  } = this;
17690
- hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
17691
- hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
17692
- hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
17693
- hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
17694
- hls.off(Events.FRAG_PARSING_METADATA, this.onFragParsingMetadata, this);
17695
- hls.off(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
17696
- hls.off(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
17697
- hls.off(Events.LEVEL_PTS_UPDATED, this.onLevelPtsUpdated, this);
17698
- }
17699
- // Add ID3 metatadata text track.
17700
- onMediaAttaching(event, data) {
17701
- var _data$overrides;
17702
- this.media = data.media;
17703
- if (((_data$overrides = data.overrides) == null ? undefined : _data$overrides.cueRemoval) === false) {
17704
- this.removeCues = false;
17611
+ if (hls) {
17612
+ hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
17613
+ hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
17614
+ hls.off(Events.BUFFER_APPENDED, this.onBufferAppended, this);
17705
17615
  }
17706
17616
  }
17707
- onMediaAttached() {
17708
- const details = this.hls.latestLevelDetails;
17709
- if (details) {
17710
- this.updateDateRangeCues(details);
17711
- }
17617
+ destroy() {
17618
+ super.destroy();
17619
+ this.unregisterListeners();
17620
+ this.media = this.hls = this.fragmentTracker = null;
17621
+ this.mediaSource = undefined;
17622
+ }
17623
+ onMediaAttached(event, data) {
17624
+ this.setInterval(TICK_INTERVAL$1);
17625
+ this.mediaSource = data.mediaSource;
17626
+ const media = this.media = data.media;
17627
+ addEventListener(media, 'playing', this.onMediaPlaying);
17628
+ addEventListener(media, 'waiting', this.onMediaWaiting);
17629
+ addEventListener(media, 'ended', this.onMediaEnded);
17712
17630
  }
17713
17631
  onMediaDetaching(event, data) {
17714
- this.media = null;
17715
- const transferringMedia = !!data.transferMedia;
17716
- if (transferringMedia) {
17717
- return;
17718
- }
17719
- if (this.id3Track) {
17720
- if (this.removeCues) {
17721
- clearCurrentCues(this.id3Track, this.onEventCueEnter);
17722
- }
17723
- this.id3Track = null;
17632
+ this.clearInterval();
17633
+ const {
17634
+ media
17635
+ } = this;
17636
+ if (media) {
17637
+ removeEventListener(media, 'playing', this.onMediaPlaying);
17638
+ removeEventListener(media, 'waiting', this.onMediaWaiting);
17639
+ removeEventListener(media, 'ended', this.onMediaEnded);
17640
+ this.media = null;
17724
17641
  }
17725
- this.dateRangeCuesAppended = {};
17642
+ this.mediaSource = undefined;
17726
17643
  }
17727
- onManifestLoading() {
17728
- this.dateRangeCuesAppended = {};
17644
+ onBufferAppended(event, data) {
17645
+ this.buffered = data.timeRanges;
17729
17646
  }
17730
- createTrack(media) {
17731
- const track = this.getID3Track(media.textTracks);
17732
- track.mode = 'hidden';
17733
- return track;
17647
+ get hasBuffered() {
17648
+ return Object.keys(this.buffered).length > 0;
17734
17649
  }
17735
- getID3Track(textTracks) {
17736
- if (!this.media) {
17650
+ tick() {
17651
+ var _this$media3;
17652
+ if (!((_this$media3 = this.media) != null && _this$media3.readyState) || !this.hasBuffered) {
17737
17653
  return;
17738
17654
  }
17739
- for (let i = 0; i < textTracks.length; i++) {
17740
- const textTrack = textTracks[i];
17741
- if (textTrack.kind === 'metadata' && textTrack.label === 'id3') {
17742
- // send 'addtrack' when reusing the textTrack for metadata,
17743
- // same as what we do for captions
17744
- sendAddTrackEvent(textTrack, this.media);
17745
- return textTrack;
17746
- }
17747
- }
17748
- return this.media.addTextTrack('metadata', 'id3');
17655
+ const currentTime = this.media.currentTime;
17656
+ this.poll(currentTime, this.lastCurrentTime);
17657
+ this.lastCurrentTime = currentTime;
17749
17658
  }
17750
- onFragParsingMetadata(event, data) {
17751
- if (!this.media) {
17659
+
17660
+ /**
17661
+ * Checks if the playhead is stuck within a gap, and if so, attempts to free it.
17662
+ * A gap is an unbuffered range between two buffered ranges (or the start and the first buffered range).
17663
+ *
17664
+ * @param lastCurrentTime - Previously read playhead position
17665
+ */
17666
+ poll(currentTime, lastCurrentTime) {
17667
+ var _this$hls, _this$hls2;
17668
+ const config = (_this$hls = this.hls) == null ? undefined : _this$hls.config;
17669
+ if (!config) {
17752
17670
  return;
17753
17671
  }
17754
17672
  const {
17755
- hls: {
17756
- config: {
17757
- enableEmsgMetadataCues,
17758
- enableID3MetadataCues
17759
- }
17760
- }
17673
+ media,
17674
+ stalled
17761
17675
  } = this;
17762
- if (!enableEmsgMetadataCues && !enableID3MetadataCues) {
17676
+ if (!media) {
17763
17677
  return;
17764
17678
  }
17765
17679
  const {
17766
- samples
17767
- } = data;
17680
+ seeking
17681
+ } = media;
17682
+ const seeked = this.seeking && !seeking;
17683
+ const beginSeek = !this.seeking && seeking;
17684
+ const pausedEndedOrHalted = media.paused && !seeking || media.ended || media.playbackRate === 0;
17685
+ this.seeking = seeking;
17768
17686
 
17769
- // create track dynamically
17770
- if (!this.id3Track) {
17771
- this.id3Track = this.createTrack(this.media);
17687
+ // The playhead is moving, no-op
17688
+ if (currentTime !== lastCurrentTime) {
17689
+ if (lastCurrentTime) {
17690
+ this.ended = 0;
17691
+ }
17692
+ this.moved = true;
17693
+ if (!seeking) {
17694
+ this.nudgeRetry = 0;
17695
+ // When crossing between buffered video time ranges, but not audio, flush pipeline with seek (Chrome)
17696
+ if (config.nudgeOnVideoHole && !pausedEndedOrHalted && currentTime > lastCurrentTime) {
17697
+ this.nudgeOnVideoHole(currentTime, lastCurrentTime);
17698
+ }
17699
+ }
17700
+ if (this.waiting === 0) {
17701
+ this.stallResolved(currentTime);
17702
+ }
17703
+ return;
17772
17704
  }
17773
- const Cue = getCueClass();
17774
- if (!Cue) {
17705
+
17706
+ // Clear stalled state when beginning or finishing seeking so that we don't report stalls coming out of a seek
17707
+ if (beginSeek || seeked) {
17708
+ if (seeked) {
17709
+ this.stallResolved(currentTime);
17710
+ }
17775
17711
  return;
17776
17712
  }
17777
- for (let i = 0; i < samples.length; i++) {
17778
- const type = samples[i].type;
17779
- if (type === MetadataSchema.emsg && !enableEmsgMetadataCues || !enableID3MetadataCues) {
17780
- continue;
17713
+
17714
+ // The playhead should not be moving
17715
+ if (pausedEndedOrHalted) {
17716
+ this.nudgeRetry = 0;
17717
+ this.stallResolved(currentTime);
17718
+ // Fire MEDIA_ENDED to workaround event not being dispatched by browser
17719
+ if (!this.ended && media.ended && this.hls) {
17720
+ this.ended = currentTime || 1;
17721
+ this.hls.trigger(Events.MEDIA_ENDED, {
17722
+ stalled: false
17723
+ });
17781
17724
  }
17782
- const frames = getId3Frames(samples[i].data);
17783
- if (frames) {
17784
- const startTime = samples[i].pts;
17785
- let endTime = startTime + samples[i].duration;
17786
- if (endTime > MAX_CUE_ENDTIME) {
17787
- endTime = MAX_CUE_ENDTIME;
17788
- }
17789
- const timeDiff = endTime - startTime;
17790
- if (timeDiff <= 0) {
17791
- endTime = startTime + MIN_CUE_DURATION;
17792
- }
17793
- for (let j = 0; j < frames.length; j++) {
17794
- const frame = frames[j];
17795
- // Safari doesn't put the timestamp frame in the TextTrack
17796
- if (!isId3TimestampFrame(frame)) {
17797
- // add a bounds to any unbounded cues
17798
- this.updateId3CueEnds(startTime, type);
17799
- const cue = createCueWithDataFields(Cue, startTime, endTime, frame, type);
17800
- if (cue) {
17801
- this.id3Track.addCue(cue);
17802
- }
17803
- }
17804
- }
17725
+ return;
17726
+ }
17727
+ if (!BufferHelper.getBuffered(media).length) {
17728
+ this.nudgeRetry = 0;
17729
+ return;
17730
+ }
17731
+
17732
+ // Resolve stalls at buffer holes using the main buffer, whose ranges are the intersections of the A/V sourcebuffers
17733
+ const bufferInfo = BufferHelper.bufferInfo(media, currentTime, 0);
17734
+ const nextStart = bufferInfo.nextStart || 0;
17735
+ const fragmentTracker = this.fragmentTracker;
17736
+ if (seeking && fragmentTracker && this.hls) {
17737
+ // Is there a fragment loading/parsing/appending before currentTime?
17738
+ const inFlightDependency = getInFlightDependency(this.hls.inFlightFragments, currentTime);
17739
+
17740
+ // Waiting for seeking in a buffered range to complete
17741
+ const hasEnoughBuffer = bufferInfo.len > MAX_START_GAP_JUMP;
17742
+ // Next buffered range is too far ahead to jump to while still seeking
17743
+ const noBufferHole = !nextStart || inFlightDependency || nextStart - currentTime > MAX_START_GAP_JUMP && !fragmentTracker.getPartialFragment(currentTime);
17744
+ if (hasEnoughBuffer || noBufferHole) {
17745
+ return;
17805
17746
  }
17747
+ // Reset moved state when seeking to a point in or before a gap/hole
17748
+ this.moved = false;
17806
17749
  }
17807
- }
17808
- updateId3CueEnds(startTime, type) {
17809
- var _this$id3Track;
17810
- const cues = (_this$id3Track = this.id3Track) == null ? undefined : _this$id3Track.cues;
17811
- if (cues) {
17812
- for (let i = cues.length; i--;) {
17813
- const cue = cues[i];
17814
- if (cue.type === type && cue.startTime < startTime && cue.endTime === MAX_CUE_ENDTIME) {
17815
- cue.endTime = startTime;
17750
+
17751
+ // Skip start gaps if we haven't played, but the last poll detected the start of a stall
17752
+ // The additional poll gives the browser a chance to jump the gap for us
17753
+ const levelDetails = (_this$hls2 = this.hls) == null ? undefined : _this$hls2.latestLevelDetails;
17754
+ if (!this.moved && this.stalled !== null && fragmentTracker) {
17755
+ // There is no playable buffer (seeked, waiting for buffer)
17756
+ const isBuffered = bufferInfo.len > 0;
17757
+ if (!isBuffered && !nextStart) {
17758
+ return;
17759
+ }
17760
+ // Jump start gaps within jump threshold
17761
+ const startJump = Math.max(nextStart, bufferInfo.start || 0) - currentTime;
17762
+
17763
+ // When joining a live stream with audio tracks, account for live playlist window sliding by allowing
17764
+ // a larger jump over start gaps caused by the audio-stream-controller buffering a start fragment
17765
+ // that begins over 1 target duration after the video start position.
17766
+ const isLive = !!(levelDetails != null && levelDetails.live);
17767
+ const maxStartGapJump = isLive ? levelDetails.targetduration * 2 : MAX_START_GAP_JUMP;
17768
+ const partialOrGap = fragmentTracker.getPartialFragment(currentTime);
17769
+ if (startJump > 0 && (startJump <= maxStartGapJump || partialOrGap)) {
17770
+ if (!media.paused) {
17771
+ this._trySkipBufferHole(partialOrGap);
17816
17772
  }
17773
+ return;
17817
17774
  }
17818
17775
  }
17819
- }
17820
- onBufferFlushing(event, {
17821
- startOffset,
17822
- endOffset,
17823
- type
17824
- }) {
17825
- const {
17826
- id3Track,
17827
- hls
17828
- } = this;
17829
- if (!hls) {
17776
+
17777
+ // Start tracking stall time
17778
+ const detectStallWithCurrentTimeMs = config.detectStallWithCurrentTimeMs;
17779
+ const tnow = self.performance.now();
17780
+ const tWaiting = this.waiting;
17781
+ if (stalled === null) {
17782
+ // Use time of recent "waiting" event
17783
+ if (tWaiting > 0 && tnow - tWaiting < detectStallWithCurrentTimeMs) {
17784
+ this.stalled = tWaiting;
17785
+ } else {
17786
+ this.stalled = tnow;
17787
+ }
17830
17788
  return;
17831
17789
  }
17832
- const {
17833
- config: {
17834
- enableEmsgMetadataCues,
17835
- enableID3MetadataCues
17790
+ const stalledDuration = tnow - stalled;
17791
+ if (!seeking && (stalledDuration >= detectStallWithCurrentTimeMs || tWaiting) && this.hls) {
17792
+ var _this$mediaSource;
17793
+ // Dispatch MEDIA_ENDED when media.ended/ended event is not signalled at end of stream
17794
+ if (((_this$mediaSource = this.mediaSource) == null ? undefined : _this$mediaSource.readyState) === 'ended' && !(levelDetails != null && levelDetails.live) && Math.abs(currentTime - ((levelDetails == null ? undefined : levelDetails.edge) || 0)) < 1) {
17795
+ if (this.ended) {
17796
+ return;
17797
+ }
17798
+ this.ended = currentTime || 1;
17799
+ this.hls.trigger(Events.MEDIA_ENDED, {
17800
+ stalled: true
17801
+ });
17802
+ return;
17836
17803
  }
17837
- } = hls;
17838
- if (id3Track && (enableEmsgMetadataCues || enableID3MetadataCues)) {
17839
- let predicate;
17840
- if (type === 'audio') {
17841
- predicate = cue => cue.type === MetadataSchema.audioId3 && enableID3MetadataCues;
17842
- } else if (type === 'video') {
17843
- predicate = cue => cue.type === MetadataSchema.emsg && enableEmsgMetadataCues;
17844
- } else {
17845
- predicate = cue => cue.type === MetadataSchema.audioId3 && enableID3MetadataCues || cue.type === MetadataSchema.emsg && enableEmsgMetadataCues;
17804
+ // Report stalling after trying to fix
17805
+ this._reportStall(bufferInfo);
17806
+ if (!this.media || !this.hls) {
17807
+ return;
17846
17808
  }
17847
- removeCuesInRange(id3Track, startOffset, endOffset, predicate);
17848
17809
  }
17810
+ const bufferedWithHoles = BufferHelper.bufferInfo(media, currentTime, config.maxBufferHole);
17811
+ this._tryFixBufferStall(bufferedWithHoles, stalledDuration);
17849
17812
  }
17850
- onLevelUpdated(event, {
17851
- details
17852
- }) {
17853
- this.updateDateRangeCues(details, true);
17854
- }
17855
- onLevelPtsUpdated(event, data) {
17856
- if (Math.abs(data.drift) > 0.01) {
17857
- this.updateDateRangeCues(data.details);
17813
+ stallResolved(currentTime) {
17814
+ const stalled = this.stalled;
17815
+ if (stalled && this.hls) {
17816
+ this.stalled = null;
17817
+ // The playhead is now moving, but was previously stalled
17818
+ if (this.stallReported) {
17819
+ const stalledDuration = self.performance.now() - stalled;
17820
+ this.log(`playback not stuck anymore @${currentTime}, after ${Math.round(stalledDuration)}ms`);
17821
+ this.stallReported = false;
17822
+ this.waiting = 0;
17823
+ this.hls.trigger(Events.STALL_RESOLVED, {});
17824
+ }
17858
17825
  }
17859
17826
  }
17860
- updateDateRangeCues(details, removeOldCues) {
17861
- if (!this.media || !details.hasProgramDateTime || !this.hls.config.enableDateRangeMetadataCues) {
17862
- return;
17827
+ nudgeOnVideoHole(currentTime, lastCurrentTime) {
17828
+ var _this$buffered$audio;
17829
+ // Chrome will play one second past a hole in video buffered time ranges without rendering any video from the subsequent range and then stall as long as audio is buffered:
17830
+ // https://github.com/video-dev/hls.js/issues/5631
17831
+ // https://issues.chromium.org/issues/40280613#comment10
17832
+ // Detect the potential for this situation and proactively seek to flush the video pipeline once the playhead passes the start of the video hole.
17833
+ // When there are audio and video buffers and currentTime is past the end of the first video buffered range...
17834
+ const videoSourceBuffered = this.buffered.video;
17835
+ if (this.hls && this.media && this.fragmentTracker && (_this$buffered$audio = this.buffered.audio) != null && _this$buffered$audio.length && videoSourceBuffered && videoSourceBuffered.length > 1 && currentTime > videoSourceBuffered.end(0)) {
17836
+ // and audio is buffered at the playhead
17837
+ const audioBufferInfo = BufferHelper.bufferedInfo(BufferHelper.timeRangesToArray(this.buffered.audio), currentTime, 0);
17838
+ if (audioBufferInfo.len > 1 && lastCurrentTime >= audioBufferInfo.start) {
17839
+ const videoTimes = BufferHelper.timeRangesToArray(videoSourceBuffered);
17840
+ const lastBufferedIndex = BufferHelper.bufferedInfo(videoTimes, lastCurrentTime, 0).bufferedIndex;
17841
+ // nudge when crossing into another video buffered range (hole).
17842
+ if (lastBufferedIndex > -1 && lastBufferedIndex < videoTimes.length - 1) {
17843
+ const bufferedIndex = BufferHelper.bufferedInfo(videoTimes, currentTime, 0).bufferedIndex;
17844
+ const holeStart = videoTimes[lastBufferedIndex].end;
17845
+ const holeEnd = videoTimes[lastBufferedIndex + 1].start;
17846
+ if ((bufferedIndex === -1 || bufferedIndex > lastBufferedIndex) && holeEnd - holeStart < 1 &&
17847
+ // `maxBufferHole` may be too small and setting it to 0 should not disable this feature
17848
+ currentTime - holeStart < 2) {
17849
+ const error = new Error(`nudging playhead to flush pipeline after video hole. currentTime: ${currentTime} hole: ${holeStart} -> ${holeEnd} buffered index: ${bufferedIndex}`);
17850
+ this.warn(error.message);
17851
+ // Magic number to flush the pipeline without interruption to audio playback:
17852
+ this.media.currentTime += 0.000001;
17853
+ const frag = this.fragmentTracker.getPartialFragment(currentTime) || undefined;
17854
+ const bufferInfo = BufferHelper.bufferInfo(this.media, currentTime, 0);
17855
+ this.hls.trigger(Events.ERROR, {
17856
+ type: ErrorTypes.MEDIA_ERROR,
17857
+ details: ErrorDetails.BUFFER_SEEK_OVER_HOLE,
17858
+ fatal: false,
17859
+ error,
17860
+ reason: error.message,
17861
+ frag,
17862
+ buffer: bufferInfo.len,
17863
+ bufferInfo
17864
+ });
17865
+ }
17866
+ }
17867
+ }
17863
17868
  }
17869
+ }
17870
+
17871
+ /**
17872
+ * Detects and attempts to fix known buffer stalling issues.
17873
+ * @param bufferInfo - The properties of the current buffer.
17874
+ * @param stalledDurationMs - The amount of time Hls.js has been stalling for.
17875
+ * @private
17876
+ */
17877
+ _tryFixBufferStall(bufferInfo, stalledDurationMs) {
17878
+ var _this$hls3;
17864
17879
  const {
17865
- id3Track
17880
+ fragmentTracker,
17881
+ media
17866
17882
  } = this;
17867
- const {
17868
- dateRanges
17869
- } = details;
17870
- const ids = Object.keys(dateRanges);
17871
- let dateRangeCuesAppended = this.dateRangeCuesAppended;
17872
- // Remove cues from track not found in details.dateRanges
17873
- if (id3Track && removeOldCues) {
17874
- var _id3Track$cues;
17875
- if ((_id3Track$cues = id3Track.cues) != null && _id3Track$cues.length) {
17876
- const idsToRemove = Object.keys(dateRangeCuesAppended).filter(id => !ids.includes(id));
17877
- for (let i = idsToRemove.length; i--;) {
17878
- const id = idsToRemove[i];
17879
- const cues = dateRangeCuesAppended[id].cues;
17880
- delete dateRangeCuesAppended[id];
17881
- Object.keys(cues).forEach(key => {
17882
- try {
17883
- const cue = cues[key];
17884
- cue.removeEventListener('enter', this.onEventCueEnter);
17885
- id3Track.removeCue(cue);
17886
- } catch (e) {
17887
- /* no-op */
17888
- }
17889
- });
17890
- }
17891
- } else {
17892
- dateRangeCuesAppended = this.dateRangeCuesAppended = {};
17893
- }
17894
- }
17895
- // Exit if the playlist does not have Date Ranges or does not have Program Date Time
17896
- const lastFragment = details.fragments[details.fragments.length - 1];
17897
- if (ids.length === 0 || !isFiniteNumber(lastFragment == null ? undefined : lastFragment.programDateTime)) {
17883
+ const config = (_this$hls3 = this.hls) == null ? undefined : _this$hls3.config;
17884
+ if (!media || !fragmentTracker || !config) {
17898
17885
  return;
17899
17886
  }
17900
- if (!this.id3Track) {
17901
- this.id3Track = this.createTrack(this.media);
17902
- }
17903
- const Cue = getCueClass();
17904
- for (let i = 0; i < ids.length; i++) {
17905
- const id = ids[i];
17906
- const dateRange = dateRanges[id];
17907
- const startTime = dateRange.startTime;
17908
-
17909
- // Process DateRanges to determine end-time (known DURATION, END-DATE, or END-ON-NEXT)
17910
- const appendedDateRangeCues = dateRangeCuesAppended[id];
17911
- const cues = (appendedDateRangeCues == null ? undefined : appendedDateRangeCues.cues) || {};
17912
- let durationKnown = (appendedDateRangeCues == null ? undefined : appendedDateRangeCues.durationKnown) || false;
17913
- let endTime = MAX_CUE_ENDTIME;
17914
- const {
17915
- duration,
17916
- endDate
17917
- } = dateRange;
17918
- if (endDate && duration !== null) {
17919
- endTime = startTime + duration;
17920
- durationKnown = true;
17921
- } else if (dateRange.endOnNext && !durationKnown) {
17922
- const nextDateRangeWithSameClass = ids.reduce((candidateDateRange, id) => {
17923
- if (id !== dateRange.id) {
17924
- const otherDateRange = dateRanges[id];
17925
- if (otherDateRange.class === dateRange.class && otherDateRange.startDate > dateRange.startDate && (!candidateDateRange || dateRange.startDate < candidateDateRange.startDate)) {
17926
- return otherDateRange;
17927
- }
17928
- }
17929
- return candidateDateRange;
17930
- }, null);
17931
- if (nextDateRangeWithSameClass) {
17932
- endTime = nextDateRangeWithSameClass.startTime;
17933
- durationKnown = true;
17934
- }
17935
- }
17936
-
17937
- // Create TextTrack Cues for each MetadataGroup Item (select DateRange attribute)
17938
- // This is to emulate Safari HLS playback handling of DateRange tags
17939
- const attributes = Object.keys(dateRange.attr);
17940
- for (let j = 0; j < attributes.length; j++) {
17941
- const key = attributes[j];
17942
- if (!isDateRangeCueAttribute(key)) {
17943
- continue;
17944
- }
17945
- const cue = cues[key];
17946
- if (cue) {
17947
- if (durationKnown && !appendedDateRangeCues.durationKnown) {
17948
- cue.endTime = endTime;
17949
- } else if (Math.abs(cue.startTime - startTime) > 0.01) {
17950
- cue.startTime = startTime;
17951
- cue.endTime = endTime;
17952
- }
17953
- } else if (Cue) {
17954
- let data = dateRange.attr[key];
17955
- if (isSCTE35Attribute(key)) {
17956
- data = hexToArrayBuffer(data);
17957
- }
17958
- const payload = {
17959
- key,
17960
- data
17961
- };
17962
- const _cue = createCueWithDataFields(Cue, startTime, endTime, payload, MetadataSchema.dateRange);
17963
- if (_cue) {
17964
- _cue.id = id;
17965
- this.id3Track.addCue(_cue);
17966
- cues[key] = _cue;
17967
- }
17968
- }
17887
+ const currentTime = media.currentTime;
17888
+ const partial = fragmentTracker.getPartialFragment(currentTime);
17889
+ if (partial) {
17890
+ // Try to skip over the buffer hole caused by a partial fragment
17891
+ // This method isn't limited by the size of the gap between buffered ranges
17892
+ const targetTime = this._trySkipBufferHole(partial);
17893
+ // we return here in this case, meaning
17894
+ // the branch below only executes when we haven't seeked to a new position
17895
+ if (targetTime || !this.media) {
17896
+ return;
17969
17897
  }
17898
+ }
17970
17899
 
17971
- // Keep track of processed DateRanges by ID for updating cues with new DateRange tag attributes
17972
- dateRangeCuesAppended[id] = {
17973
- cues,
17974
- dateRange,
17975
- durationKnown
17976
- };
17900
+ // if we haven't had to skip over a buffer hole of a partial fragment
17901
+ // we may just have to "nudge" the playlist as the browser decoding/rendering engine
17902
+ // needs to cross some sort of threshold covering all source-buffers content
17903
+ // to start playing properly.
17904
+ const bufferedRanges = bufferInfo.buffered;
17905
+ if ((bufferedRanges && bufferedRanges.length > 1 && bufferInfo.len > config.maxBufferHole || bufferInfo.nextStart && bufferInfo.nextStart - currentTime < config.maxBufferHole) && (stalledDurationMs > config.highBufferWatchdogPeriod * 1000 || this.waiting)) {
17906
+ this.warn('Trying to nudge playhead over buffer-hole');
17907
+ // Try to nudge currentTime over a buffer hole if we've been stalling for the configured amount of seconds
17908
+ // We only try to jump the hole if it's under the configured size
17909
+ this._tryNudgeBuffer(bufferInfo);
17977
17910
  }
17978
17911
  }
17979
- }
17980
17912
 
17981
- class LatencyController {
17982
- constructor(hls) {
17983
- this.hls = undefined;
17984
- this.config = undefined;
17985
- this.media = null;
17986
- this.currentTime = 0;
17987
- this.stallCount = 0;
17988
- this._latency = null;
17989
- this._targetLatencyUpdated = false;
17990
- this.onTimeupdate = () => {
17991
- const {
17992
- media
17993
- } = this;
17994
- const levelDetails = this.hls.latestLevelDetails;
17995
- if (!media || !levelDetails) {
17996
- return;
17997
- }
17998
- this.currentTime = media.currentTime;
17999
- const latency = this.computeLatency();
18000
- if (latency === null) {
18001
- return;
18002
- }
18003
- this._latency = latency;
18004
-
18005
- // Adapt playbackRate to meet target latency in low-latency mode
18006
- const {
18007
- lowLatencyMode,
18008
- maxLiveSyncPlaybackRate
18009
- } = this.config;
18010
- if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1 || !levelDetails.live) {
18011
- return;
18012
- }
18013
- const targetLatency = this.targetLatency;
18014
- if (targetLatency === null) {
18015
- return;
18016
- }
18017
- const distanceFromTarget = latency - targetLatency;
18018
- // Only adjust playbackRate when within one target duration of targetLatency
18019
- // and more than one second from under-buffering.
18020
- // Playback further than one target duration from target can be considered DVR playback.
18021
- const liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration);
18022
- const inLiveRange = distanceFromTarget < liveMinLatencyDuration;
18023
- if (inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) {
18024
- const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
18025
- const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20;
18026
- media.playbackRate = Math.min(max, Math.max(1, rate));
18027
- } else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
18028
- media.playbackRate = 1;
18029
- }
18030
- };
18031
- this.hls = hls;
18032
- this.config = hls.config;
18033
- this.registerListeners();
18034
- }
18035
- get latency() {
18036
- return this._latency || 0;
18037
- }
18038
- get maxLatency() {
18039
- var _this$hls;
17913
+ /**
17914
+ * Triggers a BUFFER_STALLED_ERROR event, but only once per stall period.
17915
+ * @param bufferLen - The playhead distance from the end of the current buffer segment.
17916
+ * @private
17917
+ */
17918
+ _reportStall(bufferInfo) {
18040
17919
  const {
18041
- config
17920
+ hls,
17921
+ media,
17922
+ stallReported,
17923
+ stalled
18042
17924
  } = this;
18043
- if (config.liveMaxLatencyDuration !== undefined) {
18044
- return config.liveMaxLatencyDuration;
17925
+ if (!stallReported && stalled !== null && media && hls) {
17926
+ // Report stalled error once
17927
+ this.stallReported = true;
17928
+ const error = new Error(`Playback stalling at @${media.currentTime} due to low buffer (${JSON.stringify(bufferInfo)})`);
17929
+ this.warn(error.message);
17930
+ hls.trigger(Events.ERROR, {
17931
+ type: ErrorTypes.MEDIA_ERROR,
17932
+ details: ErrorDetails.BUFFER_STALLED_ERROR,
17933
+ fatal: false,
17934
+ error,
17935
+ buffer: bufferInfo.len,
17936
+ bufferInfo,
17937
+ stalled: {
17938
+ start: stalled
17939
+ }
17940
+ });
18045
17941
  }
18046
- const levelDetails = (_this$hls = this.hls) == null ? undefined : _this$hls.latestLevelDetails;
18047
- return levelDetails ? config.liveMaxLatencyDurationCount * levelDetails.targetduration : 0;
18048
17942
  }
18049
- get targetLatency() {
18050
- const levelDetails = this.hls.latestLevelDetails;
18051
- if (levelDetails === null) {
18052
- return null;
18053
- }
18054
- const {
18055
- holdBack,
18056
- partHoldBack,
18057
- targetduration
18058
- } = levelDetails;
17943
+
17944
+ /**
17945
+ * Attempts to fix buffer stalls by jumping over known gaps caused by partial fragments
17946
+ * @param partial - The partial fragment found at the current time (where playback is stalling).
17947
+ * @private
17948
+ */
17949
+ _trySkipBufferHole(partial) {
17950
+ var _this$hls4;
18059
17951
  const {
18060
- liveSyncDuration,
18061
- liveSyncDurationCount,
18062
- lowLatencyMode
18063
- } = this.config;
18064
- const userConfig = this.hls.userConfig;
18065
- let targetLatency = lowLatencyMode ? partHoldBack || holdBack : holdBack;
18066
- if (this._targetLatencyUpdated || userConfig.liveSyncDuration || userConfig.liveSyncDurationCount || targetLatency === 0) {
18067
- targetLatency = liveSyncDuration !== undefined ? liveSyncDuration : liveSyncDurationCount * targetduration;
18068
- }
18069
- const maxLiveSyncOnStallIncrease = targetduration;
18070
- return targetLatency + Math.min(this.stallCount * this.config.liveSyncOnStallIncrease, maxLiveSyncOnStallIncrease);
18071
- }
18072
- set targetLatency(latency) {
18073
- this.stallCount = 0;
18074
- this.config.liveSyncDuration = latency;
18075
- this._targetLatencyUpdated = true;
18076
- }
18077
- get liveSyncPosition() {
18078
- const liveEdge = this.estimateLiveEdge();
18079
- const targetLatency = this.targetLatency;
18080
- if (liveEdge === null || targetLatency === null) {
18081
- return null;
18082
- }
18083
- const levelDetails = this.hls.latestLevelDetails;
18084
- if (levelDetails === null) {
18085
- return null;
18086
- }
18087
- const edge = levelDetails.edge;
18088
- const syncPosition = liveEdge - targetLatency - this.edgeStalled;
18089
- const min = edge - levelDetails.totalduration;
18090
- const max = edge - (this.config.lowLatencyMode && levelDetails.partTarget || levelDetails.targetduration);
18091
- return Math.min(Math.max(min, syncPosition), max);
18092
- }
18093
- get drift() {
18094
- const levelDetails = this.hls.latestLevelDetails;
18095
- if (levelDetails === null) {
18096
- return 1;
18097
- }
18098
- return levelDetails.drift;
18099
- }
18100
- get edgeStalled() {
18101
- const levelDetails = this.hls.latestLevelDetails;
18102
- if (levelDetails === null) {
17952
+ fragmentTracker,
17953
+ media
17954
+ } = this;
17955
+ const config = (_this$hls4 = this.hls) == null ? undefined : _this$hls4.config;
17956
+ if (!media || !fragmentTracker || !config) {
18103
17957
  return 0;
18104
17958
  }
18105
- const maxLevelUpdateAge = (this.config.lowLatencyMode && levelDetails.partTarget || levelDetails.targetduration) * 3;
18106
- return Math.max(levelDetails.age - maxLevelUpdateAge, 0);
17959
+
17960
+ // Check if currentTime is between unbuffered regions of partial fragments
17961
+ const currentTime = media.currentTime;
17962
+ const bufferInfo = BufferHelper.bufferInfo(media, currentTime, 0);
17963
+ const startTime = currentTime < bufferInfo.start ? bufferInfo.start : bufferInfo.nextStart;
17964
+ if (startTime) {
17965
+ const bufferStarved = bufferInfo.len <= config.maxBufferHole;
17966
+ const waiting = bufferInfo.len > 0 && bufferInfo.len < 1 && media.readyState < 3;
17967
+ const gapLength = startTime - currentTime;
17968
+ if (gapLength > 0 && (bufferStarved || waiting)) {
17969
+ // Only allow large gaps to be skipped if it is a start gap, or all fragments in skip range are partial
17970
+ if (gapLength > config.maxBufferHole) {
17971
+ let startGap = false;
17972
+ if (currentTime === 0) {
17973
+ const startFrag = fragmentTracker.getAppendedFrag(0, PlaylistLevelType.MAIN);
17974
+ if (startFrag && startTime < startFrag.end) {
17975
+ startGap = true;
17976
+ }
17977
+ }
17978
+ if (!startGap) {
17979
+ const startProvisioned = partial || fragmentTracker.getAppendedFrag(currentTime, PlaylistLevelType.MAIN);
17980
+ if (startProvisioned) {
17981
+ let moreToLoad = false;
17982
+ let pos = startProvisioned.end;
17983
+ while (pos < startTime) {
17984
+ const provisioned = fragmentTracker.getPartialFragment(pos);
17985
+ if (provisioned) {
17986
+ pos += provisioned.duration;
17987
+ } else {
17988
+ moreToLoad = true;
17989
+ break;
17990
+ }
17991
+ }
17992
+ if (moreToLoad) {
17993
+ return 0;
17994
+ }
17995
+ }
17996
+ }
17997
+ }
17998
+ const targetTime = Math.max(startTime + SKIP_BUFFER_RANGE_START, currentTime + SKIP_BUFFER_HOLE_STEP_SECONDS);
17999
+ this.warn(`skipping hole, adjusting currentTime from ${currentTime} to ${targetTime}`);
18000
+ this.moved = true;
18001
+ media.currentTime = targetTime;
18002
+ if (!(partial != null && partial.gap) && this.hls) {
18003
+ const error = new Error(`fragment loaded with buffer holes, seeking from ${currentTime} to ${targetTime}`);
18004
+ this.hls.trigger(Events.ERROR, {
18005
+ type: ErrorTypes.MEDIA_ERROR,
18006
+ details: ErrorDetails.BUFFER_SEEK_OVER_HOLE,
18007
+ fatal: false,
18008
+ error,
18009
+ reason: error.message,
18010
+ frag: partial || undefined,
18011
+ buffer: bufferInfo.len,
18012
+ bufferInfo
18013
+ });
18014
+ }
18015
+ return targetTime;
18016
+ }
18017
+ }
18018
+ return 0;
18107
18019
  }
18108
- get forwardBufferLength() {
18020
+
18021
+ /**
18022
+ * Attempts to fix buffer stalls by advancing the mediaElement's current time by a small amount.
18023
+ * @private
18024
+ */
18025
+ _tryNudgeBuffer(bufferInfo) {
18109
18026
  const {
18110
- media
18027
+ hls,
18028
+ media,
18029
+ nudgeRetry
18111
18030
  } = this;
18112
- const levelDetails = this.hls.latestLevelDetails;
18113
- if (!media || !levelDetails) {
18031
+ const config = hls == null ? undefined : hls.config;
18032
+ if (!media || !config) {
18114
18033
  return 0;
18115
18034
  }
18116
- const bufferedRanges = media.buffered.length;
18117
- return (bufferedRanges ? media.buffered.end(bufferedRanges - 1) : levelDetails.edge) - this.currentTime;
18035
+ const currentTime = media.currentTime;
18036
+ this.nudgeRetry++;
18037
+ if (nudgeRetry < config.nudgeMaxRetry) {
18038
+ const targetTime = currentTime + (nudgeRetry + 1) * config.nudgeOffset;
18039
+ // playback stalled in buffered area ... let's nudge currentTime to try to overcome this
18040
+ const error = new Error(`Nudging 'currentTime' from ${currentTime} to ${targetTime}`);
18041
+ this.warn(error.message);
18042
+ media.currentTime = targetTime;
18043
+ hls.trigger(Events.ERROR, {
18044
+ type: ErrorTypes.MEDIA_ERROR,
18045
+ details: ErrorDetails.BUFFER_NUDGE_ON_STALL,
18046
+ error,
18047
+ fatal: false,
18048
+ buffer: bufferInfo.len,
18049
+ bufferInfo
18050
+ });
18051
+ } else {
18052
+ const error = new Error(`Playhead still not moving while enough data buffered @${currentTime} after ${config.nudgeMaxRetry} nudges`);
18053
+ this.error(error.message);
18054
+ hls.trigger(Events.ERROR, {
18055
+ type: ErrorTypes.MEDIA_ERROR,
18056
+ details: ErrorDetails.BUFFER_STALLED_ERROR,
18057
+ error,
18058
+ fatal: true,
18059
+ buffer: bufferInfo.len,
18060
+ bufferInfo
18061
+ });
18062
+ }
18118
18063
  }
18119
- destroy() {
18120
- this.unregisterListeners();
18121
- this.onMediaDetaching();
18122
- // @ts-ignore
18123
- this.hls = null;
18064
+ }
18065
+ function getInFlightDependency(inFlightFragments, currentTime) {
18066
+ const main = inFlight(inFlightFragments.main);
18067
+ if (main && main.start <= currentTime) {
18068
+ return main;
18124
18069
  }
18125
- registerListeners() {
18126
- this.hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
18127
- this.hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
18128
- this.hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
18129
- this.hls.on(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
18130
- this.hls.on(Events.ERROR, this.onError, this);
18070
+ const audio = inFlight(inFlightFragments.audio);
18071
+ if (audio && audio.start <= currentTime) {
18072
+ return audio;
18131
18073
  }
18132
- unregisterListeners() {
18133
- this.hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
18134
- this.hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
18135
- this.hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
18136
- this.hls.off(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
18137
- this.hls.off(Events.ERROR, this.onError, this);
18074
+ return null;
18075
+ }
18076
+ function inFlight(inFlightData) {
18077
+ if (!inFlightData) {
18078
+ return null;
18138
18079
  }
18139
- onMediaAttached(event, data) {
18140
- this.media = data.media;
18141
- this.media.addEventListener('timeupdate', this.onTimeupdate);
18080
+ switch (inFlightData.state) {
18081
+ case State.IDLE:
18082
+ case State.STOPPED:
18083
+ case State.ENDED:
18084
+ case State.ERROR:
18085
+ return null;
18142
18086
  }
18143
- onMediaDetaching() {
18144
- if (this.media) {
18145
- this.media.removeEventListener('timeupdate', this.onTimeupdate);
18146
- this.media = null;
18147
- }
18087
+ return inFlightData.frag;
18088
+ }
18089
+
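Note on the GapController added above: it starts tracking a stall from the most recent media 'waiting' event, or from the first poll where currentTime stops advancing, and once it decides playback is stuck it first tries to skip known holes and then escalates to small currentTime nudges. A worked sketch of the escalation in _tryNudgeBuffer() and the hole-skip target in _trySkipBufferHole(), using the default config values (illustrative, not part of the diff):

    const nudgeOffset = 0.1;  // config.nudgeOffset
    const nudgeMaxRetry = 3;  // config.nudgeMaxRetry
    const currentTime = 30.0; // hypothetical stalled playhead position

    for (let nudgeRetry = 0; nudgeRetry < nudgeMaxRetry; nudgeRetry++) {
      // Each attempt seeks a little further past the stall point:
      const targetTime = currentTime + (nudgeRetry + 1) * nudgeOffset;
      console.log(targetTime); // 30.1, 30.2, 30.3 (modulo floating point)
    }
    // Once nudgeMaxRetry attempts are exhausted, the next stall check raises a
    // fatal BUFFER_STALLED_ERROR instead of nudging again.

    // Hole skipping jumps directly past the gap instead of creeping forward:
    //   targetTime = Math.max(startOfNextRange + 0.05, currentTime + 0.1)
    // (SKIP_BUFFER_RANGE_START and SKIP_BUFFER_HOLE_STEP_SECONDS above).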
18090
+ function sendAddTrackEvent(track, videoEl) {
18091
+ let event;
18092
+ try {
18093
+ event = new Event('addtrack');
18094
+ } catch (err) {
18095
+ // for IE11
18096
+ event = document.createEvent('Event');
18097
+ event.initEvent('addtrack', false, false);
18148
18098
  }
18149
- onManifestLoading() {
18150
- this._latency = null;
18151
- this.stallCount = 0;
18099
+ event.track = track;
18100
+ videoEl.dispatchEvent(event);
18101
+ }
18102
+ function clearCurrentCues(track, enterHandler) {
18103
+ // When track.mode is disabled, track.cues will be null.
18104
+ // To guarantee the removal of cues, we need to temporarily
18105
+ // change the mode to hidden
18106
+ const mode = track.mode;
18107
+ if (mode === 'disabled') {
18108
+ track.mode = 'hidden';
18152
18109
  }
18153
- onLevelUpdated(event, {
18154
- details
18155
- }) {
18156
- if (details.advanced) {
18157
- this.onTimeupdate();
18158
- }
18159
- if (!details.live && this.media) {
18160
- this.media.removeEventListener('timeupdate', this.onTimeupdate);
18110
+ if (track.cues) {
18111
+ for (let i = track.cues.length; i--;) {
18112
+ if (enterHandler) {
18113
+ track.cues[i].removeEventListener('enter', enterHandler);
18114
+ }
18115
+ track.removeCue(track.cues[i]);
18161
18116
  }
18162
18117
  }
18163
- onError(event, data) {
18164
- var _this$hls$latestLevel;
18165
- if (data.details !== ErrorDetails.BUFFER_STALLED_ERROR) {
18166
- return;
18118
+ if (mode === 'disabled') {
18119
+ track.mode = mode;
18120
+ }
18121
+ }
18122
+ function removeCuesInRange(track, start, end, predicate) {
18123
+ const mode = track.mode;
18124
+ if (mode === 'disabled') {
18125
+ track.mode = 'hidden';
18126
+ }
18127
+ if (track.cues && track.cues.length > 0) {
18128
+ const cues = getCuesInRange(track.cues, start, end);
18129
+ for (let i = 0; i < cues.length; i++) {
18130
+ if (!predicate || predicate(cues[i])) {
18131
+ track.removeCue(cues[i]);
18132
+ }
18167
18133
  }
18168
- this.stallCount++;
18169
- if ((_this$hls$latestLevel = this.hls.latestLevelDetails) != null && _this$hls$latestLevel.live) {
18170
- this.hls.logger.warn('[latency-controller]: Stall detected, adjusting target latency');
18134
+ }
18135
+ if (mode === 'disabled') {
18136
+ track.mode = mode;
18137
+ }
18138
+ }
18139
+
18140
+ // Find first cue starting after given time.
18141
+ // Modified version of binary search O(log(n)).
18142
+ function getFirstCueIndexAfterTime(cues, time) {
18143
+ // If first cue starts after time, start there
18144
+ if (time < cues[0].startTime) {
18145
+ return 0;
18146
+ }
18147
+ // If the last cue ends before time there is no overlap
18148
+ const len = cues.length - 1;
18149
+ if (time > cues[len].endTime) {
18150
+ return -1;
18151
+ }
18152
+ let left = 0;
18153
+ let right = len;
18154
+ while (left <= right) {
18155
+ const mid = Math.floor((right + left) / 2);
18156
+ if (time < cues[mid].startTime) {
18157
+ right = mid - 1;
18158
+ } else if (time > cues[mid].startTime && left < len) {
18159
+ left = mid + 1;
18160
+ } else {
18161
+ // If it's not lower or higher, it must be equal.
18162
+ return mid;
18171
18163
  }
18172
18164
  }
18173
- estimateLiveEdge() {
18174
- const levelDetails = this.hls.latestLevelDetails;
18175
- if (levelDetails === null) {
18176
- return null;
18165
+ // At this point, left and right have swapped.
18166
+ // No direct match was found, left or right element must be the closest. Check which one has the smallest diff.
18167
+ return cues[left].startTime - time < time - cues[right].startTime ? left : right;
18168
+ }
18169
+ function getCuesInRange(cues, start, end) {
18170
+ const cuesFound = [];
18171
+ const firstCueInRange = getFirstCueIndexAfterTime(cues, start);
18172
+ if (firstCueInRange > -1) {
18173
+ for (let i = firstCueInRange, len = cues.length; i < len; i++) {
18174
+ const cue = cues[i];
18175
+ if (cue.startTime >= start && cue.endTime <= end) {
18176
+ cuesFound.push(cue);
18177
+ } else if (cue.startTime > end) {
18178
+ return cuesFound;
18179
+ }
18177
18180
  }
18178
- return levelDetails.edge + levelDetails.age;
18179
18181
  }
18180
- computeLatency() {
18181
- const liveEdge = this.estimateLiveEdge();
18182
- if (liveEdge === null) {
18183
- return null;
18182
+ return cuesFound;
18183
+ }
18184
+
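The two helpers above assume the cue list is sorted by startTime: getFirstCueIndexAfterTime binary-searches for a starting index and getCuesInRange then walks forward until it passes the end of the window. A simplified standalone variant over plain {startTime, endTime} objects, for illustration only (it skips the closest-match handling of the original):

// Illustrative: collect cues fully contained in [start, end] from a list sorted by startTime.
function cuesInWindow(sortedCues, start, end) {
  // Binary search for the first cue whose startTime is >= start.
  let lo = 0;
  let hi = sortedCues.length;
  while (lo < hi) {
    const mid = (lo + hi) >> 1;
    if (sortedCues[mid].startTime < start) {
      lo = mid + 1;
    } else {
      hi = mid;
    }
  }
  const found = [];
  for (let i = lo; i < sortedCues.length && sortedCues[i].startTime <= end; i++) {
    if (sortedCues[i].endTime <= end) {
      found.push(sortedCues[i]);
    }
  }
  return found;
}
// cuesInWindow([{ startTime: 0, endTime: 1 }, { startTime: 2, endTime: 3 }], 1.5, 3)
//   -> [{ startTime: 2, endTime: 3 }]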
18185
+ const MIN_CUE_DURATION = 0.25;
18186
+ function getCueClass() {
18187
+ if (typeof self === 'undefined') return undefined;
18188
+ return self.VTTCue || self.TextTrackCue;
18189
+ }
18190
+ function createCueWithDataFields(Cue, startTime, endTime, data, type) {
18191
+ let cue = new Cue(startTime, endTime, '');
18192
+ try {
18193
+ cue.value = data;
18194
+ if (type) {
18195
+ cue.type = type;
18184
18196
  }
18185
- return liveEdge - this.currentTime;
18197
+ } catch (e) {
18198
+ cue = new Cue(startTime, endTime, JSON.stringify(type ? _objectSpread2({
18199
+ type
18200
+ }, data) : data));
18186
18201
  }
18202
+ return cue;
18187
18203
  }
18188
18204
 
18189
- class LevelController extends BasePlaylistController {
18190
- constructor(hls, contentSteeringController) {
18191
- super(hls, 'level-controller');
18192
- this._levels = [];
18193
- this._firstLevel = -1;
18194
- this._maxAutoLevel = -1;
18195
- this._startLevel = undefined;
18196
- this.currentLevel = null;
18197
- this.currentLevelIndex = -1;
18198
- this.manualLevelIndex = -1;
18199
- this.steering = undefined;
18200
- this.onParsedComplete = undefined;
18201
- this.steering = contentSteeringController;
18205
+ // VTTCue latest draft allows an infinite duration, fallback
18206
+ // to MAX_VALUE if necessary
18207
+ const MAX_CUE_ENDTIME = (() => {
18208
+ const Cue = getCueClass();
18209
+ try {
18210
+ Cue && new Cue(0, Number.POSITIVE_INFINITY, '');
18211
+ } catch (e) {
18212
+ return Number.MAX_VALUE;
18213
+ }
18214
+ return Number.POSITIVE_INFINITY;
18215
+ })();
18216
+ function hexToArrayBuffer(str) {
18217
+ return Uint8Array.from(str.replace(/^0x/, '').replace(/([\da-fA-F]{2}) ?/g, '0x$1 ').replace(/ +$/, '').split(' ')).buffer;
18218
+ }
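hexToArrayBuffer above turns a hex string (optionally '0x'-prefixed, as found in SCTE-35 DATERANGE attributes) into an ArrayBuffer so the binary payload can be attached to a cue. A quick usage sketch with a made-up input:

// Hypothetical input; any even-length hex string behaves the same way.
const bytes = new Uint8Array(hexToArrayBuffer('0xFC302000'));
console.log(bytes); // Uint8Array [ 252, 48, 32, 0 ]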
18219
+ class ID3TrackController {
18220
+ constructor(hls) {
18221
+ this.hls = undefined;
18222
+ this.id3Track = null;
18223
+ this.media = null;
18224
+ this.dateRangeCuesAppended = {};
18225
+ this.removeCues = true;
18226
+ this.onEventCueEnter = () => {
18227
+ if (!this.hls) {
18228
+ return;
18229
+ }
18230
+ this.hls.trigger(Events.EVENT_CUE_ENTER, {});
18231
+ };
18232
+ this.hls = hls;
18202
18233
  this._registerListeners();
18203
18234
  }
18235
+ destroy() {
18236
+ this._unregisterListeners();
18237
+ this.id3Track = null;
18238
+ this.media = null;
18239
+ this.dateRangeCuesAppended = {};
18240
+ // @ts-ignore
18241
+ this.hls = this.onEventCueEnter = null;
18242
+ }
18204
18243
  _registerListeners() {
18205
18244
  const {
18206
18245
  hls
18207
18246
  } = this;
18247
+ hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
18248
+ hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
18249
+ hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
18208
18250
  hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
18209
- hls.on(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
18210
- hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
18211
- hls.on(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
18212
- hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
18213
- hls.on(Events.ERROR, this.onError, this);
18251
+ hls.on(Events.FRAG_PARSING_METADATA, this.onFragParsingMetadata, this);
18252
+ hls.on(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
18253
+ hls.on(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
18254
+ hls.on(Events.LEVEL_PTS_UPDATED, this.onLevelPtsUpdated, this);
18214
18255
  }
18215
18256
  _unregisterListeners() {
18216
18257
  const {
18217
18258
  hls
18218
18259
  } = this;
18260
+ hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
18261
+ hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
18262
+ hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
18219
18263
  hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
18220
- hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
18221
- hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
18222
- hls.off(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
18223
- hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
18224
- hls.off(Events.ERROR, this.onError, this);
18264
+ hls.off(Events.FRAG_PARSING_METADATA, this.onFragParsingMetadata, this);
18265
+ hls.off(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
18266
+ hls.off(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
18267
+ hls.off(Events.LEVEL_PTS_UPDATED, this.onLevelPtsUpdated, this);
18225
18268
  }
18226
- destroy() {
18227
- this._unregisterListeners();
18228
- this.steering = null;
18229
- this.resetLevels();
18230
- super.destroy();
18269
+ // Add ID3 metadata text track.
18270
+ onMediaAttaching(event, data) {
18271
+ var _data$overrides;
18272
+ this.media = data.media;
18273
+ if (((_data$overrides = data.overrides) == null ? undefined : _data$overrides.cueRemoval) === false) {
18274
+ this.removeCues = false;
18275
+ }
18231
18276
  }
18232
- stopLoad() {
18233
- const levels = this._levels;
18234
-
18235
- // clean up live level details to force reload them, and reset load errors
18236
- levels.forEach(level => {
18237
- level.loadError = 0;
18238
- level.fragmentError = 0;
18239
- });
18240
- super.stopLoad();
18277
+ onMediaAttached() {
18278
+ const details = this.hls.latestLevelDetails;
18279
+ if (details) {
18280
+ this.updateDateRangeCues(details);
18281
+ }
18241
18282
  }
18242
- resetLevels() {
18243
- this._startLevel = undefined;
18244
- this.manualLevelIndex = -1;
18245
- this.currentLevelIndex = -1;
18246
- this.currentLevel = null;
18247
- this._levels = [];
18248
- this._maxAutoLevel = -1;
18283
+ onMediaDetaching(event, data) {
18284
+ this.media = null;
18285
+ const transferringMedia = !!data.transferMedia;
18286
+ if (transferringMedia) {
18287
+ return;
18288
+ }
18289
+ if (this.id3Track) {
18290
+ if (this.removeCues) {
18291
+ clearCurrentCues(this.id3Track, this.onEventCueEnter);
18292
+ }
18293
+ this.id3Track = null;
18294
+ }
18295
+ this.dateRangeCuesAppended = {};
18249
18296
  }
18250
- onManifestLoading(event, data) {
18251
- this.resetLevels();
18297
+ onManifestLoading() {
18298
+ this.dateRangeCuesAppended = {};
18252
18299
  }
18253
- onManifestLoaded(event, data) {
18254
- const preferManagedMediaSource = this.hls.config.preferManagedMediaSource;
18255
- const levels = [];
18256
- const redundantSet = {};
18257
- const generatePathwaySet = {};
18258
- let resolutionFound = false;
18259
- let videoCodecFound = false;
18260
- let audioCodecFound = false;
18261
- data.levels.forEach(levelParsed => {
18262
- var _videoCodec;
18263
- const attributes = levelParsed.attrs;
18264
- let {
18265
- audioCodec,
18266
- videoCodec
18267
- } = levelParsed;
18268
- if (audioCodec) {
18269
- // Returns empty and set to undefined for 'mp4a.40.34' with fallback to 'audio/mpeg' SourceBuffer
18270
- levelParsed.audioCodec = audioCodec = getCodecCompatibleName(audioCodec, preferManagedMediaSource) || undefined;
18300
+ createTrack(media) {
18301
+ const track = this.getID3Track(media.textTracks);
18302
+ track.mode = 'hidden';
18303
+ return track;
18304
+ }
18305
+ getID3Track(textTracks) {
18306
+ if (!this.media) {
18307
+ return;
18308
+ }
18309
+ for (let i = 0; i < textTracks.length; i++) {
18310
+ const textTrack = textTracks[i];
18311
+ if (textTrack.kind === 'metadata' && textTrack.label === 'id3') {
18312
+ // send 'addtrack' when reusing the textTrack for metadata,
18313
+ // same as what we do for captions
18314
+ sendAddTrackEvent(textTrack, this.media);
18315
+ return textTrack;
18271
18316
  }
18272
- if (((_videoCodec = videoCodec) == null ? undefined : _videoCodec.indexOf('avc1')) === 0) {
18273
- videoCodec = levelParsed.videoCodec = convertAVC1ToAVCOTI(videoCodec);
18317
+ }
18318
+ return this.media.addTextTrack('metadata', 'id3');
18319
+ }
18320
+ onFragParsingMetadata(event, data) {
18321
+ if (!this.media) {
18322
+ return;
18323
+ }
18324
+ const {
18325
+ hls: {
18326
+ config: {
18327
+ enableEmsgMetadataCues,
18328
+ enableID3MetadataCues
18329
+ }
18274
18330
  }
18331
+ } = this;
18332
+ if (!enableEmsgMetadataCues && !enableID3MetadataCues) {
18333
+ return;
18334
+ }
18335
+ const {
18336
+ samples
18337
+ } = data;
18275
18338
 
18276
- // only keep levels with supported audio/video codecs
18277
- const {
18278
- width,
18279
- height,
18280
- unknownCodecs
18281
- } = levelParsed;
18282
- let unknownUnsupportedCodecCount = unknownCodecs ? unknownCodecs.length : 0;
18283
- if (unknownCodecs) {
18284
- // Treat unknown codec as audio or video codec based on passing `isTypeSupported` check
18285
- // (allows for playback of any supported codec even if not indexed in utils/codecs)
18286
- for (let i = unknownUnsupportedCodecCount; i--;) {
18287
- const unknownCodec = unknownCodecs[i];
18288
- if (this.isAudioSupported(unknownCodec)) {
18289
- levelParsed.audioCodec = audioCodec = audioCodec ? `${audioCodec},${unknownCodec}` : unknownCodec;
18290
- unknownUnsupportedCodecCount--;
18291
- sampleEntryCodesISO.audio[audioCodec.substring(0, 4)] = 2;
18292
- } else if (this.isVideoSupported(unknownCodec)) {
18293
- levelParsed.videoCodec = videoCodec = videoCodec ? `${videoCodec},${unknownCodec}` : unknownCodec;
18294
- unknownUnsupportedCodecCount--;
18295
- sampleEntryCodesISO.video[videoCodec.substring(0, 4)] = 2;
18339
+ // create track dynamically
18340
+ if (!this.id3Track) {
18341
+ this.id3Track = this.createTrack(this.media);
18342
+ }
18343
+ const Cue = getCueClass();
18344
+ if (!Cue) {
18345
+ return;
18346
+ }
18347
+ for (let i = 0; i < samples.length; i++) {
18348
+ const type = samples[i].type;
18349
+ if (type === MetadataSchema.emsg && !enableEmsgMetadataCues || !enableID3MetadataCues) {
18350
+ continue;
18351
+ }
18352
+ const frames = getId3Frames(samples[i].data);
18353
+ if (frames) {
18354
+ const startTime = samples[i].pts;
18355
+ let endTime = startTime + samples[i].duration;
18356
+ if (endTime > MAX_CUE_ENDTIME) {
18357
+ endTime = MAX_CUE_ENDTIME;
18358
+ }
18359
+ const timeDiff = endTime - startTime;
18360
+ if (timeDiff <= 0) {
18361
+ endTime = startTime + MIN_CUE_DURATION;
18362
+ }
18363
+ for (let j = 0; j < frames.length; j++) {
18364
+ const frame = frames[j];
18365
+ // Safari doesn't put the timestamp frame in the TextTrack
18366
+ if (!isId3TimestampFrame(frame)) {
18367
+ // add a bounds to any unbounded cues
18368
+ this.updateId3CueEnds(startTime, type);
18369
+ const cue = createCueWithDataFields(Cue, startTime, endTime, frame, type);
18370
+ if (cue) {
18371
+ this.id3Track.addCue(cue);
18372
+ }
18296
18373
  }
18297
18374
  }
18298
18375
  }
18299
- resolutionFound || (resolutionFound = !!(width && height));
18300
- videoCodecFound || (videoCodecFound = !!videoCodec);
18301
- audioCodecFound || (audioCodecFound = !!audioCodec);
18302
- if (unknownUnsupportedCodecCount || audioCodec && !this.isAudioSupported(audioCodec) || videoCodec && !this.isVideoSupported(videoCodec)) {
18303
- return;
18376
+ }
18377
+ }
18378
+ updateId3CueEnds(startTime, type) {
18379
+ var _this$id3Track;
18380
+ const cues = (_this$id3Track = this.id3Track) == null ? undefined : _this$id3Track.cues;
18381
+ if (cues) {
18382
+ for (let i = cues.length; i--;) {
18383
+ const cue = cues[i];
18384
+ if (cue.type === type && cue.startTime < startTime && cue.endTime === MAX_CUE_ENDTIME) {
18385
+ cue.endTime = startTime;
18386
+ }
18304
18387
  }
18305
- const {
18306
- CODECS,
18307
- 'FRAME-RATE': FRAMERATE,
18308
- 'HDCP-LEVEL': HDCP,
18309
- 'PATHWAY-ID': PATHWAY,
18310
- RESOLUTION,
18311
- 'VIDEO-RANGE': VIDEO_RANGE
18312
- } = attributes;
18313
- const contentSteeringPrefix = `${PATHWAY || '.'}-`;
18314
- const levelKey = `${contentSteeringPrefix}${levelParsed.bitrate}-${RESOLUTION}-${FRAMERATE}-${CODECS}-${VIDEO_RANGE}-${HDCP}`;
18315
- if (!redundantSet[levelKey]) {
18316
- const level = new Level(levelParsed);
18317
- redundantSet[levelKey] = level;
18318
- generatePathwaySet[levelKey] = 1;
18319
- levels.push(level);
18320
- } else if (redundantSet[levelKey].uri !== levelParsed.url && !levelParsed.attrs['PATHWAY-ID']) {
18321
- // Assign Pathway IDs to Redundant Streams (default Pathways is ".". Redundant Streams "..", "...", and so on.)
18322
- // Content Steering controller to handles Pathway fallback on error
18323
- const pathwayCount = generatePathwaySet[levelKey] += 1;
18324
- levelParsed.attrs['PATHWAY-ID'] = new Array(pathwayCount + 1).join('.');
18325
- const level = new Level(levelParsed);
18326
- redundantSet[levelKey] = level;
18327
- levels.push(level);
18388
+ }
18389
+ }
18390
+ onBufferFlushing(event, {
18391
+ startOffset,
18392
+ endOffset,
18393
+ type
18394
+ }) {
18395
+ const {
18396
+ id3Track,
18397
+ hls
18398
+ } = this;
18399
+ if (!hls) {
18400
+ return;
18401
+ }
18402
+ const {
18403
+ config: {
18404
+ enableEmsgMetadataCues,
18405
+ enableID3MetadataCues
18406
+ }
18407
+ } = hls;
18408
+ if (id3Track && (enableEmsgMetadataCues || enableID3MetadataCues)) {
18409
+ let predicate;
18410
+ if (type === 'audio') {
18411
+ predicate = cue => cue.type === MetadataSchema.audioId3 && enableID3MetadataCues;
18412
+ } else if (type === 'video') {
18413
+ predicate = cue => cue.type === MetadataSchema.emsg && enableEmsgMetadataCues;
18328
18414
  } else {
18329
- redundantSet[levelKey].addGroupId('audio', attributes.AUDIO);
18330
- redundantSet[levelKey].addGroupId('text', attributes.SUBTITLES);
18415
+ predicate = cue => cue.type === MetadataSchema.audioId3 && enableID3MetadataCues || cue.type === MetadataSchema.emsg && enableEmsgMetadataCues;
18331
18416
  }
18332
- });
18333
- this.filterAndSortMediaOptions(levels, data, resolutionFound, videoCodecFound, audioCodecFound);
18417
+ removeCuesInRange(id3Track, startOffset, endOffset, predicate);
18418
+ }
18334
18419
  }
18335
- isAudioSupported(codec) {
18336
- return areCodecsMediaSourceSupported(codec, 'audio', this.hls.config.preferManagedMediaSource);
18420
+ onLevelUpdated(event, {
18421
+ details
18422
+ }) {
18423
+ this.updateDateRangeCues(details, true);
18337
18424
  }
18338
- isVideoSupported(codec) {
18339
- return areCodecsMediaSourceSupported(codec, 'video', this.hls.config.preferManagedMediaSource);
18425
+ onLevelPtsUpdated(event, data) {
18426
+ if (Math.abs(data.drift) > 0.01) {
18427
+ this.updateDateRangeCues(data.details);
18428
+ }
18340
18429
  }
18341
- filterAndSortMediaOptions(filteredLevels, data, resolutionFound, videoCodecFound, audioCodecFound) {
18342
- let audioTracks = [];
18343
- let subtitleTracks = [];
18344
- let levels = filteredLevels;
18345
-
18346
- // remove audio-only and invalid video-range levels if we also have levels with video codecs or RESOLUTION signalled
18347
- if ((resolutionFound || videoCodecFound) && audioCodecFound) {
18348
- levels = levels.filter(({
18349
- videoCodec,
18350
- videoRange,
18351
- width,
18352
- height
18353
- }) => (!!videoCodec || !!(width && height)) && isVideoRange(videoRange));
18430
+ updateDateRangeCues(details, removeOldCues) {
18431
+ if (!this.media || !details.hasProgramDateTime || !this.hls.config.enableDateRangeMetadataCues) {
18432
+ return;
18354
18433
  }
18355
- if (levels.length === 0) {
18356
- // Dispatch error after MANIFEST_LOADED is done propagating
18357
- Promise.resolve().then(() => {
18358
- if (this.hls) {
18359
- if (data.levels.length) {
18360
- this.warn(`One or more CODECS in variant not supported: ${JSON.stringify(data.levels[0].attrs)}`);
18361
- }
18362
- const error = new Error('no level with compatible codecs found in manifest');
18363
- this.hls.trigger(Events.ERROR, {
18364
- type: ErrorTypes.MEDIA_ERROR,
18365
- details: ErrorDetails.MANIFEST_INCOMPATIBLE_CODECS_ERROR,
18366
- fatal: true,
18367
- url: data.url,
18368
- error,
18369
- reason: error.message
18434
+ const {
18435
+ id3Track
18436
+ } = this;
18437
+ const {
18438
+ dateRanges
18439
+ } = details;
18440
+ const ids = Object.keys(dateRanges);
18441
+ let dateRangeCuesAppended = this.dateRangeCuesAppended;
18442
+ // Remove cues from track not found in details.dateRanges
18443
+ if (id3Track && removeOldCues) {
18444
+ var _id3Track$cues;
18445
+ if ((_id3Track$cues = id3Track.cues) != null && _id3Track$cues.length) {
18446
+ const idsToRemove = Object.keys(dateRangeCuesAppended).filter(id => !ids.includes(id));
18447
+ for (let i = idsToRemove.length; i--;) {
18448
+ const id = idsToRemove[i];
18449
+ const cues = dateRangeCuesAppended[id].cues;
18450
+ delete dateRangeCuesAppended[id];
18451
+ Object.keys(cues).forEach(key => {
18452
+ try {
18453
+ const cue = cues[key];
18454
+ cue.removeEventListener('enter', this.onEventCueEnter);
18455
+ id3Track.removeCue(cue);
18456
+ } catch (e) {
18457
+ /* no-op */
18458
+ }
18370
18459
  });
18371
18460
  }
18372
- });
18373
- return;
18461
+ } else {
18462
+ dateRangeCuesAppended = this.dateRangeCuesAppended = {};
18463
+ }
18374
18464
  }
18375
- if (data.audioTracks) {
18376
- audioTracks = data.audioTracks.filter(track => !track.audioCodec || this.isAudioSupported(track.audioCodec));
18377
- // Assign ids after filtering as array indices by group-id
18378
- assignTrackIdsByGroup(audioTracks);
18465
+ // Exit if the playlist does not have Date Ranges or does not have Program Date Time
18466
+ const lastFragment = details.fragments[details.fragments.length - 1];
18467
+ if (ids.length === 0 || !isFiniteNumber(lastFragment == null ? undefined : lastFragment.programDateTime)) {
18468
+ return;
18379
18469
  }
18380
- if (data.subtitles) {
18381
- subtitleTracks = data.subtitles;
18382
- assignTrackIdsByGroup(subtitleTracks);
18470
+ if (!this.id3Track) {
18471
+ this.id3Track = this.createTrack(this.media);
18383
18472
  }
18384
- // start bitrate is the first bitrate of the manifest
18385
- const unsortedLevels = levels.slice(0);
18386
- // sort levels from lowest to highest
18387
- levels.sort((a, b) => {
18388
- if (a.attrs['HDCP-LEVEL'] !== b.attrs['HDCP-LEVEL']) {
18389
- return (a.attrs['HDCP-LEVEL'] || '') > (b.attrs['HDCP-LEVEL'] || '') ? 1 : -1;
18390
- }
18391
- // sort on height before bitrate for cap-level-controller
18392
- if (resolutionFound && a.height !== b.height) {
18393
- return a.height - b.height;
18394
- }
18395
- if (a.frameRate !== b.frameRate) {
18396
- return a.frameRate - b.frameRate;
18397
- }
18398
- if (a.videoRange !== b.videoRange) {
18399
- return VideoRangeValues.indexOf(a.videoRange) - VideoRangeValues.indexOf(b.videoRange);
18400
- }
18401
- if (a.videoCodec !== b.videoCodec) {
18402
- const valueA = videoCodecPreferenceValue(a.videoCodec);
18403
- const valueB = videoCodecPreferenceValue(b.videoCodec);
18404
- if (valueA !== valueB) {
18405
- return valueB - valueA;
18473
+ const Cue = getCueClass();
18474
+ for (let i = 0; i < ids.length; i++) {
18475
+ const id = ids[i];
18476
+ const dateRange = dateRanges[id];
18477
+ const startTime = dateRange.startTime;
18478
+
18479
+ // Process DateRanges to determine end-time (known DURATION, END-DATE, or END-ON-NEXT)
18480
+ const appendedDateRangeCues = dateRangeCuesAppended[id];
18481
+ const cues = (appendedDateRangeCues == null ? undefined : appendedDateRangeCues.cues) || {};
18482
+ let durationKnown = (appendedDateRangeCues == null ? undefined : appendedDateRangeCues.durationKnown) || false;
18483
+ let endTime = MAX_CUE_ENDTIME;
18484
+ const {
18485
+ duration,
18486
+ endDate
18487
+ } = dateRange;
18488
+ if (endDate && duration !== null) {
18489
+ endTime = startTime + duration;
18490
+ durationKnown = true;
18491
+ } else if (dateRange.endOnNext && !durationKnown) {
18492
+ const nextDateRangeWithSameClass = ids.reduce((candidateDateRange, id) => {
18493
+ if (id !== dateRange.id) {
18494
+ const otherDateRange = dateRanges[id];
18495
+ if (otherDateRange.class === dateRange.class && otherDateRange.startDate > dateRange.startDate && (!candidateDateRange || dateRange.startDate < candidateDateRange.startDate)) {
18496
+ return otherDateRange;
18497
+ }
18498
+ }
18499
+ return candidateDateRange;
18500
+ }, null);
18501
+ if (nextDateRangeWithSameClass) {
18502
+ endTime = nextDateRangeWithSameClass.startTime;
18503
+ durationKnown = true;
18406
18504
  }
18407
18505
  }
18408
- if (a.uri === b.uri && a.codecSet !== b.codecSet) {
18409
- const valueA = codecsSetSelectionPreferenceValue(a.codecSet);
18410
- const valueB = codecsSetSelectionPreferenceValue(b.codecSet);
18411
- if (valueA !== valueB) {
18412
- return valueB - valueA;
18506
+
18507
+ // Create TextTrack Cues for each MetadataGroup Item (select DateRange attribute)
18508
+ // This is to emulate Safari HLS playback handling of DateRange tags
18509
+ const attributes = Object.keys(dateRange.attr);
18510
+ for (let j = 0; j < attributes.length; j++) {
18511
+ const key = attributes[j];
18512
+ if (!isDateRangeCueAttribute(key)) {
18513
+ continue;
18413
18514
  }
18414
- }
18415
- if (a.averageBitrate !== b.averageBitrate) {
18416
- return a.averageBitrate - b.averageBitrate;
18417
- }
18418
- return 0;
18419
- });
18420
- let firstLevelInPlaylist = unsortedLevels[0];
18421
- if (this.steering) {
18422
- levels = this.steering.filterParsedLevels(levels);
18423
- if (levels.length !== unsortedLevels.length) {
18424
- for (let i = 0; i < unsortedLevels.length; i++) {
18425
- if (unsortedLevels[i].pathwayId === levels[0].pathwayId) {
18426
- firstLevelInPlaylist = unsortedLevels[i];
18427
- break;
18515
+ const cue = cues[key];
18516
+ if (cue) {
18517
+ if (durationKnown && !appendedDateRangeCues.durationKnown) {
18518
+ cue.endTime = endTime;
18519
+ } else if (Math.abs(cue.startTime - startTime) > 0.01) {
18520
+ cue.startTime = startTime;
18521
+ cue.endTime = endTime;
18522
+ }
18523
+ } else if (Cue) {
18524
+ let data = dateRange.attr[key];
18525
+ if (isSCTE35Attribute(key)) {
18526
+ data = hexToArrayBuffer(data);
18527
+ }
18528
+ const payload = {
18529
+ key,
18530
+ data
18531
+ };
18532
+ const _cue = createCueWithDataFields(Cue, startTime, endTime, payload, MetadataSchema.dateRange);
18533
+ if (_cue) {
18534
+ _cue.id = id;
18535
+ this.id3Track.addCue(_cue);
18536
+ cues[key] = _cue;
18428
18537
  }
18429
18538
  }
18430
18539
  }
18540
+
18541
+ // Keep track of processed DateRanges by ID for updating cues with new DateRange tag attributes
18542
+ dateRangeCuesAppended[id] = {
18543
+ cues,
18544
+ dateRange,
18545
+ durationKnown
18546
+ };
18431
18547
  }
18432
- this._levels = levels;
18548
+ }
18549
+ }
18433
18550
 
18434
- // find index of first level in sorted levels
18435
- for (let i = 0; i < levels.length; i++) {
18436
- if (levels[i] === firstLevelInPlaylist) {
18437
- var _this$hls$userConfig;
18438
- this._firstLevel = i;
18439
- const firstLevelBitrate = firstLevelInPlaylist.bitrate;
18440
- const bandwidthEstimate = this.hls.bandwidthEstimate;
18441
- this.log(`manifest loaded, ${levels.length} level(s) found, first bitrate: ${firstLevelBitrate}`);
18442
- // Update default bwe to first variant bitrate as long it has not been configured or set
18443
- if (((_this$hls$userConfig = this.hls.userConfig) == null ? undefined : _this$hls$userConfig.abrEwmaDefaultEstimate) === undefined) {
18444
- const startingBwEstimate = Math.min(firstLevelBitrate, this.hls.config.abrEwmaDefaultEstimateMax);
18445
- if (startingBwEstimate > bandwidthEstimate && bandwidthEstimate === this.hls.abrEwmaDefaultEstimate) {
18446
- this.hls.bandwidthEstimate = startingBwEstimate;
18447
- }
18448
- }
18449
- break;
18551
+ class LatencyController {
18552
+ constructor(hls) {
18553
+ this.hls = undefined;
18554
+ this.config = undefined;
18555
+ this.media = null;
18556
+ this.currentTime = 0;
18557
+ this.stallCount = 0;
18558
+ this._latency = null;
18559
+ this._targetLatencyUpdated = false;
18560
+ this.onTimeupdate = () => {
18561
+ const {
18562
+ media
18563
+ } = this;
18564
+ const levelDetails = this.hls.latestLevelDetails;
18565
+ if (!media || !levelDetails) {
18566
+ return;
18567
+ }
18568
+ this.currentTime = media.currentTime;
18569
+ const latency = this.computeLatency();
18570
+ if (latency === null) {
18571
+ return;
18450
18572
  }
18451
- }
18573
+ this._latency = latency;
18452
18574
 
18453
- // Audio is only alternate if manifest include a URI along with the audio group tag,
18454
- // and this is not an audio-only stream where levels contain audio-only
18455
- const audioOnly = audioCodecFound && !videoCodecFound;
18456
- const edata = {
18457
- levels,
18458
- audioTracks,
18459
- subtitleTracks,
18460
- sessionData: data.sessionData,
18461
- sessionKeys: data.sessionKeys,
18462
- firstLevel: this._firstLevel,
18463
- stats: data.stats,
18464
- audio: audioCodecFound,
18465
- video: videoCodecFound,
18466
- altAudio: !audioOnly && audioTracks.some(t => !!t.url)
18575
+ // Adapt playbackRate to meet target latency in low-latency mode
18576
+ const {
18577
+ lowLatencyMode,
18578
+ maxLiveSyncPlaybackRate
18579
+ } = this.config;
18580
+ if (!lowLatencyMode || maxLiveSyncPlaybackRate === 1 || !levelDetails.live) {
18581
+ return;
18582
+ }
18583
+ const targetLatency = this.targetLatency;
18584
+ if (targetLatency === null) {
18585
+ return;
18586
+ }
18587
+ const distanceFromTarget = latency - targetLatency;
18588
+ // Only adjust playbackRate when within one target duration of targetLatency
18589
+ // and more than one second from under-buffering.
18590
+ // Playback further than one target duration from target can be considered DVR playback.
18591
+ const liveMinLatencyDuration = Math.min(this.maxLatency, targetLatency + levelDetails.targetduration);
18592
+ const inLiveRange = distanceFromTarget < liveMinLatencyDuration;
18593
+ if (inLiveRange && distanceFromTarget > 0.05 && this.forwardBufferLength > 1) {
18594
+ const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
18595
+ const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget - this.edgeStalled)) * 20) / 20;
18596
+ media.playbackRate = Math.min(max, Math.max(1, rate));
18597
+ } else if (media.playbackRate !== 1 && media.playbackRate !== 0) {
18598
+ media.playbackRate = 1;
18599
+ }
18467
18600
  };
18468
- this.hls.trigger(Events.MANIFEST_PARSED, edata);
18601
+ this.hls = hls;
18602
+ this.config = hls.config;
18603
+ this.registerListeners();
18469
18604
  }
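The playbackRate adjustment inside onTimeupdate above follows a logistic curve of the distance from target latency, quantized to 0.05 steps and clamped between 1 and min(2, maxLiveSyncPlaybackRate). A standalone sketch of that mapping, treating edgeStalled as 0 for simplicity:

// Illustrative: catch-up rate for a given latency overshoot, in seconds.
function catchUpRate(distanceFromTarget, maxLiveSyncPlaybackRate = 1.5) {
  const max = Math.min(2, Math.max(1.0, maxLiveSyncPlaybackRate));
  // Logistic ramp rounded to 0.05 increments (the `* 20) / 20` in the source).
  const rate = Math.round(2 / (1 + Math.exp(-0.75 * distanceFromTarget)) * 20) / 20;
  return Math.min(max, Math.max(1, rate));
}
// catchUpRate(0.5) -> 1.2, catchUpRate(2) -> 1.5 (capped by maxLiveSyncPlaybackRate)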
18470
- get levels() {
18471
- if (this._levels.length === 0) {
18472
- return null;
18473
- }
18474
- return this._levels;
18605
+ get latency() {
18606
+ return this._latency || 0;
18475
18607
  }
18476
- get level() {
18477
- return this.currentLevelIndex;
18608
+ get maxLatency() {
18609
+ var _this$hls;
18610
+ const {
18611
+ config
18612
+ } = this;
18613
+ if (config.liveMaxLatencyDuration !== undefined) {
18614
+ return config.liveMaxLatencyDuration;
18615
+ }
18616
+ const levelDetails = (_this$hls = this.hls) == null ? undefined : _this$hls.latestLevelDetails;
18617
+ return levelDetails ? config.liveMaxLatencyDurationCount * levelDetails.targetduration : 0;
18478
18618
  }
18479
- set level(newLevel) {
18480
- const levels = this._levels;
18481
- if (levels.length === 0) {
18482
- return;
18619
+ get targetLatency() {
18620
+ const levelDetails = this.hls.latestLevelDetails;
18621
+ if (levelDetails === null) {
18622
+ return null;
18483
18623
  }
18484
- // check if level idx is valid
18485
- if (newLevel < 0 || newLevel >= levels.length) {
18486
- // invalid level id given, trigger error
18487
- const error = new Error('invalid level idx');
18488
- const fatal = newLevel < 0;
18489
- this.hls.trigger(Events.ERROR, {
18490
- type: ErrorTypes.OTHER_ERROR,
18491
- details: ErrorDetails.LEVEL_SWITCH_ERROR,
18492
- level: newLevel,
18493
- fatal,
18494
- error,
18495
- reason: error.message
18496
- });
18497
- if (fatal) {
18498
- return;
18499
- }
18500
- newLevel = Math.min(newLevel, levels.length - 1);
18624
+ const {
18625
+ holdBack,
18626
+ partHoldBack,
18627
+ targetduration
18628
+ } = levelDetails;
18629
+ const {
18630
+ liveSyncDuration,
18631
+ liveSyncDurationCount,
18632
+ lowLatencyMode
18633
+ } = this.config;
18634
+ const userConfig = this.hls.userConfig;
18635
+ let targetLatency = lowLatencyMode ? partHoldBack || holdBack : holdBack;
18636
+ if (this._targetLatencyUpdated || userConfig.liveSyncDuration || userConfig.liveSyncDurationCount || targetLatency === 0) {
18637
+ targetLatency = liveSyncDuration !== undefined ? liveSyncDuration : liveSyncDurationCount * targetduration;
18501
18638
  }
18502
- const lastLevelIndex = this.currentLevelIndex;
18503
- const lastLevel = this.currentLevel;
18504
- const lastPathwayId = lastLevel ? lastLevel.attrs['PATHWAY-ID'] : undefined;
18505
- const level = levels[newLevel];
18506
- const pathwayId = level.attrs['PATHWAY-ID'];
18507
- this.currentLevelIndex = newLevel;
18508
- this.currentLevel = level;
18509
- if (lastLevelIndex === newLevel && lastLevel && lastPathwayId === pathwayId) {
18510
- return;
18639
+ const maxLiveSyncOnStallIncrease = targetduration;
18640
+ return targetLatency + Math.min(this.stallCount * this.config.liveSyncOnStallIncrease, maxLiveSyncOnStallIncrease);
18641
+ }
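A worked example of the getter above, with assumed values that are not taken from this diff: suppose the playlist advertises PART-HOLD-BACK=3 and EXT-X-TARGETDURATION=6, lowLatencyMode is on, liveSyncDuration/liveSyncDurationCount were never set by the user, liveSyncOnStallIncrease is 1, and two stalls have been counted:

// Illustrative recomputation only; the names mirror the getter, the numbers are assumptions.
const partHoldBack = 3;            // from the playlist
const targetduration = 6;          // from the playlist
const liveSyncOnStallIncrease = 1; // from config (assumed value)
const stallCount = 2;              // two BUFFER_STALLED_ERRORs observed
const baseTarget = partHoldBack;   // lowLatencyMode prefers partHoldBack || holdBack
const targetLatency = baseTarget + Math.min(stallCount * liveSyncOnStallIncrease, targetduration);
console.log(targetLatency); // 5 seconds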
18642
+ set targetLatency(latency) {
18643
+ this.stallCount = 0;
18644
+ this.config.liveSyncDuration = latency;
18645
+ this._targetLatencyUpdated = true;
18646
+ }
18647
+ get liveSyncPosition() {
18648
+ const liveEdge = this.estimateLiveEdge();
18649
+ const targetLatency = this.targetLatency;
18650
+ if (liveEdge === null || targetLatency === null) {
18651
+ return null;
18511
18652
  }
18512
- this.log(`Switching to level ${newLevel} (${level.height ? level.height + 'p ' : ''}${level.videoRange ? level.videoRange + ' ' : ''}${level.codecSet ? level.codecSet + ' ' : ''}@${level.bitrate})${pathwayId ? ' with Pathway ' + pathwayId : ''} from level ${lastLevelIndex}${lastPathwayId ? ' with Pathway ' + lastPathwayId : ''}`);
18513
- const levelSwitchingData = {
18514
- level: newLevel,
18515
- attrs: level.attrs,
18516
- details: level.details,
18517
- bitrate: level.bitrate,
18518
- averageBitrate: level.averageBitrate,
18519
- maxBitrate: level.maxBitrate,
18520
- realBitrate: level.realBitrate,
18521
- width: level.width,
18522
- height: level.height,
18523
- codecSet: level.codecSet,
18524
- audioCodec: level.audioCodec,
18525
- videoCodec: level.videoCodec,
18526
- audioGroups: level.audioGroups,
18527
- subtitleGroups: level.subtitleGroups,
18528
- loaded: level.loaded,
18529
- loadError: level.loadError,
18530
- fragmentError: level.fragmentError,
18531
- name: level.name,
18532
- id: level.id,
18533
- uri: level.uri,
18534
- url: level.url,
18535
- urlId: 0,
18536
- audioGroupIds: level.audioGroupIds,
18537
- textGroupIds: level.textGroupIds
18538
- };
18539
- this.hls.trigger(Events.LEVEL_SWITCHING, levelSwitchingData);
18540
- // check if we need to load playlist for this level
18541
- const levelDetails = level.details;
18542
- if (!levelDetails || levelDetails.live) {
18543
- // level not retrieved yet, or live playlist we need to (re)load it
18544
- const hlsUrlParameters = this.switchParams(level.uri, lastLevel == null ? undefined : lastLevel.details, levelDetails);
18545
- this.loadPlaylist(hlsUrlParameters);
18653
+ const levelDetails = this.hls.latestLevelDetails;
18654
+ if (levelDetails === null) {
18655
+ return null;
18546
18656
  }
18657
+ const edge = levelDetails.edge;
18658
+ const syncPosition = liveEdge - targetLatency - this.edgeStalled;
18659
+ const min = edge - levelDetails.totalduration;
18660
+ const max = edge - (this.config.lowLatencyMode && levelDetails.partTarget || levelDetails.targetduration);
18661
+ return Math.min(Math.max(min, syncPosition), max);
18547
18662
  }
18548
- get manualLevel() {
18549
- return this.manualLevelIndex;
18663
+ get drift() {
18664
+ const levelDetails = this.hls.latestLevelDetails;
18665
+ if (levelDetails === null) {
18666
+ return 1;
18667
+ }
18668
+ return levelDetails.drift;
18550
18669
  }
18551
- set manualLevel(newLevel) {
18552
- this.manualLevelIndex = newLevel;
18553
- if (this._startLevel === undefined) {
18554
- this._startLevel = newLevel;
18670
+ get edgeStalled() {
18671
+ const levelDetails = this.hls.latestLevelDetails;
18672
+ if (levelDetails === null) {
18673
+ return 0;
18555
18674
  }
18556
- if (newLevel !== -1) {
18557
- this.level = newLevel;
18675
+ const maxLevelUpdateAge = (this.config.lowLatencyMode && levelDetails.partTarget || levelDetails.targetduration) * 3;
18676
+ return Math.max(levelDetails.age - maxLevelUpdateAge, 0);
18677
+ }
18678
+ get forwardBufferLength() {
18679
+ const {
18680
+ media
18681
+ } = this;
18682
+ const levelDetails = this.hls.latestLevelDetails;
18683
+ if (!media || !levelDetails) {
18684
+ return 0;
18558
18685
  }
18686
+ const bufferedRanges = media.buffered.length;
18687
+ return (bufferedRanges ? media.buffered.end(bufferedRanges - 1) : levelDetails.edge) - this.currentTime;
18559
18688
  }
18560
- get firstLevel() {
18561
- return this._firstLevel;
18689
+ destroy() {
18690
+ this.unregisterListeners();
18691
+ this.onMediaDetaching();
18692
+ // @ts-ignore
18693
+ this.hls = null;
18562
18694
  }
18563
- set firstLevel(newLevel) {
18564
- this._firstLevel = newLevel;
18695
+ registerListeners() {
18696
+ this.hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
18697
+ this.hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
18698
+ this.hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
18699
+ this.hls.on(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
18700
+ this.hls.on(Events.ERROR, this.onError, this);
18565
18701
  }
18566
- get startLevel() {
18567
- // Setting hls.startLevel (this._startLevel) overrides config.startLevel
18568
- if (this._startLevel === undefined) {
18569
- const configStartLevel = this.hls.config.startLevel;
18570
- if (configStartLevel !== undefined) {
18571
- return configStartLevel;
18572
- }
18573
- return this.hls.firstAutoLevel;
18574
- }
18575
- return this._startLevel;
18702
+ unregisterListeners() {
18703
+ this.hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
18704
+ this.hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
18705
+ this.hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
18706
+ this.hls.off(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
18707
+ this.hls.off(Events.ERROR, this.onError, this);
18576
18708
  }
18577
- set startLevel(newLevel) {
18578
- this._startLevel = newLevel;
18709
+ onMediaAttached(event, data) {
18710
+ this.media = data.media;
18711
+ this.media.addEventListener('timeupdate', this.onTimeupdate);
18579
18712
  }
18580
- get pathwayPriority() {
18581
- if (this.steering) {
18582
- return this.steering.pathwayPriority;
18713
+ onMediaDetaching() {
18714
+ if (this.media) {
18715
+ this.media.removeEventListener('timeupdate', this.onTimeupdate);
18716
+ this.media = null;
18583
18717
  }
18584
- return null;
18585
18718
  }
18586
- set pathwayPriority(pathwayPriority) {
18587
- if (this.steering) {
18588
- const pathwaysList = this.steering.pathways();
18589
- const filteredPathwayPriority = pathwayPriority.filter(pathwayId => {
18590
- return pathwaysList.indexOf(pathwayId) !== -1;
18591
- });
18592
- if (pathwayPriority.length < 1) {
18593
- this.warn(`pathwayPriority ${pathwayPriority} should contain at least one pathway from list: ${pathwaysList}`);
18594
- return;
18595
- }
18596
- this.steering.pathwayPriority = filteredPathwayPriority;
18719
+ onManifestLoading() {
18720
+ this._latency = null;
18721
+ this.stallCount = 0;
18722
+ }
18723
+ onLevelUpdated(event, {
18724
+ details
18725
+ }) {
18726
+ if (details.advanced) {
18727
+ this.onTimeupdate();
18728
+ }
18729
+ if (!details.live && this.media) {
18730
+ this.media.removeEventListener('timeupdate', this.onTimeupdate);
18597
18731
  }
18598
18732
  }
18599
18733
  onError(event, data) {
18600
- if (data.fatal || !data.context) {
18734
+ var _this$hls$latestLevel;
18735
+ if (data.details !== ErrorDetails.BUFFER_STALLED_ERROR) {
18601
18736
  return;
18602
18737
  }
18603
- if (data.context.type === PlaylistContextType.LEVEL && data.context.level === this.level) {
18604
- this.checkRetry(data);
18738
+ this.stallCount++;
18739
+ if ((_this$hls$latestLevel = this.hls.latestLevelDetails) != null && _this$hls$latestLevel.live) {
18740
+ this.hls.logger.warn('[latency-controller]: Stall detected, adjusting target latency');
18605
18741
  }
18606
18742
  }
18607
-
18608
- // reset errors on the successful load of a fragment
18609
- onFragBuffered(event, {
18610
- frag
18611
- }) {
18612
- if (frag !== undefined && frag.type === PlaylistLevelType.MAIN) {
18613
- const el = frag.elementaryStreams;
18614
- if (!Object.keys(el).some(type => !!el[type])) {
18615
- return;
18616
- }
18617
- const level = this._levels[frag.level];
18618
- if (level != null && level.loadError) {
18619
- this.log(`Resetting level error count of ${level.loadError} on frag buffered`);
18620
- level.loadError = 0;
18621
- }
18743
+ estimateLiveEdge() {
18744
+ const levelDetails = this.hls.latestLevelDetails;
18745
+ if (levelDetails === null) {
18746
+ return null;
18622
18747
  }
18748
+ return levelDetails.edge + levelDetails.age;
18623
18749
  }
18624
- onLevelLoaded(event, data) {
18625
- var _data$deliveryDirecti2;
18750
+ computeLatency() {
18751
+ const liveEdge = this.estimateLiveEdge();
18752
+ if (liveEdge === null) {
18753
+ return null;
18754
+ }
18755
+ return liveEdge - this.currentTime;
18756
+ }
18757
+ }
18758
+
18759
+ class LevelController extends BasePlaylistController {
18760
+ constructor(hls, contentSteeringController) {
18761
+ super(hls, 'level-controller');
18762
+ this._levels = [];
18763
+ this._firstLevel = -1;
18764
+ this._maxAutoLevel = -1;
18765
+ this._startLevel = undefined;
18766
+ this.currentLevel = null;
18767
+ this.currentLevelIndex = -1;
18768
+ this.manualLevelIndex = -1;
18769
+ this.steering = undefined;
18770
+ this.onParsedComplete = undefined;
18771
+ this.steering = contentSteeringController;
18772
+ this._registerListeners();
18773
+ }
18774
+ _registerListeners() {
18626
18775
  const {
18627
- level,
18628
- details
18629
- } = data;
18630
- const curLevel = data.levelInfo;
18631
- if (!curLevel) {
18632
- var _data$deliveryDirecti;
18633
- this.warn(`Invalid level index ${level}`);
18634
- if ((_data$deliveryDirecti = data.deliveryDirectives) != null && _data$deliveryDirecti.skip) {
18635
- details.deltaUpdateFailed = true;
18776
+ hls
18777
+ } = this;
18778
+ hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
18779
+ hls.on(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
18780
+ hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
18781
+ hls.on(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
18782
+ hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
18783
+ hls.on(Events.ERROR, this.onError, this);
18784
+ }
18785
+ _unregisterListeners() {
18786
+ const {
18787
+ hls
18788
+ } = this;
18789
+ hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
18790
+ hls.off(Events.MANIFEST_LOADED, this.onManifestLoaded, this);
18791
+ hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
18792
+ hls.off(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
18793
+ hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
18794
+ hls.off(Events.ERROR, this.onError, this);
18795
+ }
18796
+ destroy() {
18797
+ this._unregisterListeners();
18798
+ this.steering = null;
18799
+ this.resetLevels();
18800
+ super.destroy();
18801
+ }
18802
+ stopLoad() {
18803
+ const levels = this._levels;
18804
+
18805
+ // clean up live level details to force reload them, and reset load errors
18806
+ levels.forEach(level => {
18807
+ level.loadError = 0;
18808
+ level.fragmentError = 0;
18809
+ });
18810
+ super.stopLoad();
18811
+ }
18812
+ resetLevels() {
18813
+ this._startLevel = undefined;
18814
+ this.manualLevelIndex = -1;
18815
+ this.currentLevelIndex = -1;
18816
+ this.currentLevel = null;
18817
+ this._levels = [];
18818
+ this._maxAutoLevel = -1;
18819
+ }
18820
+ onManifestLoading(event, data) {
18821
+ this.resetLevels();
18822
+ }
18823
+ onManifestLoaded(event, data) {
18824
+ const preferManagedMediaSource = this.hls.config.preferManagedMediaSource;
18825
+ const levels = [];
18826
+ const redundantSet = {};
18827
+ const generatePathwaySet = {};
18828
+ let resolutionFound = false;
18829
+ let videoCodecFound = false;
18830
+ let audioCodecFound = false;
18831
+ data.levels.forEach(levelParsed => {
18832
+ var _videoCodec;
18833
+ const attributes = levelParsed.attrs;
18834
+ let {
18835
+ audioCodec,
18836
+ videoCodec
18837
+ } = levelParsed;
18838
+ if (audioCodec) {
18839
+ // Returns empty and set to undefined for 'mp4a.40.34' with fallback to 'audio/mpeg' SourceBuffer
18840
+ levelParsed.audioCodec = audioCodec = getCodecCompatibleName(audioCodec, preferManagedMediaSource) || undefined;
18841
+ }
18842
+ if (((_videoCodec = videoCodec) == null ? undefined : _videoCodec.indexOf('avc1')) === 0) {
18843
+ videoCodec = levelParsed.videoCodec = convertAVC1ToAVCOTI(videoCodec);
18636
18844
  }
18637
- return;
18638
- }
18639
18845
 
18640
- // only process level loaded events matching with expected level or prior to switch when media playlist is loaded directly
18641
- if (curLevel === this.currentLevel || data.withoutMultiVariant) {
18642
- // reset level load error counter on successful level loaded only if there is no issues with fragments
18643
- if (curLevel.fragmentError === 0) {
18644
- curLevel.loadError = 0;
18846
+ // only keep levels with supported audio/video codecs
18847
+ const {
18848
+ width,
18849
+ height,
18850
+ unknownCodecs
18851
+ } = levelParsed;
18852
+ let unknownUnsupportedCodecCount = unknownCodecs ? unknownCodecs.length : 0;
18853
+ if (unknownCodecs) {
18854
+ // Treat unknown codec as audio or video codec based on passing `isTypeSupported` check
18855
+ // (allows for playback of any supported codec even if not indexed in utils/codecs)
18856
+ for (let i = unknownUnsupportedCodecCount; i--;) {
18857
+ const unknownCodec = unknownCodecs[i];
18858
+ if (this.isAudioSupported(unknownCodec)) {
18859
+ levelParsed.audioCodec = audioCodec = audioCodec ? `${audioCodec},${unknownCodec}` : unknownCodec;
18860
+ unknownUnsupportedCodecCount--;
18861
+ sampleEntryCodesISO.audio[audioCodec.substring(0, 4)] = 2;
18862
+ } else if (this.isVideoSupported(unknownCodec)) {
18863
+ levelParsed.videoCodec = videoCodec = videoCodec ? `${videoCodec},${unknownCodec}` : unknownCodec;
18864
+ unknownUnsupportedCodecCount--;
18865
+ sampleEntryCodesISO.video[videoCodec.substring(0, 4)] = 2;
18866
+ }
18867
+ }
18645
18868
  }
18646
- // Ignore matching details populated by loading a Media Playlist directly
18647
- let previousDetails = curLevel.details;
18648
- if (previousDetails === data.details && previousDetails.advanced) {
18649
- previousDetails = undefined;
18869
+ resolutionFound || (resolutionFound = !!(width && height));
18870
+ videoCodecFound || (videoCodecFound = !!videoCodec);
18871
+ audioCodecFound || (audioCodecFound = !!audioCodec);
18872
+ if (unknownUnsupportedCodecCount || audioCodec && !this.isAudioSupported(audioCodec) || videoCodec && !this.isVideoSupported(videoCodec)) {
18873
+ return;
18874
+ }
18875
+ const {
18876
+ CODECS,
18877
+ 'FRAME-RATE': FRAMERATE,
18878
+ 'HDCP-LEVEL': HDCP,
18879
+ 'PATHWAY-ID': PATHWAY,
18880
+ RESOLUTION,
18881
+ 'VIDEO-RANGE': VIDEO_RANGE
18882
+ } = attributes;
18883
+ const contentSteeringPrefix = `${PATHWAY || '.'}-`;
18884
+ const levelKey = `${contentSteeringPrefix}${levelParsed.bitrate}-${RESOLUTION}-${FRAMERATE}-${CODECS}-${VIDEO_RANGE}-${HDCP}`;
18885
+ if (!redundantSet[levelKey]) {
18886
+ const level = new Level(levelParsed);
18887
+ redundantSet[levelKey] = level;
18888
+ generatePathwaySet[levelKey] = 1;
18889
+ levels.push(level);
18890
+ } else if (redundantSet[levelKey].uri !== levelParsed.url && !levelParsed.attrs['PATHWAY-ID']) {
18891
+ // Assign Pathway IDs to Redundant Streams (the default Pathway is "."; Redundant Streams get "..", "...", and so on.)
18892
+ // The Content Steering controller handles Pathway fallback on error
18893
+ const pathwayCount = generatePathwaySet[levelKey] += 1;
18894
+ levelParsed.attrs['PATHWAY-ID'] = new Array(pathwayCount + 1).join('.');
18895
+ const level = new Level(levelParsed);
18896
+ redundantSet[levelKey] = level;
18897
+ levels.push(level);
18898
+ } else {
18899
+ redundantSet[levelKey].addGroupId('audio', attributes.AUDIO);
18900
+ redundantSet[levelKey].addGroupId('text', attributes.SUBTITLES);
18650
18901
  }
18651
- this.playlistLoaded(level, data, previousDetails);
18652
- } else if ((_data$deliveryDirecti2 = data.deliveryDirectives) != null && _data$deliveryDirecti2.skip) {
18653
- // received a delta playlist update that cannot be merged
18654
- details.deltaUpdateFailed = true;
18655
- }
18656
- }
18657
- loadPlaylist(hlsUrlParameters) {
18658
- super.loadPlaylist();
18659
- if (this.shouldLoadPlaylist(this.currentLevel)) {
18660
- this.scheduleLoading(this.currentLevel, hlsUrlParameters);
18661
- }
18662
- }
18663
- loadingPlaylist(currentLevel, hlsUrlParameters) {
18664
- super.loadingPlaylist(currentLevel, hlsUrlParameters);
18665
- const url = this.getUrlWithDirectives(currentLevel.uri, hlsUrlParameters);
18666
- const currentLevelIndex = this.currentLevelIndex;
18667
- const pathwayId = currentLevel.attrs['PATHWAY-ID'];
18668
- const details = currentLevel.details;
18669
- const age = details == null ? undefined : details.age;
18670
- this.log(`Loading level index ${currentLevelIndex}${(hlsUrlParameters == null ? undefined : hlsUrlParameters.msn) !== undefined ? ' at sn ' + hlsUrlParameters.msn + ' part ' + hlsUrlParameters.part : ''}${pathwayId ? ' Pathway ' + pathwayId : ''}${age && details.live ? ' age ' + age.toFixed(1) + (details.type ? ' ' + details.type || '' : '') : ''} ${url}`);
18671
- this.hls.trigger(Events.LEVEL_LOADING, {
18672
- url,
18673
- level: currentLevelIndex,
18674
- levelInfo: currentLevel,
18675
- pathwayId: currentLevel.attrs['PATHWAY-ID'],
18676
- id: 0,
18677
- // Deprecated Level urlId
18678
- deliveryDirectives: hlsUrlParameters || null
18679
18902
  });
18903
+ this.filterAndSortMediaOptions(levels, data, resolutionFound, videoCodecFound, audioCodecFound);
18680
18904
  }
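The levelKey built in onManifestLoaded above groups variants that differ only by URI: the first occurrence keeps its declared Pathway (defaulting to "."), and each redundant copy without a PATHWAY-ID receives one extra dot so the Content Steering controller can fail over between pathways. A compressed sketch of that bookkeeping with illustrative field names (not the real Level objects):

// Illustrative: give '.', '..', '...' pathway ids to redundant copies of a variant.
function assignRedundantPathways(variants) {
  const seen = {}; // levelKey -> number of copies kept so far
  const kept = [];
  for (const v of variants) {
    const key = `${v.pathwayId || '.'}-${v.bitrate}-${v.resolution}-${v.codecs}`;
    if (!seen[key]) {
      seen[key] = 1;
      kept.push(v);
    } else if (!v.pathwayId) {
      seen[key] += 1;
      v.pathwayId = '.'.repeat(seen[key]); // second copy -> '..', third -> '...'
      kept.push(v);
    }
    // The real code additionally checks that the URI differs, and otherwise just
    // merges AUDIO/SUBTITLES group ids into the already-kept level.
  }
  return kept;
}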
18681
- get nextLoadLevel() {
18682
- if (this.manualLevelIndex !== -1) {
18683
- return this.manualLevelIndex;
18684
- } else {
18685
- return this.hls.nextAutoLevel;
18686
- }
18905
+ isAudioSupported(codec) {
18906
+ return areCodecsMediaSourceSupported(codec, 'audio', this.hls.config.preferManagedMediaSource);
18687
18907
  }
18688
- set nextLoadLevel(nextLevel) {
18689
- this.level = nextLevel;
18690
- if (this.manualLevelIndex === -1) {
18691
- this.hls.nextAutoLevel = nextLevel;
18908
+ isVideoSupported(codec) {
18909
+ return areCodecsMediaSourceSupported(codec, 'video', this.hls.config.preferManagedMediaSource);
18910
+ }
18911
+ filterAndSortMediaOptions(filteredLevels, data, resolutionFound, videoCodecFound, audioCodecFound) {
18912
+ let audioTracks = [];
18913
+ let subtitleTracks = [];
18914
+ let levels = filteredLevels;
18915
+
18916
+ // remove audio-only and invalid video-range levels if we also have levels with video codecs or RESOLUTION signalled
18917
+ if ((resolutionFound || videoCodecFound) && audioCodecFound) {
18918
+ levels = levels.filter(({
18919
+ videoCodec,
18920
+ videoRange,
18921
+ width,
18922
+ height
18923
+ }) => (!!videoCodec || !!(width && height)) && isVideoRange(videoRange));
18692
18924
  }
18693
- }
18694
- removeLevel(levelIndex) {
18695
- var _this$currentLevel;
18696
- if (this._levels.length === 1) {
18925
+ if (levels.length === 0) {
18926
+ // Dispatch error after MANIFEST_LOADED is done propagating
18927
+ Promise.resolve().then(() => {
18928
+ if (this.hls) {
18929
+ if (data.levels.length) {
18930
+ this.warn(`One or more CODECS in variant not supported: ${JSON.stringify(data.levels[0].attrs)}`);
18931
+ }
18932
+ const error = new Error('no level with compatible codecs found in manifest');
18933
+ this.hls.trigger(Events.ERROR, {
18934
+ type: ErrorTypes.MEDIA_ERROR,
18935
+ details: ErrorDetails.MANIFEST_INCOMPATIBLE_CODECS_ERROR,
18936
+ fatal: true,
18937
+ url: data.url,
18938
+ error,
18939
+ reason: error.message
18940
+ });
18941
+ }
18942
+ });
18697
18943
  return;
18698
18944
  }
18699
- const levels = this._levels.filter((level, index) => {
18700
- if (index !== levelIndex) {
18701
- return true;
18945
+ if (data.audioTracks) {
18946
+ audioTracks = data.audioTracks.filter(track => !track.audioCodec || this.isAudioSupported(track.audioCodec));
18947
+ // Assign ids after filtering as array indices by group-id
18948
+ assignTrackIdsByGroup(audioTracks);
18949
+ }
18950
+ if (data.subtitles) {
18951
+ subtitleTracks = data.subtitles;
18952
+ assignTrackIdsByGroup(subtitleTracks);
18953
+ }
18954
+ // start bitrate is the first bitrate of the manifest
18955
+ const unsortedLevels = levels.slice(0);
18956
+ // sort levels from lowest to highest
18957
+ levels.sort((a, b) => {
18958
+ if (a.attrs['HDCP-LEVEL'] !== b.attrs['HDCP-LEVEL']) {
18959
+ return (a.attrs['HDCP-LEVEL'] || '') > (b.attrs['HDCP-LEVEL'] || '') ? 1 : -1;
18702
18960
  }
18703
- if (this.steering) {
18704
- this.steering.removeLevel(level);
18961
+ // sort on height before bitrate for cap-level-controller
18962
+ if (resolutionFound && a.height !== b.height) {
18963
+ return a.height - b.height;
18705
18964
  }
18706
- if (level === this.currentLevel) {
18707
- this.currentLevel = null;
18708
- this.currentLevelIndex = -1;
18709
- if (level.details) {
18710
- level.details.fragments.forEach(f => f.level = -1);
18965
+ if (a.frameRate !== b.frameRate) {
18966
+ return a.frameRate - b.frameRate;
18967
+ }
18968
+ if (a.videoRange !== b.videoRange) {
18969
+ return VideoRangeValues.indexOf(a.videoRange) - VideoRangeValues.indexOf(b.videoRange);
18970
+ }
18971
+ if (a.videoCodec !== b.videoCodec) {
18972
+ const valueA = videoCodecPreferenceValue(a.videoCodec);
18973
+ const valueB = videoCodecPreferenceValue(b.videoCodec);
18974
+ if (valueA !== valueB) {
18975
+ return valueB - valueA;
18711
18976
  }
18712
18977
  }
18713
- return false;
18978
+ if (a.uri === b.uri && a.codecSet !== b.codecSet) {
18979
+ const valueA = codecsSetSelectionPreferenceValue(a.codecSet);
18980
+ const valueB = codecsSetSelectionPreferenceValue(b.codecSet);
18981
+ if (valueA !== valueB) {
18982
+ return valueB - valueA;
18983
+ }
18984
+ }
18985
+ if (a.averageBitrate !== b.averageBitrate) {
18986
+ return a.averageBitrate - b.averageBitrate;
18987
+ }
18988
+ return 0;
18714
18989
  });
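The comparator above sorts variants by HDCP level first, then (when RESOLUTION is signalled) by height, then frame rate, video range, codec preference, and finally average bitrate, so cap-level and ABR logic can treat the list as ascending. For example, with these illustrative inputs a 720p variant at a higher bitrate still sorts before a 1080p variant, because height is compared before bitrate:

// Illustrative inputs; only the fields the comparator reads are shown.
const a = { attrs: {}, height: 720,  frameRate: 30, videoRange: 'SDR', videoCodec: 'avc1.64001f', averageBitrate: 3000000 };
const b = { attrs: {}, height: 1080, frameRate: 30, videoRange: 'SDR', videoCodec: 'avc1.640028', averageBitrate: 2500000 };
// With resolutionFound === true, the comparator returns a.height - b.height === -360,
// so `a` (720p @ 3 Mbps) is ordered before `b` (1080p @ 2.5 Mbps).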
18715
- reassignFragmentLevelIndexes(levels);
18716
- this._levels = levels;
18717
- if (this.currentLevelIndex > -1 && (_this$currentLevel = this.currentLevel) != null && _this$currentLevel.details) {
18718
- this.currentLevelIndex = this.currentLevel.details.fragments[0].level;
18719
- }
18720
- if (this.manualLevelIndex > -1) {
18721
- this.manualLevelIndex = this.currentLevelIndex;
18722
- }
18723
- const maxLevel = levels.length - 1;
18724
- this._firstLevel = Math.min(this._firstLevel, maxLevel);
18725
- if (this._startLevel) {
18726
- this._startLevel = Math.min(this._startLevel, maxLevel);
18990
+ let firstLevelInPlaylist = unsortedLevels[0];
18991
+ if (this.steering) {
18992
+ levels = this.steering.filterParsedLevels(levels);
18993
+ if (levels.length !== unsortedLevels.length) {
18994
+ for (let i = 0; i < unsortedLevels.length; i++) {
18995
+ if (unsortedLevels[i].pathwayId === levels[0].pathwayId) {
18996
+ firstLevelInPlaylist = unsortedLevels[i];
18997
+ break;
18998
+ }
18999
+ }
19000
+ }
18727
19001
  }
18728
- this.hls.trigger(Events.LEVELS_UPDATED, {
18729
- levels
18730
- });
18731
- }
18732
- onLevelsUpdated(event, {
18733
- levels
18734
- }) {
18735
19002
  this._levels = levels;
18736
- }
18737
- checkMaxAutoUpdated() {
18738
- const {
18739
- autoLevelCapping,
18740
- maxAutoLevel,
18741
- maxHdcpLevel
18742
- } = this.hls;
18743
- if (this._maxAutoLevel !== maxAutoLevel) {
18744
- this._maxAutoLevel = maxAutoLevel;
18745
- this.hls.trigger(Events.MAX_AUTO_LEVEL_UPDATED, {
18746
- autoLevelCapping,
18747
- levels: this.levels,
18748
- maxAutoLevel,
18749
- minAutoLevel: this.hls.minAutoLevel,
18750
- maxHdcpLevel
18751
- });
19003
+
19004
+ // find index of first level in sorted levels
19005
+ for (let i = 0; i < levels.length; i++) {
19006
+ if (levels[i] === firstLevelInPlaylist) {
19007
+ var _this$hls$userConfig;
19008
+ this._firstLevel = i;
19009
+ const firstLevelBitrate = firstLevelInPlaylist.bitrate;
19010
+ const bandwidthEstimate = this.hls.bandwidthEstimate;
19011
+ this.log(`manifest loaded, ${levels.length} level(s) found, first bitrate: ${firstLevelBitrate}`);
19012
+ // Update default bwe to first variant bitrate as long as it has not been configured or set
19013
+ if (((_this$hls$userConfig = this.hls.userConfig) == null ? undefined : _this$hls$userConfig.abrEwmaDefaultEstimate) === undefined) {
19014
+ const startingBwEstimate = Math.min(firstLevelBitrate, this.hls.config.abrEwmaDefaultEstimateMax);
19015
+ if (startingBwEstimate > bandwidthEstimate && bandwidthEstimate === this.hls.abrEwmaDefaultEstimate) {
19016
+ this.hls.bandwidthEstimate = startingBwEstimate;
19017
+ }
19018
+ }
19019
+ break;
19020
+ }
18752
19021
  }
18753
- }
18754
- }
18755
- function assignTrackIdsByGroup(tracks) {
18756
- const groups = {};
18757
- tracks.forEach(track => {
18758
- const groupId = track.groupId || '';
18759
- track.id = groups[groupId] = groups[groupId] || 0;
18760
- groups[groupId]++;
18761
- });
18762
- }
18763
19022
 
18764
- const MAX_START_GAP_JUMP = 2.0;
18765
- const SKIP_BUFFER_HOLE_STEP_SECONDS = 0.1;
18766
- const SKIP_BUFFER_RANGE_START = 0.05;
18767
- class GapController extends Logger {
18768
- constructor(media, fragmentTracker, hls) {
18769
- super('gap-controller', hls.logger);
18770
- this.media = null;
18771
- this.fragmentTracker = null;
18772
- this.hls = null;
18773
- this.nudgeRetry = 0;
18774
- this.stallReported = false;
18775
- this.stalled = null;
18776
- this.moved = false;
18777
- this.seeking = false;
18778
- this.ended = 0;
18779
- this.waiting = 0;
18780
- this.media = media;
18781
- this.fragmentTracker = fragmentTracker;
18782
- this.hls = hls;
19023
+ // Audio is only alternate if the manifest includes a URI along with the audio group tag,
19024
+ // and this is not an audio-only stream where levels contain audio-only
19025
+ const audioOnly = audioCodecFound && !videoCodecFound;
19026
+ const edata = {
19027
+ levels,
19028
+ audioTracks,
19029
+ subtitleTracks,
19030
+ sessionData: data.sessionData,
19031
+ sessionKeys: data.sessionKeys,
19032
+ firstLevel: this._firstLevel,
19033
+ stats: data.stats,
19034
+ audio: audioCodecFound,
19035
+ video: videoCodecFound,
19036
+ altAudio: !audioOnly && audioTracks.some(t => !!t.url)
19037
+ };
19038
+ this.hls.trigger(Events.MANIFEST_PARSED, edata);
18783
19039
  }
18784
- destroy() {
18785
- this.media = this.hls = this.fragmentTracker = null;
19040
+ get levels() {
19041
+ if (this._levels.length === 0) {
19042
+ return null;
19043
+ }
19044
+ return this._levels;
18786
19045
  }
18787
-
18788
- /**
18789
- * Checks if the playhead is stuck within a gap, and if so, attempts to free it.
18790
- * A gap is an unbuffered range between two buffered ranges (or the start and the first buffered range).
18791
- *
18792
- * @param lastCurrentTime - Previously read playhead position
18793
- */
18794
- poll(lastCurrentTime, activeFrag, levelDetails, state) {
18795
- var _this$hls;
18796
- const {
18797
- media,
18798
- stalled
18799
- } = this;
18800
- if (!media) {
19046
+ get level() {
19047
+ return this.currentLevelIndex;
19048
+ }
19049
+ set level(newLevel) {
19050
+ const levels = this._levels;
19051
+ if (levels.length === 0) {
18801
19052
  return;
18802
19053
  }
18803
- const {
18804
- currentTime,
18805
- seeking
18806
- } = media;
18807
- const seeked = this.seeking && !seeking;
18808
- const beginSeek = !this.seeking && seeking;
18809
- this.seeking = seeking;
18810
-
18811
- // The playhead is moving, no-op
18812
- if (currentTime !== lastCurrentTime) {
18813
- if (lastCurrentTime) {
18814
- this.ended = 0;
18815
- }
18816
- this.moved = true;
18817
- if (!seeking) {
18818
- this.nudgeRetry = 0;
18819
- }
18820
- if (this.waiting === 0) {
18821
- this.stallResolved(currentTime);
19054
+ // check if level idx is valid
19055
+ if (newLevel < 0 || newLevel >= levels.length) {
19056
+ // invalid level id given, trigger error
19057
+ const error = new Error('invalid level idx');
19058
+ const fatal = newLevel < 0;
19059
+ this.hls.trigger(Events.ERROR, {
19060
+ type: ErrorTypes.OTHER_ERROR,
19061
+ details: ErrorDetails.LEVEL_SWITCH_ERROR,
19062
+ level: newLevel,
19063
+ fatal,
19064
+ error,
19065
+ reason: error.message
19066
+ });
19067
+ if (fatal) {
19068
+ return;
18822
19069
  }
19070
+ newLevel = Math.min(newLevel, levels.length - 1);
19071
+ }
19072
+ const lastLevelIndex = this.currentLevelIndex;
19073
+ const lastLevel = this.currentLevel;
19074
+ const lastPathwayId = lastLevel ? lastLevel.attrs['PATHWAY-ID'] : undefined;
19075
+ const level = levels[newLevel];
19076
+ const pathwayId = level.attrs['PATHWAY-ID'];
19077
+ this.currentLevelIndex = newLevel;
19078
+ this.currentLevel = level;
19079
+ if (lastLevelIndex === newLevel && lastLevel && lastPathwayId === pathwayId) {
18823
19080
  return;
18824
19081
  }
18825
-
18826
- // Clear stalled state when beginning or finishing seeking so that we don't report stalls coming out of a seek
18827
- if (beginSeek || seeked) {
18828
- if (seeked) {
18829
- this.stallResolved(currentTime);
18830
- }
18831
- return;
19082
+ this.log(`Switching to level ${newLevel} (${level.height ? level.height + 'p ' : ''}${level.videoRange ? level.videoRange + ' ' : ''}${level.codecSet ? level.codecSet + ' ' : ''}@${level.bitrate})${pathwayId ? ' with Pathway ' + pathwayId : ''} from level ${lastLevelIndex}${lastPathwayId ? ' with Pathway ' + lastPathwayId : ''}`);
19083
+ const levelSwitchingData = {
19084
+ level: newLevel,
19085
+ attrs: level.attrs,
19086
+ details: level.details,
19087
+ bitrate: level.bitrate,
19088
+ averageBitrate: level.averageBitrate,
19089
+ maxBitrate: level.maxBitrate,
19090
+ realBitrate: level.realBitrate,
19091
+ width: level.width,
19092
+ height: level.height,
19093
+ codecSet: level.codecSet,
19094
+ audioCodec: level.audioCodec,
19095
+ videoCodec: level.videoCodec,
19096
+ audioGroups: level.audioGroups,
19097
+ subtitleGroups: level.subtitleGroups,
19098
+ loaded: level.loaded,
19099
+ loadError: level.loadError,
19100
+ fragmentError: level.fragmentError,
19101
+ name: level.name,
19102
+ id: level.id,
19103
+ uri: level.uri,
19104
+ url: level.url,
19105
+ urlId: 0,
19106
+ audioGroupIds: level.audioGroupIds,
19107
+ textGroupIds: level.textGroupIds
19108
+ };
19109
+ this.hls.trigger(Events.LEVEL_SWITCHING, levelSwitchingData);
19110
+ // check if we need to load playlist for this level
19111
+ const levelDetails = level.details;
19112
+ if (!levelDetails || levelDetails.live) {
19113
+ // level not retrieved yet, or live playlist we need to (re)load it
19114
+ const hlsUrlParameters = this.switchParams(level.uri, lastLevel == null ? undefined : lastLevel.details, levelDetails);
19115
+ this.loadPlaylist(hlsUrlParameters);
18832
19116
  }
18833
-
18834
- // The playhead should not be moving
18835
- if (media.paused && !seeking || media.ended || media.playbackRate === 0) {
18836
- this.nudgeRetry = 0;
18837
- this.stallResolved(currentTime);
18838
- // Fire MEDIA_ENDED to workaround event not being dispatched by browser
18839
- if (!this.ended && media.ended && this.hls) {
18840
- this.ended = currentTime || 1;
18841
- this.hls.trigger(Events.MEDIA_ENDED, {
18842
- stalled: false
18843
- });
18844
- }
18845
- return;
19117
+ }
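The setter above validates the index (an out-of-range value raises LEVEL_SWITCH_ERROR, fatal only when negative), records the new level and its PATHWAY-ID, fires LEVEL_SWITCHING, and reloads the playlist when details are missing or the stream is live. Illustrative application-side usage (not part of the diff):

    hls.on(Hls.Events.LEVEL_SWITCHING, (event, data) => {
      console.log(`switching to level ${data.level}: ${data.height}p @ ${data.bitrate}bps`);
    });
    hls.currentLevel = 2; // manual quality switch, assumed to reach the `level` setter shown above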
19118
+ get manualLevel() {
19119
+ return this.manualLevelIndex;
19120
+ }
19121
+ set manualLevel(newLevel) {
19122
+ this.manualLevelIndex = newLevel;
19123
+ if (this._startLevel === undefined) {
19124
+ this._startLevel = newLevel;
18846
19125
  }
18847
- if (!BufferHelper.getBuffered(media).length) {
18848
- this.nudgeRetry = 0;
18849
- return;
19126
+ if (newLevel !== -1) {
19127
+ this.level = newLevel;
18850
19128
  }
18851
- const bufferInfo = BufferHelper.bufferInfo(media, currentTime, 0);
18852
- const nextStart = bufferInfo.nextStart || 0;
18853
- const fragmentTracker = this.fragmentTracker;
18854
- if (seeking && fragmentTracker) {
18855
- // Waiting for seeking in a buffered range to complete
18856
- const hasEnoughBuffer = bufferInfo.len > MAX_START_GAP_JUMP;
18857
- // Next buffered range is too far ahead to jump to while still seeking
18858
- const noBufferGap = !nextStart || activeFrag && activeFrag.start <= currentTime || nextStart - currentTime > MAX_START_GAP_JUMP && !fragmentTracker.getPartialFragment(currentTime);
18859
- if (hasEnoughBuffer || noBufferGap) {
18860
- return;
19129
+ }
19130
+ get firstLevel() {
19131
+ return this._firstLevel;
19132
+ }
19133
+ set firstLevel(newLevel) {
19134
+ this._firstLevel = newLevel;
19135
+ }
19136
+ get startLevel() {
19137
+ // Setting hls.startLevel (this._startLevel) overrides config.startLevel
19138
+ if (this._startLevel === undefined) {
19139
+ const configStartLevel = this.hls.config.startLevel;
19140
+ if (configStartLevel !== undefined) {
19141
+ return configStartLevel;
18861
19142
  }
18862
- // Reset moved state when seeking to a point in or before a gap
18863
- this.moved = false;
19143
+ return this.hls.firstAutoLevel;
18864
19144
  }
18865
-
18866
- // Skip start gaps if we haven't played, but the last poll detected the start of a stall
18867
- // The addition poll gives the browser a chance to jump the gap for us
18868
- if (!this.moved && this.stalled !== null && fragmentTracker) {
18869
- // There is no playable buffer (seeked, waiting for buffer)
18870
- const isBuffered = bufferInfo.len > 0;
18871
- if (!isBuffered && !nextStart) {
18872
- return;
18873
- }
18874
- // Jump start gaps within jump threshold
18875
- const startJump = Math.max(nextStart, bufferInfo.start || 0) - currentTime;
18876
-
18877
- // When joining a live stream with audio tracks, account for live playlist window sliding by allowing
18878
- // a larger jump over start gaps caused by the audio-stream-controller buffering a start fragment
18879
- // that begins over 1 target duration after the video start position.
18880
- const isLive = !!(levelDetails != null && levelDetails.live);
18881
- const maxStartGapJump = isLive ? levelDetails.targetduration * 2 : MAX_START_GAP_JUMP;
18882
- const partialOrGap = fragmentTracker.getPartialFragment(currentTime);
18883
- if (startJump > 0 && (startJump <= maxStartGapJump || partialOrGap)) {
18884
- if (!media.paused) {
18885
- this._trySkipBufferHole(partialOrGap);
18886
- }
19145
+ return this._startLevel;
19146
+ }
19147
+ set startLevel(newLevel) {
19148
+ this._startLevel = newLevel;
19149
+ }
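The `startLevel` getter above resolves in a fixed order: an explicit assignment wins, then `config.startLevel`, then `hls.firstAutoLevel`. A small illustration, assuming the `Hls` facade delegates its `startLevel` property straight to this controller:

    const hls = new Hls({ startLevel: 2 }); // config value used until startLevel is assigned
    console.log(hls.startLevel); // 2, from config.startLevel
    hls.startLevel = 0;          // explicit assignment now overrides the config value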
19150
+ get pathwayPriority() {
19151
+ if (this.steering) {
19152
+ return this.steering.pathwayPriority;
19153
+ }
19154
+ return null;
19155
+ }
19156
+ set pathwayPriority(pathwayPriority) {
19157
+ if (this.steering) {
19158
+ const pathwaysList = this.steering.pathways();
19159
+ const filteredPathwayPriority = pathwayPriority.filter(pathwayId => {
19160
+ return pathwaysList.indexOf(pathwayId) !== -1;
19161
+ });
19162
+ if (pathwayPriority.length < 1) {
19163
+ this.warn(`pathwayPriority ${pathwayPriority} should contain at least one pathway from list: ${pathwaysList}`);
18887
19164
  return;
18888
19165
  }
19166
+ this.steering.pathwayPriority = filteredPathwayPriority;
18889
19167
  }
18890
-
18891
- // Start tracking stall time
18892
- const config = (_this$hls = this.hls) == null ? undefined : _this$hls.config;
18893
- if (!config) {
19168
+ }
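The setter above filters the requested list against the pathways reported by the steering controller and rejects an empty request with a warning. Illustrative usage with hypothetical pathway IDs, assuming content steering is active and the `Hls` facade exposes the same property:

    // Unknown pathway IDs are dropped by the filter above; an empty request only logs a warning.
    hls.pathwayPriority = ['CDN-A', 'CDN-B'];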
19169
+ onError(event, data) {
19170
+ if (data.fatal || !data.context) {
18894
19171
  return;
18895
19172
  }
18896
- const detectStallWithCurrentTimeMs = config.detectStallWithCurrentTimeMs;
18897
- const tnow = self.performance.now();
18898
- const tWaiting = this.waiting;
18899
- if (stalled === null) {
18900
- // Use time of recent "waiting" event
18901
- if (tWaiting > 0 && tnow - tWaiting < detectStallWithCurrentTimeMs) {
18902
- this.stalled = tWaiting;
18903
- } else {
18904
- this.stalled = tnow;
18905
- }
18906
- return;
19173
+ if (data.context.type === PlaylistContextType.LEVEL && data.context.level === this.level) {
19174
+ this.checkRetry(data);
18907
19175
  }
18908
- const stalledDuration = tnow - stalled;
18909
- if (!seeking && (stalledDuration >= detectStallWithCurrentTimeMs || tWaiting) && this.hls) {
18910
- // Dispatch MEDIA_ENDED when media.ended/ended event is not signalled at end of stream
18911
- if (state === State.ENDED && !(levelDetails != null && levelDetails.live) && Math.abs(currentTime - ((levelDetails == null ? undefined : levelDetails.edge) || 0)) < 1) {
18912
- if (this.ended) {
18913
- return;
18914
- }
18915
- this.ended = currentTime || 1;
18916
- this.hls.trigger(Events.MEDIA_ENDED, {
18917
- stalled: true
18918
- });
18919
- return;
18920
- }
18921
- // Report stalling after trying to fix
18922
- this._reportStall(bufferInfo);
18923
- if (!this.media || !this.hls) {
19176
+ }
19177
+
19178
+ // reset errors on the successful load of a fragment
19179
+ onFragBuffered(event, {
19180
+ frag
19181
+ }) {
19182
+ if (frag !== undefined && frag.type === PlaylistLevelType.MAIN) {
19183
+ const el = frag.elementaryStreams;
19184
+ if (!Object.keys(el).some(type => !!el[type])) {
18924
19185
  return;
18925
19186
  }
18926
- }
18927
- const bufferedWithHoles = BufferHelper.bufferInfo(media, currentTime, config.maxBufferHole);
18928
- this._tryFixBufferStall(bufferedWithHoles, stalledDuration);
18929
- }
18930
- stallResolved(currentTime) {
18931
- const stalled = this.stalled;
18932
- if (stalled && this.hls) {
18933
- this.stalled = null;
18934
- // The playhead is now moving, but was previously stalled
18935
- if (this.stallReported) {
18936
- const stalledDuration = self.performance.now() - stalled;
18937
- this.warn(`playback not stuck anymore @${currentTime}, after ${Math.round(stalledDuration)}ms`);
18938
- this.stallReported = false;
18939
- this.waiting = 0;
18940
- this.hls.trigger(Events.STALL_RESOLVED, {});
19187
+ const level = this._levels[frag.level];
19188
+ if (level != null && level.loadError) {
19189
+ this.log(`Resetting level error count of ${level.loadError} on frag buffered`);
19190
+ level.loadError = 0;
18941
19191
  }
18942
19192
  }
18943
19193
  }
18944
-
18945
- /**
18946
- * Detects and attempts to fix known buffer stalling issues.
18947
- * @param bufferInfo - The properties of the current buffer.
18948
- * @param stalledDurationMs - The amount of time Hls.js has been stalling for.
18949
- * @private
18950
- */
18951
- _tryFixBufferStall(bufferInfo, stalledDurationMs) {
18952
- var _this$hls2;
19194
+ onLevelLoaded(event, data) {
19195
+ var _data$deliveryDirecti2;
18953
19196
  const {
18954
- fragmentTracker,
18955
- media
18956
- } = this;
18957
- const config = (_this$hls2 = this.hls) == null ? undefined : _this$hls2.config;
18958
- if (!media || !fragmentTracker || !config) {
19197
+ level,
19198
+ details
19199
+ } = data;
19200
+ const curLevel = data.levelInfo;
19201
+ if (!curLevel) {
19202
+ var _data$deliveryDirecti;
19203
+ this.warn(`Invalid level index ${level}`);
19204
+ if ((_data$deliveryDirecti = data.deliveryDirectives) != null && _data$deliveryDirecti.skip) {
19205
+ details.deltaUpdateFailed = true;
19206
+ }
18959
19207
  return;
18960
19208
  }
18961
- const currentTime = media.currentTime;
18962
- const partial = fragmentTracker.getPartialFragment(currentTime);
18963
- if (partial) {
18964
- // Try to skip over the buffer hole caused by a partial fragment
18965
- // This method isn't limited by the size of the gap between buffered ranges
18966
- const targetTime = this._trySkipBufferHole(partial);
18967
- // we return here in this case, meaning
18968
- // the branch below only executes when we haven't seeked to a new position
18969
- if (targetTime || !this.media) {
18970
- return;
19209
+
19210
+ // only process level loaded events matching with expected level or prior to switch when media playlist is loaded directly
19211
+ if (curLevel === this.currentLevel || data.withoutMultiVariant) {
19212
+ // reset level load error counter on successful level loaded only if there are no issues with fragments
19213
+ if (curLevel.fragmentError === 0) {
19214
+ curLevel.loadError = 0;
19215
+ }
19216
+ // Ignore matching details populated by loading a Media Playlist directly
19217
+ let previousDetails = curLevel.details;
19218
+ if (previousDetails === data.details && previousDetails.advanced) {
19219
+ previousDetails = undefined;
18971
19220
  }
19221
+ this.playlistLoaded(level, data, previousDetails);
19222
+ } else if ((_data$deliveryDirecti2 = data.deliveryDirectives) != null && _data$deliveryDirecti2.skip) {
19223
+ // received a delta playlist update that cannot be merged
19224
+ details.deltaUpdateFailed = true;
18972
19225
  }
18973
-
18974
- // if we haven't had to skip over a buffer hole of a partial fragment
18975
- // we may just have to "nudge" the playlist as the browser decoding/rendering engine
18976
- // needs to cross some sort of threshold covering all source-buffers content
18977
- // to start playing properly.
18978
- const bufferedRanges = bufferInfo.buffered;
18979
- if ((bufferedRanges && bufferedRanges.length > 1 && bufferInfo.len > config.maxBufferHole || bufferInfo.nextStart && bufferInfo.nextStart - currentTime < config.maxBufferHole) && stalledDurationMs > config.highBufferWatchdogPeriod * 1000) {
18980
- this.warn('Trying to nudge playhead over buffer-hole');
18981
- // Try to nudge currentTime over a buffer hole if we've been stalling for the configured amount of seconds
18982
- // We only try to jump the hole if it's under the configured size
18983
- this._tryNudgeBuffer(bufferInfo);
19226
+ }
19227
+ loadPlaylist(hlsUrlParameters) {
19228
+ super.loadPlaylist();
19229
+ if (this.shouldLoadPlaylist(this.currentLevel)) {
19230
+ this.scheduleLoading(this.currentLevel, hlsUrlParameters);
18984
19231
  }
18985
19232
  }
18986
-
18987
- /**
18988
- * Triggers a BUFFER_STALLED_ERROR event, but only once per stall period.
18989
- * @param bufferLen - The playhead distance from the end of the current buffer segment.
18990
- * @private
18991
- */
18992
- _reportStall(bufferInfo) {
18993
- const {
18994
- hls,
18995
- media,
18996
- stallReported,
18997
- stalled
18998
- } = this;
18999
- if (!stallReported && stalled !== null && media && hls) {
19000
- // Report stalled error once
19001
- this.stallReported = true;
19002
- const error = new Error(`Playback stalling at @${media.currentTime} due to low buffer (${JSON.stringify(bufferInfo)})`);
19003
- this.warn(error.message);
19004
- hls.trigger(Events.ERROR, {
19005
- type: ErrorTypes.MEDIA_ERROR,
19006
- details: ErrorDetails.BUFFER_STALLED_ERROR,
19007
- fatal: false,
19008
- error,
19009
- buffer: bufferInfo.len,
19010
- bufferInfo,
19011
- stalled: {
19012
- start: stalled
19013
- }
19014
- });
19233
+ loadingPlaylist(currentLevel, hlsUrlParameters) {
19234
+ super.loadingPlaylist(currentLevel, hlsUrlParameters);
19235
+ const url = this.getUrlWithDirectives(currentLevel.uri, hlsUrlParameters);
19236
+ const currentLevelIndex = this.currentLevelIndex;
19237
+ const pathwayId = currentLevel.attrs['PATHWAY-ID'];
19238
+ const details = currentLevel.details;
19239
+ const age = details == null ? undefined : details.age;
19240
+ this.log(`Loading level index ${currentLevelIndex}${(hlsUrlParameters == null ? undefined : hlsUrlParameters.msn) !== undefined ? ' at sn ' + hlsUrlParameters.msn + ' part ' + hlsUrlParameters.part : ''}${pathwayId ? ' Pathway ' + pathwayId : ''}${age && details.live ? ' age ' + age.toFixed(1) + (details.type ? ' ' + details.type || '' : '') : ''} ${url}`);
19241
+ this.hls.trigger(Events.LEVEL_LOADING, {
19242
+ url,
19243
+ level: currentLevelIndex,
19244
+ levelInfo: currentLevel,
19245
+ pathwayId: currentLevel.attrs['PATHWAY-ID'],
19246
+ id: 0,
19247
+ // Deprecated Level urlId
19248
+ deliveryDirectives: hlsUrlParameters || null
19249
+ });
19250
+ }
19251
+ get nextLoadLevel() {
19252
+ if (this.manualLevelIndex !== -1) {
19253
+ return this.manualLevelIndex;
19254
+ } else {
19255
+ return this.hls.nextAutoLevel;
19015
19256
  }
19016
19257
  }
19017
-
19018
- /**
19019
- * Attempts to fix buffer stalls by jumping over known gaps caused by partial fragments
19020
- * @param partial - The partial fragment found at the current time (where playback is stalling).
19021
- * @private
19022
- */
19023
- _trySkipBufferHole(partial) {
19024
- var _this$hls3;
19025
- const {
19026
- fragmentTracker,
19027
- media
19028
- } = this;
19029
- const config = (_this$hls3 = this.hls) == null ? undefined : _this$hls3.config;
19030
- if (!media || !fragmentTracker || !config) {
19031
- return 0;
19258
+ set nextLoadLevel(nextLevel) {
19259
+ this.level = nextLevel;
19260
+ if (this.manualLevelIndex === -1) {
19261
+ this.hls.nextAutoLevel = nextLevel;
19032
19262
  }
19033
-
19034
- // Check if currentTime is between unbuffered regions of partial fragments
19035
- const currentTime = media.currentTime;
19036
- const bufferInfo = BufferHelper.bufferInfo(media, currentTime, 0);
19037
- const startTime = currentTime < bufferInfo.start ? bufferInfo.start : bufferInfo.nextStart;
19038
- if (startTime) {
19039
- const bufferStarved = bufferInfo.len <= config.maxBufferHole;
19040
- const waiting = bufferInfo.len > 0 && bufferInfo.len < 1 && media.readyState < 3;
19041
- const gapLength = startTime - currentTime;
19042
- if (gapLength > 0 && (bufferStarved || waiting)) {
19043
- // Only allow large gaps to be skipped if it is a start gap, or all fragments in skip range are partial
19044
- if (gapLength > config.maxBufferHole) {
19045
- let startGap = false;
19046
- if (currentTime === 0) {
19047
- const startFrag = fragmentTracker.getAppendedFrag(0, PlaylistLevelType.MAIN);
19048
- if (startFrag && startTime < startFrag.end) {
19049
- startGap = true;
19050
- }
19051
- }
19052
- if (!startGap) {
19053
- const startProvisioned = partial || fragmentTracker.getAppendedFrag(currentTime, PlaylistLevelType.MAIN);
19054
- if (startProvisioned) {
19055
- let moreToLoad = false;
19056
- let pos = startProvisioned.end;
19057
- while (pos < startTime) {
19058
- const provisioned = fragmentTracker.getPartialFragment(pos);
19059
- if (provisioned) {
19060
- pos += provisioned.duration;
19061
- } else {
19062
- moreToLoad = true;
19063
- break;
19064
- }
19065
- }
19066
- if (moreToLoad) {
19067
- return 0;
19068
- }
19069
- }
19070
- }
19071
- }
19072
- const targetTime = Math.max(startTime + SKIP_BUFFER_RANGE_START, currentTime + SKIP_BUFFER_HOLE_STEP_SECONDS);
19073
- this.warn(`skipping hole, adjusting currentTime from ${currentTime} to ${targetTime}`);
19074
- this.moved = true;
19075
- media.currentTime = targetTime;
19076
- if (partial && !partial.gap && this.hls) {
19077
- const error = new Error(`fragment loaded with buffer holes, seeking from ${currentTime} to ${targetTime}`);
19078
- this.hls.trigger(Events.ERROR, {
19079
- type: ErrorTypes.MEDIA_ERROR,
19080
- details: ErrorDetails.BUFFER_SEEK_OVER_HOLE,
19081
- fatal: false,
19082
- error,
19083
- reason: error.message,
19084
- frag: partial,
19085
- buffer: bufferInfo.len,
19086
- bufferInfo
19087
- });
19263
+ }
19264
+ removeLevel(levelIndex) {
19265
+ var _this$currentLevel;
19266
+ if (this._levels.length === 1) {
19267
+ return;
19268
+ }
19269
+ const levels = this._levels.filter((level, index) => {
19270
+ if (index !== levelIndex) {
19271
+ return true;
19272
+ }
19273
+ if (this.steering) {
19274
+ this.steering.removeLevel(level);
19275
+ }
19276
+ if (level === this.currentLevel) {
19277
+ this.currentLevel = null;
19278
+ this.currentLevelIndex = -1;
19279
+ if (level.details) {
19280
+ level.details.fragments.forEach(f => f.level = -1);
19088
19281
  }
19089
- return targetTime;
19090
19282
  }
19283
+ return false;
19284
+ });
19285
+ reassignFragmentLevelIndexes(levels);
19286
+ this._levels = levels;
19287
+ if (this.currentLevelIndex > -1 && (_this$currentLevel = this.currentLevel) != null && _this$currentLevel.details) {
19288
+ this.currentLevelIndex = this.currentLevel.details.fragments[0].level;
19091
19289
  }
19092
- return 0;
19290
+ if (this.manualLevelIndex > -1) {
19291
+ this.manualLevelIndex = this.currentLevelIndex;
19292
+ }
19293
+ const maxLevel = levels.length - 1;
19294
+ this._firstLevel = Math.min(this._firstLevel, maxLevel);
19295
+ if (this._startLevel) {
19296
+ this._startLevel = Math.min(this._startLevel, maxLevel);
19297
+ }
19298
+ this.hls.trigger(Events.LEVELS_UPDATED, {
19299
+ levels
19300
+ });
19093
19301
  }
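`removeLevel` above refuses to drop the last remaining level, detaches the removed level from content steering, clears `currentLevel` when it is the one removed, re-indexes fragment levels, clamps `firstLevel`/`startLevel`, and announces the result via LEVELS_UPDATED. Illustrative usage (not part of the diff; `badLevelIndex` is a placeholder):

    hls.on(Hls.Events.LEVELS_UPDATED, (event, data) => {
      console.log(`now tracking ${data.levels.length} level(s)`);
    });
    hls.removeLevel(badLevelIndex); // silently ignored when only one level is left (see the guard above)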
19094
-
19095
- /**
19096
- * Attempts to fix buffer stalls by advancing the mediaElement's current time by a small amount.
19097
- * @private
19098
- */
19099
- _tryNudgeBuffer(bufferInfo) {
19302
+ onLevelsUpdated(event, {
19303
+ levels
19304
+ }) {
19305
+ this._levels = levels;
19306
+ }
19307
+ checkMaxAutoUpdated() {
19100
19308
  const {
19101
- hls,
19102
- media,
19103
- nudgeRetry
19104
- } = this;
19105
- const config = hls == null ? undefined : hls.config;
19106
- if (!media || !config) {
19107
- return 0;
19108
- }
19109
- const currentTime = media.currentTime;
19110
- this.nudgeRetry++;
19111
- if (nudgeRetry < config.nudgeMaxRetry) {
19112
- const targetTime = currentTime + (nudgeRetry + 1) * config.nudgeOffset;
19113
- // playback stalled in buffered area ... let's nudge currentTime to try to overcome this
19114
- const error = new Error(`Nudging 'currentTime' from ${currentTime} to ${targetTime}`);
19115
- this.warn(error.message);
19116
- media.currentTime = targetTime;
19117
- hls.trigger(Events.ERROR, {
19118
- type: ErrorTypes.MEDIA_ERROR,
19119
- details: ErrorDetails.BUFFER_NUDGE_ON_STALL,
19120
- error,
19121
- fatal: false,
19122
- buffer: bufferInfo.len,
19123
- bufferInfo
19124
- });
19125
- } else {
19126
- const error = new Error(`Playhead still not moving while enough data buffered @${currentTime} after ${config.nudgeMaxRetry} nudges`);
19127
- this.error(error.message);
19128
- hls.trigger(Events.ERROR, {
19129
- type: ErrorTypes.MEDIA_ERROR,
19130
- details: ErrorDetails.BUFFER_STALLED_ERROR,
19131
- error,
19132
- fatal: true,
19133
- buffer: bufferInfo.len,
19134
- bufferInfo
19309
+ autoLevelCapping,
19310
+ maxAutoLevel,
19311
+ maxHdcpLevel
19312
+ } = this.hls;
19313
+ if (this._maxAutoLevel !== maxAutoLevel) {
19314
+ this._maxAutoLevel = maxAutoLevel;
19315
+ this.hls.trigger(Events.MAX_AUTO_LEVEL_UPDATED, {
19316
+ autoLevelCapping,
19317
+ levels: this.levels,
19318
+ maxAutoLevel,
19319
+ minAutoLevel: this.hls.minAutoLevel,
19320
+ maxHdcpLevel
19135
19321
  });
19136
19322
  }
19137
19323
  }
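`checkMaxAutoUpdated` above only fires MAX_AUTO_LEVEL_UPDATED when `maxAutoLevel` actually changes. An illustrative listener (not part of the diff) for reacting to capping changes:

    hls.on(Hls.Events.MAX_AUTO_LEVEL_UPDATED, (event, data) => {
      console.log(`auto levels now span ${data.minAutoLevel}..${data.maxAutoLevel} (capping=${data.autoLevelCapping}, HDCP=${data.maxHdcpLevel})`);
    });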
19138
19324
  }
19325
+ function assignTrackIdsByGroup(tracks) {
19326
+ const groups = {};
19327
+ tracks.forEach(track => {
19328
+ const groupId = track.groupId || '';
19329
+ track.id = groups[groupId] = groups[groupId] || 0;
19330
+ groups[groupId]++;
19331
+ });
19332
+ }
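A worked example for the helper above: ids are assigned per `groupId`, restarting at 0 for each group, and tracks without a `groupId` share the '' group.

    const tracks = [{ groupId: 'aud1' }, { groupId: 'aud1' }, { groupId: 'aud2' }, {}];
    assignTrackIdsByGroup(tracks);
    // tracks[0].id === 0, tracks[1].id === 1 (second 'aud1'),
    // tracks[2].id === 0 ('aud2'), tracks[3].id === 0 ('' group)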
19139
19333
 
19140
- const version = "1.6.0-beta.2.0.canary.10923";
19334
+ const version = "1.6.0-beta.2.0.canary.10925";
19141
19335
 
19142
19336
  // ensure the worker ends up in the bundle
19143
19337
  // If the worker should not be included this gets aliased to empty.js
@@ -19552,7 +19746,6 @@ class StreamController extends BaseStreamController {
19552
19746
  constructor(hls, fragmentTracker, keyLoader) {
19553
19747
  super(hls, fragmentTracker, keyLoader, 'stream-controller', PlaylistLevelType.MAIN);
19554
19748
  this.audioCodecSwap = false;
19555
- this.gapController = null;
19556
19749
  this.level = -1;
19557
19750
  this._forceStartLoad = false;
19558
19751
  this._hasEnoughToStart = false;
@@ -19564,19 +19757,8 @@ class StreamController extends BaseStreamController {
19564
19757
  this.backtrackFragment = null;
19565
19758
  this.audioCodecSwitch = false;
19566
19759
  this.videoBuffer = null;
19567
- this.onMediaWaiting = () => {
19568
- const gapController = this.gapController;
19569
- if (gapController) {
19570
- gapController.waiting = self.performance.now();
19571
- }
19572
- };
19573
19760
  this.onMediaPlaying = () => {
19574
19761
  // tick to speed up FRAG_CHANGED triggering
19575
- const gapController = this.gapController;
19576
- if (gapController) {
19577
- gapController.ended = 0;
19578
- gapController.waiting = 0;
19579
- }
19580
19762
  this.tick();
19581
19763
  };
19582
19764
  this.onMediaSeeked = () => {
@@ -19631,7 +19813,7 @@ class StreamController extends BaseStreamController {
19631
19813
  }
19632
19814
  onHandlerDestroying() {
19633
19815
  // @ts-ignore
19634
- this.onMediaPlaying = this.onMediaSeeked = this.onMediaWaiting = null;
19816
+ this.onMediaPlaying = this.onMediaSeeked = null;
19635
19817
  this.unregisterListeners();
19636
19818
  super.onHandlerDestroying();
19637
19819
  }
@@ -19726,8 +19908,11 @@ class StreamController extends BaseStreamController {
19726
19908
  this.onTickEnd();
19727
19909
  }
19728
19910
  onTickEnd() {
19911
+ var _this$media2;
19729
19912
  super.onTickEnd();
19730
- this.checkBuffer();
19913
+ if ((_this$media2 = this.media) != null && _this$media2.readyState) {
19914
+ this.lastCurrentTime = this.media.currentTime;
19915
+ }
19731
19916
  this.checkFragmentChanged();
19732
19917
  }
19733
19918
  doTickIdle() {
@@ -19960,29 +20145,19 @@ class StreamController extends BaseStreamController {
19960
20145
  onMediaAttached(event, data) {
19961
20146
  super.onMediaAttached(event, data);
19962
20147
  const media = data.media;
19963
- media.removeEventListener('playing', this.onMediaPlaying);
19964
- media.removeEventListener('seeked', this.onMediaSeeked);
19965
- media.removeEventListener('waiting', this.onMediaWaiting);
19966
- media.addEventListener('playing', this.onMediaPlaying);
19967
- media.addEventListener('seeked', this.onMediaSeeked);
19968
- media.addEventListener('waiting', this.onMediaWaiting);
19969
- this.gapController = new GapController(media, this.fragmentTracker, this.hls);
20148
+ addEventListener(media, 'playing', this.onMediaPlaying);
20149
+ addEventListener(media, 'seeked', this.onMediaSeeked);
19970
20150
  }
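The handlers are now registered through `addEventListener`/`removeEventListener` helpers instead of paired DOM calls; the helpers' implementation is outside this hunk. Purely as an assumption of intent, an idempotent variant could look like:

    // Assumption only -- the real helpers live elsewhere in the bundle.
    function addEventListener(el, type, listener) {
      el.removeEventListener(type, listener); // avoid double-binding the same handler
      el.addEventListener(type, listener);
    }
    function removeEventListener(el, type, listener) {
      el.removeEventListener(type, listener);
    }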
19971
20151
  onMediaDetaching(event, data) {
19972
20152
  const {
19973
20153
  media
19974
20154
  } = this;
19975
20155
  if (media) {
19976
- media.removeEventListener('playing', this.onMediaPlaying);
19977
- media.removeEventListener('seeked', this.onMediaSeeked);
19978
- media.removeEventListener('waiting', this.onMediaWaiting);
20156
+ removeEventListener(media, 'playing', this.onMediaPlaying);
20157
+ removeEventListener(media, 'seeked', this.onMediaSeeked);
19979
20158
  }
19980
20159
  this.videoBuffer = null;
19981
20160
  this.fragPlaying = null;
19982
- if (this.gapController) {
19983
- this.gapController.destroy();
19984
- this.gapController = null;
19985
- }
19986
20161
  super.onMediaDetaching(event, data);
19987
20162
  const transferringMedia = !!data.transferMedia;
19988
20163
  if (transferringMedia) {
@@ -19990,19 +20165,6 @@ class StreamController extends BaseStreamController {
19990
20165
  }
19991
20166
  this._hasEnoughToStart = false;
19992
20167
  }
19993
- triggerEnded() {
19994
- const gapController = this.gapController;
19995
- if (gapController) {
19996
- var _this$media2;
19997
- if (gapController.ended) {
19998
- return;
19999
- }
20000
- gapController.ended = ((_this$media2 = this.media) == null ? undefined : _this$media2.currentTime) || 1;
20001
- }
20002
- this.hls.trigger(Events.MEDIA_ENDED, {
20003
- stalled: false
20004
- });
20005
- }
20006
20168
  onManifestLoading() {
20007
20169
  super.onManifestLoading();
20008
20170
  // reset buffer on manifest loading
@@ -20337,26 +20499,6 @@ class StreamController extends BaseStreamController {
20337
20499
  break;
20338
20500
  }
20339
20501
  }
20340
-
20341
- // Checks the health of the buffer and attempts to resolve playback stalls.
20342
- checkBuffer() {
20343
- const {
20344
- media,
20345
- gapController
20346
- } = this;
20347
- if (!media || !gapController || !media.readyState) {
20348
- // Exit early if we don't have media or if the media hasn't buffered anything yet (readyState 0)
20349
- return;
20350
- }
20351
- if (this._hasEnoughToStart || !BufferHelper.getBuffered(media).length) {
20352
- // Resolve gaps using the main buffer, whose ranges are the intersections of the A/V sourcebuffers
20353
- const state = this.state;
20354
- const activeFrag = state !== State.IDLE ? this.fragCurrent : null;
20355
- const levelDetails = this.getLevelDetails();
20356
- gapController.poll(this.lastCurrentTime, activeFrag, levelDetails, state);
20357
- }
20358
- this.lastCurrentTime = media.currentTime;
20359
- }
20360
20502
  onFragLoadEmergencyAborted() {
20361
20503
  this.state = State.IDLE;
20362
20504
  // if loadedmetadata is not set, it means that we are emergency switch down on first frag
@@ -20372,8 +20514,10 @@ class StreamController extends BaseStreamController {
20372
20514
  }) {
20373
20515
  if (type !== ElementaryStreamTypes.AUDIO || !this.altAudio) {
20374
20516
  const mediaBuffer = (type === ElementaryStreamTypes.VIDEO ? this.videoBuffer : this.mediaBuffer) || this.media;
20375
- this.afterBufferFlushed(mediaBuffer, type, PlaylistLevelType.MAIN);
20376
- this.tick();
20517
+ if (mediaBuffer) {
20518
+ this.afterBufferFlushed(mediaBuffer, type, PlaylistLevelType.MAIN);
20519
+ this.tick();
20520
+ }
20377
20521
  }
20378
20522
  }
20379
20523
  onLevelsUpdated(event, data) {
@@ -21847,9 +21991,12 @@ class Hls {
21847
21991
  this.latencyController = undefined;
21848
21992
  this.levelController = undefined;
21849
21993
  this.streamController = undefined;
21994
+ this.audioStreamController = undefined;
21995
+ this.subtititleStreamController = undefined;
21850
21996
  this.audioTrackController = undefined;
21851
21997
  this.subtitleTrackController = undefined;
21852
21998
  this.interstitialsController = undefined;
21999
+ this.gapController = undefined;
21853
22000
  this.emeController = undefined;
21854
22001
  this.cmcdController = undefined;
21855
22002
  this._media = null;
@@ -21889,6 +22036,7 @@ class Hls {
21889
22036
  const id3TrackController = new ID3TrackController(this);
21890
22037
  const keyLoader = new KeyLoader(this.config);
21891
22038
  const streamController = this.streamController = new StreamController(this, fragmentTracker, keyLoader);
22039
+ const gapController = this.gapController = new GapController(this, fragmentTracker);
21892
22040
 
21893
22041
  // Cap level controller uses streamController to flush the buffer
21894
22042
  capLevelController.setStreamController(streamController);
@@ -21902,17 +22050,17 @@ class Hls {
21902
22050
  networkControllers.splice(1, 0, contentSteering);
21903
22051
  }
21904
22052
  this.networkControllers = networkControllers;
21905
- const coreComponents = [abrController, bufferController, capLevelController, fpsController, id3TrackController, fragmentTracker];
22053
+ const coreComponents = [abrController, bufferController, gapController, capLevelController, fpsController, id3TrackController, fragmentTracker];
21906
22054
  this.audioTrackController = this.createController(config.audioTrackController, networkControllers);
21907
22055
  const AudioStreamControllerClass = config.audioStreamController;
21908
22056
  if (AudioStreamControllerClass) {
21909
- networkControllers.push(new AudioStreamControllerClass(this, fragmentTracker, keyLoader));
22057
+ networkControllers.push(this.audioStreamController = new AudioStreamControllerClass(this, fragmentTracker, keyLoader));
21910
22058
  }
21911
22059
  // Instantiate subtitleTrackController before SubtitleStreamController to receive level events first
21912
22060
  this.subtitleTrackController = this.createController(config.subtitleTrackController, networkControllers);
21913
22061
  const SubtitleStreamControllerClass = config.subtitleStreamController;
21914
22062
  if (SubtitleStreamControllerClass) {
21915
- networkControllers.push(new SubtitleStreamControllerClass(this, fragmentTracker, keyLoader));
22063
+ networkControllers.push(this.subtititleStreamController = new SubtitleStreamControllerClass(this, fragmentTracker, keyLoader));
21916
22064
  }
21917
22065
  this.createController(config.timelineController, coreComponents);
21918
22066
  keyLoader.emeController = this.emeController = this.createController(config.emeController, coreComponents);
@@ -22179,6 +22327,18 @@ class Hls {
22179
22327
  });
22180
22328
  }
22181
22329
  }
22330
+ get inFlightFragments() {
22331
+ const inFlightData = {
22332
+ [PlaylistLevelType.MAIN]: this.streamController.inFlightFrag
22333
+ };
22334
+ if (this.audioStreamController) {
22335
+ inFlightData[PlaylistLevelType.AUDIO] = this.audioStreamController.inFlightFrag;
22336
+ }
22337
+ if (this.subtititleStreamController) {
22338
+ inFlightData[PlaylistLevelType.SUBTITLE] = this.subtititleStreamController.inFlightFrag;
22339
+ }
22340
+ return inFlightData;
22341
+ }
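The new `inFlightFragments` getter keys each stream controller's `inFlightFrag` by playlist type ('main', plus 'audio'/'subtitle' when those controllers exist). Illustrative inspection code (not part of the diff; the per-entry shape is whatever `inFlightFrag` returns):

    const inFlight = hls.inFlightFragments;
    Object.keys(inFlight).forEach(type => {
      console.log(type, inFlight[type]); // e.g. 'main', 'audio', 'subtitle'
    });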
22182
22342
 
22183
22343
  /**
22184
22344
  * Swap through possible audio codecs in the stream (for example to switch from stereo to 5.1)