hls.js 1.6.0-beta.2.0.canary.10924 → 1.6.0-beta.2.0.canary.10926

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -402,7 +402,7 @@ function enableLogs(debugConfig, context, id) {
  // Some browsers don't allow to use bind on console object anyway
  // fallback to default if needed
  try {
- newLogger.log(`Debug logs enabled for "${context}" in hls.js version ${"1.6.0-beta.2.0.canary.10924"}`);
+ newLogger.log(`Debug logs enabled for "${context}" in hls.js version ${"1.6.0-beta.2.0.canary.10926"}`);
  } catch (e) {
  /* log fn threw an exception. All logger methods are no-ops. */
  return createLogger();
@@ -1062,8 +1062,8 @@ function getCodecTiers(levels, audioTracksByGroup, minAutoLevel, maxAutoLevel) {
  }, {});
  }
  function useAlternateAudio(audioTrackUrl, hls) {
- var _hls$levels$hls$loadL;
- return !!audioTrackUrl && audioTrackUrl !== ((_hls$levels$hls$loadL = hls.levels[hls.loadLevel]) == null ? undefined : _hls$levels$hls$loadL.uri);
+ var _hls$loadLevelObj;
+ return !!audioTrackUrl && audioTrackUrl !== ((_hls$loadLevelObj = hls.loadLevelObj) == null ? undefined : _hls$loadLevelObj.uri);
  }

  class AbrController extends Logger {
@@ -1557,8 +1557,8 @@ class AbrController extends Logger {
  }
  // If no matching level found, see if min auto level would be a better option
  const minLevel = hls.levels[minAutoLevel];
- const autoLevel = hls.levels[hls.loadLevel];
- if ((minLevel == null ? undefined : minLevel.bitrate) < (autoLevel == null ? undefined : autoLevel.bitrate)) {
+ const autoLevel = hls.loadLevelObj;
+ if (autoLevel && (minLevel == null ? undefined : minLevel.bitrate) < autoLevel.bitrate) {
  return minAutoLevel;
  }
  // or if bitrate is not lower, continue to use loadLevel
@@ -2042,7 +2042,7 @@ class ErrorController extends Logger {
  case ErrorDetails.SUBTITLE_LOAD_ERROR:
  case ErrorDetails.SUBTITLE_TRACK_LOAD_TIMEOUT:
  if (context) {
- const level = hls.levels[hls.loadLevel];
+ const level = hls.loadLevelObj;
  if (level && (context.type === PlaylistContextType.AUDIO_TRACK && level.hasAudioGroup(context.groupId) || context.type === PlaylistContextType.SUBTITLE_TRACK && level.hasSubtitleGroup(context.groupId))) {
  // Perform Pathway switch or Redundant failover if possible for fastest recovery
  // otherwise allow playlist retry count to reach max error retries
@@ -2055,7 +2055,7 @@ class ErrorController extends Logger {
  return;
  case ErrorDetails.KEY_SYSTEM_STATUS_OUTPUT_RESTRICTED:
  {
- const level = hls.levels[hls.loadLevel];
+ const level = hls.loadLevelObj;
  const restrictedHdcpLevel = level == null ? undefined : level.attrs['HDCP-LEVEL'];
  if (restrictedHdcpLevel) {
  data.errorAction = {
@@ -7254,36 +7254,53 @@ class BufferHelper {
  }
  return false;
  }
+ static bufferedRanges(media) {
+ if (media) {
+ const timeRanges = BufferHelper.getBuffered(media);
+ return BufferHelper.timeRangesToArray(timeRanges);
+ }
+ return [];
+ }
+ static timeRangesToArray(timeRanges) {
+ const buffered = [];
+ for (let i = 0; i < timeRanges.length; i++) {
+ buffered.push({
+ start: timeRanges.start(i),
+ end: timeRanges.end(i)
+ });
+ }
+ return buffered;
+ }
  static bufferInfo(media, pos, maxHoleDuration) {
  if (media) {
- const vbuffered = BufferHelper.getBuffered(media);
- if (vbuffered.length) {
- const buffered = [];
- for (let i = 0; i < vbuffered.length; i++) {
- buffered.push({
- start: vbuffered.start(i),
- end: vbuffered.end(i)
- });
- }
+ const buffered = BufferHelper.bufferedRanges(media);
+ if (buffered.length) {
  return BufferHelper.bufferedInfo(buffered, pos, maxHoleDuration);
  }
  }
  return {
  len: 0,
  start: pos,
- end: pos
+ end: pos,
+ bufferedIndex: -1
  };
  }
  static bufferedInfo(buffered, pos, maxHoleDuration) {
  pos = Math.max(0, pos);
  // sort on buffer.start/smaller end (IE does not always return sorted buffered range)
- buffered.sort((a, b) => a.start - b.start || b.end - a.end);
+ if (buffered.length > 1) {
+ buffered.sort((a, b) => a.start - b.start || b.end - a.end);
+ }
+ let bufferedIndex = -1;
  let buffered2 = [];
  if (maxHoleDuration) {
  // there might be some small holes between buffer time range
  // consider that holes smaller than maxHoleDuration are irrelevant and build another
  // buffer time range representations that discards those holes
  for (let i = 0; i < buffered.length; i++) {
+ if (pos >= buffered[i].start && pos <= buffered[i].end) {
+ bufferedIndex = i;
+ }
  const buf2len = buffered2.length;
  if (buf2len) {
  const buf2end = buffered2[buf2len - 1].end;
@@ -7309,24 +7326,25 @@ class BufferHelper {
  buffered2 = buffered;
  }
  let bufferLen = 0;
+ let nextStart;

- // bufferStartNext can possibly be undefined based on the conditional logic below
- let bufferStartNext;
-
- // bufferStart and bufferEnd are buffer boundaries around current video position
+ // bufferStart and bufferEnd are buffer boundaries around current playback position (pos)
  let bufferStart = pos;
  let bufferEnd = pos;
  for (let i = 0; i < buffered2.length; i++) {
  const start = buffered2[i].start;
  const end = buffered2[i].end;
  // logger.log('buf start/end:' + buffered.start(i) + '/' + buffered.end(i));
+ if (bufferedIndex === -1 && pos >= start && pos <= end) {
+ bufferedIndex = i;
+ }
  if (pos + maxHoleDuration >= start && pos < end) {
  // play position is inside this buffer TimeRange, retrieve end of buffer position and buffer length
  bufferStart = start;
  bufferEnd = end;
  bufferLen = bufferEnd - pos;
  } else if (pos + maxHoleDuration < start) {
- bufferStartNext = start;
+ nextStart = start;
  break;
  }
  }
@@ -7334,8 +7352,9 @@ class BufferHelper {
  len: bufferLen,
  start: bufferStart || 0,
  end: bufferEnd || 0,
- nextStart: bufferStartNext,
- buffered
+ nextStart,
+ buffered,
+ bufferedIndex
  };
  }

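The hunks above add two helpers, BufferHelper.bufferedRanges and BufferHelper.timeRangesToArray, that turn a TimeRanges object into a plain array of { start, end } objects, and bufferInfo/bufferedInfo now also report bufferedIndex (the index of the range containing pos, or -1). A minimal sketch of exercising them, assuming access to the bundle's internal BufferHelper class (not documented public API) and an illustrative videoElement:

    // Illustrative only: inspect which buffered range (if any) contains the playhead.
    const ranges = BufferHelper.bufferedRanges(videoElement); // [{ start, end }, ...]
    const info = BufferHelper.bufferedInfo(ranges, videoElement.currentTime, 0);
    if (info.bufferedIndex === -1) {
      console.log('playhead is not inside any buffered range');
    } else {
      console.log(`playhead is in range #${info.bufferedIndex}`, ranges[info.bufferedIndex]);
    }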
@@ -7607,7 +7626,6 @@ class BaseStreamController extends TaskLoop {
  // reset startPosition and lastCurrentTime to restart playback @ stream beginning
  this.log(`setting startPosition to 0 because media ended`);
  this.startPosition = this.lastCurrentTime = 0;
- this.triggerEnded();
  };
  this.playlistType = playlistType;
  this.hls = hls;
@@ -7681,6 +7699,12 @@ class BaseStreamController extends TaskLoop {
  resumeBuffering() {
  this.buffering = true;
  }
+ get inFlightFrag() {
+ return {
+ frag: this.fragCurrent,
+ state: this.state
+ };
+ }
  _streamEnded(bufferInfo, levelDetails) {
  // Stream is never "ended" when playlist is live or media is detached
  if (levelDetails.live || !this.media) {
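The new inFlightFrag getter pairs each stream controller's current fragment with its state-machine state; later in this diff these values are read back through hls.inFlightFragments (keyed by main/audio) when deciding whether a gap jump would collide with a pending load. A rough sketch of consuming that shape, with the names taken from this diff and treated as internal, undocumented API:

    // Illustrative: summarize in-flight loads given { main: { frag, state }, audio: { frag, state } }.
    function describeInFlight(inFlightFragments) {
      return ['main', 'audio'].map(type => {
        const data = inFlightFragments[type]; // each controller's inFlightFrag getter result
        return data && data.frag ? `${type}: sn ${data.frag.sn} (${data.state})` : `${type}: idle`;
      }).join(', ');
    }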
@@ -7769,9 +7793,6 @@ class BaseStreamController extends TaskLoop {
  this.startFragRequested = false;
  }
  onError(event, data) {}
- triggerEnded() {
- /* overridden in stream-controller */
- }
  onManifestLoaded(event, data) {
  this.startTimeOffset = data.startTimeOffset;
  }
@@ -8341,7 +8362,8 @@ class BaseStreamController extends TaskLoop {
  if (bufferInfo.len === 0 && bufferInfo.nextStart !== undefined) {
  const bufferedFragAtPos = this.fragmentTracker.getBufferedFrag(pos, type);
  if (bufferedFragAtPos && (bufferInfo.nextStart <= bufferedFragAtPos.end || bufferedFragAtPos.gap)) {
- return BufferHelper.bufferInfo(bufferable, pos, Math.max(bufferInfo.nextStart, maxBufferHole));
+ const gapDuration = Math.max(Math.min(bufferInfo.nextStart, bufferedFragAtPos.end) - pos, maxBufferHole);
+ return BufferHelper.bufferInfo(bufferable, pos, gapDuration);
  }
  }
  return bufferInfo;
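The replacement clamps the hole-skipping tolerance to the end of the fragment tracked at pos instead of passing nextStart through unchanged. With illustrative values pos = 10, bufferInfo.nextStart = 18, bufferedFragAtPos.end = 12 and maxBufferHole = 0.1, the old call recomputed bufferInfo with a tolerance of max(18, 0.1) = 18 seconds, whereas the new code uses max(min(18, 12) - 10, 0.1) = 2 seconds, so the recomputed info only bridges the hole up to the tracked fragment's end.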
@@ -9575,7 +9597,6 @@ transfer tracks: ${JSON.stringify(transferredTracks, (key, value) => key === 'in
  const sbTrack = transferredTrack != null && transferredTrack.buffer ? transferredTrack : track;
  const sbCodec = (sbTrack == null ? undefined : sbTrack.pendingCodec) || (sbTrack == null ? undefined : sbTrack.codec);
  const trackLevelCodec = sbTrack == null ? undefined : sbTrack.levelCodec;
- const forceChangeType = !sbTrack || !!this.hls.config.assetPlayerId;
  if (!track) {
  track = tracks[trackName] = {
  buffer: undefined,
@@ -9592,7 +9613,7 @@ transfer tracks: ${JSON.stringify(transferredTracks, (key, value) => key === 'in
  const currentCodec = currentCodecFull == null ? undefined : currentCodecFull.replace(VIDEO_CODEC_PROFILE_REPLACE, '$1');
  let trackCodec = pickMostCompleteCodecName(codec, levelCodec);
  const nextCodec = (_trackCodec = trackCodec) == null ? undefined : _trackCodec.replace(VIDEO_CODEC_PROFILE_REPLACE, '$1');
- if (trackCodec && (currentCodec !== nextCodec || forceChangeType)) {
+ if (trackCodec && currentCodecFull && currentCodec !== nextCodec) {
  if (trackName.slice(0, 5) === 'audio') {
  trackCodec = getCodecCompatibleName(trackCodec, this.appendSource);
  }
@@ -17167,16 +17188,20 @@ const hlsDefaultConfig = _objectSpread2(_objectSpread2({
  frontBufferFlushThreshold: Infinity,
  maxBufferSize: 60 * 1000 * 1000,
  // used by stream-controller
- maxBufferHole: 0.1,
+ maxFragLookUpTolerance: 0.25,
  // used by stream-controller
+ maxBufferHole: 0.1,
+ // used by stream-controller and gap-controller
+ detectStallWithCurrentTimeMs: 1250,
+ // used by gap-controller
  highBufferWatchdogPeriod: 2,
- // used by stream-controller
+ // used by gap-controller
  nudgeOffset: 0.1,
- // used by stream-controller
+ // used by gap-controller
  nudgeMaxRetry: 3,
- // used by stream-controller
- maxFragLookUpTolerance: 0.25,
- // used by stream-controller
+ // used by gap-controller
+ nudgeOnVideoHole: true,
+ // used by gap-controller
  liveSyncDurationCount: 3,
  // used by latency-controller
  liveSyncOnStallIncrease: 1,
@@ -17275,7 +17300,6 @@ const hlsDefaultConfig = _objectSpread2(_objectSpread2({
  progressive: false,
  lowLatencyMode: true,
  cmcd: undefined,
- detectStallWithCurrentTimeMs: 1250,
  enableDateRangeMetadataCues: true,
  enableEmsgMetadataCues: true,
  enableEmsgKLVMetadata: false,
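Net effect of the two config hunks: maxFragLookUpTolerance and maxBufferHole are regrouped, detectStallWithCurrentTimeMs moves up next to the gap-controller options, and a new nudgeOnVideoHole flag defaults to true. A hedged sketch of overriding these defaults when constructing a player; the values simply restate the defaults shown above and are not recommendations:

    // Illustrative override of the gap-controller related defaults from this diff.
    const hls = new Hls({
      maxBufferHole: 0.1,                 // used by stream-controller and gap-controller
      detectStallWithCurrentTimeMs: 1250, // stall detection window (ms)
      highBufferWatchdogPeriod: 2,        // seconds stalled with buffer before nudging
      nudgeOffset: 0.1,                   // seconds added per nudge
      nudgeMaxRetry: 3,                   // nudges before a fatal BUFFER_STALLED_ERROR
      nudgeOnVideoHole: true,             // new: seek-flush when crossing a video-only hole
    });
    hls.attachMedia(document.querySelector('video'));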
@@ -17517,356 +17541,914 @@ function enableStreamingMode(config, logger) {
17517
17541
  }
17518
17542
  }
17519
17543
 
17520
- function sendAddTrackEvent(track, videoEl) {
17521
- let event;
17522
- try {
17523
- event = new Event('addtrack');
17524
- } catch (err) {
17525
- // for IE11
17526
- event = document.createEvent('Event');
17527
- event.initEvent('addtrack', false, false);
17528
- }
17529
- event.track = track;
17530
- videoEl.dispatchEvent(event);
17531
- }
17532
- function clearCurrentCues(track, enterHandler) {
17533
- // When track.mode is disabled, track.cues will be null.
17534
- // To guarantee the removal of cues, we need to temporarily
17535
- // change the mode to hidden
17536
- const mode = track.mode;
17537
- if (mode === 'disabled') {
17538
- track.mode = 'hidden';
17539
- }
17540
- if (track.cues) {
17541
- for (let i = track.cues.length; i--;) {
17542
- if (enterHandler) {
17543
- track.cues[i].removeEventListener('enter', enterHandler);
17544
- }
17545
- track.removeCue(track.cues[i]);
17546
- }
17547
- }
17548
- if (mode === 'disabled') {
17549
- track.mode = mode;
17550
- }
17551
- }
17552
- function removeCuesInRange(track, start, end, predicate) {
17553
- const mode = track.mode;
17554
- if (mode === 'disabled') {
17555
- track.mode = 'hidden';
17556
- }
17557
- if (track.cues && track.cues.length > 0) {
17558
- const cues = getCuesInRange(track.cues, start, end);
17559
- for (let i = 0; i < cues.length; i++) {
17560
- if (!predicate || predicate(cues[i])) {
17561
- track.removeCue(cues[i]);
17562
- }
17563
- }
17564
- }
17565
- if (mode === 'disabled') {
17566
- track.mode = mode;
17567
- }
17568
- }
17569
-
17570
- // Find first cue starting after given time.
17571
- // Modified version of binary search O(log(n)).
17572
- function getFirstCueIndexAfterTime(cues, time) {
17573
- // If first cue starts after time, start there
17574
- if (time < cues[0].startTime) {
17575
- return 0;
17576
- }
17577
- // If the last cue ends before time there is no overlap
17578
- const len = cues.length - 1;
17579
- if (time > cues[len].endTime) {
17580
- return -1;
17581
- }
17582
- let left = 0;
17583
- let right = len;
17584
- while (left <= right) {
17585
- const mid = Math.floor((right + left) / 2);
17586
- if (time < cues[mid].startTime) {
17587
- right = mid - 1;
17588
- } else if (time > cues[mid].startTime && left < len) {
17589
- left = mid + 1;
17590
- } else {
17591
- // If it's not lower or higher, it must be equal.
17592
- return mid;
17593
- }
17594
- }
17595
- // At this point, left and right have swapped.
17596
- // No direct match was found, left or right element must be the closest. Check which one has the smallest diff.
17597
- return cues[left].startTime - time < time - cues[right].startTime ? left : right;
17598
- }
17599
- function getCuesInRange(cues, start, end) {
17600
- const cuesFound = [];
17601
- const firstCueInRange = getFirstCueIndexAfterTime(cues, start);
17602
- if (firstCueInRange > -1) {
17603
- for (let i = firstCueInRange, len = cues.length; i < len; i++) {
17604
- const cue = cues[i];
17605
- if (cue.startTime >= start && cue.endTime <= end) {
17606
- cuesFound.push(cue);
17607
- } else if (cue.startTime > end) {
17608
- return cuesFound;
17609
- }
17610
- }
17611
- }
17612
- return cuesFound;
17613
- }
17614
-
17615
- const MIN_CUE_DURATION = 0.25;
17616
- function getCueClass() {
17617
- if (typeof self === 'undefined') return undefined;
17618
- return self.VTTCue || self.TextTrackCue;
17544
+ function addEventListener(el, type, listener) {
17545
+ removeEventListener(el, type, listener);
17546
+ el.addEventListener(type, listener);
17619
17547
  }
17620
- function createCueWithDataFields(Cue, startTime, endTime, data, type) {
17621
- let cue = new Cue(startTime, endTime, '');
17622
- try {
17623
- cue.value = data;
17624
- if (type) {
17625
- cue.type = type;
17626
- }
17627
- } catch (e) {
17628
- cue = new Cue(startTime, endTime, JSON.stringify(type ? _objectSpread2({
17629
- type
17630
- }, data) : data));
17631
- }
17632
- return cue;
17548
+ function removeEventListener(el, type, listener) {
17549
+ el.removeEventListener(type, listener);
17633
17550
  }
17634
17551
 
17635
- // VTTCue latest draft allows an infinite duration, fallback
17636
- // to MAX_VALUE if necessary
17637
- const MAX_CUE_ENDTIME = (() => {
17638
- const Cue = getCueClass();
17639
- try {
17640
- Cue && new Cue(0, Number.POSITIVE_INFINITY, '');
17641
- } catch (e) {
17642
- return Number.MAX_VALUE;
17643
- }
17644
- return Number.POSITIVE_INFINITY;
17645
- })();
17646
- function hexToArrayBuffer(str) {
17647
- return Uint8Array.from(str.replace(/^0x/, '').replace(/([\da-fA-F]{2}) ?/g, '0x$1 ').replace(/ +$/, '').split(' ')).buffer;
17648
- }
17649
- class ID3TrackController {
17650
- constructor(hls) {
17651
- this.hls = undefined;
17652
- this.id3Track = null;
17552
+ const MAX_START_GAP_JUMP = 2.0;
17553
+ const SKIP_BUFFER_HOLE_STEP_SECONDS = 0.1;
17554
+ const SKIP_BUFFER_RANGE_START = 0.05;
17555
+ const TICK_INTERVAL$1 = 100;
17556
+ class GapController extends TaskLoop {
17557
+ constructor(hls, fragmentTracker) {
17558
+ super('gap-controller', hls.logger);
17559
+ this.hls = null;
17560
+ this.fragmentTracker = null;
17653
17561
  this.media = null;
17654
- this.dateRangeCuesAppended = {};
17655
- this.removeCues = true;
17656
- this.onEventCueEnter = () => {
17657
- if (!this.hls) {
17562
+ this.mediaSource = undefined;
17563
+ this.nudgeRetry = 0;
17564
+ this.stallReported = false;
17565
+ this.stalled = null;
17566
+ this.moved = false;
17567
+ this.seeking = false;
17568
+ this.buffered = {};
17569
+ this.lastCurrentTime = 0;
17570
+ this.ended = 0;
17571
+ this.waiting = 0;
17572
+ this.onMediaPlaying = () => {
17573
+ this.ended = 0;
17574
+ this.waiting = 0;
17575
+ };
17576
+ this.onMediaWaiting = () => {
17577
+ var _this$media;
17578
+ if ((_this$media = this.media) != null && _this$media.seeking) {
17658
17579
  return;
17659
17580
  }
17660
- this.hls.trigger(Events.EVENT_CUE_ENTER, {});
17581
+ this.waiting = self.performance.now();
17582
+ this.tick();
17583
+ };
17584
+ this.onMediaEnded = () => {
17585
+ if (this.hls) {
17586
+ var _this$media2;
17587
+ // ended is set when triggering MEDIA_ENDED so that we do not trigger it again on stall or on tick with media.ended
17588
+ this.ended = ((_this$media2 = this.media) == null ? undefined : _this$media2.currentTime) || 1;
17589
+ this.hls.trigger(Events.MEDIA_ENDED, {
17590
+ stalled: false
17591
+ });
17592
+ }
17661
17593
  };
17662
17594
  this.hls = hls;
17663
- this._registerListeners();
17664
- }
17665
- destroy() {
17666
- this._unregisterListeners();
17667
- this.id3Track = null;
17668
- this.media = null;
17669
- this.dateRangeCuesAppended = {};
17670
- // @ts-ignore
17671
- this.hls = this.onEventCueEnter = null;
17595
+ this.fragmentTracker = fragmentTracker;
17596
+ this.registerListeners();
17672
17597
  }
17673
- _registerListeners() {
17598
+ registerListeners() {
17674
17599
  const {
17675
17600
  hls
17676
17601
  } = this;
17677
- hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
17678
- hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
17679
- hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
17680
- hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
17681
- hls.on(Events.FRAG_PARSING_METADATA, this.onFragParsingMetadata, this);
17682
- hls.on(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
17683
- hls.on(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
17684
- hls.on(Events.LEVEL_PTS_UPDATED, this.onLevelPtsUpdated, this);
17602
+ if (hls) {
17603
+ hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
17604
+ hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
17605
+ hls.on(Events.BUFFER_APPENDED, this.onBufferAppended, this);
17606
+ }
17685
17607
  }
17686
- _unregisterListeners() {
17608
+ unregisterListeners() {
17687
17609
  const {
17688
17610
  hls
17689
17611
  } = this;
17690
- hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
17691
- hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
17692
- hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
17693
- hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
17694
- hls.off(Events.FRAG_PARSING_METADATA, this.onFragParsingMetadata, this);
17695
- hls.off(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
17696
- hls.off(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
17697
- hls.off(Events.LEVEL_PTS_UPDATED, this.onLevelPtsUpdated, this);
17698
- }
17699
- // Add ID3 metatadata text track.
17700
- onMediaAttaching(event, data) {
17701
- var _data$overrides;
17702
- this.media = data.media;
17703
- if (((_data$overrides = data.overrides) == null ? undefined : _data$overrides.cueRemoval) === false) {
17704
- this.removeCues = false;
17612
+ if (hls) {
17613
+ hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
17614
+ hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
17615
+ hls.off(Events.BUFFER_APPENDED, this.onBufferAppended, this);
17705
17616
  }
17706
17617
  }
17707
- onMediaAttached() {
17708
- const details = this.hls.latestLevelDetails;
17709
- if (details) {
17710
- this.updateDateRangeCues(details);
17711
- }
17618
+ destroy() {
17619
+ super.destroy();
17620
+ this.unregisterListeners();
17621
+ this.media = this.hls = this.fragmentTracker = null;
17622
+ this.mediaSource = undefined;
17623
+ }
17624
+ onMediaAttached(event, data) {
17625
+ this.setInterval(TICK_INTERVAL$1);
17626
+ this.mediaSource = data.mediaSource;
17627
+ const media = this.media = data.media;
17628
+ addEventListener(media, 'playing', this.onMediaPlaying);
17629
+ addEventListener(media, 'waiting', this.onMediaWaiting);
17630
+ addEventListener(media, 'ended', this.onMediaEnded);
17712
17631
  }
17713
17632
  onMediaDetaching(event, data) {
17714
- this.media = null;
17715
- const transferringMedia = !!data.transferMedia;
17716
- if (transferringMedia) {
17717
- return;
17718
- }
17719
- if (this.id3Track) {
17720
- if (this.removeCues) {
17721
- clearCurrentCues(this.id3Track, this.onEventCueEnter);
17722
- }
17723
- this.id3Track = null;
17633
+ this.clearInterval();
17634
+ const {
17635
+ media
17636
+ } = this;
17637
+ if (media) {
17638
+ removeEventListener(media, 'playing', this.onMediaPlaying);
17639
+ removeEventListener(media, 'waiting', this.onMediaWaiting);
17640
+ removeEventListener(media, 'ended', this.onMediaEnded);
17641
+ this.media = null;
17724
17642
  }
17725
- this.dateRangeCuesAppended = {};
17643
+ this.mediaSource = undefined;
17726
17644
  }
17727
- onManifestLoading() {
17728
- this.dateRangeCuesAppended = {};
17645
+ onBufferAppended(event, data) {
17646
+ this.buffered = data.timeRanges;
17729
17647
  }
17730
- createTrack(media) {
17731
- const track = this.getID3Track(media.textTracks);
17732
- track.mode = 'hidden';
17733
- return track;
17648
+ get hasBuffered() {
17649
+ return Object.keys(this.buffered).length > 0;
17734
17650
  }
17735
- getID3Track(textTracks) {
17736
- if (!this.media) {
17651
+ tick() {
17652
+ var _this$media3;
17653
+ if (!((_this$media3 = this.media) != null && _this$media3.readyState) || !this.hasBuffered) {
17737
17654
  return;
17738
17655
  }
17739
- for (let i = 0; i < textTracks.length; i++) {
17740
- const textTrack = textTracks[i];
17741
- if (textTrack.kind === 'metadata' && textTrack.label === 'id3') {
17742
- // send 'addtrack' when reusing the textTrack for metadata,
17743
- // same as what we do for captions
17744
- sendAddTrackEvent(textTrack, this.media);
17745
- return textTrack;
17746
- }
17747
- }
17748
- return this.media.addTextTrack('metadata', 'id3');
17656
+ const currentTime = this.media.currentTime;
17657
+ this.poll(currentTime, this.lastCurrentTime);
17658
+ this.lastCurrentTime = currentTime;
17749
17659
  }
17750
- onFragParsingMetadata(event, data) {
17751
- if (!this.media) {
17660
+
17661
+ /**
17662
+ * Checks if the playhead is stuck within a gap, and if so, attempts to free it.
17663
+ * A gap is an unbuffered range between two buffered ranges (or the start and the first buffered range).
17664
+ *
17665
+ * @param lastCurrentTime - Previously read playhead position
17666
+ */
17667
+ poll(currentTime, lastCurrentTime) {
17668
+ var _this$hls, _this$hls2;
17669
+ const config = (_this$hls = this.hls) == null ? undefined : _this$hls.config;
17670
+ if (!config) {
17752
17671
  return;
17753
17672
  }
17754
17673
  const {
17755
- hls: {
17756
- config: {
17757
- enableEmsgMetadataCues,
17758
- enableID3MetadataCues
17759
- }
17760
- }
17674
+ media,
17675
+ stalled
17761
17676
  } = this;
17762
- if (!enableEmsgMetadataCues && !enableID3MetadataCues) {
17677
+ if (!media) {
17763
17678
  return;
17764
17679
  }
17765
17680
  const {
17766
- samples
17767
- } = data;
17681
+ seeking
17682
+ } = media;
17683
+ const seeked = this.seeking && !seeking;
17684
+ const beginSeek = !this.seeking && seeking;
17685
+ const pausedEndedOrHalted = media.paused && !seeking || media.ended || media.playbackRate === 0;
17686
+ this.seeking = seeking;
17768
17687
 
17769
- // create track dynamically
17770
- if (!this.id3Track) {
17771
- this.id3Track = this.createTrack(this.media);
17688
+ // The playhead is moving, no-op
17689
+ if (currentTime !== lastCurrentTime) {
17690
+ if (lastCurrentTime) {
17691
+ this.ended = 0;
17692
+ }
17693
+ this.moved = true;
17694
+ if (!seeking) {
17695
+ this.nudgeRetry = 0;
17696
+ // When crossing between buffered video time ranges, but not audio, flush pipeline with seek (Chrome)
17697
+ if (config.nudgeOnVideoHole && !pausedEndedOrHalted && currentTime > lastCurrentTime) {
17698
+ this.nudgeOnVideoHole(currentTime, lastCurrentTime);
17699
+ }
17700
+ }
17701
+ if (this.waiting === 0) {
17702
+ this.stallResolved(currentTime);
17703
+ }
17704
+ return;
17772
17705
  }
17773
- const Cue = getCueClass();
17774
- if (!Cue) {
17706
+
17707
+ // Clear stalled state when beginning or finishing seeking so that we don't report stalls coming out of a seek
17708
+ if (beginSeek || seeked) {
17709
+ if (seeked) {
17710
+ this.stallResolved(currentTime);
17711
+ }
17775
17712
  return;
17776
17713
  }
17777
- for (let i = 0; i < samples.length; i++) {
17778
- const type = samples[i].type;
17779
- if (type === MetadataSchema.emsg && !enableEmsgMetadataCues || !enableID3MetadataCues) {
17780
- continue;
17714
+
17715
+ // The playhead should not be moving
17716
+ if (pausedEndedOrHalted) {
17717
+ this.nudgeRetry = 0;
17718
+ this.stallResolved(currentTime);
17719
+ // Fire MEDIA_ENDED to workaround event not being dispatched by browser
17720
+ if (!this.ended && media.ended && this.hls) {
17721
+ this.ended = currentTime || 1;
17722
+ this.hls.trigger(Events.MEDIA_ENDED, {
17723
+ stalled: false
17724
+ });
17781
17725
  }
17782
- const frames = getId3Frames(samples[i].data);
17783
- if (frames) {
17784
- const startTime = samples[i].pts;
17785
- let endTime = startTime + samples[i].duration;
17786
- if (endTime > MAX_CUE_ENDTIME) {
17787
- endTime = MAX_CUE_ENDTIME;
17788
- }
17789
- const timeDiff = endTime - startTime;
17790
- if (timeDiff <= 0) {
17791
- endTime = startTime + MIN_CUE_DURATION;
17792
- }
17793
- for (let j = 0; j < frames.length; j++) {
17794
- const frame = frames[j];
17795
- // Safari doesn't put the timestamp frame in the TextTrack
17796
- if (!isId3TimestampFrame(frame)) {
17797
- // add a bounds to any unbounded cues
17798
- this.updateId3CueEnds(startTime, type);
17799
- const cue = createCueWithDataFields(Cue, startTime, endTime, frame, type);
17800
- if (cue) {
17801
- this.id3Track.addCue(cue);
17802
- }
17803
- }
17804
- }
17726
+ return;
17727
+ }
17728
+ if (!BufferHelper.getBuffered(media).length) {
17729
+ this.nudgeRetry = 0;
17730
+ return;
17731
+ }
17732
+
17733
+ // Resolve stalls at buffer holes using the main buffer, whose ranges are the intersections of the A/V sourcebuffers
17734
+ const bufferInfo = BufferHelper.bufferInfo(media, currentTime, 0);
17735
+ const nextStart = bufferInfo.nextStart || 0;
17736
+ const fragmentTracker = this.fragmentTracker;
17737
+ if (seeking && fragmentTracker && this.hls) {
17738
+ // Is there a fragment loading/parsing/appending before currentTime?
17739
+ const inFlightDependency = getInFlightDependency(this.hls.inFlightFragments, currentTime);
17740
+
17741
+ // Waiting for seeking in a buffered range to complete
17742
+ const hasEnoughBuffer = bufferInfo.len > MAX_START_GAP_JUMP;
17743
+ // Next buffered range is too far ahead to jump to while still seeking
17744
+ const noBufferHole = !nextStart || inFlightDependency || nextStart - currentTime > MAX_START_GAP_JUMP && !fragmentTracker.getPartialFragment(currentTime);
17745
+ if (hasEnoughBuffer || noBufferHole) {
17746
+ return;
17805
17747
  }
17748
+ // Reset moved state when seeking to a point in or before a gap/hole
17749
+ this.moved = false;
17806
17750
  }
17807
- }
17808
- updateId3CueEnds(startTime, type) {
17809
- var _this$id3Track;
17810
- const cues = (_this$id3Track = this.id3Track) == null ? undefined : _this$id3Track.cues;
17811
- if (cues) {
17812
- for (let i = cues.length; i--;) {
17813
- const cue = cues[i];
17814
- if (cue.type === type && cue.startTime < startTime && cue.endTime === MAX_CUE_ENDTIME) {
17815
- cue.endTime = startTime;
17751
+
17752
+ // Skip start gaps if we haven't played, but the last poll detected the start of a stall
17753
+ // The addition poll gives the browser a chance to jump the gap for us
17754
+ const levelDetails = (_this$hls2 = this.hls) == null ? undefined : _this$hls2.latestLevelDetails;
17755
+ if (!this.moved && this.stalled !== null && fragmentTracker) {
17756
+ // There is no playable buffer (seeked, waiting for buffer)
17757
+ const isBuffered = bufferInfo.len > 0;
17758
+ if (!isBuffered && !nextStart) {
17759
+ return;
17760
+ }
17761
+ // Jump start gaps within jump threshold
17762
+ const startJump = Math.max(nextStart, bufferInfo.start || 0) - currentTime;
17763
+
17764
+ // When joining a live stream with audio tracks, account for live playlist window sliding by allowing
17765
+ // a larger jump over start gaps caused by the audio-stream-controller buffering a start fragment
17766
+ // that begins over 1 target duration after the video start position.
17767
+ const isLive = !!(levelDetails != null && levelDetails.live);
17768
+ const maxStartGapJump = isLive ? levelDetails.targetduration * 2 : MAX_START_GAP_JUMP;
17769
+ const partialOrGap = fragmentTracker.getPartialFragment(currentTime);
17770
+ if (startJump > 0 && (startJump <= maxStartGapJump || partialOrGap)) {
17771
+ if (!media.paused) {
17772
+ this._trySkipBufferHole(partialOrGap);
17816
17773
  }
17774
+ return;
17817
17775
  }
17818
17776
  }
17819
- }
17820
- onBufferFlushing(event, {
17821
- startOffset,
17822
- endOffset,
17823
- type
17824
- }) {
17825
- const {
17826
- id3Track,
17827
- hls
17828
- } = this;
17829
- if (!hls) {
17777
+
17778
+ // Start tracking stall time
17779
+ const detectStallWithCurrentTimeMs = config.detectStallWithCurrentTimeMs;
17780
+ const tnow = self.performance.now();
17781
+ const tWaiting = this.waiting;
17782
+ if (stalled === null) {
17783
+ // Use time of recent "waiting" event
17784
+ if (tWaiting > 0 && tnow - tWaiting < detectStallWithCurrentTimeMs) {
17785
+ this.stalled = tWaiting;
17786
+ } else {
17787
+ this.stalled = tnow;
17788
+ }
17830
17789
  return;
17831
17790
  }
17832
- const {
17833
- config: {
17834
- enableEmsgMetadataCues,
17835
- enableID3MetadataCues
17791
+ const stalledDuration = tnow - stalled;
17792
+ if (!seeking && (stalledDuration >= detectStallWithCurrentTimeMs || tWaiting) && this.hls) {
17793
+ var _this$mediaSource;
17794
+ // Dispatch MEDIA_ENDED when media.ended/ended event is not signalled at end of stream
17795
+ if (((_this$mediaSource = this.mediaSource) == null ? undefined : _this$mediaSource.readyState) === 'ended' && !(levelDetails != null && levelDetails.live) && Math.abs(currentTime - ((levelDetails == null ? undefined : levelDetails.edge) || 0)) < 1) {
17796
+ if (this.ended) {
17797
+ return;
17798
+ }
17799
+ this.ended = currentTime || 1;
17800
+ this.hls.trigger(Events.MEDIA_ENDED, {
17801
+ stalled: true
17802
+ });
17803
+ return;
17836
17804
  }
17837
- } = hls;
17838
- if (id3Track && (enableEmsgMetadataCues || enableID3MetadataCues)) {
17839
- let predicate;
17840
- if (type === 'audio') {
17841
- predicate = cue => cue.type === MetadataSchema.audioId3 && enableID3MetadataCues;
17842
- } else if (type === 'video') {
17843
- predicate = cue => cue.type === MetadataSchema.emsg && enableEmsgMetadataCues;
17844
- } else {
17845
- predicate = cue => cue.type === MetadataSchema.audioId3 && enableID3MetadataCues || cue.type === MetadataSchema.emsg && enableEmsgMetadataCues;
17805
+ // Report stalling after trying to fix
17806
+ this._reportStall(bufferInfo);
17807
+ if (!this.media || !this.hls) {
17808
+ return;
17846
17809
  }
17847
- removeCuesInRange(id3Track, startOffset, endOffset, predicate);
17848
17810
  }
17811
+ const bufferedWithHoles = BufferHelper.bufferInfo(media, currentTime, config.maxBufferHole);
17812
+ this._tryFixBufferStall(bufferedWithHoles, stalledDuration);
17849
17813
  }
17850
- onLevelUpdated(event, {
17851
- details
17852
- }) {
17853
- this.updateDateRangeCues(details, true);
17854
- }
17855
- onLevelPtsUpdated(event, data) {
17856
- if (Math.abs(data.drift) > 0.01) {
17857
- this.updateDateRangeCues(data.details);
17814
+ stallResolved(currentTime) {
17815
+ const stalled = this.stalled;
17816
+ if (stalled && this.hls) {
17817
+ this.stalled = null;
17818
+ // The playhead is now moving, but was previously stalled
17819
+ if (this.stallReported) {
17820
+ const stalledDuration = self.performance.now() - stalled;
17821
+ this.log(`playback not stuck anymore @${currentTime}, after ${Math.round(stalledDuration)}ms`);
17822
+ this.stallReported = false;
17823
+ this.waiting = 0;
17824
+ this.hls.trigger(Events.STALL_RESOLVED, {});
17825
+ }
17858
17826
  }
17859
17827
  }
17860
- updateDateRangeCues(details, removeOldCues) {
17861
- if (!this.media || !details.hasProgramDateTime || !this.hls.config.enableDateRangeMetadataCues) {
17862
- return;
17828
+ nudgeOnVideoHole(currentTime, lastCurrentTime) {
17829
+ var _this$buffered$audio;
17830
+ // Chrome will play one second past a hole in video buffered time ranges without rendering any video from the subsequent range and then stall as long as audio is buffered:
17831
+ // https://github.com/video-dev/hls.js/issues/5631
17832
+ // https://issues.chromium.org/issues/40280613#comment10
17833
+ // Detect the potential for this situation and proactively seek to flush the video pipeline once the playhead passes the start of the video hole.
17834
+ // When there are audio and video buffers and currentTime is past the end of the first video buffered range...
17835
+ const videoSourceBuffered = this.buffered.video;
17836
+ if (this.hls && this.media && this.fragmentTracker && (_this$buffered$audio = this.buffered.audio) != null && _this$buffered$audio.length && videoSourceBuffered && videoSourceBuffered.length > 1 && currentTime > videoSourceBuffered.end(0)) {
17837
+ // and audio is buffered at the playhead
17838
+ const audioBufferInfo = BufferHelper.bufferedInfo(BufferHelper.timeRangesToArray(this.buffered.audio), currentTime, 0);
17839
+ if (audioBufferInfo.len > 1 && lastCurrentTime >= audioBufferInfo.start) {
17840
+ const videoTimes = BufferHelper.timeRangesToArray(videoSourceBuffered);
17841
+ const lastBufferedIndex = BufferHelper.bufferedInfo(videoTimes, lastCurrentTime, 0).bufferedIndex;
17842
+ // nudge when crossing into another video buffered range (hole).
17843
+ if (lastBufferedIndex > -1 && lastBufferedIndex < videoTimes.length - 1) {
17844
+ const bufferedIndex = BufferHelper.bufferedInfo(videoTimes, currentTime, 0).bufferedIndex;
17845
+ const holeStart = videoTimes[lastBufferedIndex].end;
17846
+ const holeEnd = videoTimes[lastBufferedIndex + 1].start;
17847
+ if ((bufferedIndex === -1 || bufferedIndex > lastBufferedIndex) && holeEnd - holeStart < 1 &&
17848
+ // `maxBufferHole` may be too small and setting it to 0 should not disable this feature
17849
+ currentTime - holeStart < 2) {
17850
+ const error = new Error(`nudging playhead to flush pipeline after video hole. currentTime: ${currentTime} hole: ${holeStart} -> ${holeEnd} buffered index: ${bufferedIndex}`);
17851
+ this.warn(error.message);
17852
+ // Magic number to flush the pipeline without interuption to audio playback:
17853
+ this.media.currentTime += 0.000001;
17854
+ const frag = this.fragmentTracker.getPartialFragment(currentTime) || undefined;
17855
+ const bufferInfo = BufferHelper.bufferInfo(this.media, currentTime, 0);
17856
+ this.hls.trigger(Events.ERROR, {
17857
+ type: ErrorTypes.MEDIA_ERROR,
17858
+ details: ErrorDetails.BUFFER_SEEK_OVER_HOLE,
17859
+ fatal: false,
17860
+ error,
17861
+ reason: error.message,
17862
+ frag,
17863
+ buffer: bufferInfo.len,
17864
+ bufferInfo
17865
+ });
17866
+ }
17867
+ }
17868
+ }
17863
17869
  }
17870
+ }
17871
+
17872
+ /**
17873
+ * Detects and attempts to fix known buffer stalling issues.
17874
+ * @param bufferInfo - The properties of the current buffer.
17875
+ * @param stalledDurationMs - The amount of time Hls.js has been stalling for.
17876
+ * @private
17877
+ */
17878
+ _tryFixBufferStall(bufferInfo, stalledDurationMs) {
17879
+ var _this$hls3;
17864
17880
  const {
17865
- id3Track
17881
+ fragmentTracker,
17882
+ media
17866
17883
  } = this;
17867
- const {
17868
- dateRanges
17869
- } = details;
17884
+ const config = (_this$hls3 = this.hls) == null ? undefined : _this$hls3.config;
17885
+ if (!media || !fragmentTracker || !config) {
17886
+ return;
17887
+ }
17888
+ const currentTime = media.currentTime;
17889
+ const partial = fragmentTracker.getPartialFragment(currentTime);
17890
+ if (partial) {
17891
+ // Try to skip over the buffer hole caused by a partial fragment
17892
+ // This method isn't limited by the size of the gap between buffered ranges
17893
+ const targetTime = this._trySkipBufferHole(partial);
17894
+ // we return here in this case, meaning
17895
+ // the branch below only executes when we haven't seeked to a new position
17896
+ if (targetTime || !this.media) {
17897
+ return;
17898
+ }
17899
+ }
17900
+
17901
+ // if we haven't had to skip over a buffer hole of a partial fragment
17902
+ // we may just have to "nudge" the playlist as the browser decoding/rendering engine
17903
+ // needs to cross some sort of threshold covering all source-buffers content
17904
+ // to start playing properly.
17905
+ const bufferedRanges = bufferInfo.buffered;
17906
+ if ((bufferedRanges && bufferedRanges.length > 1 && bufferInfo.len > config.maxBufferHole || bufferInfo.nextStart && bufferInfo.nextStart - currentTime < config.maxBufferHole) && (stalledDurationMs > config.highBufferWatchdogPeriod * 1000 || this.waiting)) {
17907
+ this.warn('Trying to nudge playhead over buffer-hole');
17908
+ // Try to nudge currentTime over a buffer hole if we've been stalling for the configured amount of seconds
17909
+ // We only try to jump the hole if it's under the configured size
17910
+ this._tryNudgeBuffer(bufferInfo);
17911
+ }
17912
+ }
17913
+
17914
+ /**
17915
+ * Triggers a BUFFER_STALLED_ERROR event, but only once per stall period.
17916
+ * @param bufferLen - The playhead distance from the end of the current buffer segment.
17917
+ * @private
17918
+ */
17919
+ _reportStall(bufferInfo) {
17920
+ const {
17921
+ hls,
17922
+ media,
17923
+ stallReported,
17924
+ stalled
17925
+ } = this;
17926
+ if (!stallReported && stalled !== null && media && hls) {
17927
+ // Report stalled error once
17928
+ this.stallReported = true;
17929
+ const error = new Error(`Playback stalling at @${media.currentTime} due to low buffer (${JSON.stringify(bufferInfo)})`);
17930
+ this.warn(error.message);
17931
+ hls.trigger(Events.ERROR, {
17932
+ type: ErrorTypes.MEDIA_ERROR,
17933
+ details: ErrorDetails.BUFFER_STALLED_ERROR,
17934
+ fatal: false,
17935
+ error,
17936
+ buffer: bufferInfo.len,
17937
+ bufferInfo,
17938
+ stalled: {
17939
+ start: stalled
17940
+ }
17941
+ });
17942
+ }
17943
+ }
17944
+
17945
+ /**
17946
+ * Attempts to fix buffer stalls by jumping over known gaps caused by partial fragments
17947
+ * @param partial - The partial fragment found at the current time (where playback is stalling).
17948
+ * @private
17949
+ */
17950
+ _trySkipBufferHole(partial) {
17951
+ var _this$hls4;
17952
+ const {
17953
+ fragmentTracker,
17954
+ media
17955
+ } = this;
17956
+ const config = (_this$hls4 = this.hls) == null ? undefined : _this$hls4.config;
17957
+ if (!media || !fragmentTracker || !config) {
17958
+ return 0;
17959
+ }
17960
+
17961
+ // Check if currentTime is between unbuffered regions of partial fragments
17962
+ const currentTime = media.currentTime;
17963
+ const bufferInfo = BufferHelper.bufferInfo(media, currentTime, 0);
17964
+ const startTime = currentTime < bufferInfo.start ? bufferInfo.start : bufferInfo.nextStart;
17965
+ if (startTime && this.hls) {
17966
+ const bufferStarved = bufferInfo.len <= config.maxBufferHole;
17967
+ const waiting = bufferInfo.len > 0 && bufferInfo.len < 1 && media.readyState < 3;
17968
+ const gapLength = startTime - currentTime;
17969
+ if (gapLength > 0 && (bufferStarved || waiting)) {
17970
+ // Only allow large gaps to be skipped if it is a start gap, or all fragments in skip range are partial
17971
+ if (gapLength > config.maxBufferHole) {
17972
+ let startGap = false;
17973
+ if (currentTime === 0) {
17974
+ const startFrag = fragmentTracker.getAppendedFrag(0, PlaylistLevelType.MAIN);
17975
+ if (startFrag && startTime < startFrag.end) {
17976
+ startGap = true;
17977
+ }
17978
+ }
17979
+ if (!startGap) {
17980
+ const startProvisioned = partial || fragmentTracker.getAppendedFrag(currentTime, PlaylistLevelType.MAIN);
17981
+ if (startProvisioned) {
17982
+ var _this$hls$loadLevelOb;
17983
+ // Do not seek when selected variant playlist is unloaded
17984
+ if (!((_this$hls$loadLevelOb = this.hls.loadLevelObj) != null && _this$hls$loadLevelOb.details)) {
17985
+ return 0;
17986
+ }
17987
+ // Do not seek when required fragments are inflight or appending
17988
+ const inFlightDependency = getInFlightDependency(this.hls.inFlightFragments, startTime);
17989
+ if (inFlightDependency) {
17990
+ return 0;
17991
+ }
17992
+ // Do not seek if we can't walk tracked fragments to end of gap
17993
+ let moreToLoad = false;
17994
+ let pos = startProvisioned.end;
17995
+ while (pos < startTime) {
17996
+ const provisioned = fragmentTracker.getPartialFragment(pos);
17997
+ if (provisioned) {
17998
+ pos += provisioned.duration;
17999
+ } else {
18000
+ moreToLoad = true;
18001
+ break;
18002
+ }
18003
+ }
18004
+ if (moreToLoad) {
18005
+ return 0;
18006
+ }
18007
+ }
18008
+ }
18009
+ }
18010
+ const targetTime = Math.max(startTime + SKIP_BUFFER_RANGE_START, currentTime + SKIP_BUFFER_HOLE_STEP_SECONDS);
18011
+ this.warn(`skipping hole, adjusting currentTime from ${currentTime} to ${targetTime}`);
18012
+ this.moved = true;
18013
+ media.currentTime = targetTime;
18014
+ if (!(partial != null && partial.gap)) {
18015
+ const error = new Error(`fragment loaded with buffer holes, seeking from ${currentTime} to ${targetTime}`);
18016
+ this.hls.trigger(Events.ERROR, {
18017
+ type: ErrorTypes.MEDIA_ERROR,
18018
+ details: ErrorDetails.BUFFER_SEEK_OVER_HOLE,
18019
+ fatal: false,
18020
+ error,
18021
+ reason: error.message,
18022
+ frag: partial || undefined,
18023
+ buffer: bufferInfo.len,
18024
+ bufferInfo
18025
+ });
18026
+ }
18027
+ return targetTime;
18028
+ }
18029
+ }
18030
+ return 0;
18031
+ }
18032
+
18033
+ /**
18034
+ * Attempts to fix buffer stalls by advancing the mediaElement's current time by a small amount.
18035
+ * @private
18036
+ */
18037
+ _tryNudgeBuffer(bufferInfo) {
18038
+ const {
18039
+ hls,
18040
+ media,
18041
+ nudgeRetry
18042
+ } = this;
18043
+ const config = hls == null ? undefined : hls.config;
18044
+ if (!media || !config) {
18045
+ return 0;
18046
+ }
18047
+ const currentTime = media.currentTime;
18048
+ this.nudgeRetry++;
18049
+ if (nudgeRetry < config.nudgeMaxRetry) {
18050
+ const targetTime = currentTime + (nudgeRetry + 1) * config.nudgeOffset;
18051
+ // playback stalled in buffered area ... let's nudge currentTime to try to overcome this
18052
+ const error = new Error(`Nudging 'currentTime' from ${currentTime} to ${targetTime}`);
18053
+ this.warn(error.message);
18054
+ media.currentTime = targetTime;
18055
+ hls.trigger(Events.ERROR, {
18056
+ type: ErrorTypes.MEDIA_ERROR,
18057
+ details: ErrorDetails.BUFFER_NUDGE_ON_STALL,
18058
+ error,
18059
+ fatal: false,
18060
+ buffer: bufferInfo.len,
18061
+ bufferInfo
18062
+ });
18063
+ } else {
18064
+ const error = new Error(`Playhead still not moving while enough data buffered @${currentTime} after ${config.nudgeMaxRetry} nudges`);
18065
+ this.error(error.message);
18066
+ hls.trigger(Events.ERROR, {
18067
+ type: ErrorTypes.MEDIA_ERROR,
18068
+ details: ErrorDetails.BUFFER_STALLED_ERROR,
18069
+ error,
18070
+ fatal: true,
18071
+ buffer: bufferInfo.len,
18072
+ bufferInfo
18073
+ });
18074
+ }
18075
+ }
18076
+ }
18077
+ function getInFlightDependency(inFlightFragments, currentTime) {
18078
+ const main = inFlight(inFlightFragments.main);
18079
+ if (main && main.start <= currentTime) {
18080
+ return main;
18081
+ }
18082
+ const audio = inFlight(inFlightFragments.audio);
18083
+ if (audio && audio.start <= currentTime) {
18084
+ return audio;
18085
+ }
18086
+ return null;
18087
+ }
18088
+ function inFlight(inFlightData) {
18089
+ if (!inFlightData) {
18090
+ return null;
18091
+ }
18092
+ switch (inFlightData.state) {
18093
+ case State.IDLE:
18094
+ case State.STOPPED:
18095
+ case State.ENDED:
18096
+ case State.ERROR:
18097
+ return null;
18098
+ }
18099
+ return inFlightData.frag;
18100
+ }
18101
+
18102
+ function sendAddTrackEvent(track, videoEl) {
18103
+ let event;
18104
+ try {
18105
+ event = new Event('addtrack');
18106
+ } catch (err) {
18107
+ // for IE11
18108
+ event = document.createEvent('Event');
18109
+ event.initEvent('addtrack', false, false);
18110
+ }
18111
+ event.track = track;
18112
+ videoEl.dispatchEvent(event);
18113
+ }
18114
+ function clearCurrentCues(track, enterHandler) {
18115
+ // When track.mode is disabled, track.cues will be null.
18116
+ // To guarantee the removal of cues, we need to temporarily
18117
+ // change the mode to hidden
18118
+ const mode = track.mode;
18119
+ if (mode === 'disabled') {
18120
+ track.mode = 'hidden';
18121
+ }
18122
+ if (track.cues) {
18123
+ for (let i = track.cues.length; i--;) {
18124
+ if (enterHandler) {
18125
+ track.cues[i].removeEventListener('enter', enterHandler);
18126
+ }
18127
+ track.removeCue(track.cues[i]);
18128
+ }
18129
+ }
18130
+ if (mode === 'disabled') {
18131
+ track.mode = mode;
18132
+ }
18133
+ }
18134
+ function removeCuesInRange(track, start, end, predicate) {
18135
+ const mode = track.mode;
18136
+ if (mode === 'disabled') {
18137
+ track.mode = 'hidden';
18138
+ }
18139
+ if (track.cues && track.cues.length > 0) {
18140
+ const cues = getCuesInRange(track.cues, start, end);
18141
+ for (let i = 0; i < cues.length; i++) {
18142
+ if (!predicate || predicate(cues[i])) {
18143
+ track.removeCue(cues[i]);
18144
+ }
18145
+ }
18146
+ }
18147
+ if (mode === 'disabled') {
18148
+ track.mode = mode;
18149
+ }
18150
+ }
18151
+
18152
+ // Find first cue starting after given time.
18153
+ // Modified version of binary search O(log(n)).
18154
+ function getFirstCueIndexAfterTime(cues, time) {
18155
+ // If first cue starts after time, start there
18156
+ if (time < cues[0].startTime) {
18157
+ return 0;
18158
+ }
18159
+ // If the last cue ends before time there is no overlap
18160
+ const len = cues.length - 1;
18161
+ if (time > cues[len].endTime) {
18162
+ return -1;
18163
+ }
18164
+ let left = 0;
18165
+ let right = len;
18166
+ while (left <= right) {
18167
+ const mid = Math.floor((right + left) / 2);
18168
+ if (time < cues[mid].startTime) {
18169
+ right = mid - 1;
18170
+ } else if (time > cues[mid].startTime && left < len) {
18171
+ left = mid + 1;
18172
+ } else {
18173
+ // If it's not lower or higher, it must be equal.
18174
+ return mid;
18175
+ }
18176
+ }
18177
+ // At this point, left and right have swapped.
18178
+ // No direct match was found, left or right element must be the closest. Check which one has the smallest diff.
18179
+ return cues[left].startTime - time < time - cues[right].startTime ? left : right;
18180
+ }
18181
+ function getCuesInRange(cues, start, end) {
18182
+ const cuesFound = [];
18183
+ const firstCueInRange = getFirstCueIndexAfterTime(cues, start);
18184
+ if (firstCueInRange > -1) {
18185
+ for (let i = firstCueInRange, len = cues.length; i < len; i++) {
18186
+ const cue = cues[i];
18187
+ if (cue.startTime >= start && cue.endTime <= end) {
18188
+ cuesFound.push(cue);
18189
+ } else if (cue.startTime > end) {
18190
+ return cuesFound;
18191
+ }
18192
+ }
18193
+ }
18194
+ return cuesFound;
18195
+ }
18196
+
18197
+ const MIN_CUE_DURATION = 0.25;
18198
+ function getCueClass() {
18199
+ if (typeof self === 'undefined') return undefined;
18200
+ return self.VTTCue || self.TextTrackCue;
18201
+ }
18202
+ function createCueWithDataFields(Cue, startTime, endTime, data, type) {
18203
+ let cue = new Cue(startTime, endTime, '');
18204
+ try {
18205
+ cue.value = data;
18206
+ if (type) {
18207
+ cue.type = type;
18208
+ }
18209
+ } catch (e) {
18210
+ cue = new Cue(startTime, endTime, JSON.stringify(type ? _objectSpread2({
18211
+ type
18212
+ }, data) : data));
18213
+ }
18214
+ return cue;
18215
+ }
18216
+
18217
+ // VTTCue latest draft allows an infinite duration, fallback
18218
+ // to MAX_VALUE if necessary
18219
+ const MAX_CUE_ENDTIME = (() => {
18220
+ const Cue = getCueClass();
18221
+ try {
18222
+ Cue && new Cue(0, Number.POSITIVE_INFINITY, '');
18223
+ } catch (e) {
18224
+ return Number.MAX_VALUE;
18225
+ }
18226
+ return Number.POSITIVE_INFINITY;
18227
+ })();
18228
+ function hexToArrayBuffer(str) {
18229
+ return Uint8Array.from(str.replace(/^0x/, '').replace(/([\da-fA-F]{2}) ?/g, '0x$1 ').replace(/ +$/, '').split(' ')).buffer;
18230
+ }
18231
+ class ID3TrackController {
18232
+ constructor(hls) {
18233
+ this.hls = undefined;
18234
+ this.id3Track = null;
18235
+ this.media = null;
18236
+ this.dateRangeCuesAppended = {};
18237
+ this.removeCues = true;
18238
+ this.onEventCueEnter = () => {
18239
+ if (!this.hls) {
18240
+ return;
18241
+ }
18242
+ this.hls.trigger(Events.EVENT_CUE_ENTER, {});
18243
+ };
18244
+ this.hls = hls;
18245
+ this._registerListeners();
18246
+ }
18247
+ destroy() {
18248
+ this._unregisterListeners();
18249
+ this.id3Track = null;
18250
+ this.media = null;
18251
+ this.dateRangeCuesAppended = {};
18252
+ // @ts-ignore
18253
+ this.hls = this.onEventCueEnter = null;
18254
+ }
18255
+ _registerListeners() {
18256
+ const {
18257
+ hls
18258
+ } = this;
18259
+ hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
18260
+ hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
18261
+ hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
18262
+ hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
18263
+ hls.on(Events.FRAG_PARSING_METADATA, this.onFragParsingMetadata, this);
18264
+ hls.on(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
18265
+ hls.on(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
18266
+ hls.on(Events.LEVEL_PTS_UPDATED, this.onLevelPtsUpdated, this);
18267
+ }
18268
+ _unregisterListeners() {
18269
+ const {
18270
+ hls
18271
+ } = this;
18272
+ hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
18273
+ hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
18274
+ hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
18275
+ hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
18276
+ hls.off(Events.FRAG_PARSING_METADATA, this.onFragParsingMetadata, this);
18277
+ hls.off(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
18278
+ hls.off(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
18279
+ hls.off(Events.LEVEL_PTS_UPDATED, this.onLevelPtsUpdated, this);
18280
+ }
18281
+ // Add ID3 metatadata text track.
18282
+ onMediaAttaching(event, data) {
18283
+ var _data$overrides;
18284
+ this.media = data.media;
18285
+ if (((_data$overrides = data.overrides) == null ? undefined : _data$overrides.cueRemoval) === false) {
18286
+ this.removeCues = false;
18287
+ }
18288
+ }
18289
+ onMediaAttached() {
18290
+ const details = this.hls.latestLevelDetails;
18291
+ if (details) {
18292
+ this.updateDateRangeCues(details);
18293
+ }
18294
+ }
18295
+ onMediaDetaching(event, data) {
18296
+ this.media = null;
18297
+ const transferringMedia = !!data.transferMedia;
18298
+ if (transferringMedia) {
18299
+ return;
18300
+ }
18301
+ if (this.id3Track) {
18302
+ if (this.removeCues) {
18303
+ clearCurrentCues(this.id3Track, this.onEventCueEnter);
18304
+ }
18305
+ this.id3Track = null;
18306
+ }
18307
+ this.dateRangeCuesAppended = {};
18308
+ }
18309
+ onManifestLoading() {
18310
+ this.dateRangeCuesAppended = {};
18311
+ }
18312
+ createTrack(media) {
18313
+ const track = this.getID3Track(media.textTracks);
18314
+ track.mode = 'hidden';
18315
+ return track;
18316
+ }
18317
+ getID3Track(textTracks) {
18318
+ if (!this.media) {
18319
+ return;
18320
+ }
18321
+ for (let i = 0; i < textTracks.length; i++) {
18322
+ const textTrack = textTracks[i];
18323
+ if (textTrack.kind === 'metadata' && textTrack.label === 'id3') {
18324
+ // send 'addtrack' when reusing the textTrack for metadata,
18325
+ // same as what we do for captions
18326
+ sendAddTrackEvent(textTrack, this.media);
18327
+ return textTrack;
18328
+ }
18329
+ }
18330
+ return this.media.addTextTrack('metadata', 'id3');
18331
+ }
18332
+ onFragParsingMetadata(event, data) {
18333
+ if (!this.media) {
18334
+ return;
18335
+ }
18336
+ const {
18337
+ hls: {
18338
+ config: {
18339
+ enableEmsgMetadataCues,
18340
+ enableID3MetadataCues
18341
+ }
18342
+ }
18343
+ } = this;
18344
+ if (!enableEmsgMetadataCues && !enableID3MetadataCues) {
18345
+ return;
18346
+ }
18347
+ const {
18348
+ samples
18349
+ } = data;
18350
+
18351
+ // create track dynamically
18352
+ if (!this.id3Track) {
18353
+ this.id3Track = this.createTrack(this.media);
18354
+ }
18355
+ const Cue = getCueClass();
18356
+ if (!Cue) {
18357
+ return;
18358
+ }
18359
+ for (let i = 0; i < samples.length; i++) {
18360
+ const type = samples[i].type;
18361
+ if (type === MetadataSchema.emsg && !enableEmsgMetadataCues || !enableID3MetadataCues) {
18362
+ continue;
18363
+ }
18364
+ const frames = getId3Frames(samples[i].data);
18365
+ if (frames) {
18366
+ const startTime = samples[i].pts;
18367
+ let endTime = startTime + samples[i].duration;
18368
+ if (endTime > MAX_CUE_ENDTIME) {
18369
+ endTime = MAX_CUE_ENDTIME;
18370
+ }
18371
+ const timeDiff = endTime - startTime;
18372
+ if (timeDiff <= 0) {
18373
+ endTime = startTime + MIN_CUE_DURATION;
18374
+ }
18375
+ for (let j = 0; j < frames.length; j++) {
18376
+ const frame = frames[j];
18377
+ // Safari doesn't put the timestamp frame in the TextTrack
18378
+ if (!isId3TimestampFrame(frame)) {
18379
+ // add a bounds to any unbounded cues
18380
+ this.updateId3CueEnds(startTime, type);
18381
+ const cue = createCueWithDataFields(Cue, startTime, endTime, frame, type);
18382
+ if (cue) {
18383
+ this.id3Track.addCue(cue);
18384
+ }
18385
+ }
18386
+ }
18387
+ }
18388
+ }
18389
+ }
18390
+ updateId3CueEnds(startTime, type) {
18391
+ var _this$id3Track;
18392
+ const cues = (_this$id3Track = this.id3Track) == null ? undefined : _this$id3Track.cues;
18393
+ if (cues) {
18394
+ for (let i = cues.length; i--;) {
18395
+ const cue = cues[i];
18396
+ if (cue.type === type && cue.startTime < startTime && cue.endTime === MAX_CUE_ENDTIME) {
18397
+ cue.endTime = startTime;
18398
+ }
18399
+ }
18400
+ }
18401
+ }
18402
+ onBufferFlushing(event, {
18403
+ startOffset,
18404
+ endOffset,
18405
+ type
18406
+ }) {
18407
+ const {
18408
+ id3Track,
18409
+ hls
18410
+ } = this;
18411
+ if (!hls) {
18412
+ return;
18413
+ }
18414
+ const {
18415
+ config: {
18416
+ enableEmsgMetadataCues,
18417
+ enableID3MetadataCues
18418
+ }
18419
+ } = hls;
18420
+ if (id3Track && (enableEmsgMetadataCues || enableID3MetadataCues)) {
18421
+ let predicate;
18422
+ if (type === 'audio') {
18423
+ predicate = cue => cue.type === MetadataSchema.audioId3 && enableID3MetadataCues;
18424
+ } else if (type === 'video') {
18425
+ predicate = cue => cue.type === MetadataSchema.emsg && enableEmsgMetadataCues;
18426
+ } else {
18427
+ predicate = cue => cue.type === MetadataSchema.audioId3 && enableID3MetadataCues || cue.type === MetadataSchema.emsg && enableEmsgMetadataCues;
18428
+ }
18429
+ removeCuesInRange(id3Track, startOffset, endOffset, predicate);
18430
+ }
18431
+ }
18432
+ onLevelUpdated(event, {
18433
+ details
18434
+ }) {
18435
+ this.updateDateRangeCues(details, true);
18436
+ }
18437
+ onLevelPtsUpdated(event, data) {
18438
+ if (Math.abs(data.drift) > 0.01) {
18439
+ this.updateDateRangeCues(data.details);
18440
+ }
18441
+ }
18442
+ updateDateRangeCues(details, removeOldCues) {
18443
+ if (!this.media || !details.hasProgramDateTime || !this.hls.config.enableDateRangeMetadataCues) {
18444
+ return;
18445
+ }
18446
+ const {
18447
+ id3Track
18448
+ } = this;
18449
+ const {
18450
+ dateRanges
18451
+ } = details;
17870
18452
  const ids = Object.keys(dateRanges);
17871
18453
  let dateRangeCuesAppended = this.dateRangeCuesAppended;
17872
18454
  // Remove cues from track not found in details.dateRanges
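For reference, the cues built in the methods above end up in a hidden TextTrack of kind 'metadata' labelled 'id3' on the media element, so applications can read them with the standard TextTrack API. A minimal sketch follows; cue.type matches the MetadataSchema recorded when the cue was added, while the exact payload carried by cue.value depends on the Cue class in use and is an assumption here.

  function watchTimedMetadata(video) {
    for (let i = 0; i < video.textTracks.length; i++) {
      const track = video.textTracks[i];
      if (track.kind === 'metadata' && track.label === 'id3') {
        track.addEventListener('cuechange', () => {
          const active = track.activeCues;
          if (!active) {
            return;
          }
          for (let j = 0; j < active.length; j++) {
            const cue = active[j];
            // cue.type reflects the schema of the source sample (ID3, emsg, date-range)
            console.log('metadata cue', cue.startTime, cue.endTime, cue.type, cue.value);
          }
        });
      }
    }
  }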
@@ -18473,6 +19055,9 @@ class LevelController extends BasePlaylistController {
18473
19055
  }
18474
19056
  return this._levels;
18475
19057
  }
19058
+ get loadLevelObj() {
19059
+ return this.currentLevel;
19060
+ }
18476
19061
  get level() {
18477
19062
  return this.currentLevelIndex;
18478
19063
  }
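The loadLevelObj getter above returns the cached Level object rather than an index. A small sketch of null-safe use through the Hls facade, which exposes a matching getter later in this diff; bitrate and uri are standard Level fields:

  const index = hls.loadLevel;      // numeric index of the level being loaded
  const level = hls.loadLevelObj;   // the corresponding Level object, or null
  if (level) {
    console.log(`level ${index}: ${level.bitrate} bps, ${level.uri}`);
  }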
@@ -18545,599 +19130,223 @@ class LevelController extends BasePlaylistController {
18545
19130
  this.loadPlaylist(hlsUrlParameters);
18546
19131
  }
18547
19132
  }
18548
- get manualLevel() {
18549
- return this.manualLevelIndex;
18550
- }
18551
- set manualLevel(newLevel) {
18552
- this.manualLevelIndex = newLevel;
18553
- if (this._startLevel === undefined) {
18554
- this._startLevel = newLevel;
18555
- }
18556
- if (newLevel !== -1) {
18557
- this.level = newLevel;
18558
- }
18559
- }
18560
- get firstLevel() {
18561
- return this._firstLevel;
18562
- }
18563
- set firstLevel(newLevel) {
18564
- this._firstLevel = newLevel;
18565
- }
18566
- get startLevel() {
18567
- // Setting hls.startLevel (this._startLevel) overrides config.startLevel
18568
- if (this._startLevel === undefined) {
18569
- const configStartLevel = this.hls.config.startLevel;
18570
- if (configStartLevel !== undefined) {
18571
- return configStartLevel;
18572
- }
18573
- return this.hls.firstAutoLevel;
18574
- }
18575
- return this._startLevel;
18576
- }
18577
- set startLevel(newLevel) {
18578
- this._startLevel = newLevel;
18579
- }
18580
- get pathwayPriority() {
18581
- if (this.steering) {
18582
- return this.steering.pathwayPriority;
18583
- }
18584
- return null;
18585
- }
18586
- set pathwayPriority(pathwayPriority) {
18587
- if (this.steering) {
18588
- const pathwaysList = this.steering.pathways();
18589
- const filteredPathwayPriority = pathwayPriority.filter(pathwayId => {
18590
- return pathwaysList.indexOf(pathwayId) !== -1;
18591
- });
18592
- if (pathwayPriority.length < 1) {
18593
- this.warn(`pathwayPriority ${pathwayPriority} should contain at least one pathway from list: ${pathwaysList}`);
18594
- return;
18595
- }
18596
- this.steering.pathwayPriority = filteredPathwayPriority;
18597
- }
18598
- }
18599
- onError(event, data) {
18600
- if (data.fatal || !data.context) {
18601
- return;
18602
- }
18603
- if (data.context.type === PlaylistContextType.LEVEL && data.context.level === this.level) {
18604
- this.checkRetry(data);
18605
- }
18606
- }
18607
-
18608
- // reset errors on the successful load of a fragment
18609
- onFragBuffered(event, {
18610
- frag
18611
- }) {
18612
- if (frag !== undefined && frag.type === PlaylistLevelType.MAIN) {
18613
- const el = frag.elementaryStreams;
18614
- if (!Object.keys(el).some(type => !!el[type])) {
18615
- return;
18616
- }
18617
- const level = this._levels[frag.level];
18618
- if (level != null && level.loadError) {
18619
- this.log(`Resetting level error count of ${level.loadError} on frag buffered`);
18620
- level.loadError = 0;
18621
- }
18622
- }
18623
- }
18624
- onLevelLoaded(event, data) {
18625
- var _data$deliveryDirecti2;
18626
- const {
18627
- level,
18628
- details
18629
- } = data;
18630
- const curLevel = data.levelInfo;
18631
- if (!curLevel) {
18632
- var _data$deliveryDirecti;
18633
- this.warn(`Invalid level index ${level}`);
18634
- if ((_data$deliveryDirecti = data.deliveryDirectives) != null && _data$deliveryDirecti.skip) {
18635
- details.deltaUpdateFailed = true;
18636
- }
18637
- return;
18638
- }
18639
-
18640
- // only process level loaded events matching with expected level or prior to switch when media playlist is loaded directly
18641
- if (curLevel === this.currentLevel || data.withoutMultiVariant) {
18642
- // reset level load error counter on successful level loaded only if there is no issues with fragments
18643
- if (curLevel.fragmentError === 0) {
18644
- curLevel.loadError = 0;
18645
- }
18646
- // Ignore matching details populated by loading a Media Playlist directly
18647
- let previousDetails = curLevel.details;
18648
- if (previousDetails === data.details && previousDetails.advanced) {
18649
- previousDetails = undefined;
18650
- }
18651
- this.playlistLoaded(level, data, previousDetails);
18652
- } else if ((_data$deliveryDirecti2 = data.deliveryDirectives) != null && _data$deliveryDirecti2.skip) {
18653
- // received a delta playlist update that cannot be merged
18654
- details.deltaUpdateFailed = true;
18655
- }
18656
- }
18657
- loadPlaylist(hlsUrlParameters) {
18658
- super.loadPlaylist();
18659
- if (this.shouldLoadPlaylist(this.currentLevel)) {
18660
- this.scheduleLoading(this.currentLevel, hlsUrlParameters);
18661
- }
18662
- }
18663
- loadingPlaylist(currentLevel, hlsUrlParameters) {
18664
- super.loadingPlaylist(currentLevel, hlsUrlParameters);
18665
- const url = this.getUrlWithDirectives(currentLevel.uri, hlsUrlParameters);
18666
- const currentLevelIndex = this.currentLevelIndex;
18667
- const pathwayId = currentLevel.attrs['PATHWAY-ID'];
18668
- const details = currentLevel.details;
18669
- const age = details == null ? undefined : details.age;
18670
- this.log(`Loading level index ${currentLevelIndex}${(hlsUrlParameters == null ? undefined : hlsUrlParameters.msn) !== undefined ? ' at sn ' + hlsUrlParameters.msn + ' part ' + hlsUrlParameters.part : ''}${pathwayId ? ' Pathway ' + pathwayId : ''}${age && details.live ? ' age ' + age.toFixed(1) + (details.type ? ' ' + details.type || '' : '') : ''} ${url}`);
18671
- this.hls.trigger(Events.LEVEL_LOADING, {
18672
- url,
18673
- level: currentLevelIndex,
18674
- levelInfo: currentLevel,
18675
- pathwayId: currentLevel.attrs['PATHWAY-ID'],
18676
- id: 0,
18677
- // Deprecated Level urlId
18678
- deliveryDirectives: hlsUrlParameters || null
18679
- });
18680
- }
18681
- get nextLoadLevel() {
18682
- if (this.manualLevelIndex !== -1) {
18683
- return this.manualLevelIndex;
18684
- } else {
18685
- return this.hls.nextAutoLevel;
18686
- }
18687
- }
18688
- set nextLoadLevel(nextLevel) {
18689
- this.level = nextLevel;
18690
- if (this.manualLevelIndex === -1) {
18691
- this.hls.nextAutoLevel = nextLevel;
18692
- }
18693
- }
18694
- removeLevel(levelIndex) {
18695
- var _this$currentLevel;
18696
- if (this._levels.length === 1) {
18697
- return;
18698
- }
18699
- const levels = this._levels.filter((level, index) => {
18700
- if (index !== levelIndex) {
18701
- return true;
18702
- }
18703
- if (this.steering) {
18704
- this.steering.removeLevel(level);
18705
- }
18706
- if (level === this.currentLevel) {
18707
- this.currentLevel = null;
18708
- this.currentLevelIndex = -1;
18709
- if (level.details) {
18710
- level.details.fragments.forEach(f => f.level = -1);
18711
- }
18712
- }
18713
- return false;
18714
- });
18715
- reassignFragmentLevelIndexes(levels);
18716
- this._levels = levels;
18717
- if (this.currentLevelIndex > -1 && (_this$currentLevel = this.currentLevel) != null && _this$currentLevel.details) {
18718
- this.currentLevelIndex = this.currentLevel.details.fragments[0].level;
18719
- }
18720
- if (this.manualLevelIndex > -1) {
18721
- this.manualLevelIndex = this.currentLevelIndex;
18722
- }
18723
- const maxLevel = levels.length - 1;
18724
- this._firstLevel = Math.min(this._firstLevel, maxLevel);
18725
- if (this._startLevel) {
18726
- this._startLevel = Math.min(this._startLevel, maxLevel);
18727
- }
18728
- this.hls.trigger(Events.LEVELS_UPDATED, {
18729
- levels
18730
- });
18731
- }
18732
- onLevelsUpdated(event, {
18733
- levels
18734
- }) {
18735
- this._levels = levels;
18736
- }
18737
- checkMaxAutoUpdated() {
18738
- const {
18739
- autoLevelCapping,
18740
- maxAutoLevel,
18741
- maxHdcpLevel
18742
- } = this.hls;
18743
- if (this._maxAutoLevel !== maxAutoLevel) {
18744
- this._maxAutoLevel = maxAutoLevel;
18745
- this.hls.trigger(Events.MAX_AUTO_LEVEL_UPDATED, {
18746
- autoLevelCapping,
18747
- levels: this.levels,
18748
- maxAutoLevel,
18749
- minAutoLevel: this.hls.minAutoLevel,
18750
- maxHdcpLevel
18751
- });
18752
- }
18753
- }
18754
- }
18755
- function assignTrackIdsByGroup(tracks) {
18756
- const groups = {};
18757
- tracks.forEach(track => {
18758
- const groupId = track.groupId || '';
18759
- track.id = groups[groupId] = groups[groupId] || 0;
18760
- groups[groupId]++;
18761
- });
18762
- }
18763
-
18764
- const MAX_START_GAP_JUMP = 2.0;
18765
- const SKIP_BUFFER_HOLE_STEP_SECONDS = 0.1;
18766
- const SKIP_BUFFER_RANGE_START = 0.05;
18767
- class GapController extends Logger {
18768
- constructor(media, fragmentTracker, hls) {
18769
- super('gap-controller', hls.logger);
18770
- this.media = null;
18771
- this.fragmentTracker = null;
18772
- this.hls = null;
18773
- this.nudgeRetry = 0;
18774
- this.stallReported = false;
18775
- this.stalled = null;
18776
- this.moved = false;
18777
- this.seeking = false;
18778
- this.ended = 0;
18779
- this.waiting = 0;
18780
- this.media = media;
18781
- this.fragmentTracker = fragmentTracker;
18782
- this.hls = hls;
18783
- }
18784
- destroy() {
18785
- this.media = this.hls = this.fragmentTracker = null;
19133
+ get manualLevel() {
19134
+ return this.manualLevelIndex;
18786
19135
  }
18787
-
18788
- /**
18789
- * Checks if the playhead is stuck within a gap, and if so, attempts to free it.
18790
- * A gap is an unbuffered range between two buffered ranges (or the start and the first buffered range).
18791
- *
18792
- * @param lastCurrentTime - Previously read playhead position
18793
- */
18794
- poll(lastCurrentTime, activeFrag, levelDetails, state) {
18795
- var _this$hls;
18796
- const {
18797
- media,
18798
- stalled
18799
- } = this;
18800
- if (!media) {
18801
- return;
19136
+ set manualLevel(newLevel) {
19137
+ this.manualLevelIndex = newLevel;
19138
+ if (this._startLevel === undefined) {
19139
+ this._startLevel = newLevel;
18802
19140
  }
18803
- const {
18804
- currentTime,
18805
- seeking
18806
- } = media;
18807
- const seeked = this.seeking && !seeking;
18808
- const beginSeek = !this.seeking && seeking;
18809
- this.seeking = seeking;
18810
-
18811
- // The playhead is moving, no-op
18812
- if (currentTime !== lastCurrentTime) {
18813
- if (lastCurrentTime) {
18814
- this.ended = 0;
18815
- }
18816
- this.moved = true;
18817
- if (!seeking) {
18818
- this.nudgeRetry = 0;
18819
- }
18820
- if (this.waiting === 0) {
18821
- this.stallResolved(currentTime);
18822
- }
18823
- return;
19141
+ if (newLevel !== -1) {
19142
+ this.level = newLevel;
18824
19143
  }
18825
-
18826
- // Clear stalled state when beginning or finishing seeking so that we don't report stalls coming out of a seek
18827
- if (beginSeek || seeked) {
18828
- if (seeked) {
18829
- this.stallResolved(currentTime);
19144
+ }
19145
+ get firstLevel() {
19146
+ return this._firstLevel;
19147
+ }
19148
+ set firstLevel(newLevel) {
19149
+ this._firstLevel = newLevel;
19150
+ }
19151
+ get startLevel() {
19152
+ // Setting hls.startLevel (this._startLevel) overrides config.startLevel
19153
+ if (this._startLevel === undefined) {
19154
+ const configStartLevel = this.hls.config.startLevel;
19155
+ if (configStartLevel !== undefined) {
19156
+ return configStartLevel;
18830
19157
  }
18831
- return;
19158
+ return this.hls.firstAutoLevel;
18832
19159
  }
18833
-
18834
- // The playhead should not be moving
18835
- if (media.paused && !seeking || media.ended || media.playbackRate === 0) {
18836
- this.nudgeRetry = 0;
18837
- this.stallResolved(currentTime);
18838
- // Fire MEDIA_ENDED to workaround event not being dispatched by browser
18839
- if (!this.ended && media.ended && this.hls) {
18840
- this.ended = currentTime || 1;
18841
- this.hls.trigger(Events.MEDIA_ENDED, {
18842
- stalled: false
18843
- });
19160
+ return this._startLevel;
19161
+ }
19162
+ set startLevel(newLevel) {
19163
+ this._startLevel = newLevel;
19164
+ }
19165
+ get pathwayPriority() {
19166
+ if (this.steering) {
19167
+ return this.steering.pathwayPriority;
19168
+ }
19169
+ return null;
19170
+ }
19171
+ set pathwayPriority(pathwayPriority) {
19172
+ if (this.steering) {
19173
+ const pathwaysList = this.steering.pathways();
19174
+ const filteredPathwayPriority = pathwayPriority.filter(pathwayId => {
19175
+ return pathwaysList.indexOf(pathwayId) !== -1;
19176
+ });
19177
+ if (pathwayPriority.length < 1) {
19178
+ this.warn(`pathwayPriority ${pathwayPriority} should contain at least one pathway from list: ${pathwaysList}`);
19179
+ return;
18844
19180
  }
18845
- return;
19181
+ this.steering.pathwayPriority = filteredPathwayPriority;
18846
19182
  }
18847
- if (!BufferHelper.getBuffered(media).length) {
18848
- this.nudgeRetry = 0;
19183
+ }
19184
+ onError(event, data) {
19185
+ if (data.fatal || !data.context) {
18849
19186
  return;
18850
19187
  }
18851
- const bufferInfo = BufferHelper.bufferInfo(media, currentTime, 0);
18852
- const nextStart = bufferInfo.nextStart || 0;
18853
- const fragmentTracker = this.fragmentTracker;
18854
- if (seeking && fragmentTracker) {
18855
- // Waiting for seeking in a buffered range to complete
18856
- const hasEnoughBuffer = bufferInfo.len > MAX_START_GAP_JUMP;
18857
- // Next buffered range is too far ahead to jump to while still seeking
18858
- const noBufferGap = !nextStart || activeFrag && activeFrag.start <= currentTime || nextStart - currentTime > MAX_START_GAP_JUMP && !fragmentTracker.getPartialFragment(currentTime);
18859
- if (hasEnoughBuffer || noBufferGap) {
18860
- return;
18861
- }
18862
- // Reset moved state when seeking to a point in or before a gap
18863
- this.moved = false;
19188
+ if (data.context.type === PlaylistContextType.LEVEL && data.context.level === this.level) {
19189
+ this.checkRetry(data);
18864
19190
  }
19191
+ }
18865
19192
 
18866
- // Skip start gaps if we haven't played, but the last poll detected the start of a stall
18867
- // The addition poll gives the browser a chance to jump the gap for us
18868
- if (!this.moved && this.stalled !== null && fragmentTracker) {
18869
- // There is no playable buffer (seeked, waiting for buffer)
18870
- const isBuffered = bufferInfo.len > 0;
18871
- if (!isBuffered && !nextStart) {
19193
+ // reset errors on the successful load of a fragment
19194
+ onFragBuffered(event, {
19195
+ frag
19196
+ }) {
19197
+ if (frag !== undefined && frag.type === PlaylistLevelType.MAIN) {
19198
+ const el = frag.elementaryStreams;
19199
+ if (!Object.keys(el).some(type => !!el[type])) {
18872
19200
  return;
18873
19201
  }
18874
- // Jump start gaps within jump threshold
18875
- const startJump = Math.max(nextStart, bufferInfo.start || 0) - currentTime;
18876
-
18877
- // When joining a live stream with audio tracks, account for live playlist window sliding by allowing
18878
- // a larger jump over start gaps caused by the audio-stream-controller buffering a start fragment
18879
- // that begins over 1 target duration after the video start position.
18880
- const isLive = !!(levelDetails != null && levelDetails.live);
18881
- const maxStartGapJump = isLive ? levelDetails.targetduration * 2 : MAX_START_GAP_JUMP;
18882
- const partialOrGap = fragmentTracker.getPartialFragment(currentTime);
18883
- if (startJump > 0 && (startJump <= maxStartGapJump || partialOrGap)) {
18884
- if (!media.paused) {
18885
- this._trySkipBufferHole(partialOrGap);
18886
- }
18887
- return;
19202
+ const level = this._levels[frag.level];
19203
+ if (level != null && level.loadError) {
19204
+ this.log(`Resetting level error count of ${level.loadError} on frag buffered`);
19205
+ level.loadError = 0;
18888
19206
  }
18889
19207
  }
18890
-
18891
- // Start tracking stall time
18892
- const config = (_this$hls = this.hls) == null ? undefined : _this$hls.config;
18893
- if (!config) {
18894
- return;
18895
- }
18896
- const detectStallWithCurrentTimeMs = config.detectStallWithCurrentTimeMs;
18897
- const tnow = self.performance.now();
18898
- const tWaiting = this.waiting;
18899
- if (stalled === null) {
18900
- // Use time of recent "waiting" event
18901
- if (tWaiting > 0 && tnow - tWaiting < detectStallWithCurrentTimeMs) {
18902
- this.stalled = tWaiting;
18903
- } else {
18904
- this.stalled = tnow;
19208
+ }
19209
+ onLevelLoaded(event, data) {
19210
+ var _data$deliveryDirecti2;
19211
+ const {
19212
+ level,
19213
+ details
19214
+ } = data;
19215
+ const curLevel = data.levelInfo;
19216
+ if (!curLevel) {
19217
+ var _data$deliveryDirecti;
19218
+ this.warn(`Invalid level index ${level}`);
19219
+ if ((_data$deliveryDirecti = data.deliveryDirectives) != null && _data$deliveryDirecti.skip) {
19220
+ details.deltaUpdateFailed = true;
18905
19221
  }
18906
19222
  return;
18907
19223
  }
18908
- const stalledDuration = tnow - stalled;
18909
- if (!seeking && (stalledDuration >= detectStallWithCurrentTimeMs || tWaiting) && this.hls) {
18910
- // Dispatch MEDIA_ENDED when media.ended/ended event is not signalled at end of stream
18911
- if (state === State.ENDED && !(levelDetails != null && levelDetails.live) && Math.abs(currentTime - ((levelDetails == null ? undefined : levelDetails.edge) || 0)) < 1) {
18912
- if (this.ended) {
18913
- return;
18914
- }
18915
- this.ended = currentTime || 1;
18916
- this.hls.trigger(Events.MEDIA_ENDED, {
18917
- stalled: true
18918
- });
18919
- return;
19224
+
19225
+ // only process level loaded events matching with expected level or prior to switch when media playlist is loaded directly
19226
+ if (curLevel === this.currentLevel || data.withoutMultiVariant) {
19227
+ // reset level load error counter on successful level loaded only if there is no issues with fragments
19228
+ if (curLevel.fragmentError === 0) {
19229
+ curLevel.loadError = 0;
18920
19230
  }
18921
- // Report stalling after trying to fix
18922
- this._reportStall(bufferInfo);
18923
- if (!this.media || !this.hls) {
18924
- return;
19231
+ // Ignore matching details populated by loading a Media Playlist directly
19232
+ let previousDetails = curLevel.details;
19233
+ if (previousDetails === data.details && previousDetails.advanced) {
19234
+ previousDetails = undefined;
18925
19235
  }
19236
+ this.playlistLoaded(level, data, previousDetails);
19237
+ } else if ((_data$deliveryDirecti2 = data.deliveryDirectives) != null && _data$deliveryDirecti2.skip) {
19238
+ // received a delta playlist update that cannot be merged
19239
+ details.deltaUpdateFailed = true;
18926
19240
  }
18927
- const bufferedWithHoles = BufferHelper.bufferInfo(media, currentTime, config.maxBufferHole);
18928
- this._tryFixBufferStall(bufferedWithHoles, stalledDuration);
18929
19241
  }
18930
- stallResolved(currentTime) {
18931
- const stalled = this.stalled;
18932
- if (stalled && this.hls) {
18933
- this.stalled = null;
18934
- // The playhead is now moving, but was previously stalled
18935
- if (this.stallReported) {
18936
- const stalledDuration = self.performance.now() - stalled;
18937
- this.warn(`playback not stuck anymore @${currentTime}, after ${Math.round(stalledDuration)}ms`);
18938
- this.stallReported = false;
18939
- this.waiting = 0;
18940
- this.hls.trigger(Events.STALL_RESOLVED, {});
18941
- }
19242
+ loadPlaylist(hlsUrlParameters) {
19243
+ super.loadPlaylist();
19244
+ if (this.shouldLoadPlaylist(this.currentLevel)) {
19245
+ this.scheduleLoading(this.currentLevel, hlsUrlParameters);
18942
19246
  }
18943
19247
  }
18944
-
18945
- /**
18946
- * Detects and attempts to fix known buffer stalling issues.
18947
- * @param bufferInfo - The properties of the current buffer.
18948
- * @param stalledDurationMs - The amount of time Hls.js has been stalling for.
18949
- * @private
18950
- */
18951
- _tryFixBufferStall(bufferInfo, stalledDurationMs) {
18952
- var _this$hls2;
18953
- const {
18954
- fragmentTracker,
18955
- media
18956
- } = this;
18957
- const config = (_this$hls2 = this.hls) == null ? undefined : _this$hls2.config;
18958
- if (!media || !fragmentTracker || !config) {
18959
- return;
18960
- }
18961
- const currentTime = media.currentTime;
18962
- const partial = fragmentTracker.getPartialFragment(currentTime);
18963
- if (partial) {
18964
- // Try to skip over the buffer hole caused by a partial fragment
18965
- // This method isn't limited by the size of the gap between buffered ranges
18966
- const targetTime = this._trySkipBufferHole(partial);
18967
- // we return here in this case, meaning
18968
- // the branch below only executes when we haven't seeked to a new position
18969
- if (targetTime || !this.media) {
18970
- return;
18971
- }
18972
- }
18973
-
18974
- // if we haven't had to skip over a buffer hole of a partial fragment
18975
- // we may just have to "nudge" the playlist as the browser decoding/rendering engine
18976
- // needs to cross some sort of threshold covering all source-buffers content
18977
- // to start playing properly.
18978
- const bufferedRanges = bufferInfo.buffered;
18979
- if ((bufferedRanges && bufferedRanges.length > 1 && bufferInfo.len > config.maxBufferHole || bufferInfo.nextStart && bufferInfo.nextStart - currentTime < config.maxBufferHole) && stalledDurationMs > config.highBufferWatchdogPeriod * 1000) {
18980
- this.warn('Trying to nudge playhead over buffer-hole');
18981
- // Try to nudge currentTime over a buffer hole if we've been stalling for the configured amount of seconds
18982
- // We only try to jump the hole if it's under the configured size
18983
- this._tryNudgeBuffer(bufferInfo);
19248
+ loadingPlaylist(currentLevel, hlsUrlParameters) {
19249
+ super.loadingPlaylist(currentLevel, hlsUrlParameters);
19250
+ const url = this.getUrlWithDirectives(currentLevel.uri, hlsUrlParameters);
19251
+ const currentLevelIndex = this.currentLevelIndex;
19252
+ const pathwayId = currentLevel.attrs['PATHWAY-ID'];
19253
+ const details = currentLevel.details;
19254
+ const age = details == null ? undefined : details.age;
19255
+ this.log(`Loading level index ${currentLevelIndex}${(hlsUrlParameters == null ? undefined : hlsUrlParameters.msn) !== undefined ? ' at sn ' + hlsUrlParameters.msn + ' part ' + hlsUrlParameters.part : ''}${pathwayId ? ' Pathway ' + pathwayId : ''}${age && details.live ? ' age ' + age.toFixed(1) + (details.type ? ' ' + details.type || '' : '') : ''} ${url}`);
19256
+ this.hls.trigger(Events.LEVEL_LOADING, {
19257
+ url,
19258
+ level: currentLevelIndex,
19259
+ levelInfo: currentLevel,
19260
+ pathwayId: currentLevel.attrs['PATHWAY-ID'],
19261
+ id: 0,
19262
+ // Deprecated Level urlId
19263
+ deliveryDirectives: hlsUrlParameters || null
19264
+ });
19265
+ }
19266
+ get nextLoadLevel() {
19267
+ if (this.manualLevelIndex !== -1) {
19268
+ return this.manualLevelIndex;
19269
+ } else {
19270
+ return this.hls.nextAutoLevel;
18984
19271
  }
18985
19272
  }
18986
-
18987
- /**
18988
- * Triggers a BUFFER_STALLED_ERROR event, but only once per stall period.
18989
- * @param bufferLen - The playhead distance from the end of the current buffer segment.
18990
- * @private
18991
- */
18992
- _reportStall(bufferInfo) {
18993
- const {
18994
- hls,
18995
- media,
18996
- stallReported,
18997
- stalled
18998
- } = this;
18999
- if (!stallReported && stalled !== null && media && hls) {
19000
- // Report stalled error once
19001
- this.stallReported = true;
19002
- const error = new Error(`Playback stalling at @${media.currentTime} due to low buffer (${JSON.stringify(bufferInfo)})`);
19003
- this.warn(error.message);
19004
- hls.trigger(Events.ERROR, {
19005
- type: ErrorTypes.MEDIA_ERROR,
19006
- details: ErrorDetails.BUFFER_STALLED_ERROR,
19007
- fatal: false,
19008
- error,
19009
- buffer: bufferInfo.len,
19010
- bufferInfo,
19011
- stalled: {
19012
- start: stalled
19013
- }
19014
- });
19273
+ set nextLoadLevel(nextLevel) {
19274
+ this.level = nextLevel;
19275
+ if (this.manualLevelIndex === -1) {
19276
+ this.hls.nextAutoLevel = nextLevel;
19015
19277
  }
19016
19278
  }
19017
-
19018
- /**
19019
- * Attempts to fix buffer stalls by jumping over known gaps caused by partial fragments
19020
- * @param partial - The partial fragment found at the current time (where playback is stalling).
19021
- * @private
19022
- */
19023
- _trySkipBufferHole(partial) {
19024
- var _this$hls3;
19025
- const {
19026
- fragmentTracker,
19027
- media
19028
- } = this;
19029
- const config = (_this$hls3 = this.hls) == null ? undefined : _this$hls3.config;
19030
- if (!media || !fragmentTracker || !config) {
19031
- return 0;
19279
+ removeLevel(levelIndex) {
19280
+ var _this$currentLevel;
19281
+ if (this._levels.length === 1) {
19282
+ return;
19032
19283
  }
19033
-
19034
- // Check if currentTime is between unbuffered regions of partial fragments
19035
- const currentTime = media.currentTime;
19036
- const bufferInfo = BufferHelper.bufferInfo(media, currentTime, 0);
19037
- const startTime = currentTime < bufferInfo.start ? bufferInfo.start : bufferInfo.nextStart;
19038
- if (startTime) {
19039
- const bufferStarved = bufferInfo.len <= config.maxBufferHole;
19040
- const waiting = bufferInfo.len > 0 && bufferInfo.len < 1 && media.readyState < 3;
19041
- const gapLength = startTime - currentTime;
19042
- if (gapLength > 0 && (bufferStarved || waiting)) {
19043
- // Only allow large gaps to be skipped if it is a start gap, or all fragments in skip range are partial
19044
- if (gapLength > config.maxBufferHole) {
19045
- let startGap = false;
19046
- if (currentTime === 0) {
19047
- const startFrag = fragmentTracker.getAppendedFrag(0, PlaylistLevelType.MAIN);
19048
- if (startFrag && startTime < startFrag.end) {
19049
- startGap = true;
19050
- }
19051
- }
19052
- if (!startGap) {
19053
- const startProvisioned = partial || fragmentTracker.getAppendedFrag(currentTime, PlaylistLevelType.MAIN);
19054
- if (startProvisioned) {
19055
- let moreToLoad = false;
19056
- let pos = startProvisioned.end;
19057
- while (pos < startTime) {
19058
- const provisioned = fragmentTracker.getPartialFragment(pos);
19059
- if (provisioned) {
19060
- pos += provisioned.duration;
19061
- } else {
19062
- moreToLoad = true;
19063
- break;
19064
- }
19065
- }
19066
- if (moreToLoad) {
19067
- return 0;
19068
- }
19069
- }
19070
- }
19071
- }
19072
- const targetTime = Math.max(startTime + SKIP_BUFFER_RANGE_START, currentTime + SKIP_BUFFER_HOLE_STEP_SECONDS);
19073
- this.warn(`skipping hole, adjusting currentTime from ${currentTime} to ${targetTime}`);
19074
- this.moved = true;
19075
- media.currentTime = targetTime;
19076
- if (partial && !partial.gap && this.hls) {
19077
- const error = new Error(`fragment loaded with buffer holes, seeking from ${currentTime} to ${targetTime}`);
19078
- this.hls.trigger(Events.ERROR, {
19079
- type: ErrorTypes.MEDIA_ERROR,
19080
- details: ErrorDetails.BUFFER_SEEK_OVER_HOLE,
19081
- fatal: false,
19082
- error,
19083
- reason: error.message,
19084
- frag: partial,
19085
- buffer: bufferInfo.len,
19086
- bufferInfo
19087
- });
19284
+ const levels = this._levels.filter((level, index) => {
19285
+ if (index !== levelIndex) {
19286
+ return true;
19287
+ }
19288
+ if (this.steering) {
19289
+ this.steering.removeLevel(level);
19290
+ }
19291
+ if (level === this.currentLevel) {
19292
+ this.currentLevel = null;
19293
+ this.currentLevelIndex = -1;
19294
+ if (level.details) {
19295
+ level.details.fragments.forEach(f => f.level = -1);
19088
19296
  }
19089
- return targetTime;
19090
19297
  }
19298
+ return false;
19299
+ });
19300
+ reassignFragmentLevelIndexes(levels);
19301
+ this._levels = levels;
19302
+ if (this.currentLevelIndex > -1 && (_this$currentLevel = this.currentLevel) != null && _this$currentLevel.details) {
19303
+ this.currentLevelIndex = this.currentLevel.details.fragments[0].level;
19091
19304
  }
19092
- return 0;
19305
+ if (this.manualLevelIndex > -1) {
19306
+ this.manualLevelIndex = this.currentLevelIndex;
19307
+ }
19308
+ const maxLevel = levels.length - 1;
19309
+ this._firstLevel = Math.min(this._firstLevel, maxLevel);
19310
+ if (this._startLevel) {
19311
+ this._startLevel = Math.min(this._startLevel, maxLevel);
19312
+ }
19313
+ this.hls.trigger(Events.LEVELS_UPDATED, {
19314
+ levels
19315
+ });
19093
19316
  }
19094
-
19095
- /**
19096
- * Attempts to fix buffer stalls by advancing the mediaElement's current time by a small amount.
19097
- * @private
19098
- */
19099
- _tryNudgeBuffer(bufferInfo) {
19317
+ onLevelsUpdated(event, {
19318
+ levels
19319
+ }) {
19320
+ this._levels = levels;
19321
+ }
19322
+ checkMaxAutoUpdated() {
19100
19323
  const {
19101
- hls,
19102
- media,
19103
- nudgeRetry
19104
- } = this;
19105
- const config = hls == null ? undefined : hls.config;
19106
- if (!media || !config) {
19107
- return 0;
19108
- }
19109
- const currentTime = media.currentTime;
19110
- this.nudgeRetry++;
19111
- if (nudgeRetry < config.nudgeMaxRetry) {
19112
- const targetTime = currentTime + (nudgeRetry + 1) * config.nudgeOffset;
19113
- // playback stalled in buffered area ... let's nudge currentTime to try to overcome this
19114
- const error = new Error(`Nudging 'currentTime' from ${currentTime} to ${targetTime}`);
19115
- this.warn(error.message);
19116
- media.currentTime = targetTime;
19117
- hls.trigger(Events.ERROR, {
19118
- type: ErrorTypes.MEDIA_ERROR,
19119
- details: ErrorDetails.BUFFER_NUDGE_ON_STALL,
19120
- error,
19121
- fatal: false,
19122
- buffer: bufferInfo.len,
19123
- bufferInfo
19124
- });
19125
- } else {
19126
- const error = new Error(`Playhead still not moving while enough data buffered @${currentTime} after ${config.nudgeMaxRetry} nudges`);
19127
- this.error(error.message);
19128
- hls.trigger(Events.ERROR, {
19129
- type: ErrorTypes.MEDIA_ERROR,
19130
- details: ErrorDetails.BUFFER_STALLED_ERROR,
19131
- error,
19132
- fatal: true,
19133
- buffer: bufferInfo.len,
19134
- bufferInfo
19324
+ autoLevelCapping,
19325
+ maxAutoLevel,
19326
+ maxHdcpLevel
19327
+ } = this.hls;
19328
+ if (this._maxAutoLevel !== maxAutoLevel) {
19329
+ this._maxAutoLevel = maxAutoLevel;
19330
+ this.hls.trigger(Events.MAX_AUTO_LEVEL_UPDATED, {
19331
+ autoLevelCapping,
19332
+ levels: this.levels,
19333
+ maxAutoLevel,
19334
+ minAutoLevel: this.hls.minAutoLevel,
19335
+ maxHdcpLevel
19135
19336
  });
19136
19337
  }
19137
19338
  }
19138
19339
  }
19340
+ function assignTrackIdsByGroup(tracks) {
19341
+ const groups = {};
19342
+ tracks.forEach(track => {
19343
+ const groupId = track.groupId || '';
19344
+ track.id = groups[groupId] = groups[groupId] || 0;
19345
+ groups[groupId]++;
19346
+ });
19347
+ }
19139
19348
 
19140
- const version = "1.6.0-beta.2.0.canary.10924";
19349
+ const version = "1.6.0-beta.2.0.canary.10926";
19141
19350
 
19142
19351
  // ensure the worker ends up in the bundle
19143
19352
  // If the worker should not be included this gets aliased to empty.js
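For reference, the startLevel getter above spells out the precedence applied at startup: an explicit hls.startLevel assignment wins over config.startLevel, and with neither set the first auto level is used. A brief sketch, assuming the default Hls export and a placeholder manifest URL:

  const hls = new Hls({ startLevel: 2 });
  console.log(hls.startLevel); // 2, taken from config while no explicit value is set
  hls.startLevel = 0;          // explicit assignment now overrides config.startLevel
  console.log(hls.startLevel); // 0
  hls.loadSource('https://example.com/master.m3u8');
  hls.attachMedia(document.querySelector('video'));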
@@ -19552,7 +19761,6 @@ class StreamController extends BaseStreamController {
19552
19761
  constructor(hls, fragmentTracker, keyLoader) {
19553
19762
  super(hls, fragmentTracker, keyLoader, 'stream-controller', PlaylistLevelType.MAIN);
19554
19763
  this.audioCodecSwap = false;
19555
- this.gapController = null;
19556
19764
  this.level = -1;
19557
19765
  this._forceStartLoad = false;
19558
19766
  this._hasEnoughToStart = false;
@@ -19564,19 +19772,8 @@ class StreamController extends BaseStreamController {
19564
19772
  this.backtrackFragment = null;
19565
19773
  this.audioCodecSwitch = false;
19566
19774
  this.videoBuffer = null;
19567
- this.onMediaWaiting = () => {
19568
- const gapController = this.gapController;
19569
- if (gapController) {
19570
- gapController.waiting = self.performance.now();
19571
- }
19572
- };
19573
19775
  this.onMediaPlaying = () => {
19574
19776
  // tick to speed up FRAG_CHANGED triggering
19575
- const gapController = this.gapController;
19576
- if (gapController) {
19577
- gapController.ended = 0;
19578
- gapController.waiting = 0;
19579
- }
19580
19777
  this.tick();
19581
19778
  };
19582
19779
  this.onMediaSeeked = () => {
@@ -19631,7 +19828,7 @@ class StreamController extends BaseStreamController {
19631
19828
  }
19632
19829
  onHandlerDestroying() {
19633
19830
  // @ts-ignore
19634
- this.onMediaPlaying = this.onMediaSeeked = this.onMediaWaiting = null;
19831
+ this.onMediaPlaying = this.onMediaSeeked = null;
19635
19832
  this.unregisterListeners();
19636
19833
  super.onHandlerDestroying();
19637
19834
  }
@@ -19726,8 +19923,11 @@ class StreamController extends BaseStreamController {
19726
19923
  this.onTickEnd();
19727
19924
  }
19728
19925
  onTickEnd() {
19926
+ var _this$media2;
19729
19927
  super.onTickEnd();
19730
- this.checkBuffer();
19928
+ if ((_this$media2 = this.media) != null && _this$media2.readyState && this.media.seeking === false) {
19929
+ this.lastCurrentTime = this.media.currentTime;
19930
+ }
19731
19931
  this.checkFragmentChanged();
19732
19932
  }
19733
19933
  doTickIdle() {
@@ -19960,29 +20160,19 @@ class StreamController extends BaseStreamController {
19960
20160
  onMediaAttached(event, data) {
19961
20161
  super.onMediaAttached(event, data);
19962
20162
  const media = data.media;
19963
- media.removeEventListener('playing', this.onMediaPlaying);
19964
- media.removeEventListener('seeked', this.onMediaSeeked);
19965
- media.removeEventListener('waiting', this.onMediaWaiting);
19966
- media.addEventListener('playing', this.onMediaPlaying);
19967
- media.addEventListener('seeked', this.onMediaSeeked);
19968
- media.addEventListener('waiting', this.onMediaWaiting);
19969
- this.gapController = new GapController(media, this.fragmentTracker, this.hls);
20163
+ addEventListener(media, 'playing', this.onMediaPlaying);
20164
+ addEventListener(media, 'seeked', this.onMediaSeeked);
19970
20165
  }
19971
20166
  onMediaDetaching(event, data) {
19972
20167
  const {
19973
20168
  media
19974
20169
  } = this;
19975
20170
  if (media) {
19976
- media.removeEventListener('playing', this.onMediaPlaying);
19977
- media.removeEventListener('seeked', this.onMediaSeeked);
19978
- media.removeEventListener('waiting', this.onMediaWaiting);
20171
+ removeEventListener(media, 'playing', this.onMediaPlaying);
20172
+ removeEventListener(media, 'seeked', this.onMediaSeeked);
19979
20173
  }
19980
20174
  this.videoBuffer = null;
19981
20175
  this.fragPlaying = null;
19982
- if (this.gapController) {
19983
- this.gapController.destroy();
19984
- this.gapController = null;
19985
- }
19986
20176
  super.onMediaDetaching(event, data);
19987
20177
  const transferringMedia = !!data.transferMedia;
19988
20178
  if (transferringMedia) {
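The media listener wiring above now goes through addEventListener and removeEventListener helper functions instead of calling the element methods directly. Their bodies are not part of this diff; a plausible shape, making registration idempotent by removing any earlier listener first, is hypothesised below purely to clarify the call sites:

  // Hypothetical helpers, shown only for illustration of the calls above.
  function addEventListener(el, type, listener) {
    removeEventListener(el, type, listener);
    el.addEventListener(type, listener);
  }
  function removeEventListener(el, type, listener) {
    el.removeEventListener(type, listener);
  }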
@@ -19990,19 +20180,6 @@ class StreamController extends BaseStreamController {
19990
20180
  }
19991
20181
  this._hasEnoughToStart = false;
19992
20182
  }
19993
- triggerEnded() {
19994
- const gapController = this.gapController;
19995
- if (gapController) {
19996
- var _this$media2;
19997
- if (gapController.ended) {
19998
- return;
19999
- }
20000
- gapController.ended = ((_this$media2 = this.media) == null ? undefined : _this$media2.currentTime) || 1;
20001
- }
20002
- this.hls.trigger(Events.MEDIA_ENDED, {
20003
- stalled: false
20004
- });
20005
- }
20006
20183
  onManifestLoading() {
20007
20184
  super.onManifestLoading();
20008
20185
  // reset buffer on manifest loading
@@ -20337,26 +20514,6 @@ class StreamController extends BaseStreamController {
20337
20514
  break;
20338
20515
  }
20339
20516
  }
20340
-
20341
- // Checks the health of the buffer and attempts to resolve playback stalls.
20342
- checkBuffer() {
20343
- const {
20344
- media,
20345
- gapController
20346
- } = this;
20347
- if (!media || !gapController || !media.readyState) {
20348
- // Exit early if we don't have media or if the media hasn't buffered anything yet (readyState 0)
20349
- return;
20350
- }
20351
- if (this._hasEnoughToStart || !BufferHelper.getBuffered(media).length) {
20352
- // Resolve gaps using the main buffer, whose ranges are the intersections of the A/V sourcebuffers
20353
- const state = this.state;
20354
- const activeFrag = state !== State.IDLE ? this.fragCurrent : null;
20355
- const levelDetails = this.getLevelDetails();
20356
- gapController.poll(this.lastCurrentTime, activeFrag, levelDetails, state);
20357
- }
20358
- this.lastCurrentTime = media.currentTime;
20359
- }
20360
20517
  onFragLoadEmergencyAborted() {
20361
20518
  this.state = State.IDLE;
20362
20519
  // if loadedmetadata is not set, it means that we are emergency switch down on first frag
@@ -20372,8 +20529,10 @@ class StreamController extends BaseStreamController {
20372
20529
  }) {
20373
20530
  if (type !== ElementaryStreamTypes.AUDIO || !this.altAudio) {
20374
20531
  const mediaBuffer = (type === ElementaryStreamTypes.VIDEO ? this.videoBuffer : this.mediaBuffer) || this.media;
20375
- this.afterBufferFlushed(mediaBuffer, type, PlaylistLevelType.MAIN);
20376
- this.tick();
20532
+ if (mediaBuffer) {
20533
+ this.afterBufferFlushed(mediaBuffer, type, PlaylistLevelType.MAIN);
20534
+ this.tick();
20535
+ }
20377
20536
  }
20378
20537
  }
20379
20538
  onLevelsUpdated(event, data) {
@@ -21847,9 +22006,12 @@ class Hls {
21847
22006
  this.latencyController = undefined;
21848
22007
  this.levelController = undefined;
21849
22008
  this.streamController = undefined;
22009
+ this.audioStreamController = undefined;
22010
+ this.subtititleStreamController = undefined;
21850
22011
  this.audioTrackController = undefined;
21851
22012
  this.subtitleTrackController = undefined;
21852
22013
  this.interstitialsController = undefined;
22014
+ this.gapController = undefined;
21853
22015
  this.emeController = undefined;
21854
22016
  this.cmcdController = undefined;
21855
22017
  this._media = null;
@@ -21889,6 +22051,7 @@ class Hls {
21889
22051
  const id3TrackController = new ID3TrackController(this);
21890
22052
  const keyLoader = new KeyLoader(this.config);
21891
22053
  const streamController = this.streamController = new StreamController(this, fragmentTracker, keyLoader);
22054
+ const gapController = this.gapController = new GapController(this, fragmentTracker);
21892
22055
 
21893
22056
  // Cap level controller uses streamController to flush the buffer
21894
22057
  capLevelController.setStreamController(streamController);
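GapController is constructed above with the hls instance and the fragment tracker and, a little further down, joins the other core components. Its stall handling stays observable from application code through events; a short sketch using the event and error-detail names that appear in the gap handling code earlier in this file (STALL_RESOLVED, BUFFER_STALLED_ERROR):

  hls.on(Hls.Events.ERROR, (event, data) => {
    if (data.details === Hls.ErrorDetails.BUFFER_STALLED_ERROR && !data.fatal) {
      console.warn('playback stalled near', hls.media ? hls.media.currentTime : null);
    }
  });
  hls.on(Hls.Events.STALL_RESOLVED, () => {
    console.log('stall resolved');
  });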
@@ -21902,17 +22065,17 @@ class Hls {
21902
22065
  networkControllers.splice(1, 0, contentSteering);
21903
22066
  }
21904
22067
  this.networkControllers = networkControllers;
21905
- const coreComponents = [abrController, bufferController, capLevelController, fpsController, id3TrackController, fragmentTracker];
22068
+ const coreComponents = [abrController, bufferController, gapController, capLevelController, fpsController, id3TrackController, fragmentTracker];
21906
22069
  this.audioTrackController = this.createController(config.audioTrackController, networkControllers);
21907
22070
  const AudioStreamControllerClass = config.audioStreamController;
21908
22071
  if (AudioStreamControllerClass) {
21909
- networkControllers.push(new AudioStreamControllerClass(this, fragmentTracker, keyLoader));
22072
+ networkControllers.push(this.audioStreamController = new AudioStreamControllerClass(this, fragmentTracker, keyLoader));
21910
22073
  }
21911
22074
  // Instantiate subtitleTrackController before SubtitleStreamController to receive level events first
21912
22075
  this.subtitleTrackController = this.createController(config.subtitleTrackController, networkControllers);
21913
22076
  const SubtitleStreamControllerClass = config.subtitleStreamController;
21914
22077
  if (SubtitleStreamControllerClass) {
21915
- networkControllers.push(new SubtitleStreamControllerClass(this, fragmentTracker, keyLoader));
22078
+ networkControllers.push(this.subtititleStreamController = new SubtitleStreamControllerClass(this, fragmentTracker, keyLoader));
21916
22079
  }
21917
22080
  this.createController(config.timelineController, coreComponents);
21918
22081
  keyLoader.emeController = this.emeController = this.createController(config.emeController, coreComponents);
@@ -22179,6 +22342,18 @@ class Hls {
22179
22342
  });
22180
22343
  }
22181
22344
  }
22345
+ get inFlightFragments() {
22346
+ const inFlightData = {
22347
+ [PlaylistLevelType.MAIN]: this.streamController.inFlightFrag
22348
+ };
22349
+ if (this.audioStreamController) {
22350
+ inFlightData[PlaylistLevelType.AUDIO] = this.audioStreamController.inFlightFrag;
22351
+ }
22352
+ if (this.subtititleStreamController) {
22353
+ inFlightData[PlaylistLevelType.SUBTITLE] = this.subtititleStreamController.inFlightFrag;
22354
+ }
22355
+ return inFlightData;
22356
+ }
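The inFlightFragments getter above aggregates the in-flight state of the main, audio and subtitle stream controllers into a single object keyed by playlist type. A short sketch; the shape of each inFlightFrag value is not shown in this diff and is treated as opaque here:

  const inFlight = hls.inFlightFragments;
  Object.keys(inFlight).forEach((type) => {
    // keys follow PlaylistLevelType, typically 'main', 'audio' and 'subtitle'
    console.log(`${type} in flight:`, inFlight[type]);
  });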
22182
22357
 
22183
22358
  /**
22184
22359
  * Swap through possible audio codecs in the stream (for example to switch from stereo to 5.1)
@@ -22228,10 +22403,21 @@ class Hls {
22228
22403
  const levels = this.levelController.levels;
22229
22404
  return levels ? levels : [];
22230
22405
  }
22406
+
22407
+ /**
22408
+ * @returns LevelDetails of last loaded level (variant) or `null` prior to loading a media playlist.
22409
+ */
22231
22410
  get latestLevelDetails() {
22232
22411
  return this.streamController.getLevelDetails() || null;
22233
22412
  }
22234
22413
 
22414
+ /**
22415
+ * @returns Level object of selected level (variant) or `null` prior to selecting a level or once the level is removed.
22416
+ */
22417
+ get loadLevelObj() {
22418
+ return this.levelController.loadLevelObj;
22419
+ }
22420
+
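Taken together with the latestLevelDetails getter just above, loadLevelObj gives null-safe access to both the selected variant and its most recently loaded playlist. A brief sketch; live and edge are LevelDetails fields used elsewhere in this file:

  const level = hls.loadLevelObj;         // null until a level is selected
  const details = hls.latestLevelDetails; // null until a media playlist is loaded
  if (level && details) {
    console.log(`${level.bitrate} bps, live=${details.live}, edge=${details.edge}`);
  }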
22235
22421
  /**
22236
22422
  * Index of quality level (variant) currently played
22237
22423
  */