hls.js 1.5.7-0.canary.10014 → 1.5.7-0.canary.10016

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/hls.light.js CHANGED
@@ -613,7 +613,7 @@
  // Some browsers don't allow to use bind on console object anyway
  // fallback to default if needed
  try {
- newLogger.log("Debug logs enabled for \"" + context + "\" in hls.js version " + "1.5.7-0.canary.10014");
+ newLogger.log("Debug logs enabled for \"" + context + "\" in hls.js version " + "1.5.7-0.canary.10016");
  } catch (e) {
  /* log fn threw an exception. All logger methods are no-ops. */
  return createLogger();
@@ -1952,7 +1952,9 @@
  }
  function skipBERInteger(bytes, i) {
  var limit = i + 5;
- while (bytes[i++] & 0x80 && i < limit) {}
+ while (bytes[i++] & 0x80 && i < limit) {
+ /* do nothing */
+ }
  return i;
  }
  function toHex(x) {
@@ -6811,6 +6813,9 @@
  var fragCurrent = this.fragCurrent,
  partCurrent = this.partCurrent,
  hls = this.hls;
+ if (hls.levels.length <= 1) {
+ return hls.loadLevel;
+ }
  var maxAutoLevel = hls.maxAutoLevel,
  config = hls.config,
  minAutoLevel = hls.minAutoLevel;
@@ -7228,24 +7233,23 @@
  this.executeNext(type);
  }
  };
- _proto.insertAbort = function insertAbort(operation, type) {
- var queue = this.queues[type];
- queue.unshift(operation);
- this.executeNext(type);
- };
  _proto.appendBlocker = function appendBlocker(type) {
- var execute;
- var promise = new Promise(function (resolve) {
- execute = resolve;
+ var _this = this;
+ return new Promise(function (resolve) {
+ var operation = {
+ execute: resolve,
+ onStart: function onStart() {},
+ onComplete: function onComplete() {},
+ onError: function onError() {}
+ };
+ _this.append(operation, type);
  });
- var operation = {
- execute: execute,
- onStart: function onStart() {},
- onComplete: function onComplete() {},
- onError: function onError() {}
- };
- this.append(operation, type);
- return promise;
+ };
+ _proto.unblockAudio = function unblockAudio(op) {
+ var queue = this.queues.audio;
+ if (queue[0] === op) {
+ this.shiftAndExecuteNext('audio');
+ }
  };
  _proto.executeNext = function executeNext(type) {
  var queue = this.queues[type];
@@ -7280,7 +7284,7 @@
  var VIDEO_CODEC_PROFILE_REPLACE = /(avc[1234]|hvc1|hev1|dvh[1e]|vp09|av01)(?:\.[^.,]+)+/;
  var BufferController = /*#__PURE__*/function (_Logger) {
  _inheritsLoose(BufferController, _Logger);
- function BufferController(hls) {
+ function BufferController(hls, fragmentTracker) {
  var _this;
  _this = _Logger.call(this, 'buffer-controller', hls.logger) || this;
  // The level details used to determine duration, target-duration and live
@@ -7292,6 +7296,7 @@
  // References to event listeners for each SourceBuffer, so that they can be referenced for event removal
  _this.listeners = void 0;
  _this.hls = void 0;
+ _this.fragmentTracker = void 0;
  // The number of BUFFER_CODEC events received before any sourceBuffers are created
  _this.bufferCodecEventsExpected = 0;
  // The total number of BUFFER_CODEC events received
@@ -7302,6 +7307,10 @@
  _this.mediaSource = null;
  // Last MP3 audio chunk appended
  _this.lastMpegAudioChunk = null;
+ // Audio fragment blocked from appending until corresponding video appends or context changes
+ _this.blockedAudioAppend = null;
+ // Keep track of video append position for unblocking audio
+ _this.lastVideoAppendEnd = 0;
  _this.appendSource = void 0;
  // counters
  _this.appendErrors = {
@@ -7332,7 +7341,10 @@
  _this.log('Media source opened');
  if (media) {
  media.removeEventListener('emptied', _this._onMediaEmptied);
- _this.updateMediaElementDuration();
+ var durationAndRange = _this.getDurationAndRange();
+ if (durationAndRange) {
+ _this.updateMediaSource(durationAndRange);
+ }
  _this.hls.trigger(Events.MEDIA_ATTACHED, {
  media: media,
  mediaSource: mediaSource
@@ -7359,6 +7371,7 @@
  }
  };
  _this.hls = hls;
+ _this.fragmentTracker = fragmentTracker;
  _this.appendSource = hls.config.preferManagedMediaSource;
  _this._initSourceBuffer();
  _this.registerListeners();
@@ -7373,7 +7386,7 @@
  this.details = null;
  this.lastMpegAudioChunk = null;
  // @ts-ignore
- this.hls = null;
+ this.hls = this.fragmentTracker = null;
  // @ts-ignore
  this._onMediaSourceOpen = this._onMediaSourceClose = null;
  // @ts-ignore
@@ -7425,6 +7438,8 @@
  audiovideo: 0
  };
  this.lastMpegAudioChunk = null;
+ this.blockedAudioAppend = null;
+ this.lastVideoAppendEnd = 0;
  };
  _proto.onManifestLoading = function onManifestLoading() {
  this.bufferCodecEventsExpected = this._bufferCodecEventsTotal = 0;
@@ -7562,9 +7577,10 @@
  var trackNames = Object.keys(data);
  trackNames.forEach(function (trackName) {
  if (sourceBufferCount) {
+ var _track$buffer;
  // check if SourceBuffer codec needs to change
  var track = _this3.tracks[trackName];
- if (track && typeof track.buffer.changeType === 'function') {
+ if (track && typeof ((_track$buffer = track.buffer) == null ? void 0 : _track$buffer.changeType) === 'function') {
  var _trackCodec;
  var _data$trackName = data[trackName],
  id = _data$trackName.id,
@@ -7632,17 +7648,52 @@
  };
  operationQueue.append(operation, type, !!this.pendingTracks[type]);
  };
+ _proto.blockAudio = function blockAudio(partOrFrag) {
+ var _this$fragmentTracker,
+ _this5 = this;
+ var pStart = partOrFrag.start;
+ var pTime = pStart + partOrFrag.duration * 0.05;
+ var atGap = ((_this$fragmentTracker = this.fragmentTracker.getAppendedFrag(pStart, PlaylistLevelType.MAIN)) == null ? void 0 : _this$fragmentTracker.gap) === true;
+ if (atGap) {
+ return;
+ }
+ var op = {
+ execute: function execute() {
+ var _this5$fragmentTracke;
+ if (_this5.lastVideoAppendEnd > pTime || _this5.sourceBuffer.video && BufferHelper.isBuffered(_this5.sourceBuffer.video, pTime) || ((_this5$fragmentTracke = _this5.fragmentTracker.getAppendedFrag(pTime, PlaylistLevelType.MAIN)) == null ? void 0 : _this5$fragmentTracke.gap) === true) {
+ _this5.blockedAudioAppend = null;
+ _this5.operationQueue.shiftAndExecuteNext('audio');
+ }
+ },
+ onStart: function onStart() {},
+ onComplete: function onComplete() {},
+ onError: function onError() {}
+ };
+ this.blockedAudioAppend = {
+ op: op,
+ frag: partOrFrag
+ };
+ this.operationQueue.append(op, 'audio', true);
+ };
+ _proto.unblockAudio = function unblockAudio() {
+ var blockedAudioAppend = this.blockedAudioAppend;
+ if (blockedAudioAppend) {
+ this.blockedAudioAppend = null;
+ this.operationQueue.unblockAudio(blockedAudioAppend.op);
+ }
+ };
  _proto.onBufferAppending = function onBufferAppending(event, eventData) {
- var _this5 = this;
- var hls = this.hls,
- operationQueue = this.operationQueue,
+ var _this6 = this;
+ var operationQueue = this.operationQueue,
  tracks = this.tracks;
  var data = eventData.data,
  type = eventData.type,
+ parent = eventData.parent,
  frag = eventData.frag,
  part = eventData.part,
  chunkMeta = eventData.chunkMeta;
  var chunkStats = chunkMeta.buffering[type];
+ var sn = frag.sn;
  var bufferAppendingStart = self.performance.now();
  chunkStats.start = bufferAppendingStart;
  var fragBuffering = frag.stats.buffering;
@@ -7665,21 +7716,50 @@
  checkTimestampOffset = !this.lastMpegAudioChunk || chunkMeta.id === 1 || this.lastMpegAudioChunk.sn !== chunkMeta.sn;
  this.lastMpegAudioChunk = chunkMeta;
  }
- var fragStart = frag.start;
+
+ // Block audio append until overlapping video append
+ var videoSb = this.sourceBuffer.video;
+ if (videoSb && sn !== 'initSegment') {
+ var partOrFrag = part || frag;
+ var blockedAudioAppend = this.blockedAudioAppend;
+ if (type === 'audio' && parent !== 'main' && !this.blockedAudioAppend) {
+ var pStart = partOrFrag.start;
+ var pTime = pStart + partOrFrag.duration * 0.05;
+ var vbuffered = videoSb.buffered;
+ var vappending = this.operationQueue.current('video');
+ if (!vbuffered.length && !vappending) {
+ // wait for video before appending audio
+ this.blockAudio(partOrFrag);
+ } else if (!vappending && !BufferHelper.isBuffered(videoSb, pTime) && this.lastVideoAppendEnd < pTime) {
+ // audio is ahead of video
+ this.blockAudio(partOrFrag);
+ }
+ } else if (type === 'video') {
+ var videoAppendEnd = partOrFrag.end;
+ if (blockedAudioAppend) {
+ var audioStart = blockedAudioAppend.frag.start;
+ if (videoAppendEnd > audioStart || videoAppendEnd < this.lastVideoAppendEnd || BufferHelper.isBuffered(videoSb, audioStart)) {
+ this.unblockAudio();
+ }
+ }
+ this.lastVideoAppendEnd = videoAppendEnd;
+ }
+ }
+ var fragStart = (part || frag).start;
  var operation = {
  execute: function execute() {
  chunkStats.executeStart = self.performance.now();
  if (checkTimestampOffset) {
- var sb = _this5.sourceBuffer[type];
+ var sb = _this6.sourceBuffer[type];
  if (sb) {
  var delta = fragStart - sb.timestampOffset;
  if (Math.abs(delta) >= 0.1) {
- _this5.log("Updating audio SourceBuffer timestampOffset to " + fragStart + " (delta: " + delta + ") sn: " + frag.sn + ")");
+ _this6.log("Updating audio SourceBuffer timestampOffset to " + fragStart + " (delta: " + delta + ") sn: " + sn + ")");
  sb.timestampOffset = fragStart;
  }
  }
  }
- _this5.appendExecutor(data, type);
+ _this6.appendExecutor(data, type);
  },
  onStart: function onStart() {
  // logger.debug(`[buffer-controller]: ${type} SourceBuffer updatestart`);
@@ -7694,19 +7774,19 @@
  if (partBuffering && partBuffering.first === 0) {
  partBuffering.first = end;
  }
- var sourceBuffer = _this5.sourceBuffer;
+ var sourceBuffer = _this6.sourceBuffer;
  var timeRanges = {};
  for (var _type in sourceBuffer) {
  timeRanges[_type] = BufferHelper.getBuffered(sourceBuffer[_type]);
  }
- _this5.appendErrors[type] = 0;
+ _this6.appendErrors[type] = 0;
  if (type === 'audio' || type === 'video') {
- _this5.appendErrors.audiovideo = 0;
+ _this6.appendErrors.audiovideo = 0;
  } else {
- _this5.appendErrors.audio = 0;
- _this5.appendErrors.video = 0;
+ _this6.appendErrors.audio = 0;
+ _this6.appendErrors.video = 0;
  }
- _this5.hls.trigger(Events.BUFFER_APPENDED, {
+ _this6.hls.trigger(Events.BUFFER_APPENDED, {
  type: type,
  frag: frag,
  part: part,
@@ -7734,51 +7814,57 @@
  // let's stop appending any segments, and report BUFFER_FULL_ERROR error
  event.details = ErrorDetails.BUFFER_FULL_ERROR;
  } else {
- var appendErrorCount = ++_this5.appendErrors[type];
+ var appendErrorCount = ++_this6.appendErrors[type];
  event.details = ErrorDetails.BUFFER_APPEND_ERROR;
  /* with UHD content, we could get loop of quota exceeded error until
  browser is able to evict some data from sourcebuffer. Retrying can help recover.
  */
- _this5.warn("Failed " + appendErrorCount + "/" + hls.config.appendErrorMaxRetry + " times to append segment in \"" + type + "\" sourceBuffer");
- if (appendErrorCount >= hls.config.appendErrorMaxRetry) {
+ _this6.warn("Failed " + appendErrorCount + "/" + _this6.hls.config.appendErrorMaxRetry + " times to append segment in \"" + type + "\" sourceBuffer");
+ if (appendErrorCount >= _this6.hls.config.appendErrorMaxRetry) {
  event.fatal = true;
  }
  }
- hls.trigger(Events.ERROR, event);
+ _this6.hls.trigger(Events.ERROR, event);
  }
  };
  operationQueue.append(operation, type, !!this.pendingTracks[type]);
  };
+ _proto.getFlushOp = function getFlushOp(type, start, end) {
+ var _this7 = this;
+ return {
+ execute: function execute() {
+ _this7.removeExecutor(type, start, end);
+ },
+ onStart: function onStart() {
+ // logger.debug(`[buffer-controller]: Started flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
+ },
+ onComplete: function onComplete() {
+ // logger.debug(`[buffer-controller]: Finished flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
+ _this7.hls.trigger(Events.BUFFER_FLUSHED, {
+ type: type
+ });
+ },
+ onError: function onError(error) {
+ _this7.warn("Failed to remove from " + type + " SourceBuffer", error);
+ }
+ };
+ };
  _proto.onBufferFlushing = function onBufferFlushing(event, data) {
- var _this6 = this;
+ var _this8 = this;
  var operationQueue = this.operationQueue;
- var flushOperation = function flushOperation(type) {
- return {
- execute: _this6.removeExecutor.bind(_this6, type, data.startOffset, data.endOffset),
- onStart: function onStart() {
- // logger.debug(`[buffer-controller]: Started flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
- },
- onComplete: function onComplete() {
- // logger.debug(`[buffer-controller]: Finished flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
- _this6.hls.trigger(Events.BUFFER_FLUSHED, {
- type: type
- });
- },
- onError: function onError(error) {
- _this6.warn("Failed to remove from " + type + " SourceBuffer", error);
- }
- };
- };
- if (data.type) {
- operationQueue.append(flushOperation(data.type), data.type);
+ var type = data.type,
+ startOffset = data.startOffset,
+ endOffset = data.endOffset;
+ if (type) {
+ operationQueue.append(this.getFlushOp(type, startOffset, endOffset), type);
  } else {
- this.getSourceBufferTypes().forEach(function (type) {
- operationQueue.append(flushOperation(type), type);
+ this.getSourceBufferTypes().forEach(function (sbType) {
+ operationQueue.append(_this8.getFlushOp(sbType, startOffset, endOffset), sbType);
  });
  }
  };
  _proto.onFragParsed = function onFragParsed(event, data) {
- var _this7 = this;
+ var _this9 = this;
  var frag = data.frag,
  part = data.part;
  var buffersAppendedTo = [];
@@ -7800,7 +7886,7 @@
  part.stats.buffering.end = now;
  }
  var stats = part ? part.stats : frag.stats;
- _this7.hls.trigger(Events.FRAG_BUFFERED, {
+ _this9.hls.trigger(Events.FRAG_BUFFERED, {
  frag: frag,
  part: part,
  stats: stats,
@@ -7820,14 +7906,17 @@
  // an undefined data.type will mark all buffers as EOS.
  ;
  _proto.onBufferEos = function onBufferEos(event, data) {
- var _this8 = this;
+ var _this10 = this;
+ if (data.type === 'video') {
+ this.unblockAudio();
+ }
  var ended = this.getSourceBufferTypes().reduce(function (acc, type) {
- var sb = _this8.sourceBuffer[type];
+ var sb = _this10.sourceBuffer[type];
  if (sb && (!data.type || data.type === type)) {
  sb.ending = true;
  if (!sb.ended) {
  sb.ended = true;
- _this8.log(type + " sourceBuffer now EOS");
+ _this10.log(type + " sourceBuffer now EOS");
  }
  }
  return acc && !!(!sb || sb.ended);
@@ -7835,35 +7924,42 @@
  if (ended) {
  this.log("Queueing mediaSource.endOfStream()");
  this.blockBuffers(function () {
- _this8.getSourceBufferTypes().forEach(function (type) {
- var sb = _this8.sourceBuffer[type];
+ _this10.getSourceBufferTypes().forEach(function (type) {
+ var sb = _this10.sourceBuffer[type];
  if (sb) {
  sb.ending = false;
  }
  });
- var mediaSource = _this8.mediaSource;
+ var mediaSource = _this10.mediaSource;
  if (!mediaSource || mediaSource.readyState !== 'open') {
  if (mediaSource) {
- _this8.log("Could not call mediaSource.endOfStream(). mediaSource.readyState: " + mediaSource.readyState);
+ _this10.log("Could not call mediaSource.endOfStream(). mediaSource.readyState: " + mediaSource.readyState);
  }
  return;
  }
- _this8.log("Calling mediaSource.endOfStream()");
+ _this10.log("Calling mediaSource.endOfStream()");
  // Allow this to throw and be caught by the enqueueing function
  mediaSource.endOfStream();
  });
  }
  };
  _proto.onLevelUpdated = function onLevelUpdated(event, _ref) {
+ var _this11 = this;
  var details = _ref.details;
  if (!details.fragments.length) {
  return;
  }
  this.details = details;
+ var durationAndRange = this.getDurationAndRange();
+ if (!durationAndRange) {
+ return;
+ }
  if (this.getSourceBufferTypes().length) {
- this.blockBuffers(this.updateMediaElementDuration.bind(this));
+ this.blockBuffers(function () {
+ return _this11.updateMediaSource(durationAndRange);
+ });
  } else {
- this.updateMediaElementDuration();
+ this.updateMediaSource(durationAndRange);
  }
  };
  _proto.trimBuffers = function trimBuffers() {
@@ -7896,7 +7992,7 @@
  }
  };
  _proto.flushBackBuffer = function flushBackBuffer(currentTime, targetDuration, targetBackBufferPosition) {
- var _this9 = this;
+ var _this12 = this;
  var details = this.details,
  sourceBuffer = this.sourceBuffer;
  var sourceBufferTypes = this.getSourceBufferTypes();
@@ -7906,20 +8002,20 @@
  var buffered = BufferHelper.getBuffered(sb);
  // when target buffer start exceeds actual buffer start
  if (buffered.length > 0 && targetBackBufferPosition > buffered.start(0)) {
- _this9.hls.trigger(Events.BACK_BUFFER_REACHED, {
+ _this12.hls.trigger(Events.BACK_BUFFER_REACHED, {
  bufferEnd: targetBackBufferPosition
  });

  // Support for deprecated event:
  if (details != null && details.live) {
- _this9.hls.trigger(Events.LIVE_BACK_BUFFER_REACHED, {
+ _this12.hls.trigger(Events.LIVE_BACK_BUFFER_REACHED, {
  bufferEnd: targetBackBufferPosition
  });
  } else if (sb.ended && buffered.end(buffered.length - 1) - currentTime < targetDuration * 2) {
- _this9.log("Cannot flush " + type + " back buffer while SourceBuffer is in ended state");
+ _this12.log("Cannot flush " + type + " back buffer while SourceBuffer is in ended state");
  return;
  }
- _this9.hls.trigger(Events.BUFFER_FLUSHING, {
+ _this12.hls.trigger(Events.BUFFER_FLUSHING, {
  startOffset: 0,
  endOffset: targetBackBufferPosition,
  type: type
@@ -7929,7 +8025,7 @@
  });
  };
  _proto.flushFrontBuffer = function flushFrontBuffer(currentTime, targetDuration, targetFrontBufferPosition) {
- var _this10 = this;
+ var _this13 = this;
  var sourceBuffer = this.sourceBuffer;
  var sourceBufferTypes = this.getSourceBufferTypes();
  sourceBufferTypes.forEach(function (type) {
@@ -7947,10 +8043,10 @@
  if (targetFrontBufferPosition > bufferStart || currentTime >= bufferStart && currentTime <= bufferEnd) {
  return;
  } else if (sb.ended && currentTime - bufferEnd < 2 * targetDuration) {
- _this10.log("Cannot flush " + type + " front buffer while SourceBuffer is in ended state");
+ _this13.log("Cannot flush " + type + " front buffer while SourceBuffer is in ended state");
  return;
  }
- _this10.hls.trigger(Events.BUFFER_FLUSHING, {
+ _this13.hls.trigger(Events.BUFFER_FLUSHING, {
  startOffset: bufferStart,
  endOffset: Infinity,
  type: type
@@ -7964,9 +8060,9 @@
  * 'liveDurationInfinity` is set to `true`
  * More details: https://github.com/video-dev/hls.js/issues/355
  */;
- _proto.updateMediaElementDuration = function updateMediaElementDuration() {
+ _proto.getDurationAndRange = function getDurationAndRange() {
  if (!this.details || !this.media || !this.mediaSource || this.mediaSource.readyState !== 'open') {
- return;
+ return null;
  }
  var details = this.details,
  hls = this.hls,
@@ -7978,25 +8074,40 @@
  if (details.live && hls.config.liveDurationInfinity) {
  // Override duration to Infinity
  mediaSource.duration = Infinity;
- this.updateSeekableRange(details);
+ var len = details.fragments.length;
+ if (len && details.live && !!mediaSource.setLiveSeekableRange) {
+ var start = Math.max(0, details.fragments[0].start);
+ var end = Math.max(start, start + details.totalduration);
+ return {
+ duration: Infinity,
+ start: start,
+ end: end
+ };
+ }
+ return {
+ duration: Infinity
+ };
  } else if (levelDuration > msDuration && levelDuration > mediaDuration || !isFiniteNumber(mediaDuration)) {
- // levelDuration was the last value we set.
- // not using mediaSource.duration as the browser may tweak this value
- // only update Media Source duration if its value increase, this is to avoid
- // flushing already buffered portion when switching between quality level
- this.log("Updating Media Source duration to " + levelDuration.toFixed(3));
- mediaSource.duration = levelDuration;
+ return {
+ duration: levelDuration
+ };
  }
+ return null;
  };
- _proto.updateSeekableRange = function updateSeekableRange(levelDetails) {
- var mediaSource = this.mediaSource;
- var fragments = levelDetails.fragments;
- var len = fragments.length;
- if (len && levelDetails.live && mediaSource != null && mediaSource.setLiveSeekableRange) {
- var start = Math.max(0, fragments[0].start);
- var end = Math.max(start, start + levelDetails.totalduration);
- this.log("Media Source duration is set to " + mediaSource.duration + ". Setting seekable range to " + start + "-" + end + ".");
- mediaSource.setLiveSeekableRange(start, end);
+ _proto.updateMediaSource = function updateMediaSource(_ref2) {
+ var duration = _ref2.duration,
+ start = _ref2.start,
+ end = _ref2.end;
+ if (!this.media || !this.mediaSource || this.mediaSource.readyState !== 'open') {
+ return;
+ }
+ if (isFiniteNumber(duration)) {
+ this.log("Updating Media Source duration to " + duration.toFixed(3));
+ }
+ this.mediaSource.duration = duration;
+ if (start !== undefined && end !== undefined) {
+ this.log("Media Source duration is set to " + this.mediaSource.duration + ". Setting seekable range to " + start + "-" + end + ".");
+ this.mediaSource.setLiveSeekableRange(start, end);
  }
  };
  _proto.checkPendingTracks = function checkPendingTracks() {
@@ -8035,7 +8146,7 @@
  }
  };
  _proto.createSourceBuffers = function createSourceBuffers(tracks) {
- var _this11 = this;
+ var _this14 = this;
  var sourceBuffer = this.sourceBuffer,
  mediaSource = this.mediaSource;
  if (!mediaSource) {
@@ -8051,28 +8162,28 @@
  var codec = track.levelCodec || track.codec;
  if (codec) {
  if (trackName.slice(0, 5) === 'audio') {
- codec = getCodecCompatibleName(codec, _this11.hls.config.preferManagedMediaSource);
+ codec = getCodecCompatibleName(codec, _this14.hls.config.preferManagedMediaSource);
  }
  }
  var mimeType = track.container + ";codecs=" + codec;
- _this11.log("creating sourceBuffer(" + mimeType + ")");
+ _this14.log("creating sourceBuffer(" + mimeType + ")");
  try {
  var sb = sourceBuffer[trackName] = mediaSource.addSourceBuffer(mimeType);
  var sbName = trackName;
- _this11.addBufferListener(sbName, 'updatestart', _this11._onSBUpdateStart);
- _this11.addBufferListener(sbName, 'updateend', _this11._onSBUpdateEnd);
- _this11.addBufferListener(sbName, 'error', _this11._onSBUpdateError);
+ _this14.addBufferListener(sbName, 'updatestart', _this14._onSBUpdateStart);
+ _this14.addBufferListener(sbName, 'updateend', _this14._onSBUpdateEnd);
+ _this14.addBufferListener(sbName, 'error', _this14._onSBUpdateError);
  // ManagedSourceBuffer bufferedchange event
- _this11.addBufferListener(sbName, 'bufferedchange', function (type, event) {
+ _this14.addBufferListener(sbName, 'bufferedchange', function (type, event) {
  // If media was ejected check for a change. Added ranges are redundant with changes on 'updateend' event.
  var removedRanges = event.removedRanges;
  if (removedRanges != null && removedRanges.length) {
- _this11.hls.trigger(Events.BUFFER_FLUSHED, {
+ _this14.hls.trigger(Events.BUFFER_FLUSHED, {
  type: trackName
  });
  }
  });
- _this11.tracks[trackName] = {
+ _this14.tracks[trackName] = {
  buffer: sb,
  codec: codec,
  container: track.container,
@@ -8081,8 +8192,8 @@
  id: track.id
  };
  } catch (err) {
- _this11.error("error while trying to add sourceBuffer: " + err.message);
- _this11.hls.trigger(Events.ERROR, {
+ _this14.error("error while trying to add sourceBuffer: " + err.message);
+ _this14.hls.trigger(Events.ERROR, {
  type: ErrorTypes.MEDIA_ERROR,
  details: ErrorDetails.BUFFER_ADD_CODEC_ERROR,
  fatal: false,
@@ -8170,6 +8281,7 @@
  }
  return;
  }
+ sb.ending = false;
  sb.ended = false;
  sb.appendBuffer(data);
  }
@@ -8179,7 +8291,7 @@
  // upon completion, since we already do it here
  ;
  _proto.blockBuffers = function blockBuffers(onUnblocked, buffers) {
- var _this12 = this;
+ var _this15 = this;
  if (buffers === void 0) {
  buffers = this.getSourceBufferTypes();
  }
@@ -8194,11 +8306,15 @@
  var blockingOperations = buffers.map(function (type) {
  return operationQueue.appendBlocker(type);
  });
- Promise.all(blockingOperations).then(function () {
+ var audioBlocked = buffers.length > 1 && !!this.blockedAudioAppend;
+ if (audioBlocked) {
+ this.unblockAudio();
+ }
+ Promise.all(blockingOperations).then(function (result) {
  // logger.debug(`[buffer-controller]: Blocking operation resolved; unblocking ${buffers} SourceBuffer`);
  onUnblocked();
- buffers.forEach(function (type) {
- var sb = _this12.sourceBuffer[type];
+ buffers.forEach(function (type, i) {
+ var sb = _this15.sourceBuffer[type];
  // Only cycle the queue if the SB is not updating. There's a bug in Chrome which sets the SB updating flag to
  // true when changing the MediaSource duration (https://bugs.chromium.org/p/chromium/issues/detail?id=959359&can=2&q=mediasource%20duration)
  // While this is a workaround, it's probably useful to have around
@@ -10557,11 +10673,14 @@
  * If not found any Fragment, return null
  */;
  _proto.getBufferedFrag = function getBufferedFrag(position, levelType) {
+ return this.getFragAtPos(position, levelType, true);
+ };
+ _proto.getFragAtPos = function getFragAtPos(position, levelType, buffered) {
  var fragments = this.fragments;
  var keys = Object.keys(fragments);
  for (var i = keys.length; i--;) {
  var fragmentEntity = fragments[keys[i]];
- if ((fragmentEntity == null ? void 0 : fragmentEntity.body.type) === levelType && fragmentEntity.buffered) {
+ if ((fragmentEntity == null ? void 0 : fragmentEntity.body.type) === levelType && (!buffered || fragmentEntity.buffered)) {
  var frag = fragmentEntity.body;
  if (frag.start <= position && position <= frag.end) {
  return frag;
@@ -10811,10 +10930,10 @@
  };
  };
  _proto.onBufferAppended = function onBufferAppended(event, data) {
- var _this3 = this;
  var frag = data.frag,
  part = data.part,
- timeRanges = data.timeRanges;
+ timeRanges = data.timeRanges,
+ type = data.type;
  if (frag.sn === 'initSegment') {
  return;
  }
@@ -10828,10 +10947,8 @@
  }
  // Store the latest timeRanges loaded in the buffer
  this.timeRanges = timeRanges;
- Object.keys(timeRanges).forEach(function (elementaryStream) {
- var timeRange = timeRanges[elementaryStream];
- _this3.detectEvictedFragments(elementaryStream, timeRange, playlistType, part);
- });
+ var timeRange = timeRanges[type];
+ this.detectEvictedFragments(type, timeRange, playlistType, part);
  };
  _proto.onFragBuffered = function onFragBuffered(event, data) {
  this.detectPartialFragments(data);
@@ -10845,12 +10962,12 @@
  return !!((_this$activePartLists = this.activePartLists[type]) != null && _this$activePartLists.length);
  };
  _proto.removeFragmentsInRange = function removeFragmentsInRange(start, end, playlistType, withGapOnly, unbufferedOnly) {
- var _this4 = this;
+ var _this3 = this;
  if (withGapOnly && !this.hasGaps) {
  return;
  }
  Object.keys(this.fragments).forEach(function (key) {
- var fragmentEntity = _this4.fragments[key];
+ var fragmentEntity = _this3.fragments[key];
  if (!fragmentEntity) {
  return;
  }
@@ -10859,7 +10976,7 @@
  return;
  }
  if (frag.start < end && frag.end > start && (fragmentEntity.buffered || unbufferedOnly)) {
- _this4.removeFragment(frag);
+ _this3.removeFragment(frag);
  }
  });
  };
@@ -13047,7 +13164,7 @@
  // Workaround flaw in getting forward buffer when maxBufferHole is smaller than gap at current pos
  if (bufferInfo.len === 0 && bufferInfo.nextStart !== undefined) {
  var bufferedFragAtPos = this.fragmentTracker.getBufferedFrag(pos, type);
- if (bufferedFragAtPos && bufferInfo.nextStart < bufferedFragAtPos.end) {
+ if (bufferedFragAtPos && (bufferInfo.nextStart <= bufferedFragAtPos.end || bufferedFragAtPos.gap)) {
  return BufferHelper.bufferInfo(bufferable, pos, Math.max(bufferInfo.nextStart, maxBufferHole));
  }
  }
@@ -20406,6 +20523,17 @@
  }
  };
  _createClass(StreamController, [{
+ key: "maxBufferLength",
+ get: function get() {
+ var levels = this.levels,
+ level = this.level;
+ var levelInfo = levels == null ? void 0 : levels[level];
+ if (!levelInfo) {
+ return this.config.maxBufferLength;
+ }
+ return this.getMaxBufferLength(levelInfo.maxBitrate);
+ }
+ }, {
  key: "nextLevel",
  get: function get() {
  var frag = this.nextBufferedFrag;
@@ -20542,7 +20670,9 @@
  ConfigFpsController = config.fpsController;
  var errorController = new ConfigErrorController(this);
  var abrController = this.abrController = new ConfigAbrController(this);
- var bufferController = this.bufferController = new ConfigBufferController(this);
+ // FragmentTracker must be defined before StreamController because the order of event handling is important
+ var fragmentTracker = new FragmentTracker(this);
+ var bufferController = this.bufferController = new ConfigBufferController(this, fragmentTracker);
  var capLevelController = this.capLevelController = new ConfigCapLevelController(this);
  var fpsController = new ConfigFpsController(this);
  var playListLoader = new PlaylistLoader(this);
@@ -20551,8 +20681,6 @@
  // ConentSteeringController is defined before LevelController to receive Multivariant Playlist events first
  var contentSteering = ConfigContentSteeringController ? new ConfigContentSteeringController(this) : null;
  var levelController = this.levelController = new LevelController(this, contentSteering);
- // FragmentTracker must be defined before StreamController because the order of event handling is important
- var fragmentTracker = new FragmentTracker(this);
  var keyLoader = new KeyLoader(this.config);
  var streamController = this.streamController = new StreamController(this, fragmentTracker, keyLoader);

@@ -21149,6 +21277,11 @@
  get: function get() {
  return this.streamController.getMainFwdBufferInfo();
  }
+ }, {
+ key: "maxBufferLength",
+ get: function get() {
+ return this.streamController.maxBufferLength;
+ }
  }, {
  key: "allAudioTracks",
  get: function get() {
@@ -21331,7 +21464,7 @@
  * Get the video-dev/hls.js package version.
  */
  function get() {
- return "1.5.7-0.canary.10014";
+ return "1.5.7-0.canary.10016";
  }
  }, {
  key: "Events",