hls.js 1.5.7-0.canary.10014 → 1.5.7-0.canary.10016

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
@@ -512,7 +512,7 @@ function enableLogs(debugConfig, context, id) {
  // Some browsers don't allow to use bind on console object anyway
  // fallback to default if needed
  try {
- newLogger.log(`Debug logs enabled for "${context}" in hls.js version ${"1.5.7-0.canary.10014"}`);
+ newLogger.log(`Debug logs enabled for "${context}" in hls.js version ${"1.5.7-0.canary.10016"}`);
  } catch (e) {
  /* log fn threw an exception. All logger methods are no-ops. */
  return createLogger();
@@ -1783,7 +1783,9 @@ function parseStsd(stsd) {
  }
  function skipBERInteger(bytes, i) {
  const limit = i + 5;
- while (bytes[i++] & 0x80 && i < limit) {}
+ while (bytes[i++] & 0x80 && i < limit) {
+ /* do nothing */
+ }
  return i;
  }
  function toHex(x) {
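For context on the loop above: BER-encoded lengths (as used in MP4 esds descriptors) set the high bit of each byte when more bytes follow, while the low 7 bits carry the value. A minimal decoding sketch, not part of hls.js (readBERInteger is an illustrative name):

function readBERInteger(bytes, i) {
  let value = 0;
  const limit = i + 5; // same 5-byte cap as skipBERInteger above
  while (i < limit) {
    const byte = bytes[i++];
    value = (value << 7) | (byte & 0x7f); // low 7 bits carry the payload
    if (!(byte & 0x80)) {
      break; // high bit clear: last byte of the integer
    }
  }
  return { value, end: i };
}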
@@ -6593,6 +6595,9 @@ class AbrController extends Logger {
  partCurrent,
  hls
  } = this;
+ if (hls.levels.length <= 1) {
+ return hls.loadLevel;
+ }
  const {
  maxAutoLevel,
  config,
@@ -6947,24 +6952,22 @@ class BufferOperationQueue {
  this.executeNext(type);
  }
  }
- insertAbort(operation, type) {
- const queue = this.queues[type];
- queue.unshift(operation);
- this.executeNext(type);
- }
  appendBlocker(type) {
- let execute;
- const promise = new Promise(resolve => {
- execute = resolve;
+ return new Promise(resolve => {
+ const operation = {
+ execute: resolve,
+ onStart: () => {},
+ onComplete: () => {},
+ onError: () => {}
+ };
+ this.append(operation, type);
  });
- const operation = {
- execute,
- onStart: () => {},
- onComplete: () => {},
- onError: () => {}
- };
- this.append(operation, type);
- return promise;
+ }
+ unblockAudio(op) {
+ const queue = this.queues.audio;
+ if (queue[0] === op) {
+ this.shiftAndExecuteNext('audio');
+ }
  }
  executeNext(type) {
  const queue = this.queues[type];
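A rough sketch of the queue contract after this refactor (inferred from the code above, not a documented hls.js API): appendBlocker enqueues a no-op operation whose execute resolves the returned promise, so the queue parks on that operation until it is shifted; the new unblockAudio only advances the audio queue if the given blocker is still at its head.

// Hypothetical consumer of BufferOperationQueue:
const blocked = operationQueue.appendBlocker('video');
blocked.then(() => {
  // Reached once the blocker is executed at the head of the 'video' queue.
  // The queue stays parked here until shiftAndExecuteNext('video') runs.
});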
@@ -6997,7 +7000,7 @@ class BufferOperationQueue {

  const VIDEO_CODEC_PROFILE_REPLACE = /(avc[1234]|hvc1|hev1|dvh[1e]|vp09|av01)(?:\.[^.,]+)+/;
  class BufferController extends Logger {
- constructor(hls) {
+ constructor(hls, fragmentTracker) {
  super('buffer-controller', hls.logger);
  // The level details used to determine duration, target-duration and live
  this.details = null;
@@ -7008,6 +7011,7 @@ class BufferController extends Logger {
  // References to event listeners for each SourceBuffer, so that they can be referenced for event removal
  this.listeners = void 0;
  this.hls = void 0;
+ this.fragmentTracker = void 0;
  // The number of BUFFER_CODEC events received before any sourceBuffers are created
  this.bufferCodecEventsExpected = 0;
  // The total number of BUFFER_CODEC events received
@@ -7018,6 +7022,10 @@ class BufferController extends Logger {
  this.mediaSource = null;
  // Last MP3 audio chunk appended
  this.lastMpegAudioChunk = null;
+ // Audio fragment blocked from appending until corresponding video appends or context changes
+ this.blockedAudioAppend = null;
+ // Keep track of video append position for unblocking audio
+ this.lastVideoAppendEnd = 0;
  this.appendSource = void 0;
  // counters
  this.appendErrors = {
@@ -7049,7 +7057,10 @@ class BufferController extends Logger {
  this.log('Media source opened');
  if (media) {
  media.removeEventListener('emptied', this._onMediaEmptied);
- this.updateMediaElementDuration();
+ const durationAndRange = this.getDurationAndRange();
+ if (durationAndRange) {
+ this.updateMediaSource(durationAndRange);
+ }
  this.hls.trigger(Events.MEDIA_ATTACHED, {
  media,
  mediaSource: mediaSource
@@ -7077,6 +7088,7 @@ class BufferController extends Logger {
  }
  };
  this.hls = hls;
+ this.fragmentTracker = fragmentTracker;
  this.appendSource = hls.config.preferManagedMediaSource;
  this._initSourceBuffer();
  this.registerListeners();
@@ -7089,7 +7101,7 @@ class BufferController extends Logger {
  this.details = null;
  this.lastMpegAudioChunk = null;
  // @ts-ignore
- this.hls = null;
+ this.hls = this.fragmentTracker = null;
  // @ts-ignore
  this._onMediaSourceOpen = this._onMediaSourceClose = null;
  // @ts-ignore
@@ -7145,6 +7157,8 @@ class BufferController extends Logger {
  audiovideo: 0
  };
  this.lastMpegAudioChunk = null;
+ this.blockedAudioAppend = null;
+ this.lastVideoAppendEnd = 0;
  }
  onManifestLoading() {
  this.bufferCodecEventsExpected = this._bufferCodecEventsTotal = 0;
@@ -7282,9 +7296,10 @@ class BufferController extends Logger {
  const trackNames = Object.keys(data);
  trackNames.forEach(trackName => {
  if (sourceBufferCount) {
+ var _track$buffer;
  // check if SourceBuffer codec needs to change
  const track = this.tracks[trackName];
- if (track && typeof track.buffer.changeType === 'function') {
+ if (track && typeof ((_track$buffer = track.buffer) == null ? void 0 : _track$buffer.changeType) === 'function') {
  var _trackCodec;
  const {
  id,
@@ -7354,20 +7369,54 @@ class BufferController extends Logger {
  };
  operationQueue.append(operation, type, !!this.pendingTracks[type]);
  }
+ blockAudio(partOrFrag) {
+ var _this$fragmentTracker;
+ const pStart = partOrFrag.start;
+ const pTime = pStart + partOrFrag.duration * 0.05;
+ const atGap = ((_this$fragmentTracker = this.fragmentTracker.getAppendedFrag(pStart, PlaylistLevelType.MAIN)) == null ? void 0 : _this$fragmentTracker.gap) === true;
+ if (atGap) {
+ return;
+ }
+ const op = {
+ execute: () => {
+ var _this$fragmentTracker2;
+ if (this.lastVideoAppendEnd > pTime || this.sourceBuffer.video && BufferHelper.isBuffered(this.sourceBuffer.video, pTime) || ((_this$fragmentTracker2 = this.fragmentTracker.getAppendedFrag(pTime, PlaylistLevelType.MAIN)) == null ? void 0 : _this$fragmentTracker2.gap) === true) {
+ this.blockedAudioAppend = null;
+ this.operationQueue.shiftAndExecuteNext('audio');
+ }
+ },
+ onStart: () => {},
+ onComplete: () => {},
+ onError: () => {}
+ };
+ this.blockedAudioAppend = {
+ op,
+ frag: partOrFrag
+ };
+ this.operationQueue.append(op, 'audio', true);
+ }
+ unblockAudio() {
+ const blockedAudioAppend = this.blockedAudioAppend;
+ if (blockedAudioAppend) {
+ this.blockedAudioAppend = null;
+ this.operationQueue.unblockAudio(blockedAudioAppend.op);
+ }
+ }
  onBufferAppending(event, eventData) {
  const {
- hls,
  operationQueue,
  tracks
  } = this;
  const {
  data,
  type,
+ parent,
  frag,
  part,
  chunkMeta
  } = eventData;
  const chunkStats = chunkMeta.buffering[type];
+ const sn = frag.sn;
  const bufferAppendingStart = self.performance.now();
  chunkStats.start = bufferAppendingStart;
  const fragBuffering = frag.stats.buffering;
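The gating rule used by blockAudio and onBufferAppending above, restated as a sketch (values are illustrative; appendedMainFragAt is a hypothetical stand-in for the fragmentTracker lookup): an audio fragment starting at pStart is held back until video has been appended past pStart plus 5% of the fragment duration, unless that position is already buffered or is a known gap.

// For a 6s audio fragment starting at 10.0s:
const pStart = 10.0;
const pTime = pStart + 6 * 0.05; // 10.3s
const canAppendAudio =
  lastVideoAppendEnd > pTime ||
  BufferHelper.isBuffered(videoSourceBuffer, pTime) ||
  appendedMainFragAt(pTime)?.gap === true; // hypothetical tracker helper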
@@ -7390,7 +7439,36 @@ class BufferController extends Logger {
  checkTimestampOffset = !this.lastMpegAudioChunk || chunkMeta.id === 1 || this.lastMpegAudioChunk.sn !== chunkMeta.sn;
  this.lastMpegAudioChunk = chunkMeta;
  }
- const fragStart = frag.start;
+
+ // Block audio append until overlapping video append
+ const videoSb = this.sourceBuffer.video;
+ if (videoSb && sn !== 'initSegment') {
+ const partOrFrag = part || frag;
+ const blockedAudioAppend = this.blockedAudioAppend;
+ if (type === 'audio' && parent !== 'main' && !this.blockedAudioAppend) {
+ const pStart = partOrFrag.start;
+ const pTime = pStart + partOrFrag.duration * 0.05;
+ const vbuffered = videoSb.buffered;
+ const vappending = this.operationQueue.current('video');
+ if (!vbuffered.length && !vappending) {
+ // wait for video before appending audio
+ this.blockAudio(partOrFrag);
+ } else if (!vappending && !BufferHelper.isBuffered(videoSb, pTime) && this.lastVideoAppendEnd < pTime) {
+ // audio is ahead of video
+ this.blockAudio(partOrFrag);
+ }
+ } else if (type === 'video') {
+ const videoAppendEnd = partOrFrag.end;
+ if (blockedAudioAppend) {
+ const audioStart = blockedAudioAppend.frag.start;
+ if (videoAppendEnd > audioStart || videoAppendEnd < this.lastVideoAppendEnd || BufferHelper.isBuffered(videoSb, audioStart)) {
+ this.unblockAudio();
+ }
+ }
+ this.lastVideoAppendEnd = videoAppendEnd;
+ }
+ }
+ const fragStart = (part || frag).start;
  const operation = {
  execute: () => {
  chunkStats.executeStart = self.performance.now();
@@ -7399,7 +7477,7 @@ class BufferController extends Logger {
  if (sb) {
  const delta = fragStart - sb.timestampOffset;
  if (Math.abs(delta) >= 0.1) {
- this.log(`Updating audio SourceBuffer timestampOffset to ${fragStart} (delta: ${delta}) sn: ${frag.sn})`);
+ this.log(`Updating audio SourceBuffer timestampOffset to ${fragStart} (delta: ${delta}) sn: ${sn})`);
  sb.timestampOffset = fragStart;
  }
  }
@@ -7466,22 +7544,21 @@ class BufferController extends Logger {
  /* with UHD content, we could get loop of quota exceeded error until
  browser is able to evict some data from sourcebuffer. Retrying can help recover.
  */
- this.warn(`Failed ${appendErrorCount}/${hls.config.appendErrorMaxRetry} times to append segment in "${type}" sourceBuffer`);
- if (appendErrorCount >= hls.config.appendErrorMaxRetry) {
+ this.warn(`Failed ${appendErrorCount}/${this.hls.config.appendErrorMaxRetry} times to append segment in "${type}" sourceBuffer`);
+ if (appendErrorCount >= this.hls.config.appendErrorMaxRetry) {
  event.fatal = true;
  }
  }
- hls.trigger(Events.ERROR, event);
+ this.hls.trigger(Events.ERROR, event);
  }
  };
  operationQueue.append(operation, type, !!this.pendingTracks[type]);
  }
- onBufferFlushing(event, data) {
- const {
- operationQueue
- } = this;
- const flushOperation = type => ({
- execute: this.removeExecutor.bind(this, type, data.startOffset, data.endOffset),
+ getFlushOp(type, start, end) {
+ return {
+ execute: () => {
+ this.removeExecutor(type, start, end);
+ },
  onStart: () => {
  // logger.debug(`[buffer-controller]: Started flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
  },
@@ -7494,12 +7571,22 @@ class BufferController extends Logger {
  onError: error => {
  this.warn(`Failed to remove from ${type} SourceBuffer`, error);
  }
- });
- if (data.type) {
- operationQueue.append(flushOperation(data.type), data.type);
+ };
+ }
+ onBufferFlushing(event, data) {
+ const {
+ operationQueue
+ } = this;
+ const {
+ type,
+ startOffset,
+ endOffset
+ } = data;
+ if (type) {
+ operationQueue.append(this.getFlushOp(type, startOffset, endOffset), type);
  } else {
- this.getSourceBufferTypes().forEach(type => {
- operationQueue.append(flushOperation(type), type);
+ this.getSourceBufferTypes().forEach(sbType => {
+ operationQueue.append(this.getFlushOp(sbType, startOffset, endOffset), sbType);
  });
  }
  }
@@ -7546,6 +7633,9 @@ class BufferController extends Logger {
  // on BUFFER_EOS mark matching sourcebuffer(s) as ended and trigger checkEos()
  // an undefined data.type will mark all buffers as EOS.
  onBufferEos(event, data) {
+ if (data.type === 'video') {
+ this.unblockAudio();
+ }
  const ended = this.getSourceBufferTypes().reduce((acc, type) => {
  const sb = this.sourceBuffer[type];
  if (sb && (!data.type || data.type === type)) {
@@ -7588,10 +7678,14 @@ class BufferController extends Logger {
  return;
  }
  this.details = details;
+ const durationAndRange = this.getDurationAndRange();
+ if (!durationAndRange) {
+ return;
+ }
  if (this.getSourceBufferTypes().length) {
- this.blockBuffers(this.updateMediaElementDuration.bind(this));
+ this.blockBuffers(() => this.updateMediaSource(durationAndRange));
  } else {
- this.updateMediaElementDuration();
+ this.updateMediaSource(durationAndRange);
  }
  }
  trimBuffers() {
@@ -7696,9 +7790,9 @@ class BufferController extends Logger {
  * 'liveDurationInfinity` is set to `true`
  * More details: https://github.com/video-dev/hls.js/issues/355
  */
- updateMediaElementDuration() {
+ getDurationAndRange() {
  if (!this.details || !this.media || !this.mediaSource || this.mediaSource.readyState !== 'open') {
- return;
+ return null;
  }
  const {
  details,
@@ -7712,25 +7806,41 @@ class BufferController extends Logger {
  if (details.live && hls.config.liveDurationInfinity) {
  // Override duration to Infinity
  mediaSource.duration = Infinity;
- this.updateSeekableRange(details);
+ const len = details.fragments.length;
+ if (len && details.live && !!mediaSource.setLiveSeekableRange) {
+ const start = Math.max(0, details.fragments[0].start);
+ const end = Math.max(start, start + details.totalduration);
+ return {
+ duration: Infinity,
+ start,
+ end
+ };
+ }
+ return {
+ duration: Infinity
+ };
  } else if (levelDuration > msDuration && levelDuration > mediaDuration || !isFiniteNumber(mediaDuration)) {
- // levelDuration was the last value we set.
- // not using mediaSource.duration as the browser may tweak this value
- // only update Media Source duration if its value increase, this is to avoid
- // flushing already buffered portion when switching between quality level
- this.log(`Updating Media Source duration to ${levelDuration.toFixed(3)}`);
- mediaSource.duration = levelDuration;
+ return {
+ duration: levelDuration
+ };
  }
+ return null;
  }
- updateSeekableRange(levelDetails) {
- const mediaSource = this.mediaSource;
- const fragments = levelDetails.fragments;
- const len = fragments.length;
- if (len && levelDetails.live && mediaSource != null && mediaSource.setLiveSeekableRange) {
- const start = Math.max(0, fragments[0].start);
- const end = Math.max(start, start + levelDetails.totalduration);
- this.log(`Media Source duration is set to ${mediaSource.duration}. Setting seekable range to ${start}-${end}.`);
- mediaSource.setLiveSeekableRange(start, end);
+ updateMediaSource({
+ duration,
+ start,
+ end
+ }) {
+ if (!this.media || !this.mediaSource || this.mediaSource.readyState !== 'open') {
+ return;
+ }
+ if (isFiniteNumber(duration)) {
+ this.log(`Updating Media Source duration to ${duration.toFixed(3)}`);
+ }
+ this.mediaSource.duration = duration;
+ if (start !== undefined && end !== undefined) {
+ this.log(`Media Source duration is set to ${this.mediaSource.duration}. Setting seekable range to ${start}-${end}.`);
+ this.mediaSource.setLiveSeekableRange(start, end);
  }
  }
  checkPendingTracks() {
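The value passed from getDurationAndRange() to updateMediaSource() takes one of a few shapes (summarized from the code above; there is no exported type for it):

// live stream, setLiveSeekableRange supported: { duration: Infinity, start, end }
// live stream, no setLiveSeekableRange:        { duration: Infinity }
// level duration grew past media duration:     { duration: levelDuration }
// nothing to update:                           null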
@@ -7913,6 +8023,7 @@ class BufferController extends Logger {
  }
  return;
  }
+ sb.ending = false;
  sb.ended = false;
  sb.appendBuffer(data);
  }
@@ -7932,10 +8043,14 @@ class BufferController extends Logger {

  // logger.debug(`[buffer-controller]: Blocking ${buffers} SourceBuffer`);
  const blockingOperations = buffers.map(type => operationQueue.appendBlocker(type));
- Promise.all(blockingOperations).then(() => {
+ const audioBlocked = buffers.length > 1 && !!this.blockedAudioAppend;
+ if (audioBlocked) {
+ this.unblockAudio();
+ }
+ Promise.all(blockingOperations).then(result => {
  // logger.debug(`[buffer-controller]: Blocking operation resolved; unblocking ${buffers} SourceBuffer`);
  onUnblocked();
- buffers.forEach(type => {
+ buffers.forEach((type, i) => {
  const sb = this.sourceBuffer[type];
  // Only cycle the queue if the SB is not updating. There's a bug in Chrome which sets the SB updating flag to
  // true when changing the MediaSource duration (https://bugs.chromium.org/p/chromium/issues/detail?id=959359&can=2&q=mediasource%20duration)
@@ -10254,13 +10369,16 @@ class FragmentTracker {
  * If not found any Fragment, return null
  */
  getBufferedFrag(position, levelType) {
+ return this.getFragAtPos(position, levelType, true);
+ }
+ getFragAtPos(position, levelType, buffered) {
  const {
  fragments
  } = this;
  const keys = Object.keys(fragments);
  for (let i = keys.length; i--;) {
  const fragmentEntity = fragments[keys[i]];
- if ((fragmentEntity == null ? void 0 : fragmentEntity.body.type) === levelType && fragmentEntity.buffered) {
+ if ((fragmentEntity == null ? void 0 : fragmentEntity.body.type) === levelType && (!buffered || fragmentEntity.buffered)) {
  const frag = fragmentEntity.body;
  if (frag.start <= position && position <= frag.end) {
  return frag;
@@ -10515,7 +10633,8 @@ class FragmentTracker {
  const {
  frag,
  part,
- timeRanges
+ timeRanges,
+ type
  } = data;
  if (frag.sn === 'initSegment') {
  return;
@@ -10530,10 +10649,8 @@ class FragmentTracker {
  }
  // Store the latest timeRanges loaded in the buffer
  this.timeRanges = timeRanges;
- Object.keys(timeRanges).forEach(elementaryStream => {
- const timeRange = timeRanges[elementaryStream];
- this.detectEvictedFragments(elementaryStream, timeRange, playlistType, part);
- });
+ const timeRange = timeRanges[type];
+ this.detectEvictedFragments(type, timeRange, playlistType, part);
  }
  onFragBuffered(event, data) {
  this.detectPartialFragments(data);
@@ -12710,7 +12827,7 @@ class BaseStreamController extends TaskLoop {
  // Workaround flaw in getting forward buffer when maxBufferHole is smaller than gap at current pos
  if (bufferInfo.len === 0 && bufferInfo.nextStart !== undefined) {
  const bufferedFragAtPos = this.fragmentTracker.getBufferedFrag(pos, type);
- if (bufferedFragAtPos && bufferInfo.nextStart < bufferedFragAtPos.end) {
+ if (bufferedFragAtPos && (bufferInfo.nextStart <= bufferedFragAtPos.end || bufferedFragAtPos.gap)) {
  return BufferHelper.bufferInfo(bufferable, pos, Math.max(bufferInfo.nextStart, maxBufferHole));
  }
  }
@@ -19890,6 +20007,17 @@ class StreamController extends BaseStreamController {
  getMainFwdBufferInfo() {
  return this.getFwdBufferInfo(this.mediaBuffer ? this.mediaBuffer : this.media, PlaylistLevelType.MAIN);
  }
+ get maxBufferLength() {
+ const {
+ levels,
+ level
+ } = this;
+ const levelInfo = levels == null ? void 0 : levels[level];
+ if (!levelInfo) {
+ return this.config.maxBufferLength;
+ }
+ return this.getMaxBufferLength(levelInfo.maxBitrate);
+ }
  backtrack(frag) {
  this.couldBacktrack = true;
  // Causes findFragments to backtrack through fragments to find the keyframe
@@ -19995,7 +20123,7 @@ class Hls {
  * Get the video-dev/hls.js package version.
  */
  static get version() {
- return "1.5.7-0.canary.10014";
+ return "1.5.7-0.canary.10016";
  }

  /**
@@ -20097,7 +20225,9 @@ class Hls {
  } = config;
  const errorController = new ConfigErrorController(this);
  const abrController = this.abrController = new ConfigAbrController(this);
- const bufferController = this.bufferController = new ConfigBufferController(this);
+ // FragmentTracker must be defined before StreamController because the order of event handling is important
+ const fragmentTracker = new FragmentTracker(this);
+ const bufferController = this.bufferController = new ConfigBufferController(this, fragmentTracker);
  const capLevelController = this.capLevelController = new ConfigCapLevelController(this);
  const fpsController = new ConfigFpsController(this);
  const playListLoader = new PlaylistLoader(this);
@@ -20106,8 +20236,6 @@ class Hls {
  // ConentSteeringController is defined before LevelController to receive Multivariant Playlist events first
  const contentSteering = ConfigContentSteeringController ? new ConfigContentSteeringController(this) : null;
  const levelController = this.levelController = new LevelController(this, contentSteering);
- // FragmentTracker must be defined before StreamController because the order of event handling is important
- const fragmentTracker = new FragmentTracker(this);
  const keyLoader = new KeyLoader(this.config);
  const streamController = this.streamController = new StreamController(this, fragmentTracker, keyLoader);

@@ -20635,6 +20763,9 @@ class Hls {
  get mainForwardBufferInfo() {
  return this.streamController.getMainFwdBufferInfo();
  }
+ get maxBufferLength() {
+ return this.streamController.maxBufferLength;
+ }

  /**
  * Find and select the best matching audio track, making a level switch when a Group change is necessary.
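Usage sketch for the new read-only getter (assumes a standard hls.js setup): hls.maxBufferLength exposes the effective forward-buffer target in seconds, which the stream controller derives from the current level's peak bitrate, so it may differ from the configured maxBufferLength.

const hls = new Hls({ maxBufferLength: 30 });
hls.on(Hls.Events.LEVEL_SWITCHED, () => {
  // Log the effective forward-buffer target for the active level.
  console.log(`target forward buffer: ${hls.maxBufferLength}s`);
});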