hls.js 1.5.7-0.canary.10015 → 1.5.7-0.canary.10016

This diff shows the publicly released contents of the two package versions as they appear in their respective public registries. It is provided for informational purposes only.
package/dist/hls.mjs CHANGED
@@ -512,7 +512,7 @@ function enableLogs(debugConfig, context, id) {
   // Some browsers don't allow to use bind on console object anyway
   // fallback to default if needed
   try {
-    newLogger.log(`Debug logs enabled for "${context}" in hls.js version ${"1.5.7-0.canary.10015"}`);
+    newLogger.log(`Debug logs enabled for "${context}" in hls.js version ${"1.5.7-0.canary.10016"}`);
   } catch (e) {
     /* log fn threw an exception. All logger methods are no-ops. */
     return createLogger();
@@ -7292,6 +7292,9 @@ class AbrController extends Logger {
       partCurrent,
       hls
     } = this;
+    if (hls.levels.length <= 1) {
+      return hls.loadLevel;
+    }
     const {
       maxAutoLevel,
       config,
@@ -7702,13 +7705,16 @@ class FragmentTracker {
    * If not found any Fragment, return null
    */
   getBufferedFrag(position, levelType) {
+    return this.getFragAtPos(position, levelType, true);
+  }
+  getFragAtPos(position, levelType, buffered) {
     const {
       fragments
     } = this;
     const keys = Object.keys(fragments);
     for (let i = keys.length; i--;) {
       const fragmentEntity = fragments[keys[i]];
-      if ((fragmentEntity == null ? void 0 : fragmentEntity.body.type) === levelType && fragmentEntity.buffered) {
+      if ((fragmentEntity == null ? void 0 : fragmentEntity.body.type) === levelType && (!buffered || fragmentEntity.buffered)) {
         const frag = fragmentEntity.body;
         if (frag.start <= position && position <= frag.end) {
           return frag;
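
Note: getBufferedFrag is now a thin wrapper over the new getFragAtPos, whose third argument controls whether only buffered fragments match. A minimal sketch of the relationship, assuming a FragmentTracker instance named tracker (a hypothetical variable, not part of the diff):

    // buffered = true reproduces the old getBufferedFrag behavior;
    // buffered = false also matches tracked fragments that are not yet buffered,
    // which is how GAP fragments in the main playlist can now be found.
    const anyFrag = tracker.getFragAtPos(5, PlaylistLevelType.MAIN, false);
    const bufferedFrag = tracker.getBufferedFrag(5, PlaylistLevelType.MAIN);
    // bufferedFrag === tracker.getFragAtPos(5, PlaylistLevelType.MAIN, true)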
@@ -7963,7 +7969,8 @@ class FragmentTracker {
     const {
       frag,
       part,
-      timeRanges
+      timeRanges,
+      type
     } = data;
     if (frag.sn === 'initSegment') {
       return;
@@ -7978,10 +7985,8 @@ class FragmentTracker {
     }
     // Store the latest timeRanges loaded in the buffer
     this.timeRanges = timeRanges;
-    Object.keys(timeRanges).forEach(elementaryStream => {
-      const timeRange = timeRanges[elementaryStream];
-      this.detectEvictedFragments(elementaryStream, timeRange, playlistType, part);
-    });
+    const timeRange = timeRanges[type];
+    this.detectEvictedFragments(type, timeRange, playlistType, part);
   }
   onFragBuffered(event, data) {
     this.detectPartialFragments(data);
@@ -9936,7 +9941,7 @@ class BaseStreamController extends TaskLoop {
     // Workaround flaw in getting forward buffer when maxBufferHole is smaller than gap at current pos
     if (bufferInfo.len === 0 && bufferInfo.nextStart !== undefined) {
       const bufferedFragAtPos = this.fragmentTracker.getBufferedFrag(pos, type);
-      if (bufferedFragAtPos && bufferInfo.nextStart < bufferedFragAtPos.end) {
+      if (bufferedFragAtPos && (bufferInfo.nextStart <= bufferedFragAtPos.end || bufferedFragAtPos.gap)) {
         return BufferHelper.bufferInfo(bufferable, pos, Math.max(bufferInfo.nextStart, maxBufferHole));
       }
     }
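
Note: the workaround now also fires when nextStart lands exactly on the buffered fragment's end, or when the fragment at the current position is a GAP. A worked sketch of the changed condition, with hypothetical numbers:

    // pos = 10, bufferInfo.nextStart = 12, tracked fragment at pos ends at 12:
    // old: 12 < 12  -> false, the hole at the fragment boundary is not bridged
    // new: 12 <= 12 -> true (or bufferedFragAtPos.gap), so bufferInfo is
    // recomputed with maxBufferHole and playback can step over the gap
    const bridge = bufferedFragAtPos &&
      (bufferInfo.nextStart <= bufferedFragAtPos.end || bufferedFragAtPos.gap);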
@@ -16792,9 +16797,8 @@ class AudioStreamController extends BaseStreamController {
       this.state = State.ENDED;
       return;
     }
-    const mainBufferInfo = this.getFwdBufferInfo(this.videoBuffer ? this.videoBuffer : this.media, PlaylistLevelType.MAIN);
     const bufferLen = bufferInfo.len;
-    const maxBufLen = this.getMaxBufferLength(mainBufferInfo == null ? void 0 : mainBufferInfo.len);
+    const maxBufLen = hls.maxBufferLength;
     const fragments = trackDetails.fragments;
     const start = fragments[0].start;
     let targetBufferTime = this.flushing ? this.getLoadPosition() : bufferInfo.end;
@@ -16829,32 +16833,25 @@
       this.bufferFlushed = true;
       return;
     }
-
-    // Buffer audio up to one target duration ahead of main buffer
-    const atBufferSyncLimit = mainBufferInfo && frag.start > mainBufferInfo.end + trackDetails.targetduration;
-    if (atBufferSyncLimit ||
-    // Or wait for main buffer after buffing some audio
-    !(mainBufferInfo != null && mainBufferInfo.len) && bufferInfo.len) {
-      // Check fragment-tracker for main fragments since GAP segments do not show up in bufferInfo
-      const mainFrag = this.getAppendedFrag(frag.start, PlaylistLevelType.MAIN);
-      if (mainFrag === null) {
-        return;
-      }
-      // Bridge gaps in main buffer
-      atGap || (atGap = !!mainFrag.gap || !!atBufferSyncLimit && mainBufferInfo.len === 0);
-      if (atBufferSyncLimit && !atGap || atGap && bufferInfo.nextStart && bufferInfo.nextStart < mainFrag.end) {
-        return;
+    if (!trackDetails.live || targetBufferTime < this.hls.liveSyncPosition) {
+      // Request audio segments up to one fragment ahead of main buffer
+      const mainBufferInfo = this.getFwdBufferInfo(this.videoBuffer ? this.videoBuffer : this.media, PlaylistLevelType.MAIN);
+      const atBufferSyncLimit = !!mainBufferInfo && frag.start > mainBufferInfo.end + frag.duration;
+      if (atBufferSyncLimit) {
+        // Check fragment-tracker for main fragments since GAP segments do not show up in bufferInfo
+        const mainFrag = this.fragmentTracker.getFragAtPos(frag.start, PlaylistLevelType.MAIN);
+        if (mainFrag === null) {
+          return;
+        }
+        // Bridge gaps in main buffer (also prevents loop loading at gaps)
+        atGap || (atGap = !!mainFrag.gap || mainBufferInfo.len === 0);
+        if (!atGap || bufferInfo.nextStart && bufferInfo.nextStart < mainFrag.end) {
+          return;
+        }
       }
     }
     this.loadFragment(frag, levelInfo, targetBufferTime);
   }
-  getMaxBufferLength(mainBufferLength) {
-    const maxConfigBuffer = super.getMaxBufferLength();
-    if (!mainBufferLength) {
-      return maxConfigBuffer;
-    }
-    return Math.min(Math.max(maxConfigBuffer, mainBufferLength), this.config.maxMaxBufferLength);
-  }
   onMediaDetaching() {
     this.videoBuffer = null;
     this.bufferFlushed = this.flushing = false;
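
Note: the audio loader's sync limit is now the fragment's own duration rather than the track's target duration, and it is only evaluated while behind the live sync position. A worked sketch of the new guard with hypothetical numbers:

    // Main buffer ends at 20 s and the candidate audio fragment is 4 s long:
    // a fragment starting at 26 s gives 26 > 20 + 4, so audio waits for main;
    // one starting at 23 s gives 23 > 24 -> false, so it may load one
    // fragment ahead of the main buffer.
    const atBufferSyncLimit = !!mainBufferInfo && frag.start > mainBufferInfo.end + frag.duration;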
@@ -17948,9 +17945,8 @@ class SubtitleStreamController extends BaseStreamController {
       end: targetBufferTime,
       len: bufferLen
     } = bufferedInfo;
-    const mainBufferInfo = this.getFwdBufferInfo(this.media, PlaylistLevelType.MAIN);
     const trackDetails = track.details;
-    const maxBufLen = this.getMaxBufferLength(mainBufferInfo == null ? void 0 : mainBufferInfo.len) + trackDetails.levelTargetDuration;
+    const maxBufLen = this.hls.maxBufferLength + trackDetails.levelTargetDuration;
     if (bufferLen > maxBufLen) {
       return;
     }
@@ -17987,13 +17983,6 @@
       }
     }
   }
-  getMaxBufferLength(mainBufferLength) {
-    const maxConfigBuffer = super.getMaxBufferLength();
-    if (!mainBufferLength) {
-      return maxConfigBuffer;
-    }
-    return Math.max(maxConfigBuffer, mainBufferLength);
-  }
   loadFragment(frag, level, targetBufferTime) {
     this.fragCurrent = frag;
     if (frag.sn === 'initSegment') {
@@ -18491,24 +18480,22 @@ class BufferOperationQueue {
       this.executeNext(type);
     }
   }
-  insertAbort(operation, type) {
-    const queue = this.queues[type];
-    queue.unshift(operation);
-    this.executeNext(type);
-  }
   appendBlocker(type) {
-    let execute;
-    const promise = new Promise(resolve => {
-      execute = resolve;
+    return new Promise(resolve => {
+      const operation = {
+        execute: resolve,
+        onStart: () => {},
+        onComplete: () => {},
+        onError: () => {}
+      };
+      this.append(operation, type);
     });
-    const operation = {
-      execute,
-      onStart: () => {},
-      onComplete: () => {},
-      onError: () => {}
-    };
-    this.append(operation, type);
-    return promise;
+  }
+  unblockAudio(op) {
+    const queue = this.queues.audio;
+    if (queue[0] === op) {
+      this.shiftAndExecuteNext('audio');
+    }
   }
   executeNext(type) {
     const queue = this.queues[type];
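
Note: appendBlocker now builds and enqueues the blocking operation inside the Promise executor, and the new unblockAudio only cycles the queue when the given operation is still at its head. A minimal usage sketch, assuming a BufferOperationQueue instance named queue (hypothetical):

    // The returned promise resolves when the queue reaches the blocker; the
    // queue then stays parked on it until shiftAndExecuteNext is called,
    // which is how blockBuffers() pauses every SourceBuffer queue in lockstep.
    const blocked = queue.appendBlocker('video');
    blocked.then(() => {
      // safe point: no other 'video' operation is executing here
    });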
@@ -18541,7 +18528,7 @@
 
 const VIDEO_CODEC_PROFILE_REPLACE = /(avc[1234]|hvc1|hev1|dvh[1e]|vp09|av01)(?:\.[^.,]+)+/;
 class BufferController extends Logger {
-  constructor(hls) {
+  constructor(hls, fragmentTracker) {
     super('buffer-controller', hls.logger);
     // The level details used to determine duration, target-duration and live
     this.details = null;
@@ -18552,6 +18539,7 @@ class BufferController extends Logger {
     // References to event listeners for each SourceBuffer, so that they can be referenced for event removal
     this.listeners = void 0;
     this.hls = void 0;
+    this.fragmentTracker = void 0;
     // The number of BUFFER_CODEC events received before any sourceBuffers are created
     this.bufferCodecEventsExpected = 0;
     // The total number of BUFFER_CODEC events received
@@ -18562,6 +18550,10 @@
     this.mediaSource = null;
     // Last MP3 audio chunk appended
     this.lastMpegAudioChunk = null;
+    // Audio fragment blocked from appending until corresponding video appends or context changes
+    this.blockedAudioAppend = null;
+    // Keep track of video append position for unblocking audio
+    this.lastVideoAppendEnd = 0;
     this.appendSource = void 0;
     // counters
     this.appendErrors = {
@@ -18593,7 +18585,10 @@
       this.log('Media source opened');
       if (media) {
         media.removeEventListener('emptied', this._onMediaEmptied);
-        this.updateMediaElementDuration();
+        const durationAndRange = this.getDurationAndRange();
+        if (durationAndRange) {
+          this.updateMediaSource(durationAndRange);
+        }
         this.hls.trigger(Events.MEDIA_ATTACHED, {
           media,
           mediaSource: mediaSource
@@ -18621,6 +18616,7 @@
       }
     };
     this.hls = hls;
+    this.fragmentTracker = fragmentTracker;
     this.appendSource = hls.config.preferManagedMediaSource;
     this._initSourceBuffer();
     this.registerListeners();
@@ -18633,7 +18629,7 @@
     this.details = null;
     this.lastMpegAudioChunk = null;
     // @ts-ignore
-    this.hls = null;
+    this.hls = this.fragmentTracker = null;
     // @ts-ignore
     this._onMediaSourceOpen = this._onMediaSourceClose = null;
     // @ts-ignore
@@ -18689,6 +18685,8 @@
       audiovideo: 0
     };
     this.lastMpegAudioChunk = null;
+    this.blockedAudioAppend = null;
+    this.lastVideoAppendEnd = 0;
   }
   onManifestLoading() {
     this.bufferCodecEventsExpected = this._bufferCodecEventsTotal = 0;
@@ -18826,9 +18824,10 @@
     const trackNames = Object.keys(data);
     trackNames.forEach(trackName => {
       if (sourceBufferCount) {
+        var _track$buffer;
         // check if SourceBuffer codec needs to change
         const track = this.tracks[trackName];
-        if (track && typeof track.buffer.changeType === 'function') {
+        if (track && typeof ((_track$buffer = track.buffer) == null ? void 0 : _track$buffer.changeType) === 'function') {
           var _trackCodec;
           const {
             id,
@@ -18898,20 +18897,54 @@
     };
     operationQueue.append(operation, type, !!this.pendingTracks[type]);
   }
+  blockAudio(partOrFrag) {
+    var _this$fragmentTracker;
+    const pStart = partOrFrag.start;
+    const pTime = pStart + partOrFrag.duration * 0.05;
+    const atGap = ((_this$fragmentTracker = this.fragmentTracker.getAppendedFrag(pStart, PlaylistLevelType.MAIN)) == null ? void 0 : _this$fragmentTracker.gap) === true;
+    if (atGap) {
+      return;
+    }
+    const op = {
+      execute: () => {
+        var _this$fragmentTracker2;
+        if (this.lastVideoAppendEnd > pTime || this.sourceBuffer.video && BufferHelper.isBuffered(this.sourceBuffer.video, pTime) || ((_this$fragmentTracker2 = this.fragmentTracker.getAppendedFrag(pTime, PlaylistLevelType.MAIN)) == null ? void 0 : _this$fragmentTracker2.gap) === true) {
+          this.blockedAudioAppend = null;
+          this.operationQueue.shiftAndExecuteNext('audio');
+        }
+      },
+      onStart: () => {},
+      onComplete: () => {},
+      onError: () => {}
+    };
+    this.blockedAudioAppend = {
+      op,
+      frag: partOrFrag
+    };
+    this.operationQueue.append(op, 'audio', true);
+  }
+  unblockAudio() {
+    const blockedAudioAppend = this.blockedAudioAppend;
+    if (blockedAudioAppend) {
+      this.blockedAudioAppend = null;
+      this.operationQueue.unblockAudio(blockedAudioAppend.op);
+    }
+  }
   onBufferAppending(event, eventData) {
     const {
-      hls,
       operationQueue,
       tracks
     } = this;
     const {
       data,
       type,
+      parent,
       frag,
       part,
       chunkMeta
     } = eventData;
     const chunkStats = chunkMeta.buffering[type];
+    const sn = frag.sn;
     const bufferAppendingStart = self.performance.now();
     chunkStats.start = bufferAppendingStart;
     const fragBuffering = frag.stats.buffering;
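
Note: these new methods park a non-main audio append in the queue until video has been appended past the start of that audio fragment, keeping the two SourceBuffers from drifting apart. A worked sketch of the unblock threshold, with hypothetical numbers:

    // A 4 s audio fragment starting at 12 s stays blocked until video covers
    // pTime = 12 + 4 * 0.05 = 12.2 s (5% into the fragment), is already
    // buffered at pTime, or a GAP fragment is tracked there.
    const pTime = partOrFrag.start + partOrFrag.duration * 0.05;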
@@ -18934,7 +18967,36 @@
       checkTimestampOffset = !this.lastMpegAudioChunk || chunkMeta.id === 1 || this.lastMpegAudioChunk.sn !== chunkMeta.sn;
       this.lastMpegAudioChunk = chunkMeta;
     }
-    const fragStart = frag.start;
+
+    // Block audio append until overlapping video append
+    const videoSb = this.sourceBuffer.video;
+    if (videoSb && sn !== 'initSegment') {
+      const partOrFrag = part || frag;
+      const blockedAudioAppend = this.blockedAudioAppend;
+      if (type === 'audio' && parent !== 'main' && !this.blockedAudioAppend) {
+        const pStart = partOrFrag.start;
+        const pTime = pStart + partOrFrag.duration * 0.05;
+        const vbuffered = videoSb.buffered;
+        const vappending = this.operationQueue.current('video');
+        if (!vbuffered.length && !vappending) {
+          // wait for video before appending audio
+          this.blockAudio(partOrFrag);
+        } else if (!vappending && !BufferHelper.isBuffered(videoSb, pTime) && this.lastVideoAppendEnd < pTime) {
+          // audio is ahead of video
+          this.blockAudio(partOrFrag);
+        }
+      } else if (type === 'video') {
+        const videoAppendEnd = partOrFrag.end;
+        if (blockedAudioAppend) {
+          const audioStart = blockedAudioAppend.frag.start;
+          if (videoAppendEnd > audioStart || videoAppendEnd < this.lastVideoAppendEnd || BufferHelper.isBuffered(videoSb, audioStart)) {
+            this.unblockAudio();
+          }
+        }
+        this.lastVideoAppendEnd = videoAppendEnd;
+      }
+    }
+    const fragStart = (part || frag).start;
     const operation = {
       execute: () => {
         chunkStats.executeStart = self.performance.now();
@@ -18943,7 +19005,7 @@
         if (sb) {
           const delta = fragStart - sb.timestampOffset;
           if (Math.abs(delta) >= 0.1) {
-            this.log(`Updating audio SourceBuffer timestampOffset to ${fragStart} (delta: ${delta}) sn: ${frag.sn})`);
+            this.log(`Updating audio SourceBuffer timestampOffset to ${fragStart} (delta: ${delta}) sn: ${sn})`);
             sb.timestampOffset = fragStart;
           }
         }
@@ -19010,22 +19072,21 @@
           /* with UHD content, we could get loop of quota exceeded error until
              browser is able to evict some data from sourcebuffer. Retrying can help recover.
           */
-          this.warn(`Failed ${appendErrorCount}/${hls.config.appendErrorMaxRetry} times to append segment in "${type}" sourceBuffer`);
-          if (appendErrorCount >= hls.config.appendErrorMaxRetry) {
+          this.warn(`Failed ${appendErrorCount}/${this.hls.config.appendErrorMaxRetry} times to append segment in "${type}" sourceBuffer`);
+          if (appendErrorCount >= this.hls.config.appendErrorMaxRetry) {
            event.fatal = true;
          }
        }
-        hls.trigger(Events.ERROR, event);
+        this.hls.trigger(Events.ERROR, event);
      }
    };
    operationQueue.append(operation, type, !!this.pendingTracks[type]);
  }
-  onBufferFlushing(event, data) {
-    const {
-      operationQueue
-    } = this;
-    const flushOperation = type => ({
-      execute: this.removeExecutor.bind(this, type, data.startOffset, data.endOffset),
+  getFlushOp(type, start, end) {
+    return {
+      execute: () => {
+        this.removeExecutor(type, start, end);
+      },
      onStart: () => {
        // logger.debug(`[buffer-controller]: Started flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
      },
@@ -19038,12 +19099,22 @@
      onError: error => {
        this.warn(`Failed to remove from ${type} SourceBuffer`, error);
      }
-    });
-    if (data.type) {
-      operationQueue.append(flushOperation(data.type), data.type);
+    };
+  }
+  onBufferFlushing(event, data) {
+    const {
+      operationQueue
+    } = this;
+    const {
+      type,
+      startOffset,
+      endOffset
+    } = data;
+    if (type) {
+      operationQueue.append(this.getFlushOp(type, startOffset, endOffset), type);
    } else {
-      this.getSourceBufferTypes().forEach(type => {
-        operationQueue.append(flushOperation(type), type);
+      this.getSourceBufferTypes().forEach(sbType => {
+        operationQueue.append(this.getFlushOp(sbType, startOffset, endOffset), sbType);
      });
    }
  }
@@ -19090,6 +19161,9 @@
   // on BUFFER_EOS mark matching sourcebuffer(s) as ended and trigger checkEos()
   // an undefined data.type will mark all buffers as EOS.
   onBufferEos(event, data) {
+    if (data.type === 'video') {
+      this.unblockAudio();
+    }
     const ended = this.getSourceBufferTypes().reduce((acc, type) => {
       const sb = this.sourceBuffer[type];
       if (sb && (!data.type || data.type === type)) {
@@ -19132,10 +19206,14 @@
       return;
     }
     this.details = details;
+    const durationAndRange = this.getDurationAndRange();
+    if (!durationAndRange) {
+      return;
+    }
     if (this.getSourceBufferTypes().length) {
-      this.blockBuffers(this.updateMediaElementDuration.bind(this));
+      this.blockBuffers(() => this.updateMediaSource(durationAndRange));
     } else {
-      this.updateMediaElementDuration();
+      this.updateMediaSource(durationAndRange);
     }
   }
   trimBuffers() {
@@ -19240,9 +19318,9 @@
    * 'liveDurationInfinity` is set to `true`
    * More details: https://github.com/video-dev/hls.js/issues/355
    */
-  updateMediaElementDuration() {
+  getDurationAndRange() {
     if (!this.details || !this.media || !this.mediaSource || this.mediaSource.readyState !== 'open') {
-      return;
+      return null;
     }
     const {
       details,
@@ -19256,25 +19334,41 @@
     if (details.live && hls.config.liveDurationInfinity) {
       // Override duration to Infinity
       mediaSource.duration = Infinity;
-      this.updateSeekableRange(details);
+      const len = details.fragments.length;
+      if (len && details.live && !!mediaSource.setLiveSeekableRange) {
+        const start = Math.max(0, details.fragments[0].start);
+        const end = Math.max(start, start + details.totalduration);
+        return {
+          duration: Infinity,
+          start,
+          end
+        };
+      }
+      return {
+        duration: Infinity
+      };
     } else if (levelDuration > msDuration && levelDuration > mediaDuration || !isFiniteNumber(mediaDuration)) {
-      // levelDuration was the last value we set.
-      // not using mediaSource.duration as the browser may tweak this value
-      // only update Media Source duration if its value increase, this is to avoid
-      // flushing already buffered portion when switching between quality level
-      this.log(`Updating Media Source duration to ${levelDuration.toFixed(3)}`);
-      mediaSource.duration = levelDuration;
+      return {
+        duration: levelDuration
+      };
     }
+    return null;
   }
-  updateSeekableRange(levelDetails) {
-    const mediaSource = this.mediaSource;
-    const fragments = levelDetails.fragments;
-    const len = fragments.length;
-    if (len && levelDetails.live && mediaSource != null && mediaSource.setLiveSeekableRange) {
-      const start = Math.max(0, fragments[0].start);
-      const end = Math.max(start, start + levelDetails.totalduration);
-      this.log(`Media Source duration is set to ${mediaSource.duration}. Setting seekable range to ${start}-${end}.`);
-      mediaSource.setLiveSeekableRange(start, end);
+  updateMediaSource({
+    duration,
+    start,
+    end
+  }) {
+    if (!this.media || !this.mediaSource || this.mediaSource.readyState !== 'open') {
+      return;
+    }
+    if (isFiniteNumber(duration)) {
+      this.log(`Updating Media Source duration to ${duration.toFixed(3)}`);
+    }
+    this.mediaSource.duration = duration;
+    if (start !== undefined && end !== undefined) {
+      this.log(`Media Source duration is set to ${this.mediaSource.duration}. Setting seekable range to ${start}-${end}.`);
+      this.mediaSource.setLiveSeekableRange(start, end);
     }
   }
   checkPendingTracks() {
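
Note: updateMediaElementDuration and updateSeekableRange are split into a pure compute step (getDurationAndRange) and an apply step (updateMediaSource). A sketch of the return shapes implied by the hunk above:

    // live + liveDurationInfinity, setLiveSeekableRange supported:
    //   { duration: Infinity, start, end }
    // live + liveDurationInfinity, no setLiveSeekableRange: { duration: Infinity }
    // level duration should grow: { duration: levelDuration }
    // nothing to apply: null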
@@ -19457,6 +19551,7 @@
       }
       return;
     }
+    sb.ending = false;
     sb.ended = false;
     sb.appendBuffer(data);
   }
@@ -19476,10 +19571,14 @@
 
     // logger.debug(`[buffer-controller]: Blocking ${buffers} SourceBuffer`);
     const blockingOperations = buffers.map(type => operationQueue.appendBlocker(type));
-    Promise.all(blockingOperations).then(() => {
+    const audioBlocked = buffers.length > 1 && !!this.blockedAudioAppend;
+    if (audioBlocked) {
+      this.unblockAudio();
+    }
+    Promise.all(blockingOperations).then(result => {
       // logger.debug(`[buffer-controller]: Blocking operation resolved; unblocking ${buffers} SourceBuffer`);
       onUnblocked();
-      buffers.forEach(type => {
+      buffers.forEach((type, i) => {
         const sb = this.sourceBuffer[type];
         // Only cycle the queue if the SB is not updating. There's a bug in Chrome which sets the SB updating flag to
         // true when changing the MediaSource duration (https://bugs.chromium.org/p/chromium/issues/detail?id=959359&can=2&q=mediasource%20duration)
@@ -28498,6 +28597,17 @@ class StreamController extends BaseStreamController {
   getMainFwdBufferInfo() {
     return this.getFwdBufferInfo(this.mediaBuffer ? this.mediaBuffer : this.media, PlaylistLevelType.MAIN);
   }
+  get maxBufferLength() {
+    const {
+      levels,
+      level
+    } = this;
+    const levelInfo = levels == null ? void 0 : levels[level];
+    if (!levelInfo) {
+      return this.config.maxBufferLength;
+    }
+    return this.getMaxBufferLength(levelInfo.maxBitrate);
+  }
   backtrack(frag) {
     this.couldBacktrack = true;
     // Causes findFragments to backtrack through fragments to find the keyframe
@@ -28603,7 +28713,7 @@ class Hls {
    * Get the video-dev/hls.js package version.
    */
   static get version() {
-    return "1.5.7-0.canary.10015";
+    return "1.5.7-0.canary.10016";
   }
 
   /**
@@ -28705,7 +28815,9 @@ class Hls {
     } = config;
     const errorController = new ConfigErrorController(this);
     const abrController = this.abrController = new ConfigAbrController(this);
-    const bufferController = this.bufferController = new ConfigBufferController(this);
+    // FragmentTracker must be defined before StreamController because the order of event handling is important
+    const fragmentTracker = new FragmentTracker(this);
+    const bufferController = this.bufferController = new ConfigBufferController(this, fragmentTracker);
     const capLevelController = this.capLevelController = new ConfigCapLevelController(this);
     const fpsController = new ConfigFpsController(this);
     const playListLoader = new PlaylistLoader(this);
@@ -28714,8 +28826,6 @@
     // ConentSteeringController is defined before LevelController to receive Multivariant Playlist events first
     const contentSteering = ConfigContentSteeringController ? new ConfigContentSteeringController(this) : null;
     const levelController = this.levelController = new LevelController(this, contentSteering);
-    // FragmentTracker must be defined before StreamController because the order of event handling is important
-    const fragmentTracker = new FragmentTracker(this);
     const keyLoader = new KeyLoader(this.config);
     const streamController = this.streamController = new StreamController(this, fragmentTracker, keyLoader);
 
@@ -29243,6 +29353,9 @@
   get mainForwardBufferInfo() {
     return this.streamController.getMainFwdBufferInfo();
   }
+  get maxBufferLength() {
+    return this.streamController.maxBufferLength;
+  }
 
   /**
    * Find and select the best matching audio track, making a level switch when a Group change is necessary.
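
Note: this surfaces the stream controller's effective forward-buffer target as a read-only hls.maxBufferLength getter, which the audio and subtitle controllers above now consume instead of measuring the main buffer themselves. A minimal usage sketch, assuming a page with a video element and a placeholder stream URL:

    import Hls from 'hls.js';

    const hls = new Hls({ maxBufferLength: 30, maxMaxBufferLength: 600 });
    hls.loadSource('https://example.com/stream.m3u8'); // placeholder URL
    hls.attachMedia(document.querySelector('video'));
    hls.on(Hls.Events.LEVEL_SWITCHED, () => {
      // May exceed config.maxBufferLength for low-bitrate levels when
      // maxBufferSize is set, but never config.maxMaxBufferLength.
      console.log('effective maxBufferLength:', hls.maxBufferLength);
    });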