hls.js 1.5.7-0.canary.10015 → 1.5.7-0.canary.10016
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/hls.js +276 -161
- package/dist/hls.js.d.ts +13 -6
- package/dist/hls.js.map +1 -1
- package/dist/hls.light.js +259 -128
- package/dist/hls.light.js.map +1 -1
- package/dist/hls.light.min.js +1 -1
- package/dist/hls.light.min.js.map +1 -1
- package/dist/hls.light.mjs +200 -71
- package/dist/hls.light.mjs.map +1 -1
- package/dist/hls.min.js +1 -1
- package/dist/hls.min.js.map +1 -1
- package/dist/hls.mjs +217 -104
- package/dist/hls.mjs.map +1 -1
- package/dist/hls.worker.js +1 -1
- package/dist/hls.worker.js.map +1 -1
- package/package.json +1 -1
- package/src/controller/abr-controller.ts +3 -0
- package/src/controller/audio-stream-controller.ts +26 -38
- package/src/controller/base-stream-controller.ts +5 -2
- package/src/controller/buffer-controller.ts +192 -56
- package/src/controller/buffer-operation-queue.ts +16 -19
- package/src/controller/fragment-tracker.ts +15 -11
- package/src/controller/stream-controller.ts +9 -0
- package/src/controller/subtitle-stream-controller.ts +1 -15
- package/src/hls.ts +7 -3
- package/src/utils/codecs.ts +1 -1
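Of the source changes listed above, the most visible API change is a read-only `maxBufferLength` getter on the `Hls` instance (wired through `StreamController` in `src/hls.ts` and `src/controller/stream-controller.ts`; the audio and subtitle stream controllers now read it instead of computing their own limit). A minimal sketch of reading it, assuming a standard hls.js setup; the URL and config values below are placeholders, not part of this diff:

```ts
import Hls from 'hls.js';

// Hypothetical setup: only the read-only `maxBufferLength` getter is new in
// this canary; the stream URL and config values are illustrative.
const video = document.querySelector('video') as HTMLVideoElement;
const hls = new Hls({ maxBufferLength: 30, maxMaxBufferLength: 600 });

hls.loadSource('https://example.com/stream.m3u8');
hls.attachMedia(video);

hls.on(Hls.Events.LEVEL_SWITCHED, () => {
  // Reflects the current level's bitrate-adjusted forward-buffer target
  // (falls back to config.maxBufferLength when no level is loaded yet).
  console.log('forward buffer target (s):', hls.maxBufferLength);
});
```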
package/dist/hls.js
CHANGED
@@ -644,7 +644,7 @@
      // Some browsers don't allow to use bind on console object anyway
      // fallback to default if needed
      try {
-       newLogger.log("Debug logs enabled for \"" + context + "\" in hls.js version " + "1.5.7-0.canary.10015");
+       newLogger.log("Debug logs enabled for \"" + context + "\" in hls.js version " + "1.5.7-0.canary.10016");
      } catch (e) {
        /* log fn threw an exception. All logger methods are no-ops. */
        return createLogger();
@@ -7567,6 +7567,9 @@
    var fragCurrent = this.fragCurrent,
      partCurrent = this.partCurrent,
      hls = this.hls;
+   if (hls.levels.length <= 1) {
+     return hls.loadLevel;
+   }
    var maxAutoLevel = hls.maxAutoLevel,
      config = hls.config,
      minAutoLevel = hls.minAutoLevel;
@@ -8035,11 +8038,14 @@
   * If not found any Fragment, return null
   */;
  _proto.getBufferedFrag = function getBufferedFrag(position, levelType) {
+   return this.getFragAtPos(position, levelType, true);
+ };
+ _proto.getFragAtPos = function getFragAtPos(position, levelType, buffered) {
    var fragments = this.fragments;
    var keys = Object.keys(fragments);
    for (var i = keys.length; i--;) {
      var fragmentEntity = fragments[keys[i]];
-     if ((fragmentEntity == null ? void 0 : fragmentEntity.body.type) === levelType && fragmentEntity.buffered) {
+     if ((fragmentEntity == null ? void 0 : fragmentEntity.body.type) === levelType && (!buffered || fragmentEntity.buffered)) {
        var frag = fragmentEntity.body;
        if (frag.start <= position && position <= frag.end) {
          return frag;
@@ -8289,10 +8295,10 @@
    };
  };
  _proto.onBufferAppended = function onBufferAppended(event, data) {
-   var _this3 = this;
    var frag = data.frag,
      part = data.part,
-     timeRanges = data.timeRanges;
+     timeRanges = data.timeRanges,
+     type = data.type;
    if (frag.sn === 'initSegment') {
      return;
    }
@@ -8306,10 +8312,8 @@
    }
    // Store the latest timeRanges loaded in the buffer
    this.timeRanges = timeRanges;
-
-
-     _this3.detectEvictedFragments(elementaryStream, timeRange, playlistType, part);
-   });
+   var timeRange = timeRanges[type];
+   this.detectEvictedFragments(type, timeRange, playlistType, part);
  };
  _proto.onFragBuffered = function onFragBuffered(event, data) {
    this.detectPartialFragments(data);
@@ -8323,12 +8327,12 @@
    return !!((_this$activePartLists = this.activePartLists[type]) != null && _this$activePartLists.length);
  };
  _proto.removeFragmentsInRange = function removeFragmentsInRange(start, end, playlistType, withGapOnly, unbufferedOnly) {
-   var
+   var _this3 = this;
    if (withGapOnly && !this.hasGaps) {
      return;
    }
    Object.keys(this.fragments).forEach(function (key) {
-     var fragmentEntity =
+     var fragmentEntity = _this3.fragments[key];
      if (!fragmentEntity) {
        return;
      }
@@ -8337,7 +8341,7 @@
        return;
      }
      if (frag.start < end && frag.end > start && (fragmentEntity.buffered || unbufferedOnly)) {
-
+       _this3.removeFragment(frag);
      }
    });
  };
@@ -10299,7 +10303,7 @@
    // Workaround flaw in getting forward buffer when maxBufferHole is smaller than gap at current pos
    if (bufferInfo.len === 0 && bufferInfo.nextStart !== undefined) {
      var bufferedFragAtPos = this.fragmentTracker.getBufferedFrag(pos, type);
-     if (bufferedFragAtPos && bufferInfo.nextStart
+     if (bufferedFragAtPos && (bufferInfo.nextStart <= bufferedFragAtPos.end || bufferedFragAtPos.gap)) {
        return BufferHelper.bufferInfo(bufferable, pos, Math.max(bufferInfo.nextStart, maxBufferHole));
      }
    }
@@ -17342,9 +17346,8 @@
      this.state = State.ENDED;
      return;
    }
-   var mainBufferInfo = this.getFwdBufferInfo(this.videoBuffer ? this.videoBuffer : this.media, PlaylistLevelType.MAIN);
    var bufferLen = bufferInfo.len;
-   var maxBufLen =
+   var maxBufLen = hls.maxBufferLength;
    var fragments = trackDetails.fragments;
    var start = fragments[0].start;
    var targetBufferTime = this.flushing ? this.getLoadPosition() : bufferInfo.end;
@@ -17379,32 +17382,25 @@
        this.bufferFlushed = true;
        return;
      }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+     if (!trackDetails.live || targetBufferTime < this.hls.liveSyncPosition) {
+       // Request audio segments up to one fragment ahead of main buffer
+       var mainBufferInfo = this.getFwdBufferInfo(this.videoBuffer ? this.videoBuffer : this.media, PlaylistLevelType.MAIN);
+       var atBufferSyncLimit = !!mainBufferInfo && frag.start > mainBufferInfo.end + frag.duration;
+       if (atBufferSyncLimit) {
+         // Check fragment-tracker for main fragments since GAP segments do not show up in bufferInfo
+         var mainFrag = this.fragmentTracker.getFragAtPos(frag.start, PlaylistLevelType.MAIN);
+         if (mainFrag === null) {
+           return;
+         }
+         // Bridge gaps in main buffer (also prevents loop loading at gaps)
+         atGap || (atGap = !!mainFrag.gap || mainBufferInfo.len === 0);
+         if (!atGap || bufferInfo.nextStart && bufferInfo.nextStart < mainFrag.end) {
+           return;
+         }
        }
      }
    this.loadFragment(frag, levelInfo, targetBufferTime);
  };
- _proto.getMaxBufferLength = function getMaxBufferLength(mainBufferLength) {
-   var maxConfigBuffer = _BaseStreamController.prototype.getMaxBufferLength.call(this);
-   if (!mainBufferLength) {
-     return maxConfigBuffer;
-   }
-   return Math.min(Math.max(maxConfigBuffer, mainBufferLength), this.config.maxMaxBufferLength);
- };
  _proto.onMediaDetaching = function onMediaDetaching() {
    this.videoBuffer = null;
    this.bufferFlushed = this.flushing = false;
@@ -18469,9 +18465,8 @@
    var bufferedInfo = BufferHelper.bufferedInfo(this.tracksBuffered[this.currentTrackId] || [], currentTime, config.maxBufferHole);
    var targetBufferTime = bufferedInfo.end,
      bufferLen = bufferedInfo.len;
-   var mainBufferInfo = this.getFwdBufferInfo(this.media, PlaylistLevelType.MAIN);
    var trackDetails = track.details;
-   var maxBufLen = this.
+   var maxBufLen = this.hls.maxBufferLength + trackDetails.levelTargetDuration;
    if (bufferLen > maxBufLen) {
      return;
    }
@@ -18508,13 +18503,6 @@
      }
    }
  };
- _proto.getMaxBufferLength = function getMaxBufferLength(mainBufferLength) {
-   var maxConfigBuffer = _BaseStreamController.prototype.getMaxBufferLength.call(this);
-   if (!mainBufferLength) {
-     return maxConfigBuffer;
-   }
-   return Math.max(maxConfigBuffer, mainBufferLength);
- };
  _proto.loadFragment = function loadFragment(frag, level, targetBufferTime) {
    this.fragCurrent = frag;
    if (frag.sn === 'initSegment') {
@@ -19032,24 +19020,23 @@
      this.executeNext(type);
    }
  };
- _proto.insertAbort = function insertAbort(operation, type) {
-   var queue = this.queues[type];
-   queue.unshift(operation);
-   this.executeNext(type);
- };
  _proto.appendBlocker = function appendBlocker(type) {
-   var
-
-
+   var _this = this;
+   return new Promise(function (resolve) {
+     var operation = {
+       execute: resolve,
+       onStart: function onStart() {},
+       onComplete: function onComplete() {},
+       onError: function onError() {}
+     };
+     _this.append(operation, type);
    });
-
-
-
-
-
-   }
-   this.append(operation, type);
-   return promise;
+ };
+ _proto.unblockAudio = function unblockAudio(op) {
+   var queue = this.queues.audio;
+   if (queue[0] === op) {
+     this.shiftAndExecuteNext('audio');
+   }
  };
  _proto.executeNext = function executeNext(type) {
    var queue = this.queues[type];
@@ -19084,7 +19071,7 @@
  var VIDEO_CODEC_PROFILE_REPLACE = /(avc[1234]|hvc1|hev1|dvh[1e]|vp09|av01)(?:\.[^.,]+)+/;
  var BufferController = /*#__PURE__*/function (_Logger) {
    _inheritsLoose(BufferController, _Logger);
-   function BufferController(hls) {
+   function BufferController(hls, fragmentTracker) {
      var _this;
      _this = _Logger.call(this, 'buffer-controller', hls.logger) || this;
      // The level details used to determine duration, target-duration and live
@@ -19096,6 +19083,7 @@
      // References to event listeners for each SourceBuffer, so that they can be referenced for event removal
      _this.listeners = void 0;
      _this.hls = void 0;
+     _this.fragmentTracker = void 0;
      // The number of BUFFER_CODEC events received before any sourceBuffers are created
      _this.bufferCodecEventsExpected = 0;
      // The total number of BUFFER_CODEC events received
@@ -19106,6 +19094,10 @@
      _this.mediaSource = null;
      // Last MP3 audio chunk appended
      _this.lastMpegAudioChunk = null;
+     // Audio fragment blocked from appending until corresponding video appends or context changes
+     _this.blockedAudioAppend = null;
+     // Keep track of video append position for unblocking audio
+     _this.lastVideoAppendEnd = 0;
      _this.appendSource = void 0;
      // counters
      _this.appendErrors = {
@@ -19136,7 +19128,10 @@
        _this.log('Media source opened');
        if (media) {
          media.removeEventListener('emptied', _this._onMediaEmptied);
-         _this.
+         var durationAndRange = _this.getDurationAndRange();
+         if (durationAndRange) {
+           _this.updateMediaSource(durationAndRange);
+         }
          _this.hls.trigger(Events.MEDIA_ATTACHED, {
            media: media,
            mediaSource: mediaSource
@@ -19163,6 +19158,7 @@
        }
      };
      _this.hls = hls;
+     _this.fragmentTracker = fragmentTracker;
      _this.appendSource = hls.config.preferManagedMediaSource;
      _this._initSourceBuffer();
      _this.registerListeners();
@@ -19177,7 +19173,7 @@
    this.details = null;
    this.lastMpegAudioChunk = null;
    // @ts-ignore
-   this.hls = null;
+   this.hls = this.fragmentTracker = null;
    // @ts-ignore
    this._onMediaSourceOpen = this._onMediaSourceClose = null;
    // @ts-ignore
@@ -19229,6 +19225,8 @@
      audiovideo: 0
    };
    this.lastMpegAudioChunk = null;
+   this.blockedAudioAppend = null;
+   this.lastVideoAppendEnd = 0;
  };
  _proto.onManifestLoading = function onManifestLoading() {
    this.bufferCodecEventsExpected = this._bufferCodecEventsTotal = 0;
@@ -19366,9 +19364,10 @@
    var trackNames = Object.keys(data);
    trackNames.forEach(function (trackName) {
      if (sourceBufferCount) {
+       var _track$buffer;
        // check if SourceBuffer codec needs to change
        var track = _this3.tracks[trackName];
-       if (track && typeof track.buffer.changeType === 'function') {
+       if (track && typeof ((_track$buffer = track.buffer) == null ? void 0 : _track$buffer.changeType) === 'function') {
          var _trackCodec;
          var _data$trackName = data[trackName],
            id = _data$trackName.id,
@@ -19436,17 +19435,52 @@
    };
    operationQueue.append(operation, type, !!this.pendingTracks[type]);
  };
+ _proto.blockAudio = function blockAudio(partOrFrag) {
+   var _this$fragmentTracker,
+     _this5 = this;
+   var pStart = partOrFrag.start;
+   var pTime = pStart + partOrFrag.duration * 0.05;
+   var atGap = ((_this$fragmentTracker = this.fragmentTracker.getAppendedFrag(pStart, PlaylistLevelType.MAIN)) == null ? void 0 : _this$fragmentTracker.gap) === true;
+   if (atGap) {
+     return;
+   }
+   var op = {
+     execute: function execute() {
+       var _this5$fragmentTracke;
+       if (_this5.lastVideoAppendEnd > pTime || _this5.sourceBuffer.video && BufferHelper.isBuffered(_this5.sourceBuffer.video, pTime) || ((_this5$fragmentTracke = _this5.fragmentTracker.getAppendedFrag(pTime, PlaylistLevelType.MAIN)) == null ? void 0 : _this5$fragmentTracke.gap) === true) {
+         _this5.blockedAudioAppend = null;
+         _this5.operationQueue.shiftAndExecuteNext('audio');
+       }
+     },
+     onStart: function onStart() {},
+     onComplete: function onComplete() {},
+     onError: function onError() {}
+   };
+   this.blockedAudioAppend = {
+     op: op,
+     frag: partOrFrag
+   };
+   this.operationQueue.append(op, 'audio', true);
+ };
+ _proto.unblockAudio = function unblockAudio() {
+   var blockedAudioAppend = this.blockedAudioAppend;
+   if (blockedAudioAppend) {
+     this.blockedAudioAppend = null;
+     this.operationQueue.unblockAudio(blockedAudioAppend.op);
+   }
+ };
  _proto.onBufferAppending = function onBufferAppending(event, eventData) {
-   var
-   var
-     operationQueue = this.operationQueue,
+   var _this6 = this;
+   var operationQueue = this.operationQueue,
      tracks = this.tracks;
    var data = eventData.data,
      type = eventData.type,
+     parent = eventData.parent,
      frag = eventData.frag,
      part = eventData.part,
      chunkMeta = eventData.chunkMeta;
    var chunkStats = chunkMeta.buffering[type];
+   var sn = frag.sn;
    var bufferAppendingStart = self.performance.now();
    chunkStats.start = bufferAppendingStart;
    var fragBuffering = frag.stats.buffering;
@@ -19469,21 +19503,50 @@
      checkTimestampOffset = !this.lastMpegAudioChunk || chunkMeta.id === 1 || this.lastMpegAudioChunk.sn !== chunkMeta.sn;
      this.lastMpegAudioChunk = chunkMeta;
    }
-
+
+   // Block audio append until overlapping video append
+   var videoSb = this.sourceBuffer.video;
+   if (videoSb && sn !== 'initSegment') {
+     var partOrFrag = part || frag;
+     var blockedAudioAppend = this.blockedAudioAppend;
+     if (type === 'audio' && parent !== 'main' && !this.blockedAudioAppend) {
+       var pStart = partOrFrag.start;
+       var pTime = pStart + partOrFrag.duration * 0.05;
+       var vbuffered = videoSb.buffered;
+       var vappending = this.operationQueue.current('video');
+       if (!vbuffered.length && !vappending) {
+         // wait for video before appending audio
+         this.blockAudio(partOrFrag);
+       } else if (!vappending && !BufferHelper.isBuffered(videoSb, pTime) && this.lastVideoAppendEnd < pTime) {
+         // audio is ahead of video
+         this.blockAudio(partOrFrag);
+       }
+     } else if (type === 'video') {
+       var videoAppendEnd = partOrFrag.end;
+       if (blockedAudioAppend) {
+         var audioStart = blockedAudioAppend.frag.start;
+         if (videoAppendEnd > audioStart || videoAppendEnd < this.lastVideoAppendEnd || BufferHelper.isBuffered(videoSb, audioStart)) {
+           this.unblockAudio();
+         }
+       }
+       this.lastVideoAppendEnd = videoAppendEnd;
+     }
+   }
+   var fragStart = (part || frag).start;
    var operation = {
      execute: function execute() {
        chunkStats.executeStart = self.performance.now();
        if (checkTimestampOffset) {
-         var sb =
+         var sb = _this6.sourceBuffer[type];
          if (sb) {
            var delta = fragStart - sb.timestampOffset;
            if (Math.abs(delta) >= 0.1) {
-
+             _this6.log("Updating audio SourceBuffer timestampOffset to " + fragStart + " (delta: " + delta + ") sn: " + sn + ")");
              sb.timestampOffset = fragStart;
            }
          }
        }
-
+       _this6.appendExecutor(data, type);
      },
      onStart: function onStart() {
        // logger.debug(`[buffer-controller]: ${type} SourceBuffer updatestart`);
@@ -19498,19 +19561,19 @@
        if (partBuffering && partBuffering.first === 0) {
          partBuffering.first = end;
        }
-       var sourceBuffer =
+       var sourceBuffer = _this6.sourceBuffer;
        var timeRanges = {};
        for (var _type in sourceBuffer) {
          timeRanges[_type] = BufferHelper.getBuffered(sourceBuffer[_type]);
        }
-
+       _this6.appendErrors[type] = 0;
        if (type === 'audio' || type === 'video') {
-
+         _this6.appendErrors.audiovideo = 0;
        } else {
-
-
+         _this6.appendErrors.audio = 0;
+         _this6.appendErrors.video = 0;
        }
-
+       _this6.hls.trigger(Events.BUFFER_APPENDED, {
          type: type,
          frag: frag,
          part: part,
@@ -19538,51 +19601,57 @@
          // let's stop appending any segments, and report BUFFER_FULL_ERROR error
          event.details = ErrorDetails.BUFFER_FULL_ERROR;
        } else {
-         var appendErrorCount = ++
+         var appendErrorCount = ++_this6.appendErrors[type];
          event.details = ErrorDetails.BUFFER_APPEND_ERROR;
          /* with UHD content, we could get loop of quota exceeded error until
            browser is able to evict some data from sourcebuffer. Retrying can help recover.
          */
-
-         if (appendErrorCount >= hls.config.appendErrorMaxRetry) {
+         _this6.warn("Failed " + appendErrorCount + "/" + _this6.hls.config.appendErrorMaxRetry + " times to append segment in \"" + type + "\" sourceBuffer");
+         if (appendErrorCount >= _this6.hls.config.appendErrorMaxRetry) {
            event.fatal = true;
          }
        }
-       hls.trigger(Events.ERROR, event);
+       _this6.hls.trigger(Events.ERROR, event);
      }
    };
    operationQueue.append(operation, type, !!this.pendingTracks[type]);
  };
+ _proto.getFlushOp = function getFlushOp(type, start, end) {
+   var _this7 = this;
+   return {
+     execute: function execute() {
+       _this7.removeExecutor(type, start, end);
+     },
+     onStart: function onStart() {
+       // logger.debug(`[buffer-controller]: Started flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
+     },
+     onComplete: function onComplete() {
+       // logger.debug(`[buffer-controller]: Finished flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
+       _this7.hls.trigger(Events.BUFFER_FLUSHED, {
+         type: type
+       });
+     },
+     onError: function onError(error) {
+       _this7.warn("Failed to remove from " + type + " SourceBuffer", error);
+     }
+   };
+ };
  _proto.onBufferFlushing = function onBufferFlushing(event, data) {
-   var
+   var _this8 = this;
    var operationQueue = this.operationQueue;
-   var
-
-
-
-
-     },
-     onComplete: function onComplete() {
-       // logger.debug(`[buffer-controller]: Finished flushing ${data.startOffset} -> ${data.endOffset} for ${type} Source Buffer`);
-       _this6.hls.trigger(Events.BUFFER_FLUSHED, {
-         type: type
-       });
-     },
-     onError: function onError(error) {
-       _this6.warn("Failed to remove from " + type + " SourceBuffer", error);
-     }
-   };
-   };
-   if (data.type) {
-     operationQueue.append(flushOperation(data.type), data.type);
+   var type = data.type,
+     startOffset = data.startOffset,
+     endOffset = data.endOffset;
+   if (type) {
+     operationQueue.append(this.getFlushOp(type, startOffset, endOffset), type);
    } else {
-     this.getSourceBufferTypes().forEach(function (
-       operationQueue.append(
+     this.getSourceBufferTypes().forEach(function (sbType) {
+       operationQueue.append(_this8.getFlushOp(sbType, startOffset, endOffset), sbType);
      });
    }
  };
  _proto.onFragParsed = function onFragParsed(event, data) {
-   var
+   var _this9 = this;
    var frag = data.frag,
      part = data.part;
    var buffersAppendedTo = [];
@@ -19604,7 +19673,7 @@
        part.stats.buffering.end = now;
      }
      var stats = part ? part.stats : frag.stats;
-
+     _this9.hls.trigger(Events.FRAG_BUFFERED, {
        frag: frag,
        part: part,
        stats: stats,
@@ -19624,14 +19693,17 @@
  // an undefined data.type will mark all buffers as EOS.
  ;
  _proto.onBufferEos = function onBufferEos(event, data) {
-   var
+   var _this10 = this;
+   if (data.type === 'video') {
+     this.unblockAudio();
+   }
    var ended = this.getSourceBufferTypes().reduce(function (acc, type) {
-     var sb =
+     var sb = _this10.sourceBuffer[type];
      if (sb && (!data.type || data.type === type)) {
        sb.ending = true;
        if (!sb.ended) {
          sb.ended = true;
-
+         _this10.log(type + " sourceBuffer now EOS");
        }
      }
      return acc && !!(!sb || sb.ended);
@@ -19639,35 +19711,42 @@
    if (ended) {
      this.log("Queueing mediaSource.endOfStream()");
      this.blockBuffers(function () {
-
-       var sb =
+       _this10.getSourceBufferTypes().forEach(function (type) {
+         var sb = _this10.sourceBuffer[type];
          if (sb) {
            sb.ending = false;
          }
        });
-       var mediaSource =
+       var mediaSource = _this10.mediaSource;
        if (!mediaSource || mediaSource.readyState !== 'open') {
          if (mediaSource) {
-
+           _this10.log("Could not call mediaSource.endOfStream(). mediaSource.readyState: " + mediaSource.readyState);
          }
          return;
        }
-
+       _this10.log("Calling mediaSource.endOfStream()");
        // Allow this to throw and be caught by the enqueueing function
        mediaSource.endOfStream();
      });
    }
  };
  _proto.onLevelUpdated = function onLevelUpdated(event, _ref) {
+   var _this11 = this;
    var details = _ref.details;
    if (!details.fragments.length) {
      return;
    }
    this.details = details;
+   var durationAndRange = this.getDurationAndRange();
+   if (!durationAndRange) {
+     return;
+   }
    if (this.getSourceBufferTypes().length) {
-     this.blockBuffers(
+     this.blockBuffers(function () {
+       return _this11.updateMediaSource(durationAndRange);
+     });
    } else {
-     this.
+     this.updateMediaSource(durationAndRange);
    }
  };
  _proto.trimBuffers = function trimBuffers() {
@@ -19700,7 +19779,7 @@
    }
  };
  _proto.flushBackBuffer = function flushBackBuffer(currentTime, targetDuration, targetBackBufferPosition) {
-   var
+   var _this12 = this;
    var details = this.details,
      sourceBuffer = this.sourceBuffer;
    var sourceBufferTypes = this.getSourceBufferTypes();
@@ -19710,20 +19789,20 @@
      var buffered = BufferHelper.getBuffered(sb);
      // when target buffer start exceeds actual buffer start
      if (buffered.length > 0 && targetBackBufferPosition > buffered.start(0)) {
-
+       _this12.hls.trigger(Events.BACK_BUFFER_REACHED, {
          bufferEnd: targetBackBufferPosition
        });

        // Support for deprecated event:
        if (details != null && details.live) {
-
+         _this12.hls.trigger(Events.LIVE_BACK_BUFFER_REACHED, {
            bufferEnd: targetBackBufferPosition
          });
        } else if (sb.ended && buffered.end(buffered.length - 1) - currentTime < targetDuration * 2) {
-
+         _this12.log("Cannot flush " + type + " back buffer while SourceBuffer is in ended state");
          return;
        }
-
+       _this12.hls.trigger(Events.BUFFER_FLUSHING, {
          startOffset: 0,
          endOffset: targetBackBufferPosition,
          type: type
@@ -19733,7 +19812,7 @@
    });
  };
  _proto.flushFrontBuffer = function flushFrontBuffer(currentTime, targetDuration, targetFrontBufferPosition) {
-   var
+   var _this13 = this;
    var sourceBuffer = this.sourceBuffer;
    var sourceBufferTypes = this.getSourceBufferTypes();
    sourceBufferTypes.forEach(function (type) {
@@ -19751,10 +19830,10 @@
      if (targetFrontBufferPosition > bufferStart || currentTime >= bufferStart && currentTime <= bufferEnd) {
        return;
      } else if (sb.ended && currentTime - bufferEnd < 2 * targetDuration) {
-
+       _this13.log("Cannot flush " + type + " front buffer while SourceBuffer is in ended state");
        return;
      }
-
+     _this13.hls.trigger(Events.BUFFER_FLUSHING, {
        startOffset: bufferStart,
        endOffset: Infinity,
        type: type
@@ -19768,9 +19847,9 @@
   * 'liveDurationInfinity` is set to `true`
   * More details: https://github.com/video-dev/hls.js/issues/355
   */;
- _proto.
+ _proto.getDurationAndRange = function getDurationAndRange() {
    if (!this.details || !this.media || !this.mediaSource || this.mediaSource.readyState !== 'open') {
-     return;
+     return null;
    }
    var details = this.details,
      hls = this.hls,
@@ -19782,25 +19861,40 @@
    if (details.live && hls.config.liveDurationInfinity) {
      // Override duration to Infinity
      mediaSource.duration = Infinity;
-
+     var len = details.fragments.length;
+     if (len && details.live && !!mediaSource.setLiveSeekableRange) {
+       var start = Math.max(0, details.fragments[0].start);
+       var end = Math.max(start, start + details.totalduration);
+       return {
+         duration: Infinity,
+         start: start,
+         end: end
+       };
+     }
+     return {
+       duration: Infinity
+     };
    } else if (levelDuration > msDuration && levelDuration > mediaDuration || !isFiniteNumber(mediaDuration)) {
-
-
-
-     // flushing already buffered portion when switching between quality level
-     this.log("Updating Media Source duration to " + levelDuration.toFixed(3));
-     mediaSource.duration = levelDuration;
+     return {
+       duration: levelDuration
+     };
    }
+   return null;
  };
- _proto.
-   var
-
-
-   if (
-
-
-
-
+ _proto.updateMediaSource = function updateMediaSource(_ref2) {
+   var duration = _ref2.duration,
+     start = _ref2.start,
+     end = _ref2.end;
+   if (!this.media || !this.mediaSource || this.mediaSource.readyState !== 'open') {
+     return;
+   }
+   if (isFiniteNumber(duration)) {
+     this.log("Updating Media Source duration to " + duration.toFixed(3));
+   }
+   this.mediaSource.duration = duration;
+   if (start !== undefined && end !== undefined) {
+     this.log("Media Source duration is set to " + this.mediaSource.duration + ". Setting seekable range to " + start + "-" + end + ".");
+     this.mediaSource.setLiveSeekableRange(start, end);
    }
  };
  _proto.checkPendingTracks = function checkPendingTracks() {
@@ -19839,7 +19933,7 @@
    }
  };
  _proto.createSourceBuffers = function createSourceBuffers(tracks) {
-   var
+   var _this14 = this;
    var sourceBuffer = this.sourceBuffer,
      mediaSource = this.mediaSource;
    if (!mediaSource) {
@@ -19855,28 +19949,28 @@
      var codec = track.levelCodec || track.codec;
      if (codec) {
        if (trackName.slice(0, 5) === 'audio') {
-         codec = getCodecCompatibleName(codec,
+         codec = getCodecCompatibleName(codec, _this14.hls.config.preferManagedMediaSource);
        }
      }
      var mimeType = track.container + ";codecs=" + codec;
-
+     _this14.log("creating sourceBuffer(" + mimeType + ")");
      try {
        var sb = sourceBuffer[trackName] = mediaSource.addSourceBuffer(mimeType);
        var sbName = trackName;
-
-
-
+       _this14.addBufferListener(sbName, 'updatestart', _this14._onSBUpdateStart);
+       _this14.addBufferListener(sbName, 'updateend', _this14._onSBUpdateEnd);
+       _this14.addBufferListener(sbName, 'error', _this14._onSBUpdateError);
        // ManagedSourceBuffer bufferedchange event
-
+       _this14.addBufferListener(sbName, 'bufferedchange', function (type, event) {
          // If media was ejected check for a change. Added ranges are redundant with changes on 'updateend' event.
          var removedRanges = event.removedRanges;
          if (removedRanges != null && removedRanges.length) {
-
+           _this14.hls.trigger(Events.BUFFER_FLUSHED, {
              type: trackName
            });
          }
        });
-
+       _this14.tracks[trackName] = {
          buffer: sb,
          codec: codec,
          container: track.container,
@@ -19885,8 +19979,8 @@
          id: track.id
        };
      } catch (err) {
-
-
+       _this14.error("error while trying to add sourceBuffer: " + err.message);
+       _this14.hls.trigger(Events.ERROR, {
          type: ErrorTypes.MEDIA_ERROR,
          details: ErrorDetails.BUFFER_ADD_CODEC_ERROR,
          fatal: false,
@@ -19974,6 +20068,7 @@
      }
      return;
    }
+   sb.ending = false;
    sb.ended = false;
    sb.appendBuffer(data);
  }
@@ -19983,7 +20078,7 @@
  // upon completion, since we already do it here
  ;
  _proto.blockBuffers = function blockBuffers(onUnblocked, buffers) {
-   var
+   var _this15 = this;
    if (buffers === void 0) {
      buffers = this.getSourceBufferTypes();
    }
@@ -19998,11 +20093,15 @@
    var blockingOperations = buffers.map(function (type) {
      return operationQueue.appendBlocker(type);
    });
-
+   var audioBlocked = buffers.length > 1 && !!this.blockedAudioAppend;
+   if (audioBlocked) {
+     this.unblockAudio();
+   }
+   Promise.all(blockingOperations).then(function (result) {
      // logger.debug(`[buffer-controller]: Blocking operation resolved; unblocking ${buffers} SourceBuffer`);
      onUnblocked();
-     buffers.forEach(function (type) {
-       var sb =
+     buffers.forEach(function (type, i) {
+       var sb = _this15.sourceBuffer[type];
        // Only cycle the queue if the SB is not updating. There's a bug in Chrome which sets the SB updating flag to
        // true when changing the MediaSource duration (https://bugs.chromium.org/p/chromium/issues/detail?id=959359&can=2&q=mediasource%20duration)
        // While this is a workaround, it's probably useful to have around
@@ -29177,6 +29276,17 @@
    }
  };
  _createClass(StreamController, [{
+   key: "maxBufferLength",
+   get: function get() {
+     var levels = this.levels,
+       level = this.level;
+     var levelInfo = levels == null ? void 0 : levels[level];
+     if (!levelInfo) {
+       return this.config.maxBufferLength;
+     }
+     return this.getMaxBufferLength(levelInfo.maxBitrate);
+   }
+ }, {
    key: "nextLevel",
    get: function get() {
      var frag = this.nextBufferedFrag;
@@ -29313,7 +29423,9 @@
    ConfigFpsController = config.fpsController;
    var errorController = new ConfigErrorController(this);
    var abrController = this.abrController = new ConfigAbrController(this);
-
+   // FragmentTracker must be defined before StreamController because the order of event handling is important
+   var fragmentTracker = new FragmentTracker(this);
+   var bufferController = this.bufferController = new ConfigBufferController(this, fragmentTracker);
    var capLevelController = this.capLevelController = new ConfigCapLevelController(this);
    var fpsController = new ConfigFpsController(this);
    var playListLoader = new PlaylistLoader(this);
@@ -29322,8 +29434,6 @@
    // ConentSteeringController is defined before LevelController to receive Multivariant Playlist events first
    var contentSteering = ConfigContentSteeringController ? new ConfigContentSteeringController(this) : null;
    var levelController = this.levelController = new LevelController(this, contentSteering);
-   // FragmentTracker must be defined before StreamController because the order of event handling is important
-   var fragmentTracker = new FragmentTracker(this);
    var keyLoader = new KeyLoader(this.config);
    var streamController = this.streamController = new StreamController(this, fragmentTracker, keyLoader);

@@ -29920,6 +30030,11 @@
    get: function get() {
      return this.streamController.getMainFwdBufferInfo();
    }
+ }, {
+   key: "maxBufferLength",
+   get: function get() {
+     return this.streamController.maxBufferLength;
+   }
  }, {
    key: "allAudioTracks",
    get: function get() {
@@ -30102,7 +30217,7 @@
    * Get the video-dev/hls.js package version.
    */
    function get() {
-     return "1.5.7-0.canary.10015";
+     return "1.5.7-0.canary.10016";
    }
  }, {
    key: "Events",