hls.js 1.5.12-0.canary.10366 → 1.5.12-0.canary.10367
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/hls.js +293 -194
- package/dist/hls.js.d.ts +14 -8
- package/dist/hls.js.map +1 -1
- package/dist/hls.light.js +211 -131
- package/dist/hls.light.js.map +1 -1
- package/dist/hls.light.min.js +1 -1
- package/dist/hls.light.min.js.map +1 -1
- package/dist/hls.light.mjs +208 -129
- package/dist/hls.light.mjs.map +1 -1
- package/dist/hls.min.js +1 -1
- package/dist/hls.min.js.map +1 -1
- package/dist/hls.mjs +290 -192
- package/dist/hls.mjs.map +1 -1
- package/dist/hls.worker.js +1 -1
- package/dist/hls.worker.js.map +1 -1
- package/package.json +1 -1
- package/src/controller/abr-controller.ts +1 -1
- package/src/controller/audio-stream-controller.ts +86 -84
- package/src/controller/base-stream-controller.ts +50 -41
- package/src/controller/buffer-controller.ts +20 -5
- package/src/controller/error-controller.ts +22 -14
- package/src/controller/fragment-finders.ts +33 -3
- package/src/controller/fragment-tracker.ts +30 -1
- package/src/controller/stream-controller.ts +54 -47
- package/src/controller/subtitle-stream-controller.ts +4 -4
- package/src/controller/timeline-controller.ts +1 -1
- package/src/hls.ts +1 -1
- package/src/loader/fragment-loader.ts +14 -19
- package/src/loader/fragment.ts +2 -2
- package/src/loader/level-details.ts +1 -2
- package/src/loader/playlist-loader.ts +1 -2
- package/src/remux/mp4-remuxer.ts +12 -8
- package/src/remux/passthrough-remuxer.ts +1 -1
- package/src/types/events.ts +13 -1
- package/src/utils/level-helper.ts +4 -5
- package/src/utils/rendition-helper.ts +84 -79
package/package.json
CHANGED

package/src/controller/abr-controller.ts
CHANGED
@@ -879,7 +879,7 @@ class AbrController extends Logger implements AbrComponentAPI {
           1,
         )} fetchDuration:${fetchDuration.toFixed(
           1,
-        )} firstSelection:${firstSelection} codecSet:${
+        )} firstSelection:${firstSelection} codecSet:${levelInfo.codecSet} videoRange:${levelInfo.videoRange} hls.loadLevel:${loadLevel}`,
       );
     }
     if (firstSelection) {
package/src/controller/audio-stream-controller.ts
CHANGED
@@ -1,6 +1,5 @@
 import BaseStreamController, { State } from './base-stream-controller';
 import { Events } from '../events';
-import { Bufferable, BufferHelper } from '../utils/buffer-helper';
 import { FragmentState } from './fragment-tracker';
 import { Level } from '../types/level';
 import { PlaylistContextType, PlaylistLevelType } from '../types/loader';
@@ -13,7 +12,7 @@ import {
 import ChunkCache from '../demux/chunk-cache';
 import TransmuxerInterface from '../demux/transmuxer-interface';
 import { ChunkMetadata } from '../types/transmuxer';
-import {
+import { findFragWithCC, findNearestWithCC } from './fragment-finders';
 import { alignMediaPlaylistByPDT } from '../utils/discontinuities';
 import { mediaAttributesIdentical } from '../utils/media-option-attributes';
 import { ErrorDetails } from '../errors';
@@ -39,6 +38,8 @@ import type {
   FragBufferedData,
   ErrorData,
   BufferFlushingData,
+  BufferCodecsData,
+  FragLoadingData,
 } from '../types/events';
 import type { MediaPlaylist } from '../types/media-playlist';

@@ -55,9 +56,8 @@ class AudioStreamController
   extends BaseStreamController
   implements NetworkComponentAPI
 {
-  private
-  private
-  private waitingVideoCC: number = -1;
+  private videoAnchor: MediaFragment | null = null;
+  private mainFragLoading: FragLoadingData | null = null;
   private bufferedTrack: MediaPlaylist | null = null;
   private switchingTrack: MediaPlaylist | null = null;
   private trackId: number = -1;
@@ -102,6 +102,7 @@ class AudioStreamController
     hls.on(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
     hls.on(Events.BUFFER_FLUSHED, this.onBufferFlushed, this);
     hls.on(Events.INIT_PTS_FOUND, this.onInitPtsFound, this);
+    hls.on(Events.FRAG_LOADING, this.onFragLoading, this);
     hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
   }

@@ -120,6 +121,7 @@ class AudioStreamController
     hls.off(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
     hls.off(Events.BUFFER_FLUSHED, this.onBufferFlushed, this);
     hls.off(Events.INIT_PTS_FOUND, this.onInitPtsFound, this);
+    hls.on(Events.FRAG_LOADING, this.onFragLoading, this);
     hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
   }

@@ -130,20 +132,44 @@ class AudioStreamController
   ) {
     // Always update the new INIT PTS
     // Can change due level switch
-    if (id ===
+    if (id === PlaylistLevelType.MAIN) {
       const cc = frag.cc;
-
+      const inFlightFrag = this.fragCurrent;
+      this.initPTS[cc] = { baseTime: initPTS, timescale };
       this.log(
         `InitPTS for cc: ${cc} found from main: ${initPTS}/${timescale}`,
       );
-      this.
+      this.videoAnchor = frag;
       // If we are waiting, tick immediately to unblock audio fragment transmuxing
       if (this.state === State.WAITING_INIT_PTS) {
+        const waitingData = this.waitingData;
+        if (!waitingData || waitingData.frag.cc !== cc) {
+          this.nextLoadPosition = this.findSyncFrag(frag).start;
+        }
         this.tick();
+      } else if (
+        !this.loadedmetadata &&
+        inFlightFrag &&
+        inFlightFrag.cc !== cc
+      ) {
+        this.startFragRequested = false;
+        this.nextLoadPosition = this.findSyncFrag(frag).start;
+        inFlightFrag.abortRequests();
+        this.resetLoadingState();
       }
     }
   }

+  private findSyncFrag(mainFrag: MediaFragment): MediaFragment {
+    const trackDetails = this.getLevelDetails();
+    const cc = mainFrag.cc;
+    return (
+      findNearestWithCC(trackDetails, cc, mainFrag) ||
+      (trackDetails && findFragWithCC(trackDetails.fragments, cc)) ||
+      mainFrag
+    );
+  }
+
   startLoad(startPosition: number) {
     if (!this.levels) {
       this.startPosition = startPosition;
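Editor's note: the new findSyncFrag helper picks an audio fragment that shares the main fragment's discontinuity counter (cc). A minimal, self-contained sketch of that nearest-by-cc lookup, using simplified stand-in types rather than the real MediaFragment/LevelDetails classes:

```ts
// Simplified stand-in type; the real hls.js fragments carry many more fields.
interface Frag {
  cc: number; // discontinuity counter
  start: number; // start time in seconds
}

// Pick the fragment whose cc matches and whose start is closest to the
// reference fragment; returns null when no fragment shares the cc.
function findSyncCandidate(fragments: Frag[], mainFrag: Frag): Frag | null {
  let best: Frag | null = null;
  let bestDelta = Infinity;
  for (const frag of fragments) {
    if (frag.cc !== mainFrag.cc) {
      continue;
    }
    const delta = Math.abs(frag.start - mainFrag.start);
    if (delta < bestDelta) {
      best = frag;
      bestDelta = delta;
    }
  }
  return best;
}

// Example: audio playlist whose discontinuity start times are slightly offset.
const audioFrags: Frag[] = [
  { cc: 0, start: 0 },
  { cc: 0, start: 6 },
  { cc: 1, start: 12.04 },
  { cc: 1, start: 18.04 },
];
console.log(findSyncCandidate(audioFrags, { cc: 1, start: 12 })); // { cc: 1, start: 12.04 }
```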
@@ -206,9 +232,9 @@ class AudioStreamController
       const waitingData = this.waitingData;
       if (waitingData) {
         const { frag, part, cache, complete } = waitingData;
+        const videoAnchor = this.videoAnchor;
         if (this.initPTS[frag.cc] !== undefined) {
           this.waitingData = null;
-          this.waitingVideoCC = -1;
           this.state = State.FRAG_LOADING;
           const payload = cache.flush();
           const data: FragLoadedData = {
@@ -221,33 +247,15 @@ class AudioStreamController
           if (complete) {
             super._handleFragmentLoadComplete(data);
           }
-        } else if (
+        } else if (videoAnchor && videoAnchor.cc !== waitingData.frag.cc) {
           // Drop waiting fragment if videoTrackCC has changed since waitingFragment was set and initPTS was not found
           this.log(
-            `Waiting fragment cc (${frag.cc}) cancelled because video is at cc ${
+            `Waiting fragment cc (${frag.cc}) cancelled because video is at cc ${videoAnchor.cc}`,
           );
+          this.nextLoadPosition = this.findSyncFrag(videoAnchor).start;
           this.clearWaitingFragment();
-        } else {
-          // Drop waiting fragment if an earlier fragment is needed
-          const pos = this.getLoadPosition();
-          const bufferInfo = BufferHelper.bufferInfo(
-            this.mediaBuffer,
-            pos,
-            this.config.maxBufferHole,
-          );
-          const waitingFragmentAtPosition = fragmentWithinToleranceTest(
-            bufferInfo.end,
-            this.config.maxFragLookUpTolerance,
-            frag,
-          );
-          if (waitingFragmentAtPosition < 0) {
-            this.log(
-              `Waiting fragment cc (${frag.cc}) @ ${frag.start} cancelled because another fragment at ${bufferInfo.end} is needed`,
-            );
-            this.clearWaitingFragment();
-          }
         }
-      } else {
+      } else if (this.state !== State.STOPPED) {
         this.state = State.IDLE;
       }
     }
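Editor's note: the waiting-data branch above holds an already-downloaded audio fragment until the main stream publishes an initPTS for its discontinuity, then flushes the cached payload. A rough sketch of that gating pattern, with hypothetical, simplified types rather than the controller's actual fields:

```ts
interface RationalTimestamp {
  baseTime: number;
  timescale: number;
}

interface WaitingAudio {
  cc: number; // discontinuity counter of the cached fragment
  payload: Uint8Array; // downloaded but not yet transmuxed data
}

class InitPtsGate {
  private initPTS: (RationalTimestamp | undefined)[] = [];
  private waiting: WaitingAudio | null = null;

  // Called when the main stream reports an initPTS for a discontinuity.
  onInitPtsFound(
    cc: number,
    pts: RationalTimestamp,
    flush: (ready: WaitingAudio) => void,
  ): void {
    this.initPTS[cc] = pts;
    if (this.waiting && this.waiting.cc === cc) {
      const ready = this.waiting;
      this.waiting = null;
      flush(ready); // hand the cached payload to the transmuxer
    }
  }

  // Called when audio data arrives before its initPTS is known.
  holdUntilReady(audio: WaitingAudio): boolean {
    if (this.initPTS[audio.cc] !== undefined) {
      return false; // no need to wait
    }
    this.waiting = audio;
    return true;
  }
}
```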
@@ -259,9 +267,12 @@ class AudioStreamController
   clearWaitingFragment() {
     const waitingData = this.waitingData;
     if (waitingData) {
+      if (!this.loadedmetadata) {
+        // Load overlapping fragment on start when discontinuity start times are not aligned
+        this.startFragRequested = false;
+      }
       this.fragmentTracker.removeFragment(waitingData.frag);
       this.waitingData = null;
-      this.waitingVideoCC = -1;
       if (this.state !== State.STOPPED) {
         this.state = State.IDLE;
       }
@@ -343,12 +354,11 @@ class AudioStreamController

     const fragments = trackDetails.fragments;
     const start = fragments[0].start;
-
-
-      : bufferInfo.end;
+    const loadPosition = this.getLoadPosition();
+    let targetBufferTime = this.flushing ? loadPosition : bufferInfo.end;

     if (switchingTrack && media) {
-      const pos =
+      const pos = loadPosition;
       // STABLE
       if (
         bufferedTrack &&
@@ -378,10 +388,8 @@ class AudioStreamController
     }

     let frag = this.getNextFragment(targetBufferTime, trackDetails);
-    let atGap = false;
     // Avoid loop loading by using nextLoadPosition set for backtracking and skipping consecutive GAP tags
     if (frag && this.isLoopLoading(frag, targetBufferTime)) {
-      atGap = !!frag.gap;
       frag = this.getNextFragmentLoopLoading(
         frag,
         trackDetails,
@@ -395,31 +403,19 @@ class AudioStreamController
       return;
     }

-    if (
+    if (
+      this.startFragRequested &&
+      (!trackDetails.live || targetBufferTime < this.hls.liveSyncPosition!)
+    ) {
       // Request audio segments up to one fragment ahead of main buffer
-      const
-
-
-
+      const mainFragLoading = this.mainFragLoading;
+      const mainTargetBufferEnd = mainFragLoading
+        ? (mainFragLoading.part || mainFragLoading.frag).end
+        : null;
       const atBufferSyncLimit =
-
-      if (atBufferSyncLimit) {
-
-        const mainFrag = this.fragmentTracker.getFragAtPos(
-          frag.start,
-          PlaylistLevelType.MAIN,
-        );
-        if (mainFrag === null) {
-          return;
-        }
-        // Bridge gaps in main buffer (also prevents loop loading at gaps)
-        atGap ||= !!mainFrag.gap || mainBufferInfo.len === 0;
-        if (
-          !atGap ||
-          (bufferInfo.nextStart && bufferInfo.nextStart < mainFrag.end)
-        ) {
-          return;
-        }
+        mainTargetBufferEnd !== null && frag.start > mainTargetBufferEnd;
+      if (atBufferSyncLimit && !frag.endList) {
+        return;
       }
     }

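Editor's note: the rewritten check above keeps audio loading no more than one fragment ahead of the main fragment currently in flight, instead of probing the main buffer directly. A standalone sketch of that comparison, with hypothetical parameter names:

```ts
// Decide whether an audio fragment should wait for the main stream.
// `mainLoadingEnd` is the end time of the main fragment (or part) currently
// being loaded; null when nothing is in flight.
function shouldHoldAudioLoad(
  audioFragStart: number,
  audioFragIsLast: boolean,
  mainLoadingEnd: number | null,
): boolean {
  const atBufferSyncLimit =
    mainLoadingEnd !== null && audioFragStart > mainLoadingEnd;
  // Never hold the final fragment of an ended playlist.
  return atBufferSyncLimit && !audioFragIsLast;
}

// Example: main is loading a fragment ending at t=24s.
console.log(shouldHoldAudioLoad(30, false, 24)); // true, wait for main
console.log(shouldHoldAudioLoad(18, false, 24)); // false, keep loading
console.log(shouldHoldAudioLoad(30, true, 24)); // false, last fragment loads anyway
```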
@@ -427,7 +423,6 @@ class AudioStreamController
   }

   protected onMediaDetaching() {
-    this.videoBuffer = null;
     this.bufferFlushed = this.flushing = false;
     super.onMediaDetaching();
   }
@@ -477,18 +472,16 @@ class AudioStreamController
   }

   protected onManifestLoading() {
-
-    this.startPosition = this.lastCurrentTime = 0;
+    super.onManifestLoading();
     this.bufferFlushed = this.flushing = false;
-    this.
-    this.mainDetails =
+    this.mainDetails =
       this.waitingData =
+      this.videoAnchor =
       this.bufferedTrack =
       this.cachedTrackLoadedData =
       this.switchingTrack =
         null;
-    this.
-    this.trackId = this.videoTrackCC = this.waitingVideoCC = -1;
+    this.trackId = -1;
   }

   private onLevelLoaded(event: Events.LEVEL_LOADED, data: LevelLoadedData) {
@@ -643,7 +636,6 @@ class AudioStreamController
           complete: false,
         });
         cache.push(new Uint8Array(payload));
-        this.waitingVideoCC = this.videoTrackCC;
         this.state = State.WAITING_INIT_PTS;
       }
     }
@@ -658,7 +650,7 @@ class AudioStreamController

   private onBufferReset(/* event: Events.BUFFER_RESET */) {
     // reset reference to sourcebuffers
-    this.mediaBuffer =
+    this.mediaBuffer = null;
     this.loadedmetadata = false;
   }

@@ -670,8 +662,17 @@ class AudioStreamController
     if (audioTrack) {
       this.mediaBuffer = audioTrack.buffer || null;
     }
-
-
+  }
+
+  private onFragLoading(event: Events.FRAG_LOADING, data: FragLoadingData) {
+    if (
+      data.frag.type === PlaylistLevelType.MAIN &&
+      data.frag.sn !== 'initSegment'
+    ) {
+      this.mainFragLoading = data;
+      if (this.state === State.IDLE) {
+        this.tick();
+      }
     }
   }

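Editor's note: the controller now learns about main fragment loads through the same FRAG_LOADING event that applications can observe. A sketch of an application-level listener, assuming a browser environment and the standard hls.js event payload:

```ts
import Hls from 'hls.js';

// Log each main-playlist media fragment as its request starts.
const hls = new Hls();
hls.on(Hls.Events.FRAG_LOADING, (_event, data) => {
  const { frag } = data;
  if (String(frag.type) === 'main' && frag.sn !== 'initSegment') {
    console.log(
      `main fragment sn=${frag.sn} cc=${frag.cc} start=${frag.start.toFixed(3)} loading`,
    );
  }
});
```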
@@ -679,12 +680,12 @@ class AudioStreamController
     const { frag, part } = data;
     if (frag.type !== PlaylistLevelType.AUDIO) {
       if (!this.loadedmetadata && frag.type === PlaylistLevelType.MAIN) {
-        const
-        if (
-
-
-
-
+        const bufferedState = this.fragmentTracker.getState(frag);
+        if (
+          bufferedState === FragmentState.OK ||
+          bufferedState === FragmentState.PARTIAL
+        ) {
+          this.loadedmetadata = true;
         }
       }
       return;
@@ -704,7 +705,7 @@ class AudioStreamController
       return;
     }
     if (frag.sn !== 'initSegment') {
-      this.fragPrevious = frag;
+      this.fragPrevious = frag as MediaFragment;
       const track = this.switchingTrack;
       if (track) {
         this.bufferedTrack = track;
@@ -889,12 +890,15 @@ class AudioStreamController
     if (tracks.video) {
       delete tracks.video;
     }
+    if (tracks.audiovideo) {
+      delete tracks.audiovideo;
+    }

     // include levelCodec in audio and video tracks
-
-    if (!track) {
+    if (!tracks.audio) {
       return;
     }
+    const track = tracks.audio;

     track.id = 'audio';

@@ -906,7 +910,7 @@ class AudioStreamController
     if (variantAudioCodecs && variantAudioCodecs.split(',').length === 1) {
       track.levelCodec = variantAudioCodecs;
     }
-    this.hls.trigger(Events.BUFFER_CODECS, tracks);
+    this.hls.trigger(Events.BUFFER_CODECS, tracks as BufferCodecsData);
     const initSegment = track.initSegment;
     if (initSegment?.byteLength) {
       const segment: BufferAppendingData = {
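Editor's note: for reference, applications can watch the BUFFER_CODECS event triggered above; for an alternate-audio append only an audio track is expected in the payload. A sketch, assuming the usual track fields (codec, levelCodec, container) are present:

```ts
import Hls from 'hls.js';

// Inspect which codecs the player announces for its SourceBuffers.
const hls = new Hls();
hls.on(Hls.Events.BUFFER_CODECS, (_event, data) => {
  if (data.audio) {
    const { codec, levelCodec, container } = data.audio;
    console.log(`audio SourceBuffer codec: ${levelCodec || codec} (${container})`);
  }
});
```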
@@ -930,7 +934,6 @@ class AudioStreamController
   ) {
     // only load if fragment is not loaded or if in audio switch
     const fragState = this.fragmentTracker.getState(frag);
-    this.fragCurrent = frag;

     // we force a frag loading in audio switch as fragment tracker might not have evicted previous frags in case of quick audio switch
     if (
@@ -953,7 +956,6 @@ class AudioStreamController
         alignMediaPlaylistByPDT(track.details, mainDetails);
       }
     } else {
-      this.startFragRequested = true;
       super.loadFragment(frag, track, targetBufferTime);
     }
   } else {
package/src/controller/base-stream-controller.ts
CHANGED
@@ -77,7 +77,7 @@ export default class BaseStreamController
 {
   protected hls: Hls;

-  protected fragPrevious:
+  protected fragPrevious: MediaFragment | null = null;
   protected fragCurrent: Fragment | null = null;
   protected fragmentTracker: FragmentTracker;
   protected transmuxer: TransmuxerInterface | null = null;
@@ -102,6 +102,7 @@ export default class BaseStreamController
   protected initPTS: RationalTimestamp[] = [];
   protected buffering: boolean = true;
   private loadingParts: boolean = false;
+  private loopSn?: string | number;

   constructor(
     hls: Hls,
@@ -148,6 +149,9 @@ export default class BaseStreamController
   public startLoad(startPosition: number): void {}

   public stopLoad() {
+    if (this.state === State.STOPPED) {
+      return;
+    }
     this.fragmentLoader.abort();
     this.keyLoader.abort(this.playlistType);
     const frag = this.fragCurrent;
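Editor's note: the new early return makes stopLoad a no-op once the controller is already stopped, so repeated calls do not re-abort loaders. A generic sketch of that idempotent-stop pattern with made-up names:

```ts
enum State {
  IDLE = 'IDLE',
  STOPPED = 'STOPPED',
}

class LoaderLike {
  public state: State = State.IDLE;
  private aborts = 0;

  stopLoad(): void {
    if (this.state === State.STOPPED) {
      return; // already stopped; nothing to abort a second time
    }
    this.aborts++; // stands in for fragmentLoader.abort(), keyLoader.abort(), etc.
    this.state = State.STOPPED;
  }

  get abortCount(): number {
    return this.aborts;
  }
}

const loader = new LoaderLike();
loader.stopLoad();
loader.stopLoad(); // second call is a no-op
console.log(loader.abortCount); // 1
```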
@@ -242,12 +246,18 @@ export default class BaseStreamController
       this.keyLoader.detach();
     }
     this.media = this.mediaBuffer = null;
-    this.
+    this.loopSn = undefined;
+    this.startFragRequested = this.loadedmetadata = this.loadingParts = false;
     this.fragmentTracker.removeAllFragments();
     this.stopLoad();
   }

-  protected onManifestLoading() {
+  protected onManifestLoading() {
+    this.initPTS = [];
+    this.levels = this.levelLastLoaded = this.fragCurrent = null;
+    this.lastCurrentTime = this.startPosition = 0;
+    this.startFragRequested = false;
+  }

   protected onError(event: Events.ERROR, data: ErrorData) {}

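Editor's note: this hunk and the audio controller's onManifestLoading change follow one pattern: shared state is reset once in the base class and subclasses call super.onManifestLoading() before clearing their own fields. A minimal sketch of that pattern with invented field names:

```ts
class BaseController {
  protected startPosition = 0;
  protected lastCurrentTime = 0;
  protected startFragRequested = false;

  protected onManifestLoading(): void {
    // Reset state shared by every stream controller in one place.
    this.lastCurrentTime = this.startPosition = 0;
    this.startFragRequested = false;
  }
}

class AudioController extends BaseController {
  private trackId = -1;

  protected onManifestLoading(): void {
    super.onManifestLoading(); // base reset first
    this.trackId = -1; // then controller-specific state
  }
}
```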
@@ -346,7 +356,6 @@ export default class BaseStreamController
     data: ManifestLoadedData,
   ): void {
     this.startTimeOffset = data.startTimeOffset;
-    this.initPTS = [];
   }

   protected onHandlerDestroying() {
@@ -384,6 +393,7 @@ export default class BaseStreamController
     level: Level,
     targetBufferTime: number,
   ) {
+    this.startFragRequested = true;
     this._loadFragForPlayback(frag, level, targetBufferTime);
   }

@@ -692,6 +702,7 @@ export default class BaseStreamController
     targetBufferTime: number | null = null,
     progressCallback?: FragmentLoadProgressCallback,
   ): Promise<PartsLoadedData | FragLoadedData | null> {
+    this.fragCurrent = frag;
     const details = level?.details;
     if (!this.levels || !details) {
       throw new Error(
@@ -896,7 +907,7 @@ export default class BaseStreamController
             const loadedPart = partLoadedData.part as Part;
             this.hls.trigger(Events.FRAG_LOADED, partLoadedData);
             const nextPart =
-              getPartWith(level, frag.sn as number, part.index + 1) ||
+              getPartWith(level.details, frag.sn as number, part.index + 1) ||
               findPart(initialPartList, frag.sn as number, part.index + 1);
             if (nextPart) {
               loadPart(nextPart);
@@ -966,12 +977,7 @@ export default class BaseStreamController
       );
       this.loadingParts = shouldLoadParts;
     }
-    this.updateLevelTiming(
-      frag as MediaFragment,
-      part,
-      level,
-      chunkMeta.partial,
-    );
+    this.updateLevelTiming(frag, part, level, chunkMeta.partial);
   }

   private shouldLoadParts(
@@ -1011,10 +1017,13 @@ export default class BaseStreamController
       return null;
     }
     const level = levels[levelIndex];
-    const
+    const levelDetails = level.details;
+
+    const part =
+      partIndex > -1 ? getPartWith(levelDetails, sn, partIndex) : null;
     const frag = part
       ? part.fragment
-      : getFragmentWithSN(
+      : getFragmentWithSN(levelDetails, sn, fragCurrent);
     if (!frag) {
       return null;
     }
@@ -1160,9 +1169,9 @@ export default class BaseStreamController
     position: number,
     playlistType: PlaylistLevelType = PlaylistLevelType.MAIN,
   ): Fragment | null {
-    const fragOrPart = this.fragmentTracker
+    const fragOrPart = this.fragmentTracker?.getAppendedFrag(
       position,
-
+      playlistType,
     );
     if (fragOrPart && 'fragment' in fragOrPart) {
       return fragOrPart.fragment;
@@ -1246,34 +1255,34 @@ export default class BaseStreamController
     playlistType: PlaylistLevelType,
     maxBufLen: number,
   ): Fragment | null {
-
-
-      this.nextLoadPosition,
-
-
-
-
-
-
-    if (gapStart && frag && !frag.gap && bufferInfo.nextStart) {
-      // Media buffered after GAP tags should not make the next buffer timerange exceed forward buffer length
-      const nextbufferInfo = this.getFwdBufferInfoAtPos(
-        this.mediaBuffer ? this.mediaBuffer : this.media,
-        bufferInfo.nextStart,
-        playlistType,
-      );
-      if (
-        nextbufferInfo !== null &&
-        bufferInfo.len + nextbufferInfo.len >= maxBufLen
-      ) {
-        // Returning here might result in not finding an audio and video candiate to skip to
-        this.log(
-          `buffer full after gaps in "${playlistType}" playlist starting at sn: ${frag.sn}`,
+    let nextFragment: Fragment | null = null;
+    if (frag.gap) {
+      nextFragment = this.getNextFragment(this.nextLoadPosition, levelDetails);
+      if (nextFragment && !nextFragment.gap && bufferInfo.nextStart) {
+        // Media buffered after GAP tags should not make the next buffer timerange exceed forward buffer length
+        const nextbufferInfo = this.getFwdBufferInfoAtPos(
+          this.mediaBuffer ? this.mediaBuffer : this.media,
+          bufferInfo.nextStart,
+          playlistType,
         );
-
+        if (
+          nextbufferInfo !== null &&
+          bufferInfo.len + nextbufferInfo.len >= maxBufLen
+        ) {
+          // Returning here might result in not finding an audio and video candiate to skip to
+          const sn = nextFragment.sn;
+          if (this.loopSn !== sn) {
+            this.log(
+              `buffer full after gaps in "${playlistType}" playlist starting at sn: ${sn}`,
+            );
+            this.loopSn = sn;
+          }
+          return null;
+        }
       }
     }
-
+    this.loopSn = undefined;
+    return nextFragment;
   }

   mapToInitFragWhenRequired(frag: Fragment | null): typeof frag {
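Editor's note: the loopSn field added above exists to avoid logging the same "buffer full after gaps" message on every tick. A generic sketch of that log de-duplication idea, with hypothetical names:

```ts
class DedupedLogger {
  private lastKey: string | number | undefined;

  // Log `message` only when `key` differs from the last logged key.
  logOnce(key: string | number, message: string): void {
    if (this.lastKey !== key) {
      console.log(message);
      this.lastKey = key;
    }
  }

  // Call once the condition clears so a later occurrence logs again.
  reset(): void {
    this.lastKey = undefined;
  }
}

// Example: repeated ticks while the buffer is full log only once per sn.
const logger = new DedupedLogger();
for (let tick = 0; tick < 5; tick++) {
  logger.logOnce(42, 'buffer full after gaps starting at sn: 42');
}
logger.reset();
```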
@@ -1570,7 +1579,7 @@ export default class BaseStreamController
     if (startTimeOffset !== null && Number.isFinite(startTimeOffset)) {
       startPosition = sliding + startTimeOffset;
       if (startTimeOffset < 0) {
-        startPosition += details.
+        startPosition += details.edge;
       }
       startPosition = Math.min(
         Math.max(sliding, startPosition),
package/src/controller/buffer-controller.ts
CHANGED
@@ -147,6 +147,7 @@ export default class BufferController extends Logger implements ComponentAPI {
     hls.on(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
     hls.on(Events.FRAG_PARSED, this.onFragParsed, this);
     hls.on(Events.FRAG_CHANGED, this.onFragChanged, this);
+    hls.on(Events.ERROR, this.onError, this);
   }

   protected unregisterListeners() {
@@ -163,6 +164,7 @@ export default class BufferController extends Logger implements ComponentAPI {
     hls.off(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
     hls.off(Events.FRAG_PARSED, this.onFragParsed, this);
     hls.off(Events.FRAG_CHANGED, this.onFragChanged, this);
+    hls.off(Events.ERROR, this.onError, this);
   }

   private _initSourceBuffer() {
|
@@ -173,11 +175,7 @@ export default class BufferController extends Logger implements ComponentAPI {
|
|
173
175
|
video: [],
|
174
176
|
audiovideo: [],
|
175
177
|
};
|
176
|
-
this.
|
177
|
-
audio: 0,
|
178
|
-
video: 0,
|
179
|
-
audiovideo: 0,
|
180
|
-
};
|
178
|
+
this.resetAppendErrors();
|
181
179
|
this.lastMpegAudioChunk = null;
|
182
180
|
this.blockedAudioAppend = null;
|
183
181
|
this.lastVideoAppendEnd = 0;
|
@@ -793,6 +791,23 @@ export default class BufferController extends Logger implements ComponentAPI {
     }
   }

+  private onError(event: Events.ERROR, data: ErrorData) {
+    if (data.details === ErrorDetails.BUFFER_APPEND_ERROR && data.frag) {
+      const nextAutoLevel = data.errorAction?.nextAutoLevel;
+      if (Number.isFinite(nextAutoLevel) && nextAutoLevel !== data.frag.level) {
+        this.resetAppendErrors();
+      }
+    }
+  }
+
+  private resetAppendErrors() {
+    this.appendErrors = {
+      audio: 0,
+      video: 0,
+      audiovideo: 0,
+    };
+  }
+
   trimBuffers() {
     const { hls, details, media } = this;
     if (!media || details === null) {
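Editor's note: the new onError hook clears the per-SourceBuffer append-error counters when the pending error action targets a different level, so retries against the new level start from zero. A simplified sketch of that counter-reset idea, not the controller's actual types:

```ts
type SourceBufferName = 'audio' | 'video' | 'audiovideo';

class AppendErrorTracker {
  private appendErrors: Record<SourceBufferName, number> = {
    audio: 0,
    video: 0,
    audiovideo: 0,
  };

  // Count an append failure; callers compare the result against a retry limit.
  recordError(type: SourceBufferName): number {
    return ++this.appendErrors[type];
  }

  // When recovery switches to another quality level, earlier failures no
  // longer apply, so start counting again from zero.
  onLevelSwitched(previousLevel: number, nextLevel: number): void {
    if (nextLevel !== previousLevel) {
      this.reset();
    }
  }

  reset(): void {
    this.appendErrors = { audio: 0, video: 0, audiovideo: 0 };
  }
}
```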