hls.js 1.6.0-beta.2.0.canary.10924 → 1.6.0-beta.2.0.canary.10925
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/hls.d.mts +60 -29
- package/dist/hls.d.ts +60 -29
- package/dist/hls.js +646 -488
- package/dist/hls.js.d.ts +60 -29
- package/dist/hls.js.map +1 -1
- package/dist/hls.light.js +3855 -3696
- package/dist/hls.light.js.map +1 -1
- package/dist/hls.light.min.js +1 -1
- package/dist/hls.light.min.js.map +1 -1
- package/dist/hls.light.mjs +1650 -1490
- package/dist/hls.light.mjs.map +1 -1
- package/dist/hls.min.js +1 -1
- package/dist/hls.min.js.map +1 -1
- package/dist/hls.mjs +650 -491
- package/dist/hls.mjs.map +1 -1
- package/dist/hls.worker.js +1 -1
- package/package.json +1 -1
- package/src/config.ts +15 -9
- package/src/controller/base-stream-controller.ts +12 -8
- package/src/controller/buffer-controller.ts +19 -22
- package/src/controller/gap-controller.ts +257 -35
- package/src/controller/interstitials-controller.ts +13 -10
- package/src/controller/stream-controller.ts +26 -73
- package/src/hls.ts +47 -3
- package/src/utils/buffer-helper.ts +35 -13
- package/src/utils/event-listener-helper.ts +16 -0

package/src/controller/base-stream-controller.ts

```diff
@@ -79,6 +79,11 @@ export const State = {
   WAITING_LEVEL: 'WAITING_LEVEL',
 };
 
+export type InFlightData = {
+  frag: Fragment | null;
+  state: (typeof State)[keyof typeof State];
+};
+
 export default class BaseStreamController
   extends TaskLoop
   implements NetworkComponentAPI
@@ -89,7 +94,7 @@ export default class BaseStreamController
   protected fragCurrent: Fragment | null = null;
   protected fragmentTracker: FragmentTracker;
   protected transmuxer: TransmuxerInterface | null = null;
-  protected _state:
+  protected _state: (typeof State)[keyof typeof State] = State.STOPPED;
   protected playlistType: PlaylistLevelType;
   protected media: HTMLMediaElement | null = null;
   protected mediaBuffer: Bufferable | null = null;
@@ -194,6 +199,10 @@ export default class BaseStreamController
     this.buffering = true;
   }
 
+  public get inFlightFrag(): InFlightData {
+    return { frag: this.fragCurrent, state: this.state };
+  }
+
   protected _streamEnded(
     bufferInfo: BufferInfo,
     levelDetails: LevelDetails,
@@ -395,13 +404,8 @@ export default class BaseStreamController
     // reset startPosition and lastCurrentTime to restart playback @ stream beginning
     this.log(`setting startPosition to 0 because media ended`);
     this.startPosition = this.lastCurrentTime = 0;
-    this.triggerEnded();
   };
 
-  protected triggerEnded() {
-    /* overridden in stream-controller */
-  }
-
   protected onManifestLoaded(
     event: Events.MANIFEST_LOADED,
     data: ManifestLoadedData,
@@ -2023,7 +2027,7 @@ export default class BaseStreamController
     }
   }
 
-  set state(nextState) {
+  set state(nextState: (typeof State)[keyof typeof State]) {
     const previousState = this._state;
     if (previousState !== nextState) {
       this._state = nextState;
@@ -2031,7 +2035,7 @@ export default class BaseStreamController
     }
   }
 
-  get state() {
+  get state(): (typeof State)[keyof typeof State] {
     return this._state;
   }
 }
```
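The notable typing change here is `_state` moving to the `(typeof State)[keyof typeof State]` lookup type, which the new `InFlightData` shape and the `inFlightFrag` getter reuse. Below is a standalone sketch of that pattern, not hls.js source; the reduced `State` map and `FragmentLike` are placeholders for illustration.

```typescript
// Standalone sketch of the typing pattern above. The reduced State map and
// FragmentLike are placeholders, not hls.js exports.
const State = {
  STOPPED: 'STOPPED',
  IDLE: 'IDLE',
  FRAG_LOADING: 'FRAG_LOADING',
  ENDED: 'ENDED',
  ERROR: 'ERROR',
} as const;

// Resolves to 'STOPPED' | 'IDLE' | 'FRAG_LOADING' | 'ENDED' | 'ERROR'.
type StreamControllerState = (typeof State)[keyof typeof State];

interface FragmentLike {
  start: number;
}

type InFlightData = {
  frag: FragmentLike | null;
  state: StreamControllerState;
};

// Mirrors the new inFlightFrag getter: the fragment currently being handled,
// paired with the controller state it is being handled in.
const inFlight: InFlightData = {
  frag: { start: 10 },
  state: State.FRAG_LOADING,
};
console.log(inFlight.state); // 'FRAG_LOADING'
```

Because `State` is declared `as const`, the lookup type collapses to a string-literal union, so assigning any value outside the enumerated states is a compile-time error.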
package/src/controller/buffer-controller.ts

```diff
@@ -82,10 +82,10 @@ export default class BufferController extends Logger implements ComponentAPI {
   private bufferCodecEventsTotal: number = 0;
 
   // A reference to the attached media element
-
+  private media: HTMLMediaElement | null = null;
 
   // A reference to the active media source
-
+  private mediaSource: MediaSource | null = null;
 
   // Last MP3 audio chunk appended
   private lastMpegAudioChunk: ChunkMetadata | null = null;
@@ -152,7 +152,7 @@ export default class BufferController extends Logger implements ComponentAPI {
     this._onStartStreaming = this._onEndStreaming = null;
   }
 
-
+  private registerListeners() {
     const { hls } = this;
     hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
     hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
@@ -169,7 +169,7 @@ export default class BufferController extends Logger implements ComponentAPI {
     hls.on(Events.ERROR, this.onError, this);
   }
 
-
+  private unregisterListeners() {
     const { hls } = this;
     hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
     hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
@@ -247,7 +247,7 @@ export default class BufferController extends Logger implements ComponentAPI {
     this.details = null;
   }
 
-
+  private onManifestParsed(
     event: Events.MANIFEST_PARSED,
     data: ManifestParsedData,
   ) {
@@ -270,7 +270,7 @@ export default class BufferController extends Logger implements ComponentAPI {
     }
   }
 
-
+  private onMediaAttaching(
     event: Events.MEDIA_ATTACHING,
     data: MediaAttachingData,
   ) {
@@ -440,6 +440,7 @@ transfer tracks: ${JSON.stringify(transferredTracks, (key, value) => (key === 'i
     }
     this.hls.pauseBuffering();
   };
+
   private _onStartStreaming = (event) => {
     if (!this.hls) {
       return;
@@ -447,7 +448,7 @@ transfer tracks: ${JSON.stringify(transferredTracks, (key, value) => (key === 'i
     this.hls.resumeBuffering();
   };
 
-
+  private onMediaDetaching(
     event: Events.MEDIA_DETACHING,
     data: MediaDetachingData,
   ) {
@@ -540,7 +541,7 @@ transfer tracks: ${JSON.stringify(transferredTracks, (key, value) => (key === 'i
     this.hls.trigger(Events.MEDIA_DETACHED, data);
   }
 
-
+  private onBufferReset() {
     this.sourceBuffers.forEach(([type]) => {
       if (type) {
         this.resetBuffer(type);
@@ -580,10 +581,7 @@ transfer tracks: ${JSON.stringify(transferredTracks, (key, value) => (key === 'i
     this.operationQueue = new BufferOperationQueue(this.tracks);
   }
 
-
-    event: Events.BUFFER_CODECS,
-    data: BufferCodecsData,
-  ) {
+  private onBufferCodecs(event: Events.BUFFER_CODECS, data: BufferCodecsData) {
     const tracks = this.tracks;
     const trackNames = Object.keys(data);
     this.log(
@@ -614,7 +612,6 @@ transfer tracks: ${JSON.stringify(transferredTracks, (key, value) => (key === 'i
     const sbTrack = transferredTrack?.buffer ? transferredTrack : track;
     const sbCodec = sbTrack?.pendingCodec || sbTrack?.codec;
     const trackLevelCodec = sbTrack?.levelCodec;
-    const forceChangeType = !sbTrack || !!this.hls.config.assetPlayerId;
     if (!track) {
       track = tracks[trackName] = {
         buffer: undefined,
@@ -637,7 +634,7 @@ transfer tracks: ${JSON.stringify(transferredTracks, (key, value) => (key === 'i
     );
     let trackCodec = pickMostCompleteCodecName(codec, levelCodec);
     const nextCodec = trackCodec?.replace(VIDEO_CODEC_PROFILE_REPLACE, '$1');
-    if (trackCodec &&
+    if (trackCodec && currentCodecFull && currentCodec !== nextCodec) {
       if (trackName.slice(0, 5) === 'audio') {
         trackCodec = getCodecCompatibleName(trackCodec, this.appendSource);
       }
@@ -676,7 +673,7 @@ transfer tracks: ${JSON.stringify(transferredTracks, (key, value) => (key === 'i
     }, {});
   }
 
-
+  private appendChangeType(
     type: SourceBufferName,
     container: string,
     codec: string,
@@ -748,7 +745,7 @@ transfer tracks: ${JSON.stringify(transferredTracks, (key, value) => (key === 'i
     }
   }
 
-
+  private onBufferAppending(
     event: Events.BUFFER_APPENDING,
     eventData: BufferAppendingData,
   ) {
@@ -956,7 +953,7 @@ transfer tracks: ${JSON.stringify(transferredTracks, (key, value) => (key === 'i
     };
   }
 
-
+  private onBufferFlushing(
     event: Events.BUFFER_FLUSHING,
     data: BufferFlushingData,
   ) {
@@ -972,7 +969,7 @@ transfer tracks: ${JSON.stringify(transferredTracks, (key, value) => (key === 'i
     }
   }
 
-
+  private onFragParsed(event: Events.FRAG_PARSED, data: FragParsedData) {
     const { frag, part } = data;
     const buffersAppendedTo: SourceBufferName[] = [];
     const elementaryStreams = part
@@ -1017,7 +1014,7 @@ transfer tracks: ${JSON.stringify(transferredTracks, (key, value) => (key === 'i
     this.trimBuffers();
   }
 
-  get bufferedToEnd(): boolean {
+  public get bufferedToEnd(): boolean {
     return (
       this.sourceBufferCount > 0 &&
       !this.sourceBuffers.some(
@@ -1029,7 +1026,7 @@ transfer tracks: ${JSON.stringify(transferredTracks, (key, value) => (key === 'i
 
   // on BUFFER_EOS mark matching sourcebuffer(s) as "ending" and "ended" and queue endOfStream after remaining operations(s)
   // an undefined data.type will mark all buffers as EOS.
-
+  private onBufferEos(event: Events.BUFFER_EOS, data: BufferEOSData) {
     this.sourceBuffers.forEach(([type]) => {
       if (type) {
         const track = this.tracks[type] as SourceBufferTrack;
@@ -1078,7 +1075,7 @@ transfer tracks: ${JSON.stringify(transferredTracks, (key, value) => (key === 'i
     }
   }
 
-
+  private onLevelUpdated(
     event: Events.LEVEL_UPDATED,
     { details }: LevelUpdatedData,
   ) {
@@ -1321,7 +1318,7 @@ transfer tracks: ${JSON.stringify(transferredTracks, (key, value) => (key === 'i
     );
   }
 
-
+  private checkPendingTracks() {
     const { bufferCodecEventsTotal, pendingTrackCount, tracks } = this;
     this.log(
      `checkPendingTracks (pending: ${pendingTrackCount} codec events expected: ${bufferCodecEventsTotal}) ${JSON.stringify(tracks)}`,
```
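Most of the BufferController changes are visibility annotations (event handlers become `private`), but the codec branch above now requires a current codec and a mismatch before taking the `appendChangeType` path. The sketch below shows the underlying MSE `SourceBuffer.changeType()` pattern that path relies on; it is illustrative only, and the helper name and arguments are not hls.js APIs.

```typescript
// Illustrative MSE changeType sketch (not hls.js source). Switching codecs on
// an existing SourceBuffer avoids tearing it down and re-creating it.
function switchSourceBufferCodec(
  sourceBuffer: SourceBuffer,
  container: string, // e.g. 'video/mp4'
  codec: string, // e.g. 'avc1.64001f'
): boolean {
  const mimeType = `${container};codecs=${codec}`;
  if (typeof sourceBuffer.changeType === 'function') {
    // Subsequent appendBuffer() calls are parsed against the new codec string.
    sourceBuffer.changeType(mimeType);
    return true;
  }
  // Older MSE implementations without changeType would need a new SourceBuffer.
  return false;
}
```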
package/src/controller/gap-controller.ts

```diff
@@ -1,44 +1,149 @@
 import { State } from './base-stream-controller';
 import { ErrorDetails, ErrorTypes } from '../errors';
 import { Events } from '../events';
+import TaskLoop from '../task-loop';
 import { PlaylistLevelType } from '../types/loader';
 import { BufferHelper } from '../utils/buffer-helper';
-import {
+import {
+  addEventListener,
+  removeEventListener,
+} from '../utils/event-listener-helper';
+import type { InFlightData } from './base-stream-controller';
+import type { InFlightFragments } from '../hls';
 import type Hls from '../hls';
 import type { FragmentTracker } from './fragment-tracker';
 import type { Fragment } from '../loader/fragment';
-import type {
+import type { SourceBufferName } from '../types/buffer';
+import type {
+  BufferAppendedData,
+  MediaAttachedData,
+  MediaDetachingData,
+} from '../types/events';
 import type { BufferInfo } from '../utils/buffer-helper';
 
 export const MAX_START_GAP_JUMP = 2.0;
 export const SKIP_BUFFER_HOLE_STEP_SECONDS = 0.1;
 export const SKIP_BUFFER_RANGE_START = 0.05;
+const TICK_INTERVAL = 100;
 
-export default class GapController extends
-  private media: HTMLMediaElement | null = null;
-  private fragmentTracker: FragmentTracker | null = null;
+export default class GapController extends TaskLoop {
   private hls: Hls | null = null;
+  private fragmentTracker: FragmentTracker | null = null;
+  private media: HTMLMediaElement | null = null;
+  private mediaSource?: MediaSource;
+
   private nudgeRetry: number = 0;
   private stallReported: boolean = false;
   private stalled: number | null = null;
   private moved: boolean = false;
   private seeking: boolean = false;
+  private buffered: Partial<Record<SourceBufferName, TimeRanges>> = {};
+
+  private lastCurrentTime: number = 0;
   public ended: number = 0;
   public waiting: number = 0;
 
-  constructor(
-    media: HTMLMediaElement,
-    fragmentTracker: FragmentTracker,
-    hls: Hls,
-  ) {
+  constructor(hls: Hls, fragmentTracker: FragmentTracker) {
     super('gap-controller', hls.logger);
-    this.media = media;
-    this.fragmentTracker = fragmentTracker;
     this.hls = hls;
+    this.fragmentTracker = fragmentTracker;
+    this.registerListeners();
+  }
+
+  private registerListeners() {
+    const { hls } = this;
+    if (hls) {
+      hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
+      hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
+      hls.on(Events.BUFFER_APPENDED, this.onBufferAppended, this);
+    }
+  }
+
+  private unregisterListeners() {
+    const { hls } = this;
+    if (hls) {
+      hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
+      hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
+      hls.off(Events.BUFFER_APPENDED, this.onBufferAppended, this);
+    }
   }
 
   public destroy() {
+    super.destroy();
+    this.unregisterListeners();
     this.media = this.hls = this.fragmentTracker = null;
+    this.mediaSource = undefined;
+  }
+
+  private onMediaAttached(
+    event: Events.MEDIA_ATTACHED,
+    data: MediaAttachedData,
+  ) {
+    this.setInterval(TICK_INTERVAL);
+    this.mediaSource = data.mediaSource;
+    const media = (this.media = data.media);
+    addEventListener(media, 'playing', this.onMediaPlaying);
+    addEventListener(media, 'waiting', this.onMediaWaiting);
+    addEventListener(media, 'ended', this.onMediaEnded);
+  }
+
+  private onMediaDetaching(
+    event: Events.MEDIA_DETACHING,
+    data: MediaDetachingData,
+  ) {
+    this.clearInterval();
+    const { media } = this;
+    if (media) {
+      removeEventListener(media, 'playing', this.onMediaPlaying);
+      removeEventListener(media, 'waiting', this.onMediaWaiting);
+      removeEventListener(media, 'ended', this.onMediaEnded);
+      this.media = null;
+    }
+    this.mediaSource = undefined;
+  }
+
+  private onBufferAppended(
+    event: Events.BUFFER_APPENDED,
+    data: BufferAppendedData,
+  ) {
+    this.buffered = data.timeRanges;
+  }
+
+  private onMediaPlaying = () => {
+    this.ended = 0;
+    this.waiting = 0;
+  };
+
+  private onMediaWaiting = () => {
+    if (this.media?.seeking) {
+      return;
+    }
+    this.waiting = self.performance.now();
+    this.tick();
+  };
+
+  private onMediaEnded = () => {
+    if (this.hls) {
+      // ended is set when triggering MEDIA_ENDED so that we do not trigger it again on stall or on tick with media.ended
+      this.ended = this.media?.currentTime || 1;
+      this.hls.trigger(Events.MEDIA_ENDED, {
+        stalled: false,
+      });
+    }
+  };
+
+  public get hasBuffered(): boolean {
+    return Object.keys(this.buffered).length > 0;
+  }
+
+  public tick() {
+    if (!this.media?.readyState || !this.hasBuffered) {
+      return;
+    }
+
+    const currentTime = this.media.currentTime;
+    this.poll(currentTime, this.lastCurrentTime);
+    this.lastCurrentTime = currentTime;
   }
 
   /**
```
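With this rewrite GapController no longer receives the media element directly: it extends TaskLoop, subscribes to `MEDIA_ATTACHED`, `MEDIA_DETACHING`, and `BUFFER_APPENDED`, and drives `poll()` from a 100 ms tick plus the media `waiting` event. The reduced sketch below captures that lifecycle shape in isolation; it is not hls.js's TaskLoop, and the names are simplified stand-ins.

```typescript
// Reduced sketch of the tick-driven lifecycle adopted above (not hls.js's
// TaskLoop implementation; details are simplified for illustration).
class PollingLoop {
  private timer: ReturnType<typeof setInterval> | null = null;
  private lastCurrentTime = 0;

  constructor(private media: HTMLMediaElement) {}

  // Roughly what MEDIA_ATTACHED triggers: start polling at a fixed interval.
  start(intervalMs = 100) {
    this.stop();
    this.timer = setInterval(() => this.tick(), intervalMs);
  }

  // Roughly what MEDIA_DETACHING triggers: stop polling and drop the timer.
  stop() {
    if (this.timer !== null) {
      clearInterval(this.timer);
      this.timer = null;
    }
  }

  tick() {
    if (!this.media.readyState) {
      return;
    }
    const currentTime = this.media.currentTime;
    // The real controller runs stall/hole detection in poll(currentTime, lastCurrentTime).
    this.lastCurrentTime = currentTime;
  }
}
```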
package/src/controller/gap-controller.ts (continued)

```diff
@@ -47,20 +152,20 @@ export default class GapController extends Logger {
    *
    * @param lastCurrentTime - Previously read playhead position
    */
-  public poll(
-
-
-
-
-  ) {
+  public poll(currentTime: number, lastCurrentTime: number) {
+    const config = this.hls?.config;
+    if (!config) {
+      return;
+    }
     const { media, stalled } = this;
-
     if (!media) {
       return;
     }
-    const {
+    const { seeking } = media;
     const seeked = this.seeking && !seeking;
     const beginSeek = !this.seeking && seeking;
+    const pausedEndedOrHalted =
+      (media.paused && !seeking) || media.ended || media.playbackRate === 0;
 
     this.seeking = seeking;
 
@@ -72,6 +177,14 @@ export default class GapController extends Logger {
       this.moved = true;
       if (!seeking) {
         this.nudgeRetry = 0;
+        // When crossing between buffered video time ranges, but not audio, flush pipeline with seek (Chrome)
+        if (
+          config.nudgeOnVideoHole &&
+          !pausedEndedOrHalted &&
+          currentTime > lastCurrentTime
+        ) {
+          this.nudgeOnVideoHole(currentTime, lastCurrentTime);
+        }
       }
       if (this.waiting === 0) {
         this.stallResolved(currentTime);
@@ -88,7 +201,7 @@ export default class GapController extends Logger {
     }
 
     // The playhead should not be moving
-    if (
+    if (pausedEndedOrHalted) {
       this.nudgeRetry = 0;
       this.stallResolved(currentTime);
       // Fire MEDIA_ENDED to workaround event not being dispatched by browser
@@ -106,28 +219,36 @@ export default class GapController extends Logger {
       return;
     }
 
+    // Resolve stalls at buffer holes using the main buffer, whose ranges are the intersections of the A/V sourcebuffers
     const bufferInfo = BufferHelper.bufferInfo(media, currentTime, 0);
     const nextStart = bufferInfo.nextStart || 0;
     const fragmentTracker = this.fragmentTracker;
 
-    if (seeking && fragmentTracker) {
+    if (seeking && fragmentTracker && this.hls) {
+      // Is there a fragment loading/parsing/appending before currentTime?
+      const inFlightDependency = getInFlightDependency(
+        this.hls.inFlightFragments,
+        currentTime,
+      );
+
       // Waiting for seeking in a buffered range to complete
       const hasEnoughBuffer = bufferInfo.len > MAX_START_GAP_JUMP;
       // Next buffered range is too far ahead to jump to while still seeking
-      const
+      const noBufferHole =
         !nextStart ||
-
+        inFlightDependency ||
         (nextStart - currentTime > MAX_START_GAP_JUMP &&
           !fragmentTracker.getPartialFragment(currentTime));
-      if (hasEnoughBuffer ||
+      if (hasEnoughBuffer || noBufferHole) {
        return;
      }
-      // Reset moved state when seeking to a point in or before a gap
+      // Reset moved state when seeking to a point in or before a gap/hole
      this.moved = false;
    }
 
     // Skip start gaps if we haven't played, but the last poll detected the start of a stall
     // The addition poll gives the browser a chance to jump the gap for us
+    const levelDetails = this.hls?.latestLevelDetails;
     if (!this.moved && this.stalled !== null && fragmentTracker) {
       // There is no playable buffer (seeked, waiting for buffer)
       const isBuffered = bufferInfo.len > 0;
@@ -155,10 +276,6 @@ export default class GapController extends Logger {
     }
 
     // Start tracking stall time
-    const config = this.hls?.config;
-    if (!config) {
-      return;
-    }
     const detectStallWithCurrentTimeMs = config.detectStallWithCurrentTimeMs;
     const tnow = self.performance.now();
     const tWaiting = this.waiting;
@@ -180,7 +297,7 @@ export default class GapController extends Logger {
     ) {
       // Dispatch MEDIA_ENDED when media.ended/ended event is not signalled at end of stream
       if (
-
+        this.mediaSource?.readyState === 'ended' &&
         !levelDetails?.live &&
         Math.abs(currentTime - (levelDetails?.edge || 0)) < 1
       ) {
@@ -215,7 +332,7 @@ export default class GapController extends Logger {
     // The playhead is now moving, but was previously stalled
     if (this.stallReported) {
       const stalledDuration = self.performance.now() - stalled;
-      this.
+      this.log(
         `playback not stuck anymore @${currentTime}, after ${Math.round(
           stalledDuration,
         )}ms`,
```
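`poll()` now consults `config.nudgeOnVideoHole` before calling the new `nudgeOnVideoHole()` routine, and `src/config.ts` is among the files touched in this release. A hedged configuration sketch follows; the values shown, including the assumed default of `true` for `nudgeOnVideoHole`, are illustrative rather than confirmed defaults.

```typescript
import Hls from 'hls.js';

// Sketch: the new gap-controller flag alongside the existing stall-handling
// options referenced in this diff. Values are illustrative assumptions.
const hls = new Hls({
  nudgeOnVideoHole: true, // assumed default; set false to opt out of the hole-flush seek
  detectStallWithCurrentTimeMs: 1250,
  highBufferWatchdogPeriod: 2,
  maxBufferHole: 0.1,
});
hls.attachMedia(document.querySelector('video') as HTMLVideoElement);
```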
package/src/controller/gap-controller.ts (continued)

```diff
@@ -227,6 +344,81 @@ export default class GapController extends Logger {
     }
   }
 
+  private nudgeOnVideoHole(currentTime: number, lastCurrentTime: number) {
+    // Chrome will play one second past a hole in video buffered time ranges without rendering any video from the subsequent range and then stall as long as audio is buffered:
+    // https://github.com/video-dev/hls.js/issues/5631
+    // https://issues.chromium.org/issues/40280613#comment10
+    // Detect the potential for this situation and proactively seek to flush the video pipeline once the playhead passes the start of the video hole.
+    // When there are audio and video buffers and currentTime is past the end of the first video buffered range...
+    const videoSourceBuffered = this.buffered.video;
+    if (
+      this.hls &&
+      this.media &&
+      this.fragmentTracker &&
+      this.buffered.audio?.length &&
+      videoSourceBuffered &&
+      videoSourceBuffered.length > 1 &&
+      currentTime > videoSourceBuffered.end(0)
+    ) {
+      // and audio is buffered at the playhead
+      const audioBufferInfo = BufferHelper.bufferedInfo(
+        BufferHelper.timeRangesToArray(this.buffered.audio),
+        currentTime,
+        0,
+      );
+      if (audioBufferInfo.len > 1 && lastCurrentTime >= audioBufferInfo.start) {
+        const videoTimes = BufferHelper.timeRangesToArray(videoSourceBuffered);
+        const lastBufferedIndex = BufferHelper.bufferedInfo(
+          videoTimes,
+          lastCurrentTime,
+          0,
+        ).bufferedIndex;
+        // nudge when crossing into another video buffered range (hole).
+        if (
+          lastBufferedIndex > -1 &&
+          lastBufferedIndex < videoTimes.length - 1
+        ) {
+          const bufferedIndex = BufferHelper.bufferedInfo(
+            videoTimes,
+            currentTime,
+            0,
+          ).bufferedIndex;
+          const holeStart = videoTimes[lastBufferedIndex].end;
+          const holeEnd = videoTimes[lastBufferedIndex + 1].start;
+          if (
+            (bufferedIndex === -1 || bufferedIndex > lastBufferedIndex) &&
+            holeEnd - holeStart < 1 && // `maxBufferHole` may be too small and setting it to 0 should not disable this feature
+            currentTime - holeStart < 2
+          ) {
+            const error = new Error(
+              `nudging playhead to flush pipeline after video hole. currentTime: ${currentTime} hole: ${holeStart} -> ${holeEnd} buffered index: ${bufferedIndex}`,
+            );
+            this.warn(error.message);
+            // Magic number to flush the pipeline without interuption to audio playback:
+            this.media.currentTime += 0.000001;
+            const frag =
+              this.fragmentTracker.getPartialFragment(currentTime) || undefined;
+            const bufferInfo = BufferHelper.bufferInfo(
+              this.media,
+              currentTime,
+              0,
+            );
+            this.hls.trigger(Events.ERROR, {
+              type: ErrorTypes.MEDIA_ERROR,
+              details: ErrorDetails.BUFFER_SEEK_OVER_HOLE,
+              fatal: false,
+              error,
+              reason: error.message,
+              frag,
+              buffer: bufferInfo.len,
+              bufferInfo,
+            });
+          }
+        }
+      }
+    }
+  }
+
   /**
    * Detects and attempts to fix known buffer stalling issues.
    * @param bufferInfo - The properties of the current buffer.
```
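The heart of the check compares which video buffered range contained `lastCurrentTime` with the one (if any) containing `currentTime`, and only nudges for a sub-second hole the playhead has just entered. Below is a small worked example with made-up ranges, using a simplified stand-in for `BufferHelper.bufferedInfo(...).bufferedIndex`.

```typescript
// Hypothetical buffered video ranges with a 0.5 s hole between them.
type Range = { start: number; end: number };
const videoTimes: Range[] = [
  { start: 0, end: 10.0 },
  { start: 10.5, end: 20.0 },
];

// Index of the range containing a time, or -1 (simplified stand-in for
// BufferHelper.bufferedInfo(...).bufferedIndex).
const indexAt = (t: number) =>
  videoTimes.findIndex((r) => t >= r.start && t < r.end);

const lastCurrentTime = 9.8; // playhead was inside the first range
const currentTime = 10.2; // playhead has crossed into the hole

const lastBufferedIndex = indexAt(lastCurrentTime); // 0
const bufferedIndex = indexAt(currentTime); // -1 (inside the hole)
const holeStart = videoTimes[lastBufferedIndex].end; // 10.0
const holeEnd = videoTimes[lastBufferedIndex + 1].start; // 10.5

// Same shape as the condition above: the playhead crossed past the hole start,
// the hole is under a second wide, and we are within two seconds of it.
const shouldNudge =
  (bufferedIndex === -1 || bufferedIndex > lastBufferedIndex) &&
  holeEnd - holeStart < 1 &&
  currentTime - holeStart < 2;
console.log(shouldNudge); // true
```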
package/src/controller/gap-controller.ts (continued)

```diff
@@ -268,7 +460,8 @@ export default class GapController extends Logger {
         bufferInfo.len > config.maxBufferHole) ||
         (bufferInfo.nextStart &&
           bufferInfo.nextStart - currentTime < config.maxBufferHole)) &&
-      stalledDurationMs > config.highBufferWatchdogPeriod * 1000
+      (stalledDurationMs > config.highBufferWatchdogPeriod * 1000 ||
+        this.waiting)
     ) {
       this.warn('Trying to nudge playhead over buffer-hole');
       // Try to nudge currentTime over a buffer hole if we've been stalling for the configured amount of seconds
@@ -374,7 +567,7 @@ export default class GapController extends Logger {
       );
       this.moved = true;
       media.currentTime = targetTime;
-      if (
+      if (!partial?.gap && this.hls) {
         const error = new Error(
           `fragment loaded with buffer holes, seeking from ${currentTime} to ${targetTime}`,
         );
@@ -384,7 +577,7 @@ export default class GapController extends Logger {
           fatal: false,
           error,
           reason: error.message,
-          frag: partial,
+          frag: partial || undefined,
           buffer: bufferInfo.len,
           bufferInfo,
         });
```
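Both the hole-flush nudge above and this skip-over-hole path surface as non-fatal `BUFFER_SEEK_OVER_HOLE` errors carrying the fragment and buffer details shown in the payloads. A short sketch of observing them from application code using the standard hls.js event wiring; the logging itself is illustrative.

```typescript
import Hls from 'hls.js';

// Sketch: observe the non-fatal gap/hole errors emitted by the paths above.
const hls = new Hls();
hls.on(Hls.Events.ERROR, (_event, data) => {
  if (data.details === Hls.ErrorDetails.BUFFER_SEEK_OVER_HOLE) {
    // The payload includes frag, buffer length, and bufferInfo per the diff above.
    console.log('seeked over buffer hole', data.reason, data.buffer);
  }
});
```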
package/src/controller/gap-controller.ts (continued)

```diff
@@ -440,3 +633,32 @@ export default class GapController extends Logger {
     }
   }
 }
+
+function getInFlightDependency(
+  inFlightFragments: InFlightFragments,
+  currentTime: number,
+): Fragment | null {
+  const main = inFlight(inFlightFragments.main);
+  if (main && main.start <= currentTime) {
+    return main;
+  }
+  const audio = inFlight(inFlightFragments.audio);
+  if (audio && audio.start <= currentTime) {
+    return audio;
+  }
+  return null;
+}
+
+function inFlight(inFlightData: InFlightData | undefined): Fragment | null {
+  if (!inFlightData) {
+    return null;
+  }
+  switch (inFlightData.state) {
+    case State.IDLE:
+    case State.STOPPED:
+    case State.ENDED:
+    case State.ERROR:
+      return null;
+  }
+  return inFlightData.frag;
+}
```
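These module-level helpers filter `hls.inFlightFragments` so that only fragments whose controller is actively loading, parsing, or appending (i.e. not in `IDLE`, `STOPPED`, `ENDED`, or `ERROR`) count as a dependency at the playhead. Below is a hedged sketch of the same check from application code via the public getter; it assumes the per-type entries expose `{ frag, state }` as introduced above and reads only the `main` and `audio` keys that this diff actually uses.

```typescript
import Hls from 'hls.js';

// Sketch: approximate getInFlightDependency() from outside the library using
// the hls.inFlightFragments getter added in this release.
function hasInFlightDependency(hls: Hls, currentTime: number): boolean {
  const idleStates = ['IDLE', 'STOPPED', 'ENDED', 'ERROR'];
  const { main, audio } = hls.inFlightFragments;
  return [main, audio].some((data) => {
    if (!data || !data.frag) {
      return false;
    }
    if (idleStates.includes(data.state)) {
      return false;
    }
    // A fragment starting at or before the playhead is still being worked on.
    return data.frag.start <= currentTime;
  });
}
```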