@zenvor/hls.js 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +28 -0
- package/README.md +472 -0
- package/dist/hls-demo.js +26995 -0
- package/dist/hls-demo.js.map +1 -0
- package/dist/hls.d.mts +4204 -0
- package/dist/hls.d.ts +4204 -0
- package/dist/hls.js +40050 -0
- package/dist/hls.js.d.ts +4204 -0
- package/dist/hls.js.map +1 -0
- package/dist/hls.light.js +27145 -0
- package/dist/hls.light.js.map +1 -0
- package/dist/hls.light.min.js +2 -0
- package/dist/hls.light.min.js.map +1 -0
- package/dist/hls.light.mjs +26392 -0
- package/dist/hls.light.mjs.map +1 -0
- package/dist/hls.min.js +2 -0
- package/dist/hls.min.js.map +1 -0
- package/dist/hls.mjs +38956 -0
- package/dist/hls.mjs.map +1 -0
- package/dist/hls.worker.js +2 -0
- package/dist/hls.worker.js.map +1 -0
- package/package.json +143 -0
- package/src/config.ts +794 -0
- package/src/controller/abr-controller.ts +1019 -0
- package/src/controller/algo-data-controller.ts +794 -0
- package/src/controller/audio-stream-controller.ts +1099 -0
- package/src/controller/audio-track-controller.ts +454 -0
- package/src/controller/base-playlist-controller.ts +438 -0
- package/src/controller/base-stream-controller.ts +2526 -0
- package/src/controller/buffer-controller.ts +2015 -0
- package/src/controller/buffer-operation-queue.ts +159 -0
- package/src/controller/cap-level-controller.ts +367 -0
- package/src/controller/cmcd-controller.ts +422 -0
- package/src/controller/content-steering-controller.ts +622 -0
- package/src/controller/eme-controller.ts +1617 -0
- package/src/controller/error-controller.ts +627 -0
- package/src/controller/fps-controller.ts +146 -0
- package/src/controller/fragment-finders.ts +256 -0
- package/src/controller/fragment-tracker.ts +567 -0
- package/src/controller/gap-controller.ts +719 -0
- package/src/controller/id3-track-controller.ts +488 -0
- package/src/controller/interstitial-player.ts +302 -0
- package/src/controller/interstitials-controller.ts +2895 -0
- package/src/controller/interstitials-schedule.ts +698 -0
- package/src/controller/latency-controller.ts +294 -0
- package/src/controller/level-controller.ts +776 -0
- package/src/controller/stream-controller.ts +1597 -0
- package/src/controller/subtitle-stream-controller.ts +508 -0
- package/src/controller/subtitle-track-controller.ts +617 -0
- package/src/controller/timeline-controller.ts +677 -0
- package/src/crypt/aes-crypto.ts +36 -0
- package/src/crypt/aes-decryptor.ts +339 -0
- package/src/crypt/decrypter-aes-mode.ts +4 -0
- package/src/crypt/decrypter.ts +225 -0
- package/src/crypt/fast-aes-key.ts +39 -0
- package/src/define-plugin.d.ts +17 -0
- package/src/demux/audio/aacdemuxer.ts +126 -0
- package/src/demux/audio/ac3-demuxer.ts +170 -0
- package/src/demux/audio/adts.ts +249 -0
- package/src/demux/audio/base-audio-demuxer.ts +205 -0
- package/src/demux/audio/dolby.ts +21 -0
- package/src/demux/audio/mp3demuxer.ts +85 -0
- package/src/demux/audio/mpegaudio.ts +177 -0
- package/src/demux/chunk-cache.ts +42 -0
- package/src/demux/dummy-demuxed-track.ts +13 -0
- package/src/demux/inject-worker.ts +75 -0
- package/src/demux/mp4demuxer.ts +234 -0
- package/src/demux/sample-aes.ts +198 -0
- package/src/demux/transmuxer-interface.ts +449 -0
- package/src/demux/transmuxer-worker.ts +221 -0
- package/src/demux/transmuxer.ts +560 -0
- package/src/demux/tsdemuxer.ts +1256 -0
- package/src/demux/video/avc-video-parser.ts +401 -0
- package/src/demux/video/base-video-parser.ts +198 -0
- package/src/demux/video/exp-golomb.ts +153 -0
- package/src/demux/video/hevc-video-parser.ts +736 -0
- package/src/empty-es.js +5 -0
- package/src/empty.js +3 -0
- package/src/errors.ts +107 -0
- package/src/events.ts +548 -0
- package/src/exports-default.ts +3 -0
- package/src/exports-named.ts +81 -0
- package/src/hls.ts +1613 -0
- package/src/is-supported.ts +54 -0
- package/src/loader/date-range.ts +207 -0
- package/src/loader/fragment-loader.ts +403 -0
- package/src/loader/fragment.ts +487 -0
- package/src/loader/interstitial-asset-list.ts +162 -0
- package/src/loader/interstitial-event.ts +337 -0
- package/src/loader/key-loader.ts +439 -0
- package/src/loader/level-details.ts +203 -0
- package/src/loader/level-key.ts +259 -0
- package/src/loader/load-stats.ts +17 -0
- package/src/loader/m3u8-parser.ts +1072 -0
- package/src/loader/playlist-loader.ts +839 -0
- package/src/polyfills/number.ts +15 -0
- package/src/remux/aac-helper.ts +81 -0
- package/src/remux/mp4-generator.ts +1380 -0
- package/src/remux/mp4-remuxer.ts +1261 -0
- package/src/remux/passthrough-remuxer.ts +434 -0
- package/src/task-loop.ts +130 -0
- package/src/types/algo.ts +44 -0
- package/src/types/buffer.ts +105 -0
- package/src/types/component-api.ts +20 -0
- package/src/types/demuxer.ts +208 -0
- package/src/types/events.ts +574 -0
- package/src/types/fragment-tracker.ts +23 -0
- package/src/types/level.ts +268 -0
- package/src/types/loader.ts +198 -0
- package/src/types/media-playlist.ts +92 -0
- package/src/types/network-details.ts +3 -0
- package/src/types/remuxer.ts +104 -0
- package/src/types/track.ts +12 -0
- package/src/types/transmuxer.ts +46 -0
- package/src/types/tuples.ts +6 -0
- package/src/types/vtt.ts +11 -0
- package/src/utils/arrays.ts +22 -0
- package/src/utils/attr-list.ts +192 -0
- package/src/utils/binary-search.ts +46 -0
- package/src/utils/buffer-helper.ts +173 -0
- package/src/utils/cea-608-parser.ts +1413 -0
- package/src/utils/chunker.ts +41 -0
- package/src/utils/codecs.ts +314 -0
- package/src/utils/cues.ts +96 -0
- package/src/utils/discontinuities.ts +174 -0
- package/src/utils/encryption-methods-util.ts +21 -0
- package/src/utils/error-helper.ts +95 -0
- package/src/utils/event-listener-helper.ts +16 -0
- package/src/utils/ewma-bandwidth-estimator.ts +97 -0
- package/src/utils/ewma.ts +43 -0
- package/src/utils/fetch-loader.ts +331 -0
- package/src/utils/global.ts +2 -0
- package/src/utils/hash.ts +10 -0
- package/src/utils/hdr.ts +67 -0
- package/src/utils/hex.ts +32 -0
- package/src/utils/imsc1-ttml-parser.ts +261 -0
- package/src/utils/keysystem-util.ts +45 -0
- package/src/utils/level-helper.ts +629 -0
- package/src/utils/logger.ts +120 -0
- package/src/utils/media-option-attributes.ts +49 -0
- package/src/utils/mediacapabilities-helper.ts +301 -0
- package/src/utils/mediakeys-helper.ts +210 -0
- package/src/utils/mediasource-helper.ts +37 -0
- package/src/utils/mp4-tools.ts +1473 -0
- package/src/utils/number.ts +3 -0
- package/src/utils/numeric-encoding-utils.ts +26 -0
- package/src/utils/output-filter.ts +46 -0
- package/src/utils/rendition-helper.ts +505 -0
- package/src/utils/safe-json-stringify.ts +22 -0
- package/src/utils/texttrack-utils.ts +164 -0
- package/src/utils/time-ranges.ts +17 -0
- package/src/utils/timescale-conversion.ts +46 -0
- package/src/utils/utf8-utils.ts +18 -0
- package/src/utils/variable-substitution.ts +105 -0
- package/src/utils/vttcue.ts +384 -0
- package/src/utils/vttparser.ts +497 -0
- package/src/utils/webvtt-parser.ts +166 -0
- package/src/utils/xhr-loader.ts +337 -0
- package/src/version.ts +1 -0
|
@@ -0,0 +1,1597 @@
|
|
|
1
|
+
import BaseStreamController, { State } from './base-stream-controller';
|
|
2
|
+
import { findFragmentByPTS } from './fragment-finders';
|
|
3
|
+
import { FragmentState } from './fragment-tracker';
|
|
4
|
+
import { MAX_START_GAP_JUMP } from './gap-controller';
|
|
5
|
+
import TransmuxerInterface from '../demux/transmuxer-interface';
|
|
6
|
+
import { ErrorDetails } from '../errors';
|
|
7
|
+
import { Events } from '../events';
|
|
8
|
+
import { changeTypeSupported } from '../is-supported';
|
|
9
|
+
import { ElementaryStreamTypes, isMediaFragment } from '../loader/fragment';
|
|
10
|
+
import { PlaylistContextType, PlaylistLevelType } from '../types/loader';
|
|
11
|
+
import { ChunkMetadata } from '../types/transmuxer';
|
|
12
|
+
import { BufferHelper } from '../utils/buffer-helper';
|
|
13
|
+
import { pickMostCompleteCodecName } from '../utils/codecs';
|
|
14
|
+
import {
|
|
15
|
+
addEventListener,
|
|
16
|
+
removeEventListener,
|
|
17
|
+
} from '../utils/event-listener-helper';
|
|
18
|
+
import { useAlternateAudio } from '../utils/rendition-helper';
|
|
19
|
+
import type { FragmentTracker } from './fragment-tracker';
|
|
20
|
+
import type Hls from '../hls';
|
|
21
|
+
import type { Fragment, MediaFragment } from '../loader/fragment';
|
|
22
|
+
import type KeyLoader from '../loader/key-loader';
|
|
23
|
+
import type { LevelDetails } from '../loader/level-details';
|
|
24
|
+
import type {
|
|
25
|
+
BufferCreatedTrack,
|
|
26
|
+
ExtendedSourceBuffer,
|
|
27
|
+
SourceBufferName,
|
|
28
|
+
} from '../types/buffer';
|
|
29
|
+
import type { NetworkComponentAPI } from '../types/component-api';
|
|
30
|
+
import type {
|
|
31
|
+
AudioTrackSwitchedData,
|
|
32
|
+
AudioTrackSwitchingData,
|
|
33
|
+
BufferCodecsData,
|
|
34
|
+
BufferCreatedData,
|
|
35
|
+
BufferEOSData,
|
|
36
|
+
BufferFlushedData,
|
|
37
|
+
ErrorData,
|
|
38
|
+
FragBufferedData,
|
|
39
|
+
FragLoadedData,
|
|
40
|
+
FragParsingMetadataData,
|
|
41
|
+
FragParsingUserdataData,
|
|
42
|
+
LevelLoadedData,
|
|
43
|
+
LevelLoadingData,
|
|
44
|
+
LevelsUpdatedData,
|
|
45
|
+
ManifestParsedData,
|
|
46
|
+
MediaAttachedData,
|
|
47
|
+
MediaDetachingData,
|
|
48
|
+
} from '../types/events';
|
|
49
|
+
import type { Level } from '../types/level';
|
|
50
|
+
import type { Track, TrackSet } from '../types/track';
|
|
51
|
+
import type { TransmuxerResult } from '../types/transmuxer';
|
|
52
|
+
import type { Bufferable, BufferInfo } from '../utils/buffer-helper';
|
|
53
|
+
|
|
54
|
+
/**
 * Tracks whether playback audio comes from an alternate (non-main) audio
 * rendition, and at which stage of the switch the controller currently is.
 */
export const enum AlternateAudio {
  DISABLED = 0, // audio is muxed into / loaded with the main stream
  SWITCHING, // an alternate track was requested but is not active yet
  SWITCHED, // the alternate audio track is fully active
}
|
|
59
|
+
|
|
60
|
+
const TICK_INTERVAL = 100; // how often to tick in ms
|
|
61
|
+
|
|
62
|
+
export default class StreamController
|
|
63
|
+
extends BaseStreamController
|
|
64
|
+
implements NetworkComponentAPI
|
|
65
|
+
{
|
|
66
|
+
  // Whether the AAC/HE-AAC codec string should be swapped on retry — presumably
  // a workaround for decoders that reject one variant; TODO confirm against usage.
  private audioCodecSwap: boolean = false;
  // Index of the level currently being loaded (-1 when none selected yet).
  private level: number = -1;
  // Set when startLoad() is called before levels are known, so loading can
  // resume once the manifest is parsed.
  private _forceStartLoad: boolean = false;
  // True once enough media is buffered to begin playback / seek to start.
  private _hasEnoughToStart: boolean = false;
  // Current alternate-audio state (see AlternateAudio enum).
  private altAudio: AlternateAudio = AlternateAudio.DISABLED;
  // True when the main playlist contains audio only (no video samples).
  private audioOnly: boolean = false;
  // True when segments may not start on a keyframe, so loading an earlier
  // segment (backtracking) might be required.
  private _couldBacktrack: boolean = false;
  // Fragment to re-load when backtracking is needed.
  private _backtrackFragment: Fragment | undefined = undefined;
  // True when both AAC and HE-AAC appear across levels and changeType is
  // unsupported, forcing a codec-switch strategy.
  private audioCodecSwitch: boolean = false;
  // SourceBuffer used for video when audio plays on an alternate track.
  private videoBuffer: ExtendedSourceBuffer | null = null;

  constructor(
    hls: Hls,
    fragmentTracker: FragmentTracker,
    keyLoader: KeyLoader,
  ) {
    // Identify this controller as the MAIN playlist-type stream controller.
    super(
      hls,
      fragmentTracker,
      keyLoader,
      'stream-controller',
      PlaylistLevelType.MAIN,
    );
    this.registerListeners();
  }
|
|
91
|
+
|
|
92
|
+
  /**
   * Subscribes to the hls.js events this controller handles, in addition to
   * the subscriptions made by the base stream controller.
   */
  protected registerListeners() {
    super.registerListeners();
    const { hls } = this;
    hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
    hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this);
    hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
    hls.on(
      Events.FRAG_LOAD_EMERGENCY_ABORTED,
      this.onFragLoadEmergencyAborted,
      this,
    );
    hls.on(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
    hls.on(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
    hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this);
    hls.on(Events.BUFFER_FLUSHED, this.onBufferFlushed, this);
    hls.on(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
    hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  }
|
|
110
|
+
|
|
111
|
+
protected unregisterListeners() {
|
|
112
|
+
super.unregisterListeners();
|
|
113
|
+
const { hls } = this;
|
|
114
|
+
hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
|
|
115
|
+
hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
|
|
116
|
+
hls.off(
|
|
117
|
+
Events.FRAG_LOAD_EMERGENCY_ABORTED,
|
|
118
|
+
this.onFragLoadEmergencyAborted,
|
|
119
|
+
this,
|
|
120
|
+
);
|
|
121
|
+
hls.off(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
|
|
122
|
+
hls.off(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
|
|
123
|
+
hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this);
|
|
124
|
+
hls.off(Events.BUFFER_FLUSHED, this.onBufferFlushed, this);
|
|
125
|
+
hls.off(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
|
|
126
|
+
hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
protected onHandlerDestroying() {
|
|
130
|
+
// @ts-ignore
|
|
131
|
+
this.onMediaPlaying = this.onMediaSeeked = null;
|
|
132
|
+
this.unregisterListeners();
|
|
133
|
+
super.onHandlerDestroying();
|
|
134
|
+
}
|
|
135
|
+
|
|
136
|
+
  /**
   * Starts (or restarts) fragment loading for the main stream.
   *
   * @param startPosition - position (seconds) to start buffering from; -1
   *   means "use lastCurrentTime if set, otherwise the playlist start".
   * @param skipSeekToStartPosition - when true, buffer from startPosition
   *   without seeking the media element there.
   */
  public startLoad(
    startPosition: number,
    skipSeekToStartPosition?: boolean,
  ): void {
    if (this.levels) {
      const { lastCurrentTime, hls } = this;
      // Reset any in-flight loading before (re)starting the tick loop.
      this.stopLoad();
      this.setInterval(TICK_INTERVAL);
      this.level = -1;
      if (!this.startFragRequested) {
        // determine load level
        let startLevel = hls.startLevel;
        if (startLevel === -1) {
          if (hls.config.testBandwidth && this.levels.length > 1) {
            // -1 : guess start Level by doing a bitrate test by loading first fragment of lowest quality level
            startLevel = 0;
            this.bitrateTest = true;
          } else {
            startLevel = hls.firstAutoLevel;
          }
        }
        // set new level to playlist loader : this will trigger start level load
        // hls.nextLoadLevel remains until it is set to a new value or until a new frag is successfully loaded
        hls.nextLoadLevel = startLevel;
        this.level = hls.loadLevel;
        this._hasEnoughToStart = !!skipSeekToStartPosition;
      }
      // if startPosition undefined but lastCurrentTime set, set startPosition to last currentTime
      if (
        lastCurrentTime > 0 &&
        startPosition === -1 &&
        !skipSeekToStartPosition
      ) {
        this.log(
          `Override startPosition with lastCurrentTime @${lastCurrentTime.toFixed(
            3,
          )}`,
        );
        startPosition = lastCurrentTime;
      }
      this.state = State.IDLE;
      // Load positions are kept in the (possibly offset) media timeline.
      this.nextLoadPosition = this.lastCurrentTime =
        startPosition + this.timelineOffset;
      this.startPosition = skipSeekToStartPosition ? -1 : startPosition;
      this.tick();
    } else {
      // Levels not known yet: remember the request and retry after parsing.
      this._forceStartLoad = true;
      this.state = State.STOPPED;
    }
  }
|
|
186
|
+
|
|
187
|
+
  /**
   * Stops fragment loading and clears the pending force-start flag so a
   * deferred startLoad() is not resumed after an explicit stop.
   */
  public stopLoad() {
    this._forceStartLoad = false;
    super.stopLoad();
  }
|
|
191
|
+
|
|
192
|
+
protected doTick() {
|
|
193
|
+
switch (this.state) {
|
|
194
|
+
case State.WAITING_LEVEL: {
|
|
195
|
+
const { levels, level } = this;
|
|
196
|
+
const currentLevel = levels?.[level];
|
|
197
|
+
const details = currentLevel?.details;
|
|
198
|
+
if (
|
|
199
|
+
details &&
|
|
200
|
+
(!details.live ||
|
|
201
|
+
(this.levelLastLoaded === currentLevel &&
|
|
202
|
+
!this.waitForLive(currentLevel)))
|
|
203
|
+
) {
|
|
204
|
+
if (this.waitForCdnTuneIn(details)) {
|
|
205
|
+
break;
|
|
206
|
+
}
|
|
207
|
+
this.state = State.IDLE;
|
|
208
|
+
break;
|
|
209
|
+
} else if (this.hls.nextLoadLevel !== this.level) {
|
|
210
|
+
this.state = State.IDLE;
|
|
211
|
+
break;
|
|
212
|
+
}
|
|
213
|
+
break;
|
|
214
|
+
}
|
|
215
|
+
case State.FRAG_LOADING_WAITING_RETRY:
|
|
216
|
+
this.checkRetryDate();
|
|
217
|
+
break;
|
|
218
|
+
default:
|
|
219
|
+
break;
|
|
220
|
+
}
|
|
221
|
+
if (this.state === State.IDLE) {
|
|
222
|
+
this.doTickIdle();
|
|
223
|
+
}
|
|
224
|
+
this.onTickEnd();
|
|
225
|
+
}
|
|
226
|
+
|
|
227
|
+
protected onTickEnd() {
|
|
228
|
+
super.onTickEnd();
|
|
229
|
+
if (this.media?.readyState && this.media.seeking === false) {
|
|
230
|
+
this.lastCurrentTime = this.media.currentTime;
|
|
231
|
+
}
|
|
232
|
+
this.checkFragmentChanged();
|
|
233
|
+
}
|
|
234
|
+
|
|
235
|
+
  /**
   * Idle-state step of the tick loop: decides whether a new fragment should
   * be loaded, which level to load it from, and which fragment it is —
   * handling end-of-stream, backtracking and loop-load avoidance on the way.
   */
  private doTickIdle() {
    const { hls, levelLastLoaded, levels, media } = this;

    // if start level not parsed yet OR
    // if video not attached AND start fragment already requested OR start frag prefetch not enabled
    // exit loop, as we either need more info (level not parsed) or we need media to be attached to load new fragment
    if (
      levelLastLoaded === null ||
      (!media &&
        !this.primaryPrefetch &&
        (this.startFragRequested || !hls.config.startFragPrefetch))
    ) {
      return;
    }

    // If the "main" level is audio-only but we are loading an alternate track in the same group, do not load anything
    if (this.altAudio && this.audioOnly) {
      return;
    }

    // While buffering, follow ABR via nextLoadLevel; otherwise stick to loadLevel.
    const level = this.buffering ? hls.nextLoadLevel : hls.loadLevel;
    if (!levels?.[level]) {
      return;
    }

    const levelInfo = levels[level];

    // if buffer length is less than maxBufLen try to load a new fragment

    const bufferInfo = this.getMainFwdBufferInfo();
    if (bufferInfo === null) {
      return;
    }

    // End of stream: signal BUFFER_EOS (video-only when audio is alternate).
    const lastDetails = this.getLevelDetails();
    if (lastDetails && this._streamEnded(bufferInfo, lastDetails)) {
      const data: BufferEOSData = {};
      if (this.altAudio === AlternateAudio.SWITCHED) {
        data.type = 'video';
      }

      this.hls.trigger(Events.BUFFER_EOS, data);
      this.state = State.ENDED;
      return;
    }
    if (!this.buffering) {
      return;
    }

    // set next load level : this will trigger a playlist load if needed
    if (hls.loadLevel !== level && hls.manualLevel === -1) {
      this.log(`Adapting to level ${level} from level ${this.level}`);
    }
    this.level = hls.nextLoadLevel = level;

    const levelDetails = levelInfo.details;
    // if level info not retrieved yet, switch state and wait for level retrieval
    // if live playlist, ensure that new playlist has been refreshed to avoid loading/try to load
    // a useless and outdated fragment (that might even introduce load error if it is already out of the live playlist)
    if (
      !levelDetails ||
      this.state === State.WAITING_LEVEL ||
      this.waitForLive(levelInfo)
    ) {
      this.level = level;
      this.state = State.WAITING_LEVEL;
      this.startFragRequested = false;
      return;
    }

    const bufferLen = bufferInfo.len;

    // compute max Buffer Length that we could get from this load level, based on level bitrate. don't buffer more than 60 MB and more than 30s
    const maxBufLen = this.getMaxBufferLength(levelInfo.maxBitrate);

    // Stay idle if we are still with buffer margins
    if (bufferLen >= maxBufLen) {
      return;
    }

    // Drop a stale backtrack target that is ahead of the buffered range.
    if (
      this.backtrackFragment &&
      this.backtrackFragment.start > bufferInfo.end
    ) {
      this.backtrackFragment = undefined;
    }
    const targetBufferTime = this.backtrackFragment
      ? this.backtrackFragment.start
      : bufferInfo.end;
    let frag = this.getNextFragment(targetBufferTime, levelDetails);
    // Avoid backtracking by loading an earlier segment in streams with segments that do not start with a key frame (flagged by `couldBacktrack`)
    if (
      this.couldBacktrack &&
      !this.fragPrevious &&
      frag &&
      isMediaFragment(frag) &&
      this.fragmentTracker.getState(frag) !== FragmentState.OK
    ) {
      const backtrackSn = (this.backtrackFragment ?? frag).sn as number;
      const fragIdx = backtrackSn - levelDetails.startSN;
      const backtrackFrag = levelDetails.fragments[fragIdx - 1];
      // Only backtrack within the same discontinuity (cc) range.
      if ((backtrackFrag as any) && frag.cc === backtrackFrag.cc) {
        frag = backtrackFrag;
        this.fragmentTracker.removeFragment(backtrackFrag);
      }
    } else if (this.backtrackFragment && bufferInfo.len) {
      this.backtrackFragment = undefined;
    }
    // Avoid loop loading by using nextLoadPosition set for backtracking and skipping consecutive GAP tags
    if (frag && this.isLoopLoading(frag, targetBufferTime)) {
      const gapStart = frag.gap;
      if (!gapStart) {
        // Cleanup the fragment tracker before trying to find the next unbuffered fragment
        const type =
          this.audioOnly && !this.altAudio
            ? ElementaryStreamTypes.AUDIO
            : ElementaryStreamTypes.VIDEO;
        const mediaBuffer =
          (type === ElementaryStreamTypes.VIDEO
            ? this.videoBuffer
            : this.mediaBuffer) || this.media;
        if (mediaBuffer) {
          this.afterBufferFlushed(mediaBuffer, type);
        }
      }
      frag = this.getNextFragmentLoopLoading(
        frag,
        levelDetails,
        bufferInfo,
        PlaylistLevelType.MAIN,
        maxBufLen,
      );
    }
    if (!frag) {
      return;
    }
    // Load the init segment first when it has not been fetched yet.
    if (frag.initSegment && !frag.initSegment.data && !this.bitrateTest) {
      frag = frag.initSegment;
    }

    this.loadFragment(frag, levelInfo, targetBufferTime);
  }
|
|
377
|
+
|
|
378
|
+
protected loadFragment(
|
|
379
|
+
frag: Fragment,
|
|
380
|
+
level: Level,
|
|
381
|
+
targetBufferTime: number,
|
|
382
|
+
) {
|
|
383
|
+
// Check if fragment is not loaded
|
|
384
|
+
const fragState = this.fragmentTracker.getState(frag);
|
|
385
|
+
if (
|
|
386
|
+
fragState === FragmentState.NOT_LOADED ||
|
|
387
|
+
fragState === FragmentState.PARTIAL
|
|
388
|
+
) {
|
|
389
|
+
if (!isMediaFragment(frag)) {
|
|
390
|
+
this._loadInitSegment(frag, level);
|
|
391
|
+
} else if (this.bitrateTest) {
|
|
392
|
+
this.log(
|
|
393
|
+
`Fragment ${frag.sn} of level ${frag.level} is being downloaded to test bitrate and will not be buffered`,
|
|
394
|
+
);
|
|
395
|
+
this._loadBitrateTestFrag(frag, level);
|
|
396
|
+
} else {
|
|
397
|
+
super.loadFragment(frag, level, targetBufferTime);
|
|
398
|
+
}
|
|
399
|
+
} else {
|
|
400
|
+
this.clearTrackerIfNeeded(frag);
|
|
401
|
+
}
|
|
402
|
+
}
|
|
403
|
+
|
|
404
|
+
  /**
   * On immediate level switch:
   *  - pause playback if playing
   *  - cancel any pending load request
   *  - and trigger a buffer flush
   */
  public immediateLevelSwitch() {
    this.abortCurrentFrag();
    this.flushMainBuffer(0, Number.POSITIVE_INFINITY);
  }
|
|
414
|
+
|
|
415
|
+
/**
|
|
416
|
+
* Get the buffer output to use for buffer calculations.
|
|
417
|
+
* Override to use altAudio logic in stream-controller.
|
|
418
|
+
*/
|
|
419
|
+
protected getBufferOutput(): Bufferable | null {
|
|
420
|
+
if (this.mediaBuffer && this.altAudio === AlternateAudio.SWITCHED) {
|
|
421
|
+
return this.mediaBuffer;
|
|
422
|
+
}
|
|
423
|
+
return this.media;
|
|
424
|
+
}
|
|
425
|
+
|
|
426
|
+
protected checkFragmentChanged(): boolean {
|
|
427
|
+
const previousFrag = this.fragPlaying;
|
|
428
|
+
const fragChanged = super.checkFragmentChanged();
|
|
429
|
+
if (!fragChanged) {
|
|
430
|
+
return false;
|
|
431
|
+
}
|
|
432
|
+
|
|
433
|
+
const fragPlaying = this.fragPlaying;
|
|
434
|
+
if (fragPlaying) {
|
|
435
|
+
const fragCurrentLevel = fragPlaying.level;
|
|
436
|
+
this.hls.trigger(Events.FRAG_CHANGED, { frag: fragPlaying });
|
|
437
|
+
if (previousFrag?.level !== fragCurrentLevel) {
|
|
438
|
+
this.hls.trigger(Events.LEVEL_SWITCHED, {
|
|
439
|
+
level: fragCurrentLevel,
|
|
440
|
+
});
|
|
441
|
+
}
|
|
442
|
+
}
|
|
443
|
+
return true;
|
|
444
|
+
}
|
|
445
|
+
|
|
446
|
+
  /**
   * Get backtrack fragment. Override to return actual backtrack fragment.
   */
  protected get backtrackFragment(): Fragment | undefined {
    return this._backtrackFragment;
  }
|
|
452
|
+
|
|
453
|
+
  /**
   * Set backtrack fragment. Override to set actual backtrack fragment.
   */
  protected set backtrackFragment(value: Fragment | undefined) {
    this._backtrackFragment = value;
  }
|
|
459
|
+
|
|
460
|
+
  /**
   * Get could backtrack flag. Override to return actual value.
   */
  protected get couldBacktrack(): boolean {
    return this._couldBacktrack;
  }
|
|
466
|
+
|
|
467
|
+
  /**
   * Set could backtrack flag. Override to set actual value.
   */
  protected set couldBacktrack(value: boolean) {
    this._couldBacktrack = value;
  }
|
|
473
|
+
|
|
474
|
+
  /**
   * Aborts the fragment currently loading and clears any pending backtrack
   * target before delegating to the base implementation.
   */
  protected abortCurrentFrag(): void {
    this.backtrackFragment = undefined;
    super.abortCurrentFrag();
  }
|
|
478
|
+
|
|
479
|
+
protected flushMainBuffer(startOffset: number, endOffset: number) {
|
|
480
|
+
super.flushMainBuffer(
|
|
481
|
+
startOffset,
|
|
482
|
+
endOffset,
|
|
483
|
+
this.altAudio === AlternateAudio.SWITCHED ? 'video' : null,
|
|
484
|
+
);
|
|
485
|
+
}
|
|
486
|
+
|
|
487
|
+
protected onMediaAttached(
|
|
488
|
+
event: Events.MEDIA_ATTACHED,
|
|
489
|
+
data: MediaAttachedData,
|
|
490
|
+
) {
|
|
491
|
+
super.onMediaAttached(event, data);
|
|
492
|
+
const media = data.media;
|
|
493
|
+
addEventListener(media, 'playing', this.onMediaPlaying);
|
|
494
|
+
addEventListener(media, 'seeked', this.onMediaSeeked);
|
|
495
|
+
}
|
|
496
|
+
|
|
497
|
+
protected onMediaDetaching(
|
|
498
|
+
event: Events.MEDIA_DETACHING,
|
|
499
|
+
data: MediaDetachingData,
|
|
500
|
+
) {
|
|
501
|
+
const { media } = this;
|
|
502
|
+
if (media) {
|
|
503
|
+
removeEventListener(media, 'playing', this.onMediaPlaying);
|
|
504
|
+
removeEventListener(media, 'seeked', this.onMediaSeeked);
|
|
505
|
+
}
|
|
506
|
+
this.videoBuffer = null;
|
|
507
|
+
super.onMediaDetaching(event, data);
|
|
508
|
+
const transferringMedia = !!data.transferMedia;
|
|
509
|
+
if (transferringMedia) {
|
|
510
|
+
return;
|
|
511
|
+
}
|
|
512
|
+
this._hasEnoughToStart = false;
|
|
513
|
+
}
|
|
514
|
+
|
|
515
|
+
  // Bound as a DOM 'playing' listener; arrow function keeps `this` lexical.
  private onMediaPlaying = () => {
    // tick to speed up FRAG_CHANGED triggering
    this.tick();
  };
|
|
519
|
+
|
|
520
|
+
  // Bound as a DOM 'seeked' listener; ticks immediately only when the seek
  // target is already buffered, so FRAG_CHANGED fires without waiting for the
  // next interval tick.
  private onMediaSeeked = () => {
    const media = this.media;
    const currentTime = media ? media.currentTime : null;
    if (currentTime === null || !Number.isFinite(currentTime)) {
      return;
    }

    this.log(`Media seeked to ${currentTime.toFixed(3)}`);

    // If seeked was issued before buffer was appended do not tick immediately
    if (!this.getBufferedFrag(currentTime)) {
      return;
    }
    const bufferInfo = this.getFwdBufferInfoAtPos(
      media,
      currentTime,
      PlaylistLevelType.MAIN,
      0,
    );
    if (bufferInfo === null || bufferInfo.len === 0) {
      // NOTE(review): the trailing `)` in this message has no matching `(` —
      // looks like a typo in the log text; left as-is (runtime string).
      this.warn(
        `Main forward buffer length at ${currentTime} on "seeked" event ${
          bufferInfo ? bufferInfo.len : 'empty'
        })`,
      );
      return;
    }

    // tick to speed up FRAG_CHANGED triggering
    this.tick();
  };
|
|
551
|
+
|
|
552
|
+
  /**
   * Resets stream state when a new manifest starts loading: clears buffers
   * via BUFFER_RESET and drops backtracking / alternate-audio bookkeeping.
   */
  protected onManifestLoading() {
    super.onManifestLoading();
    // reset buffer on manifest loading
    this.log('Trigger BUFFER_RESET');
    this.hls.trigger(Events.BUFFER_RESET, undefined);
    this.couldBacktrack = false;
    this.backtrackFragment = undefined;
    this.altAudio = AlternateAudio.DISABLED;
    this.audioOnly = false;
  }
|
|
562
|
+
|
|
563
|
+
private onManifestParsed(
|
|
564
|
+
event: Events.MANIFEST_PARSED,
|
|
565
|
+
data: ManifestParsedData,
|
|
566
|
+
) {
|
|
567
|
+
// detect if we have different kind of audio codecs used amongst playlists
|
|
568
|
+
let aac = false;
|
|
569
|
+
let heaac = false;
|
|
570
|
+
for (let i = 0; i < data.levels.length; i++) {
|
|
571
|
+
const codec = data.levels[i].audioCodec;
|
|
572
|
+
if (codec) {
|
|
573
|
+
aac = aac || codec.indexOf('mp4a.40.2') !== -1;
|
|
574
|
+
heaac = heaac || codec.indexOf('mp4a.40.5') !== -1;
|
|
575
|
+
}
|
|
576
|
+
}
|
|
577
|
+
this.audioCodecSwitch = aac && heaac && !changeTypeSupported();
|
|
578
|
+
if (this.audioCodecSwitch) {
|
|
579
|
+
this.log(
|
|
580
|
+
'Both AAC/HE-AAC audio found in levels; declaring level codec as HE-AAC',
|
|
581
|
+
);
|
|
582
|
+
}
|
|
583
|
+
|
|
584
|
+
this.levels = data.levels;
|
|
585
|
+
this.startFragRequested = false;
|
|
586
|
+
}
|
|
587
|
+
|
|
588
|
+
private onLevelLoading(event: Events.LEVEL_LOADING, data: LevelLoadingData) {
|
|
589
|
+
const { levels } = this;
|
|
590
|
+
if (!levels || this.state !== State.IDLE) {
|
|
591
|
+
return;
|
|
592
|
+
}
|
|
593
|
+
const level = data.levelInfo;
|
|
594
|
+
if (
|
|
595
|
+
!level.details ||
|
|
596
|
+
(level.details.live &&
|
|
597
|
+
(this.levelLastLoaded !== level || level.details.expired)) ||
|
|
598
|
+
this.waitForCdnTuneIn(level.details)
|
|
599
|
+
) {
|
|
600
|
+
this.state = State.WAITING_LEVEL;
|
|
601
|
+
}
|
|
602
|
+
}
|
|
603
|
+
|
|
604
|
+
  /**
   * Applies freshly loaded level details: aborts a mismatched in-flight
   * fragment load, aligns live playlists, stores the details on the level,
   * emits LEVEL_UPDATED, and resumes loading (syncing to the live edge when
   * appropriate).
   */
  private onLevelLoaded(event: Events.LEVEL_LOADED, data: LevelLoadedData) {
    const { levels, startFragRequested } = this;
    const newLevelId = data.level;
    const newDetails = data.details;
    const duration = newDetails.totalduration;

    if (!levels) {
      this.warn(`Levels were reset while loading level ${newLevelId}`);
      return;
    }
    this.log(
      `Level ${newLevelId} loaded [${newDetails.startSN},${newDetails.endSN}]${
        newDetails.lastPartSn
          ? `[part-${newDetails.lastPartSn}-${newDetails.lastPartIndex}]`
          : ''
      }, cc [${newDetails.startCC}, ${newDetails.endCC}] duration:${duration}`,
    );

    const curLevel = data.levelInfo;
    const fragCurrent = this.fragCurrent;
    // A fragment loading from a different level than the one just loaded is
    // no longer useful; abort it.
    if (
      fragCurrent &&
      (this.state === State.FRAG_LOADING ||
        this.state === State.FRAG_LOADING_WAITING_RETRY)
    ) {
      if (fragCurrent.level !== data.level && fragCurrent.loader) {
        this.abortCurrentFrag();
      }
    }

    let sliding = 0;
    if (newDetails.live || curLevel.details?.live) {
      this.checkLiveUpdate(newDetails);
      if (newDetails.deltaUpdateFailed) {
        return;
      }
      // Align the new playlist's timeline with previously loaded details.
      sliding = this.alignPlaylists(
        newDetails,
        curLevel.details,
        this.levelLastLoaded?.details,
      );
    }
    // override level info
    curLevel.details = newDetails;
    this.levelLastLoaded = curLevel;

    if (!startFragRequested) {
      this.setStartPosition(newDetails, sliding);
    }

    this.hls.trigger(Events.LEVEL_UPDATED, {
      details: newDetails,
      level: newLevelId,
    });

    // only switch back to IDLE state if we were waiting for level to start downloading a new fragment
    if (this.state === State.WAITING_LEVEL) {
      if (this.waitForCdnTuneIn(newDetails)) {
        // Wait for Low-Latency CDN Tune-in
        return;
      }
      this.state = State.IDLE;
    }

    if (startFragRequested && newDetails.live) {
      this.synchronizeToLiveEdge(newDetails);
    }

    // trigger handler right now
    this.tick();
  }
|
|
675
|
+
|
|
676
|
+
/**
 * Moves playback back toward the live sync position when the play-head has
 * fallen outside the live sliding window or behind the max-latency bound.
 * May seek the media element and/or adjust nextLoadPosition.
 */
private synchronizeToLiveEdge(levelDetails: LevelDetails) {
  const { config, media } = this;
  if (!media) {
    return;
  }
  const liveSyncPosition = this.hls.liveSyncPosition;
  const currentTime = this.getLoadPosition();
  const start = levelDetails.fragmentStart;
  const end = levelDetails.edge;
  // Within the playlist's sliding window (with a small tolerance below the
  // first fragment start).
  const withinSlidingWindow =
    currentTime >= start - config.maxFragLookUpTolerance &&
    currentTime <= end;
  // Continue if we can seek forward to sync position or if current time is outside of sliding window
  if (
    liveSyncPosition !== null &&
    media.duration > liveSyncPosition &&
    (currentTime < liveSyncPosition || !withinSlidingWindow)
  ) {
    // Continue if buffer is starving or if current time is behind max latency
    const maxLatency =
      config.liveMaxLatencyDuration !== undefined
        ? config.liveMaxLatencyDuration
        : config.liveMaxLatencyDurationCount * levelDetails.targetduration;
    if (
      (!withinSlidingWindow && media.readyState < 4) ||
      currentTime < end - maxLatency
    ) {
      // Before playback has started, steer loading toward the sync position.
      if (!this._hasEnoughToStart) {
        this.nextLoadPosition = liveSyncPosition;
      }
      // Only seek if ready and there is not a significant forward buffer available for playback
      if (media.readyState) {
        this.warn(
          `Playback: ${currentTime.toFixed(
            3,
          )} is located too far from the end of live sliding playlist: ${end}, reset currentTime to : ${liveSyncPosition.toFixed(
            3,
          )}`,
        );

        if (this.config.liveSyncMode === 'buffered') {
          // 'buffered' mode: only jump to positions that are already (or
          // about to be) buffered, to avoid seeking into a hole.
          const bufferInfo = BufferHelper.bufferInfo(
            media,
            liveSyncPosition,
            0,
          );

          // Nothing buffered around the sync position: seek directly.
          if (!bufferInfo.buffered?.length) {
            media.currentTime = liveSyncPosition;
            return;
          }

          // The buffered range containing the sync position also covers the
          // current play-head: safe to jump straight to the sync position.
          const isLiveSyncInBuffer = bufferInfo.start <= currentTime;

          if (isLiveSyncInBuffer) {
            media.currentTime = liveSyncPosition;
            return;
          }

          // Otherwise advance only to the start of the next buffered range.
          const { nextStart } = BufferHelper.bufferedInfo(
            bufferInfo.buffered,
            currentTime,
            0,
          );
          if (nextStart) {
            media.currentTime = nextStart;
          }
        } else {
          // Default mode: seek straight to the live sync position.
          media.currentTime = liveSyncPosition;
        }
      }
    }
  }
}
|
|
750
|
+
|
|
751
|
+
/**
 * Feeds loaded fragment bytes into the transmuxer as they arrive.
 * Validates that levels/details are still current, lazily creates the
 * main transmuxer, and pushes the payload with its chunk metadata.
 *
 * Fix: dropped the meaningless `as any` cast in the `currentLevel` guard
 * (`!currentLevel as any` parses as `(!currentLevel) as any`, so the cast
 * only defeated type checking without changing behavior).
 */
protected _handleFragmentLoadProgress(data: FragLoadedData) {
  const frag = data.frag as MediaFragment;
  const { part, payload } = data;
  const { levels } = this;
  // Levels can be torn down while the fragment request is in flight.
  if (!levels) {
    this.warn(
      `Levels were reset while fragment load was in progress. Fragment ${frag.sn} of level ${frag.level} will not be buffered`,
    );
    return;
  }
  const currentLevel = levels[frag.level];
  if (!currentLevel) {
    this.warn(`Level ${frag.level} not found on progress`);
    return;
  }
  const details = currentLevel.details;
  // Details were reset (e.g. on live purge): this fragment can no longer be
  // placed on the timeline, so drop it from the tracker.
  if (!details) {
    this.warn(
      `Dropping fragment ${frag.sn} of level ${frag.level} after level details were reset`,
    );
    this.fragmentTracker.removeFragment(frag);
    return;
  }
  const videoCodec = currentLevel.videoCodec;

  // time Offset is accurate if level PTS is known, or if playlist is not sliding (not live)
  const accurateTimeOffset = details.PTSKnown || !details.live;
  const initSegmentData = frag.initSegment?.data;
  const audioCodec = this._getAudioCodec(currentLevel);

  // transmux the MPEG-TS data to ISO-BMFF segments
  // this.log(`Transmuxing ${frag.sn} of [${details.startSN} ,${details.endSN}],level ${frag.level}, cc ${frag.cc}`);
  // Lazily create the transmuxer on first use and keep reusing it.
  const transmuxer = (this.transmuxer =
    this.transmuxer ||
    new TransmuxerInterface(
      this.hls,
      PlaylistLevelType.MAIN,
      this._handleTransmuxComplete.bind(this),
      this._handleTransmuxerFlush.bind(this),
    ));
  // partIndex of -1 marks a whole-fragment (non low-latency part) push.
  const partIndex = part ? part.index : -1;
  const partial = partIndex !== -1;
  const chunkMeta = new ChunkMetadata(
    frag.level,
    frag.sn,
    frag.stats.chunkCount,
    payload.byteLength,
    partIndex,
    partial,
  );
  const initPTS = this.initPTS[frag.cc];

  transmuxer.push(
    payload,
    initSegmentData,
    audioCodec,
    videoCodec,
    frag,
    part,
    details.totalduration,
    accurateTimeOffset,
    chunkMeta,
    initPTS,
  );
}
|
|
816
|
+
|
|
817
|
+
/**
 * Handles an audio-track switch request. Switching to main (muxed) audio
 * requires re-driving scheduling from media.buffered, cancelling any
 * in-flight fragment, and — when coming from alt audio — flushing the
 * whole buffer before signalling AUDIO_TRACK_SWITCHED. Switching to an
 * alternate track is handled by the audio-stream-controller.
 *
 * Fix: removed the meaningless `as any` cast in the BUFFER_FLUSHED
 * callback guard (`!this.hls as any` only cast the boolean result and
 * defeated type checking without changing behavior).
 */
private onAudioTrackSwitching(
  event: Events.AUDIO_TRACK_SWITCHING,
  data: AudioTrackSwitchingData,
) {
  const hls = this.hls;
  // if any URL found on new audio track, it is an alternate audio track
  const fromAltAudio = this.altAudio !== AlternateAudio.DISABLED;
  const altAudio = useAlternateAudio(data.url, hls);
  // if we switch on main audio, ensure that main fragment scheduling is synced with media.buffered
  // don't do anything if we switch to alt audio: audio stream controller is handling it.
  // we will just have to change buffer scheduling on audioTrackSwitched
  if (!altAudio) {
    if (this.mediaBuffer !== this.media) {
      this.log(
        'Switching on main audio, use media.buffered to schedule main fragment loading',
      );
      this.mediaBuffer = this.media;
      const fragCurrent = this.fragCurrent;
      // we need to refill audio buffer from main: cancel any frag loading to speed up audio switch
      if (fragCurrent) {
        this.log('Switching to main audio track, cancel main fragment load');
        fragCurrent.abortRequests();
        this.fragmentTracker.removeFragment(fragCurrent);
      }
      // destroy transmuxer to force init segment generation (following audio switch)
      this.resetTransmuxer();
      // switch to IDLE state to load new fragment
      this.resetLoadingState();
    } else if (this.audioOnly) {
      // Reset audio transmuxer so when switching back to main audio we're not still appending where we left off
      this.resetTransmuxer();
    }
    // If switching from alt to main audio, flush all audio and trigger track switched
    if (fromAltAudio) {
      this.altAudio = AlternateAudio.DISABLED;
      this.fragmentTracker.removeAllFragments();
      hls.once(Events.BUFFER_FLUSHED, () => {
        // The instance may have been destroyed while the flush completed.
        if (!this.hls) {
          return;
        }
        this.hls.trigger(Events.AUDIO_TRACK_SWITCHED, data);
      });
      hls.trigger(Events.BUFFER_FLUSHING, {
        startOffset: 0,
        endOffset: Number.POSITIVE_INFINITY,
        type: null,
      });
      return;
    }
    hls.trigger(Events.AUDIO_TRACK_SWITCHED, data);
  } else {
    this.altAudio = AlternateAudio.SWITCHING;
  }
}
|
|
871
|
+
|
|
872
|
+
/**
 * Finalizes an audio-track switch: when an alternate track became active,
 * main fragment scheduling is driven by the video SourceBuffer; records the
 * resulting alt-audio state and re-ticks the scheduler.
 */
private onAudioTrackSwitched(
  event: Events.AUDIO_TRACK_SWITCHED,
  data: AudioTrackSwitchedData,
) {
  const switchedToAltAudio = useAlternateAudio(data.url, this.hls);
  if (switchedToAltAudio) {
    // if we switched on alternate audio, ensure that main fragment scheduling is synced with video sourcebuffer buffered
    const videoBuffer = this.videoBuffer;
    if (videoBuffer && this.mediaBuffer !== videoBuffer) {
      this.log(
        'Switching on alternate audio, use video.buffered to schedule main fragment loading',
      );
      this.mediaBuffer = videoBuffer;
    }
  }
  if (switchedToAltAudio) {
    this.altAudio = AlternateAudio.SWITCHED;
  } else {
    this.altAudio = AlternateAudio.DISABLED;
  }
  this.tick();
}
|
|
892
|
+
|
|
893
|
+
/**
 * Chooses which buffer drives main fragment scheduling once SourceBuffers
 * are created: the 'main' track's buffer when an alternate track exists,
 * otherwise the media element itself. Also caches the video SourceBuffer.
 */
private onBufferCreated(
  event: Events.BUFFER_CREATED,
  data: BufferCreatedData,
) {
  const tracks = data.tracks;
  let mainTrack: BufferCreatedTrack | undefined;
  let mainTrackName: string | undefined;
  let hasAlternate = false;
  for (const type in tracks) {
    const track: BufferCreatedTrack = tracks[type];
    if (track.id !== 'main') {
      hasAlternate = true;
      continue;
    }
    mainTrackName = type;
    mainTrack = track;
    // keep video source buffer reference
    if (type === 'video') {
      const videoTrack = tracks[type];
      if (videoTrack) {
        this.videoBuffer = videoTrack.buffer;
      }
    }
  }
  if (hasAlternate && mainTrack) {
    this.log(
      `Alternate track found, use ${mainTrackName}.buffered to schedule main fragment loading`,
    );
    this.mediaBuffer = mainTrack.buffer;
  } else {
    this.mediaBuffer = this.media;
  }
}
|
|
926
|
+
|
|
927
|
+
/**
 * Bookkeeping after a fragment finishes buffering: records fragPrevious,
 * detects aborted/stale buffering, latches the has-enough-to-start flag,
 * performs the initial seek, and re-ticks the scheduler.
 */
private onFragBuffered(event: Events.FRAG_BUFFERED, data: FragBufferedData) {
  const { frag, part } = data;
  const bufferedMainFragment = frag.type === PlaylistLevelType.MAIN;
  if (bufferedMainFragment) {
    if (this.fragContextChanged(frag)) {
      // If a level switch was requested while a fragment was buffering, it will emit the FRAG_BUFFERED event upon completion
      // Avoid setting state back to IDLE, since that will interfere with a level switch
      this.warn(
        `Fragment ${frag.sn}${part ? ' p: ' + part.index : ''} of level ${
          frag.level
        } finished buffering, but was aborted. state: ${this.state}`,
      );
      if (this.state === State.PARSED) {
        this.state = State.IDLE;
      }
      return;
    }
    // Init segments do not advance the media timeline, so only media
    // fragments become the "previous" fragment for forward scheduling.
    if (isMediaFragment(frag)) {
      this.fragPrevious = frag;
    }
    this.fragBufferedComplete(frag, part);
  }

  const media = this.media;
  if (!media) {
    return;
  }
  // First buffered data: playback can start — perform the start-position seek
  // exactly once.
  if (!this._hasEnoughToStart && BufferHelper.getBuffered(media).length) {
    this._hasEnoughToStart = true;
    this.seekToStartPos();
  }
  if (bufferedMainFragment) {
    this.tick();
  }
}
|
|
962
|
+
|
|
963
|
+
/**
 * True once at least one fragment has been buffered and the initial
 * start-position seek has been performed (see onFragBuffered).
 */
public get hasEnoughToStart(): boolean {
  return this._hasEnoughToStart;
}
|
|
966
|
+
|
|
967
|
+
/**
 * Routes non-fatal errors to the appropriate recovery path; fatal errors
 * put the controller into the terminal ERROR state.
 */
protected onError(event: Events.ERROR, data: ErrorData) {
  if (data.fatal) {
    this.state = State.ERROR;
    return;
  }
  switch (data.details) {
    // Fragment/key errors share a common retry/penalty path.
    case ErrorDetails.FRAG_GAP:
    case ErrorDetails.FRAG_PARSING_ERROR:
    case ErrorDetails.FRAG_DECRYPT_ERROR:
    case ErrorDetails.FRAG_LOAD_ERROR:
    case ErrorDetails.FRAG_LOAD_TIMEOUT:
    case ErrorDetails.KEY_LOAD_ERROR:
    case ErrorDetails.KEY_LOAD_TIMEOUT:
      this.onFragmentOrKeyLoadError(PlaylistLevelType.MAIN, data);
      break;
    case ErrorDetails.LEVEL_LOAD_ERROR:
    case ErrorDetails.LEVEL_LOAD_TIMEOUT:
    case ErrorDetails.LEVEL_PARSING_ERROR:
      // in case of non fatal error while loading level, if level controller is not retrying to load level, switch back to IDLE
      if (
        !data.levelRetry &&
        this.state === State.WAITING_LEVEL &&
        data.context?.type === PlaylistContextType.LEVEL
      ) {
        this.state = State.IDLE;
      }
      break;
    case ErrorDetails.BUFFER_ADD_CODEC_ERROR:
    case ErrorDetails.BUFFER_APPEND_ERROR:
      // Only react to append errors on the main buffer; audio/subtitle
      // controllers handle their own.
      if (data.parent !== 'main') {
        return;
      }
      if (this.reduceLengthAndFlushBuffer(data)) {
        this.resetLoadingState();
      }
      break;
    case ErrorDetails.BUFFER_FULL_ERROR:
      if (data.parent !== 'main') {
        return;
      }
      if (this.reduceLengthAndFlushBuffer(data)) {
        // Interstitial asset players must not flush the primary buffer.
        const isAssetPlayer =
          !this.config.interstitialsController && this.config.assetPlayerId;
        if (isAssetPlayer) {
          // Use currentTime in buffer estimate to prevent loading more until playback advances
          this._hasEnoughToStart = true;
        } else {
          this.flushMainBuffer(0, Number.POSITIVE_INFINITY);
        }
      }
      break;
    case ErrorDetails.INTERNAL_EXCEPTION:
      // Likely a worker crash: attempt transmuxer recovery.
      this.recoverWorkerError(data);
      break;
    default:
      break;
  }
}
|
|
1025
|
+
|
|
1026
|
+
/**
 * Handles an ABR emergency switch-down that aborted the in-flight fragment:
 * returns to IDLE so a lower-level fragment can be requested immediately.
 */
private onFragLoadEmergencyAborted() {
  this.state = State.IDLE;
  // if loadedmetadata is not set, it means that we are emergency switch down on first frag
  // in that case, reset startFragRequested flag
  if (!this._hasEnoughToStart) {
    this.startFragRequested = false;
    // Resume loading from the last known playback position.
    this.nextLoadPosition = this.lastCurrentTime;
  }
  // Re-enter the scheduler without waiting for the next timer tick.
  this.tickImmediate();
}
|
|
1036
|
+
|
|
1037
|
+
/**
 * Updates fragment-tracker state after a buffer flush and re-ticks the
 * scheduler. Audio flushes are ignored while an alternate audio track is
 * active (the audio-stream-controller owns those).
 */
private onBufferFlushed(
  event: Events.BUFFER_FLUSHED,
  { type }: BufferFlushedData,
) {
  if (type === ElementaryStreamTypes.AUDIO && this.altAudio) {
    return;
  }
  // Pick the buffer that was flushed: the video SourceBuffer for video
  // flushes, otherwise the scheduling buffer, falling back to the media
  // element.
  let flushedBuffer =
    type === ElementaryStreamTypes.VIDEO ? this.videoBuffer : this.mediaBuffer;
  if (!flushedBuffer) {
    flushedBuffer = this.media;
  }
  if (flushedBuffer) {
    this.afterBufferFlushed(flushedBuffer, type);
    this.tick();
  }
}
|
|
1052
|
+
|
|
1053
|
+
/**
 * Re-maps the current level index after the levels array changed (levels
 * removed/reordered), resetting loading state if the in-flight fragment's
 * level no longer exists.
 */
private onLevelsUpdated(
  event: Events.LEVELS_UPDATED,
  data: LevelsUpdatedData,
) {
  const fragCurrent = this.fragCurrent;
  if (this.level > -1 && fragCurrent) {
    const remappedLevel = fragCurrent.level;
    this.level = remappedLevel;
    // The fragment's level was removed: abandon it and reset.
    if (remappedLevel === -1) {
      this.resetWhenMissingContext(fragCurrent);
    }
  }
  this.levels = data.levels;
}
|
|
1065
|
+
|
|
1066
|
+
/**
 * Toggles AAC <-> HE-AAC signalling for subsequent audio codec selection
 * (see _getAudioCodec / _bufferInitSegment).
 */
public swapAudioCodec() {
  const swapped = this.audioCodecSwap;
  this.audioCodecSwap = !swapped;
}
|
|
1069
|
+
|
|
1070
|
+
/**
 * Seeks to the set startPosition if not equal to the mediaElement's current time.
 * Applies the timeline offset, snaps the start position to the buffer start
 * when within tolerance (or when startOnSegmentBoundary is set), then seeks.
 */
protected seekToStartPos() {
  const { media } = this;
  if (!media) {
    return;
  }
  const currentTime = media.currentTime;
  let startPosition = this.startPosition;
  // only adjust currentTime if different from startPosition or if startPosition not buffered
  // at that stage, there should be only one buffered range, as we reach that code after first fragment has been buffered
  if (startPosition >= 0 && currentTime < startPosition) {
    // A seek is already in flight; do not fight it.
    if (media.seeking) {
      this.log(
        `could not seek to ${startPosition}, already seeking at ${currentTime}`,
      );
      return;
    }

    // Offset start position by timeline offset
    const timelineOffset = this.timelineOffset;
    if (timelineOffset && startPosition) {
      startPosition += timelineOffset;
    }
    const details = this.getLevelDetails();
    const buffered = BufferHelper.getBuffered(media);
    const bufferStart = buffered.length ? buffered.start(0) : 0;
    // How far the actual buffered data begins after the requested start.
    const delta = bufferStart - startPosition;
    const skipTolerance = Math.max(
      this.config.maxBufferHole,
      this.config.maxFragLookUpTolerance,
    );
    // Snap to the buffer start when forced onto a segment boundary, or when
    // the gap is small (within tolerance, or within two part-durations when
    // loading low-latency parts).
    if (
      this.config.startOnSegmentBoundary ||
      (delta > 0 &&
        (delta < skipTolerance ||
          (this.loadingParts && delta < 2 * (details?.partTarget || 0))))
    ) {
      this.log(`adjusting start position by ${delta} to match buffer start`);
      startPosition += delta;
      this.startPosition = startPosition;
    }
    if (currentTime < startPosition) {
      this.log(
        `seek to target start position ${startPosition} from current time ${currentTime} buffer start ${bufferStart}`,
      );
      media.currentTime = startPosition;
    }
  }
}
|
|
1121
|
+
|
|
1122
|
+
/**
 * Resolves the audio codec to hand to the transmuxer: the configured
 * default (if any) or the level's codec, with AAC <-> HE-AAC swapped when
 * a codec swap was requested via swapAudioCodec().
 */
private _getAudioCodec(currentLevel) {
  let audioCodec = this.config.defaultAudioCodec || currentLevel.audioCodec;
  if (this.audioCodecSwap && audioCodec) {
    this.log('Swapping audio codec');
    audioCodec = audioCodec.includes('mp4a.40.5')
      ? 'mp4a.40.2'
      : 'mp4a.40.5';
  }
  return audioCodec;
}
|
|
1135
|
+
|
|
1136
|
+
/**
 * Loads a fragment purely to measure bandwidth (ABR bitrate test). The
 * fragment is neither parsed nor buffered; on completion the controller
 * returns to IDLE so a properly-selected start fragment can be requested.
 */
private _loadBitrateTestFrag(fragment: Fragment, level: Level) {
  fragment.bitrateTest = true;
  this._doFragLoad(fragment, level)
    .then((data) => {
      const { hls } = this;
      const frag = data?.frag;
      // Context may have changed (seek, level switch) while loading.
      if (!frag || this.fragContextChanged(frag)) {
        return;
      }
      level.fragmentError = 0;
      this.state = State.IDLE;
      // Allow the real start fragment to be requested next tick.
      this.startFragRequested = false;
      this.bitrateTest = false;
      const stats = frag.stats;
      // Bitrate tests fragments are neither parsed nor buffered
      stats.parsing.start =
        stats.parsing.end =
        stats.buffering.start =
        stats.buffering.end =
          self.performance.now();
      hls.trigger(Events.FRAG_LOADED, data as FragLoadedData);
      frag.bitrateTest = false;
    })
    .catch((reason) => {
      // Ignore aborts caused by stop/destroy or fatal errors.
      if (this.state === State.STOPPED || this.state === State.ERROR) {
        return;
      }
      this.warn(reason);
      this.resetFragmentLoading(fragment);
    });
}
|
|
1167
|
+
|
|
1168
|
+
/**
 * Consumes a transmuxer result: buffers init segments, publishes initPTS,
 * buffers remuxed video/audio data (handling keyframe backtracking and gap
 * marking), and emits ID3 / user-data parsing events.
 *
 * Fix: removed two meaningless `as any` casts inside boolean expressions
 * (`(!initPTS as any)` and `(!prevFrag as any)` parse as `(!x) as any` and
 * only defeated type checking without changing behavior).
 */
private _handleTransmuxComplete(transmuxResult: TransmuxerResult) {
  const id = this.playlistType;
  const { hls } = this;
  const { remuxResult, chunkMeta } = transmuxResult;

  const context = this.getCurrentContext(chunkMeta);
  if (!context) {
    this.resetWhenMissingContext(chunkMeta);
    return;
  }
  const { frag, part, level } = context;
  const { video, text, id3, initSegment } = remuxResult;
  const { details } = level;
  // The audio-stream-controller handles audio buffering if Hls.js is playing an alternate audio track
  const audio = this.altAudio ? undefined : remuxResult.audio;

  // Check if the current fragment has been aborted. We check this by first seeing if we're still playing the current level.
  // If we are, subsequently check if the currently loading fragment (fragCurrent) has changed.
  if (this.fragContextChanged(frag)) {
    this.fragmentTracker.removeFragment(frag);
    return;
  }

  this.state = State.PARSING;

  if (initSegment) {
    const tracks = initSegment.tracks;
    if (tracks) {
      const mapFragment = frag.initSegment || frag;
      // Encrypted init data the pipeline cannot handle: error already raised.
      if (this.unhandledEncryptionError(initSegment, frag)) {
        return;
      }
      this._bufferInitSegment(level, tracks, mapFragment, chunkMeta);
      hls.trigger(Events.FRAG_PARSING_INIT_SEGMENT, {
        frag: mapFragment,
        id,
        tracks,
      });
    }

    // Publish initPTS for this discontinuity when it is new or changed, so
    // audio/subtitle controllers can align their timelines.
    const baseTime = initSegment.initPTS as number;
    const timescale = initSegment.timescale as number;
    const initPTS = this.initPTS[frag.cc];
    if (
      Number.isFinite(baseTime) &&
      (!initPTS ||
        initPTS.baseTime !== baseTime ||
        initPTS.timescale !== timescale)
    ) {
      const trackId = initSegment.trackId as number;
      this.initPTS[frag.cc] = {
        baseTime,
        timescale,
        trackId,
      };
      hls.trigger(Events.INIT_PTS_FOUND, {
        frag,
        id,
        initPTS: baseTime,
        timescale,
        trackId,
      });
    }
  }

  // Avoid buffering if backtracking this fragment
  if (video && details) {
    if (audio && video.type === 'audiovideo') {
      this.logMuxedErr(frag);
    }
    const prevFrag = details.fragments[frag.sn - 1 - details.startSN];
    const isFirstFragment = frag.sn === details.startSN;
    const isFirstInDiscontinuity = !prevFrag || frag.cc > prevFrag.cc;
    if (remuxResult.independent !== false) {
      const { startPTS, endPTS, startDTS, endDTS } = video;
      if (part) {
        part.elementaryStreams[video.type] = {
          startPTS,
          endPTS,
          startDTS,
          endDTS,
        };
      } else {
        // A keyframe found past the first chunk means earlier fragments may
        // contain the keyframe needed after a backward seek.
        if (
          video.firstKeyFrame &&
          video.independent &&
          chunkMeta.id === 1 &&
          !isFirstInDiscontinuity
        ) {
          this.couldBacktrack = true;
        }
        if (video.dropped && video.independent) {
          // Backtrack if dropped frames create a gap after currentTime

          const bufferInfo = this.getMainFwdBufferInfo();
          const targetBufferTime =
            (bufferInfo ? bufferInfo.end : this.getLoadPosition()) +
            this.config.maxBufferHole;
          const startTime = video.firstKeyFramePTS
            ? video.firstKeyFramePTS
            : startPTS;
          if (
            !isFirstFragment &&
            targetBufferTime < startTime - this.config.maxBufferHole &&
            !isFirstInDiscontinuity
          ) {
            this.backtrack(frag);
            return;
          } else if (isFirstInDiscontinuity) {
            // Mark segment with a gap to avoid loop loading
            frag.gap = true;
          }
          // Set video stream start to fragment start so that truncated samples do not distort the timeline, and mark it partial
          frag.setElementaryStreamInfo(
            video.type as ElementaryStreamTypes,
            frag.start,
            endPTS,
            frag.start,
            endDTS,
            true,
          );
        } else if (
          isFirstFragment &&
          startPTS - (details.appliedTimelineOffset || 0) > MAX_START_GAP_JUMP
        ) {
          // Mark segment with a gap to skip large start gap
          frag.gap = true;
        }
      }
      frag.setElementaryStreamInfo(
        video.type as ElementaryStreamTypes,
        startPTS,
        endPTS,
        startDTS,
        endDTS,
      );
      if (this.backtrackFragment) {
        this.backtrackFragment = frag;
      }
      this.bufferFragmentData(
        video,
        frag,
        part,
        chunkMeta,
        isFirstFragment || isFirstInDiscontinuity,
      );
    } else if (isFirstFragment || isFirstInDiscontinuity) {
      // Mark segment with a gap to avoid loop loading
      frag.gap = true;
    } else {
      // Non-independent remux result mid-playlist: backtrack to a keyframe.
      this.backtrack(frag);
      return;
    }
  }

  if (audio) {
    const { startPTS, endPTS, startDTS, endDTS } = audio;
    if (part) {
      part.elementaryStreams[ElementaryStreamTypes.AUDIO] = {
        startPTS,
        endPTS,
        startDTS,
        endDTS,
      };
    }
    frag.setElementaryStreamInfo(
      ElementaryStreamTypes.AUDIO,
      startPTS,
      endPTS,
      startDTS,
      endDTS,
    );
    this.bufferFragmentData(audio, frag, part, chunkMeta);
  }

  if (details && id3?.samples.length) {
    const emittedID3: FragParsingMetadataData = {
      id,
      frag,
      details,
      samples: id3.samples,
    };
    hls.trigger(Events.FRAG_PARSING_METADATA, emittedID3);
  }
  if (details && text) {
    const emittedText: FragParsingUserdataData = {
      id,
      frag,
      details,
      samples: text.samples,
    };
    hls.trigger(Events.FRAG_PARSING_USERDATA, emittedText);
  }
}
|
|
1363
|
+
|
|
1364
|
+
/**
 * Warns that a muxed audiovideo track was produced where only video was
 * expected (audio is owned by the alternate audio-stream-controller).
 */
private logMuxedErr(frag: Fragment) {
  const segmentKind = isMediaFragment(frag) ? 'Media' : 'Init';
  this.warn(
    `${segmentKind} segment with muxed audiovideo where only video expected: ${frag.url}`,
  );
}
|
|
1369
|
+
|
|
1370
|
+
/**
 * Announces parsed init-segment tracks to the buffer-controller: corrects
 * codec strings (manifest vs parsed, AAC/HE-AAC swap, Android workaround,
 * short passthrough codecs), triggers BUFFER_CODECS, and appends any init
 * segment bytes per track.
 *
 * Fix: removed the meaningless `as any` cast in the post-BUFFER_CODECS
 * guard (`!this.hls as any` only cast the boolean result and defeated
 * type checking without changing behavior).
 */
private _bufferInitSegment(
  currentLevel: Level,
  tracks: TrackSet,
  frag: Fragment,
  chunkMeta: ChunkMetadata,
) {
  // Stale transmuxer callbacks can arrive after a state change; ignore them.
  if (this.state !== State.PARSING) {
    return;
  }

  this.audioOnly = !!tracks.audio && !tracks.video;

  // if audio track is expected to come from audio stream controller, discard any coming from main
  if (this.altAudio && !this.audioOnly) {
    delete tracks.audio;
    if (tracks.audiovideo) {
      this.logMuxedErr(frag);
    }
  }
  // include levelCodec in audio and video tracks
  const { audio, video, audiovideo } = tracks;
  if (audio) {
    const levelCodec = currentLevel.audioCodec;
    let audioCodec = pickMostCompleteCodecName(audio.codec, levelCodec);
    // Add level and profile to make up for remuxer not being able to parse full codec
    // (logger warning "Unhandled audio codec...")
    if (audioCodec === 'mp4a') {
      audioCodec = 'mp4a.40.5';
    }
    // Handle `audioCodecSwitch`
    const ua = navigator.userAgent.toLowerCase();
    if (this.audioCodecSwitch) {
      if (audioCodec) {
        if (audioCodec.indexOf('mp4a.40.5') !== -1) {
          audioCodec = 'mp4a.40.2';
        } else {
          audioCodec = 'mp4a.40.5';
        }
      }
      // In the case that AAC and HE-AAC audio codecs are signalled in manifest,
      // force HE-AAC, as it seems that most browsers prefers it.
      // don't force HE-AAC if mono stream, or in Firefox
      const audioMetadata = audio.metadata;
      if (
        audioMetadata &&
        'channelCount' in audioMetadata &&
        (audioMetadata.channelCount || 1) !== 1 &&
        ua.indexOf('firefox') === -1
      ) {
        audioCodec = 'mp4a.40.5';
      }
    }
    // HE-AAC is broken on Android, always signal audio codec as AAC even if variant manifest states otherwise
    if (
      audioCodec &&
      audioCodec.indexOf('mp4a.40.5') !== -1 &&
      ua.indexOf('android') !== -1 &&
      audio.container !== 'audio/mpeg'
    ) {
      // Exclude mpeg audio
      audioCodec = 'mp4a.40.2';
      this.log(`Android: force audio codec to ${audioCodec}`);
    }
    if (levelCodec && levelCodec !== audioCodec) {
      this.log(
        `Swapping manifest audio codec "${levelCodec}" for "${audioCodec}"`,
      );
    }
    audio.levelCodec = audioCodec;
    audio.id = PlaylistLevelType.MAIN;
    this.log(
      `Init audio buffer, container:${
        audio.container
      }, codecs[selected/level/parsed]=[${audioCodec || ''}/${
        levelCodec || ''
      }/${audio.codec}]`,
    );
    delete tracks.audiovideo;
  }
  if (video) {
    video.levelCodec = currentLevel.videoCodec;
    video.id = PlaylistLevelType.MAIN;
    const parsedVideoCodec = video.codec;
    if (parsedVideoCodec?.length === 4) {
      // Make up for passthrough-remuxer not being able to parse full codec
      // (logger warning "Unhandled video codec...")
      switch (parsedVideoCodec) {
        case 'hvc1':
        case 'hev1':
          video.codec = 'hvc1.1.6.L120.90';
          break;
        case 'av01':
          video.codec = 'av01.0.04M.08';
          break;
        case 'avc1':
          video.codec = 'avc1.42e01e';
          break;
      }
    }
    this.log(
      `Init video buffer, container:${
        video.container
      }, codecs[level/parsed]=[${currentLevel.videoCodec || ''}/${
        parsedVideoCodec
      }]${video.codec !== parsedVideoCodec ? ' parsed-corrected=' + video.codec : ''}${video.supplemental ? ' supplemental=' + video.supplemental : ''}`,
    );
    delete tracks.audiovideo;
  }
  if (audiovideo) {
    this.log(
      `Init audiovideo buffer, container:${audiovideo.container}, codecs[level/parsed]=[${currentLevel.codecs}/${audiovideo.codec}]`,
    );
    // A muxed track supersedes separate audio/video tracks.
    delete tracks.video;
    delete tracks.audio;
  }
  const trackTypes = Object.keys(tracks);
  if (trackTypes.length) {
    this.hls.trigger(Events.BUFFER_CODECS, tracks as BufferCodecsData);
    // Exit after fatal tracks error (the instance may have been destroyed
    // synchronously by a BUFFER_CODECS error handler).
    if (!this.hls) {
      return;
    }
    // loop through tracks that are going to be provided to bufferController
    trackTypes.forEach((trackName) => {
      const track = tracks[trackName] as Track;
      const initSegment = track.initSegment;
      if (initSegment?.byteLength) {
        this.hls.trigger(Events.BUFFER_APPENDING, {
          type: trackName as SourceBufferName,
          data: initSegment,
          frag,
          part: null,
          chunkMeta,
          parent: frag.type,
        });
      }
    });
  }
  // trigger handler right now
  this.tickImmediate();
}
|
|
1511
|
+
|
|
1512
|
+
public getMainFwdBufferInfo(): BufferInfo | null {
  // Forward-buffer info for the MAIN playlist, measured against the buffer
  // returned by getBufferOutput() (the video SourceBuffer when alt-audio is
  // in use, otherwise the combined media buffer).
  return this.getFwdBufferInfo(this.getBufferOutput(), PlaylistLevelType.MAIN);
}
|
|
1517
|
+
|
|
1518
|
+
public get maxBufferLength(): number {
  // Effective forward-buffer target: derived from the current level's max
  // bitrate when that level is known, otherwise the static config value.
  const currentLevelInfo = this.levels?.[this.level];
  return currentLevelInfo
    ? this.getMaxBufferLength(currentLevelInfo.maxBitrate)
    : this.config.maxBufferLength;
}
|
|
1526
|
+
|
|
1527
|
+
private backtrack(fragment: Fragment) {
  // Record that backtracking occurred; findFragments uses these to step
  // backwards through fragments in search of a keyframe.
  this.couldBacktrack = true;
  this.backtrackFragment = fragment;
  // Discard transmuxer state and flush buffered media from this fragment's
  // gap, then drop the fragment from the tracker so it is reloaded from its
  // start position.
  this.resetTransmuxer();
  this.flushBufferGap(fragment);
  this.fragmentTracker.removeFragment(fragment);
  this.fragPrevious = null;
  this.nextLoadPosition = fragment.start;
  this.state = State.IDLE;
}
|
|
1538
|
+
|
|
1539
|
+
get nextLevel(): number {
  // Quality level of the next buffered fragment, or -1 when no fragment is
  // buffered ahead of the playhead.
  return this.nextBufferedFrag?.level ?? -1;
}
|
|
1546
|
+
|
|
1547
|
+
get currentFrag(): Fragment | null {
  // Prefer the fragment known to be playing; otherwise look up the appended
  // fragment at the playhead position.
  const playing = this.fragPlaying;
  if (playing) {
    return playing;
  }
  // `||` (not `??`) is deliberate: a currentTime of 0 falls back to
  // lastCurrentTime, matching the established behavior.
  const position = this.media?.currentTime || this.lastCurrentTime;
  return Number.isFinite(position) ? this.getAppendedFrag(position) : null;
}
|
|
1557
|
+
|
|
1558
|
+
get currentProgramDateTime(): Date | null {
  // Wall-clock time at the playhead: the fragment's program-date-time plus
  // the playhead's offset into that fragment, or null when unavailable.
  // `||` (not `??`) is deliberate: a currentTime of 0 falls back to
  // lastCurrentTime, matching the established behavior.
  const position = this.media?.currentTime || this.lastCurrentTime;
  if (!Number.isFinite(position)) {
    return null;
  }
  const details = this.getLevelDetails();
  const frag =
    this.currentFrag ||
    (details ? findFragmentByPTS(null, details.fragments, position) : null);
  if (!frag || frag.programDateTime === null) {
    return null;
  }
  // programDateTime is epoch ms at frag.start; offset scales seconds -> ms.
  return new Date(frag.programDateTime + (position - frag.start) * 1000);
}
|
|
1577
|
+
|
|
1578
|
+
get currentLevel(): number {
  // Quality level of the fragment at the playhead, or -1 when unknown.
  return this.currentFrag?.level ?? -1;
}
|
|
1585
|
+
|
|
1586
|
+
get nextBufferedFrag() {
  // Buffered fragment immediately following the one at the playhead, or
  // null when the playhead fragment itself cannot be resolved.
  const playheadFrag = this.currentFrag;
  return playheadFrag ? this.followingBufferedFrag(playheadFrag) : null;
}
|
|
1593
|
+
|
|
1594
|
+
get forceStartLoad() {
  // Read-only view of the internal _forceStartLoad flag.
  return this._forceStartLoad;
}
|
|
1597
|
+
}
|