@zenvor/hls.js 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +28 -0
- package/README.md +472 -0
- package/dist/hls-demo.js +26995 -0
- package/dist/hls-demo.js.map +1 -0
- package/dist/hls.d.mts +4204 -0
- package/dist/hls.d.ts +4204 -0
- package/dist/hls.js +40050 -0
- package/dist/hls.js.d.ts +4204 -0
- package/dist/hls.js.map +1 -0
- package/dist/hls.light.js +27145 -0
- package/dist/hls.light.js.map +1 -0
- package/dist/hls.light.min.js +2 -0
- package/dist/hls.light.min.js.map +1 -0
- package/dist/hls.light.mjs +26392 -0
- package/dist/hls.light.mjs.map +1 -0
- package/dist/hls.min.js +2 -0
- package/dist/hls.min.js.map +1 -0
- package/dist/hls.mjs +38956 -0
- package/dist/hls.mjs.map +1 -0
- package/dist/hls.worker.js +2 -0
- package/dist/hls.worker.js.map +1 -0
- package/package.json +143 -0
- package/src/config.ts +794 -0
- package/src/controller/abr-controller.ts +1019 -0
- package/src/controller/algo-data-controller.ts +794 -0
- package/src/controller/audio-stream-controller.ts +1099 -0
- package/src/controller/audio-track-controller.ts +454 -0
- package/src/controller/base-playlist-controller.ts +438 -0
- package/src/controller/base-stream-controller.ts +2526 -0
- package/src/controller/buffer-controller.ts +2015 -0
- package/src/controller/buffer-operation-queue.ts +159 -0
- package/src/controller/cap-level-controller.ts +367 -0
- package/src/controller/cmcd-controller.ts +422 -0
- package/src/controller/content-steering-controller.ts +622 -0
- package/src/controller/eme-controller.ts +1617 -0
- package/src/controller/error-controller.ts +627 -0
- package/src/controller/fps-controller.ts +146 -0
- package/src/controller/fragment-finders.ts +256 -0
- package/src/controller/fragment-tracker.ts +567 -0
- package/src/controller/gap-controller.ts +719 -0
- package/src/controller/id3-track-controller.ts +488 -0
- package/src/controller/interstitial-player.ts +302 -0
- package/src/controller/interstitials-controller.ts +2895 -0
- package/src/controller/interstitials-schedule.ts +698 -0
- package/src/controller/latency-controller.ts +294 -0
- package/src/controller/level-controller.ts +776 -0
- package/src/controller/stream-controller.ts +1597 -0
- package/src/controller/subtitle-stream-controller.ts +508 -0
- package/src/controller/subtitle-track-controller.ts +617 -0
- package/src/controller/timeline-controller.ts +677 -0
- package/src/crypt/aes-crypto.ts +36 -0
- package/src/crypt/aes-decryptor.ts +339 -0
- package/src/crypt/decrypter-aes-mode.ts +4 -0
- package/src/crypt/decrypter.ts +225 -0
- package/src/crypt/fast-aes-key.ts +39 -0
- package/src/define-plugin.d.ts +17 -0
- package/src/demux/audio/aacdemuxer.ts +126 -0
- package/src/demux/audio/ac3-demuxer.ts +170 -0
- package/src/demux/audio/adts.ts +249 -0
- package/src/demux/audio/base-audio-demuxer.ts +205 -0
- package/src/demux/audio/dolby.ts +21 -0
- package/src/demux/audio/mp3demuxer.ts +85 -0
- package/src/demux/audio/mpegaudio.ts +177 -0
- package/src/demux/chunk-cache.ts +42 -0
- package/src/demux/dummy-demuxed-track.ts +13 -0
- package/src/demux/inject-worker.ts +75 -0
- package/src/demux/mp4demuxer.ts +234 -0
- package/src/demux/sample-aes.ts +198 -0
- package/src/demux/transmuxer-interface.ts +449 -0
- package/src/demux/transmuxer-worker.ts +221 -0
- package/src/demux/transmuxer.ts +560 -0
- package/src/demux/tsdemuxer.ts +1256 -0
- package/src/demux/video/avc-video-parser.ts +401 -0
- package/src/demux/video/base-video-parser.ts +198 -0
- package/src/demux/video/exp-golomb.ts +153 -0
- package/src/demux/video/hevc-video-parser.ts +736 -0
- package/src/empty-es.js +5 -0
- package/src/empty.js +3 -0
- package/src/errors.ts +107 -0
- package/src/events.ts +548 -0
- package/src/exports-default.ts +3 -0
- package/src/exports-named.ts +81 -0
- package/src/hls.ts +1613 -0
- package/src/is-supported.ts +54 -0
- package/src/loader/date-range.ts +207 -0
- package/src/loader/fragment-loader.ts +403 -0
- package/src/loader/fragment.ts +487 -0
- package/src/loader/interstitial-asset-list.ts +162 -0
- package/src/loader/interstitial-event.ts +337 -0
- package/src/loader/key-loader.ts +439 -0
- package/src/loader/level-details.ts +203 -0
- package/src/loader/level-key.ts +259 -0
- package/src/loader/load-stats.ts +17 -0
- package/src/loader/m3u8-parser.ts +1072 -0
- package/src/loader/playlist-loader.ts +839 -0
- package/src/polyfills/number.ts +15 -0
- package/src/remux/aac-helper.ts +81 -0
- package/src/remux/mp4-generator.ts +1380 -0
- package/src/remux/mp4-remuxer.ts +1261 -0
- package/src/remux/passthrough-remuxer.ts +434 -0
- package/src/task-loop.ts +130 -0
- package/src/types/algo.ts +44 -0
- package/src/types/buffer.ts +105 -0
- package/src/types/component-api.ts +20 -0
- package/src/types/demuxer.ts +208 -0
- package/src/types/events.ts +574 -0
- package/src/types/fragment-tracker.ts +23 -0
- package/src/types/level.ts +268 -0
- package/src/types/loader.ts +198 -0
- package/src/types/media-playlist.ts +92 -0
- package/src/types/network-details.ts +3 -0
- package/src/types/remuxer.ts +104 -0
- package/src/types/track.ts +12 -0
- package/src/types/transmuxer.ts +46 -0
- package/src/types/tuples.ts +6 -0
- package/src/types/vtt.ts +11 -0
- package/src/utils/arrays.ts +22 -0
- package/src/utils/attr-list.ts +192 -0
- package/src/utils/binary-search.ts +46 -0
- package/src/utils/buffer-helper.ts +173 -0
- package/src/utils/cea-608-parser.ts +1413 -0
- package/src/utils/chunker.ts +41 -0
- package/src/utils/codecs.ts +314 -0
- package/src/utils/cues.ts +96 -0
- package/src/utils/discontinuities.ts +174 -0
- package/src/utils/encryption-methods-util.ts +21 -0
- package/src/utils/error-helper.ts +95 -0
- package/src/utils/event-listener-helper.ts +16 -0
- package/src/utils/ewma-bandwidth-estimator.ts +97 -0
- package/src/utils/ewma.ts +43 -0
- package/src/utils/fetch-loader.ts +331 -0
- package/src/utils/global.ts +2 -0
- package/src/utils/hash.ts +10 -0
- package/src/utils/hdr.ts +67 -0
- package/src/utils/hex.ts +32 -0
- package/src/utils/imsc1-ttml-parser.ts +261 -0
- package/src/utils/keysystem-util.ts +45 -0
- package/src/utils/level-helper.ts +629 -0
- package/src/utils/logger.ts +120 -0
- package/src/utils/media-option-attributes.ts +49 -0
- package/src/utils/mediacapabilities-helper.ts +301 -0
- package/src/utils/mediakeys-helper.ts +210 -0
- package/src/utils/mediasource-helper.ts +37 -0
- package/src/utils/mp4-tools.ts +1473 -0
- package/src/utils/number.ts +3 -0
- package/src/utils/numeric-encoding-utils.ts +26 -0
- package/src/utils/output-filter.ts +46 -0
- package/src/utils/rendition-helper.ts +505 -0
- package/src/utils/safe-json-stringify.ts +22 -0
- package/src/utils/texttrack-utils.ts +164 -0
- package/src/utils/time-ranges.ts +17 -0
- package/src/utils/timescale-conversion.ts +46 -0
- package/src/utils/utf8-utils.ts +18 -0
- package/src/utils/variable-substitution.ts +105 -0
- package/src/utils/vttcue.ts +384 -0
- package/src/utils/vttparser.ts +497 -0
- package/src/utils/webvtt-parser.ts +166 -0
- package/src/utils/xhr-loader.ts +337 -0
- package/src/version.ts +1 -0
|
@@ -0,0 +1,2015 @@
|
|
|
1
|
+
import BufferOperationQueue from './buffer-operation-queue';
|
|
2
|
+
import { createDoNothingErrorAction } from './error-controller';
|
|
3
|
+
import { ErrorDetails, ErrorTypes } from '../errors';
|
|
4
|
+
import { Events } from '../events';
|
|
5
|
+
import { ElementaryStreamTypes, isMediaFragment } from '../loader/fragment';
|
|
6
|
+
import { DEFAULT_TARGET_DURATION } from '../loader/level-details';
|
|
7
|
+
import { PlaylistLevelType } from '../types/loader';
|
|
8
|
+
import { BufferHelper } from '../utils/buffer-helper';
|
|
9
|
+
import {
|
|
10
|
+
areCodecsMediaSourceSupported,
|
|
11
|
+
getCodecCompatibleName,
|
|
12
|
+
pickMostCompleteCodecName,
|
|
13
|
+
replaceVideoCodec,
|
|
14
|
+
} from '../utils/codecs';
|
|
15
|
+
import {
|
|
16
|
+
addEventListener,
|
|
17
|
+
removeEventListener,
|
|
18
|
+
} from '../utils/event-listener-helper';
|
|
19
|
+
import { Logger } from '../utils/logger';
|
|
20
|
+
import {
|
|
21
|
+
getMediaSource,
|
|
22
|
+
isCompatibleTrackChange,
|
|
23
|
+
isManagedMediaSource,
|
|
24
|
+
} from '../utils/mediasource-helper';
|
|
25
|
+
import { stringify } from '../utils/safe-json-stringify';
|
|
26
|
+
import type { FragmentTracker } from './fragment-tracker';
|
|
27
|
+
import type { HlsConfig } from '../config';
|
|
28
|
+
import type Hls from '../hls';
|
|
29
|
+
import type { MediaFragment, Part } from '../loader/fragment';
|
|
30
|
+
import type { LevelDetails } from '../loader/level-details';
|
|
31
|
+
import type {
|
|
32
|
+
AttachMediaSourceData,
|
|
33
|
+
BaseTrack,
|
|
34
|
+
BaseTrackSet,
|
|
35
|
+
BufferCreatedTrackSet,
|
|
36
|
+
BufferOperation,
|
|
37
|
+
EmptyTuple,
|
|
38
|
+
ExtendedSourceBuffer,
|
|
39
|
+
MediaOverrides,
|
|
40
|
+
ParsedTrack,
|
|
41
|
+
SourceBufferName,
|
|
42
|
+
SourceBuffersTuple,
|
|
43
|
+
SourceBufferTrack,
|
|
44
|
+
SourceBufferTrackSet,
|
|
45
|
+
} from '../types/buffer';
|
|
46
|
+
import type { ComponentAPI } from '../types/component-api';
|
|
47
|
+
import type {
|
|
48
|
+
BufferAppendingData,
|
|
49
|
+
BufferCodecsData,
|
|
50
|
+
BufferEOSData,
|
|
51
|
+
BufferFlushingData,
|
|
52
|
+
ErrorData,
|
|
53
|
+
FragChangedData,
|
|
54
|
+
FragParsedData,
|
|
55
|
+
LevelUpdatedData,
|
|
56
|
+
ManifestParsedData,
|
|
57
|
+
MediaAttachingData,
|
|
58
|
+
MediaDetachingData,
|
|
59
|
+
} from '../types/events';
|
|
60
|
+
import type { ChunkMetadata } from '../types/transmuxer';
|
|
61
|
+
|
|
62
|
+
interface BufferedChangeEvent extends Event {
|
|
63
|
+
readonly addedRanges?: TimeRanges;
|
|
64
|
+
readonly removedRanges?: TimeRanges;
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
const VIDEO_CODEC_PROFILE_REPLACE =
|
|
68
|
+
/(avc[1234]|hvc1|hev1|dvh[1e]|vp09|av01)(?:\.[^.,]+)+/;
|
|
69
|
+
|
|
70
|
+
const TRACK_REMOVED_ERROR_NAME = 'HlsJsTrackRemovedError';
|
|
71
|
+
|
|
72
|
+
class HlsJsTrackRemovedError extends Error {
|
|
73
|
+
constructor(message) {
|
|
74
|
+
super(message);
|
|
75
|
+
this.name = TRACK_REMOVED_ERROR_NAME;
|
|
76
|
+
}
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
export default class BufferController extends Logger implements ComponentAPI {
|
|
80
|
+
private hls: Hls;
|
|
81
|
+
private fragmentTracker: FragmentTracker;
|
|
82
|
+
// The level details used to determine duration, target-duration and live
|
|
83
|
+
private details: LevelDetails | null = null;
|
|
84
|
+
// cache the self generated object url to detect hijack of video tag
|
|
85
|
+
private _objectUrl: string | null = null;
|
|
86
|
+
// A queue of buffer operations which require the SourceBuffer to not be updating upon execution
|
|
87
|
+
private operationQueue: BufferOperationQueue | null = null;
|
|
88
|
+
|
|
89
|
+
// The total number track codecs expected before any sourceBuffers are created (2: audio and video or 1: audiovideo | audio | video)
|
|
90
|
+
private bufferCodecEventsTotal: number = 0;
|
|
91
|
+
|
|
92
|
+
// A reference to the attached media element
|
|
93
|
+
private media: HTMLMediaElement | null = null;
|
|
94
|
+
|
|
95
|
+
// A reference to the active media source
|
|
96
|
+
private mediaSource: MediaSource | null = null;
|
|
97
|
+
|
|
98
|
+
// Last MP3 audio chunk appended
|
|
99
|
+
private lastMpegAudioChunk: ChunkMetadata | null = null;
|
|
100
|
+
|
|
101
|
+
// Audio fragment blocked from appending until corresponding video appends or context changes
|
|
102
|
+
private blockedAudioAppend: {
|
|
103
|
+
op: BufferOperation;
|
|
104
|
+
frag: MediaFragment | Part;
|
|
105
|
+
} | null = null;
|
|
106
|
+
// Keep track of video append position for unblocking audio
|
|
107
|
+
private lastVideoAppendEnd: number = 0;
|
|
108
|
+
// Whether or not to use ManagedMediaSource API and append source element to media element.
|
|
109
|
+
private appendSource: boolean;
|
|
110
|
+
// Transferred MediaSource information used to detmerine if duration end endstream may be appended
|
|
111
|
+
private transferData?: MediaAttachingData;
|
|
112
|
+
// Directives used to override default MediaSource handling
|
|
113
|
+
private overrides?: MediaOverrides;
|
|
114
|
+
// Error counters
|
|
115
|
+
private appendErrors = {
|
|
116
|
+
audio: 0,
|
|
117
|
+
video: 0,
|
|
118
|
+
audiovideo: 0,
|
|
119
|
+
};
|
|
120
|
+
private appendError?: ErrorData;
|
|
121
|
+
// Record of required or created buffers by type. SourceBuffer is stored in Track.buffer once created.
|
|
122
|
+
private tracks: SourceBufferTrackSet = {};
|
|
123
|
+
// Array of SourceBuffer type and SourceBuffer (or null). One entry per TrackSet in this.tracks.
|
|
124
|
+
private sourceBuffers: SourceBuffersTuple = [
|
|
125
|
+
[null, null],
|
|
126
|
+
[null, null],
|
|
127
|
+
];
|
|
128
|
+
|
|
129
|
+
constructor(hls: Hls, fragmentTracker: FragmentTracker) {
|
|
130
|
+
super('buffer-controller', hls.logger);
|
|
131
|
+
this.hls = hls;
|
|
132
|
+
this.fragmentTracker = fragmentTracker;
|
|
133
|
+
this.appendSource = isManagedMediaSource(
|
|
134
|
+
getMediaSource(hls.config.preferManagedMediaSource),
|
|
135
|
+
);
|
|
136
|
+
|
|
137
|
+
this.initTracks();
|
|
138
|
+
this.registerListeners();
|
|
139
|
+
}
|
|
140
|
+
|
|
141
|
+
public hasSourceTypes(): boolean {
|
|
142
|
+
return Object.keys(this.tracks).length > 0;
|
|
143
|
+
}
|
|
144
|
+
|
|
145
|
+
public destroy() {
|
|
146
|
+
this.unregisterListeners();
|
|
147
|
+
this.details = null;
|
|
148
|
+
this.lastMpegAudioChunk = this.blockedAudioAppend = null;
|
|
149
|
+
this.transferData = this.overrides = undefined;
|
|
150
|
+
if (this.operationQueue) {
|
|
151
|
+
this.operationQueue.destroy();
|
|
152
|
+
this.operationQueue = null;
|
|
153
|
+
}
|
|
154
|
+
// @ts-ignore
|
|
155
|
+
this.hls = this.fragmentTracker = null;
|
|
156
|
+
// @ts-ignore
|
|
157
|
+
this._onMediaSourceOpen = this._onMediaSourceClose = null;
|
|
158
|
+
// @ts-ignore
|
|
159
|
+
this._onMediaSourceEnded = null;
|
|
160
|
+
// @ts-ignore
|
|
161
|
+
this._onStartStreaming = this._onEndStreaming = null;
|
|
162
|
+
}
|
|
163
|
+
|
|
164
|
+
private registerListeners() {
|
|
165
|
+
const { hls } = this;
|
|
166
|
+
hls.on(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
|
|
167
|
+
hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
|
|
168
|
+
hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
|
|
169
|
+
hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
|
|
170
|
+
hls.on(Events.BUFFER_RESET, this.onBufferReset, this);
|
|
171
|
+
hls.on(Events.BUFFER_APPENDING, this.onBufferAppending, this);
|
|
172
|
+
hls.on(Events.BUFFER_CODECS, this.onBufferCodecs, this);
|
|
173
|
+
hls.on(Events.BUFFER_EOS, this.onBufferEos, this);
|
|
174
|
+
hls.on(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
|
|
175
|
+
hls.on(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
|
|
176
|
+
hls.on(Events.FRAG_PARSED, this.onFragParsed, this);
|
|
177
|
+
hls.on(Events.FRAG_CHANGED, this.onFragChanged, this);
|
|
178
|
+
hls.on(Events.ERROR, this.onError, this);
|
|
179
|
+
}
|
|
180
|
+
|
|
181
|
+
private unregisterListeners() {
|
|
182
|
+
const { hls } = this;
|
|
183
|
+
hls.off(Events.MEDIA_ATTACHING, this.onMediaAttaching, this);
|
|
184
|
+
hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
|
|
185
|
+
hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
|
|
186
|
+
hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
|
|
187
|
+
hls.off(Events.BUFFER_RESET, this.onBufferReset, this);
|
|
188
|
+
hls.off(Events.BUFFER_APPENDING, this.onBufferAppending, this);
|
|
189
|
+
hls.off(Events.BUFFER_CODECS, this.onBufferCodecs, this);
|
|
190
|
+
hls.off(Events.BUFFER_EOS, this.onBufferEos, this);
|
|
191
|
+
hls.off(Events.BUFFER_FLUSHING, this.onBufferFlushing, this);
|
|
192
|
+
hls.off(Events.LEVEL_UPDATED, this.onLevelUpdated, this);
|
|
193
|
+
hls.off(Events.FRAG_PARSED, this.onFragParsed, this);
|
|
194
|
+
hls.off(Events.FRAG_CHANGED, this.onFragChanged, this);
|
|
195
|
+
hls.off(Events.ERROR, this.onError, this);
|
|
196
|
+
}
|
|
197
|
+
|
|
198
|
+
public transferMedia(): AttachMediaSourceData | null {
|
|
199
|
+
const { media, mediaSource } = this;
|
|
200
|
+
if (!media) {
|
|
201
|
+
return null;
|
|
202
|
+
}
|
|
203
|
+
const tracks = {};
|
|
204
|
+
if (this.operationQueue) {
|
|
205
|
+
const updating = this.isUpdating();
|
|
206
|
+
if (!updating) {
|
|
207
|
+
this.operationQueue.removeBlockers();
|
|
208
|
+
}
|
|
209
|
+
const queued = this.isQueued();
|
|
210
|
+
if (updating || queued) {
|
|
211
|
+
this.warn(
|
|
212
|
+
`Transfering MediaSource with${queued ? ' operations in queue' : ''}${updating ? ' updating SourceBuffer(s)' : ''} ${this.operationQueue}`,
|
|
213
|
+
);
|
|
214
|
+
}
|
|
215
|
+
this.operationQueue.destroy();
|
|
216
|
+
}
|
|
217
|
+
const transferData = this.transferData;
|
|
218
|
+
if (
|
|
219
|
+
transferData &&
|
|
220
|
+
!this.sourceBufferCount &&
|
|
221
|
+
transferData.mediaSource === mediaSource
|
|
222
|
+
) {
|
|
223
|
+
Object.assign(tracks, transferData.tracks);
|
|
224
|
+
} else {
|
|
225
|
+
this.sourceBuffers.forEach((tuple) => {
|
|
226
|
+
const [type] = tuple;
|
|
227
|
+
if (type) {
|
|
228
|
+
tracks[type] = Object.assign({}, this.tracks[type]);
|
|
229
|
+
this.removeBuffer(type);
|
|
230
|
+
}
|
|
231
|
+
tuple[0] = tuple[1] = null;
|
|
232
|
+
});
|
|
233
|
+
}
|
|
234
|
+
return {
|
|
235
|
+
media,
|
|
236
|
+
mediaSource,
|
|
237
|
+
tracks,
|
|
238
|
+
};
|
|
239
|
+
}
|
|
240
|
+
|
|
241
|
+
private initTracks() {
|
|
242
|
+
const tracks = {};
|
|
243
|
+
this.sourceBuffers = [
|
|
244
|
+
[null, null],
|
|
245
|
+
[null, null],
|
|
246
|
+
];
|
|
247
|
+
this.tracks = tracks;
|
|
248
|
+
this.resetQueue();
|
|
249
|
+
this.lastMpegAudioChunk = this.blockedAudioAppend = null;
|
|
250
|
+
this.lastVideoAppendEnd = 0;
|
|
251
|
+
}
|
|
252
|
+
|
|
253
|
+
private onManifestLoading() {
|
|
254
|
+
this.bufferCodecEventsTotal = 0;
|
|
255
|
+
this.details = null;
|
|
256
|
+
this.resetAppendErrors();
|
|
257
|
+
}
|
|
258
|
+
|
|
259
|
+
private onManifestParsed(
|
|
260
|
+
event: Events.MANIFEST_PARSED,
|
|
261
|
+
data: ManifestParsedData,
|
|
262
|
+
) {
|
|
263
|
+
// in case of alt audio 2 BUFFER_CODECS events will be triggered, one per stream controller
|
|
264
|
+
// sourcebuffers will be created all at once when the expected nb of tracks will be reached
|
|
265
|
+
// in case alt audio is not used, only one BUFFER_CODEC event will be fired from main stream controller
|
|
266
|
+
// it will contain the expected nb of source buffers, no need to compute it
|
|
267
|
+
let codecEvents: number = 2;
|
|
268
|
+
if ((data.audio && !data.video) || !data.altAudio) {
|
|
269
|
+
codecEvents = 1;
|
|
270
|
+
}
|
|
271
|
+
this.bufferCodecEventsTotal = codecEvents;
|
|
272
|
+
this.log(`${codecEvents} bufferCodec event(s) expected.`);
|
|
273
|
+
if (
|
|
274
|
+
this.transferData?.mediaSource &&
|
|
275
|
+
this.sourceBufferCount &&
|
|
276
|
+
codecEvents
|
|
277
|
+
) {
|
|
278
|
+
this.bufferCreated();
|
|
279
|
+
}
|
|
280
|
+
}
|
|
281
|
+
|
|
282
|
+
private onMediaAttaching(
|
|
283
|
+
event: Events.MEDIA_ATTACHING,
|
|
284
|
+
data: MediaAttachingData,
|
|
285
|
+
) {
|
|
286
|
+
const media = (this.media = data.media);
|
|
287
|
+
this.transferData = this.overrides = undefined;
|
|
288
|
+
const MediaSource = getMediaSource(this.appendSource);
|
|
289
|
+
if (MediaSource) {
|
|
290
|
+
const transferringMedia = !!data.mediaSource;
|
|
291
|
+
if (transferringMedia || data.overrides) {
|
|
292
|
+
this.transferData = data;
|
|
293
|
+
this.overrides = data.overrides;
|
|
294
|
+
}
|
|
295
|
+
const ms = (this.mediaSource = data.mediaSource || new MediaSource());
|
|
296
|
+
this.assignMediaSource(ms);
|
|
297
|
+
if (transferringMedia) {
|
|
298
|
+
this._objectUrl = media.src;
|
|
299
|
+
this.attachTransferred();
|
|
300
|
+
} else {
|
|
301
|
+
// cache the locally generated object url
|
|
302
|
+
const objectUrl = (this._objectUrl = self.URL.createObjectURL(ms));
|
|
303
|
+
// link video and media Source
|
|
304
|
+
if (this.appendSource) {
|
|
305
|
+
try {
|
|
306
|
+
media.removeAttribute('src');
|
|
307
|
+
// ManagedMediaSource will not open without disableRemotePlayback set to false or source alternatives
|
|
308
|
+
const MMS = (self as any).ManagedMediaSource;
|
|
309
|
+
media.disableRemotePlayback =
|
|
310
|
+
media.disableRemotePlayback || (MMS && ms instanceof MMS);
|
|
311
|
+
removeSourceChildren(media);
|
|
312
|
+
addSource(media, objectUrl);
|
|
313
|
+
media.load();
|
|
314
|
+
} catch (error) {
|
|
315
|
+
media.src = objectUrl;
|
|
316
|
+
}
|
|
317
|
+
} else {
|
|
318
|
+
media.src = objectUrl;
|
|
319
|
+
}
|
|
320
|
+
}
|
|
321
|
+
addEventListener(media, 'emptied', this._onMediaEmptied);
|
|
322
|
+
addEventListener(media, 'error', this._onMediaError);
|
|
323
|
+
}
|
|
324
|
+
}
|
|
325
|
+
|
|
326
|
+
private assignMediaSource(ms: MediaSource) {
|
|
327
|
+
this.log(
|
|
328
|
+
`${this.transferData?.mediaSource === ms ? 'transferred' : 'created'} media source: ${(ms.constructor as any)?.name}`,
|
|
329
|
+
);
|
|
330
|
+
// MediaSource listeners are arrow functions with a lexical scope, and do not need to be bound
|
|
331
|
+
addEventListener(ms, 'sourceopen', this._onMediaSourceOpen);
|
|
332
|
+
addEventListener(ms, 'sourceended', this._onMediaSourceEnded);
|
|
333
|
+
addEventListener(ms, 'sourceclose', this._onMediaSourceClose);
|
|
334
|
+
|
|
335
|
+
if (this.appendSource) {
|
|
336
|
+
addEventListener(ms, 'startstreaming', this._onStartStreaming);
|
|
337
|
+
addEventListener(ms, 'endstreaming', this._onEndStreaming);
|
|
338
|
+
}
|
|
339
|
+
}
|
|
340
|
+
|
|
341
|
+
private attachTransferred() {
|
|
342
|
+
const media = this.media;
|
|
343
|
+
const data = this.transferData;
|
|
344
|
+
if (!data || !media) {
|
|
345
|
+
return;
|
|
346
|
+
}
|
|
347
|
+
const requiredTracks = this.tracks;
|
|
348
|
+
const transferredTracks = data.tracks;
|
|
349
|
+
const trackNames = transferredTracks
|
|
350
|
+
? Object.keys(transferredTracks)
|
|
351
|
+
: null;
|
|
352
|
+
const trackCount = trackNames ? trackNames.length : 0;
|
|
353
|
+
const mediaSourceOpenCallback = () => {
|
|
354
|
+
// eslint-disable-next-line @typescript-eslint/no-floating-promises
|
|
355
|
+
Promise.resolve().then(() => {
|
|
356
|
+
if (this.media && this.mediaSourceOpenOrEnded) {
|
|
357
|
+
this._onMediaSourceOpen();
|
|
358
|
+
}
|
|
359
|
+
});
|
|
360
|
+
};
|
|
361
|
+
if (transferredTracks && trackNames && trackCount) {
|
|
362
|
+
if (!this.tracksReady) {
|
|
363
|
+
// Wait for CODECS event(s)
|
|
364
|
+
this.hls.config.startFragPrefetch = true;
|
|
365
|
+
this.log(`attachTransferred: waiting for SourceBuffer track info`);
|
|
366
|
+
return;
|
|
367
|
+
}
|
|
368
|
+
this
|
|
369
|
+
.log(`attachTransferred: (bufferCodecEventsTotal ${this.bufferCodecEventsTotal})
|
|
370
|
+
required tracks: ${stringify(requiredTracks, (key, value) => (key === 'initSegment' ? undefined : value))};
|
|
371
|
+
transfer tracks: ${stringify(transferredTracks, (key, value) => (key === 'initSegment' ? undefined : value))}}`);
|
|
372
|
+
if (!isCompatibleTrackChange(transferredTracks, requiredTracks)) {
|
|
373
|
+
// destroy attaching media source
|
|
374
|
+
data.mediaSource = null;
|
|
375
|
+
data.tracks = undefined;
|
|
376
|
+
|
|
377
|
+
const currentTime = media.currentTime;
|
|
378
|
+
|
|
379
|
+
const details = this.details;
|
|
380
|
+
const startTime = Math.max(
|
|
381
|
+
currentTime,
|
|
382
|
+
details?.fragments[0].start || 0,
|
|
383
|
+
);
|
|
384
|
+
if (startTime - currentTime > 1) {
|
|
385
|
+
this.log(
|
|
386
|
+
`attachTransferred: waiting for playback to reach new tracks start time ${currentTime} -> ${startTime}`,
|
|
387
|
+
);
|
|
388
|
+
return;
|
|
389
|
+
}
|
|
390
|
+
this.warn(
|
|
391
|
+
`attachTransferred: resetting MediaSource for incompatible tracks ("${Object.keys(transferredTracks)}"->"${Object.keys(requiredTracks)}") start time: ${startTime} currentTime: ${currentTime}`,
|
|
392
|
+
);
|
|
393
|
+
this.onMediaDetaching(Events.MEDIA_DETACHING, {});
|
|
394
|
+
this.onMediaAttaching(Events.MEDIA_ATTACHING, data);
|
|
395
|
+
media.currentTime = startTime;
|
|
396
|
+
return;
|
|
397
|
+
}
|
|
398
|
+
this.transferData = undefined;
|
|
399
|
+
trackNames.forEach((trackName) => {
|
|
400
|
+
const type = trackName as SourceBufferName;
|
|
401
|
+
const track = transferredTracks[type];
|
|
402
|
+
if (track) {
|
|
403
|
+
const sb = track.buffer;
|
|
404
|
+
if (sb) {
|
|
405
|
+
// Purge fragment tracker of ejected segments for existing buffer
|
|
406
|
+
const fragmentTracker = this.fragmentTracker;
|
|
407
|
+
const playlistType = track.id as PlaylistLevelType;
|
|
408
|
+
if (
|
|
409
|
+
fragmentTracker.hasFragments(playlistType) ||
|
|
410
|
+
fragmentTracker.hasParts(playlistType)
|
|
411
|
+
) {
|
|
412
|
+
const bufferedTimeRanges = BufferHelper.getBuffered(sb);
|
|
413
|
+
fragmentTracker.detectEvictedFragments(
|
|
414
|
+
type,
|
|
415
|
+
bufferedTimeRanges,
|
|
416
|
+
playlistType,
|
|
417
|
+
null,
|
|
418
|
+
true,
|
|
419
|
+
);
|
|
420
|
+
}
|
|
421
|
+
// Transfer SourceBuffer
|
|
422
|
+
const sbIndex = sourceBufferNameToIndex(type);
|
|
423
|
+
const sbTuple = [type, sb] as Exclude<
|
|
424
|
+
SourceBuffersTuple[typeof sbIndex],
|
|
425
|
+
EmptyTuple
|
|
426
|
+
>;
|
|
427
|
+
this.sourceBuffers[sbIndex] = sbTuple as any;
|
|
428
|
+
if (sb.updating && this.operationQueue) {
|
|
429
|
+
// eslint-disable-next-line @typescript-eslint/no-floating-promises
|
|
430
|
+
this.operationQueue.prependBlocker(type);
|
|
431
|
+
}
|
|
432
|
+
this.trackSourceBuffer(type, track);
|
|
433
|
+
}
|
|
434
|
+
}
|
|
435
|
+
});
|
|
436
|
+
mediaSourceOpenCallback();
|
|
437
|
+
this.bufferCreated();
|
|
438
|
+
} else {
|
|
439
|
+
this.log(`attachTransferred: MediaSource w/o SourceBuffers`);
|
|
440
|
+
mediaSourceOpenCallback();
|
|
441
|
+
}
|
|
442
|
+
}
|
|
443
|
+
|
|
444
|
+
private get mediaSourceOpenOrEnded(): boolean {
|
|
445
|
+
const readyState = this.mediaSource?.readyState;
|
|
446
|
+
return readyState === 'open' || readyState === 'ended';
|
|
447
|
+
}
|
|
448
|
+
|
|
449
|
+
private _onEndStreaming = (event) => {
|
|
450
|
+
if (!this.hls as any) {
|
|
451
|
+
return;
|
|
452
|
+
}
|
|
453
|
+
if (this.mediaSource?.readyState !== 'open') {
|
|
454
|
+
return;
|
|
455
|
+
}
|
|
456
|
+
this.hls.pauseBuffering();
|
|
457
|
+
};
|
|
458
|
+
|
|
459
|
+
private _onStartStreaming = (event) => {
|
|
460
|
+
if (!this.hls as any) {
|
|
461
|
+
return;
|
|
462
|
+
}
|
|
463
|
+
this.hls.resumeBuffering();
|
|
464
|
+
};
|
|
465
|
+
|
|
466
|
+
private onMediaDetaching(
|
|
467
|
+
event: Events.MEDIA_DETACHING,
|
|
468
|
+
data: MediaDetachingData,
|
|
469
|
+
) {
|
|
470
|
+
const transferringMedia = !!data.transferMedia;
|
|
471
|
+
this.transferData = this.overrides = undefined;
|
|
472
|
+
const { media, mediaSource, _objectUrl } = this;
|
|
473
|
+
if (mediaSource) {
|
|
474
|
+
this.log(
|
|
475
|
+
`media source ${transferringMedia ? 'transferring' : 'detaching'}`,
|
|
476
|
+
);
|
|
477
|
+
if (transferringMedia) {
|
|
478
|
+
// Detach SourceBuffers without removing from MediaSource
|
|
479
|
+
// and leave `tracks` (required SourceBuffers configuration)
|
|
480
|
+
this.sourceBuffers.forEach(([type]) => {
|
|
481
|
+
if (type) {
|
|
482
|
+
this.removeBuffer(type);
|
|
483
|
+
}
|
|
484
|
+
});
|
|
485
|
+
this.resetQueue();
|
|
486
|
+
} else {
|
|
487
|
+
if (this.mediaSourceOpenOrEnded) {
|
|
488
|
+
const open = mediaSource.readyState === 'open';
|
|
489
|
+
try {
|
|
490
|
+
const sourceBuffers = mediaSource.sourceBuffers;
|
|
491
|
+
for (let i = sourceBuffers.length; i--; ) {
|
|
492
|
+
if (open) {
|
|
493
|
+
sourceBuffers[i].abort();
|
|
494
|
+
}
|
|
495
|
+
mediaSource.removeSourceBuffer(sourceBuffers[i]);
|
|
496
|
+
}
|
|
497
|
+
if (open) {
|
|
498
|
+
// endOfStream could trigger exception if any sourcebuffer is in updating state
|
|
499
|
+
// we don't really care about checking sourcebuffer state here,
|
|
500
|
+
// as we are anyway detaching the MediaSource
|
|
501
|
+
// let's just avoid this exception to propagate
|
|
502
|
+
mediaSource.endOfStream();
|
|
503
|
+
}
|
|
504
|
+
} catch (err) {
|
|
505
|
+
this.warn(
|
|
506
|
+
`onMediaDetaching: ${err.message} while calling endOfStream`,
|
|
507
|
+
);
|
|
508
|
+
}
|
|
509
|
+
}
|
|
510
|
+
// Clean up the SourceBuffers by invoking onBufferReset
|
|
511
|
+
if (this.sourceBufferCount) {
|
|
512
|
+
this.onBufferReset();
|
|
513
|
+
}
|
|
514
|
+
}
|
|
515
|
+
removeEventListener(mediaSource, 'sourceopen', this._onMediaSourceOpen);
|
|
516
|
+
removeEventListener(mediaSource, 'sourceended', this._onMediaSourceEnded);
|
|
517
|
+
removeEventListener(mediaSource, 'sourceclose', this._onMediaSourceClose);
|
|
518
|
+
if (this.appendSource) {
|
|
519
|
+
removeEventListener(
|
|
520
|
+
mediaSource,
|
|
521
|
+
'startstreaming',
|
|
522
|
+
this._onStartStreaming,
|
|
523
|
+
);
|
|
524
|
+
removeEventListener(mediaSource, 'endstreaming', this._onEndStreaming);
|
|
525
|
+
}
|
|
526
|
+
|
|
527
|
+
this.mediaSource = null;
|
|
528
|
+
this._objectUrl = null;
|
|
529
|
+
}
|
|
530
|
+
|
|
531
|
+
// Detach properly the MediaSource from the HTMLMediaElement as
|
|
532
|
+
// suggested in https://github.com/w3c/media-source/issues/53.
|
|
533
|
+
if (media) {
|
|
534
|
+
removeEventListener(media, 'emptied', this._onMediaEmptied);
|
|
535
|
+
removeEventListener(media, 'error', this._onMediaError);
|
|
536
|
+
if (!transferringMedia) {
|
|
537
|
+
if (_objectUrl) {
|
|
538
|
+
self.URL.revokeObjectURL(_objectUrl);
|
|
539
|
+
}
|
|
540
|
+
|
|
541
|
+
// clean up video tag src only if it's our own url. some external libraries might
|
|
542
|
+
// hijack the video tag and change its 'src' without destroying the Hls instance first
|
|
543
|
+
if (this.mediaSrc === _objectUrl) {
|
|
544
|
+
media.removeAttribute('src');
|
|
545
|
+
if (this.appendSource) {
|
|
546
|
+
removeSourceChildren(media);
|
|
547
|
+
}
|
|
548
|
+
media.load();
|
|
549
|
+
} else {
|
|
550
|
+
this.warn(
|
|
551
|
+
'media|source.src was changed by a third party - skip cleanup',
|
|
552
|
+
);
|
|
553
|
+
}
|
|
554
|
+
}
|
|
555
|
+
this.media = null;
|
|
556
|
+
}
|
|
557
|
+
}
|
|
558
|
+
|
|
559
|
+
private onBufferReset() {
|
|
560
|
+
this.sourceBuffers.forEach(([type]) => {
|
|
561
|
+
if (type) {
|
|
562
|
+
this.resetBuffer(type);
|
|
563
|
+
}
|
|
564
|
+
});
|
|
565
|
+
this.initTracks();
|
|
566
|
+
}
|
|
567
|
+
|
|
568
|
+
private resetBuffer(type: SourceBufferName) {
|
|
569
|
+
const sb = this.tracks[type]?.buffer;
|
|
570
|
+
this.removeBuffer(type);
|
|
571
|
+
if (sb) {
|
|
572
|
+
try {
|
|
573
|
+
if (this.mediaSource?.sourceBuffers.length) {
|
|
574
|
+
this.mediaSource.removeSourceBuffer(sb);
|
|
575
|
+
}
|
|
576
|
+
} catch (err) {
|
|
577
|
+
this.warn(`onBufferReset ${type}`, err);
|
|
578
|
+
}
|
|
579
|
+
}
|
|
580
|
+
delete this.tracks[type];
|
|
581
|
+
}
|
|
582
|
+
|
|
583
|
+
private removeBuffer(type: SourceBufferName) {
|
|
584
|
+
this.removeBufferListeners(type);
|
|
585
|
+
this.sourceBuffers[sourceBufferNameToIndex(type)] = [null, null];
|
|
586
|
+
const track = this.tracks[type];
|
|
587
|
+
if (track) {
|
|
588
|
+
this.clearBufferAppendTimeoutId(track);
|
|
589
|
+
track.buffer = undefined;
|
|
590
|
+
}
|
|
591
|
+
}
|
|
592
|
+
|
|
593
|
+
private resetQueue() {
|
|
594
|
+
if (this.operationQueue) {
|
|
595
|
+
this.operationQueue.destroy();
|
|
596
|
+
}
|
|
597
|
+
this.operationQueue = new BufferOperationQueue(this.tracks);
|
|
598
|
+
}
|
|
599
|
+
|
|
600
|
+
/**
 * Handles BUFFER_CODECS: registers/updates track info for each parsed track
 * and, once all expected codec events have arrived and the MediaSource is
 * open or ended, triggers SourceBuffer creation. In-place codec switches are
 * routed through a queued SourceBuffer.changeType() operation.
 */
private onBufferCodecs(event: Events.BUFFER_CODECS, data: BufferCodecsData) {
  const tracks = this.tracks;
  const trackNames = Object.keys(data);
  this.log(
    `BUFFER_CODECS: "${trackNames}" (current SB count ${this.sourceBufferCount})`,
  );
  // A switch between muxed ('audiovideo') and demuxed ('audio'/'video')
  // SourceBuffer layouts cannot be performed on existing buffers.
  const unmuxedToMuxed =
    ('audiovideo' in data && (tracks.audio || tracks.video)) ||
    (tracks.audiovideo && ('audio' in data || 'video' in data));
  const muxedToUnmuxed =
    !unmuxedToMuxed &&
    this.sourceBufferCount &&
    this.media &&
    trackNames.some((sbName) => !tracks[sbName]);
  if (unmuxedToMuxed || muxedToUnmuxed) {
    this.warn(
      `Unsupported transition between "${Object.keys(tracks)}" and "${trackNames}" SourceBuffers`,
    );
    // Do not add incompatible track ('audiovideo' <-> 'video'/'audio').
    // Allow following onBufferAppending handle to trigger BUFFER_APPEND_ERROR.
    // This will either be resolved by level switch or could be handled with recoverMediaError().
    return;
  }
  trackNames.forEach((trackName: SourceBufferName) => {
    const parsedTrack = data[trackName] as ParsedTrack;
    const { id, codec, levelCodec, container, metadata, supplemental } =
      parsedTrack;
    let track = tracks[trackName];
    // Prefer codec info from a transferred (re-attached) SourceBuffer when present.
    const transferredTrack = this.transferData?.tracks?.[trackName];
    const sbTrack = transferredTrack?.buffer ? transferredTrack : track;
    const sbCodec = sbTrack?.pendingCodec || sbTrack?.codec;
    const trackLevelCodec = sbTrack?.levelCodec;
    if (!track) {
      // First codec event for this track: register it. The actual
      // SourceBuffer is created later (buffer stays undefined for now).
      track = tracks[trackName] = {
        buffer: undefined,
        listeners: [],
        codec,
        supplemental,
        container,
        levelCodec,
        metadata,
        id,
      };
    }
    // check if SourceBuffer codec needs to change
    const currentCodecFull = pickMostCompleteCodecName(
      sbCodec,
      trackLevelCodec,
    );
    // Strip the codec profile (VIDEO_CODEC_PROFILE_REPLACE) so the comparison
    // ignores profile-only differences.
    const currentCodec = currentCodecFull?.replace(
      VIDEO_CODEC_PROFILE_REPLACE,
      '$1',
    );
    let trackCodec = pickMostCompleteCodecName(codec, levelCodec);
    const nextCodec = trackCodec?.replace(VIDEO_CODEC_PROFILE_REPLACE, '$1');
    if (trackCodec && currentCodecFull && currentCodec !== nextCodec) {
      // slice(0, 5) matches both 'audio' and 'audiovideo' track names.
      if (trackName.slice(0, 5) === 'audio') {
        trackCodec = getCodecCompatibleName(trackCodec, this.appendSource);
      }
      this.log(`switching codec ${sbCodec} to ${trackCodec}`);
      if (trackCodec !== (track.pendingCodec || track.codec)) {
        track.pendingCodec = trackCodec;
      }
      track.container = container;
      this.appendChangeType(trackName, container, trackCodec);
    }
  });

  if (this.tracksReady || this.sourceBufferCount) {
    data.tracks = this.sourceBufferTracks;
  }

  // if sourcebuffers already created, do nothing ...
  if (this.sourceBufferCount) {
    return;
  }

  if (
    this.bufferCodecEventsTotal > 1 &&
    !this.tracks.video &&
    !data.video &&
    data.audio?.id === 'main'
  ) {
    // MVP is missing CODECS and only audio was found in main segment (#7524)
    this.log(`Main audio-only`);
    this.bufferCodecEventsTotal = 1;
  }

  if (this.mediaSourceOpenOrEnded) {
    this.checkPendingTracks();
  }
}
|
|
692
|
+
|
|
693
|
+
public get sourceBufferTracks(): BaseTrackSet {
  // Snapshot of per-track metadata (id/container/codec/levelCodec) without
  // exposing the SourceBuffer references themselves.
  const baseTracks: BaseTrackSet = {};
  for (const type of Object.keys(this.tracks)) {
    const track = this.tracks[type] as SourceBufferTrack;
    baseTracks[type] = {
      id: track.id,
      container: track.container,
      codec: track.codec,
      levelCodec: track.levelCodec,
    };
  }
  return baseTracks;
}
|
|
705
|
+
|
|
706
|
+
/**
 * Queues a SourceBuffer.changeType() operation so the buffer switches to a
 * new container/codec MIME type without being recreated.
 */
private appendChangeType(
  type: SourceBufferName,
  container: string,
  codec: string,
) {
  const mimeType = `${container};codecs=${codec}`;
  const operation: BufferOperation = {
    label: `change-type=${mimeType}`,
    execute: () => {
      const track = this.tracks[type];
      if (track) {
        const sb = track.buffer;
        // changeType is not implemented by all browsers; skip silently if absent.
        if (sb?.changeType) {
          this.log(`changing ${type} sourceBuffer type to ${mimeType}`);
          sb.changeType(mimeType);
          track.codec = codec;
          track.container = container;
        }
      }
      // Advance the queue whether or not changeType was available.
      this.shiftAndExecuteNext(type);
    },
    onStart: () => {},
    onComplete: () => {},
    onError: (error: Error) => {
      this.warn(`Failed to change ${type} SourceBuffer type`, error);
    },
  };
  // Third argument indicates the track may still be pending (no buffer yet).
  this.append(operation, type, this.isPending(this.tracks[type]));
}
|
|
735
|
+
|
|
736
|
+
/**
 * Inserts a placeholder operation on the audio queue that holds back audio
 * appends until video has appended/buffered past the audio start position,
 * preventing audio from running ahead of video. Skipped when the position
 * falls inside a known gap.
 */
private blockAudio(partOrFrag: MediaFragment | Part) {
  const pStart = partOrFrag.start;
  // Probe slightly (5% of the duration) into the part/fragment.
  const pTime = pStart + partOrFrag.duration * 0.05;
  const atGap =
    this.fragmentTracker.getAppendedFrag(pStart, PlaylistLevelType.MAIN)
      ?.gap === true;
  if (atGap) {
    return;
  }
  const op: BufferOperation = {
    label: 'block-audio',
    execute: () => {
      const videoTrack = this.tracks.video;
      // Release the block once video has appended past the probe time, the
      // probe time is already buffered, or it lies inside a known gap.
      if (
        this.lastVideoAppendEnd > pTime ||
        (videoTrack?.buffer &&
          BufferHelper.isBuffered(videoTrack.buffer, pTime)) ||
        this.fragmentTracker.getAppendedFrag(pTime, PlaylistLevelType.MAIN)
          ?.gap === true
      ) {
        this.blockedAudioAppend = null;
        this.shiftAndExecuteNext('audio');
      }
    },
    onStart: () => {},
    onComplete: () => {},
    onError: (error) => {
      this.warn('Error executing block-audio operation', error);
    },
  };
  // Remember the op so unblockAudio() can remove it from the queue later.
  this.blockedAudioAppend = { op, frag: partOrFrag };
  this.append(op, 'audio', true);
}
|
|
769
|
+
|
|
770
|
+
private unblockAudio() {
  // Release a pending 'block-audio' placeholder so queued audio appends resume.
  const blocked = this.blockedAudioAppend;
  const queue = this.operationQueue;
  if (blocked && queue) {
    this.blockedAudioAppend = null;
    queue.unblockAudio(blocked.op);
  }
}
|
|
777
|
+
|
|
778
|
+
/**
 * Handles BUFFER_APPENDING: records buffering stats, manages audio/video
 * interleaving (blocking audio that would run ahead of video), and queues the
 * actual appendBuffer operation with timestampOffset handling and detailed
 * error classification (quota exceeded, invalid state, removed tracks).
 */
private onBufferAppending(
  event: Events.BUFFER_APPENDING,
  eventData: BufferAppendingData,
) {
  const { tracks } = this;
  const { data, type, parent, frag, part, chunkMeta, offset } = eventData;
  const chunkStats = chunkMeta.buffering[type];
  const { sn, cc } = frag;
  const bufferAppendingStart = self.performance.now();
  chunkStats.start = bufferAppendingStart;
  const fragBuffering = frag.stats.buffering;
  const partBuffering = part ? part.stats.buffering : null;
  // Only stamp the first chunk of a fragment/part (start === 0 means unset).
  if (fragBuffering.start === 0) {
    fragBuffering.start = bufferAppendingStart;
  }
  if (partBuffering?.start === 0) {
    partBuffering.start = bufferAppendingStart;
  }

  // TODO: Only update timestampOffset when audio/mpeg fragment or part is not contiguous with previously appended
  // Adjusting `SourceBuffer.timestampOffset` (desired point in the timeline where the next frames should be appended)
  // in Chrome browser when we detect MPEG audio container and time delta between level PTS and `SourceBuffer.timestampOffset`
  // is greater than 100ms (this is enough to handle seek for VOD or level change for LIVE videos).
  // More info here: https://github.com/video-dev/hls.js/issues/332#issuecomment-257986486
  const audioTrack = tracks.audio;
  let checkTimestampOffset = false;
  if (type === 'audio' && audioTrack?.container === 'audio/mpeg') {
    checkTimestampOffset =
      !this.lastMpegAudioChunk ||
      chunkMeta.id === 1 ||
      this.lastMpegAudioChunk.sn !== chunkMeta.sn;
    this.lastMpegAudioChunk = chunkMeta;
  }

  // Block audio append until overlapping video append
  const videoTrack = tracks.video;
  const videoSb = videoTrack?.buffer;
  if (videoSb && sn !== 'initSegment' && offset !== undefined) {
    const partOrFrag = part || (frag as MediaFragment);
    const blockedAudioAppend = this.blockedAudioAppend;
    if (
      type === 'audio' &&
      parent !== 'main' &&
      !this.blockedAudioAppend &&
      !(videoTrack.ending || videoTrack.ended)
    ) {
      const pStart = partOrFrag.start;
      const pTime = pStart + partOrFrag.duration * 0.05;
      const vbuffered = videoSb.buffered;
      const vappending = this.currentOp('video');
      if (!vbuffered.length && !vappending) {
        // wait for video before appending audio
        this.blockAudio(partOrFrag);
      } else if (
        !vappending &&
        !BufferHelper.isBuffered(videoSb, pTime) &&
        this.lastVideoAppendEnd < pTime
      ) {
        // audio is ahead of video
        this.blockAudio(partOrFrag);
      }
    } else if (type === 'video') {
      const videoAppendEnd = partOrFrag.end;
      if (blockedAudioAppend) {
        // Video has caught up (or regressed) relative to the blocked audio:
        // let the pending audio append proceed.
        const audioStart = blockedAudioAppend.frag.start;
        if (
          videoAppendEnd > audioStart ||
          videoAppendEnd < this.lastVideoAppendEnd ||
          BufferHelper.isBuffered(videoSb, audioStart)
        ) {
          this.unblockAudio();
        }
      }
      this.lastVideoAppendEnd = videoAppendEnd;
    }
  }

  const fragStart = (part || frag).start;
  const operation: BufferOperation = {
    label: `append-${type}`,
    execute: () => {
      chunkStats.executeStart = self.performance.now();

      const sb = this.tracks[type]?.buffer;
      if (sb) {
        if (checkTimestampOffset) {
          // MPEG audio: coarse 100ms tolerance (see comment above).
          this.updateTimestampOffset(sb, fragStart, 0.1, type, sn, cc);
        } else if (offset !== undefined && Number.isFinite(offset)) {
          this.updateTimestampOffset(sb, offset, 0.000001, type, sn, cc);
        }
      }
      this.appendExecutor(data, type);
    },
    onStart: () => {
      // logger.debug(`[buffer-controller]: ${type} SourceBuffer updatestart`);
    },
    onComplete: () => {
      this.clearBufferAppendTimeoutId(this.tracks[type]);
      // logger.debug(`[buffer-controller]: ${type} SourceBuffer updateend`);
      const end = self.performance.now();
      chunkStats.executeEnd = chunkStats.end = end;
      if (fragBuffering.first === 0) {
        fragBuffering.first = end;
      }
      if (partBuffering?.first === 0) {
        partBuffering.first = end;
      }

      // Collect the current buffered ranges of every active SourceBuffer
      // for the BUFFER_APPENDED payload.
      const timeRanges = {};
      this.sourceBuffers.forEach(([type, sb]) => {
        if (type) {
          timeRanges[type] = BufferHelper.getBuffered(sb);
        }
      });
      const { appendErrors } = this;
      const appendErrorType = this.appendError?.sourceBufferName;
      if (isMediaFragment(frag)) {
        // Only clear error on successful media fragment append. Init segments may complete without error for unsupported media.
        appendErrors[type] = 0;
        if (type === appendErrorType) {
          this.appendError = undefined;
        }
        if (type === 'audio' || type === 'video') {
          appendErrors.audiovideo = 0;
          if (appendErrorType === 'audiovideo') {
            this.appendError = undefined;
          }
        } else {
          appendErrors.audio = 0;
          appendErrors.video = 0;
          if (appendErrorType !== 'audiovideo') {
            this.appendError = undefined;
          }
        }
      }
      this.hls.trigger(Events.BUFFER_APPENDED, {
        type,
        frag,
        part,
        chunkMeta,
        parent,
        timeRanges,
      });
    },
    onError: (error: Error) => {
      this.clearBufferAppendTimeoutId(this.tracks[type]);
      // in case any error occured while appending, put back segment in segments table
      const event: ErrorData = {
        type: ErrorTypes.MEDIA_ERROR,
        parent,
        details: ErrorDetails.BUFFER_APPEND_ERROR,
        sourceBufferName: type,
        frag,
        part,
        chunkMeta,
        error,
        err: error,
        fatal: false,
      };
      const mediaError = this.media?.error;
      if (
        (error as DOMException).code === DOMException.QUOTA_EXCEEDED_ERR ||
        error.name == 'QuotaExceededError' ||
        `quota` in error
      ) {
        // QuotaExceededError: http://www.w3.org/TR/html5/infrastructure.html#quotaexceedederror
        // let's stop appending any segments, and report BUFFER_FULL_ERROR error
        event.details = ErrorDetails.BUFFER_FULL_ERROR;
      } else if (
        (error as DOMException).code === DOMException.INVALID_STATE_ERR &&
        this.mediaSourceOpenOrEnded &&
        !mediaError
      ) {
        // Allow retry for "Failed to execute 'appendBuffer' on 'SourceBuffer': This SourceBuffer is still processing" errors
        event.errorAction = createDoNothingErrorAction(true);
      } else if (
        error.name === TRACK_REMOVED_ERROR_NAME &&
        this.sourceBufferCount === 0
      ) {
        // Do nothing if sourceBuffers were removed (media is detached and append was not aborted)
        event.errorAction = createDoNothingErrorAction(true);
      } else {
        const appendErrorCount = ++this.appendErrors[type];
        /* with UHD content, we could get loop of quota exceeded error until
          browser is able to evict some data from sourcebuffer. Retrying can help recover.
        */
        this.warn(
          `Failed ${appendErrorCount}/${this.hls.config.appendErrorMaxRetry} times to append segment in "${type}" sourceBuffer (${mediaError ? mediaError : 'no media error'})`,
        );
        if (
          appendErrorCount >= this.hls.config.appendErrorMaxRetry ||
          !!mediaError
        ) {
          event.fatal = true;
        }
        const readyState = this.mediaSource?.readyState;
        if (readyState === 'ended' || readyState === 'closed') {
          // "ended" readyState on cold start https://bugs.webkit.org/show_bug.cgi?id=305712
          this.warn(
            `MediaSource readyState "${readyState}" during SourceBuffer error${event.fatal ? '' : ' - triggering recovery'}`,
          );
          event.details = ErrorDetails.MEDIA_SOURCE_REQUIRES_RESET;
        }
      }
      this.appendError = event;
      this.hls.trigger(Events.ERROR, event);
    },
  };
  this.log(
    `queuing "${type}" append sn: ${sn}${part ? ' p: ' + part.index : ''} of ${
      parent === PlaylistLevelType.MAIN ? 'level' : 'track'
    } ${frag.level} cc: ${cc} offset: ${offset} bytes: ${data.byteLength}`,
  );
  this.append(operation, type, this.isPending(this.tracks[type]));
}
|
|
993
|
+
|
|
994
|
+
private getFlushOp(
  type: SourceBufferName,
  start: number,
  end: number,
): BufferOperation {
  // Build a queued 'remove' operation evicting [start, end) from this buffer.
  this.log(`queuing "${type}" remove ${start}-${end}`);
  const flushOperation: BufferOperation = {
    label: 'remove',
    execute: () => {
      this.removeExecutor(type, start, end);
    },
    onStart: () => {
      // No-op: removal start requires no bookkeeping.
    },
    onComplete: () => {
      // Notify listeners that the requested range has been evicted.
      this.hls.trigger(Events.BUFFER_FLUSHED, { type });
    },
    onError: (error: Error) => {
      this.warn(
        `Failed to remove ${start}-${end} from "${type}" SourceBuffer`,
        error,
      );
    },
  };
  return flushOperation;
}
|
|
1020
|
+
|
|
1021
|
+
private onBufferFlushing(
  event: Events.BUFFER_FLUSHING,
  data: BufferFlushingData,
) {
  // Queue a remove on the named buffer, or on every active buffer when the
  // event does not specify a type.
  const { type, startOffset, endOffset } = data;
  const targets = type
    ? [type]
    : this.sourceBuffers.map(([sbType]) => sbType);
  for (const target of targets) {
    if (target) {
      this.append(this.getFlushOp(target, startOffset, endOffset), target);
    }
  }
}
|
|
1036
|
+
|
|
1037
|
+
/**
 * Handles FRAG_PARSED: determines which SourceBuffers the fragment/part was
 * appended to and, once those buffers drain their queued operations, fires
 * FRAG_BUFFERED with final buffering stats.
 */
private onFragParsed(event: Events.FRAG_PARSED, data: FragParsedData) {
  const { frag, part } = data;
  const buffersAppendedTo: SourceBufferName[] = [];
  const elementaryStreams = part
    ? part.elementaryStreams
    : frag.elementaryStreams;
  // Muxed content targets the single 'audiovideo' buffer; otherwise each
  // present elementary stream maps to its own buffer.
  if (elementaryStreams[ElementaryStreamTypes.AUDIOVIDEO]) {
    buffersAppendedTo.push('audiovideo');
  } else {
    if (elementaryStreams[ElementaryStreamTypes.AUDIO]) {
      buffersAppendedTo.push('audio');
    }
    if (elementaryStreams[ElementaryStreamTypes.VIDEO]) {
      buffersAppendedTo.push('video');
    }
  }

  // Invoked once all appends for this fragment/part have completed.
  const onUnblocked = () => {
    const now = self.performance.now();
    frag.stats.buffering.end = now;
    if (part) {
      part.stats.buffering.end = now;
    }
    const stats = part ? part.stats : frag.stats;
    this.hls.trigger(Events.FRAG_BUFFERED, {
      frag,
      part,
      stats,
      id: frag.type,
    });
  };

  if (buffersAppendedTo.length === 0) {
    this.warn(
      `Fragments must have at least one ElementaryStreamType set. type: ${frag.type} level: ${frag.level} sn: ${frag.sn}`,
    );
  }

  // On failure, keep the queues moving so playback is not stalled.
  this.blockBuffers(onUnblocked, buffersAppendedTo).catch((error) => {
    this.warn(`Fragment buffered callback ${error}`);
    this.stepOperationQueue(this.sourceBufferTypes);
  });
}
|
|
1080
|
+
|
|
1081
|
+
private onFragChanged(event: Events.FRAG_CHANGED, data: FragChangedData) {
  // Playback advanced to a new fragment: enforce back/front buffer limits.
  this.trimBuffers();
}
|
|
1084
|
+
|
|
1085
|
+
public get bufferedToEnd(): boolean {
  // True only when at least one SourceBuffer exists and every active buffer
  // has fully reached EOS (ended set and no longer transitioning).
  if (!(this.sourceBufferCount > 0)) {
    return false;
  }
  return this.sourceBuffers.every(([type]) => {
    if (!type) {
      return true;
    }
    const track = this.tracks[type];
    if (!track) {
      return true;
    }
    // De Morgan of the original `!(!ended || ending)` check.
    return track.ended && !track.ending;
  });
}
|
|
1099
|
+
|
|
1100
|
+
// on BUFFER_EOS mark matching sourcebuffer(s) as "ending" and "ended" and queue endOfStream after remaining operations(s)
// an undefined data.type will mark all buffers as EOS.
private onBufferEos(event: Events.BUFFER_EOS, data: BufferEOSData) {
  this.sourceBuffers.forEach(([type]) => {
    if (type) {
      const track = this.tracks[type] as SourceBufferTrack;
      if (!data.type || data.type === type) {
        // 'ending' is transitional and is cleared again in tracksEnded().
        track.ending = true;
        if (!track.ended) {
          track.ended = true;
          this.log(`${type} buffer reached EOS`);
        }
      }
    }
  });

  // Integrators can opt out of calling MediaSource.endOfStream() via overrides.
  const allowEndOfStream = this.overrides?.endOfStream !== false;
  const allTracksEnding =
    this.sourceBufferCount > 0 &&
    !this.sourceBuffers.some(([type]) => type && !this.tracks[type]?.ended);

  if (allTracksEnding) {
    if (allowEndOfStream) {
      this.log(`Queueing EOS`);
      // Defer endOfStream until the MediaSource is open and all queued
      // operations have drained.
      this.blockUntilOpen(() => {
        this.tracksEnded();
        const { mediaSource } = this;
        if (mediaSource?.readyState !== 'open') {
          if (mediaSource) {
            this.log(
              `Could not call mediaSource.endOfStream(). mediaSource.readyState: ${mediaSource.readyState}`,
            );
          }
          return;
        }
        this.log(`Calling mediaSource.endOfStream()`);
        // Allow this to throw and be caught by the enqueueing function
        mediaSource.endOfStream();

        this.hls.trigger(Events.BUFFERED_TO_END, undefined);
      });
    } else {
      this.tracksEnded();
      this.hls.trigger(Events.BUFFERED_TO_END, undefined);
    }
  } else if (data.type === 'video') {
    // Make sure pending audio appends are unblocked when video reaches end
    this.unblockAudio();
  }
}
|
|
1150
|
+
|
|
1151
|
+
private tracksEnded() {
  // Clear the transitional 'ending' flag once EOS handling has completed.
  for (const [type] of this.sourceBuffers) {
    if (type !== null) {
      const track = this.tracks[type];
      if (track) {
        track.ending = false;
      }
    }
  }
}
|
|
1161
|
+
|
|
1162
|
+
private onLevelUpdated(
  event: Events.LEVEL_UPDATED,
  { details }: LevelUpdatedData,
) {
  // Ignore empty playlists; otherwise cache the details and resync the
  // MediaSource duration / seekable range.
  if (details.fragments.length) {
    this.details = details;
    this.updateDuration();
  }
}
|
|
1172
|
+
|
|
1173
|
+
private updateDuration() {
  // Defer until the MediaSource is open, then push any required
  // duration/seekable-range update.
  this.blockUntilOpen(() => {
    const update = this.getDurationAndRange();
    if (update) {
      this.updateMediaSource(update);
    }
  });
}
|
|
1182
|
+
|
|
1183
|
+
private onError(event: Events.ERROR, data: ErrorData) {
  // When an append error results in a switch to a different level, the
  // accumulated retry counters no longer apply — reset them.
  if (data.details !== ErrorDetails.BUFFER_APPEND_ERROR || !data.frag) {
    return;
  }
  const nextAutoLevel = data.errorAction?.nextAutoLevel;
  if (Number.isFinite(nextAutoLevel) && nextAutoLevel !== data.frag.level) {
    this.resetAppendErrors();
  }
}
|
|
1191
|
+
|
|
1192
|
+
private resetAppendErrors() {
  // Zero the per-buffer retry counters and forget the last append error.
  this.appendErrors = { audio: 0, video: 0, audiovideo: 0 };
  this.appendError = undefined;
}
|
|
1200
|
+
|
|
1201
|
+
/**
 * Enforces back- and front-buffer limits around the current playback
 * position, delegating to flushBackBuffer/flushFrontBuffer which trigger
 * BUFFER_FLUSHING for ranges outside the kept window. Called on FRAG_CHANGED.
 */
private trimBuffers() {
  const { hls, details, media } = this;
  if (!media || details === null) {
    return;
  }

  if (!this.sourceBufferCount) {
    return;
  }

  const config: Readonly<HlsConfig> = hls.config;
  const currentTime = media.currentTime;
  const targetDuration = details.levelTargetDuration;

  // Support for deprecated liveBackBufferLength
  const backBufferLength =
    details.live && config.liveBackBufferLength !== null
      ? config.liveBackBufferLength
      : config.backBufferLength;

  if (Number.isFinite(backBufferLength) && backBufferLength >= 0) {
    // Keep at least one target duration behind the playhead; quantizing the
    // playhead to targetDuration limits how often the flush position moves.
    const maxBackBufferLength = Math.max(backBufferLength, targetDuration);
    const targetBackBufferPosition =
      Math.floor(currentTime / targetDuration) * targetDuration -
      maxBackBufferLength;

    this.flushBackBuffer(
      currentTime,
      targetDuration,
      targetBackBufferPosition,
    );
  }

  const frontBufferFlushThreshold = config.frontBufferFlushThreshold;
  if (
    Number.isFinite(frontBufferFlushThreshold) &&
    frontBufferFlushThreshold > 0
  ) {
    // Tolerate at least maxBufferLength ahead of the playhead before
    // flushing detached forward ranges.
    const frontBufferLength = Math.max(
      config.maxBufferLength,
      frontBufferFlushThreshold,
    );

    const maxFrontBufferLength = Math.max(frontBufferLength, targetDuration);
    const targetFrontBufferPosition =
      Math.floor(currentTime / targetDuration) * targetDuration +
      maxFrontBufferLength;

    this.flushFrontBuffer(
      currentTime,
      targetDuration,
      targetFrontBufferPosition,
    );
  }
}
|
|
1256
|
+
|
|
1257
|
+
/**
 * Flushes buffered media behind targetBackBufferPosition on each active
 * SourceBuffer, emitting BACK_BUFFER_REACHED (and, for live streams, the
 * deprecated LIVE_BACK_BUFFER_REACHED) before requesting the flush.
 */
private flushBackBuffer(
  currentTime: number,
  targetDuration: number,
  targetBackBufferPosition: number,
) {
  this.sourceBuffers.forEach(([type, sb]) => {
    if (sb) {
      const buffered = BufferHelper.getBuffered(sb);
      // when target buffer start exceeds actual buffer start
      if (
        buffered.length > 0 &&
        targetBackBufferPosition > buffered.start(0)
      ) {
        this.hls.trigger(Events.BACK_BUFFER_REACHED, {
          bufferEnd: targetBackBufferPosition,
        });

        // Support for deprecated event:
        const track = this.tracks[type];
        if (this.details?.live) {
          this.hls.trigger(Events.LIVE_BACK_BUFFER_REACHED, {
            bufferEnd: targetBackBufferPosition,
          });
        } else if (track?.ended) {
          // Non-live buffer already at EOS: skip the flush for this buffer
          // only (returns from the forEach callback, not the method).
          this.log(
            `Cannot flush ${type} back buffer while SourceBuffer is in ended state`,
          );
          return;
        }

        this.hls.trigger(Events.BUFFER_FLUSHING, {
          startOffset: 0,
          endOffset: targetBackBufferPosition,
          type,
        });
      }
    }
  });
}
|
|
1296
|
+
|
|
1297
|
+
private flushFrontBuffer(
  currentTime: number,
  targetDuration: number,
  targetFrontBufferPosition: number,
) {
  // Evict a detached buffered range ahead of playback once it falls outside
  // the tolerated front-buffer window.
  for (const [type, sb] of this.sourceBuffers) {
    if (!sb) {
      continue;
    }
    const buffered = BufferHelper.getBuffered(sb);
    const rangeCount = buffered.length;
    // Fewer than two ranges means the buffer is empty or contiguous.
    if (rangeCount < 2) {
      continue;
    }
    const lastRangeStart = buffered.start(rangeCount - 1);
    const lastRangeEnd = buffered.end(rangeCount - 1);
    // Keep the range while it is within tolerance, or while it contains the
    // current playback position.
    const withinTolerance = targetFrontBufferPosition > lastRangeStart;
    const containsPlayhead =
      currentTime >= lastRangeStart && currentTime <= lastRangeEnd;
    if (withinTolerance || containsPlayhead) {
      continue;
    }

    this.hls.trigger(Events.BUFFER_FLUSHING, {
      startOffset: lastRangeStart,
      endOffset: Infinity,
      type,
    });
  }
}
|
|
1328
|
+
|
|
1329
|
+
/**
 * Update Media Source duration to current level duration or override to Infinity if configuration parameter
 * 'liveDurationInfinity` is set to `true`
 * More details: https://github.com/video-dev/hls.js/issues/355
 *
 * @returns the duration (and optional live seekable range) to apply, or
 * null when no MediaSource update is required or possible.
 */
private getDurationAndRange(): {
  duration: number;
  start?: number;
  end?: number;
} | null {
  const { details, mediaSource } = this;
  // Duration can only be set while the MediaSource is open.
  if (!details || !this.media || mediaSource?.readyState !== 'open') {
    return null;
  }
  const playlistEnd = details.edge;
  if (details.live && this.hls.config.liveDurationInfinity) {
    const len = details.fragments.length;
    // Infinite duration plus an explicit seekable window where supported.
    if (len && !!(mediaSource as any).setLiveSeekableRange) {
      const start = Math.max(0, details.fragmentStart);
      const end = Math.max(start, playlistEnd);

      return { duration: Infinity, start, end };
    }
    return { duration: Infinity };
  }
  const overrideDuration = this.overrides?.duration;
  if (overrideDuration) {
    if (!Number.isFinite(overrideDuration)) {
      return null;
    }
    return { duration: overrideDuration };
  }
  const mediaDuration = this.media.duration;
  const msDuration = Number.isFinite(mediaSource.duration)
    ? mediaSource.duration
    : 0;
  // Update only when the playlist edge exceeds both the media element and
  // MediaSource durations (or the media duration is not yet a finite value).
  if (
    (playlistEnd > msDuration && playlistEnd > mediaDuration) ||
    !Number.isFinite(mediaDuration)
  ) {
    return { duration: playlistEnd };
  }
  return null;
}
|
|
1373
|
+
|
|
1374
|
+
private updateMediaSource({
  duration,
  start,
  end,
}: {
  duration: number;
  start?: number;
  end?: number;
}) {
  // Apply a new duration (and optional live seekable range) to the
  // MediaSource. Requires attached media and an open MediaSource.
  const { mediaSource } = this;
  if (!mediaSource || !this.media || mediaSource.readyState !== 'open') {
    return;
  }
  if (mediaSource.duration !== duration) {
    // Infinity is applied silently; only finite durations are logged.
    if (Number.isFinite(duration)) {
      this.log(`Updating MediaSource duration to ${duration.toFixed(3)}`);
    }
    mediaSource.duration = duration;
  }
  if (start !== undefined && end !== undefined) {
    this.log(
      `MediaSource duration is set to ${mediaSource.duration}. Setting seekable range to ${start}-${end}.`,
    );
    mediaSource.setLiveSeekableRange(start, end);
  }
}
|
|
1400
|
+
|
|
1401
|
+
private get tracksReady(): boolean {
  // Ready once every expected BUFFER_CODECS event has arrived, or as soon as
  // a muxed ('audiovideo') track is pending.
  const pending = this.pendingTrackCount;
  if (!(pending > 0)) {
    return false;
  }
  return (
    pending >= this.bufferCodecEventsTotal ||
    this.isPending(this.tracks.audiovideo)
  );
}
|
|
1409
|
+
|
|
1410
|
+
private checkPendingTracks() {
  // Create all SourceBuffers in one pass once every expected BUFFER_CODECS
  // event has been received. Deferred creation matters because the MSE spec
  // allows implementations to throw QuotaExceededError when adding new
  // SourceBuffers after data has been appended to existing ones.
  const { bufferCodecEventsTotal, pendingTrackCount, tracks } = this;
  this.log(
    `checkPendingTracks (pending: ${pendingTrackCount} codec events expected: ${bufferCodecEventsTotal}) ${stringify(tracks)}`,
  );
  if (!this.tracksReady) {
    return;
  }
  const transferredTracks = this.transferData?.tracks;
  if (transferredTracks && Object.keys(transferredTracks).length) {
    // Reuse SourceBuffers handed over from a previous media attachment.
    this.attachTransferred();
  } else {
    // No transferred buffers: create them now.
    this.createSourceBuffers();
  }
}
|
|
1429
|
+
|
|
1430
|
+
private bufferCreated() {
  // Announce the created SourceBuffers via BUFFER_CREATED, or raise a fatal
  // error when no SourceBuffer could be created for the media codec(s).
  if (!this.sourceBufferCount) {
    const error = new Error(
      'could not create source buffer for media codec(s)',
    );
    this.hls.trigger(Events.ERROR, {
      type: ErrorTypes.MEDIA_ERROR,
      details: ErrorDetails.BUFFER_INCOMPATIBLE_CODECS_ERROR,
      fatal: true,
      error,
      reason: error.message,
    });
    return;
  }
  // Collect track info keyed by SourceBuffer name for the event payload.
  const tracks: BufferCreatedTrackSet = {};
  this.sourceBuffers.forEach(([type, buffer]) => {
    if (!type) {
      return;
    }
    const track = this.tracks[type] as SourceBufferTrack;
    tracks[type] = {
      buffer,
      container: track.container,
      codec: track.codec,
      supplemental: track.supplemental,
      levelCodec: track.levelCodec,
      id: track.id,
      metadata: track.metadata,
    };
  });
  this.hls.trigger(Events.BUFFER_CREATED, {
    tracks,
  });
  this.log(`SourceBuffers created. Running queue: ${this.operationQueue}`);
  // Kick off any operations already queued against each buffer.
  this.sourceBuffers.forEach(([type]) => {
    this.executeNext(type);
  });
}
|
|
1467
|
+
|
|
1468
|
+
// Creates a SourceBuffer on the MediaSource for every pending track, then
// reports the result via bufferCreated(). Throws if called before a
// MediaSource is attached.
private createSourceBuffers() {
  const { tracks, sourceBuffers, mediaSource } = this;
  if (!mediaSource) {
    throw new Error('createSourceBuffers called when mediaSource was null');
  }

  for (const trackName in tracks) {
    const type = trackName as SourceBufferName;
    const track = tracks[type];
    // Only tracks without a buffer yet ("pending") get a SourceBuffer here.
    if (this.isPending(track)) {
      const codec = this.getTrackCodec(track, type);
      const mimeType = `${track.container};codecs=${codec}`;
      // Persist the resolved codec (may include a supplemental video codec).
      track.codec = codec;
      this.log(
        `creating sourceBuffer(${mimeType})${this.currentOp(type) ? ' Queued' : ''} ${stringify(track)}`,
      );
      try {
        const sb = mediaSource.addSourceBuffer(
          mimeType,
        ) as ExtendedSourceBuffer;
        // Each buffer lives at a fixed tuple index (audio: 1, otherwise 0).
        const sbIndex = sourceBufferNameToIndex(type);
        const sbTuple = [type, sb] as Exclude<
          SourceBuffersTuple[typeof sbIndex],
          EmptyTuple
        >;
        sourceBuffers[sbIndex] = sbTuple as any;
        track.buffer = sb;
      } catch (error) {
        // addSourceBuffer failed (unsupported mimeType or quota exceeded).
        this.error(
          `error while trying to add sourceBuffer: ${error.message}`,
        );
        // remove init segment from queue and delete track info
        this.shiftAndExecuteNext(type);
        this.operationQueue?.removeBlockers();
        delete this.tracks[type];
        this.hls.trigger(Events.ERROR, {
          type: ErrorTypes.MEDIA_ERROR,
          details: ErrorDetails.BUFFER_ADD_CODEC_ERROR,
          fatal: false,
          error,
          sourceBufferName: type,
          mimeType: mimeType,
          parent: track.id as PlaylistLevelType,
        });
        // Abort the whole pass; error-controller decides how to recover.
        return;
      }
      // Register track metadata and attach SourceBuffer event listeners.
      this.trackSourceBuffer(type, track);
    }
  }
  // Report success (BUFFER_CREATED) or incompatible-codecs failure.
  this.bufferCreated();
}
|
|
1519
|
+
|
|
1520
|
+
private clearBufferAppendTimeoutId(track?: SourceBufferTrack): void {
  // Cancel a pending append-timeout watchdog for the given track, if any.
  if (track) {
    self.clearTimeout(track.bufferAppendTimeoutId);
    track.bufferAppendTimeoutId = undefined;
  }
}
|
|
1528
|
+
|
|
1529
|
+
private getTrackCodec(track: BaseTrack, trackName: SourceBufferName): string {
  // Use supplemental video codec when supported when adding SourceBuffer (#5558)
  const supplemental = track.supplemental;
  let candidate = track.codec;
  const isVideoBuffer = trackName === 'video' || trackName === 'audiovideo';
  if (
    supplemental &&
    isVideoBuffer &&
    areCodecsMediaSourceSupported(supplemental, 'video')
  ) {
    candidate = replaceVideoCodec(candidate, supplemental);
  }
  // Prefer whichever of track/level codec strings carries more detail.
  const codec = pickMostCompleteCodecName(candidate, track.levelCodec);
  if (!codec) {
    return '';
  }
  if (trackName.startsWith('audio')) {
    // Audio codec strings may need normalization for MSE compatibility.
    return getCodecCompatibleName(codec, this.appendSource);
  }
  return codec;
}
|
|
1549
|
+
|
|
1550
|
+
private trackSourceBuffer(type: SourceBufferName, track: SourceBufferTrack) {
  // Record a created SourceBuffer in this.tracks and (re)attach listeners.
  const sb = track.buffer;
  if (!sb) {
    return;
  }
  this.tracks[type] = {
    buffer: sb,
    codec: this.getTrackCodec(track, type),
    container: track.container,
    levelCodec: track.levelCodec,
    supplemental: track.supplemental,
    metadata: track.metadata,
    id: track.id,
    listeners: [],
  };
  this.removeBufferListeners(type);
  this.addBufferListener(type, 'updatestart', this.onSBUpdateStart);
  this.addBufferListener(type, 'updateend', this.onSBUpdateEnd);
  this.addBufferListener(type, 'error', this.onSBUpdateError);
  // ManagedSourceBuffer bufferedchange event
  if (this.appendSource) {
    this.addBufferListener(
      type,
      'bufferedchange',
      (type: SourceBufferName, event: BufferedChangeEvent) => {
        // If media was ejected check for a change. Added ranges are redundant with changes on 'updateend' event.
        const removedRanges = event.removedRanges;
        if (removedRanges?.length) {
          this.hls.trigger(Events.BUFFER_FLUSHED, {
            type: type,
          });
        }
      },
    );
  }
}
|
|
1587
|
+
|
|
1588
|
+
// Keep as arrow functions so that we can directly reference these functions directly as event listeners
|
|
1589
|
+
// Arrow function so it can be passed directly as an event listener.
private _onMediaSourceOpen = (e?: Event) => {
  // Fired when the MediaSource transitions to 'open' (or invoked directly).
  if (e) {
    this.log('Media source opened');
  }
  const mediaEl = this.media;
  const ms = this.mediaSource;
  if (!mediaEl || !ms) {
    return;
  }
  // once received, don't listen anymore to sourceopen event
  removeEventListener(ms, 'sourceopen', this._onMediaSourceOpen);
  removeEventListener(mediaEl, 'emptied', this._onMediaEmptied);
  this.updateDuration();
  this.hls.trigger(Events.MEDIA_ATTACHED, {
    media: mediaEl,
    mediaSource: ms as MediaSource,
  });

  // MEDIA_ATTACHED handlers may have detached media; re-check before creating buffers.
  if (this.mediaSource !== null) {
    this.checkPendingTracks();
  }
};
|
|
1610
|
+
|
|
1611
|
+
private _onMediaSourceClose = () => {
  this.log('Media source closed');
  // Safari/WebKit bug: after bfcache restoration the MediaSource comes back
  // in a 'closed' state and cannot be used. If sourceclose fires while media
  // is still attached, raise a recoverable error so media gets re-attached.
  if (!this.media) {
    return;
  }
  this.warn(
    'MediaSource closed while media attached - triggering recovery',
  );
  this.hls.trigger(
    Events.ERROR,
    Object.assign({ fatal: false }, this.appendError, {
      type: ErrorTypes.MEDIA_ERROR,
      details: ErrorDetails.MEDIA_SOURCE_REQUIRES_RESET,
      error: new Error('MediaSource closed while media is still attached'),
    }),
  );
};
|
|
1630
|
+
|
|
1631
|
+
private _onMediaSourceEnded = () => {
  // Informational only: logged when the MediaSource fires 'sourceended'.
  this.log('Media source ended');
};
|
|
1634
|
+
|
|
1635
|
+
private _onMediaEmptied = () => {
  // 'emptied' during attach: verify nothing external replaced our object URL.
  const currentSrc = this.mediaSrc;
  const objectUrl = this._objectUrl;
  if (currentSrc !== objectUrl) {
    this.error(
      `Media element src was set while attaching MediaSource (${objectUrl} > ${currentSrc})`,
    );
  }
};
|
|
1643
|
+
|
|
1644
|
+
private _onMediaError = () => {
  // Log the media element's own error state; recovery is handled elsewhere.
  const mediaEl = this.media;
  if (mediaEl) {
    this.log(`Media error (code: ${mediaEl.error?.code}): ${mediaEl.error}`);
  }
};
|
|
1650
|
+
|
|
1651
|
+
private get mediaSrc(): string | undefined {
  // Prefer the src of a child <source> element (see addSource), falling back
  // to the media element itself. The cast tolerates media shims that may not
  // implement querySelector.
  const target = (this.media?.querySelector as any)?.('source') || this.media;
  return target?.src;
}
|
|
1655
|
+
|
|
1656
|
+
private onSBUpdateStart(type: SourceBufferName) {
  // Forward the SourceBuffer 'updatestart' event to the current operation.
  this.currentOp(type)?.onStart();
}
|
|
1663
|
+
|
|
1664
|
+
private onSBUpdateEnd(type: SourceBufferName) {
  // A closed MediaSource invalidates its SourceBuffers; reset instead of
  // completing the operation.
  if (this.mediaSource?.readyState === 'closed') {
    this.resetBuffer(type);
    return;
  }
  const op = this.currentOp(type);
  if (op) {
    op.onComplete();
    this.shiftAndExecuteNext(type);
  }
}
|
|
1676
|
+
|
|
1677
|
+
private onSBUpdateError(type: SourceBufferName, event: Event) {
  const readyState = this.mediaSource?.readyState;
  const error = new Error(
    `${type} SourceBuffer error. MediaSource readyState: ${readyState}`,
  );
  this.error(`${error}`, event);
  // according to http://www.w3.org/TR/media-source/#sourcebuffer-append-error
  // SourceBuffer errors are not necessarily fatal; if so, the HTMLMediaElement will fire an error event
  this.hls.trigger(Events.ERROR, {
    type: ErrorTypes.MEDIA_ERROR,
    details: ErrorDetails.BUFFER_APPENDING_ERROR,
    sourceBufferName: type,
    error,
    fatal: false,
  });
  // updateend is always fired after error, so we'll allow that to shift the
  // current operation off of the queue; here we only notify it of the failure.
  this.currentOp(type)?.onError(error);
}
|
|
1698
|
+
|
|
1699
|
+
private updateTimestampOffset(
  sb: ExtendedSourceBuffer,
  timestampOffset: number,
  tolerance: number,
  type: SourceBufferName,
  sn: number | 'initSegment',
  cc: number,
) {
  // Skip writes within tolerance to avoid needless SourceBuffer state churn.
  if (Math.abs(timestampOffset - sb.timestampOffset) < tolerance) {
    return;
  }
  this.log(
    `Updating ${type} SourceBuffer timestampOffset to ${timestampOffset} (sn: ${sn} cc: ${cc})`,
  );
  sb.timestampOffset = timestampOffset;
}
|
|
1715
|
+
|
|
1716
|
+
// This method must result in an updateend event; if remove is not called, onSBUpdateEnd must be called manually
|
|
1717
|
+
private removeExecutor(
|
|
1718
|
+
type: SourceBufferName,
|
|
1719
|
+
startOffset: number,
|
|
1720
|
+
endOffset: number,
|
|
1721
|
+
) {
|
|
1722
|
+
const { media, mediaSource } = this;
|
|
1723
|
+
const track = this.tracks[type];
|
|
1724
|
+
const sb = track?.buffer;
|
|
1725
|
+
if (!media || !mediaSource || !sb) {
|
|
1726
|
+
this.warn(
|
|
1727
|
+
`Attempting to remove from the ${type} SourceBuffer, but it does not exist`,
|
|
1728
|
+
);
|
|
1729
|
+
this.shiftAndExecuteNext(type);
|
|
1730
|
+
return;
|
|
1731
|
+
}
|
|
1732
|
+
const mediaDuration = Number.isFinite(media.duration)
|
|
1733
|
+
? media.duration
|
|
1734
|
+
: Infinity;
|
|
1735
|
+
const msDuration = Number.isFinite(mediaSource.duration)
|
|
1736
|
+
? mediaSource.duration
|
|
1737
|
+
: Infinity;
|
|
1738
|
+
const removeStart = Math.max(0, startOffset);
|
|
1739
|
+
const removeEnd = Math.min(endOffset, mediaDuration, msDuration);
|
|
1740
|
+
if (removeEnd > removeStart && (!track.ending || track.ended)) {
|
|
1741
|
+
track.ended = false;
|
|
1742
|
+
this.log(
|
|
1743
|
+
`Removing [${removeStart},${removeEnd}] from the ${type} SourceBuffer`,
|
|
1744
|
+
);
|
|
1745
|
+
sb.remove(removeStart, removeEnd);
|
|
1746
|
+
} else {
|
|
1747
|
+
// Cycle the queue
|
|
1748
|
+
this.shiftAndExecuteNext(type);
|
|
1749
|
+
}
|
|
1750
|
+
}
|
|
1751
|
+
|
|
1752
|
+
// This method must result in an updateend event; if append is not called, onSBUpdateEnd must be called manually
|
|
1753
|
+
private appendExecutor(
|
|
1754
|
+
data: Uint8Array<ArrayBuffer>,
|
|
1755
|
+
type: SourceBufferName,
|
|
1756
|
+
) {
|
|
1757
|
+
const track = this.tracks[type];
|
|
1758
|
+
const sb = track?.buffer;
|
|
1759
|
+
if (!sb) {
|
|
1760
|
+
throw new HlsJsTrackRemovedError(
|
|
1761
|
+
`Attempting to append to the ${type} SourceBuffer, but it does not exist`,
|
|
1762
|
+
);
|
|
1763
|
+
}
|
|
1764
|
+
track.ending = false;
|
|
1765
|
+
track.ended = false;
|
|
1766
|
+
|
|
1767
|
+
if (this.hls.config.appendTimeout !== Infinity) {
|
|
1768
|
+
const appendTimeoutTime = this.calculateAppendTimeoutTime(sb);
|
|
1769
|
+
|
|
1770
|
+
track.bufferAppendTimeoutId = self.setTimeout(
|
|
1771
|
+
() => this.appendTimeoutHandler(type, sb, appendTimeoutTime),
|
|
1772
|
+
appendTimeoutTime,
|
|
1773
|
+
);
|
|
1774
|
+
}
|
|
1775
|
+
|
|
1776
|
+
sb.appendBuffer(data);
|
|
1777
|
+
}
|
|
1778
|
+
|
|
1779
|
+
private appendTimeoutHandler(
  type: SourceBufferName,
  sb: ExtendedSourceBuffer,
  appendTimeoutTime: number,
) {
  // The append did not complete in time: abort it and fail the operation.
  this.log(
    `Received timeout after ${appendTimeoutTime}ms for append on ${type} source buffer. Aborting and triggering error.`,
  );

  try {
    sb.abort();
  } catch (e) {
    // abort() can throw (e.g. the buffer was removed from the MediaSource).
    this.log(
      `Failed to abort append on ${type} source buffer after timeout.`,
    );
  }

  this.currentOp(type)?.onError(new Error(`${type}-append-timeout`));
}
|
|
1801
|
+
|
|
1802
|
+
private calculateAppendTimeoutTime(sb: ExtendedSourceBuffer): number {
  // Base timeout: two target durations, in milliseconds.
  const targetDuration = this.details
    ? this.details.levelTargetDuration
    : DEFAULT_TARGET_DURATION;
  const baseTimeoutMs = 2 * targetDuration * 1000;

  if (this.media === null) {
    return baseTimeoutMs;
  }

  const activeBufferedRange = BufferHelper.bufferInfo(
    sb,
    this.media.currentTime,
    0,
  );

  if (!activeBufferedRange.len) {
    // No buffered media at the playhead: use the base timeout as-is.
    return baseTimeoutMs;
  }

  // Otherwise allow at least the configured timeout, at least the base, and
  // at least as long as the buffered range itself.
  return Math.max(
    this.hls.config.appendTimeout,
    activeBufferedRange.len * 1000,
    baseTimeoutMs,
  );
}
|
|
1833
|
+
|
|
1834
|
+
private blockUntilOpen(callback: () => void) {
  // Run the callback immediately when all buffers are idle; otherwise queue
  // a blocking operation that runs it once the buffers drain.
  const busy = this.isUpdating() || this.isQueued();
  if (!busy) {
    try {
      callback();
    } catch (error) {
      this.warn(
        `Callback run without blocking ${this.operationQueue} ${error}`,
      );
    }
    return;
  }
  this.blockBuffers(callback).catch((error) => {
    this.warn(`SourceBuffer blocked callback ${error}`);
    this.stepOperationQueue(this.sourceBufferTypes);
  });
}
|
|
1850
|
+
|
|
1851
|
+
private isUpdating(): boolean {
  // True when any live SourceBuffer is mid-append/remove.
  for (const [type, sb] of this.sourceBuffers) {
    if (type && sb.updating) {
      return true;
    }
  }
  return false;
}
|
|
1854
|
+
|
|
1855
|
+
private isQueued(): boolean {
  // True when any live SourceBuffer has an operation at the head of its queue.
  for (const [type] of this.sourceBuffers) {
    if (type && this.currentOp(type)) {
      return true;
    }
  }
  return false;
}
|
|
1858
|
+
|
|
1859
|
+
private isPending(
  track: SourceBufferTrack | undefined,
): track is SourceBufferTrack {
  // A track is "pending" once declared but before its SourceBuffer exists.
  if (!track) {
    return false;
  }
  return !track.buffer;
}
|
|
1864
|
+
|
|
1865
|
+
// Enqueues an operation to each SourceBuffer queue which, upon execution, resolves a promise. When all promises
// resolve, the onUnblocked function is executed. Functions calling this method do not need to unblock the queue
// upon completion, since we already do it here
private blockBuffers(
  onUnblocked: () => void,
  bufferNames: SourceBufferName[] = this.sourceBufferTypes,
): Promise<void> {
  if (!bufferNames.length) {
    this.log('Blocking operation requested, but no SourceBuffers exist');
    // Nothing to wait on: run the callback on a microtask for consistency.
    return Promise.resolve().then(onUnblocked);
  }
  // Capture the queue reference so we can detect a reset while blocked.
  const { operationQueue } = this;

  // logger.debug(`[buffer-controller]: Blocking ${buffers} SourceBuffer`);
  const blockingOperations = bufferNames.map((type) =>
    this.appendBlocker(type),
  );
  const audioBlocked = bufferNames.length > 1 && !!this.blockedAudioAppend;
  if (audioBlocked) {
    // Release any held-back audio append before waiting on the blockers.
    this.unblockAudio();
  }
  return Promise.all(blockingOperations).then((result) => {
    if (operationQueue !== this.operationQueue) {
      // The queue was replaced (e.g. media detached) while blocked; bail out.
      return;
    }
    // logger.debug(`[buffer-controller]: Blocking operation resolved; unblocking ${buffers} SourceBuffer`);
    onUnblocked();
    this.stepOperationQueue(this.sourceBufferTypes);
  });
}
|
|
1895
|
+
|
|
1896
|
+
private stepOperationQueue(bufferNames: SourceBufferName[]) {
  for (const type of bufferNames) {
    const sb = this.tracks[type]?.buffer;
    // Only cycle the queue if the SB is not updating. There's a bug in Chrome which sets the SB updating flag to
    // true when changing the MediaSource duration (https://bugs.chromium.org/p/chromium/issues/detail?id=959359&can=2&q=mediasource%20duration)
    // While this is a workaround, it's probably useful to have around
    if (sb && !sb.updating) {
      this.shiftAndExecuteNext(type);
    }
  }
}
|
|
1908
|
+
|
|
1909
|
+
private append(
  operation: BufferOperation,
  type: SourceBufferName,
  pending?: boolean,
) {
  // Delegate to the operation queue when one exists; no-op otherwise.
  this.operationQueue?.append(operation, type, pending);
}
|
|
1918
|
+
|
|
1919
|
+
private appendBlocker(type: SourceBufferName): Promise<void> | undefined {
  // Returns the queue's blocker promise, or undefined when there is no queue.
  return this.operationQueue?.appendBlocker(type);
}
|
|
1924
|
+
|
|
1925
|
+
private currentOp(type: SourceBufferName): BufferOperation | null {
  // Head of the operation queue for this buffer, or null without a queue.
  return this.operationQueue ? this.operationQueue.current(type) : null;
}
|
|
1931
|
+
|
|
1932
|
+
private executeNext(type: SourceBufferName | null) {
  // Run the next queued operation for this buffer, if both exist.
  if (type) {
    this.operationQueue?.executeNext(type);
  }
}
|
|
1937
|
+
|
|
1938
|
+
private shiftAndExecuteNext(type: SourceBufferName) {
  // Pop the completed operation and start the next one, when a queue exists.
  this.operationQueue?.shiftAndExecuteNext(type);
}
|
|
1943
|
+
|
|
1944
|
+
private get pendingTrackCount(): number {
  // Number of declared tracks still waiting for a SourceBuffer.
  let count = 0;
  for (const type of Object.keys(this.tracks)) {
    if (this.isPending(this.tracks[type as SourceBufferName])) {
      count++;
    }
  }
  return count;
}
|
|
1951
|
+
|
|
1952
|
+
private get sourceBufferCount(): number {
  // Number of occupied slots in the fixed-size sourceBuffers tuple.
  return this.sourceBuffers.filter(([type]) => !!type).length;
}
|
|
1955
|
+
|
|
1956
|
+
private get sourceBufferTypes(): SourceBufferName[] {
  // Names of the SourceBuffers currently held in the tuple.
  const types: SourceBufferName[] = [];
  for (const [type] of this.sourceBuffers) {
    if (type) {
      types.push(type);
    }
  }
  return types;
}
|
|
1961
|
+
|
|
1962
|
+
private addBufferListener(
  type: SourceBufferName,
  event: string,
  fn: <K extends keyof SourceBufferEventMap>(
    type: SourceBufferName,
    event: SourceBufferEventMap[K],
  ) => any,
) {
  // Bind the handler to (this, type), remember it for later removal, and
  // attach it to the track's SourceBuffer. No-op without a track or buffer.
  const track = this.tracks[type];
  const buffer = track?.buffer;
  if (!track || !buffer) {
    return;
  }
  const listener = fn.bind(this, type);
  track.listeners.push({ event, listener });
  addEventListener(buffer, event, listener);
}
|
|
1982
|
+
|
|
1983
|
+
private removeBufferListeners(type: SourceBufferName) {
  // Detach every listener registered via addBufferListener for this track.
  const track = this.tracks[type];
  const buffer = track?.buffer;
  if (!track || !buffer) {
    return;
  }
  for (const { event, listener } of track.listeners) {
    removeEventListener(buffer, event, listener);
  }
  track.listeners.length = 0;
}
|
|
1997
|
+
}
|
|
1998
|
+
|
|
1999
|
+
function removeSourceChildren(node: HTMLElement) {
|
|
2000
|
+
const sourceChildren = node.querySelectorAll('source');
|
|
2001
|
+
[].slice.call(sourceChildren).forEach((source) => {
|
|
2002
|
+
node.removeChild(source);
|
|
2003
|
+
});
|
|
2004
|
+
}
|
|
2005
|
+
|
|
2006
|
+
function addSource(media: HTMLMediaElement, url: string) {
|
|
2007
|
+
const source = self.document.createElement('source');
|
|
2008
|
+
source.type = 'video/mp4';
|
|
2009
|
+
source.src = url;
|
|
2010
|
+
media.appendChild(source);
|
|
2011
|
+
}
|
|
2012
|
+
|
|
2013
|
+
function sourceBufferNameToIndex(type: SourceBufferName) {
|
|
2014
|
+
return type === 'audio' ? 1 : 0;
|
|
2015
|
+
}
|