@livepeer-frameworks/player-core 0.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/index.js +19493 -0
- package/dist/cjs/index.js.map +1 -0
- package/dist/esm/index.js +19398 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/player.css +2140 -0
- package/dist/types/core/ABRController.d.ts +164 -0
- package/dist/types/core/CodecUtils.d.ts +54 -0
- package/dist/types/core/Disposable.d.ts +61 -0
- package/dist/types/core/EventEmitter.d.ts +73 -0
- package/dist/types/core/GatewayClient.d.ts +144 -0
- package/dist/types/core/InteractionController.d.ts +121 -0
- package/dist/types/core/LiveDurationProxy.d.ts +102 -0
- package/dist/types/core/MetaTrackManager.d.ts +220 -0
- package/dist/types/core/MistReporter.d.ts +163 -0
- package/dist/types/core/MistSignaling.d.ts +148 -0
- package/dist/types/core/PlayerController.d.ts +665 -0
- package/dist/types/core/PlayerInterface.d.ts +230 -0
- package/dist/types/core/PlayerManager.d.ts +182 -0
- package/dist/types/core/PlayerRegistry.d.ts +27 -0
- package/dist/types/core/QualityMonitor.d.ts +184 -0
- package/dist/types/core/ScreenWakeLockManager.d.ts +70 -0
- package/dist/types/core/SeekingUtils.d.ts +142 -0
- package/dist/types/core/StreamStateClient.d.ts +108 -0
- package/dist/types/core/SubtitleManager.d.ts +111 -0
- package/dist/types/core/TelemetryReporter.d.ts +79 -0
- package/dist/types/core/TimeFormat.d.ts +97 -0
- package/dist/types/core/TimerManager.d.ts +83 -0
- package/dist/types/core/UrlUtils.d.ts +81 -0
- package/dist/types/core/detector.d.ts +149 -0
- package/dist/types/core/index.d.ts +49 -0
- package/dist/types/core/scorer.d.ts +167 -0
- package/dist/types/core/selector.d.ts +9 -0
- package/dist/types/index.d.ts +45 -0
- package/dist/types/lib/utils.d.ts +2 -0
- package/dist/types/players/DashJsPlayer.d.ts +102 -0
- package/dist/types/players/HlsJsPlayer.d.ts +70 -0
- package/dist/types/players/MewsWsPlayer/SourceBufferManager.d.ts +119 -0
- package/dist/types/players/MewsWsPlayer/WebSocketManager.d.ts +60 -0
- package/dist/types/players/MewsWsPlayer/index.d.ts +220 -0
- package/dist/types/players/MewsWsPlayer/types.d.ts +89 -0
- package/dist/types/players/MistPlayer.d.ts +25 -0
- package/dist/types/players/MistWebRTCPlayer/index.d.ts +133 -0
- package/dist/types/players/NativePlayer.d.ts +143 -0
- package/dist/types/players/VideoJsPlayer.d.ts +59 -0
- package/dist/types/players/WebCodecsPlayer/JitterBuffer.d.ts +118 -0
- package/dist/types/players/WebCodecsPlayer/LatencyProfiles.d.ts +64 -0
- package/dist/types/players/WebCodecsPlayer/RawChunkParser.d.ts +63 -0
- package/dist/types/players/WebCodecsPlayer/SyncController.d.ts +174 -0
- package/dist/types/players/WebCodecsPlayer/WebSocketController.d.ts +164 -0
- package/dist/types/players/WebCodecsPlayer/index.d.ts +149 -0
- package/dist/types/players/WebCodecsPlayer/polyfills/MediaStreamTrackGenerator.d.ts +105 -0
- package/dist/types/players/WebCodecsPlayer/types.d.ts +395 -0
- package/dist/types/players/WebCodecsPlayer/worker/decoder.worker.d.ts +13 -0
- package/dist/types/players/WebCodecsPlayer/worker/types.d.ts +197 -0
- package/dist/types/players/index.d.ts +14 -0
- package/dist/types/styles/index.d.ts +11 -0
- package/dist/types/types.d.ts +363 -0
- package/dist/types/vanilla/FrameWorksPlayer.d.ts +143 -0
- package/dist/types/vanilla/index.d.ts +19 -0
- package/dist/workers/decoder.worker.js +989 -0
- package/dist/workers/decoder.worker.js.map +1 -0
- package/package.json +80 -0
- package/src/core/ABRController.ts +550 -0
- package/src/core/CodecUtils.ts +257 -0
- package/src/core/Disposable.ts +120 -0
- package/src/core/EventEmitter.ts +113 -0
- package/src/core/GatewayClient.ts +439 -0
- package/src/core/InteractionController.ts +712 -0
- package/src/core/LiveDurationProxy.ts +270 -0
- package/src/core/MetaTrackManager.ts +753 -0
- package/src/core/MistReporter.ts +543 -0
- package/src/core/MistSignaling.ts +346 -0
- package/src/core/PlayerController.ts +2829 -0
- package/src/core/PlayerInterface.ts +432 -0
- package/src/core/PlayerManager.ts +900 -0
- package/src/core/PlayerRegistry.ts +149 -0
- package/src/core/QualityMonitor.ts +597 -0
- package/src/core/ScreenWakeLockManager.ts +163 -0
- package/src/core/SeekingUtils.ts +364 -0
- package/src/core/StreamStateClient.ts +457 -0
- package/src/core/SubtitleManager.ts +297 -0
- package/src/core/TelemetryReporter.ts +308 -0
- package/src/core/TimeFormat.ts +205 -0
- package/src/core/TimerManager.ts +209 -0
- package/src/core/UrlUtils.ts +179 -0
- package/src/core/detector.ts +382 -0
- package/src/core/index.ts +140 -0
- package/src/core/scorer.ts +553 -0
- package/src/core/selector.ts +16 -0
- package/src/global.d.ts +11 -0
- package/src/index.ts +75 -0
- package/src/lib/utils.ts +6 -0
- package/src/players/DashJsPlayer.ts +642 -0
- package/src/players/HlsJsPlayer.ts +483 -0
- package/src/players/MewsWsPlayer/SourceBufferManager.ts +572 -0
- package/src/players/MewsWsPlayer/WebSocketManager.ts +241 -0
- package/src/players/MewsWsPlayer/index.ts +1065 -0
- package/src/players/MewsWsPlayer/types.ts +106 -0
- package/src/players/MistPlayer.ts +188 -0
- package/src/players/MistWebRTCPlayer/index.ts +703 -0
- package/src/players/NativePlayer.ts +820 -0
- package/src/players/VideoJsPlayer.ts +643 -0
- package/src/players/WebCodecsPlayer/JitterBuffer.ts +299 -0
- package/src/players/WebCodecsPlayer/LatencyProfiles.ts +151 -0
- package/src/players/WebCodecsPlayer/RawChunkParser.ts +151 -0
- package/src/players/WebCodecsPlayer/SyncController.ts +456 -0
- package/src/players/WebCodecsPlayer/WebSocketController.ts +564 -0
- package/src/players/WebCodecsPlayer/index.ts +1650 -0
- package/src/players/WebCodecsPlayer/polyfills/MediaStreamTrackGenerator.ts +379 -0
- package/src/players/WebCodecsPlayer/types.ts +542 -0
- package/src/players/WebCodecsPlayer/worker/decoder.worker.ts +1360 -0
- package/src/players/WebCodecsPlayer/worker/types.ts +276 -0
- package/src/players/index.ts +22 -0
- package/src/styles/animations.css +21 -0
- package/src/styles/index.ts +52 -0
- package/src/styles/player.css +2126 -0
- package/src/styles/tailwind.css +1015 -0
- package/src/types.ts +421 -0
- package/src/vanilla/FrameWorksPlayer.ts +367 -0
- package/src/vanilla/index.ts +22 -0
|
@@ -0,0 +1,703 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* MistWebRTCPlayerImpl - IPlayer implementation for MistServer native WebRTC
|
|
3
|
+
*
|
|
4
|
+
* Uses MistServer's WebSocket signaling protocol instead of WHEP.
|
|
5
|
+
* Key advantages over WHEP:
|
|
6
|
+
* - Server-side track selection via signaling
|
|
7
|
+
* - Playback speed control (including "auto" for live catch-up)
|
|
8
|
+
* - Seeking via signaling (DVR support)
|
|
9
|
+
* - Real-time buffer_window updates
|
|
10
|
+
* - DataChannel for timed metadata
|
|
11
|
+
*/
|
|
12
|
+
|
|
13
|
+
import { BasePlayer } from '../../core/PlayerInterface';
|
|
14
|
+
import type { StreamSource, StreamInfo, PlayerOptions, PlayerCapability } from '../../core/PlayerInterface';
|
|
15
|
+
import { MistSignaling, type MistTimeUpdate } from '../../core/MistSignaling';
|
|
16
|
+
import { checkWebRTCCodecCompatibility } from '../../core/detector';
|
|
17
|
+
|
|
18
|
+
export class MistWebRTCPlayerImpl extends BasePlayer {
  // Capability descriptor consulted by the player registry for selection.
  readonly capability: PlayerCapability = {
    name: "MistServer WebRTC",
    shortname: "mist-webrtc",
    priority: 2, // After direct (WHEP=1), before HLS.js (3)
    mimes: ["webrtc", "mist/webrtc"]
  };

  // WebSocket signaling channel to MistServer; null until initialize().
  private signaling: MistSignaling | null = null;
  // Peer connection carrying the received media; null when torn down.
  private peerConnection: RTCPeerConnection | null = null;
  // DataChannel ('*', JSON protocol) created in setupWebRTC() for timed metadata.
  private dataChannel: RTCDataChannel | null = null;
  // Container element the video element is appended to.
  private container: HTMLElement | null = null;
  // Set by destroy(); guards async callbacks from touching torn-down state.
  private destroyed = false;

  // Time tracking
  // Offset (seconds) between server stream time and video.currentTime;
  // updated on seek and on signaling time updates.
  private seekOffset = 0;
  // Duration in milliseconds (Infinity for live); 0 until first time update.
  private durationMs = 0;
  // Live flag; maintained from signaling time updates and 'stopped' events.
  private isLiveStream = true;
  // Current playback rate; 'auto' lets the server manage live catch-up.
  private playRate: number | 'auto' = 'auto';

  // Buffer window tracking (P2)
  // Width of the server-side buffer (end - begin) in ms, from on_time updates.
  private bufferWindow = 0;

  // Track change detection (P1)
  // Track ids last reported by the server, compared on each time update.
  private currentTracks: string[] = [];

  // Store source/options for loop reconnect (P1)
  private currentSource: StreamSource | null = null;
  private currentOptions: PlayerOptions | null = null;

  // Stats tracking
  // Previous inbound byte counts + timestamp; used by getStats() to derive
  // bitrate between consecutive samples.
  private lastInboundStats: { video?: { bytesReceived: number }; audio?: { bytesReceived: number }; timestamp: number } | null = null;
|
|
50
|
+
|
|
51
|
+
/**
|
|
52
|
+
* Chrome on Android has a bug where H264 is not available immediately
|
|
53
|
+
* after the tab is opened. Retry up to 5 times with 100ms intervals.
|
|
54
|
+
* https://bugs.chromium.org/p/webrtc/issues/detail?id=11620
|
|
55
|
+
*/
|
|
56
|
+
private async checkH264Available(retries = 5): Promise<boolean> {
|
|
57
|
+
for (let i = 0; i < retries; i++) {
|
|
58
|
+
try {
|
|
59
|
+
const caps = RTCRtpReceiver.getCapabilities?.('video');
|
|
60
|
+
if (caps?.codecs.some(c => c.mimeType === 'video/H264')) {
|
|
61
|
+
return true;
|
|
62
|
+
}
|
|
63
|
+
} catch {}
|
|
64
|
+
if (i < retries - 1) {
|
|
65
|
+
await new Promise(r => setTimeout(r, 100));
|
|
66
|
+
}
|
|
67
|
+
}
|
|
68
|
+
console.warn('[MistWebRTC] H264 not available after retries');
|
|
69
|
+
return false;
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
/**
|
|
73
|
+
* Load MistServer's WebRTC browser equalizer script for browser-specific fixes.
|
|
74
|
+
* This is non-fatal if it fails to load.
|
|
75
|
+
*/
|
|
76
|
+
private async loadBrowserEqualizer(host: string): Promise<void> {
|
|
77
|
+
if ((window as any).WebRTCBrowserEqualizerLoaded) return;
|
|
78
|
+
|
|
79
|
+
return new Promise((resolve) => {
|
|
80
|
+
const script = document.createElement('script');
|
|
81
|
+
script.src = `${host}/webrtc.js`;
|
|
82
|
+
script.onload = () => {
|
|
83
|
+
console.debug('[MistWebRTC] Browser equalizer loaded');
|
|
84
|
+
resolve();
|
|
85
|
+
};
|
|
86
|
+
script.onerror = () => {
|
|
87
|
+
console.warn('[MistWebRTC] Failed to load browser equalizer');
|
|
88
|
+
resolve(); // Non-fatal
|
|
89
|
+
};
|
|
90
|
+
document.head.appendChild(script);
|
|
91
|
+
});
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
/**
|
|
95
|
+
* Compare two arrays for equality (order-independent)
|
|
96
|
+
*/
|
|
97
|
+
private arraysEqual(a: string[], b: string[]): boolean {
|
|
98
|
+
if (a.length !== b.length) return false;
|
|
99
|
+
const sortedA = [...a].sort();
|
|
100
|
+
const sortedB = [...b].sort();
|
|
101
|
+
return sortedA.every((v, i) => v === sortedB[i]);
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
isMimeSupported(mimetype: string): boolean {
|
|
105
|
+
return this.capability.mimes.includes(mimetype);
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
isBrowserSupported(mimetype: string, source: StreamSource, streamInfo: StreamInfo): boolean | string[] {
|
|
109
|
+
// Check basic WebRTC support
|
|
110
|
+
if (!('RTCPeerConnection' in window) || !('WebSocket' in window)) return false;
|
|
111
|
+
|
|
112
|
+
// Check codec compatibility
|
|
113
|
+
const codecCompat = checkWebRTCCodecCompatibility(streamInfo.meta.tracks);
|
|
114
|
+
if (!codecCompat.compatible) {
|
|
115
|
+
console.debug('[MistWebRTC] Skipping - incompatible codecs:', codecCompat.incompatibleCodecs.join(', '));
|
|
116
|
+
return false;
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
// Return which track types we can play
|
|
120
|
+
const playable: string[] = [];
|
|
121
|
+
if (codecCompat.details.compatibleVideoCodecs.length > 0) {
|
|
122
|
+
playable.push('video');
|
|
123
|
+
}
|
|
124
|
+
if (codecCompat.details.compatibleAudioCodecs.length > 0) {
|
|
125
|
+
playable.push('audio');
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
return playable.length > 0 ? playable : false;
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
  /**
   * Create the video element inside `container` and establish the WebRTC
   * session via MistServer WebSocket signaling.
   *
   * @param container Element the video element is appended to.
   * @param source Stream source; its URL is used both for the equalizer
   *               script host and for the signaling connection.
   * @param options Playback options (autoplay, muted, controls, loop, poster).
   * @returns The created video element once the SDP exchange has completed.
   * @throws Re-throws any signaling/WebRTC setup failure after emitting 'error'.
   */
  async initialize(container: HTMLElement, source: StreamSource, options: PlayerOptions): Promise<HTMLVideoElement> {
    this.destroyed = false;
    this.container = container;
    // Kept so the VoD loop-reconnect path can rebuild the session later.
    this.currentSource = source;
    this.currentOptions = options;
    container.classList.add('fw-player-container');

    // Load browser equalizer script (P0) - extract host from source URL
    try {
      const url = new URL(source.url, window.location.href);
      const host = `${url.protocol}//${url.host}`;
      await this.loadBrowserEqualizer(host);
    } catch {}

    // Check H264 availability with retry for Chrome Android bug (P0)
    // The boolean result is intentionally ignored: this call only waits for
    // the codec to register; playback is attempted regardless.
    await this.checkH264Available();

    // Create video element
    const video = document.createElement('video');
    video.classList.add('fw-player-video');
    video.setAttribute('playsinline', '');
    video.setAttribute('crossorigin', 'anonymous');

    if (options.autoplay) video.autoplay = true;
    if (options.muted) video.muted = true;
    video.controls = options.controls === true; // Explicit false to hide native controls
    if (options.loop) video.loop = true;
    if (options.poster) video.poster = options.poster;

    this.videoElement = video;
    container.appendChild(video);
    this.setupVideoEventListeners(video, options);

    try {
      await this.setupWebRTC(video, source, options);
      return video;
    } catch (error: any) {
      // Surface the failure to listeners, then propagate to the caller.
      this.emit('error', error.message || String(error));
      throw error;
    }
  }
|
|
172
|
+
|
|
173
|
+
  /**
   * Tear down signaling, data channel, peer connection and the video element.
   * All cleanup failures are swallowed so teardown always completes.
   */
  async destroy(): Promise<void> {
    // Flag first so in-flight callbacks (ontrack, signaling events) bail out.
    this.destroyed = true;

    // Close signaling
    if (this.signaling) {
      try {
        this.signaling.stop();
        this.signaling.close();
      } catch {}
      this.signaling = null;
    }

    // Close data channel
    if (this.dataChannel) {
      try { this.dataChannel.close(); } catch {}
      this.dataChannel = null;
    }

    // Close peer connection
    if (this.peerConnection) {
      try { this.peerConnection.close(); } catch {}
      this.peerConnection = null;
    }

    // Clean up video element
    if (this.videoElement) {
      // Detach the MediaStream, then pause and remove from the DOM.
      try { (this.videoElement as any).srcObject = null; } catch {}
      this.videoElement.pause();

      if (this.container) {
        try { this.container.removeChild(this.videoElement); } catch {}
      }
    }

    this.videoElement = null;
    this.container = null;
    this.listeners.clear();
  }
|
|
211
|
+
|
|
212
|
+
// Override seek to use signaling
|
|
213
|
+
seek(time: number): void {
|
|
214
|
+
if (!this.signaling?.isConnected || !this.videoElement) return;
|
|
215
|
+
|
|
216
|
+
this.videoElement.pause();
|
|
217
|
+
this.seekOffset = time - this.videoElement.currentTime;
|
|
218
|
+
this.signaling.seek(time).catch((e) => {
|
|
219
|
+
console.warn('[MistWebRTC] Seek failed:', e);
|
|
220
|
+
});
|
|
221
|
+
}
|
|
222
|
+
|
|
223
|
+
// Override setPlaybackRate to use signaling
|
|
224
|
+
setPlaybackRate(rate: number): void {
|
|
225
|
+
this.signaling?.setSpeed(rate);
|
|
226
|
+
this.playRate = rate;
|
|
227
|
+
}
|
|
228
|
+
|
|
229
|
+
// Implement jumpToLive via signaling
|
|
230
|
+
jumpToLive(): void {
|
|
231
|
+
if (!this.signaling?.isConnected || !this.videoElement) return;
|
|
232
|
+
|
|
233
|
+
this.videoElement.pause();
|
|
234
|
+
this.seekOffset = 0;
|
|
235
|
+
this.signaling.seek('live').catch((e) => {
|
|
236
|
+
console.warn('[MistWebRTC] Jump to live failed:', e);
|
|
237
|
+
});
|
|
238
|
+
}
|
|
239
|
+
|
|
240
|
+
// Override isLive
|
|
241
|
+
  isLive(): boolean {
    // Flag maintained from signaling time updates and 'stopped' events.
    return this.isLiveStream;
  }
|
|
244
|
+
|
|
245
|
+
// Override getDuration to use signaling data
|
|
246
|
+
getDuration(): number {
|
|
247
|
+
return this.durationMs > 0 ? this.durationMs / 1000 : super.getDuration();
|
|
248
|
+
}
|
|
249
|
+
|
|
250
|
+
// Override getCurrentTime to include seek offset
|
|
251
|
+
getCurrentTime(): number {
|
|
252
|
+
const v = this.videoElement;
|
|
253
|
+
if (!v) return 0;
|
|
254
|
+
return this.seekOffset + v.currentTime;
|
|
255
|
+
}
|
|
256
|
+
|
|
257
|
+
/**
|
|
258
|
+
* Get available quality levels from signaling
|
|
259
|
+
*/
|
|
260
|
+
getQualities(): Array<{ id: string; label: string; isAuto?: boolean; active?: boolean }> {
|
|
261
|
+
// Always offer auto as first option
|
|
262
|
+
const qualities: Array<{ id: string; label: string; isAuto?: boolean; active?: boolean }> = [
|
|
263
|
+
{ id: 'auto', label: 'Auto', isAuto: true, active: this.playRate === 'auto' }
|
|
264
|
+
];
|
|
265
|
+
|
|
266
|
+
// If we have track info from signaling, add quality options
|
|
267
|
+
// MistServer provides track selection via ~widthxheight or |bitrate patterns
|
|
268
|
+
// For now, we expose auto mode - full track enumeration would require
|
|
269
|
+
// parsing the signaling track info which varies by stream
|
|
270
|
+
return qualities;
|
|
271
|
+
}
|
|
272
|
+
|
|
273
|
+
// Track selection via signaling
|
|
274
|
+
selectQuality(id: string): void {
|
|
275
|
+
if (!this.signaling?.isConnected) return;
|
|
276
|
+
|
|
277
|
+
if (id === 'auto') {
|
|
278
|
+
this.signaling.setSpeed('auto');
|
|
279
|
+
} else {
|
|
280
|
+
// Track selection: ~widthxheight or |bitrate
|
|
281
|
+
this.signaling.setTracks({ video: id });
|
|
282
|
+
}
|
|
283
|
+
}
|
|
284
|
+
|
|
285
|
+
// Text track selection via signaling
|
|
286
|
+
selectTextTrack(id: string | null): void {
|
|
287
|
+
if (!this.signaling?.isConnected) return;
|
|
288
|
+
|
|
289
|
+
if (id === null) {
|
|
290
|
+
this.signaling.setTracks({ video: 'none' });
|
|
291
|
+
} else {
|
|
292
|
+
this.signaling.setTracks({ video: id });
|
|
293
|
+
}
|
|
294
|
+
}
|
|
295
|
+
|
|
296
|
+
  /**
   * Snapshot WebRTC receive statistics from the peer connection.
   *
   * Bitrates are derived from the byte-count delta since the previous call
   * (stored in this.lastInboundStats), so the first call reports bitrate 0.
   *
   * @returns A stats object, or undefined when there is no peer connection
   *          or stats collection throws.
   */
  async getStats(): Promise<{
    type: 'webrtc';
    video?: {
      bytesReceived: number;
      packetsReceived: number;
      packetsLost: number;
      packetLossRate: number;
      jitter: number;
      framesDecoded: number;
      framesDropped: number;
      frameDropRate: number;
      frameWidth: number;
      frameHeight: number;
      framesPerSecond: number;
      bitrate: number;
      jitterBufferDelay: number;
    };
    audio?: {
      bytesReceived: number;
      packetsReceived: number;
      packetsLost: number;
      packetLossRate: number;
      jitter: number;
      bitrate: number;
    };
    network?: {
      rtt: number;
      availableOutgoingBitrate: number;
      availableIncomingBitrate: number;
      bytesSent: number;
      bytesReceived: number;
    };
    timestamp: number;
  } | undefined> {
    if (!this.peerConnection) return undefined;

    try {
      const stats = await this.peerConnection.getStats();
      const now = Date.now();
      const result: any = { type: 'webrtc', timestamp: now };

      stats.forEach((report: any) => {
        if (report.type === 'inbound-rtp') {
          // Loss as a percentage of all packets that should have arrived.
          const packetLossRate = report.packetsReceived > 0
            ? (report.packetsLost / (report.packetsReceived + report.packetsLost)) * 100
            : 0;

          // Calculate bitrate from previous sample
          let bitrate = 0;
          if (this.lastInboundStats && this.lastInboundStats[report.kind as 'video' | 'audio']) {
            const prev = this.lastInboundStats[report.kind as 'video' | 'audio'];
            const timeDelta = (now - this.lastInboundStats.timestamp) / 1000;
            if (timeDelta > 0 && prev) {
              const bytesDelta = report.bytesReceived - prev.bytesReceived;
              bitrate = Math.round((bytesDelta * 8) / timeDelta);
            }
          }

          if (report.kind === 'video') {
            const frameDropRate = report.framesDecoded > 0
              ? (report.framesDropped / (report.framesDecoded + report.framesDropped)) * 100
              : 0;

            result.video = {
              bytesReceived: report.bytesReceived || 0,
              packetsReceived: report.packetsReceived || 0,
              packetsLost: report.packetsLost || 0,
              packetLossRate,
              jitter: (report.jitter || 0) * 1000, // spec reports seconds; expose ms
              framesDecoded: report.framesDecoded || 0,
              framesDropped: report.framesDropped || 0,
              frameDropRate,
              frameWidth: report.frameWidth || 0,
              frameHeight: report.frameHeight || 0,
              framesPerSecond: report.framesPerSecond || 0,
              bitrate,
              // Average per-sample delay in ms (cumulative delay / emitted count).
              jitterBufferDelay: report.jitterBufferDelay && report.jitterBufferEmittedCount
                ? (report.jitterBufferDelay / report.jitterBufferEmittedCount) * 1000
                : 0,
            };
          }
          if (report.kind === 'audio') {
            result.audio = {
              bytesReceived: report.bytesReceived || 0,
              packetsReceived: report.packetsReceived || 0,
              packetsLost: report.packetsLost || 0,
              packetLossRate,
              jitter: (report.jitter || 0) * 1000, // spec reports seconds; expose ms
              bitrate,
            };
          }
        }
        // Only the nominated candidate pair carries the active path's RTT.
        if (report.type === 'candidate-pair' && report.nominated) {
          result.network = {
            rtt: report.currentRoundTripTime ? report.currentRoundTripTime * 1000 : 0,
            availableOutgoingBitrate: report.availableOutgoingBitrate || 0,
            availableIncomingBitrate: report.availableIncomingBitrate || 0,
            bytesSent: report.bytesSent || 0,
            bytesReceived: report.bytesReceived || 0,
          };
        }
      });

      // Store for next sample's bitrate calculation
      this.lastInboundStats = {
        video: result.video ? { bytesReceived: result.video.bytesReceived } : undefined,
        audio: result.audio ? { bytesReceived: result.audio.bytesReceived } : undefined,
        timestamp: now,
      };

      return result;
    } catch {
      return undefined;
    }
  }
|
|
411
|
+
|
|
412
|
+
async getLatency(): Promise<{ estimatedMs: number; jitterBufferMs: number; rttMs: number } | undefined> {
|
|
413
|
+
const s = await this.getStats();
|
|
414
|
+
if (!s) return undefined;
|
|
415
|
+
|
|
416
|
+
return {
|
|
417
|
+
estimatedMs: s.video?.jitterBufferDelay || 0,
|
|
418
|
+
jitterBufferMs: s.video?.jitterBufferDelay || 0,
|
|
419
|
+
rttMs: s.network?.rtt || 0,
|
|
420
|
+
};
|
|
421
|
+
}
|
|
422
|
+
|
|
423
|
+
/**
|
|
424
|
+
* Get the current buffer window in milliseconds (P2)
|
|
425
|
+
* This is the difference between buffer end and begin from on_time messages.
|
|
426
|
+
*/
|
|
427
|
+
  getBufferWindow(): number {
    // Value maintained by handleTimeUpdate() as update.end - update.begin (ms).
    return this.bufferWindow;
  }
|
|
430
|
+
|
|
431
|
+
/**
|
|
432
|
+
* Request video track matching the given player size (P2 - ABR_resize)
|
|
433
|
+
* Uses MistServer's ~widthxheight track selection syntax.
|
|
434
|
+
*/
|
|
435
|
+
setQualityForSize(size: { width: number; height: number }): void {
|
|
436
|
+
if (!this.signaling?.isConnected) return;
|
|
437
|
+
this.signaling.setTracks({ video: `~${size.width}x${size.height}` });
|
|
438
|
+
}
|
|
439
|
+
|
|
440
|
+
/**
|
|
441
|
+
* Get the metadata DataChannel for timed metadata (P2)
|
|
442
|
+
* Returns the RTCDataChannel or null if not available.
|
|
443
|
+
*/
|
|
444
|
+
  getMetaDataChannel(): RTCDataChannel | null {
    // Created in setupWebRTC(); null before initialize() or after destroy().
    return this.dataChannel;
  }
|
|
447
|
+
|
|
448
|
+
/**
|
|
449
|
+
* Override to add WebRTC-specific event handling:
|
|
450
|
+
* - Loop reconnect for VoD (P1)
|
|
451
|
+
* - Proper autoplay disable (P2)
|
|
452
|
+
*/
|
|
453
|
+
  protected setupVideoEventListeners(video: HTMLVideoElement, options: PlayerOptions): void {
    // Call parent implementation first
    super.setupVideoEventListeners(video, options);

    // Proper autoplay disable handling (P2)
    // WebRTC may auto-start even with autoplay=false
    if (!options.autoplay) {
      // One-shot listener: pause locally and on the server at the first
      // 'play', then detach itself so later user-initiated play works.
      const pauseOnFirstPlay = () => {
        video.pause();
        this.signaling?.pause();
        video.removeEventListener('play', pauseOnFirstPlay);
      };
      video.addEventListener('play', pauseOnFirstPlay);
    }

    // Loop reconnect for VoD content (P1)
    // WebRTC sessions end at EOF, so looping requires a fresh session.
    video.addEventListener('ended', async () => {
      if (video.loop && !this.isLiveStream && this.currentSource && this.currentOptions) {
        console.debug('[MistWebRTC] VoD ended with loop enabled, reconnecting...');
        try {
          // Partial cleanup - keep container and video element
          if (this.signaling) {
            try {
              this.signaling.stop();
              this.signaling.close();
            } catch {}
            this.signaling = null;
          }
          if (this.dataChannel) {
            try { this.dataChannel.close(); } catch {}
            this.dataChannel = null;
          }
          if (this.peerConnection) {
            try { this.peerConnection.close(); } catch {}
            this.peerConnection = null;
          }

          // Reconnect WebRTC
          await this.setupWebRTC(video, this.currentSource, this.currentOptions);
        } catch (e) {
          console.error('[MistWebRTC] Failed to reconnect for loop:', e);
          this.emit('error', 'Failed to reconnect for loop');
        }
      }
    });
  }
|
|
499
|
+
|
|
500
|
+
// Private methods
|
|
501
|
+
|
|
502
|
+
  /**
   * Build the full WebRTC session: signaling connection, peer connection,
   * metadata DataChannel, SDP offer/answer exchange. Resolves once the
   * remote answer has been applied; rejects on signaling error or timeout.
   *
   * @param video Target element for the incoming MediaStream.
   * @param source Stream source; its URL is the signaling WebSocket URL.
   * @param options Playback options (currently unused here directly).
   */
  private async setupWebRTC(video: HTMLVideoElement, source: StreamSource, options: PlayerOptions): Promise<void> {
    // Optional ICE servers may be attached to the source by the caller.
    const sourceAny = source as any;
    const iceServers: RTCIceServer[] = sourceAny?.iceServers || [];

    // Create signaling
    this.signaling = new MistSignaling({
      url: source.url,
      timeout: 5000,
      onLog: (msg) => console.debug(`[MistWebRTC] ${msg}`),
    });

    // Create peer connection
    const pc = new RTCPeerConnection({ iceServers });
    this.peerConnection = pc;

    // Create data channel for metadata
    this.dataChannel = pc.createDataChannel('*', { protocol: 'JSON' });
    this.dataChannel.onmessage = (event) => {
      if (this.destroyed) return;
      console.debug('[MistWebRTC] DataChannel message:', event.data);
      // Handle timed metadata here if needed
    };

    // Handle incoming tracks
    pc.ontrack = (event) => {
      if (this.destroyed) return;
      if (video && event.streams[0]) {
        video.srcObject = event.streams[0];
      }
    };

    // Connection state changes
    pc.onconnectionstatechange = () => {
      if (this.destroyed) return;
      const state = pc.connectionState;
      console.debug(`[MistWebRTC] Connection state: ${state}`);

      if (state === 'failed') {
        this.emit('error', 'WebRTC connection failed (firewall?)');
      }
    };

    // ICE connection state
    pc.oniceconnectionstatechange = () => {
      if (this.destroyed) return;
      const state = pc.iceConnectionState;
      console.debug(`[MistWebRTC] ICE state: ${state}`);

      if (state === 'failed') {
        this.emit('error', 'ICE connection failed');
      }
    };

    // Set up signaling event handlers
    this.setupSignalingHandlers(pc, video);

    // Connect signaling
    this.signaling.connect();

    // Wait for signaling to connect
    // NOTE(review): the once('error') listener registered here is not removed
    // when 'connected' wins the race; depending on MistSignaling's once()
    // semantics it may consume one later 'error' event (reject after resolve
    // is a no-op) — confirm against MistSignaling.
    await new Promise<void>((resolve, reject) => {
      const timeout = setTimeout(() => {
        reject(new Error('Signaling connection timeout'));
      }, 10000);

      this.signaling!.once('connected', () => {
        clearTimeout(timeout);
        resolve();
      });

      this.signaling!.once('error', ({ message }) => {
        clearTimeout(timeout);
        reject(new Error(message));
      });
    });

    // Create and send offer
    await this.createAndSendOffer(pc);

    // Wait for answer
    await new Promise<void>((resolve, reject) => {
      const timeout = setTimeout(() => {
        reject(new Error('SDP answer timeout'));
      }, 10000);

      this.signaling!.once('answer_sdp', async ({ result, answer_sdp }) => {
        clearTimeout(timeout);
        if (!result) {
          reject(new Error('Failed to get SDP answer'));
          return;
        }

        try {
          await pc.setRemoteDescription({ type: 'answer', sdp: answer_sdp });
          resolve();
        } catch (err) {
          reject(err);
        }
      });
    });
  }
|
|
603
|
+
|
|
604
|
+
  /**
   * Wire persistent signaling event handlers onto the current MistSignaling
   * instance. Every handler bails out once this.destroyed is set.
   */
  private setupSignalingHandlers(pc: RTCPeerConnection, video: HTMLVideoElement): void {
    if (!this.signaling) return;

    // Dispatch webrtc_connected event (P2)
    this.signaling.on('connected', () => {
      if (this.destroyed) return;
      video.dispatchEvent(new Event('webrtc_connected'));
    });

    // Server time updates drive seek offset / duration / live state.
    this.signaling.on('time_update', (update: MistTimeUpdate) => {
      if (this.destroyed) return;
      this.handleTimeUpdate(update, video);
    });

    this.signaling.on('seeked', ({ live_point }) => {
      if (this.destroyed) return;
      // Dispatch seeked event
      video.dispatchEvent(new CustomEvent('seeked', { detail: { seekOffset: this.seekOffset } }));
      // Set playback rate to auto if seeked to live point
      if (live_point && this.signaling) {
        this.signaling.setSpeed('auto');
      }
      // Resume playback that seek()/jumpToLive() paused.
      video.play().catch(() => {});
    });

    this.signaling.on('speed_changed', ({ play_rate_curr }) => {
      if (this.destroyed) return;
      this.playRate = play_rate_curr;
      video.dispatchEvent(new CustomEvent('ratechange', { detail: { play_rate_curr } }));
    });

    // Server reports end of stream.
    this.signaling.on('stopped', () => {
      if (this.destroyed) return;
      this.isLiveStream = false;
      video.pause();
      this.emit('ended', undefined);
    });

    this.signaling.on('error', ({ message }) => {
      if (this.destroyed) return;
      this.emit('error', message);
    });

    // Dispatch webrtc_disconnected event (P2)
    this.signaling.on('disconnected', () => {
      if (this.destroyed) return;
      video.dispatchEvent(new Event('webrtc_disconnected'));
      video.pause();
    });
  }
|
|
654
|
+
|
|
655
|
+
  /**
   * Apply a signaling time update: sync the seek offset, duration, live
   * flag, buffer window and track set to the server's reported state.
   */
  private handleTimeUpdate(update: MistTimeUpdate, video: HTMLVideoElement): void {
    // Update seek offset
    // Server time is in ms; the offset keeps getCurrentTime() in stream time.
    this.seekOffset = update.current / 1000 - video.currentTime;

    // Update duration
    // end === 0 is treated as "no known end", i.e. a live stream.
    const newDuration = update.end === 0 ? Infinity : update.end;
    this.durationMs = newDuration;
    this.isLiveStream = !isFinite(newDuration) || newDuration === 0;

    // Track buffer window (P2)
    // NOTE(review): uses the raw update.end, which per the mapping above is 0
    // for live streams — that would make the window negative; confirm the
    // on_time semantics for live buffer windows.
    this.bufferWindow = update.end - update.begin;

    // Fire track changed events (P1)
    // One event per track id that was not in the previous set.
    if (update.tracks && !this.arraysEqual(update.tracks, this.currentTracks)) {
      for (const trackId of update.tracks) {
        if (!this.currentTracks.includes(trackId)) {
          video.dispatchEvent(new CustomEvent('playerUpdate_trackChanged', {
            detail: { trackId }
          }));
        }
      }
      this.currentTracks = [...update.tracks];
    }

    // Resume playback if not paused on server
    if (!update.paused && video.paused) {
      video.play().catch(() => {});
    }
  }
|
|
684
|
+
|
|
685
|
+
private async createAndSendOffer(pc: RTCPeerConnection): Promise<void> {
|
|
686
|
+
if (!this.signaling) return;
|
|
687
|
+
|
|
688
|
+
// Add transceivers for receiving
|
|
689
|
+
pc.addTransceiver('video', { direction: 'recvonly' });
|
|
690
|
+
pc.addTransceiver('audio', { direction: 'recvonly' });
|
|
691
|
+
|
|
692
|
+
const offer = await pc.createOffer({
|
|
693
|
+
offerToReceiveAudio: true,
|
|
694
|
+
offerToReceiveVideo: true,
|
|
695
|
+
});
|
|
696
|
+
|
|
697
|
+
await pc.setLocalDescription(offer);
|
|
698
|
+
|
|
699
|
+
if (offer.sdp) {
|
|
700
|
+
this.signaling.sendOfferSDP(offer.sdp);
|
|
701
|
+
}
|
|
702
|
+
}
|
|
703
|
+
}
|