@livepeer-frameworks/player-core 0.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/index.js +19493 -0
- package/dist/cjs/index.js.map +1 -0
- package/dist/esm/index.js +19398 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/player.css +2140 -0
- package/dist/types/core/ABRController.d.ts +164 -0
- package/dist/types/core/CodecUtils.d.ts +54 -0
- package/dist/types/core/Disposable.d.ts +61 -0
- package/dist/types/core/EventEmitter.d.ts +73 -0
- package/dist/types/core/GatewayClient.d.ts +144 -0
- package/dist/types/core/InteractionController.d.ts +121 -0
- package/dist/types/core/LiveDurationProxy.d.ts +102 -0
- package/dist/types/core/MetaTrackManager.d.ts +220 -0
- package/dist/types/core/MistReporter.d.ts +163 -0
- package/dist/types/core/MistSignaling.d.ts +148 -0
- package/dist/types/core/PlayerController.d.ts +665 -0
- package/dist/types/core/PlayerInterface.d.ts +230 -0
- package/dist/types/core/PlayerManager.d.ts +182 -0
- package/dist/types/core/PlayerRegistry.d.ts +27 -0
- package/dist/types/core/QualityMonitor.d.ts +184 -0
- package/dist/types/core/ScreenWakeLockManager.d.ts +70 -0
- package/dist/types/core/SeekingUtils.d.ts +142 -0
- package/dist/types/core/StreamStateClient.d.ts +108 -0
- package/dist/types/core/SubtitleManager.d.ts +111 -0
- package/dist/types/core/TelemetryReporter.d.ts +79 -0
- package/dist/types/core/TimeFormat.d.ts +97 -0
- package/dist/types/core/TimerManager.d.ts +83 -0
- package/dist/types/core/UrlUtils.d.ts +81 -0
- package/dist/types/core/detector.d.ts +149 -0
- package/dist/types/core/index.d.ts +49 -0
- package/dist/types/core/scorer.d.ts +167 -0
- package/dist/types/core/selector.d.ts +9 -0
- package/dist/types/index.d.ts +45 -0
- package/dist/types/lib/utils.d.ts +2 -0
- package/dist/types/players/DashJsPlayer.d.ts +102 -0
- package/dist/types/players/HlsJsPlayer.d.ts +70 -0
- package/dist/types/players/MewsWsPlayer/SourceBufferManager.d.ts +119 -0
- package/dist/types/players/MewsWsPlayer/WebSocketManager.d.ts +60 -0
- package/dist/types/players/MewsWsPlayer/index.d.ts +220 -0
- package/dist/types/players/MewsWsPlayer/types.d.ts +89 -0
- package/dist/types/players/MistPlayer.d.ts +25 -0
- package/dist/types/players/MistWebRTCPlayer/index.d.ts +133 -0
- package/dist/types/players/NativePlayer.d.ts +143 -0
- package/dist/types/players/VideoJsPlayer.d.ts +59 -0
- package/dist/types/players/WebCodecsPlayer/JitterBuffer.d.ts +118 -0
- package/dist/types/players/WebCodecsPlayer/LatencyProfiles.d.ts +64 -0
- package/dist/types/players/WebCodecsPlayer/RawChunkParser.d.ts +63 -0
- package/dist/types/players/WebCodecsPlayer/SyncController.d.ts +174 -0
- package/dist/types/players/WebCodecsPlayer/WebSocketController.d.ts +164 -0
- package/dist/types/players/WebCodecsPlayer/index.d.ts +149 -0
- package/dist/types/players/WebCodecsPlayer/polyfills/MediaStreamTrackGenerator.d.ts +105 -0
- package/dist/types/players/WebCodecsPlayer/types.d.ts +395 -0
- package/dist/types/players/WebCodecsPlayer/worker/decoder.worker.d.ts +13 -0
- package/dist/types/players/WebCodecsPlayer/worker/types.d.ts +197 -0
- package/dist/types/players/index.d.ts +14 -0
- package/dist/types/styles/index.d.ts +11 -0
- package/dist/types/types.d.ts +363 -0
- package/dist/types/vanilla/FrameWorksPlayer.d.ts +143 -0
- package/dist/types/vanilla/index.d.ts +19 -0
- package/dist/workers/decoder.worker.js +989 -0
- package/dist/workers/decoder.worker.js.map +1 -0
- package/package.json +80 -0
- package/src/core/ABRController.ts +550 -0
- package/src/core/CodecUtils.ts +257 -0
- package/src/core/Disposable.ts +120 -0
- package/src/core/EventEmitter.ts +113 -0
- package/src/core/GatewayClient.ts +439 -0
- package/src/core/InteractionController.ts +712 -0
- package/src/core/LiveDurationProxy.ts +270 -0
- package/src/core/MetaTrackManager.ts +753 -0
- package/src/core/MistReporter.ts +543 -0
- package/src/core/MistSignaling.ts +346 -0
- package/src/core/PlayerController.ts +2829 -0
- package/src/core/PlayerInterface.ts +432 -0
- package/src/core/PlayerManager.ts +900 -0
- package/src/core/PlayerRegistry.ts +149 -0
- package/src/core/QualityMonitor.ts +597 -0
- package/src/core/ScreenWakeLockManager.ts +163 -0
- package/src/core/SeekingUtils.ts +364 -0
- package/src/core/StreamStateClient.ts +457 -0
- package/src/core/SubtitleManager.ts +297 -0
- package/src/core/TelemetryReporter.ts +308 -0
- package/src/core/TimeFormat.ts +205 -0
- package/src/core/TimerManager.ts +209 -0
- package/src/core/UrlUtils.ts +179 -0
- package/src/core/detector.ts +382 -0
- package/src/core/index.ts +140 -0
- package/src/core/scorer.ts +553 -0
- package/src/core/selector.ts +16 -0
- package/src/global.d.ts +11 -0
- package/src/index.ts +75 -0
- package/src/lib/utils.ts +6 -0
- package/src/players/DashJsPlayer.ts +642 -0
- package/src/players/HlsJsPlayer.ts +483 -0
- package/src/players/MewsWsPlayer/SourceBufferManager.ts +572 -0
- package/src/players/MewsWsPlayer/WebSocketManager.ts +241 -0
- package/src/players/MewsWsPlayer/index.ts +1065 -0
- package/src/players/MewsWsPlayer/types.ts +106 -0
- package/src/players/MistPlayer.ts +188 -0
- package/src/players/MistWebRTCPlayer/index.ts +703 -0
- package/src/players/NativePlayer.ts +820 -0
- package/src/players/VideoJsPlayer.ts +643 -0
- package/src/players/WebCodecsPlayer/JitterBuffer.ts +299 -0
- package/src/players/WebCodecsPlayer/LatencyProfiles.ts +151 -0
- package/src/players/WebCodecsPlayer/RawChunkParser.ts +151 -0
- package/src/players/WebCodecsPlayer/SyncController.ts +456 -0
- package/src/players/WebCodecsPlayer/WebSocketController.ts +564 -0
- package/src/players/WebCodecsPlayer/index.ts +1650 -0
- package/src/players/WebCodecsPlayer/polyfills/MediaStreamTrackGenerator.ts +379 -0
- package/src/players/WebCodecsPlayer/types.ts +542 -0
- package/src/players/WebCodecsPlayer/worker/decoder.worker.ts +1360 -0
- package/src/players/WebCodecsPlayer/worker/types.ts +276 -0
- package/src/players/index.ts +22 -0
- package/src/styles/animations.css +21 -0
- package/src/styles/index.ts +52 -0
- package/src/styles/player.css +2126 -0
- package/src/styles/tailwind.css +1015 -0
- package/src/types.ts +421 -0
- package/src/vanilla/FrameWorksPlayer.ts +367 -0
- package/src/vanilla/index.ts +22 -0
|
@@ -0,0 +1,1650 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* WebCodecs Player Implementation
|
|
3
|
+
*
|
|
4
|
+
* Low-latency WebSocket streaming using WebCodecs API for video/audio decoding.
|
|
5
|
+
* Decoding runs in a Web Worker for optimal performance.
|
|
6
|
+
*
|
|
7
|
+
* Features:
|
|
8
|
+
* - Ultra-low latency streaming (configurable via profiles)
|
|
9
|
+
* - Worker-based VideoDecoder/AudioDecoder
|
|
10
|
+
* - Adaptive playback speed for live catchup/slowdown
|
|
11
|
+
* - Jitter compensation
|
|
12
|
+
* - Firefox polyfill for MediaStreamTrackGenerator
|
|
13
|
+
*
|
|
14
|
+
* Protocol: MistServer raw WebSocket frames (12-byte header + data)
|
|
15
|
+
*/
|
|
16
|
+
|
|
17
|
+
import { BasePlayer } from '../../core/PlayerInterface';
|
|
18
|
+
import type {
|
|
19
|
+
StreamSource,
|
|
20
|
+
StreamInfo,
|
|
21
|
+
PlayerOptions,
|
|
22
|
+
PlayerCapability,
|
|
23
|
+
} from '../../core/PlayerInterface';
|
|
24
|
+
import type {
|
|
25
|
+
TrackInfo,
|
|
26
|
+
CodecDataMessage,
|
|
27
|
+
InfoMessage,
|
|
28
|
+
OnTimeMessage,
|
|
29
|
+
RawChunk,
|
|
30
|
+
LatencyProfileName,
|
|
31
|
+
WebCodecsPlayerOptions,
|
|
32
|
+
WebCodecsStats,
|
|
33
|
+
MainToWorkerMessage,
|
|
34
|
+
WorkerToMainMessage,
|
|
35
|
+
} from './types';
|
|
36
|
+
import { WebSocketController } from './WebSocketController';
|
|
37
|
+
import { SyncController } from './SyncController';
|
|
38
|
+
import { getPresentationTimestamp, isInitData } from './RawChunkParser';
|
|
39
|
+
import { getLatencyProfile, mergeLatencyProfile, selectDefaultProfile } from './LatencyProfiles';
|
|
40
|
+
import { createTrackGenerator, hasNativeMediaStreamTrackGenerator } from './polyfills/MediaStreamTrackGenerator';
|
|
41
|
+
|
|
42
|
+
/**
|
|
43
|
+
* Detect if running on Safari (which has VideoTrackGenerator in worker but not MediaStreamTrackGenerator on main thread)
|
|
44
|
+
*/
|
|
45
|
+
function isSafari(): boolean {
|
|
46
|
+
if (typeof navigator === 'undefined') return false;
|
|
47
|
+
const ua = navigator.userAgent;
|
|
48
|
+
return /^((?!chrome|android).)*safari/i.test(ua);
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
// Import inline worker (bundled via rollup-plugin-web-worker-loader)
|
|
52
|
+
|
|
53
|
+
/**
|
|
54
|
+
* Convert string (ASCII with escaped chars) to Uint8Array
|
|
55
|
+
* Reference: rawws.js:76-84 - init data is raw ASCII from stream info JSON
|
|
56
|
+
*/
|
|
57
|
+
function str2bin(str: string): Uint8Array {
|
|
58
|
+
const out = new Uint8Array(str.length);
|
|
59
|
+
for (let i = 0; i < str.length; i++) {
|
|
60
|
+
out[i] = str.charCodeAt(i);
|
|
61
|
+
}
|
|
62
|
+
return out;
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
/**
|
|
66
|
+
* Create a TimeRanges-like object from an array of [start, end] pairs
|
|
67
|
+
*/
|
|
68
|
+
function createTimeRanges(ranges: [number, number][]): TimeRanges {
|
|
69
|
+
return {
|
|
70
|
+
length: ranges.length,
|
|
71
|
+
start(index: number): number {
|
|
72
|
+
if (index < 0 || index >= ranges.length) throw new DOMException('Index out of bounds');
|
|
73
|
+
return ranges[index][0];
|
|
74
|
+
},
|
|
75
|
+
end(index: number): number {
|
|
76
|
+
if (index < 0 || index >= ranges.length) throw new DOMException('Index out of bounds');
|
|
77
|
+
return ranges[index][1];
|
|
78
|
+
},
|
|
79
|
+
};
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
/**
|
|
83
|
+
* Type for requestVideoFrameCallback metadata
|
|
84
|
+
*/
|
|
85
|
+
interface VideoFrameCallbackMetadata {
|
|
86
|
+
presentationTime: DOMHighResTimeStamp;
|
|
87
|
+
expectedDisplayTime: DOMHighResTimeStamp;
|
|
88
|
+
width: number;
|
|
89
|
+
height: number;
|
|
90
|
+
mediaTime: number;
|
|
91
|
+
presentedFrames: number;
|
|
92
|
+
processingDuration?: number;
|
|
93
|
+
}
|
|
94
|
+
|
|
95
|
+
/**
|
|
96
|
+
* Pipeline state for tracking per-track resources
|
|
97
|
+
*/
|
|
98
|
+
interface PipelineInfo {
|
|
99
|
+
idx: number;
|
|
100
|
+
track: TrackInfo;
|
|
101
|
+
generator: ReturnType<typeof createTrackGenerator> | null;
|
|
102
|
+
configured: boolean;
|
|
103
|
+
/** Safari audio: writer for audio frames relayed from worker */
|
|
104
|
+
safariAudioWriter?: WritableStreamDefaultWriter<AudioData>;
|
|
105
|
+
/** Safari audio: the audio generator created on main thread */
|
|
106
|
+
safariAudioGenerator?: MediaStreamTrack;
|
|
107
|
+
}
|
|
108
|
+
|
|
109
|
+
/**
|
|
110
|
+
* WebCodecsPlayerImpl - WebCodecs-based low-latency player
|
|
111
|
+
*/
|
|
112
|
+
export class WebCodecsPlayerImpl extends BasePlayer {
|
|
113
|
+
// Static capability descriptor consulted by the player registry/selector.
readonly capability: PlayerCapability = {
  name: 'WebCodecs Player',
  shortname: 'webcodecs',
  priority: 0, // Highest priority - lowest latency option
  // Raw WebSocket (12-byte header + AVCC NAL units) - NOT MP4-muxed
  // MistServer's output_wsraw.cpp provides full codec negotiation (audio + video)
  // NOTE: ws/video/mp4 is MP4-fragmented which needs MEWS player (uses MSE)
  mimes: [
    'ws/video/raw', 'wss/video/raw', // Raw codec frames (audio + video)
  ],
};
|
|
124
|
+
|
|
125
|
+
// --- Collaborators & per-session state -----------------------------------
private wsController: WebSocketController | null = null;
private syncController: SyncController | null = null;
// Decode worker: VideoDecoder/AudioDecoder run off the main thread.
private worker: Worker | null = null;
// Output MediaStream fed by per-track generators; assigned to video.srcObject.
private mediaStream: MediaStream | null = null;
private container: HTMLElement | null = null;
private pipelines = new Map<number, PipelineInfo>();
private tracks: TrackInfo[] = [];
private tracksByIndex = new Map<number, TrackInfo>(); // Track metadata indexed by track idx
private queuedInitData = new Map<number, Uint8Array>(); // Queued INIT data waiting for track info
private queuedChunks = new Map<number, RawChunk[]>(); // Queued chunks waiting for decoder config
private isDestroyed = false;
private debugging = false;
private verboseDebugging = false;
private streamType: 'live' | 'vod' = 'live';
// Monotonic uid attached to worker messages (see sendToWorker call sites).
private workerUidCounter = 0;
private workerListeners = new Map<number, (msg: WorkerToMainMessage) => void>();

// Playback state
private _duration = Infinity;
private _currentTime = 0;
private _bufferMs = 0;
private _avDrift = 0; // a/v drift value — semantics defined by QualityMonitor usage; confirm against stats consumers
private _frameCallbackId: number | null = null; // handle from requestVideoFrameCallback (see setupFrameCallback)
private _statsInterval: ReturnType<typeof setInterval> | null = null;
private _framesDropped = 0;
private _framesDecoded = 0;
private _bytesReceived = 0;
private _messagesReceived = 0;
private _isPaused = true;
// When true, the video element's play/pause listeners skip syncing state
// (used while the player toggles playback programmatically).
private _suppressPlayPauseSync = false;
private _onVideoPlay?: () => void;
private _onVideoPause?: () => void;
private _pendingStepPause = false;
private _stepPauseTimeout: ReturnType<typeof setTimeout> | null = null;

// Codec support cache - keyed by "codec|init_hash"; static so results are
// shared across player instances.
private static codecCache = new Map<string, boolean>();
|
|
162
|
+
|
|
163
|
+
/**
|
|
164
|
+
* Get cache key for a track's codec configuration
|
|
165
|
+
*/
|
|
166
|
+
private static getCodecCacheKey(track: { codec: string; codecstring?: string; init?: string }): string {
|
|
167
|
+
const codecStr = track.codecstring ?? track.codec?.toLowerCase() ?? '';
|
|
168
|
+
// Simple hash of init data for cache key (just first/last bytes + length)
|
|
169
|
+
const init = track.init ?? '';
|
|
170
|
+
const initHash = init.length > 0 ? `${init.length}_${init.charCodeAt(0)}_${init.charCodeAt(init.length - 1)}` : '';
|
|
171
|
+
return `${codecStr}|${initHash}`;
|
|
172
|
+
}
|
|
173
|
+
|
|
174
|
+
/**
|
|
175
|
+
* Test if a track's codec is supported by WebCodecs
|
|
176
|
+
* Reference: rawws.js:75-137 - isTrackSupported()
|
|
177
|
+
*/
|
|
178
|
+
static async isTrackSupported(track: TrackInfo): Promise<{ supported: boolean; config: any }> {
|
|
179
|
+
const cacheKey = WebCodecsPlayerImpl.getCodecCacheKey(track);
|
|
180
|
+
|
|
181
|
+
// Check cache first
|
|
182
|
+
if (WebCodecsPlayerImpl.codecCache.has(cacheKey)) {
|
|
183
|
+
const cached = WebCodecsPlayerImpl.codecCache.get(cacheKey)!;
|
|
184
|
+
return { supported: cached, config: { codec: track.codecstring ?? track.codec } };
|
|
185
|
+
}
|
|
186
|
+
|
|
187
|
+
// Build codec config
|
|
188
|
+
const codecStr = track.codecstring ?? (track.codec ?? '').toLowerCase();
|
|
189
|
+
const config: any = { codec: codecStr };
|
|
190
|
+
|
|
191
|
+
// Add description (init data) if present
|
|
192
|
+
if (track.init && track.init !== '') {
|
|
193
|
+
config.description = str2bin(track.init);
|
|
194
|
+
}
|
|
195
|
+
|
|
196
|
+
let result: { supported: boolean; config: any };
|
|
197
|
+
|
|
198
|
+
try {
|
|
199
|
+
switch (track.type) {
|
|
200
|
+
case 'video': {
|
|
201
|
+
// Special handling for JPEG - uses ImageDecoder
|
|
202
|
+
if (track.codec === 'JPEG') {
|
|
203
|
+
if (!('ImageDecoder' in window)) {
|
|
204
|
+
result = { supported: false, config: { codec: 'image/jpeg' } };
|
|
205
|
+
} else {
|
|
206
|
+
// @ts-ignore - ImageDecoder may not have types
|
|
207
|
+
const isSupported = await (window as any).ImageDecoder.isTypeSupported('image/jpeg');
|
|
208
|
+
result = { supported: isSupported, config: { codec: 'image/jpeg' } };
|
|
209
|
+
}
|
|
210
|
+
} else {
|
|
211
|
+
// Use VideoDecoder.isConfigSupported()
|
|
212
|
+
result = await VideoDecoder.isConfigSupported(config as VideoDecoderConfig);
|
|
213
|
+
}
|
|
214
|
+
break;
|
|
215
|
+
}
|
|
216
|
+
case 'audio': {
|
|
217
|
+
// Audio requires numberOfChannels and sampleRate
|
|
218
|
+
config.numberOfChannels = track.channels ?? 2;
|
|
219
|
+
config.sampleRate = track.rate ?? 48000;
|
|
220
|
+
result = await AudioDecoder.isConfigSupported(config as AudioDecoderConfig);
|
|
221
|
+
break;
|
|
222
|
+
}
|
|
223
|
+
default:
|
|
224
|
+
result = { supported: false, config };
|
|
225
|
+
}
|
|
226
|
+
} catch (err) {
|
|
227
|
+
console.warn(`[WebCodecs] isConfigSupported failed for ${track.codec}:`, err);
|
|
228
|
+
result = { supported: false, config };
|
|
229
|
+
}
|
|
230
|
+
|
|
231
|
+
// Cache the result
|
|
232
|
+
WebCodecsPlayerImpl.codecCache.set(cacheKey, result.supported);
|
|
233
|
+
return result;
|
|
234
|
+
}
|
|
235
|
+
|
|
236
|
+
/**
|
|
237
|
+
* Validate all tracks and return which are supported
|
|
238
|
+
* Returns array of supported track types ('video', 'audio')
|
|
239
|
+
*/
|
|
240
|
+
static async validateTracks(tracks: TrackInfo[]): Promise<string[]> {
|
|
241
|
+
const supportedTypes: Set<string> = new Set();
|
|
242
|
+
|
|
243
|
+
const validationPromises = tracks
|
|
244
|
+
.filter(t => t.type === 'video' || t.type === 'audio')
|
|
245
|
+
.map(async (track) => {
|
|
246
|
+
const result = await WebCodecsPlayerImpl.isTrackSupported(track);
|
|
247
|
+
if (result.supported) {
|
|
248
|
+
supportedTypes.add(track.type);
|
|
249
|
+
}
|
|
250
|
+
return { track, supported: result.supported };
|
|
251
|
+
});
|
|
252
|
+
|
|
253
|
+
const results = await Promise.all(validationPromises);
|
|
254
|
+
|
|
255
|
+
// Log validation results for debugging
|
|
256
|
+
for (const { track, supported } of results) {
|
|
257
|
+
console.debug(`[WebCodecs] Track ${track.idx} (${track.type} ${track.codec}): ${supported ? 'supported' : 'UNSUPPORTED'}`);
|
|
258
|
+
}
|
|
259
|
+
|
|
260
|
+
return Array.from(supportedTypes);
|
|
261
|
+
}
|
|
262
|
+
|
|
263
|
+
/**
 * Whether this player handles the given source mimetype.
 */
isMimeSupported(mimetype: string): boolean {
  return this.capability.mimes.some(m => m === mimetype);
}
|
|
266
|
+
|
|
267
|
+
/**
 * Synchronous feasibility check for this browser/source combination.
 * Returns false when required APIs are missing or the source would be
 * blocked as mixed content; otherwise returns the track types expected to
 * be playable ('video' / 'audio' / 'subtitle').
 *
 * Codecs not yet in the cache are optimistically assumed playable here and
 * validated for real (async) during initialize(), since this method must
 * stay synchronous.
 */
isBrowserSupported(
  mimetype: string,
  source: StreamSource,
  streamInfo: StreamInfo
): boolean | string[] {
  // Hard requirements: WebSocket transport, a decode worker, and the
  // WebCodecs decoders (the latter are only exposed over HTTPS).
  const hasRequiredApis =
    'WebSocket' in window &&
    'Worker' in window &&
    'VideoDecoder' in window &&
    'AudioDecoder' in window;
  if (!hasRequiredApis) {
    return false;
  }

  // Reject ws:// sources on https:// pages (mixed content would be blocked).
  const sourceUrl = new URL(source.url.replace(/^ws/, 'http'), location.href);
  if (location.protocol === 'https:' && sourceUrl.protocol === 'http:') {
    return false;
  }

  // Collect playable track types, consulting the codec cache when populated.
  // Reference: rawws.js tests codecs via isConfigSupported() before selection.
  const playableTracks: Record<string, boolean> = {};
  for (const track of streamInfo.meta.tracks) {
    if (track.type === 'video' || track.type === 'audio') {
      const cacheKey = WebCodecsPlayerImpl.getCodecCacheKey(track as any);
      if (WebCodecsPlayerImpl.codecCache.has(cacheKey)) {
        // Known codec: trust the cached probe result.
        if (WebCodecsPlayerImpl.codecCache.get(cacheKey)) {
          playableTracks[track.type] = true;
        }
      } else {
        // Unknown codec: assume supported for now; initialize() validates.
        playableTracks[track.type] = true;
      }
    } else if (track.type === 'meta' && track.codec === 'subtitle') {
      // Subtitles are rendered via a text track.
      playableTracks['subtitle'] = true;
    }
  }

  const types = Object.keys(playableTracks);
  return types.length > 0 ? types : false;
}
|
|
320
|
+
|
|
321
|
+
/**
 * Build the full playback stack inside `container` and return the created
 * <video> element: resets per-session state, pre-registers track metadata
 * from `streamInfo`, creates the video element + MediaStream, spins up the
 * decode worker and SyncController, connects the raw WebSocket, negotiates
 * codecs, and proactively creates per-track pipelines.
 *
 * @param container  Host element; receives the created <video>.
 * @param source     Stream source; `source.url` must already be the raw-WS URL.
 * @param options    Generic player options (may also carry WebCodecs extras).
 * @param streamInfo Optional pre-fetched stream metadata used for track
 *                   pre-registration and codec validation before connecting.
 * @returns The attached, configured HTMLVideoElement.
 * @throws Re-throws WebSocket connection failures (after emitting 'error').
 */
async initialize(
  container: HTMLElement,
  source: StreamSource,
  options: PlayerOptions,
  streamInfo?: StreamInfo
): Promise<HTMLVideoElement> {
  // Clear any leftover state from previous initialization FIRST.
  // This fixes a race condition where an async destroy() would otherwise
  // clear state AFTER a new initialize() has already populated it.
  this.tracksByIndex.clear();
  this.pipelines.clear();
  this.tracks = [];
  this.queuedInitData.clear();
  this.queuedChunks.clear();
  this.isDestroyed = false;
  this._duration = Infinity;
  this._currentTime = 0;
  this._bufferMs = 0;
  this._avDrift = 0;
  this._framesDropped = 0;
  this._framesDecoded = 0;
  this._bytesReceived = 0;
  this._messagesReceived = 0;

  this.container = container;
  container.classList.add('fw-player-container');

  // Pre-populate track metadata from streamInfo (fetched via HTTP before WebSocket).
  // This is how the reference player (rawws.js) gets track info - from MistVideo.info.meta.tracks.
  if (streamInfo?.meta?.tracks) {
    this.log(`Pre-populating ${streamInfo.meta.tracks.length} tracks from streamInfo`);
    for (const track of streamInfo.meta.tracks) {
      if (track.idx !== undefined) {
        // Convert StreamTrack to TrackInfo (WebCodecs format).
        const trackInfo: TrackInfo = {
          idx: track.idx,
          type: track.type as TrackInfo['type'],
          codec: track.codec,
          codecstring: track.codecstring,
          init: track.init,
          width: track.width,
          height: track.height,
          fpks: track.fpks,
          channels: track.channels,
          rate: track.rate,
          size: track.size,
        };
        this.tracksByIndex.set(track.idx, trackInfo);
        this.log(`Pre-registered track ${track.idx}: ${track.type} ${track.codec}`);
      }
    }
  }

  // Parse WebCodecs-specific options (debug flags, latency profile overrides).
  const wcOptions = options as PlayerOptions & WebCodecsPlayerOptions;
  this.debugging = wcOptions.debug ?? wcOptions.devMode ?? false;
  this.verboseDebugging = wcOptions.verboseDebug ?? false;

  // Determine stream type (anything not explicitly 'live' is treated as VOD).
  this.streamType = (source as any).type === 'live' ? 'live' : 'vod';

  // Select latency profile: explicit option wins, else a default based on liveness;
  // custom overrides are merged on top.
  const profileName = wcOptions.latencyProfile ?? selectDefaultProfile(this.streamType === 'live');
  const profile = mergeLatencyProfile(profileName, wcOptions.customLatencyProfile);

  this.log(`Initializing WebCodecs player with ${profile.name} profile`);

  // Create the video element that will render the generated MediaStream.
  const video = document.createElement('video');
  video.classList.add('fw-player-video');
  video.setAttribute('playsinline', '');
  video.setAttribute('crossorigin', 'anonymous');

  if (options.autoplay) video.autoplay = true;
  if (options.muted) video.muted = true;
  video.controls = options.controls === true;
  // Looping only makes sense for VOD content.
  if (options.loop && this.streamType !== 'live') video.loop = true;
  if (options.poster) video.poster = options.poster;

  this.videoElement = video;
  container.appendChild(video);

  // Keep paused state in sync with the actual element state, and mirror it
  // to the worker so frame timing can pause too. _suppressPlayPauseSync
  // guards against feedback when the player itself toggles playback.
  this._onVideoPlay = () => {
    if (this._suppressPlayPauseSync) return;
    this._isPaused = false;
    this.sendToWorker({
      type: 'frametiming',
      action: 'setPaused',
      paused: false,
      uid: this.workerUidCounter++,
    }).catch(() => {});
  };
  this._onVideoPause = () => {
    if (this._suppressPlayPauseSync) return;
    this._isPaused = true;
    this.sendToWorker({
      type: 'frametiming',
      action: 'setPaused',
      paused: true,
      uid: this.workerUidCounter++,
    }).catch(() => {});
  };
  video.addEventListener('play', this._onVideoPlay);
  video.addEventListener('pause', this._onVideoPause);

  // Create MediaStream for output; track generators are attached later.
  this.mediaStream = new MediaStream();
  video.srcObject = this.mediaStream;

  // Initialize the decode worker before anything can produce chunks.
  await this.initializeWorker();

  // Initialize sync controller: adjusts playback speed for live catchup and
  // can request fast-forward from the WebSocket.
  this.syncController = new SyncController({
    profile,
    isLive: this.streamType === 'live',
    onSpeedChange: (main, tweak) => {
      this.sendToWorker({
        type: 'frametiming',
        action: 'setSpeed',
        speed: main,
        tweak,
        uid: this.workerUidCounter++,
      });
      if (this.videoElement) {
        this.videoElement.playbackRate = main * tweak;
      }
    },
    onFastForwardRequest: (ms) => {
      this.wsController?.fastForward(ms);
    },
  });

  // Initialize WebSocket - URL should already be .raw from source selection.
  this.wsController = new WebSocketController(source.url, {
    debug: this.debugging,
  });

  this.setupWebSocketHandlers();

  // Validate track codecs using isConfigSupported() BEFORE connecting.
  // Reference: rawws.js:75-137 tests each track's codec support.
  // This fixes "codec unsupported" errors by only sending verified codecs.
  const supportedAudioCodecs: Set<string> = new Set();
  const supportedVideoCodecs: Set<string> = new Set();

  if (streamInfo?.meta?.tracks) {
    this.log('Validating track codecs with isConfigSupported()...');

    for (const track of streamInfo.meta.tracks) {
      if (track.type === 'video' || track.type === 'audio') {
        const trackInfo: TrackInfo = {
          idx: track.idx ?? 0,
          type: track.type as 'video' | 'audio',
          codec: track.codec,
          codecstring: track.codecstring,
          init: track.init,
          width: track.width,
          height: track.height,
          channels: track.channels,
          rate: track.rate,
        };

        const result = await WebCodecsPlayerImpl.isTrackSupported(trackInfo);
        if (result.supported) {
          if (track.type === 'audio') {
            supportedAudioCodecs.add(track.codec);
          } else {
            supportedVideoCodecs.add(track.codec);
          }
          this.log(`Track ${track.idx} (${track.type} ${track.codec}): SUPPORTED`);
        } else {
          this.log(`Track ${track.idx} (${track.type} ${track.codec}): NOT SUPPORTED`, 'warn');
        }
      }
    }
  }

  // If no codecs validated, check if we have any tracks at all.
  if (supportedAudioCodecs.size === 0 && supportedVideoCodecs.size === 0) {
    // Fallback: Use default codec list if no tracks provided or all failed.
    // This handles streams where track info isn't available until WebSocket connects.
    this.log('No validated codecs, using default codec list');
    ['AAC', 'MP3', 'opus', 'FLAC', 'AC3'].forEach(c => supportedAudioCodecs.add(c));
    ['H264', 'HEVC', 'VP8', 'VP9', 'AV1', 'JPEG'].forEach(c => supportedVideoCodecs.add(c));
  }

  // Connect and request codec data.
  // Per MistServer rawws.js line 1544, we need to tell the server what codecs we support.
  // Format: [[ [audio codecs], [video codecs] ]] - audio FIRST per Object.values({audio:[], video:[]}) order.
  const supportedCombinations: string[][][] = [[
    Array.from(supportedAudioCodecs), // Audio codecs (position 0)
    Array.from(supportedVideoCodecs), // Video codecs (position 1)
  ]];

  this.log(`Requesting codecs: audio=[${supportedCombinations[0][0].join(', ')}], video=[${supportedCombinations[0][1].join(', ')}]`);

  try {
    await this.wsController.connect();
    this.wsController.requestCodecData(supportedCombinations);
  } catch (err) {
    this.log(`Failed to connect: ${err}`, 'error');
    this.emit('error', err instanceof Error ? err : new Error(String(err)));
    throw err;
  }

  // Proactively create pipelines for pre-populated tracks.
  // This ensures pipelines exist when first chunks arrive; they just need init data.
  for (const [idx, track] of this.tracksByIndex) {
    if (track.type === 'video' || track.type === 'audio') {
      this.log(`Creating pipeline proactively for track ${idx} (${track.type} ${track.codec})`);
      await this.createPipeline(track);
    }
  }

  // Set up video event listeners.
  this.setupVideoEventListeners(video, options);

  // Set up requestVideoFrameCallback for accurate frame timing.
  this.setupFrameCallback();

  this.isDestroyed = false;
  return video;
}
|
|
545
|
+
|
|
546
|
+
/**
 * Tear down the playback stack: frame callback, stats timer, WebSocket,
 * per-track pipelines, decode worker, MediaStream, and video element.
 * Idempotent — repeated calls after the first are no-ops.
 *
 * Intentionally does NOT clear tracks/tracksByIndex/queues: PlayerManager
 * reuses instances, so a concurrent initialize() may already have
 * re-populated them (clearing happens at the START of initialize()).
 */
async destroy(): Promise<void> {
  if (this.isDestroyed) return;
  this.isDestroyed = true;

  this.log('Destroying WebCodecs player');

  // Cancel frame callback
  this.cancelFrameCallback();

  // Stop stats interval
  if (this._statsInterval) {
    clearInterval(this._statsInterval);
    this._statsInterval = null;
  }

  // Stop WebSocket
  this.wsController?.disconnect();
  this.wsController = null;

  // Close all pipelines (sequentially; closePipeline is async)
  for (const pipeline of this.pipelines.values()) {
    await this.closePipeline(pipeline.idx, false);
  }
  this.pipelines.clear();

  // Terminate worker
  this.worker?.terminate();
  this.worker = null;
  this.workerListeners.clear();

  // Clean up MediaStream: stop and detach every generated track
  if (this.mediaStream) {
    for (const track of this.mediaStream.getTracks()) {
      track.stop();
      this.mediaStream.removeTrack(track);
    }
    this.mediaStream = null;
  }

  // Clean up video element: detach our play/pause listeners, cancel any
  // pending step-pause timer, then remove the element from the DOM
  if (this.videoElement) {
    if (this._onVideoPlay) {
      this.videoElement.removeEventListener('play', this._onVideoPlay);
      this._onVideoPlay = undefined;
    }
    if (this._onVideoPause) {
      this.videoElement.removeEventListener('pause', this._onVideoPause);
      this._onVideoPause = undefined;
    }
    if (this._stepPauseTimeout) {
      clearTimeout(this._stepPauseTimeout);
      this._stepPauseTimeout = null;
    }
    this._pendingStepPause = false;
    this.videoElement.srcObject = null;
    this.videoElement.remove();
    this.videoElement = null;
  }

  this.syncController = null;
  // NOTE: Don't clear tracks/tracksByIndex/queues here!
  // Since PlayerManager reuses instances, a concurrent initialize() may have
  // already pre-populated these. Clearing happens at the START of initialize().
}
|
|
610
|
+
|
|
611
|
+
// ============================================================================
|
|
612
|
+
// Worker Management
|
|
613
|
+
// ============================================================================
|
|
614
|
+
|
|
615
|
+
/**
|
|
616
|
+
* Try to load a worker from a URL with proper async error detection.
|
|
617
|
+
* new Worker() doesn't throw on invalid URLs - it fires error events async.
|
|
618
|
+
*/
|
|
619
|
+
private tryLoadWorker(url: string): Promise<Worker> {
|
|
620
|
+
return new Promise((resolve, reject) => {
|
|
621
|
+
let worker: Worker;
|
|
622
|
+
try {
|
|
623
|
+
worker = new Worker(url, { type: 'module' });
|
|
624
|
+
} catch (e) {
|
|
625
|
+
reject(e);
|
|
626
|
+
return;
|
|
627
|
+
}
|
|
628
|
+
|
|
629
|
+
const cleanup = () => {
|
|
630
|
+
clearTimeout(timeout);
|
|
631
|
+
worker.removeEventListener('error', onError);
|
|
632
|
+
worker.removeEventListener('message', onMessage);
|
|
633
|
+
};
|
|
634
|
+
|
|
635
|
+
const onError = (e: ErrorEvent) => {
|
|
636
|
+
cleanup();
|
|
637
|
+
worker.terminate();
|
|
638
|
+
reject(new Error(e.message || 'Worker failed to load'));
|
|
639
|
+
};
|
|
640
|
+
|
|
641
|
+
const onMessage = () => {
|
|
642
|
+
cleanup();
|
|
643
|
+
resolve(worker);
|
|
644
|
+
};
|
|
645
|
+
|
|
646
|
+
// Timeout: if no error after 500ms, assume loaded (worker may not send immediate message)
|
|
647
|
+
const timeout = setTimeout(() => {
|
|
648
|
+
cleanup();
|
|
649
|
+
resolve(worker);
|
|
650
|
+
}, 500);
|
|
651
|
+
|
|
652
|
+
worker.addEventListener('error', onError);
|
|
653
|
+
worker.addEventListener('message', onMessage);
|
|
654
|
+
});
|
|
655
|
+
}
|
|
656
|
+
|
|
657
|
+
private async initializeWorker(): Promise<void> {
|
|
658
|
+
// Worker paths to try in order:
|
|
659
|
+
// 1. Dev server path (Vite plugin serves /workers/* from source)
|
|
660
|
+
// 2. Production npm package path (relative to built module)
|
|
661
|
+
const paths = [
|
|
662
|
+
'/workers/decoder.worker.js',
|
|
663
|
+
];
|
|
664
|
+
|
|
665
|
+
// Add production path (may fail in dev but that's ok)
|
|
666
|
+
try {
|
|
667
|
+
paths.push(new URL('../workers/decoder.worker.js', import.meta.url).href);
|
|
668
|
+
} catch {
|
|
669
|
+
// import.meta.url may not work in all environments
|
|
670
|
+
}
|
|
671
|
+
|
|
672
|
+
let lastError: Error | null = null;
|
|
673
|
+
for (const path of paths) {
|
|
674
|
+
try {
|
|
675
|
+
this.log(`Trying worker path: ${path}`);
|
|
676
|
+
this.worker = await this.tryLoadWorker(path);
|
|
677
|
+
this.log(`Worker loaded from: ${path}`);
|
|
678
|
+
break;
|
|
679
|
+
} catch (e) {
|
|
680
|
+
lastError = e instanceof Error ? e : new Error(String(e));
|
|
681
|
+
this.log(`Worker path failed: ${path} - ${lastError.message}`, 'warn');
|
|
682
|
+
}
|
|
683
|
+
}
|
|
684
|
+
|
|
685
|
+
if (!this.worker) {
|
|
686
|
+
throw new Error(
|
|
687
|
+
'Failed to initialize WebCodecs worker. ' +
|
|
688
|
+
`Last error: ${lastError?.message ?? 'unknown'}`
|
|
689
|
+
);
|
|
690
|
+
}
|
|
691
|
+
|
|
692
|
+
// Set up worker event handlers (replace the ones from tryLoadWorker)
|
|
693
|
+
this.worker.onmessage = (event: MessageEvent<WorkerToMainMessage>) => {
|
|
694
|
+
this.handleWorkerMessage(event.data);
|
|
695
|
+
};
|
|
696
|
+
|
|
697
|
+
this.worker.onerror = (err) => {
|
|
698
|
+
this.log(`Worker error: ${err?.message ?? 'unknown error'}`, 'error');
|
|
699
|
+
this.emit('error', new Error(`Worker error: ${err?.message ?? 'unknown'}`));
|
|
700
|
+
};
|
|
701
|
+
|
|
702
|
+
// Configure debugging mode in worker
|
|
703
|
+
this.sendToWorker({
|
|
704
|
+
type: 'debugging',
|
|
705
|
+
value: this.verboseDebugging ? 'verbose' : this.debugging,
|
|
706
|
+
uid: this.workerUidCounter++,
|
|
707
|
+
});
|
|
708
|
+
}
|
|
709
|
+
|
|
710
|
+
private sendToWorker(msg: MainToWorkerMessage & { uid: number }, transfer?: Transferable[]): Promise<WorkerToMainMessage> {
|
|
711
|
+
return new Promise((resolve, reject) => {
|
|
712
|
+
// Reject with proper error if destroyed or no worker
|
|
713
|
+
// This prevents silent failures and allows callers to handle errors appropriately
|
|
714
|
+
if (this.isDestroyed) {
|
|
715
|
+
reject(new Error('Player destroyed'));
|
|
716
|
+
return;
|
|
717
|
+
}
|
|
718
|
+
if (!this.worker) {
|
|
719
|
+
reject(new Error('Worker not initialized'));
|
|
720
|
+
return;
|
|
721
|
+
}
|
|
722
|
+
|
|
723
|
+
const uid = msg.uid;
|
|
724
|
+
|
|
725
|
+
// Register listener for response
|
|
726
|
+
this.workerListeners.set(uid, (response) => {
|
|
727
|
+
this.workerListeners.delete(uid);
|
|
728
|
+
if (response.type === 'ack' && response.status === 'error') {
|
|
729
|
+
reject(new Error(response.error));
|
|
730
|
+
} else {
|
|
731
|
+
resolve(response);
|
|
732
|
+
}
|
|
733
|
+
});
|
|
734
|
+
|
|
735
|
+
if (transfer) {
|
|
736
|
+
this.worker.postMessage(msg, transfer);
|
|
737
|
+
} else {
|
|
738
|
+
this.worker.postMessage(msg);
|
|
739
|
+
}
|
|
740
|
+
});
|
|
741
|
+
}
|
|
742
|
+
|
|
743
|
+
/**
 * Central dispatcher for messages coming back from the decoder worker.
 *
 * First forwards the message to any per-uid listener registered by
 * sendToWorker (request/response correlation), then performs the
 * type-specific side effects below. Messages with an unknown type fall
 * through the switch and are ignored.
 */
private handleWorkerMessage(msg: WorkerToMainMessage): void {
  // Check for specific listener
  if (msg.uid !== undefined && this.workerListeners.has(msg.uid)) {
    this.workerListeners.get(msg.uid)!(msg);
  }

  // Handle message by type
  switch (msg.type) {
    case 'addtrack': {
      const pipeline = this.pipelines.get(msg.idx);
      if (pipeline && this.mediaStream) {
        // If track was created in worker (Safari), use it directly
        if (msg.track) {
          this.mediaStream.addTrack(msg.track);
        } else if (pipeline.generator) {
          // Otherwise use generator's track
          this.mediaStream.addTrack(pipeline.generator.getTrack());
        }
      }
      break;
    }

    case 'removetrack': {
      // Remove the generator-backed track from the output MediaStream.
      const pipeline = this.pipelines.get(msg.idx);
      if (pipeline?.generator && this.mediaStream) {
        const track = pipeline.generator.getTrack();
        this.mediaStream.removeTrack(track);
      }
      break;
    }

    case 'setplaybackrate': {
      // Worker-driven rate change applied to the <video> element.
      if (this.videoElement) {
        this.videoElement.playbackRate = msg.speed;
      }
      break;
    }

    case 'sendevent': {
      if (msg.kind === 'timeupdate') {
        // A new frame was presented; if a frame-step pulse is in flight,
        // this is the moment to re-pause the video element.
        if (this._pendingStepPause) {
          this.finishStepPause();
        }
        if (typeof msg.time === 'number' && Number.isFinite(msg.time)) {
          this._currentTime = msg.time;
          this.emit('timeupdate', this._currentTime);
        } else if (this.videoElement) {
          // No usable worker time: fall back to the element's clock.
          this.emit('timeupdate', this.videoElement.currentTime);
        }
      } else if (msg.kind === 'error') {
        this.emit('error', new Error(msg.message ?? 'Unknown error'));
      }
      break;
    }

    case 'writeframe': {
      // Safari audio: worker sends frames via postMessage, we write them here
      // Reference: rawws.js line 897-918
      // Each write is acknowledged back to the worker with ok/error status
      // keyed by the frame's uid.
      const pipeline = this.pipelines.get(msg.idx);
      if (pipeline?.safariAudioWriter) {
        const frame = msg.frame;
        const frameUid = msg.uid;
        pipeline.safariAudioWriter.write(frame).then(() => {
          this.worker?.postMessage({
            type: 'writeframe',
            idx: msg.idx,
            uid: frameUid,
            status: 'ok',
          });
        }).catch((err: Error) => {
          this.worker?.postMessage({
            type: 'writeframe',
            idx: msg.idx,
            uid: frameUid,
            status: 'error',
            error: err.message,
          });
        });
      } else {
        this.worker?.postMessage({
          type: 'writeframe',
          idx: msg.idx,
          uid: msg.uid,
          status: 'error',
          error: 'Pipeline not active or no audio writer',
        });
      }
      break;
    }

    case 'log': {
      // Relay worker-side log lines to the console when debugging is on.
      if (this.debugging) {
        const level = (msg as any).level ?? 'info';
        const logFn = level === 'error' ? console.error : level === 'warn' ? console.warn : console.log;
        logFn(`[WebCodecs Worker] ${msg.msg}`);
      }
      break;
    }

    case 'stats': {
      // Could emit stats for monitoring
      break;
    }

    case 'closed': {
      // Worker confirmed pipeline teardown; drop our bookkeeping entry.
      this.pipelines.delete(msg.idx);
      break;
    }
  }
}
|
|
853
|
+
|
|
854
|
+
// ============================================================================
|
|
855
|
+
// WebSocket Handlers
|
|
856
|
+
// ============================================================================
|
|
857
|
+
|
|
858
|
+
private setupWebSocketHandlers(): void {
|
|
859
|
+
if (!this.wsController) return;
|
|
860
|
+
|
|
861
|
+
this.wsController.on('codecdata', (msg) => this.handleCodecData(msg));
|
|
862
|
+
this.wsController.on('info', (msg) => this.handleInfo(msg));
|
|
863
|
+
this.wsController.on('ontime', (msg) => this.handleOnTime(msg));
|
|
864
|
+
this.wsController.on('tracks', (tracks) => this.handleTracksChange(tracks));
|
|
865
|
+
this.wsController.on('chunk', (chunk) => this.handleChunk(chunk));
|
|
866
|
+
this.wsController.on('stop', () => this.handleStop());
|
|
867
|
+
this.wsController.on('error', (err) => this.handleError(err));
|
|
868
|
+
this.wsController.on('statechange', (state) => {
|
|
869
|
+
this.log(`Connection state: ${state}`);
|
|
870
|
+
if (state === 'error') {
|
|
871
|
+
this.emit('error', new Error('WebSocket connection failed'));
|
|
872
|
+
}
|
|
873
|
+
});
|
|
874
|
+
}
|
|
875
|
+
|
|
876
|
+
private async handleCodecData(msg: CodecDataMessage): Promise<void> {
|
|
877
|
+
const codecs = msg.codecs ?? [];
|
|
878
|
+
const trackIndices = msg.tracks ?? []; // Array of track indices (numbers), NOT TrackInfo
|
|
879
|
+
this.log(`Received codec data: codecs=[${codecs.join(', ') || 'none'}], tracks=[${trackIndices.join(', ') || 'none'}]`);
|
|
880
|
+
|
|
881
|
+
if (codecs.length === 0 || trackIndices.length === 0) {
|
|
882
|
+
this.log('No playable codecs/tracks selected by server', 'warn');
|
|
883
|
+
// Still start playback - info message may populate tracks later
|
|
884
|
+
this.wsController?.play();
|
|
885
|
+
return;
|
|
886
|
+
}
|
|
887
|
+
|
|
888
|
+
// Store codec strings by track index for later lookup
|
|
889
|
+
// Per rawws.js: codecs[i] corresponds to tracks[i]
|
|
890
|
+
for (let i = 0; i < trackIndices.length; i++) {
|
|
891
|
+
const trackIdx = trackIndices[i];
|
|
892
|
+
const codec = codecs[i];
|
|
893
|
+
if (codec) {
|
|
894
|
+
// If we have track metadata from info message, update it with codec
|
|
895
|
+
const existingTrack = this.tracksByIndex.get(trackIdx);
|
|
896
|
+
if (existingTrack) {
|
|
897
|
+
existingTrack.codec = codec;
|
|
898
|
+
} else {
|
|
899
|
+
// Create minimal track info - will be filled in by info message
|
|
900
|
+
this.tracksByIndex.set(trackIdx, {
|
|
901
|
+
idx: trackIdx,
|
|
902
|
+
type: codec.match(/^(H264|HEVC|VP[89]|AV1|JPEG)/i) ? 'video' :
|
|
903
|
+
codec.match(/^(AAC|MP3|opus|FLAC|AC3|pcm)/i) ? 'audio' : 'meta',
|
|
904
|
+
codec,
|
|
905
|
+
});
|
|
906
|
+
}
|
|
907
|
+
this.log(`Track ${trackIdx}: codec=${codec}`);
|
|
908
|
+
}
|
|
909
|
+
}
|
|
910
|
+
|
|
911
|
+
// Create pipelines for selected tracks that have metadata
|
|
912
|
+
for (const trackIdx of trackIndices) {
|
|
913
|
+
const track = this.tracksByIndex.get(trackIdx);
|
|
914
|
+
if (track && (track.type === 'video' || track.type === 'audio')) {
|
|
915
|
+
await this.createPipeline(track);
|
|
916
|
+
}
|
|
917
|
+
}
|
|
918
|
+
|
|
919
|
+
// Start playback
|
|
920
|
+
this.wsController?.play();
|
|
921
|
+
}
|
|
922
|
+
|
|
923
|
+
/**
|
|
924
|
+
* Handle stream info message containing track metadata
|
|
925
|
+
* This is sent by MistServer with full track information
|
|
926
|
+
*/
|
|
927
|
+
private async handleInfo(msg: InfoMessage): Promise<void> {
|
|
928
|
+
this.log('Received stream info');
|
|
929
|
+
|
|
930
|
+
// Extract tracks from meta.tracks object
|
|
931
|
+
if (msg.meta?.tracks) {
|
|
932
|
+
const tracksObj = msg.meta.tracks;
|
|
933
|
+
this.log(`Info contains ${Object.keys(tracksObj).length} tracks`);
|
|
934
|
+
|
|
935
|
+
for (const [name, track] of Object.entries(tracksObj)) {
|
|
936
|
+
// Store track by its index for lookup when chunks arrive
|
|
937
|
+
if (track.idx !== undefined) {
|
|
938
|
+
this.tracksByIndex.set(track.idx, track);
|
|
939
|
+
this.log(`Registered track ${track.idx}: ${track.type} ${track.codec}`);
|
|
940
|
+
|
|
941
|
+
// Process any queued init data for this track
|
|
942
|
+
if (this.queuedInitData.has(track.idx)) {
|
|
943
|
+
if (track.type === 'video' || track.type === 'audio') {
|
|
944
|
+
this.log(`Processing queued INIT data for track ${track.idx}`);
|
|
945
|
+
await this.createPipeline(track);
|
|
946
|
+
const initData = this.queuedInitData.get(track.idx)!;
|
|
947
|
+
this.configurePipeline(track.idx, initData);
|
|
948
|
+
this.queuedInitData.delete(track.idx);
|
|
949
|
+
}
|
|
950
|
+
}
|
|
951
|
+
}
|
|
952
|
+
}
|
|
953
|
+
|
|
954
|
+
// Also update tracks array
|
|
955
|
+
this.tracks = Object.values(tracksObj);
|
|
956
|
+
}
|
|
957
|
+
}
|
|
958
|
+
|
|
959
|
+
private handleOnTime(msg: OnTimeMessage): void {
|
|
960
|
+
// Update sync controller with server time
|
|
961
|
+
this.syncController?.updateServerTime(msg.current);
|
|
962
|
+
|
|
963
|
+
// Update current time if no frame callback available
|
|
964
|
+
if (this._frameCallbackId === null) {
|
|
965
|
+
this._currentTime = msg.current;
|
|
966
|
+
}
|
|
967
|
+
|
|
968
|
+
// Record server delay
|
|
969
|
+
const delay = this.wsController?.getServerDelay() ?? 0;
|
|
970
|
+
if (delay > 0) {
|
|
971
|
+
this.syncController?.recordServerDelay(delay);
|
|
972
|
+
}
|
|
973
|
+
|
|
974
|
+
// Update duration from server (VOD streams have finite duration)
|
|
975
|
+
if (msg.total !== undefined && isFinite(msg.total) && msg.total > 0) {
|
|
976
|
+
this._duration = msg.total;
|
|
977
|
+
}
|
|
978
|
+
|
|
979
|
+
// Update buffer level
|
|
980
|
+
const syncState = this.syncController?.getState();
|
|
981
|
+
if (syncState) {
|
|
982
|
+
this._bufferMs = syncState.buffer.current;
|
|
983
|
+
}
|
|
984
|
+
|
|
985
|
+
// Create pipelines for tracks mentioned in on_time.tracks (like reference player)
|
|
986
|
+
if (msg.tracks && msg.tracks.length > 0) {
|
|
987
|
+
for (const trackIdx of msg.tracks) {
|
|
988
|
+
if (!this.pipelines.has(trackIdx)) {
|
|
989
|
+
const track = this.tracksByIndex.get(trackIdx);
|
|
990
|
+
if (track && (track.type === 'video' || track.type === 'audio')) {
|
|
991
|
+
this.log(`Creating pipeline from on_time for track ${track.idx} (${track.type} ${track.codec})`);
|
|
992
|
+
this.createPipeline(track).then(() => {
|
|
993
|
+
// Process any queued init data
|
|
994
|
+
const queuedInit = this.queuedInitData.get(track.idx);
|
|
995
|
+
if (queuedInit) {
|
|
996
|
+
this.configurePipeline(track.idx, queuedInit);
|
|
997
|
+
this.queuedInitData.delete(track.idx);
|
|
998
|
+
}
|
|
999
|
+
});
|
|
1000
|
+
}
|
|
1001
|
+
}
|
|
1002
|
+
}
|
|
1003
|
+
}
|
|
1004
|
+
}
|
|
1005
|
+
|
|
1006
|
+
private async handleTracksChange(tracks: TrackInfo[]): Promise<void> {
|
|
1007
|
+
this.log(`Tracks changed: ${tracks.map(t => `${t.idx}:${t.type}`).join(', ')}`);
|
|
1008
|
+
|
|
1009
|
+
// Check if codecs changed
|
|
1010
|
+
const newTrackIds = new Set(tracks.map(t => t.idx));
|
|
1011
|
+
const oldTrackIds = new Set(this.pipelines.keys());
|
|
1012
|
+
|
|
1013
|
+
// Remove old pipelines
|
|
1014
|
+
for (const idx of oldTrackIds) {
|
|
1015
|
+
if (!newTrackIds.has(idx)) {
|
|
1016
|
+
await this.closePipeline(idx, true);
|
|
1017
|
+
}
|
|
1018
|
+
}
|
|
1019
|
+
|
|
1020
|
+
// Update tracksByIndex and create new pipelines
|
|
1021
|
+
for (const track of tracks) {
|
|
1022
|
+
this.tracksByIndex.set(track.idx, track);
|
|
1023
|
+
|
|
1024
|
+
if (track.type === 'video' || track.type === 'audio') {
|
|
1025
|
+
if (!this.pipelines.has(track.idx)) {
|
|
1026
|
+
await this.createPipeline(track);
|
|
1027
|
+
}
|
|
1028
|
+
}
|
|
1029
|
+
}
|
|
1030
|
+
|
|
1031
|
+
this.tracks = tracks;
|
|
1032
|
+
}
|
|
1033
|
+
|
|
1034
|
+
/**
 * Route one incoming media chunk to its decode pipeline.
 *
 * Handles four situations, in order:
 *  1. Unknown track: queue INIT data / drop undecodable chunks, or create
 *     a pipeline lazily if metadata exists, then re-enter with this chunk.
 *  2. INIT chunk: configure the pipeline's decoder.
 *  3. Unconfigured pipeline: queue the chunk (configuring on the first
 *     audio frame or on a video keyframe).
 *  4. Configured pipeline: record jitter and forward to the worker.
 */
private handleChunk(chunk: RawChunk): void {
  if (this.isDestroyed) return;

  let pipeline = this.pipelines.get(chunk.trackIndex);

  // Create pipeline if missing - look up track from tracksByIndex (populated by info message)
  if (!pipeline) {
    let track = this.tracksByIndex.get(chunk.trackIndex);

    // If track info not available, try to infer from chunk type
    // MistServer track indices: video typically 1, audio typically 2, meta typically 9
    if (!track) {
      // INIT data for an unknown track - we need to infer the track type
      // For now, create a placeholder track entry based on common MistServer patterns
      if (isInitData(chunk)) {
        this.log(`Received INIT for unknown track ${chunk.trackIndex}, queuing for later`);
        // Queue the init data - it will be processed when track info becomes available
        this.queuedInitData.set(chunk.trackIndex, chunk.data);
        return;
      }

      // For regular chunks without track info, we can't decode without codec config
      this.log(`Received chunk for unknown track ${chunk.trackIndex} without track info`, 'warn');
      return;
    }

    if (track.type === 'video' || track.type === 'audio') {
      this.log(`Creating pipeline for discovered track ${track.idx} (${track.type} ${track.codec})`);
      this.createPipeline(track).then(() => {
        if (this.isDestroyed) return; // Guard against async completion after destroy
        // Process any queued init data for this track
        const queuedInit = this.queuedInitData.get(track!.idx);
        if (queuedInit) {
          this.configurePipeline(track!.idx, queuedInit);
          this.queuedInitData.delete(track!.idx);
        }
        // Re-process this chunk now that pipeline exists
        this.handleChunk(chunk);
      });
    }
    return;
  }

  // Handle init data
  if (isInitData(chunk)) {
    this.configurePipeline(pipeline.idx, chunk.data);
    return;
  }

  // Queue chunks until pipeline is configured (decoder needs init data first)
  // Per rawws.js: frames are queued when decoder is "unconfigured" (line 1408-1410)
  if (!pipeline.configured) {
    // For AUDIO tracks: configure on FIRST frame (audio doesn't have key/delta distinction)
    // Audio chunks are sent as type 0 (delta) by the server even though they're independent
    // Reference: rawws.js line 768-769 forces audio type to 'key'
    const isAudioTrack = pipeline.track.type === 'audio';

    // For VIDEO tracks: wait for KEY frame before configuring
    // This handles Annex B streams where SPS/PPS is inline with keyframes
    const shouldConfigure = isAudioTrack || chunk.type === 'key';

    if (shouldConfigure) {
      this.log(`Received ${chunk.type.toUpperCase()} frame for unconfigured ${pipeline.track.type} track ${chunk.trackIndex}, configuring`);

      // Queue this frame at the FRONT so it's sent before any DELTAs
      if (!this.queuedChunks.has(chunk.trackIndex)) {
        this.queuedChunks.set(chunk.trackIndex, []);
      }
      this.queuedChunks.get(chunk.trackIndex)!.unshift(chunk);

      // Configure without description (or with description from track.init if available)
      // For audio codecs like opus/mp3 that don't need init data, this works fine
      // For AAC, the description should come from track.init or the server will send INIT
      const initData = pipeline.track.init ? str2bin(pipeline.track.init) : new Uint8Array(0);
      this.configurePipeline(chunk.trackIndex, initData).catch(err => {
        this.log(`Failed to configure track ${chunk.trackIndex}: ${err}`, 'error');
      });
      return;
    }

    // Otherwise queue the chunk (video delta before first keyframe)
    if (!this.queuedChunks.has(chunk.trackIndex)) {
      this.queuedChunks.set(chunk.trackIndex, []);
    }
    this.queuedChunks.get(chunk.trackIndex)!.push(chunk);
    if (this.verboseDebugging) {
      this.log(`Queued chunk for track ${chunk.trackIndex} (waiting for decoder config)`);
    }
    return;
  }

  // Track jitter
  this.syncController?.recordChunkArrival(chunk.trackIndex, chunk.timestamp);

  // Send to worker for decoding
  this.sendChunkToWorker(chunk);
}
|
|
1131
|
+
|
|
1132
|
+
private sendChunkToWorker(chunk: RawChunk): void {
|
|
1133
|
+
const msg: MainToWorkerMessage = {
|
|
1134
|
+
type: 'receive',
|
|
1135
|
+
idx: chunk.trackIndex,
|
|
1136
|
+
chunk: {
|
|
1137
|
+
type: chunk.type === 'key' ? 'key' : 'delta',
|
|
1138
|
+
timestamp: getPresentationTimestamp(chunk),
|
|
1139
|
+
data: chunk.data,
|
|
1140
|
+
},
|
|
1141
|
+
uid: this.workerUidCounter++,
|
|
1142
|
+
};
|
|
1143
|
+
|
|
1144
|
+
this.worker?.postMessage(msg, [chunk.data.buffer]);
|
|
1145
|
+
}
|
|
1146
|
+
|
|
1147
|
+
private handleStop(): void {
|
|
1148
|
+
this.log('Stream stopped');
|
|
1149
|
+
this.emit('ended', undefined);
|
|
1150
|
+
}
|
|
1151
|
+
|
|
1152
|
+
private handleError(err: Error): void {
|
|
1153
|
+
this.log(`WebSocket error: ${err.message}`, 'error');
|
|
1154
|
+
this.emit('error', err);
|
|
1155
|
+
}
|
|
1156
|
+
|
|
1157
|
+
// ============================================================================
|
|
1158
|
+
// Pipeline Management
|
|
1159
|
+
// ============================================================================
|
|
1160
|
+
|
|
1161
|
+
/**
 * Create the decode pipeline for one track: register local bookkeeping,
 * ask the worker to create its decoder, set up a MediaStream track
 * generator via one of three browser-specific paths, and (only for codecs
 * with no init data) configure the decoder immediately.
 *
 * Idempotent: returns early if a pipeline for this track already exists.
 */
private async createPipeline(track: TrackInfo): Promise<void> {
  if (this.pipelines.has(track.idx)) return;

  this.log(`Creating pipeline for track ${track.idx} (${track.type} ${track.codec})`);

  const pipeline: PipelineInfo = {
    idx: track.idx,
    track,
    generator: null,
    configured: false,
  };

  // Register before any awaits so concurrent calls see it and bail out.
  this.pipelines.set(track.idx, pipeline);
  this.syncController?.addTrack(track.idx, track);

  // Create worker pipeline
  await this.sendToWorker({
    type: 'create',
    idx: track.idx,
    track,
    opts: {
      optimizeForLatency: this.streamType === 'live',
    },
    uid: this.workerUidCounter++,
  });

  // Create track generator - three paths:
  // 1. Chrome/Edge: MediaStreamTrackGenerator on main thread, transfer writable to worker
  // 2. Safari: VideoTrackGenerator in worker (video) or frame relay (audio)
  // 3. Firefox: Use canvas/AudioWorklet polyfill
  if (hasNativeMediaStreamTrackGenerator()) {
    // Chrome/Edge: Create generator and transfer writable to worker
    // @ts-ignore
    const generator = new MediaStreamTrackGenerator({ kind: track.type });
    pipeline.generator = {
      writable: generator.writable,
      getTrack: () => generator,
      close: () => generator.stop?.(),
    };

    // Transfer the writable stream so the worker writes frames directly.
    await this.sendToWorker(
      {
        type: 'setwritable',
        idx: track.idx,
        writable: generator.writable,
        uid: this.workerUidCounter++,
      },
      [generator.writable]
    );
  } else if (isSafari()) {
    // Safari: Worker uses VideoTrackGenerator (video) or frame relay (audio)
    // Reference: rawws.js line 1012-1037
    this.log(`Safari detected - using worker-based track generator for ${track.type}`);

    if (track.type === 'audio') {
      // Safari audio: create generator on main thread, frames relayed from worker
      // @ts-ignore - Safari has MediaStreamTrackGenerator for audio
      if (typeof MediaStreamTrackGenerator !== 'undefined') {
        // @ts-ignore
        const audioGen = new MediaStreamTrackGenerator({ kind: 'audio' });
        pipeline.safariAudioGenerator = audioGen;
        // Writer used by handleWorkerMessage's 'writeframe' relay path.
        pipeline.safariAudioWriter = audioGen.writable.getWriter();

        // Add track to stream
        if (this.mediaStream) {
          this.mediaStream.addTrack(audioGen);
        }
      }
    }

    // Ask worker to create generator (video uses VideoTrackGenerator, audio sets up relay)
    await this.sendToWorker({
      type: 'creategenerator',
      idx: track.idx,
      uid: this.workerUidCounter++,
    });
  } else {
    // Firefox/other: Use canvas/AudioWorklet polyfill
    pipeline.generator = createTrackGenerator(track.type as 'video' | 'audio');

    if (pipeline.generator.waitForInit) {
      await pipeline.generator.waitForInit();
    }

    // For polyfill, writable stays on main thread
    // Worker would need different architecture - for now, fall back to main thread decode
    this.log('Using MediaStreamTrackGenerator polyfill - main thread decode');

    // Add track to stream directly
    if (this.mediaStream && pipeline.generator) {
      this.mediaStream.addTrack(pipeline.generator.getTrack());
    }
  }

  // Per rawws.js: Do NOT configure from HTTP info automatically.
  // Wait for WebSocket binary INIT frames to configure decoders.
  // This ensures we use the exact init data the server sends for this session.
  //
  // However, if track.init is empty/undefined, the codec doesn't need init data
  // and we can configure immediately (per rawws.js line 1239-1241).
  // This applies to codecs like opus, mp3, vp8, vp9 that don't need init data.
  if (!track.init || track.init === '') {
    this.log(`Track ${track.idx} (${track.codec}) doesn't need init data, configuring immediately`);
    await this.configurePipeline(track.idx, new Uint8Array(0));
  } else {
    // For codecs that need init data (H264, HEVC, AAC), we have two paths:
    // 1. WebSocket sends INIT frame -> handleChunk triggers configurePipeline
    // 2. First frame arrives without prior INIT -> handleChunk uses track.init
    this.log(`Track ${track.idx} (${track.codec}) has init data (${track.init.length} bytes), waiting for first frame`);
  }
}
|
|
1272
|
+
|
|
1273
|
+
private async configurePipeline(idx: number, header: Uint8Array): Promise<void> {
|
|
1274
|
+
const pipeline = this.pipelines.get(idx);
|
|
1275
|
+
if (!pipeline || pipeline.configured) return;
|
|
1276
|
+
|
|
1277
|
+
this.log(`Configuring decoder for track ${idx}`);
|
|
1278
|
+
|
|
1279
|
+
// Copy the header to avoid transfer issues (neutered buffers)
|
|
1280
|
+
// The structured clone will copy this automatically
|
|
1281
|
+
const headerCopy = new Uint8Array(header);
|
|
1282
|
+
|
|
1283
|
+
await this.sendToWorker({
|
|
1284
|
+
type: 'configure',
|
|
1285
|
+
idx,
|
|
1286
|
+
header: headerCopy,
|
|
1287
|
+
uid: this.workerUidCounter++,
|
|
1288
|
+
});
|
|
1289
|
+
|
|
1290
|
+
pipeline.configured = true;
|
|
1291
|
+
|
|
1292
|
+
// Flush any queued chunks now that decoder is configured
|
|
1293
|
+
const queued = this.queuedChunks.get(idx);
|
|
1294
|
+
if (queued && queued.length > 0) {
|
|
1295
|
+
this.log(`Flushing ${queued.length} queued chunks for track ${idx}`);
|
|
1296
|
+
// Find first keyframe to start from (can't decode deltas without reference)
|
|
1297
|
+
let startIdx = 0;
|
|
1298
|
+
for (let i = 0; i < queued.length; i++) {
|
|
1299
|
+
if (queued[i].type === 'key') {
|
|
1300
|
+
startIdx = i;
|
|
1301
|
+
break;
|
|
1302
|
+
}
|
|
1303
|
+
}
|
|
1304
|
+
if (startIdx > 0) {
|
|
1305
|
+
this.log(`Skipping ${startIdx} delta frames, starting from keyframe`);
|
|
1306
|
+
}
|
|
1307
|
+
for (let i = startIdx; i < queued.length; i++) {
|
|
1308
|
+
this.sendChunkToWorker(queued[i]);
|
|
1309
|
+
}
|
|
1310
|
+
this.queuedChunks.delete(idx);
|
|
1311
|
+
}
|
|
1312
|
+
}
|
|
1313
|
+
|
|
1314
|
+
private async closePipeline(idx: number, waitEmpty: boolean): Promise<void> {
|
|
1315
|
+
const pipeline = this.pipelines.get(idx);
|
|
1316
|
+
if (!pipeline) return;
|
|
1317
|
+
|
|
1318
|
+
this.log(`Closing pipeline ${idx}`);
|
|
1319
|
+
|
|
1320
|
+
// Close worker pipeline
|
|
1321
|
+
await this.sendToWorker({
|
|
1322
|
+
type: 'close',
|
|
1323
|
+
idx,
|
|
1324
|
+
waitEmpty,
|
|
1325
|
+
uid: this.workerUidCounter++,
|
|
1326
|
+
});
|
|
1327
|
+
|
|
1328
|
+
// Close generator
|
|
1329
|
+
pipeline.generator?.close();
|
|
1330
|
+
|
|
1331
|
+
// Remove from sync controller
|
|
1332
|
+
this.syncController?.removeTrack(idx);
|
|
1333
|
+
|
|
1334
|
+
this.pipelines.delete(idx);
|
|
1335
|
+
}
|
|
1336
|
+
|
|
1337
|
+
// ============================================================================
|
|
1338
|
+
// Playback Control
|
|
1339
|
+
// ============================================================================
|
|
1340
|
+
|
|
1341
|
+
async play(): Promise<void> {
|
|
1342
|
+
this._isPaused = false;
|
|
1343
|
+
this.wsController?.play();
|
|
1344
|
+
this.sendToWorker({
|
|
1345
|
+
type: 'frametiming',
|
|
1346
|
+
action: 'setPaused',
|
|
1347
|
+
paused: false,
|
|
1348
|
+
uid: this.workerUidCounter++,
|
|
1349
|
+
});
|
|
1350
|
+
await this.videoElement?.play();
|
|
1351
|
+
}
|
|
1352
|
+
|
|
1353
|
+
pause(): void {
|
|
1354
|
+
this._isPaused = true;
|
|
1355
|
+
this.wsController?.hold();
|
|
1356
|
+
this.sendToWorker({
|
|
1357
|
+
type: 'frametiming',
|
|
1358
|
+
action: 'setPaused',
|
|
1359
|
+
paused: true,
|
|
1360
|
+
uid: this.workerUidCounter++,
|
|
1361
|
+
});
|
|
1362
|
+
this.videoElement?.pause();
|
|
1363
|
+
}
|
|
1364
|
+
|
|
1365
|
+
private finishStepPause(): void {
|
|
1366
|
+
if (!this.videoElement) {
|
|
1367
|
+
this._pendingStepPause = false;
|
|
1368
|
+
this._suppressPlayPauseSync = false;
|
|
1369
|
+
if (this._stepPauseTimeout) {
|
|
1370
|
+
clearTimeout(this._stepPauseTimeout);
|
|
1371
|
+
this._stepPauseTimeout = null;
|
|
1372
|
+
}
|
|
1373
|
+
return;
|
|
1374
|
+
}
|
|
1375
|
+
|
|
1376
|
+
if (this._stepPauseTimeout) {
|
|
1377
|
+
clearTimeout(this._stepPauseTimeout);
|
|
1378
|
+
this._stepPauseTimeout = null;
|
|
1379
|
+
}
|
|
1380
|
+
|
|
1381
|
+
this._pendingStepPause = false;
|
|
1382
|
+
this._suppressPlayPauseSync = false;
|
|
1383
|
+
try { this.videoElement.pause(); } catch {}
|
|
1384
|
+
}
|
|
1385
|
+
|
|
1386
|
+
frameStep(direction: -1 | 1, _seconds?: number): void {
|
|
1387
|
+
if (!this._isPaused) return;
|
|
1388
|
+
if (!this.videoElement) return;
|
|
1389
|
+
this.log(`Frame step requested dir=${direction} paused=${this._isPaused} videoPaused=${this.videoElement.paused}`);
|
|
1390
|
+
// Ensure worker is paused (in case pause didn't flow through)
|
|
1391
|
+
this.sendToWorker({
|
|
1392
|
+
type: 'frametiming',
|
|
1393
|
+
action: 'setPaused',
|
|
1394
|
+
paused: true,
|
|
1395
|
+
uid: this.workerUidCounter++,
|
|
1396
|
+
}).catch(() => {});
|
|
1397
|
+
|
|
1398
|
+
// MediaStream-backed video elements don't present new frames while paused.
|
|
1399
|
+
// Pulse playback briefly so the stepped frame can render, then pause again.
|
|
1400
|
+
if (this.videoElement.paused) {
|
|
1401
|
+
const video = this.videoElement;
|
|
1402
|
+
this._suppressPlayPauseSync = true;
|
|
1403
|
+
this._pendingStepPause = true;
|
|
1404
|
+
try {
|
|
1405
|
+
const maybePromise = video.play();
|
|
1406
|
+
if (maybePromise && typeof (maybePromise as Promise<void>).catch === 'function') {
|
|
1407
|
+
(maybePromise as Promise<void>).catch(() => {});
|
|
1408
|
+
}
|
|
1409
|
+
} catch {}
|
|
1410
|
+
|
|
1411
|
+
if ('requestVideoFrameCallback' in video) {
|
|
1412
|
+
(video as any).requestVideoFrameCallback(() => this.finishStepPause());
|
|
1413
|
+
}
|
|
1414
|
+
// Failsafe: avoid staying in suppressed state if no frame is delivered
|
|
1415
|
+
this._stepPauseTimeout = setTimeout(() => this.finishStepPause(), 200);
|
|
1416
|
+
}
|
|
1417
|
+
this.sendToWorker({
|
|
1418
|
+
type: 'framestep',
|
|
1419
|
+
direction,
|
|
1420
|
+
uid: this.workerUidCounter++,
|
|
1421
|
+
});
|
|
1422
|
+
}
|
|
1423
|
+
|
|
1424
|
+
/**
 * Seek to an absolute position.
 *
 * Flow: register the seek with the sync controller, optimistically emit
 * the new time for immediate UI feedback, flush the worker's decode
 * queues, then issue the seek to the server over the control WebSocket.
 *
 * @param time target position in seconds (converted to ms internally)
 */
seek(time: number): void {
  if (!this.wsController || !this.syncController) return;

  const timeMs = time * 1000;
  const seekId = this.syncController.startSeek(timeMs);

  // Optimistically update current time for immediate UI feedback
  this._currentTime = time;
  this.emit('timeupdate', this._currentTime);

  // Flush worker queues
  this.sendToWorker({
    type: 'seek',
    seekTime: timeMs,
    uid: this.workerUidCounter++,
  });

  // Send seek to server
  const desiredBuffer = this.syncController.getDesiredBuffer();
  this.wsController.seek(timeMs, desiredBuffer);

  // Mark seek complete after first frame (handled by worker)
  // In practice, we'd wait for first frame callback
  // NOTE(review): 100 ms is a heuristic — assumes the first post-seek frame
  // arrives quickly; if it hasn't, the seek is completed anyway and frame
  // timing is reset. Verify against the worker's first-frame signaling.
  setTimeout(() => {
    if (this.syncController?.isSeekActive(seekId)) {
      this.syncController.completeSeek(seekId);
      this.sendToWorker({
        type: 'frametiming',
        action: 'reset',
        uid: this.workerUidCounter++,
      });
    }
  }, 100);
}
|
|
1458
|
+
|
|
1459
|
+
setPlaybackRate(rate: number): void {
|
|
1460
|
+
this.syncController?.setMainSpeed(rate);
|
|
1461
|
+
}
|
|
1462
|
+
|
|
1463
|
+
/** Whether playback is logically paused (player state, not the <video> element's). */
isPaused(): boolean {
  return this._isPaused;
}
|
|
1466
|
+
|
|
1467
|
+
/** True when the current stream type is live rather than on-demand. */
isLive(): boolean {
  return this.streamType === 'live';
}
|
|
1470
|
+
|
|
1471
|
+
jumpToLive(): void {
|
|
1472
|
+
if (this.streamType === 'live' && this.wsController) {
|
|
1473
|
+
// For WebCodecs live, request fresh data from live edge
|
|
1474
|
+
// Send fast_forward to request 5 seconds of new data
|
|
1475
|
+
// Reference: rawws.js live catchup sends fast_forward
|
|
1476
|
+
const desiredBuffer = this.syncController?.getDesiredBuffer() ?? 2000;
|
|
1477
|
+
this.wsController.send({
|
|
1478
|
+
type: 'fast_forward',
|
|
1479
|
+
ff_add: 5000, // Request 5 seconds ahead
|
|
1480
|
+
});
|
|
1481
|
+
|
|
1482
|
+
// Also request buffer from current time to rebuild
|
|
1483
|
+
const serverTime = this.syncController?.getEstimatedServerTime() ?? 0;
|
|
1484
|
+
if (serverTime > 0) {
|
|
1485
|
+
this.wsController.seek(serverTime * 1000, desiredBuffer);
|
|
1486
|
+
}
|
|
1487
|
+
|
|
1488
|
+
this.log('Jump to live: requested fresh data from server');
|
|
1489
|
+
}
|
|
1490
|
+
}
|
|
1491
|
+
|
|
1492
|
+
/**
 * Whether seeking is currently possible.
 *
 * WebCodecs seeks via server commands over the control channel
 * (reference: rawws.js lines 1294-1304), so this overrides the default
 * MediaStream check in SeekingUtils: seekable whenever a WebSocket
 * controller exists and the player has not been destroyed.
 */
canSeek(): boolean {
  if (this.isDestroyed) return false;
  return this.wsController !== null;
}
|
|
1502
|
+
|
|
1503
|
+
// ============================================================================
|
|
1504
|
+
// Media Properties (Phase 2A)
|
|
1505
|
+
// ============================================================================
|
|
1506
|
+
|
|
1507
|
+
/** Stream duration in seconds (Infinity for live streams). */
get duration(): number {
  return this._duration;
}
|
|
1513
|
+
|
|
1514
|
+
/** Method-style accessor mirroring the `duration` getter. */
getDuration(): number {
  return this._duration;
}
|
|
1517
|
+
|
|
1518
|
+
/**
 * Current playback position in seconds.
 * Kept up to date from requestVideoFrameCallback metadata when available.
 */
get currentTime(): number {
  return this._currentTime;
}
|
|
1525
|
+
|
|
1526
|
+
/** Method-style accessor mirroring the `currentTime` getter. */
getCurrentTime(): number {
  return this._currentTime;
}
|
|
1529
|
+
|
|
1530
|
+
/**
 * Buffered time ranges, synthesized as one range spanning from the
 * current time to current time plus the tracked buffer level. Empty
 * when nothing is buffered.
 */
get buffered(): TimeRanges {
  if (this._bufferMs <= 0) return createTimeRanges([]);
  const start = this._currentTime;
  return createTimeRanges([[start, start + this._bufferMs / 1000]]);
}
|
|
1542
|
+
|
|
1543
|
+
/**
|
|
1544
|
+
* Get comprehensive player statistics
|
|
1545
|
+
*/
|
|
1546
|
+
async getStats(): Promise<WebCodecsStats> {
|
|
1547
|
+
const syncState = this.syncController?.getState();
|
|
1548
|
+
return {
|
|
1549
|
+
latency: {
|
|
1550
|
+
buffer: syncState?.buffer.current ?? 0,
|
|
1551
|
+
target: syncState?.buffer.desired ?? 0,
|
|
1552
|
+
jitter: syncState?.jitter.weighted ?? 0,
|
|
1553
|
+
},
|
|
1554
|
+
sync: {
|
|
1555
|
+
avDrift: this._avDrift,
|
|
1556
|
+
playbackSpeed: syncState?.playbackSpeed ?? 1,
|
|
1557
|
+
},
|
|
1558
|
+
decoder: {
|
|
1559
|
+
videoQueueSize: 0, // Will be populated from worker stats
|
|
1560
|
+
audioQueueSize: 0,
|
|
1561
|
+
framesDropped: this._framesDropped,
|
|
1562
|
+
framesDecoded: this._framesDecoded,
|
|
1563
|
+
},
|
|
1564
|
+
network: {
|
|
1565
|
+
bytesReceived: this._bytesReceived,
|
|
1566
|
+
messagesReceived: this._messagesReceived,
|
|
1567
|
+
},
|
|
1568
|
+
};
|
|
1569
|
+
}
|
|
1570
|
+
|
|
1571
|
+
// ============================================================================
|
|
1572
|
+
// Frame Timing (requestVideoFrameCallback)
|
|
1573
|
+
// ============================================================================
|
|
1574
|
+
|
|
1575
|
+
/**
|
|
1576
|
+
* Set up requestVideoFrameCallback for accurate frame timing
|
|
1577
|
+
* This provides vsync-aligned frame metadata for A/V sync
|
|
1578
|
+
*/
|
|
1579
|
+
private setupFrameCallback(): void {
|
|
1580
|
+
if (!this.videoElement) return;
|
|
1581
|
+
|
|
1582
|
+
// Check if requestVideoFrameCallback is available
|
|
1583
|
+
if ('requestVideoFrameCallback' in HTMLVideoElement.prototype) {
|
|
1584
|
+
const callback = (_now: DOMHighResTimeStamp, metadata: VideoFrameCallbackMetadata) => {
|
|
1585
|
+
if (this.isDestroyed || !this.videoElement) return;
|
|
1586
|
+
|
|
1587
|
+
this.onVideoFrame(metadata);
|
|
1588
|
+
|
|
1589
|
+
// Schedule next callback
|
|
1590
|
+
this._frameCallbackId = (this.videoElement as any).requestVideoFrameCallback(callback);
|
|
1591
|
+
};
|
|
1592
|
+
|
|
1593
|
+
this._frameCallbackId = (this.videoElement as any).requestVideoFrameCallback(callback);
|
|
1594
|
+
this.log('requestVideoFrameCallback enabled for accurate frame timing');
|
|
1595
|
+
} else {
|
|
1596
|
+
// Fallback: Use video element's currentTime directly
|
|
1597
|
+
this.log('requestVideoFrameCallback not available, using fallback timing');
|
|
1598
|
+
}
|
|
1599
|
+
}
|
|
1600
|
+
|
|
1601
|
+
/**
|
|
1602
|
+
* Handle video frame presentation callback
|
|
1603
|
+
* Updates current time
|
|
1604
|
+
*/
|
|
1605
|
+
private onVideoFrame(metadata: VideoFrameCallbackMetadata): void {
|
|
1606
|
+
// Update current time from actual frame presentation
|
|
1607
|
+
this._currentTime = metadata.mediaTime;
|
|
1608
|
+
|
|
1609
|
+
// Update buffer level from sync controller
|
|
1610
|
+
const syncState = this.syncController?.getState();
|
|
1611
|
+
if (syncState) {
|
|
1612
|
+
this._bufferMs = syncState.buffer.current;
|
|
1613
|
+
}
|
|
1614
|
+
|
|
1615
|
+
// Emit timeupdate event
|
|
1616
|
+
this.emit('timeupdate', this._currentTime);
|
|
1617
|
+
|
|
1618
|
+
// Update frame stats
|
|
1619
|
+
this._framesDecoded = metadata.presentedFrames;
|
|
1620
|
+
}
|
|
1621
|
+
|
|
1622
|
+
/**
|
|
1623
|
+
* Cancel frame callback on cleanup
|
|
1624
|
+
*/
|
|
1625
|
+
private cancelFrameCallback(): void {
|
|
1626
|
+
if (this._frameCallbackId !== null && this.videoElement) {
|
|
1627
|
+
if ('cancelVideoFrameCallback' in HTMLVideoElement.prototype) {
|
|
1628
|
+
(this.videoElement as any).cancelVideoFrameCallback(this._frameCallbackId);
|
|
1629
|
+
}
|
|
1630
|
+
this._frameCallbackId = null;
|
|
1631
|
+
}
|
|
1632
|
+
}
|
|
1633
|
+
|
|
1634
|
+
// ============================================================================
|
|
1635
|
+
// Logging
|
|
1636
|
+
// ============================================================================
|
|
1637
|
+
|
|
1638
|
+
/**
 * Prefixed console logger. Info-level messages are emitted only while
 * debugging is enabled; warnings and errors always pass through.
 */
private log(message: string, level: 'info' | 'warn' | 'error' = 'info'): void {
  if (level !== 'info' || this.debugging) {
    console[level](`[WebCodecs] ${message}`);
  }
}
|
|
1642
|
+
}
|
|
1643
|
+
|
|
1644
|
+
// Re-exports so consumers can use the building blocks directly:
// transport (WebSocketController), A/V sync (SyncController), jitter
// tracking, latency presets, raw chunk parsing, and the shared types.
export { WebSocketController } from './WebSocketController';
export { SyncController } from './SyncController';
export { JitterTracker, MultiTrackJitterTracker } from './JitterBuffer';
export { getLatencyProfile, mergeLatencyProfile, LATENCY_PROFILES } from './LatencyProfiles';
export { parseRawChunk, RawChunkParser } from './RawChunkParser';
export * from './types';
|