@livepeer-frameworks/player-core 0.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (120)
  1. package/dist/cjs/index.js +19493 -0
  2. package/dist/cjs/index.js.map +1 -0
  3. package/dist/esm/index.js +19398 -0
  4. package/dist/esm/index.js.map +1 -0
  5. package/dist/player.css +2140 -0
  6. package/dist/types/core/ABRController.d.ts +164 -0
  7. package/dist/types/core/CodecUtils.d.ts +54 -0
  8. package/dist/types/core/Disposable.d.ts +61 -0
  9. package/dist/types/core/EventEmitter.d.ts +73 -0
  10. package/dist/types/core/GatewayClient.d.ts +144 -0
  11. package/dist/types/core/InteractionController.d.ts +121 -0
  12. package/dist/types/core/LiveDurationProxy.d.ts +102 -0
  13. package/dist/types/core/MetaTrackManager.d.ts +220 -0
  14. package/dist/types/core/MistReporter.d.ts +163 -0
  15. package/dist/types/core/MistSignaling.d.ts +148 -0
  16. package/dist/types/core/PlayerController.d.ts +665 -0
  17. package/dist/types/core/PlayerInterface.d.ts +230 -0
  18. package/dist/types/core/PlayerManager.d.ts +182 -0
  19. package/dist/types/core/PlayerRegistry.d.ts +27 -0
  20. package/dist/types/core/QualityMonitor.d.ts +184 -0
  21. package/dist/types/core/ScreenWakeLockManager.d.ts +70 -0
  22. package/dist/types/core/SeekingUtils.d.ts +142 -0
  23. package/dist/types/core/StreamStateClient.d.ts +108 -0
  24. package/dist/types/core/SubtitleManager.d.ts +111 -0
  25. package/dist/types/core/TelemetryReporter.d.ts +79 -0
  26. package/dist/types/core/TimeFormat.d.ts +97 -0
  27. package/dist/types/core/TimerManager.d.ts +83 -0
  28. package/dist/types/core/UrlUtils.d.ts +81 -0
  29. package/dist/types/core/detector.d.ts +149 -0
  30. package/dist/types/core/index.d.ts +49 -0
  31. package/dist/types/core/scorer.d.ts +167 -0
  32. package/dist/types/core/selector.d.ts +9 -0
  33. package/dist/types/index.d.ts +45 -0
  34. package/dist/types/lib/utils.d.ts +2 -0
  35. package/dist/types/players/DashJsPlayer.d.ts +102 -0
  36. package/dist/types/players/HlsJsPlayer.d.ts +70 -0
  37. package/dist/types/players/MewsWsPlayer/SourceBufferManager.d.ts +119 -0
  38. package/dist/types/players/MewsWsPlayer/WebSocketManager.d.ts +60 -0
  39. package/dist/types/players/MewsWsPlayer/index.d.ts +220 -0
  40. package/dist/types/players/MewsWsPlayer/types.d.ts +89 -0
  41. package/dist/types/players/MistPlayer.d.ts +25 -0
  42. package/dist/types/players/MistWebRTCPlayer/index.d.ts +133 -0
  43. package/dist/types/players/NativePlayer.d.ts +143 -0
  44. package/dist/types/players/VideoJsPlayer.d.ts +59 -0
  45. package/dist/types/players/WebCodecsPlayer/JitterBuffer.d.ts +118 -0
  46. package/dist/types/players/WebCodecsPlayer/LatencyProfiles.d.ts +64 -0
  47. package/dist/types/players/WebCodecsPlayer/RawChunkParser.d.ts +63 -0
  48. package/dist/types/players/WebCodecsPlayer/SyncController.d.ts +174 -0
  49. package/dist/types/players/WebCodecsPlayer/WebSocketController.d.ts +164 -0
  50. package/dist/types/players/WebCodecsPlayer/index.d.ts +149 -0
  51. package/dist/types/players/WebCodecsPlayer/polyfills/MediaStreamTrackGenerator.d.ts +105 -0
  52. package/dist/types/players/WebCodecsPlayer/types.d.ts +395 -0
  53. package/dist/types/players/WebCodecsPlayer/worker/decoder.worker.d.ts +13 -0
  54. package/dist/types/players/WebCodecsPlayer/worker/types.d.ts +197 -0
  55. package/dist/types/players/index.d.ts +14 -0
  56. package/dist/types/styles/index.d.ts +11 -0
  57. package/dist/types/types.d.ts +363 -0
  58. package/dist/types/vanilla/FrameWorksPlayer.d.ts +143 -0
  59. package/dist/types/vanilla/index.d.ts +19 -0
  60. package/dist/workers/decoder.worker.js +989 -0
  61. package/dist/workers/decoder.worker.js.map +1 -0
  62. package/package.json +80 -0
  63. package/src/core/ABRController.ts +550 -0
  64. package/src/core/CodecUtils.ts +257 -0
  65. package/src/core/Disposable.ts +120 -0
  66. package/src/core/EventEmitter.ts +113 -0
  67. package/src/core/GatewayClient.ts +439 -0
  68. package/src/core/InteractionController.ts +712 -0
  69. package/src/core/LiveDurationProxy.ts +270 -0
  70. package/src/core/MetaTrackManager.ts +753 -0
  71. package/src/core/MistReporter.ts +543 -0
  72. package/src/core/MistSignaling.ts +346 -0
  73. package/src/core/PlayerController.ts +2829 -0
  74. package/src/core/PlayerInterface.ts +432 -0
  75. package/src/core/PlayerManager.ts +900 -0
  76. package/src/core/PlayerRegistry.ts +149 -0
  77. package/src/core/QualityMonitor.ts +597 -0
  78. package/src/core/ScreenWakeLockManager.ts +163 -0
  79. package/src/core/SeekingUtils.ts +364 -0
  80. package/src/core/StreamStateClient.ts +457 -0
  81. package/src/core/SubtitleManager.ts +297 -0
  82. package/src/core/TelemetryReporter.ts +308 -0
  83. package/src/core/TimeFormat.ts +205 -0
  84. package/src/core/TimerManager.ts +209 -0
  85. package/src/core/UrlUtils.ts +179 -0
  86. package/src/core/detector.ts +382 -0
  87. package/src/core/index.ts +140 -0
  88. package/src/core/scorer.ts +553 -0
  89. package/src/core/selector.ts +16 -0
  90. package/src/global.d.ts +11 -0
  91. package/src/index.ts +75 -0
  92. package/src/lib/utils.ts +6 -0
  93. package/src/players/DashJsPlayer.ts +642 -0
  94. package/src/players/HlsJsPlayer.ts +483 -0
  95. package/src/players/MewsWsPlayer/SourceBufferManager.ts +572 -0
  96. package/src/players/MewsWsPlayer/WebSocketManager.ts +241 -0
  97. package/src/players/MewsWsPlayer/index.ts +1065 -0
  98. package/src/players/MewsWsPlayer/types.ts +106 -0
  99. package/src/players/MistPlayer.ts +188 -0
  100. package/src/players/MistWebRTCPlayer/index.ts +703 -0
  101. package/src/players/NativePlayer.ts +820 -0
  102. package/src/players/VideoJsPlayer.ts +643 -0
  103. package/src/players/WebCodecsPlayer/JitterBuffer.ts +299 -0
  104. package/src/players/WebCodecsPlayer/LatencyProfiles.ts +151 -0
  105. package/src/players/WebCodecsPlayer/RawChunkParser.ts +151 -0
  106. package/src/players/WebCodecsPlayer/SyncController.ts +456 -0
  107. package/src/players/WebCodecsPlayer/WebSocketController.ts +564 -0
  108. package/src/players/WebCodecsPlayer/index.ts +1650 -0
  109. package/src/players/WebCodecsPlayer/polyfills/MediaStreamTrackGenerator.ts +379 -0
  110. package/src/players/WebCodecsPlayer/types.ts +542 -0
  111. package/src/players/WebCodecsPlayer/worker/decoder.worker.ts +1360 -0
  112. package/src/players/WebCodecsPlayer/worker/types.ts +276 -0
  113. package/src/players/index.ts +22 -0
  114. package/src/styles/animations.css +21 -0
  115. package/src/styles/index.ts +52 -0
  116. package/src/styles/player.css +2126 -0
  117. package/src/styles/tailwind.css +1015 -0
  118. package/src/types.ts +421 -0
  119. package/src/vanilla/FrameWorksPlayer.ts +367 -0
  120. package/src/vanilla/index.ts +22 -0
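The hunk below is the largest file in this diff, package/src/players/WebCodecsPlayer/worker/decoder.worker.ts (+1360 lines, entry 111 above). For orientation, a minimal consumer sketch; the package name comes from the header, while the FrameWorksPlayer export and CSS path are unverified assumptions inferred from the file list:

    // Hypothetical consumer sketch (entry points inferred from the file list, not verified):
    import { FrameWorksPlayer } from '@livepeer-frameworks/player-core';
    import '@livepeer-frameworks/player-core/dist/player.css';
    // FrameWorksPlayer is declared in package/src/vanilla/FrameWorksPlayer.ts (entry 119);
    // its constructor signature is not part of this hunk, so construction is omitted here.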
@@ -0,0 +1,1360 @@
1
+ /**
2
+ * WebCodecs Decoder Worker
3
+ *
4
+ * Handles VideoDecoder and AudioDecoder in a dedicated worker thread.
5
+ * This keeps decoding off the main thread for better performance.
6
+ *
7
+ * Features:
8
+ * - Video/Audio pipeline management per track
9
+ * - Frame scheduling based on timestamps and playback speed
10
+ * - Stats collection and reporting
11
+ * - Seek handling with queue flush
12
+ */
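// ---------------------------------------------------------------------------
// Editorial sketch (not part of the package): the main-thread half of this
// protocol, using the message shapes consumed by self.onmessage below. Field
// names mirror the handlers in this file; the worker URL is an assumption.
//
//   const worker = new Worker(new URL('./decoder.worker.js', import.meta.url));
//   worker.postMessage({ type: 'debugging', value: true });
//   worker.postMessage({ type: 'create', idx: 1, track, opts: { optimizeForLatency: true }, uid: 1 });
//   worker.postMessage({ type: 'configure', idx: 1, header: description, uid: 2 });
//   worker.postMessage({ type: 'receive', idx: 1, chunk: { type: 'key', timestamp: 0, data }, uid: 3 });
// ---------------------------------------------------------------------------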
13
+
14
+ import type {
15
+ MainToWorkerMessage,
16
+ WorkerToMainMessage,
17
+ PipelineState,
18
+ FrameTiming,
19
+ DecodedFrame,
20
+ VideoDecoderInit,
21
+ AudioDecoderInit,
22
+ } from './types';
23
+ import type { TrackInfo, PipelineStats, FrameTrackerStats } from '../types';
24
+
25
+ // ============================================================================
26
+ // Global State
27
+ // ============================================================================
28
+
29
+ const pipelines = new Map<number, PipelineState>();
30
+ let debugging: boolean | 'verbose' = false;
31
+ let uidCounter = 0;
32
+
33
+ // Frame timing state (shared across all pipelines)
34
+ const frameTiming: FrameTiming = {
35
+ in: 0,
36
+ decoded: 0,
37
+ out: 0,
38
+ speed: {
39
+ main: 1,
40
+ tweak: 1,
41
+ combined: 1,
42
+ },
43
+ seeking: false,
44
+ paused: false,
45
+ serverTime: 0,
46
+ };
47
+
48
+ // Per-track wall-clock reference points for frame scheduling
49
+ // Each track gets its own baseTime to handle different timestamp bases for A/V
50
+ const trackBaseTimes = new Map<number, number>();
51
+
52
+ // Buffer warmup state - prevents initial jitter by waiting for buffer to build
53
+ // Before warmup, frames are queued but not output
54
+ let warmupComplete = false;
55
+ let warmupStartTime: number | null = null;
56
+ const WARMUP_BUFFER_MS = 100; // Wait for ~100ms of frames before starting output
57
+ const WARMUP_TIMEOUT_MS = 300; // Reduced from 500ms - start faster to reduce latency
58
+
59
+ /**
60
+ * Get or initialize baseTime for a specific track
61
+ */
62
+ function getTrackBaseTime(idx: number, frameTimeMs: number, now: number): number {
63
+ if (!trackBaseTimes.has(idx)) {
64
+ trackBaseTimes.set(idx, now - frameTimeMs / frameTiming.speed.combined);
65
+ log(`Track ${idx} baseTime: ${trackBaseTimes.get(idx)!.toFixed(0)} (first frame @ ${frameTimeMs.toFixed(0)}ms)`);
66
+ }
67
+ return trackBaseTimes.get(idx)!;
68
+ }
69
+
70
+ /**
71
+ * Reset all track baseTimes (used during seek or reset)
72
+ */
73
+ function resetBaseTime(): void {
74
+ trackBaseTimes.clear();
75
+ log(`Reset all track baseTimes`);
76
+ }
77
+
78
+ function cloneVideoFrame(frame: VideoFrame): VideoFrame | null {
79
+ try {
80
+ if ('clone' in frame) {
81
+ return (frame as VideoFrame).clone();
82
+ }
83
+ return new VideoFrame(frame);
84
+ } catch {
85
+ return null;
86
+ }
87
+ }
88
+
89
+ function pushFrameHistory(pipeline: PipelineState, frame: VideoFrame, timestamp: number): void {
90
+ if (pipeline.track.type !== 'video') return;
91
+ if (!pipeline.frameHistory) pipeline.frameHistory = [];
92
+
93
+ const cloned = cloneVideoFrame(frame);
94
+ if (!cloned) return;
95
+
96
+ pipeline.frameHistory.push({ frame: cloned, timestamp });
97
+
98
+ // Trim history
99
+ while (pipeline.frameHistory.length > MAX_FRAME_HISTORY) {
100
+ const entry = pipeline.frameHistory.shift();
101
+ if (entry) {
102
+ try { entry.frame.close(); } catch {}
103
+ }
104
+ }
105
+
106
+ pipeline.historyCursor = pipeline.frameHistory.length - 1;
107
+ }
108
+
109
+ function alignHistoryCursorToLastOutput(pipeline: PipelineState): void {
110
+ if (!pipeline.frameHistory || pipeline.frameHistory.length === 0) return;
111
+ const lastTs = pipeline.stats.lastOutputTimestamp;
112
+ if (!Number.isFinite(lastTs)) {
113
+ pipeline.historyCursor = pipeline.frameHistory.length - 1;
114
+ return;
115
+ }
116
+ // Find first history entry greater than last output, then step back one
117
+ const idx = pipeline.frameHistory.findIndex(entry => entry.timestamp > lastTs);
118
+ if (idx === -1) {
119
+ pipeline.historyCursor = pipeline.frameHistory.length - 1;
120
+ return;
121
+ }
122
+ pipeline.historyCursor = Math.max(0, idx - 1);
123
+ }
124
+
125
+ function getPrimaryVideoPipeline(): PipelineState | null {
126
+ let selected: PipelineState | null = null;
127
+ for (const pipeline of pipelines.values()) {
128
+ if (pipeline.track.type === 'video') {
129
+ if (!selected || pipeline.idx < selected.idx) {
130
+ selected = pipeline;
131
+ }
132
+ }
133
+ }
134
+ return selected;
135
+ }
136
+
137
+ // Stats update interval
138
+ let statsTimer: ReturnType<typeof setInterval> | null = null;
139
+ const STATS_INTERVAL_MS = 250;
140
+
141
+ // Frame dropping stats (Phase 2B)
142
+ let totalFramesDropped = 0;
143
+
144
+ // Chrome-recommended decoder queue threshold
145
+ // Per Chrome WebCodecs best practices: drop when decodeQueueSize > 2
146
+ // This ensures the decoder doesn't fall too far behind before corrective action
147
+ const MAX_DECODER_QUEUE_SIZE = 2;
148
+ const MAX_FRAME_HISTORY = 60;
149
+ const MAX_PAUSED_OUTPUT_QUEUE = 120;
150
+ const MAX_PAUSED_INPUT_QUEUE = 600;
151
+
152
+ // ============================================================================
153
+ // Logging
154
+ // ============================================================================
155
+
156
+ function log(msg: string, level: 'info' | 'warn' | 'error' = 'info'): void {
157
+ if (!debugging) return;
158
+
159
+ const message: WorkerToMainMessage = {
160
+ type: 'log',
161
+ msg,
162
+ level,
163
+ uid: uidCounter++,
164
+ };
165
+ self.postMessage(message);
166
+ }
167
+
168
+ function logVerbose(msg: string): void {
169
+ if (debugging !== 'verbose') return;
170
+ log(msg);
171
+ }
172
+
173
+ // ============================================================================
174
+ // Message Handling
175
+ // ============================================================================
176
+
177
+ self.onmessage = (event: MessageEvent<MainToWorkerMessage>) => {
178
+ const msg = event.data;
179
+
180
+ switch (msg.type) {
181
+ case 'create':
182
+ handleCreate(msg);
183
+ break;
184
+
185
+ case 'configure':
186
+ handleConfigure(msg);
187
+ break;
188
+
189
+ case 'receive':
190
+ handleReceive(msg);
191
+ break;
192
+
193
+ case 'setwritable':
194
+ handleSetWritable(msg);
195
+ break;
196
+
197
+ case 'creategenerator':
198
+ handleCreateGenerator(msg);
199
+ break;
200
+
201
+ case 'close':
202
+ handleClose(msg);
203
+ break;
204
+
205
+ case 'frametiming':
206
+ handleFrameTiming(msg);
207
+ break;
208
+
209
+ case 'seek':
210
+ handleSeek(msg);
211
+ break;
212
+
213
+ case 'framestep':
214
+ handleFrameStep(msg);
215
+ break;
216
+
217
+ case 'debugging':
218
+ debugging = msg.value;
219
+ log(`Debugging set to: ${msg.value}`);
220
+ break;
221
+
222
+ default:
223
+ log(`Unknown message type: ${(msg as any).type}`, 'warn');
224
+ }
225
+ };
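// Editorial sketch (not part of the package): every request carries a uid and
// the worker answers with an 'ack' of the same uid (see sendAck/sendError at
// the bottom of this file), so the main thread can wrap postMessage in a
// Promise. This helper is hypothetical:
//
//   const pending = new Map<number, { resolve: () => void; reject: (e: Error) => void }>();
//   function request(worker: Worker, msg: { uid: number } & Record<string, unknown>): Promise<void> {
//     return new Promise((resolve, reject) => {
//       pending.set(msg.uid, { resolve, reject });
//       worker.postMessage(msg);
//     });
//   }
//   worker.addEventListener('message', (e) => {
//     if (e.data.type !== 'ack') return;
//     const p = pending.get(e.data.uid);
//     if (!p) return;
//     pending.delete(e.data.uid);
//     e.data.status === 'ok' ? p.resolve() : p.reject(new Error(e.data.error ?? 'request failed'));
//   });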
226
+
227
+ // ============================================================================
228
+ // Pipeline Management
229
+ // ============================================================================
230
+
231
+ function handleCreate(msg: MainToWorkerMessage & { type: 'create' }): void {
232
+ const { idx, track, opts, uid } = msg;
233
+
234
+ log(`Creating pipeline for track ${idx} (${track.type} ${track.codec})`);
235
+
236
+ const pipeline: PipelineState = {
237
+ idx,
238
+ track,
239
+ configured: false,
240
+ closed: false,
241
+ decoder: null,
242
+ writable: null,
243
+ writer: null,
244
+ inputQueue: [],
245
+ outputQueue: [],
246
+ frameHistory: track.type === 'video' ? [] : undefined,
247
+ historyCursor: track.type === 'video' ? null : undefined,
248
+ stats: {
249
+ framesIn: 0,
250
+ framesDecoded: 0,
251
+ framesOut: 0,
252
+ framesDropped: 0,
253
+ lastInputTimestamp: 0,
254
+ lastOutputTimestamp: 0,
255
+ decoderQueueSize: 0,
256
+ // Debug info for error diagnosis
257
+ lastChunkType: '' as string,
258
+ lastChunkSize: 0,
259
+ lastChunkBytes: '' as string,
260
+ },
261
+ optimizeForLatency: opts.optimizeForLatency,
262
+ };
263
+
264
+ pipelines.set(idx, pipeline);
265
+
266
+ // Start stats reporting if not already running
267
+ if (!statsTimer) {
268
+ statsTimer = setInterval(sendStats, STATS_INTERVAL_MS);
269
+ }
270
+
271
+ sendAck(uid, idx);
272
+ }
273
+
274
+ function handleConfigure(msg: MainToWorkerMessage & { type: 'configure' }): void {
275
+ const { idx, header, uid } = msg;
276
+
277
+ log(`Received configure for track ${idx}, header length=${header?.byteLength ?? 'null'}`);
278
+
279
+ const pipeline = pipelines.get(idx);
280
+
281
+ if (!pipeline) {
282
+ log(`Cannot configure: pipeline ${idx} not found`, 'error');
283
+ sendError(uid, idx, 'Pipeline not found');
284
+ return;
285
+ }
286
+
287
+ // Skip if already configured and decoder is ready
288
+ // This prevents duplicate configuration when both WS INIT and HTTP fallback fire
289
+ if (pipeline.configured && pipeline.decoder && pipeline.decoder.state === 'configured') {
290
+ log(`Track ${idx} already configured, skipping duplicate configure`);
291
+ sendAck(uid, idx);
292
+ return;
293
+ }
294
+
295
+ try {
296
+ if (pipeline.track.type === 'video') {
297
+ log(`Configuring video decoder for track ${idx}...`);
298
+ configureVideoDecoder(pipeline, header);
299
+ } else if (pipeline.track.type === 'audio') {
300
+ log(`Configuring audio decoder for track ${idx}...`);
301
+ configureAudioDecoder(pipeline, header);
302
+ }
303
+
304
+ pipeline.configured = true;
305
+ log(`Successfully configured decoder for track ${idx}`);
306
+ sendAck(uid, idx);
307
+ } catch (err) {
308
+ log(`Failed to configure decoder for track ${idx}: ${err}`, 'error');
309
+ sendError(uid, idx, String(err));
310
+ }
311
+ }
312
+
313
+ function configureVideoDecoder(pipeline: PipelineState, description?: Uint8Array): void {
314
+ const track = pipeline.track;
315
+
316
+ // Handle JPEG codec separately via ImageDecoder (Phase 2C)
317
+ if (track.codec === 'JPEG' || track.codec.toLowerCase() === 'jpeg') {
318
+ log('JPEG codec detected - will use ImageDecoder');
319
+ pipeline.configured = true;
320
+ // JPEG doesn't need a persistent decoder - each frame is decoded individually
321
+ return;
322
+ }
323
+
324
+ // Close existing decoder if any (per rawws.js reconfiguration pattern)
325
+ if (pipeline.decoder) {
326
+ if (pipeline.decoder.state === 'configured') {
327
+ try {
328
+ pipeline.decoder.reset();
329
+ } catch {
330
+ // Ignore reset errors
331
+ }
332
+ }
333
+ if (pipeline.decoder.state !== 'closed') {
334
+ try {
335
+ pipeline.decoder.close();
336
+ } catch {
337
+ // Ignore close errors
338
+ }
339
+ }
340
+ pipeline.decoder = null;
341
+ }
342
+
343
+ // Match reference rawws.js configOpts pattern:
344
+ // codec, optimizeForLatency, description + hw acceleration hint
345
+ const config: VideoDecoderInit = {
346
+ codec: track.codecstring || track.codec.toLowerCase(),
347
+ optimizeForLatency: pipeline.optimizeForLatency,
348
+ hardwareAcceleration: 'prefer-hardware',
349
+ };
350
+
351
+ // Pass description directly from WebSocket INIT data (per reference rawws.js line 1052)
352
+ if (description && description.byteLength > 0) {
353
+ config.description = description;
354
+ log(`Configuring with description (${description.byteLength} bytes)`);
355
+ } else {
356
+ log(`No description provided - decoder may fail on H.264/HEVC`, 'warn');
357
+ }
358
+
359
+ log(`Configuring video decoder: ${config.codec}`);
360
+
361
+ const decoder = new VideoDecoder({
362
+ output: (frame: VideoFrame) => handleDecodedFrame(pipeline, frame),
363
+ error: (err: DOMException) => handleDecoderError(pipeline, err),
364
+ });
365
+
366
+ decoder.configure(config as VideoDecoderConfig);
367
+ pipeline.decoder = decoder;
368
+
369
+ log(`Video decoder configured: ${config.codec}`);
370
+ }
371
+
372
+ /**
373
+ * Map MistServer audio codec names to WebCodecs-compatible codec strings
374
+ * Per W3C AAC WebCodecs Registration: https://www.w3.org/TR/webcodecs-aac-codec-registration/
375
+ */
376
+ function mapAudioCodec(codec: string, codecstring?: string): string {
377
+ // If we have a full codec string like "mp4a.40.2", use it
378
+ if (codecstring && codecstring.startsWith('mp4a.')) {
379
+ return codecstring;
380
+ }
381
+
382
+ // Map common MistServer codec names to WebCodecs codec strings
383
+ const normalized = codec.toLowerCase();
384
+ switch (normalized) {
385
+ case 'aac':
386
+ case 'mp4a':
387
+ return 'mp4a.40.2'; // AAC-LC
388
+ case 'mp3':
389
+ return 'mp3';
390
+ case 'opus':
391
+ return 'opus';
392
+ case 'flac':
393
+ return 'flac';
394
+ case 'ac3':
395
+ case 'ac-3':
396
+ return 'ac-3';
397
+ case 'pcm_s16le':
398
+ case 'pcm_s32le':
399
+ case 'pcm_f32le':
400
+ return 'pcm-' + normalized.replace('pcm_', '').replace(/le$/, ''); // e.g. pcm_s16le -> pcm-s16 (WebCodecs codec registry)
401
+ default:
402
+ log(`Unknown audio codec: ${codec}, trying as-is`);
403
+ return codecstring || codec;
404
+ }
405
+ }
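// Editorial spot-checks (not part of the package): expected outputs of
// mapAudioCodec above; the mp4a.40.2 and pcm-* strings follow the W3C
// WebCodecs codec registry.
//
//   mapAudioCodec('AAC');              // -> 'mp4a.40.2' (AAC-LC default)
//   mapAudioCodec('opus');             // -> 'opus'
//   mapAudioCodec('pcm_s16le');        // -> 'pcm-s16'
//   mapAudioCodec('AAC', 'mp4a.40.5'); // -> 'mp4a.40.5' (explicit codecstring wins)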
406
+
407
+ function configureAudioDecoder(pipeline: PipelineState, description?: Uint8Array): void {
408
+ const track = pipeline.track;
409
+
410
+ const codec = mapAudioCodec(track.codec, track.codecstring);
411
+ log(`Audio codec mapping: ${track.codec} -> ${codec}`);
412
+
413
+ const config: AudioDecoderInit = {
414
+ codec,
415
+ sampleRate: track.rate || 48000,
416
+ numberOfChannels: track.channels || 2,
417
+ };
418
+
419
+ if (description && description.byteLength > 0) {
420
+ config.description = description;
421
+ }
422
+
423
+ const decoder = new AudioDecoder({
424
+ output: (data: AudioData) => handleDecodedFrame(pipeline, data),
425
+ error: (err: DOMException) => handleDecoderError(pipeline, err),
426
+ });
427
+
428
+ decoder.configure(config as AudioDecoderConfig);
429
+ pipeline.decoder = decoder;
430
+
431
+ log(`Audio decoder configured: ${config.codec} ${config.sampleRate}Hz ${config.numberOfChannels}ch`);
432
+ }
433
+
434
+ function handleDecodedFrame(pipeline: PipelineState, frame: VideoFrame | AudioData): void {
435
+ if (pipeline.closed) {
436
+ frame.close();
437
+ return;
438
+ }
439
+
440
+ const now = performance.now() * 1000; // Convert to microseconds
441
+ const timestamp = frame.timestamp ?? 0;
442
+
443
+ pipeline.stats.framesDecoded++;
444
+ frameTiming.decoded = now;
445
+
446
+ // Log first few decoded frames
447
+ if (pipeline.stats.framesDecoded <= 3) {
448
+ const frameType = pipeline.track.type;
449
+ const extraInfo = frameType === 'audio'
450
+ ? ` (${(frame as AudioData).numberOfFrames} samples, ${(frame as AudioData).sampleRate}Hz)`
451
+ : ` (${(frame as VideoFrame).displayWidth}x${(frame as VideoFrame).displayHeight})`;
452
+ log(`Decoded ${frameType} frame ${pipeline.stats.framesDecoded} for track ${pipeline.idx}: ts=${timestamp}μs${extraInfo}`);
453
+ }
454
+
455
+ // Add to output queue for scheduled release
456
+ pipeline.outputQueue.push({
457
+ frame,
458
+ timestamp,
459
+ decodedAt: performance.now(),
460
+ });
461
+
462
+ // Try to output frames
463
+ processOutputQueue(pipeline);
464
+ }
465
+
466
+ function handleDecoderError(pipeline: PipelineState, err: DOMException): void {
467
+ log(`Decoder error on track ${pipeline.idx}: ${err.name}: ${err.message}`, 'error');
468
+ log(` Last chunk info: type=${pipeline.stats.lastChunkType}, size=${pipeline.stats.lastChunkSize}, first bytes=[${pipeline.stats.lastChunkBytes}]`, 'error');
469
+
470
+ // Per rawws.js: reset the pipeline after decoder error
471
+ // This clears queues and recreates the decoder if needed
472
+ resetPipelineAfterError(pipeline);
473
+
474
+ const message: WorkerToMainMessage = {
475
+ type: 'sendevent',
476
+ kind: 'error',
477
+ message: `Decoder error: ${err.message}`,
478
+ idx: pipeline.idx,
479
+ uid: uidCounter++,
480
+ };
481
+ self.postMessage(message);
482
+ }
483
+
484
+ /**
485
+ * Reset pipeline after a decoder error
486
+ * Per rawws.js: recreate decoder if closed, otherwise just reset
487
+ */
488
+ function resetPipelineAfterError(pipeline: PipelineState): void {
489
+ // Clear queues
490
+ pipeline.inputQueue = [];
491
+ for (const entry of pipeline.outputQueue) {
492
+ entry.frame.close();
493
+ }
494
+ pipeline.outputQueue = [];
495
+
496
+ // Mark as needing reconfiguration - we'll wait for next keyframe
497
+ pipeline.configured = false;
498
+
499
+ // If decoder is closed, we need to recreate it (can't reset a closed decoder)
500
+ if (pipeline.decoder && pipeline.decoder.state === 'closed') {
501
+ log(`Decoder closed for track ${pipeline.idx}, will recreate on next configure`);
502
+ pipeline.decoder = null;
503
+ } else if (pipeline.decoder && pipeline.decoder.state !== 'closed') {
504
+ // Try to reset if not closed
505
+ try {
506
+ pipeline.decoder.reset();
507
+ log(`Reset decoder for track ${pipeline.idx}`);
508
+ } catch (e) {
509
+ log(`Failed to reset decoder for track ${pipeline.idx}: ${e}`, 'warn');
510
+ pipeline.decoder = null;
511
+ }
512
+ }
513
+ }
514
+
515
+ // ============================================================================
516
+ // Frame Input/Output
517
+ // ============================================================================
518
+
519
+ function handleReceive(msg: MainToWorkerMessage & { type: 'receive' }): void {
520
+ const { idx, chunk, uid } = msg;
521
+ const pipeline = pipelines.get(idx);
522
+
523
+ if (!pipeline) {
524
+ logVerbose(`Received chunk for unknown pipeline ${idx}`);
525
+ return;
526
+ }
527
+
528
+ if (!pipeline.configured || !pipeline.decoder) {
529
+ // Queue for later
530
+ pipeline.inputQueue.push(chunk);
531
+ logVerbose(`Queued chunk for track ${idx} (configured=${pipeline.configured}, decoder=${!!pipeline.decoder})`);
532
+ return;
533
+ }
534
+
535
+ // If paused and output queue is saturated, queue input to preserve per-frame stepping
536
+ if (frameTiming.paused && pipeline.outputQueue.length >= MAX_PAUSED_OUTPUT_QUEUE) {
537
+ pipeline.inputQueue.push(chunk);
538
+ if (pipeline.inputQueue.length > MAX_PAUSED_INPUT_QUEUE) {
539
+ pipeline.inputQueue.splice(0, pipeline.inputQueue.length - MAX_PAUSED_INPUT_QUEUE);
540
+ logVerbose(`Trimmed paused input queue for track ${idx} to ${MAX_PAUSED_INPUT_QUEUE}`);
541
+ }
542
+ return;
543
+ }
544
+
545
+ // Log only first 3 chunks per track to confirm receiving
546
+ if (pipeline.stats.framesIn < 3) {
547
+ log(`Received chunk ${pipeline.stats.framesIn} for track ${idx}: type=${chunk.type}, ts=${chunk.timestamp / 1000}ms, size=${chunk.data.byteLength}`);
548
+ }
549
+
550
+ // Check if we need to drop frames due to decoder pressure (Phase 2B)
551
+ if (shouldDropFramesDueToDecoderPressure(pipeline)) {
552
+ if (chunk.type === 'key') {
553
+ // Always accept keyframes - they're needed to resume
554
+ decodeChunk(pipeline, chunk);
555
+ } else {
556
+ // Drop delta frames when decoder is overwhelmed
557
+ pipeline.stats.framesDropped++;
558
+ totalFramesDropped++;
559
+ logVerbose(`Dropped delta frame @ ${chunk.timestamp / 1000}ms (decoder queue: ${pipeline.decoder.decodeQueueSize})`);
560
+ }
561
+ return;
562
+ }
563
+
564
+ decodeChunk(pipeline, chunk);
565
+ }
566
+
567
+ /**
568
+ * Check if decoder is under pressure and frames should be dropped
569
+ * Based on Chrome WebCodecs best practices: drop when decodeQueueSize > 2
570
+ */
571
+ function shouldDropFramesDueToDecoderPressure(pipeline: PipelineState): boolean {
572
+ if (frameTiming.paused) return false;
573
+ if (!pipeline.decoder) return false;
574
+
575
+ const queueSize = pipeline.decoder.decodeQueueSize;
576
+ pipeline.stats.decoderQueueSize = queueSize;
577
+
578
+ // Chrome recommendation: drop frames when queue > 2
579
+ return queueSize > MAX_DECODER_QUEUE_SIZE;
580
+ }
581
+
582
+ /**
583
+ * Drop all frames up to the next keyframe in the input queue
584
+ * Called when decoder is severely backed up
585
+ */
586
+ function dropToNextKeyframe(pipeline: PipelineState): number {
587
+ if (pipeline.inputQueue.length === 0) return 0;
588
+
589
+ // Find next keyframe in queue
590
+ const keyframeIdx = pipeline.inputQueue.findIndex(c => c.type === 'key');
591
+
592
+ if (keyframeIdx <= 0) {
593
+ // No keyframe or keyframe is first - nothing to drop
594
+ return 0;
595
+ }
596
+
597
+ // Drop all frames before keyframe
598
+ const dropped = pipeline.inputQueue.splice(0, keyframeIdx);
599
+ pipeline.stats.framesDropped += dropped.length;
600
+ totalFramesDropped += dropped.length;
601
+
602
+ log(`Dropped ${dropped.length} frames to next keyframe`, 'warn');
603
+
604
+ return dropped.length;
605
+ }
606
+
607
+ function decodeChunk(
608
+ pipeline: PipelineState,
609
+ chunk: { type: 'key' | 'delta'; timestamp: number; data: Uint8Array }
610
+ ): void {
611
+ if (pipeline.closed) return;
612
+
613
+ const now = performance.now() * 1000;
614
+ frameTiming.in = now;
615
+ pipeline.stats.framesIn++;
616
+ pipeline.stats.lastInputTimestamp = chunk.timestamp;
617
+
618
+ try {
619
+ // Handle JPEG via ImageDecoder (Phase 2C)
620
+ const codec = pipeline.track.codec;
621
+ if (codec === 'JPEG' || codec.toLowerCase() === 'jpeg') {
622
+ decodeJpegFrame(pipeline, chunk);
623
+ return;
624
+ }
625
+
626
+ if (!pipeline.decoder) return;
627
+
628
+ // chunk.timestamp is ALREADY in microseconds (converted by main thread via getPresentationTimestamp)
629
+ const timestampUs = chunk.timestamp;
630
+
631
+ // Record debug info before decode (for error diagnosis)
632
+ pipeline.stats.lastChunkType = chunk.type;
633
+ pipeline.stats.lastChunkSize = chunk.data.byteLength;
634
+ // Show first 8 bytes to identify the format (Annex B starts with 0x00 0x00 0x00 0x01; AVCC starts with a length prefix)
635
+ const firstBytes = Array.from(chunk.data.slice(0, 8)).map(b => '0x' + b.toString(16).padStart(2, '0')).join(' ');
636
+ pipeline.stats.lastChunkBytes = firstBytes;
637
+
638
+ if (pipeline.track.type === 'video') {
639
+ // AVCC mode: frames pass through unchanged (decoder has SPS/PPS from description)
640
+ const encodedChunk = new EncodedVideoChunk({
641
+ type: chunk.type,
642
+ timestamp: timestampUs,
643
+ data: chunk.data,
644
+ });
645
+
646
+ const decoder = pipeline.decoder as VideoDecoder;
647
+ if (pipeline.stats.framesIn <= 3) {
648
+ const firstBytes = Array.from(chunk.data.slice(0, 16)).map(b => '0x' + b.toString(16).padStart(2, '0')).join(' ');
649
+ log(`Calling decode() for track ${pipeline.idx}: state=${decoder.state}, queueSize=${decoder.decodeQueueSize}, chunk type=${chunk.type}, ts=${timestampUs}μs`);
650
+ log(` First 16 bytes: ${firstBytes}`);
651
+ }
652
+
653
+ decoder.decode(encodedChunk);
654
+
655
+ if (pipeline.stats.framesIn <= 3) {
656
+ log(`After decode() for track ${pipeline.idx}: queueSize=${decoder.decodeQueueSize}`);
657
+ }
658
+ } else if (pipeline.track.type === 'audio') {
659
+ // Audio chunks are always treated as "key" frames - per MistServer rawws.js line 1127
660
+ // Audio codecs don't use inter-frame dependencies like video does
661
+ const encodedChunk = new EncodedAudioChunk({
662
+ type: 'key',
663
+ timestamp: timestampUs,
664
+ data: chunk.data,
665
+ });
666
+ (pipeline.decoder as AudioDecoder).decode(encodedChunk);
667
+ }
668
+
669
+ // Update decoder queue size (decoder may have been nullified by error callback)
670
+ if (pipeline.decoder) {
671
+ pipeline.stats.decoderQueueSize = pipeline.decoder.decodeQueueSize;
672
+ }
673
+
674
+ logVerbose(`Decoded chunk ${chunk.type} @ ${chunk.timestamp / 1000}ms for track ${pipeline.idx}`);
675
+ } catch (err) {
676
+ log(`Decode error on track ${pipeline.idx}: ${err}`, 'error');
677
+ }
678
+ }
679
+
680
+ /**
681
+ * Decode JPEG frame using ImageDecoder API (Phase 2C)
682
+ * ImageDecoder is simpler than VideoDecoder for still images
683
+ */
684
+ async function decodeJpegFrame(
685
+ pipeline: PipelineState,
686
+ chunk: { type: 'key' | 'delta'; timestamp: number; data: Uint8Array }
687
+ ): Promise<void> {
688
+ if (pipeline.closed) return;
689
+
690
+ // Check if ImageDecoder is available
691
+ if (typeof ImageDecoder === 'undefined') {
692
+ log('ImageDecoder not available - JPEG streams not supported', 'error');
693
+ return;
694
+ }
695
+
696
+ try {
697
+ // Create ImageDecoder for this frame
698
+ const decoder = new ImageDecoder({
699
+ type: 'image/jpeg',
700
+ data: chunk.data,
701
+ });
702
+
703
+ // Decode the frame - single decode call per MistServer rawws.js line 1069
704
+ const result = await decoder.decode({ frameIndex: 0 });
705
+
706
+ // Create VideoFrame from ImageBitmap with the correct timestamp
707
+ const frame = new VideoFrame(result.image, {
708
+ timestamp: chunk.timestamp, // Preserve original timestamp
709
+ });
710
+
711
+ // Clean up ImageDecoder resources
712
+ result.image.close();
713
+ decoder.close();
714
+
715
+ // Pass frame through normal output handling
716
+ handleDecodedFrame(pipeline, frame);
717
+
718
+ logVerbose(`Decoded JPEG frame @ ${chunk.timestamp / 1000}ms for track ${pipeline.idx}`);
719
+ } catch (err) {
720
+ log(`JPEG decode error on track ${pipeline.idx}: ${err}`, 'error');
721
+ }
722
+ }
723
+
724
+ function processOutputQueue(pipeline: PipelineState): void {
725
+ if (frameTiming.paused) {
726
+ return;
727
+ }
728
+ // Check if pipeline is closed (e.g., player destroyed) - clean up queued frames
729
+ if (pipeline.closed) {
730
+ while (pipeline.outputQueue.length > 0) {
731
+ const entry = pipeline.outputQueue.shift()!;
732
+ entry.frame.close();
733
+ }
734
+ return;
735
+ }
736
+
737
+ if (!pipeline.writer || pipeline.outputQueue.length === 0) {
738
+ if (pipeline.outputQueue.length > 0 && !pipeline.writer) {
739
+ log(`Cannot output: no writer for track ${pipeline.idx} (queue has ${pipeline.outputQueue.length} frames)`, 'warn');
740
+ }
741
+ return;
742
+ }
743
+
744
+ const now = performance.now();
745
+
746
+ // Sort output queue by timestamp - MistServer can send frames out of order
747
+ // This is more robust than just swapping adjacent frames
748
+ if (pipeline.outputQueue.length > 1) {
749
+ const wasSorted = pipeline.outputQueue.every((entry, i, arr) =>
750
+ i === 0 || arr[i - 1].timestamp <= entry.timestamp
751
+ );
752
+ if (!wasSorted) {
753
+ pipeline.outputQueue.sort((a, b) => a.timestamp - b.timestamp);
754
+ log(`Sorted ${pipeline.outputQueue.length} frames in output queue for track ${pipeline.idx}`);
755
+ }
756
+ }
757
+
758
+ // Buffer warmup - wait for buffer to build before starting output to prevent initial jitter
759
+ // warmupComplete is shared across tracks: the first pipeline to build enough buffer releases output for all; per-track baseTime then handles each track's own timestamp base
760
+ if (!warmupComplete) {
761
+ // Track when warmup started
762
+ if (warmupStartTime === null) {
763
+ warmupStartTime = now;
764
+ log(`Starting buffer warmup (target: ${WARMUP_BUFFER_MS}ms)`);
765
+ }
766
+
767
+ const elapsed = now - warmupStartTime;
768
+
769
+ // Calculate buffer from timestamp range in queue
770
+ if (pipeline.outputQueue.length >= 2) {
771
+ const oldest = pipeline.outputQueue[0].timestamp / 1000; // Convert to ms
772
+ const newest = pipeline.outputQueue[pipeline.outputQueue.length - 1].timestamp / 1000;
773
+ const bufferMs = newest - oldest;
774
+
775
+ // Complete warmup when we have enough buffer OR timeout
776
+ if (bufferMs >= WARMUP_BUFFER_MS || elapsed >= WARMUP_TIMEOUT_MS) {
777
+ warmupComplete = true;
778
+ log(`Buffer warmup complete: ${bufferMs.toFixed(0)}ms buffer, ${pipeline.outputQueue.length} frames queued (track ${pipeline.idx})`);
779
+ } else {
780
+ // Not ready yet - schedule another check
781
+ setTimeout(() => processOutputQueue(pipeline), 10);
782
+ return;
783
+ }
784
+ } else {
785
+ // Not enough frames yet - schedule another check
786
+ if (elapsed >= WARMUP_TIMEOUT_MS) {
787
+ warmupComplete = true;
788
+ log(`Buffer warmup timeout - starting with ${pipeline.outputQueue.length} frame(s) (track ${pipeline.idx})`);
789
+ } else {
790
+ setTimeout(() => processOutputQueue(pipeline), 10);
791
+ return;
792
+ }
793
+ }
794
+ }
795
+
796
+ // Process all frames that are ready
797
+ while (pipeline.outputQueue.length > 0) {
798
+ const entry = pipeline.outputQueue[0];
799
+
800
+ // Frame timing (per-track baseTime for A/V with different timestamp bases)
801
+ const schedule = shouldOutputFrame(pipeline, entry, now);
802
+
803
+ if (!schedule.shouldOutput) {
804
+ // Schedule next check
805
+ if (schedule.checkDelayMs > 0) {
806
+ setTimeout(() => processOutputQueue(pipeline), schedule.checkDelayMs);
807
+ }
808
+ break;
809
+ }
810
+
811
+ // Output this frame
812
+ pipeline.outputQueue.shift();
813
+ outputFrame(pipeline, entry);
814
+ }
815
+ }
816
+
817
+ function shouldOutputFrame(
818
+ pipeline: PipelineState,
819
+ entry: DecodedFrame,
820
+ now: number
821
+ ): { shouldOutput: boolean; earliness: number; checkDelayMs: number } {
822
+ const trackIdx = pipeline.idx;
823
+
824
+ if (frameTiming.seeking) {
825
+ // During seek, reset baseTime and output first keyframe immediately
826
+ trackBaseTimes.delete(trackIdx);
827
+ return { shouldOutput: true, earliness: 0, checkDelayMs: 0 };
828
+ }
829
+
830
+ // Frame timestamp in milliseconds (entry.timestamp is in microseconds)
831
+ const frameTimeMs = entry.timestamp / 1000;
832
+ const speed = frameTiming.speed.combined;
833
+
834
+ // Per-track baseTime to handle different timestamp bases for audio/video
835
+ const baseTime = getTrackBaseTime(trackIdx, frameTimeMs, now);
836
+
837
+ // Calculate target wall-clock time for this frame (per rawws.js line 872)
838
+ // targetTime = baseTime + frameTimeMs / speed
839
+ const targetTime = baseTime + frameTimeMs / speed;
840
+
841
+ // How early/late is this frame? Positive = too early, negative = late
842
+ const delay = targetTime - now;
843
+
844
+ logVerbose(`Frame timing: track=${trackIdx} frame=${frameTimeMs.toFixed(0)}ms, target=${targetTime.toFixed(0)}, now=${now.toFixed(0)}, delay=${delay.toFixed(1)}ms`);
845
+
846
+ // Output immediately if ready or late (per rawws.js line 889: delay <= 2)
847
+ if (delay <= 2) {
848
+ return { shouldOutput: true, earliness: -delay, checkDelayMs: 0 };
849
+ }
850
+
851
+ // Schedule check for when frame should be ready
852
+ return { shouldOutput: false, earliness: -delay, checkDelayMs: Math.max(1, Math.floor(delay)) };
853
+ }
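// Editorial worked example (not part of the package) of the scheduling above,
// with illustrative numbers: if track 1's first frame arrived with timestamp
// 10000ms when now was 5000ms and speed is 1, then baseTime = 5000 - 10000/1
// = -5000. A later frame stamped 10250ms targets
//   targetTime = -5000 + 10250 / 1 = 5250ms.
// At now = 5240ms, delay = +10ms > 2, so a re-check is scheduled in ~10ms;
// at now = 5251ms, delay = -1ms <= 2 and the frame is released immediately.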
854
+
855
+ function outputFrame(pipeline: PipelineState, entry: DecodedFrame, options?: { skipHistory?: boolean }): void {
856
+ if (!pipeline.writer || pipeline.closed) {
857
+ entry.frame.close();
858
+ return;
859
+ }
860
+
861
+ const now = performance.now() * 1000;
862
+ frameTiming.out = now;
863
+ pipeline.stats.framesOut++;
864
+ pipeline.stats.lastOutputTimestamp = entry.timestamp;
865
+
866
+ // Log first few output frames
867
+ if (pipeline.stats.framesOut <= 3) {
868
+ log(`Output frame ${pipeline.stats.framesOut} for track ${pipeline.idx}: ts=${entry.timestamp}μs`);
869
+ }
870
+
871
+ // Store history for frame stepping (video only)
872
+ if (pipeline.track.type === 'video' && !(options?.skipHistory)) {
873
+ pushFrameHistory(pipeline, entry.frame as VideoFrame, entry.timestamp);
874
+ }
875
+
876
+ // Write returns a Promise - handle rejection to avoid unhandled promise errors
877
+ // Frame ownership is transferred to the stream, so we don't need to close() on success
878
+ pipeline.writer.write(entry.frame).then(() => {
879
+ // Send timeupdate event on successful write
880
+ const message: WorkerToMainMessage = {
881
+ type: 'sendevent',
882
+ kind: 'timeupdate',
883
+ idx: pipeline.idx,
884
+ time: entry.timestamp / 1e6,
885
+ uid: uidCounter++,
886
+ };
887
+ self.postMessage(message);
888
+ }).catch((err: Error) => {
889
+ // Check for "stream closed" errors - these are expected during cleanup
890
+ const errStr = String(err);
891
+ if (errStr.includes('Stream closed') || errStr.includes('InvalidStateError')) {
892
+ // Expected during player cleanup - silently mark pipeline as closed
893
+ pipeline.closed = true;
894
+ } else {
895
+ log(`Failed to write frame: ${err}`, 'error');
896
+ }
897
+ // Frame may not have been consumed by the stream - try to close it
898
+ try {
899
+ entry.frame.close();
900
+ } catch {
901
+ // Frame may already be detached/closed
902
+ }
903
+ });
904
+ }
905
+
906
+ // ============================================================================
907
+ // Track Generator / Writable Stream
908
+ // ============================================================================
909
+
910
+ function handleSetWritable(msg: MainToWorkerMessage & { type: 'setwritable' }): void {
911
+ const { idx, writable, uid } = msg;
912
+ const pipeline = pipelines.get(idx);
913
+
914
+ if (!pipeline) {
915
+ log(`Cannot set writable: pipeline ${idx} not found`, 'error');
916
+ sendError(uid, idx, 'Pipeline not found');
917
+ return;
918
+ }
919
+
920
+ pipeline.writable = writable;
921
+ pipeline.writer = writable.getWriter();
922
+
923
+ log(`Writable stream set for track ${idx}`);
924
+
925
+ // Process any queued frames
926
+ processOutputQueue(pipeline);
927
+
928
+ // Notify main thread track is ready
929
+ const message: WorkerToMainMessage = {
930
+ type: 'addtrack',
931
+ idx,
932
+ uid,
933
+ status: 'ok',
934
+ };
935
+ self.postMessage(message);
936
+ }
937
+
938
+ function handleCreateGenerator(msg: MainToWorkerMessage & { type: 'creategenerator' }): void {
939
+ const { idx, uid } = msg;
940
+ const pipeline = pipelines.get(idx);
941
+
942
+ if (!pipeline) {
943
+ log(`Cannot create generator: pipeline ${idx} not found`, 'error');
944
+ sendError(uid, idx, 'Pipeline not found');
945
+ return;
946
+ }
947
+
948
+ // Safari: VideoTrackGenerator is available in worker (not MediaStreamTrackGenerator)
949
+ // Reference: webcodecsworker.js line 852-863
950
+ // @ts-ignore - VideoTrackGenerator may not be in types
951
+ if (typeof VideoTrackGenerator !== 'undefined') {
952
+ if (pipeline.track.type === 'video') {
953
+ // Safari video: use VideoTrackGenerator
954
+ // @ts-ignore
955
+ const generator = new VideoTrackGenerator();
956
+ pipeline.writable = generator.writable;
957
+ pipeline.writer = generator.writable.getWriter();
958
+
959
+ // Send track back to main thread
960
+ const message: WorkerToMainMessage = {
961
+ type: 'addtrack',
962
+ idx,
963
+ track: generator.track,
964
+ uid,
965
+ status: 'ok',
966
+ };
967
+ // @ts-ignore - transferring MediaStreamTrack
968
+ self.postMessage(message, [generator.track]);
969
+ log(`Created VideoTrackGenerator for track ${idx} (Safari video)`);
970
+ } else if (pipeline.track.type === 'audio') {
971
+ // Safari audio: relay frames to main thread via postMessage
972
+ // Reference: webcodecsworker.js line 773-800
973
+ // Main thread creates the audio generator, we just send frames
974
+ pipeline.writer = {
975
+ write: (frame: AudioData): Promise<void> => {
976
+ return new Promise((resolve, reject) => {
977
+ const frameUid = uidCounter++;
978
+ // Set up listener for response
979
+ const timeoutId = setTimeout(() => {
980
+ reject(new Error('writeframe timeout'));
981
+ }, 5000);
982
+
983
+ const handler = (e: MessageEvent) => {
984
+ const msg = e.data;
985
+ if (msg.type === 'writeframe' && msg.idx === idx && msg.uid === frameUid) {
986
+ clearTimeout(timeoutId);
987
+ self.removeEventListener('message', handler);
988
+ if (msg.status === 'ok') {
989
+ resolve();
990
+ } else {
991
+ reject(new Error(msg.error || 'writeframe failed'));
992
+ }
993
+ }
994
+ };
995
+ self.addEventListener('message', handler);
996
+
997
+ // Send frame to main thread
998
+ self.postMessage({
999
+ type: 'writeframe',
1000
+ idx,
1001
+ frame,
1002
+ uid: frameUid,
1003
+ }, [frame]);
1004
+ });
1005
+ },
1006
+ close: () => Promise.resolve(),
1007
+ } as WritableStreamDefaultWriter<AudioData>;
1008
+
1009
+ // Notify main thread to set up audio generator
1010
+ const message: WorkerToMainMessage = {
1011
+ type: 'addtrack',
1012
+ idx,
1013
+ uid,
1014
+ status: 'ok',
1015
+ };
1016
+ self.postMessage(message);
1017
+ log(`Set up frame relay for track ${idx} (Safari audio)`);
1018
+ }
1019
+ } else if (typeof MediaStreamTrackGenerator !== 'undefined') {
1020
+ // Chrome/Edge: use MediaStreamTrackGenerator in worker
1021
+ // @ts-ignore
1022
+ const generator = new MediaStreamTrackGenerator({ kind: pipeline.track.type });
1023
+ pipeline.writable = generator.writable;
1024
+ pipeline.writer = generator.writable.getWriter();
1025
+
1026
+ // Send track back to main thread
1027
+ const message: WorkerToMainMessage = {
1028
+ type: 'addtrack',
1029
+ idx,
1030
+ track: generator,
1031
+ uid,
1032
+ status: 'ok',
1033
+ };
1034
+ // @ts-ignore - transferring MediaStreamTrack
1035
+ self.postMessage(message, [generator]);
1036
+ log(`Created MediaStreamTrackGenerator for track ${idx}`);
1037
+ } else {
1038
+ log('Neither VideoTrackGenerator nor MediaStreamTrackGenerator available in worker', 'warn');
1039
+ sendError(uid, idx, 'No track generator available');
1040
+ }
1041
+ }
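// Editorial sketch (not part of the package): on the main thread, the
// 'addtrack' reply above can carry a transferred MediaStreamTrack, which
// attaches to a <video> element in the usual way. videoElement is hypothetical:
//
//   worker.addEventListener('message', (e) => {
//     if (e.data.type === 'addtrack' && e.data.track) {
//       videoElement.srcObject = new MediaStream([e.data.track]);
//     }
//   });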
1042
+
1043
+ // ============================================================================
1044
+ // Seeking & Timing
1045
+ // ============================================================================
1046
+
1047
+ function handleSeek(msg: MainToWorkerMessage & { type: 'seek' }): void {
1048
+ const { seekTime, uid } = msg;
1049
+
1050
+ log(`Seek to ${seekTime}ms`);
1051
+ frameTiming.seeking = true;
1052
+ resetBaseTime(); // Reset timing reference for new position
1053
+
1054
+ // Reset warmup state - need to rebuild buffer after seek
1055
+ warmupComplete = false;
1056
+ warmupStartTime = null;
1057
+
1058
+ // Flush all pipeline queues
1059
+ for (const pipeline of pipelines.values()) {
1060
+ flushPipeline(pipeline);
1061
+ }
1062
+
1063
+ sendAck(uid);
1064
+ }
1065
+
1066
+ function flushPipeline(pipeline: PipelineState): void {
1067
+ // Clear input queue
1068
+ pipeline.inputQueue = [];
1069
+
1070
+ // Close and clear output queue frames
1071
+ for (const entry of pipeline.outputQueue) {
1072
+ entry.frame.close();
1073
+ }
1074
+ pipeline.outputQueue = [];
1075
+
1076
+ // Reset decoder if possible
1077
+ if (pipeline.decoder && pipeline.decoder.state !== 'closed') {
1078
+ try {
1079
+ pipeline.decoder.reset();
1080
+ } catch {
1081
+ // Ignore reset errors
1082
+ }
1083
+ }
1084
+ }
1085
+
1086
+ function handleFrameTiming(msg: MainToWorkerMessage & { type: 'frametiming' }): void {
1087
+ const { action, speed, tweak, uid } = msg;
1088
+
1089
+ if (action === 'setSpeed') {
1090
+ if (speed !== undefined) frameTiming.speed.main = speed;
1091
+ if (tweak !== undefined) frameTiming.speed.tweak = tweak;
1092
+ frameTiming.speed.combined = frameTiming.speed.main * frameTiming.speed.tweak;
1093
+ log(`Speed set to ${frameTiming.speed.combined} (main: ${frameTiming.speed.main}, tweak: ${frameTiming.speed.tweak})`);
1094
+ } else if (action === 'setPaused') {
1095
+ frameTiming.paused = msg.paused === true;
1096
+ log(`Frame timing paused=${frameTiming.paused}`);
1097
+ } else if (action === 'reset') {
1098
+ frameTiming.seeking = false;
1099
+ log('Frame timing reset (seek complete)');
1100
+ }
1101
+
1102
+ sendAck(uid);
1103
+ }
1104
+
1105
+ function handleFrameStep(msg: MainToWorkerMessage & { type: 'framestep' }): void {
1106
+ const { direction, uid } = msg;
1107
+
1108
+ log(`FrameStep request dir=${direction} paused=${frameTiming.paused}`);
1109
+
1110
+ if (!frameTiming.paused) {
1111
+ log(`FrameStep ignored (not paused)`);
1112
+ sendAck(uid);
1113
+ return;
1114
+ }
1115
+
1116
+ const pipeline = getPrimaryVideoPipeline();
1117
+ if (!pipeline || !pipeline.writer || pipeline.closed) {
1118
+ log(`FrameStep ignored (pipeline missing or closed)`);
1119
+ sendAck(uid);
1120
+ return;
1121
+ }
1122
+
1123
+ pipeline.frameHistory = pipeline.frameHistory ?? [];
1124
+ if (pipeline.historyCursor === null || pipeline.historyCursor === undefined) {
1125
+ alignHistoryCursorToLastOutput(pipeline);
1126
+ }
1127
+ log(`FrameStep pipeline idx=${pipeline.idx} outQueue=${pipeline.outputQueue.length} history=${pipeline.frameHistory.length} cursor=${pipeline.historyCursor}`);
1128
+
1129
+ if (direction < 0) {
1130
+ const nextIndex = (pipeline.historyCursor ?? 0) - 1;
1131
+ if (nextIndex < 0 || pipeline.frameHistory.length === 0) {
1132
+ log(`FrameStep back: no history`);
1133
+ sendAck(uid);
1134
+ return;
1135
+ }
1136
+ pipeline.historyCursor = nextIndex;
1137
+ const entry = pipeline.frameHistory[nextIndex];
1138
+ const clone = entry ? cloneVideoFrame(entry.frame) : null;
1139
+ if (!clone) {
1140
+ log(`FrameStep back: failed to clone frame`);
1141
+ sendAck(uid);
1142
+ return;
1143
+ }
1144
+ log(`FrameStep back: output ts=${entry.timestamp}`);
1145
+ outputFrame(pipeline, { frame: clone, timestamp: entry.timestamp, decodedAt: performance.now() }, { skipHistory: true });
1146
+ sendAck(uid);
1147
+ return;
1148
+ }
1149
+
1150
+ if (direction > 0) {
1151
+ // If we're stepping forward within history (after stepping back), use history
1152
+ if (pipeline.historyCursor !== null && pipeline.historyCursor < pipeline.frameHistory.length - 1) {
1153
+ pipeline.historyCursor += 1;
1154
+ const entry = pipeline.frameHistory[pipeline.historyCursor];
1155
+ const clone = entry ? cloneVideoFrame(entry.frame) : null;
1156
+ if (!clone) {
1157
+ log(`FrameStep forward: failed to clone frame`);
1158
+ sendAck(uid);
1159
+ return;
1160
+ }
1161
+ log(`FrameStep forward (history): output ts=${entry.timestamp}`);
1162
+ outputFrame(pipeline, { frame: clone, timestamp: entry.timestamp, decodedAt: performance.now() }, { skipHistory: true });
1163
+ sendAck(uid);
1164
+ return;
1165
+ }
1166
+
1167
+ // Otherwise, output the next queued frame
1168
+ if (pipeline.outputQueue.length > 1) {
1169
+ const wasSorted = pipeline.outputQueue.every((entry, i, arr) =>
1170
+ i === 0 || arr[i - 1].timestamp <= entry.timestamp
1171
+ );
1172
+ if (!wasSorted) {
1173
+ pipeline.outputQueue.sort((a, b) => a.timestamp - b.timestamp);
1174
+ }
1175
+ }
1176
+
1177
+ const lastTs = pipeline.stats.lastOutputTimestamp;
1178
+ let idx = pipeline.outputQueue.findIndex(e => e.timestamp > lastTs);
1179
+ if (idx === -1 && pipeline.outputQueue.length > 0) idx = 0;
1180
+ if (idx === -1) {
1181
+ log(`FrameStep forward: no queued frame available`);
1182
+ sendAck(uid);
1183
+ return;
1184
+ }
1185
+
1186
+ const entry = pipeline.outputQueue.splice(idx, 1)[0];
1187
+ log(`FrameStep forward (queue): output ts=${entry.timestamp}`);
1188
+ outputFrame(pipeline, entry);
1189
+ sendAck(uid);
1190
+ return;
1191
+ }
1192
+
1193
+ sendAck(uid);
1194
+ }
1195
+
1196
+ // ============================================================================
1197
+ // Cleanup
1198
+ // ============================================================================
1199
+
1200
+ function handleClose(msg: MainToWorkerMessage & { type: 'close' }): void {
1201
+ const { idx, waitEmpty, uid } = msg;
1202
+ const pipeline = pipelines.get(idx);
1203
+
1204
+ if (!pipeline) {
1205
+ sendAck(uid, idx);
1206
+ return;
1207
+ }
1208
+
1209
+ if (waitEmpty && pipeline.outputQueue.length > 0) {
1210
+ // Wait for queue to drain
1211
+ const checkDrain = () => {
1212
+ if (pipeline.outputQueue.length === 0) {
1213
+ closePipeline(pipeline, uid);
1214
+ } else {
1215
+ setTimeout(checkDrain, 10);
1216
+ }
1217
+ };
1218
+ checkDrain();
1219
+ } else {
1220
+ closePipeline(pipeline, uid);
1221
+ }
1222
+ }
1223
+
1224
+ function closePipeline(pipeline: PipelineState, uid: number): void {
1225
+ pipeline.closed = true;
1226
+
1227
+ // Close decoder
1228
+ if (pipeline.decoder && pipeline.decoder.state !== 'closed') {
1229
+ try {
1230
+ pipeline.decoder.close();
1231
+ } catch {
1232
+ // Ignore close errors
1233
+ }
1234
+ }
1235
+
1236
+ // Close writer
1237
+ if (pipeline.writer) {
1238
+ try {
1239
+ pipeline.writer.close();
1240
+ } catch {
1241
+ // Ignore close errors
1242
+ }
1243
+ }
1244
+
1245
+ // Clear queues
1246
+ for (const entry of pipeline.outputQueue) {
1247
+ entry.frame.close();
1248
+ }
1249
+ pipeline.outputQueue = [];
1250
+ pipeline.inputQueue = [];
1251
+
1252
+ // Clean up per-track timing
1253
+ trackBaseTimes.delete(pipeline.idx);
1254
+
1255
+ pipelines.delete(pipeline.idx);
1256
+
1257
+ log(`Closed pipeline ${pipeline.idx}`);
1258
+
1259
+ // Stop stats if no more pipelines
1260
+ if (pipelines.size === 0 && statsTimer) {
1261
+ clearInterval(statsTimer);
1262
+ statsTimer = null;
1263
+ }
1264
+
1265
+ const message: WorkerToMainMessage = {
1266
+ type: 'closed',
1267
+ idx: pipeline.idx,
1268
+ uid,
1269
+ status: 'ok',
1270
+ };
1271
+ self.postMessage(message);
1272
+ }
1273
+
1274
+ // ============================================================================
1275
+ // Stats Reporting
1276
+ // ============================================================================
1277
+
1278
+ function sendStats(): void {
1279
+ const pipelineStats: Record<number, PipelineStats> = {};
1280
+
1281
+ for (const [idx, pipeline] of pipelines) {
1282
+ pipelineStats[idx] = {
1283
+ early: null, // Would need frame timing to calculate
1284
+ frameDuration: null,
1285
+ frames: {
1286
+ in: pipeline.stats.framesIn,
1287
+ decoded: pipeline.stats.framesDecoded,
1288
+ out: pipeline.stats.framesOut,
1289
+ },
1290
+ queues: {
1291
+ in: pipeline.inputQueue.length,
1292
+ decoder: pipeline.stats.decoderQueueSize,
1293
+ out: pipeline.outputQueue.length,
1294
+ },
1295
+ timing: {
1296
+ decoder: createFrameTrackerStats(),
1297
+ writable: createFrameTrackerStats(),
1298
+ },
1299
+ };
1300
+ }
1301
+
1302
+ const message: WorkerToMainMessage = {
1303
+ type: 'stats',
1304
+ stats: {
1305
+ frameTiming: {
1306
+ in: frameTiming.in,
1307
+ decoded: frameTiming.decoded,
1308
+ out: frameTiming.out,
1309
+ speed: { ...frameTiming.speed },
1310
+ seeking: frameTiming.seeking,
1311
+ paused: frameTiming.paused,
1312
+ },
1313
+ pipelines: pipelineStats,
1314
+ },
1315
+ uid: uidCounter++,
1316
+ };
1317
+
1318
+ self.postMessage(message);
1319
+ }
1320
+
1321
+ function createFrameTrackerStats(): FrameTrackerStats {
1322
+ return {
1323
+ lastIn: undefined,
1324
+ lastOut: undefined,
1325
+ delay: undefined,
1326
+ delta: undefined,
1327
+ shift: undefined,
1328
+ };
1329
+ }
1330
+
1331
+ // ============================================================================
1332
+ // Response Helpers
1333
+ // ============================================================================
1334
+
1335
+ function sendAck(uid: number, idx?: number): void {
1336
+ const message: WorkerToMainMessage = {
1337
+ type: 'ack',
1338
+ uid,
1339
+ idx,
1340
+ status: 'ok',
1341
+ };
1342
+ self.postMessage(message);
1343
+ }
1344
+
1345
+ function sendError(uid: number, idx: number | undefined, error: string): void {
1346
+ const message: WorkerToMainMessage = {
1347
+ type: 'ack',
1348
+ uid,
1349
+ idx,
1350
+ status: 'error',
1351
+ error,
1352
+ };
1353
+ self.postMessage(message);
1354
+ }
1355
+
1356
+ // ============================================================================
1357
+ // Worker Initialization
1358
+ // ============================================================================
1359
+
1360
+ log('WebCodecs decoder worker initialized');
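The worker reports back over three channels: 'ack' replies keyed by uid, 'stats' snapshots every 250ms, and 'sendevent' notifications ('error' and 'timeupdate'). A minimal main-thread listener, sketched against the message shapes defined in this file (updateStatsOverlay, onTime, and resolvePending are hypothetical hooks):

    worker.addEventListener('message', (e) => {
      switch (e.data.type) {
        case 'stats':     updateStatsOverlay(e.data.stats); break;
        case 'sendevent': if (e.data.kind === 'timeupdate') onTime(e.data.time); break;
        case 'log':       console[e.data.level](e.data.msg); break;
        case 'ack':       resolvePending(e.data.uid, e.data.status); break;
      }
    });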