@livepeer-frameworks/player-core 0.0.4 → 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82)
  1. package/README.md +21 -6
  2. package/dist/cjs/index.js +792 -146
  3. package/dist/cjs/index.js.map +1 -1
  4. package/dist/esm/index.js +792 -146
  5. package/dist/esm/index.js.map +1 -1
  6. package/dist/player.css +185 -373
  7. package/dist/types/core/GatewayClient.d.ts +3 -4
  8. package/dist/types/core/InteractionController.d.ts +12 -0
  9. package/dist/types/core/MetaTrackManager.d.ts +1 -1
  10. package/dist/types/core/PlayerController.d.ts +18 -2
  11. package/dist/types/core/PlayerInterface.d.ts +10 -0
  12. package/dist/types/core/SeekingUtils.d.ts +3 -1
  13. package/dist/types/core/StreamStateClient.d.ts +1 -1
  14. package/dist/types/players/HlsJsPlayer.d.ts +8 -0
  15. package/dist/types/players/MewsWsPlayer/index.d.ts +1 -1
  16. package/dist/types/players/VideoJsPlayer.d.ts +12 -4
  17. package/dist/types/players/WebCodecsPlayer/SyncController.d.ts +1 -1
  18. package/dist/types/players/WebCodecsPlayer/index.d.ts +11 -0
  19. package/dist/types/players/WebCodecsPlayer/types.d.ts +25 -3
  20. package/dist/types/players/WebCodecsPlayer/worker/types.d.ts +20 -2
  21. package/dist/types/types.d.ts +32 -1
  22. package/dist/types/vanilla/FrameWorksPlayer.d.ts +5 -5
  23. package/dist/types/vanilla/index.d.ts +3 -3
  24. package/dist/workers/decoder.worker.js +183 -6
  25. package/dist/workers/decoder.worker.js.map +1 -1
  26. package/package.json +1 -1
  27. package/src/core/ABRController.ts +38 -36
  28. package/src/core/CodecUtils.ts +50 -47
  29. package/src/core/Disposable.ts +4 -4
  30. package/src/core/EventEmitter.ts +1 -1
  31. package/src/core/GatewayClient.ts +48 -48
  32. package/src/core/InteractionController.ts +89 -82
  33. package/src/core/LiveDurationProxy.ts +14 -16
  34. package/src/core/MetaTrackManager.ts +74 -66
  35. package/src/core/MistReporter.ts +72 -45
  36. package/src/core/MistSignaling.ts +59 -56
  37. package/src/core/PlayerController.ts +724 -375
  38. package/src/core/PlayerInterface.ts +89 -59
  39. package/src/core/PlayerManager.ts +118 -123
  40. package/src/core/PlayerRegistry.ts +59 -42
  41. package/src/core/QualityMonitor.ts +38 -31
  42. package/src/core/ScreenWakeLockManager.ts +8 -9
  43. package/src/core/SeekingUtils.ts +31 -22
  44. package/src/core/StreamStateClient.ts +75 -69
  45. package/src/core/SubtitleManager.ts +25 -23
  46. package/src/core/TelemetryReporter.ts +34 -31
  47. package/src/core/TimeFormat.ts +13 -17
  48. package/src/core/TimerManager.ts +25 -9
  49. package/src/core/UrlUtils.ts +20 -17
  50. package/src/core/detector.ts +44 -44
  51. package/src/core/index.ts +57 -48
  52. package/src/core/scorer.ts +137 -138
  53. package/src/core/selector.ts +2 -6
  54. package/src/global.d.ts +1 -1
  55. package/src/index.ts +46 -35
  56. package/src/players/DashJsPlayer.ts +175 -114
  57. package/src/players/HlsJsPlayer.ts +154 -76
  58. package/src/players/MewsWsPlayer/SourceBufferManager.ts +44 -39
  59. package/src/players/MewsWsPlayer/WebSocketManager.ts +9 -10
  60. package/src/players/MewsWsPlayer/index.ts +196 -154
  61. package/src/players/MewsWsPlayer/types.ts +21 -21
  62. package/src/players/MistPlayer.ts +46 -27
  63. package/src/players/MistWebRTCPlayer/index.ts +175 -129
  64. package/src/players/NativePlayer.ts +203 -143
  65. package/src/players/VideoJsPlayer.ts +200 -146
  66. package/src/players/WebCodecsPlayer/JitterBuffer.ts +6 -7
  67. package/src/players/WebCodecsPlayer/LatencyProfiles.ts +43 -43
  68. package/src/players/WebCodecsPlayer/RawChunkParser.ts +10 -10
  69. package/src/players/WebCodecsPlayer/SyncController.ts +46 -55
  70. package/src/players/WebCodecsPlayer/WebSocketController.ts +67 -69
  71. package/src/players/WebCodecsPlayer/index.ts +280 -220
  72. package/src/players/WebCodecsPlayer/polyfills/MediaStreamTrackGenerator.ts +12 -17
  73. package/src/players/WebCodecsPlayer/types.ts +81 -53
  74. package/src/players/WebCodecsPlayer/worker/decoder.worker.ts +255 -192
  75. package/src/players/WebCodecsPlayer/worker/types.ts +33 -29
  76. package/src/players/index.ts +8 -8
  77. package/src/styles/animations.css +2 -1
  78. package/src/styles/player.css +182 -356
  79. package/src/styles/tailwind.css +473 -159
  80. package/src/types.ts +75 -33
  81. package/src/vanilla/FrameWorksPlayer.ts +34 -19
  82. package/src/vanilla/index.ts +7 -7
@@ -19,15 +19,15 @@ import type {
19
19
  DecodedFrame,
20
20
  VideoDecoderInit,
21
21
  AudioDecoderInit,
22
- } from './types';
23
- import type { TrackInfo, PipelineStats, FrameTrackerStats } from '../types';
22
+ } from "./types";
23
+ import type { PipelineStats, FrameTrackerStats } from "../types";
24
24
 
25
25
  // ============================================================================
26
26
  // Global State
27
27
  // ============================================================================
28
28
 
29
29
  const pipelines = new Map<number, PipelineState>();
30
- let debugging: boolean | 'verbose' = false;
30
+ let debugging: boolean | "verbose" = false;
31
31
  let uidCounter = 0;
32
32
 
33
33
  // Frame timing state (shared across all pipelines)
@@ -62,7 +62,9 @@ const WARMUP_TIMEOUT_MS = 300; // Reduced from 500ms - start faster to reduce la
62
62
  function getTrackBaseTime(idx: number, frameTimeMs: number, now: number): number {
63
63
  if (!trackBaseTimes.has(idx)) {
64
64
  trackBaseTimes.set(idx, now - frameTimeMs / frameTiming.speed.combined);
65
- log(`Track ${idx} baseTime: ${trackBaseTimes.get(idx)!.toFixed(0)} (first frame @ ${frameTimeMs.toFixed(0)}ms)`);
65
+ log(
66
+ `Track ${idx} baseTime: ${trackBaseTimes.get(idx)!.toFixed(0)} (first frame @ ${frameTimeMs.toFixed(0)}ms)`
67
+ );
66
68
  }
67
69
  return trackBaseTimes.get(idx)!;
68
70
  }
@@ -77,7 +79,7 @@ function resetBaseTime(): void {
77
79
 
78
80
  function cloneVideoFrame(frame: VideoFrame): VideoFrame | null {
79
81
  try {
80
- if ('clone' in frame) {
82
+ if ("clone" in frame) {
81
83
  return (frame as VideoFrame).clone();
82
84
  }
83
85
  return new VideoFrame(frame);
@@ -87,7 +89,7 @@ function cloneVideoFrame(frame: VideoFrame): VideoFrame | null {
87
89
  }
88
90
 
89
91
  function pushFrameHistory(pipeline: PipelineState, frame: VideoFrame, timestamp: number): void {
90
- if (pipeline.track.type !== 'video') return;
92
+ if (pipeline.track.type !== "video") return;
91
93
  if (!pipeline.frameHistory) pipeline.frameHistory = [];
92
94
 
93
95
  const cloned = cloneVideoFrame(frame);
@@ -99,7 +101,9 @@ function pushFrameHistory(pipeline: PipelineState, frame: VideoFrame, timestamp:
99
101
  while (pipeline.frameHistory.length > MAX_FRAME_HISTORY) {
100
102
  const entry = pipeline.frameHistory.shift();
101
103
  if (entry) {
102
- try { entry.frame.close(); } catch {}
104
+ try {
105
+ entry.frame.close();
106
+ } catch {}
103
107
  }
104
108
  }
105
109
 
@@ -114,7 +118,7 @@ function alignHistoryCursorToLastOutput(pipeline: PipelineState): void {
114
118
  return;
115
119
  }
116
120
  // Find first history entry greater than last output, then step back one
117
- const idx = pipeline.frameHistory.findIndex(entry => entry.timestamp > lastTs);
121
+ const idx = pipeline.frameHistory.findIndex((entry) => entry.timestamp > lastTs);
118
122
  if (idx === -1) {
119
123
  pipeline.historyCursor = pipeline.frameHistory.length - 1;
120
124
  return;
@@ -125,7 +129,7 @@ function alignHistoryCursorToLastOutput(pipeline: PipelineState): void {
125
129
  function getPrimaryVideoPipeline(): PipelineState | null {
126
130
  let selected: PipelineState | null = null;
127
131
  for (const pipeline of pipelines.values()) {
128
- if (pipeline.track.type === 'video') {
132
+ if (pipeline.track.type === "video") {
129
133
  if (!selected || pipeline.idx < selected.idx) {
130
134
  selected = pipeline;
131
135
  }
@@ -139,7 +143,7 @@ let statsTimer: ReturnType<typeof setInterval> | null = null;
139
143
  const STATS_INTERVAL_MS = 250;
140
144
 
141
145
  // Frame dropping stats (Phase 2B)
142
- let totalFramesDropped = 0;
146
+ let _totalFramesDropped = 0;
143
147
 
144
148
  // Chrome-recommended decoder queue threshold
145
149
  // Per Chrome WebCodecs best practices: drop when decodeQueueSize > 2
@@ -153,11 +157,11 @@ const MAX_PAUSED_INPUT_QUEUE = 600;
153
157
  // Logging
154
158
  // ============================================================================
155
159
 
156
- function log(msg: string, level: 'info' | 'warn' | 'error' = 'info'): void {
160
+ function log(msg: string, level: "info" | "warn" | "error" = "info"): void {
157
161
  if (!debugging) return;
158
162
 
159
163
  const message: WorkerToMainMessage = {
160
- type: 'log',
164
+ type: "log",
161
165
  msg,
162
166
  level,
163
167
  uid: uidCounter++,
@@ -166,7 +170,7 @@ function log(msg: string, level: 'info' | 'warn' | 'error' = 'info'): void {
166
170
  }
167
171
 
168
172
  function logVerbose(msg: string): void {
169
- if (debugging !== 'verbose') return;
173
+ if (debugging !== "verbose") return;
170
174
  log(msg);
171
175
  }
172
176
 
@@ -178,49 +182,49 @@ self.onmessage = (event: MessageEvent<MainToWorkerMessage>) => {
178
182
  const msg = event.data;
179
183
 
180
184
  switch (msg.type) {
181
- case 'create':
185
+ case "create":
182
186
  handleCreate(msg);
183
187
  break;
184
188
 
185
- case 'configure':
189
+ case "configure":
186
190
  handleConfigure(msg);
187
191
  break;
188
192
 
189
- case 'receive':
193
+ case "receive":
190
194
  handleReceive(msg);
191
195
  break;
192
196
 
193
- case 'setwritable':
197
+ case "setwritable":
194
198
  handleSetWritable(msg);
195
199
  break;
196
200
 
197
- case 'creategenerator':
201
+ case "creategenerator":
198
202
  handleCreateGenerator(msg);
199
203
  break;
200
204
 
201
- case 'close':
205
+ case "close":
202
206
  handleClose(msg);
203
207
  break;
204
208
 
205
- case 'frametiming':
209
+ case "frametiming":
206
210
  handleFrameTiming(msg);
207
211
  break;
208
212
 
209
- case 'seek':
213
+ case "seek":
210
214
  handleSeek(msg);
211
215
  break;
212
216
 
213
- case 'framestep':
217
+ case "framestep":
214
218
  handleFrameStep(msg);
215
219
  break;
216
220
 
217
- case 'debugging':
221
+ case "debugging":
218
222
  debugging = msg.value;
219
223
  log(`Debugging set to: ${msg.value}`);
220
224
  break;
221
225
 
222
226
  default:
223
- log(`Unknown message type: ${(msg as any).type}`, 'warn');
227
+ log(`Unknown message type: ${(msg as any).type}`, "warn");
224
228
  }
225
229
  };
226
230
 
@@ -228,7 +232,7 @@ self.onmessage = (event: MessageEvent<MainToWorkerMessage>) => {
228
232
  // Pipeline Management
229
233
  // ============================================================================
230
234
 
231
- function handleCreate(msg: MainToWorkerMessage & { type: 'create' }): void {
235
+ function handleCreate(msg: MainToWorkerMessage & { type: "create" }): void {
232
236
  const { idx, track, opts, uid } = msg;
233
237
 
234
238
  log(`Creating pipeline for track ${idx} (${track.type} ${track.codec})`);
@@ -243,8 +247,8 @@ function handleCreate(msg: MainToWorkerMessage & { type: 'create' }): void {
243
247
  writer: null,
244
248
  inputQueue: [],
245
249
  outputQueue: [],
246
- frameHistory: track.type === 'video' ? [] : undefined,
247
- historyCursor: track.type === 'video' ? null : undefined,
250
+ frameHistory: track.type === "video" ? [] : undefined,
251
+ historyCursor: track.type === "video" ? null : undefined,
248
252
  stats: {
249
253
  framesIn: 0,
250
254
  framesDecoded: 0,
@@ -254,11 +258,12 @@ function handleCreate(msg: MainToWorkerMessage & { type: 'create' }): void {
254
258
  lastOutputTimestamp: 0,
255
259
  decoderQueueSize: 0,
256
260
  // Debug info for error diagnosis
257
- lastChunkType: '' as string,
261
+ lastChunkType: "" as string,
258
262
  lastChunkSize: 0,
259
- lastChunkBytes: '' as string,
263
+ lastChunkBytes: "" as string,
260
264
  },
261
265
  optimizeForLatency: opts.optimizeForLatency,
266
+ payloadFormat: opts.payloadFormat || "avcc",
262
267
  };
263
268
 
264
269
  pipelines.set(idx, pipeline);
@@ -271,32 +276,32 @@ function handleCreate(msg: MainToWorkerMessage & { type: 'create' }): void {
271
276
  sendAck(uid, idx);
272
277
  }
273
278
 
274
- function handleConfigure(msg: MainToWorkerMessage & { type: 'configure' }): void {
279
+ function handleConfigure(msg: MainToWorkerMessage & { type: "configure" }): void {
275
280
  const { idx, header, uid } = msg;
276
281
 
277
- log(`Received configure for track ${idx}, header length=${header?.byteLength ?? 'null'}`);
282
+ log(`Received configure for track ${idx}, header length=${header?.byteLength ?? "null"}`);
278
283
 
279
284
  const pipeline = pipelines.get(idx);
280
285
 
281
286
  if (!pipeline) {
282
- log(`Cannot configure: pipeline ${idx} not found`, 'error');
283
- sendError(uid, idx, 'Pipeline not found');
287
+ log(`Cannot configure: pipeline ${idx} not found`, "error");
288
+ sendError(uid, idx, "Pipeline not found");
284
289
  return;
285
290
  }
286
291
 
287
292
  // Skip if already configured and decoder is ready
288
293
  // This prevents duplicate configuration when both WS INIT and HTTP fallback fire
289
- if (pipeline.configured && pipeline.decoder && pipeline.decoder.state === 'configured') {
294
+ if (pipeline.configured && pipeline.decoder && pipeline.decoder.state === "configured") {
290
295
  log(`Track ${idx} already configured, skipping duplicate configure`);
291
296
  sendAck(uid, idx);
292
297
  return;
293
298
  }
294
299
 
295
300
  try {
296
- if (pipeline.track.type === 'video') {
301
+ if (pipeline.track.type === "video") {
297
302
  log(`Configuring video decoder for track ${idx}...`);
298
303
  configureVideoDecoder(pipeline, header);
299
- } else if (pipeline.track.type === 'audio') {
304
+ } else if (pipeline.track.type === "audio") {
300
305
  log(`Configuring audio decoder for track ${idx}...`);
301
306
  configureAudioDecoder(pipeline, header);
302
307
  }
@@ -305,7 +310,7 @@ function handleConfigure(msg: MainToWorkerMessage & { type: 'configure' }): void
305
310
  log(`Successfully configured decoder for track ${idx}`);
306
311
  sendAck(uid, idx);
307
312
  } catch (err) {
308
- log(`Failed to configure decoder for track ${idx}: ${err}`, 'error');
313
+ log(`Failed to configure decoder for track ${idx}: ${err}`, "error");
309
314
  sendError(uid, idx, String(err));
310
315
  }
311
316
  }
@@ -314,8 +319,8 @@ function configureVideoDecoder(pipeline: PipelineState, description?: Uint8Array
314
319
  const track = pipeline.track;
315
320
 
316
321
  // Handle JPEG codec separately via ImageDecoder (Phase 2C)
317
- if (track.codec === 'JPEG' || track.codec.toLowerCase() === 'jpeg') {
318
- log('JPEG codec detected - will use ImageDecoder');
322
+ if (track.codec === "JPEG" || track.codec.toLowerCase() === "jpeg") {
323
+ log("JPEG codec detected - will use ImageDecoder");
319
324
  pipeline.configured = true;
320
325
  // JPEG doesn't need a persistent decoder - each frame is decoded individually
321
326
  return;
@@ -323,14 +328,14 @@ function configureVideoDecoder(pipeline: PipelineState, description?: Uint8Array
323
328
 
324
329
  // Close existing decoder if any (per rawws.js reconfiguration pattern)
325
330
  if (pipeline.decoder) {
326
- if (pipeline.decoder.state === 'configured') {
331
+ if (pipeline.decoder.state === "configured") {
327
332
  try {
328
333
  pipeline.decoder.reset();
329
334
  } catch {
330
335
  // Ignore reset errors
331
336
  }
332
337
  }
333
- if (pipeline.decoder.state !== 'closed') {
338
+ if (pipeline.decoder.state !== "closed") {
334
339
  try {
335
340
  pipeline.decoder.close();
336
341
  } catch {
@@ -345,15 +350,18 @@ function configureVideoDecoder(pipeline: PipelineState, description?: Uint8Array
345
350
  const config: VideoDecoderInit = {
346
351
  codec: track.codecstring || track.codec.toLowerCase(),
347
352
  optimizeForLatency: pipeline.optimizeForLatency,
348
- hardwareAcceleration: 'prefer-hardware',
353
+ hardwareAcceleration: "prefer-hardware",
349
354
  };
350
355
 
351
356
  // Pass description directly from WebSocket INIT data (per reference rawws.js line 1052)
352
- if (description && description.byteLength > 0) {
357
+ // For Annex B format (ws/video/h264), SPS/PPS comes inline in the bitstream - skip description
358
+ if (pipeline.payloadFormat === "annexb") {
359
+ log(`Annex B mode - SPS/PPS inline in bitstream, no description needed`);
360
+ } else if (description && description.byteLength > 0) {
353
361
  config.description = description;
354
362
  log(`Configuring with description (${description.byteLength} bytes)`);
355
363
  } else {
356
- log(`No description provided - decoder may fail on H.264/HEVC`, 'warn');
364
+ log(`No description provided - decoder may fail on H.264/HEVC`, "warn");
357
365
  }
358
366
 
359
367
  log(`Configuring video decoder: ${config.codec}`);
@@ -375,29 +383,29 @@ function configureVideoDecoder(pipeline: PipelineState, description?: Uint8Array
375
383
  */
376
384
  function mapAudioCodec(codec: string, codecstring?: string): string {
377
385
  // If we have a full codec string like "mp4a.40.2", use it
378
- if (codecstring && codecstring.startsWith('mp4a.')) {
386
+ if (codecstring && codecstring.startsWith("mp4a.")) {
379
387
  return codecstring;
380
388
  }
381
389
 
382
390
  // Map common MistServer codec names to WebCodecs codec strings
383
391
  const normalized = codec.toLowerCase();
384
392
  switch (normalized) {
385
- case 'aac':
386
- case 'mp4a':
387
- return 'mp4a.40.2'; // AAC-LC
388
- case 'mp3':
389
- return 'mp3';
390
- case 'opus':
391
- return 'opus';
392
- case 'flac':
393
- return 'flac';
394
- case 'ac3':
395
- case 'ac-3':
396
- return 'ac-3';
397
- case 'pcm_s16le':
398
- case 'pcm_s32le':
399
- case 'pcm_f32le':
400
- return 'pcm-' + normalized.replace('pcm_', '').replace('le', '-le');
393
+ case "aac":
394
+ case "mp4a":
395
+ return "mp4a.40.2"; // AAC-LC
396
+ case "mp3":
397
+ return "mp3";
398
+ case "opus":
399
+ return "opus";
400
+ case "flac":
401
+ return "flac";
402
+ case "ac3":
403
+ case "ac-3":
404
+ return "ac-3";
405
+ case "pcm_s16le":
406
+ case "pcm_s32le":
407
+ case "pcm_f32le":
408
+ return "pcm-" + normalized.replace("pcm_", "").replace("le", "-le");
401
409
  default:
402
410
  log(`Unknown audio codec: ${codec}, trying as-is`);
403
411
  return codecstring || codec;
@@ -428,7 +436,9 @@ function configureAudioDecoder(pipeline: PipelineState, description?: Uint8Array
428
436
  decoder.configure(config as AudioDecoderConfig);
429
437
  pipeline.decoder = decoder;
430
438
 
431
- log(`Audio decoder configured: ${config.codec} ${config.sampleRate}Hz ${config.numberOfChannels}ch`);
439
+ log(
440
+ `Audio decoder configured: ${config.codec} ${config.sampleRate}Hz ${config.numberOfChannels}ch`
441
+ );
432
442
  }
433
443
 
434
444
  function handleDecodedFrame(pipeline: PipelineState, frame: VideoFrame | AudioData): void {
@@ -446,10 +456,13 @@ function handleDecodedFrame(pipeline: PipelineState, frame: VideoFrame | AudioDa
446
456
  // Log first few decoded frames
447
457
  if (pipeline.stats.framesDecoded <= 3) {
448
458
  const frameType = pipeline.track.type;
449
- const extraInfo = frameType === 'audio'
450
- ? ` (${(frame as AudioData).numberOfFrames} samples, ${(frame as AudioData).sampleRate}Hz)`
451
- : ` (${(frame as VideoFrame).displayWidth}x${(frame as VideoFrame).displayHeight})`;
452
- log(`Decoded ${frameType} frame ${pipeline.stats.framesDecoded} for track ${pipeline.idx}: ts=${timestamp}μs${extraInfo}`);
459
+ const extraInfo =
460
+ frameType === "audio"
461
+ ? ` (${(frame as AudioData).numberOfFrames} samples, ${(frame as AudioData).sampleRate}Hz)`
462
+ : ` (${(frame as VideoFrame).displayWidth}x${(frame as VideoFrame).displayHeight})`;
463
+ log(
464
+ `Decoded ${frameType} frame ${pipeline.stats.framesDecoded} for track ${pipeline.idx}: ts=${timestamp}μs${extraInfo}`
465
+ );
453
466
  }
454
467
 
455
468
  // Add to output queue for scheduled release
@@ -464,16 +477,19 @@ function handleDecodedFrame(pipeline: PipelineState, frame: VideoFrame | AudioDa
464
477
  }
465
478
 
466
479
  function handleDecoderError(pipeline: PipelineState, err: DOMException): void {
467
- log(`Decoder error on track ${pipeline.idx}: ${err.name}: ${err.message}`, 'error');
468
- log(` Last chunk info: type=${pipeline.stats.lastChunkType}, size=${pipeline.stats.lastChunkSize}, first bytes=[${pipeline.stats.lastChunkBytes}]`, 'error');
480
+ log(`Decoder error on track ${pipeline.idx}: ${err.name}: ${err.message}`, "error");
481
+ log(
482
+ ` Last chunk info: type=${pipeline.stats.lastChunkType}, size=${pipeline.stats.lastChunkSize}, first bytes=[${pipeline.stats.lastChunkBytes}]`,
483
+ "error"
484
+ );
469
485
 
470
486
  // Per rawws.js: reset the pipeline after decoder error
471
487
  // This clears queues and recreates the decoder if needed
472
488
  resetPipelineAfterError(pipeline);
473
489
 
474
490
  const message: WorkerToMainMessage = {
475
- type: 'sendevent',
476
- kind: 'error',
491
+ type: "sendevent",
492
+ kind: "error",
477
493
  message: `Decoder error: ${err.message}`,
478
494
  idx: pipeline.idx,
479
495
  uid: uidCounter++,
@@ -497,16 +513,16 @@ function resetPipelineAfterError(pipeline: PipelineState): void {
497
513
  pipeline.configured = false;
498
514
 
499
515
  // If decoder is closed, we need to recreate it (can't reset a closed decoder)
500
- if (pipeline.decoder && pipeline.decoder.state === 'closed') {
516
+ if (pipeline.decoder && pipeline.decoder.state === "closed") {
501
517
  log(`Decoder closed for track ${pipeline.idx}, will recreate on next configure`);
502
518
  pipeline.decoder = null;
503
- } else if (pipeline.decoder && pipeline.decoder.state !== 'closed') {
519
+ } else if (pipeline.decoder && pipeline.decoder.state !== "closed") {
504
520
  // Try to reset if not closed
505
521
  try {
506
522
  pipeline.decoder.reset();
507
523
  log(`Reset decoder for track ${pipeline.idx}`);
508
524
  } catch (e) {
509
- log(`Failed to reset decoder for track ${pipeline.idx}: ${e}`, 'warn');
525
+ log(`Failed to reset decoder for track ${pipeline.idx}: ${e}`, "warn");
510
526
  pipeline.decoder = null;
511
527
  }
512
528
  }
@@ -516,8 +532,8 @@ function resetPipelineAfterError(pipeline: PipelineState): void {
516
532
  // Frame Input/Output
517
533
  // ============================================================================
518
534
 
519
- function handleReceive(msg: MainToWorkerMessage & { type: 'receive' }): void {
520
- const { idx, chunk, uid } = msg;
535
+ function handleReceive(msg: MainToWorkerMessage & { type: "receive" }): void {
536
+ const { idx, chunk } = msg;
521
537
  const pipeline = pipelines.get(idx);
522
538
 
523
539
  if (!pipeline) {
@@ -528,7 +544,9 @@ function handleReceive(msg: MainToWorkerMessage & { type: 'receive' }): void {
528
544
  if (!pipeline.configured || !pipeline.decoder) {
529
545
  // Queue for later
530
546
  pipeline.inputQueue.push(chunk);
531
- logVerbose(`Queued chunk for track ${idx} (configured=${pipeline.configured}, decoder=${!!pipeline.decoder})`);
547
+ logVerbose(
548
+ `Queued chunk for track ${idx} (configured=${pipeline.configured}, decoder=${!!pipeline.decoder})`
549
+ );
532
550
  return;
533
551
  }
534
552
 
@@ -544,19 +562,23 @@ function handleReceive(msg: MainToWorkerMessage & { type: 'receive' }): void {
544
562
 
545
563
  // Log only first 3 chunks per track to confirm receiving
546
564
  if (pipeline.stats.framesIn < 3) {
547
- log(`Received chunk ${pipeline.stats.framesIn} for track ${idx}: type=${chunk.type}, ts=${chunk.timestamp / 1000}ms, size=${chunk.data.byteLength}`);
565
+ log(
566
+ `Received chunk ${pipeline.stats.framesIn} for track ${idx}: type=${chunk.type}, ts=${chunk.timestamp / 1000}ms, size=${chunk.data.byteLength}`
567
+ );
548
568
  }
549
569
 
550
570
  // Check if we need to drop frames due to decoder pressure (Phase 2B)
551
571
  if (shouldDropFramesDueToDecoderPressure(pipeline)) {
552
- if (chunk.type === 'key') {
572
+ if (chunk.type === "key") {
553
573
  // Always accept keyframes - they're needed to resume
554
574
  decodeChunk(pipeline, chunk);
555
575
  } else {
556
576
  // Drop delta frames when decoder is overwhelmed
557
577
  pipeline.stats.framesDropped++;
558
- totalFramesDropped++;
559
- logVerbose(`Dropped delta frame @ ${chunk.timestamp / 1000}ms (decoder queue: ${pipeline.decoder.decodeQueueSize})`);
578
+ _totalFramesDropped++;
579
+ logVerbose(
580
+ `Dropped delta frame @ ${chunk.timestamp / 1000}ms (decoder queue: ${pipeline.decoder.decodeQueueSize})`
581
+ );
560
582
  }
561
583
  return;
562
584
  }
@@ -583,11 +605,11 @@ function shouldDropFramesDueToDecoderPressure(pipeline: PipelineState): boolean
583
605
  * Drop all frames up to the next keyframe in the input queue
584
606
  * Called when decoder is severely backed up
585
607
  */
586
- function dropToNextKeyframe(pipeline: PipelineState): number {
608
+ function _dropToNextKeyframe(pipeline: PipelineState): number {
587
609
  if (pipeline.inputQueue.length === 0) return 0;
588
610
 
589
611
  // Find next keyframe in queue
590
- const keyframeIdx = pipeline.inputQueue.findIndex(c => c.type === 'key');
612
+ const keyframeIdx = pipeline.inputQueue.findIndex((c) => c.type === "key");
591
613
 
592
614
  if (keyframeIdx <= 0) {
593
615
  // No keyframe or keyframe is first - nothing to drop
@@ -597,16 +619,16 @@ function dropToNextKeyframe(pipeline: PipelineState): number {
597
619
  // Drop all frames before keyframe
598
620
  const dropped = pipeline.inputQueue.splice(0, keyframeIdx);
599
621
  pipeline.stats.framesDropped += dropped.length;
600
- totalFramesDropped += dropped.length;
622
+ _totalFramesDropped += dropped.length;
601
623
 
602
- log(`Dropped ${dropped.length} frames to next keyframe`, 'warn');
624
+ log(`Dropped ${dropped.length} frames to next keyframe`, "warn");
603
625
 
604
626
  return dropped.length;
605
627
  }
606
628
 
607
629
  function decodeChunk(
608
630
  pipeline: PipelineState,
609
- chunk: { type: 'key' | 'delta'; timestamp: number; data: Uint8Array }
631
+ chunk: { type: "key" | "delta"; timestamp: number; data: Uint8Array }
610
632
  ): void {
611
633
  if (pipeline.closed) return;
612
634
 
@@ -618,7 +640,7 @@ function decodeChunk(
618
640
  try {
619
641
  // Handle JPEG via ImageDecoder (Phase 2C)
620
642
  const codec = pipeline.track.codec;
621
- if (codec === 'JPEG' || codec.toLowerCase() === 'jpeg') {
643
+ if (codec === "JPEG" || codec.toLowerCase() === "jpeg") {
622
644
  decodeJpegFrame(pipeline, chunk);
623
645
  return;
624
646
  }
@@ -632,10 +654,12 @@ function decodeChunk(
632
654
  pipeline.stats.lastChunkType = chunk.type;
633
655
  pipeline.stats.lastChunkSize = chunk.data.byteLength;
634
656
  // Show first 8 bytes to identify format (Annex B starts 0x00 0x00 0x00 0x01, AVCC starts with length)
635
- const firstBytes = Array.from(chunk.data.slice(0, 8)).map(b => '0x' + b.toString(16).padStart(2, '0')).join(' ');
657
+ const firstBytes = Array.from(chunk.data.slice(0, 8))
658
+ .map((b) => "0x" + b.toString(16).padStart(2, "0"))
659
+ .join(" ");
636
660
  pipeline.stats.lastChunkBytes = firstBytes;
637
661
 
638
- if (pipeline.track.type === 'video') {
662
+ if (pipeline.track.type === "video") {
639
663
  // AVCC mode: frames pass through unchanged (decoder has SPS/PPS from description)
640
664
  const encodedChunk = new EncodedVideoChunk({
641
665
  type: chunk.type,
@@ -645,8 +669,12 @@ function decodeChunk(
645
669
 
646
670
  const decoder = pipeline.decoder as VideoDecoder;
647
671
  if (pipeline.stats.framesIn <= 3) {
648
- const firstBytes = Array.from(chunk.data.slice(0, 16)).map(b => '0x' + b.toString(16).padStart(2, '0')).join(' ');
649
- log(`Calling decode() for track ${pipeline.idx}: state=${decoder.state}, queueSize=${decoder.decodeQueueSize}, chunk type=${chunk.type}, ts=${timestampUs}μs`);
672
+ const firstBytes = Array.from(chunk.data.slice(0, 16))
673
+ .map((b) => "0x" + b.toString(16).padStart(2, "0"))
674
+ .join(" ");
675
+ log(
676
+ `Calling decode() for track ${pipeline.idx}: state=${decoder.state}, queueSize=${decoder.decodeQueueSize}, chunk type=${chunk.type}, ts=${timestampUs}μs`
677
+ );
650
678
  log(` First 16 bytes: ${firstBytes}`);
651
679
  }
652
680
 
@@ -655,11 +683,11 @@ function decodeChunk(
655
683
  if (pipeline.stats.framesIn <= 3) {
656
684
  log(`After decode() for track ${pipeline.idx}: queueSize=${decoder.decodeQueueSize}`);
657
685
  }
658
- } else if (pipeline.track.type === 'audio') {
686
+ } else if (pipeline.track.type === "audio") {
659
687
  // Audio chunks are always treated as "key" frames - per MistServer rawws.js line 1127
660
688
  // Audio codecs don't use inter-frame dependencies like video does
661
689
  const encodedChunk = new EncodedAudioChunk({
662
- type: 'key',
690
+ type: "key",
663
691
  timestamp: timestampUs,
664
692
  data: chunk.data,
665
693
  });
@@ -671,9 +699,11 @@ function decodeChunk(
671
699
  pipeline.stats.decoderQueueSize = pipeline.decoder.decodeQueueSize;
672
700
  }
673
701
 
674
- logVerbose(`Decoded chunk ${chunk.type} @ ${chunk.timestamp / 1000}ms for track ${pipeline.idx}`);
702
+ logVerbose(
703
+ `Decoded chunk ${chunk.type} @ ${chunk.timestamp / 1000}ms for track ${pipeline.idx}`
704
+ );
675
705
  } catch (err) {
676
- log(`Decode error on track ${pipeline.idx}: ${err}`, 'error');
706
+ log(`Decode error on track ${pipeline.idx}: ${err}`, "error");
677
707
  }
678
708
  }
679
709
 
@@ -683,20 +713,20 @@ function decodeChunk(
683
713
  */
684
714
  async function decodeJpegFrame(
685
715
  pipeline: PipelineState,
686
- chunk: { type: 'key' | 'delta'; timestamp: number; data: Uint8Array }
716
+ chunk: { type: "key" | "delta"; timestamp: number; data: Uint8Array }
687
717
  ): Promise<void> {
688
718
  if (pipeline.closed) return;
689
719
 
690
720
  // Check if ImageDecoder is available
691
- if (typeof ImageDecoder === 'undefined') {
692
- log('ImageDecoder not available - JPEG streams not supported', 'error');
721
+ if (typeof ImageDecoder === "undefined") {
722
+ log("ImageDecoder not available - JPEG streams not supported", "error");
693
723
  return;
694
724
  }
695
725
 
696
726
  try {
697
727
  // Create ImageDecoder for this frame
698
728
  const decoder = new ImageDecoder({
699
- type: 'image/jpeg',
729
+ type: "image/jpeg",
700
730
  data: chunk.data,
701
731
  });
702
732
 
@@ -717,7 +747,7 @@ async function decodeJpegFrame(
717
747
 
718
748
  logVerbose(`Decoded JPEG frame @ ${chunk.timestamp / 1000}ms for track ${pipeline.idx}`);
719
749
  } catch (err) {
720
- log(`JPEG decode error on track ${pipeline.idx}: ${err}`, 'error');
750
+ log(`JPEG decode error on track ${pipeline.idx}: ${err}`, "error");
721
751
  }
722
752
  }
723
753
 
@@ -736,7 +766,10 @@ function processOutputQueue(pipeline: PipelineState): void {
736
766
 
737
767
  if (!pipeline.writer || pipeline.outputQueue.length === 0) {
738
768
  if (pipeline.outputQueue.length > 0 && !pipeline.writer) {
739
- log(`Cannot output: no writer for track ${pipeline.idx} (queue has ${pipeline.outputQueue.length} frames)`, 'warn');
769
+ log(
770
+ `Cannot output: no writer for track ${pipeline.idx} (queue has ${pipeline.outputQueue.length} frames)`,
771
+ "warn"
772
+ );
740
773
  }
741
774
  return;
742
775
  }
@@ -746,8 +779,8 @@ function processOutputQueue(pipeline: PipelineState): void {
746
779
  // Sort output queue by timestamp - MistServer can send frames out of order
747
780
  // This is more robust than just swapping adjacent frames
748
781
  if (pipeline.outputQueue.length > 1) {
749
- const wasSorted = pipeline.outputQueue.every((entry, i, arr) =>
750
- i === 0 || arr[i - 1].timestamp <= entry.timestamp
782
+ const wasSorted = pipeline.outputQueue.every(
783
+ (entry, i, arr) => i === 0 || arr[i - 1].timestamp <= entry.timestamp
751
784
  );
752
785
  if (!wasSorted) {
753
786
  pipeline.outputQueue.sort((a, b) => a.timestamp - b.timestamp);
@@ -775,7 +808,9 @@ function processOutputQueue(pipeline: PipelineState): void {
775
808
  // Complete warmup when we have enough buffer OR timeout
776
809
  if (bufferMs >= WARMUP_BUFFER_MS || elapsed >= WARMUP_TIMEOUT_MS) {
777
810
  warmupComplete = true;
778
- log(`Buffer warmup complete: ${bufferMs.toFixed(0)}ms buffer, ${pipeline.outputQueue.length} frames queued (track ${pipeline.idx})`);
811
+ log(
812
+ `Buffer warmup complete: ${bufferMs.toFixed(0)}ms buffer, ${pipeline.outputQueue.length} frames queued (track ${pipeline.idx})`
813
+ );
779
814
  } else {
780
815
  // Not ready yet - schedule another check
781
816
  setTimeout(() => processOutputQueue(pipeline), 10);
@@ -785,7 +820,9 @@ function processOutputQueue(pipeline: PipelineState): void {
785
820
  // Not enough frames yet - schedule another check
786
821
  if (elapsed >= WARMUP_TIMEOUT_MS) {
787
822
  warmupComplete = true;
788
- log(`Buffer warmup timeout - starting with ${pipeline.outputQueue.length} frame(s) (track ${pipeline.idx})`);
823
+ log(
824
+ `Buffer warmup timeout - starting with ${pipeline.outputQueue.length} frame(s) (track ${pipeline.idx})`
825
+ );
789
826
  } else {
790
827
  setTimeout(() => processOutputQueue(pipeline), 10);
791
828
  return;
@@ -841,7 +878,9 @@ function shouldOutputFrame(
841
878
  // How early/late is this frame? Positive = too early, negative = late
842
879
  const delay = targetTime - now;
843
880
 
844
- logVerbose(`Frame timing: track=${trackIdx} frame=${frameTimeMs.toFixed(0)}ms, target=${targetTime.toFixed(0)}, now=${now.toFixed(0)}, delay=${delay.toFixed(1)}ms`);
881
+ logVerbose(
882
+ `Frame timing: track=${trackIdx} frame=${frameTimeMs.toFixed(0)}ms, target=${targetTime.toFixed(0)}, now=${now.toFixed(0)}, delay=${delay.toFixed(1)}ms`
883
+ );
845
884
 
846
885
  // Output immediately if ready or late (per rawws.js line 889: delay <= 2)
847
886
  if (delay <= 2) {
@@ -852,7 +891,11 @@ function shouldOutputFrame(
852
891
  return { shouldOutput: false, earliness: -delay, checkDelayMs: Math.max(1, Math.floor(delay)) };
853
892
  }
854
893
 
855
- function outputFrame(pipeline: PipelineState, entry: DecodedFrame, options?: { skipHistory?: boolean }): void {
894
+ function outputFrame(
895
+ pipeline: PipelineState,
896
+ entry: DecodedFrame,
897
+ options?: { skipHistory?: boolean }
898
+ ): void {
856
899
  if (!pipeline.writer || pipeline.closed) {
857
900
  entry.frame.close();
858
901
  return;
@@ -865,55 +908,60 @@ function outputFrame(pipeline: PipelineState, entry: DecodedFrame, options?: { s
865
908
 
866
909
  // Log first few output frames
867
910
  if (pipeline.stats.framesOut <= 3) {
868
- log(`Output frame ${pipeline.stats.framesOut} for track ${pipeline.idx}: ts=${entry.timestamp}μs`);
911
+ log(
912
+ `Output frame ${pipeline.stats.framesOut} for track ${pipeline.idx}: ts=${entry.timestamp}μs`
913
+ );
869
914
  }
870
915
 
871
916
  // Store history for frame stepping (video only)
872
- if (pipeline.track.type === 'video' && !(options?.skipHistory)) {
917
+ if (pipeline.track.type === "video" && !options?.skipHistory) {
873
918
  pushFrameHistory(pipeline, entry.frame as VideoFrame, entry.timestamp);
874
919
  }
875
920
 
876
921
  // Write returns a Promise - handle rejection to avoid unhandled promise errors
877
922
  // Frame ownership is transferred to the stream, so we don't need to close() on success
878
- pipeline.writer.write(entry.frame).then(() => {
879
- // Send timeupdate event on successful write
880
- const message: WorkerToMainMessage = {
881
- type: 'sendevent',
882
- kind: 'timeupdate',
883
- idx: pipeline.idx,
884
- time: entry.timestamp / 1e6,
885
- uid: uidCounter++,
886
- };
887
- self.postMessage(message);
888
- }).catch((err: Error) => {
889
- // Check for "stream closed" errors - these are expected during cleanup
890
- const errStr = String(err);
891
- if (errStr.includes('Stream closed') || errStr.includes('InvalidStateError')) {
892
- // Expected during player cleanup - silently mark pipeline as closed
893
- pipeline.closed = true;
894
- } else {
895
- log(`Failed to write frame: ${err}`, 'error');
896
- }
897
- // Frame may not have been consumed by the stream - try to close it
898
- try {
899
- entry.frame.close();
900
- } catch {
901
- // Frame may already be detached/closed
902
- }
903
- });
923
+ pipeline.writer
924
+ .write(entry.frame)
925
+ .then(() => {
926
+ // Send timeupdate event on successful write
927
+ const message: WorkerToMainMessage = {
928
+ type: "sendevent",
929
+ kind: "timeupdate",
930
+ idx: pipeline.idx,
931
+ time: entry.timestamp / 1e6,
932
+ uid: uidCounter++,
933
+ };
934
+ self.postMessage(message);
935
+ })
936
+ .catch((err: Error) => {
937
+ // Check for "stream closed" errors - these are expected during cleanup
938
+ const errStr = String(err);
939
+ if (errStr.includes("Stream closed") || errStr.includes("InvalidStateError")) {
940
+ // Expected during player cleanup - silently mark pipeline as closed
941
+ pipeline.closed = true;
942
+ } else {
943
+ log(`Failed to write frame: ${err}`, "error");
944
+ }
945
+ // Frame may not have been consumed by the stream - try to close it
946
+ try {
947
+ entry.frame.close();
948
+ } catch {
949
+ // Frame may already be detached/closed
950
+ }
951
+ });
904
952
  }
905
953
 
906
954
  // ============================================================================
907
955
  // Track Generator / Writable Stream
908
956
  // ============================================================================
909
957
 
910
- function handleSetWritable(msg: MainToWorkerMessage & { type: 'setwritable' }): void {
958
+ function handleSetWritable(msg: MainToWorkerMessage & { type: "setwritable" }): void {
911
959
  const { idx, writable, uid } = msg;
912
960
  const pipeline = pipelines.get(idx);
913
961
 
914
962
  if (!pipeline) {
915
- log(`Cannot set writable: pipeline ${idx} not found`, 'error');
916
- sendError(uid, idx, 'Pipeline not found');
963
+ log(`Cannot set writable: pipeline ${idx} not found`, "error");
964
+ sendError(uid, idx, "Pipeline not found");
917
965
  return;
918
966
  }
919
967
 
@@ -927,29 +975,29 @@ function handleSetWritable(msg: MainToWorkerMessage & { type: 'setwritable' }):
927
975
 
928
976
  // Notify main thread track is ready
929
977
  const message: WorkerToMainMessage = {
930
- type: 'addtrack',
978
+ type: "addtrack",
931
979
  idx,
932
980
  uid,
933
- status: 'ok',
981
+ status: "ok",
934
982
  };
935
983
  self.postMessage(message);
936
984
  }
937
985
 
938
- function handleCreateGenerator(msg: MainToWorkerMessage & { type: 'creategenerator' }): void {
986
+ function handleCreateGenerator(msg: MainToWorkerMessage & { type: "creategenerator" }): void {
939
987
  const { idx, uid } = msg;
940
988
  const pipeline = pipelines.get(idx);
941
989
 
942
990
  if (!pipeline) {
943
- log(`Cannot create generator: pipeline ${idx} not found`, 'error');
944
- sendError(uid, idx, 'Pipeline not found');
991
+ log(`Cannot create generator: pipeline ${idx} not found`, "error");
992
+ sendError(uid, idx, "Pipeline not found");
945
993
  return;
946
994
  }
947
995
 
948
996
  // Safari: VideoTrackGenerator is available in worker (not MediaStreamTrackGenerator)
949
997
  // Reference: webcodecsworker.js line 852-863
950
998
  // @ts-ignore - VideoTrackGenerator may not be in types
951
- if (typeof VideoTrackGenerator !== 'undefined') {
952
- if (pipeline.track.type === 'video') {
999
+ if (typeof VideoTrackGenerator !== "undefined") {
1000
+ if (pipeline.track.type === "video") {
953
1001
  // Safari video: use VideoTrackGenerator
954
1002
  // @ts-ignore
955
1003
  const generator = new VideoTrackGenerator();
@@ -958,16 +1006,16 @@ function handleCreateGenerator(msg: MainToWorkerMessage & { type: 'creategenerat
958
1006
 
959
1007
  // Send track back to main thread
960
1008
  const message: WorkerToMainMessage = {
961
- type: 'addtrack',
1009
+ type: "addtrack",
962
1010
  idx,
963
1011
  track: generator.track,
964
1012
  uid,
965
- status: 'ok',
1013
+ status: "ok",
966
1014
  };
967
1015
  // @ts-ignore - transferring MediaStreamTrack
968
1016
  self.postMessage(message, [generator.track]);
969
1017
  log(`Created VideoTrackGenerator for track ${idx} (Safari video)`);
970
- } else if (pipeline.track.type === 'audio') {
1018
+ } else if (pipeline.track.type === "audio") {
971
1019
  // Safari audio: relay frames to main thread via postMessage
972
1020
  // Reference: webcodecsworker.js line 773-800
973
1021
  // Main thread creates the audio generator, we just send frames
@@ -977,30 +1025,31 @@ function handleCreateGenerator(msg: MainToWorkerMessage & { type: 'creategenerat
977
1025
  const frameUid = uidCounter++;
978
1026
  // Set up listener for response
979
1027
  const timeoutId = setTimeout(() => {
980
- reject(new Error('writeframe timeout'));
1028
+ reject(new Error("writeframe timeout"));
981
1029
  }, 5000);
982
1030
 
983
1031
  const handler = (e: MessageEvent) => {
984
1032
  const msg = e.data;
985
- if (msg.type === 'writeframe' && msg.idx === idx && msg.uid === frameUid) {
1033
+ if (msg.type === "writeframe" && msg.idx === idx && msg.uid === frameUid) {
986
1034
  clearTimeout(timeoutId);
987
- self.removeEventListener('message', handler);
988
- if (msg.status === 'ok') {
1035
+ self.removeEventListener("message", handler);
1036
+ if (msg.status === "ok") {
989
1037
  resolve();
990
1038
  } else {
991
- reject(new Error(msg.error || 'writeframe failed'));
1039
+ reject(new Error(msg.error || "writeframe failed"));
992
1040
  }
993
1041
  }
994
1042
  };
995
- self.addEventListener('message', handler);
1043
+ self.addEventListener("message", handler);
996
1044
 
997
- // Send frame to main thread
998
- self.postMessage({
999
- type: 'writeframe',
1045
+ // Send frame to main thread (transfer AudioData)
1046
+ const msg = {
1047
+ type: "writeframe",
1000
1048
  idx,
1001
1049
  frame,
1002
1050
  uid: frameUid,
1003
- }, [frame]);
1051
+ };
1052
+ self.postMessage(msg, { transfer: [frame] });
1004
1053
  });
1005
1054
  },
1006
1055
  close: () => Promise.resolve(),
@@ -1008,15 +1057,16 @@ function handleCreateGenerator(msg: MainToWorkerMessage & { type: 'creategenerat
1008
1057
 
1009
1058
  // Notify main thread to set up audio generator
1010
1059
  const message: WorkerToMainMessage = {
1011
- type: 'addtrack',
1060
+ type: "addtrack",
1012
1061
  idx,
1013
1062
  uid,
1014
- status: 'ok',
1063
+ status: "ok",
1015
1064
  };
1016
1065
  self.postMessage(message);
1017
1066
  log(`Set up frame relay for track ${idx} (Safari audio)`);
1018
1067
  }
1019
- } else if (typeof MediaStreamTrackGenerator !== 'undefined') {
1068
+ // @ts-ignore - MediaStreamTrackGenerator may not be in standard types
1069
+ } else if (typeof MediaStreamTrackGenerator !== "undefined") {
1020
1070
  // Chrome/Edge: use MediaStreamTrackGenerator in worker
1021
1071
  // @ts-ignore
1022
1072
  const generator = new MediaStreamTrackGenerator({ kind: pipeline.track.type });
@@ -1025,18 +1075,18 @@ function handleCreateGenerator(msg: MainToWorkerMessage & { type: 'creategenerat
1025
1075
 
1026
1076
  // Send track back to main thread
1027
1077
  const message: WorkerToMainMessage = {
1028
- type: 'addtrack',
1078
+ type: "addtrack",
1029
1079
  idx,
1030
1080
  track: generator,
1031
1081
  uid,
1032
- status: 'ok',
1082
+ status: "ok",
1033
1083
  };
1034
1084
  // @ts-ignore - transferring MediaStreamTrack
1035
1085
  self.postMessage(message, [generator]);
1036
1086
  log(`Created MediaStreamTrackGenerator for track ${idx}`);
1037
1087
  } else {
1038
- log('Neither VideoTrackGenerator nor MediaStreamTrackGenerator available in worker', 'warn');
1039
- sendError(uid, idx, 'No track generator available');
1088
+ log("Neither VideoTrackGenerator nor MediaStreamTrackGenerator available in worker", "warn");
1089
+ sendError(uid, idx, "No track generator available");
1040
1090
  }
1041
1091
  }
1042
1092
 
@@ -1044,7 +1094,7 @@ function handleCreateGenerator(msg: MainToWorkerMessage & { type: 'creategenerat
1044
1094
  // Seeking & Timing
1045
1095
  // ============================================================================
1046
1096
 
1047
- function handleSeek(msg: MainToWorkerMessage & { type: 'seek' }): void {
1097
+ function handleSeek(msg: MainToWorkerMessage & { type: "seek" }): void {
1048
1098
  const { seekTime, uid } = msg;
1049
1099
 
1050
1100
  log(`Seek to ${seekTime}ms`);
@@ -1074,7 +1124,7 @@ function flushPipeline(pipeline: PipelineState): void {
1074
1124
  pipeline.outputQueue = [];
1075
1125
 
1076
1126
  // Reset decoder if possible
1077
- if (pipeline.decoder && pipeline.decoder.state !== 'closed') {
1127
+ if (pipeline.decoder && pipeline.decoder.state !== "closed") {
1078
1128
  try {
1079
1129
  pipeline.decoder.reset();
1080
1130
  } catch {
@@ -1083,26 +1133,28 @@ function flushPipeline(pipeline: PipelineState): void {
1083
1133
  }
1084
1134
  }
1085
1135
 
1086
- function handleFrameTiming(msg: MainToWorkerMessage & { type: 'frametiming' }): void {
1136
+ function handleFrameTiming(msg: MainToWorkerMessage & { type: "frametiming" }): void {
1087
1137
  const { action, speed, tweak, uid } = msg;
1088
1138
 
1089
- if (action === 'setSpeed') {
1139
+ if (action === "setSpeed") {
1090
1140
  if (speed !== undefined) frameTiming.speed.main = speed;
1091
1141
  if (tweak !== undefined) frameTiming.speed.tweak = tweak;
1092
1142
  frameTiming.speed.combined = frameTiming.speed.main * frameTiming.speed.tweak;
1093
- log(`Speed set to ${frameTiming.speed.combined} (main: ${frameTiming.speed.main}, tweak: ${frameTiming.speed.tweak})`);
1094
- } else if (action === 'setPaused') {
1143
+ log(
1144
+ `Speed set to ${frameTiming.speed.combined} (main: ${frameTiming.speed.main}, tweak: ${frameTiming.speed.tweak})`
1145
+ );
1146
+ } else if (action === "setPaused") {
1095
1147
  frameTiming.paused = msg.paused === true;
1096
1148
  log(`Frame timing paused=${frameTiming.paused}`);
1097
- } else if (action === 'reset') {
1149
+ } else if (action === "reset") {
1098
1150
  frameTiming.seeking = false;
1099
- log('Frame timing reset (seek complete)');
1151
+ log("Frame timing reset (seek complete)");
1100
1152
  }
1101
1153
 
1102
1154
  sendAck(uid);
1103
1155
  }
1104
1156
 
1105
- function handleFrameStep(msg: MainToWorkerMessage & { type: 'framestep' }): void {
1157
+ function handleFrameStep(msg: MainToWorkerMessage & { type: "framestep" }): void {
1106
1158
  const { direction, uid } = msg;
1107
1159
 
1108
1160
  log(`FrameStep request dir=${direction} paused=${frameTiming.paused}`);
@@ -1124,7 +1176,9 @@ function handleFrameStep(msg: MainToWorkerMessage & { type: 'framestep' }): void
1124
1176
  if (pipeline.historyCursor === null || pipeline.historyCursor === undefined) {
1125
1177
  alignHistoryCursorToLastOutput(pipeline);
1126
1178
  }
1127
- log(`FrameStep pipeline idx=${pipeline.idx} outQueue=${pipeline.outputQueue.length} history=${pipeline.frameHistory.length} cursor=${pipeline.historyCursor}`);
1179
+ log(
1180
+ `FrameStep pipeline idx=${pipeline.idx} outQueue=${pipeline.outputQueue.length} history=${pipeline.frameHistory.length} cursor=${pipeline.historyCursor}`
1181
+ );
1128
1182
 
1129
1183
  if (direction < 0) {
1130
1184
  const nextIndex = (pipeline.historyCursor ?? 0) - 1;
@@ -1142,16 +1196,21 @@ function handleFrameStep(msg: MainToWorkerMessage & { type: 'framestep' }): void
1142
1196
  return;
1143
1197
  }
1144
1198
  log(`FrameStep back: output ts=${entry.timestamp}`);
1145
- outputFrame(pipeline, { frame: clone, timestamp: entry.timestamp, decodedAt: performance.now() }, { skipHistory: true });
1199
+ outputFrame(
1200
+ pipeline,
1201
+ { frame: clone, timestamp: entry.timestamp, decodedAt: performance.now() },
1202
+ { skipHistory: true }
1203
+ );
1146
1204
  sendAck(uid);
1147
1205
  return;
1148
1206
  }
1149
1207
 
1150
1208
  if (direction > 0) {
1151
1209
  // If we're stepping forward within history (after stepping back), use history
1152
- if (pipeline.historyCursor !== null && pipeline.historyCursor < pipeline.frameHistory.length - 1) {
1153
- pipeline.historyCursor += 1;
1154
- const entry = pipeline.frameHistory[pipeline.historyCursor];
1210
+ const cursor = pipeline.historyCursor;
1211
+ if (cursor !== null && cursor !== undefined && cursor < pipeline.frameHistory.length - 1) {
1212
+ pipeline.historyCursor = cursor + 1;
1213
+ const entry = pipeline.frameHistory[pipeline.historyCursor!];
1155
1214
  const clone = entry ? cloneVideoFrame(entry.frame) : null;
1156
1215
  if (!clone) {
1157
1216
  log(`FrameStep forward: failed to clone frame`);
@@ -1159,15 +1218,19 @@ function handleFrameStep(msg: MainToWorkerMessage & { type: 'framestep' }): void
1159
1218
  return;
1160
1219
  }
1161
1220
  log(`FrameStep forward (history): output ts=${entry.timestamp}`);
1162
- outputFrame(pipeline, { frame: clone, timestamp: entry.timestamp, decodedAt: performance.now() }, { skipHistory: true });
1221
+ outputFrame(
1222
+ pipeline,
1223
+ { frame: clone, timestamp: entry.timestamp, decodedAt: performance.now() },
1224
+ { skipHistory: true }
1225
+ );
1163
1226
  sendAck(uid);
1164
1227
  return;
1165
1228
  }
1166
1229
 
1167
1230
  // Otherwise, output the next queued frame
1168
1231
  if (pipeline.outputQueue.length > 1) {
1169
- const wasSorted = pipeline.outputQueue.every((entry, i, arr) =>
1170
- i === 0 || arr[i - 1].timestamp <= entry.timestamp
1232
+ const wasSorted = pipeline.outputQueue.every(
1233
+ (entry, i, arr) => i === 0 || arr[i - 1].timestamp <= entry.timestamp
1171
1234
  );
1172
1235
  if (!wasSorted) {
1173
1236
  pipeline.outputQueue.sort((a, b) => a.timestamp - b.timestamp);
@@ -1175,7 +1238,7 @@ function handleFrameStep(msg: MainToWorkerMessage & { type: 'framestep' }): void
1175
1238
  }
1176
1239
 
1177
1240
  const lastTs = pipeline.stats.lastOutputTimestamp;
1178
- let idx = pipeline.outputQueue.findIndex(e => e.timestamp > lastTs);
1241
+ let idx = pipeline.outputQueue.findIndex((e) => e.timestamp > lastTs);
1179
1242
  if (idx === -1 && pipeline.outputQueue.length > 0) idx = 0;
1180
1243
  if (idx === -1) {
1181
1244
  log(`FrameStep forward: no queued frame available`);
@@ -1197,7 +1260,7 @@ function handleFrameStep(msg: MainToWorkerMessage & { type: 'framestep' }): void
1197
1260
  // Cleanup
1198
1261
  // ============================================================================
1199
1262
 
1200
- function handleClose(msg: MainToWorkerMessage & { type: 'close' }): void {
1263
+ function handleClose(msg: MainToWorkerMessage & { type: "close" }): void {
1201
1264
  const { idx, waitEmpty, uid } = msg;
1202
1265
  const pipeline = pipelines.get(idx);
1203
1266
 
@@ -1225,7 +1288,7 @@ function closePipeline(pipeline: PipelineState, uid: number): void {
1225
1288
  pipeline.closed = true;
1226
1289
 
1227
1290
  // Close decoder
1228
- if (pipeline.decoder && pipeline.decoder.state !== 'closed') {
1291
+ if (pipeline.decoder && pipeline.decoder.state !== "closed") {
1229
1292
  try {
1230
1293
  pipeline.decoder.close();
1231
1294
  } catch {
@@ -1263,10 +1326,10 @@ function closePipeline(pipeline: PipelineState, uid: number): void {
1263
1326
  }
1264
1327
 
1265
1328
  const message: WorkerToMainMessage = {
1266
- type: 'closed',
1329
+ type: "closed",
1267
1330
  idx: pipeline.idx,
1268
1331
  uid,
1269
- status: 'ok',
1332
+ status: "ok",
1270
1333
  };
1271
1334
  self.postMessage(message);
1272
1335
  }
@@ -1300,7 +1363,7 @@ function sendStats(): void {
1300
1363
  }
1301
1364
 
1302
1365
  const message: WorkerToMainMessage = {
1303
- type: 'stats',
1366
+ type: "stats",
1304
1367
  stats: {
1305
1368
  frameTiming: {
1306
1369
  in: frameTiming.in,
@@ -1334,20 +1397,20 @@ function createFrameTrackerStats(): FrameTrackerStats {
1334
1397
 
1335
1398
  function sendAck(uid: number, idx?: number): void {
1336
1399
  const message: WorkerToMainMessage = {
1337
- type: 'ack',
1400
+ type: "ack",
1338
1401
  uid,
1339
1402
  idx,
1340
- status: 'ok',
1403
+ status: "ok",
1341
1404
  };
1342
1405
  self.postMessage(message);
1343
1406
  }
1344
1407
 
1345
1408
  function sendError(uid: number, idx: number | undefined, error: string): void {
1346
1409
  const message: WorkerToMainMessage = {
1347
- type: 'ack',
1410
+ type: "ack",
1348
1411
  uid,
1349
1412
  idx,
1350
- status: 'error',
1413
+ status: "error",
1351
1414
  error,
1352
1415
  };
1353
1416
  self.postMessage(message);
@@ -1357,4 +1420,4 @@ function sendError(uid: number, idx: number | undefined, error: string): void {
1357
1420
  // Worker Initialization
1358
1421
  // ============================================================================
1359
1422
 
1360
- log('WebCodecs decoder worker initialized');
1423
+ log("WebCodecs decoder worker initialized");