@livepeer-frameworks/player-core 0.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (120)
  1. package/dist/cjs/index.js +19493 -0
  2. package/dist/cjs/index.js.map +1 -0
  3. package/dist/esm/index.js +19398 -0
  4. package/dist/esm/index.js.map +1 -0
  5. package/dist/player.css +2140 -0
  6. package/dist/types/core/ABRController.d.ts +164 -0
  7. package/dist/types/core/CodecUtils.d.ts +54 -0
  8. package/dist/types/core/Disposable.d.ts +61 -0
  9. package/dist/types/core/EventEmitter.d.ts +73 -0
  10. package/dist/types/core/GatewayClient.d.ts +144 -0
  11. package/dist/types/core/InteractionController.d.ts +121 -0
  12. package/dist/types/core/LiveDurationProxy.d.ts +102 -0
  13. package/dist/types/core/MetaTrackManager.d.ts +220 -0
  14. package/dist/types/core/MistReporter.d.ts +163 -0
  15. package/dist/types/core/MistSignaling.d.ts +148 -0
  16. package/dist/types/core/PlayerController.d.ts +665 -0
  17. package/dist/types/core/PlayerInterface.d.ts +230 -0
  18. package/dist/types/core/PlayerManager.d.ts +182 -0
  19. package/dist/types/core/PlayerRegistry.d.ts +27 -0
  20. package/dist/types/core/QualityMonitor.d.ts +184 -0
  21. package/dist/types/core/ScreenWakeLockManager.d.ts +70 -0
  22. package/dist/types/core/SeekingUtils.d.ts +142 -0
  23. package/dist/types/core/StreamStateClient.d.ts +108 -0
  24. package/dist/types/core/SubtitleManager.d.ts +111 -0
  25. package/dist/types/core/TelemetryReporter.d.ts +79 -0
  26. package/dist/types/core/TimeFormat.d.ts +97 -0
  27. package/dist/types/core/TimerManager.d.ts +83 -0
  28. package/dist/types/core/UrlUtils.d.ts +81 -0
  29. package/dist/types/core/detector.d.ts +149 -0
  30. package/dist/types/core/index.d.ts +49 -0
  31. package/dist/types/core/scorer.d.ts +167 -0
  32. package/dist/types/core/selector.d.ts +9 -0
  33. package/dist/types/index.d.ts +45 -0
  34. package/dist/types/lib/utils.d.ts +2 -0
  35. package/dist/types/players/DashJsPlayer.d.ts +102 -0
  36. package/dist/types/players/HlsJsPlayer.d.ts +70 -0
  37. package/dist/types/players/MewsWsPlayer/SourceBufferManager.d.ts +119 -0
  38. package/dist/types/players/MewsWsPlayer/WebSocketManager.d.ts +60 -0
  39. package/dist/types/players/MewsWsPlayer/index.d.ts +220 -0
  40. package/dist/types/players/MewsWsPlayer/types.d.ts +89 -0
  41. package/dist/types/players/MistPlayer.d.ts +25 -0
  42. package/dist/types/players/MistWebRTCPlayer/index.d.ts +133 -0
  43. package/dist/types/players/NativePlayer.d.ts +143 -0
  44. package/dist/types/players/VideoJsPlayer.d.ts +59 -0
  45. package/dist/types/players/WebCodecsPlayer/JitterBuffer.d.ts +118 -0
  46. package/dist/types/players/WebCodecsPlayer/LatencyProfiles.d.ts +64 -0
  47. package/dist/types/players/WebCodecsPlayer/RawChunkParser.d.ts +63 -0
  48. package/dist/types/players/WebCodecsPlayer/SyncController.d.ts +174 -0
  49. package/dist/types/players/WebCodecsPlayer/WebSocketController.d.ts +164 -0
  50. package/dist/types/players/WebCodecsPlayer/index.d.ts +149 -0
  51. package/dist/types/players/WebCodecsPlayer/polyfills/MediaStreamTrackGenerator.d.ts +105 -0
  52. package/dist/types/players/WebCodecsPlayer/types.d.ts +395 -0
  53. package/dist/types/players/WebCodecsPlayer/worker/decoder.worker.d.ts +13 -0
  54. package/dist/types/players/WebCodecsPlayer/worker/types.d.ts +197 -0
  55. package/dist/types/players/index.d.ts +14 -0
  56. package/dist/types/styles/index.d.ts +11 -0
  57. package/dist/types/types.d.ts +363 -0
  58. package/dist/types/vanilla/FrameWorksPlayer.d.ts +143 -0
  59. package/dist/types/vanilla/index.d.ts +19 -0
  60. package/dist/workers/decoder.worker.js +989 -0
  61. package/dist/workers/decoder.worker.js.map +1 -0
  62. package/package.json +80 -0
  63. package/src/core/ABRController.ts +550 -0
  64. package/src/core/CodecUtils.ts +257 -0
  65. package/src/core/Disposable.ts +120 -0
  66. package/src/core/EventEmitter.ts +113 -0
  67. package/src/core/GatewayClient.ts +439 -0
  68. package/src/core/InteractionController.ts +712 -0
  69. package/src/core/LiveDurationProxy.ts +270 -0
  70. package/src/core/MetaTrackManager.ts +753 -0
  71. package/src/core/MistReporter.ts +543 -0
  72. package/src/core/MistSignaling.ts +346 -0
  73. package/src/core/PlayerController.ts +2829 -0
  74. package/src/core/PlayerInterface.ts +432 -0
  75. package/src/core/PlayerManager.ts +900 -0
  76. package/src/core/PlayerRegistry.ts +149 -0
  77. package/src/core/QualityMonitor.ts +597 -0
  78. package/src/core/ScreenWakeLockManager.ts +163 -0
  79. package/src/core/SeekingUtils.ts +364 -0
  80. package/src/core/StreamStateClient.ts +457 -0
  81. package/src/core/SubtitleManager.ts +297 -0
  82. package/src/core/TelemetryReporter.ts +308 -0
  83. package/src/core/TimeFormat.ts +205 -0
  84. package/src/core/TimerManager.ts +209 -0
  85. package/src/core/UrlUtils.ts +179 -0
  86. package/src/core/detector.ts +382 -0
  87. package/src/core/index.ts +140 -0
  88. package/src/core/scorer.ts +553 -0
  89. package/src/core/selector.ts +16 -0
  90. package/src/global.d.ts +11 -0
  91. package/src/index.ts +75 -0
  92. package/src/lib/utils.ts +6 -0
  93. package/src/players/DashJsPlayer.ts +642 -0
  94. package/src/players/HlsJsPlayer.ts +483 -0
  95. package/src/players/MewsWsPlayer/SourceBufferManager.ts +572 -0
  96. package/src/players/MewsWsPlayer/WebSocketManager.ts +241 -0
  97. package/src/players/MewsWsPlayer/index.ts +1065 -0
  98. package/src/players/MewsWsPlayer/types.ts +106 -0
  99. package/src/players/MistPlayer.ts +188 -0
  100. package/src/players/MistWebRTCPlayer/index.ts +703 -0
  101. package/src/players/NativePlayer.ts +820 -0
  102. package/src/players/VideoJsPlayer.ts +643 -0
  103. package/src/players/WebCodecsPlayer/JitterBuffer.ts +299 -0
  104. package/src/players/WebCodecsPlayer/LatencyProfiles.ts +151 -0
  105. package/src/players/WebCodecsPlayer/RawChunkParser.ts +151 -0
  106. package/src/players/WebCodecsPlayer/SyncController.ts +456 -0
  107. package/src/players/WebCodecsPlayer/WebSocketController.ts +564 -0
  108. package/src/players/WebCodecsPlayer/index.ts +1650 -0
  109. package/src/players/WebCodecsPlayer/polyfills/MediaStreamTrackGenerator.ts +379 -0
  110. package/src/players/WebCodecsPlayer/types.ts +542 -0
  111. package/src/players/WebCodecsPlayer/worker/decoder.worker.ts +1360 -0
  112. package/src/players/WebCodecsPlayer/worker/types.ts +276 -0
  113. package/src/players/index.ts +22 -0
  114. package/src/styles/animations.css +21 -0
  115. package/src/styles/index.ts +52 -0
  116. package/src/styles/player.css +2126 -0
  117. package/src/styles/tailwind.css +1015 -0
  118. package/src/types.ts +421 -0
  119. package/src/vanilla/FrameWorksPlayer.ts +367 -0
  120. package/src/vanilla/index.ts +22 -0
@@ -0,0 +1,989 @@
1
+ (function () {
2
+ 'use strict';
3
+
4
+ /**
5
+ * WebCodecs Decoder Worker
6
+ *
7
+ * Handles VideoDecoder and AudioDecoder in a dedicated worker thread.
8
+ * This keeps decoding off the main thread for better performance.
9
+ *
10
+ * Features:
11
+ * - Video/Audio pipeline management per track
12
+ * - Frame scheduling based on timestamps and playback speed
13
+ * - Stats collection and reporting
14
+ * - Seek handling with queue flush
15
+ */
16
// ============================================================================
// Global State
// ============================================================================

/** Active decode pipelines, keyed by track index. */
const pipelines = new Map();
/** Debug switch: false | true | 'verbose'. */
let debugging = false;
/** Monotonic id attached to every message posted to the main thread. */
let uidCounter = 0;

/**
 * Frame timing state shared across all pipelines: wall-clock marks for the
 * last input/decoded/output frame plus the current playback-speed factors.
 */
const frameTiming = {
  in: 0,
  decoded: 0,
  out: 0,
  speed: {
    main: 1,
    tweak: 1,
    combined: 1,
  },
  seeking: false,
  paused: false,
};

// Per-track wall-clock reference points for frame scheduling. Audio and video
// may use different timestamp bases, so each track keeps its own baseTime.
const trackBaseTimes = new Map();

// Buffer warmup state: frames are queued but not output until a small buffer
// has accumulated, which prevents jitter on the very first frames.
let warmupComplete = false;
let warmupStartTime = null;
const WARMUP_BUFFER_MS = 100; // target buffered media before output starts
const WARMUP_TIMEOUT_MS = 300; // hard cap on warmup delay (reduced from 500ms for latency)
43
/**
 * Get the wall-clock baseTime for a track, initialising it from the first
 * frame seen so that `baseTime + frameTimeMs / speed` lands on "now".
 */
function getTrackBaseTime(idx, frameTimeMs, now) {
  const existing = trackBaseTimes.get(idx);
  if (existing !== undefined) {
    return existing;
  }
  const base = now - frameTimeMs / frameTiming.speed.combined;
  trackBaseTimes.set(idx, base);
  log(`Track ${idx} baseTime: ${trackBaseTimes.get(idx).toFixed(0)} (first frame @ ${frameTimeMs.toFixed(0)}ms)`);
  return base;
}
53
/**
 * Drop every track's wall-clock reference point (used during seek or reset)
 * so the next frame re-anchors scheduling.
 */
function resetBaseTime() {
  trackBaseTimes.clear();
  log(`Reset all track baseTimes`);
}
60
// Periodic stats reporting back to the main thread.
let statsTimer = null;
const STATS_INTERVAL_MS = 250;

// Per Chrome WebCodecs best practices: take corrective action (drop delta
// frames) once decodeQueueSize > 2, so the decoder never falls far behind.
const MAX_DECODER_QUEUE_SIZE = 2;
67
// ============================================================================
// Logging
// ============================================================================

/**
 * Post a log line to the main thread. No-op unless debugging is enabled.
 */
function log(msg, level = 'info') {
  if (!debugging) {
    return;
  }
  self.postMessage({
    type: 'log',
    msg,
    level,
    uid: uidCounter++,
  });
}
81
/**
 * Like log(), but only emits when debugging === 'verbose'.
 */
function logVerbose(msg) {
  if (debugging === 'verbose') {
    log(msg);
  }
}
86
// ============================================================================
// Message Handling
// ============================================================================

/**
 * Dispatch table for control messages from the main thread. The 'debugging'
 * toggle and the unknown-type warning are handled inline in onmessage.
 * (Function declarations are hoisted, so later handlers are safe here.)
 */
const messageHandlers = new Map([
  ['create', handleCreate],
  ['configure', handleConfigure],
  ['receive', handleReceive],
  ['setwritable', handleSetWritable],
  ['creategenerator', handleCreateGenerator],
  ['close', handleClose],
  ['frametiming', handleFrameTiming],
  ['seek', handleSeek],
]);

self.onmessage = (event) => {
  const msg = event.data;
  if (msg.type === 'debugging') {
    debugging = msg.value;
    log(`Debugging set to: ${msg.value}`);
    return;
  }
  const handler = messageHandlers.get(msg.type);
  if (handler) {
    handler(msg);
  } else {
    log(`Unknown message type: ${msg.type}`, 'warn');
  }
};
124
// ============================================================================
// Pipeline Management
// ============================================================================

/**
 * Register a fresh pipeline record for a track, lazily start the periodic
 * stats timer, and acknowledge the request.
 */
function handleCreate(msg) {
  const { idx, track, opts, uid } = msg;
  log(`Creating pipeline for track ${idx} (${track.type} ${track.codec})`);
  pipelines.set(idx, {
    idx,
    track,
    configured: false,
    closed: false,
    decoder: null,
    writable: null,
    writer: null,
    inputQueue: [],  // chunks received before the decoder is configured
    outputQueue: [], // decoded frames awaiting scheduled release
    stats: {
      framesIn: 0,
      framesDecoded: 0,
      framesOut: 0,
      framesDropped: 0,
      lastInputTimestamp: 0,
      lastOutputTimestamp: 0,
      decoderQueueSize: 0,
      // Debug info retained for decoder-error diagnosis.
      lastChunkType: '',
      lastChunkSize: 0,
      lastChunkBytes: '',
    },
    optimizeForLatency: opts.optimizeForLatency,
  });
  // First pipeline starts the shared stats reporting loop.
  if (!statsTimer) {
    statsTimer = setInterval(sendStats, STATS_INTERVAL_MS);
  }
  sendAck(uid, idx);
}
162
/**
 * Configure (or re-configure) the decoder for a track.
 *
 * Skips work when the decoder is already configured (happens when both the
 * WS INIT and the HTTP fallback deliver a header). On success, any chunks
 * that handleReceive() queued before configuration completed are flushed to
 * the decoder in arrival order — previously nothing ever drained inputQueue,
 * so those chunks were silently lost.
 */
function handleConfigure(msg) {
  const { idx, header, uid } = msg;
  log(`Received configure for track ${idx}, header length=${header?.byteLength ?? 'null'}`);
  const pipeline = pipelines.get(idx);
  if (!pipeline) {
    log(`Cannot configure: pipeline ${idx} not found`, 'error');
    sendError(uid, idx, 'Pipeline not found');
    return;
  }
  // Skip if already configured and decoder is ready - prevents duplicate
  // configuration when both WS INIT and HTTP fallback fire.
  if (pipeline.configured && pipeline.decoder && pipeline.decoder.state === 'configured') {
    log(`Track ${idx} already configured, skipping duplicate configure`);
    sendAck(uid, idx);
    return;
  }
  try {
    if (pipeline.track.type === 'video') {
      log(`Configuring video decoder for track ${idx}...`);
      configureVideoDecoder(pipeline, header);
    }
    else if (pipeline.track.type === 'audio') {
      log(`Configuring audio decoder for track ${idx}...`);
      configureAudioDecoder(pipeline, header);
    }
    pipeline.configured = true;
    log(`Successfully configured decoder for track ${idx}`);
    // BUGFIX: drain chunks that arrived before the decoder was ready.
    if (pipeline.inputQueue.length > 0) {
      const queued = pipeline.inputQueue;
      pipeline.inputQueue = [];
      log(`Flushing ${queued.length} queued chunk(s) for track ${idx}`);
      for (const queuedChunk of queued) {
        decodeChunk(pipeline, queuedChunk);
      }
    }
    sendAck(uid, idx);
  }
  catch (err) {
    log(`Failed to configure decoder for track ${idx}: ${err}`, 'error');
    sendError(uid, idx, String(err));
  }
}
196
/**
 * Build and configure a VideoDecoder for the pipeline's track.
 *
 * JPEG is special-cased: frames are decoded on demand via ImageDecoder, so no
 * persistent decoder is created. Any previously created decoder is reset and
 * closed first (reconfiguration pattern from the reference rawws.js).
 */
function configureVideoDecoder(pipeline, description) {
  const track = pipeline.track;
  // JPEG (Phase 2C): per-frame ImageDecoder, no persistent decoder.
  if (track.codec === 'JPEG' || track.codec.toLowerCase() === 'jpeg') {
    log('JPEG codec detected - will use ImageDecoder');
    pipeline.configured = true;
    return;
  }
  // Tear down any existing decoder before reconfiguring.
  const previous = pipeline.decoder;
  if (previous) {
    if (previous.state === 'configured') {
      try {
        previous.reset();
      }
      catch {
        // ignore reset errors
      }
    }
    if (previous.state !== 'closed') {
      try {
        previous.close();
      }
      catch {
        // ignore close errors
      }
    }
    pipeline.decoder = null;
  }
  // Mirror the rawws.js configOpts pattern: codec, latency flag, description,
  // plus a hardware-acceleration preference.
  const config = {
    codec: track.codecstring || track.codec.toLowerCase(),
    optimizeForLatency: pipeline.optimizeForLatency,
    hardwareAcceleration: 'prefer-hardware',
  };
  // The description comes straight from the WebSocket INIT data; H.264/HEVC
  // in AVCC framing cannot decode without it.
  if (description && description.byteLength > 0) {
    config.description = description;
    log(`Configuring with description (${description.byteLength} bytes)`);
  }
  else {
    log(`No description provided - decoder may fail on H.264/HEVC`, 'warn');
  }
  log(`Configuring video decoder: ${config.codec}`);
  const decoder = new VideoDecoder({
    output: (frame) => handleDecodedFrame(pipeline, frame),
    error: (err) => handleDecoderError(pipeline, err),
  });
  decoder.configure(config);
  pipeline.decoder = decoder;
  log(`Video decoder configured: ${config.codec}`);
}
249
/**
 * Map MistServer audio codec names to WebCodecs-compatible codec strings.
 * Per the W3C WebCodecs Codec Registry and the AAC codec registration:
 * https://www.w3.org/TR/webcodecs-codec-registry/
 * https://www.w3.org/TR/webcodecs-aac-codec-registration/
 *
 * @param {string} codec         MistServer codec name (e.g. 'AAC', 'opus').
 * @param {string} [codecstring] Optional full codec string (e.g. 'mp4a.40.2').
 * @returns {string} Codec string suitable for AudioDecoder.configure().
 */
function mapAudioCodec(codec, codecstring) {
  // A full MP4 audio codec string like "mp4a.40.2" is already usable as-is.
  if (codecstring && codecstring.startsWith('mp4a.')) {
    return codecstring;
  }
  const normalized = codec.toLowerCase();
  switch (normalized) {
    case 'aac':
    case 'mp4a':
      return 'mp4a.40.2'; // AAC-LC
    case 'mp3':
      return 'mp3';
    case 'opus':
      return 'opus';
    case 'flac':
      return 'flac';
    case 'ac3':
    case 'ac-3':
      return 'ac-3';
    case 'pcm_s16le':
    case 'pcm_s32le':
    case 'pcm_f32le':
      // BUGFIX: the WebCodecs registry defines LPCM strings as 'pcm-s16',
      // 'pcm-s32', 'pcm-f32' etc. with NO endianness suffix. The previous
      // mapping produced invalid strings like 'pcm-s16-le', which
      // AudioDecoder.isConfigSupported() rejects.
      return 'pcm-' + normalized.slice(4, -2);
    default:
      log(`Unknown audio codec: ${codec}, trying as-is`);
      return codecstring || codec;
  }
}
282
/**
 * Build and configure an AudioDecoder for the pipeline's track, defaulting to
 * 48kHz stereo when the track metadata omits rate/channel counts.
 */
function configureAudioDecoder(pipeline, description) {
  const track = pipeline.track;
  const codec = mapAudioCodec(track.codec, track.codecstring);
  log(`Audio codec mapping: ${track.codec} -> ${codec}`);
  const sampleRate = track.rate || 48000;
  const numberOfChannels = track.channels || 2;
  const config = { codec, sampleRate, numberOfChannels };
  // Codec-private data (e.g. AudioSpecificConfig for AAC), when supplied.
  if (description && description.byteLength > 0) {
    config.description = description;
  }
  const decoder = new AudioDecoder({
    output: (data) => handleDecodedFrame(pipeline, data),
    error: (err) => handleDecoderError(pipeline, err),
  });
  decoder.configure(config);
  pipeline.decoder = decoder;
  log(`Audio decoder configured: ${config.codec} ${config.sampleRate}Hz ${config.numberOfChannels}ch`);
}
302
/**
 * Decoder output callback: record stats, queue the decoded frame for
 * timestamp-scheduled release, and kick the output queue.
 */
function handleDecodedFrame(pipeline, frame) {
  if (pipeline.closed) {
    frame.close();
    return;
  }
  frameTiming.decoded = performance.now() * 1000; // microseconds
  pipeline.stats.framesDecoded++;
  const timestamp = frame.timestamp ?? 0;
  // Log the first few decoded frames per track for diagnostics.
  if (pipeline.stats.framesDecoded <= 3) {
    const frameType = pipeline.track.type;
    const extraInfo = frameType === 'audio'
      ? ` (${frame.numberOfFrames} samples, ${frame.sampleRate}Hz)`
      : ` (${frame.displayWidth}x${frame.displayHeight})`;
    log(`Decoded ${frameType} frame ${pipeline.stats.framesDecoded} for track ${pipeline.idx}: ts=${timestamp}μs${extraInfo}`);
  }
  // Queue for scheduled release, then try to flush immediately.
  pipeline.outputQueue.push({
    frame,
    timestamp,
    decodedAt: performance.now(),
  });
  processOutputQueue(pipeline);
}
328
/**
 * Decoder error callback: log diagnostics (including the last chunk fed to
 * the decoder), reset the pipeline, and surface the error to the main thread.
 */
function handleDecoderError(pipeline, err) {
  log(`Decoder error on track ${pipeline.idx}: ${err.name}: ${err.message}`, 'error');
  log(` Last chunk info: type=${pipeline.stats.lastChunkType}, size=${pipeline.stats.lastChunkSize}, first bytes=[${pipeline.stats.lastChunkBytes}]`, 'error');
  // Per rawws.js: clear queues and reset/recreate the decoder after an error.
  resetPipelineAfterError(pipeline);
  self.postMessage({
    type: 'sendevent',
    kind: 'error',
    message: `Decoder error: ${err.message}`,
    idx: pipeline.idx,
    uid: uidCounter++,
  });
}
343
/**
 * Reset a pipeline after a decoder error: drop all queued input/output,
 * require reconfiguration, and reset (or discard) the decoder instance.
 * Per rawws.js: a closed decoder cannot be reset and must be recreated.
 */
function resetPipelineAfterError(pipeline) {
  // Discard undecoded input and close any decoded frames awaiting output.
  pipeline.inputQueue = [];
  pipeline.outputQueue.forEach((entry) => entry.frame.close());
  pipeline.outputQueue = [];
  // Wait for the next configure/keyframe before decoding again.
  pipeline.configured = false;
  const decoder = pipeline.decoder;
  if (!decoder) {
    return;
  }
  if (decoder.state === 'closed') {
    log(`Decoder closed for track ${pipeline.idx}, will recreate on next configure`);
    pipeline.decoder = null;
    return;
  }
  try {
    decoder.reset();
    log(`Reset decoder for track ${pipeline.idx}`);
  }
  catch (e) {
    log(`Failed to reset decoder for track ${pipeline.idx}: ${e}`, 'warn');
    pipeline.decoder = null;
  }
}
373
// ============================================================================
// Frame Input/Output
// ============================================================================

/**
 * Entry point for an encoded chunk arriving from the main thread.
 *
 * Chunks for unconfigured pipelines are queued for later. Under decoder
 * pressure (Chrome guidance: decodeQueueSize > 2) delta frames are dropped
 * while keyframes are always accepted so playback can resume.
 *
 * BUGFIX: JPEG pipelines are configured with no persistent decoder (each
 * frame goes through ImageDecoder), so the old `!pipeline.decoder` guard
 * queued every JPEG chunk forever. Configured JPEG tracks now pass through.
 */
function handleReceive(msg) {
  const { idx, chunk } = msg;
  const pipeline = pipelines.get(idx);
  if (!pipeline) {
    logVerbose(`Received chunk for unknown pipeline ${idx}`);
    return;
  }
  const codec = pipeline.track.codec;
  const isJpeg = codec === 'JPEG' || codec.toLowerCase() === 'jpeg';
  if (!pipeline.configured || (!pipeline.decoder && !isJpeg)) {
    // Not ready yet - queue for later (drained by handleConfigure).
    pipeline.inputQueue.push(chunk);
    logVerbose(`Queued chunk for track ${idx} (configured=${pipeline.configured}, decoder=${!!pipeline.decoder})`);
    return;
  }
  // Log only first 3 chunks per track to confirm receiving
  if (pipeline.stats.framesIn < 3) {
    log(`Received chunk ${pipeline.stats.framesIn} for track ${idx}: type=${chunk.type}, ts=${chunk.timestamp / 1000}ms, size=${chunk.data.byteLength}`);
  }
  // Decoder pressure handling (Phase 2B).
  if (shouldDropFramesDueToDecoderPressure(pipeline)) {
    if (chunk.type === 'key') {
      // Always accept keyframes - they're needed to resume
      decodeChunk(pipeline, chunk);
    }
    else {
      // Drop delta frames when decoder is overwhelmed
      pipeline.stats.framesDropped++;
      logVerbose(`Dropped delta frame @ ${chunk.timestamp / 1000}ms (decoder queue: ${pipeline.decoder.decodeQueueSize})`);
    }
    return;
  }
  decodeChunk(pipeline, chunk);
}
408
/**
 * Report whether the decoder queue has grown past the Chrome-recommended
 * threshold (decodeQueueSize > 2), signalling that delta frames should be
 * dropped. Also refreshes stats.decoderQueueSize as a side effect.
 */
function shouldDropFramesDueToDecoderPressure(pipeline) {
  const decoder = pipeline.decoder;
  if (!decoder) {
    return false;
  }
  const queueSize = decoder.decodeQueueSize;
  pipeline.stats.decoderQueueSize = queueSize;
  return queueSize > MAX_DECODER_QUEUE_SIZE;
}
420
/**
 * Feed one encoded chunk into the pipeline's decoder.
 *
 * JPEG tracks bypass VideoDecoder entirely (see decodeJpegFrame). Video
 * chunks keep their key/delta type; audio chunks are always submitted as
 * 'key' because audio codecs have no inter-frame dependencies (per MistServer
 * rawws.js). chunk.timestamp is already in microseconds - the main thread
 * converts it via getPresentationTimestamp.
 */
function decodeChunk(pipeline, chunk) {
  if (pipeline.closed) {
    return;
  }
  frameTiming.in = performance.now() * 1000;
  pipeline.stats.framesIn++;
  pipeline.stats.lastInputTimestamp = chunk.timestamp;
  try {
    // JPEG (Phase 2C): per-frame ImageDecoder path.
    const codec = pipeline.track.codec;
    if (codec === 'JPEG' || codec.toLowerCase() === 'jpeg') {
      decodeJpegFrame(pipeline, chunk);
      return;
    }
    if (!pipeline.decoder) {
      return;
    }
    const timestampUs = chunk.timestamp;
    // Stash debug info so a later decoder error can report what it last saw.
    // The first bytes distinguish Annex B (00 00 00 01) from AVCC (length).
    pipeline.stats.lastChunkType = chunk.type;
    pipeline.stats.lastChunkSize = chunk.data.byteLength;
    pipeline.stats.lastChunkBytes = Array.from(chunk.data.slice(0, 8))
      .map(b => '0x' + b.toString(16).padStart(2, '0'))
      .join(' ');
    if (pipeline.track.type === 'video') {
      // AVCC mode: frames pass through unchanged (SPS/PPS live in the
      // decoder's description).
      const decoder = pipeline.decoder;
      const verbose = pipeline.stats.framesIn <= 3;
      if (verbose) {
        const firstBytes = Array.from(chunk.data.slice(0, 16)).map(b => '0x' + b.toString(16).padStart(2, '0')).join(' ');
        log(`Calling decode() for track ${pipeline.idx}: state=${decoder.state}, queueSize=${decoder.decodeQueueSize}, chunk type=${chunk.type}, ts=${timestampUs}μs`);
        log(` First 16 bytes: ${firstBytes}`);
      }
      decoder.decode(new EncodedVideoChunk({
        type: chunk.type,
        timestamp: timestampUs,
        data: chunk.data,
      }));
      if (verbose) {
        log(`After decode() for track ${pipeline.idx}: queueSize=${decoder.decodeQueueSize}`);
      }
    }
    else if (pipeline.track.type === 'audio') {
      // Audio is always 'key' - no inter-frame dependencies (rawws.js).
      pipeline.decoder.decode(new EncodedAudioChunk({
        type: 'key',
        timestamp: timestampUs,
        data: chunk.data,
      }));
    }
    // The error callback may have nullified the decoder synchronously.
    if (pipeline.decoder) {
      pipeline.stats.decoderQueueSize = pipeline.decoder.decodeQueueSize;
    }
    logVerbose(`Decoded chunk ${chunk.type} @ ${chunk.timestamp / 1000}ms for track ${pipeline.idx}`);
  }
  catch (err) {
    log(`Decode error on track ${pipeline.idx}: ${err}`, 'error');
  }
}
482
/**
 * Decode a single JPEG frame via the ImageDecoder API (Phase 2C), wrap the
 * result in a VideoFrame carrying the original timestamp, and hand it to the
 * normal decoded-frame path.
 */
async function decodeJpegFrame(pipeline, chunk) {
  if (pipeline.closed) {
    return;
  }
  if (typeof ImageDecoder === 'undefined') {
    log('ImageDecoder not available - JPEG streams not supported', 'error');
    return;
  }
  try {
    // One short-lived decoder per frame - a single decode() call, per
    // MistServer rawws.js.
    const decoder = new ImageDecoder({
      type: 'image/jpeg',
      data: chunk.data,
    });
    const { image } = await decoder.decode({ frameIndex: 0 });
    const frame = new VideoFrame(image, {
      timestamp: chunk.timestamp, // preserve the stream timestamp
    });
    // Release ImageDecoder resources before handing the frame on.
    image.close();
    decoder.close();
    handleDecodedFrame(pipeline, frame);
    logVerbose(`Decoded JPEG frame @ ${chunk.timestamp / 1000}ms for track ${pipeline.idx}`);
  }
  catch (err) {
    log(`JPEG decode error on track ${pipeline.idx}: ${err}`, 'error');
  }
}
517
/**
 * Release decoded frames from a pipeline's output queue.
 *
 * The queue is kept timestamp-ordered (the server may deliver frames out of
 * order), held back during the initial warmup window so a small buffer can
 * accumulate, and then drained frame by frame according to
 * shouldOutputFrame()'s wall-clock schedule. Re-arms itself via setTimeout
 * whenever the next frame is not yet due.
 */
function processOutputQueue(pipeline) {
  // A closed pipeline (player destroyed) just releases everything.
  if (pipeline.closed) {
    while (pipeline.outputQueue.length > 0) {
      pipeline.outputQueue.shift().frame.close();
    }
    return;
  }
  const queue = pipeline.outputQueue;
  if (!pipeline.writer || queue.length === 0) {
    if (queue.length > 0 && !pipeline.writer) {
      log(`Cannot output: no writer for track ${pipeline.idx} (queue has ${queue.length} frames)`, 'warn');
    }
    return;
  }
  const now = performance.now();
  // Sort only when actually needed - the usual case is already ordered.
  if (queue.length > 1) {
    const outOfOrder = queue.some((entry, i) => i > 0 && queue[i - 1].timestamp > entry.timestamp);
    if (outOfOrder) {
      queue.sort((a, b) => a.timestamp - b.timestamp);
      log(`Sorted ${queue.length} frames in output queue for track ${pipeline.idx}`);
    }
  }
  // Warmup: hold output until ~WARMUP_BUFFER_MS of media is queued (or the
  // timeout fires) to avoid jitter on the first frames.
  if (!warmupComplete) {
    if (warmupStartTime === null) {
      warmupStartTime = now;
      log(`Starting buffer warmup (target: ${WARMUP_BUFFER_MS}ms)`);
    }
    const elapsed = now - warmupStartTime;
    if (queue.length >= 2) {
      // Buffered duration = span of queued timestamps (µs -> ms).
      const oldest = queue[0].timestamp / 1000;
      const newest = queue[queue.length - 1].timestamp / 1000;
      const bufferMs = newest - oldest;
      if (bufferMs >= WARMUP_BUFFER_MS || elapsed >= WARMUP_TIMEOUT_MS) {
        warmupComplete = true;
        log(`Buffer warmup complete: ${bufferMs.toFixed(0)}ms buffer, ${queue.length} frames queued (track ${pipeline.idx})`);
      }
      else {
        // Not enough buffer yet - poll again shortly.
        setTimeout(() => processOutputQueue(pipeline), 10);
        return;
      }
    }
    else if (elapsed >= WARMUP_TIMEOUT_MS) {
      warmupComplete = true;
      log(`Buffer warmup timeout - starting with ${queue.length} frame(s) (track ${pipeline.idx})`);
    }
    else {
      // Too few frames to measure buffer - poll again shortly.
      setTimeout(() => processOutputQueue(pipeline), 10);
      return;
    }
  }
  // Emit every frame whose scheduled time has arrived.
  while (queue.length > 0) {
    const entry = queue[0];
    const schedule = shouldOutputFrame(pipeline, entry, now);
    if (!schedule.shouldOutput) {
      if (schedule.checkDelayMs > 0) {
        setTimeout(() => processOutputQueue(pipeline), schedule.checkDelayMs);
      }
      break;
    }
    queue.shift();
    outputFrame(pipeline, entry);
  }
}
596
/**
 * Decide whether a decoded frame is due for release.
 *
 * While seeking, frames flow immediately and the track's baseTime anchor is
 * dropped so it re-initialises afterwards. Otherwise the frame's wall-clock
 * target is baseTime + frameTimeMs / speed (per rawws.js line 872), and a
 * frame is released once it is within 2ms of (or past) that target.
 *
 * @returns {{shouldOutput: boolean, earliness: number, checkDelayMs: number}}
 */
function shouldOutputFrame(pipeline, entry, now) {
  const trackIdx = pipeline.idx;
  if (frameTiming.seeking) {
    // Seek in progress: re-anchor and emit right away.
    trackBaseTimes.delete(trackIdx);
    return { shouldOutput: true, earliness: 0, checkDelayMs: 0 };
  }
  const frameTimeMs = entry.timestamp / 1000; // µs -> ms
  const speed = frameTiming.speed.combined;
  // Audio and video use separate timestamp bases, hence per-track anchors.
  const baseTime = getTrackBaseTime(trackIdx, frameTimeMs, now);
  const targetTime = baseTime + frameTimeMs / speed;
  const delay = targetTime - now; // >0 means early, <=0 means on time/late
  logVerbose(`Frame timing: track=${trackIdx} frame=${frameTimeMs.toFixed(0)}ms, target=${targetTime.toFixed(0)}, now=${now.toFixed(0)}, delay=${delay.toFixed(1)}ms`);
  if (delay <= 2) {
    // Within 2ms of target (per rawws.js line 889) - release now.
    return { shouldOutput: true, earliness: -delay, checkDelayMs: 0 };
  }
  // Not due yet - tell the caller when to check again.
  return { shouldOutput: false, earliness: -delay, checkDelayMs: Math.max(1, Math.floor(delay)) };
}
621
/**
 * Hand a decoded frame to the track's writable stream. Ownership transfers
 * to the stream on success; on failure the frame is closed here. 'Stream
 * closed' rejections are expected during player teardown and silently mark
 * the pipeline closed.
 */
function outputFrame(pipeline, entry) {
  if (!pipeline.writer || pipeline.closed) {
    entry.frame.close();
    return;
  }
  frameTiming.out = performance.now() * 1000;
  pipeline.stats.framesOut++;
  pipeline.stats.lastOutputTimestamp = entry.timestamp;
  // Log the first few output frames per track for diagnostics.
  if (pipeline.stats.framesOut <= 3) {
    log(`Output frame ${pipeline.stats.framesOut} for track ${pipeline.idx}: ts=${entry.timestamp}μs`);
  }
  const onWritten = () => {
    // Successful write -> notify the main thread of playback progress.
    self.postMessage({
      type: 'sendevent',
      kind: 'timeupdate',
      idx: pipeline.idx,
      uid: uidCounter++,
    });
  };
  const onWriteFailed = (err) => {
    const errStr = String(err);
    if (errStr.includes('Stream closed') || errStr.includes('InvalidStateError')) {
      // Expected while the player tears down - mark and move on.
      pipeline.closed = true;
    }
    else {
      log(`Failed to write frame: ${err}`, 'error');
    }
    // The stream may not have consumed the frame - best-effort close.
    try {
      entry.frame.close();
    }
    catch {
      // already detached/closed
    }
  };
  // write() returns a Promise; always attach a rejection handler.
  pipeline.writer.write(entry.frame).then(onWritten).catch(onWriteFailed);
}
664
+ // ============================================================================
665
+ // Track Generator / Writable Stream
666
+ // ============================================================================
667
// Attach a main-thread-provided WritableStream to an existing pipeline,
// flush any frames queued while waiting for the sink, and acknowledge
// with an 'addtrack' message.
function handleSetWritable(msg) {
    const { idx, writable, uid } = msg;
    const pipeline = pipelines.get(idx);
    if (!pipeline) {
        log(`Cannot set writable: pipeline ${idx} not found`, 'error');
        sendError(uid, idx, 'Pipeline not found');
        return;
    }
    pipeline.writable = writable;
    pipeline.writer = writable.getWriter();
    log(`Writable stream set for track ${idx}`);
    // Drain anything that was buffered before the sink existed.
    processOutputQueue(pipeline);
    // Tell the main thread the track is ready.
    self.postMessage({
        type: 'addtrack',
        idx,
        uid,
        status: 'ok',
    });
}
689
/**
 * Create the output sink for a pipeline, picking the mechanism the current
 * browser supports:
 *  - Safari video: VideoTrackGenerator in the worker; the MediaStreamTrack
 *    is transferred back to the main thread.
 *  - Safari audio: no worker-side generator exists, so frames are relayed
 *    to the main thread via postMessage and acknowledged per-frame.
 *  - Chrome/Edge: MediaStreamTrackGenerator in the worker.
 * Responds with an 'addtrack' message (or an error ack) in every case.
 *
 * Fix vs. previous version: the Safari-audio relay added a 'message'
 * listener per written frame but the 5s timeout path rejected WITHOUT
 * removing that listener, leaking one listener per timed-out frame. The
 * timeout now removes the handler before rejecting.
 */
function handleCreateGenerator(msg) {
    const { idx, uid } = msg;
    const pipeline = pipelines.get(idx);
    if (!pipeline) {
        log(`Cannot create generator: pipeline ${idx} not found`, 'error');
        sendError(uid, idx, 'Pipeline not found');
        return;
    }
    // Safari: VideoTrackGenerator is available in worker (not MediaStreamTrackGenerator)
    // Reference: webcodecsworker.js line 852-863
    // @ts-ignore - VideoTrackGenerator may not be in types
    if (typeof VideoTrackGenerator !== 'undefined') {
        if (pipeline.track.type === 'video') {
            // Safari video: use VideoTrackGenerator
            // @ts-ignore
            const generator = new VideoTrackGenerator();
            pipeline.writable = generator.writable;
            pipeline.writer = generator.writable.getWriter();
            // Send track back to main thread
            const message = {
                type: 'addtrack',
                idx,
                track: generator.track,
                uid,
                status: 'ok',
            };
            // @ts-ignore - transferring MediaStreamTrack
            self.postMessage(message, [generator.track]);
            log(`Created VideoTrackGenerator for track ${idx} (Safari video)`);
        }
        else if (pipeline.track.type === 'audio') {
            // Safari audio: relay frames to main thread via postMessage
            // Reference: webcodecsworker.js line 773-800
            // Main thread creates the audio generator, we just send frames
            pipeline.writer = {
                write: (frame) => {
                    return new Promise((resolve, reject) => {
                        const frameUid = uidCounter++;
                        // Set up listener for response. On timeout the
                        // listener MUST be removed, otherwise each timed-out
                        // frame leaks a permanent 'message' listener.
                        const timeoutId = setTimeout(() => {
                            self.removeEventListener('message', handler);
                            reject(new Error('writeframe timeout'));
                        }, 5000);
                        const handler = (e) => {
                            const msg = e.data;
                            if (msg.type === 'writeframe' && msg.idx === idx && msg.uid === frameUid) {
                                clearTimeout(timeoutId);
                                self.removeEventListener('message', handler);
                                if (msg.status === 'ok') {
                                    resolve();
                                }
                                else {
                                    reject(new Error(msg.error || 'writeframe failed'));
                                }
                            }
                        };
                        self.addEventListener('message', handler);
                        // Send frame to main thread (ownership transferred)
                        self.postMessage({
                            type: 'writeframe',
                            idx,
                            frame,
                            uid: frameUid,
                        }, [frame]);
                    });
                },
                close: () => Promise.resolve(),
            };
            // Notify main thread to set up audio generator
            const message = {
                type: 'addtrack',
                idx,
                uid,
                status: 'ok',
            };
            self.postMessage(message);
            log(`Set up frame relay for track ${idx} (Safari audio)`);
        }
    }
    else if (typeof MediaStreamTrackGenerator !== 'undefined') {
        // Chrome/Edge: use MediaStreamTrackGenerator in worker
        // @ts-ignore
        const generator = new MediaStreamTrackGenerator({ kind: pipeline.track.type });
        pipeline.writable = generator.writable;
        pipeline.writer = generator.writable.getWriter();
        // Send track back to main thread
        const message = {
            type: 'addtrack',
            idx,
            track: generator,
            uid,
            status: 'ok',
        };
        // @ts-ignore - transferring MediaStreamTrack
        self.postMessage(message, [generator]);
        log(`Created MediaStreamTrackGenerator for track ${idx}`);
    }
    else {
        log('Neither VideoTrackGenerator nor MediaStreamTrackGenerator available in worker', 'warn');
        sendError(uid, idx, 'No track generator available');
    }
}
790
+ // ============================================================================
791
+ // Seeking & Timing
792
+ // ============================================================================
793
// Seek request: mark the timing state as seeking, reset the wall-clock
// mapping and warmup state, drop all queued frames, then acknowledge.
function handleSeek(msg) {
    const { seekTime, uid } = msg;
    log(`Seek to ${seekTime}ms`);
    frameTiming.seeking = true;
    // The new playback position invalidates the old base-time reference.
    resetBaseTime();
    // Buffer must be rebuilt from scratch after a seek.
    warmupComplete = false;
    warmupStartTime = null;
    // Discard queued/decoded frames on every track.
    pipelines.forEach((pipeline) => {
        flushPipeline(pipeline);
    });
    sendAck(uid);
}
807
// Discard all pending work in a pipeline: undecoded input, decoded output
// frames (closed first to release their resources), and the decoder's
// internal state via reset().
function flushPipeline(pipeline) {
    // Drop undecoded input.
    pipeline.inputQueue = [];
    // Decoded frames hold resources — close each before discarding.
    pipeline.outputQueue.forEach((entry) => {
        entry.frame.close();
    });
    pipeline.outputQueue = [];
    // Return the decoder to a clean state if it is still usable.
    const { decoder } = pipeline;
    if (decoder && decoder.state !== 'closed') {
        try {
            decoder.reset();
        }
        catch {
            // Ignore reset errors
        }
    }
}
825
// Adjust frame-timing state: 'setSpeed' updates the main/tweak speed
// factors and recomputes their product; 'reset' clears the seeking flag.
// Always acknowledges the request.
function handleFrameTiming(msg) {
    const { action, speed, tweak, uid } = msg;
    switch (action) {
        case 'setSpeed': {
            if (speed !== undefined) {
                frameTiming.speed.main = speed;
            }
            if (tweak !== undefined) {
                frameTiming.speed.tweak = tweak;
            }
            // Effective playback rate is the product of both factors.
            frameTiming.speed.combined = frameTiming.speed.main * frameTiming.speed.tweak;
            log(`Speed set to ${frameTiming.speed.combined} (main: ${frameTiming.speed.main}, tweak: ${frameTiming.speed.tweak})`);
            break;
        }
        case 'reset': {
            frameTiming.seeking = false;
            log('Frame timing reset (seek complete)');
            break;
        }
    }
    sendAck(uid);
}
841
+ // ============================================================================
842
+ // Cleanup
843
+ // ============================================================================
844
// Close request for one pipeline. With waitEmpty set, polls every 10ms
// until the output queue drains before closing; otherwise closes at once.
// An unknown idx is acknowledged without error (already gone).
function handleClose(msg) {
    const { idx, waitEmpty, uid } = msg;
    const pipeline = pipelines.get(idx);
    if (!pipeline) {
        // Nothing to tear down — still acknowledge the request.
        sendAck(uid, idx);
        return;
    }
    if (!waitEmpty || pipeline.outputQueue.length === 0) {
        closePipeline(pipeline, uid);
        return;
    }
    // Let remaining frames play out, then close.
    const pollDrain = () => {
        if (pipeline.outputQueue.length > 0) {
            setTimeout(pollDrain, 10);
        }
        else {
            closePipeline(pipeline, uid);
        }
    };
    pollDrain();
}
867
/**
 * Fully tear down a pipeline: close the decoder and writer, release all
 * queued frames, drop per-track timing state, unregister the pipeline,
 * stop the stats timer when no pipelines remain, and notify the main
 * thread with a 'closed' message.
 *
 * Fix vs. previous version: writer.close() returns a promise; if the
 * underlying stream is errored the promise rejects asynchronously, which
 * the surrounding try/catch cannot intercept — producing an unhandled
 * promise rejection. A no-op rejection handler is now attached.
 */
function closePipeline(pipeline, uid) {
    pipeline.closed = true;
    // Close decoder
    if (pipeline.decoder && pipeline.decoder.state !== 'closed') {
        try {
            pipeline.decoder.close();
        }
        catch {
            // Ignore close errors
        }
    }
    // Close writer (guard both sync throws and async rejection)
    if (pipeline.writer) {
        try {
            const closing = pipeline.writer.close();
            if (closing && typeof closing.catch === 'function') {
                closing.catch(() => {
                    // Ignore async close errors (stream may already be errored)
                });
            }
        }
        catch {
            // Ignore synchronous close errors
        }
    }
    // Release any decoded frames still queued for output
    for (const entry of pipeline.outputQueue) {
        entry.frame.close();
    }
    pipeline.outputQueue = [];
    pipeline.inputQueue = [];
    // Clean up per-track timing
    trackBaseTimes.delete(pipeline.idx);
    pipelines.delete(pipeline.idx);
    log(`Closed pipeline ${pipeline.idx}`);
    // Stop the periodic stats reporter once the last pipeline is gone
    if (pipelines.size === 0 && statsTimer) {
        clearInterval(statsTimer);
        statsTimer = null;
    }
    const message = {
        type: 'closed',
        idx: pipeline.idx,
        uid,
        status: 'ok',
    };
    self.postMessage(message);
}
910
+ // ============================================================================
911
+ // Stats Reporting
912
+ // ============================================================================
913
// Snapshot per-pipeline counters and global frame-timing state and post
// the whole thing to the main thread as a 'stats' message.
function sendStats() {
    const pipelineStats = {};
    pipelines.forEach((pipeline, idx) => {
        const { stats } = pipeline;
        pipelineStats[idx] = {
            early: null, // Would need frame timing to calculate
            frameDuration: null,
            frames: {
                in: stats.framesIn,
                decoded: stats.framesDecoded,
                out: stats.framesOut,
            },
            queues: {
                in: pipeline.inputQueue.length,
                decoder: stats.decoderQueueSize,
                out: pipeline.outputQueue.length,
            },
            timing: {
                decoder: createFrameTrackerStats(),
                writable: createFrameTrackerStats(),
            },
        };
    });
    self.postMessage({
        type: 'stats',
        stats: {
            frameTiming: {
                in: frameTiming.in,
                decoded: frameTiming.decoded,
                out: frameTiming.out,
                speed: { ...frameTiming.speed },
                seeking: frameTiming.seeking,
                paused: frameTiming.paused,
            },
            pipelines: pipelineStats,
        },
        uid: uidCounter++,
    });
}
952
// Build an empty frame-tracker stats record: every field present but
// explicitly undefined, ready to be filled in by timing code.
function createFrameTrackerStats() {
    const fields = ['lastIn', 'lastOut', 'delay', 'delta', 'shift'];
    const tracker = {};
    for (const field of fields) {
        tracker[field] = undefined;
    }
    return tracker;
}
961
+ // ============================================================================
962
+ // Response Helpers
963
+ // ============================================================================
964
// Post a success acknowledgement for the request identified by uid
// (idx is optional and may be undefined for global requests).
function sendAck(uid, idx) {
    self.postMessage({
        type: 'ack',
        uid,
        idx,
        status: 'ok',
    });
}
973
// Post an error acknowledgement for the request identified by uid,
// carrying a human-readable error string.
function sendError(uid, idx, error) {
    self.postMessage({
        type: 'ack',
        uid,
        idx,
        status: 'error',
        error,
    });
}
983
+ // ============================================================================
984
+ // Worker Initialization
985
+ // ============================================================================
986
+ log('WebCodecs decoder worker initialized');
987
+
988
+ })();
989
+ //# sourceMappingURL=decoder.worker.js.map