avbridge 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (103) hide show
  1. package/CHANGELOG.md +120 -0
  2. package/LICENSE +21 -0
  3. package/README.md +415 -0
  4. package/dist/avi-M5B4SHRM.cjs +164 -0
  5. package/dist/avi-M5B4SHRM.cjs.map +1 -0
  6. package/dist/avi-POCGZ4JX.js +162 -0
  7. package/dist/avi-POCGZ4JX.js.map +1 -0
  8. package/dist/chunk-5ISVAODK.js +80 -0
  9. package/dist/chunk-5ISVAODK.js.map +1 -0
  10. package/dist/chunk-F7YS2XOA.cjs +2966 -0
  11. package/dist/chunk-F7YS2XOA.cjs.map +1 -0
  12. package/dist/chunk-FKM7QBZU.js +2957 -0
  13. package/dist/chunk-FKM7QBZU.js.map +1 -0
  14. package/dist/chunk-J5MCMN3S.js +27 -0
  15. package/dist/chunk-J5MCMN3S.js.map +1 -0
  16. package/dist/chunk-L4NPOJ36.cjs +180 -0
  17. package/dist/chunk-L4NPOJ36.cjs.map +1 -0
  18. package/dist/chunk-NZU7W256.cjs +29 -0
  19. package/dist/chunk-NZU7W256.cjs.map +1 -0
  20. package/dist/chunk-PQTZS7OA.js +147 -0
  21. package/dist/chunk-PQTZS7OA.js.map +1 -0
  22. package/dist/chunk-WD2ZNQA7.js +177 -0
  23. package/dist/chunk-WD2ZNQA7.js.map +1 -0
  24. package/dist/chunk-Y5FYF5KG.cjs +153 -0
  25. package/dist/chunk-Y5FYF5KG.cjs.map +1 -0
  26. package/dist/chunk-Z2FJ5TJC.cjs +82 -0
  27. package/dist/chunk-Z2FJ5TJC.cjs.map +1 -0
  28. package/dist/element.cjs +433 -0
  29. package/dist/element.cjs.map +1 -0
  30. package/dist/element.d.cts +158 -0
  31. package/dist/element.d.ts +158 -0
  32. package/dist/element.js +431 -0
  33. package/dist/element.js.map +1 -0
  34. package/dist/index.cjs +576 -0
  35. package/dist/index.cjs.map +1 -0
  36. package/dist/index.d.cts +80 -0
  37. package/dist/index.d.ts +80 -0
  38. package/dist/index.js +554 -0
  39. package/dist/index.js.map +1 -0
  40. package/dist/libav-http-reader-FPYDBMYK.cjs +16 -0
  41. package/dist/libav-http-reader-FPYDBMYK.cjs.map +1 -0
  42. package/dist/libav-http-reader-NQJVY273.js +3 -0
  43. package/dist/libav-http-reader-NQJVY273.js.map +1 -0
  44. package/dist/libav-import-2JURFHEW.js +8 -0
  45. package/dist/libav-import-2JURFHEW.js.map +1 -0
  46. package/dist/libav-import-GST2AMPL.cjs +30 -0
  47. package/dist/libav-import-GST2AMPL.cjs.map +1 -0
  48. package/dist/libav-loader-KA2MAWLM.js +3 -0
  49. package/dist/libav-loader-KA2MAWLM.js.map +1 -0
  50. package/dist/libav-loader-ZHOERPHW.cjs +12 -0
  51. package/dist/libav-loader-ZHOERPHW.cjs.map +1 -0
  52. package/dist/player-BBwbCkdL.d.cts +365 -0
  53. package/dist/player-BBwbCkdL.d.ts +365 -0
  54. package/dist/source-SC6ZEQYR.cjs +28 -0
  55. package/dist/source-SC6ZEQYR.cjs.map +1 -0
  56. package/dist/source-ZFS4H7J3.js +3 -0
  57. package/dist/source-ZFS4H7J3.js.map +1 -0
  58. package/dist/variant-routing-GOHB2RZN.cjs +12 -0
  59. package/dist/variant-routing-GOHB2RZN.cjs.map +1 -0
  60. package/dist/variant-routing-JOBWXYKD.js +3 -0
  61. package/dist/variant-routing-JOBWXYKD.js.map +1 -0
  62. package/package.json +95 -0
  63. package/src/classify/index.ts +1 -0
  64. package/src/classify/rules.ts +214 -0
  65. package/src/convert/index.ts +2 -0
  66. package/src/convert/remux.ts +522 -0
  67. package/src/convert/transcode.ts +329 -0
  68. package/src/diagnostics.ts +99 -0
  69. package/src/element/avbridge-player.ts +576 -0
  70. package/src/element.ts +19 -0
  71. package/src/events.ts +71 -0
  72. package/src/index.ts +42 -0
  73. package/src/libav-stubs.d.ts +24 -0
  74. package/src/player.ts +455 -0
  75. package/src/plugins/builtin.ts +37 -0
  76. package/src/plugins/registry.ts +32 -0
  77. package/src/probe/avi.ts +242 -0
  78. package/src/probe/index.ts +59 -0
  79. package/src/probe/mediabunny.ts +194 -0
  80. package/src/strategies/fallback/audio-output.ts +293 -0
  81. package/src/strategies/fallback/clock.ts +7 -0
  82. package/src/strategies/fallback/decoder.ts +660 -0
  83. package/src/strategies/fallback/index.ts +170 -0
  84. package/src/strategies/fallback/libav-import.ts +27 -0
  85. package/src/strategies/fallback/libav-loader.ts +190 -0
  86. package/src/strategies/fallback/variant-routing.ts +43 -0
  87. package/src/strategies/fallback/video-renderer.ts +216 -0
  88. package/src/strategies/hybrid/decoder.ts +641 -0
  89. package/src/strategies/hybrid/index.ts +139 -0
  90. package/src/strategies/native.ts +107 -0
  91. package/src/strategies/remux/annexb.ts +112 -0
  92. package/src/strategies/remux/index.ts +79 -0
  93. package/src/strategies/remux/mse.ts +234 -0
  94. package/src/strategies/remux/pipeline.ts +254 -0
  95. package/src/subtitles/index.ts +91 -0
  96. package/src/subtitles/render.ts +62 -0
  97. package/src/subtitles/srt.ts +62 -0
  98. package/src/subtitles/vtt.ts +5 -0
  99. package/src/types-shim.d.ts +3 -0
  100. package/src/types.ts +360 -0
  101. package/src/util/codec-strings.ts +86 -0
  102. package/src/util/libav-http-reader.ts +315 -0
  103. package/src/util/source.ts +274 -0
@@ -0,0 +1,2957 @@
1
+ import { normalizeSource, sniffNormalizedSource } from './chunk-PQTZS7OA.js';
2
+ import { loadLibav } from './chunk-5ISVAODK.js';
3
+ import { pickLibavVariant } from './chunk-J5MCMN3S.js';
4
+
5
// src/probe/mediabunny.ts
/**
 * Probe a normalized source with mediabunny and assemble the avbridge
 * probe context: container, video/audio track lists, duration.
 * `sniffedContainer` is the fallback when mediabunny's own format name
 * cannot be resolved. Metadata reads are best-effort via safe()/safeNumber().
 */
async function probeWithMediabunny(source, sniffedContainer) {
  const mb = await import('mediabunny');
  const input = new mb.Input({
    source: await buildMediabunnySource(mb, source),
    formats: mb.ALL_FORMATS
  });
  const tracks = await input.getTracks();
  const duration = await safeNumber(() => input.computeDuration());
  const videoTracks = [];
  const audioTracks = [];
  for (const track of tracks) {
    if (track.isVideoTrack()) {
      const codecString = (await safe(() => track.getCodecParameterString())) ?? void 0;
      videoTracks.push({
        id: track.id,
        codec: mediabunnyVideoToAvbridge(track.codec),
        // Prefer display dimensions; fall back to coded, then 0.
        width: track.displayWidth ?? track.codedWidth ?? 0,
        height: track.displayHeight ?? track.codedHeight ?? 0,
        codecString
      });
      continue;
    }
    if (track.isAudioTrack()) {
      const codecString = (await safe(() => track.getCodecParameterString())) ?? void 0;
      audioTracks.push({
        id: track.id,
        codec: mediabunnyAudioToAvbridge(track.codec),
        channels: track.numberOfChannels ?? 0,
        sampleRate: track.sampleRate ?? 0,
        language: track.languageCode,
        codecString
      });
    }
  }
  const format = await safe(() => input.getFormat());
  return {
    source: source.original,
    name: source.name,
    byteLength: source.byteLength,
    container: resolveContainer(format?.name, sniffedContainer),
    videoTracks,
    audioTracks,
    // Subtitle discovery is not done via mediabunny.
    subtitleTracks: [],
    probedBy: "mediabunny",
    duration
  };
}
52
/** Wrap a normalized source (kind "url" or blob) in the matching mediabunny Source class. */
async function buildMediabunnySource(mb, source) {
  return source.kind === "url"
    ? new mb.UrlSource(source.url)
    : new mb.BlobSource(source.blob);
}
58
/**
 * Build a mediabunny Source from a raw user-supplied input value.
 * Strings/URLs become UrlSource, Blobs become BlobSource, and raw
 * buffers are wrapped in a Blob first. Anything else throws TypeError.
 */
async function buildMediabunnySourceFromInput(mb, source) {
  if (typeof source === "string") return new mb.UrlSource(source);
  if (source instanceof URL) return new mb.UrlSource(source.toString());
  if (source instanceof Blob) return new mb.BlobSource(source);
  if (source instanceof ArrayBuffer || source instanceof Uint8Array) {
    return new mb.BlobSource(new Blob([source]));
  }
  throw new TypeError("unsupported source type for mediabunny");
}
66
/**
 * Map a demuxer-reported format name onto an avbridge container id,
 * falling back to the byte-sniffed guess when nothing matches.
 * Matching is ordered: the first fragment hit wins (e.g. "matroska"
 * is tested before "webm", "mp4"/"isom" before "mov").
 */
function resolveContainer(formatName, sniffed) {
  const name = (formatName ?? "").toLowerCase();
  const ordered = [
    [["matroska", "mkv"], "mkv"],
    [["webm"], "webm"],
    [["mp4", "isom"], "mp4"],
    [["mov", "quicktime"], "mov"],
    [["ogg"], "ogg"],
    [["wav"], "wav"],
    [["flac"], "flac"],
    [["mp3"], "mp3"],
    [["adts", "aac"], "adts"],
    [["mpegts", "mpeg-ts", "transport"], "mpegts"]
  ];
  for (const [fragments, container] of ordered) {
    if (fragments.some((f) => name.includes(f))) return container;
  }
  return sniffed;
}
80
/**
 * Translate a mediabunny video codec id to the avbridge codec id.
 * Unknown ids default to "h264" so downstream always has a value.
 */
function mediabunnyVideoToAvbridge(c) {
  const table = new Map([
    ["avc", "h264"],
    ["hevc", "h265"],
    ["vp8", "vp8"],
    ["vp9", "vp9"],
    ["av1", "av1"]
  ]);
  return table.get(c) ?? "h264";
}
96
/**
 * Translate an avbridge video codec id to mediabunny's id, or null
 * when mediabunny cannot output that codec.
 */
function avbridgeVideoToMediabunny(c) {
  const table = new Map([
    ["h264", "avc"],
    ["h265", "hevc"],
    ["vp8", "vp8"],
    ["vp9", "vp9"],
    ["av1", "av1"]
  ]);
  return table.get(c) ?? null;
}
112
/**
 * Translate a mediabunny audio codec id to the avbridge codec id.
 * The known ids map to themselves; unknown ids pass through unchanged,
 * and a nullish id defaults to "aac".
 */
function mediabunnyAudioToAvbridge(c) {
  const known = new Set(["aac", "mp3", "opus", "vorbis", "flac", "ac3", "eac3"]);
  if (known.has(c)) return c;
  return c ?? "aac";
}
132
/**
 * Translate an avbridge audio codec id to mediabunny's id, or null
 * when mediabunny cannot output that codec. The supported ids are
 * identical on both sides, so this is a membership check.
 */
function avbridgeAudioToMediabunny(c) {
  const supported = new Set(["aac", "mp3", "opus", "vorbis", "flac", "ac3", "eac3"]);
  return supported.has(c) ? c : null;
}
152
/**
 * Await `fn()` and return its result only if it is a finite number;
 * any throw, rejection, or non-finite value yields undefined.
 */
async function safeNumber(fn) {
  try {
    const value = await fn();
    if (typeof value === "number" && Number.isFinite(value)) return value;
  } catch {
    // best-effort read: failures are expected and ignored
  }
  return void 0;
}
160
/**
 * Await `fn()`, converting any sync throw or rejection into undefined.
 * Used for best-effort metadata reads where failure is acceptable.
 */
async function safe(fn) {
  return Promise.resolve().then(fn).catch(() => void 0);
}
167
+
168
// src/probe/index.ts
/** Containers mediabunny can demux directly; everything else routes to the libav.js probe. */
var MEDIABUNNY_CONTAINERS = new Set(
  "mp4 mov mkv webm ogg wav mp3 flac adts mpegts".split(" ")
);
181
/**
 * Probe any supported source: normalize it, sniff the container, and
 * dispatch to mediabunny for containers it handles, otherwise to the
 * lazily-imported libav.js prober. Throws a descriptive Error (with the
 * original failure attached as `cause`) when both paths fail.
 *
 * Fixes: the mediabunny catch previously interpolated `err.message`
 * directly, printing "undefined" for non-Error throws — it now uses the
 * same `err instanceof Error` guard as the libav catch, and both catches
 * preserve the original error via the `cause` option.
 */
async function probe(source) {
  const normalized = await normalizeSource(source);
  const sniffed = await sniffNormalizedSource(normalized);
  if (MEDIABUNNY_CONTAINERS.has(sniffed)) {
    try {
      return await probeWithMediabunny(normalized, sniffed);
    } catch (err) {
      const inner = err instanceof Error ? err.message : String(err);
      throw new Error(
        `mediabunny failed to probe a ${sniffed} file: ${inner}`,
        { cause: err }
      );
    }
  }
  try {
    const { probeWithLibav } = await import('./avi-POCGZ4JX.js');
    return await probeWithLibav(normalized, sniffed);
  } catch (err) {
    const inner = err instanceof Error ? err.message : String(err);
    console.error("[avbridge] libav probe failed for", sniffed, "file:", err);
    throw new Error(
      sniffed === "unknown" ? `unable to probe source: container could not be identified, and the libav.js fallback also failed: ${inner || "(no message \u2014 see browser console for the original error)"}` : `${sniffed.toUpperCase()} files require libav.js, which failed to load: ${inner || "(no message \u2014 see browser console for the original error)"}`,
      { cause: err }
    );
  }
}
204
+
205
// src/util/codec-strings.ts
/**
 * Build an RFC 6381 codec string for a probed video track. An explicit
 * codecString from the probe wins; otherwise one is synthesized — h264
 * from profile/level, other codecs from fixed safe defaults. Returns
 * null for codecs with no known MP4 codec string.
 */
function videoCodecString(track) {
  if (track.codecString) return track.codecString;
  if (track.codec === "h264") {
    // avc1.PPCCLL — profile_idc hex, constraint flags (assumed 00), level hex.
    const profileHex = profileToHex(track.profile) ?? "64";
    const levelHex = ((track.level ?? 40) & 255).toString(16).padStart(2, "0");
    return `avc1.${profileHex}00${levelHex}`;
  }
  const fixed = new Map([
    ["h265", "hvc1.1.6.L93.B0"],
    ["vp8", "vp8"],
    ["vp9", "vp09.00.10.08"],
    ["av1", "av01.0.04M.08"]
  ]);
  return fixed.get(track.codec) ?? null;
}
/**
 * Map an H.264 profile name onto its profile_idc hex byte. The more
 * specific names ("high 10", "high 4:2:2", "high 4:4:4") are checked
 * before plain "high" so they win. Returns null when unrecognized.
 */
function profileToHex(profile) {
  if (!profile) return null;
  const p = profile.toLowerCase();
  const ordered = [
    ["baseline", "42"],
    ["main", "4d"],
    ["high 10", "6e"],
    ["high 4:2:2", "7a"],
    ["high 4:4:4", "f4"],
    ["high", "64"]
  ];
  for (const [needle, hex] of ordered) {
    if (p.includes(needle)) return hex;
  }
  return null;
}
238
/**
 * Build an RFC 6381 codec string for a probed audio track. An explicit
 * codecString from the probe wins; otherwise a fixed mapping is used.
 * Returns null for codecs with no known MP4 codec string.
 */
function audioCodecString(track) {
  if (track.codecString) return track.codecString;
  const fixed = new Map([
    ["aac", "mp4a.40.2"], // AAC-LC
    ["mp3", "mp4a.40.34"],
    ["opus", "opus"],
    ["vorbis", "vorbis"],
    ["flac", "flac"]
  ]);
  return fixed.get(track.codec) ?? null;
}
256
/**
 * Build a `video/mp4; codecs="…"` MIME string for MSE support checks.
 * Returns null when the video codec has no codec string; an audio track
 * whose codec string is unknown is simply omitted from the codecs list.
 */
function mp4MimeFor(video, audio) {
  const v = videoCodecString(video);
  if (!v) return null;
  let codecs = v;
  if (audio) {
    const a = audioCodecString(audio);
    if (a) codecs = `${v},${a}`;
  }
  return `video/mp4; codecs="${codecs}"`;
}
262
/**
 * True when Media Source Extensions exist and report support for `mime`.
 * Missing or throwing implementations degrade to false rather than
 * propagating an error.
 */
function mseSupports(mime) {
  if (typeof MediaSource === "undefined") return false;
  let supported = false;
  try {
    supported = MediaSource.isTypeSupported(mime);
  } catch {
    supported = false;
  }
  return supported;
}
270
+
271
// src/classify/rules.ts
/** Video codecs browsers can decode natively (hardware or built-in). */
var NATIVE_VIDEO_CODECS = new Set(["h264", "h265", "vp8", "vp9", "av1"]);
/** Audio codecs browsers can decode natively. */
var NATIVE_AUDIO_CODECS = new Set(["aac", "mp3", "opus", "vorbis", "flac"]);
/** Video codecs with no browser decoder at all — WASM (libav) decode required. */
var FALLBACK_VIDEO_CODECS = new Set(["wmv3", "vc1", "mpeg4", "rv40", "mpeg2", "mpeg1", "theora"]);
/** Audio codecs with no browser decoder — WASM decode required. */
var FALLBACK_AUDIO_CODECS = new Set(["wmav2", "wmapro", "ac3", "eac3"]);
/** Containers a <video> element can play directly. */
var NATIVE_CONTAINERS = new Set(["mp4", "mov", "webm", "ogg", "wav", "mp3", "flac", "adts"]);
/** Containers mediabunny can read and repackage into fragmented MP4. */
var REMUXABLE_CONTAINERS = new Set(["mp4", "mov", "mkv", "webm", "ogg", "wav", "mp3", "flac", "adts", "mpegts"]);
304
/**
 * Decide how to play a probed source. Only the FIRST video and audio
 * tracks drive the decision. Branch order matters: audio-only handling,
 * then hard fallback codecs, then native / risky-native / remux /
 * hybrid, and finally WASM fallback. Returns a classification object
 * with `class`, `strategy`, a human-readable `reason`, and optionally
 * a `fallbackChain` of strategies to escalate through.
 */
function classifyContext(ctx) {
  const video = ctx.videoTracks[0];
  const audio = ctx.audioTracks[0];
  // --- Audio-only sources -------------------------------------------------
  if (!video) {
    // Native container and (no audio or a natively decodable codec): play directly.
    if (NATIVE_CONTAINERS.has(ctx.container) && (!audio || NATIVE_AUDIO_CODECS.has(audio.codec))) {
      return {
        class: "NATIVE",
        strategy: "native",
        reason: `audio-only ${ctx.container} with native codec`
      };
    }
    // Codec itself has no browser decoder: must decode in WASM.
    if (audio && FALLBACK_AUDIO_CODECS.has(audio.codec)) {
      return {
        class: "FALLBACK_REQUIRED",
        strategy: "fallback",
        reason: `audio codec "${audio.codec}" requires WASM decode`
      };
    }
    // Codec is fine but the container isn't native: repackage if mediabunny can read it.
    if (REMUXABLE_CONTAINERS.has(ctx.container)) {
      return {
        class: "REMUX_CANDIDATE",
        strategy: "remux",
        reason: `audio-only file in non-native container "${ctx.container}"`
      };
    }
    return {
      class: "FALLBACK_REQUIRED",
      strategy: "fallback",
      reason: `audio-only file in "${ctx.container}" (not remuxable by mediabunny)`
    };
  }
  // --- Hard fallback codecs (no browser decoder exists) -------------------
  if (FALLBACK_VIDEO_CODECS.has(video.codec)) {
    return {
      class: "FALLBACK_REQUIRED",
      strategy: "fallback",
      reason: `video codec "${video.codec}" has no browser decoder; WASM fallback required`
    };
  }
  if (audio && FALLBACK_AUDIO_CODECS.has(audio.codec)) {
    return {
      class: "FALLBACK_REQUIRED",
      strategy: "fallback",
      reason: `audio codec "${audio.codec}" has no browser decoder; WASM fallback required`
    };
  }
  // Unrecognized video codec: be conservative and route to the WASM path.
  if (!NATIVE_VIDEO_CODECS.has(video.codec)) {
    return {
      class: "FALLBACK_REQUIRED",
      strategy: "fallback",
      reason: `unknown video codec "${video.codec}", routing to fallback`
    };
  }
  // --- Native playback paths ----------------------------------------------
  const isNativeContainer = NATIVE_CONTAINERS.has(ctx.container);
  if (isNativeContainer && isSafeNativeCombo(video, audio)) {
    const mime = mp4MimeFor(video, audio);
    // MSE confirms the exact codec string is playable.
    if (mime && mseSupports(mime)) {
      return {
        class: "NATIVE",
        strategy: "native",
        reason: `${ctx.container} + ${video.codec}${audio ? "/" + audio.codec : ""} plays natively`
      };
    }
    // No codec string to check, or no MSE to ask: assume native works.
    if (mime == null || typeof MediaSource === "undefined") {
      return {
        class: "NATIVE",
        strategy: "native",
        reason: `${ctx.container} + ${video.codec}${audio ? "/" + audio.codec : ""} (heuristic native)`
      };
    }
  }
  // Native container but a risky profile (deep color, >4K, >60fps, …):
  // try native first, escalating through the fallbackChain on stall.
  if (isNativeContainer && isRiskyNative(video)) {
    return {
      class: "RISKY_NATIVE",
      strategy: "native",
      reason: `${video.codec} ${video.profile ?? ""} ${video.bitDepth ?? 8}-bit may stutter on mobile; will escalate to remux on stall`,
      fallbackChain: ["remux", "hybrid", "fallback"]
    };
  }
  // --- Repackage or hybrid paths -------------------------------------------
  if (REMUXABLE_CONTAINERS.has(ctx.container)) {
    return {
      class: "REMUX_CANDIDATE",
      strategy: "remux",
      reason: `${ctx.container} container with native-supported codecs \u2014 remux to fragmented MP4 for reliable playback`
    };
  }
  // Container needs libav to demux, but WebCodecs can hardware-decode the codecs.
  if (webCodecsAvailable()) {
    return {
      class: "HYBRID_CANDIDATE",
      strategy: "hybrid",
      reason: `${ctx.container} container requires libav demux; codecs (${video.codec}${audio ? "/" + audio.codec : ""}) are hardware-decodable via WebCodecs`,
      fallbackChain: ["fallback"]
    };
  }
  // Last resort: full software demux + decode in WASM.
  return {
    class: "FALLBACK_REQUIRED",
    strategy: "fallback",
    reason: `${ctx.container} container cannot be remuxed by mediabunny; falling back to WASM decode (${video.codec}${audio ? "/" + audio.codec : ""})`
  };
}
403
/** True when the WebCodecs VideoDecoder API is present in this environment. */
function webCodecsAvailable() {
  return globalThis.VideoDecoder !== undefined;
}
406
/**
 * True when this video/audio combo is safe to hand straight to the
 * <video> element: h264 must be 8-bit 4:2:0 (other variants choke
 * hardware decoders), and any audio codec must be natively decodable.
 */
function isSafeNativeCombo(video, audio) {
  if (video.codec === "h264") {
    const deepColor = video.bitDepth && video.bitDepth > 8;
    if (deepColor) return false;
    const nonStandardChroma = video.pixelFormat && !/yuv420p$/.test(video.pixelFormat);
    if (nonStandardChroma) return false;
  }
  return !(audio && !NATIVE_AUDIO_CODECS.has(audio.codec));
}
414
/**
 * Heuristics for native playback that may stutter on weaker devices:
 * deep color (>8-bit), 4:2:2/4:4:4 chroma, beyond-4K resolution, or
 * frame rates above 60 fps.
 */
function isRiskyNative(video) {
  const { bitDepth, pixelFormat, width, height, fps } = video;
  if (bitDepth && bitDepth > 8) return true;
  if (pixelFormat && /yuv4(2[24]|44)/.test(pixelFormat)) return true;
  if (width > 3840 || height > 2160) return true;
  return Boolean(fps && fps > 60);
}
421
+
422
// src/subtitles/srt.ts
/**
 * Convert SubRip (.srt) text to WebVTT. Strips a leading BOM,
 * normalizes line endings, drops numeric cue identifiers, and rewrites
 * `HH:MM:SS,mmm` timings to the VTT `HH:MM:SS.mmm` form. Cue blocks
 * without a parseable timing line are skipped entirely.
 */
function srtToVtt(srt) {
  let text = srt;
  if (text.charCodeAt(0) === 65279) text = text.slice(1); // drop UTF-8 BOM
  const body = text.replace(/\r\n/g, "\n").replace(/\r/g, "\n").trim();
  const out = ["WEBVTT", ""];
  for (const block of body.split(/\n{2,}/)) {
    const lines = block.split("\n");
    // The optional numeric cue index precedes the timing line — drop it.
    if (lines.length > 0 && /^\d+$/.test(lines[0].trim())) {
      lines.shift();
    }
    if (lines.length === 0) continue;
    const vttTiming = convertTiming(lines.shift());
    if (!vttTiming) continue;
    out.push(vttTiming, ...lines, "");
  }
  return out.join("\n");
}
/**
 * Rewrite one SRT timing line into VTT form, or return null when the
 * line is not a timing. Hours are zero-padded to two digits and
 * milliseconds normalized to exactly three; any trailing cue settings
 * after the timing are preserved verbatim.
 */
function convertTiming(line) {
  const m = /^(\d{1,2}):(\d{2}):(\d{2})[,.](\d{1,3})\s*-->\s*(\d{1,2}):(\d{2}):(\d{2})[,.](\d{1,3})(.*)$/.exec(line.trim());
  if (!m) return null;
  const fmt = (h, mm, s, ms) => `${h.padStart(2, "0")}:${mm}:${s}.${ms.padEnd(3, "0").slice(0, 3)}`;
  return `${fmt(m[1], m[2], m[3], m[4])} --> ${fmt(m[5], m[6], m[7], m[8])}${m[9] ?? ""}`;
}
451
+
452
// src/events.ts
/**
 * Minimal event emitter with optional "sticky" events: a value published
 * via emitSticky() is replayed to any listener that subscribes later.
 * Listener exceptions are logged to the console, never rethrown.
 */
var TypedEmitter = class {
  listeners = {};
  sticky = {};
  /** Subscribe to `event`. Returns an unsubscribe function. Replays a sticky value if one exists. */
  on(event, fn) {
    const set = this.listeners[event] ?? (this.listeners[event] = new Set());
    set.add(fn);
    if (Object.prototype.hasOwnProperty.call(this.sticky, event)) {
      try {
        fn(this.sticky[event]);
      } catch (err) {
        console.error("[avbridge] listener threw replaying sticky value:", err);
      }
    }
    return () => this.off(event, fn);
  }
  off(event, fn) {
    this.listeners[event]?.delete(fn);
  }
  /** Invoke current listeners for `event`; iterates a snapshot so handlers may unsubscribe mid-dispatch. */
  emit(event, payload) {
    const set = this.listeners[event];
    if (!set) return;
    for (const fn of Array.from(set)) {
      try {
        fn(payload);
      } catch (err) {
        console.error("[avbridge] listener threw:", err);
      }
    }
  }
  /**
   * Like {@link emit} but also remembers the value so future subscribers
   * receive it on `on()`. Use for one-shot state-snapshot events.
   */
  emitSticky(event, payload) {
    this.sticky[event] = payload;
    this.emit(event, payload);
  }
  /** Drop every listener and forget all sticky values. */
  removeAll() {
    this.listeners = {};
    this.sticky = {};
  }
};
499
+
500
// src/diagnostics.ts
/**
 * Accumulates everything avbridge learns about the current source —
 * probe facts, classification verdict, runtime stats, strategy switches
 * and the last error — and exposes a frozen snapshot for consumers.
 */
var Diagnostics = class {
  container = "unknown";
  videoCodec;
  audioCodec;
  width;
  height;
  fps;
  duration;
  strategy = "pending";
  strategyClass = "pending";
  reason = "";
  probedBy;
  sourceType;
  transport;
  rangeSupported;
  runtime = {};
  lastError;
  strategyHistory = [];
  /** Record container/codec/dimension facts from a probe context. */
  recordProbe(ctx) {
    this.container = ctx.container;
    this.probedBy = ctx.probedBy;
    this.duration = ctx.duration;
    const [video] = ctx.videoTracks;
    if (video) {
      this.videoCodec = video.codec;
      this.width = video.width;
      this.height = video.height;
      this.fps = video.fps;
    }
    const [audio] = ctx.audioTracks;
    if (audio) this.audioCodec = audio.codec;
    if (typeof ctx.source === "string" || ctx.source instanceof URL) {
      this.sourceType = "url";
      this.transport = "http-range";
      this.rangeSupported = true;
    } else {
      // Blob/buffer sources are fully in memory, so range support is moot.
      this.sourceType = "blob";
      this.transport = "memory";
    }
  }
  /** Adopt the classifier's verdict (strategy, class, human-readable reason). */
  recordClassification(c) {
    this.strategy = c.strategy;
    this.strategyClass = c.class;
    this.reason = c.reason;
  }
  /** Shallow-merge fresh runtime stats over previously recorded ones. */
  recordRuntime(stats) {
    this.runtime = Object.assign({}, this.runtime, stats);
  }
  /** Note a mid-playback strategy change, keeping a timestamped history. */
  recordStrategySwitch(strategy, reason) {
    this.strategy = strategy;
    this.reason = reason;
    this.strategyHistory.push({ strategy, reason, at: Date.now() });
  }
  recordError(err) {
    this.lastError = err;
  }
  /** Frozen copy of the current state, safe to hand to external callers. */
  snapshot() {
    const runtime = { ...this.runtime };
    if (this.lastError) runtime.error = this.lastError.message;
    return Object.freeze({
      container: this.container,
      videoCodec: this.videoCodec,
      audioCodec: this.audioCodec,
      width: this.width,
      height: this.height,
      fps: this.fps,
      duration: this.duration,
      strategy: this.strategy,
      strategyClass: this.strategyClass,
      reason: this.reason,
      probedBy: this.probedBy,
      sourceType: this.sourceType,
      transport: this.transport,
      rangeSupported: this.rangeSupported,
      runtime,
      strategyHistory: this.strategyHistory.length > 0 ? [...this.strategyHistory] : void 0
    });
  }
};
580
+
581
// src/plugins/registry.ts
/**
 * Ordered list of playback plugins. Registration order matters:
 * findFor() returns the first plugin whose name matches the strategy
 * and that claims the context.
 */
var PluginRegistry = class {
  plugins = [];
  /** Add a plugin; `prepend` puts it ahead of existing registrations. */
  register(plugin, prepend = false) {
    if (prepend) {
      this.plugins.unshift(plugin);
      return;
    }
    this.plugins.push(plugin);
  }
  all() {
    return this.plugins;
  }
  /**
   * Find the first plugin that claims this context AND its name matches the
   * strategy. Built-in strategy plugins are named exactly `"native"`,
   * `"remux"`, `"fallback"`.
   */
  findFor(context, strategy) {
    const match = this.plugins.find((p) => p.name === strategy && p.canHandle(context));
    return match ?? null;
  }
};
603
+
604
// src/strategies/native.ts
/**
 * Play the source directly through the <video> element. Resolves once
 * `loadedmetadata` fires (or rejects on the element's `error` event),
 * then returns a session object exposing play/pause/seek, track
 * selection, teardown, and runtime stats.
 */
async function createNativeSession(context, video) {
  // Blob/buffer sources get an object URL; `revoke` frees it on destroy().
  const { url, revoke } = sourceToVideoUrl(context.source);
  video.src = url;
  // Wait for metadata so duration/dimensions are available to callers.
  await new Promise((resolve, reject) => {
    const onMeta = () => {
      cleanup();
      resolve();
    };
    const onError = () => {
      cleanup();
      reject(new Error(`<video> failed to load: ${video.error?.message ?? "unknown"}`));
    };
    // Remove both handlers whichever event fires first.
    const cleanup = () => {
      video.removeEventListener("loadedmetadata", onMeta);
      video.removeEventListener("error", onError);
    };
    video.addEventListener("loadedmetadata", onMeta);
    video.addEventListener("error", onError);
  });
  // Cached between getRuntimeStats() calls in case the quality API disappears.
  let stats = { framesDecoded: 0, framesDropped: 0 };
  return {
    strategy: "native",
    async play() {
      await video.play();
    },
    pause() {
      video.pause();
    },
    async seek(time) {
      video.currentTime = time;
    },
    /**
     * Enable the audio track whose id (or index) matches; disable the rest.
     * NOTE(review): `video.audioTracks` is non-standard in some browsers —
     * silently no-ops when absent.
     */
    async setAudioTrack(id) {
      const tracks = video.audioTracks;
      if (!tracks) return;
      for (let i = 0; i < tracks.length; i++) {
        tracks[i].enabled = tracks[i].id === String(id) || i === id;
      }
    },
    /** Show the text track at index `id`; disable all others. */
    async setSubtitleTrack(id) {
      const tracks = video.textTracks;
      for (let i = 0; i < tracks.length; i++) {
        tracks[i].mode = i === id ? "showing" : "disabled";
      }
    },
    /** Stop playback, detach the source, and release any object URL. */
    async destroy() {
      video.pause();
      video.removeAttribute("src");
      video.load();
      revoke?.();
    },
    getCurrentTime() {
      return video.currentTime || 0;
    },
    /** Frame counters from getVideoPlaybackQuality() when available. */
    getRuntimeStats() {
      const q = video.getVideoPlaybackQuality?.();
      if (q) {
        stats = {
          framesDecoded: q.totalVideoFrames,
          framesDropped: q.droppedVideoFrames
        };
      }
      return { ...stats, decoderType: "native" };
    }
  };
}
670
/**
 * Turn a supported source into a URL a <video> element can load.
 * Blob/buffer inputs get an object URL plus a `revoke` callback the
 * caller must invoke on teardown; string/URL inputs pass through
 * without a revoke function. Throws TypeError for anything else.
 */
function sourceToVideoUrl(source) {
  if (typeof source === "string") return { url: source };
  if (source instanceof URL) return { url: source.toString() };
  let blob = null;
  if (source instanceof Blob) {
    blob = source;
  } else if (source instanceof ArrayBuffer || source instanceof Uint8Array) {
    blob = new Blob([source]);
  }
  if (blob) {
    const url = URL.createObjectURL(blob);
    return { url, revoke: () => URL.revokeObjectURL(url) };
  }
  throw new TypeError("native strategy: unsupported source type");
}
684
+
685
// src/strategies/remux/mse.ts
/**
 * Media Source Extensions sink for remuxed fragmented-MP4 bytes.
 * Owns a MediaSource + single SourceBuffer, an append queue pumped on
 * `updateend`, quota-based eviction, deferred seeking, and a first-data
 * snap for streams whose timestamps don't start at zero.
 * Construction throws when MSE is absent or rejects the MIME string.
 */
var MseSink = class {
  constructor(options) {
    this.options = options;
    if (typeof MediaSource === "undefined") {
      throw new Error("MSE not supported in this environment");
    }
    // Fail fast: an unsupported MIME would only error later inside addSourceBuffer.
    if (!MediaSource.isTypeSupported(options.mime)) {
      throw new Error(`MSE does not support MIME "${options.mime}" \u2014 cannot remux`);
    }
    this.mediaSource = new MediaSource();
    this.objectUrl = URL.createObjectURL(this.mediaSource);
    options.video.src = this.objectUrl;
    // ready() resolves once the SourceBuffer exists; rejects on setup/append errors.
    this.readyPromise = new Promise((resolve, reject) => {
      this.resolveReady = resolve;
      this.rejectReady = reject;
    });
    this.mediaSource.addEventListener("sourceopen", () => {
      try {
        this.sourceBuffer = this.mediaSource.addSourceBuffer(options.mime);
        this.sourceBuffer.mode = "segments";
        // Each completed append drains the next queued chunk.
        this.sourceBuffer.addEventListener("updateend", () => this.pump());
        this.resolveReady();
        options.onReady?.();
      } catch (err) {
        this.rejectReady(err instanceof Error ? err : new Error(String(err)));
      }
    });
  }
  options;
  mediaSource;
  sourceBuffer = null;
  // FIFO of ArrayBuffers awaiting appendBuffer().
  queue = [];
  endOfStreamCalled = false;
  destroyed = false;
  readyPromise;
  resolveReady;
  rejectReady;
  objectUrl;
  ready() {
    return this.readyPromise;
  }
  /** Queue a chunk of fMP4 bytes (init segment or media segment). */
  append(chunk) {
    if (this.destroyed) return;
    // appendBuffer needs an ArrayBuffer; copy out the view's exact byte range.
    const ab = chunk instanceof Uint8Array ? chunk.buffer.slice(chunk.byteOffset, chunk.byteOffset + chunk.byteLength) : chunk;
    this.queue.push(ab);
    this.pump();
  }
  // Drive the append queue; also resolves deferred seeks and the first-data
  // snap once buffered data exists. Re-entered on every `updateend`.
  pump() {
    const sb = this.sourceBuffer;
    if (!sb || sb.updating) return;
    if (sb.buffered.length > 0) {
      if (this.pendingSeekTime !== null) {
        // Deferred seek: only assign currentTime once data exists (see invalidate()).
        this.options.video.currentTime = this.pendingSeekTime;
        this.pendingSeekTime = null;
      } else if (!this.hasSnappedToFirstBuffered) {
        const v = this.options.video;
        const firstStart = sb.buffered.start(0);
        const firstEnd = sb.buffered.end(0);
        // Snap into the first buffered range if playback sits outside it.
        if (v.currentTime < firstStart || v.currentTime > firstEnd) {
          v.currentTime = firstStart;
        }
        this.hasSnappedToFirstBuffered = true;
      }
      if (this.playOnSeek) {
        this.playOnSeek = false;
        // Autoplay may be blocked; that's fine.
        this.options.video.play().catch(() => {
        });
      }
    }
    const next = this.queue.shift();
    if (!next) return;
    try {
      sb.appendBuffer(next);
    } catch (err) {
      if (err.name === "QuotaExceededError") {
        // Evict already-played media and retry once before giving up.
        this.evict();
        try {
          sb.appendBuffer(next);
          return;
        } catch {
        }
      }
      this.rejectReady(err instanceof Error ? err : new Error(String(err)));
    }
  }
  // Drop buffered media more than 10s behind the playhead to free quota.
  evict() {
    const sb = this.sourceBuffer;
    if (!sb || sb.buffered.length === 0) return;
    const start = sb.buffered.start(0);
    const current = this.options.video.currentTime;
    if (current - start > 10) {
      try {
        sb.remove(start, current - 10);
      } catch {
      }
    }
  }
  /** Indicate the source is finished. Future seeks past the end will fail. */
  endOfStream() {
    if (this.endOfStreamCalled || this.destroyed) return;
    this.endOfStreamCalled = true;
    // endOfStream() throws while appends are in flight — wait out the queue.
    const tryEnd = () => {
      if (this.queue.length > 0 || this.sourceBuffer?.updating) {
        this.sourceBuffer?.addEventListener("updateend", tryEnd, { once: true });
        return;
      }
      try {
        if (this.mediaSource.readyState === "open") {
          this.mediaSource.endOfStream();
        }
      } catch {
      }
    };
    tryEnd();
  }
  /** Seconds of media buffered ahead of the current playback position. */
  bufferedAhead() {
    const sb = this.sourceBuffer;
    if (!sb || sb.buffered.length === 0) return 0;
    const current = this.options.video.currentTime;
    for (let i = 0; i < sb.buffered.length; i++) {
      if (sb.buffered.start(i) <= current && sb.buffered.end(i) > current) {
        return sb.buffered.end(i) - current;
      }
    }
    return 0;
  }
  /** Total seconds of media buffered across all ranges. */
  totalBuffered() {
    const sb = this.sourceBuffer;
    if (!sb || sb.buffered.length === 0) return 0;
    let total = 0;
    for (let i = 0; i < sb.buffered.length; i++) {
      total += sb.buffered.end(i) - sb.buffered.start(i);
    }
    return total;
  }
  /** Number of chunks waiting in the append queue. */
  queueLength() {
    return this.queue.length;
  }
  /** Time to seek to once the SourceBuffer has data at this position. */
  pendingSeekTime = null;
  /** Whether to resume playback after the deferred seek completes. */
  playOnSeek = false;
  /**
   * On the very first data arrival, if `currentTime` falls outside the first
   * buffered range, snap it to the start of that range. MPEG-TS sources
   * commonly start their PTS at a non-zero value (e.g. ~1.5s); without this
   * snap, the video element sits at `currentTime=0` waiting forever for
   * data that doesn't exist.
   */
  hasSnappedToFirstBuffered = false;
  /** Request that playback resumes automatically once the deferred seek fires. */
  setPlayOnSeek(play) {
    this.playOnSeek = play;
  }
  /**
   * Discard all buffered media and schedule a deferred seek. The actual
   * `video.currentTime` assignment happens in `pump()` once the SourceBuffer
   * has data at the target position — setting it earlier causes the browser
   * to snap back to the nearest buffered range.
   */
  invalidate(seekTime) {
    const sb = this.sourceBuffer;
    this.queue = [];
    this.pendingSeekTime = seekTime;
    this.hasSnappedToFirstBuffered = true;
    if (!sb || sb.buffered.length === 0) return;
    try {
      const start = sb.buffered.start(0);
      const end = sb.buffered.end(sb.buffered.length - 1);
      sb.remove(start, end);
    } catch {
    }
  }
  // Tear down: stop accepting appends, close the stream, free the object URL.
  destroy() {
    this.destroyed = true;
    this.queue = [];
    try {
      if (this.mediaSource.readyState === "open") this.mediaSource.endOfStream();
    } catch {
    }
    URL.revokeObjectURL(this.objectUrl);
  }
};
873
+
874
// src/strategies/remux/pipeline.ts
/**
 * Remux the source container into fragmented MP4 with mediabunny and feed
 * the fragments to an MseSink attached to `video`.
 *
 * Every start/seek creates a fresh mediabunny Output (`createOutput`); a
 * monotonically increasing `pumpToken` invalidates any still-running pump
 * loop from a previous generation.
 *
 * @param ctx   probe context with `videoTracks`, `audioTracks` and `source`
 * @param video the HTMLVideoElement to drive via MSE
 * @returns `{ start, seek, destroy, stats }`
 * @throws if the source has no video track or its codec cannot be muxed
 */
async function createRemuxPipeline(ctx, video) {
  const mb = await import('mediabunny');
  // Only the first video/audio track is remuxed.
  const videoTrackInfo = ctx.videoTracks[0];
  const audioTrackInfo = ctx.audioTracks[0];
  if (!videoTrackInfo) throw new Error("remux: source has no video track");
  const mbVideoCodec = avbridgeVideoToMediabunny(videoTrackInfo.codec);
  if (!mbVideoCodec) {
    throw new Error(`remux: video codec "${videoTrackInfo.codec}" is not supported by mediabunny output`);
  }
  // Unsupported audio codec is tolerated: the output is video-only then.
  const mbAudioCodec = audioTrackInfo ? avbridgeAudioToMediabunny(audioTrackInfo.codec) : null;
  const input = new mb.Input({
    source: await buildMediabunnySourceFromInput(mb, ctx.source),
    formats: mb.ALL_FORMATS
  });
  const allTracks = await input.getTracks();
  const inputVideo = allTracks.find((t) => t.id === videoTrackInfo.id && t.isVideoTrack());
  const inputAudio = audioTrackInfo ? allTracks.find((t) => t.id === audioTrackInfo.id && t.isAudioTrack()) : null;
  if (!inputVideo || !inputVideo.isVideoTrack()) {
    throw new Error("remux: video track not found in input");
  }
  if (audioTrackInfo && (!inputAudio || !inputAudio.isAudioTrack())) {
    throw new Error("remux: audio track not found in input");
  }
  const videoConfig = await inputVideo.getDecoderConfig();
  const audioConfig = inputAudio && inputAudio.isAudioTrack() ? await inputAudio.getDecoderConfig() : null;
  const videoSink = new mb.EncodedPacketSink(inputVideo);
  const audioSink = inputAudio?.isAudioTrack() ? new mb.EncodedPacketSink(inputAudio) : null;
  // `sink` (MseSink) is created lazily on the first fragment, once the
  // output's exact MIME type is known.
  let sink = null;
  const stats = { videoPackets: 0, audioPackets: 0, bytesWritten: 0, fragments: 0 };
  let destroyed = false;
  let pumpToken = 0;
  // Latched until the sink exists (first fragment) — see WritableStream.write.
  let pendingAutoPlay = false;
  let pendingStartTime = 0;
  let currentOutput = null;
  // Build a fresh Output + packet sources; cancels the previous generation.
  function createOutput() {
    if (currentOutput) {
      try {
        // Fire-and-forget: stop the old generation's StreamTarget writes.
        void currentOutput.cancel();
      } catch {
      }
    }
    let mimePromise = null;
    const writable = new WritableStream({
      write: async (chunk) => {
        if (destroyed) return;
        if (!sink) {
          // First fragment: the MIME type is now final, attach MSE.
          const mime = await (mimePromise ??= output.getMimeType());
          sink = new MseSink({ mime, video });
          await sink.ready();
          if (pendingStartTime > 0) {
            sink.invalidate(pendingStartTime);
          }
          sink.setPlayOnSeek(pendingAutoPlay);
        }
        // Backpressure: stall the muxer while MSE is saturated
        // (>10 queued appends, >60s ahead, or >120s total buffered).
        while (sink && !destroyed && (sink.queueLength() > 10 || sink.bufferedAhead() > 60 || sink.totalBuffered() > 120)) {
          await new Promise((r) => setTimeout(r, 500));
        }
        if (destroyed) return;
        sink.append(chunk.data);
        stats.bytesWritten += chunk.data.byteLength;
        stats.fragments++;
      }
    });
    const target = new mb.StreamTarget(writable);
    const output = new mb.Output({
      format: new mb.Mp4OutputFormat({ fastStart: "fragmented" }),
      target
    });
    const videoSource = new mb.EncodedVideoPacketSource(mbVideoCodec);
    output.addVideoTrack(videoSource);
    let audioSource = null;
    if (mbAudioCodec && inputAudio?.isAudioTrack()) {
      audioSource = new mb.EncodedAudioPacketSource(mbAudioCodec);
      output.addAudioTrack(audioSource);
    }
    currentOutput = output;
    return { output, videoSource, audioSource };
  }
  // Read packets from the input sinks and feed them to the muxer in
  // timestamp order. Exits as soon as `pumpToken` moves past `token`.
  async function pumpLoop(token, fromTime) {
    const { output, videoSource, audioSource } = createOutput();
    await output.start();
    // Seeks must start on a keyframe; fall back to the first packet.
    const startVideoPacket = fromTime > 0 ? await videoSink.getKeyPacket(fromTime) ?? await videoSink.getFirstPacket() : await videoSink.getFirstPacket();
    if (!startVideoPacket) return;
    const startAudioPacket = audioSink ? audioSink && fromTime > 0 ? await audioSink.getPacket(fromTime) ?? await audioSink.getFirstPacket() : await audioSink.getFirstPacket() : null;
    const videoIter = videoSink.packets(startVideoPacket);
    const audioIter = audioSink && startAudioPacket ? audioSink.packets(startAudioPacket) : null;
    let vNext = await videoIter.next();
    let aNext = audioIter ? await audioIter.next() : { done: true, value: void 0 };
    let firstVideo = true;
    let firstAudio = true;
    while (!destroyed && pumpToken === token && (!vNext.done || !aNext.done)) {
      // Upstream backpressure (tighter thresholds than the writer's).
      while (!destroyed && pumpToken === token && sink && (sink.bufferedAhead() > 30 || sink.queueLength() > 20 || sink.totalBuffered() > 90)) {
        await new Promise((r) => setTimeout(r, 500));
      }
      if (destroyed || pumpToken !== token) break;
      // Merge video and audio by timestamp so the muxer sees monotonic input.
      const vTs = !vNext.done ? vNext.value.timestamp : Number.POSITIVE_INFINITY;
      const aTs = !aNext.done ? aNext.value.timestamp : Number.POSITIVE_INFINITY;
      if (!vNext.done && vTs <= aTs) {
        await videoSource.add(
          vNext.value,
          // Decoder config must accompany the first packet of the track.
          firstVideo && videoConfig ? { decoderConfig: videoConfig } : void 0
        );
        firstVideo = false;
        stats.videoPackets++;
        vNext = await videoIter.next();
      } else if (audioIter && audioSource && !aNext.done) {
        await audioSource.add(
          aNext.value,
          firstAudio && audioConfig ? { decoderConfig: audioConfig } : void 0
        );
        firstAudio = false;
        stats.audioPackets++;
        aNext = await audioIter.next();
      } else {
        break;
      }
    }
    // Only the current generation may finalize the stream.
    if (!destroyed && pumpToken === token) {
      await output.finalize();
      sink?.endOfStream();
    }
  }
  return {
    // Begin muxing from `fromTime`. The pump runs in the background;
    // failures tear down the sink rather than rejecting this call.
    async start(fromTime = 0, autoPlay = false) {
      pendingAutoPlay = autoPlay;
      pendingStartTime = fromTime;
      pumpLoop(++pumpToken, fromTime).catch((err) => {
        console.error("[avbridge] remux pipeline failed:", err);
        try {
          sink?.destroy();
        } catch {
        }
      });
    },
    // Invalidate buffered media (deferred seek happens inside MseSink)
    // and restart the pump at `time` under a new token.
    async seek(time, autoPlay = false) {
      if (sink) {
        sink.setPlayOnSeek(autoPlay);
        sink.invalidate(time);
      } else {
        pendingAutoPlay = autoPlay;
        pendingStartTime = time;
      }
      pumpLoop(++pumpToken, time).catch((err) => {
        console.error("[avbridge] remux pipeline reseek failed:", err);
      });
    },
    // Cancel the muxer, dispose the input, and tear down MSE. Idempotent
    // best-effort: each step swallows its own failure.
    async destroy() {
      destroyed = true;
      pumpToken++;
      try {
        if (currentOutput) await currentOutput.cancel();
      } catch {
      }
      try {
        await input.dispose();
      } catch {
      }
      sink?.destroy();
    },
    stats() {
      return { ...stats, decoderType: "remux" };
    }
  };
}
1039
+
1040
// src/strategies/remux/index.ts
/**
 * Session facade over the remux pipeline. Defers the pipeline's first
 * `start()` until the user first plays or seeks, so autoplay intent and
 * start position reach the pipeline together.
 *
 * @param context probe context forwarded to `createRemuxPipeline`
 * @param video   the target HTMLVideoElement
 * @throws Error wrapping the pipeline failure when remuxing can't start
 */
async function createRemuxSession(context, video) {
  let pipeline;
  try {
    pipeline = await createRemuxPipeline(context, video);
  } catch (err) {
    throw new Error(
      `remux strategy failed to start: ${err.message}. The container or codec combination is not supported by mediabunny + MSE on this browser.`
    );
  }
  // `started` latches after the first pipeline.start/seek; `wantPlay`
  // remembers user intent so a pre-start seek can autoplay.
  let started = false;
  let wantPlay = false;
  return {
    strategy: "remux",
    async play() {
      wantPlay = true;
      if (!started) {
        // Cold start: let the pipeline begin pumping with autoplay on.
        started = true;
        await pipeline.start(video.currentTime || 0, true);
        return;
      }
      await video.play();
    },
    pause() {
      wantPlay = false;
      video.pause();
    },
    async seek(time) {
      if (!started) {
        started = true;
        await pipeline.seek(time, wantPlay);
        return;
      }
      // Resume afterwards if the element was playing or play was requested.
      const wasPlaying = !video.paused;
      await pipeline.seek(time, wasPlaying || wantPlay);
    },
    async setAudioTrack(_id) {
      // No-op: audio track switching is not implemented for the remux
      // strategy (the pipeline muxes only the first audio track).
    },
    async setSubtitleTrack(id) {
      // Toggle the element's own text tracks; `id` is the track index.
      const tracks = video.textTracks;
      for (let i = 0; i < tracks.length; i++) {
        tracks[i].mode = i === id ? "showing" : "disabled";
      }
    },
    getCurrentTime() {
      return video.currentTime || 0;
    },
    async destroy() {
      video.pause();
      await pipeline.destroy();
      // Detach MSE and reset the element to a clean state.
      video.removeAttribute("src");
      video.load();
    },
    getRuntimeStats() {
      return pipeline.stats();
    }
  };
}
1098
+
1099
// src/strategies/fallback/video-renderer.ts
/**
 * Paints decoded VideoFrames onto a canvas overlaid on the target element,
 * paced by a requestAnimationFrame loop against an external clock. Frames
 * arrive via `enqueue()`; the renderer drops late frames and throttles
 * early ones to stay near the clock.
 */
var VideoRenderer = class {
  /**
   * @param target the video element to cover (hidden while rendering)
   * @param clock  clock source with `now()` (seconds) and `isPlaying()`
   * @param fps    source frame rate used to pace paints (default 30)
   */
  constructor(target, clock, fps = 30) {
    this.target = target;
    this.clock = clock;
    this.paintIntervalMs = Math.max(1, 1e3 / fps);
    this.firstFrameReady = new Promise((resolve) => {
      this.resolveFirstFrame = resolve;
    });
    this.canvas = document.createElement("canvas");
    this.canvas.style.cssText = "position:absolute;left:0;top:0;width:100%;height:100%;background:black;";
    const parent = target.parentElement;
    // The canvas is absolutely positioned, so the parent must establish
    // a positioning context.
    if (parent && getComputedStyle(parent).position === "static") {
      parent.style.position = "relative";
    }
    parent?.insertBefore(this.canvas, target);
    target.style.visibility = "hidden";
    const ctx = this.canvas.getContext("2d");
    if (!ctx) throw new Error("video renderer: failed to acquire 2D context");
    this.ctx = ctx;
    this.tick = this.tick.bind(this);
    this.rafHandle = requestAnimationFrame(this.tick);
  }
  target;
  clock;
  canvas;
  ctx;
  // FIFO of decoded VideoFrames awaiting paint.
  queue = [];
  rafHandle = null;
  destroyed = false;
  framesPainted = 0;
  framesDroppedLate = 0;
  framesDroppedOverflow = 0;
  // True once a frame was painted while paused (pre-roll poster frame).
  prerolled = false;
  /** Wall-clock time of the last paint, in ms (performance.now()). */
  lastPaintWall = 0;
  /** Minimum ms between paints — paces video at roughly source fps. */
  paintIntervalMs;
  /** Resolves once the first decoded frame has been enqueued. */
  firstFrameReady;
  resolveFirstFrame;
  /** True once at least one frame has been enqueued. */
  hasFrames() {
    return this.queue.length > 0 || this.framesPainted > 0;
  }
  /** Current depth of the frame queue. Used by the decoder for backpressure. */
  queueDepth() {
    return this.queue.length;
  }
  /**
   * Soft cap for decoder backpressure. The decoder pump throttles when
   * `queueDepth() >= queueHighWater`. Set high enough that normal decode
   * bursts don't trigger the renderer's overflow-drop loop (which runs at
   * every paint), but low enough that the decoder doesn't run unboundedly
   * ahead. The hard cap in `enqueue()` is 60.
   */
  queueHighWater = 30;
  enqueue(frame) {
    if (this.destroyed) {
      // VideoFrames hold GPU/decoder resources; always close unconsumed ones.
      frame.close();
      return;
    }
    this.queue.push(frame);
    if (this.queue.length === 1 && this.framesPainted === 0) {
      this.resolveFirstFrame();
    }
    // Hard cap: drop the oldest frames beyond 60 queued.
    while (this.queue.length > 60) {
      this.queue.shift()?.close();
      this.framesDroppedOverflow++;
    }
  }
  // rAF callback: paints at most one frame per invocation, paced by
  // paintIntervalMs and corrected against the clock every 30 paints.
  tick() {
    if (this.destroyed) return;
    this.rafHandle = requestAnimationFrame(this.tick);
    if (this.queue.length === 0) return;
    const playing = this.clock.isPlaying();
    if (!playing) {
      // Paused: paint a single poster frame, then hold.
      if (!this.prerolled) {
        const head = this.queue.shift();
        this.paint(head);
        head.close();
        this.prerolled = true;
        this.lastPaintWall = performance.now();
      }
      return;
    }
    const wallNow = performance.now();
    // The -2ms slack tolerates rAF jitter around the frame interval.
    if (wallNow - this.lastPaintWall < this.paintIntervalMs - 2) return;
    if (this.queue.length === 0) return;
    // Periodic A/V drift correction (every 30th paint): drop a frame more
    // than 150ms late, or stall if more than 150ms early.
    if (this.framesPainted > 0 && this.framesPainted % 30 === 0) {
      const audioNowUs = this.clock.now() * 1e6;
      const headTs = this.queue[0].timestamp ?? 0;
      const driftUs = headTs - audioNowUs;
      if (driftUs < -15e4) {
        this.queue.shift()?.close();
        this.framesDroppedLate++;
        if (this.queue.length === 0) return;
      } else if (driftUs > 15e4) {
        return;
      }
    }
    const frame = this.queue.shift();
    this.paint(frame);
    frame.close();
    this.lastPaintWall = wallNow;
  }
  paint(frame) {
    // Resize the backing store lazily to the frame's display size.
    if (this.canvas.width !== frame.displayWidth || this.canvas.height !== frame.displayHeight) {
      this.canvas.width = frame.displayWidth;
      this.canvas.height = frame.displayHeight;
    }
    try {
      this.ctx.drawImage(frame, 0, 0, this.canvas.width, this.canvas.height);
      this.framesPainted++;
    } catch (err) {
      // Warn only once, on the very first failure, to avoid log spam.
      if (this.framesPainted === 0 && this.framesDroppedLate === 0) {
        console.warn("[avbridge] canvas drawImage failed:", err);
      }
    }
  }
  /** Discard all queued frames. Used by seek to drop stale buffers. */
  flush() {
    while (this.queue.length > 0) this.queue.shift()?.close();
    this.prerolled = false;
  }
  stats() {
    return {
      framesPainted: this.framesPainted,
      framesDroppedLate: this.framesDroppedLate,
      framesDroppedOverflow: this.framesDroppedOverflow,
      queueDepth: this.queue.length
    };
  }
  destroy() {
    this.destroyed = true;
    if (this.rafHandle != null) cancelAnimationFrame(this.rafHandle);
    this.flush();
    this.canvas.remove();
    // Restore the element hidden in the constructor.
    this.target.style.visibility = "";
  }
};
1240
+
1241
// src/strategies/fallback/audio-output.ts
/**
 * Web Audio playback sink that doubles as the playback clock. Decoded
 * sample chunks are queued while idle/paused and scheduled onto an
 * AudioContext while playing; `now()` maps the audio-context timeline
 * back to media time.
 */
var AudioOutput = class {
  ctx;
  gain;
  // "idle" | "paused" | "playing"
  state = "idle";
  /**
   * Wall-clock fallback mode. When true, this output behaves as if audio
   * is unavailable — `now()` advances from `performance.now()` instead of
   * the audio context, `schedule()` is a no-op, and `bufferAhead()` returns
   * 0 so the decoder pump never throttles on audio. (NOTE(review): an
   * earlier comment claimed Infinity here — the code returns 0; confirm no
   * caller waits for a minimum audio buffer level.)
   *
   * Set by the decoder via {@link setNoAudio} when audio decode init fails.
   * This is what lets video play even when the audio codec isn't supported
   * by the loaded libav variant.
   */
  noAudio = false;
  /** Wall-clock anchor (ms from `performance.now()`) for noAudio mode. */
  wallAnchorMs = 0;
  /** Media time at which the next sample will be scheduled. */
  mediaTimeOfNext = 0;
  /** Anchor: media time `mediaTimeOfAnchor` corresponds to ctx time `ctxTimeAtAnchor`. */
  mediaTimeOfAnchor = 0;
  ctxTimeAtAnchor = 0;
  // Chunks accepted while idle/paused, drained by start().
  pendingQueue = [];
  framesScheduled = 0;
  destroyed = false;
  constructor() {
    this.ctx = new AudioContext();
    this.gain = this.ctx.createGain();
    this.gain.connect(this.ctx.destination);
  }
  /**
   * Switch into wall-clock fallback mode. Called by the decoder when no
   * audio decoder could be initialized for the source. Once set, this
   * output drives playback time from `performance.now()` and ignores
   * any incoming audio samples.
   */
  setNoAudio() {
    this.noAudio = true;
  }
  // ── ClockSource ────────────────────────────────────────────────────────
  /** Current media time in seconds, derived from the active anchor. */
  now() {
    if (this.noAudio) {
      if (this.state === "playing") {
        return this.mediaTimeOfAnchor + (performance.now() - this.wallAnchorMs) / 1e3;
      }
      return this.mediaTimeOfAnchor;
    }
    if (this.state === "playing") {
      return this.mediaTimeOfAnchor + (this.ctx.currentTime - this.ctxTimeAtAnchor);
    }
    return this.mediaTimeOfAnchor;
  }
  isPlaying() {
    return this.state === "playing";
  }
  // ── Buffering ─────────────────────────────────────────────────────────
  /**
   * How many seconds of audio are buffered ahead of the current playback
   * position. While idle, this counts the pending queue. While playing,
   * it counts how far `mediaTimeOfNext` is ahead of `now()`.
   */
  bufferAhead() {
    if (this.noAudio) return 0;
    if (this.state === "idle") {
      let sec = 0;
      for (const c of this.pendingQueue) sec += c.durationSec;
      return sec;
    }
    return Math.max(0, this.mediaTimeOfNext - this.now());
  }
  /** True if this output is in wall-clock fallback mode (no audio decode). */
  isNoAudio() {
    return this.noAudio;
  }
  /**
   * Schedule a chunk of decoded samples. Queues internally while idle (cold
   * start or post-seek), schedules directly to the audio graph while playing.
   * In wall-clock mode, samples are silently discarded.
   *
   * @param samples    interleaved Float32 samples
   * @param channels   channel count of the interleaving
   * @param sampleRate sample rate in Hz
   */
  schedule(samples, channels, sampleRate) {
    if (this.destroyed || this.noAudio) return;
    const frameCount = samples.length / channels;
    const durationSec = frameCount / sampleRate;
    if (this.state === "idle" || this.state === "paused") {
      this.pendingQueue.push({ samples, channels, sampleRate, frameCount, durationSec });
      return;
    }
    this.scheduleNow(samples, channels, sampleRate, frameCount);
  }
  // De-interleave into an AudioBuffer and schedule it back-to-back at
  // mediaTimeOfNext (clamped so we never schedule in the past).
  scheduleNow(samples, channels, sampleRate, frameCount) {
    const buffer = this.ctx.createBuffer(channels, frameCount, sampleRate);
    for (let ch = 0; ch < channels; ch++) {
      const channelData = buffer.getChannelData(ch);
      for (let i = 0; i < frameCount; i++) {
        channelData[i] = samples[i * channels + ch];
      }
    }
    const node = this.ctx.createBufferSource();
    node.buffer = buffer;
    node.connect(this.gain);
    const ctxStart = this.ctxTimeAtAnchor + (this.mediaTimeOfNext - this.mediaTimeOfAnchor);
    const safeStart = Math.max(ctxStart, this.ctx.currentTime);
    node.start(safeStart);
    this.mediaTimeOfNext += frameCount / sampleRate;
    this.framesScheduled++;
  }
  // ── Lifecycle ─────────────────────────────────────────────────────────
  /**
   * Start (or resume) playback. On a cold start (or after a reset), drains
   * the pending queue scheduling all queued samples to play starting at
   * `ctx.currentTime + STARTUP_DELAY`. On resume from pause, just re-anchors
   * the media↔ctx time mapping and unsuspends the context.
   */
  async start() {
    if (this.destroyed || this.state === "playing") return;
    if (this.noAudio) {
      this.wallAnchorMs = performance.now();
      this.state = "playing";
      return;
    }
    if (this.ctx.state === "suspended") {
      await this.ctx.resume();
    }
    if (this.state === "paused") {
      this.ctxTimeAtAnchor = this.ctx.currentTime;
      this.state = "playing";
      // Flush anything queued while paused.
      const drain2 = this.pendingQueue;
      this.pendingQueue = [];
      for (const c of drain2) {
        this.scheduleNow(c.samples, c.channels, c.sampleRate, c.frameCount);
      }
      return;
    }
    // Cold start: small delay gives the graph time to accept the first
    // buffers without an audible glitch.
    const STARTUP_DELAY = 0.05;
    this.ctxTimeAtAnchor = this.ctx.currentTime + STARTUP_DELAY;
    this.mediaTimeOfNext = this.mediaTimeOfAnchor;
    this.state = "playing";
    const drain = this.pendingQueue;
    this.pendingQueue = [];
    for (const c of drain) {
      this.scheduleNow(c.samples, c.channels, c.sampleRate, c.frameCount);
    }
  }
  /** Pause playback. Suspends the audio context. */
  async pause() {
    if (this.state !== "playing") return;
    // Freeze the clock at the current media time before suspending.
    this.mediaTimeOfAnchor = this.now();
    this.state = "paused";
    if (this.noAudio) return;
    if (this.ctx.state === "running") {
      await this.ctx.suspend();
    }
  }
  /**
   * Reset to a new media time. Discards all queued and scheduled audio,
   * disconnects the gain node so any in-flight scheduled buffers are cut
   * off, and returns to the idle state. Used by `seek()`.
   *
   * After reset, callers should re-buffer audio (the decoder will start
   * supplying new samples) and then call `start()` to resume playback.
   */
  async reset(newMediaTime) {
    if (this.noAudio) {
      this.pendingQueue = [];
      this.mediaTimeOfAnchor = newMediaTime;
      this.wallAnchorMs = performance.now();
      this.state = "idle";
      return;
    }
    try {
      this.gain.disconnect();
    } catch {
    }
    // Fresh gain node: already-scheduled buffers stay attached to the old
    // (now disconnected) node and are silenced.
    this.gain = this.ctx.createGain();
    this.gain.connect(this.ctx.destination);
    this.pendingQueue = [];
    this.mediaTimeOfAnchor = newMediaTime;
    this.mediaTimeOfNext = newMediaTime;
    this.ctxTimeAtAnchor = this.ctx.currentTime;
    this.state = "idle";
    if (this.ctx.state === "running") {
      await this.ctx.suspend();
    }
  }
  stats() {
    return {
      framesScheduled: this.framesScheduled,
      bufferAhead: this.bufferAhead(),
      audioState: this.state,
      clockMode: this.noAudio ? "wall" : "audio"
    };
  }
  destroy() {
    this.destroyed = true;
    try {
      this.ctx.close();
    } catch {
    }
  }
};
1442
+
1443
// src/strategies/hybrid/decoder.ts
/**
 * Hybrid decode pipeline: libav.js demuxes (and decodes audio), while
 * WebCodecs decodes video. Decoded video frames go to `opts.renderer`,
 * decoded audio samples to `opts.audio`.
 *
 * A monotonically increasing `pumpToken` invalidates in-flight pump loops
 * across seek/destroy; each loop captures its token and bails when it no
 * longer matches.
 *
 * @param opts `{ context, filename, source, renderer, audio }`
 * @returns `{ onFatalError, destroy, seek, stats }`
 * @throws when no decoder (video or audio) could be initialized
 */
async function startHybridDecoder(opts) {
  const variant = pickLibavVariant(opts.context);
  const libav = await loadLibav(variant);
  const bridge = await loadBridge();
  const { prepareLibavInput } = await import('./libav-http-reader-NQJVY273.js');
  const inputHandle = await prepareLibavInput(libav, opts.filename, opts.source);
  const readPkt = await libav.av_packet_alloc();
  const [fmt_ctx, streams] = await libav.ff_init_demuxer_file(opts.filename);
  // Only the first video and first audio stream are used.
  const videoStream = streams.find((s) => s.codec_type === libav.AVMEDIA_TYPE_VIDEO) ?? null;
  const audioStream = streams.find((s) => s.codec_type === libav.AVMEDIA_TYPE_AUDIO) ?? null;
  if (!videoStream && !audioStream) {
    throw new Error("hybrid decoder: file has no decodable streams");
  }
  // Fatal errors fire at most once; if the handler attaches late it is
  // re-notified in onFatalError().
  let fatalHandler = null;
  let fatalFired = false;
  function fireFatal(reason) {
    if (fatalFired) return;
    fatalFired = true;
    fatalHandler?.(reason);
  }
  let videoDecoder = null;
  let videoTimeBase;
  if (videoStream) {
    try {
      const config = await bridge.videoStreamToConfig(libav, videoStream);
      if (!config) throw new Error("bridge returned null config");
      const supported = await VideoDecoder.isConfigSupported(config);
      if (!supported.supported) throw new Error(`VideoDecoder does not support config: ${JSON.stringify(config)}`);
      videoDecoder = new VideoDecoder({
        output: (frame) => {
          opts.renderer.enqueue(frame);
          // `videoFramesDecoded` is declared later in this function; the
          // callback only runs once decoding starts, so no TDZ hazard.
          videoFramesDecoded++;
        },
        error: (err) => {
          console.error("[avbridge] WebCodecs VideoDecoder error:", err);
          fireFatal(`WebCodecs VideoDecoder error: ${err.message}`);
        }
      });
      videoDecoder.configure(config);
      if (videoStream.time_base_num && videoStream.time_base_den) {
        videoTimeBase = [videoStream.time_base_num, videoStream.time_base_den];
      }
    } catch (err) {
      // Video init failure is fatal for this strategy.
      console.error("[avbridge] hybrid: failed to init WebCodecs VideoDecoder:", err);
      fireFatal(`WebCodecs VideoDecoder init failed: ${err.message}`);
      await inputHandle.detach().catch(() => {
      });
      throw err;
    }
  }
  let audioDec = null;
  let audioTimeBase;
  if (audioStream) {
    try {
      const [, c, pkt, frame] = await libav.ff_init_decoder(audioStream.codec_id, {
        codecpar: audioStream.codecpar
      });
      audioDec = { c, pkt, frame };
      if (audioStream.time_base_num && audioStream.time_base_den) {
        audioTimeBase = [audioStream.time_base_num, audioStream.time_base_den];
      }
    } catch (err) {
      // Audio init failure is tolerated: playback falls back to a
      // wall-clock timing source (see setNoAudio below).
      console.warn(
        "[avbridge] hybrid: audio decoder unavailable for this codec \u2014 playing video with wall-clock timing:",
        err.message
      );
    }
  }
  if (!audioDec) {
    opts.audio.setNoAudio();
  }
  if (!videoDecoder && !audioDec) {
    await inputHandle.detach().catch(() => {
    });
    throw new Error("hybrid decoder: could not initialize any decoders");
  }
  let destroyed = false;
  let pumpToken = 0;
  let pumpRunning = null;
  let packetsRead = 0;
  let videoFramesDecoded = 0;
  let audioFramesDecoded = 0;
  let videoChunksFed = 0;
  // Synthetic timestamps (µs) handed out when packets/frames carry no PTS.
  let syntheticVideoUs = 0;
  let syntheticAudioUs = 0;
  const videoTrackInfo = opts.context.videoTracks.find((t) => t.id === videoStream?.index);
  const videoFps = videoTrackInfo?.fps && videoTrackInfo.fps > 0 ? videoTrackInfo.fps : 30;
  const videoFrameStepUs = Math.max(1, Math.round(1e6 / videoFps));
  // Demux → decode loop. Exits when destroyed, superseded (token mismatch),
  // EOF, or a non-EAGAIN demux error occurs.
  async function pumpLoop(myToken) {
    while (!destroyed && myToken === pumpToken) {
      let readErr;
      let packets;
      try {
        [readErr, packets] = await libav.ff_read_frame_multi(fmt_ctx, readPkt, {
          limit: 16 * 1024
        });
      } catch (err) {
        console.error("[avbridge] hybrid ff_read_frame_multi failed:", err);
        return;
      }
      if (myToken !== pumpToken || destroyed) return;
      const videoPackets = videoStream ? packets[videoStream.index] : void 0;
      const audioPackets = audioStream ? packets[audioStream.index] : void 0;
      if (videoDecoder && videoPackets && videoPackets.length > 0) {
        for (const pkt of videoPackets) {
          if (myToken !== pumpToken || destroyed) return;
          // Normalize PTS to µs (or synthesize one) before WebCodecs.
          sanitizePacketTimestamp(pkt, () => {
            const ts = syntheticVideoUs;
            syntheticVideoUs += videoFrameStepUs;
            return ts;
          }, videoTimeBase);
          try {
            const chunk = bridge.packetToEncodedVideoChunk(pkt, videoStream);
            videoDecoder.decode(chunk);
            videoChunksFed++;
          } catch (err) {
            // Failing on the very first chunk means the bitstream format
            // is unusable — treat as fatal. Later failures are skipped.
            if (videoChunksFed === 0) {
              console.warn("[avbridge] hybrid: packetToEncodedVideoChunk failed:", err);
              fireFatal(`WebCodecs chunk creation failed: ${err.message}`);
              return;
            }
          }
        }
      }
      if (audioDec && audioPackets && audioPackets.length > 0) {
        await decodeAudioBatch(audioPackets, myToken);
      }
      packetsRead += (videoPackets?.length ?? 0) + (audioPackets?.length ?? 0);
      // Backpressure: wait while the video decode queue, audio buffer, or
      // renderer frame queue is saturated.
      while (!destroyed && myToken === pumpToken && (videoDecoder && videoDecoder.decodeQueueSize > 10 || opts.audio.bufferAhead() > 2 || opts.renderer.queueDepth() >= opts.renderer.queueHighWater)) {
        await new Promise((r) => setTimeout(r, 50));
      }
      if (readErr === libav.AVERROR_EOF) {
        // Drain both decoders at end of stream.
        if (videoDecoder && videoDecoder.state === "configured") {
          try {
            await videoDecoder.flush();
          } catch {
          }
        }
        if (audioDec) await decodeAudioBatch([], myToken, true);
        return;
      }
      if (readErr && readErr !== 0 && readErr !== -libav.EAGAIN) {
        console.warn("[avbridge] hybrid ff_read_frame_multi returned", readErr);
        return;
      }
    }
  }
  // Decode a batch of audio packets with libav and hand interleaved
  // Float32 samples to the audio output. `flush=true` drains the decoder.
  async function decodeAudioBatch(pkts, myToken, flush = false) {
    if (!audioDec || destroyed || myToken !== pumpToken) return;
    let frames;
    try {
      frames = await libav.ff_decode_multi(
        audioDec.c,
        audioDec.pkt,
        audioDec.frame,
        pkts,
        flush ? { fin: true, ignoreErrors: true } : { ignoreErrors: true }
      );
    } catch (err) {
      console.error("[avbridge] hybrid audio decode failed:", err);
      return;
    }
    if (myToken !== pumpToken || destroyed) return;
    for (const f of frames) {
      if (myToken !== pumpToken || destroyed) return;
      sanitizeFrameTimestamp(
        f,
        () => {
          // Advance the synthetic clock by this frame's real duration.
          const ts = syntheticAudioUs;
          const samples2 = f.nb_samples ?? 1024;
          const sampleRate = f.sample_rate ?? 44100;
          syntheticAudioUs += Math.round(samples2 * 1e6 / sampleRate);
          return ts;
        },
        audioTimeBase
      );
      const samples = libavFrameToInterleavedFloat32(f);
      if (samples) {
        opts.audio.schedule(samples.data, samples.channels, samples.sampleRate);
        audioFramesDecoded++;
      }
    }
  }
  pumpToken = 1;
  pumpRunning = pumpLoop(pumpToken).catch(
    (err) => console.error("[avbridge] hybrid pump failed:", err)
  );
  return {
    onFatalError(handler) {
      fatalHandler = handler;
      // Replay a fatal error that occurred before the handler attached.
      if (fatalFired) handler("WebCodecs decode failed (error occurred before handler attached)");
    },
    // Stop the pump, then free decoders, packet, demuxer, and input —
    // each step best-effort so later cleanup still runs.
    async destroy() {
      destroyed = true;
      pumpToken++;
      try {
        await pumpRunning;
      } catch {
      }
      try {
        if (videoDecoder && videoDecoder.state !== "closed") videoDecoder.close();
      } catch {
      }
      try {
        if (audioDec) await libav.ff_free_decoder?.(audioDec.c, audioDec.pkt, audioDec.frame);
      } catch {
      }
      try {
        await libav.av_packet_free?.(readPkt);
      } catch {
      }
      try {
        await libav.avformat_close_input_js(fmt_ctx);
      } catch {
      }
      try {
        await inputHandle.detach();
      } catch {
      }
    },
    // Invalidate the running pump, seek the demuxer (backward to the
    // nearest keyframe), flush both decoders, and restart the pump.
    async seek(timeSec) {
      const newToken = ++pumpToken;
      if (pumpRunning) {
        try {
          await pumpRunning;
        } catch {
        }
      }
      if (destroyed) return;
      try {
        const tsUs = Math.floor(timeSec * 1e6);
        // av_seek_frame takes a 64-bit timestamp as a (lo, hi) word pair.
        const [tsLo, tsHi] = libav.f64toi64 ? libav.f64toi64(tsUs) : [tsUs | 0, Math.floor(tsUs / 4294967296)];
        await libav.av_seek_frame(
          fmt_ctx,
          -1,
          tsLo,
          tsHi,
          libav.AVSEEK_FLAG_BACKWARD ?? 0
        );
      } catch (err) {
        console.warn("[avbridge] hybrid av_seek_frame failed:", err);
      }
      try {
        if (videoDecoder && videoDecoder.state === "configured") {
          await videoDecoder.flush();
        }
      } catch {
      }
      try {
        if (audioDec) await libav.avcodec_flush_buffers?.(audioDec.c);
      } catch {
      }
      // Re-base the synthetic clocks at the seek target.
      syntheticVideoUs = Math.round(timeSec * 1e6);
      syntheticAudioUs = Math.round(timeSec * 1e6);
      pumpRunning = pumpLoop(newToken).catch(
        (err) => console.error("[avbridge] hybrid pump failed (post-seek):", err)
      );
    },
    stats() {
      return {
        decoderType: "webcodecs-hybrid",
        packetsRead,
        videoFramesDecoded,
        videoChunksFed,
        audioFramesDecoded,
        videoDecodeQueueSize: videoDecoder?.decodeQueueSize ?? 0,
        ...opts.renderer.stats(),
        ...opts.audio.stats()
      };
    }
  };
}
1716
/**
 * Rewrite a libav packet's PTS in-place to a microsecond timebase.
 * Invalid or overflowing timestamps are replaced with a synthetic value
 * from `nextUs`. `fallbackTimeBase` is the `[num, den]` pair used to
 * convert the original 64-bit PTS; defaults to microseconds.
 */
function sanitizePacketTimestamp(pkt, nextUs, fallbackTimeBase) {
  const low = pkt.pts ?? 0;
  const high = pkt.ptshi ?? 0;
  const rebase = (value, hiWord) => {
    pkt.pts = value;
    pkt.ptshi = hiWord;
    pkt.time_base_num = 1;
    pkt.time_base_den = 1e6;
  };
  // AV_NOPTS_VALUE (0x8000000000000000) splits into hi=INT32_MIN, lo=0.
  if ((high === -2147483648 && low === 0) || !Number.isFinite(low)) {
    rebase(nextUs(), 0);
    return;
  }
  const [num, den] = fallbackTimeBase ?? [1, 1e6];
  const combined = high * 4294967296 + low;
  const micros = Math.round((combined * 1e6 * num) / den);
  if (Number.isFinite(micros) && Math.abs(micros) <= Number.MAX_SAFE_INTEGER) {
    rebase(micros, micros < 0 ? -1 : 0);
    return;
  }
  // Conversion overflowed — fall back to a synthetic timestamp.
  rebase(nextUs(), 0);
}
1744
// Rewrite a decoded frame's split 64-bit PTS (pts = low word, ptshi = high
// word) as microseconds, in place. A missing PTS (AV_NOPTS_VALUE: hi =
// INT32_MIN, lo = 0), a non-finite low word, or an out-of-safe-range product
// is replaced with a synthetic timestamp from nextUs().
function sanitizeFrameTimestamp(frame, nextUs, fallbackTimeBase) {
  const lo = frame.pts ?? 0;
  const hi = frame.ptshi ?? 0;
  const missing = (hi === -2147483648 && lo === 0) || !Number.isFinite(lo);
  if (!missing) {
    const [num, den] = fallbackTimeBase ?? [1, 1e6];
    const micros = Math.round((hi * 4294967296 + lo) * 1e6 * num / den);
    if (Number.isFinite(micros) && Math.abs(micros) <= Number.MAX_SAFE_INTEGER) {
      frame.pts = micros;
      frame.ptshi = micros < 0 ? -1 : 0;
      return;
    }
  }
  // Missing or unrepresentable PTS: substitute a synthetic timestamp.
  frame.pts = nextUs();
  frame.ptshi = 0;
}
1766
// AVSampleFormat values used by the converter below; the names and numeric
// values mirror FFmpeg's libavutil/samplefmt.h. Interleaved layouts first,
// then the planar ("P", one buffer per channel) variants. Value 4
// (AV_SAMPLE_FMT_DBL in FFmpeg) has no constant here: double-precision
// frames fall through to the converter's unsupported default case.
var AV_SAMPLE_FMT_U8 = 0;
var AV_SAMPLE_FMT_S16 = 1;
var AV_SAMPLE_FMT_S32 = 2;
var AV_SAMPLE_FMT_FLT = 3;
var AV_SAMPLE_FMT_U8P = 5;
var AV_SAMPLE_FMT_S16P = 6;
var AV_SAMPLE_FMT_S32P = 7;
var AV_SAMPLE_FMT_FLTP = 8;
1774
// Convert a decoded libav audio frame into interleaved Float32 PCM in
// [-1, 1). Handles the interleaved and planar u8/s16/s32/float sample
// formats; returns null for empty frames and for unsupported formats.
function libavFrameToInterleavedFloat32(frame) {
  const channels = frame.channels ?? frame.ch_layout_nb_channels ?? 1;
  const sampleRate = frame.sample_rate ?? 44100;
  const nbSamples = frame.nb_samples ?? 0;
  if (nbSamples === 0) return null;
  const out = new Float32Array(nbSamples * channels);
  switch (frame.format) {
    case AV_SAMPLE_FMT_FLTP: {
      // Planar float: interleave one plane per channel.
      const planes = ensurePlanes(frame.data, channels);
      for (let ch = 0; ch < channels; ch++) {
        const plane = asFloat32(planes[ch]);
        for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = plane[i];
      }
      return { data: out, channels, sampleRate };
    }
    case AV_SAMPLE_FMT_FLT: {
      // Already interleaved float: straight copy.
      const flat = asFloat32(frame.data);
      for (let i = 0; i < nbSamples * channels; i++) out[i] = flat[i];
      return { data: out, channels, sampleRate };
    }
    case AV_SAMPLE_FMT_S16P: {
      const planes = ensurePlanes(frame.data, channels);
      for (let ch = 0; ch < channels; ch++) {
        const plane = asInt16(planes[ch]);
        for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = plane[i] / 32768;
      }
      return { data: out, channels, sampleRate };
    }
    case AV_SAMPLE_FMT_S16: {
      const flat = asInt16(frame.data);
      for (let i = 0; i < nbSamples * channels; i++) out[i] = flat[i] / 32768;
      return { data: out, channels, sampleRate };
    }
    case AV_SAMPLE_FMT_S32P: {
      const planes = ensurePlanes(frame.data, channels);
      for (let ch = 0; ch < channels; ch++) {
        const plane = asInt32(planes[ch]);
        for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = plane[i] / 2147483648;
      }
      return { data: out, channels, sampleRate };
    }
    case AV_SAMPLE_FMT_S32: {
      const flat = asInt32(frame.data);
      for (let i = 0; i < nbSamples * channels; i++) out[i] = flat[i] / 2147483648;
      return { data: out, channels, sampleRate };
    }
    case AV_SAMPLE_FMT_U8P: {
      // Unsigned 8-bit is biased around 128.
      const planes = ensurePlanes(frame.data, channels);
      for (let ch = 0; ch < channels; ch++) {
        const plane = asUint8(planes[ch]);
        for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = (plane[i] - 128) / 128;
      }
      return { data: out, channels, sampleRate };
    }
    case AV_SAMPLE_FMT_U8: {
      const flat = asUint8(frame.data);
      for (let i = 0; i < nbSamples * channels; i++) out[i] = (flat[i] - 128) / 128;
      return { data: out, channels, sampleRate };
    }
    default:
      // Consistency fix: the fallback-strategy converter warns once on an
      // unsupported format; previously this path dropped audio silently,
      // making hybrid-strategy silence undiagnosable.
      if (!globalThis.__avbridgeLoggedSampleFmt) {
        globalThis.__avbridgeLoggedSampleFmt = frame.format;
        console.warn(`[avbridge] unsupported audio sample format from libav: ${frame.format}`);
      }
      return null;
  }
}
1837
// Planar audio data may arrive either as an array of per-channel planes or
// as one flat typed array; normalize both shapes to an array of planes.
// The flat case is split evenly into `channels` consecutive views.
function ensurePlanes(data, channels) {
  if (Array.isArray(data)) return data;
  const samplesPerChannel = Math.floor(data.length / channels);
  return Array.from({ length: channels }, (_, ch) =>
    data.subarray
      ? data.subarray(ch * samplesPerChannel, (ch + 1) * samplesPerChannel)
      : data
  );
}
1848
// Zero-copy reinterpretation of a typed array's backing bytes as Float32.
function asFloat32(x) {
  return x instanceof Float32Array
    ? x
    : new Float32Array(x.buffer, x.byteOffset, x.byteLength / 4);
}
1853
// Zero-copy reinterpretation of a typed array's backing bytes as Int16.
function asInt16(x) {
  return x instanceof Int16Array
    ? x
    : new Int16Array(x.buffer, x.byteOffset, x.byteLength / 2);
}
1858
// Zero-copy reinterpretation of a typed array's backing bytes as Int32.
function asInt32(x) {
  return x instanceof Int32Array
    ? x
    : new Int32Array(x.buffer, x.byteOffset, x.byteLength / 4);
}
1863
// Zero-copy reinterpretation of a typed array's backing bytes as Uint8.
function asUint8(x) {
  return x instanceof Uint8Array
    ? x
    : new Uint8Array(x.buffer, x.byteOffset, x.byteLength);
}
1868
// Lazily import the libavjs-webcodecs-bridge wrapper chunk. Kept dynamic so
// the optional peer dependency is only resolved when the hybrid strategy is
// actually used.
//
// Returns the bridge module; throws a descriptive Error when the chunk (or
// the peer deps behind it) cannot be loaded.
async function loadBridge() {
  try {
    const wrapper = await import('./libav-import-2JURFHEW.js');
    return wrapper.libavBridge;
  } catch (err) {
    // Match loadBridge2's wording so both strategies surface the same
    // actionable install hint, and keep the original failure as `cause`.
    throw new Error(
      `failed to load libavjs-webcodecs-bridge \u2014 install the optional peer deps with: npm i libavjs-webcodecs-bridge @libav.js/variant-webcodecs. (${err.message})`,
      { cause: err }
    );
  }
}
1878
+
1879
+ // src/strategies/hybrid/index.ts
1880
// Minimum seconds of buffered audio before hybrid playback is considered
// ready to (re)start.
var READY_AUDIO_BUFFER_SECONDS = 0.3;
// Stop waiting for buffers after this many seconds and proceed anyway.
var READY_TIMEOUT_SECONDS = 10;
1882
// Build a playback session for the "hybrid" strategy: video decoded via
// WebCodecs, audio via libav, both fed by startHybridDecoder. `target` is
// the media element the session renders into; its currentTime/duration
// accessors are shadowed so callers observe the session's own clock.
async function createHybridSession(ctx, target) {
  // Lazy chunk import keeps source normalization out of the main bundle.
  const { normalizeSource: normalizeSource2 } = await import('./source-ZFS4H7J3.js');
  const source = await normalizeSource2(ctx.source);
  const fps = ctx.videoTracks[0]?.fps ?? 30;
  const audio = new AudioOutput();
  const renderer = new VideoRenderer(target, audio, fps);
  let handles;
  try {
    handles = await startHybridDecoder({
      source,
      filename: ctx.name ?? "input.bin",
      context: ctx,
      renderer,
      audio
    });
  } catch (err) {
    // Decoder failed to start: release the outputs created above, then
    // rethrow so the caller can escalate to another strategy.
    audio.destroy();
    renderer.destroy();
    throw err;
  }
  // Shadow the element's native currentTime with the audio clock; assigning
  // it kicks off an async seek (fire-and-forget by design).
  Object.defineProperty(target, "currentTime", {
    configurable: true,
    get: () => audio.now(),
    set: (v) => {
      void doSeek(v);
    }
  });
  if (ctx.duration && Number.isFinite(ctx.duration)) {
    Object.defineProperty(target, "duration", {
      configurable: true,
      get: () => ctx.duration ?? NaN
    });
  }
  // Poll until audio is sufficiently buffered (or the file has no audio)
  // AND at least one video frame is queued, or give up after the timeout.
  async function waitForBuffer() {
    const start = performance.now();
    while (true) {
      const audioReady = audio.isNoAudio() || audio.bufferAhead() >= READY_AUDIO_BUFFER_SECONDS;
      if (audioReady && renderer.hasFrames()) {
        return;
      }
      if ((performance.now() - start) / 1e3 > READY_TIMEOUT_SECONDS) return;
      await new Promise((r) => setTimeout(r, 50));
    }
  }
  // Pause, seek the decoder, reset the audio clock, flush queued video;
  // then rebuffer and resume if playback was active before the seek.
  async function doSeek(timeSec) {
    const wasPlaying = audio.isPlaying();
    await audio.pause().catch(() => {
    });
    await handles.seek(timeSec).catch(
      (err) => console.warn("[avbridge] hybrid decoder seek failed:", err)
    );
    await audio.reset(timeSec);
    renderer.flush();
    if (wasPlaying) {
      await waitForBuffer();
      await audio.start();
    }
  }
  // Forward decoder fatal errors to whichever handler the player registers.
  let fatalErrorHandler = null;
  handles.onFatalError((reason) => fatalErrorHandler?.(reason));
  return {
    strategy: "hybrid",
    async play() {
      if (!audio.isPlaying()) {
        await waitForBuffer();
        await audio.start();
      }
    },
    pause() {
      void audio.pause();
    },
    async seek(time) {
      await doSeek(time);
    },
    // Track switching is not implemented for this strategy.
    async setAudioTrack(_id) {
    },
    async setSubtitleTrack(_id) {
    },
    getCurrentTime() {
      return audio.now();
    },
    onFatalError(handler) {
      fatalErrorHandler = handler;
    },
    async destroy() {
      await handles.destroy();
      renderer.destroy();
      audio.destroy();
      try {
        // Restore the element's native currentTime/duration accessors.
        delete target.currentTime;
        delete target.duration;
      } catch {
      }
    },
    getRuntimeStats() {
      return handles.stats();
    }
  };
}
1981
+
1982
+ // src/strategies/fallback/decoder.ts
1983
// Start the full-software fallback decoder: demux and decode both streams
// with libav.js (WASM), pushing video frames to opts.renderer and PCM audio
// to opts.audio. Returns handles for destroy/seek/stats. A token-guarded
// pump loop lets seek() retire the running loop before restarting it.
async function startDecoder(opts) {
  const variant = pickLibavVariant(opts.context);
  const libav = await loadLibav(variant);
  const bridge = await loadBridge2();
  const { prepareLibavInput } = await import('./libav-http-reader-NQJVY273.js');
  const inputHandle = await prepareLibavInput(libav, opts.filename, opts.source);
  const readPkt = await libav.av_packet_alloc();
  const [fmt_ctx, streams] = await libav.ff_init_demuxer_file(opts.filename);
  // First video and first audio stream only; others are ignored.
  const videoStream = streams.find((s) => s.codec_type === libav.AVMEDIA_TYPE_VIDEO) ?? null;
  const audioStream = streams.find((s) => s.codec_type === libav.AVMEDIA_TYPE_AUDIO) ?? null;
  if (!videoStream && !audioStream) {
    throw new Error("fallback decoder: file has no decodable streams");
  }
  let videoDec = null;
  let audioDec = null;
  let videoTimeBase;
  let audioTimeBase;
  // Decoder init failures are tolerated per-stream: video failure is an
  // error, audio failure downgrades to video-only playback.
  if (videoStream) {
    try {
      const [, c, pkt, frame] = await libav.ff_init_decoder(videoStream.codec_id, {
        codecpar: videoStream.codecpar
      });
      videoDec = { c, pkt, frame };
      if (videoStream.time_base_num && videoStream.time_base_den) {
        videoTimeBase = [videoStream.time_base_num, videoStream.time_base_den];
      }
    } catch (err) {
      console.error("[avbridge] failed to init video decoder:", err);
    }
  }
  if (audioStream) {
    try {
      const [, c, pkt, frame] = await libav.ff_init_decoder(audioStream.codec_id, {
        codecpar: audioStream.codecpar
      });
      audioDec = { c, pkt, frame };
      if (audioStream.time_base_num && audioStream.time_base_den) {
        audioTimeBase = [audioStream.time_base_num, audioStream.time_base_den];
      }
    } catch (err) {
      console.warn(
        "[avbridge] fallback: audio decoder unavailable \u2014 playing video with wall-clock timing:",
        err.message
      );
    }
  }
  if (!audioDec) {
    opts.audio.setNoAudio();
  }
  // Neither decoder came up: clean up the input and fail with a codec list
  // plus a hint when the slim "webcodecs" libav variant is to blame.
  if (!videoDec && !audioDec) {
    await inputHandle.detach().catch(() => {
    });
    const codecs = [
      videoStream ? `video: ${opts.context.videoTracks[0]?.codec ?? "unknown"}` : null,
      audioStream ? `audio: ${opts.context.audioTracks[0]?.codec ?? "unknown"}` : null
    ].filter(Boolean).join(", ");
    const hint = variant === "webcodecs" ? ` The "${variant}" libav variant does not include software decoders for these codecs. Try the custom "avbridge" variant (scripts/build-libav.sh) for broader codec support, or use a lighter strategy (native, remux, hybrid) instead.` : "";
    throw new Error(
      `fallback decoder: could not initialize any libav decoders (${codecs}).${hint}`
    );
  }
  // Pump state. pumpToken invalidates an in-flight loop on seek/destroy;
  // synthetic*Us provide monotonic timestamps when the file's PTS are bad.
  let destroyed = false;
  let pumpToken = 0;
  let pumpRunning = null;
  let packetsRead = 0;
  let videoFramesDecoded = 0;
  let audioFramesDecoded = 0;
  let syntheticVideoUs = 0;
  let syntheticAudioUs = 0;
  const videoTrackInfo = opts.context.videoTracks.find((t) => t.id === videoStream?.index);
  const videoFps = videoTrackInfo?.fps && videoTrackInfo.fps > 0 ? videoTrackInfo.fps : 30;
  const videoFrameStepUs = Math.max(1, Math.round(1e6 / videoFps));
  // Main demux/decode loop. Exits when destroyed, superseded by a newer
  // token, at EOF (after flushing both decoders), or on a fatal read error.
  async function pumpLoop(myToken) {
    while (!destroyed && myToken === pumpToken) {
      let readErr;
      let packets;
      try {
        [readErr, packets] = await libav.ff_read_frame_multi(fmt_ctx, readPkt, {
          limit: 16 * 1024
        });
      } catch (err) {
        console.error("[avbridge] ff_read_frame_multi failed:", err);
        return;
      }
      if (myToken !== pumpToken || destroyed) return;
      const videoPackets = videoStream ? packets[videoStream.index] : void 0;
      const audioPackets = audioStream ? packets[audioStream.index] : void 0;
      if (videoDec && videoPackets && videoPackets.length > 0) {
        await decodeVideoBatch(videoPackets, myToken);
      }
      if (myToken !== pumpToken || destroyed) return;
      if (audioDec && audioPackets && audioPackets.length > 0) {
        await decodeAudioBatch(audioPackets, myToken);
      }
      packetsRead += (videoPackets?.length ?? 0) + (audioPackets?.length ?? 0);
      // Backpressure: stall while >2s of audio is buffered or the video
      // queue is at its high-water mark.
      while (!destroyed && myToken === pumpToken && (opts.audio.bufferAhead() > 2 || opts.renderer.queueDepth() >= opts.renderer.queueHighWater)) {
        await new Promise((r) => setTimeout(r, 50));
      }
      if (readErr === libav.AVERROR_EOF) {
        // Drain both decoders before stopping.
        if (videoDec) await decodeVideoBatch(
          [],
          myToken,
          /*flush*/
          true
        );
        if (audioDec) await decodeAudioBatch(
          [],
          myToken,
          /*flush*/
          true
        );
        return;
      }
      if (readErr && readErr !== 0 && readErr !== -libav.EAGAIN) {
        console.warn("[avbridge] ff_read_frame_multi returned", readErr);
        return;
      }
    }
  }
  // Decode a batch of video packets and enqueue the resulting VideoFrames.
  // Timestamps are normalized to microseconds, with synthetic fallbacks
  // stepped at the nominal frame rate.
  async function decodeVideoBatch(pkts, myToken, flush = false) {
    if (!videoDec || destroyed || myToken !== pumpToken) return;
    let frames;
    try {
      frames = await libav.ff_decode_multi(
        videoDec.c,
        videoDec.pkt,
        videoDec.frame,
        pkts,
        flush ? { fin: true, ignoreErrors: true } : { ignoreErrors: true }
      );
    } catch (err) {
      console.error("[avbridge] video decode batch failed:", err);
      return;
    }
    if (myToken !== pumpToken || destroyed) return;
    for (const f of frames) {
      if (myToken !== pumpToken || destroyed) return;
      const bridgeOpts = sanitizeFrameTimestamp2(
        f,
        () => {
          const ts = syntheticVideoUs;
          syntheticVideoUs += videoFrameStepUs;
          return ts;
        },
        videoTimeBase
      );
      try {
        const vf = bridge.laFrameToVideoFrame(f, bridgeOpts);
        opts.renderer.enqueue(vf);
        videoFramesDecoded++;
      } catch (err) {
        // Only log the very first conversion failure to avoid log spam.
        if (videoFramesDecoded === 0) {
          console.warn("[avbridge] laFrameToVideoFrame failed:", err);
        }
      }
    }
  }
  // Decode a batch of audio packets, convert to interleaved Float32 and
  // hand the samples to the audio output. Synthetic timestamps advance by
  // the actual sample count of each frame.
  async function decodeAudioBatch(pkts, myToken, flush = false) {
    if (!audioDec || destroyed || myToken !== pumpToken) return;
    let frames;
    try {
      frames = await libav.ff_decode_multi(
        audioDec.c,
        audioDec.pkt,
        audioDec.frame,
        pkts,
        flush ? { fin: true, ignoreErrors: true } : { ignoreErrors: true }
      );
    } catch (err) {
      console.error("[avbridge] audio decode batch failed:", err);
      return;
    }
    if (myToken !== pumpToken || destroyed) return;
    for (const f of frames) {
      if (myToken !== pumpToken || destroyed) return;
      sanitizeFrameTimestamp2(
        f,
        () => {
          const ts = syntheticAudioUs;
          const samples2 = f.nb_samples ?? 1024;
          const sampleRate = f.sample_rate ?? 44100;
          syntheticAudioUs += Math.round(samples2 * 1e6 / sampleRate);
          return ts;
        },
        audioTimeBase
      );
      const samples = libavFrameToInterleavedFloat322(f);
      if (samples) {
        opts.audio.schedule(samples.data, samples.channels, samples.sampleRate);
        audioFramesDecoded++;
      }
    }
  }
  // Kick off the initial pump.
  pumpToken = 1;
  pumpRunning = pumpLoop(pumpToken).catch(
    (err) => console.error("[avbridge] decoder pump failed:", err)
  );
  return {
    // Stop the pump and free all libav resources; safe to call once.
    async destroy() {
      destroyed = true;
      pumpToken++;
      try {
        await pumpRunning;
      } catch {
      }
      try {
        if (videoDec) await libav.ff_free_decoder?.(videoDec.c, videoDec.pkt, videoDec.frame);
      } catch {
      }
      try {
        if (audioDec) await libav.ff_free_decoder?.(audioDec.c, audioDec.pkt, audioDec.frame);
      } catch {
      }
      try {
        await libav.av_packet_free?.(readPkt);
      } catch {
      }
      try {
        await libav.avformat_close_input_js(fmt_ctx);
      } catch {
      }
      try {
        await inputHandle.detach();
      } catch {
      }
    },
    // Retire the current pump, seek the demuxer (backward-biased so decode
    // restarts at a keyframe), flush decoder state, reset the synthetic
    // clocks to the target time, then restart the pump.
    async seek(timeSec) {
      const newToken = ++pumpToken;
      if (pumpRunning) {
        try {
          await pumpRunning;
        } catch {
        }
      }
      if (destroyed) return;
      try {
        const tsUs = Math.floor(timeSec * 1e6);
        // Split the microsecond target into the lo/hi i64 halves libav wants.
        const [tsLo, tsHi] = libav.f64toi64 ? libav.f64toi64(tsUs) : [tsUs | 0, Math.floor(tsUs / 4294967296)];
        await libav.av_seek_frame(
          fmt_ctx,
          -1,
          tsLo,
          tsHi,
          libav.AVSEEK_FLAG_BACKWARD ?? 0
        );
      } catch (err) {
        console.warn("[avbridge] av_seek_frame failed:", err);
      }
      try {
        if (videoDec) await libav.avcodec_flush_buffers?.(videoDec.c);
      } catch {
      }
      try {
        if (audioDec) await libav.avcodec_flush_buffers?.(audioDec.c);
      } catch {
      }
      syntheticVideoUs = Math.round(timeSec * 1e6);
      syntheticAudioUs = Math.round(timeSec * 1e6);
      pumpRunning = pumpLoop(newToken).catch(
        (err) => console.error("[avbridge] decoder pump failed (post-seek):", err)
      );
    },
    // Snapshot of counters plus renderer/audio stats.
    stats() {
      return {
        decoderType: "libav-wasm",
        packetsRead,
        videoFramesDecoded,
        audioFramesDecoded,
        ...opts.renderer.stats(),
        ...opts.audio.stats()
      };
    }
  };
}
2257
// Rewrite a decoded frame's split 64-bit PTS (pts = low word, ptshi = high
// word) as microseconds, in place, and return the bridge options naming the
// resulting 1/1e6 time base. Missing (AV_NOPTS_VALUE), non-finite, or
// out-of-safe-range timestamps are replaced via nextUs().
function sanitizeFrameTimestamp2(frame, nextUs, fallbackTimeBase) {
  const lo = frame.pts ?? 0;
  const hi = frame.ptshi ?? 0;
  const missing = (hi === -2147483648 && lo === 0) || !Number.isFinite(lo);
  if (!missing) {
    const [num, den] = fallbackTimeBase ?? [1, 1e6];
    const micros = Math.round((hi * 4294967296 + lo) * 1e6 * num / den);
    if (Number.isFinite(micros) && Math.abs(micros) <= Number.MAX_SAFE_INTEGER) {
      frame.pts = micros;
      frame.ptshi = micros < 0 ? -1 : 0;
      return { timeBase: [1, 1e6] };
    }
  }
  // Missing or unrepresentable PTS: substitute a synthetic timestamp.
  frame.pts = nextUs();
  frame.ptshi = 0;
  return { timeBase: [1, 1e6] };
}
2280
// AVSampleFormat values for the fallback-strategy converter (bundler-renamed
// duplicates of the hybrid set); names/values mirror FFmpeg's
// libavutil/samplefmt.h. Interleaved layouts first, then the planar ("P")
// variants. Value 4 (AV_SAMPLE_FMT_DBL) has no constant here, so
// double-precision frames hit the converter's unsupported default case.
var AV_SAMPLE_FMT_U82 = 0;
var AV_SAMPLE_FMT_S162 = 1;
var AV_SAMPLE_FMT_S322 = 2;
var AV_SAMPLE_FMT_FLT2 = 3;
var AV_SAMPLE_FMT_U8P2 = 5;
var AV_SAMPLE_FMT_S16P2 = 6;
var AV_SAMPLE_FMT_S32P2 = 7;
var AV_SAMPLE_FMT_FLTP2 = 8;
2288
// Convert a decoded libav audio frame into interleaved Float32 PCM in
// [-1, 1). Handles the interleaved and planar u8/s16/s32/float sample
// formats; returns null for empty frames, and (with a one-time warning)
// for any unsupported format.
function libavFrameToInterleavedFloat322(frame) {
  const channels = frame.channels ?? frame.ch_layout_nb_channels ?? 1;
  const sampleRate = frame.sample_rate ?? 44100;
  const nbSamples = frame.nb_samples ?? 0;
  if (nbSamples === 0) return null;
  const out = new Float32Array(nbSamples * channels);
  switch (frame.format) {
    case AV_SAMPLE_FMT_FLTP2: {
      // Planar float: interleave one plane per channel.
      const planes = ensurePlanes2(frame.data, channels);
      for (let ch = 0; ch < channels; ch++) {
        const plane = asFloat322(planes[ch]);
        for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = plane[i];
      }
      return { data: out, channels, sampleRate };
    }
    case AV_SAMPLE_FMT_FLT2: {
      // Already interleaved float: straight copy.
      const flat = asFloat322(frame.data);
      for (let i = 0; i < nbSamples * channels; i++) out[i] = flat[i];
      return { data: out, channels, sampleRate };
    }
    case AV_SAMPLE_FMT_S16P2: {
      const planes = ensurePlanes2(frame.data, channels);
      for (let ch = 0; ch < channels; ch++) {
        const plane = asInt162(planes[ch]);
        for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = plane[i] / 32768;
      }
      return { data: out, channels, sampleRate };
    }
    case AV_SAMPLE_FMT_S162: {
      const flat = asInt162(frame.data);
      for (let i = 0; i < nbSamples * channels; i++) out[i] = flat[i] / 32768;
      return { data: out, channels, sampleRate };
    }
    case AV_SAMPLE_FMT_S32P2: {
      const planes = ensurePlanes2(frame.data, channels);
      for (let ch = 0; ch < channels; ch++) {
        const plane = asInt322(planes[ch]);
        for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = plane[i] / 2147483648;
      }
      return { data: out, channels, sampleRate };
    }
    case AV_SAMPLE_FMT_S322: {
      const flat = asInt322(frame.data);
      for (let i = 0; i < nbSamples * channels; i++) out[i] = flat[i] / 2147483648;
      return { data: out, channels, sampleRate };
    }
    case AV_SAMPLE_FMT_U8P2: {
      // Unsigned 8-bit is biased around 128.
      const planes = ensurePlanes2(frame.data, channels);
      for (let ch = 0; ch < channels; ch++) {
        const plane = asUint82(planes[ch]);
        for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = (plane[i] - 128) / 128;
      }
      return { data: out, channels, sampleRate };
    }
    case AV_SAMPLE_FMT_U82: {
      const flat = asUint82(frame.data);
      for (let i = 0; i < nbSamples * channels; i++) out[i] = (flat[i] - 128) / 128;
      return { data: out, channels, sampleRate };
    }
    default:
      // Warn exactly once (global flag) to avoid log spam on every frame.
      if (!globalThis.__avbridgeLoggedSampleFmt) {
        globalThis.__avbridgeLoggedSampleFmt = frame.format;
        console.warn(`[avbridge] unsupported audio sample format from libav: ${frame.format}`);
      }
      return null;
  }
}
2355
// Planar audio data may arrive either as an array of per-channel planes or
// as one flat typed array; normalize both shapes to an array of planes.
// The flat case is split evenly into `channels` consecutive views.
function ensurePlanes2(data, channels) {
  if (Array.isArray(data)) return data;
  const samplesPerChannel = Math.floor(data.length / channels);
  return Array.from({ length: channels }, (_, ch) =>
    data.subarray
      ? data.subarray(ch * samplesPerChannel, (ch + 1) * samplesPerChannel)
      : data
  );
}
2366
// Zero-copy reinterpretation of a typed array's backing bytes as Float32.
function asFloat322(x) {
  return x instanceof Float32Array
    ? x
    : new Float32Array(x.buffer, x.byteOffset, x.byteLength / 4);
}
2371
// Zero-copy reinterpretation of a typed array's backing bytes as Int16.
function asInt162(x) {
  return x instanceof Int16Array
    ? x
    : new Int16Array(x.buffer, x.byteOffset, x.byteLength / 2);
}
2376
// Zero-copy reinterpretation of a typed array's backing bytes as Int32.
function asInt322(x) {
  return x instanceof Int32Array
    ? x
    : new Int32Array(x.buffer, x.byteOffset, x.byteLength / 4);
}
2381
// Zero-copy reinterpretation of a typed array's backing bytes as Uint8.
function asUint82(x) {
  return x instanceof Uint8Array
    ? x
    : new Uint8Array(x.buffer, x.byteOffset, x.byteLength);
}
2386
// Lazily import the libavjs-webcodecs-bridge wrapper chunk for the fallback
// strategy. Kept dynamic so the optional peer dependency is only resolved
// when actually needed; on failure, rethrows with an install hint for the
// optional peer deps.
async function loadBridge2() {
  try {
    const wrapper = await import('./libav-import-2JURFHEW.js');
    return wrapper.libavBridge;
  } catch (err) {
    throw new Error(
      `failed to load libavjs-webcodecs-bridge \u2014 install the optional peer deps with: npm i libavjs-webcodecs-bridge @libav.js/variant-webcodecs. (${err.message})`
    );
  }
}
2396
+
2397
+ // src/strategies/fallback/index.ts
2398
// Minimum seconds of buffered audio before fallback playback is considered
// ready to (re)start.
var READY_AUDIO_BUFFER_SECONDS2 = 0.3;
// Stop waiting for buffers after this many seconds and proceed anyway.
var READY_TIMEOUT_SECONDS2 = 10;
2400
// Build a playback session for the full-software "fallback" strategy:
// everything decoded by libav (WASM) via startDecoder. `target` is the
// media element the session renders into; its currentTime/duration
// accessors are shadowed so callers observe the session's own clock.
async function createFallbackSession(ctx, target) {
  // Lazy chunk import keeps source normalization out of the main bundle.
  const { normalizeSource: normalizeSource2 } = await import('./source-ZFS4H7J3.js');
  const source = await normalizeSource2(ctx.source);
  const fps = ctx.videoTracks[0]?.fps ?? 30;
  const audio = new AudioOutput();
  const renderer = new VideoRenderer(target, audio, fps);
  let handles;
  try {
    handles = await startDecoder({
      source,
      filename: ctx.name ?? "input.bin",
      context: ctx,
      renderer,
      audio
    });
  } catch (err) {
    // Decoder failed to start: release the outputs created above, then
    // rethrow so the caller can escalate or surface the error.
    audio.destroy();
    renderer.destroy();
    throw err;
  }
  // Shadow the element's native currentTime with the audio clock; assigning
  // it kicks off an async seek (fire-and-forget by design).
  Object.defineProperty(target, "currentTime", {
    configurable: true,
    get: () => audio.now(),
    set: (v) => {
      void doSeek(v);
    }
  });
  if (ctx.duration && Number.isFinite(ctx.duration)) {
    Object.defineProperty(target, "duration", {
      configurable: true,
      get: () => ctx.duration ?? NaN
    });
  }
  // Poll until audio is sufficiently buffered (or the file has no audio)
  // AND at least one video frame is queued, or give up after the timeout.
  async function waitForBuffer() {
    const start = performance.now();
    while (true) {
      const audioReady = audio.isNoAudio() || audio.bufferAhead() >= READY_AUDIO_BUFFER_SECONDS2;
      if (audioReady && renderer.hasFrames()) {
        return;
      }
      if ((performance.now() - start) / 1e3 > READY_TIMEOUT_SECONDS2) {
        return;
      }
      await new Promise((r) => setTimeout(r, 50));
    }
  }
  // Pause, seek the decoder, reset the audio clock, flush queued video;
  // then rebuffer and resume if playback was active before the seek.
  async function doSeek(timeSec) {
    const wasPlaying = audio.isPlaying();
    await audio.pause().catch(() => {
    });
    await handles.seek(timeSec).catch(
      (err) => console.warn("[avbridge] decoder seek failed:", err)
    );
    await audio.reset(timeSec);
    renderer.flush();
    if (wasPlaying) {
      await waitForBuffer();
      await audio.start();
    }
  }
  return {
    strategy: "fallback",
    async play() {
      if (!audio.isPlaying()) {
        await waitForBuffer();
        await audio.start();
      }
    },
    pause() {
      void audio.pause();
    },
    async seek(time) {
      await doSeek(time);
    },
    // Track switching is not implemented for this strategy.
    async setAudioTrack(_id) {
    },
    async setSubtitleTrack(_id) {
    },
    getCurrentTime() {
      return audio.now();
    },
    async destroy() {
      await handles.destroy();
      renderer.destroy();
      audio.destroy();
      try {
        // Restore the element's native currentTime/duration accessors.
        delete target.currentTime;
        delete target.duration;
      } catch {
      }
    },
    getRuntimeStats() {
      return handles.stats();
    }
  };
}
2496
+
2497
+ // src/plugins/builtin.ts
2498
// Built-in strategy plugins. Each pairs a canHandle predicate with a
// session factory; `hybrid` additionally requires WebCodecs VideoDecoder
// support in the current environment.
var nativePlugin = {
  name: "native",
  canHandle: () => true,
  execute: (ctx, video) => createNativeSession(ctx, video)
};
var remuxPlugin = {
  name: "remux",
  canHandle: () => true,
  execute: (ctx, video) => createRemuxSession(ctx, video)
};
var hybridPlugin = {
  name: "hybrid",
  // Only usable where the WebCodecs API is available.
  canHandle: () => typeof VideoDecoder !== "undefined",
  execute: (ctx, video) => createHybridSession(ctx, video)
};
var fallbackPlugin = {
  name: "fallback",
  canHandle: () => true,
  execute: (ctx, video) => createFallbackSession(ctx, video)
};
2518
// Register the built-in strategy plugins with the given registry, in the
// same order as before: native, remux, hybrid, fallback.
function registerBuiltins(registry) {
  const builtins = [nativePlugin, remuxPlugin, hybridPlugin, fallbackPlugin];
  for (const plugin of builtins) {
    registry.register(plugin);
  }
}
2524
+
2525
+ // src/subtitles/vtt.ts
2526
// True when the text looks like a WebVTT file: after stripping an optional
// BOM and leading whitespace, it must begin with "WEBVTT".
function isVtt(text) {
  return text.replace(/^\ufeff/, "").trimStart().startsWith("WEBVTT");
}
2530
+
2531
+ // src/subtitles/index.ts
2532
// Scan a directory's entries for sidecar subtitle files (.srt/.vtt) that
// share the media file's base name. Each match is exposed as an object URL
// plus its format and, when derivable from the name suffix (e.g.
// "movie.en.srt"), a 2-3 letter language code.
async function discoverSidecar(file, directory) {
  // Strip the media file's extension; sidecars share this stem.
  const stem = file.name.replace(/\.[^.]+$/, "");
  const results = [];
  for await (const [entryName, entryHandle] of directory) {
    if (entryHandle.kind !== "file" || !entryName.startsWith(stem)) continue;
    const lowered = entryName.toLowerCase();
    const format = lowered.endsWith(".srt") ? "srt" : lowered.endsWith(".vtt") ? "vtt" : null;
    if (format === null) continue;
    const sidecarBlob = await entryHandle.getFile();
    const url = URL.createObjectURL(sidecarBlob);
    // Look for a language tag between separators in the name's suffix.
    const suffix = entryName.slice(stem.length);
    const langMatch = suffix.match(/[._-]([a-z]{2,3})(?:[._-]|\.)/i);
    results.push({
      url,
      format,
      language: langMatch?.[1]
    });
  }
  return results;
}
2554
// Attach a <track> child to the video element for every subtitle track that
// has a sidecar URL. SRT sidecars are fetched and converted to WebVTT,
// served via an object URL; VTT sidecars are used as-is after a header
// sanity check.
async function attachSubtitleTracks(video, tracks) {
  // Remove tracks added by a previous call so re-attaching doesn't duplicate.
  for (const t of Array.from(video.querySelectorAll("track[data-avbridge]"))) {
    t.remove();
  }
  for (const t of tracks) {
    if (!t.sidecarUrl) continue;
    let url = t.sidecarUrl;
    if (t.format === "srt") {
      // Convert SRT to WebVTT for the <track> element.
      const res = await fetch(t.sidecarUrl);
      const text = await res.text();
      const vtt = srtToVtt(text);
      const blob = new Blob([vtt], { type: "text/vtt" });
      url = URL.createObjectURL(blob);
    } else if (t.format === "vtt") {
      const res = await fetch(t.sidecarUrl);
      const text = await res.text();
      if (!isVtt(text)) {
        // Warn only; the file is still attached unchanged.
        console.warn("[avbridge] subtitle missing WEBVTT header:", t.sidecarUrl);
      }
    }
    const track = document.createElement("track");
    track.kind = "subtitles";
    track.src = url;
    track.srclang = t.language ?? "und";
    track.label = t.language ?? `Subtitle ${t.id}`;
    // Marker queried by the cleanup loop above.
    track.dataset.avbridge = "true";
    video.appendChild(track);
  }
}
2583
+
2584
+ // src/player.ts
2585
+ var UnifiedPlayer = class _UnifiedPlayer {
2586
+ /**
2587
+ * @internal Use {@link createPlayer} or {@link UnifiedPlayer.create} instead.
2588
+ */
2589
+ constructor(options, registry) {
2590
+ this.options = options;
2591
+ this.registry = registry;
2592
+ }
2593
+ options;
2594
+ registry;
2595
+ emitter = new TypedEmitter();
2596
+ session = null;
2597
+ diag = new Diagnostics();
2598
+ timeupdateInterval = null;
2599
+ // Saved from bootstrap for strategy switching
2600
+ mediaContext = null;
2601
+ classification = null;
2602
+ // Stall detection
2603
+ stallTimer = null;
2604
+ lastProgressTime = 0;
2605
+ lastProgressPosition = -1;
2606
+ errorListener = null;
2607
+ // Serializes escalation / setStrategy calls
2608
+ switchingPromise = Promise.resolve();
2609
+ static async create(options) {
2610
+ const registry = new PluginRegistry();
2611
+ registerBuiltins(registry);
2612
+ if (options.plugins) {
2613
+ for (const p of options.plugins) registry.register(
2614
+ p,
2615
+ /* prepend */
2616
+ true
2617
+ );
2618
+ }
2619
+ const player = new _UnifiedPlayer(options, registry);
2620
+ try {
2621
+ await player.bootstrap();
2622
+ } catch (err) {
2623
+ err.player = player;
2624
+ throw err;
2625
+ }
2626
+ return player;
2627
+ }
2628
+ async bootstrap() {
2629
+ try {
2630
+ const ctx = await probe(this.options.source);
2631
+ this.diag.recordProbe(ctx);
2632
+ this.mediaContext = ctx;
2633
+ if (this.options.subtitles) {
2634
+ for (const s of this.options.subtitles) {
2635
+ ctx.subtitleTracks.push({
2636
+ id: ctx.subtitleTracks.length,
2637
+ format: s.format ?? (s.url.endsWith(".srt") ? "srt" : "vtt"),
2638
+ language: s.language,
2639
+ sidecarUrl: s.url
2640
+ });
2641
+ }
2642
+ }
2643
+ if (this.options.directory && this.options.source instanceof File) {
2644
+ const found = await discoverSidecar(this.options.source, this.options.directory);
2645
+ for (const s of found) {
2646
+ ctx.subtitleTracks.push({
2647
+ id: ctx.subtitleTracks.length,
2648
+ format: s.format,
2649
+ language: s.language,
2650
+ sidecarUrl: s.url
2651
+ });
2652
+ }
2653
+ }
2654
+ const decision = this.options.forceStrategy ? {
2655
+ class: "NATIVE",
2656
+ strategy: this.options.forceStrategy,
2657
+ reason: `forced via options.forceStrategy=${this.options.forceStrategy}`
2658
+ } : classifyContext(ctx);
2659
+ this.classification = decision;
2660
+ this.diag.recordClassification(decision);
2661
+ this.emitter.emitSticky("strategy", {
2662
+ strategy: decision.strategy,
2663
+ reason: decision.reason
2664
+ });
2665
+ await this.startSession(decision.strategy, decision.reason);
2666
+ if (this.session.strategy !== "fallback" && this.session.strategy !== "hybrid") {
2667
+ attachSubtitleTracks(this.options.target, ctx.subtitleTracks);
2668
+ }
2669
+ this.emitter.emitSticky("tracks", {
2670
+ video: ctx.videoTracks,
2671
+ audio: ctx.audioTracks,
2672
+ subtitle: ctx.subtitleTracks
2673
+ });
2674
+ this.startTimeupdateLoop();
2675
+ this.options.target.addEventListener("ended", () => this.emitter.emit("ended", void 0));
2676
+ this.emitter.emitSticky("ready", void 0);
2677
+ } catch (err) {
2678
+ const e = err instanceof Error ? err : new Error(String(err));
2679
+ this.diag.recordError(e);
2680
+ this.emitter.emit("error", e);
2681
+ throw e;
2682
+ }
2683
+ }
2684
+ /**
2685
+ * Try to start a session with the given strategy. On failure, walk the
2686
+ * fallback chain. Throws only if all strategies are exhausted.
2687
+ */
2688
+ async startSession(strategy, reason) {
2689
+ const plugin = this.registry.findFor(this.mediaContext, strategy);
2690
+ if (!plugin) {
2691
+ throw new Error(`no plugin available for strategy "${strategy}"`);
2692
+ }
2693
+ try {
2694
+ this.session = await plugin.execute(this.mediaContext, this.options.target);
2695
+ } catch (err) {
2696
+ const chain = this.classification?.fallbackChain;
2697
+ if (chain && chain.length > 0) {
2698
+ const next = chain.shift();
2699
+ console.warn(`[avbridge] ${strategy} failed (${err.message}), escalating to ${next}`);
2700
+ this.emitter.emit("strategychange", {
2701
+ from: strategy,
2702
+ to: next,
2703
+ reason: `${strategy} failed: ${err.message}`,
2704
+ currentTime: 0
2705
+ });
2706
+ this.diag.recordStrategySwitch(next, `${strategy} failed: ${err.message}`);
2707
+ return this.startSession(next, `escalated from ${strategy}`);
2708
+ }
2709
+ throw err;
2710
+ }
2711
+ this.session.onFatalError?.((fatalReason) => {
2712
+ void this.escalate(fatalReason);
2713
+ });
2714
+ this.attachSupervisor();
2715
+ if (this.session.strategy !== strategy) {
2716
+ this.emitter.emitSticky("strategy", {
2717
+ strategy: this.session.strategy,
2718
+ reason
2719
+ });
2720
+ }
2721
+ }
2722
+ // ── Escalation ──────────────────────────────────────────────────────────
2723
+ async escalate(reason) {
2724
+ this.switchingPromise = this.switchingPromise.then(
2725
+ () => this.doEscalate(reason)
2726
+ ).catch((err) => {
2727
+ this.emitter.emit("error", err instanceof Error ? err : new Error(String(err)));
2728
+ });
2729
+ await this.switchingPromise;
2730
+ }
2731
/**
 * Tear down the current session and restart playback on the next strategy
 * in the fallback chain, preserving position and play/pause state. Runs
 * serialized behind `switchingPromise` (see escalate()). Emits "error"
 * instead of throwing when no fallback remains or the fallback fails.
 */
async doEscalate(reason) {
  // The chain is consumed destructively: each escalation shifts one entry.
  const chain = this.classification?.fallbackChain;
  if (!chain || chain.length === 0) {
    this.emitter.emit("error", new Error(
      `strategy "${this.session?.strategy}" failed: ${reason} (no fallback available)`
    ));
    return;
  }
  // Capture playback state before destroying the old session so it can be
  // restored on the replacement session.
  const currentTime = this.session?.getCurrentTime() ?? 0;
  const wasPlaying = this.session ? !this.options.target.paused : false;
  const fromStrategy = this.session?.strategy ?? "native";
  const nextStrategy = chain.shift();
  console.warn(`[avbridge] escalating from ${fromStrategy} to ${nextStrategy}: ${reason}`);
  this.emitter.emit("strategychange", {
    from: fromStrategy,
    to: nextStrategy,
    reason,
    currentTime
  });
  this.diag.recordStrategySwitch(nextStrategy, reason);
  this.clearSupervisor();
  if (this.session) {
    try {
      await this.session.destroy();
    } catch {
      // Best-effort teardown: a failing destroy must not block the fallback.
    }
    this.session = null;
  }
  const plugin = this.registry.findFor(this.mediaContext, nextStrategy);
  if (!plugin) {
    this.emitter.emit("error", new Error(`no plugin for fallback strategy "${nextStrategy}"`));
    return;
  }
  try {
    this.session = await plugin.execute(this.mediaContext, this.options.target);
  } catch (err) {
    this.emitter.emit("error", err instanceof Error ? err : new Error(String(err)));
    return;
  }
  // Announce the new strategy before wiring fatal-error/stall supervision.
  this.emitter.emitSticky("strategy", {
    strategy: nextStrategy,
    reason: `escalated: ${reason}`
  });
  this.session.onFatalError?.((fatalReason) => {
    void this.escalate(fatalReason);
  });
  this.attachSupervisor();
  try {
    await this.session.seek(currentTime);
    if (wasPlaying) await this.session.play();
  } catch (err) {
    // Position restore is best-effort; playback continues from wherever the
    // new session starts.
    console.warn("[avbridge] failed to restore position after escalation:", err);
  }
}
2785
+ // ── Stall supervision ─────────────────────────────────────────────────
2786
+ attachSupervisor() {
2787
+ this.clearSupervisor();
2788
+ if (this.options.autoEscalate === false) return;
2789
+ if (!this.classification?.fallbackChain?.length) return;
2790
+ const strategy = this.session?.strategy;
2791
+ if (strategy === "native" || strategy === "remux") {
2792
+ this.lastProgressPosition = this.options.target.currentTime;
2793
+ this.lastProgressTime = performance.now();
2794
+ this.stallTimer = setInterval(() => {
2795
+ const t = this.options.target;
2796
+ if (t.paused || t.ended || t.readyState < 2) {
2797
+ this.lastProgressPosition = t.currentTime;
2798
+ this.lastProgressTime = performance.now();
2799
+ return;
2800
+ }
2801
+ if (t.currentTime !== this.lastProgressPosition) {
2802
+ this.lastProgressPosition = t.currentTime;
2803
+ this.lastProgressTime = performance.now();
2804
+ return;
2805
+ }
2806
+ if (performance.now() - this.lastProgressTime > 5e3) {
2807
+ void this.escalate(
2808
+ `${strategy} strategy stalled for 5s at ${t.currentTime.toFixed(1)}s`
2809
+ );
2810
+ }
2811
+ }, 1e3);
2812
+ const onError = () => {
2813
+ void this.escalate(
2814
+ `${strategy} strategy error: ${this.options.target.error?.message ?? "unknown"}`
2815
+ );
2816
+ };
2817
+ this.options.target.addEventListener("error", onError, { once: true });
2818
+ this.errorListener = onError;
2819
+ }
2820
+ }
2821
+ clearSupervisor() {
2822
+ if (this.stallTimer) {
2823
+ clearInterval(this.stallTimer);
2824
+ this.stallTimer = null;
2825
+ }
2826
+ if (this.errorListener) {
2827
+ this.options.target.removeEventListener("error", this.errorListener);
2828
+ this.errorListener = null;
2829
+ }
2830
+ }
2831
+ // ── Public: manual strategy switch ────────────────────────────────────
2832
+ /** Manually switch to a different playback strategy. Preserves current position and play/pause state. Concurrent calls are serialized. */
2833
+ async setStrategy(strategy, reason) {
2834
+ if (!this.mediaContext) throw new Error("player not ready");
2835
+ if (this.session?.strategy === strategy) return;
2836
+ this.switchingPromise = this.switchingPromise.then(
2837
+ () => this.doSetStrategy(strategy, reason)
2838
+ );
2839
+ await this.switchingPromise;
2840
+ }
2841
/**
 * Perform the actual strategy switch queued by setStrategy(): record and
 * announce the change, destroy the old session, start the new plugin, then
 * restore position/play state on a best-effort basis. Unlike doEscalate(),
 * a failure to start the new session is thrown to the caller.
 */
async doSetStrategy(strategy, reason) {
  // Capture playback state before tearing down the current session.
  const currentTime = this.session?.getCurrentTime() ?? 0;
  const wasPlaying = this.session ? !this.options.target.paused : false;
  const fromStrategy = this.session?.strategy ?? "native";
  const switchReason = reason ?? `manual switch to ${strategy}`;
  this.emitter.emit("strategychange", {
    from: fromStrategy,
    to: strategy,
    reason: switchReason,
    currentTime
  });
  this.diag.recordStrategySwitch(strategy, switchReason);
  this.clearSupervisor();
  if (this.session) {
    try {
      await this.session.destroy();
    } catch {
      // Best-effort teardown; proceed with the switch regardless.
    }
    this.session = null;
  }
  const plugin = this.registry.findFor(this.mediaContext, strategy);
  if (!plugin) throw new Error(`no plugin available for strategy "${strategy}"`);
  this.session = await plugin.execute(this.mediaContext, this.options.target);
  // Announce the new strategy before wiring fatal-error/stall supervision.
  this.emitter.emitSticky("strategy", {
    strategy,
    reason: switchReason
  });
  this.session.onFatalError?.((fatalReason) => {
    void this.escalate(fatalReason);
  });
  this.attachSupervisor();
  try {
    await this.session.seek(currentTime);
    if (wasPlaying) await this.session.play();
  } catch (err) {
    // Restoring position is best-effort only.
    console.warn("[avbridge] failed to restore position after strategy switch:", err);
  }
}
2879
+ // ── Timeupdate loop ───────────────────────────────────────────────────
2880
+ startTimeupdateLoop() {
2881
+ this.timeupdateInterval = setInterval(() => {
2882
+ const t = this.session?.getCurrentTime() ?? this.options.target.currentTime;
2883
+ this.emitter.emit("timeupdate", { currentTime: t });
2884
+ }, 250);
2885
+ }
2886
+ // ── Public API ────────────────────────────────────────────────────────
2887
+ /** Subscribe to a player event. Returns an unsubscribe function. Sticky events (strategy, ready, tracks) replay for late subscribers. */
2888
+ on(event, fn) {
2889
+ return this.emitter.on(event, fn);
2890
+ }
2891
+ /** Remove a previously registered event listener. */
2892
+ off(event, fn) {
2893
+ this.emitter.off(event, fn);
2894
+ }
2895
+ /** Begin or resume playback. Throws if the player is not ready. */
2896
+ async play() {
2897
+ if (!this.session) throw new Error("player not ready");
2898
+ await this.session.play();
2899
+ }
2900
+ /** Pause playback. No-op if the player is not ready or already paused. */
2901
+ pause() {
2902
+ this.session?.pause();
2903
+ }
2904
+ /** Seek to the given time in seconds. Throws if the player is not ready. */
2905
+ async seek(time) {
2906
+ if (!this.session) throw new Error("player not ready");
2907
+ await this.session.seek(time);
2908
+ }
2909
+ /** Switch the active audio track by track ID. Throws if the player is not ready. */
2910
+ async setAudioTrack(id) {
2911
+ if (!this.session) throw new Error("player not ready");
2912
+ await this.session.setAudioTrack(id);
2913
+ }
2914
+ /** Switch the active subtitle track by track ID, or pass `null` to disable subtitles. */
2915
+ async setSubtitleTrack(id) {
2916
+ if (!this.session) throw new Error("player not ready");
2917
+ await this.session.setSubtitleTrack(id);
2918
+ }
2919
+ /** Return a snapshot of current diagnostics: container, codecs, strategy, runtime stats, and strategy history. */
2920
+ getDiagnostics() {
2921
+ if (this.session) {
2922
+ this.diag.recordRuntime(this.session.getRuntimeStats());
2923
+ }
2924
+ return this.diag.snapshot();
2925
+ }
2926
+ /** Return the total duration in seconds, or `NaN` if unknown. */
2927
+ getDuration() {
2928
+ const fromDiag = this.diag.snapshot().duration;
2929
+ if (typeof fromDiag === "number" && Number.isFinite(fromDiag)) return fromDiag;
2930
+ const fromVideo = this.options.target.duration;
2931
+ return Number.isFinite(fromVideo) ? fromVideo : NaN;
2932
+ }
2933
+ /** Return the current playback position in seconds. */
2934
+ getCurrentTime() {
2935
+ return this.session?.getCurrentTime() ?? this.options.target.currentTime ?? 0;
2936
+ }
2937
+ /** Tear down the player: stop timers, destroy the active session, remove all event listeners. The player is unusable after this call. */
2938
+ async destroy() {
2939
+ if (this.timeupdateInterval) {
2940
+ clearInterval(this.timeupdateInterval);
2941
+ this.timeupdateInterval = null;
2942
+ }
2943
+ this.clearSupervisor();
2944
+ if (this.session) {
2945
+ await this.session.destroy();
2946
+ this.session = null;
2947
+ }
2948
+ this.emitter.removeAll();
2949
+ }
2950
+ };
2951
/**
 * Convenience factory: create and bootstrap a UnifiedPlayer. On bootstrap
 * failure the rejection carries a `player` property referencing the
 * partially initialized instance (attached by UnifiedPlayer.create).
 */
async function createPlayer(options) {
  const player = await UnifiedPlayer.create(options);
  return player;
}
2954
+
2955
+ export { UnifiedPlayer, avbridgeAudioToMediabunny, avbridgeVideoToMediabunny, buildMediabunnySourceFromInput, classifyContext, createPlayer, probe, srtToVtt };
2956
//# sourceMappingURL=chunk-FKM7QBZU.js.map