avbridge 2.3.0 → 2.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (111)
  1. package/CHANGELOG.md +114 -0
  2. package/dist/{chunk-6UUT4BEA.cjs → chunk-2IJ66NTD.cjs} +13 -20
  3. package/dist/chunk-2IJ66NTD.cjs.map +1 -0
  4. package/dist/{chunk-XKPSTC34.cjs → chunk-2XW2O3YI.cjs} +5 -20
  5. package/dist/chunk-2XW2O3YI.cjs.map +1 -0
  6. package/dist/chunk-5KVLE6YI.js +167 -0
  7. package/dist/chunk-5KVLE6YI.js.map +1 -0
  8. package/dist/{chunk-7RGG6ME7.cjs → chunk-6SOFJV44.cjs} +422 -688
  9. package/dist/chunk-6SOFJV44.cjs.map +1 -0
  10. package/dist/{chunk-2PGRFCWB.js → chunk-CPJLFFCC.js} +8 -18
  11. package/dist/chunk-CPJLFFCC.js.map +1 -0
  12. package/dist/chunk-CPZ7PXAM.cjs +240 -0
  13. package/dist/chunk-CPZ7PXAM.cjs.map +1 -0
  14. package/dist/{chunk-QQXBPW72.js → chunk-E76AMWI4.js} +4 -18
  15. package/dist/chunk-E76AMWI4.js.map +1 -0
  16. package/dist/chunk-LUFA47FP.js +19 -0
  17. package/dist/chunk-LUFA47FP.js.map +1 -0
  18. package/dist/{chunk-NV7ILLWH.js → chunk-OGYHFY6K.js} +404 -665
  19. package/dist/chunk-OGYHFY6K.js.map +1 -0
  20. package/dist/chunk-Q2VUO52Z.cjs +374 -0
  21. package/dist/chunk-Q2VUO52Z.cjs.map +1 -0
  22. package/dist/chunk-QDJLQR53.cjs +22 -0
  23. package/dist/chunk-QDJLQR53.cjs.map +1 -0
  24. package/dist/chunk-S4WAZC2T.cjs +173 -0
  25. package/dist/chunk-S4WAZC2T.cjs.map +1 -0
  26. package/dist/chunk-SMH6IOP2.js +368 -0
  27. package/dist/chunk-SMH6IOP2.js.map +1 -0
  28. package/dist/chunk-SR3MPV4D.js +237 -0
  29. package/dist/chunk-SR3MPV4D.js.map +1 -0
  30. package/dist/chunk-X2K3GIWE.js +235 -0
  31. package/dist/chunk-X2K3GIWE.js.map +1 -0
  32. package/dist/chunk-ZCUXHW55.cjs +242 -0
  33. package/dist/chunk-ZCUXHW55.cjs.map +1 -0
  34. package/dist/element-browser.js +883 -492
  35. package/dist/element-browser.js.map +1 -1
  36. package/dist/element.cjs +88 -6
  37. package/dist/element.cjs.map +1 -1
  38. package/dist/element.d.cts +51 -1
  39. package/dist/element.d.ts +51 -1
  40. package/dist/element.js +87 -5
  41. package/dist/element.js.map +1 -1
  42. package/dist/index.cjs +523 -393
  43. package/dist/index.cjs.map +1 -1
  44. package/dist/index.d.cts +2 -2
  45. package/dist/index.d.ts +2 -2
  46. package/dist/index.js +494 -366
  47. package/dist/index.js.map +1 -1
  48. package/dist/libav-demux-H2GS46GH.cjs +27 -0
  49. package/dist/libav-demux-H2GS46GH.cjs.map +1 -0
  50. package/dist/libav-demux-OWZ4T2YW.js +6 -0
  51. package/dist/libav-demux-OWZ4T2YW.js.map +1 -0
  52. package/dist/{libav-import-GST2AMPL.cjs → libav-import-2ZVKV2E7.cjs} +2 -2
  53. package/dist/{libav-import-GST2AMPL.cjs.map → libav-import-2ZVKV2E7.cjs.map} +1 -1
  54. package/dist/{libav-import-2JURFHEW.js → libav-import-6MGLCXVQ.js} +2 -2
  55. package/dist/{libav-import-2JURFHEW.js.map → libav-import-6MGLCXVQ.js.map} +1 -1
  56. package/dist/{player-B6WB74RD.d.ts → player-DGXeCNfD.d.cts} +41 -1
  57. package/dist/{player-B6WB74RD.d.cts → player-DGXeCNfD.d.ts} +41 -1
  58. package/dist/player.cjs +731 -472
  59. package/dist/player.cjs.map +1 -1
  60. package/dist/player.d.cts +229 -120
  61. package/dist/player.d.ts +229 -120
  62. package/dist/player.js +710 -451
  63. package/dist/player.js.map +1 -1
  64. package/dist/remux-OBSMIENG.cjs +35 -0
  65. package/dist/remux-OBSMIENG.cjs.map +1 -0
  66. package/dist/remux-WBYIZBBX.js +10 -0
  67. package/dist/remux-WBYIZBBX.js.map +1 -0
  68. package/dist/source-4TZ6KMNV.js +4 -0
  69. package/dist/{source-F656KYYV.js.map → source-4TZ6KMNV.js.map} +1 -1
  70. package/dist/source-7YLO6E7X.cjs +29 -0
  71. package/dist/{source-73CAH6HW.cjs.map → source-7YLO6E7X.cjs.map} +1 -1
  72. package/dist/source-MTX5ELUZ.js +4 -0
  73. package/dist/{source-QJR3OHTW.js.map → source-MTX5ELUZ.js.map} +1 -1
  74. package/dist/source-VFLXLOCN.cjs +29 -0
  75. package/dist/{source-VB74JQ7Z.cjs.map → source-VFLXLOCN.cjs.map} +1 -1
  76. package/dist/subtitles-4T74JRGT.js +4 -0
  77. package/dist/subtitles-4T74JRGT.js.map +1 -0
  78. package/dist/subtitles-QUH4LPI4.cjs +29 -0
  79. package/dist/subtitles-QUH4LPI4.cjs.map +1 -0
  80. package/package.json +1 -1
  81. package/src/convert/remux.ts +1 -35
  82. package/src/convert/transcode-libav.ts +691 -0
  83. package/src/convert/transcode.ts +12 -4
  84. package/src/element/avbridge-player.ts +100 -0
  85. package/src/element/avbridge-video.ts +140 -3
  86. package/src/element/player-styles.ts +12 -0
  87. package/src/errors.ts +6 -0
  88. package/src/player.ts +15 -16
  89. package/src/strategies/fallback/decoder.ts +96 -173
  90. package/src/strategies/fallback/index.ts +46 -2
  91. package/src/strategies/fallback/libav-import.ts +9 -1
  92. package/src/strategies/fallback/video-renderer.ts +107 -0
  93. package/src/strategies/hybrid/decoder.ts +88 -180
  94. package/src/strategies/hybrid/index.ts +35 -2
  95. package/src/strategies/native.ts +6 -3
  96. package/src/strategies/remux/index.ts +14 -2
  97. package/src/strategies/remux/pipeline.ts +72 -12
  98. package/src/subtitles/render.ts +8 -0
  99. package/src/types.ts +32 -0
  100. package/src/util/libav-demux.ts +405 -0
  101. package/src/util/time-ranges.ts +40 -0
  102. package/dist/chunk-2PGRFCWB.js.map +0 -1
  103. package/dist/chunk-6UUT4BEA.cjs.map +0 -1
  104. package/dist/chunk-7RGG6ME7.cjs.map +0 -1
  105. package/dist/chunk-NV7ILLWH.js.map +0 -1
  106. package/dist/chunk-QQXBPW72.js.map +0 -1
  107. package/dist/chunk-XKPSTC34.cjs.map +0 -1
  108. package/dist/source-73CAH6HW.cjs +0 -28
  109. package/dist/source-F656KYYV.js +0 -3
  110. package/dist/source-QJR3OHTW.js +0 -3
  111. package/dist/source-VB74JQ7Z.cjs +0 -28
@@ -30085,6 +30085,60 @@ var init_avi = __esm({
30085
30085
  }
30086
30086
  });
30087
30087
 
30088
// src/subtitles/render.ts
/**
 * Parse a WebVTT document into a flat list of cues.
 *
 * @param {string} text - Raw WebVTT text (CRLF tolerated).
 * @returns {{start: number, end: number, text: string}[]} Cues with start/end
 *   in seconds and the payload lines joined with "\n". Blocks without a valid
 *   timing line (and the WEBVTT header block) are skipped.
 */
function parseVtt(text) {
  const cues = [];
  const blocks = text.replace(/\r\n/g, "\n").split(/\n{2,}/);
  for (const block of blocks) {
    const lines = block.split("\n").filter(Boolean);
    if (lines.length === 0 || lines[0] === "WEBVTT") continue;
    const timingIdx = lines.findIndex((l) => l.includes("-->"));
    if (timingIdx < 0) continue;
    // WebVTT allows the hour component to be omitted (MM:SS.mmm); the previous
    // pattern required HH: and silently dropped such cues. Hours, when present,
    // may also exceed two digits per the spec.
    const m = /(?:(\d{2,}):)?(\d{2}):(\d{2})\.(\d{3})\s*-->\s*(?:(\d{2,}):)?(\d{2}):(\d{2})\.(\d{3})/.exec(
      lines[timingIdx]
    );
    if (!m) continue;
    // Missing hours group -> treat as 0.
    const t = (h, mm, s, ms) => Number(h ?? 0) * 3600 + Number(mm) * 60 + Number(s) + Number(ms) / 1e3;
    cues.push({
      start: t(m[1], m[2], m[3], m[4]),
      end: t(m[5], m[6], m[7], m[8]),
      text: lines.slice(timingIdx + 1).join("\n")
    });
  }
  return cues;
}
30110
var SubtitleOverlay;
var init_render = __esm({
  "src/subtitles/render.ts"() {
    // Minimal DOM overlay that paints the active cue's text over the player.
    SubtitleOverlay = class {
      el;
      cues = [];
      /** Create the overlay element and append it to `parent`. */
      constructor(parent) {
        const el = document.createElement("div");
        el.style.cssText = "position:absolute;left:0;right:0;bottom:8%;text-align:center;color:white;text-shadow:0 0 4px black;font-family:sans-serif;font-size:1.4em;pointer-events:none;";
        parent.appendChild(el);
        this.el = el;
      }
      /** Replace the cue list with the cues parsed from `text`. */
      loadVtt(text) {
        this.cues = parseVtt(text);
      }
      /** Show the cue active at `currentTime`, or clear when none matches. */
      update(currentTime) {
        const cue = this.cues.find((c) => currentTime >= c.start && currentTime <= c.end);
        this.el.textContent = cue?.text ?? "";
      }
      /** Set the currently-displayed text directly (bypasses loadVtt/update). */
      setText(text) {
        if (this.el.textContent === text) return;
        this.el.textContent = text;
      }
      /** Detach the element and drop all cues. */
      destroy() {
        this.cues = [];
        this.el.remove();
      }
    };
  }
});
30141
+
30088
30142
  // node_modules/libavjs-webcodecs-bridge/dist/libavjs-webcodecs-bridge.mjs
30089
30143
  var libavjs_webcodecs_bridge_exports = {};
30090
30144
  __export(libavjs_webcodecs_bridge_exports, {
@@ -31002,6 +31056,145 @@ var init_libav_import = __esm({
31002
31056
  }
31003
31057
  });
31004
31058
 
31059
// src/subtitles/srt.ts
/**
 * Convert an SRT document into an equivalent WebVTT document.
 *
 * @param {string} srt - Raw SRT text; BOM, CR and CRLF are tolerated.
 * @returns {string} A WebVTT document. Blocks whose timing line does not
 *   parse are dropped.
 */
function srtToVtt(srt) {
  // Strip a leading UTF-8 BOM (char code 0xFEFF) if present.
  if (srt.charCodeAt(0) === 65279) srt = srt.slice(1);
  const normalized = srt.replace(/\r\n/g, "\n").replace(/\r/g, "\n").trim();
  const out = ["WEBVTT", ""];
  for (const block of normalized.split(/\n{2,}/)) {
    const lines = block.split("\n");
    // SRT puts a numeric cue index on its own line; WebVTT has no use for it.
    if (lines.length > 0 && /^\d+$/.test(lines[0].trim())) lines.shift();
    if (lines.length === 0) continue;
    const vttTiming = convertTiming(lines.shift());
    if (!vttTiming) continue;
    out.push(vttTiming, ...lines, "");
  }
  return out.join("\n");
}
/**
 * Rewrite one SRT timing line ("HH:MM:SS,mmm --> HH:MM:SS,mmm") into the
 * WebVTT form (dot separator, zero-padded hours/milliseconds). Trailing cue
 * settings after the timestamps are preserved verbatim.
 *
 * @param {string} line
 * @returns {string|null} The converted line, or null when it does not parse.
 */
function convertTiming(line) {
  const m = /^(\d{1,2}):(\d{2}):(\d{2})[,.](\d{1,3})\s*-->\s*(\d{1,2}):(\d{2}):(\d{2})[,.](\d{1,3})(.*)$/.exec(
    line.trim()
  );
  if (!m) return null;
  const fmt2 = (h, mm, s, ms) => `${h.padStart(2, "0")}:${mm}:${s}.${ms.padEnd(3, "0").slice(0, 3)}`;
  const start = fmt2(m[1], m[2], m[3], m[4]);
  const end = fmt2(m[5], m[6], m[7], m[8]);
  return `${start} --> ${end}${m[9] ?? ""}`;
}
31088
+ // Module initializer for src/subtitles/srt.ts, registered with the bundler's
+ // lazy-init helper; the module keeps no module-level state, so the body is empty.
+ var init_srt = __esm({
31089
+ "src/subtitles/srt.ts"() {
31090
+ }
31091
+ });
31092
+
31093
// src/subtitles/vtt.ts
/**
 * Cheap content sniff: true when `text` looks like a WebVTT document
 * (optional BOM and leading whitespace, then the literal "WEBVTT" header).
 */
function isVtt(text) {
  const body = text.replace(/^\ufeff/, "").trimStart();
  return body.slice(0, 6) === "WEBVTT";
}
31098
+ // Module initializer for src/subtitles/vtt.ts, registered with the bundler's
+ // lazy-init helper; the module keeps no module-level state, so the body is empty.
+ var init_vtt = __esm({
31100
+ "src/subtitles/vtt.ts"() {
31101
+ }
31102
+ });
31102
+
31103
+ // src/subtitles/index.ts
+ // Lazy export map for the subtitles module: __export installs getter thunks so
+ // each binding is resolved at access time (after init_subtitles2 has run).
31104
+ var subtitles_exports = {};
31105
+ __export(subtitles_exports, {
31106
+ SubtitleOverlay: () => SubtitleOverlay,
31107
+ SubtitleResourceBag: () => SubtitleResourceBag,
31108
+ attachSubtitleTracks: () => attachSubtitleTracks,
31109
+ discoverSidecars: () => discoverSidecars,
31110
+ srtToVtt: () => srtToVtt
31111
+ });
31112
/**
 * Scan a directory handle for subtitle sidecar files that belong to `file`
 * (same base name, .srt/.vtt extension).
 *
 * @param {{name: string}} file - The media file whose siblings to look for.
 * @param {AsyncIterable<[string, {kind: string, getFile(): Promise<Blob>}]>} directory
 *   Directory entries, File System Access API style.
 * @returns {Promise<{url: string, format: string, language: string|undefined}[]>}
 *   One entry per sidecar: a blob: URL for its contents, its format, and a
 *   2–3 letter language code sniffed from the filename suffix when present.
 */
async function discoverSidecars(file, directory) {
  const baseName = file.name.replace(/\.[^.]+$/, "");
  const found = [];
  for await (const [name, handle] of directory) {
    if (handle.kind !== "file") continue;
    if (!name.startsWith(baseName)) continue;
    const lower = name.toLowerCase();
    let format = null;
    if (lower.endsWith(".srt")) format = "srt";
    else if (lower.endsWith(".vtt")) format = "vtt";
    if (!format) continue;
    const sidecarFile = await handle.getFile();
    const url2 = URL.createObjectURL(sidecarFile);
    // e.g. "movie.en.srt" -> suffix ".en.srt" -> language "en".
    const langMatch = name.slice(baseName.length).match(/[._-]([a-z]{2,3})(?:[._-]|\.)/i);
    found.push({ url: url2, format, language: langMatch?.[1] });
  }
  return found;
}
31134
/**
 * (Re)build the `<track data-avbridge>` children of `video` for the given
 * subtitle tracks. SRT sidecars are fetched and transcoded to WebVTT blobs;
 * VTT sidecars are fetched only to validate the header and are referenced by
 * their original URL. Per-track failures are reported via `onError` and do
 * not abort the remaining tracks.
 */
async function attachSubtitleTracks(video, tracks, bag, onError, transport) {
  const doFetch = fetchWith(transport);
  // Drop any <track> elements we attached on a previous call.
  const stale = Array.from(video.querySelectorAll("track[data-avbridge]"));
  for (const el of stale) el.remove();
  for (const track of tracks) {
    if (!track.sidecarUrl) continue;
    try {
      let trackUrl = track.sidecarUrl;
      if (track.format === "srt") {
        // Browsers only accept WebVTT in <track>; transcode and serve as a blob.
        const res = await doFetch(track.sidecarUrl, transport?.requestInit);
        const vtt = srtToVtt(await res.text());
        const blob = new Blob([vtt], { type: "text/vtt" });
        trackUrl = bag ? bag.createObjectURL(blob) : URL.createObjectURL(blob);
      } else if (track.format === "vtt") {
        // Fetch purely to validate; the element still points at the original URL.
        const res = await doFetch(track.sidecarUrl, transport?.requestInit);
        if (!isVtt(await res.text())) {
          console.warn("[avbridge] subtitle missing WEBVTT header:", track.sidecarUrl);
        }
      }
      const trackEl = document.createElement("track");
      trackEl.kind = "subtitles";
      trackEl.src = trackUrl;
      trackEl.srclang = track.language ?? "und";
      trackEl.label = track.language ?? `Subtitle ${track.id}`;
      trackEl.dataset.avbridge = "true";
      video.appendChild(trackEl);
    } catch (err) {
      const e = err instanceof Error ? err : new Error(String(err));
      onError?.(e, track);
    }
  }
}
31169
var SubtitleResourceBag;
var init_subtitles2 = __esm({
  "src/subtitles/index.ts"() {
    init_transport();
    // Note: init_srt() was previously listed twice here; the duplicate call was
    // redundant (initializers are registered once via __esm) and is removed.
    init_srt();
    init_vtt();
    init_render();
    /**
     * Collects blob: URLs created for subtitle payloads so they can all be
     * revoked together when the owning session is torn down.
     */
    SubtitleResourceBag = class {
      urls = /* @__PURE__ */ new Set();
      /** Track an externally-created blob URL (e.g. from `discoverSidecars`). */
      track(url2) {
        this.urls.add(url2);
      }
      /** Convenience: create a blob URL and track it in one call. */
      createObjectURL(blob) {
        const url2 = URL.createObjectURL(blob);
        this.urls.add(url2);
        return url2;
      }
      /** Revoke every tracked URL. Idempotent — safe to call multiple times. */
      revokeAll() {
        for (const u of this.urls) URL.revokeObjectURL(u);
        this.urls.clear();
      }
    };
  }
});
31197
+
31005
31198
  // src/events.ts
31006
31199
  var TypedEmitter = class {
31007
31200
  listeners = {};
@@ -31692,7 +31885,7 @@ async function createNativeSession(context, video) {
31692
31885
  },
31693
31886
  async setAudioTrack(id) {
31694
31887
  const tracks = video.audioTracks;
31695
- if (!tracks) return;
31888
+ if (!tracks || tracks.length === 0) return;
31696
31889
  for (let i = 0; i < tracks.length; i++) {
31697
31890
  tracks[i].enabled = tracks[i].id === String(id) || i === id;
31698
31891
  }
@@ -31941,30 +32134,49 @@ var MseSink = class {
31941
32134
  async function createRemuxPipeline(ctx, video) {
31942
32135
  const mb = await Promise.resolve().then(() => (init_src(), src_exports));
31943
32136
  const videoTrackInfo = ctx.videoTracks[0];
31944
- const audioTrackInfo = ctx.audioTracks[0];
31945
32137
  if (!videoTrackInfo) throw new Error("remux: source has no video track");
31946
32138
  const mbVideoCodec = avbridgeVideoToMediabunny(videoTrackInfo.codec);
31947
32139
  if (!mbVideoCodec) {
31948
32140
  throw new Error(`remux: video codec "${videoTrackInfo.codec}" is not supported by mediabunny output`);
31949
32141
  }
31950
- const mbAudioCodec = audioTrackInfo ? avbridgeAudioToMediabunny(audioTrackInfo.codec) : null;
31951
32142
  const input = new mb.Input({
31952
32143
  source: await buildMediabunnySourceFromInput(mb, ctx.source),
31953
32144
  formats: mb.ALL_FORMATS
31954
32145
  });
31955
32146
  const allTracks = await input.getTracks();
31956
32147
  const inputVideo = allTracks.find((t) => t.id === videoTrackInfo.id && t.isVideoTrack());
31957
- const inputAudio = audioTrackInfo ? allTracks.find((t) => t.id === audioTrackInfo.id && t.isAudioTrack()) : null;
31958
32148
  if (!inputVideo || !inputVideo.isVideoTrack()) {
31959
32149
  throw new Error("remux: video track not found in input");
31960
32150
  }
31961
- if (audioTrackInfo && (!inputAudio || !inputAudio.isAudioTrack())) {
31962
- throw new Error("remux: audio track not found in input");
31963
- }
31964
32151
  const videoConfig = await inputVideo.getDecoderConfig();
31965
- const audioConfig = inputAudio && inputAudio.isAudioTrack() ? await inputAudio.getDecoderConfig() : null;
31966
32152
  const videoSink = new mb.EncodedPacketSink(inputVideo);
31967
- const audioSink = inputAudio?.isAudioTrack() ? new mb.EncodedPacketSink(inputAudio) : null;
32153
+ let selectedAudioTrackId = ctx.audioTracks[0]?.id ?? null;
32154
+ let inputAudio = null;
32155
+ let mbAudioCodec = null;
32156
+ let audioSink = null;
32157
+ let audioConfig = null;
32158
+ async function rebuildAudio() {
32159
+ if (selectedAudioTrackId == null) {
32160
+ inputAudio = null;
32161
+ mbAudioCodec = null;
32162
+ audioSink = null;
32163
+ audioConfig = null;
32164
+ return;
32165
+ }
32166
+ const trackInfo = ctx.audioTracks.find((t) => t.id === selectedAudioTrackId);
32167
+ if (!trackInfo) {
32168
+ throw new Error(`remux: no audio track with id ${selectedAudioTrackId}`);
32169
+ }
32170
+ const newInput = allTracks.find((t) => t.id === trackInfo.id && t.isAudioTrack());
32171
+ if (!newInput || !newInput.isAudioTrack()) {
32172
+ throw new Error("remux: audio track not found in input");
32173
+ }
32174
+ inputAudio = newInput;
32175
+ mbAudioCodec = avbridgeAudioToMediabunny(trackInfo.codec);
32176
+ audioSink = new mb.EncodedPacketSink(newInput);
32177
+ audioConfig = await newInput.getDecoderConfig();
32178
+ }
32179
+ await rebuildAudio();
31968
32180
  let sink = null;
31969
32181
  const stats = { videoPackets: 0, audioPackets: 0, bytesWritten: 0, fragments: 0 };
31970
32182
  let destroyed = false;
@@ -32089,6 +32301,30 @@ async function createRemuxPipeline(ctx, video) {
32089
32301
  pendingAutoPlay = autoPlay;
32090
32302
  if (sink) sink.setPlayOnSeek(autoPlay);
32091
32303
  },
32304
+ async setAudioTrack(trackId, time, autoPlay) {
32305
+ if (selectedAudioTrackId === trackId) return;
32306
+ if (!ctx.audioTracks.some((t) => t.id === trackId)) {
32307
+ console.warn("[avbridge] remux: setAudioTrack \u2014 unknown track id", trackId);
32308
+ return;
32309
+ }
32310
+ pumpToken++;
32311
+ selectedAudioTrackId = trackId;
32312
+ await rebuildAudio().catch((err) => {
32313
+ console.warn("[avbridge] remux: rebuildAudio failed:", err.message);
32314
+ });
32315
+ if (sink) {
32316
+ try {
32317
+ sink.destroy();
32318
+ } catch {
32319
+ }
32320
+ sink = null;
32321
+ }
32322
+ pendingAutoPlay = autoPlay;
32323
+ pendingStartTime = time;
32324
+ pumpLoop(++pumpToken, time).catch((err) => {
32325
+ console.error("[avbridge] remux pipeline setAudioTrack pump failed:", err);
32326
+ });
32327
+ },
32092
32328
  async destroy() {
32093
32329
  destroyed = true;
32094
32330
  pumpToken++;
@@ -32148,7 +32384,19 @@ async function createRemuxSession(context, video) {
32148
32384
  const wasPlaying = !video.paused;
32149
32385
  await pipeline.seek(time, wasPlaying || wantPlay);
32150
32386
  },
32151
- async setAudioTrack(_id) {
32387
+ async setAudioTrack(id) {
32388
+ if (!context.audioTracks.some((t) => t.id === id)) {
32389
+ console.warn("[avbridge] remux: setAudioTrack \u2014 unknown track id", id);
32390
+ return;
32391
+ }
32392
+ const wasPlaying = !video.paused;
32393
+ const time = video.currentTime || 0;
32394
+ if (!started) {
32395
+ started = true;
32396
+ await pipeline.setAudioTrack(id, time, wantPlay || wasPlaying);
32397
+ return;
32398
+ }
32399
+ await pipeline.setAudioTrack(id, time, wasPlaying || wantPlay);
32152
32400
  },
32153
32401
  async setSubtitleTrack(id) {
32154
32402
  const tracks = video.textTracks;
@@ -32172,6 +32420,7 @@ async function createRemuxSession(context, video) {
32172
32420
  }
32173
32421
 
32174
32422
  // src/strategies/fallback/video-renderer.ts
32423
+ init_render();
32175
32424
  function isDebug() {
32176
32425
  return typeof globalThis !== "undefined" && !!globalThis.AVBRIDGE_DEBUG;
32177
32426
  }
@@ -32201,6 +32450,9 @@ var VideoRenderer = class {
32201
32450
  document.body.appendChild(this.canvas);
32202
32451
  }
32203
32452
  target.style.visibility = "hidden";
32453
+ const overlayParent = parent instanceof HTMLElement ? parent : document.body;
32454
+ this.subtitleOverlay = new SubtitleOverlay(overlayParent);
32455
+ this.watchTextTracks(target);
32204
32456
  const ctx = this.canvas.getContext("2d");
32205
32457
  if (!ctx) throw new Error("video renderer: failed to acquire 2D context");
32206
32458
  this.ctx = ctx;
@@ -32226,6 +32478,15 @@ var VideoRenderer = class {
32226
32478
  ticksWaiting = 0;
32227
32479
  /** Cumulative count of ticks where PTS mode painted a frame. */
32228
32480
  ticksPainted = 0;
32481
+ /**
32482
+ * Subtitle overlay div attached to the stage wrapper alongside the
32483
+ * canvas. Created lazily when subtitle tracks are attached via the
32484
+ * target's `<track>` children. Canvas strategies (hybrid, fallback)
32485
+ * hide the <video>, so we can't rely on the browser's native cue
32486
+ * rendering; we read TextTrack.cues and render into this overlay.
32487
+ */
32488
+ subtitleOverlay = null;
32489
+ subtitleTrack = null;
32229
32490
  /**
32230
32491
  * Calibration offset (microseconds) between video PTS and audio clock.
32231
32492
  * Video PTS and AudioContext.currentTime can drift ~0.1% relative to
@@ -32269,9 +32530,80 @@ var VideoRenderer = class {
32269
32530
  this.framesDroppedOverflow++;
32270
32531
  }
32271
32532
  }
32533
+ /**
32534
+ * Watch the target <video>'s textTracks list. When a track is added,
32535
+ * grab it and start polling cues on each render tick. Existing tracks
32536
+ * (if any) are picked up immediately.
32537
+ */
32538
+ watchTextTracks(target) {
32539
+ const pick = () => {
32540
+ if (this.subtitleTrack) return;
32541
+ const tracks = target.textTracks;
32542
+ if (isDebug()) {
32543
+ console.log(`[avbridge:subs] watchTextTracks pick() \u2014 ${tracks.length} tracks`);
32544
+ }
32545
+ for (let i = 0; i < tracks.length; i++) {
32546
+ const t = tracks[i];
32547
+ if (isDebug()) {
32548
+ console.log(`[avbridge:subs] track ${i}: kind=${t.kind} mode=${t.mode} cues=${t.cues?.length ?? 0}`);
32549
+ }
32550
+ if (t.kind === "subtitles" || t.kind === "captions") {
32551
+ this.subtitleTrack = t;
32552
+ t.mode = "hidden";
32553
+ if (isDebug()) {
32554
+ console.log(`[avbridge:subs] picked track, mode=hidden`);
32555
+ }
32556
+ const trackEl = target.querySelector(`track[srclang="${t.language}"]`);
32557
+ if (trackEl) {
32558
+ trackEl.addEventListener("load", () => {
32559
+ if (isDebug()) {
32560
+ console.log(`[avbridge:subs] track element loaded, cues=${t.cues?.length ?? 0}`);
32561
+ }
32562
+ });
32563
+ trackEl.addEventListener("error", (ev) => {
32564
+ console.warn(`[avbridge:subs] track element error:`, ev);
32565
+ });
32566
+ }
32567
+ break;
32568
+ }
32569
+ }
32570
+ };
32571
+ pick();
32572
+ if (typeof target.textTracks.addEventListener === "function") {
32573
+ target.textTracks.addEventListener("addtrack", (e) => {
32574
+ if (isDebug()) {
32575
+ console.log("[avbridge:subs] addtrack event fired");
32576
+ }
32577
+ pick();
32578
+ });
32579
+ }
32580
+ }
32581
+ _loggedCues = false;
32582
+ /** Find the active cue (if any) for the given media time. */
32583
+ updateSubtitles() {
32584
+ if (!this.subtitleOverlay || !this.subtitleTrack) return;
32585
+ const cues = this.subtitleTrack.cues;
32586
+ if (!cues || cues.length === 0) return;
32587
+ if (isDebug() && !this._loggedCues) {
32588
+ this._loggedCues = true;
32589
+ console.log(`[avbridge:subs] cues available: ${cues.length}, first start=${cues[0].startTime}, last end=${cues[cues.length - 1].endTime}`);
32590
+ }
32591
+ const t = this.clock.now();
32592
+ let activeText = "";
32593
+ for (let i = 0; i < cues.length; i++) {
32594
+ const c = cues[i];
32595
+ if (t >= c.startTime && t <= c.endTime) {
32596
+ const vttCue = c;
32597
+ activeText = vttCue.text ?? "";
32598
+ break;
32599
+ }
32600
+ }
32601
+ this.subtitleOverlay.setText(activeText.replace(/<[^>]+>/g, ""));
32602
+ }
32272
32603
  tick() {
32273
32604
  if (this.destroyed) return;
32274
32605
  this.rafHandle = requestAnimationFrame(this.tick);
32606
+ this.updateSubtitles();
32275
32607
  if (this.queue.length === 0) return;
32276
32608
  const playing = this.clock.isPlaying();
32277
32609
  if (!playing) {
@@ -32400,6 +32732,11 @@ var VideoRenderer = class {
32400
32732
  this.destroyed = true;
32401
32733
  if (this.rafHandle != null) cancelAnimationFrame(this.rafHandle);
32402
32734
  this.flush();
32735
+ if (this.subtitleOverlay) {
32736
+ this.subtitleOverlay.destroy();
32737
+ this.subtitleOverlay = null;
32738
+ }
32739
+ this.subtitleTrack = null;
32403
32740
  this.canvas.remove();
32404
32741
  this.target.style.visibility = "";
32405
32742
  }
@@ -32658,67 +32995,222 @@ function pickLibavVariant(ctx) {
32658
32995
  return "webcodecs";
32659
32996
  }
32660
32997
 
32661
- // src/strategies/hybrid/decoder.ts
32662
- async function startHybridDecoder(opts) {
32663
- const variant = pickLibavVariant(opts.context);
32664
- const libav = await loadLibav(variant);
32665
- const bridge = await loadBridge();
32666
- const { prepareLibavInput: prepareLibavInput2 } = await Promise.resolve().then(() => (init_libav_http_reader(), libav_http_reader_exports));
32667
- const inputHandle = await prepareLibavInput2(libav, opts.filename, opts.source, opts.transport);
32668
- const readPkt = await libav.av_packet_alloc();
32669
- const [fmt_ctx, streams] = await libav.ff_init_demuxer_file(opts.filename);
32670
- const videoStream = streams.find((s) => s.codec_type === libav.AVMEDIA_TYPE_VIDEO) ?? null;
32671
- const audioStream = streams.find((s) => s.codec_type === libav.AVMEDIA_TYPE_AUDIO) ?? null;
32672
- if (!videoStream && !audioStream) {
32673
- throw new Error("hybrid decoder: file has no decodable streams");
32674
- }
32675
- let fatalHandler = null;
32676
- let fatalFired = false;
32677
- function fireFatal(reason) {
32678
- if (fatalFired) return;
32679
- fatalFired = true;
32680
- fatalHandler?.(reason);
32998
// src/util/libav-demux.ts
/**
 * Normalize a libav packet's split 64-bit PTS into microseconds with a 1/1e6
 * time base, mutating `pkt` in place.
 *
 * @param {object} pkt - libav.js packet (pts/ptshi low/high 32-bit words).
 * @param {() => number} nextUs - Synthetic monotonic clock, used when the
 *   packet carries no usable timestamp or the conversion overflows.
 * @param {[number, number]} [fallbackTimeBase] - [num, den] time base used to
 *   convert a valid PTS; defaults to microseconds ([1, 1e6]).
 */
function sanitizePacketTimestamp(pkt, nextUs, fallbackTimeBase) {
  const lo = pkt.pts ?? 0;
  const hi = pkt.ptshi ?? 0;
  // Stamp `pkt` with a microsecond timestamp in a 1/1e6 time base.
  const stamp = (us, hiWord) => {
    pkt.pts = us;
    pkt.ptshi = hiWord;
    pkt.time_base_num = 1;
    pkt.time_base_den = 1e6;
  };
  // hi === INT32_MIN with lo === 0 is AV_NOPTS_VALUE split across the words.
  const noPts = (hi === -2147483648 && lo === 0) || !Number.isFinite(lo);
  if (noPts) {
    stamp(nextUs(), 0);
    return;
  }
  const [num, den] = fallbackTimeBase ?? [1, 1e6];
  const pts64 = hi * 4294967296 + lo;
  const us = Math.round(pts64 * 1e6 * num / den);
  if (Number.isFinite(us) && Math.abs(us) <= Number.MAX_SAFE_INTEGER) {
    stamp(us, us < 0 ? -1 : 0);
    return;
  }
  // Conversion overflowed — fall back to the synthetic clock.
  stamp(nextUs(), 0);
}
33027
// libav AVSampleFormat enum values (subset handled below).
var AV_SAMPLE_FMT_U8 = 0;
var AV_SAMPLE_FMT_S16 = 1;
var AV_SAMPLE_FMT_S32 = 2;
var AV_SAMPLE_FMT_FLT = 3;
var AV_SAMPLE_FMT_U8P = 5;
var AV_SAMPLE_FMT_S16P = 6;
var AV_SAMPLE_FMT_S32P = 7;
var AV_SAMPLE_FMT_FLTP = 8;
/**
 * Convert a decoded libav audio frame into interleaved Float32 PCM in [-1, 1].
 *
 * @param {object} frame - libav.js frame (format, data, nb_samples, …).
 * @returns {{data: Float32Array, channels: number, sampleRate: number}|null}
 *   null when the frame is empty or its sample format is not handled.
 */
function libavFrameToInterleavedFloat32(frame) {
  const channels = frame.channels ?? frame.ch_layout_nb_channels ?? 1;
  const sampleRate = frame.sample_rate ?? 44100;
  const nbSamples = frame.nb_samples ?? 0;
  if (nbSamples === 0) return null;
  const out = new Float32Array(nbSamples * channels);
  // Planar layouts: one plane per channel, interleave while converting.
  const fillPlanar = (cast, toFloat) => {
    const planes = ensurePlanes(frame.data, channels);
    for (let ch = 0; ch < channels; ch++) {
      const plane = cast(planes[ch]);
      for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = toFloat(plane[i]);
    }
  };
  // Interleaved layouts: a single flat buffer, convert in place order.
  const fillInterleaved = (cast, toFloat) => {
    const flat = cast(frame.data);
    for (let i = 0; i < nbSamples * channels; i++) out[i] = toFloat(flat[i]);
  };
  const identity = (v) => v;
  const fromS16 = (v) => v / 32768;
  const fromS32 = (v) => v / 2147483648;
  const fromU8 = (v) => (v - 128) / 128;
  switch (frame.format) {
    case AV_SAMPLE_FMT_FLTP: fillPlanar(asFloat32, identity); break;
    case AV_SAMPLE_FMT_FLT: fillInterleaved(asFloat32, identity); break;
    case AV_SAMPLE_FMT_S16P: fillPlanar(asInt16, fromS16); break;
    case AV_SAMPLE_FMT_S16: fillInterleaved(asInt16, fromS16); break;
    case AV_SAMPLE_FMT_S32P: fillPlanar(asInt32, fromS32); break;
    case AV_SAMPLE_FMT_S32: fillInterleaved(asInt32, fromS32); break;
    case AV_SAMPLE_FMT_U8P: fillPlanar(asUint8, fromU8); break;
    case AV_SAMPLE_FMT_U8: fillInterleaved(asUint8, fromU8); break;
    default:
      return null;
  }
  return { data: out, channels, sampleRate };
}
/** Return per-channel planes; flat buffers are split into equal subarrays. */
function ensurePlanes(data, channels) {
  if (Array.isArray(data)) return data;
  const flat = data;
  const perChannel = Math.floor(flat.length / channels);
  const planes = [];
  for (let ch = 0; ch < channels; ch++) {
    planes.push(flat.subarray ? flat.subarray(ch * perChannel, (ch + 1) * perChannel) : flat);
  }
  return planes;
}
/** Reinterpret a typed array's bytes as Float32Array without copying. */
function asFloat32(x) {
  if (x instanceof Float32Array) return x;
  return new Float32Array(x.buffer, x.byteOffset, x.byteLength / 4);
}
/** Reinterpret a typed array's bytes as Int16Array without copying. */
function asInt16(x) {
  if (x instanceof Int16Array) return x;
  return new Int16Array(x.buffer, x.byteOffset, x.byteLength / 2);
}
/** Reinterpret a typed array's bytes as Int32Array without copying. */
function asInt32(x) {
  if (x instanceof Int32Array) return x;
  return new Int32Array(x.buffer, x.byteOffset, x.byteLength);
}
/** Reinterpret a typed array's bytes as Uint8Array without copying. */
function asUint8(x) {
  if (x instanceof Uint8Array) return x;
  return new Uint8Array(x.buffer, x.byteOffset, x.byteLength);
}
33129
+ function sanitizeFrameTimestamp(frame, nextUs, fallbackTimeBase) {
33130
+ const lo = frame.pts ?? 0;
33131
+ const hi = frame.ptshi ?? 0;
33132
+ const isInvalid = hi === -2147483648 && lo === 0 || !Number.isFinite(lo);
33133
+ if (isInvalid) {
33134
+ const us2 = nextUs();
33135
+ frame.pts = us2;
33136
+ frame.ptshi = 0;
33137
+ return;
33138
+ }
33139
+ const tb = fallbackTimeBase ?? [1, 1e6];
33140
+ const pts64 = hi * 4294967296 + lo;
33141
+ const us = Math.round(pts64 * 1e6 * tb[0] / tb[1]);
33142
+ if (Number.isFinite(us) && Math.abs(us) <= Number.MAX_SAFE_INTEGER) {
33143
+ frame.pts = us;
33144
+ frame.ptshi = us < 0 ? -1 : 0;
33145
+ return;
33146
+ }
33147
+ const fallback = nextUs();
33148
+ frame.pts = fallback;
33149
+ frame.ptshi = 0;
33150
+ }
33151
+
33152
+ // src/strategies/hybrid/decoder.ts
33153
+ async function startHybridDecoder(opts) {
33154
+ const variant = pickLibavVariant(opts.context);
33155
+ const libav = await loadLibav(variant);
33156
+ const bridge = await loadBridge();
33157
+ const { prepareLibavInput: prepareLibavInput2 } = await Promise.resolve().then(() => (init_libav_http_reader(), libav_http_reader_exports));
33158
+ const inputHandle = await prepareLibavInput2(libav, opts.filename, opts.source, opts.transport);
33159
+ const readPkt = await libav.av_packet_alloc();
33160
+ const [fmt_ctx, streams] = await libav.ff_init_demuxer_file(opts.filename);
33161
+ const videoStream = streams.find((s) => s.codec_type === libav.AVMEDIA_TYPE_VIDEO) ?? null;
33162
+ const firstAudioTrackId = opts.context.audioTracks[0]?.id;
33163
+ let audioStream = (firstAudioTrackId != null ? streams.find((s) => s.codec_type === libav.AVMEDIA_TYPE_AUDIO && s.index === firstAudioTrackId) : void 0) ?? streams.find((s) => s.codec_type === libav.AVMEDIA_TYPE_AUDIO) ?? null;
33164
+ if (!videoStream && !audioStream) {
33165
+ throw new Error("hybrid decoder: file has no decodable streams");
33166
+ }
33167
+ let fatalHandler = null;
33168
+ let fatalFired = false;
33169
+ function fireFatal(reason) {
33170
+ if (fatalFired) return;
33171
+ fatalFired = true;
33172
+ fatalHandler?.(reason);
33173
+ }
33174
+ let videoDecoder = null;
33175
+ let videoTimeBase;
33176
+ if (videoStream) {
33177
+ try {
33178
+ const config = await bridge.videoStreamToConfig(libav, videoStream);
33179
+ if (!config) throw new Error("bridge returned null config");
33180
+ const supported = await VideoDecoder.isConfigSupported(config);
33181
+ if (!supported.supported) throw new Error(`VideoDecoder does not support config: ${JSON.stringify(config)}`);
33182
+ videoDecoder = new VideoDecoder({
33183
+ output: (frame) => {
33184
+ opts.renderer.enqueue(frame);
33185
+ videoFramesDecoded++;
33186
+ },
33187
+ error: (err) => {
33188
+ console.error("[avbridge] WebCodecs VideoDecoder error:", err);
33189
+ fireFatal(`WebCodecs VideoDecoder error: ${err.message}`);
33190
+ }
33191
+ });
33192
+ videoDecoder.configure(config);
33193
+ if (videoStream.time_base_num && videoStream.time_base_den) {
33194
+ videoTimeBase = [videoStream.time_base_num, videoStream.time_base_den];
33195
+ }
33196
+ } catch (err) {
33197
+ console.error("[avbridge] hybrid: failed to init WebCodecs VideoDecoder:", err);
33198
+ fireFatal(`WebCodecs VideoDecoder init failed: ${err.message}`);
33199
+ await inputHandle.detach().catch(() => {
33200
+ });
33201
+ throw err;
33202
+ }
33203
+ }
33204
+ let audioDec = null;
33205
+ let audioTimeBase;
33206
+ if (audioStream) {
33207
+ try {
33208
+ const [, c, pkt, frame] = await libav.ff_init_decoder(audioStream.codec_id, {
33209
+ codecpar: audioStream.codecpar
33210
+ });
33211
+ audioDec = { c, pkt, frame };
33212
+ if (audioStream.time_base_num && audioStream.time_base_den) {
33213
+ audioTimeBase = [audioStream.time_base_num, audioStream.time_base_den];
32722
33214
  }
32723
33215
  } catch (err) {
32724
33216
  console.warn(
@@ -32964,6 +33456,71 @@ async function startHybridDecoder(opts) {
32964
33456
  } catch {
32965
33457
  }
32966
33458
  },
33459
+ async setAudioTrack(trackId, timeSec) {
33460
+ if (audioStream && audioStream.index === trackId) return;
33461
+ const newStream = streams.find(
33462
+ (s) => s.codec_type === libav.AVMEDIA_TYPE_AUDIO && s.index === trackId
33463
+ );
33464
+ if (!newStream) {
33465
+ console.warn("[avbridge] hybrid: setAudioTrack \u2014 no stream with id", trackId);
33466
+ return;
33467
+ }
33468
+ const newToken = ++pumpToken;
33469
+ if (pumpRunning) {
33470
+ try {
33471
+ await pumpRunning;
33472
+ } catch {
33473
+ }
33474
+ }
33475
+ if (destroyed) return;
33476
+ if (audioDec) {
33477
+ try {
33478
+ await libav.ff_free_decoder?.(audioDec.c, audioDec.pkt, audioDec.frame);
33479
+ } catch {
33480
+ }
33481
+ audioDec = null;
33482
+ }
33483
+ try {
33484
+ const [, c, pkt, frame] = await libav.ff_init_decoder(newStream.codec_id, {
33485
+ codecpar: newStream.codecpar
33486
+ });
33487
+ audioDec = { c, pkt, frame };
33488
+ audioTimeBase = newStream.time_base_num && newStream.time_base_den ? [newStream.time_base_num, newStream.time_base_den] : void 0;
33489
+ } catch (err) {
33490
+ console.warn(
33491
+ "[avbridge] hybrid: setAudioTrack init failed \u2014 switching to no-audio:",
33492
+ err.message
33493
+ );
33494
+ audioDec = null;
33495
+ opts.audio.setNoAudio();
33496
+ }
33497
+ audioStream = newStream;
33498
+ try {
33499
+ const tsUs = Math.floor(timeSec * 1e6);
33500
+ const [tsLo, tsHi] = libav.f64toi64 ? libav.f64toi64(tsUs) : [tsUs | 0, Math.floor(tsUs / 4294967296)];
33501
+ await libav.av_seek_frame(
33502
+ fmt_ctx,
33503
+ -1,
33504
+ tsLo,
33505
+ tsHi,
33506
+ libav.AVSEEK_FLAG_BACKWARD ?? 0
33507
+ );
33508
+ } catch (err) {
33509
+ console.warn("[avbridge] hybrid: setAudioTrack seek failed:", err);
33510
+ }
33511
+ try {
33512
+ if (videoDecoder && videoDecoder.state === "configured") {
33513
+ await videoDecoder.flush();
33514
+ }
33515
+ } catch {
33516
+ }
33517
+ await flushBSF();
33518
+ syntheticVideoUs = Math.round(timeSec * 1e6);
33519
+ syntheticAudioUs = Math.round(timeSec * 1e6);
33520
+ pumpRunning = pumpLoop(newToken).catch(
33521
+ (err) => console.error("[avbridge] hybrid pump failed (post-setAudioTrack):", err)
33522
+ );
33523
+ },
32967
33524
  async seek(timeSec) {
32968
33525
  const newToken = ++pumpToken;
32969
33526
  if (pumpRunning) {
@@ -33021,158 +33578,6 @@ async function startHybridDecoder(opts) {
33021
33578
  }
33022
33579
  };
33023
33580
  }
33024
- function sanitizePacketTimestamp(pkt, nextUs, fallbackTimeBase) {
33025
- const lo = pkt.pts ?? 0;
33026
- const hi = pkt.ptshi ?? 0;
33027
- const isInvalid = hi === -2147483648 && lo === 0 || !Number.isFinite(lo);
33028
- if (isInvalid) {
33029
- const us2 = nextUs();
33030
- pkt.pts = us2;
33031
- pkt.ptshi = 0;
33032
- pkt.time_base_num = 1;
33033
- pkt.time_base_den = 1e6;
33034
- return;
33035
- }
33036
- const tb = fallbackTimeBase ?? [1, 1e6];
33037
- const pts64 = hi * 4294967296 + lo;
33038
- const us = Math.round(pts64 * 1e6 * tb[0] / tb[1]);
33039
- if (Number.isFinite(us) && Math.abs(us) <= Number.MAX_SAFE_INTEGER) {
33040
- pkt.pts = us;
33041
- pkt.ptshi = us < 0 ? -1 : 0;
33042
- pkt.time_base_num = 1;
33043
- pkt.time_base_den = 1e6;
33044
- return;
33045
- }
33046
- const fallback = nextUs();
33047
- pkt.pts = fallback;
33048
- pkt.ptshi = 0;
33049
- pkt.time_base_num = 1;
33050
- pkt.time_base_den = 1e6;
33051
- }
33052
- function sanitizeFrameTimestamp(frame, nextUs, fallbackTimeBase) {
33053
- const lo = frame.pts ?? 0;
33054
- const hi = frame.ptshi ?? 0;
33055
- const isInvalid = hi === -2147483648 && lo === 0 || !Number.isFinite(lo);
33056
- if (isInvalid) {
33057
- const us2 = nextUs();
33058
- frame.pts = us2;
33059
- frame.ptshi = 0;
33060
- return;
33061
- }
33062
- const tb = fallbackTimeBase ?? [1, 1e6];
33063
- const pts64 = hi * 4294967296 + lo;
33064
- const us = Math.round(pts64 * 1e6 * tb[0] / tb[1]);
33065
- if (Number.isFinite(us) && Math.abs(us) <= Number.MAX_SAFE_INTEGER) {
33066
- frame.pts = us;
33067
- frame.ptshi = us < 0 ? -1 : 0;
33068
- return;
33069
- }
33070
- const fallback = nextUs();
33071
- frame.pts = fallback;
33072
- frame.ptshi = 0;
33073
- }
33074
- var AV_SAMPLE_FMT_U8 = 0;
33075
- var AV_SAMPLE_FMT_S16 = 1;
33076
- var AV_SAMPLE_FMT_S32 = 2;
33077
- var AV_SAMPLE_FMT_FLT = 3;
33078
- var AV_SAMPLE_FMT_U8P = 5;
33079
- var AV_SAMPLE_FMT_S16P = 6;
33080
- var AV_SAMPLE_FMT_S32P = 7;
33081
- var AV_SAMPLE_FMT_FLTP = 8;
33082
- function libavFrameToInterleavedFloat32(frame) {
33083
- const channels = frame.channels ?? frame.ch_layout_nb_channels ?? 1;
33084
- const sampleRate = frame.sample_rate ?? 44100;
33085
- const nbSamples = frame.nb_samples ?? 0;
33086
- if (nbSamples === 0) return null;
33087
- const out = new Float32Array(nbSamples * channels);
33088
- switch (frame.format) {
33089
- case AV_SAMPLE_FMT_FLTP: {
33090
- const planes = ensurePlanes(frame.data, channels);
33091
- for (let ch = 0; ch < channels; ch++) {
33092
- const plane = asFloat32(planes[ch]);
33093
- for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = plane[i];
33094
- }
33095
- return { data: out, channels, sampleRate };
33096
- }
33097
- case AV_SAMPLE_FMT_FLT: {
33098
- const flat = asFloat32(frame.data);
33099
- for (let i = 0; i < nbSamples * channels; i++) out[i] = flat[i];
33100
- return { data: out, channels, sampleRate };
33101
- }
33102
- case AV_SAMPLE_FMT_S16P: {
33103
- const planes = ensurePlanes(frame.data, channels);
33104
- for (let ch = 0; ch < channels; ch++) {
33105
- const plane = asInt16(planes[ch]);
33106
- for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = plane[i] / 32768;
33107
- }
33108
- return { data: out, channels, sampleRate };
33109
- }
33110
- case AV_SAMPLE_FMT_S16: {
33111
- const flat = asInt16(frame.data);
33112
- for (let i = 0; i < nbSamples * channels; i++) out[i] = flat[i] / 32768;
33113
- return { data: out, channels, sampleRate };
33114
- }
33115
- case AV_SAMPLE_FMT_S32P: {
33116
- const planes = ensurePlanes(frame.data, channels);
33117
- for (let ch = 0; ch < channels; ch++) {
33118
- const plane = asInt32(planes[ch]);
33119
- for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = plane[i] / 2147483648;
33120
- }
33121
- return { data: out, channels, sampleRate };
33122
- }
33123
- case AV_SAMPLE_FMT_S32: {
33124
- const flat = asInt32(frame.data);
33125
- for (let i = 0; i < nbSamples * channels; i++) out[i] = flat[i] / 2147483648;
33126
- return { data: out, channels, sampleRate };
33127
- }
33128
- case AV_SAMPLE_FMT_U8P: {
33129
- const planes = ensurePlanes(frame.data, channels);
33130
- for (let ch = 0; ch < channels; ch++) {
33131
- const plane = asUint8(planes[ch]);
33132
- for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = (plane[i] - 128) / 128;
33133
- }
33134
- return { data: out, channels, sampleRate };
33135
- }
33136
- case AV_SAMPLE_FMT_U8: {
33137
- const flat = asUint8(frame.data);
33138
- for (let i = 0; i < nbSamples * channels; i++) out[i] = (flat[i] - 128) / 128;
33139
- return { data: out, channels, sampleRate };
33140
- }
33141
- default:
33142
- return null;
33143
- }
33144
- }
33145
- function ensurePlanes(data, channels) {
33146
- if (Array.isArray(data)) return data;
33147
- const arr = data;
33148
- const len = arr.length;
33149
- const perChannel = Math.floor(len / channels);
33150
- const planes = [];
33151
- for (let ch = 0; ch < channels; ch++) {
33152
- planes.push(arr.subarray ? arr.subarray(ch * perChannel, (ch + 1) * perChannel) : arr);
33153
- }
33154
- return planes;
33155
- }
33156
- function asFloat32(x) {
33157
- if (x instanceof Float32Array) return x;
33158
- const ta = x;
33159
- return new Float32Array(ta.buffer, ta.byteOffset, ta.byteLength / 4);
33160
- }
33161
- function asInt16(x) {
33162
- if (x instanceof Int16Array) return x;
33163
- const ta = x;
33164
- return new Int16Array(ta.buffer, ta.byteOffset, ta.byteLength / 2);
33165
- }
33166
- function asInt32(x) {
33167
- if (x instanceof Int32Array) return x;
33168
- const ta = x;
33169
- return new Int32Array(ta.buffer, ta.byteOffset, ta.byteLength / 4);
33170
- }
33171
- function asUint8(x) {
33172
- if (x instanceof Uint8Array) return x;
33173
- const ta = x;
33174
- return new Uint8Array(ta.buffer, ta.byteOffset, ta.byteLength);
33175
- }
33176
33581
  async function loadBridge() {
33177
33582
  try {
33178
33583
  const wrapper = await Promise.resolve().then(() => (init_libav_import(), libav_import_exports));
@@ -33184,6 +33589,35 @@ async function loadBridge() {
33184
33589
  }
33185
33590
  }
33186
33591
 
33592
+ // src/util/time-ranges.ts
33593
+ function makeTimeRanges(ranges) {
33594
+ const frozen = ranges.slice();
33595
+ const impl = {
33596
+ get length() {
33597
+ return frozen.length;
33598
+ },
33599
+ start(index) {
33600
+ if (index < 0 || index >= frozen.length) {
33601
+ throw new DOMException(
33602
+ `TimeRanges.start: index ${index} out of range (length=${frozen.length})`,
33603
+ "IndexSizeError"
33604
+ );
33605
+ }
33606
+ return frozen[index][0];
33607
+ },
33608
+ end(index) {
33609
+ if (index < 0 || index >= frozen.length) {
33610
+ throw new DOMException(
33611
+ `TimeRanges.end: index ${index} out of range (length=${frozen.length})`,
33612
+ "IndexSizeError"
33613
+ );
33614
+ }
33615
+ return frozen[index][1];
33616
+ }
33617
+ };
33618
+ return impl;
33619
+ }
33620
+
33187
33621
  // src/strategies/hybrid/index.ts
33188
33622
  var READY_AUDIO_BUFFER_SECONDS = 0.3;
33189
33623
  var READY_TIMEOUT_SECONDS = 10;
@@ -33241,6 +33675,18 @@ async function createHybridSession(ctx, target, transport) {
33241
33675
  get: () => ctx.duration ?? NaN
33242
33676
  });
33243
33677
  }
33678
+ Object.defineProperty(target, "readyState", {
33679
+ configurable: true,
33680
+ get: () => {
33681
+ if (!renderer.hasFrames()) return 0;
33682
+ if (!audio.isPlaying() && audio.bufferAhead() <= 0 && !audio.isNoAudio()) return 1;
33683
+ return 2;
33684
+ }
33685
+ });
33686
+ Object.defineProperty(target, "seekable", {
33687
+ configurable: true,
33688
+ get: () => makeTimeRanges(ctx.duration && Number.isFinite(ctx.duration) && ctx.duration > 0 ? [[0, ctx.duration]] : [])
33689
+ });
33244
33690
  async function waitForBuffer() {
33245
33691
  const start = performance.now();
33246
33692
  while (true) {
@@ -33285,7 +33731,24 @@ async function createHybridSession(ctx, target, transport) {
33285
33731
  async seek(time) {
33286
33732
  await doSeek(time);
33287
33733
  },
33288
- async setAudioTrack(_id) {
33734
+ async setAudioTrack(id) {
33735
+ if (!ctx.audioTracks.some((t) => t.id === id)) {
33736
+ console.warn("[avbridge] hybrid: setAudioTrack \u2014 unknown track id", id);
33737
+ return;
33738
+ }
33739
+ const wasPlaying = audio.isPlaying();
33740
+ const currentTime = audio.now();
33741
+ await audio.pause().catch(() => {
33742
+ });
33743
+ await handles.setAudioTrack(id, currentTime).catch(
33744
+ (err) => console.warn("[avbridge] hybrid: handles.setAudioTrack failed:", err)
33745
+ );
33746
+ await audio.reset(currentTime);
33747
+ renderer.flush();
33748
+ if (wasPlaying) {
33749
+ await waitForBuffer();
33750
+ await audio.start();
33751
+ }
33289
33752
  },
33290
33753
  async setSubtitleTrack(_id) {
33291
33754
  },
@@ -33305,6 +33768,8 @@ async function createHybridSession(ctx, target, transport) {
33305
33768
  delete target.paused;
33306
33769
  delete target.volume;
33307
33770
  delete target.muted;
33771
+ delete target.readyState;
33772
+ delete target.seekable;
33308
33773
  } catch {
33309
33774
  }
33310
33775
  },
@@ -33326,7 +33791,8 @@ async function startDecoder(opts) {
33326
33791
  const readPkt = await libav.av_packet_alloc();
33327
33792
  const [fmt_ctx, streams] = await libav.ff_init_demuxer_file(opts.filename);
33328
33793
  const videoStream = streams.find((s) => s.codec_type === libav.AVMEDIA_TYPE_VIDEO) ?? null;
33329
- const audioStream = streams.find((s) => s.codec_type === libav.AVMEDIA_TYPE_AUDIO) ?? null;
33794
+ const firstAudioTrackId = opts.context.audioTracks[0]?.id;
33795
+ let audioStream = (firstAudioTrackId != null ? streams.find((s) => s.codec_type === libav.AVMEDIA_TYPE_AUDIO && s.index === firstAudioTrackId) : void 0) ?? streams.find((s) => s.codec_type === libav.AVMEDIA_TYPE_AUDIO) ?? null;
33330
33796
  if (!videoStream && !audioStream) {
33331
33797
  throw new Error("fallback decoder: file has no decodable streams");
33332
33798
  }
@@ -33542,7 +34008,7 @@ async function startDecoder(opts) {
33542
34008
  if (myToken !== pumpToken || destroyed) return;
33543
34009
  for (const f of frames) {
33544
34010
  if (myToken !== pumpToken || destroyed) return;
33545
- const bridgeOpts = sanitizeFrameTimestamp2(
34011
+ sanitizeFrameTimestamp(
33546
34012
  f,
33547
34013
  () => {
33548
34014
  const ts = syntheticVideoUs;
@@ -33552,7 +34018,7 @@ async function startDecoder(opts) {
33552
34018
  videoTimeBase
33553
34019
  );
33554
34020
  try {
33555
- const vf = bridge.laFrameToVideoFrame(f, bridgeOpts);
34021
+ const vf = bridge.laFrameToVideoFrame(f, { timeBase: [1, 1e6] });
33556
34022
  opts.renderer.enqueue(vf);
33557
34023
  videoFramesDecoded++;
33558
34024
  } catch (err) {
@@ -33580,7 +34046,7 @@ async function startDecoder(opts) {
33580
34046
  if (myToken !== pumpToken || destroyed) return;
33581
34047
  for (const f of frames) {
33582
34048
  if (myToken !== pumpToken || destroyed) return;
33583
- sanitizeFrameTimestamp2(
34049
+ sanitizeFrameTimestamp(
33584
34050
  f,
33585
34051
  () => {
33586
34052
  const ts = syntheticAudioUs;
@@ -33591,7 +34057,7 @@ async function startDecoder(opts) {
33591
34057
  },
33592
34058
  audioTimeBase
33593
34059
  );
33594
- const samples = libavFrameToInterleavedFloat322(f);
34060
+ const samples = libavFrameToInterleavedFloat32(f);
33595
34061
  if (samples) {
33596
34062
  opts.audio.schedule(samples.data, samples.channels, samples.sampleRate);
33597
34063
  audioFramesDecoded++;
@@ -33627,17 +34093,80 @@ async function startDecoder(opts) {
33627
34093
  } catch {
33628
34094
  }
33629
34095
  try {
33630
- await libav.av_packet_free?.(readPkt);
33631
- } catch {
34096
+ await libav.av_packet_free?.(readPkt);
34097
+ } catch {
34098
+ }
34099
+ try {
34100
+ await libav.avformat_close_input_js(fmt_ctx);
34101
+ } catch {
34102
+ }
34103
+ try {
34104
+ await inputHandle.detach();
34105
+ } catch {
34106
+ }
34107
+ },
34108
+ async setAudioTrack(trackId, timeSec) {
34109
+ if (audioStream && audioStream.index === trackId) return;
34110
+ const newStream = streams.find(
34111
+ (s) => s.codec_type === libav.AVMEDIA_TYPE_AUDIO && s.index === trackId
34112
+ );
34113
+ if (!newStream) {
34114
+ console.warn("[avbridge] fallback: setAudioTrack \u2014 no stream with id", trackId);
34115
+ return;
34116
+ }
34117
+ const newToken = ++pumpToken;
34118
+ if (pumpRunning) {
34119
+ try {
34120
+ await pumpRunning;
34121
+ } catch {
34122
+ }
34123
+ }
34124
+ if (destroyed) return;
34125
+ if (audioDec) {
34126
+ try {
34127
+ await libav.ff_free_decoder?.(audioDec.c, audioDec.pkt, audioDec.frame);
34128
+ } catch {
34129
+ }
34130
+ audioDec = null;
34131
+ }
34132
+ try {
34133
+ const [, c, pkt, frame] = await libav.ff_init_decoder(newStream.codec_id, {
34134
+ codecpar: newStream.codecpar
34135
+ });
34136
+ audioDec = { c, pkt, frame };
34137
+ audioTimeBase = newStream.time_base_num && newStream.time_base_den ? [newStream.time_base_num, newStream.time_base_den] : void 0;
34138
+ } catch (err) {
34139
+ console.warn(
34140
+ "[avbridge] fallback: setAudioTrack init failed \u2014 falling back to no-audio mode:",
34141
+ err.message
34142
+ );
34143
+ audioDec = null;
34144
+ opts.audio.setNoAudio();
33632
34145
  }
34146
+ audioStream = newStream;
33633
34147
  try {
33634
- await libav.avformat_close_input_js(fmt_ctx);
33635
- } catch {
34148
+ const tsUs = Math.floor(timeSec * 1e6);
34149
+ const [tsLo, tsHi] = libav.f64toi64 ? libav.f64toi64(tsUs) : [tsUs | 0, Math.floor(tsUs / 4294967296)];
34150
+ await libav.av_seek_frame(
34151
+ fmt_ctx,
34152
+ -1,
34153
+ tsLo,
34154
+ tsHi,
34155
+ libav.AVSEEK_FLAG_BACKWARD ?? 0
34156
+ );
34157
+ } catch (err) {
34158
+ console.warn("[avbridge] fallback: setAudioTrack seek failed:", err);
33636
34159
  }
33637
34160
  try {
33638
- await inputHandle.detach();
34161
+ if (videoDec) await libav.avcodec_flush_buffers?.(videoDec.c);
33639
34162
  } catch {
33640
34163
  }
34164
+ await flushBSF();
34165
+ syntheticVideoUs = Math.round(timeSec * 1e6);
34166
+ syntheticAudioUs = Math.round(timeSec * 1e6);
34167
+ pumpRunning = pumpLoop(newToken).catch(
34168
+ (err) => console.error("[avbridge] fallback pump failed (post-setAudioTrack):", err)
34169
+ );
33641
34170
  },
33642
34171
  async seek(timeSec) {
33643
34172
  const newToken = ++pumpToken;
@@ -33695,135 +34224,6 @@ async function startDecoder(opts) {
33695
34224
  }
33696
34225
  };
33697
34226
  }
33698
- function sanitizeFrameTimestamp2(frame, nextUs, fallbackTimeBase) {
33699
- const lo = frame.pts ?? 0;
33700
- const hi = frame.ptshi ?? 0;
33701
- const isInvalid = hi === -2147483648 && lo === 0 || !Number.isFinite(lo);
33702
- if (isInvalid) {
33703
- const us2 = nextUs();
33704
- frame.pts = us2;
33705
- frame.ptshi = 0;
33706
- return { timeBase: [1, 1e6] };
33707
- }
33708
- const tb = fallbackTimeBase ?? [1, 1e6];
33709
- const pts64 = hi * 4294967296 + lo;
33710
- const us = Math.round(pts64 * 1e6 * tb[0] / tb[1]);
33711
- if (Number.isFinite(us) && Math.abs(us) <= Number.MAX_SAFE_INTEGER) {
33712
- frame.pts = us;
33713
- frame.ptshi = us < 0 ? -1 : 0;
33714
- return { timeBase: [1, 1e6] };
33715
- }
33716
- const fallback = nextUs();
33717
- frame.pts = fallback;
33718
- frame.ptshi = 0;
33719
- return { timeBase: [1, 1e6] };
33720
- }
33721
- var AV_SAMPLE_FMT_U82 = 0;
33722
- var AV_SAMPLE_FMT_S162 = 1;
33723
- var AV_SAMPLE_FMT_S322 = 2;
33724
- var AV_SAMPLE_FMT_FLT2 = 3;
33725
- var AV_SAMPLE_FMT_U8P2 = 5;
33726
- var AV_SAMPLE_FMT_S16P2 = 6;
33727
- var AV_SAMPLE_FMT_S32P2 = 7;
33728
- var AV_SAMPLE_FMT_FLTP2 = 8;
33729
- function libavFrameToInterleavedFloat322(frame) {
33730
- const channels = frame.channels ?? frame.ch_layout_nb_channels ?? 1;
33731
- const sampleRate = frame.sample_rate ?? 44100;
33732
- const nbSamples = frame.nb_samples ?? 0;
33733
- if (nbSamples === 0) return null;
33734
- const out = new Float32Array(nbSamples * channels);
33735
- switch (frame.format) {
33736
- case AV_SAMPLE_FMT_FLTP2: {
33737
- const planes = ensurePlanes2(frame.data, channels);
33738
- for (let ch = 0; ch < channels; ch++) {
33739
- const plane = asFloat322(planes[ch]);
33740
- for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = plane[i];
33741
- }
33742
- return { data: out, channels, sampleRate };
33743
- }
33744
- case AV_SAMPLE_FMT_FLT2: {
33745
- const flat = asFloat322(frame.data);
33746
- for (let i = 0; i < nbSamples * channels; i++) out[i] = flat[i];
33747
- return { data: out, channels, sampleRate };
33748
- }
33749
- case AV_SAMPLE_FMT_S16P2: {
33750
- const planes = ensurePlanes2(frame.data, channels);
33751
- for (let ch = 0; ch < channels; ch++) {
33752
- const plane = asInt162(planes[ch]);
33753
- for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = plane[i] / 32768;
33754
- }
33755
- return { data: out, channels, sampleRate };
33756
- }
33757
- case AV_SAMPLE_FMT_S162: {
33758
- const flat = asInt162(frame.data);
33759
- for (let i = 0; i < nbSamples * channels; i++) out[i] = flat[i] / 32768;
33760
- return { data: out, channels, sampleRate };
33761
- }
33762
- case AV_SAMPLE_FMT_S32P2: {
33763
- const planes = ensurePlanes2(frame.data, channels);
33764
- for (let ch = 0; ch < channels; ch++) {
33765
- const plane = asInt322(planes[ch]);
33766
- for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = plane[i] / 2147483648;
33767
- }
33768
- return { data: out, channels, sampleRate };
33769
- }
33770
- case AV_SAMPLE_FMT_S322: {
33771
- const flat = asInt322(frame.data);
33772
- for (let i = 0; i < nbSamples * channels; i++) out[i] = flat[i] / 2147483648;
33773
- return { data: out, channels, sampleRate };
33774
- }
33775
- case AV_SAMPLE_FMT_U8P2: {
33776
- const planes = ensurePlanes2(frame.data, channels);
33777
- for (let ch = 0; ch < channels; ch++) {
33778
- const plane = asUint82(planes[ch]);
33779
- for (let i = 0; i < nbSamples; i++) out[i * channels + ch] = (plane[i] - 128) / 128;
33780
- }
33781
- return { data: out, channels, sampleRate };
33782
- }
33783
- case AV_SAMPLE_FMT_U82: {
33784
- const flat = asUint82(frame.data);
33785
- for (let i = 0; i < nbSamples * channels; i++) out[i] = (flat[i] - 128) / 128;
33786
- return { data: out, channels, sampleRate };
33787
- }
33788
- default:
33789
- if (!globalThis.__avbridgeLoggedSampleFmt) {
33790
- globalThis.__avbridgeLoggedSampleFmt = frame.format;
33791
- console.warn(`[avbridge] unsupported audio sample format from libav: ${frame.format}`);
33792
- }
33793
- return null;
33794
- }
33795
- }
33796
- function ensurePlanes2(data, channels) {
33797
- if (Array.isArray(data)) return data;
33798
- const arr = data;
33799
- const len = arr.length;
33800
- const perChannel = Math.floor(len / channels);
33801
- const planes = [];
33802
- for (let ch = 0; ch < channels; ch++) {
33803
- planes.push(arr.subarray ? arr.subarray(ch * perChannel, (ch + 1) * perChannel) : arr);
33804
- }
33805
- return planes;
33806
- }
33807
- function asFloat322(x) {
33808
- if (x instanceof Float32Array) return x;
33809
- const ta = x;
33810
- return new Float32Array(ta.buffer, ta.byteOffset, ta.byteLength / 4);
33811
- }
33812
- function asInt162(x) {
33813
- if (x instanceof Int16Array) return x;
33814
- const ta = x;
33815
- return new Int16Array(ta.buffer, ta.byteOffset, ta.byteLength / 2);
33816
- }
33817
- function asInt322(x) {
33818
- if (x instanceof Int32Array) return x;
33819
- const ta = x;
33820
- return new Int32Array(ta.buffer, ta.byteOffset, ta.byteLength / 4);
33821
- }
33822
- function asUint82(x) {
33823
- if (x instanceof Uint8Array) return x;
33824
- const ta = x;
33825
- return new Uint8Array(ta.buffer, ta.byteOffset, ta.byteLength);
33826
- }
33827
34227
  async function loadBridge2() {
33828
34228
  try {
33829
34229
  const wrapper = await Promise.resolve().then(() => (init_libav_import(), libav_import_exports));
@@ -33893,6 +34293,18 @@ async function createFallbackSession(ctx, target, transport) {
33893
34293
  get: () => ctx.duration ?? NaN
33894
34294
  });
33895
34295
  }
34296
+ Object.defineProperty(target, "readyState", {
34297
+ configurable: true,
34298
+ get: () => {
34299
+ if (!renderer.hasFrames()) return 0;
34300
+ if (!audio.isPlaying() && audio.bufferAhead() <= 0 && !audio.isNoAudio()) return 1;
34301
+ return 2;
34302
+ }
34303
+ });
34304
+ Object.defineProperty(target, "seekable", {
34305
+ configurable: true,
34306
+ get: () => makeTimeRanges(ctx.duration && Number.isFinite(ctx.duration) && ctx.duration > 0 ? [[0, ctx.duration]] : [])
34307
+ });
33896
34308
  async function waitForBuffer() {
33897
34309
  const start = performance.now();
33898
34310
  let firstFrameAtMs = 0;
@@ -33961,7 +34373,24 @@ async function createFallbackSession(ctx, target, transport) {
33961
34373
  async seek(time) {
33962
34374
  await doSeek(time);
33963
34375
  },
33964
- async setAudioTrack(_id) {
34376
+ async setAudioTrack(id) {
34377
+ if (!ctx.audioTracks.some((t) => t.id === id)) {
34378
+ console.warn("[avbridge] fallback: setAudioTrack \u2014 unknown track id", id);
34379
+ return;
34380
+ }
34381
+ const wasPlaying = audio.isPlaying();
34382
+ const currentTime = audio.now();
34383
+ await audio.pause().catch(() => {
34384
+ });
34385
+ await handles.setAudioTrack(id, currentTime).catch(
34386
+ (err) => console.warn("[avbridge] fallback: handles.setAudioTrack failed:", err)
34387
+ );
34388
+ await audio.reset(currentTime);
34389
+ renderer.flush();
34390
+ if (wasPlaying) {
34391
+ await waitForBuffer();
34392
+ await audio.start();
34393
+ }
33965
34394
  },
33966
34395
  async setSubtitleTrack(_id) {
33967
34396
  },
@@ -33978,6 +34407,8 @@ async function createFallbackSession(ctx, target, transport) {
33978
34407
  delete target.paused;
33979
34408
  delete target.volume;
33980
34409
  delete target.muted;
34410
+ delete target.readyState;
34411
+ delete target.seekable;
33981
34412
  } catch {
33982
34413
  }
33983
34414
  },
@@ -34015,123 +34446,8 @@ function registerBuiltins(registry) {
34015
34446
  registry.register(fallbackPlugin);
34016
34447
  }
34017
34448
 
34018
- // src/subtitles/index.ts
34019
- init_transport();
34020
-
34021
- // src/subtitles/srt.ts
34022
- function srtToVtt(srt) {
34023
- if (srt.charCodeAt(0) === 65279) srt = srt.slice(1);
34024
- const normalized = srt.replace(/\r\n/g, "\n").replace(/\r/g, "\n").trim();
34025
- const blocks = normalized.split(/\n{2,}/);
34026
- const out = ["WEBVTT", ""];
34027
- for (const block of blocks) {
34028
- const lines = block.split("\n");
34029
- if (lines.length > 0 && /^\d+$/.test(lines[0].trim())) {
34030
- lines.shift();
34031
- }
34032
- if (lines.length === 0) continue;
34033
- const timing = lines.shift();
34034
- const vttTiming = convertTiming(timing);
34035
- if (!vttTiming) continue;
34036
- out.push(vttTiming);
34037
- for (const l of lines) out.push(l);
34038
- out.push("");
34039
- }
34040
- return out.join("\n");
34041
- }
34042
- function convertTiming(line) {
34043
- const m = /^(\d{1,2}):(\d{2}):(\d{2})[,.](\d{1,3})\s*-->\s*(\d{1,2}):(\d{2}):(\d{2})[,.](\d{1,3})(.*)$/.exec(
34044
- line.trim()
34045
- );
34046
- if (!m) return null;
34047
- const fmt2 = (h, mm, s, ms) => `${h.padStart(2, "0")}:${mm}:${s}.${ms.padEnd(3, "0").slice(0, 3)}`;
34048
- return `${fmt2(m[1], m[2], m[3], m[4])} --> ${fmt2(m[5], m[6], m[7], m[8])}${m[9] ?? ""}`;
34049
- }
34050
-
34051
- // src/subtitles/vtt.ts
34052
- function isVtt(text) {
34053
- const trimmed = text.replace(/^\ufeff/, "").trimStart();
34054
- return trimmed.startsWith("WEBVTT");
34055
- }
34056
-
34057
- // src/subtitles/index.ts
34058
- async function discoverSidecars(file, directory) {
34059
- const baseName = file.name.replace(/\.[^.]+$/, "");
34060
- const found = [];
34061
- for await (const [name, handle] of directory) {
34062
- if (handle.kind !== "file") continue;
34063
- if (!name.startsWith(baseName)) continue;
34064
- const lower = name.toLowerCase();
34065
- let format = null;
34066
- if (lower.endsWith(".srt")) format = "srt";
34067
- else if (lower.endsWith(".vtt")) format = "vtt";
34068
- if (!format) continue;
34069
- const sidecarFile = await handle.getFile();
34070
- const url2 = URL.createObjectURL(sidecarFile);
34071
- const langMatch = name.slice(baseName.length).match(/[._-]([a-z]{2,3})(?:[._-]|\.)/i);
34072
- found.push({
34073
- url: url2,
34074
- format,
34075
- language: langMatch?.[1]
34076
- });
34077
- }
34078
- return found;
34079
- }
34080
- var SubtitleResourceBag = class {
34081
- urls = /* @__PURE__ */ new Set();
34082
- /** Track an externally-created blob URL (e.g. from `discoverSidecars`). */
34083
- track(url2) {
34084
- this.urls.add(url2);
34085
- }
34086
- /** Convenience: create a blob URL and track it in one call. */
34087
- createObjectURL(blob) {
34088
- const url2 = URL.createObjectURL(blob);
34089
- this.urls.add(url2);
34090
- return url2;
34091
- }
34092
- /** Revoke every tracked URL. Idempotent — safe to call multiple times. */
34093
- revokeAll() {
34094
- for (const u of this.urls) URL.revokeObjectURL(u);
34095
- this.urls.clear();
34096
- }
34097
- };
34098
- async function attachSubtitleTracks(video, tracks, bag, onError, transport) {
34099
- const doFetch = fetchWith(transport);
34100
- for (const t of Array.from(video.querySelectorAll("track[data-avbridge]"))) {
34101
- t.remove();
34102
- }
34103
- for (const t of tracks) {
34104
- if (!t.sidecarUrl) continue;
34105
- try {
34106
- let url2 = t.sidecarUrl;
34107
- if (t.format === "srt") {
34108
- const res = await doFetch(t.sidecarUrl, transport?.requestInit);
34109
- const text = await res.text();
34110
- const vtt = srtToVtt(text);
34111
- const blob = new Blob([vtt], { type: "text/vtt" });
34112
- url2 = bag ? bag.createObjectURL(blob) : URL.createObjectURL(blob);
34113
- } else if (t.format === "vtt") {
34114
- const res = await doFetch(t.sidecarUrl, transport?.requestInit);
34115
- const text = await res.text();
34116
- if (!isVtt(text)) {
34117
- console.warn("[avbridge] subtitle missing WEBVTT header:", t.sidecarUrl);
34118
- }
34119
- }
34120
- const trackEl = document.createElement("track");
34121
- trackEl.kind = "subtitles";
34122
- trackEl.src = url2;
34123
- trackEl.srclang = t.language ?? "und";
34124
- trackEl.label = t.language ?? `Subtitle ${t.id}`;
34125
- trackEl.dataset.avbridge = "true";
34126
- video.appendChild(trackEl);
34127
- } catch (err) {
34128
- const e = err instanceof Error ? err : new Error(String(err));
34129
- onError?.(e, t);
34130
- }
34131
- }
34132
- }
34133
-
34134
34449
  // src/player.ts
34450
+ init_subtitles2();
34135
34451
  init_debug();
34136
34452
  init_errors();
34137
34453
  var UnifiedPlayer = class _UnifiedPlayer {
@@ -34245,17 +34561,15 @@ var UnifiedPlayer = class _UnifiedPlayer {
34245
34561
  reason: decision.reason
34246
34562
  });
34247
34563
  await this.startSession(decision.strategy, decision.reason);
34248
- if (this.session.strategy !== "fallback" && this.session.strategy !== "hybrid") {
34249
- await attachSubtitleTracks(
34250
- this.options.target,
34251
- ctx.subtitleTracks,
34252
- this.subtitleResources,
34253
- (err, track) => {
34254
- console.warn(`[avbridge] subtitle ${track.id} failed: ${err.message}`);
34255
- },
34256
- this.transport
34257
- );
34258
- }
34564
+ await attachSubtitleTracks(
34565
+ this.options.target,
34566
+ ctx.subtitleTracks,
34567
+ this.subtitleResources,
34568
+ (err, track) => {
34569
+ console.warn(`[avbridge] subtitle ${track.id} failed: ${err.message}`);
34570
+ },
34571
+ this.transport
34572
+ );
34259
34573
  this.emitter.emitSticky("tracks", {
34260
34574
  video: ctx.videoTracks,
34261
34575
  audio: ctx.audioTracks,
@@ -34721,7 +35035,20 @@ var AvbridgeVideoElement = class extends HTMLElementCtor {
34721
35035
  _strategy = null;
34722
35036
  _strategyClass = null;
34723
35037
  _audioTracks = [];
35038
+ /** Subtitle tracks reported by the active UnifiedPlayer (options.subtitles
35039
+ * + embedded container tracks + programmatic addSubtitle calls). */
34724
35040
  _subtitleTracks = [];
35041
+ /** Subtitle tracks derived from light-DOM `<track>` children. Maintained
35042
+ * by _syncTextTracks on every mutation. Merged into the public
35043
+ * `subtitleTracks` getter so the player's settings menu sees them. */
35044
+ _htmlTrackInfo = [];
35045
+ /**
35046
+ * External subtitle list forwarded to `createPlayer()` on the next
35047
+ * bootstrap. Setting this after bootstrap queues it for the next
35048
+ * source change; consumers that need to swap subtitles mid-playback
35049
+ * should set `source` to reload.
35050
+ */
35051
+ _subtitles = null;
34725
35052
  /**
34726
35053
  * Initial strategy preference. `"auto"` means "let the classifier decide";
34727
35054
  * any other value is passed to `createPlayer({ initialStrategy })` and
@@ -34834,12 +35161,28 @@ var AvbridgeVideoElement = class extends HTMLElementCtor {
34834
35161
  _syncTextTracks() {
34835
35162
  const existing = this._videoEl.querySelectorAll("track");
34836
35163
  for (const t of Array.from(existing)) t.remove();
35164
+ this._htmlTrackInfo = [];
35165
+ let htmlIdx = 0;
34837
35166
  for (const child of Array.from(this.children)) {
34838
35167
  if (child.tagName === "TRACK") {
34839
- const clone = child.cloneNode(true);
35168
+ const track = child;
35169
+ const clone = track.cloneNode(true);
34840
35170
  this._videoEl.appendChild(clone);
35171
+ const src = track.getAttribute("src") ?? void 0;
35172
+ const format = src?.toLowerCase().endsWith(".srt") ? "srt" : "vtt";
35173
+ this._htmlTrackInfo.push({
35174
+ id: 1e4 + htmlIdx,
35175
+ format,
35176
+ language: track.srclang || track.getAttribute("label") || void 0,
35177
+ sidecarUrl: src
35178
+ });
35179
+ htmlIdx++;
34841
35180
  }
34842
35181
  }
35182
+ this._dispatch("trackschange", {
35183
+ audioTracks: this._audioTracks,
35184
+ subtitleTracks: this.subtitleTracks
35185
+ });
34843
35186
  }
34844
35187
  /** Internal src setter — separate from the property setter so the
34845
35188
  * attributeChangedCallback can use it without re-entering reflection. */
@@ -34877,7 +35220,8 @@ var AvbridgeVideoElement = class extends HTMLElementCtor {
34877
35220
  // Honor the consumer's preferred initial strategy. "auto" means
34878
35221
  // "let the classifier decide" — the createPlayer call simply doesn't
34879
35222
  // pass initialStrategy in that case.
34880
- ...this._preferredStrategy !== "auto" ? { initialStrategy: this._preferredStrategy } : {}
35223
+ ...this._preferredStrategy !== "auto" ? { initialStrategy: this._preferredStrategy } : {},
35224
+ ...this._subtitles ? { subtitles: this._subtitles } : {}
34881
35225
  });
34882
35226
  } catch (err) {
34883
35227
  if (id !== this._bootstrapId || this._destroyed) return;
@@ -35166,7 +35510,48 @@ var AvbridgeVideoElement = class extends HTMLElementCtor {
35166
35510
  return this._audioTracks;
35167
35511
  }
35168
35512
  get subtitleTracks() {
35169
- return this._subtitleTracks;
35513
+ return this._htmlTrackInfo.length === 0 ? this._subtitleTracks : [...this._subtitleTracks, ...this._htmlTrackInfo];
35514
+ }
35515
+ /**
35516
+ * External subtitle files to attach when the source loads. Takes effect
35517
+ * on the next bootstrap — set before assigning `source`, or reload via
35518
+ * `load()` after changing. For dynamic post-bootstrap addition, use
35519
+ * `addSubtitle()` instead.
35520
+ *
35521
+ * @example
35522
+ * el.subtitles = [{ url: "/en.srt", format: "srt", language: "en" }];
35523
+ * el.src = "/movie.mp4";
35524
+ */
35525
+ get subtitles() {
35526
+ return this._subtitles;
35527
+ }
35528
+ set subtitles(value) {
35529
+ this._subtitles = value;
35530
+ }
35531
+ /**
35532
+ * Attach a subtitle track to the current playback without rebuilding
35533
+ * the player. Works while the element is playing — converts SRT to
35534
+ * VTT if needed, adds a `<track>` to the inner `<video>`. Canvas
35535
+ * strategies pick up the new track via their textTracks watcher.
35536
+ */
35537
+ async addSubtitle(subtitle) {
35538
+ const { attachSubtitleTracks: attachSubtitleTracks2 } = await Promise.resolve().then(() => (init_subtitles2(), subtitles_exports));
35539
+ const format = subtitle.format ?? (subtitle.url.endsWith(".srt") ? "srt" : "vtt");
35540
+ const track = {
35541
+ id: this._subtitleTracks.length,
35542
+ format,
35543
+ language: subtitle.language,
35544
+ sidecarUrl: subtitle.url
35545
+ };
35546
+ this._subtitleTracks.push(track);
35547
+ await attachSubtitleTracks2(
35548
+ this._videoEl,
35549
+ this._subtitleTracks,
35550
+ void 0,
35551
+ (err, t) => {
35552
+ console.warn(`[avbridge] subtitle ${t.id} failed: ${err.message}`);
35553
+ }
35554
+ );
35170
35555
  }
35171
35556
  // ── Public methods ─────────────────────────────────────────────────────
35172
35557
  /** Force a (re-)bootstrap if a source is currently set. */
@@ -35214,6 +35599,12 @@ var AvbridgeVideoElement = class extends HTMLElementCtor {
35214
35599
  getDiagnostics() {
35215
35600
  return this._player?.getDiagnostics() ?? null;
35216
35601
  }
35602
+ addEventListener(type, listener, options) {
35603
+ super.addEventListener(type, listener, options);
35604
+ }
35605
+ removeEventListener(type, listener, options) {
35606
+ super.removeEventListener(type, listener, options);
35607
+ }
35217
35608
  // ── Event helpers ──────────────────────────────────────────────────────
35218
35609
  _dispatch(name, detail) {
35219
35610
  this.dispatchEvent(new CustomEvent(name, { detail, bubbles: false }));