avbridge 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (103) hide show
  1. package/CHANGELOG.md +120 -0
  2. package/LICENSE +21 -0
  3. package/README.md +415 -0
  4. package/dist/avi-M5B4SHRM.cjs +164 -0
  5. package/dist/avi-M5B4SHRM.cjs.map +1 -0
  6. package/dist/avi-POCGZ4JX.js +162 -0
  7. package/dist/avi-POCGZ4JX.js.map +1 -0
  8. package/dist/chunk-5ISVAODK.js +80 -0
  9. package/dist/chunk-5ISVAODK.js.map +1 -0
  10. package/dist/chunk-F7YS2XOA.cjs +2966 -0
  11. package/dist/chunk-F7YS2XOA.cjs.map +1 -0
  12. package/dist/chunk-FKM7QBZU.js +2957 -0
  13. package/dist/chunk-FKM7QBZU.js.map +1 -0
  14. package/dist/chunk-J5MCMN3S.js +27 -0
  15. package/dist/chunk-J5MCMN3S.js.map +1 -0
  16. package/dist/chunk-L4NPOJ36.cjs +180 -0
  17. package/dist/chunk-L4NPOJ36.cjs.map +1 -0
  18. package/dist/chunk-NZU7W256.cjs +29 -0
  19. package/dist/chunk-NZU7W256.cjs.map +1 -0
  20. package/dist/chunk-PQTZS7OA.js +147 -0
  21. package/dist/chunk-PQTZS7OA.js.map +1 -0
  22. package/dist/chunk-WD2ZNQA7.js +177 -0
  23. package/dist/chunk-WD2ZNQA7.js.map +1 -0
  24. package/dist/chunk-Y5FYF5KG.cjs +153 -0
  25. package/dist/chunk-Y5FYF5KG.cjs.map +1 -0
  26. package/dist/chunk-Z2FJ5TJC.cjs +82 -0
  27. package/dist/chunk-Z2FJ5TJC.cjs.map +1 -0
  28. package/dist/element.cjs +433 -0
  29. package/dist/element.cjs.map +1 -0
  30. package/dist/element.d.cts +158 -0
  31. package/dist/element.d.ts +158 -0
  32. package/dist/element.js +431 -0
  33. package/dist/element.js.map +1 -0
  34. package/dist/index.cjs +576 -0
  35. package/dist/index.cjs.map +1 -0
  36. package/dist/index.d.cts +80 -0
  37. package/dist/index.d.ts +80 -0
  38. package/dist/index.js +554 -0
  39. package/dist/index.js.map +1 -0
  40. package/dist/libav-http-reader-FPYDBMYK.cjs +16 -0
  41. package/dist/libav-http-reader-FPYDBMYK.cjs.map +1 -0
  42. package/dist/libav-http-reader-NQJVY273.js +3 -0
  43. package/dist/libav-http-reader-NQJVY273.js.map +1 -0
  44. package/dist/libav-import-2JURFHEW.js +8 -0
  45. package/dist/libav-import-2JURFHEW.js.map +1 -0
  46. package/dist/libav-import-GST2AMPL.cjs +30 -0
  47. package/dist/libav-import-GST2AMPL.cjs.map +1 -0
  48. package/dist/libav-loader-KA2MAWLM.js +3 -0
  49. package/dist/libav-loader-KA2MAWLM.js.map +1 -0
  50. package/dist/libav-loader-ZHOERPHW.cjs +12 -0
  51. package/dist/libav-loader-ZHOERPHW.cjs.map +1 -0
  52. package/dist/player-BBwbCkdL.d.cts +365 -0
  53. package/dist/player-BBwbCkdL.d.ts +365 -0
  54. package/dist/source-SC6ZEQYR.cjs +28 -0
  55. package/dist/source-SC6ZEQYR.cjs.map +1 -0
  56. package/dist/source-ZFS4H7J3.js +3 -0
  57. package/dist/source-ZFS4H7J3.js.map +1 -0
  58. package/dist/variant-routing-GOHB2RZN.cjs +12 -0
  59. package/dist/variant-routing-GOHB2RZN.cjs.map +1 -0
  60. package/dist/variant-routing-JOBWXYKD.js +3 -0
  61. package/dist/variant-routing-JOBWXYKD.js.map +1 -0
  62. package/package.json +95 -0
  63. package/src/classify/index.ts +1 -0
  64. package/src/classify/rules.ts +214 -0
  65. package/src/convert/index.ts +2 -0
  66. package/src/convert/remux.ts +522 -0
  67. package/src/convert/transcode.ts +329 -0
  68. package/src/diagnostics.ts +99 -0
  69. package/src/element/avbridge-player.ts +576 -0
  70. package/src/element.ts +19 -0
  71. package/src/events.ts +71 -0
  72. package/src/index.ts +42 -0
  73. package/src/libav-stubs.d.ts +24 -0
  74. package/src/player.ts +455 -0
  75. package/src/plugins/builtin.ts +37 -0
  76. package/src/plugins/registry.ts +32 -0
  77. package/src/probe/avi.ts +242 -0
  78. package/src/probe/index.ts +59 -0
  79. package/src/probe/mediabunny.ts +194 -0
  80. package/src/strategies/fallback/audio-output.ts +293 -0
  81. package/src/strategies/fallback/clock.ts +7 -0
  82. package/src/strategies/fallback/decoder.ts +660 -0
  83. package/src/strategies/fallback/index.ts +170 -0
  84. package/src/strategies/fallback/libav-import.ts +27 -0
  85. package/src/strategies/fallback/libav-loader.ts +190 -0
  86. package/src/strategies/fallback/variant-routing.ts +43 -0
  87. package/src/strategies/fallback/video-renderer.ts +216 -0
  88. package/src/strategies/hybrid/decoder.ts +641 -0
  89. package/src/strategies/hybrid/index.ts +139 -0
  90. package/src/strategies/native.ts +107 -0
  91. package/src/strategies/remux/annexb.ts +112 -0
  92. package/src/strategies/remux/index.ts +79 -0
  93. package/src/strategies/remux/mse.ts +234 -0
  94. package/src/strategies/remux/pipeline.ts +254 -0
  95. package/src/subtitles/index.ts +91 -0
  96. package/src/subtitles/render.ts +62 -0
  97. package/src/subtitles/srt.ts +62 -0
  98. package/src/subtitles/vtt.ts +5 -0
  99. package/src/types-shim.d.ts +3 -0
  100. package/src/types.ts +360 -0
  101. package/src/util/codec-strings.ts +86 -0
  102. package/src/util/libav-http-reader.ts +315 -0
  103. package/src/util/source.ts +274 -0
@@ -0,0 +1,139 @@
import type { MediaContext, PlaybackSession } from "../../types.js";
import { VideoRenderer } from "../fallback/video-renderer.js";
import { AudioOutput } from "../fallback/audio-output.js";
import { startHybridDecoder, type HybridDecoderHandles } from "./decoder.js";

/**
 * Hybrid strategy session.
 *
 * Uses libav.js for demuxing + WebCodecs VideoDecoder for hardware-accelerated
 * video decode + libav.js software decode for audio. Same canvas + Web Audio
 * output as the fallback strategy.
 *
 * Falls back to the pure-WASM fallback strategy if WebCodecs fails (via the
 * onFatalError callback that the player wires to its escalation mechanism).
 */

// Minimum seconds of audio that must be buffered before waitForBuffer()
// reports ready; 0.3s absorbs scheduling jitter without noticeable latency.
const READY_AUDIO_BUFFER_SECONDS = 0.3;
// waitForBuffer() gives up after this long and returns normally, so playback
// is still attempted best-effort instead of hanging forever.
const READY_TIMEOUT_SECONDS = 10;

/**
 * Create a hybrid-strategy playback session bound to `target`.
 *
 * The session's clock is the audio output (`audio.now()`); the `<video>`
 * element's `currentTime`/`duration` are patched with accessors so the
 * unified player layer can keep reading/seeking through the element.
 *
 * @param ctx    Probed media context (source, tracks, duration).
 * @param target The `<video>` element owned by the player.
 * @returns A {@link PlaybackSession} for the "hybrid" strategy.
 * @throws Re-throws any `startHybridDecoder` failure after destroying the
 *         audio output and renderer created here.
 */
export async function createHybridSession(
  ctx: MediaContext,
  target: HTMLVideoElement,
): Promise<PlaybackSession> {
  // Normalize the source so URL inputs go through the libav HTTP block
  // reader instead of being buffered into memory.
  const { normalizeSource } = await import("../../util/source.js");
  const source = await normalizeSource(ctx.source);

  // Default to 30 fps when the probe reported no rate for the first video
  // track; the renderer takes this as its pacing hint.
  const fps = ctx.videoTracks[0]?.fps ?? 30;
  const audio = new AudioOutput();
  const renderer = new VideoRenderer(target, audio, fps);

  let handles: HybridDecoderHandles;
  try {
    handles = await startHybridDecoder({
      source,
      filename: ctx.name ?? "input.bin",
      context: ctx,
      renderer,
      audio,
    });
  } catch (err) {
    // The decoder never started, so we own cleanup of what we created here.
    audio.destroy();
    renderer.destroy();
    throw err;
  }

  // Patch <video> element for the unified player layer. `configurable: true`
  // lets destroy() delete these and restore the prototype accessors.
  // (doSeek is a hoisted function declaration, so referencing it here is safe.)
  Object.defineProperty(target, "currentTime", {
    configurable: true,
    get: () => audio.now(),
    set: (v: number) => { void doSeek(v); },
  });
  if (ctx.duration && Number.isFinite(ctx.duration)) {
    Object.defineProperty(target, "duration", {
      configurable: true,
      get: () => ctx.duration ?? NaN,
    });
  }

  // Poll until there is enough buffered audio (or the stream has no audio —
  // audio.isNoAudio()) AND the renderer holds at least one decoded frame, or
  // until the timeout lapses, in which case we return anyway.
  async function waitForBuffer(): Promise<void> {
    const start = performance.now();
    while (true) {
      const audioReady = audio.isNoAudio() || audio.bufferAhead() >= READY_AUDIO_BUFFER_SECONDS;
      if (audioReady && renderer.hasFrames()) {
        return;
      }
      if ((performance.now() - start) / 1000 > READY_TIMEOUT_SECONDS) return;
      await new Promise((r) => setTimeout(r, 50));
    }
  }

  // Seek pipeline: pause audio → seek the decoder → reset the audio clock to
  // the target → drop queued frames → (if we were playing) rebuffer + resume.
  async function doSeek(timeSec: number): Promise<void> {
    const wasPlaying = audio.isPlaying();
    await audio.pause().catch(() => {});
    // A failed decoder seek is logged but non-fatal: the session keeps its
    // previous position rather than tearing down.
    await handles.seek(timeSec).catch((err) =>
      console.warn("[avbridge] hybrid decoder seek failed:", err),
    );
    await audio.reset(timeSec);
    renderer.flush();
    if (wasPlaying) {
      await waitForBuffer();
      await audio.start();
    }
  }

  // Store the fatal error handler so the player can wire escalation
  let fatalErrorHandler: ((reason: string) => void) | null = null;
  handles.onFatalError((reason) => fatalErrorHandler?.(reason));

  return {
    strategy: "hybrid",

    async play() {
      if (!audio.isPlaying()) {
        await waitForBuffer();
        await audio.start();
      }
    },

    pause() {
      // Fire-and-forget: pause() is synchronous in the session interface.
      void audio.pause();
    },

    async seek(time) {
      await doSeek(time);
    },

    async setAudioTrack(_id) {
      // Post-MVP for hybrid strategy
    },

    async setSubtitleTrack(_id) {
      // Post-MVP for hybrid strategy
    },

    getCurrentTime() {
      // The audio output is the master clock for this strategy.
      return audio.now();
    },

    onFatalError(handler: (reason: string) => void) {
      fatalErrorHandler = handler;
    },

    async destroy() {
      await handles.destroy();
      renderer.destroy();
      audio.destroy();
      // Remove the patched accessors so the element behaves natively again.
      try {
        delete (target as unknown as Record<string, unknown>).currentTime;
        delete (target as unknown as Record<string, unknown>).duration;
      } catch { /* ignore */ }
    },

    getRuntimeStats() {
      return handles.stats();
    },
  };
}
@@ -0,0 +1,107 @@
1
+ import type { MediaContext, PlaybackSession } from "../types.js";
2
+
3
+ /**
4
+ * Simplest strategy: hand the source to the browser. Works for any
5
+ * MP4/WebM/MP3/etc. that the user agent already plays.
6
+ *
7
+ * The only complexity is that the source might be a `File`/`Blob` (use
8
+ * `URL.createObjectURL`), an `ArrayBuffer`/`Uint8Array` (wrap in a Blob first),
9
+ * or a string URL (assign directly).
10
+ */
11
+ export async function createNativeSession(
12
+ context: MediaContext,
13
+ video: HTMLVideoElement,
14
+ ): Promise<PlaybackSession> {
15
+ const { url, revoke } = sourceToVideoUrl(context.source);
16
+ video.src = url;
17
+
18
+ // Wait for metadata so the player resolves only once playback is actually
19
+ // ready. We expose errors via the player's "error" event, not by throwing
20
+ // here, because failure here often means we should escalate to remux.
21
+ await new Promise<void>((resolve, reject) => {
22
+ const onMeta = () => {
23
+ cleanup();
24
+ resolve();
25
+ };
26
+ const onError = () => {
27
+ cleanup();
28
+ reject(new Error(`<video> failed to load: ${video.error?.message ?? "unknown"}`));
29
+ };
30
+ const cleanup = () => {
31
+ video.removeEventListener("loadedmetadata", onMeta);
32
+ video.removeEventListener("error", onError);
33
+ };
34
+ video.addEventListener("loadedmetadata", onMeta);
35
+ video.addEventListener("error", onError);
36
+ });
37
+
38
+ let stats = { framesDecoded: 0, framesDropped: 0 };
39
+
40
+ return {
41
+ strategy: "native",
42
+ async play() {
43
+ await video.play();
44
+ },
45
+ pause() {
46
+ video.pause();
47
+ },
48
+ async seek(time) {
49
+ video.currentTime = time;
50
+ },
51
+ async setAudioTrack(id) {
52
+ // HTMLMediaElement.audioTracks is not exposed in all browsers, so we
53
+ // try-catch and no-op if not available.
54
+ const tracks = (video as unknown as { audioTracks?: { length: number; [i: number]: { id: string; enabled: boolean } } }).audioTracks;
55
+ if (!tracks) return;
56
+ for (let i = 0; i < tracks.length; i++) {
57
+ tracks[i].enabled = tracks[i].id === String(id) || i === id;
58
+ }
59
+ },
60
+ async setSubtitleTrack(id) {
61
+ const tracks = video.textTracks;
62
+ for (let i = 0; i < tracks.length; i++) {
63
+ tracks[i].mode = i === id ? "showing" : "disabled";
64
+ }
65
+ },
66
+ async destroy() {
67
+ video.pause();
68
+ video.removeAttribute("src");
69
+ video.load();
70
+ revoke?.();
71
+ },
72
+ getCurrentTime() {
73
+ return video.currentTime || 0;
74
+ },
75
+ getRuntimeStats() {
76
+ // getVideoPlaybackQuality is the standard hook; not all UAs implement it.
77
+ const q = (video as unknown as { getVideoPlaybackQuality?: () => VideoPlaybackQuality }).getVideoPlaybackQuality?.();
78
+ if (q) {
79
+ stats = {
80
+ framesDecoded: q.totalVideoFrames,
81
+ framesDropped: q.droppedVideoFrames,
82
+ };
83
+ }
84
+ return { ...stats, decoderType: "native" };
85
+ },
86
+ };
87
+ }
88
+
89
+ function sourceToVideoUrl(source: unknown): { url: string; revoke?: () => void } {
90
+ if (source instanceof Blob) {
91
+ const url = URL.createObjectURL(source);
92
+ return { url, revoke: () => URL.revokeObjectURL(url) };
93
+ }
94
+ if (source instanceof ArrayBuffer || source instanceof Uint8Array) {
95
+ const blob = new Blob([source as BlobPart]);
96
+ const url = URL.createObjectURL(blob);
97
+ return { url, revoke: () => URL.revokeObjectURL(url) };
98
+ }
99
+ if (typeof source === "string") return { url: source };
100
+ if (source instanceof URL) return { url: source.toString() };
101
+ throw new TypeError("native strategy: unsupported source type");
102
+ }
103
+
/**
 * Minimal local shape of the DOM `VideoPlaybackQuality` result, declared here
 * so the cast in `getRuntimeStats` type-checks without depending on lib.dom
 * providing it. Only the two fields actually read are modeled.
 */
interface VideoPlaybackQuality {
  // Cumulative frames the decoder has produced for this element.
  totalVideoFrames: number;
  // Cumulative frames dropped (decoded too late or skipped).
  droppedVideoFrames: number;
}
@@ -0,0 +1,112 @@
/**
 * H.264/HEVC bitstream conversion helpers.
 *
 * Demuxers from MP4-family containers (mediabunny) hand us packets in **AVCC**
 * format: each NAL unit prefixed with a 4-byte big-endian length.
 *
 * Demuxers from elementary-stream/AVI/TS hand us **Annex B**: NAL units
 * separated by `00 00 00 01` (or `00 00 01`) start codes.
 *
 * MSE expects AVCC inside fragmented MP4. So when the source side emits Annex
 * B, we need to convert before muxing. Going the other way (AVCC → Annex B) is
 * useful for feeding `VideoDecoder` configured with `description` omitted.
 */

// Canonical 4-byte Annex B start code (00 00 00 01). Never mutated, so it is
// safe to share by reference.
const START_CODE_4 = new Uint8Array([0, 0, 0, 1]);
17
+ /** True if the bytes look like Annex B (start with `00 00 00 01` or `00 00 01`). */
18
+ export function isAnnexB(bytes: Uint8Array): boolean {
19
+ if (bytes.length < 3) return false;
20
+ if (bytes[0] === 0 && bytes[1] === 0 && bytes[2] === 1) return true;
21
+ if (bytes.length >= 4 && bytes[0] === 0 && bytes[1] === 0 && bytes[2] === 0 && bytes[3] === 1) return true;
22
+ return false;
23
+ }
24
+
25
+ /**
26
+ * Walk an Annex B byte stream and yield each NAL unit (without start code).
27
+ * This is the standard byte-by-byte scan; no SIMD tricks because the typical
28
+ * frame is small.
29
+ */
30
+ export function* iterateAnnexBNalus(bytes: Uint8Array): Generator<Uint8Array> {
31
+ const length = bytes.length;
32
+ let i = 0;
33
+ let nalStart = -1;
34
+
35
+ while (i < length) {
36
+ // Look for start code at position i
37
+ let scLen = 0;
38
+ if (i + 3 < length && bytes[i] === 0 && bytes[i + 1] === 0 && bytes[i + 2] === 0 && bytes[i + 3] === 1) {
39
+ scLen = 4;
40
+ } else if (i + 2 < length && bytes[i] === 0 && bytes[i + 1] === 0 && bytes[i + 2] === 1) {
41
+ scLen = 3;
42
+ }
43
+
44
+ if (scLen > 0) {
45
+ if (nalStart >= 0) {
46
+ yield bytes.subarray(nalStart, i);
47
+ }
48
+ nalStart = i + scLen;
49
+ i += scLen;
50
+ } else {
51
+ i += 1;
52
+ }
53
+ }
54
+
55
+ if (nalStart >= 0 && nalStart < length) {
56
+ yield bytes.subarray(nalStart, length);
57
+ }
58
+ }
59
+
60
+ /**
61
+ * Convert an Annex B byte stream to AVCC. Each NALU is prefixed with its
62
+ * 4-byte big-endian length.
63
+ */
64
+ export function annexBToAvcc(annexB: Uint8Array): Uint8Array {
65
+ const nalus: Uint8Array[] = [];
66
+ let total = 0;
67
+ for (const nal of iterateAnnexBNalus(annexB)) {
68
+ nalus.push(nal);
69
+ total += 4 + nal.length;
70
+ }
71
+ const out = new Uint8Array(total);
72
+ let off = 0;
73
+ for (const nal of nalus) {
74
+ const len = nal.length;
75
+ out[off++] = (len >>> 24) & 0xff;
76
+ out[off++] = (len >>> 16) & 0xff;
77
+ out[off++] = (len >>> 8) & 0xff;
78
+ out[off++] = len & 0xff;
79
+ out.set(nal, off);
80
+ off += len;
81
+ }
82
+ return out;
83
+ }
84
+
85
+ /**
86
+ * Convert AVCC (4-byte length-prefixed) NALUs to Annex B. Each NALU is
87
+ * prefixed with `00 00 00 01`.
88
+ */
89
+ export function avccToAnnexB(avcc: Uint8Array): Uint8Array {
90
+ const out: Uint8Array[] = [];
91
+ let total = 0;
92
+ let i = 0;
93
+ while (i + 4 <= avcc.length) {
94
+ const len =
95
+ (avcc[i] << 24) | (avcc[i + 1] << 16) | (avcc[i + 2] << 8) | avcc[i + 3];
96
+ i += 4;
97
+ if (i + len > avcc.length) {
98
+ throw new Error(`avccToAnnexB: NAL length ${len} overflows buffer at offset ${i}`);
99
+ }
100
+ out.push(START_CODE_4);
101
+ out.push(avcc.subarray(i, i + len));
102
+ total += 4 + len;
103
+ i += len;
104
+ }
105
+ const merged = new Uint8Array(total);
106
+ let off = 0;
107
+ for (const chunk of out) {
108
+ merged.set(chunk, off);
109
+ off += chunk.length;
110
+ }
111
+ return merged;
112
+ }
@@ -0,0 +1,79 @@
1
+ import type { MediaContext, PlaybackSession } from "../../types.js";
2
+ import { createRemuxPipeline, type RemuxPipeline } from "./pipeline.js";
3
+
4
+ /**
5
+ * Strategy entry: build the remux pipeline, then expose a {@link PlaybackSession}
6
+ * that delegates to the underlying `<video>` element for playback control and
7
+ * to the pipeline for source-side seek invalidation.
8
+ */
9
+ export async function createRemuxSession(
10
+ context: MediaContext,
11
+ video: HTMLVideoElement,
12
+ ): Promise<PlaybackSession> {
13
+ let pipeline: RemuxPipeline;
14
+ try {
15
+ pipeline = await createRemuxPipeline(context, video);
16
+ } catch (err) {
17
+ throw new Error(
18
+ `remux strategy failed to start: ${(err as Error).message}. The container or codec combination is not supported by mediabunny + MSE on this browser.`,
19
+ );
20
+ }
21
+
22
+ // Don't pump yet — wait for the first play() or seek() to start from the
23
+ // right position. The player's strategy-switch flow calls seek(currentTime)
24
+ // immediately after creation, so pumping from 0 here would be wasted work.
25
+ let started = false;
26
+ let wantPlay = false;
27
+
28
+ return {
29
+ strategy: "remux",
30
+ async play() {
31
+ wantPlay = true;
32
+ if (!started) {
33
+ // First play — start the pump. The deferred seek in MseSink will
34
+ // call video.play() once data is available (via autoPlay flag).
35
+ started = true;
36
+ await pipeline.start(video.currentTime || 0, true);
37
+ return;
38
+ }
39
+ await video.play();
40
+ },
41
+ pause() {
42
+ wantPlay = false;
43
+ video.pause();
44
+ },
45
+ async seek(time) {
46
+ if (!started) {
47
+ started = true;
48
+ // autoPlay=true so playback starts as soon as data arrives at
49
+ // the seek target (handles the strategy-switch case where play()
50
+ // is called right after seek()).
51
+ await pipeline.seek(time, wantPlay);
52
+ return;
53
+ }
54
+ const wasPlaying = !video.paused;
55
+ await pipeline.seek(time, wasPlaying || wantPlay);
56
+ },
57
+ async setAudioTrack(_id) {
58
+ // v1: single-track output. Multi-audio remuxing is post-MVP.
59
+ },
60
+ async setSubtitleTrack(id) {
61
+ const tracks = video.textTracks;
62
+ for (let i = 0; i < tracks.length; i++) {
63
+ tracks[i].mode = i === id ? "showing" : "disabled";
64
+ }
65
+ },
66
+ getCurrentTime() {
67
+ return video.currentTime || 0;
68
+ },
69
+ async destroy() {
70
+ video.pause();
71
+ await pipeline.destroy();
72
+ video.removeAttribute("src");
73
+ video.load();
74
+ },
75
+ getRuntimeStats() {
76
+ return pipeline.stats();
77
+ },
78
+ };
79
+ }
@@ -0,0 +1,234 @@
/**
 * MediaSource Extensions plumbing. Wraps a `MediaSource` + single
 * `SourceBuffer` with an append queue that respects `updateend` backpressure.
 */

export interface MseSinkOptions {
  /** Full MIME string (container + codecs) passed to `addSourceBuffer`. */
  mime: string;
  /** Element whose `src` is pointed at the MediaSource object URL. */
  video: HTMLVideoElement;
  /** Called once the MediaSource is open and ready for appends. */
  onReady?: () => void;
}
/**
 * Append-queue wrapper around one `MediaSource` + one `SourceBuffer`.
 *
 * Producers call {@link append} freely; chunks are queued and flushed one at
 * a time from {@link pump}, which re-runs on every `updateend` so appends
 * respect SourceBuffer backpressure. The sink also owns two deferred actions
 * (seek and autoplay) that must wait until buffered data actually exists.
 */
export class MseSink {
  private mediaSource: MediaSource;
  private sourceBuffer: SourceBuffer | null = null;
  // FIFO of fMP4 chunks not yet handed to appendBuffer().
  private queue: ArrayBuffer[] = [];
  private endOfStreamCalled = false;
  private destroyed = false;
  // Settles once `sourceopen` fires: resolved when the SourceBuffer exists,
  // rejected if addSourceBuffer throws.
  // NOTE(review): pump() also calls rejectReady() on append errors, but by
  // then the promise has usually resolved, making the rejection a no-op —
  // such errors are effectively swallowed; confirm callers don't need them.
  private readyPromise: Promise<void>;
  private resolveReady!: () => void;
  private rejectReady!: (err: Error) => void;
  // Object URL pointing at `mediaSource`; revoked in destroy().
  private objectUrl: string;

  /**
   * Validates MSE/MIME support, points `video.src` at a fresh MediaSource
   * object URL, and arms the `sourceopen` handler that creates the
   * SourceBuffer and kicks the pump on every `updateend`.
   *
   * @throws If MSE is unavailable or the MIME string is unsupported.
   */
  constructor(private readonly options: MseSinkOptions) {
    if (typeof MediaSource === "undefined") {
      throw new Error("MSE not supported in this environment");
    }
    if (!MediaSource.isTypeSupported(options.mime)) {
      throw new Error(`MSE does not support MIME "${options.mime}" — cannot remux`);
    }

    this.mediaSource = new MediaSource();
    this.objectUrl = URL.createObjectURL(this.mediaSource);
    options.video.src = this.objectUrl;

    this.readyPromise = new Promise((resolve, reject) => {
      this.resolveReady = resolve;
      this.rejectReady = reject;
    });

    this.mediaSource.addEventListener("sourceopen", () => {
      try {
        this.sourceBuffer = this.mediaSource.addSourceBuffer(options.mime);
        // "segments" mode: timestamps come from the fragments themselves.
        this.sourceBuffer.mode = "segments";
        this.sourceBuffer.addEventListener("updateend", () => this.pump());
        this.resolveReady();
        options.onReady?.();
      } catch (err) {
        this.rejectReady(err instanceof Error ? err : new Error(String(err)));
      }
    });
  }

  /** Resolves once the SourceBuffer exists and appends may begin. */
  ready(): Promise<void> {
    return this.readyPromise;
  }

  /** Queue a chunk of fMP4 bytes (init segment or media segment). */
  append(chunk: ArrayBuffer | Uint8Array): void {
    if (this.destroyed) return;
    // Uint8Array views are copied (buffer.slice) so the caller may reuse or
    // transfer its underlying buffer after this call returns.
    const ab = chunk instanceof Uint8Array
      ? (chunk.buffer.slice(chunk.byteOffset, chunk.byteOffset + chunk.byteLength) as ArrayBuffer)
      : chunk;
    this.queue.push(ab);
    this.pump();
  }

  /**
   * Flush at most one queued chunk into the SourceBuffer, first applying any
   * deferred seek/autoplay once buffered data exists. Re-entered from
   * `updateend` after each append completes, and from append().
   */
  private pump(): void {
    const sb = this.sourceBuffer;
    if (!sb || sb.updating) return;

    // Apply deferred actions once the SourceBuffer has any data. Deferred
    // seek and deferred autoplay are independent — both can fire here, or
    // either alone. Setting `currentTime` before data exists causes the
    // browser to snap back to the nearest buffered range; calling `play()`
    // before data exists puts the video into a stuck waiting state.
    if (sb.buffered.length > 0) {
      if (this.pendingSeekTime !== null) {
        this.options.video.currentTime = this.pendingSeekTime;
        this.pendingSeekTime = null;
      } else if (!this.hasSnappedToFirstBuffered) {
        // First data arrival with no pending seek. If currentTime is
        // outside the first buffered range (typical for MPEG-TS sources
        // whose PTS doesn't start at 0), snap into the buffered range
        // so the video element doesn't wait forever for nonexistent data.
        const v = this.options.video;
        const firstStart = sb.buffered.start(0);
        const firstEnd = sb.buffered.end(0);
        if (v.currentTime < firstStart || v.currentTime > firstEnd) {
          v.currentTime = firstStart;
        }
        this.hasSnappedToFirstBuffered = true;
      }
      if (this.playOnSeek) {
        this.playOnSeek = false;
        this.options.video.play().catch(() => { /* ignore — autoplay may be blocked */ });
      }
    }

    const next = this.queue.shift();
    if (!next) return;
    try {
      sb.appendBuffer(next);
    } catch (err) {
      // QuotaExceededError → evict the oldest few seconds and retry once.
      if ((err as DOMException).name === "QuotaExceededError") {
        this.evict();
        try {
          sb.appendBuffer(next);
          return;
        } catch {
          /* fall through to error */
        }
      }
      this.rejectReady(err instanceof Error ? err : new Error(String(err)));
    }
  }

  /** Release already-played media far behind the playhead (quota relief). */
  private evict(): void {
    const sb = this.sourceBuffer;
    if (!sb || sb.buffered.length === 0) return;
    const start = sb.buffered.start(0);
    const current = this.options.video.currentTime;
    // Drop everything that's at least 10s behind the current position.
    if (current - start > 10) {
      try {
        sb.remove(start, current - 10);
      } catch {
        /* ignore */
      }
    }
  }

  /** Indicate the source is finished. Future seeks past the end will fail. */
  endOfStream(): void {
    if (this.endOfStreamCalled || this.destroyed) return;
    this.endOfStreamCalled = true;
    // Retries itself via one-shot `updateend` listeners until both the local
    // queue and the SourceBuffer are idle, then ends the MediaSource.
    const tryEnd = () => {
      if (this.queue.length > 0 || this.sourceBuffer?.updating) {
        // Wait for the queue to drain.
        this.sourceBuffer?.addEventListener("updateend", tryEnd, { once: true });
        return;
      }
      try {
        if (this.mediaSource.readyState === "open") {
          this.mediaSource.endOfStream();
        }
      } catch {
        /* ignore */
      }
    };
    tryEnd();
  }

  /** Seconds of media buffered ahead of the current playback position. */
  bufferedAhead(): number {
    const sb = this.sourceBuffer;
    if (!sb || sb.buffered.length === 0) return 0;
    const current = this.options.video.currentTime;
    // Only the range containing the playhead counts; 0 if in a gap.
    for (let i = 0; i < sb.buffered.length; i++) {
      if (sb.buffered.start(i) <= current && sb.buffered.end(i) > current) {
        return sb.buffered.end(i) - current;
      }
    }
    return 0;
  }

  /** Total seconds of media buffered across all ranges. */
  totalBuffered(): number {
    const sb = this.sourceBuffer;
    if (!sb || sb.buffered.length === 0) return 0;
    let total = 0;
    for (let i = 0; i < sb.buffered.length; i++) {
      total += sb.buffered.end(i) - sb.buffered.start(i);
    }
    return total;
  }

  /** Number of chunks waiting in the append queue. */
  queueLength(): number {
    return this.queue.length;
  }

  /** Time to seek to once the SourceBuffer has data at this position. */
  private pendingSeekTime: number | null = null;
  /** Whether to resume playback after the deferred seek completes. */
  private playOnSeek = false;
  /**
   * On the very first data arrival, if `currentTime` falls outside the first
   * buffered range, snap it to the start of that range. MPEG-TS sources
   * commonly start their PTS at a non-zero value (e.g. ~1.5s); without this
   * snap, the video element sits at `currentTime=0` waiting forever for
   * data that doesn't exist.
   */
  private hasSnappedToFirstBuffered = false;

  /** Request that playback resumes automatically once the deferred seek fires. */
  setPlayOnSeek(play: boolean): void {
    this.playOnSeek = play;
  }

  /**
   * Discard all buffered media and schedule a deferred seek. The actual
   * `video.currentTime` assignment happens in `pump()` once the SourceBuffer
   * has data at the target position — setting it earlier causes the browser
   * to snap back to the nearest buffered range.
   */
  invalidate(seekTime: number): void {
    const sb = this.sourceBuffer;
    // Clear the pending queue — stale fragments from the old pump position.
    this.queue = [];
    this.pendingSeekTime = seekTime;
    this.hasSnappedToFirstBuffered = true; // explicit seek overrides the auto-snap
    if (!sb || sb.buffered.length === 0) return;
    try {
      const start = sb.buffered.start(0);
      const end = sb.buffered.end(sb.buffered.length - 1);
      sb.remove(start, end);
    } catch {
      /* ignore — sourcebuffer may be in updating state */
    }
  }

  /** Tear down: drop the queue, end the stream if open, revoke the URL. */
  destroy(): void {
    this.destroyed = true;
    this.queue = [];
    try {
      if (this.mediaSource.readyState === "open") this.mediaSource.endOfStream();
    } catch {
      /* ignore */
    }
    URL.revokeObjectURL(this.objectUrl);
  }
}